diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 812fc3b139483c111167896c9573d41499e28f6b..0000000000000000000000000000000000000000 --- a/.coveragerc +++ /dev/null @@ -1,4 +0,0 @@ -[report] -omit = - */python?.?/* - */site-packages/nose/* diff --git a/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE/ISSUE_TEMPLATE.md similarity index 100% rename from ISSUE_TEMPLATE.md rename to .github/ISSUE_TEMPLATE/ISSUE_TEMPLATE.md diff --git a/.github/PULL_REQUEST_TEMPLATE/molnet_pr_template.md b/.github/PULL_REQUEST_TEMPLATE/molnet_pr_template.md new file mode 100644 index 0000000000000000000000000000000000000000..fbc6d8114e9fc8f3a8ba7faf15d4a7126acf612e --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/molnet_pr_template.md @@ -0,0 +1,12 @@ +### Template for pull request contributing a new dataset to MoleculeNet +Category: {Quantum Mechanics, Materials Science, Physical Chemistry, Biophysics, Physiology} +Dataset: {short name identifying dataset} +Data Type: {SMILES, 3D coordinates} +Task Type: {Regression, Classification} +\# Tasks: {integer} +\# Compounds: {integer} +Rec - Split†: {Random, Stratified, Scaffold, Time} +Rec - Metric†: {MAE, RMSE, R^2, PRC-AUC, ROC-AUC} +Reference: {MLA style reference.} + +† For details on recommended split types and metrics, refer to the [MolNet paper](https://arxiv.org/abs/1703.00564). 
\ No newline at end of file diff --git a/.gitignore b/.gitignore index cb4c067d4b0d9a3ae8b5091b0b96c155d17ce991..4f96b9172ed18d27cd651ab1a8fcd22a56a453aa 100644 --- a/.gitignore +++ b/.gitignore @@ -71,15 +71,15 @@ target/ datasets/2008-2011_USPTO_reactionSmiles_filtered.zip datasets/2008-2011_USPTO_reactionSmiles_filtered/ datasets/autodock_vina_1_1_2_mac_catalina_64bit/ -datasets/chembl_25-featurized/ -datasets/chembl_25.csv.gz +datasets/chembl_25-featurized/ +datasets/chembl_25.csv.gz datasets/delaney-featurized/ -datasets/from-pdbbind/ -datasets/kinase/ -datasets/pdbbind/ +datasets/from-pdbbind/ +datasets/kinase/ +datasets/pdbbind/ datasets/pdbbind_v2015.tar.gz datasets/qm7-featurized/ -datasets/qm7.csv +datasets/qm7.csv datasets/qm7.mat datasets/sider-featurized/ datasets/sider.csv.gz @@ -99,3 +99,6 @@ datasets/pdbbind_v2019_PP.tar.gz datasets/pdbbind_v2019_other_PL.tar.gz datasets/pdbbind_v2019_refined.tar.gz datasets/qm8.csv + +.vscode/ +.python-version diff --git a/.readthedocs.yml b/.readthedocs.yml index a36db8a13998d337a2e418dced58b9dbef272f78..b168698a98bdb272595d57cec147ad4e6e809a4d 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,11 +7,7 @@ version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: docs/conf.py - -# Build documentation with MkDocs -#mkdocs: -# configuration: mkdocs.yml + configuration: docs/source/conf.py # Optionally build your docs in additional formats such as PDF and ePub formats: all diff --git a/.style.yapf b/.style.yapf deleted file mode 100644 index 4861cafe6c397dd0703b8206cc5f5736abbc03c6..0000000000000000000000000000000000000000 --- a/.style.yapf +++ /dev/null @@ -1,3 +0,0 @@ -[style] -based_on_style = google -indent_width = 2 diff --git a/.travis.yml b/.travis.yml index 322fbd46be7629b408cdf684b443e4de928a8d07..366b99285bccfb01ee0af75e3f93f658822b176d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,49 +1,68 @@ jobs: include: - - name: "Python 3.6" + - name: Linux Python 3.6 
language: python - python: "3.6" + python: '3.6' sudo: required dist: xenial - - - name: "Python 3.7" + - name: Linux Python 3.7 language: python - python: "3.7" + python: '3.7' sudo: required dist: xenial - - - name: "Windows" - language: c # Not really, but travis doesn't support python on Windows - python: "3.7" + env: NIGHTLY_PKG_PUBLISH=true + - name: Windows Python 3.7 + language: c + python: '3.7' os: windows - + - name: Documentation + language: python + python: '3.7' + sudo: required + dist: xenial + env: CHECK_ONLY_DOCS=true +cache: pip install: - if [[ "$TRAVIS_OS_NAME" != "windows" ]]; then - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - export python_version=$TRAVIS_PYTHON_VERSION; - bash miniconda.sh -b -p $HOME/miniconda; - source "$HOME/miniconda/etc/profile.d/conda.sh"; + wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; + export python_version=$TRAVIS_PYTHON_VERSION; + bash miniconda.sh -b -p $HOME/miniconda; + source "$HOME/miniconda/etc/profile.d/conda.sh"; fi - - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - choco install miniconda3 --params="'/JustMe /AddToPath:1'"; - export PATH="/c/tools/miniconda3/:/c/tools/miniconda3/Scripts:/c/tools/miniconda3/Library/bin:$PATH"; - source /c/tools/miniconda3/etc/profile.d/conda.sh; + - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then + choco install miniconda3 --params="'/JustMe /AddToPath:1'"; + export PATH="/c/tools/miniconda3/:/c/tools/miniconda3/Scripts:/c/tools/miniconda3/Library/bin:$PATH"; + source /c/tools/miniconda3/etc/profile.d/conda.sh; fi - hash -r - conda config --set always_yes yes --set changeps1 no - conda update -q conda - - conda config --add channels http://conda.binstar.org/omnia - - bash scripts/install_deepchem_conda.sh deepchem + - bash scripts/install_deepchem_conda.sh cpu - conda activate deepchem - - pip install yapf==0.22.0 - - pip install coveralls - - python setup.py install + - pip 
install -e . script: - - pytest -m "not slow" --cov=deepchem deepchem + - if [[ "$CHECK_ONLY_DOCS" == "true" ]]; then + cd docs && pip install -r requirements.txt; + make clean html; + make doctest_tutorials; + make doctest_examples; + travis_terminate $?; + fi + - bash devtools/run_yapf.sh + - bash devtools/run_flake8.sh + - mypy -p deepchem + - pytest -v -m "not slow" --cov=deepchem deepchem - if [ $TRAVIS_PYTHON_VERSION == '3.7' ]; then - find ./deepchem | grep .py$ |xargs python -m doctest -v; + pytest -v --ignore-glob='deepchem/**/test*.py' --doctest-modules deepchem; fi - - bash devtools/travis-ci/test_format_code.sh after_success: - echo $TRAVIS_SECURE_ENV_VARS - coveralls +deploy: + provider: pypi + username: __token__ + password: + secure: b67LO8VZcoKEWo7gDlFdjS1yKUavCt578uAuXPyW6f+e+Tk/sEQRdkx1VYoZlQdfZQo8u4q+E3W184T+/j6ht65/cdy/HYH57LCQySjF/MY2M9+/lcP45aY7Z0F2QHeY9QgpRc8gKthGzgM/bHj2glxlEvT1diItEEoGqE2x/fw1K25cNOni08E4hqz0HPY1SXVwd8/9Z/t1YasrBcOjtJ8kcbyjnmeyhjfkaV/aTaAzuqh2MlqZTSz3dhwsBrZfZp86+8T2TgcoDSuIxCwb777QKW1QlvNyLEKlnfateKMYqrrP65oHrxXEEcHd/N3IH28Bz9wVnENjHLkGJ0vXyXyEWcJFe+V6T0k/8NkZamU4SZE5BM4v6mOdThs4l54vuFajctHDeGgIDjL55MfkDmkKd5lAvlWPwrdw8DERsmqetUfZ/TG7FE6/MT1puu2ffu3A9Ivcch5T46pojIggDWHHn9hUsc6iD3Ov7rVqd024Lzm9V8wXiDYU9EMqAu5lJQRIOO/hnr8Gn6zYRCE1n29MKuNJwauSHfdV/mBTRyOjZyWHSGNaiPw2hqE3tZrrIN4koEYaZiERRVnmVt8wMUTj40YglosTHYpL91SkDH/ResX1rtHKs4Am+R+MmcWULTUQ7UwEtqlsa3nVxTK9gfmJ0nX8Jhjtl2iRhVg5PP8= + edge: true + on: + condition: $NIGHTLY_PKG_PUBLISH = true diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 22580d24fb1b613e63b6f38a0f8a485ab42c987d..0000000000000000000000000000000000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -prune datasets -prune examples diff --git a/README.md b/README.md index 372acfc42c62daa0b48e2f09f8d7d017c8bb6668..7003166f568c03f9de0236a2f09a2a77340bedda 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,11 @@ [![Build 
Status](https://travis-ci.org/deepchem/deepchem.svg?branch=master)](https://travis-ci.org/deepchem/deepchem) [![Coverage Status](https://coveralls.io/repos/github/deepchem/deepchem/badge.svg?branch=master)](https://coveralls.io/github/deepchem/deepchem?branch=master) +[![Documentation Status](https://readthedocs.org/projects/deepchem/badge/?version=latest)](https://deepchem.readthedocs.io/en/latest/?badge=latest) [![Anaconda-Server Badge](https://anaconda.org/conda-forge/deepchem/badges/version.svg)](https://anaconda.org/conda-forge/deepchem) [![PyPI version](https://badge.fury.io/py/deepchem.svg)](https://badge.fury.io/py/deepchem) -[Website](https://deepchem.io/) | [Documentation (master)](https://deepchem.readthedocs.io/en/latest/)) | [Colab Tutorial](https://github.com/deepchem/deepchem/tree/master/examples/tutorials) | [Discussion Forum](https://forum.deepchem.io/) | [Gitter](https://gitter.im/deepchem/Lobby) +[Website](https://deepchem.io/) | [Documentation](https://deepchem.readthedocs.io/en/latest/) | [Colab Tutorial](https://github.com/deepchem/deepchem/tree/master/examples/tutorials) | [Discussion Forum](https://forum.deepchem.io/) | [Gitter](https://gitter.im/deepchem/Lobby) DeepChem aims to provide a high quality open-source toolchain that democratizes the use of deep-learning in drug discovery, @@ -15,11 +16,10 @@ materials science, quantum chemistry, and biology. 
- [Requirements](#requirements) - [Installation](#installation) - - [Install latest package with conda](#install-via-conda-recommendation) - - [Install latest package with pip (WIP)](#install-via-pip-wip) - - [Install from source](#install-from-source) - - [Install using a Docker](#install-using-a-docker) -- [FAQ and Troubleshooting](#faq-and-troubleshooting) + - [Stable version](#stable-version) + - [Nightly build version](#nightly-build-version) + - [Docker](#docker) + - [From source](#from-source) - [Getting Started](#getting-started) - [Contributing to DeepChem](/CONTRIBUTING.md) - [Code Style Guidelines](/CONTRIBUTING.md#code-style-guidelines) @@ -30,7 +30,7 @@ materials science, quantum chemistry, and biology. ## Requirements -DeepChem requires these packages on any condition. +DeepChem currently supports Python 3.6 through 3.7 and requires these packages on any condition. - [joblib](https://pypi.python.org/pypi/joblib) - [NumPy](https://numpy.org/) @@ -43,107 +43,49 @@ DeepChem requires these packages on any condition. ### Soft Requirements -DeepChem has a number of "soft" requirements. These are packages which are needed for various submodules of DeepChem but not for the package as a whole. - -- [BioPython](https://biopython.org/wiki/Documentation) -- [OpenAI Gym](https://gym.openai.com/) -- [matminer](https://hackingmaterials.lbl.gov/matminer/) -- [MDTraj](http://mdtraj.org/) -- [NetworkX](https://networkx.github.io/documentation/stable/index.html) -- [OpenMM](http://openmm.org/) -- [PDBFixer](https://github.com/pandegroup/pdbfixer) -- [Pillow](https://pypi.org/project/Pillow/) -- [pyGPGO](https://pygpgo.readthedocs.io/en/latest/) -- [Pymatgen](https://pymatgen.org/) -- [PyTorch](https://pytorch.org/) -- [RDKit](http://www.rdkit.org/docs/Install.html) -- [simdna](https://github.com/kundajelab/simdna) -- [XGBoost](https://xgboost.readthedocs.io/en/latest/) +DeepChem has a number of "soft" requirements. 
+If you face some errors like `ImportError: No module named XXXX`, you may need to install some packages. -## Installation - -### Install via conda (Recommendation) +Please check [the document](https://deepchem.readthedocs.io/en/latest/requirements.html##soft-requirements) about soft requirements. -RDKit is a soft requirement package, but many useful methods like molnet depend on it. -If you use conda, we recommend installing RDKit with deepchem. +## Installation -`deepchem>=2.4.0` +### Stable version -Coming soon... +**Caution!! : The latest stable version was published nearly a year ago. If you are a pip user or you face some errors, we recommend the nightly build version.** -`deepchem<2.4.0` +RDKit is a soft requirement package, but many useful methods like molnet depend on it. We recommend installing RDKit with deepchem. ```bash pip install tensorflow==1.14 -conda install -c rdkit -c conda-forge rdkit deepchem==2.3.0 +conda install -y -c conda-forge rdkit deepchem==2.3.0 ``` If you want GPU support: ```bash pip install tensorflow-gpu==1.14 -conda install -c rdkit -c conda-forge rdkit deepchem==2.3.0 -``` - -### Install via pip (WIP) - -You are able to try to install deepchem via pip using the following command. -However, pip installation is under development, so this command may not work well. - -`deepchem>=2.4.0` - -Coming soon... - -`deepchem<2.4.0` - -```bash -pip install pandas pillow scikit-learn==0.22 tensorflow==1.14 deepchem==2.2.1.dev54 -``` - -If you want GPU support: - -```bash -pip install pandas pillow scikit-learn==0.22 tensorflow-gpu==1.14 deepchem==2.2.1.dev54 +conda install -y -c conda-forge rdkit deepchem==2.3.0 ``` -### Install from source - -You can install deepchem in a new conda environment using the conda commands in `scripts/install_deepchem_conda.sh.` Installing via this script will ensure that you are **installing from the source**. -The following script requires `conda>=4.4` because it uses the `conda activate` command. 
(Please see the detail from [here](https://github.com/conda/conda/blob/a4c4feae404b2b378e106bd25f62cc8be15c768f/CHANGELOG.md#440-2017-12-20)) - -First, please clone the deepchem repository from GitHub. +### Nightly build version -```bash -git clone https://github.com/deepchem/deepchem.git -cd deepchem -``` - -Then, execute the shell script. +You install the nightly build version via pip. The nightly version is built by the HEAD of DeepChem. ```bash -bash scripts/install_deepchem_conda.sh deepchem -``` - -If you are using the Windows and the PowerShell: - -```ps1 -.\scripts\install_deepchem_conda.ps1 deepchem +pip install tensorflow==2.3.0 +pip install --pre deepchem ``` -Before activating deepchem environment, make sure conda has been initialized. -Check if there is a `(base)` in your command line. If not, use `conda init ` to activate it, then: +RDKit is a soft requirement package, but many useful methods like molnet depend on it. We recommend installing RDKit with deepchem if you use conda. ```bash -conda activate deepchem -python setup.py install -pytest -m "not slow" deepchem # optional +conda install -y -c conda-forge rdkit ``` -Check [this link](https://conda.io/projects/conda/en/latest/user-guide/install/index.html) for more information about the installation of conda environments. +### Docker -### Install using a Docker - -If you want to install using a docker, you can pull two kinds of images. +If you want to install deepchem using a docker, you can pull two kinds of images. DockerHub : https://hub.docker.com/repository/docker/deepchemio/deepchem - `deepchemio/deepchem:x.x.x` @@ -155,64 +97,19 @@ DockerHub : https://hub.docker.com/repository/docker/deepchemio/deepchem - The latest image is built every time we commit to the master branch - Dockerfile is put in `docker/master` directory -First, you pull the image you want to use. +You pull the image like this. ```bash docker pull deepchemio/deepchem:2.3.0 ``` -Then, you create a container based on the image. 
- -```bash -docker run --rm -it deepchemio/deepchem:2.3.0 -``` - -If you want GPU support: - -```bash -# If nvidia-docker is installed -nvidia-docker run --rm -it deepchemio/deepchem:2.3.0 -docker run --runtime nvidia --rm -it deepchemio/deepchem:2.3.0 - -# If nvidia-container-toolkit is installed -docker run --gpus all --rm -it deepchemio/deepchem:2.3.0 -``` +If you want to know docker usages with deepchem in more detail, please check [the document](https://deepchem.readthedocs.io/en/latest/installation.html#docker). -You are now in a docker container which deepchem was installed. You can start playing with it in the command line. +### From source -``` -(deepchem) root@xxxxxxxxxxxxx:~/mydir# python -Python 3.6.10 |Anaconda, Inc.| (default, May 8 2020, 02:54:21) -[GCC 7.3.0] on linux -Type "help", "copyright", "credits" or "license" for more information. ->>> import deepchem as dc -``` +If you try install all soft dependencies at once or contribute to deepchem, we recommend you should install deepchem from source. -If you want to check the tox21 benchmark: - -```bash -(deepchem) root@xxxxxxxxxxxxx:~/mydir# wget https://raw.githubusercontent.com/deepchem/deepchem/master/examples/benchmark.py -(deepchem) root@xxxxxxxxxxxxx:~/mydir# python benchmark.py -d tox21 -m graphconv -s random -``` - -## FAQ and Troubleshooting - -1. DeepChem currently supports Python 3.5 through 3.7, and is supported on 64 bit Linux and Mac OSX. Note that DeepChem is not currently maintained for older versions of Python or with other operating systems. -2. Question: I'm seeing some failures in my test suite having to do with MKL - `Intel MKL FATAL ERROR: Cannot load libmkl_avx.so or libmkl_def.so.` - - Answer: This is a general issue with the newest version of `scikit-learn` enabling MKL by default. This doesn't play well with many linux systems. See [BVLC/caffe#3884](https://github.com/BVLC/caffe/issues/3884) for discussions. 
The following seems to fix the issue - - ```bash - conda install nomkl numpy scipy scikit-learn numexpr - conda remove mkl mkl-service - ``` - -3. Note that when using Ubuntu 16.04 server or similar environments, you may need to ensure libxrender is provided via e.g.: - -```bash -sudo apt-get install -y libxrender-dev -``` +Please check [this introduction](https://deepchem.readthedocs.io/en/latest/installation.html#from-source). ## Getting Started @@ -246,4 +143,4 @@ To cite this book, please use this bibtex entry: ## Version -2.3.0 +2.4.0-rc diff --git a/contrib/rl/mcts.py b/contrib/rl/mcts.py index ecfc4758f05708d3934272208bcd2313f034b60e..4ed1b63ffb8d3811aebae27459d02478343d22d4 100644 --- a/contrib/rl/mcts.py +++ b/contrib/rl/mcts.py @@ -5,7 +5,10 @@ from deepchem.models.optimizers import Adam from deepchem.models.tensorgraph.layers import Feature, Weights, Label, Layer import numpy as np import tensorflow as tf -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection import copy import time @@ -109,7 +112,7 @@ class MCTS(object): self.n_search_episodes = n_search_episodes self.discount_factor = discount_factor self.value_weight = value_weight - self._state_is_list = isinstance(env.state_shape[0], collections.Sequence) + self._state_is_list = isinstance(env.state_shape[0], SequenceCollection) if optimizer is None: self._optimizer = Adam(learning_rate=0.001, beta1=0.9, beta2=0.999) else: diff --git a/contrib/tensorflow_models/robust_multitask.py b/contrib/tensorflow_models/robust_multitask.py index e5d05c05acc3e0396d6ed2c146b0a3477a5d9268..9ce0e61ba6c47170b39789f9f1fa149c23f99581 100644 --- a/contrib/tensorflow_models/robust_multitask.py +++ b/contrib/tensorflow_models/robust_multitask.py @@ -6,6 +6,11 @@ import warnings import numpy as np import tensorflow as tf +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import 
Sequence as SequenceCollection + from deepchem.nn import model_ops class RobustMultitaskClassifier(MultiTaskClassifier): @@ -73,15 +78,15 @@ class RobustMultitaskClassifier(MultiTaskClassifier): n_layers = len(layer_sizes) assert n_layers == len(bypass_layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * n_layers - if not isinstance(bypass_weight_init_stddevs, collections.Sequence): + if not isinstance(bypass_weight_init_stddevs, SequenceCollection): bypass_weight_init_stddevs = [bypass_weight_init_stddevs] * n_layers - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * n_layers - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers # Add the input features. diff --git a/deepchem/feat/adjacency_fingerprints.py b/contrib/torch/adjacency_fingerprint.py similarity index 100% rename from deepchem/feat/adjacency_fingerprints.py rename to contrib/torch/adjacency_fingerprint.py diff --git a/contrib/vina_model/vina_model.py b/contrib/vina_model/vina_model.py index 470cb8293ebbd2b4885887104e9d22dd9bce54f5..bf1c90630103d2281d6b6978b3b78e0aefaf9008 100644 --- a/contrib/vina_model/vina_model.py +++ b/contrib/vina_model/vina_model.py @@ -394,7 +394,7 @@ def h(d): class VinaModel(Model): def __init__(self, logdir=None, batch_size=50): - """Vina models. + r"""Vina models. .. 
math:: c = \sum_{i < j} f_{t_i,t_j}(r_{ij}) diff --git a/datasets/.gitignore b/datasets/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1e90aa753f8a63b4e1c19689daeec4845da27643 --- /dev/null +++ b/datasets/.gitignore @@ -0,0 +1,11 @@ +PPB.csv +SAMPL.csv +bace.csv +bace_c-featurized/ +chembl-featurized/ +clintox-featurized/ +clintox.csv.gz +core_grid.json +ppb-featurized/ +sampl-featurized/ +atom_init.json diff --git a/deepchem/__init__.py b/deepchem/__init__.py index 04402ba3b1a03861168a91d44b1c97d80a33b475..6e99dc317321d02b4ad4f3db803ee1c38f3652d1 100644 --- a/deepchem/__init__.py +++ b/deepchem/__init__.py @@ -1,7 +1,9 @@ """ Imports all submodules """ -__version__ = '2.3.0' + +# If you push the tag, please remove `.dev` +__version__ = '2.4.0-rc1.dev' import deepchem.data import deepchem.feat diff --git a/deepchem/data/__init__.py b/deepchem/data/__init__.py index 51253ba622b2d988a32ae178c56bf2eb787a2590..400f60e936dc7ecb06b5e1e51137d46a61eb9727 100644 --- a/deepchem/data/__init__.py +++ b/deepchem/data/__init__.py @@ -1,6 +1,8 @@ """ Gathers all datasets in one place for convenient imports """ +# flake8: noqa + # TODO(rbharath): Get rid of * import from deepchem.data.datasets import pad_features from deepchem.data.datasets import pad_batch @@ -14,6 +16,8 @@ from deepchem.data.supports import * from deepchem.data.data_loader import DataLoader from deepchem.data.data_loader import CSVLoader from deepchem.data.data_loader import UserCSVLoader +from deepchem.data.data_loader import JsonLoader from deepchem.data.data_loader import SDFLoader from deepchem.data.data_loader import FASTALoader from deepchem.data.data_loader import ImageLoader +from deepchem.data.data_loader import InMemoryLoader diff --git a/deepchem/data/data_loader.py b/deepchem/data/data_loader.py index 25a0b55878746f9dc4c75d101dbf049bdd701686..9f935ff96ecd1a7d5e136496fd16af3b8962a8ee 100644 --- a/deepchem/data/data_loader.py +++ b/deepchem/data/data_loader.py @@ -2,27 
+2,27 @@ Process an input dataset into a format suitable for machine learning. """ import os -import gzip -import pandas as pd -import numpy as np -import csv -import numbers import tempfile +import zipfile import time -import sys import logging import warnings -from deepchem.utils.save import load_csv_files -from deepchem.utils.save import load_sdf_files -from deepchem.utils.genomics import encode_fasta_sequence -from deepchem.feat import UserDefinedFeaturizer -from deepchem.data import DiskDataset, NumpyDataset, ImageDataset -import zipfile +from typing import List, Optional, Tuple, Any, Sequence, Union, Iterator + +import pandas as pd +import numpy as np + +from deepchem.utils.typing import OneOrMany +from deepchem.utils.data_utils import load_image_files, load_csv_files, load_json_files, load_sdf_files +from deepchem.utils.genomics_utils import encode_bio_sequence +from deepchem.feat import UserDefinedFeaturizer, Featurizer +from deepchem.data import Dataset, DiskDataset, NumpyDataset, ImageDataset logger = logging.getLogger(__name__) -def _convert_df_to_numpy(df, tasks): +def _convert_df_to_numpy(df: pd.DataFrame, + tasks: List[str]) -> Tuple[np.ndarray, np.ndarray]: """Transforms a dataframe containing deepchem input into numpy arrays This is a private helper method intended to help parse labels and @@ -35,142 +35,28 @@ def _convert_df_to_numpy(df, tasks): ---------- df: pd.DataFrame Pandas dataframe with columns for all tasks - tasks: list + tasks: List[str] List of tasks + + Returns + ------- + Tuple[np.ndarray, np.ndarray] + The tuple is `(w, y)`. 
""" n_samples = df.shape[0] n_tasks = len(tasks) - time1 = time.time() y = np.hstack( [np.reshape(np.array(df[task].values), (n_samples, 1)) for task in tasks]) - time2 = time.time() - w = np.ones((n_samples, n_tasks)) - missing = np.zeros_like(y).astype(int) - feature_shape = None - - for ind in range(n_samples): - for task in range(n_tasks): - if y[ind, task] == "": - missing[ind, task] = 1 - - # ids = df[id_field].values - # Set missing data to have weight zero - for ind in range(n_samples): - for task in range(n_tasks): - if missing[ind, task]: - y[ind, task] = 0. - w[ind, task] = 0. + if y.dtype.kind in ['O', 'U']: + missing = (y == '') + y[missing] = 0 + w[missing] = 0 return y.astype(float), w.astype(float) -def _featurize_smiles_df(df, featurizer, field, log_every_n=1000): - """Featurize individual compounds in dataframe. - - Private helper that given a featurizer that operates on individual - chemical compounds or macromolecules, compute & add features for - that compound to the features dataframe - - Parameters - ---------- - df: pd.DataFrame - DataFrame that holds SMILES strings - featurizer: Featurizer - A featurizer object - field: str - The name of a column in `df` that holds SMILES strings - log_every_n: int, optional (default 1000) - Emit a logging statement every `log_every_n` rows. - """ - sample_elems = df[field].tolist() - - features = [] - from rdkit import Chem - from rdkit.Chem import rdmolfiles - from rdkit.Chem import rdmolops - for ind, elem in enumerate(sample_elems): - mol = Chem.MolFromSmiles(elem) - # TODO (ytz) this is a bandage solution to reorder the atoms - # so that they're always in the same canonical order. - # Presumably this should be correctly implemented in the - # future for graph mols. 
- if mol: - new_order = rdmolfiles.CanonicalRankAtoms(mol) - mol = rdmolops.RenumberAtoms(mol, new_order) - if ind % log_every_n == 0: - logger.info("Featurizing sample %d" % ind) - features.append(featurizer.featurize([mol])) - valid_inds = np.array( - [1 if elt.size > 0 else 0 for elt in features], dtype=bool) - features = [elt for (is_valid, elt) in zip(valid_inds, features) if is_valid] - return np.squeeze(np.array(features), axis=1), valid_inds - - -def _get_user_specified_features(df, featurizer): - """Extract and merge user specified features. - - Private helper methods that merges features included in dataset - provided by user into final features dataframe - - Three types of featurization here: - - 1) Molecule featurization - -) Smiles string featurization - -) Rdkit MOL featurization - 2) Complex featurization - -) PDB files for interacting molecules. - 3) User specified featurizations. - - Parameters - ---------- - df: pd.DataFrame - DataFrame that holds SMILES strings - featurizer: Featurizer - A featurizer object - """ - time1 = time.time() - df[featurizer.feature_fields] = df[featurizer.feature_fields].apply( - pd.to_numeric) - X_shard = df[featurizer.feature_fields].to_numpy() - time2 = time.time() - logger.info( - "TIMING: user specified processing took %0.3f s" % (time2 - time1)) - return X_shard - - -def _featurize_mol_df(df, featurizer, field, log_every_n=1000): - """Featurize individual compounds in dataframe. - - Used when processing .sdf files, so the 3-D structure should be - preserved. We use the rdkit "mol" object created from .sdf - instead of smiles string. Some featurizers such as - CoulombMatrix also require a 3-D structure. Featurizing from - .sdf is currently the only way to perform CM feautization. - - Parameters - ---------- - df: Pandas Dataframe - Should be created by dc.utils.save.load_sdf_files. - featurizer: dc.feat.MolecularFeaturizer - Featurizer for molecules. 
- log_every_n: int, optional - Controls how often logging statements are emitted. - """ - sample_elems = df[field].tolist() - - features = [] - for ind, mol in enumerate(sample_elems): - if ind % log_every_n == 0: - logger.info("Featurizing sample %d" % ind) - features.append(featurizer.featurize([mol])) - valid_inds = np.array( - [1 if elt.size > 0 else 0 for elt in features], dtype=bool) - features = [elt for (is_valid, elt) in zip(valid_inds, features) if is_valid] - return np.squeeze(np.array(features)), valid_inds - - class DataLoader(object): """Handles loading/featurizing of data from disk. @@ -191,7 +77,7 @@ class DataLoader(object): of `DataLoader` is specialized to handle one type of input data so you will have to pick the loader class suitable for your input data type. - + Note that it isn't necessary to use a data loader to process input data. You can directly use `Featurizer` objects to featurize provided input into numpy arrays, but note that this calculation @@ -201,7 +87,11 @@ class DataLoader(object): for you by performing this work under the hood. """ - def __init__(self, tasks, id_field=None, featurizer=None, log_every_n=1000): + def __init__(self, + tasks: List[str], + featurizer: Featurizer, + id_field: Optional[str] = None, + log_every_n: int = 1000): """Construct a DataLoader object. This constructor is provided as a template mainly. You @@ -209,17 +99,17 @@ class DataLoader(object): Parameters ---------- - tasks: list[str] + tasks: List[str] List of task names - id_field: str, optional + featurizer: Featurizer + Featurizer to use to process data. + id_field: str, optional (default None) Name of field that holds sample identifier. Note that the meaning of "field" depends on the input data type and can have a different meaning in different subclasses. For example, a CSV file could have a field as a column, and an SDF file could have a field as molecular property. 
- featurizer: dc.feat.Featurizer, optional - Featurizer to use to process data - log_every_n: int, optional + log_every_n: int, optional (default 1000) Writes a logging statement this often. """ if self.__class__ is DataLoader: @@ -236,7 +126,10 @@ class DataLoader(object): self.featurizer = featurizer self.log_every_n = log_every_n - def featurize(self, input_files, data_dir=None, shard_size=8192): + def featurize(self, + inputs: OneOrMany[Any], + data_dir: Optional[str] = None, + shard_size: Optional[int] = 8192) -> Dataset: """Featurize provided files and write to specified location. DEPRECATED: This method is now a wrapper for `create_dataset()` @@ -251,28 +144,32 @@ class DataLoader(object): Parameters ---------- - input_files: list - List of input filenames. - data_dir: str, optional + inputs: List + List of inputs to process. Entries can be filenames or arbitrary objects. + data_dir: str, default None Directory to store featurized dataset. - shard_size: int, optional + shard_size: int, optional (default 8192) Number of examples stored in each shard. Returns ------- - A `Dataset` object containing a featurized representation of data - from `input_files`. + Dataset + A `Dataset` object containing a featurized representation of data + from `inputs`. """ warnings.warn( - "featurize() is deprecated and has been renamed to create_dataset(). featurize() will be removed in DeepChem 3.0", - FutureWarning) - return self.create_dataset(input_files, data_dir, shard_size) + "featurize() is deprecated and has been renamed to create_dataset()." + "featurize() will be removed in DeepChem 3.0", FutureWarning) + return self.create_dataset(inputs, data_dir, shard_size) - def create_dataset(self, input_files, data_dir=None, shard_size=8192): + def create_dataset(self, + inputs: OneOrMany[Any], + data_dir: Optional[str] = None, + shard_size: Optional[int] = 8192) -> Dataset: """Creates and returns a `Dataset` object by featurizing provided files. 
- Reads in `input_files` and uses `self.featurizer` to featurize the - data in these input files. For large files, automatically shards + Reads in `inputs` and uses `self.featurizer` to featurize the + data in these inputs. For large files, automatically shards into smaller chunks of `shard_size` datapoints for convenience. Returns a `Dataset` object that contains the featurized dataset. @@ -283,27 +180,28 @@ class DataLoader(object): Parameters ---------- - input_files: list - List of input filenames. - data_dir: str, optional + inputs: List + List of inputs to process. Entries can be filenames or arbitrary objects. + data_dir: str, optional (default None) Directory to store featurized dataset. - shard_size: int, optional + shard_size: int, optional (default 8192) Number of examples stored in each shard. Returns ------- - A `Dataset` object containing a featurized representation of data - from `input_files`. + DiskDataset + A `DiskDataset` object containing a featurized representation of data + from `inputs`. """ logger.info("Loading raw samples now.") - logger.info("shard_size: %d" % shard_size) + logger.info("shard_size: %s" % str(shard_size)) - if not isinstance(input_files, list): - input_files = [input_files] + # Special case handling of single input + if not isinstance(inputs, list): + inputs = [inputs] def shard_generator(): - for shard_num, shard in enumerate( - self._get_shards(input_files, shard_size)): + for shard_num, shard in enumerate(self._get_shards(inputs, shard_size)): time1 = time.time() X, valid_inds = self._featurize_shard(shard) ids = shard[self.id_field].values @@ -327,11 +225,11 @@ class DataLoader(object): return DiskDataset.create_dataset(shard_generator(), data_dir, self.tasks) - def _get_shards(self, input_files, shard_size): + def _get_shards(self, inputs: List, shard_size: Optional[int]) -> Iterator: """Stub for children classes. 
Should implement a generator that walks over the source data in - `input_files` and returns a "shard" at a time. Here a shard is a + `inputs` and returns a "shard" at a time. Here a shard is a chunk of input data that can reasonably be handled in memory. For example, this may be a set of rows from a CSV file or a set of molecules from a SDF file. To re-use the @@ -340,148 +238,545 @@ class DataLoader(object): If you chose to override `create_dataset()` directly you don't need to override this helper method. - + Parameters ---------- - input_files: list - List of input filenames. + inputs: list + List of inputs to process. Entries can be filenames or arbitrary objects. shard_size: int, optional Number of examples stored in each shard. """ raise NotImplementedError - def _featurize_shard(self, shard): + def _featurize_shard(self, shard: Any): """Featurizes a shard of input data. Recall a shard is a chunk of input data that can reasonably be handled in memory. For example, this may be a set of rows from a CSV file or a set of molecules from a SDF file. Featurize this shard in memory and return the results. + + Parameters + ---------- + shard: Any + A chunk of input data """ raise NotImplementedError class CSVLoader(DataLoader): """ - Creates `Dataset` objects from input CSF files. + Creates `Dataset` objects from input CSV files. This class provides conveniences to load data from CSV files. It's possible to directly featurize data from CSV files using pandas, but this class may prove useful if you're processing large CSV files that you don't want to manipulate directly in memory. + + Examples + -------- + Let's suppose we have some smiles and labels + + >>> smiles = ["C", "CCC"] + >>> labels = [1.5, 2.3] + + Let's put these in a dataframe. + + >>> import pandas as pd + >>> df = pd.DataFrame(list(zip(smiles, labels)), columns=["smiles", "task1"]) + + Let's now write this to disk somewhere. We can now use `CSVLoader` to + process this CSV dataset. 
+ + >>> import tempfile + >>> import deepchem as dc + >>> with tempfile.NamedTemporaryFile(mode='w') as tmpfile: + ... df.to_csv(tmpfile.name) + ... loader = dc.data.CSVLoader(["task1"], feature_field="smiles", + ... featurizer=dc.feat.CircularFingerprint()) + ... dataset = loader.create_dataset(tmpfile.name) + >>> len(dataset) + 2 + + Of course in practice you should already have your data in a CSV file if + you're using `CSVLoader`. If your data is already in memory, use + `InMemoryLoader` instead. """ def __init__(self, - tasks, - smiles_field=None, - id_field=None, - featurizer=None, - log_every_n=1000): + tasks: List[str], + featurizer: Featurizer, + feature_field: Optional[str] = None, + id_field: Optional[str] = None, + smiles_field: Optional[str] = None, + log_every_n: int = 1000): """Initializes CSVLoader. Parameters ---------- - tasks: list[str] + tasks: List[str] List of task names - smiles_field: str, optional - Name of field that holds smiles string - id_field: str, optional - Name of field that holds sample identifier - featurizer: dc.feat.Featurizer, optional - Featurizer to use to process data - log_every_n: int, optional + featurizer: Featurizer + Featurizer to use to process data. + feature_field: str, optional (default None) + Field with data to be featurized. + id_field: str, optional, (default None) + CSV column that holds sample identifier + smiles_field: str, optional (default None) (DEPRECATED) + Name of field that holds smiles string. + log_every_n: int, optional (default 1000) Writes a logging statement this often. """ if not isinstance(tasks, list): raise ValueError("tasks must be a list.") + if smiles_field is not None: + logger.warning( + "smiles_field is deprecated and will be removed in a future version of DeepChem." + "Use feature_field instead.") + if feature_field is not None and smiles_field != feature_field: + raise ValueError( + "smiles_field and feature_field if both set must have the same value." 
+ ) + elif feature_field is None: + feature_field = smiles_field + self.tasks = tasks - self.smiles_field = smiles_field + self.feature_field = feature_field + self.id_field = id_field if id_field is None: - self.id_field = smiles_field + self.id_field = feature_field # Use features as unique ids if necessary else: self.id_field = id_field - #self.mol_field = mol_field self.user_specified_features = None if isinstance(featurizer, UserDefinedFeaturizer): self.user_specified_features = featurizer.feature_fields self.featurizer = featurizer self.log_every_n = log_every_n - def _get_shards(self, input_files, shard_size): - """Defines a generator which returns data for each shard""" + def _get_shards(self, input_files: List[str], + shard_size: Optional[int]) -> Iterator[pd.DataFrame]: + """Defines a generator which returns data for each shard + + Parameters + ---------- + input_files: List[str] + List of filenames to process + shard_size: int, optional + The size of a shard of data to process at a time. + + Returns + ------- + Iterator[pd.DataFrame] + Iterator over shards + """ return load_csv_files(input_files, shard_size) - def _featurize_shard(self, shard): - """Featurizes a shard of an input dataframe.""" - return _featurize_smiles_df( - shard, - self.featurizer, - field=self.smiles_field, - log_every_n=self.log_every_n) + def _featurize_shard(self, + shard: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]: + """Featurizes a shard of an input dataframe. + + Parameters + ---------- + shard: pd.DataFrame + DataFrame that holds a shard of the input CSV file + + Returns + ------- + features: np.ndarray + Features computed from CSV file. + valid_inds: np.ndarray + Indices of rows in source CSV with valid data. 
+ """ + logger.info("About to featurize shard.") + if self.featurizer is None: + raise ValueError( + "featurizer must be specified in constructor to featurizer data/") + features = [elt for elt in self.featurizer(shard[self.feature_field])] + valid_inds = np.array( + [1 if np.array(elt).size > 0 else 0 for elt in features], dtype=bool) + features = [ + elt for (is_valid, elt) in zip(valid_inds, features) if is_valid + ] + return np.array(features), valid_inds class UserCSVLoader(CSVLoader): """ - Handles loading of CSV files with user-defined featurizers. + Handles loading of CSV files with user-defined features. + + This is a convenience class that allows for descriptors already present in a + CSV file to be extracted without any featurization necessary. + + Examples + -------- + Let's suppose we have some descriptors and labels. (Imagine that these + descriptors have been computed by an external program.) + + >>> desc1 = [1, 43] + >>> desc2 = [-2, -22] + >>> labels = [1.5, 2.3] + >>> ids = ["cp1", "cp2"] + + Let's put these in a dataframe. + + >>> import pandas as pd + >>> df = pd.DataFrame(list(zip(ids, desc1, desc2, labels)), columns=["id", "desc1", "desc2", "task1"]) + + Let's now write this to disk somewhere. We can now use `UserCSVLoader` to + process this CSV dataset. + + >>> import tempfile + >>> import deepchem as dc + >>> featurizer = dc.feat.UserDefinedFeaturizer(["desc1", "desc2"]) + >>> with tempfile.NamedTemporaryFile(mode='w') as tmpfile: + ... df.to_csv(tmpfile.name) + ... loader = dc.data.UserCSVLoader(["task1"], id_field="id", + ... featurizer=featurizer) + ... dataset = loader.create_dataset(tmpfile.name) + >>> len(dataset) + 2 + >>> dataset.X[0, 0] + 1 + + The difference between `UserCSVLoader` and `CSVLoader` is that our + descriptors (our features) have already been computed for us, but are spread + across multiple columns of the CSV file. 
+ + Of course in practice you should already have your data in a CSV file if + you're using `UserCSVLoader`. If your data is already in memory, use + `InMemoryLoader` instead. """ - def _get_shards(self, input_files, shard_size): - """Defines a generator which returns data for each shard""" + def _get_shards(self, input_files: List[str], + shard_size: Optional[int]) -> Iterator[pd.DataFrame]: + """Defines a generator which returns data for each shard + + Parameters + ---------- + input_files: List[str] + List of filenames to process + shard_size: int, optional + The size of a shard of data to process at a time. + + Returns + ------- + Iterator[pd.DataFrame] + Iterator over shards + """ return load_csv_files(input_files, shard_size) - def _featurize_shard(self, shard): - """Featurizes a shard of an input dataframe.""" + def _featurize_shard(self, + shard: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]: + """Featurizes a shard of an input dataframe. + + Parameters + ---------- + shard: pd.DataFrame + DataFrame that holds a shard of the input CSV file + + Returns + ------- + features: np.ndarray + Features extracted from CSV file. + valid_inds: np.ndarray + Indices of rows in source CSV with valid data. + """ assert isinstance(self.featurizer, UserDefinedFeaturizer) - X = _get_user_specified_features(shard, self.featurizer) - return (X, np.ones(len(X), dtype=bool)) + time1 = time.time() + feature_fields = self.featurizer.feature_fields + shard[feature_fields] = shard[feature_fields].apply(pd.to_numeric) + X_shard = shard[feature_fields].to_numpy() + time2 = time.time() + logger.info( + "TIMING: user specified processing took %0.3f s" % (time2 - time1)) + return (X_shard, np.ones(len(X_shard), dtype=bool)) -class SDFLoader(DataLoader): +class JsonLoader(DataLoader): """ - Creates `Dataset` from SDF input files. + Creates `Dataset` objects from input json files. + + This class provides conveniences to load data from json files. 
+ It's possible to directly featurize data from json files using + pandas, but this class may prove useful if you're processing + large json files that you don't want to manipulate directly in + memory. + + It is meant to load JSON files formatted as "records" in line + delimited format, which allows for sharding. + ``list like [{column -> value}, ... , {column -> value}]``. + + Examples + -------- + >> import pandas as pd + >> df = pd.DataFrame(some_data) + >> df.columns.tolist() + .. ['sample_data', 'sample_name', 'weight', 'task'] + >> df.to_json('file.json', orient='records', lines=True) + >> loader = JsonLoader(tasks=['task'], feature_field='sample_data', + label_field='task', weight_field='weight', id_field='sample_name') + >> dataset = loader.create_dataset('file.json') - This class provides conveniences to load data from SDF files. """ - def __init__(self, tasks, sanitize=False, featurizer=None, log_every_n=1000): + def __init__(self, + tasks: List[str], + feature_field: str, + featurizer: Featurizer, + label_field: Optional[str] = None, + weight_field: Optional[str] = None, + id_field: Optional[str] = None, + log_every_n: int = 1000): + """Initializes JsonLoader. + + Parameters + ---------- + tasks: List[str] + List of task names + feature_field: str + JSON field with data to be featurized. + featurizer: Featurizer + Featurizer to use to process data + label_field: str, optional (default None) + Field with target variables. + weight_field: str, optional (default None) + Field with weights. + id_field: str, optional (default None) + Field for identifying samples. + log_every_n: int, optional (default 1000) + Writes a logging statement this often. 
+ """ + if not isinstance(tasks, list): + raise ValueError("Tasks must be a list.") + self.tasks = tasks + self.feature_field = feature_field + self.label_field = label_field + self.weight_field = weight_field + self.id_field = id_field + + self.user_specified_features = None + if isinstance(featurizer, UserDefinedFeaturizer): + self.user_specified_features = featurizer.feature_fields + self.featurizer = featurizer + self.log_every_n = log_every_n + + def create_dataset(self, + input_files: OneOrMany[str], + data_dir: Optional[str] = None, + shard_size: Optional[int] = 8192) -> DiskDataset: + """Creates a `Dataset` from input JSON files. + + Parameters + ---------- + input_files: OneOrMany[str] + List of JSON filenames. + data_dir: Optional[str], default None + Name of directory where featurized data is stored. + shard_size: int, optional (default 8192) + Shard size when loading data. + + Returns + ------- + DiskDataset + A `DiskDataset` object containing a featurized representation of data + from `input_files`. + """ + if not isinstance(input_files, list): + try: + if isinstance(input_files, str): + input_files = [input_files] + else: + input_files = list(input_files) + except TypeError: + raise ValueError( + "input_files is of an unrecognized form. Must be one filename or a list of filenames." + ) + + def shard_generator(): + """Yield X, y, w, and ids for shards.""" + for shard_num, shard in enumerate( + self._get_shards(input_files, shard_size)): + + time1 = time.time() + X, valid_inds = self._featurize_shard(shard) + if self.id_field: + ids = shard[self.id_field].values + else: + ids = np.ones(len(valid_inds)) + ids = ids[valid_inds] + + if len(self.tasks) > 0: + # Featurize task results if they exist. + y, w = _convert_df_to_numpy(shard, self.tasks) + + if self.label_field: + y = shard[self.label_field] + if self.weight_field: + w = shard[self.weight_field] + + # Filter out examples where featurization failed. 
+ y, w = (y[valid_inds], w[valid_inds]) + assert len(X) == len(ids) == len(y) == len(w) + else: + # For prospective data where results are unknown, it + # makes no sense to have y values or weights. + y, w = (None, None) + assert len(X) == len(ids) + + time2 = time.time() + logger.info("TIMING: featurizing shard %d took %0.3f s" % + (shard_num, time2 - time1)) + yield X, y, w, ids + + return DiskDataset.create_dataset(shard_generator(), data_dir, self.tasks) + + def _get_shards(self, input_files: List[str], + shard_size: Optional[int]) -> Iterator[pd.DataFrame]: + """Defines a generator which returns data for each shard + + Parameters + ---------- + input_files: List[str] + List of filenames to process + shard_size: int, optional + The size of a shard of data to process at a time. + + Returns + ------- + Iterator[pd.DataFrame] + Iterator over shards + """ + return load_json_files(input_files, shard_size) + + def _featurize_shard(self, + shard: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]: + """Featurizes a shard of an input dataframe. + + Helper that computes features for the given shard of data. + + Parameters + ---------- + shard: pd.DataFrame + DataFrame that holds data to be featurized. + + Returns + ------- + features: np.ndarray + Array of feature vectors. Note that samples for which featurization has + failed will be filtered out. + valid_inds: np.ndarray + Boolean values indicating successful featurization for corresponding + sample in the source. 
+ """ + logger.info("About to featurize shard.") + if self.featurizer is None: + raise ValueError( + "featurizer must be specified in constructor to featurizer data/") + features = [elt for elt in self.featurizer(shard[self.feature_field])] + valid_inds = np.array( + [1 if np.array(elt).size > 0 else 0 for elt in features], dtype=bool) + features = [ + elt for (is_valid, elt) in zip(valid_inds, features) if is_valid + ] + return np.array(features), valid_inds + + +class SDFLoader(DataLoader): + """Creates a `Dataset` object from SDF input files. + + This class provides conveniences to load and featurize data from + Structure Data Files (SDFs). SDF is a standard format for structural + information (3D coordinates of atoms and bonds) of molecular compounds. + + Examples + -------- + >>> import deepchem as dc + >>> import os + >>> current_dir = os.path.dirname(os.path.realpath(__file__)) + >>> featurizer = dc.feat.CircularFingerprint(size=16) + >>> loader = dc.data.SDFLoader(["LogP(RRCK)"], featurizer=featurizer, sanitize=True) + >>> dataset = loader.create_dataset(os.path.join(current_dir, "tests", "membrane_permeability.sdf")) # doctest:+ELLIPSIS + >>> len(dataset) + 2 + """ + + def __init__(self, + tasks: List[str], + featurizer: Featurizer, + sanitize: bool = False, + log_every_n: int = 1000): """Initialize SDF Loader Parameters ---------- tasks: list[str] List of tasknames. These will be loaded from the SDF file. - sanitize: bool, optional - Whether to sanitize molecules. - featurizer: dc.feat.Featurizer, optional + featurizer: Featurizer Featurizer to use to process data - log_every_n: int, optional + sanitize: bool, optional (default False) + Whether to sanitize molecules. + log_every_n: int, optional (default 1000) Writes a logging statement this often. 
""" self.featurizer = featurizer self.sanitize = sanitize self.tasks = tasks - # The field in which dc.utils.save.load_sdf_files stores - # RDKit mol objects + # The field in which dc.utils.save.load_sdf_files stores RDKit mol objects self.mol_field = "mol" - # The field in which load_sdf_files return value stores - # smiles + # The field in which load_sdf_files return value stores smiles self.id_field = "smiles" self.log_every_n = log_every_n - def _get_shards(self, input_files, shard_size): - """Defines a generator which returns data for each shard""" - return load_sdf_files(input_files, self.sanitize, tasks=self.tasks) + def _get_shards(self, input_files: List[str], + shard_size: Optional[int]) -> Iterator[pd.DataFrame]: + """Defines a generator which returns data for each shard + + Parameters + ---------- + input_files: List[str] + List of filenames to process + shard_size: int, optional + The size of a shard of data to process at a time. + + Returns + ------- + Iterator[pd.DataFrame] + Iterator over shards + """ + return load_sdf_files( + input_files=input_files, + clean_mols=self.sanitize, + tasks=self.tasks, + shard_size=shard_size) + + def _featurize_shard(self, + shard: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]: + """Featurizes a shard of an input dataframe. + + Helper that computes features for the given shard of data. - def _featurize_shard(self, shard): - """Featurizes a shard of an input dataframe.""" - logger.info("Currently featurizing feature_type: %s" % - self.featurizer.__class__.__name__) - return _featurize_mol_df( - shard, - self.featurizer, - field=self.mol_field, - log_every_n=self.log_every_n) + Parameters + ---------- + shard: pd.DataFrame + DataFrame that holds data to be featurized. + + Returns + ------- + features: np.ndarray + Array of feature vectors. Note that samples for which featurization has + failed will be filtered out. 
+ valid_inds: np.ndarray + Boolean values indicating successful featurization for corresponding + sample in the source. + """ + features = [elt for elt in self.featurizer(shard[self.mol_field])] + valid_inds = np.array( + [1 if np.array(elt).size > 0 else 0 for elt in features], dtype=bool) + features = [ + elt for (is_valid, elt) in zip(valid_inds, features) if is_valid + ] + return np.array(features), valid_inds class FASTALoader(DataLoader): @@ -497,7 +792,10 @@ class FASTALoader(DataLoader): """Initialize loader.""" pass - def create_dataset(self, input_files, data_dir=None, shard_size=None): + def create_dataset(self, + input_files: OneOrMany[str], + data_dir: Optional[str] = None, + shard_size: Optional[int] = None) -> DiskDataset: """Creates a `Dataset` from input FASTA files. At present, FASTA support is limited and only allows for one-hot @@ -505,25 +803,26 @@ class FASTALoader(DataLoader): Parameters ---------- - input_files: list + input_files: List[str] List of fasta files. - data_dir: str, optional + data_dir: str, optional (default None) Name of directory where featurized data is stored. - shard_size: int, optional + shard_size: int, optional (default None) For now, this argument is ignored and each FASTA file gets its - own shard. + own shard. Returns ------- - A `Dataset` object containing a featurized representation of data - from `input_files`. + DiskDataset + A `DiskDataset` object containing a featurized representation of data + from `input_files`. """ - if not isinstance(input_files, list): + if isinstance(input_files, str): input_files = [input_files] def shard_generator(): for input_file in input_files: - X = encode_fasta_sequence(input_file) + X = encode_bio_sequence(input_file) ids = np.ones(len(X)) # (X, y, w, ids) yield X, None, None, ids @@ -540,7 +839,7 @@ class ImageLoader(DataLoader): traverse subdirectories which contain images. 
""" - def __init__(self, tasks=None): + def __init__(self, tasks: Optional[List[str]] = None): """Initialize image loader. At present, custom image featurizers aren't supported by this @@ -548,7 +847,7 @@ class ImageLoader(DataLoader): Parameters ---------- - tasks: list[str] + tasks: List[str], optional (default None) List of task names for image labels. """ if tasks is None: @@ -556,31 +855,55 @@ class ImageLoader(DataLoader): self.tasks = tasks def create_dataset(self, - input_files, - labels=None, - weights=None, - in_memory=False): + inputs: Union[OneOrMany[str], Tuple[Any]], + data_dir: Optional[str] = None, + shard_size: Optional[int] = 8192, + in_memory: bool = False) -> Dataset: """Creates and returns a `Dataset` object by featurizing provided image files and labels/weights. Parameters ---------- - input_files: list - Each file in this list should either be of a supported - image format (.png, .tif only for now) or of a compressed - folder of image files (only .zip for now). - labels: optional - If provided, a numpy ndarray of image labels - weights: optional - If provided, a numpy ndarray of image weights - in_memory: bool + inputs: `Union[OneOrMany[str], Tuple[Any]]` + The inputs provided should be one of the following + + - filename + - list of filenames + - Tuple (list of filenames, labels) + - Tuple (list of filenames, labels, weights) + + Each file in a given list of filenames should either be of a supported + image format (.png, .tif only for now) or of a compressed folder of + image files (only .zip for now). If `labels` or `weights` are provided, + they must correspond to the sorted order of all filenames provided, with + one label/weight per file. + data_dir: str, optional (default None) + Directory to store featurized dataset. + shard_size: int, optional (default 8192) + Shard size when loading data. + in_memory: bool, optioanl (default False) If true, return in-memory NumpyDataset. Else return ImageDataset. 
Returns ------- - A `Dataset` object containing a featurized representation of data - from `input_files`, `labels`, and `weights`. + Dataset + A `Dataset` object containing a featurized representation of data + from `input_files`, `labels`, and `weights`. """ - if not isinstance(input_files, list): + labels, weights = None, None + if isinstance(inputs, tuple): + if len(inputs) == 1: + input_files = inputs[0] + if isinstance(inputs, str): + input_files = [inputs] + elif len(inputs) == 2: + input_files, labels = inputs + elif len(inputs) == 3: + input_files, labels, weights = inputs + else: + raise ValueError("Input must be a tuple of length 1, 2, or 3") + else: + input_files = inputs + if isinstance(input_files, str): input_files = [input_files] image_files = [] @@ -616,26 +939,217 @@ class ImageLoader(DataLoader): raise ValueError("Unsupported file format") input_files = remainder + # Sort image files + image_files = sorted(image_files) + if in_memory: - return NumpyDataset( - self.load_img(image_files), y=labels, w=weights, ids=image_files) + if data_dir is None: + return NumpyDataset( + load_image_files(image_files), y=labels, w=weights, ids=image_files) + else: + dataset = DiskDataset.from_numpy( + load_image_files(image_files), + y=labels, + w=weights, + ids=image_files, + tasks=self.tasks, + data_dir=data_dir) + if shard_size is not None: + dataset.reshard(shard_size) + return dataset else: return ImageDataset(image_files, y=labels, w=weights, ids=image_files) - @staticmethod - def load_img(image_files): - from PIL import Image - images = [] - for image_file in image_files: - _, extension = os.path.splitext(image_file) - extension = extension.lower() - if extension == ".png": - image = np.array(Image.open(image_file)) - images.append(image) - elif extension == ".tif": - im = Image.open(image_file) - imarray = np.array(im) - images.append(imarray) - else: - raise ValueError("Unsupported image filetype for %s" % image_file) - return np.array(images) + +class 
InMemoryLoader(DataLoader): + """Facilitate Featurization of In-memory objects. + + When featurizing a dataset, it's often the case that the initial set of + data (pre-featurization) fits handily within memory. (For example, perhaps + it fits within a column of a pandas DataFrame.) In this case, it would be + convenient to directly be able to featurize this column of data. However, + the process of featurization often generates large arrays which quickly eat + up available memory. This class provides convenient capabilities to process + such in-memory data by checkpointing generated features periodically to + disk. + + Example + ------- + Here's an example with only datapoints and no labels or weights. + + >>> import deepchem as dc + >>> smiles = ["C", "CC", "CCC", "CCCC"] + >>> featurizer = dc.feat.CircularFingerprint() + >>> loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + >>> dataset = loader.create_dataset(smiles, shard_size=2) + >>> len(dataset) + 4 + + Here's an example with both datapoints and labels + + >>> import deepchem as dc + >>> smiles = ["C", "CC", "CCC", "CCCC"] + >>> labels = [1, 0, 1, 0] + >>> featurizer = dc.feat.CircularFingerprint() + >>> loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + >>> dataset = loader.create_dataset(zip(smiles, labels), shard_size=2) + >>> len(dataset) + 4 + + Here's an example with datapoints, labels, weights and ids all provided. 
+ + >>> import deepchem as dc + >>> smiles = ["C", "CC", "CCC", "CCCC"] + >>> labels = [1, 0, 1, 0] + >>> weights = [1.5, 0, 1.5, 0] + >>> ids = ["C", "CC", "CCC", "CCCC"] + >>> featurizer = dc.feat.CircularFingerprint() + >>> loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + >>> dataset = loader.create_dataset(zip(smiles, labels, weights, ids), shard_size=2) + >>> len(dataset) + 4 + + """ + + def create_dataset(self, + inputs: Sequence[Any], + data_dir: Optional[str] = None, + shard_size: Optional[int] = 8192) -> DiskDataset: + """Creates and returns a `Dataset` object by featurizing provided files. + + Reads in `inputs` and uses `self.featurizer` to featurize the + data in these input files. For large files, automatically shards + into smaller chunks of `shard_size` datapoints for convenience. + Returns a `Dataset` object that contains the featurized dataset. + + This implementation assumes that the helper methods `_get_shards` + and `_featurize_shard` are implemented and that each shard + returned by `_get_shards` is a pandas dataframe. You may choose + to reuse or override this method in your subclass implementations. + + Parameters + ---------- + inputs: Sequence[Any] + List of inputs to process. Entries can be arbitrary objects so long as + they are understood by `self.featurizer` + data_dir: str, optional (default None) + Directory to store featurized dataset. + shard_size: int, optional (default 8192) + Number of examples stored in each shard. + + Returns + ------- + DiskDataset + A `DiskDataset` object containing a featurized representation of data + from `inputs`. 
+ """ + logger.info("Loading raw samples now.") + logger.info("shard_size: %s" % str(shard_size)) + + if not isinstance(inputs, list): + try: + inputs = list(inputs) + except TypeError: + inputs = [inputs] + + def shard_generator(): + global_index = 0 + for shard_num, shard in enumerate(self._get_shards(inputs, shard_size)): + time1 = time.time() + X, y, w, ids = self._featurize_shard(shard, global_index) + global_index += len(shard) + + time2 = time.time() + logger.info("TIMING: featurizing shard %d took %0.3f s" % + (shard_num, time2 - time1)) + yield X, y, w, ids + + return DiskDataset.create_dataset(shard_generator(), data_dir, self.tasks) + + def _get_shards(self, inputs: List, + shard_size: Optional[int]) -> Iterator[pd.DataFrame]: + """Break up input into shards. + + Parameters + ---------- + inputs: List + Each entry in this list must be of the form `(featurization_input, + label, weight, id)` or `(featurization_input, label, weight)` or + `(featurization_input, label)` or `featurization_input` for one + datapoint, where `featurization_input` is any input that is recognized + by `self.featurizer`. + shard_size: int, optional + The size of shard to generate. + + Returns + ------- + Iterator[pd.DataFrame] + Iterator which iterates over shards of data. + """ + current_shard: List = [] + for i, datapoint in enumerate(inputs): + if i != 0 and shard_size is not None and i % shard_size == 0: + shard_data = current_shard + current_shard = [] + yield shard_data + current_shard.append(datapoint) + yield current_shard + + # FIXME: Signature of "_featurize_shard" incompatible with supertype "DataLoader" + def _featurize_shard( # type: ignore[override] + self, shard: List, global_index: int + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Featurizes a shard of an input data. 
+ + Parameters + ---------- + shard: List + List each entry of which must be of the form `(featurization_input, + label, weight, id)` or `(featurization_input, label, weight)` or + `(featurization_input, label)` or `featurization_input` for one + datapoint, where `featurization_input` is any input that is recognized + by `self.featurizer`. + global_index: int + The starting index for this shard in the full set of provided inputs + + Returns + ------ + Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray] + The tuple is `(X, y, w, ids)`. All values are numpy arrays. + """ + features = [] + labels = [] + weights = [] + ids = [] + n_tasks = len(self.tasks) + for i, entry in enumerate(shard): + if not isinstance(entry, tuple): + entry = (entry,) + if len(entry) > 4: + raise ValueError( + "Entry is malformed and must be of length 1-4 containing featurization_input" + "and optionally label, weight, and id.") + if len(entry) == 4: + featurization_input, label, weight, entry_id = entry + elif len(entry) == 3: + featurization_input, label, weight = entry + entry_id = global_index + i + elif len(entry) == 2: + featurization_input, label = entry + weight = np.ones((n_tasks), np.float32) + entry_id = global_index + i + elif len(entry) == 1: + featurization_input = entry + label = np.zeros((n_tasks), np.float32) + weight = np.zeros((n_tasks), np.float32) + entry_id = global_index + i + feature = self.featurizer(featurization_input) + features.append(feature) + weights.append(weight) + labels.append(label) + ids.append(entry_id) + X = np.concatenate(features, axis=0) + y = np.array(labels) + w = np.array(weights) + ids = np.array(ids) + return X, y, w, ids diff --git a/deepchem/data/datasets.py b/deepchem/data/datasets.py index dc17012e5d30f94143e99fe995eb00eec54633cd..cdfeaf2844023094cc613c443c9323814618f7be 100644 --- a/deepchem/data/datasets.py +++ b/deepchem/data/datasets.py @@ -4,37 +4,41 @@ Contains wrapper class for datasets. 
import json import os import math -import deepchem as dc -import numpy as np -import pandas as pd import random import logging -from pandas import read_hdf import tempfile import time import shutil -import json -import warnings -from multiprocessing.dummy import Pool -from deepchem.utils.save import save_to_disk, save_metadata -from deepchem.utils.save import load_from_disk +import multiprocessing +from ast import literal_eval as make_tuple +from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Union + +import numpy as np +import pandas as pd + +import deepchem as dc +from deepchem.utils.typing import OneOrMany, Shape +from deepchem.utils.data_utils import save_to_disk, load_from_disk, load_image_files + +Batch = Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray] logger = logging.getLogger(__name__) -def sparsify_features(X): +def sparsify_features(X: np.ndarray) -> np.ndarray: """Extracts a sparse feature representation from dense feature array. Parameters ---------- X: np.ndarray - Of shape `(n_samples, ...) + A numpy array of shape `(n_samples, ...)`. Returns ------- - X_sparse, a np.ndarray with `dtype=object` where `X_sparse[i]` is a - typle of `(nonzero_inds, nonzero_vals)` with nonzero indices and - values in the i-th sample of `X`. + X_sparse: np.ndarray + A numpy array with `dtype=object` where `X_sparse[i]` is a + typle of `(nonzero_inds, nonzero_vals)` with nonzero indices and + values in the i-th sample of `X`. """ n_samples = len(X) X_sparse = [] @@ -46,7 +50,7 @@ def sparsify_features(X): return X_sparse -def densify_features(X_sparse, num_features): +def densify_features(X_sparse: np.ndarray, num_features: int) -> np.ndarray: """Expands sparse feature representation to dense feature array. Assumes that the sparse representation was constructed from an array @@ -63,7 +67,8 @@ def densify_features(X_sparse, num_features): Returns ------- - X, a np.ndarray of shape `(n_samples, num_features)`. 
+ X: np.ndarray + A numpy array of shape `(n_samples, num_features)`. """ n_samples = len(X_sparse) X = np.zeros((n_samples, num_features)) @@ -73,7 +78,7 @@ def densify_features(X_sparse, num_features): return X -def pad_features(batch_size, X_b): +def pad_features(batch_size: int, X_b: np.ndarray) -> np.ndarray: """Pads a batch of features to have precisely batch_size elements. Given an array of features with length less than or equal to @@ -102,7 +107,8 @@ def pad_features(batch_size, X_b): Returns ------- - X_out, a np.ndarray with `len(X_out) == batch_size`. + X_out: np.ndarray + A numpy array with `len(X_out) == batch_size`. """ num_samples = len(X_b) if num_samples > batch_size: @@ -131,7 +137,8 @@ def pad_features(batch_size, X_b): return X_out -def pad_batch(batch_size, X_b, y_b, w_b, ids_b): +def pad_batch(batch_size: int, X_b: np.ndarray, y_b: np.ndarray, + w_b: np.ndarray, ids_b: np.ndarray) -> Batch: """Pads batch to have size precisely batch_size elements. Given arrays of features `X_b`, labels `y_b`, weights `w_b`, and @@ -155,7 +162,9 @@ def pad_batch(batch_size, X_b, y_b, w_b, ids_b): Returns ------- - (X_out, y_out, w_out, ids_out), all np.ndarray with length `batch_size`. + Batch + The batch is a tuple of `(X_out, y_out, w_out, ids_out)`, + all numpy arrays with length `batch_size`. """ num_samples = len(X_b) if num_samples == batch_size: @@ -225,38 +234,49 @@ class Dataset(object): Instead you will need to instantiate one of the concrete subclasses. """ - def __init__(self): + def __init__(self) -> None: raise NotImplementedError() - def __len__(self): - """ - Get the number of elements in the dataset. + def __len__(self) -> int: + """Get the number of elements in the dataset. + + Returns + ------- + int + The number of elements in the dataset. """ raise NotImplementedError() - def get_shape(self): + def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]: """Get the shape of the dataset. 
Returns four tuples, giving the shape of the X, y, w, and ids arrays. + + Returns + ------- + Tuple + The tuple contains four elements, which are the shapes of + the X, y, w, and ids arrays. """ raise NotImplementedError() - def get_task_names(self): + def get_task_names(self) -> np.ndarray: """Get the names of the tasks associated with this dataset.""" raise NotImplementedError() @property - def X(self): + def X(self) -> np.ndarray: """Get the X vector for this dataset as a single numpy array. Returns ------- - Numpy array of features `X`. + np.ndarray + A numpy array of identifiers `X`. - Note - ---- - If data is stored on disk, accesing this field may involve loading + Notes + ----- + If data is stored on disk, accessing this field may involve loading data from disk and could potentially be slow. Using `iterbatches()` or `itersamples()` may be more efficient for larger datasets. @@ -264,16 +284,17 @@ class Dataset(object): raise NotImplementedError() @property - def y(self): + def y(self) -> np.ndarray: """Get the y vector for this dataset as a single numpy array. Returns ------- - Numpy array of labels `y`. + np.ndarray + A numpy array of identifiers `y`. - Note - ---- - If data is stored on disk, accesing this field may involve loading + Notes + ----- + If data is stored on disk, accessing this field may involve loading data from disk and could potentially be slow. Using `iterbatches()` or `itersamples()` may be more efficient for larger datasets. @@ -281,91 +302,94 @@ class Dataset(object): raise NotImplementedError() @property - def ids(self): + def ids(self) -> np.ndarray: """Get the ids vector for this dataset as a single numpy array. Returns ------- - Numpy array of identifiers `ids`. + np.ndarray + A numpy array of identifiers `ids`. - Note - ---- - If data is stored on disk, accesing this field may involve loading + Notes + ----- + If data is stored on disk, accessing this field may involve loading data from disk and could potentially be slow. 
Using `iterbatches()` or `itersamples()` may be more efficient for larger datasets. """ - raise NotImplementedError() @property - def w(self): + def w(self) -> np.ndarray: """Get the weight vector for this dataset as a single numpy array. Returns ------- - Numpy array of weights `w`. + np.ndarray + A numpy array of weights `w`. - Note - ---- - If data is stored on disk, accesing this field may involve loading + Notes + ----- + If data is stored on disk, accessing this field may involve loading data from disk and could potentially be slow. Using `iterbatches()` or `itersamples()` may be more efficient for larger datasets. """ raise NotImplementedError() - def __repr__(self): + def __repr__(self) -> str: """Convert self to REPL print representation.""" threshold = dc.utils.get_print_threshold() task_str = np.array2string( np.array(self.get_task_names()), threshold=threshold) + X_shape, y_shape, w_shape, _ = self.get_shape() if self.__len__() < dc.utils.get_max_print_size(): id_str = np.array2string(self.ids, threshold=threshold) return "<%s X.shape: %s, y.shape: %s, w.shape: %s, ids: %s, task_names: %s>" % ( - self.__class__.__name__, str(self.X.shape), str(self.y.shape), - str(self.w.shape), id_str, task_str) + self.__class__.__name__, str(X_shape), str(y_shape), str(w_shape), + id_str, task_str) else: return "<%s X.shape: %s, y.shape: %s, w.shape: %s, task_names: %s>" % ( - self.__class__.__name__, str(self.X.shape), str(self.y.shape), - str(self.w.shape), task_str) + self.__class__.__name__, str(X_shape), str(y_shape), str(w_shape), + task_str) - def __str__(self): + def __str__(self) -> str: """Convert self to str representation.""" return self.__repr__() def iterbatches(self, - batch_size=None, - epochs=1, - deterministic=False, - pad_batches=False): + batch_size: Optional[int] = None, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False) -> Iterator[Batch]: """Get an object that iterates over minibatches from the dataset. 
- Each minibatch is returned as a tuple of four numpy arrays: `(X, - y, w, ids)`. + Each minibatch is returned as a tuple of four numpy arrays: + `(X, y, w, ids)`. Parameters ---------- - batch_size: int, optional - Number of elements in each batch - epochs: int, optional - Number of epochs to walk over dataset - deterministic: bool, optional + batch_size: int, optional (default None) + Number of elements in each batch. + epochs: int, optional (default 1) + Number of epochs to walk over dataset. + deterministic: bool, optional (default False) If True, follow deterministic order. - pad_batches: bool, optional + pad_batches: bool, optional (default False) If True, pad each batch to `batch_size`. Returns ------- - Generator which yields tuples of four numpy arrays `(X, y, w, ids)` + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. """ raise NotImplementedError() - def itersamples(self): + def itersamples(self) -> Iterator[Batch]: """Get an object that iterates over the samples in the dataset. - Example: - + Examples + -------- >>> dataset = NumpyDataset(np.ones((2,2))) >>> for x, y, w, id in dataset.itersamples(): ... print(x.tolist(), y.tolist(), w.tolist(), id) @@ -374,11 +398,10 @@ class Dataset(object): """ raise NotImplementedError() - def transform(self, fn, **args): + def transform(self, transformer: "dc.trans.Transformer", **args) -> "Dataset": """Construct a new dataset by applying a transformation to every sample in this dataset. The argument is a function that can be called as follows: - >> newx, newy, neww = fn(x, y, w) It might be called only once with the whole dataset, or multiple @@ -387,16 +410,31 @@ class Dataset(object): Parameters ---------- - fn: function - A function to apply to each sample in the dataset + transformer: dc.trans.Transformer + The transformation to apply to each sample in the dataset. Returns ------- - a newly constructed Dataset object + Dataset + A newly constructed Dataset object. 
+ """ + raise NotImplementedError() + + def select(self, indices: Sequence[int], + select_dir: Optional[str] = None) -> "Dataset": + """Creates a new dataset from a selection of indices from self. + + Parameters + ---------- + indices: Sequence + List of indices to select. + select_dir: str, optional (default None) + Path to new directory that the selected indices will be copied to. """ raise NotImplementedError() - def get_statistics(self, X_stats=True, y_stats=True): + def get_statistics(self, X_stats: bool = True, + y_stats: bool = True) -> Tuple[float, ...]: """Compute and return statistics of this dataset. Uses `self.itersamples()` to compute means and standard deviations @@ -405,16 +443,17 @@ class Dataset(object): Parameters ---------- - X_stats: bool, optional + X_stats: bool, optional (default True) If True, compute feature-level mean and standard deviations. - y_stats: bool, optional + y_stats: bool, optional (default True) If True, compute label-level mean and standard deviations. Returns ------- - If `X_stats == True`, returns `(X_means, X_stds)`. If `y_stats == - True`, returns `(y_means, y_stds)`. If both are true, returns - `(X_means, X_stds, y_means, y_stds)`. + Tuple + If `X_stats == True`, returns `(X_means, X_stds)`. If `y_stats == True`, + returns `(y_means, y_stds)`. If both are true, returns + `(X_means, X_stds, y_means, y_stds)`. """ X_means = 0.0 X_m2 = 0.0 @@ -444,13 +483,13 @@ class Dataset(object): elif X_stats and y_stats: return X_means, X_stds, y_means, y_stds else: - return None + return tuple() def make_tf_dataset(self, - batch_size=100, - epochs=1, - deterministic=False, - pad_batches=False): + batch_size: int = 100, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False): """Create a tf.data.Dataset that iterates over the data in this Dataset. 
Each value returned by the Dataset's iterator is a tuple of (X, y, @@ -458,24 +497,32 @@ class Dataset(object): Parameters ---------- - batch_size: int - the number of samples to include in each batch - epochs: int - the number of times to iterate over the Dataset - deterministic: bool - if True, the data is produced in order. If False, a different + batch_size: int, default 100 + The number of samples to include in each batch. + epochs: int, default 1 + The number of times to iterate over the Dataset. + deterministic: bool, default False + If True, the data is produced in order. If False, a different random permutation of the data is used for each epoch. - pad_batches: bool - if True, batches are padded as necessary to make the size of + pad_batches: bool, default False + If True, batches are padded as necessary to make the size of each batch exactly equal batch_size. Returns ------- - tf.Dataset that iterates over the same data. + tf.data.Dataset + TensorFlow Dataset that iterates over the same data. + + Notes + ----- + This class requires TensorFlow to be installed. """ - # Retrieve the first sample so we can determine the dtypes. + try: + import tensorflow as tf + except: + raise ImportError("This method requires TensorFlow to be installed.") - import tensorflow as tf + # Retrieve the first sample so we can determine the dtypes. X, y, w, ids = next(self.itersamples()) dtypes = (tf.as_dtype(X.dtype), tf.as_dtype(y.dtype), tf.as_dtype(w.dtype)) shapes = (tf.TensorShape([None] + list(X.shape)), @@ -483,7 +530,6 @@ class Dataset(object): tf.TensorShape([None] + list(w.shape))) # Create a Tensorflow Dataset. 
- def gen_data(): for X, y, w, ids in self.iterbatches(batch_size, epochs, deterministic, pad_batches): @@ -491,39 +537,51 @@ class Dataset(object): return tf.data.Dataset.from_generator(gen_data, dtypes, shapes) - def make_pytorch_dataset(self, epochs=1, deterministic=False): + def make_pytorch_dataset(self, + epochs: int = 1, + deterministic: bool = False, + batch_size: Optional[int] = None): """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset. - Each value returned by the Dataset's iterator is a tuple of (X, y, - w, id) for one sample. + Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) + containing the data for one batch, or for a single sample if batch_size is None. Parameters ---------- - epochs: int - the number of times to iterate over the Dataset - deterministic: bool - if True, the data is produced in order. If False, a different + epochs: int, default 1 + The number of times to iterate over the Dataset. + deterministic: bool, default False + If True, the data is produced in order. If False, a different random permutation of the data is used for each epoch. + batch_size: int, optional (default None) + The number of samples to return in each batch. If None, each returned + value is a single sample. Returns ------- - `torch.utils.data.IterableDataset` that iterates over the data in - this dataset. + torch.utils.data.IterableDataset + `torch.utils.data.IterableDataset` that iterates over the data in + this dataset. + + Notes + ----- + This class requires PyTorch to be installed. """ raise NotImplementedError() - def to_dataframe(self): + def to_dataframe(self) -> pd.DataFrame: """Construct a pandas DataFrame containing the data from this Dataset. Returns ------- - pandas dataframe. If there is only a single feature per datapoint, - will have column "X" else will have columns "X1,X2,..." for - features. 
If there is only a single label per datapoint, will - have column "y" else will have columns "y1,y2,..." for labels. If - there is only a single weight per datapoint will have column "w" - else will have columns "w1,w2,...". Will have column "ids" for - identifiers. + pd.DataFrame + Pandas dataframe. If there is only a single feature per datapoint, + will have column "X" else will have columns "X1,X2,..." for + features. If there is only a single label per datapoint, will + have column "y" else will have columns "y1,y2,..." for labels. If + there is only a single weight per datapoint will have column "w" + else will have columns "w1,w2,...". Will have column "ids" for + identifiers. """ X = self.X y = self.y @@ -548,32 +606,35 @@ class Dataset(object): return pd.concat([X_df, y_df, w_df, ids_df], axis=1, sort=False) @staticmethod - def from_dataframe(df, X=None, y=None, w=None, ids=None): + def from_dataframe(df: pd.DataFrame, + X: Optional[OneOrMany[str]] = None, + y: Optional[OneOrMany[str]] = None, + w: Optional[OneOrMany[str]] = None, + ids: Optional[str] = None): """Construct a Dataset from the contents of a pandas DataFrame. Parameters ---------- - df: DataFrame - the pandas DataFrame - X: string or list of strings - the name of the column or columns containing the X array. If + df: pd.DataFrame + The pandas DataFrame + X: str or List[str], optional (default None) + The name of the column or columns containing the X array. If this is None, it will look for default column names that match those produced by to_dataframe(). - y: string or list of strings - the name of the column or columns containing the y array. If + y: str or List[str], optional (default None) + The name of the column or columns containing the y array. If this is None, it will look for default column names that match those produced by to_dataframe(). - w: string or list of strings - the name of the column or columns containing the w array. 
If + w: str or List[str], optional (default None) + The name of the column or columns containing the w array. If this is None, it will look for default column names that match those produced by to_dataframe(). - ids: string - the name of the column containing the ids. If this is None, it + ids: str, optional (default None) + The name of the column containing the ids. If this is None, it will look for default column names that match those produced by to_dataframe(). """ # Find the X values. - if X is not None: X_val = df[X] elif 'X' in df.columns: @@ -589,7 +650,6 @@ class Dataset(object): X_val = np.expand_dims(X_val, 1) # Find the y values. - if y is not None: y_val = df[y] elif 'y' in df.columns: @@ -605,7 +665,6 @@ class Dataset(object): y_val = np.expand_dims(y_val, 1) # Find the w values. - if w is not None: w_val = df[w] elif 'w' in df.columns: @@ -621,7 +680,6 @@ class Dataset(object): w_val = np.expand_dims(w_val, 1) # Find the ids. - if ids is not None: ids_val = df[ids] elif 'ids' in df.columns: @@ -636,28 +694,35 @@ class NumpyDataset(Dataset): This subclass of `Dataset` stores arrays `X,y,w,ids` in memory as numpy arrays. This makes it very easy to construct `NumpyDataset` - objects. For example + objects. + Examples + -------- >>> import numpy as np >>> dataset = NumpyDataset(X=np.random.rand(5, 3), y=np.random.rand(5,), ids=np.arange(5)) """ - def __init__(self, X, y=None, w=None, ids=None, n_tasks=1): + def __init__(self, + X: np.ndarray, + y: Optional[np.ndarray] = None, + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None, + n_tasks: int = 1) -> None: """Initialize this object. Parameters ---------- X: np.ndarray - Input features. Of shape `(n_samples,...)` - y: np.ndarray, optional - Labels. Of shape `(n_samples, ...)`. Note that each label can + Input features. A numpy array of shape `(n_samples,...)`. + y: np.ndarray, optional (default None) + Labels. A numpy array of shape `(n_samples, ...)`. 
Note that each label can have an arbitrary shape. - w: np.ndarray, optional - Weights. Should either be 1D of shape `(n_samples,)` or if + w: np.ndarray, optional (default None) + Weights. Should either be 1D array of shape `(n_samples,)` or if there's more than one task, of shape `(n_samples, n_tasks)`. - ids: np.ndarray, optional - Identifiers. Of shape `(n_samples,)` - n_tasks: int, optional + ids: np.ndarray, optional (default None) + Identifiers. A numpy array of shape `(n_samples,)` + n_tasks: int, default 1 Number of learning tasks. """ n_samples = len(X) @@ -684,73 +749,72 @@ class NumpyDataset(Dataset): self._w = w self._ids = np.array(ids, dtype=object) - def __len__(self): - """ - Get the number of elements in the dataset. - """ + def __len__(self) -> int: + """Get the number of elements in the dataset.""" return len(self._y) - def get_shape(self): + def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]: """Get the shape of the dataset. - Returns four tuples, giving the shape of the X, y, w, and ids - arrays. + Returns four tuples, giving the shape of the X, y, w, and ids arrays. 
""" return self._X.shape, self._y.shape, self._w.shape, self._ids.shape - def get_task_names(self): + def get_task_names(self) -> np.ndarray: """Get the names of the tasks associated with this dataset.""" if len(self._y.shape) < 2: return np.array([0]) return np.arange(self._y.shape[1]) @property - def X(self): + def X(self) -> np.ndarray: """Get the X vector for this dataset as a single numpy array.""" return self._X @property - def y(self): + def y(self) -> np.ndarray: """Get the y vector for this dataset as a single numpy array.""" return self._y @property - def ids(self): + def ids(self) -> np.ndarray: """Get the ids vector for this dataset as a single numpy array.""" return self._ids @property - def w(self): + def w(self) -> np.ndarray: """Get the weight vector for this dataset as a single numpy array.""" return self._w def iterbatches(self, - batch_size=None, - epochs=1, - deterministic=False, - pad_batches=False): + batch_size: Optional[int] = None, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False) -> Iterator[Batch]: """Get an object that iterates over minibatches from the dataset. - Each minibatch is returned as a tuple of four numpy arrays: (X, y, - w, ids). + Each minibatch is returned as a tuple of four numpy arrays: + `(X, y, w, ids)`. Parameters ---------- - batch_size: int, optional - Number of elements in each batch - epochs: int, optional - Number of epochs to walk over dataset - deterministic: bool, optional + batch_size: int, optional (default None) + Number of elements in each batch. + epochs: int, default 1 + Number of epochs to walk over dataset. + deterministic: bool, optional (default False) If True, follow deterministic order. - pad_batches: bool, optional + pad_batches: bool, optional (default False) If True, pad each batch to `batch_size`. 
Returns ------- - Generator which yields tuples of four numpy arrays `(X, y, w, ids)` + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. """ - def iterate(dataset, batch_size, epochs, deterministic, pad_batches): + def iterate(dataset: NumpyDataset, batch_size: Optional[int], epochs: int, + deterministic: bool, pad_batches: bool): n_samples = dataset._X.shape[0] if deterministic: sample_perm = np.arange(n_samples) @@ -778,11 +842,16 @@ class NumpyDataset(Dataset): return iterate(self, batch_size, epochs, deterministic, pad_batches) - def itersamples(self): + def itersamples(self) -> Iterator[Batch]: """Get an object that iterates over the samples in the dataset. - Example: + Returns + ------- + Iterator[Batch] + Iterator which yields tuples of four numpy arrays `(X, y, w, ids)`. + Examples + -------- >>> dataset = NumpyDataset(np.ones((2,2))) >>> for x, y, w, id in dataset.itersamples(): ... print(x.tolist(), y.tolist(), w.tolist(), id) @@ -793,11 +862,11 @@ class NumpyDataset(Dataset): return ((self._X[i], self._y[i], self._w[i], self._ids[i]) for i in range(n_samples)) - def transform(self, fn, **args): + def transform(self, transformer: "dc.trans.Transformer", + **args) -> "NumpyDataset": """Construct a new dataset by applying a transformation to every sample in this dataset. 
The argument is a function that can be called as follows: - >> newx, newy, neww = fn(x, y, w) It might be called only once with the whole dataset, or multiple @@ -806,26 +875,34 @@ class NumpyDataset(Dataset): Parameters ---------- - fn: function - A function to apply to each sample in the dataset + transformer: dc.trans.Transformer + The transformation to apply to each sample in the dataset Returns ------- - a newly constructed Dataset object + NumpyDataset + A newly constructed NumpyDataset object """ - newx, newy, neww = fn(self._X, self._y, self._w) - return NumpyDataset(newx, newy, neww, self._ids[:]) + newx, newy, neww, newids = transformer.transform_array( + self._X, self._y, self._w, self._ids) + return NumpyDataset(newx, newy, neww, newids) - def select(self, indices, select_dir=None): + def select(self, indices: Sequence[int], + select_dir: Optional[str] = None) -> "NumpyDataset": """Creates a new dataset from a selection of indices from self. Parameters ---------- - indices: list + indices: List[int] List of indices to select. - select_dir: string + select_dir: str, optional (default None) Used to provide same API as `DiskDataset`. Ignored since `NumpyDataset` is purely in-memory. + + Returns + ------- + NumpyDataset + A selected NumpyDataset object """ X = self.X[indices] y = self.y[indices] @@ -833,67 +910,73 @@ class NumpyDataset(Dataset): ids = self.ids[indices] return NumpyDataset(X, y, w, ids) - def make_pytorch_dataset(self, epochs=1, deterministic=False): + def make_pytorch_dataset(self, + epochs: int = 1, + deterministic: bool = False, + batch_size: Optional[int] = None): """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset. - Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) for - one sample. + Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) + containing the data for one batch, or for a single sample if batch_size is None. 
Parameters ---------- - epochs: int - the number of times to iterate over the Dataset - deterministic: bool - if True, the data is produced in order. If False, a different random - permutation of the data is used for each epoch. - """ - import torch - - def iterate(): - n_samples = self._X.shape[0] - worker_info = torch.utils.data.get_worker_info() - if worker_info is None: - first_sample = 0 - last_sample = n_samples - else: - first_sample = worker_info.id * n_samples // worker_info.num_workers - last_sample = ( - worker_info.id + 1) * n_samples // worker_info.num_workers - for epoch in range(epochs): - if deterministic: - order = first_sample + np.arange(last_sample - first_sample) - else: - order = first_sample + np.random.permutation(last_sample - - first_sample) - for i in order: - yield (self._X[i], self._y[i], self._w[i], self._ids[i]) + epochs: int, default 1 + The number of times to iterate over the Dataset + deterministic: bool, default False + If True, the data is produced in order. If False, a different + random permutation of the data is used for each epoch. + batch_size: int, optional (default None) + The number of samples to return in each batch. If None, each returned + value is a single sample. - class TorchDataset(torch.utils.data.IterableDataset): + Returns + ------- + torch.utils.data.IterableDataset + `torch.utils.data.IterableDataset` that iterates over the data in + this dataset. - def __iter__(self): - return iterate() + Notes + ----- + This method requires PyTorch to be installed. 
+ """ + try: + from deepchem.data.pytorch_datasets import _TorchNumpyDataset + except: + raise ImportError("This method requires PyTorch to be installed.") - return TorchDataset() + pytorch_ds = _TorchNumpyDataset( + numpy_dataset=self, + epochs=epochs, + deterministic=deterministic, + batch_size=batch_size) + return pytorch_ds @staticmethod - def from_DiskDataset(ds): - """ + def from_DiskDataset(ds: "DiskDataset") -> "NumpyDataset": + """Convert DiskDataset to NumpyDataset. Parameters ---------- - ds : DiskDataset - DiskDataset to transorm to NumpyDataset + ds: DiskDataset + DiskDataset to transform to NumpyDataset. Returns ------- NumpyDataset - Data of ds as NumpyDataset - + A new NumpyDataset created from DiskDataset. """ return NumpyDataset(ds.X, ds.y, ds.w, ds.ids) @staticmethod - def to_json(self, fname): + def to_json(self, fname: str) -> None: + """Dump NumpyDataset to the json file . + + Parameters + ---------- + fname: str + The name of the json file. + """ d = { 'X': self.X.tolist(), 'y': self.y.tolist(), @@ -904,22 +987,36 @@ class NumpyDataset(Dataset): json.dump(d, fout) @staticmethod - def from_json(fname): + def from_json(fname: str) -> "NumpyDataset": + """Create NumpyDataset from the json file. + + Parameters + ---------- + fname: str + The name of the json file. + + Returns + ------- + NumpyDataset + A new NumpyDataset created from the json file. + """ with open(fname) as fin: d = json.load(fin) return NumpyDataset(d['X'], d['y'], d['w'], d['ids']) @staticmethod - def merge(datasets): - """ + def merge(datasets: Sequence[Dataset]) -> "NumpyDataset": + """Merge multiple NumpyDatasets. + Parameters ---------- - datasets: list of deepchem.data.NumpyDataset - list of datasets to merge + datasets: List[Dataset] + List of datasets to merge. Returns ------- - Single deepchem.data.NumpyDataset with data concatenated over axis 0 + NumpyDataset + A single NumpyDataset containing all the samples from all datasets. 
""" X, y, w, ids = datasets[0].X, datasets[0].y, datasets[0].w, datasets[0].ids for dataset in datasets[1:]: @@ -934,36 +1031,167 @@ class NumpyDataset(Dataset): return NumpyDataset(X, y, w, ids, n_tasks=y.shape[1]) +class _Shard(object): + + def __init__(self, X, y, w, ids): + self.X = X + self.y = y + self.w = w + self.ids = ids + + class DiskDataset(Dataset): """ A Dataset that is stored as a set of files on disk. + + The DiskDataset is the workhorse class of DeepChem that facilitates analyses + on large datasets. Use this class whenever you're working with a large + dataset that can't be easily manipulated in RAM. + + On disk, a `DiskDataset` has a simple structure. All files for a given + `DiskDataset` are stored in a `data_dir`. The contents of `data_dir` should + be laid out as follows: + + data_dir/ + | + ---> metadata.csv.gzip + | + ---> tasks.json + | + ---> shard-0-X.npy + | + ---> shard-0-y.npy + | + ---> shard-0-w.npy + | + ---> shard-0-ids.npy + | + ---> shard-1-X.npy + . + . + . + + The metadata is constructed by static method + `DiskDataset._construct_metadata` and saved to disk by + `DiskDataset._save_metadata`. The metadata itself consists of a csv file + which has columns `('ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape', + 'w_shape')`. `tasks.json` consists of a list of task names for this dataset. + + The actual data is stored in `.npy` files (numpy array files) of the form + 'shard-0-X.npy', 'shard-0-y.npy', etc. + + The basic structure of `DiskDataset` is quite robust and will likely serve + you well for datasets up to about 100 GB or larger. However note that + `DiskDataset` has not been tested for very large datasets at the terabyte + range and beyond. You may be better served by implementing a custom + `Dataset` class for those use cases. + + Examples + -------- + Let's walk through a simple example of constructing a new `DiskDataset`. 
+ + >>> import deepchem as dc + >>> import numpy as np + >>> X = np.random.rand(10, 10) + >>> dataset = dc.data.DiskDataset.from_numpy(X) + + If you have already saved a `DiskDataset` to `data_dir`, you can reinitialize it with + + >> data_dir = "/path/to/my/data" + >> dataset = dc.data.DiskDataset(data_dir) + + Once you have a dataset you can access its attributes as follows + + >>> X = np.random.rand(10, 10) + >>> y = np.random.rand(10,) + >>> w = np.ones_like(y) + >>> dataset = dc.data.DiskDataset.from_numpy(X) + >>> X, y, w = dataset.X, dataset.y, dataset.w + + One thing to beware of is that `dataset.X`, `dataset.y`, `dataset.w` are + loading data from disk! If you have a large dataset, these operations can be + extremely slow. Instead try iterating through the dataset instead. + + >>> for (xi, yi, wi, idi) in dataset.itersamples(): + ... pass + + Attributes + ---------- + data_dir: str + Location of directory where this `DiskDataset` is stored to disk + metadata_df: pd.DataFrame + Pandas Dataframe holding metadata for this `DiskDataset` + legacy_metadata: bool + Whether this `DiskDataset` uses legacy format. + + Notes + ----- + `DiskDataset` originally had a simpler metadata format without shape + information. Older `DiskDataset` objects had metadata files with columns + `('ids', 'X', 'y', 'w') and not additional shape columns. `DiskDataset` + maintains backwards compatibility with this older metadata format, but we + recommend for performance reasons not using legacy metadata for new + projects. """ - def __init__(self, data_dir): - """ - Turns featurized dataframes into numpy files, writes them & metadata to disk. + def __init__(self, data_dir: str) -> None: + """Load a constructed DiskDataset from disk + + Note that this method cannot construct a new disk dataset. Instead use + static methods `DiskDataset.create_dataset` or `DiskDataset.from_numpy` + for that purpose. 
Use this constructor instead to load a `DiskDataset` + that has already been created on disk. + + Parameters + ---------- + data_dir: str + Location on disk of an existing `DiskDataset`. """ self.data_dir = data_dir logger.info("Loading dataset from disk.") self.tasks, self.metadata_df = self.load_metadata() - self._cached_shards = None + if len(self.metadata_df.columns) == 4 and list( + self.metadata_df.columns) == ['ids', 'X', 'y', 'w']: + logger.info( + "Detected legacy metatadata on disk. You can upgrade from legacy metadata " + "to the more efficient current metadata by resharding this dataset " + "by calling the reshard() method of this object.") + self.legacy_metadata = True + elif len(self.metadata_df.columns) == 8 and list( + self.metadata_df.columns) == [ + 'ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape', 'w_shape' + ]: # noqa + self.legacy_metadata = False + else: + raise ValueError( + "Malformed metadata on disk. Metadata must have columns 'ids', 'X', 'y', 'w', " + "'ids_shape', 'X_shape', 'y_shape', 'w_shape' (or if in legacy metadata format," + "columns 'ids', 'X', 'y', 'w')") + self._cached_shards: Optional[List] = None self._memory_cache_size = 20 * (1 << 20) # 20 MB self._cache_used = 0 @staticmethod - def create_dataset(shard_generator, data_dir=None, tasks=[]): + def create_dataset(shard_generator: Iterable[Batch], + data_dir: Optional[str] = None, + tasks: Optional[Sequence] = []) -> "DiskDataset": """Creates a new DiskDataset Parameters ---------- - shard_generator: Iterable + shard_generator: Iterable[Batch] An iterable (either a list or generator) that provides tuples of data (X, y, w, ids). Each tuple will be written to a separate shard on disk. - data_dir: str + data_dir: str, optional (default None) Filename for data directory. Creates a temp directory if none specified. - tasks: list + tasks: Sequence, optional (default []) List of tasks for this dataset. 
+ + Returns + ------- + DiskDataset + A new `DiskDataset` constructed from the given data """ if data_dir is None: data_dir = tempfile.mkdtemp() @@ -978,12 +1206,13 @@ class DiskDataset(Dataset): DiskDataset.write_data_to_disk(data_dir, basename, tasks, X, y, w, ids)) metadata_df = DiskDataset._construct_metadata(metadata_rows) - save_metadata(tasks, metadata_df, data_dir) + DiskDataset._save_metadata(metadata_df, data_dir, tasks) time2 = time.time() logger.info("TIMING: dataset construction took %0.3f s" % (time2 - time1)) return DiskDataset(data_dir) - def load_metadata(self): + def load_metadata(self) -> Tuple[List[str], pd.DataFrame]: + """Helper method that loads metadata from disk.""" try: tasks_filename, metadata_filename = self._get_metadata_filename() with open(tasks_filename) as fin: @@ -991,7 +1220,7 @@ class DiskDataset(Dataset): metadata_df = pd.read_csv(metadata_filename, compression='gzip') metadata_df = metadata_df.where((pd.notnull(metadata_df)), None) return tasks, metadata_df - except Exception as e: + except Exception: pass # Load obsolete format -> save in new format @@ -1000,91 +1229,240 @@ class DiskDataset(Dataset): tasks, metadata_df = load_from_disk(metadata_filename) del metadata_df['task_names'] del metadata_df['basename'] - save_metadata(tasks, metadata_df, self.data_dir) + DiskDataset._save_metadata(metadata_df, self.data_dir, tasks) return tasks, metadata_df raise ValueError("No Metadata Found On Disk") @staticmethod - def _construct_metadata(metadata_entries): + def _save_metadata(metadata_df: pd.DataFrame, data_dir: str, + tasks: Optional[Sequence]) -> None: + """Saves the metadata for a DiskDataset + + Parameters + ---------- + metadata_df: pd.DataFrame + The dataframe which will be written to disk. + data_dir: str + Directory to store metadata. + tasks: Sequence, optional + Tasks of DiskDataset. If `None`, an empty list of tasks is written to + disk. 
+ """ + if tasks is None: + tasks = [] + elif isinstance(tasks, np.ndarray): + tasks = tasks.tolist() + metadata_filename = os.path.join(data_dir, "metadata.csv.gzip") + tasks_filename = os.path.join(data_dir, "tasks.json") + with open(tasks_filename, 'w') as fout: + json.dump(tasks, fout) + metadata_df.to_csv(metadata_filename, index=False, compression='gzip') + + @staticmethod + def _construct_metadata(metadata_entries: List) -> pd.DataFrame: """Construct a dataframe containing metadata. - metadata_entries should have elements returned by write_data_to_disk - above. + Parameters + ---------- + metadata_entries: List + `metadata_entries` should have elements returned by write_data_to_disk + above. + + Returns + ------- + pd.DataFrame + A Pandas Dataframe object contains metadata. """ - columns = ('ids', 'X', 'y', 'w') + columns = ('ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape', + 'w_shape') metadata_df = pd.DataFrame(metadata_entries, columns=columns) return metadata_df @staticmethod - def write_data_to_disk(data_dir, - basename, - tasks, - X=None, - y=None, - w=None, - ids=None): + def write_data_to_disk( + data_dir: str, + basename: str, + tasks: np.ndarray, + X: Optional[np.ndarray] = None, + y: Optional[np.ndarray] = None, + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None) -> List[Optional[str]]: + """Static helper method to write data to disk. + + This helper method is used to write a shard of data to disk. + + Parameters + ---------- + data_dir: str + Data directory to write shard to. + basename: str + Basename for the shard in question. + tasks: np.ndarray + The names of the tasks in question. + X: np.ndarray, optional (default None) + The features array. + y: np.ndarray, optional (default None) + The labels array. + w: np.ndarray, optional (default None) + The weights array. + ids: np.ndarray, optional (default None) + The identifiers array. 
+ + Returns + ------- + List[Optional[str]] + List with values `[out_ids, out_X, out_y, out_w, out_ids_shape, + out_X_shape, out_y_shape, out_w_shape]` with filenames of locations to + disk which these respective arrays were written. + """ if X is not None: - out_X = "%s-X.npy" % basename - save_to_disk(X, os.path.join(data_dir, out_X)) + out_X: Optional[str] = "%s-X.npy" % basename + save_to_disk(X, os.path.join(data_dir, out_X)) # type: ignore + out_X_shape = X.shape else: out_X = None + out_X_shape = None if y is not None: - out_y = "%s-y.npy" % basename - save_to_disk(y, os.path.join(data_dir, out_y)) + out_y: Optional[str] = "%s-y.npy" % basename + save_to_disk(y, os.path.join(data_dir, out_y)) # type: ignore + out_y_shape = y.shape else: out_y = None + out_y_shape = None if w is not None: - out_w = "%s-w.npy" % basename - save_to_disk(w, os.path.join(data_dir, out_w)) + out_w: Optional[str] = "%s-w.npy" % basename + save_to_disk(w, os.path.join(data_dir, out_w)) # type: ignore + out_w_shape = w.shape else: out_w = None + out_w_shape = None if ids is not None: - out_ids = "%s-ids.npy" % basename - save_to_disk(ids, os.path.join(data_dir, out_ids)) + out_ids: Optional[str] = "%s-ids.npy" % basename + save_to_disk(ids, os.path.join(data_dir, out_ids)) # type: ignore + out_ids_shape = ids.shape else: out_ids = None + out_ids_shape = None # note that this corresponds to the _construct_metadata column order - return [out_ids, out_X, out_y, out_w] + return [ + out_ids, out_X, out_y, out_w, out_ids_shape, out_X_shape, out_y_shape, + out_w_shape + ] - def save_to_disk(self): + def save_to_disk(self) -> None: """Save dataset to disk.""" - save_metadata(self.tasks, self.metadata_df, self.data_dir) + DiskDataset._save_metadata(self.metadata_df, self.data_dir, self.tasks) self._cached_shards = None - def move(self, new_data_dir): - """Moves dataset to new directory.""" - if os.path.isdir(new_data_dir): + def move(self, new_data_dir: str, + delete_if_exists: Optional[bool] 
= True) -> None:
+    """Moves dataset to new directory.
+
+    Parameters
+    ----------
+    new_data_dir: str
+      The new directory name to move this dataset to.
+    delete_if_exists: bool, optional (default True)
+      If this option is set, delete the destination directory if it exists
+      before moving. This is set to True by default to be backwards compatible
+      with behavior in earlier versions of DeepChem.
+
+    Notes
+    -----
+    This is a stateful operation! `self.data_dir` will be moved into
+    `new_data_dir`. If `delete_if_exists` is set to `True` (by default this is
+    set `True`), then `new_data_dir` is deleted if it's a pre-existing
+    directory.
+    """
+    if delete_if_exists and os.path.isdir(new_data_dir):
       shutil.rmtree(new_data_dir)
     shutil.move(self.data_dir, new_data_dir)
-    self.data_dir = new_data_dir
+    if delete_if_exists:
+      self.data_dir = new_data_dir
+    else:
+      self.data_dir = os.path.join(new_data_dir,
+                                   os.path.basename(self.data_dir))

-  def get_task_names(self):
-    """
-    Gets learning tasks associated with this dataset.
+  def copy(self, new_data_dir: str) -> "DiskDataset":
+    """Copies dataset to new directory.
+
+    Parameters
+    ----------
+    new_data_dir: str
+      The new directory name to copy this dataset to.
+
+    Returns
+    -------
+    DiskDataset
+      A copied DiskDataset object.
+
+    Notes
+    -----
+    This is a stateful operation! Any data at `new_data_dir` will be deleted
+    and `self.data_dir` will be deep copied into `new_data_dir`.
""" + if os.path.isdir(new_data_dir): + shutil.rmtree(new_data_dir) + shutil.copytree(self.data_dir, new_data_dir) + return DiskDataset(new_data_dir) + + def get_task_names(self) -> np.ndarray: + """Gets learning tasks associated with this dataset.""" return self.tasks - # if not len(self.metadata_df): - # raise ValueError("No data in dataset.") - # return next(self.metadata_df.iterrows())[1]['task_names'] - def reshard(self, shard_size): - """Reshards data to have specified shard size.""" + def reshard(self, shard_size: int) -> None: + """Reshards data to have specified shard size. + + Parameters + ---------- + shard_size: int + The size of shard. + + Examples + -------- + >>> import deepchem as dc + >>> import numpy as np + >>> X = np.random.rand(100, 10) + >>> d = dc.data.DiskDataset.from_numpy(X) + >>> d.reshard(shard_size=10) + >>> d.get_number_shards() + 10 + + Notes + ----- + If this `DiskDataset` is in `legacy_metadata` format, reshard will + convert this dataset to have non-legacy metadata. 
+ """ # Create temp directory to store resharded version reshard_dir = tempfile.mkdtemp() - new_metadata = [] + n_shards = self.get_number_shards() + + # Get correct shapes for y/w + tasks = self.get_task_names() + _, y_shape, w_shape, _ = self.get_shape() + if len(y_shape) == 1: + y_shape = (len(y_shape), len(tasks)) + if len(w_shape) == 1: + w_shape = (len(w_shape), len(tasks)) # Write data in new shards def generator(): - tasks = self.get_task_names() X_next = np.zeros((0,) + self.get_data_shape()) - y_next = np.zeros((0,) + (len(tasks),)) - w_next = np.zeros((0,) + (len(tasks),)) + y_next = np.zeros((0,) + y_shape[1:]) + w_next = np.zeros((0,) + w_shape[1:]) ids_next = np.zeros((0,), dtype=object) - for (X, y, w, ids) in self.itershards(): + for shard_num, (X, y, w, ids) in enumerate(self.itershards()): + logger.info("Resharding shard %d/%d" % (shard_num + 1, n_shards)) + # Handle shapes + X = np.reshape(X, (len(X),) + self.get_data_shape()) + # Note that this means that DiskDataset resharding currently doesn't + # work for datasets that aren't regression/classification. + y = np.reshape(y, (len(y),) + y_shape[1:]) + w = np.reshape(w, (len(w),) + w_shape[1:]) X_next = np.concatenate([X_next, X], axis=0) y_next = np.concatenate([y_next, y], axis=0) w_next = np.concatenate([w_next, w], axis=0) @@ -1102,21 +1480,26 @@ class DiskDataset(Dataset): generator(), data_dir=reshard_dir, tasks=self.tasks) shutil.rmtree(self.data_dir) shutil.move(reshard_dir, self.data_dir) + # Should have updated to non-legacy metadata + self.legacy_metadata = False self.metadata_df = resharded_dataset.metadata_df + # Note that this resets the cache internally self.save_to_disk() - def get_data_shape(self): - """ - Gets array shape of datapoints in this dataset. 
- """ + def get_data_shape(self) -> Shape: + """Gets array shape of datapoints in this dataset.""" if not len(self.metadata_df): raise ValueError("No data in dataset.") - sample_X = load_from_disk( - os.path.join(self.data_dir, - next(self.metadata_df.iterrows())[1]['X'])) - return np.shape(sample_X)[1:] + if self.legacy_metadata: + sample_X = load_from_disk( + os.path.join(self.data_dir, + next(self.metadata_df.iterrows())[1]['X'])) + return np.shape(sample_X)[1:] + else: + X_shape, _, _, _ = self.get_shape() + return X_shape[1:] - def get_shard_size(self): + def get_shard_size(self) -> int: """Gets size of shards on disk.""" if not len(self.metadata_df): raise ValueError("No data in dataset.") @@ -1125,69 +1508,74 @@ class DiskDataset(Dataset): next(self.metadata_df.iterrows())[1]['y'])) return len(sample_y) - def _get_metadata_filename(self): - """ - Get standard location for metadata file. - """ + def _get_metadata_filename(self) -> Tuple[str, str]: + """Get standard location for metadata file.""" metadata_filename = os.path.join(self.data_dir, "metadata.csv.gzip") tasks_filename = os.path.join(self.data_dir, "tasks.json") return tasks_filename, metadata_filename - def get_number_shards(self): - """ - Returns the number of shards for this dataset. - """ + def get_number_shards(self) -> int: + """Returns the number of shards for this dataset.""" return self.metadata_df.shape[0] - def itershards(self): - """ - Return an object that iterates over all shards in dataset. + def itershards(self) -> Iterator[Batch]: + """Return an object that iterates over all shards in dataset. Datasets are stored in sharded fashion on disk. Each call to next() for the generator defined by this function returns the data from a particular shard. The order of shards returned is guaranteed to remain fixed. + + Returns + ------- + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. 
""" return (self.get_shard(i) for i in range(self.get_number_shards())) def iterbatches(self, - batch_size=None, - epochs=1, - deterministic=False, - pad_batches=False): + batch_size: Optional[int] = None, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False) -> Iterator[Batch]: """ Get an object that iterates over minibatches from the dataset. It is guaranteed that the number of batches returned is `math.ceil(len(dataset)/batch_size)`. Each minibatch is returned as a tuple of four numpy arrays: `(X, y, w, ids)`. - Parameters: - ----------- - batch_size: int + Parameters + ---------- + batch_size: int, optional (default None) Number of elements in a batch. If None, then it yields batches with size equal to the size of each individual shard. - epoch: int + epoch: int, default 1 Number of epochs to walk over dataset - deterministic: bool + deterministic: bool, default False Whether or not we should should shuffle each shard before generating the batches. Note that this is only local in the sense that it does not ever mix between different shards. - pad_batches: bool + pad_batches: bool, default False Whether or not we should pad the last batch, globally, such that it has exactly batch_size elements. + + Returns + ------- + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. 
""" shard_indices = list(range(self.get_number_shards())) return self._iterbatches_from_shards(shard_indices, batch_size, epochs, deterministic, pad_batches) def _iterbatches_from_shards(self, - shard_indices, - batch_size=None, - epochs=1, - deterministic=False, - pad_batches=False): + shard_indices: Sequence[int], + batch_size: Optional[int] = None, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False) -> Iterator[Batch]: """Get an object that iterates over batches from a restricted set of shards.""" - def iterate(dataset, batch_size, epochs): + def iterate(dataset: DiskDataset, batch_size: Optional[int], epochs: int): num_shards = len(shard_indices) if deterministic: shard_perm = np.arange(num_shards) @@ -1196,7 +1584,8 @@ class DiskDataset(Dataset): # than process based pools, since process based pools need to pickle/serialize # objects as an extra overhead. Also, as hideously as un-thread safe this looks, # we're actually protected by the GIL. - pool = Pool(1) # mp.dummy aliases ThreadPool to Pool + # mp.dummy aliases ThreadPool to Pool + pool = multiprocessing.dummy.Pool(1) if batch_size is None: num_global_batches = num_shards @@ -1287,11 +1676,16 @@ class DiskDataset(Dataset): return iterate(self, batch_size, epochs) - def itersamples(self): + def itersamples(self) -> Iterator[Batch]: """Get an object that iterates over the samples in the dataset. - Example: + Returns + ------- + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. + Examples + -------- >>> dataset = DiskDataset.from_numpy(np.ones((2,2)), np.ones((2,1))) >>> for x, y, w, id in dataset.itersamples(): ... 
print(x.tolist(), y.tolist(), w.tolist(), id)
@@ -1314,124 +1708,195 @@ class DiskDataset(Dataset):

     return iterate(self)

-  def transform(self, fn, **args):
+  def transform(self,
+                transformer: "dc.trans.Transformer",
+                parallel: bool = False,
+                out_dir: Optional[str] = None,
+                **args) -> "DiskDataset":
     """Construct a new dataset by applying a transformation to every sample in this dataset.

     The argument is a function that can be called as follows:
-
     >> newx, newy, neww = fn(x, y, w)

-    It might be called only once with the whole dataset, or multiple times with different
-    subsets of the data.  Each time it is called, it should transform the samples and return
-    the transformed data.
+    It might be called only once with the whole dataset, or multiple times
+    with different subsets of the data. Each time it is called, it should
+    transform the samples and return the transformed data.

     Parameters
     ----------
-    fn: function
-      A function to apply to each sample in the dataset
-    out_dir: string
-      The directory to save the new dataset in.  If this is omitted, a temporary directory
-      is created automatically
+    transformer: dc.trans.Transformer
+      The transformation to apply to each sample in the dataset.
+    parallel: bool, default False
+      If True, use multiple processes to transform the dataset in parallel.
+    out_dir: str, optional (default None)
+      The directory to save the new dataset in. If this is omitted, a
+      temporary directory is created automatically.
Returns ------- - a newly constructed Dataset object + DiskDataset + A newly constructed Dataset object """ - if 'out_dir' in args: - out_dir = args['out_dir'] - else: + if out_dir is None: out_dir = tempfile.mkdtemp() tasks = self.get_task_names() + n_shards = self.get_number_shards() - def generator(): - for shard_num, row in self.metadata_df.iterrows(): - X, y, w, ids = self.get_shard(shard_num) - newx, newy, neww = fn(X, y, w) - yield (newx, newy, neww, ids) + time1 = time.time() + if parallel: + results = [] + pool = multiprocessing.Pool() + for i in range(self.get_number_shards()): + row = self.metadata_df.iloc[i] + X_file = os.path.join(self.data_dir, row['X']) + if row['y'] is not None: + y_file: Optional[str] = os.path.join(self.data_dir, row['y']) + else: + y_file = None + if row['w'] is not None: + w_file: Optional[str] = os.path.join(self.data_dir, row['w']) + else: + w_file = None + ids_file = os.path.join(self.data_dir, row['ids']) + results.append( + pool.apply_async(DiskDataset._transform_shard, + (transformer, i, X_file, y_file, w_file, ids_file, + out_dir, tasks))) + pool.close() + metadata_rows = [r.get() for r in results] + metadata_df = DiskDataset._construct_metadata(metadata_rows) + DiskDataset._save_metadata(metadata_df, out_dir, tasks) + dataset = DiskDataset(out_dir) + else: - return DiskDataset.create_dataset( - generator(), data_dir=out_dir, tasks=tasks) + def generator(): + for shard_num, row in self.metadata_df.iterrows(): + logger.info("Transforming shard %d/%d" % (shard_num, n_shards)) + X, y, w, ids = self.get_shard(shard_num) + newx, newy, neww, newids = transformer.transform_array(X, y, w, ids) + yield (newx, newy, neww, newids) + + dataset = DiskDataset.create_dataset( + generator(), data_dir=out_dir, tasks=tasks) + time2 = time.time() + logger.info("TIMING: transforming took %0.3f s" % (time2 - time1)) + return dataset - def make_pytorch_dataset(self, epochs=1, deterministic=False): + @staticmethod + def 
_transform_shard(transformer: "dc.trans.Transformer", shard_num: int, + X_file: str, y_file: str, w_file: str, ids_file: str, + out_dir: str, tasks: np.ndarray) -> List[Optional[str]]: + """This is called by transform() to transform a single shard.""" + X = None if X_file is None else np.array(load_from_disk(X_file)) + y = None if y_file is None else np.array(load_from_disk(y_file)) + w = None if w_file is None else np.array(load_from_disk(w_file)) + ids = np.array(load_from_disk(ids_file)) + X, y, w, ids = transformer.transform_array(X, y, w, ids) + basename = "shard-%d" % shard_num + return DiskDataset.write_data_to_disk(out_dir, basename, tasks, X, y, w, + ids) + + def make_pytorch_dataset(self, + epochs: int = 1, + deterministic: bool = False, + batch_size: Optional[int] = None): """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset. - Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) for - one sample. + Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) + containing the data for one batch, or for a single sample if batch_size is None. Parameters ---------- - epochs: int - the number of times to iterate over the Dataset - deterministic: bool - if True, the data is produced in order. If False, a different random - permutation of the data is used for each epoch. 
- """ - import torch - - def iterate(): - worker_info = torch.utils.data.get_worker_info() - n_shards = self.get_number_shards() - if worker_info is None: - first_shard = 0 - last_shard = n_shards - else: - first_shard = worker_info.id * n_shards // worker_info.num_workers - last_shard = (worker_info.id + 1) * n_shards // worker_info.num_workers - if first_shard == last_shard: - return - shard_indices = list(range(first_shard, last_shard)) - for epoch in range(epochs): - for X, y, w, ids in self._iterbatches_from_shards( - shard_indices, deterministic=deterministic): - for i in range(X.shape[0]): - yield (X[i], y[i], w[i], ids[i]) + epochs: int, default 1 + The number of times to iterate over the Dataset + deterministic: bool, default False + If True, the data is produced in order. If False, a different + random permutation of the data is used for each epoch. + batch_size: int, optional (default None) + The number of samples to return in each batch. If None, each returned + value is a single sample. - class TorchDataset(torch.utils.data.IterableDataset): + Returns + ------- + torch.utils.data.IterableDataset + `torch.utils.data.IterableDataset` that iterates over the data in + this dataset. - def __iter__(self): - return iterate() + Notes + ----- + This method requires PyTorch to be installed. 
+ """ + try: + from deepchem.data.pytorch_datasets import _TorchDiskDataset + except: + raise ImportError("This method requires PyTorch to be installed.") - return TorchDataset() + pytorch_ds = _TorchDiskDataset( + disk_dataset=self, + epochs=epochs, + deterministic=deterministic, + batch_size=batch_size) + return pytorch_ds @staticmethod - def from_numpy(X, y=None, w=None, ids=None, tasks=None, data_dir=None): - """Creates a DiskDataset object from specified Numpy arrays.""" - n_samples = len(X) - if ids is None: - ids = np.arange(n_samples) - - if y is not None: - if w is None: - if len(y.shape) == 1: - w = np.ones(y.shape[0], np.float32) - else: - w = np.ones((y.shape[0], 1), np.float32) - - if tasks is None: - if len(y.shape) > 1: - n_tasks = y.shape[1] - else: - n_tasks = 1 - tasks = np.arange(n_tasks) + def from_numpy(X: np.ndarray, + y: Optional[np.ndarray] = None, + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None, + tasks: Optional[Sequence] = None, + data_dir: Optional[str] = None) -> "DiskDataset": + """Creates a DiskDataset object from specified Numpy arrays. - else: - if w is not None: - warnings.warn('y is None but w is not None. Setting w to None', - UserWarning) - w = None + Parameters + ---------- + X: np.ndarray + Feature array. + y: np.ndarray, optional (default None) + Labels array. + w: np.ndarray, optional (default None) + Weights array. + ids: np.ndarray, optional (default None) + Identifiers array. + tasks: Sequence, optional (default None) + Tasks in this dataset + data_dir: str, optional (default None) + The directory to write this dataset to. If none is specified, will use + a temporary directory instead. - if tasks is not None: - warnings.warn('y is None but tasks is not None. Setting tasks to None', - UserWarning) - tasks = None + Returns + ------- + DiskDataset + A new `DiskDataset` constructed from the provided information. 
+ """ + # To unify shape handling so from_numpy behaves like NumpyDataset, we just + # make a NumpyDataset under the hood + dataset = NumpyDataset(X, y, w, ids) + if tasks is None: + tasks = dataset.get_task_names() # raw_data = (X, y, w, ids) return DiskDataset.create_dataset( - [(X, y, w, ids)], data_dir=data_dir, tasks=tasks) + [(dataset.X, dataset.y, dataset.w, dataset.ids)], + data_dir=data_dir, + tasks=tasks) @staticmethod - def merge(datasets, merge_dir=None): - """Merges provided datasets into a merged dataset.""" + def merge(datasets: Iterable["Dataset"], + merge_dir: Optional[str] = None) -> "DiskDataset": + """Merges provided datasets into a merged dataset. + + Parameters + ---------- + datasets: Iterable[Dataset] + List of datasets to merge. + merge_dir: str, optional (default None) + The new directory path to store the merged DiskDataset. + + Returns + ------- + DiskDataset + A merged DiskDataset. + """ if merge_dir is not None: if not os.path.exists(merge_dir): os.makedirs(merge_dir) @@ -1445,25 +1910,43 @@ class DiskDataset(Dataset): tasks = [] for dataset in datasets: try: - tasks.append(dataset.tasks) + tasks.append(dataset.tasks) # type: ignore except AttributeError: pass if tasks: - if len(tasks) < len(datasets) or len(set(map(tuple, tasks))) > 1: + task_tuples = [tuple(task_list) for task_list in tasks] + if len(tasks) < len(datasets) or len(set(task_tuples)) > 1: raise ValueError( 'Cannot merge datasets with different task specifications') - tasks = tasks[0] + merge_tasks = tasks[0] + else: + merge_tasks = [] def generator(): for ind, dataset in enumerate(datasets): + logger.info("Merging in dataset %d/%d" % (ind, len(datasets))) X, y, w, ids = (dataset.X, dataset.y, dataset.w, dataset.ids) yield (X, y, w, ids) return DiskDataset.create_dataset( - generator(), data_dir=merge_dir, tasks=tasks) + generator(), data_dir=merge_dir, tasks=merge_tasks) + + def subset(self, shard_nums: Sequence[int], + subset_dir: Optional[str] = None) -> 
"DiskDataset": + """Creates a subset of the original dataset on disk. + + Parameters + ---------- + shard_nums: Sequence[int] + The indices of shard to extract from the original DiskDataset. + subset_dir: str, optional (default None) + The new directory path to store the subset DiskDataset. - def subset(self, shard_nums, subset_dir=None): - """Creates a subset of the original dataset on disk.""" + Returns + ------- + DiskDataset + A subset DiskDataset. + """ if subset_dir is not None: if not os.path.exists(subset_dir): os.makedirs(subset_dir) @@ -1481,20 +1964,34 @@ class DiskDataset(Dataset): return DiskDataset.create_dataset( generator(), data_dir=subset_dir, tasks=tasks) - def sparse_shuffle(self): + def sparse_shuffle(self) -> None: """Shuffling that exploits data sparsity to shuffle large datasets. - Only for 1-dimensional feature vectors (does not work for tensorial - featurizations). + If feature vectors are sparse, say circular fingerprints or any other + representation that contains few nonzero values, it can be possible to + exploit the sparsity of the vector to simplify shuffles. This method + implements a sparse shuffle by compressing sparse feature vectors down + into a compressed representation, then shuffles this compressed dataset in + memory and writes the results to disk. + + Notes + ----- + This method only works for 1-dimensional feature vectors (does not work + for tensorial featurizations). Note that this shuffle is performed in + place. 
""" time1 = time.time() shard_size = self.get_shard_size() num_shards = self.get_number_shards() - X_sparses, ys, ws, ids = [], [], [], [] - num_features = None + X_sparses: List[np.ndarray] = [] + ys: List[np.ndarray] = [] + ws: List[np.ndarray] = [] + ids: List[np.ndarray] = [] + num_features = -1 for i in range(num_shards): + logger.info("Sparsifying shard %d/%d" % (i, num_shards)) (X_s, y_s, w_s, ids_s) = self.get_shard(i) - if num_features is None: + if num_features == -1: num_features = X_s.shape[1] X_sparse = sparsify_features(X_s) X_sparses, ys, ws, ids = (X_sparses + [X_sparse], ys + [y_s], ws + [w_s], @@ -1509,6 +2006,7 @@ class DiskDataset(Dataset): w[permutation], ids[permutation]) # Write shuffled shards out to disk for i in range(num_shards): + logger.info("Sparse shuffling shard %d/%d" % (i, num_shards)) start, stop = i * shard_size, (i + 1) * shard_size (X_sparse_s, y_s, w_s, ids_s) = (X_sparse[start:stop], y[start:stop], w[start:stop], ids[start:stop]) @@ -1517,83 +2015,87 @@ class DiskDataset(Dataset): time2 = time.time() logger.info("TIMING: sparse_shuffle took %0.3f s" % (time2 - time1)) - def complete_shuffle(self, data_dir=None): - """ - Completely shuffle across all data, across all shards. + def complete_shuffle(self, data_dir: Optional[str] = None) -> Dataset: + """Completely shuffle across all data, across all shards. - Note: this loads all the data into ram, and can be prohibitively - expensive for larger datasets. + Notes + ----- + The algorithm used for this complete shuffle is O(N^2) where N is the + number of shards. It simply constructs each shard of the output dataset + one at a time. Since the complete shuffle can take a long time, it's + useful to watch the logging output. Each shuffled shard is constructed + using select() which logs as it selects from each original shard. This + will results in O(N^2) logging statements, one for each extraction of + shuffled shard i's contributions from original shard j. 
Parameters ---------- - shard_size: int - size of the resulting dataset's size. If None, then the first - shard's shard_size will be used. + data_dir: Optional[str], (default None) + Directory to write the shuffled dataset to. If none is specified a + temporary directory will be used. Returns ------- DiskDataset - A DiskDataset with a single shard. - - """ - all_X = [] - all_y = [] - all_w = [] - all_ids = [] - for Xs, ys, ws, ids in self.itershards(): - all_X.append(Xs) - if ys is not None: - all_y.append(ys) - if ws is not None: - all_w.append(ws) - all_ids.append(ids) - - all_X = np.concatenate(all_X) - all_y = np.concatenate(all_y) - all_w = np.concatenate(all_w) - all_ids = np.concatenate(all_ids) - - perm = np.random.permutation(all_X.shape[0]) - all_X = all_X[perm] - all_y = all_y[perm] - all_w = all_w[perm] - all_ids = all_ids[perm] - - return DiskDataset.from_numpy( - all_X, all_y, all_w, all_ids, data_dir=data_dir) - - def shuffle_each_shard(self): - """Shuffles elements within each shard of the datset.""" + A DiskDataset whose data is a randomly shuffled version of this dataset. + """ + N = len(self) + perm = np.random.permutation(N) + shard_size = self.get_shard_size() + return self.select(perm, data_dir, shard_size) + + def shuffle_each_shard(self, + shard_basenames: Optional[List[str]] = None) -> None: + """Shuffles elements within each shard of the dataset. + + Parameters + ---------- + shard_basenames: List[str], optional (default None) + The basenames for each shard. If this isn't specified, will assume the + basenames of form "shard-i" used by `create_dataset` and `reshard`. 
+ """ tasks = self.get_task_names() # Shuffle the arrays corresponding to each row in metadata_df n_rows = len(self.metadata_df.index) - n_rows = len(self.metadata_df.index) - for i in range(n_rows): - row = self.metadata_df.iloc[i] + if shard_basenames is not None: + if len(shard_basenames) != n_rows: + raise ValueError( + "shard_basenames must provide a basename for each shard in this DiskDataset." + ) + else: + shard_basenames = ["shard-%d" % shard_num for shard_num in range(n_rows)] + for i, basename in zip(range(n_rows), shard_basenames): + logger.info("Shuffling shard %d/%d" % (i, n_rows)) X, y, w, ids = self.get_shard(i) n = X.shape[0] permutation = np.random.permutation(n) X, y, w, ids = (X[permutation], y[permutation], w[permutation], ids[permutation]) - DiskDataset.write_data_to_disk(self.data_dir, "", tasks, X, y, w, ids) + DiskDataset.write_data_to_disk(self.data_dir, basename, tasks, X, y, w, + ids) + # Reset cache + self._cached_shards = None - def shuffle_shards(self): + def shuffle_shards(self) -> None: """Shuffles the order of the shards for this dataset.""" metadata_rows = self.metadata_df.values.tolist() random.shuffle(metadata_rows) self.metadata_df = DiskDataset._construct_metadata(metadata_rows) self.save_to_disk() - def get_shard(self, i): - """Retrieves data for the i-th shard from disk.""" + def get_shard(self, i: int) -> Batch: + """Retrieves data for the i-th shard from disk. - class Shard(object): + Parameters + ---------- + i: int + Shard index for shard to retrieve batch from. - def __init__(self, X, y, w, ids): - self.X = X - self.y = y - self.w = w - self.ids = ids + Returns + ------- + Batch + A batch data for i-th shard. + """ # See if we have a cached copy of this shard. if self._cached_shards is None: @@ -1635,7 +2137,7 @@ class DiskDataset(Dataset): # shard again before the next time we want this one. So just cache as many # as we can and then stop. 
- shard = Shard(X, y, w, ids) + shard = _Shard(X, y, w, ids) shard_size = X.nbytes + ids.nbytes if y is not None: shard_size += y.nbytes @@ -1646,8 +2148,84 @@ class DiskDataset(Dataset): self._cache_used += shard_size return (shard.X, shard.y, shard.w, shard.ids) - def add_shard(self, X, y, w, ids): - """Adds a data shard.""" + def get_shard_ids(self, i: int) -> np.ndarray: + """Retrieves the list of IDs for the i-th shard from disk. + + Parameters + ---------- + i: int + Shard index for shard to retrieve weights from. + + Returns + ------- + np.ndarray + A numpy array of ids for i-th shard. + """ + + if self._cached_shards is not None and self._cached_shards[i] is not None: + return self._cached_shards[i].ids + row = self.metadata_df.iloc[i] + return np.array( + load_from_disk(os.path.join(self.data_dir, row['ids'])), dtype=object) + + def get_shard_y(self, i: int) -> np.ndarray: + """Retrieves the labels for the i-th shard from disk. + + Parameters + ---------- + i: int + Shard index for shard to retrieve labels from. + + Returns + ------- + np.ndarray + A numpy array of labels for i-th shard. + """ + + if self._cached_shards is not None and self._cached_shards[i] is not None: + return self._cached_shards[i].y + row = self.metadata_df.iloc[i] + return np.array( + load_from_disk(os.path.join(self.data_dir, row['y'])), dtype=object) + + def get_shard_w(self, i: int) -> np.ndarray: + """Retrieves the weights for the i-th shard from disk. + + Parameters + ---------- + i: int + Shard index for shard to retrieve weights from. + + Returns + ------- + np.ndarray + A numpy array of weights for i-th shard. 
+ """ + + if self._cached_shards is not None and self._cached_shards[i] is not None: + return self._cached_shards[i].w + row = self.metadata_df.iloc[i] + return np.array( + load_from_disk(os.path.join(self.data_dir, row['w'])), dtype=object) + + def add_shard(self, + X: np.ndarray, + y: Optional[np.ndarray] = None, + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None) -> None: + """Adds a data shard. + + Parameters + ---------- + X: np.ndarray + Feature array. + y: np.ndarray, optioanl (default None) + Labels array. + w: np.ndarray, optioanl (default None) + Weights array. + ids: np.ndarray, optioanl (default None) + Identifiers array. + """ metadata_rows = self.metadata_df.values.tolist() shard_num = len(metadata_rows) basename = "shard-%d" % shard_num @@ -1658,82 +2236,203 @@ class DiskDataset(Dataset): self.metadata_df = DiskDataset._construct_metadata(metadata_rows) self.save_to_disk() - def set_shard(self, shard_num, X, y, w, ids): - """Writes data shard to disk""" + def set_shard(self, + shard_num: int, + X: np.ndarray, + y: Optional[np.ndarray] = None, + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None) -> None: + """Writes data shard to disk. + + Parameters + ---------- + shard_num: int + Shard index for shard to set new data. + X: np.ndarray + Feature array. + y: np.ndarray, optioanl (default None) + Labels array. + w: np.ndarray, optioanl (default None) + Weights array. + ids: np.ndarray, optioanl (default None) + Identifiers array. + """ basename = "shard-%d" % shard_num tasks = self.get_task_names() DiskDataset.write_data_to_disk(self.data_dir, basename, tasks, X, y, w, ids) self._cached_shards = None - def select(self, indices, select_dir=None): + def select(self, + indices: Sequence[int], + select_dir: Optional[str] = None, + select_shard_size: Optional[int] = None, + output_numpy_dataset: Optional[bool] = False) -> Dataset: """Creates a new dataset from a selection of indices from self. 
+ Examples + -------- + >>> import numpy as np + >>> X = np.random.rand(10, 10) + >>> dataset = dc.data.DiskDataset.from_numpy(X) + >>> selected = dataset.select([1, 3, 4]) + >>> len(selected) + 3 + Parameters ---------- - indices: list + indices: Sequence List of indices to select. - select_dir: string - Path to new directory that the selected indices will be copied - to. - """ - if select_dir is not None: - if not os.path.exists(select_dir): - os.makedirs(select_dir) + select_dir: str, optional (default None) + Path to new directory that the selected indices will be copied to. + select_shard_size: Optional[int], (default None) + If specified, the shard-size to use for output selected `DiskDataset`. + If not output_numpy_dataset, then this is set to this current dataset's + shard size if not manually specified. + output_numpy_dataset: Optional[bool], (default False) + If True, output an in-memory `NumpyDataset` instead of a `DiskDataset`. + Note that `select_dir` and `select_shard_size` must be `None` if this + is `True` + + Returns + ------- + Dataset + A dataset containing the selected samples. The default dataset is `DiskDataset`. + If `output_numpy_dataset` is True, the dataset is `NumpyDataset`. 
+ """ + if output_numpy_dataset and (select_dir is not None or + select_shard_size is not None): + raise ValueError( + "If output_numpy_dataset is set, then select_dir and select_shard_size must both be None" + ) + if output_numpy_dataset: + # When outputting a NumpyDataset, we have 1 in-memory shard + select_shard_size = len(indices) else: - select_dir = tempfile.mkdtemp() + if select_dir is not None: + if not os.path.exists(select_dir): + os.makedirs(select_dir) + else: + select_dir = tempfile.mkdtemp() + if select_shard_size is None: + select_shard_size = self.get_shard_size() # Handle edge case with empty indices if not len(indices): - return DiskDataset.create_dataset([], data_dir=select_dir) - indices = np.array(sorted(indices)).astype(int) + if not output_numpy_dataset: + return DiskDataset.create_dataset([], data_dir=select_dir) + else: + return NumpyDataset( + np.array([]), np.array([]), np.array([]), np.array([])) + + N = len(indices) + indices = np.array(indices).astype(int) tasks = self.get_task_names() + n_shards = self.get_number_shards() + # We use two loops here. 
The outer while loop walks over selection shards + # (the chunks of the indices to select that should go into separate + # output shards), while the inner for loop walks over the shards in the + # source datasets to select out the shard indices from that source shard def generator(): - count, indices_count = 0, 0 - for shard_num, (X, y, w, ids) in enumerate(self.itershards()): - shard_len = len(X) - # Find indices which rest in this shard - num_shard_elts = 0 - while indices[indices_count + num_shard_elts] < count + shard_len: - num_shard_elts += 1 - if indices_count + num_shard_elts >= len(indices): + start = 0 + select_shard_num = 0 + while start < N: + logger.info( + "Constructing selection output shard %d" % (select_shard_num + 1)) + end = min(start + select_shard_size, N) + select_shard_indices = indices[start:end] + sorted_indices = np.array(sorted(select_shard_indices)).astype(int) + + Xs, ys, ws, ids_s = [], [], [], [] + count, indices_count = 0, 0 + for shard_num in range(self.get_number_shards()): + logger.info( + "Selecting from input shard %d/%d for selection output shard %d" % + (shard_num + 1, n_shards, select_shard_num + 1)) + if self.legacy_metadata: + ids = self.get_shard_ids(shard_num) + shard_len = len(ids) + else: + shard_X_shape, _, _, _ = self._get_shard_shape(shard_num) + if len(shard_X_shape) > 0: + shard_len = shard_X_shape[0] + else: + shard_len = 0 + # Find indices which rest in this shard + num_shard_elts = 0 + while sorted_indices[indices_count + + num_shard_elts] < count + shard_len: + num_shard_elts += 1 + if (indices_count + num_shard_elts) >= len(sorted_indices): + break + if num_shard_elts == 0: + count += shard_len + continue + else: + X, y, w, ids = self.get_shard(shard_num) + # Need to offset indices to fit within shard_size + shard_inds = sorted_indices[indices_count:indices_count + + num_shard_elts] - count + # Handle empty case where no data from this shard needed + X_sel = X[shard_inds] + # Handle the case of datasets with 
y/w missing + if y is not None: + y_sel = y[shard_inds] + else: + y_sel = None + if w is not None: + w_sel = w[shard_inds] + else: + w_sel = None + ids_sel = ids[shard_inds] + Xs.append(X_sel) + ys.append(y_sel) + ws.append(w_sel) + ids_s.append(ids_sel) + indices_count += num_shard_elts + count += shard_len + # Break if all indices have been used up already + if indices_count >= len(sorted_indices): break - # Need to offset indices to fit within shard_size - shard_inds = indices[indices_count:indices_count + - num_shard_elts] - count - X_sel = X[shard_inds] - # Handle the case of datasets with y/w missing - if y is not None: - y_sel = y[shard_inds] - else: - y_sel = None - if w is not None: - w_sel = w[shard_inds] - else: - w_sel = None - ids_sel = ids[shard_inds] - yield (X_sel, y_sel, w_sel, ids_sel) - # Updating counts - indices_count += num_shard_elts - count += shard_len - # Break when all indices have been used up already - if indices_count >= len(indices): - return + # Note these will be in the sorted order + X = np.concatenate(Xs, axis=0) + y = np.concatenate(ys, axis=0) + w = np.concatenate(ws, axis=0) + ids = np.concatenate(ids_s, axis=0) + # We need to recover the original ordering. We can do this by using + # np.where to find the locatios of the original indices in the sorted + # indices. + reverted_indices = np.array( + # We know there's only one match for np.where since this is a + # permutation, so the [0][0] pulls out the exact match location. 
+ [ + np.where(sorted_indices == orig_index)[0][0] + for orig_index in select_shard_indices + ]) + X, y, w, ids = X[reverted_indices], y[reverted_indices], w[ + reverted_indices], ids[reverted_indices] + yield (X, y, w, ids) + start = end + select_shard_num += 1 - return DiskDataset.create_dataset( - generator(), data_dir=select_dir, tasks=tasks) + if not output_numpy_dataset: + return DiskDataset.create_dataset( + generator(), data_dir=select_dir, tasks=tasks) + else: + X, y, w, ids = next(generator()) + return NumpyDataset(X, y, w, ids) @property - def ids(self): + def ids(self) -> np.ndarray: """Get the ids vector for this dataset as a single numpy array.""" if len(self) == 0: return np.array([]) ids = [] - for (_, _, _, ids_b) in self.itershards(): - ids.append(np.atleast_1d(np.squeeze(ids_b))) + for i in range(self.get_number_shards()): + ids.append(np.atleast_1d(np.squeeze(self.get_shard_ids(i)))) return np.concatenate(ids) @property - def X(self): + def X(self) -> np.ndarray: """Get the X vector for this dataset as a single numpy array.""" Xs = [] one_dimensional = False @@ -1747,11 +2446,14 @@ class DiskDataset(Dataset): return np.concatenate(Xs) @property - def y(self): + def y(self) -> np.ndarray: """Get the y vector for this dataset as a single numpy array.""" + if len(self) == 0: + return np.array([]) ys = [] one_dimensional = False - for (_, y_b, _, _) in self.itershards(): + for i in range(self.get_number_shards()): + y_b = self.get_shard_y(i) ys.append(y_b) if len(y_b.shape) == 1: one_dimensional = True @@ -1761,12 +2463,13 @@ class DiskDataset(Dataset): return np.concatenate(ys) @property - def w(self): + def w(self) -> np.ndarray: """Get the weight vector for this dataset as a single numpy array.""" ws = [] one_dimensional = False - for (_, _, w_b, _) in self.itershards(): - ws.append(np.array(w_b)) + for i in range(self.get_number_shards()): + w_b = self.get_shard_w(i) + ws.append(w_b) if len(w_b.shape) == 1: one_dimensional = True if not 
one_dimensional: @@ -1775,53 +2478,109 @@ class DiskDataset(Dataset): return np.concatenate(ws) @property - def memory_cache_size(self): + def memory_cache_size(self) -> int: """Get the size of the memory cache for this dataset, measured in bytes.""" return self._memory_cache_size @memory_cache_size.setter - def memory_cache_size(self, size): + def memory_cache_size(self, size: int) -> None: """Get the size of the memory cache for this dataset, measured in bytes.""" self._memory_cache_size = size if self._cache_used > size: self._cached_shards = None - def __len__(self): - """ - Finds number of elements in dataset. - """ + def __len__(self) -> int: + """Finds number of elements in dataset.""" total = 0 for _, row in self.metadata_df.iterrows(): y = load_from_disk(os.path.join(self.data_dir, row['ids'])) total += len(y) return total - def get_shape(self): - """Finds shape of dataset.""" + def _get_shard_shape(self, + shard_num: int) -> Tuple[Shape, Shape, Shape, Shape]: + """Finds the shape of the specified shard.""" + if self.legacy_metadata: + raise ValueError( + "This function requires the new metadata format to be called. Please reshard this dataset by calling the reshard() method." 
+ ) n_tasks = len(self.get_task_names()) - for shard_num, (X, y, w, ids) in enumerate(self.itershards()): - if shard_num == 0: - X_shape = np.array(X.shape) - if n_tasks > 0: - y_shape = np.array(y.shape) - w_shape = np.array(w.shape) - else: - y_shape = tuple() - w_shape = tuple() - ids_shape = np.array(ids.shape) + row = self.metadata_df.iloc[shard_num] + if row['X_shape'] is not None: + shard_X_shape = make_tuple(str(row['X_shape'])) + else: + shard_X_shape = tuple() + if n_tasks > 0: + if row['y_shape'] is not None: + shard_y_shape = make_tuple(str(row['y_shape'])) + else: + shard_y_shape = tuple() + if row['w_shape'] is not None: + shard_w_shape = make_tuple(str(row['w_shape'])) else: - X_shape[0] += np.array(X.shape)[0] - if n_tasks > 0: - y_shape[0] += np.array(y.shape)[0] - w_shape[0] += np.array(w.shape)[0] - ids_shape[0] += np.array(ids.shape)[0] - return tuple(X_shape), tuple(y_shape), tuple(w_shape), tuple(ids_shape) - - def get_label_means(self): + shard_w_shape = tuple() + else: + shard_y_shape = tuple() + shard_w_shape = tuple() + if row['ids_shape'] is not None: + shard_ids_shape = make_tuple(str(row['ids_shape'])) + else: + shard_ids_shape = tuple() + X_shape, y_shape, w_shape, ids_shape = tuple( + np.array(shard_X_shape)), tuple(np.array(shard_y_shape)), tuple( + np.array(shard_w_shape)), tuple(np.array(shard_ids_shape)) + return X_shape, y_shape, w_shape, ids_shape + + def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]: + """Finds shape of dataset. + + Returns four tuples, giving the shape of the X, y, w, and ids arrays. 
+ """ + n_tasks = len(self.get_task_names()) + n_rows = len(self.metadata_df.index) + # If shape metadata is available use it to directly compute shape from + # metadata + if not self.legacy_metadata: + for shard_num in range(n_rows): + shard_X_shape, shard_y_shape, shard_w_shape, shard_ids_shape = self._get_shard_shape( + shard_num) + if shard_num == 0: + X_shape, y_shape, w_shape, ids_shape = np.array( + shard_X_shape), np.array(shard_y_shape), np.array( + shard_w_shape), np.array(shard_ids_shape) + else: + X_shape[0] += shard_X_shape[0] + if n_tasks > 0: + y_shape[0] += shard_y_shape[0] + w_shape[0] += shard_w_shape[0] + ids_shape[0] += shard_ids_shape[0] + return tuple(X_shape), tuple(y_shape), tuple(w_shape), tuple(ids_shape) + # In absense of shape metadata, fall back to loading data from disk to + # find shape. + else: + for shard_num, (X, y, w, ids) in enumerate(self.itershards()): + if shard_num == 0: + X_shape = np.array(X.shape) + if n_tasks > 0: + y_shape = np.array(y.shape) + w_shape = np.array(w.shape) + else: + y_shape = tuple() + w_shape = tuple() + ids_shape = np.array(ids.shape) + else: + X_shape[0] += np.array(X.shape)[0] + if n_tasks > 0: + y_shape[0] += np.array(y.shape)[0] + w_shape[0] += np.array(w.shape)[0] + ids_shape[0] += np.array(ids.shape)[0] + return tuple(X_shape), tuple(y_shape), tuple(w_shape), tuple(ids_shape) + + def get_label_means(self) -> pd.DataFrame: """Return pandas series of label means.""" return self.metadata_df["y_means"] - def get_label_stds(self): + def get_label_stds(self) -> pd.DataFrame: """Return pandas series of label stds.""" return self.metadata_df["y_stds"] @@ -1829,23 +2588,27 @@ class DiskDataset(Dataset): class ImageDataset(Dataset): """A Dataset that loads data from image files on disk.""" - def __init__(self, X, y, w=None, ids=None): + def __init__(self, + X: Union[np.ndarray, List[str]], + y: Optional[Union[np.ndarray, List[str]]], + w: Optional[np.ndarray] = None, + ids: Optional[np.ndarray] = None) -> 
None: """Create a dataset whose X and/or y array is defined by image files on disk. Parameters ---------- - X: ndarray or list of strings + X: np.ndarray or List[str] The dataset's input data. This may be either a single NumPy array directly containing the data, or a list containing the paths to the image files - y: ndarray or list of strings + y: np.ndarray or List[str] The dataset's labels. This may be either a single NumPy array directly containing the data, or a list containing the paths to the image files - w: ndarray + w: np.ndarray, optional (default None) a 1D or 2D array containing the weights for each sample or sample/task pair - ids: ndarray + ids: np.ndarray, optional (default None) the sample IDs """ n_samples = len(X) @@ -1854,7 +2617,12 @@ class ImageDataset(Dataset): self._X_shape = self._find_array_shape(X) self._y_shape = self._find_array_shape(y) if w is None: - if len(self._y_shape) == 1: + if len(self._y_shape) == 0: + # Case n_samples should be 1 + if n_samples != 1: + raise ValueError("y can only be a scalar if n_samples == 1") + w = np.ones_like(y) + elif len(self._y_shape) == 1: w = np.ones(self._y_shape[0], np.float32) else: w = np.ones((self._y_shape[0], 1), np.float32) @@ -1867,68 +2635,81 @@ class ImageDataset(Dataset): ids = np.arange(n_samples) self._X = X self._y = y - self._w = w + self._w: np.ndarray = w self._ids = np.array(ids, dtype=object) - def _find_array_shape(self, array): + def _find_array_shape(self, array: Sequence) -> Shape: if isinstance(array, np.ndarray): return array.shape - image_shape = dc.data.ImageLoader.load_img([array[0]]).shape[1:] + image_shape = load_image_files([array[0]]).shape[1:] return np.concatenate([[len(array)], image_shape]) - def __len__(self): - """ - Get the number of elements in the dataset. 
- """ + def __len__(self) -> int: + """Get the number of elements in the dataset.""" return self._X_shape[0] - def get_shape(self): + def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]: """Get the shape of the dataset. - Returns four tuples, giving the shape of the X, y, w, and ids - arrays. + Returns four tuples, giving the shape of the X, y, w, and ids arrays. """ return self._X_shape, self._y_shape, self._w.shape, self._ids.shape - def get_task_names(self): + def get_task_names(self) -> np.ndarray: """Get the names of the tasks associated with this dataset.""" if len(self._y_shape) < 2: return np.array([0]) return np.arange(self._y_shape[1]) @property - def X(self): + def X(self) -> np.ndarray: """Get the X vector for this dataset as a single numpy array.""" if isinstance(self._X, np.ndarray): return self._X - return dc.data.ImageLoader.load_img(self._X) + return load_image_files(self._X) @property - def y(self): + def y(self) -> np.ndarray: """Get the y vector for this dataset as a single numpy array.""" if isinstance(self._y, np.ndarray): return self._y - return dc.data.ImageLoader.load_img(self._y) + return load_image_files(self._y) @property - def ids(self): + def ids(self) -> np.ndarray: """Get the ids vector for this dataset as a single numpy array.""" return self._ids @property - def w(self): + def w(self) -> np.ndarray: """Get the weight vector for this dataset as a single numpy array.""" return self._w def iterbatches(self, - batch_size=None, - epochs=1, - deterministic=False, - pad_batches=False): + batch_size: Optional[int] = None, + epochs: int = 1, + deterministic: bool = False, + pad_batches: bool = False) -> Iterator[Batch]: """Get an object that iterates over minibatches from the dataset. - Each minibatch is returned as a tuple of four numpy arrays: (X, y, - w, ids). + Each minibatch is returned as a tuple of four numpy arrays: + `(X, y, w, ids)`. 
+ + Parameters + ---------- + batch_size: int, optional (default None) + Number of elements in each batch. + epochs: int, default 1 + Number of epochs to walk over dataset. + deterministic: bool, default False + If True, follow deterministic order. + pad_batches: bool, default False + If True, pad each batch to `batch_size`. + + Returns + ------- + Iterator[Batch] + Generator which yields tuples of four numpy arrays `(X, y, w, ids)`. """ def iterate(dataset, batch_size, epochs, deterministic, pad_batches): @@ -1950,13 +2731,11 @@ class ImageDataset(Dataset): if isinstance(dataset._X, np.ndarray): X_batch = dataset._X[perm_indices] else: - X_batch = dc.data.ImageLoader.load_img( - [dataset._X[i] for i in perm_indices]) + X_batch = load_image_files([dataset._X[i] for i in perm_indices]) if isinstance(dataset._y, np.ndarray): y_batch = dataset._y[perm_indices] else: - y_batch = dc.data.ImageLoader.load_img( - [dataset._y[i] for i in perm_indices]) + y_batch = load_image_files([dataset._y[i] for i in perm_indices]) w_batch = dataset._w[perm_indices] ids_batch = dataset._ids[perm_indices] if pad_batches: @@ -1967,28 +2746,45 @@ class ImageDataset(Dataset): return iterate(self, batch_size, epochs, deterministic, pad_batches) - def itersamples(self): - """Get an object that iterates over the samples in the dataset. + def _get_image(self, array: Union[np.ndarray, List[str]], + indices: Union[int, np.ndarray]) -> np.ndarray: + """Method for loading an image - Example: + Parameters + ---------- + array: Union[np.ndarray, List[str]] + A numpy array which contains images or List of image filenames + indices: Union[int, np.ndarray] + Index you want to get the images - >>> dataset = NumpyDataset(np.ones((2,2))) - >>> for x, y, w, id in dataset.itersamples(): - ... 
print(x.tolist(), y.tolist(), w.tolist(), id) - [1.0, 1.0] [0.0] [0.0] 0 - [1.0, 1.0] [0.0] [0.0] 1 + Returns + ------- + np.ndarray + Loaded images """ + if isinstance(array, np.ndarray): + return array[indices] + if isinstance(indices, np.ndarray): + return load_image_files([array[i] for i in indices]) + return load_image_files([array[indices]])[0] - def get_image(array, index): - if isinstance(array, np.ndarray): - return array[index] - return dc.data.ImageLoader.load_img([array[index]])[0] + def itersamples(self) -> Iterator[Batch]: + """Get an object that iterates over the samples in the dataset. + Returns + ------- + Iterator[Batch] + Iterator which yields tuples of four numpy arrays `(X, y, w, ids)`. + """ n_samples = self._X_shape[0] - return ((get_image(self._X, i), get_image(self._y, i), self._w[i], - self._ids[i]) for i in range(n_samples)) - - def transform(self, fn, **args): + return ((self._get_image(self._X, i), self._get_image(self._y, i), + self._w[i], self._ids[i]) for i in range(n_samples)) + + def transform( + self, + transformer: "dc.trans.Transformer", + **args, + ) -> "NumpyDataset": """Construct a new dataset by applying a transformation to every sample in this dataset. 
The argument is a function that can be called as follows: @@ -2001,26 +2797,34 @@ class ImageDataset(Dataset): Parameters ---------- - fn: function - A function to apply to each sample in the dataset + transformer: dc.trans.Transformer + The transformation to apply to each sample in the dataset Returns ------- - a newly constructed Dataset object + NumpyDataset + A newly constructed NumpyDataset object """ - newx, newy, neww = fn(self.X, self.y, self.w) - return NumpyDataset(newx, newy, neww, self.ids[:]) + newx, newy, neww, newids = transformer.transform_array( + self.X, self.y, self.w, self.ids) + return NumpyDataset(newx, newy, neww, newids) - def select(self, indices, select_dir=None): + def select(self, indices: Sequence[int], + select_dir: Optional[str] = None) -> "ImageDataset": """Creates a new dataset from a selection of indices from self. Parameters ---------- - indices: list + indices: Sequence List of indices to select. - select_dir: string + select_dir: str, optional (default None) Used to provide same API as `DiskDataset`. Ignored since `ImageDataset` is purely in-memory. + + Returns + ------- + ImageDataset + A selected ImageDataset object """ if isinstance(self._X, np.ndarray): X = self._X[indices] @@ -2034,58 +2838,47 @@ class ImageDataset(Dataset): ids = self._ids[indices] return ImageDataset(X, y, w, ids) - def make_pytorch_dataset(self, epochs=1, deterministic=False): + def make_pytorch_dataset(self, + epochs: int = 1, + deterministic: bool = False, + batch_size: Optional[int] = None): """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset. - Each value returned by the Dataset's iterator is a tuple of (X, y, - w, id) for one sample. + Each value returned by the Dataset's iterator is a tuple of (X, y, w, id) + containing the data for one batch, or for a single sample if batch_size is None. 
Parameters ---------- - epochs: int - the number of times to iterate over the Dataset - deterministic: bool - if True, the data is produced in order. If False, a different + epochs: int, default 1 + The number of times to iterate over the Dataset. + deterministic: bool, default False + If True, the data is produced in order. If False, a different random permutation of the data is used for each epoch. + batch_size: int, optional (default None) + The number of samples to return in each batch. If None, each returned + value is a single sample. Returns ------- - `torch.utils.data.IterableDataset` iterating over the same data as - this dataset. - """ - import torch - - def get_image(array, index): - if isinstance(array, np.ndarray): - return array[index] - return dc.data.ImageLoader.load_img([array[index]])[0] - - def iterate(): - n_samples = self._X_shape[0] - worker_info = torch.utils.data.get_worker_info() - if worker_info is None: - first_sample = 0 - last_sample = n_samples - else: - first_sample = worker_info.id * n_samples // worker_info.num_workers - last_sample = ( - worker_info.id + 1) * n_samples // worker_info.num_workers - for epoch in range(epochs): - if deterministic: - order = first_sample + np.arange(last_sample - first_sample) - else: - order = first_sample + np.random.permutation(last_sample - - first_sample) - for i in order: - yield (get_image(self._X, i), get_image(self._y, i), self._w[i], - self._ids[i]) - - class TorchDataset(torch.utils.data.IterableDataset): + torch.utils.data.IterableDataset + `torch.utils.data.IterableDataset` that iterates over the data in + this dataset. - def __iter__(self): - return iterate() + Notes + ----- + This method requires PyTorch to be installed. 
+ """ + try: + from deepchem.data.pytorch_datasets import _TorchImageDataset + except: + raise ValueError("This method requires PyTorch to be installed.") - return TorchDataset() + pytorch_ds = _TorchImageDataset( + image_dataset=self, + epochs=epochs, + deterministic=deterministic, + batch_size=batch_size) + return pytorch_ds class Databag(object): @@ -2112,12 +2905,12 @@ class Databag(object): from multiple `Dataset` objects at a time. """ - def __init__(self, datasets=None): + def __init__(self, datasets: Optional[Dict[Any, Dataset]] = None) -> None: """Initialize this `Databag`. Parameters ---------- - datasets: dict, optional + datasets: dict, optional (default None) A dictionary mapping keys to `Dataset` objects. """ if datasets is None: @@ -2125,19 +2918,19 @@ class Databag(object): else: self.datasets = datasets - def add_dataset(self, key, dataset): + def add_dataset(self, key: Any, dataset: Dataset) -> None: """Adds a dataset to this databag. Parameters ---------- - key: hashable value + key: Any, hashable value Key to be added - dataset: `Dataset` + dataset: Dataset The dataset that `key` should point to. """ self.datasets[key] = dataset - def iterbatches(self, **kwargs): + def iterbatches(self, **kwargs) -> Iterator[Dict[str, np.ndarray]]: """Loop through all internal datasets in the same order. 
Parameters @@ -2146,12 +2939,13 @@ class Databag(object): Number of samples from each dataset to return epochs: int Number of times to loop through the datasets - pad_batches: boolean + pad_batches: bool Should all batches==batch_size Returns ------- - Generator which yields a dictionary {key: dataset.X[batch]} + Iterator[Dict[str, np.ndarray]] + Generator which yields a dictionary {key: dataset.X[batch]} """ key_order = [x for x in self.datasets.keys()] if "epochs" in kwargs: diff --git a/deepchem/data/pytorch_datasets.py b/deepchem/data/pytorch_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..2584b9681a623518fb6593b307d6c8ef79e8ae76 --- /dev/null +++ b/deepchem/data/pytorch_datasets.py @@ -0,0 +1,165 @@ +import numpy as np +import torch + +from deepchem.data.datasets import NumpyDataset, DiskDataset, ImageDataset + + +class _TorchNumpyDataset(torch.utils.data.IterableDataset): # type: ignore + + def __init__(self, + numpy_dataset: NumpyDataset, + epochs: int, + deterministic: bool, + batch_size: int = None): + """ + Parameters + ---------- + numpy_dataset: NumpyDataset + The original NumpyDataset which you want to convert to PyTorch Dataset + epochs: int + the number of times to iterate over the Dataset + deterministic: bool + if True, the data is produced in order. If False, a different random + permutation of the data is used for each epoch. + batch_size: int + the number of samples to return in each batch. If None, each returned + value is a single sample. 
+ """ + self.numpy_dataset = numpy_dataset + self.epochs = epochs + self.deterministic = deterministic + self.batch_size = batch_size + + def __iter__(self): + n_samples = self.numpy_dataset._X.shape[0] + worker_info = torch.utils.data.get_worker_info() + if worker_info is None: + first_sample = 0 + last_sample = n_samples + else: + first_sample = worker_info.id * n_samples // worker_info.num_workers + last_sample = (worker_info.id + 1) * n_samples // worker_info.num_workers + for epoch in range(self.epochs): + if self.deterministic: + order = first_sample + np.arange(last_sample - first_sample) + else: + # Ensure that every worker will pick the same random order for each epoch. + random = np.random.RandomState(epoch) + order = random.permutation(n_samples)[first_sample:last_sample] + if self.batch_size is None: + for i in order: + yield (self.numpy_dataset._X[i], self.numpy_dataset._y[i], + self.numpy_dataset._w[i], self.numpy_dataset._ids[i]) + else: + for i in range(0, len(order), self.batch_size): + indices = order[i:i + self.batch_size] + yield (self.numpy_dataset._X[indices], self.numpy_dataset._y[indices], + self.numpy_dataset._w[indices], + self.numpy_dataset._ids[indices]) + + +class _TorchDiskDataset(torch.utils.data.IterableDataset): # type: ignore + + def __init__(self, + disk_dataset: DiskDataset, + epochs: int, + deterministic: bool, + batch_size: int = None): + """ + Parameters + ---------- + disk_dataset: DiskDataset + The original DiskDataset which you want to convert to PyTorch Dataset + epochs: int + the number of times to iterate over the Dataset + deterministic: bool + if True, the data is produced in order. If False, a different random + permutation of the data is used for each epoch. + batch_size: int + the number of samples to return in each batch. If None, each returned + value is a single sample. 
+ """ + self.disk_dataset = disk_dataset + self.epochs = epochs + self.deterministic = deterministic + self.batch_size = batch_size + + def __iter__(self): + worker_info = torch.utils.data.get_worker_info() + n_shards = self.disk_dataset.get_number_shards() + if worker_info is None: + first_shard = 0 + last_shard = n_shards + else: + first_shard = worker_info.id * n_shards // worker_info.num_workers + last_shard = (worker_info.id + 1) * n_shards // worker_info.num_workers + if first_shard == last_shard: + return + + shard_indices = list(range(first_shard, last_shard)) + for X, y, w, ids in self.disk_dataset._iterbatches_from_shards( + shard_indices, + batch_size=self.batch_size, + epochs=self.epochs, + deterministic=self.deterministic): + if self.batch_size is None: + for i in range(X.shape[0]): + yield (X[i], y[i], w[i], ids[i]) + else: + yield (X, y, w, ids) + + +class _TorchImageDataset(torch.utils.data.IterableDataset): # type: ignore + + def __init__(self, + image_dataset: ImageDataset, + epochs: int, + deterministic: bool, + batch_size: int = None): + """ + Parameters + ---------- + image_dataset: ImageDataset + The original ImageDataset which you want to convert to PyTorch Dataset + epochs: int + the number of times to iterate over the Dataset + deterministic: bool + if True, the data is produced in order. If False, a different random + permutation of the data is used for each epoch. + batch_size: int + the number of samples to return in each batch. If None, each returned + value is a single sample. 
+ """ + self.image_dataset = image_dataset + self.epochs = epochs + self.deterministic = deterministic + self.batch_size = batch_size + + def __iter__(self): + n_samples = self.image_dataset._X_shape[0] + worker_info = torch.utils.data.get_worker_info() + if worker_info is None: + first_sample = 0 + last_sample = n_samples + else: + first_sample = worker_info.id * n_samples // worker_info.num_workers + last_sample = (worker_info.id + 1) * n_samples // worker_info.num_workers + for epoch in range(self.epochs): + if self.deterministic: + order = first_sample + np.arange(last_sample - first_sample) + else: + # Ensure that every worker will pick the same random order for each epoch. + random = np.random.RandomState(epoch) + order = random.permutation(n_samples)[first_sample:last_sample] + if self.batch_size is None: + for i in order: + yield (self.image_dataset._get_image(self.image_dataset._X, i), + self.image_dataset._get_image(self.image_dataset._y, i), + self.image_dataset._w[i], self.image_dataset._ids[i]) + else: + for i in range(0, len(order), self.batch_size): + indices = order[i:i + self.batch_size] + yield (self.image_dataset._get_image(self.image_dataset._X, indices), + self.image_dataset._get_image(self.image_dataset._y, indices), + self.image_dataset._w[indices], + self.image_dataset._ids[indices]) diff --git a/deepchem/data/supports.py b/deepchem/data/supports.py index 74a696cec9a8d77be96a7df1d02dbfad052fef21..0640b9b9b7e346291cd778f8d44988b0f18e215b 100644 --- a/deepchem/data/supports.py +++ b/deepchem/data/supports.py @@ -303,7 +303,7 @@ class EpisodeGenerator(object): raise StopIteration else: task = self.perm_tasks[self.task_num] # Get id from permutation - #support = self.supports[task][self.trial_num] + # support = self.supports[task][self.trial_num] task_supports, task_tests = self.task_episodes[task] support, test = (task_supports[self.trial_num], task_tests[self.trial_num]) @@ -367,7 +367,7 @@ class SupportGenerator(object): raise StopIteration 
else: task = self.perm_tasks[self.task_num] # Get id from permutation - #support = self.supports[task][self.trial_num] + # support = self.supports[task][self.trial_num] support = get_single_task_support( self.dataset, n_pos=self.n_pos, diff --git a/deepchem/data/tests/__init__.py b/deepchem/data/tests/__init__.py index dbc2309d6d26c96b0ad1daf43d860723abebf44b..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644 --- a/deepchem/data/tests/__init__.py +++ b/deepchem/data/tests/__init__.py @@ -1,122 +0,0 @@ -""" -General API for testing dataset objects -""" -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import unittest -import tempfile -import os -import shutil -import numpy as np -import deepchem as dc - - -def load_solubility_data(): - """Loads solubility dataset""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - task_type = "regression" - input_file = os.path.join(current_dir, "../../models/tests/example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - - return loader.featurize(input_file) - - -def load_butina_data(): - """Loads solubility dataset""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["task"] - # task_type = "regression" - input_file = os.path.join(current_dir, - "../../models/tests/butina_example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - - return loader.featurize(input_file) - - -def load_multitask_data(): - """Load example multitask data.""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = [ - "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", - "task8", "task9", "task10", "task11", "task12", "task13", "task14", - "task15", 
"task16" - ] - input_file = os.path.join(current_dir, - "../../models/tests/multitask_example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - return loader.featurize(input_file) - - -def load_classification_data(): - """Loads classification data from example.csv""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["outcome"] - task_type = "classification" - input_file = os.path.join(current_dir, - "../../models/tests/example_classification.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - return loader.featurize(input_file) - - -def load_sparse_multitask_dataset(): - """Load sparse tox multitask data, sample dataset.""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = [ - "task1", "task2", "task3", "task4", "task5", "task6", "task7", "task8", - "task9" - ] - input_file = os.path.join(current_dir, - "../../models/tests/sparse_multitask_example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - return loader.featurize(input_file) - - -def load_feat_multitask_data(): - """Load example with numerical features, tasks.""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - features = ["feat0", "feat1", "feat2", "feat3", "feat4", "feat5"] - featurizer = dc.feat.UserDefinedFeaturizer(features) - tasks = ["task0", "task1", "task2", "task3", "task4", "task5"] - input_file = os.path.join(current_dir, - "../../models/tests/feat_multitask_example.csv") - loader = dc.data.UserCSVLoader( - tasks=tasks, featurizer=featurizer, id_field="id") - return loader.featurize(input_file) - - -def load_gaussian_cdf_data(): - """Load example with numbers sampled from Gaussian normal distribution. 
- Each feature and task is a column of values that is sampled - from a normal distribution of mean 0, stdev 1.""" - current_dir = os.path.dirname(os.path.abspath(__file__)) - features = ["feat0", "feat1"] - featurizer = dc.feat.UserDefinedFeaturizer(features) - tasks = ["task0", "task1"] - input_file = os.path.join(current_dir, - "../../models/tests/gaussian_cdf_example.csv") - loader = dc.data.UserCSVLoader( - tasks=tasks, featurizer=featurizer, id_field="id") - return loader.featurize(input_file) - - -def load_unlabelled_data(): - current_dir = os.path.dirname(os.path.abspath(__file__)) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = [] - input_file = os.path.join(current_dir, "../../data/tests/no_labels.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - return loader.featurize(input_file) diff --git a/deepchem/data/tests/inorganic_crystal_sample_data.json b/deepchem/data/tests/inorganic_crystal_sample_data.json new file mode 100644 index 0000000000000000000000000000000000000000..2811039299a1e54a95193a8fc0d792d4d6044ad1 --- /dev/null +++ b/deepchem/data/tests/inorganic_crystal_sample_data.json @@ -0,0 +1,5 @@ 
+{"structure":{"@module":"pymatgen.core.structure","@class":"Structure","charge":null,"lattice":{"matrix":[[3.9545311068,0.0,0.0],[0.0,3.9545311068,0.0],[0.0,0.0,3.9545311068]],"a":3.9545311068,"b":3.9545311068,"c":3.9545311068,"alpha":90.0,"beta":90.0,"gamma":90.0,"volume":61.8422081649},"sites":[{"species":[{"element":"Rh","occu":1}],"abc":[0.0,0.0,0.0],"xyz":[0.0,0.0,0.0],"label":"Rh","properties":{}},{"species":[{"element":"Te","occu":1}],"abc":[0.5,0.5,0.5],"xyz":[1.9772655534,1.9772655534,1.9772655534],"label":"Te","properties":{}},{"species":[{"element":"N","occu":1}],"abc":[0.5,0.0,0.5],"xyz":[1.9772655534,0.0,1.9772655534],"label":"N","properties":{}},{"species":[{"element":"N","occu":1}],"abc":[0.5,0.5,0.0],"xyz":[1.9772655534,1.9772655534,0.0],"label":"N","properties":{}},{"species":[{"element":"N","occu":1}],"abc":[0.0,0.5,0.5],"xyz":[0.0,1.9772655534,1.9772655534],"label":"N","properties":{}}]},"e_form":2.16,"formula":"TeRhN3"} +{"structure":{"@module":"pymatgen.core.structure","@class":"Structure","charge":null,"lattice":{"matrix":[[4.2894318978,0.0,0.0],[0.0,4.2894318978,0.0],[0.0,0.0,4.2894318978]],"a":4.2894318978,"b":4.2894318978,"c":4.2894318978,"alpha":90.0,"beta":90.0,"gamma":90.0,"volume":78.9222269246},"sites":[{"species":[{"element":"Hf","occu":1}],"abc":[0.5922504528,0.0,0.0],"xyz":[2.5404179838,0.0,0.0],"label":"Hf","properties":{}},{"species":[{"element":"Te","occu":1}],"abc":[0.2378848852,0.5,0.5],"xyz":[1.0203910146,2.1447159489,2.1447159489],"label":"Te","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.5012320713,0.0,0.5],"xyz":[2.1500008347,0.0,2.1447159489],"label":"O","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.5012320713,0.5,0.0],"xyz":[2.1500008347,2.1447159489,0.0],"label":"O","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.7980811547,0.5,0.5],"xyz":[3.4233147622,2.1447159489,2.1447159489],"label":"O","properties":{}}]},"e_form":1.52,"formula":"HfTeO3"} 
+{"structure":{"@module":"pymatgen.core.structure","@class":"Structure","charge":null,"lattice":{"matrix":[[4.2926387638,0.0,0.0],[0.0,4.2926387638,0.0],[0.0,0.0,4.2926387638]],"a":4.2926387638,"b":4.2926387638,"c":4.2926387638,"alpha":90.0,"beta":90.0,"gamma":90.0,"volume":79.0993708544},"sites":[{"species":[{"element":"Re","occu":1}],"abc":[0.1416166515,0.0,0.0],"xyz":[0.6079091278,0.0,0.0],"label":"Re","properties":{}},{"species":[{"element":"As","occu":1}],"abc":[0.5093856748,0.5,0.5],"xyz":[2.1866086932,2.1463193819,2.1463193819],"label":"As","properties":{}},{"species":[{"element":"F","occu":1}],"abc":[0.5316865005,0.0,0.5],"xyz":[2.2823380822,0.0,2.1463193819],"label":"F","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.3074869463,0.5,0.0],"xyz":[1.319930385,2.1463193819,0.0],"label":"O","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.927582418,0.5,0.5],"xyz":[3.9817762444,2.1463193819,2.1463193819],"label":"O","properties":{}}]},"e_form":1.48,"formula":"ReAsO2F"} 
+{"structure":{"@module":"pymatgen.core.structure","@class":"Structure","charge":null,"lattice":{"matrix":[[4.1837305646,0.0,0.0],[0.0,4.1837305646,0.0],[0.0,0.0,4.1837305646]],"a":4.1837305646,"b":4.1837305646,"c":4.1837305646,"alpha":90.0,"beta":90.0,"gamma":90.0,"volume":73.2303523231},"sites":[{"species":[{"element":"W","occu":1}],"abc":[0.676648156,0.0,0.0],"xyz":[2.8309135716,0.0,0.0],"label":"W","properties":{}},{"species":[{"element":"Re","occu":1}],"abc":[0.6351628832,0.5,0.5],"xyz":[2.6573503678,2.0918652823,2.0918652823],"label":"Re","properties":{}},{"species":[{"element":"S","occu":1}],"abc":[0.3728524724,0.0,0.5],"xyz":[1.5599142849,0.0,2.0918652823],"label":"S","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.7238489421,0.5,0.0],"xyz":[3.0283889434,2.0918652823,0.0],"label":"O","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.0978520248,0.5,0.5],"xyz":[0.4093865068,2.0918652823,2.0918652823],"label":"O","properties":{}}]},"e_form":1.24,"formula":"ReWSO2"} 
+{"structure":{"@module":"pymatgen.core.structure","@class":"Structure","charge":null,"lattice":{"matrix":[[4.2811442539,0.0,0.0],[0.0,4.2811442539,0.0],[0.0,0.0,4.2811442539]],"a":4.2811442539,"b":4.2811442539,"c":4.2811442539,"alpha":90.0,"beta":90.0,"gamma":90.0,"volume":78.4656515166},"sites":[{"species":[{"element":"Bi","occu":1}],"abc":[0.0012121467,0.0,0.0],"xyz":[0.0051893747,0.0,0.0],"label":"Bi","properties":{}},{"species":[{"element":"Hf","occu":1}],"abc":[0.5074940801,0.5,0.5],"xyz":[2.1726553651,2.140572127,2.140572127],"label":"Hf","properties":{}},{"species":[{"element":"F","occu":1}],"abc":[0.4990106707,0.0,0.5],"xyz":[2.1363366656,0.0,2.140572127],"label":"F","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.499996373,0.5,0.0],"xyz":[2.1405565992,2.140572127,0.0],"label":"O","properties":{}},{"species":[{"element":"O","occu":1}],"abc":[0.002611863,0.5,0.5],"xyz":[0.0111817624,2.140572127,2.140572127],"label":"O","properties":{}}]},"e_form":0.62,"formula":"HfBiO2F"} \ No newline at end of file diff --git a/deepchem/data/tests/legacy_dataset/metadata.csv.gzip b/deepchem/data/tests/legacy_dataset/metadata.csv.gzip new file mode 100644 index 0000000000000000000000000000000000000000..e0201d6d7600a7b3939ed5a0d62a31573b0973a4 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset/metadata.csv.gzip differ diff --git a/deepchem/data/tests/legacy_dataset/shard-0-X.npy b/deepchem/data/tests/legacy_dataset/shard-0-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..ba2a9ddc078aa5846551da8111325258bb8a6e71 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset/shard-0-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset/shard-0-ids.npy b/deepchem/data/tests/legacy_dataset/shard-0-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..c5fc11ce81503ec12eb701f599ee7ea568f902ec Binary files /dev/null and b/deepchem/data/tests/legacy_dataset/shard-0-ids.npy differ diff --git 
a/deepchem/data/tests/legacy_dataset/shard-0-w.npy b/deepchem/data/tests/legacy_dataset/shard-0-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..bfd9ad03fff2a33f89f821469fdd900adccb845f Binary files /dev/null and b/deepchem/data/tests/legacy_dataset/shard-0-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset/shard-0-y.npy b/deepchem/data/tests/legacy_dataset/shard-0-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..28dc0f3bec3aa2ccbd3b432016583c7ec61d0489 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset/shard-0-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset/tasks.json b/deepchem/data/tests/legacy_dataset/tasks.json new file mode 100644 index 0000000000000000000000000000000000000000..eafc8000c9478085dc92733471421d5d4a5f0ff4 --- /dev/null +++ b/deepchem/data/tests/legacy_dataset/tasks.json @@ -0,0 +1 @@ +[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] \ No newline at end of file diff --git a/deepchem/data/tests/legacy_dataset_reshard/metadata.csv.gzip b/deepchem/data/tests/legacy_dataset_reshard/metadata.csv.gzip new file mode 100644 index 0000000000000000000000000000000000000000..3d4d37fa09f594d320bff175d38e49e17618d561 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/metadata.csv.gzip differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-0-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-0-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..9142b7637d3fc319db09ff880e1ed1e52e7bd7ba Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-0-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-0-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-0-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-0-ids.npy differ diff --git 
a/deepchem/data/tests/legacy_dataset_reshard/shard-0-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-0-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..5409ce26bfed8693e5134ce607d41f3a672abf6b Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-0-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-0-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-0-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..cf50b11b8004f7d204300368b43e71acb437fbe9 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-0-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-1-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-1-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..e7d9acf405a4c295d923e7714b87f8830e531e17 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-1-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-1-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-1-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-1-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-1-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-1-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..5f6a89d332270b09573892b25b351314ff9d2bd2 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-1-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-1-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-1-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..ff1024e973501a4d86fde0323e477cf501336bea Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-1-y.npy differ diff --git 
a/deepchem/data/tests/legacy_dataset_reshard/shard-2-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-2-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..0f216f284dbf5180dd2b1f1b1657244155bd64b1 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-2-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-2-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-2-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-2-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-2-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-2-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..1178b38a71b75de98a04a87dbd57c8d6f82ef2f3 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-2-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-2-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-2-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..4f47e85a7ffb500fce9f14acd9f80573d3d121cc Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-2-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-3-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-3-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..44b93d9deecabd3da9247a21676189146b393dd6 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-3-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-3-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-3-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-3-ids.npy differ diff 
--git a/deepchem/data/tests/legacy_dataset_reshard/shard-3-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-3-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..56b221c6c7bdb284b2485c058bf494175856be56 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-3-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-3-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-3-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..41dba09c9a472dab8209e37cd72c1031c293a265 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-3-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-4-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-4-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..7b3c59775144530a57cd2651fbc41f860d8798ff Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-4-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-4-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-4-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-4-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-4-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-4-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..5d12f8fa15041b117c8c68a31eacded1278a4f0b Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-4-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-4-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-4-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..09d2eec2840b89b441abdae290addc4ea93306cd Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-4-y.npy differ diff 
--git a/deepchem/data/tests/legacy_dataset_reshard/shard-5-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-5-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..b3e7ed092d02f4fdf0829201d71756a4860bc144 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-5-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-5-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-5-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-5-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-5-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-5-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..1e88c53d3e746ae269fbf73762438932445baea7 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-5-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-5-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-5-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..2f97da32364217afa3558a081a01a1d7feca6a09 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-5-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-6-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-6-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..8dc70f1291f70641421dd039f3fcd342f9c1cdbe Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-6-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-6-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-6-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-6-ids.npy differ 
diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-6-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-6-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..0d0b85a7c3ed2c0f97e9e3d5f90bd22dd7bbc9da Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-6-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-6-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-6-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..44dfa46dddf0f8bcbfd182a39f3ae17715c29740 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-6-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-7-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-7-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..15dcf8b66e74d353bc37dcaf128f7162c332c2f2 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-7-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-7-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-7-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-7-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-7-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-7-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..29527e1fcfc939fe8fb9fbd3e17ebfa8b6130ff4 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-7-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-7-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-7-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..97f495fdd6915f1511f4ba2da7eec0943e3fe3fd Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-7-y.npy differ 
diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-8-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-8-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..5499ad1008e8954822a94ed721b8c5b005f48b90 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-8-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-8-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-8-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-8-ids.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-8-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-8-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..4e0cb170c18bb9b8ec5685e40aafb3526a122201 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-8-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-8-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-8-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..d49d320514178f84eccd497592466acffe08c4b0 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-8-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-9-X.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-9-X.npy new file mode 100644 index 0000000000000000000000000000000000000000..d97612582fd0d2fa33c3880040e5c79361361fe7 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-9-X.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-9-ids.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-9-ids.npy new file mode 100644 index 0000000000000000000000000000000000000000..2418e35c86df5d81f448437d85d4942c4538b3d8 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-9-ids.npy 
differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-9-w.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-9-w.npy new file mode 100644 index 0000000000000000000000000000000000000000..3d4ff3b78fe81bb92100b35bd92e5a9a8d14c22f Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-9-w.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/shard-9-y.npy b/deepchem/data/tests/legacy_dataset_reshard/shard-9-y.npy new file mode 100644 index 0000000000000000000000000000000000000000..fd772160b9905326233c3f70628576571acc56a2 Binary files /dev/null and b/deepchem/data/tests/legacy_dataset_reshard/shard-9-y.npy differ diff --git a/deepchem/data/tests/legacy_dataset_reshard/tasks.json b/deepchem/data/tests/legacy_dataset_reshard/tasks.json new file mode 100644 index 0000000000000000000000000000000000000000..eafc8000c9478085dc92733471421d5d4a5f0ff4 --- /dev/null +++ b/deepchem/data/tests/legacy_dataset_reshard/tasks.json @@ -0,0 +1 @@ +[0, 1, 2, 3, 4, 5, 6, 7, 8, 9] \ No newline at end of file diff --git a/deepchem/data/tests/membrane_permeability.sdf b/deepchem/data/tests/membrane_permeability.sdf new file mode 100644 index 0000000000000000000000000000000000000000..9dd9219958548bee318ddf0482412ce5881b2d57 --- /dev/null +++ b/deepchem/data/tests/membrane_permeability.sdf @@ -0,0 +1,218 @@ +10_filipski_40 + RDKit 3D + + 48 50 0 0 1 0 0 0 0 0999 V2000 + 9.1378 -7.4697 -1.1731 C 0 0 0 0 0 0 0 0 0 0 0 0 + 9.0300 -8.7563 -1.7553 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.1829 -9.4791 -2.1168 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.4593 -8.9144 -1.9184 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.5888 -7.6306 -1.3431 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.4211 -6.9229 -0.9733 C 0 0 0 0 0 0 0 0 0 0 0 0 + 8.0685 -6.6893 -0.7812 O 0 0 0 0 0 0 0 0 0 0 0 0 + 6.7356 -7.1730 -0.9323 C 0 0 0 0 0 0 0 0 0 0 0 0 + 5.8194 -5.9457 -0.8867 C 0 0 0 0 0 0 0 0 0 0 0 0 + 6.3937 -8.1606 0.1955 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.0417 -10.7213 -2.6806 O 0 0 0 0 0 0 0 0 0 0 0 0 + 10.6226 -11.7880 
-2.0428 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.4794 -12.6365 -2.7738 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.0777 -13.7503 -2.1503 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.8056 -14.0231 -0.7953 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.9593 -13.1740 -0.0542 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.3610 -12.0614 -0.6807 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.5981 -15.4211 0.0061 S 0 0 0 0 0 0 0 0 0 0 0 0 + 14.1883 -14.7546 0.5873 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.8095 -15.8020 1.1921 O 0 0 0 0 0 0 0 0 0 0 0 0 + 12.8865 -16.4503 -1.0091 O 0 0 0 0 0 0 0 0 0 0 0 0 + 12.9447 -7.0276 -1.1268 C 0 0 0 0 0 0 0 0 0 0 0 0 + 14.1048 -7.6753 -1.5778 N 0 0 0 0 0 0 0 0 0 0 0 0 + 15.3664 -7.2188 -1.4378 C 0 0 0 0 0 0 0 0 0 0 0 0 + 15.4761 -5.9335 -0.7477 C 0 0 0 0 0 0 0 0 0 0 0 0 + 14.3478 -5.3279 -0.3229 C 0 0 0 0 0 0 0 0 0 0 0 0 + 13.0801 -5.8841 -0.5185 N 0 0 0 0 0 0 0 0 0 0 0 0 + 16.3235 -7.8662 -1.8727 O 0 0 0 0 0 0 0 0 0 0 0 0 + 17.0235 -5.2108 -0.4863 Cl 0 0 0 0 0 0 0 0 0 0 0 0 + 8.0727 -9.2223 -1.9323 H 0 0 0 0 0 0 0 0 0 0 0 0 + 12.3294 -9.4833 -2.2114 H 0 0 0 0 0 0 0 0 0 0 0 0 + 10.5000 -5.9395 -0.5309 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.5963 -7.6418 -1.9072 H 0 0 0 0 0 0 0 0 0 0 0 0 + 4.7728 -6.2316 -0.9963 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.9216 -5.4076 0.0563 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.0566 -5.2512 -1.6930 H 0 0 0 0 0 0 0 0 0 0 0 0 + 7.0376 -9.0392 0.1822 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.4989 -7.6921 1.1742 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.3655 -8.5122 0.1058 H 0 0 0 0 0 0 0 0 0 0 0 0 + 11.6797 -12.4320 -3.8159 H 0 0 0 0 0 0 0 0 0 0 0 0 + 12.7400 -14.3980 -2.7059 H 0 0 0 0 0 0 0 0 0 0 0 0 + 10.7684 -13.3823 0.9883 H 0 0 0 0 0 0 0 0 0 0 0 0 + 9.7026 -11.4187 -0.1132 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.7527 -14.3892 -0.2677 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.9992 -13.9328 1.2743 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.7461 -15.5395 1.0917 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.9997 -8.5573 -2.0516 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.3815 -4.3776 0.1907 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 2 0 + 1 6 1 0 + 1 7 1 0 + 2 3 1 0 + 2 30 1 0 + 3 4 2 0 + 3 11 1 0 + 4 5 1 0 + 4 31 1 0 + 5 
6 2 0 + 5 22 1 0 + 6 32 1 0 + 7 8 1 0 + 8 9 1 0 + 8 10 1 0 + 8 33 1 0 + 9 34 1 0 + 9 35 1 0 + 9 36 1 0 + 10 37 1 0 + 10 38 1 0 + 10 39 1 0 + 11 12 1 0 + 12 13 2 0 + 12 17 1 0 + 13 14 1 0 + 13 40 1 0 + 14 15 2 0 + 14 41 1 0 + 15 16 1 0 + 15 18 1 0 + 16 17 2 0 + 16 42 1 0 + 17 43 1 0 + 18 19 1 0 + 18 20 2 0 + 18 21 2 0 + 19 44 1 0 + 19 45 1 0 + 19 46 1 0 + 22 23 1 0 + 22 27 2 0 + 23 24 1 0 + 23 47 1 0 + 24 25 1 0 + 24 28 2 0 + 25 26 2 0 + 25 29 1 0 + 26 27 1 0 + 26 48 1 0 +M END +> (1) +-5.08 + +$$$$ +10_filipski_42 + RDKit 3D + + 50 52 0 0 1 0 0 0 0 0999 V2000 + 8.8247 -7.3140 -1.2684 C 0 0 0 0 0 0 0 0 0 0 0 0 + 8.7978 -8.6432 -1.7590 C 0 0 0 0 0 0 0 0 0 0 0 0 + 9.9897 -9.2996 -2.1198 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.2249 -8.6287 -2.0043 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.2728 -7.3060 -1.5122 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.0677 -6.6605 -1.1523 C 0 0 0 0 0 0 0 0 0 0 0 0 + 7.7116 -6.5895 -0.8917 O 0 0 0 0 0 0 0 0 0 0 0 0 + 6.4156 -7.1795 -0.9644 C 0 0 0 0 0 0 0 0 0 0 0 0 + 5.4127 -6.0219 -0.9784 C 0 0 0 0 0 0 0 0 0 0 0 0 + 6.1822 -8.1019 0.2432 C 0 0 0 0 0 0 0 0 0 0 0 0 + 9.9229 -10.5823 -2.6015 O 0 0 0 0 0 0 0 0 0 0 0 0 + 10.6835 -11.5390 -1.9805 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.6377 -12.2535 -2.7336 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.4273 -13.2459 -2.1186 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.2573 -13.5245 -0.7480 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.3014 -12.8190 0.0104 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.5113 -11.8283 -0.6083 C 0 0 0 0 0 0 0 0 0 0 0 0 + 13.2741 -14.7862 0.0266 S 0 0 0 0 0 0 0 0 0 0 0 0 + 14.8007 -14.0207 0.1586 N 0 0 0 0 0 0 0 0 0 0 0 0 + 12.8065 -15.0295 1.4016 O 0 0 0 0 0 0 0 0 0 0 0 0 + 13.4508 -15.9197 -0.8955 O 0 0 0 0 0 0 0 0 0 0 0 0 + 12.5842 -6.5952 -1.3827 C 0 0 0 0 0 0 0 0 0 0 0 0 + 13.7938 -7.3067 -1.3965 N 0 0 0 0 0 0 0 0 0 0 0 0 + 15.0231 -6.7649 -1.2806 C 0 0 0 0 0 0 0 0 0 0 0 0 + 15.0328 -5.3156 -1.1306 C 0 0 0 0 0 0 0 0 0 0 0 0 + 13.8624 -4.6467 -1.1141 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.6344 -5.3016 -1.2387 N 0 0 0 0 0 0 0 0 0 0 0 0 + 16.0372 -7.4655 -1.3016 O 0 0 
0 0 0 0 0 0 0 0 0 0 + 15.8470 -14.7127 0.9154 C 0 0 0 0 0 0 0 0 0 0 0 0 + 7.8748 -9.1913 -1.8675 H 0 0 0 0 0 0 0 0 0 0 0 0 + 12.1279 -9.1407 -2.3028 H 0 0 0 0 0 0 0 0 0 0 0 0 + 10.0889 -5.6466 -0.7773 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.2849 -7.7285 -1.8974 H 0 0 0 0 0 0 0 0 0 0 0 0 + 4.3881 -6.3892 -1.0449 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.4925 -5.4156 -0.0756 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.5848 -5.3681 -1.8339 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.8956 -8.9248 0.2754 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.2739 -7.5525 1.1802 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.1840 -8.5392 0.2119 H 0 0 0 0 0 0 0 0 0 0 0 0 + 11.7650 -12.0392 -3.7854 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.1598 -13.7962 -2.6907 H 0 0 0 0 0 0 0 0 0 0 0 0 + 11.1770 -13.0389 1.0604 H 0 0 0 0 0 0 0 0 0 0 0 0 + 9.7750 -11.2901 -0.0280 H 0 0 0 0 0 0 0 0 0 0 0 0 + 15.1266 -13.7716 -0.7786 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.7507 -8.3089 -1.4905 H 0 0 0 0 0 0 0 0 0 0 0 0 + 15.9705 -4.7876 -1.0331 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.8284 -3.5714 -1.0035 H 0 0 0 0 0 0 0 0 0 0 0 0 + 16.0696 -15.6784 0.4597 H 0 0 0 0 0 0 0 0 0 0 0 0 + 16.7610 -14.1182 0.9298 H 0 0 0 0 0 0 0 0 0 0 0 0 + 15.5270 -14.8822 1.9443 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 2 0 + 1 6 1 0 + 1 7 1 0 + 2 3 1 0 + 2 30 1 0 + 3 4 2 0 + 3 11 1 0 + 4 5 1 0 + 4 31 1 0 + 5 6 2 0 + 5 22 1 0 + 6 32 1 0 + 7 8 1 0 + 8 9 1 0 + 8 10 1 0 + 8 33 1 0 + 9 34 1 0 + 9 35 1 0 + 9 36 1 0 + 10 37 1 0 + 10 38 1 0 + 10 39 1 0 + 11 12 1 0 + 12 13 2 0 + 12 17 1 0 + 13 14 1 0 + 13 40 1 0 + 14 15 2 0 + 14 41 1 0 + 15 16 1 0 + 15 18 1 0 + 16 17 2 0 + 16 42 1 0 + 17 43 1 0 + 18 19 1 0 + 18 20 2 0 + 18 21 2 0 + 19 29 1 0 + 19 44 1 0 + 22 23 1 0 + 22 27 2 0 + 23 24 1 0 + 23 45 1 0 + 24 25 1 0 + 24 28 2 0 + 25 26 2 0 + 25 46 1 0 + 26 27 1 0 + 26 47 1 0 + 29 48 1 0 + 29 49 1 0 + 29 50 1 0 +M END +> (2) +-4.82 + +$$$$ diff --git a/deepchem/data/tests/singleton.sdf b/deepchem/data/tests/singleton.sdf new file mode 100644 index 0000000000000000000000000000000000000000..a7ae25e809606d0fdf24fc712171238d52d241e3 --- /dev/null +++ 
b/deepchem/data/tests/singleton.sdf @@ -0,0 +1,107 @@ +10_filipski_40 + RDKit 3D + + 48 50 0 0 1 0 0 0 0 0999 V2000 + 9.1378 -7.4697 -1.1731 C 0 0 0 0 0 0 0 0 0 0 0 0 + 9.0300 -8.7563 -1.7553 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.1829 -9.4791 -2.1168 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.4593 -8.9144 -1.9184 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.5888 -7.6306 -1.3431 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.4211 -6.9229 -0.9733 C 0 0 0 0 0 0 0 0 0 0 0 0 + 8.0685 -6.6893 -0.7812 O 0 0 0 0 0 0 0 0 0 0 0 0 + 6.7356 -7.1730 -0.9323 C 0 0 0 0 0 0 0 0 0 0 0 0 + 5.8194 -5.9457 -0.8867 C 0 0 0 0 0 0 0 0 0 0 0 0 + 6.3937 -8.1606 0.1955 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.0417 -10.7213 -2.6806 O 0 0 0 0 0 0 0 0 0 0 0 0 + 10.6226 -11.7880 -2.0428 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.4794 -12.6365 -2.7738 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.0777 -13.7503 -2.1503 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.8056 -14.0231 -0.7953 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.9593 -13.1740 -0.0542 C 0 0 0 0 0 0 0 0 0 0 0 0 + 10.3610 -12.0614 -0.6807 C 0 0 0 0 0 0 0 0 0 0 0 0 + 12.5981 -15.4211 0.0061 S 0 0 0 0 0 0 0 0 0 0 0 0 + 14.1883 -14.7546 0.5873 C 0 0 0 0 0 0 0 0 0 0 0 0 + 11.8095 -15.8020 1.1921 O 0 0 0 0 0 0 0 0 0 0 0 0 + 12.8865 -16.4503 -1.0091 O 0 0 0 0 0 0 0 0 0 0 0 0 + 12.9447 -7.0276 -1.1268 C 0 0 0 0 0 0 0 0 0 0 0 0 + 14.1048 -7.6753 -1.5778 N 0 0 0 0 0 0 0 0 0 0 0 0 + 15.3664 -7.2188 -1.4378 C 0 0 0 0 0 0 0 0 0 0 0 0 + 15.4761 -5.9335 -0.7477 C 0 0 0 0 0 0 0 0 0 0 0 0 + 14.3478 -5.3279 -0.3229 C 0 0 0 0 0 0 0 0 0 0 0 0 + 13.0801 -5.8841 -0.5185 N 0 0 0 0 0 0 0 0 0 0 0 0 + 16.3235 -7.8662 -1.8727 O 0 0 0 0 0 0 0 0 0 0 0 0 + 17.0235 -5.2108 -0.4863 Cl 0 0 0 0 0 0 0 0 0 0 0 0 + 8.0727 -9.2223 -1.9323 H 0 0 0 0 0 0 0 0 0 0 0 0 + 12.3294 -9.4833 -2.2114 H 0 0 0 0 0 0 0 0 0 0 0 0 + 10.5000 -5.9395 -0.5309 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.5963 -7.6418 -1.9072 H 0 0 0 0 0 0 0 0 0 0 0 0 + 4.7728 -6.2316 -0.9963 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.9216 -5.4076 0.0563 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.0566 -5.2512 -1.6930 H 0 0 0 0 0 0 0 0 0 0 0 0 + 7.0376 
-9.0392 0.1822 H 0 0 0 0 0 0 0 0 0 0 0 0 + 6.4989 -7.6921 1.1742 H 0 0 0 0 0 0 0 0 0 0 0 0 + 5.3655 -8.5122 0.1058 H 0 0 0 0 0 0 0 0 0 0 0 0 + 11.6797 -12.4320 -3.8159 H 0 0 0 0 0 0 0 0 0 0 0 0 + 12.7400 -14.3980 -2.7059 H 0 0 0 0 0 0 0 0 0 0 0 0 + 10.7684 -13.3823 0.9883 H 0 0 0 0 0 0 0 0 0 0 0 0 + 9.7026 -11.4187 -0.1132 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.7527 -14.3892 -0.2677 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.9992 -13.9328 1.2743 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.7461 -15.5395 1.0917 H 0 0 0 0 0 0 0 0 0 0 0 0 + 13.9997 -8.5573 -2.0516 H 0 0 0 0 0 0 0 0 0 0 0 0 + 14.3815 -4.3776 0.1907 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 2 0 + 1 6 1 0 + 1 7 1 0 + 2 3 1 0 + 2 30 1 0 + 3 4 2 0 + 3 11 1 0 + 4 5 1 0 + 4 31 1 0 + 5 6 2 0 + 5 22 1 0 + 6 32 1 0 + 7 8 1 0 + 8 9 1 0 + 8 10 1 0 + 8 33 1 0 + 9 34 1 0 + 9 35 1 0 + 9 36 1 0 + 10 37 1 0 + 10 38 1 0 + 10 39 1 0 + 11 12 1 0 + 12 13 2 0 + 12 17 1 0 + 13 14 1 0 + 13 40 1 0 + 14 15 2 0 + 14 41 1 0 + 15 16 1 0 + 15 18 1 0 + 16 17 2 0 + 16 42 1 0 + 17 43 1 0 + 18 19 1 0 + 18 20 2 0 + 18 21 2 0 + 19 44 1 0 + 19 45 1 0 + 19 46 1 0 + 22 23 1 0 + 22 27 2 0 + 23 24 1 0 + 23 47 1 0 + 24 25 1 0 + 24 28 2 0 + 25 26 2 0 + 25 29 1 0 + 26 27 1 0 + 26 48 1 0 +M END +> (1) +-5.08 + +$$$$ diff --git a/deepchem/data/tests/test_copy_and_move.py b/deepchem/data/tests/test_copy_and_move.py new file mode 100644 index 0000000000000000000000000000000000000000..881019446108bc3ee5ea2e503c6cb2f0ad27c572 --- /dev/null +++ b/deepchem/data/tests/test_copy_and_move.py @@ -0,0 +1,57 @@ +import deepchem as dc +import tempfile +import numpy as np +import os + + +def test_copy(): + """Test that copy works correctly.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + + # legacy_dataset_reshard is a shared dataset in the legacy format 
kept + # around for testing resharding. + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + # Set cache to 0 size to avoid cache hiding errors + dataset.memory_cache_size = 0 + + with tempfile.TemporaryDirectory() as tmpdirname: + copy = dataset.copy(tmpdirname) + assert np.all(copy.X == dataset.X) + assert np.all(copy.y == dataset.y) + assert np.all(copy.w == dataset.w) + assert np.all(copy.ids == dataset.ids) + + +def test_move(): + """Test that move works correctly.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + + # legacy_dataset_reshard is a shared dataset in the legacy format kept + # around for testing resharding. + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + # Set cache to 0 size to avoid cache hiding errors + dataset.memory_cache_size = 0 + data_dir = dataset.data_dir + + with tempfile.TemporaryDirectory() as tmpdirname: + dataset.move(tmpdirname, delete_if_exists=False) + assert np.all(X == dataset.X) + assert np.all(y == dataset.y) + assert np.all(w == dataset.w) + assert np.all(ids == dataset.ids) + assert dataset.data_dir == os.path.join(tmpdirname, + os.path.basename(data_dir)) diff --git a/deepchem/data/tests/test_csv_loader.py b/deepchem/data/tests/test_csv_loader.py index a8945cb8801554f627e2ef91c6ad0ae2533363a4..902950a606b7044a81874c93601cc2814bc75b83 100644 --- a/deepchem/data/tests/test_csv_loader.py +++ b/deepchem/data/tests/test_csv_loader.py @@ -1,24 +1,17 @@ import os -from unittest import TestCase -from io import StringIO import tempfile -import shutil - import deepchem as dc -class TestCSVLoader(TestCase): - - def test_load_singleton_csv(self): - fin = tempfile.NamedTemporaryFile(mode='w', delete=False) - fin.write("smiles,endpoint\nc1ccccc1,1") - fin.close() - 
print(fin.name) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["endpoint"] - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) +def test_load_singleton_csv(): + fin = tempfile.NamedTemporaryFile(mode='w', delete=False) + fin.write("smiles,endpoint\nc1ccccc1,1") + fin.close() + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["endpoint"] + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) - X = loader.featurize(fin.name) - self.assertEqual(1, len(X)) - os.remove(fin.name) + X = loader.create_dataset(fin.name) + assert len(X) == 1 + os.remove(fin.name) diff --git a/deepchem/data/tests/test_data_loader.py b/deepchem/data/tests/test_data_loader.py index 3fd7ac7fdb930a26a4d7166e2323b5d1b05a5e64..a45e38c097b59be23bfaa2fb1fa647da797cb037 100644 --- a/deepchem/data/tests/test_data_loader.py +++ b/deepchem/data/tests/test_data_loader.py @@ -1,167 +1,139 @@ """ Tests for FeaturizedSamples class """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" import os -import unittest import tempfile import shutil import deepchem as dc -class TestDataLoader(unittest.TestCase): - """ - Test DataLoader - """ - - def setUp(self): - super(TestDataLoader, self).setUp() - self.current_dir = os.path.dirname(os.path.abspath(__file__)) - - def unlabelled_test(self): - input_file = os.path.join(self.current_dir, - "../../data/tests/no_labels.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - loader = dc.data.CSVLoader( - tasks=[], smiles_field="smiles", featurizer=featurizer) - loader.featurize(input_file) - - def scaffold_test_train_valid_test_split(self): - """Test of singletask RF ECFP regression API.""" - splittype = "scaffold" - input_transforms = [] - output_transforms = ["normalize"] - model_params = {} - tasks = ["log-solubility"] - task_type = "regression" - task_types = {task: task_type for task in tasks} - 
input_file = os.path.join(self.current_dir, - "../../models/tests/example.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - - input_file = os.path.join(self.current_dir, input_file) - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(input_file) - - # Splits featurized samples into train/test - splitter = dc.splits.ScaffoldSplitter() - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset) - assert len(train_dataset) == 8 - assert len(valid_dataset) == 1 - assert len(test_dataset) == 1 - - def scaffold_test_train_test_split(self): - """Test of singletask RF ECFP regression API.""" - splittype = "scaffold" - input_transforms = [] - output_transforms = ["normalize"] - model_params = {} - tasks = ["log-solubility"] - task_type = "regression" - task_types = {task: task_type for task in tasks} - input_file = os.path.join(self.current_dir, - "../../models/tests/example.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - - input_file = os.path.join(self.current_dir, input_file) - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(input_file) - - # Splits featurized samples into train/test - splitter = dc.splits.ScaffoldSplitter() - train_dataset, test_dataset = splitter.train_test_split(dataset) - assert len(train_dataset) == 8 - assert len(test_dataset) == 2 - - def random_test_train_valid_test_split(self): - """Test of singletask RF ECFP regression API.""" - input_transforms = [] - output_transforms = ["normalize"] - model_params = {} - tasks = ["log-solubility"] - task_type = "regression" - task_types = {task: task_type for task in tasks} - input_file = os.path.join(self.current_dir, - "../../models/tests/example.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - - input_file = os.path.join(self.current_dir, input_file) - loader = dc.data.CSVLoader( - tasks=tasks, 
smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(input_file) - - # Splits featurized samples into train/test - splitter = dc.splits.RandomSplitter() - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset) - assert len(train_dataset) == 8 - assert len(valid_dataset) == 1 - assert len(test_dataset) == 1 - - def random_test_train_test_split(self): - """Test of singletask RF ECFP regression API.""" - #splittype = "random" - model_params = {} - tasks = ["log-solubility"] - task_type = "regression" - task_types = {task: task_type for task in tasks} - input_file = os.path.join(self.current_dir, - "../../models/tests/example.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(input_file) - - # Splits featurized samples into train/test - splitter = dc.splits.RandomSplitter() - train_dataset, test_dataset = splitter.train_test_split(dataset) - assert len(train_dataset) == 8 - assert len(test_dataset) == 2 - - def test_log_solubility_dataset(self): - """Test of loading for simple log-solubility dataset.""" - current_dir = os.path.dirname(os.path.realpath(__file__)) - input_file = "../../models/tests/example.csv" - input_file = os.path.join(current_dir, input_file) - - tasks = ["log-solubility"] - smiles_field = "smiles" - loader = dc.data.CSVLoader( - tasks=tasks, - smiles_field="smiles", - featurizer=dc.feat.CircularFingerprint(size=1024)) - dataset = loader.featurize(input_file) - - assert len(dataset) == 10 - - def test_dataset_move(self): - """Test that dataset can be moved and reloaded.""" - base_dir = tempfile.mkdtemp() - data_dir = os.path.join(base_dir, "data") - moved_data_dir = os.path.join(base_dir, "moved_data") - dataset_file = os.path.join(self.current_dir, - "../../models/tests/example.csv") - - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = 
["log-solubility"] - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - featurized_dataset = loader.featurize(dataset_file, data_dir) - n_dataset = len(featurized_dataset) - - # Now perform move - shutil.move(data_dir, moved_data_dir) - - moved_featurized_dataset = dc.data.DiskDataset(moved_data_dir) - - assert len(moved_featurized_dataset) == n_dataset +def test_unlabelled(): + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, "../../data/tests/no_labels.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.CSVLoader( + tasks=[], feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + assert len(dataset.X) + + +def test_scaffold_test_train_valid_test_split(): + """Test of singletask RF ECFP regression API.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + + input_file = os.path.join(current_dir, input_file) + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + dataset = loader.create_dataset(input_file) + + # Splits featurized samples into train/test + splitter = dc.splits.ScaffoldSplitter() + train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( + dataset) + assert len(train_dataset) == 8 + assert len(valid_dataset) == 1 + assert len(test_dataset) == 1 + + +def test_scaffold_test_train_test_split(): + """Test of singletask RF ECFP regression API.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + + input_file = os.path.join(current_dir, input_file) + loader = dc.data.CSVLoader( + tasks=tasks, 
feature_field="smiles", featurizer=featurizer) + + dataset = loader.create_dataset(input_file) + + # Splits featurized samples into train/test + splitter = dc.splits.ScaffoldSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + assert len(train_dataset) == 8 + assert len(test_dataset) == 2 + + +def test_random_test_train_valid_test_split(): + """Test of singletask RF ECFP regression API.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + + input_file = os.path.join(current_dir, input_file) + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + dataset = loader.create_dataset(input_file) + + # Splits featurized samples into train/test + splitter = dc.splits.RandomSplitter() + train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( + dataset) + assert len(train_dataset) == 8 + assert len(valid_dataset) == 1 + assert len(test_dataset) == 1 + + +def test_random_test_train_test_split(): + """Test of singletask RF ECFP regression API.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + dataset = loader.create_dataset(input_file) + + # Splits featurized samples into train/test + splitter = dc.splits.RandomSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + assert len(train_dataset) == 8 + assert len(test_dataset) == 2 + + +def test_log_solubility_dataset(): + """Test of loading for simple log-solubility dataset.""" + current_dir = os.path.dirname(os.path.realpath(__file__)) + input_file = "../../models/tests/example.csv" + 
input_file = os.path.join(current_dir, input_file) + + tasks = ["log-solubility"] + loader = dc.data.CSVLoader( + tasks=tasks, + feature_field="smiles", + featurizer=dc.feat.CircularFingerprint(size=1024)) + dataset = loader.create_dataset(input_file) + + assert len(dataset) == 10 + + +def test_dataset_move(): + """Test that dataset can be moved and reloaded.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + base_dir = tempfile.mkdtemp() + data_dir = os.path.join(base_dir, "data") + moved_data_dir = os.path.join(base_dir, "moved_data") + dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") + + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + featurized_dataset = loader.create_dataset(dataset_file, data_dir) + n_dataset = len(featurized_dataset) + + # Now perform move + shutil.move(data_dir, moved_data_dir) + + moved_featurized_dataset = dc.data.DiskDataset(moved_data_dir) + + assert len(moved_featurized_dataset) == n_dataset diff --git a/deepchem/data/tests/test_datasets.py b/deepchem/data/tests/test_datasets.py index 74d1d2c2382b409c8dcf4c1f88ea709b9ba96e0f..711529e2143ea7618f3e005a559b4f71d078f365 100644 --- a/deepchem/data/tests/test_datasets.py +++ b/deepchem/data/tests/test_datasets.py @@ -1,388 +1,67 @@ """ Tests for dataset creation """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import random import math import unittest -import tempfile import os -import shutil import numpy as np import deepchem as dc -import tensorflow as tf -import pandas as pd -from tensorflow.python.framework import test_util try: - import torch + import torch # noqa PYTORCH_IMPORT_FAILED = False except ImportError: PYTORCH_IMPORT_FAILED = True -class TestDatasets(test_util.TensorFlowTestCase): - """ - Test basic top-level API for dataset objects. 
- """ +def load_solubility_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) - def test_sparsify_and_densify(self): - """Test that sparsify and densify work as inverses.""" - # Test on identity matrix - num_samples = 10 - num_features = num_samples - X = np.eye(num_samples) - X_sparse = dc.data.sparsify_features(X) - X_reconstructed = dc.data.densify_features(X_sparse, num_features) - np.testing.assert_array_equal(X, X_reconstructed) - - # Generate random sparse features dataset - np.random.seed(123) - p = .05 - X = np.random.binomial(1, p, size=(num_samples, num_features)) - X_sparse = dc.data.sparsify_features(X) - X_reconstructed = dc.data.densify_features(X_sparse, num_features) - np.testing.assert_array_equal(X, X_reconstructed) - - # Test edge case with array of all zeros - X = np.zeros((num_samples, num_features)) - X_sparse = dc.data.sparsify_features(X) - X_reconstructed = dc.data.densify_features(X_sparse, num_features) - np.testing.assert_array_equal(X, X_reconstructed) - - def test_pad_features(self): - """Test that pad_features pads features correctly.""" - batch_size = 100 - num_features = 10 - num_tasks = 5 - - # Test cases where n_samples < 2*n_samples < batch_size - n_samples = 29 - X_b = np.zeros((n_samples, num_features)) - - X_out = dc.data.pad_features(batch_size, X_b) - assert len(X_out) == batch_size - - # Test cases where n_samples < batch_size - n_samples = 79 - X_b = np.zeros((n_samples, num_features)) - X_out = dc.data.pad_features(batch_size, X_b) - assert len(X_out) == batch_size - - # Test case where n_samples == batch_size - n_samples = 100 - X_b = np.zeros((n_samples, num_features)) - X_out = dc.data.pad_features(batch_size, X_b) - assert 
len(X_out) == batch_size - - # Test case for object featurization. - n_samples = 2 - X_b = np.array([{"a": 1}, {"b": 2}]) - X_out = dc.data.pad_features(batch_size, X_b) - assert len(X_out) == batch_size - - # Test case for more complicated object featurization - n_samples = 2 - X_b = np.array([(1, {"a": 1}), (2, {"b": 2})]) - X_out = dc.data.pad_features(batch_size, X_b) - assert len(X_out) == batch_size - - # Test case with multidimensional data - n_samples = 50 - num_atoms = 15 - d = 3 - X_b = np.zeros((n_samples, num_atoms, d)) - X_out = dc.data.pad_features(batch_size, X_b) - assert len(X_out) == batch_size - - def test_pad_batches(self): - """Test that pad_batch pads batches correctly.""" - batch_size = 100 - num_features = 10 - num_tasks = 5 - - # Test cases where n_samples < 2*n_samples < batch_size - n_samples = 29 - X_b = np.zeros((n_samples, num_features)) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - # Test cases where n_samples < batch_size - n_samples = 79 - X_b = np.zeros((n_samples, num_features)) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - # Test case where n_samples == batch_size - n_samples = 100 - X_b = np.zeros((n_samples, num_features)) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - # Test case for object featurization. 
- n_samples = 2 - X_b = np.array([{"a": 1}, {"b": 2}]) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - # Test case for more complicated object featurization - n_samples = 2 - X_b = np.array([(1, {"a": 1}), (2, {"b": 2})]) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - # Test case with multidimensional data - n_samples = 50 - num_atoms = 15 - d = 3 - X_b = np.zeros((n_samples, num_atoms, d)) - y_b = np.zeros((n_samples, num_tasks)) - w_b = np.zeros((n_samples, num_tasks)) - ids_b = np.zeros((n_samples,)) - - X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, - ids_b) - assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size - - def test_get_task_names(self): - """Test that get_task_names returns correct task_names""" - solubility_dataset = dc.data.tests.load_solubility_data() - assert solubility_dataset.get_task_names() == ["log-solubility"] - - multitask_dataset = dc.data.tests.load_multitask_data() - assert sorted(multitask_dataset.get_task_names()) == sorted([ - "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", - "task8", "task9", "task10", "task11", "task12", "task13", "task14", - "task15", "task16" - ]) - - def test_get_data_shape(self): - """Test that get_data_shape returns currect data shape""" - solubility_dataset = dc.data.tests.load_solubility_data() - assert solubility_dataset.get_data_shape() == (1024,) - - multitask_dataset = dc.data.tests.load_multitask_data() - assert multitask_dataset.get_data_shape() == (1024,) - - def 
test_len(self): - """Test that len(dataset) works.""" - solubility_dataset = dc.data.tests.load_solubility_data() - assert len(solubility_dataset) == 10 - - def test_reshard(self): - """Test that resharding the dataset works.""" - solubility_dataset = dc.data.tests.load_solubility_data() - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - assert solubility_dataset.get_number_shards() == 1 - solubility_dataset.reshard(shard_size=1) - assert solubility_dataset.get_shard_size() == 1 - X_r, y_r, w_r, ids_r = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - assert solubility_dataset.get_number_shards() == 10 - solubility_dataset.reshard(shard_size=10) - assert solubility_dataset.get_shard_size() == 10 - X_rr, y_rr, w_rr, ids_rr = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - - # Test first resharding worked - np.testing.assert_array_equal(X, X_r) - np.testing.assert_array_equal(y, y_r) - np.testing.assert_array_equal(w, w_r) - np.testing.assert_array_equal(ids, ids_r) - - # Test second resharding worked - np.testing.assert_array_equal(X, X_rr) - np.testing.assert_array_equal(y, y_rr) - np.testing.assert_array_equal(w, w_rr) - np.testing.assert_array_equal(ids, ids_rr) - - def test_select(self): - """Test that dataset select works.""" - num_datapoints = 10 - num_features = 10 - num_tasks = 1 - X = np.random.rand(num_datapoints, num_features) - y = np.random.randint(2, size=(num_datapoints, num_tasks)) - w = np.ones((num_datapoints, num_tasks)) - ids = np.array(["id"] * num_datapoints) - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - - indices = [0, 4, 5, 8] - select_dataset = dataset.select(indices) - X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, - select_dataset.w, select_dataset.ids) - np.testing.assert_array_equal(X[indices], X_sel) - np.testing.assert_array_equal(y[indices], y_sel) - 
np.testing.assert_array_equal(w[indices], w_sel) - np.testing.assert_array_equal(ids[indices], ids_sel) - - def test_complete_shuffle(self): - shard_sizes = [1, 2, 3, 4, 5] - batch_size = 10 + return loader.create_dataset(input_file) - all_Xs, all_ys, all_ws, all_ids = [], [], [], [] - - def shard_generator(): - for sz in shard_sizes: - X_b = np.random.rand(sz, 1) - y_b = np.random.rand(sz, 1) - w_b = np.random.rand(sz, 1) - ids_b = np.random.rand(sz) - - all_Xs.append(X_b) - all_ys.append(y_b) - all_ws.append(w_b) - all_ids.append(ids_b) - yield X_b, y_b, w_b, ids_b +def load_multitask_data(): + """Load example multitask data.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = [ + "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", + "task8", "task9", "task10", "task11", "task12", "task13", "task14", + "task15", "task16" + ] + input_file = os.path.join(current_dir, + "../../models/tests/multitask_example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + return loader.create_dataset(input_file) - dataset = dc.data.DiskDataset.create_dataset(shard_generator()) - res = dataset.complete_shuffle() +class TestTransformer(dc.trans.Transformer): - # approx 1/15! 
chance of equality - np.testing.assert_equal(np.any(np.not_equal(dataset.X, res.X)), True) - np.testing.assert_equal(np.any(np.not_equal(dataset.y, res.w)), True) - np.testing.assert_equal(np.any(np.not_equal(dataset.w, res.y)), True) - np.testing.assert_equal(np.any(np.not_equal(dataset.ids, res.ids)), True) + def transform_array(self, X, y, w, ids): + return (2 * X, 1.5 * y, w, ids) - np.testing.assert_array_equal( - np.sort(dataset.X, axis=0), np.sort(res.X, axis=0)) - np.testing.assert_array_equal( - np.sort(dataset.y, axis=0), np.sort(res.y, axis=0)) - np.testing.assert_array_equal( - np.sort(dataset.w, axis=0), np.sort(res.w, axis=0)) - np.testing.assert_array_equal(np.sort(dataset.ids), np.sort(res.ids)) - - def test_get_shape(self): - """Test that get_shape works.""" - num_datapoints = 100 - num_features = 10 - num_tasks = 10 - # Generate data - X = np.random.rand(num_datapoints, num_features) - y = np.random.randint(2, size=(num_datapoints, num_tasks)) - w = np.random.randint(2, size=(num_datapoints, num_tasks)) - ids = np.array(["id"] * num_datapoints) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() - assert X_shape == X.shape - assert y_shape == y.shape - assert w_shape == w.shape - assert ids_shape == ids.shape - - def test_iterbatches(self): - """Test that iterating over batches of data works.""" - solubility_dataset = dc.data.tests.load_solubility_data() - batch_size = 2 - data_shape = solubility_dataset.get_data_shape() - tasks = solubility_dataset.get_task_names() - for (X_b, y_b, w_b, ids_b) in solubility_dataset.iterbatches(batch_size): - assert X_b.shape == (batch_size,) + data_shape - assert y_b.shape == (batch_size,) + (len(tasks),) - assert w_b.shape == (batch_size,) + (len(tasks),) - assert ids_b.shape == (batch_size,) - - def test_itersamples_numpy(self): - """Test that iterating over samples in a NumpyDataset works.""" - num_datapoints = 100 - num_features = 10 - num_tasks = 10 
- # Generate data - X = np.random.rand(num_datapoints, num_features) - y = np.random.randint(2, size=(num_datapoints, num_tasks)) - w = np.random.randint(2, size=(num_datapoints, num_tasks)) - ids = np.array(["id"] * num_datapoints) - dataset = dc.data.NumpyDataset(X, y, w, ids) - for i, (sx, sy, sw, sid) in enumerate(dataset.itersamples()): - np.testing.assert_array_equal(sx, X[i]) - np.testing.assert_array_equal(sy, y[i]) - np.testing.assert_array_equal(sw, w[i]) - np.testing.assert_array_equal(sid, ids[i]) - - def test_itersamples_disk(self): - """Test that iterating over samples in a DiskDataset works.""" - solubility_dataset = dc.data.tests.load_solubility_data() - X = solubility_dataset.X - y = solubility_dataset.y - w = solubility_dataset.w - ids = solubility_dataset.ids - for i, (sx, sy, sw, sid) in enumerate(solubility_dataset.itersamples()): - np.testing.assert_array_equal(sx, X[i]) - np.testing.assert_array_equal(sy, y[i]) - np.testing.assert_array_equal(sw, w[i]) - np.testing.assert_array_equal(sid, ids[i]) - - def test_transform_numpy(self): - """Test that the transform() method works for NumpyDatasets.""" - num_datapoints = 100 - num_features = 10 - num_tasks = 10 - - # Generate data - X = np.random.rand(num_datapoints, num_features) - y = np.random.randint(2, size=(num_datapoints, num_tasks)) - w = np.random.randint(2, size=(num_datapoints, num_tasks)) - ids = np.array(["id"] * num_datapoints) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - # Transform it - - def fn(x, y, w): - return (2 * x, 1.5 * y, w) - - transformed = dataset.transform(fn) - np.testing.assert_array_equal(X, dataset.X) - np.testing.assert_array_equal(y, dataset.y) - np.testing.assert_array_equal(w, dataset.w) - np.testing.assert_array_equal(ids, dataset.ids) - np.testing.assert_array_equal(2 * X, transformed.X) - np.testing.assert_array_equal(1.5 * y, transformed.y) - np.testing.assert_array_equal(w, transformed.w) - np.testing.assert_array_equal(ids, transformed.ids) - def 
test_transform_disk(self): - """Test that the transform() method works for DiskDatasets.""" - dataset = dc.data.tests.load_solubility_data() - X = dataset.X - y = dataset.y - w = dataset.w - ids = dataset.ids +def test_transform_disk(): + """Test that the transform() method works for DiskDatasets.""" + dataset = load_solubility_data() + X = dataset.X + y = dataset.y + w = dataset.w + ids = dataset.ids - # Transform it - def fn(x, y, w): - return (2 * x, 1.5 * y, w) + # Transform it - transformed = dataset.transform(fn) + transformer = TestTransformer(transform_X=True, transform_y=True) + for parallel in (True, False): + transformed = dataset.transform(transformer, parallel=parallel) np.testing.assert_array_equal(X, dataset.X) np.testing.assert_array_equal(y, dataset.y) np.testing.assert_array_equal(w, dataset.w) @@ -392,57 +71,492 @@ class TestDatasets(test_util.TensorFlowTestCase): np.testing.assert_array_equal(w, transformed.w) np.testing.assert_array_equal(ids, transformed.ids) - def test_to_numpy(self): - """Test that transformation to numpy arrays is sensible.""" - solubility_dataset = dc.data.tests.load_solubility_data() - data_shape = solubility_dataset.get_data_shape() - tasks = solubility_dataset.get_task_names() - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - N_samples = len(solubility_dataset) - N_tasks = len(tasks) - - assert X.shape == (N_samples,) + data_shape - assert y.shape == (N_samples, N_tasks) - assert w.shape == (N_samples, N_tasks) - assert ids.shape == (N_samples,) - - def test_consistent_ordering(self): - """Test that ordering of labels is consistent over time.""" - solubility_dataset = dc.data.tests.load_solubility_data() - - ids1 = solubility_dataset.ids - ids2 = solubility_dataset.ids - - assert np.array_equal(ids1, ids2) - - def test_get_statistics(self): - """Test statistics computation of this dataset.""" - solubility_dataset = dc.data.tests.load_solubility_data() - X, y, 
_, _ = (solubility_dataset.X, solubility_dataset.y, + +def test_sparsify_and_densify(): + """Test that sparsify and densify work as inverses.""" + # Test on identity matrix + num_samples = 10 + num_features = num_samples + X = np.eye(num_samples) + X_sparse = dc.data.sparsify_features(X) + X_reconstructed = dc.data.densify_features(X_sparse, num_features) + np.testing.assert_array_equal(X, X_reconstructed) + + # Generate random sparse features dataset + np.random.seed(123) + p = .05 + X = np.random.binomial(1, p, size=(num_samples, num_features)) + X_sparse = dc.data.sparsify_features(X) + X_reconstructed = dc.data.densify_features(X_sparse, num_features) + np.testing.assert_array_equal(X, X_reconstructed) + + # Test edge case with array of all zeros + X = np.zeros((num_samples, num_features)) + X_sparse = dc.data.sparsify_features(X) + X_reconstructed = dc.data.densify_features(X_sparse, num_features) + np.testing.assert_array_equal(X, X_reconstructed) + + +def test_pad_features(): + """Test that pad_features pads features correctly.""" + batch_size = 100 + num_features = 10 + + # Test cases where n_samples < 2*n_samples < batch_size + n_samples = 29 + X_b = np.zeros((n_samples, num_features)) + + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + # Test cases where n_samples < batch_size + n_samples = 79 + X_b = np.zeros((n_samples, num_features)) + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + # Test case where n_samples == batch_size + n_samples = 100 + X_b = np.zeros((n_samples, num_features)) + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + # Test case for object featurization. 
+ n_samples = 2 + X_b = np.array([{"a": 1}, {"b": 2}]) + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + # Test case for more complicated object featurization + n_samples = 2 + X_b = np.array([(1, {"a": 1}), (2, {"b": 2})]) + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + # Test case with multidimensional data + n_samples = 50 + num_atoms = 15 + d = 3 + X_b = np.zeros((n_samples, num_atoms, d)) + X_out = dc.data.pad_features(batch_size, X_b) + assert len(X_out) == batch_size + + +def test_pad_batches(): + """Test that pad_batch pads batches correctly.""" + batch_size = 100 + num_features = 10 + num_tasks = 5 + + # Test cases where n_samples < 2*n_samples < batch_size + n_samples = 29 + X_b = np.zeros((n_samples, num_features)) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + # Test cases where n_samples < batch_size + n_samples = 79 + X_b = np.zeros((n_samples, num_features)) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + # Test case where n_samples == batch_size + n_samples = 100 + X_b = np.zeros((n_samples, num_features)) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + # Test case for object featurization. 
+ n_samples = 2 + X_b = np.array([{"a": 1}, {"b": 2}]) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + # Test case for more complicated object featurization + n_samples = 2 + X_b = np.array([(1, {"a": 1}), (2, {"b": 2})]) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + # Test case with multidimensional data + n_samples = 50 + num_atoms = 15 + d = 3 + X_b = np.zeros((n_samples, num_atoms, d)) + y_b = np.zeros((n_samples, num_tasks)) + w_b = np.zeros((n_samples, num_tasks)) + ids_b = np.zeros((n_samples,)) + + X_out, y_out, w_out, ids_out = dc.data.pad_batch(batch_size, X_b, y_b, w_b, + ids_b) + assert len(X_out) == len(y_out) == len(w_out) == len(ids_out) == batch_size + + +def test_get_task_names(): + """Test that get_task_names returns correct task_names""" + solubility_dataset = load_solubility_data() + assert solubility_dataset.get_task_names() == ["log-solubility"] + + multitask_dataset = load_multitask_data() + assert sorted(multitask_dataset.get_task_names()) == sorted([ + "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", + "task8", "task9", "task10", "task11", "task12", "task13", "task14", + "task15", "task16" + ]) + + +def test_get_data_shape(): + """Test that get_data_shape returns currect data shape""" + solubility_dataset = load_solubility_data() + assert solubility_dataset.get_data_shape() == (1024,) + + multitask_dataset = load_multitask_data() + assert multitask_dataset.get_data_shape() == (1024,) + + +def test_len(): + """Test that len(dataset) works.""" + 
solubility_dataset = load_solubility_data() + assert len(solubility_dataset) == 10 + + +def test_reshard(): + """Test that resharding the dataset works.""" + solubility_dataset = load_solubility_data() + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, solubility_dataset.w, solubility_dataset.ids) - X_means, y_means = np.mean(X, axis=0), np.mean(y, axis=0) - X_stds, y_stds = np.std(X, axis=0), np.std(y, axis=0) - comp_X_means, comp_X_stds, comp_y_means, comp_y_stds = \ - solubility_dataset.get_statistics() - np.testing.assert_allclose(comp_X_means, X_means) - np.testing.assert_allclose(comp_y_means, y_means) - np.testing.assert_allclose(comp_X_stds, X_stds) - np.testing.assert_allclose(comp_y_stds, y_stds) - - def test_disk_iterate_batch_size(self): - solubility_dataset = dc.data.tests.load_solubility_data() - X, y, _, _ = (solubility_dataset.X, solubility_dataset.y, + assert solubility_dataset.get_number_shards() == 1 + solubility_dataset.reshard(shard_size=1) + assert solubility_dataset.get_shard_size() == 1 + X_r, y_r, w_r, ids_r = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + assert solubility_dataset.get_number_shards() == 10 + solubility_dataset.reshard(shard_size=10) + assert solubility_dataset.get_shard_size() == 10 + X_rr, y_rr, w_rr, ids_rr = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + + # Test first resharding worked + np.testing.assert_array_equal(X, X_r) + np.testing.assert_array_equal(y, y_r) + np.testing.assert_array_equal(w, w_r) + np.testing.assert_array_equal(ids, ids_r) + + # Test second resharding worked + np.testing.assert_array_equal(X, X_rr) + np.testing.assert_array_equal(y, y_rr) + np.testing.assert_array_equal(w, w_rr) + np.testing.assert_array_equal(ids, ids_rr) + + +def test_complete_shuffle(): + shard_sizes = [1, 2, 3, 4, 5] + + all_Xs, all_ys, all_ws, all_ids = [], [], [], [] + + def shard_generator(): + for sz in shard_sizes: 
+ X_b = np.random.rand(sz, 1) + y_b = np.random.rand(sz, 1) + w_b = np.random.rand(sz, 1) + ids_b = np.random.rand(sz) + + all_Xs.append(X_b) + all_ys.append(y_b) + all_ws.append(w_b) + all_ids.append(ids_b) + + yield X_b, y_b, w_b, ids_b + + dataset = dc.data.DiskDataset.create_dataset(shard_generator()) + + res = dataset.complete_shuffle() + + # approx 1/15! chance of equality + np.testing.assert_equal(np.any(np.not_equal(dataset.X, res.X)), True) + np.testing.assert_equal(np.any(np.not_equal(dataset.y, res.w)), True) + np.testing.assert_equal(np.any(np.not_equal(dataset.w, res.y)), True) + np.testing.assert_equal(np.any(np.not_equal(dataset.ids, res.ids)), True) + + np.testing.assert_array_equal( + np.sort(dataset.X, axis=0), np.sort(res.X, axis=0)) + np.testing.assert_array_equal( + np.sort(dataset.y, axis=0), np.sort(res.y, axis=0)) + np.testing.assert_array_equal( + np.sort(dataset.w, axis=0), np.sort(res.w, axis=0)) + np.testing.assert_array_equal(np.sort(dataset.ids), np.sort(res.ids)) + + +def test_iterbatches(): + """Test that iterating over batches of data works.""" + solubility_dataset = load_solubility_data() + batch_size = 2 + data_shape = solubility_dataset.get_data_shape() + tasks = solubility_dataset.get_task_names() + for (X_b, y_b, w_b, ids_b) in solubility_dataset.iterbatches(batch_size): + assert X_b.shape == (batch_size,) + data_shape + assert y_b.shape == (batch_size,) + (len(tasks),) + assert w_b.shape == (batch_size,) + (len(tasks),) + assert ids_b.shape == (batch_size,) + + +def test_itersamples_numpy(): + """Test that iterating over samples in a NumpyDataset works.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.NumpyDataset(X, y, w, ids) + for i, (sx, sy, sw, sid) in 
enumerate(dataset.itersamples()): + np.testing.assert_array_equal(sx, X[i]) + np.testing.assert_array_equal(sy, y[i]) + np.testing.assert_array_equal(sw, w[i]) + np.testing.assert_array_equal(sid, ids[i]) + + +def test_itersamples_disk(): + """Test that iterating over samples in a DiskDataset works.""" + solubility_dataset = load_solubility_data() + X = solubility_dataset.X + y = solubility_dataset.y + w = solubility_dataset.w + ids = solubility_dataset.ids + for i, (sx, sy, sw, sid) in enumerate(solubility_dataset.itersamples()): + np.testing.assert_array_equal(sx, X[i]) + np.testing.assert_array_equal(sy, y[i]) + np.testing.assert_array_equal(sw, w[i]) + np.testing.assert_array_equal(sid, ids[i]) + + +def test_transform_numpy(): + """Test that the transform() method works for NumpyDatasets.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + # Transform it + + transformer = TestTransformer(transform_X=True, transform_y=True) + transformed = dataset.transform(transformer) + np.testing.assert_array_equal(X, dataset.X) + np.testing.assert_array_equal(y, dataset.y) + np.testing.assert_array_equal(w, dataset.w) + np.testing.assert_array_equal(ids, dataset.ids) + np.testing.assert_array_equal(2 * X, transformed.X) + np.testing.assert_array_equal(1.5 * y, transformed.y) + np.testing.assert_array_equal(w, transformed.w) + np.testing.assert_array_equal(ids, transformed.ids) + + +def test_to_numpy(): + """Test that transformation to numpy arrays is sensible.""" + solubility_dataset = load_solubility_data() + data_shape = solubility_dataset.get_data_shape() + tasks = solubility_dataset.get_task_names() + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, 
solubility_dataset.w, solubility_dataset.ids) - batch_sizes = [] - for X, y, _, _ in solubility_dataset.iterbatches( - 3, epochs=2, pad_batches=False, deterministic=True): - batch_sizes.append(len(X)) - self.assertEqual([3, 3, 3, 1, 3, 3, 3, 1], batch_sizes) + N_samples = len(solubility_dataset) + N_tasks = len(tasks) + + assert X.shape == (N_samples,) + data_shape + assert y.shape == (N_samples, N_tasks) + assert w.shape == (N_samples, N_tasks) + assert ids.shape == (N_samples,) + + +def test_consistent_ordering(): + """Test that ordering of labels is consistent over time.""" + solubility_dataset = load_solubility_data() + + ids1 = solubility_dataset.ids + ids2 = solubility_dataset.ids + + assert np.array_equal(ids1, ids2) + + +def test_get_statistics(): + """Test statistics computation of this dataset.""" + solubility_dataset = load_solubility_data() + X, y, _, _ = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + X_means, y_means = np.mean(X, axis=0), np.mean(y, axis=0) + X_stds, y_stds = np.std(X, axis=0), np.std(y, axis=0) + comp_X_means, comp_X_stds, comp_y_means, comp_y_stds = \ + solubility_dataset.get_statistics() + np.testing.assert_allclose(comp_X_means, X_means) + np.testing.assert_allclose(comp_y_means, y_means) + np.testing.assert_allclose(comp_X_stds, X_stds) + np.testing.assert_allclose(comp_y_stds, y_stds) + + +def test_disk_iterate_batch_size(): + solubility_dataset = load_solubility_data() + X, y, _, _ = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + batch_sizes = [] + for X, y, _, _ in solubility_dataset.iterbatches( + 3, epochs=2, pad_batches=False, deterministic=True): + batch_sizes.append(len(X)) + assert [3, 3, 3, 1, 3, 3, 3, 1] == batch_sizes + + +def test_disk_pad_batches(): + shard_sizes = [21, 11, 41, 21, 51] + batch_size = 10 + + all_Xs, all_ys, all_ws, all_ids = [], [], [], [] + + def shard_generator(): + for sz in shard_sizes: + X_b = 
np.random.rand(sz, 1) + y_b = np.random.rand(sz, 1) + w_b = np.random.rand(sz, 1) + ids_b = np.random.rand(sz) + + all_Xs.append(X_b) + all_ys.append(y_b) + all_ws.append(w_b) + all_ids.append(ids_b) + + yield X_b, y_b, w_b, ids_b + + dataset = dc.data.DiskDataset.create_dataset(shard_generator()) + + all_Xs = np.concatenate(all_Xs, axis=0) + all_ys = np.concatenate(all_ys, axis=0) + all_ws = np.concatenate(all_ws, axis=0) + all_ids = np.concatenate(all_ids, axis=0) + + test_Xs, test_ys, test_ws, test_ids = [], [], [], [] + for bidx, (a, b, c, d) in enumerate( + dataset.iterbatches( + batch_size=batch_size, pad_batches=True, deterministic=True)): + + test_Xs.append(a) + test_ys.append(b) + test_ws.append(c) + test_ids.append(d) + + test_Xs = np.concatenate(test_Xs, axis=0) + test_ys = np.concatenate(test_ys, axis=0) + test_ws = np.concatenate(test_ws, axis=0) + test_ids = np.concatenate(test_ids, axis=0) + + total_size = sum(shard_sizes) + + assert bidx == math.ceil(total_size / batch_size) - 1 + + expected_batches = math.ceil(total_size / batch_size) * batch_size + + assert len(test_Xs) == expected_batches + assert len(test_ys) == expected_batches + assert len(test_ws) == expected_batches + assert len(test_ids) == expected_batches + + np.testing.assert_array_equal(all_Xs, test_Xs[:total_size, :]) + np.testing.assert_array_equal(all_ys, test_ys[:total_size, :]) + np.testing.assert_array_equal(all_ws, test_ws[:total_size, :]) + np.testing.assert_array_equal(all_ids, test_ids[:total_size]) + + +def test_disk_iterate_y_w_None(): + shard_sizes = [21, 11, 41, 21, 51] + batch_size = 10 + + all_Xs, all_ids = [], [] + + def shard_generator(): + for sz in shard_sizes: + X_b = np.random.rand(sz, 1) + ids_b = np.random.rand(sz) - def test_disk_pad_batches(self): - shard_sizes = [21, 11, 41, 21, 51] - batch_size = 10 + all_Xs.append(X_b) + all_ids.append(ids_b) + + yield X_b, None, None, ids_b + + dataset = dc.data.DiskDataset.create_dataset(shard_generator()) + + all_Xs = 
np.concatenate(all_Xs, axis=0) + all_ids = np.concatenate(all_ids, axis=0) + + test_Xs, test_ids = [], [] + for bidx, (a, _, _, d) in enumerate( + dataset.iterbatches( + batch_size=batch_size, pad_batches=True, deterministic=True)): + + test_Xs.append(a) + test_ids.append(d) + + test_Xs = np.concatenate(test_Xs, axis=0) + test_ids = np.concatenate(test_ids, axis=0) + + total_size = sum(shard_sizes) + + assert bidx == math.ceil(total_size / batch_size) - 1 + + expected_batches = math.ceil(total_size / batch_size) * batch_size + + assert len(test_Xs) == expected_batches + assert len(test_ids) == expected_batches + + np.testing.assert_array_equal(all_Xs, test_Xs[:total_size, :]) + np.testing.assert_array_equal(all_ids, test_ids[:total_size]) + + +def test_disk_iterate_batch(): + + all_batch_sizes = [None, 32, 17, 11] + all_shard_sizes = [[7, 3, 12, 4, 5], [1, 1, 1, 1, 1], [31, 31, 31, 31, 31], + [21, 11, 41, 21, 51]] + + for idx in range(25): + shard_length = random.randint(1, 32) + shard_sizes = [] + for _ in range(shard_length): + shard_sizes.append(random.randint(1, 128)) + all_shard_sizes.append(shard_sizes) + if idx == 0: + # special case to test + all_batch_sizes.append(None) + else: + all_batch_sizes.append(random.randint(1, 256)) + + for shard_sizes, batch_size in zip(all_shard_sizes, all_batch_sizes): all_Xs, all_ys, all_ws, all_ids = [], [], [], [] @@ -467,196 +581,235 @@ class TestDatasets(test_util.TensorFlowTestCase): all_ws = np.concatenate(all_ws, axis=0) all_ids = np.concatenate(all_ids, axis=0) + total_size = sum(shard_sizes) + + assert dataset.X.shape[0] == total_size + + # deterministic test_Xs, test_ys, test_ws, test_ids = [], [], [], [] for bidx, (a, b, c, d) in enumerate( dataset.iterbatches( - batch_size=batch_size, pad_batches=True, deterministic=True)): + batch_size=batch_size, pad_batches=False, deterministic=True)): test_Xs.append(a) test_ys.append(b) test_ws.append(c) test_ids.append(d) + if batch_size is None: + for idx, (tx, ty, tw, tids) 
in enumerate( + zip(test_Xs, test_ys, test_ws, test_ids)): + assert len(tx) == shard_sizes[idx] + assert len(ty) == shard_sizes[idx] + assert len(tw) == shard_sizes[idx] + assert len(tids) == shard_sizes[idx] + test_Xs = np.concatenate(test_Xs, axis=0) test_ys = np.concatenate(test_ys, axis=0) test_ws = np.concatenate(test_ws, axis=0) test_ids = np.concatenate(test_ids, axis=0) - total_size = sum(shard_sizes) - - assert bidx == math.ceil(total_size / batch_size) - 1 - - expected_batches = math.ceil(total_size / batch_size) * batch_size - - assert len(test_Xs) == expected_batches - assert len(test_ys) == expected_batches - assert len(test_ws) == expected_batches - assert len(test_ids) == expected_batches - - np.testing.assert_array_equal(all_Xs, test_Xs[:total_size, :]) - np.testing.assert_array_equal(all_ys, test_ys[:total_size, :]) - np.testing.assert_array_equal(all_ws, test_ws[:total_size, :]) - np.testing.assert_array_equal(all_ids, test_ids[:total_size]) - - def test_disk_iterate_y_w_None(self): - shard_sizes = [21, 11, 41, 21, 51] - batch_size = 10 - - all_Xs, all_ys, all_ws, all_ids = [], [], [], [] - - def shard_generator(): - for sz in shard_sizes: - X_b = np.random.rand(sz, 1) - ids_b = np.random.rand(sz) - - all_Xs.append(X_b) - all_ids.append(ids_b) + if batch_size is None: + assert bidx == len(shard_sizes) - 1 + else: + assert bidx == math.ceil(total_size / batch_size) - 1 - yield X_b, None, None, ids_b - - dataset = dc.data.DiskDataset.create_dataset(shard_generator()) + np.testing.assert_array_equal(all_Xs, test_Xs) + np.testing.assert_array_equal(all_ys, test_ys) + np.testing.assert_array_equal(all_ws, test_ws) + np.testing.assert_array_equal(all_ids, test_ids) - all_Xs = np.concatenate(all_Xs, axis=0) - all_ids = np.concatenate(all_ids, axis=0) + # non-deterministic + test_Xs, test_ys, test_ws, test_ids = [], [], [], [] - test_Xs, test_ids = [], [] - for bidx, (a, _, _, d) in enumerate( + for bidx, (a, b, c, d) in enumerate( dataset.iterbatches( - 
batch_size=batch_size, pad_batches=True, deterministic=True)): + batch_size=batch_size, pad_batches=False, deterministic=False)): test_Xs.append(a) + test_ys.append(b) + test_ws.append(c) test_ids.append(d) + # we don't know the order in which the shards are iterated in. test_Xs = np.concatenate(test_Xs, axis=0) + test_ys = np.concatenate(test_ys, axis=0) + test_ws = np.concatenate(test_ws, axis=0) test_ids = np.concatenate(test_ids, axis=0) - total_size = sum(shard_sizes) - - assert bidx == math.ceil(total_size / batch_size) - 1 - - expected_batches = math.ceil(total_size / batch_size) * batch_size - - assert len(test_Xs) == expected_batches - assert len(test_ids) == expected_batches - - np.testing.assert_array_equal(all_Xs, test_Xs[:total_size, :]) - np.testing.assert_array_equal(all_ids, test_ids[:total_size]) + if batch_size is None: + assert bidx == len(shard_sizes) - 1 + else: + assert bidx == math.ceil(total_size / batch_size) - 1 - def test_disk_iterate_batch(self): - - all_batch_sizes = [None, 32, 17, 11] - all_shard_sizes = [[7, 3, 12, 4, 5], [1, 1, 1, 1, 1], [31, 31, 31, 31, 31], - [21, 11, 41, 21, 51]] - - for idx in range(25): - shard_length = random.randint(1, 32) - shard_sizes = [] - for _ in range(shard_length): - shard_sizes.append(random.randint(1, 128)) - all_shard_sizes.append(shard_sizes) - if idx == 0: - # special case to test - all_batch_sizes.append(None) - else: - all_batch_sizes.append(random.randint(1, 256)) - - for shard_sizes, batch_size in zip(all_shard_sizes, all_batch_sizes): - - all_Xs, all_ys, all_ws, all_ids = [], [], [], [] - - def shard_generator(): - for sz in shard_sizes: - X_b = np.random.rand(sz, 1) - y_b = np.random.rand(sz, 1) - w_b = np.random.rand(sz, 1) - ids_b = np.random.rand(sz) - - all_Xs.append(X_b) - all_ys.append(y_b) - all_ws.append(w_b) - all_ids.append(ids_b) - - yield X_b, y_b, w_b, ids_b - - dataset = dc.data.DiskDataset.create_dataset(shard_generator()) - - all_Xs = np.concatenate(all_Xs, axis=0) - all_ys = 
np.concatenate(all_ys, axis=0) - all_ws = np.concatenate(all_ws, axis=0) - all_ids = np.concatenate(all_ids, axis=0) - - total_size = sum(shard_sizes) - - assert dataset.X.shape[0] == total_size - - # deterministic - test_Xs, test_ys, test_ws, test_ids = [], [], [], [] - for bidx, (a, b, c, d) in enumerate( - dataset.iterbatches( - batch_size=batch_size, pad_batches=False, deterministic=True)): - - test_Xs.append(a) - test_ys.append(b) - test_ws.append(c) - test_ids.append(d) - - if batch_size is None: - for idx, (tx, ty, tw, tids) in enumerate( - zip(test_Xs, test_ys, test_ws, test_ids)): - assert len(tx) == shard_sizes[idx] - assert len(ty) == shard_sizes[idx] - assert len(tw) == shard_sizes[idx] - assert len(tids) == shard_sizes[idx] - - test_Xs = np.concatenate(test_Xs, axis=0) - test_ys = np.concatenate(test_ys, axis=0) - test_ws = np.concatenate(test_ws, axis=0) - test_ids = np.concatenate(test_ids, axis=0) - - if batch_size is None: - assert bidx == len(shard_sizes) - 1 - else: - assert bidx == math.ceil(total_size / batch_size) - 1 - - np.testing.assert_array_equal(all_Xs, test_Xs) - np.testing.assert_array_equal(all_ys, test_ys) - np.testing.assert_array_equal(all_ws, test_ws) - np.testing.assert_array_equal(all_ids, test_ids) - - # non-deterministic - test_Xs, test_ys, test_ws, test_ids = [], [], [], [] - - for bidx, (a, b, c, d) in enumerate( - dataset.iterbatches( - batch_size=batch_size, pad_batches=False, deterministic=False)): - - test_Xs.append(a) - test_ys.append(b) - test_ws.append(c) - test_ids.append(d) - - # we don't know the order in which the shards are iterated in. 
- test_Xs = np.concatenate(test_Xs, axis=0) - test_ys = np.concatenate(test_ys, axis=0) - test_ws = np.concatenate(test_ws, axis=0) - test_ids = np.concatenate(test_ids, axis=0) - - if batch_size is None: - assert bidx == len(shard_sizes) - 1 - else: - assert bidx == math.ceil(total_size / batch_size) - 1 - - np.testing.assert_array_equal( - np.sort(all_Xs, axis=0), np.sort(test_Xs, axis=0)) - np.testing.assert_array_equal( - np.sort(all_ys, axis=0), np.sort(test_ys, axis=0)) - np.testing.assert_array_equal( - np.sort(all_ws, axis=0), np.sort(test_ws, axis=0)) - np.testing.assert_array_equal( - np.sort(all_ids, axis=0), np.sort(test_ids, axis=0)) + np.testing.assert_array_equal( + np.sort(all_Xs, axis=0), np.sort(test_Xs, axis=0)) + np.testing.assert_array_equal( + np.sort(all_ys, axis=0), np.sort(test_ys, axis=0)) + np.testing.assert_array_equal( + np.sort(all_ws, axis=0), np.sort(test_ws, axis=0)) + np.testing.assert_array_equal( + np.sort(all_ids, axis=0), np.sort(test_ids, axis=0)) + + +def test_merge(): + """Test that dataset merge works.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + num_datasets = 4 + datasets = [] + for i in range(num_datasets): + Xi = np.random.rand(num_datapoints, num_features) + yi = np.random.randint(2, size=(num_datapoints, num_tasks)) + wi = np.ones((num_datapoints, num_tasks)) + idsi = np.array(["id"] * num_datapoints) + dataseti = dc.data.DiskDataset.from_numpy(Xi, yi, wi, idsi) + datasets.append(dataseti) + + new_data = dc.data.datasets.DiskDataset.merge(datasets) + + # Check that we have all the data in + assert new_data.X.shape == (num_datapoints * num_datasets, num_features) + assert new_data.y.shape == (num_datapoints * num_datasets, num_tasks) + assert len(new_data.tasks) == len(datasets[0].tasks) + + +def test_make_tf_dataset(): + """Test creating a Tensorflow Iterator from a Dataset.""" + X = np.random.random((100, 5)) + y = np.random.random((100, 1)) + dataset = dc.data.NumpyDataset(X, y) + iterator = 
dataset.make_tf_dataset( + batch_size=10, epochs=2, deterministic=True) + for i, (batch_X, batch_y, batch_w) in enumerate(iterator): + offset = (i % 10) * 10 + np.testing.assert_array_equal(X[offset:offset + 10, :], batch_X) + np.testing.assert_array_equal(y[offset:offset + 10, :], batch_y) + np.testing.assert_array_equal(np.ones((10, 1)), batch_w) + assert i == 19 + + +def _validate_pytorch_dataset(dataset): + X = dataset.X + y = dataset.y + w = dataset.w + ids = dataset.ids + n_samples = X.shape[0] + + # Test iterating in order. + + ds = dataset.make_pytorch_dataset(epochs=2, deterministic=True) + for i, (iter_X, iter_y, iter_w, iter_id) in enumerate(ds): + j = i % n_samples + np.testing.assert_array_equal(X[j, :], iter_X) + np.testing.assert_array_equal(y[j, :], iter_y) + np.testing.assert_array_equal(w[j, :], iter_w) + assert ids[j] == iter_id + assert i == 2 * n_samples - 1 + + # Test iterating out of order. + + ds = dataset.make_pytorch_dataset(epochs=2, deterministic=False) + id_to_index = dict((id, i) for i, id in enumerate(ids)) + id_count = dict((id, 0) for id in ids) + for iter_X, iter_y, iter_w, iter_id in ds: + j = id_to_index[iter_id] + np.testing.assert_array_equal(X[j, :], iter_X) + np.testing.assert_array_equal(y[j, :], iter_y) + np.testing.assert_array_equal(w[j, :], iter_w) + id_count[iter_id] += 1 + assert all(id_count[id] == 2 for id in ids) + + # Test iterating in batches. 
+ + ds = dataset.make_pytorch_dataset(epochs=2, deterministic=False, batch_size=7) + id_to_index = dict((id, i) for i, id in enumerate(ids)) + id_count = dict((id, 0) for id in ids) + for iter_X, iter_y, iter_w, iter_id in ds: + size = len(iter_id) + assert size <= 7 + for i in range(size): + j = id_to_index[iter_id[i]] + np.testing.assert_array_equal(X[j, :], iter_X[i]) + np.testing.assert_array_equal(y[j, :], iter_y[i]) + np.testing.assert_array_equal(w[j, :], iter_w[i]) + id_count[iter_id[i]] += 1 + assert all(id_count[id] == 2 for id in ids) + + # Test iterating with multiple workers. + + import torch # noqa + ds = dataset.make_pytorch_dataset(epochs=2, deterministic=False) + loader = torch.utils.data.DataLoader(ds, num_workers=3) + id_count = dict((id, 0) for id in ids) + for iter_X, iter_y, iter_w, iter_id in loader: + j = id_to_index[iter_id[0]] + np.testing.assert_array_equal(X[j, :], iter_X[0]) + np.testing.assert_array_equal(y[j, :], iter_y[0]) + np.testing.assert_array_equal(w[j, :], iter_w[0]) + id_count[iter_id[0]] += 1 + assert all(id_count[id] == 2 for id in ids) + + +def test_dataframe(): + """Test converting between Datasets and DataFrames.""" + dataset = load_solubility_data() + + # A round trip from Dataset to DataFrame to Dataset should produce identical arrays. + + df = dataset.to_dataframe() + dataset2 = dc.data.Dataset.from_dataframe(df) + np.testing.assert_array_equal(dataset.X, dataset2.X) + np.testing.assert_array_equal(dataset.y, dataset2.y) + np.testing.assert_array_equal(dataset.w, dataset2.w) + np.testing.assert_array_equal(dataset.ids, dataset2.ids) + + # Try specifying particular columns. 
+ + dataset3 = dc.data.Dataset.from_dataframe( + df, X=['X2', 'X4'], y='w', w=['y', 'X1']) + np.testing.assert_array_equal(dataset.X[:, (1, 3)], dataset3.X) + np.testing.assert_array_equal(dataset.w, dataset3.y) + np.testing.assert_array_equal( + np.stack([dataset.y[:, 0], dataset.X[:, 0]], axis=1), dataset3.w) + + +def test_to_str(): + """Tests to string representation of Dataset.""" + dataset = dc.data.NumpyDataset( + X=np.random.rand(5, 3), y=np.random.rand(5,), ids=np.arange(5)) + ref_str = '' + assert str(dataset) == ref_str + + # Test id shrinkage + dc.utils.set_print_threshold(10) + dataset = dc.data.NumpyDataset( + X=np.random.rand(50, 3), y=np.random.rand(50,), ids=np.arange(50)) + ref_str = '' + assert str(dataset) == ref_str + + # Test task shrinkage + dataset = dc.data.NumpyDataset( + X=np.random.rand(50, 3), y=np.random.rand(50, 20), ids=np.arange(50)) + ref_str = '' + assert str(dataset) == ref_str + + # Test max print size + dc.utils.set_max_print_size(25) + dataset = dc.data.NumpyDataset( + X=np.random.rand(50, 3), y=np.random.rand(50,), ids=np.arange(50)) + ref_str = '' + assert str(dataset) == ref_str + + +class TestDatasets(unittest.TestCase): + """ + Test basic top-level API for dataset objects. 
+ """ def test_numpy_iterate_batch_size(self): - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() X, y, _, _ = (solubility_dataset.X, solubility_dataset.y, solubility_dataset.w, solubility_dataset.ids) solubility_dataset = dc.data.NumpyDataset.from_DiskDataset( @@ -667,86 +820,6 @@ class TestDatasets(test_util.TensorFlowTestCase): batch_sizes.append(len(X)) self.assertEqual([3, 3, 3, 1, 3, 3, 3, 1], batch_sizes) - def test_merge(self): - """Test that dataset merge works.""" - num_datapoints = 10 - num_features = 10 - num_tasks = 1 - num_datasets = 4 - datasets = [] - for i in range(num_datasets): - Xi = np.random.rand(num_datapoints, num_features) - yi = np.random.randint(2, size=(num_datapoints, num_tasks)) - wi = np.ones((num_datapoints, num_tasks)) - idsi = np.array(["id"] * num_datapoints) - dataseti = dc.data.DiskDataset.from_numpy(Xi, yi, wi, idsi) - datasets.append(dataseti) - - new_data = dc.data.datasets.DiskDataset.merge(datasets) - - # Check that we have all the data in - assert new_data.X.shape == (num_datapoints * num_datasets, num_features) - assert new_data.y.shape == (num_datapoints * num_datasets, num_tasks) - assert len(new_data.tasks) == len(datasets[0].tasks) - - def test_make_tf_dataset(self): - """Test creating a Tensorflow Iterator from a Dataset.""" - X = np.random.random((100, 5)) - y = np.random.random((100, 1)) - dataset = dc.data.NumpyDataset(X, y) - iterator = dataset.make_tf_dataset( - batch_size=10, epochs=2, deterministic=True) - for i, (batch_X, batch_y, batch_w) in enumerate(iterator): - offset = (i % 10) * 10 - np.testing.assert_array_equal(X[offset:offset + 10, :], batch_X) - np.testing.assert_array_equal(y[offset:offset + 10, :], batch_y) - np.testing.assert_array_equal(np.ones((10, 1)), batch_w) - assert i == 19 - - def _validate_pytorch_dataset(self, dataset): - X = dataset.X - y = dataset.y - w = dataset.w - ids = dataset.ids - n_samples = X.shape[0] - - # Test iterating 
in order. - - ds = dataset.make_pytorch_dataset(epochs=2, deterministic=True) - for i, (iter_X, iter_y, iter_w, iter_id) in enumerate(ds): - j = i % n_samples - np.testing.assert_array_equal(X[j, :], iter_X) - np.testing.assert_array_equal(y[j, :], iter_y) - np.testing.assert_array_equal(w[j, :], iter_w) - assert ids[j] == iter_id - assert i == 2 * n_samples - 1 - - # Test iterating out of order. - - ds = dataset.make_pytorch_dataset(epochs=2, deterministic=False) - id_to_index = dict((id, i) for i, id in enumerate(ids)) - id_count = dict((id, 0) for id in ids) - for iter_X, iter_y, iter_w, iter_id in ds: - j = id_to_index[iter_id] - np.testing.assert_array_equal(X[j, :], iter_X) - np.testing.assert_array_equal(y[j, :], iter_y) - np.testing.assert_array_equal(w[j, :], iter_w) - id_count[iter_id] += 1 - assert all(id_count[id] == 2 for id in ids) - - # Test iterating with multiple workers. - - import torch - loader = torch.utils.data.DataLoader(ds, num_workers=3) - id_count = dict((id, 0) for id in ids) - for iter_X, iter_y, iter_w, iter_id in loader: - j = id_to_index[iter_id[0]] - np.testing.assert_array_equal(X[j, :], iter_X[0]) - np.testing.assert_array_equal(y[j, :], iter_y[0]) - np.testing.assert_array_equal(w[j, :], iter_w[0]) - id_count[iter_id[0]] += 1 - assert all(id_count[id] == 2 for id in ids) - @unittest.skipIf(PYTORCH_IMPORT_FAILED, 'PyTorch is not installed') def test_make_pytorch_dataset_from_numpy(self): """Test creating a PyTorch Dataset from a NumpyDataset.""" @@ -754,7 +827,7 @@ class TestDatasets(test_util.TensorFlowTestCase): y = np.random.random((100, 1)) ids = [str(i) for i in range(100)] dataset = dc.data.NumpyDataset(X, y, ids=ids) - self._validate_pytorch_dataset(dataset) + _validate_pytorch_dataset(dataset) @unittest.skipIf(PYTORCH_IMPORT_FAILED, 'PyTorch is not installed') def test_make_pytorch_dataset_from_images(self): @@ -764,59 +837,10 @@ class TestDatasets(test_util.TensorFlowTestCase): y = np.random.random((10, 1)) ids = [str(i) 
for i in range(len(files))] dataset = dc.data.ImageDataset(files, y, ids=ids) - self._validate_pytorch_dataset(dataset) + _validate_pytorch_dataset(dataset) @unittest.skipIf(PYTORCH_IMPORT_FAILED, 'PyTorch is not installed') def test_make_pytorch_dataset_from_disk(self): """Test creating a PyTorch Dataset from a DiskDataset.""" - dataset = dc.data.tests.load_solubility_data() - self._validate_pytorch_dataset(dataset) - - def test_dataframe(self): - """Test converting between Datasets and DataFrames.""" - dataset = dc.data.tests.load_solubility_data() - - # A round trip from Dataset to DataFrame to Dataset should produce identical arrays. - - df = dataset.to_dataframe() - dataset2 = dc.data.Dataset.from_dataframe(df) - np.testing.assert_array_equal(dataset.X, dataset2.X) - np.testing.assert_array_equal(dataset.y, dataset2.y) - np.testing.assert_array_equal(dataset.w, dataset2.w) - np.testing.assert_array_equal(dataset.ids, dataset2.ids) - - # Try specifying particular columns. - - dataset3 = dc.data.Dataset.from_dataframe( - df, X=['X2', 'X4'], y='w', w=['y', 'X1']) - np.testing.assert_array_equal(dataset.X[:, (1, 3)], dataset3.X) - np.testing.assert_array_equal(dataset.w, dataset3.y) - np.testing.assert_array_equal( - np.stack([dataset.y[:, 0], dataset.X[:, 0]], axis=1), dataset3.w) - - def test_to_str(self): - """Tests to string representation of Dataset.""" - dataset = dc.data.NumpyDataset( - X=np.random.rand(5, 3), y=np.random.rand(5,), ids=np.arange(5)) - ref_str = '' - assert str(dataset) == ref_str - - # Test id shrinkage - dc.utils.set_print_threshold(10) - dataset = dc.data.NumpyDataset( - X=np.random.rand(50, 3), y=np.random.rand(50,), ids=np.arange(50)) - ref_str = '' - assert str(dataset) == ref_str - - # Test task shrinkage - dataset = dc.data.NumpyDataset( - X=np.random.rand(50, 3), y=np.random.rand(50, 20), ids=np.arange(50)) - ref_str = '' - assert str(dataset) == ref_str - - # Test max print size - dc.utils.set_max_print_size(25) - dataset = 
dc.data.NumpyDataset( - X=np.random.rand(50, 3), y=np.random.rand(50,), ids=np.arange(50)) - ref_str = '' - assert str(dataset) == ref_str + dataset = load_solubility_data() + _validate_pytorch_dataset(dataset) diff --git a/deepchem/data/tests/test_drop.py b/deepchem/data/tests/test_drop.py index 959cad395f8f804ee115fee6c9f00cff93346490..cf265d9fa824e12d413a89666faea03fcd7b36d3 100644 --- a/deepchem/data/tests/test_drop.py +++ b/deepchem/data/tests/test_drop.py @@ -1,11 +1,7 @@ import os -import shutil import logging import unittest -import tempfile import deepchem as dc -import numpy as np -from sklearn.ensemble import RandomForestClassifier logger = logging.getLogger(__name__) @@ -19,10 +15,6 @@ class TestDrop(unittest.TestCase): def test_drop(self): """Test on dataset where RDKit fails on some strings.""" - # Set some global variables up top - reload = True - len_full = 25 - current_dir = os.path.dirname(os.path.realpath(__file__)) logger.info("About to load emols dataset.") dataset_file = os.path.join(current_dir, "mini_emols.csv") @@ -33,8 +25,8 @@ class TestDrop(unittest.TestCase): emols_tasks = ['activity'] loader = dc.data.CSVLoader( - tasks=emols_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) + tasks=emols_tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file) X, y, w, ids = (dataset.X, dataset.y, dataset.w, dataset.ids) assert len(X) == len(y) == len(w) == len(ids) diff --git a/deepchem/data/tests/test_fasta_loader.py b/deepchem/data/tests/test_fasta_loader.py index 6e4225dbe93123e78ee3fb4e178b541ed9288443..ed1b665f9d470a8f76656bb18d531662989e46cf 100644 --- a/deepchem/data/tests/test_fasta_loader.py +++ b/deepchem/data/tests/test_fasta_loader.py @@ -1,9 +1,6 @@ """ Tests that FASTA files can be loaded. 
""" -__author__ = "Bharath Ramsundar" -__license__ = "MIT" - import os import unittest @@ -23,7 +20,7 @@ class TestFASTALoader(unittest.TestCase): input_file = os.path.join(self.current_dir, "../../data/tests/example.fasta") loader = dc.data.FASTALoader() - sequences = loader.featurize(input_file) + sequences = loader.create_dataset(input_file) # example.fasta contains 3 sequences each of length 58. # The one-hot encoding turns base-pairs into vectors of length 5 (ATCGN). diff --git a/deepchem/data/tests/test_image_dataset.py b/deepchem/data/tests/test_image_dataset.py index ec914640acfaa997eb7f8af5e64763fdda61b877..8a16dd7123731779e86e6e13a82b67e77fa2c967 100644 --- a/deepchem/data/tests/test_image_dataset.py +++ b/deepchem/data/tests/test_image_dataset.py @@ -1,10 +1,6 @@ """ Tests for ImageDataset class """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import unittest import numpy as np import deepchem as dc diff --git a/deepchem/data/tests/test_image_loader.py b/deepchem/data/tests/test_image_loader.py index b678d9335e7cf05b74beeba4337e40d4eb784656..0e29a63ca5e31d7a3b1c48787c3dc4dc28b58d1f 100644 --- a/deepchem/data/tests/test_image_loader.py +++ b/deepchem/data/tests/test_image_loader.py @@ -7,6 +7,7 @@ import tempfile from scipy import misc import deepchem as dc import zipfile +import numpy as np class TestImageLoader(unittest.TestCase): @@ -29,7 +30,6 @@ class TestImageLoader(unittest.TestCase): Image.fromarray(self.face).save(self.face_copy_path) # Create zip of image file - #self.zip_path = "/home/rbharath/misc/cells.zip" self.zip_path = os.path.join(self.data_dir, "face.zip") zipf = zipfile.ZipFile(self.zip_path, "w", zipfile.ZIP_DEFLATED) zipf.write(self.face_path) @@ -58,38 +58,45 @@ class TestImageLoader(unittest.TestCase): def test_png_simple_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.face_path) + dataset = loader.create_dataset(self.face_path) # These 
are the known dimensions of face.png assert dataset.X.shape == (1, 768, 1024, 3) + def test_png_simple_load_with_labels(self): + loader = dc.data.ImageLoader() + dataset = loader.create_dataset((self.face_path, np.array(1))) + # These are the known dimensions of face.png + assert dataset.X.shape == (1, 768, 1024, 3) + assert (dataset.y == np.ones((1,))).all() + def test_tif_simple_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.tif_image_path) + dataset = loader.create_dataset(self.tif_image_path) # TODO(rbharath): Where are the color channels? assert dataset.X.shape == (1, 44, 330) def test_png_multi_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize([self.face_path, self.face_copy_path]) + dataset = loader.create_dataset([self.face_path, self.face_copy_path]) assert dataset.X.shape == (2, 768, 1024, 3) def test_png_zip_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.zip_path) + dataset = loader.create_dataset(self.zip_path) assert dataset.X.shape == (1, 768, 1024, 3) def test_png_multi_zip_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.multi_zip_path) + dataset = loader.create_dataset(self.multi_zip_path) assert dataset.X.shape == (2, 768, 1024, 3) def test_multitype_zip_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.multitype_zip_path) + dataset = loader.create_dataset(self.multitype_zip_path) # Since the different files have different shapes, makes an object array assert dataset.X.shape == (2,) def test_directory_load(self): loader = dc.data.ImageLoader() - dataset = loader.featurize(self.image_dir) + dataset = loader.create_dataset(self.image_dir) assert dataset.X.shape == (2, 768, 1024, 3) diff --git a/deepchem/data/tests/test_inmemory.py b/deepchem/data/tests/test_inmemory.py new file mode 100644 index 0000000000000000000000000000000000000000..87ce68122b9f404081e0df04ae6022f0dcf9d582 --- /dev/null +++ 
b/deepchem/data/tests/test_inmemory.py @@ -0,0 +1,58 @@ +import deepchem as dc +import numpy as np + + +def test_inmemory_features(): + smiles = ["C", "CC", "CCC", "CCCC"] + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + dataset = loader.create_dataset(smiles, shard_size=2) + assert len(dataset) == 4 + assert dataset.X.shape == (4, 1024) + assert dataset.get_number_shards() == 2 + assert (dataset.ids == np.arange(4)).all() + + +def test_inmemory_features_and_labels(): + smiles = ["C", "CC", "CCC", "CCCC"] + labels = [1, 0, 1, 0] + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + dataset = loader.create_dataset(zip(smiles, labels), shard_size=2) + assert len(dataset) == 4 + assert dataset.X.shape == (4, 1024) + assert (dataset.y == np.array(labels)).all() + assert dataset.get_number_shards() == 2 + assert (dataset.ids == np.arange(4)).all() + + +def test_inmemory_features_and_labels_and_weights(): + smiles = ["C", "CC", "CCC", "CCCC"] + labels = [1, 0, 1, 0] + weights = [1.5, 1.5, 1, 1] + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + dataset = loader.create_dataset(zip(smiles, labels, weights), shard_size=2) + assert len(dataset) == 4 + assert dataset.X.shape == (4, 1024) + assert (dataset.y == np.array(labels)).all() + assert (dataset.w == np.array(weights)).all() + assert (dataset.ids == np.arange(4)).all() + assert dataset.get_number_shards() == 2 + + +def test_inmemory_features_and_labels_and_weights_and_ids(): + smiles = ["C", "CC", "CCC", "CCCC"] + labels = [1, 0, 1, 0] + weights = [1.5, 1.5, 1, 1] + ids = smiles + featurizer = dc.feat.CircularFingerprint(size=1024) + loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer) + dataset = loader.create_dataset( + zip(smiles, labels, weights, ids), shard_size=2) 
+ assert len(dataset) == 4 + assert dataset.X.shape == (4, 1024) + assert (dataset.y == np.array(labels)).all() + assert (dataset.w == np.array(weights)).all() + assert (dataset.ids == np.array(ids)).all() + assert dataset.get_number_shards() == 2 diff --git a/deepchem/data/tests/test_json_loader.py b/deepchem/data/tests/test_json_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..7f96e475e4f005789ada67bd8b6c6857f1be4041 --- /dev/null +++ b/deepchem/data/tests/test_json_loader.py @@ -0,0 +1,33 @@ +""" +Tests for JsonLoader class. +""" + +import os +import numpy as np +from deepchem.data.data_loader import JsonLoader +from deepchem.feat import SineCoulombMatrix + + +def test_json_loader(): + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, 'inorganic_crystal_sample_data.json') + featurizer = SineCoulombMatrix(max_atoms=5) + loader = JsonLoader( + tasks=['e_form'], + feature_field='structure', + id_field='formula', + label_field='e_form', + featurizer=featurizer) + + dataset = loader.create_dataset(input_file, shard_size=1) + + a = [4625.32086965, 6585.20209678, 61.00680193, 48.72230922, 48.72230922] + + assert dataset.X.shape == (5, 5) + assert np.allclose(dataset.X[0], a, atol=.5) + + dataset = loader.create_dataset(input_file, shard_size=None) + assert dataset.X.shape == (5, 5) + + dataset = loader.create_dataset([input_file, input_file], shard_size=5) + assert dataset.X.shape == (10, 5) diff --git a/deepchem/data/tests/test_legacy.py b/deepchem/data/tests/test_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..3f3f2d4a91ac7d7ededc06cdc440bab32ae8c98b --- /dev/null +++ b/deepchem/data/tests/test_legacy.py @@ -0,0 +1,50 @@ +import os +import deepchem as dc +import numpy as np +import tempfile + + +def test_make_legacy_dataset_from_numpy(): + """Test that legacy DiskDataset objects can be constructed.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) 
+ # legacy_dataset is a dataset in the legacy format kept around for testing purposes. + data_dir = os.path.join(current_dir, "legacy_dataset") + dataset = dc.data.DiskDataset(data_dir) + assert dataset.legacy_metadata + assert len(dataset.metadata_df.columns) == 4 + assert list(dataset.metadata_df.columns) == ['ids', 'X', 'y', 'w'] + + # Test constructor reload works for legacy format + dataset2 = dc.data.DiskDataset(dataset.data_dir) + assert dataset2.legacy_metadata + assert len(dataset2.metadata_df.columns) == 4 + assert list(dataset2.metadata_df.columns) == ['ids', 'X', 'y', 'w'] + + +def test_reshard(): + """Test that resharding updates legacy datasets.""" + # legacy_dataset_reshard is a sharded dataset in the legacy format kept + # around for testing resharding. + current_dir = os.path.dirname(os.path.abspath(__file__)) + data_dir = os.path.join(current_dir, "legacy_dataset_reshard") + dataset = dc.data.DiskDataset(data_dir) + assert dataset.legacy_metadata + assert len(dataset.metadata_df.columns) == 4 + assert list(dataset.metadata_df.columns) == ['ids', 'X', 'y', 'w'] + + with tempfile.TemporaryDirectory() as tmpdirname: + copy = dataset.copy(tmpdirname) + assert np.all(copy.X == dataset.X) + assert np.all(copy.y == dataset.y) + assert np.all(copy.w == dataset.w) + assert np.all(copy.ids == dataset.ids) + + # Reshard copy + copy.reshard(shard_size=10) + assert copy.get_number_shards() == 10 + # Check metadata has been updated + assert not copy.legacy_metadata + assert len(copy.metadata_df.columns) == 8 + assert list(copy.metadata_df.columns) == [ + 'ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape', 'w_shape' + ] diff --git a/deepchem/data/tests/test_load.py b/deepchem/data/tests/test_load.py index 0e0d1546d7186d252e81f8e5f5c91d3e4ea3e3a0..14b5a2fde402f2b81157d59c6b3bd6cefd019721 100644 --- a/deepchem/data/tests/test_load.py +++ b/deepchem/data/tests/test_load.py @@ -32,8 +32,8 @@ class TestLoad(unittest.TestCase): featurizer = 
dc.feat.CircularFingerprint(size=1024) tasks = ["log-solubility"] loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, data_dir) + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, data_dir) X, y, w, ids = (dataset.X, dataset.y, dataset.w, dataset.ids) shutil.move(data_dir, moved_data_dir) @@ -55,7 +55,7 @@ class TestLoad(unittest.TestCase): np.random.seed(123) current_dir = os.path.dirname(os.path.realpath(__file__)) - ##Make directories to store the raw and featurized datasets. + # Make directories to store the raw and featurized datasets. data_dir = tempfile.mkdtemp() # Load dataset @@ -68,27 +68,25 @@ class TestLoad(unittest.TestCase): featurizer = dc.feat.CircularFingerprint(size=1024) all_tasks = ["task%d" % i for i in range(17)] - ####### Do featurization + # featurization loader = dc.data.CSVLoader( - tasks=all_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, data_dir) + tasks=all_tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, data_dir) - # Do train/valid split. - X_multi, y_multi, w_multi, ids_multi = (dataset.X, dataset.y, dataset.w, - dataset.ids) + # train/valid split. 
+ _, y_multi, w_multi, _ = (dataset.X, dataset.y, dataset.w, dataset.ids) - ####### Do singletask load + # singletask load y_tasks, w_tasks, = [], [] dataset = dc.data.DiskDataset(data_dir) for ind, task in enumerate(all_tasks): logger.info("Processing task %s" % task) - X_task, y_task, w_task, ids_task = (dataset.X, dataset.y, dataset.w, - dataset.ids) + _, y_task, w_task, _ = (dataset.X, dataset.y, dataset.w, dataset.ids) y_tasks.append(y_task[:, ind]) w_tasks.append(w_task[:, ind]) - ################## Do comparison + # comparison for ind, task in enumerate(all_tasks): y_multi_task = y_multi[:, ind] w_multi_task = w_multi[:, ind] @@ -104,11 +102,8 @@ class TestLoad(unittest.TestCase): # Only for debug! np.random.seed(123) - # Set some global variables up top - reload = True - current_dir = os.path.dirname(os.path.realpath(__file__)) - #Make directories to store the raw and featurized datasets. + # Make directories to store the raw and featurized datasets. data_dir = tempfile.mkdtemp() # Load dataset @@ -124,32 +119,31 @@ class TestLoad(unittest.TestCase): n_tasks = 17 tasks = all_tasks[0:n_tasks] - ####### Do multitask load + # multitask load loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, data_dir) + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, data_dir) # Do train/valid split. 
- X_multi, y_multi, w_multi, ids_multi = (dataset.X, dataset.y, dataset.w, - dataset.ids) + _, y_multi, w_multi, _ = (dataset.X, dataset.y, dataset.w, dataset.ids) - ####### Do singletask load + # singletask load y_tasks, w_tasks, ids_tasks = [], [], [] for task in tasks: logger.info("Processing task %s" % task) if os.path.exists(data_dir): shutil.rmtree(data_dir) loader = dc.data.CSVLoader( - tasks=[task], smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, data_dir) + tasks=[task], feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, data_dir) - X_task, y_task, w_task, ids_task = (dataset.X, dataset.y, dataset.w, - dataset.ids) + _, y_task, w_task, ids_task = (dataset.X, dataset.y, dataset.w, + dataset.ids) y_tasks.append(y_task) w_tasks.append(w_task) ids_tasks.append(ids_task) - ################## Do comparison + # comparison for ind, task in enumerate(tasks): y_multi_task = y_multi[:, ind] w_multi_task = w_multi[:, ind] diff --git a/deepchem/data/tests/test_merge.py b/deepchem/data/tests/test_merge.py index 7bc85a7fd8e100188ebc65b513e28b8ba0cbde93..cf64e424419f780fa6d5c29a3c45837b19695391 100644 --- a/deepchem/data/tests/test_merge.py +++ b/deepchem/data/tests/test_merge.py @@ -1,61 +1,50 @@ """ Testing singletask/multitask dataset merging """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import os -import shutil -import tempfile -import unittest import deepchem as dc import numpy as np -class TestMerge(unittest.TestCase): - """ - Test singletask/multitask dataset merging. 
- """ +def test_merge(): + """Test that datasets can be merged.""" + current_dir = os.path.dirname(os.path.realpath(__file__)) - def test_merge(self): - """Test that datasets can be merged.""" - current_dir = os.path.dirname(os.path.realpath(__file__)) + dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") - dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + first_dataset = loader.create_dataset(dataset_file) + second_dataset = loader.create_dataset(dataset_file) - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - first_dataset = loader.featurize(dataset_file) - second_dataset = loader.featurize(dataset_file) + merged_dataset = dc.data.DiskDataset.merge([first_dataset, second_dataset]) - merged_dataset = dc.data.DiskDataset.merge([first_dataset, second_dataset]) + assert len(merged_dataset) == len(first_dataset) + len(second_dataset) - assert len(merged_dataset) == len(first_dataset) + len(second_dataset) - def test_subset(self): - """Tests that subsetting of datasets works.""" - current_dir = os.path.dirname(os.path.realpath(__file__)) +def test_subset(): + """Tests that subsetting of datasets works.""" + current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") + dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=2) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + 
loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, shard_size=2) - shard_nums = [1, 2] + shard_nums = [1, 2] - orig_ids = dataset.ids - _, _, _, ids_1 = dataset.get_shard(1) - _, _, _, ids_2 = dataset.get_shard(2) + orig_ids = dataset.ids + _, _, _, ids_1 = dataset.get_shard(1) + _, _, _, ids_2 = dataset.get_shard(2) - subset = dataset.subset(shard_nums) - after_ids = dataset.ids + subset = dataset.subset(shard_nums) + after_ids = dataset.ids - assert len(subset) == 4 - assert sorted(subset.ids) == sorted(np.concatenate([ids_1, ids_2])) - assert list(orig_ids) == list(after_ids) + assert len(subset) == 4 + assert sorted(subset.ids) == sorted(np.concatenate([ids_1, ids_2])) + assert list(orig_ids) == list(after_ids) diff --git a/deepchem/data/tests/test_non_classification_regression_datasets.py b/deepchem/data/tests/test_non_classification_regression_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..026cedd4f568aa5d2201217f3d311e809165a775 --- /dev/null +++ b/deepchem/data/tests/test_non_classification_regression_datasets.py @@ -0,0 +1,20 @@ +import deepchem as dc +import numpy as np + + +def test_disk_generative_dataset(): + """Test for a hypothetical generative dataset.""" + X = np.random.rand(100, 10, 10) + y = np.random.rand(100, 10, 10) + dataset = dc.data.DiskDataset.from_numpy(X, y) + assert (dataset.X == X).all() + assert (dataset.y == y).all() + + +def test_numpy_generative_dataset(): + """Test for a hypothetical generative dataset.""" + X = np.random.rand(100, 10, 10) + y = np.random.rand(100, 10, 10) + dataset = dc.data.NumpyDataset(X, y) + assert (dataset.X == X).all() + assert (dataset.y == y).all() diff --git a/deepchem/data/tests/test_property.py b/deepchem/data/tests/test_property.py new file mode 100644 index 0000000000000000000000000000000000000000..8933568d6e1bdadbe1fb4eb0fdca010da3033e3b --- /dev/null +++ 
b/deepchem/data/tests/test_property.py @@ -0,0 +1,30 @@ +import numpy as np +import deepchem as dc + + +def test_y_property(): + """Test that dataset.y works.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + y_out = dataset.y + np.testing.assert_array_equal(y, y_out) + + +def test_w_property(): + """Test that dataset.y works.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + w_out = dataset.w + np.testing.assert_array_equal(w, w_out) diff --git a/deepchem/data/tests/test_reload.py b/deepchem/data/tests/test_reload.py index 28f6236c0e0a6bd17df221383c51e87037a1a6ee..a2dfa9d939682f3ece2ab6163918a3f7dced718f 100644 --- a/deepchem/data/tests/test_reload.py +++ b/deepchem/data/tests/test_reload.py @@ -6,12 +6,9 @@ __copyright__ = "Copyright 2016, Stanford University" __license__ = "MIT" import os -import shutil import logging import unittest -import tempfile import deepchem as dc -import numpy as np logger = logging.getLogger(__name__) @@ -26,15 +23,15 @@ class TestReload(unittest.TestCase): # Load MUV dataset logger.info("About to featurize compounds") featurizer = dc.feat.CircularFingerprint(size=1024) - raw_dataset = dc.utils.save.load_from_disk(dataset_file) + raw_dataset = dc.utils.data_utils.load_from_disk(dataset_file) MUV_tasks = [ 'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548', 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', 'MUV-652', 'MUV-466', 'MUV-832' ] loader = 
dc.data.CSVLoader( - tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) + tasks=MUV_tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file) assert len(dataset) == len(raw_dataset) logger.info("About to split compounds into train/valid/test") @@ -56,9 +53,7 @@ class TestReload(unittest.TestCase): # TODO(rbharath): Transformers don't play nice with reload! Namely, # reloading will cause the transform to be reapplied. This is undesirable in # almost all cases. Need to understand a method to fix this. - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=train_dataset) - ] + transformers = [dc.trans.BalancingTransformer(dataset=train_dataset)] logger.info("Transforming datasets") for dataset in [train_dataset, valid_dataset, test_dataset]: for transformer in transformers: diff --git a/deepchem/data/tests/test_reshard.py b/deepchem/data/tests/test_reshard.py new file mode 100644 index 0000000000000000000000000000000000000000..9cbe3691d39a03d5fd76749c2d3d8c3b4cc60cdb --- /dev/null +++ b/deepchem/data/tests/test_reshard.py @@ -0,0 +1,71 @@ +import deepchem as dc +import numpy as np + + +def test_reshard_with_X(): + """Test resharding on a simple example""" + X = np.random.rand(100, 10) + dataset = dc.data.DiskDataset.from_numpy(X) + assert dataset.get_number_shards() == 1 + dataset.reshard(shard_size=10) + assert (dataset.X == X).all() + assert dataset.get_number_shards() == 10 + + +def test_reshard_with_X_y(): + """Test resharding on a simple example""" + X = np.random.rand(100, 10) + y = np.random.rand(100,) + dataset = dc.data.DiskDataset.from_numpy(X, y) + assert dataset.get_number_shards() == 1 + dataset.reshard(shard_size=10) + assert (dataset.X == X).all() + # This is necessary since from_numpy adds in shape information + assert (dataset.y.flatten() == y).all() + assert dataset.get_number_shards() == 10 + + +def 
test_reshard_with_X_y_generative(): + """Test resharding for a hypothetical generative dataset.""" + X = np.random.rand(100, 10, 10) + y = np.random.rand(100, 10, 10) + dataset = dc.data.DiskDataset.from_numpy(X, y) + assert (dataset.X == X).all() + assert (dataset.y == y).all() + assert dataset.get_number_shards() == 1 + dataset.reshard(shard_size=10) + assert (dataset.X == X).all() + assert (dataset.y == y).all() + assert dataset.get_number_shards() == 10 + + +def test_reshard_with_X_y_w(): + """Test resharding on a simple example""" + X = np.random.rand(100, 10) + y = np.random.rand(100,) + w = np.ones_like(y) + dataset = dc.data.DiskDataset.from_numpy(X, y, w) + assert dataset.get_number_shards() == 1 + dataset.reshard(shard_size=10) + assert (dataset.X == X).all() + # This is necessary since from_numpy adds in shape information + assert (dataset.y.flatten() == y).all() + assert (dataset.w.flatten() == w).all() + assert dataset.get_number_shards() == 10 + + +def test_reshard_with_X_y_w_ids(): + """Test resharding on a simple example""" + X = np.random.rand(100, 10) + y = np.random.rand(100,) + w = np.ones_like(y) + ids = np.arange(100) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + assert dataset.get_number_shards() == 1 + dataset.reshard(shard_size=10) + assert (dataset.X == X).all() + # This is necessary since from_numpy adds in shape information + assert (dataset.y.flatten() == y).all() + assert (dataset.w.flatten() == w).all() + assert (dataset.ids == ids).all() + assert dataset.get_number_shards() == 10 diff --git a/deepchem/data/tests/test_sdf_loader.py b/deepchem/data/tests/test_sdf_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..845038b0ba23a342d8528e372381065e0b53d8b6 --- /dev/null +++ b/deepchem/data/tests/test_sdf_loader.py @@ -0,0 +1,59 @@ +import os +import deepchem as dc + + +def test_sdf_load(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + featurizer = 
dc.feat.CircularFingerprint(size=16) + loader = dc.data.SDFLoader( + ["LogP(RRCK)"], featurizer=featurizer, sanitize=True) + dataset = loader.create_dataset( + os.path.join(current_dir, "membrane_permeability.sdf")) + assert len(dataset) == 2 + + +def test_singleton_sdf_load(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=16) + loader = dc.data.SDFLoader( + ["LogP(RRCK)"], featurizer=featurizer, sanitize=True) + dataset = loader.create_dataset(os.path.join(current_dir, "singleton.sdf")) + assert len(dataset) == 1 + + +def test_sharded_sdf_load(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=16) + loader = dc.data.SDFLoader( + ["LogP(RRCK)"], featurizer=featurizer, sanitize=True) + dataset = loader.create_dataset( + os.path.join(current_dir, "membrane_permeability.sdf"), shard_size=1) + assert dataset.get_number_shards() == 2 + assert len(dataset) == 2 + + +def test_sharded_multi_file_sdf_load(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=16) + loader = dc.data.SDFLoader( + ["LogP(RRCK)"], featurizer=featurizer, sanitize=True) + input_files = [ + os.path.join(current_dir, "membrane_permeability.sdf"), + os.path.join(current_dir, "singleton.sdf") + ] + dataset = loader.create_dataset(input_files, shard_size=1) + assert dataset.get_number_shards() == 3 + assert len(dataset) == 3 + + +def test_sdf_load_with_csv(): + """Test a case where SDF labels are in associated csv file""" + current_dir = os.path.dirname(os.path.realpath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=16) + loader = dc.data.SDFLoader( + ["atomization_energy"], featurizer=featurizer, sanitize=True) + dataset = loader.create_dataset( + os.path.join(current_dir, "water.sdf"), shard_size=1) + assert len(dataset) == 10 + assert dataset.get_number_shards() == 10 + assert dataset.get_task_names() == 
["atomization_energy"] diff --git a/deepchem/data/tests/test_select.py b/deepchem/data/tests/test_select.py new file mode 100644 index 0000000000000000000000000000000000000000..80e36c493b62d9a8f1532b114425e42a0f97668e --- /dev/null +++ b/deepchem/data/tests/test_select.py @@ -0,0 +1,130 @@ +import deepchem as dc +import numpy as np +import os + + +def test_select(): + """Test that dataset select works.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + indices = [0, 4, 5, 8] + select_dataset = dataset.select(indices) + assert isinstance(select_dataset, dc.data.DiskDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(X[indices], X_sel) + np.testing.assert_array_equal(y[indices], y_sel) + np.testing.assert_array_equal(w[indices], w_sel) + np.testing.assert_array_equal(ids[indices], ids_sel) + + +def test_image_dataset_select(): + """Test that select works on image datasets.""" + path = os.path.join(os.path.dirname(__file__), 'images') + files = [os.path.join(path, f) for f in os.listdir(path)] + dataset = dc.data.ImageDataset(files, np.random.random(10)) + indices = [0, 4, 5, 8, 2] + select_dataset = dataset.select(indices) + assert isinstance(select_dataset, dc.data.ImageDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(dataset.X[indices], X_sel) + np.testing.assert_array_equal(dataset.y[indices], y_sel) + np.testing.assert_array_equal(dataset.w[indices], w_sel) + np.testing.assert_array_equal(dataset.ids[indices], ids_sel) + + +def test_numpy_dataset_select(): + """Test that dataset select works with numpy 
dataset.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + indices = [0, 4, 5, 8, 2] + select_dataset = dataset.select(indices) + assert isinstance(select_dataset, dc.data.NumpyDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(X[indices], X_sel) + np.testing.assert_array_equal(y[indices], y_sel) + np.testing.assert_array_equal(w[indices], w_sel) + np.testing.assert_array_equal(ids[indices], ids_sel) + + +def test_select_multishard(): + """Test that dataset select works with multiple shards.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + dataset.reshard(shard_size=10) + + indices = [10, 42, 51, 82, 2, 4, 6] + select_dataset = dataset.select(indices) + assert isinstance(select_dataset, dc.data.DiskDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(X[indices], X_sel) + np.testing.assert_array_equal(y[indices], y_sel) + np.testing.assert_array_equal(w[indices], w_sel) + np.testing.assert_array_equal(ids[indices], ids_sel) + + +def test_select_not_sorted(): + """Test that dataset select with ids not in sorted order.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * 
num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + indices = [4, 2, 8, 5, 0] + select_dataset = dataset.select(indices) + assert isinstance(select_dataset, dc.data.DiskDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(X[indices], X_sel) + np.testing.assert_array_equal(y[indices], y_sel) + np.testing.assert_array_equal(w[indices], w_sel) + np.testing.assert_array_equal(ids[indices], ids_sel) + + +def test_select_to_numpy(): + """Test that dataset select works.""" + num_datapoints = 10 + num_features = 10 + num_tasks = 1 + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.ones((num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + indices = [0, 4, 5, 8] + select_dataset = dataset.select(indices, output_numpy_dataset=True) + assert isinstance(select_dataset, dc.data.NumpyDataset) + X_sel, y_sel, w_sel, ids_sel = (select_dataset.X, select_dataset.y, + select_dataset.w, select_dataset.ids) + np.testing.assert_array_equal(X[indices], X_sel) + np.testing.assert_array_equal(y[indices], y_sel) + np.testing.assert_array_equal(w[indices], w_sel) + np.testing.assert_array_equal(ids[indices], ids_sel) diff --git a/deepchem/data/tests/test_shape.py b/deepchem/data/tests/test_shape.py new file mode 100644 index 0000000000000000000000000000000000000000..346c7da31e6a63778d30dafe2cee6172d67bc3f4 --- /dev/null +++ b/deepchem/data/tests/test_shape.py @@ -0,0 +1,108 @@ +import deepchem as dc +import numpy as np +import os + + +def test_numpy_dataset_get_shape(): + """Test that get_shape works for numpy datasets.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = 
np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() + assert X_shape == X.shape + assert y_shape == y.shape + assert w_shape == w.shape + assert ids_shape == ids.shape + + +def test_disk_dataset_get_shape_single_shard(): + """Test that get_shape works for disk dataset.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() + assert X_shape == X.shape + assert y_shape == y.shape + assert w_shape == w.shape + assert ids_shape == ids.shape + + +def test_disk_dataset_get_shape_multishard(): + """Test that get_shape works for multisharded disk dataset.""" + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + # Generate data + X = np.random.rand(num_datapoints, num_features) + y = np.random.randint(2, size=(num_datapoints, num_tasks)) + w = np.random.randint(2, size=(num_datapoints, num_tasks)) + ids = np.array(["id"] * num_datapoints) + + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + # Should now have 10 shards + dataset.reshard(shard_size=10) + + X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() + assert X_shape == X.shape + assert y_shape == y.shape + assert w_shape == w.shape + assert ids_shape == ids.shape + + +def test_disk_dataset_get_legacy_shape_single_shard(): + """Test that get_shape works for legacy disk dataset.""" + # This is the shape of legacy_data + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + + current_dir = os.path.dirname(os.path.abspath(__file__)) + # legacy_dataset is a dataset in the legacy 
format kept around for testing + # purposes. + data_dir = os.path.join(current_dir, "legacy_dataset") + dataset = dc.data.DiskDataset(data_dir) + + X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() + assert X_shape == (num_datapoints, num_features) + assert y_shape == (num_datapoints, num_tasks) + assert w_shape == (num_datapoints, num_tasks) + assert ids_shape == (num_datapoints,) + + +def test_disk_dataset_get_legacy_shape_multishard(): + """Test that get_shape works for multisharded legacy disk dataset.""" + # This is the shape of legacy_data_reshard + num_datapoints = 100 + num_features = 10 + num_tasks = 10 + + # legacy_dataset_reshard is a sharded dataset in the legacy format kept + # around for testing + current_dir = os.path.dirname(os.path.abspath(__file__)) + data_dir = os.path.join(current_dir, "legacy_dataset_reshard") + dataset = dc.data.DiskDataset(data_dir) + + # Should now have 10 shards + assert dataset.get_number_shards() == 10 + + X_shape, y_shape, w_shape, ids_shape = dataset.get_shape() + assert X_shape == (num_datapoints, num_features) + assert y_shape == (num_datapoints, num_tasks) + assert w_shape == (num_datapoints, num_tasks) + assert ids_shape == (num_datapoints,) diff --git a/deepchem/data/tests/test_shuffle.py b/deepchem/data/tests/test_shuffle.py index 2e22e09e634678163d04822f964ce32479f58477..3c7b251ac9ee9d39bdcabdd9d6d5f69dac3853c5 100644 --- a/deepchem/data/tests/test_shuffle.py +++ b/deepchem/data/tests/test_shuffle.py @@ -1,137 +1,200 @@ """ -Testing singletask/multitask dataset shuffling +Testing singletask/multitask dataset shuffling """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import os -import shutil -import tempfile -import unittest import deepchem as dc import numpy as np -class TestShuffle(unittest.TestCase): - """ - Test singletask/multitask dataset shuffling. 
- """ - - #def test_shuffle(self): - # """Test that datasets can be merged.""" - # current_dir = os.path.dirname(os.path.realpath(__file__)) - - # dataset_file = os.path.join( - # current_dir, "../../models/tests/example.csv") - - # featurizer = dc.feat.CircularFingerprint(size=1024) - # tasks = ["log-solubility"] - # loader = dc.data.CSVLoader( - # tasks=tasks, smiles_field="smiles", featurizer=featurizer) - # dataset = loader.featurize(dataset_file, shard_size=2) - - # X_orig, y_orig, w_orig, orig_ids = (dataset.X, dataset.y, dataset.w, - # dataset.ids) - # orig_len = len(dataset) - - # dataset.shuffle(iterations=5) - # X_new, y_new, w_new, new_ids = (dataset.X, dataset.y, dataset.w, - # dataset.ids) - # - # assert len(dataset) == orig_len - # # The shuffling should have switched up the ordering - # assert not np.array_equal(orig_ids, new_ids) - # # But all the same entries should still be present - # assert sorted(orig_ids) == sorted(new_ids) - # # All the data should have same shape - # assert X_orig.shape == X_new.shape - # assert y_orig.shape == y_new.shape - # assert w_orig.shape == w_new.shape - - def test_sparse_shuffle(self): - """Test that sparse datasets can be shuffled quickly.""" - current_dir = os.path.dirname(os.path.realpath(__file__)) - - dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") - - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=2) - - X_orig, y_orig, w_orig, orig_ids = (dataset.X, dataset.y, dataset.w, - dataset.ids) - orig_len = len(dataset) - - dataset.sparse_shuffle() - X_new, y_new, w_new, new_ids = (dataset.X, dataset.y, dataset.w, - dataset.ids) - - assert len(dataset) == orig_len - # The shuffling should have switched up the ordering - assert not np.array_equal(orig_ids, new_ids) - # But all the same entries should still be 
present - assert sorted(orig_ids) == sorted(new_ids) - # All the data should have same shape - assert X_orig.shape == X_new.shape - assert y_orig.shape == y_new.shape - assert w_orig.shape == w_new.shape - - def test_shuffle_each_shard(self): - """Test that shuffle_each_shard works.""" - n_samples = 100 - n_tasks = 10 - n_features = 10 - - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.random.randint(2, size=(n_samples, n_tasks)) - ids = np.arange(n_samples) - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - dataset.reshard(shard_size=10) - - dataset.shuffle_each_shard() - X_s, y_s, w_s, ids_s = (dataset.X, dataset.y, dataset.w, dataset.ids) - assert X_s.shape == X.shape - assert y_s.shape == y.shape - assert ids_s.shape == ids.shape - assert w_s.shape == w.shape - - # The ids should now store the performed permutation. Check that the - # original dataset is recoverable. - for i in range(n_samples): - np.testing.assert_array_equal(X_s[i], X[ids_s[i]]) - np.testing.assert_array_equal(y_s[i], y[ids_s[i]]) - np.testing.assert_array_equal(w_s[i], w[ids_s[i]]) - np.testing.assert_array_equal(ids_s[i], ids[ids_s[i]]) - - def test_shuffle_shards(self): - """Test that shuffle_shards works.""" - n_samples = 100 - n_tasks = 10 - n_features = 10 - - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.random.randint(2, size=(n_samples, n_tasks)) - ids = np.arange(n_samples) - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - dataset.reshard(shard_size=10) - dataset.shuffle_shards() - - X_s, y_s, w_s, ids_s = (dataset.X, dataset.y, dataset.w, dataset.ids) - - assert X_s.shape == X.shape - assert y_s.shape == y.shape - assert ids_s.shape == ids.shape - assert w_s.shape == w.shape - - # The ids should now store the performed permutation. Check that the - # original dataset is recoverable. 
- for i in range(n_samples): - np.testing.assert_array_equal(X_s[i], X[ids_s[i]]) - np.testing.assert_array_equal(y_s[i], y[ids_s[i]]) - np.testing.assert_array_equal(w_s[i], w[ids_s[i]]) - np.testing.assert_array_equal(ids_s[i], ids[ids_s[i]]) +def test_complete_shuffle_one_shard(): + """Test that complete shuffle works with only one shard.""" + X = np.random.rand(10, 10) + dataset = dc.data.DiskDataset.from_numpy(X) + shuffled = dataset.complete_shuffle() + assert len(shuffled) == len(dataset) + assert not np.array_equal(shuffled.ids, dataset.ids) + assert sorted(shuffled.ids) == sorted(dataset.ids) + assert shuffled.X.shape == dataset.X.shape + assert shuffled.y.shape == dataset.y.shape + assert shuffled.w.shape == dataset.w.shape + original_indices = dict((id, i) for i, id in enumerate(dataset.ids)) + shuffled_indices = dict((id, i) for i, id in enumerate(shuffled.ids)) + for id in dataset.ids: + i = original_indices[id] + j = shuffled_indices[id] + assert np.array_equal(dataset.X[i], shuffled.X[j]) + assert np.array_equal(dataset.y[i], shuffled.y[j]) + assert np.array_equal(dataset.w[i], shuffled.w[j]) + + +def test_complete_shuffle_multiple_shard(): + """Test that complete shuffle works with multiple shards.""" + X = np.random.rand(100, 10) + dataset = dc.data.DiskDataset.from_numpy(X) + dataset.reshard(shard_size=10) + shuffled = dataset.complete_shuffle() + assert len(shuffled) == len(dataset) + assert not np.array_equal(shuffled.ids, dataset.ids) + assert sorted(shuffled.ids) == sorted(dataset.ids) + assert shuffled.X.shape == dataset.X.shape + assert shuffled.y.shape == dataset.y.shape + assert shuffled.w.shape == dataset.w.shape + original_indices = dict((id, i) for i, id in enumerate(dataset.ids)) + shuffled_indices = dict((id, i) for i, id in enumerate(shuffled.ids)) + for id in dataset.ids: + i = original_indices[id] + j = shuffled_indices[id] + assert np.array_equal(dataset.X[i], shuffled.X[j]) + assert np.array_equal(dataset.y[i], shuffled.y[j]) + 
assert np.array_equal(dataset.w[i], shuffled.w[j]) + + +def test_complete_shuffle_multiple_shard_uneven(): + """Test that complete shuffle works with multiple shards and some shards not full size.""" + X = np.random.rand(57, 10) + dataset = dc.data.DiskDataset.from_numpy(X) + dataset.reshard(shard_size=10) + shuffled = dataset.complete_shuffle() + assert len(shuffled) == len(dataset) + assert not np.array_equal(shuffled.ids, dataset.ids) + assert sorted(shuffled.ids) == sorted(dataset.ids) + assert shuffled.X.shape == dataset.X.shape + assert shuffled.y.shape == dataset.y.shape + assert shuffled.w.shape == dataset.w.shape + original_indices = dict((id, i) for i, id in enumerate(dataset.ids)) + shuffled_indices = dict((id, i) for i, id in enumerate(shuffled.ids)) + for id in dataset.ids: + i = original_indices[id] + j = shuffled_indices[id] + assert np.array_equal(dataset.X[i], shuffled.X[j]) + assert np.array_equal(dataset.y[i], shuffled.y[j]) + assert np.array_equal(dataset.w[i], shuffled.w[j]) + + +def test_complete_shuffle(): + """Test that complete shuffle works.""" + current_dir = os.path.dirname(os.path.realpath(__file__)) + + dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") + + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, shard_size=2) + + X_orig, y_orig, w_orig, orig_ids = (dataset.X, dataset.y, dataset.w, + dataset.ids) + orig_len = len(dataset) + + shuffled = dataset.complete_shuffle() + X_new, y_new, w_new, new_ids = (shuffled.X, shuffled.y, shuffled.w, + shuffled.ids) + + assert len(shuffled) == orig_len + # The shuffling should have switched up the ordering + assert not np.array_equal(orig_ids, new_ids) + # But all the same entries should still be present + assert sorted(orig_ids) == sorted(new_ids) + # All the data should have same shape + assert X_orig.shape 
== X_new.shape + assert y_orig.shape == y_new.shape + assert w_orig.shape == w_new.shape + original_indices = dict((id, i) for i, id in enumerate(dataset.ids)) + shuffled_indices = dict((id, i) for i, id in enumerate(shuffled.ids)) + for id in dataset.ids: + i = original_indices[id] + j = shuffled_indices[id] + assert np.array_equal(dataset.X[i], shuffled.X[j]) + assert np.array_equal(dataset.y[i], shuffled.y[j]) + assert np.array_equal(dataset.w[i], shuffled.w[j]) + + +def test_sparse_shuffle(): + """Test that sparse datasets can be shuffled quickly.""" + current_dir = os.path.dirname(os.path.realpath(__file__)) + + dataset_file = os.path.join(current_dir, "../../models/tests/example.csv") + + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(dataset_file, shard_size=2) + + X_orig, y_orig, w_orig, orig_ids = (dataset.X, dataset.y, dataset.w, + dataset.ids) + orig_len = len(dataset) + + dataset.sparse_shuffle() + X_new, y_new, w_new, new_ids = (dataset.X, dataset.y, dataset.w, dataset.ids) + + assert len(dataset) == orig_len + # The shuffling should have switched up the ordering + assert not np.array_equal(orig_ids, new_ids) + # But all the same entries should still be present + assert sorted(orig_ids) == sorted(new_ids) + # All the data should have same shape + assert X_orig.shape == X_new.shape + assert y_orig.shape == y_new.shape + assert w_orig.shape == w_new.shape + + +def test_shuffle_each_shard(): + """Test that shuffle_each_shard works.""" + n_samples = 100 + n_tasks = 10 + n_features = 10 + + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.random.randint(2, size=(n_samples, n_tasks)) + ids = np.arange(n_samples) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + dataset.reshard(shard_size=10) + + dataset.shuffle_each_shard() + X_s, y_s, 
w_s, ids_s = (dataset.X, dataset.y, dataset.w, dataset.ids) + assert X_s.shape == X.shape + assert y_s.shape == y.shape + assert ids_s.shape == ids.shape + assert w_s.shape == w.shape + assert not (ids_s == ids).all() + + # The ids should now store the performed permutation. Check that the + # original dataset is recoverable. + for i in range(n_samples): + np.testing.assert_array_equal(X_s[i], X[ids_s[i]]) + np.testing.assert_array_equal(y_s[i], y[ids_s[i]]) + np.testing.assert_array_equal(w_s[i], w[ids_s[i]]) + np.testing.assert_array_equal(ids_s[i], ids[ids_s[i]]) + + +def test_shuffle_shards(): + """Test that shuffle_shards works.""" + n_samples = 100 + n_tasks = 10 + n_features = 10 + + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.random.randint(2, size=(n_samples, n_tasks)) + ids = np.arange(n_samples) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + dataset.reshard(shard_size=10) + dataset.shuffle_shards() + + X_s, y_s, w_s, ids_s = (dataset.X, dataset.y, dataset.w, dataset.ids) + + assert X_s.shape == X.shape + assert y_s.shape == y.shape + assert ids_s.shape == ids.shape + assert w_s.shape == w.shape + + # The ids should now store the performed permutation. Check that the + # original dataset is recoverable. 
+ for i in range(n_samples): + np.testing.assert_array_equal(X_s[i], X[ids_s[i]]) + np.testing.assert_array_equal(y_s[i], y[ids_s[i]]) + np.testing.assert_array_equal(w_s[i], w[ids_s[i]]) + np.testing.assert_array_equal(ids_s[i], ids[ids_s[i]]) diff --git a/deepchem/data/tests/test_support_generator.py b/deepchem/data/tests/test_support_generator.py index d510e12a783710ca35be8e2a9c6e5ad004af7b99..0824ba6808bd944de135edaa7c2a4862bb4cd87d 100644 --- a/deepchem/data/tests/test_support_generator.py +++ b/deepchem/data/tests/test_support_generator.py @@ -1,14 +1,9 @@ """ Simple Tests for Support Generation """ -__author__ = "Han Altae-Tran and Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import logging import numpy as np import unittest -import tensorflow as tf import deepchem as dc logger = logging.getLogger(__name__) @@ -45,7 +40,6 @@ class TestSupports(unittest.TestCase): n_samples = 20 n_features = 3 n_tasks = 1 - n_trials = 10 # Generate dummy dataset np.random.seed(123) @@ -71,7 +65,6 @@ class TestSupports(unittest.TestCase): n_samples = 20 n_features = 3 n_tasks = 1 - n_trials = 10 # Generate dummy dataset np.random.seed(123) @@ -102,7 +95,6 @@ class TestSupports(unittest.TestCase): n_samples = 20 n_features = 3 n_tasks = 1 - n_trials = 10 # Generate dummy dataset np.random.seed(123) @@ -139,7 +131,7 @@ class TestSupports(unittest.TestCase): dataset = dc.data.NumpyDataset(X, y, w, ids) # Create support generator - supp_gen = dc.data.SupportGenerator(dataset, n_pos, n_neg, n_trials) + _ = dc.data.SupportGenerator(dataset, n_pos, n_neg, n_trials) def test_simple_episode_generator(self): """Conducts simple test that episode generator runs.""" diff --git a/deepchem/data/tests/water.sdf b/deepchem/data/tests/water.sdf new file mode 100644 index 0000000000000000000000000000000000000000..6edfa97699f7512648e508571a66013dbac8245b --- /dev/null +++ b/deepchem/data/tests/water.sdf @@ -0,0 +1,160 @@ +Generated by 
ForceBalance from calcs/cluster-02/VLE/250K/00/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.3522 -0.0789 -1.1805 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.1361 -1.0054 -1.2859 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.2308 0.3743 -1.7895 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.2501 0.1228 1.2735 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.1609 -0.0681 0.3398 H 0 0 0 0 0 0 0 0 0 0 0 0 + -1.1927 0.0889 1.4364 H 0 0 0 0 0 0 0 0 0 0 0 0 + 2 1 1 0 0 0 0 + 3 1 1 0 0 0 0 + 4 6 1 0 0 0 0 + 5 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/01/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + -0.6833 0.9705 0.3745 O 0 0 0 0 0 0 0 0 0 0 0 0 + -1.4703 0.7389 -0.1187 H 0 0 0 0 0 0 0 0 0 0 0 0 + -1.0013 1.1666 1.2558 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.8119 -0.9589 -0.4393 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.4470 -1.7720 -0.0901 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.2255 -0.2747 -0.1166 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 3 1 0 0 0 0 + 2 1 1 0 0 0 0 + 4 6 1 0 0 0 0 + 4 5 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/02/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.2250 -1.2400 0.1519 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.0488 -1.9014 0.8211 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.0675 -1.6423 -0.6659 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.2524 1.3362 -0.1080 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.0379 0.4038 -0.0769 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.4224 1.7191 -0.6687 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 1 0 0 0 0 + 3 1 1 0 0 0 0 + 4 5 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/03/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.5442 -1.2553 0.0884 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.2717 -0.3382 0.0601 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1.2009 -1.2892 0.7840 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.5724 1.1365 -0.1524 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.9053 1.4770 0.6779 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.1396 1.8852 -0.5627 H 0 0 0 0 0 0 0 0 0 0 0 0 
+ 1 3 1 0 0 0 0 + 2 1 1 0 0 0 0 + 4 5 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/04/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.5716 -0.9660 0.8167 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.0990 -0.2491 0.3937 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1.4503 -0.9302 0.4386 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.5869 0.8748 -0.7201 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.4764 0.7662 -1.6647 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.8321 1.7936 -0.6112 H 0 0 0 0 0 0 0 0 0 0 0 0 + 2 1 1 0 0 0 0 + 3 1 1 0 0 0 0 + 4 6 1 0 0 0 0 + 5 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/05/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + -1.2519 0.2981 0.1241 O 0 0 0 0 0 0 0 0 0 0 0 0 + -1.7775 -0.3172 0.6354 H 0 0 0 0 0 0 0 0 0 0 0 0 + -1.6386 1.1542 0.3076 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1.3413 -0.2661 -0.1294 O 0 0 0 0 0 0 0 0 0 0 0 0 + 1.5930 -0.9980 -0.6925 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.3851 -0.3004 -0.1042 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 3 1 0 0 0 0 + 1 2 1 0 0 0 0 + 4 6 1 0 0 0 0 + 5 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/06/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.9408 -0.0129 1.0150 O 0 0 0 0 0 0 0 0 0 0 0 0 + 1.8624 0.0338 0.7608 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.4636 -0.0294 0.1854 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.9270 -0.0427 -0.9440 O 0 0 0 0 0 0 0 0 0 0 0 0 + -1.7394 0.2078 -0.5042 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.7702 0.6586 -1.5762 H 0 0 0 0 0 0 0 0 0 0 0 0 + 2 1 1 0 0 0 0 + 3 1 1 0 0 0 0 + 4 5 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/07/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + -0.2170 0.6223 -1.1153 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.9903 0.6099 -1.6793 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.2030 1.4618 -1.3022 H 0 0 0 0 0 0 0 0 0 0 0 0 + 0.2096 -0.6802 1.2222 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.1250 
-0.2173 0.4540 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1.0515 -1.0371 0.9394 H 0 0 0 0 0 0 0 0 0 0 0 0 + 2 1 1 0 0 0 0 + 3 1 1 0 0 0 0 + 5 4 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/08/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.4738 0.9229 -0.7887 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.2878 1.1716 -1.3126 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1.2208 1.2882 -1.2627 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.4689 -1.0248 0.8355 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.8800 -0.5469 1.5558 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.1112 -0.3421 0.2678 H 0 0 0 0 0 0 0 0 0 0 0 0 + 2 1 1 0 0 0 0 + 3 1 1 0 0 0 0 + 4 5 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ +Generated by ForceBalance from calcs/cluster-02/VLE/250K/09/qchem.out: Frame 1 of 1 + OpenBabel03241615583D + + 6 4 0 0 0 0 0 0 0 0999 V2000 + 0.3431 0.5546 1.2527 O 0 0 0 0 0 0 0 0 0 0 0 0 + 0.9254 -0.1303 1.5813 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.0195 0.1937 0.4437 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.3857 -0.4769 -1.1651 O 0 0 0 0 0 0 0 0 0 0 0 0 + -0.0194 -1.2992 -1.4903 H 0 0 0 0 0 0 0 0 0 0 0 0 + -0.2338 0.1499 -1.8724 H 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 1 0 0 0 0 + 3 1 1 0 0 0 0 + 5 4 1 0 0 0 0 + 6 4 1 0 0 0 0 +M END +$$$$ diff --git a/deepchem/data/tests/water.sdf.csv b/deepchem/data/tests/water.sdf.csv new file mode 100644 index 0000000000000000000000000000000000000000..97e30794929ca40a18134cf4c99af17483ece8e9 --- /dev/null +++ b/deepchem/data/tests/water.sdf.csv @@ -0,0 +1,11 @@ +atomization_energy +447.082359 +448.859851 +450.466600 +450.851977 +450.894234 +450.743387 +451.436905 +451.559751 +451.326782 +451.400550 diff --git a/deepchem/dock/__init__.py b/deepchem/dock/__init__.py index 46cf12820cb64cafd2b4825f688a28044df888be..ee059e825737acf677e6ecb97b9cdaffc3bcf2c6 100644 --- a/deepchem/dock/__init__.py +++ b/deepchem/dock/__init__.py @@ -1,6 +1,4 @@ -""" -Imports all submodules -""" +# flake8: noqa from deepchem.dock.pose_generation import PoseGenerator from 
deepchem.dock.pose_generation import VinaPoseGenerator from deepchem.dock.docking import Docker diff --git a/deepchem/dock/binding_pocket.py b/deepchem/dock/binding_pocket.py index c0582ac8581b43035b416c41eb09838c868a04da..5de0041b73a768876738099f372ac0a386da9d3d 100644 --- a/deepchem/dock/binding_pocket.py +++ b/deepchem/dock/binding_pocket.py @@ -1,41 +1,43 @@ """ Computes putative binding pockets on protein. """ -import os import logging -import tempfile import numpy as np -from subprocess import call -from deepchem.feat.fingerprints import CircularFingerprint -from deepchem.models.sklearn_models import SklearnModel -from deepchem.utils import rdkit_util -from deepchem.utils import coordinate_box_utils as box_utils -from deepchem.utils.fragment_util import get_contact_atom_indices +from typing import Any, List, Optional, Tuple + +from deepchem.models import Model +from deepchem.utils.rdkit_utils import load_molecule +from deepchem.utils.coordinate_box_utils \ + import CoordinateBox, get_face_boxes, merge_overlapping_boxes +from deepchem.utils.fragment_utils import get_contact_atom_indices logger = logging.getLogger(__name__) -def extract_active_site(protein_file, ligand_file, cutoff=4): +def extract_active_site(protein_file: str, + ligand_file: str, + cutoff: float = 4.0 + ) -> Tuple[CoordinateBox, np.ndarray]: """Extracts a box for the active site. - Params - ------ + Parameters + ---------- protein_file: str Location of protein PDB ligand_file: str Location of ligand input file - cutoff: int, optional + cutoff: float, optional (default 4.0) The distance in angstroms from the protein pocket to consider for featurization. Returns ------- - A tuple of `(CoordinateBox, np.ndarray)` where the second entry is - of shape `(N, 3)` with `N` the number of atoms in the active site. + Tuple[CoordinateBox, np.ndarray] + A tuple of `(CoordinateBox, np.ndarray)` where the second entry is + of shape `(N, 3)` with `N` the number of atoms in the active site. 
""" - protein = rdkit_util.load_molecule(protein_file, add_hydrogens=False) - ligand = rdkit_util.load_molecule( - ligand_file, add_hydrogens=True, calc_charges=True) + protein = load_molecule(protein_file, add_hydrogens=False) + ligand = load_molecule(ligand_file, add_hydrogens=True, calc_charges=True) protein_contacts, ligand_contacts = get_contact_atom_indices( [protein, ligand], cutoff=cutoff) protein_coords = protein[0] @@ -47,8 +49,8 @@ def extract_active_site(protein_file, ligand_file, cutoff=4): y_max = int(np.ceil(np.amax(pocket_coords[:, 1]))) z_min = int(np.floor(np.amin(pocket_coords[:, 2]))) z_max = int(np.ceil(np.amax(pocket_coords[:, 2]))) - box = box_utils.CoordinateBox((x_min, x_max), (y_min, y_max), (z_min, z_max)) - return (box, pocket_coords) + box = CoordinateBox((x_min, x_max), (y_min, y_max), (z_min, z_max)) + return box, pocket_coords class BindingPocketFinder(object): @@ -66,7 +68,7 @@ class BindingPocketFinder(object): technique to be used. """ - def find_pockets(self, molecule): + def find_pockets(self, molecule: Any): """Finds potential binding pockets in proteins. Parameters @@ -83,32 +85,37 @@ class ConvexHullPocketFinder(BindingPocketFinder): Based on https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4112621/pdf/1472-6807-14-18.pdf """ - def __init__(self, scoring_model=None, pad=5): + def __init__(self, scoring_model: Optional[Model] = None, pad: float = 5.0): """Initialize the pocket finder. Parameters ---------- - scoring_model: `dc.models.Model`, optional + scoring_model: Model, optional (default None) If specified, use this model to prune pockets. - pad: float, optional + pad: float, optional (default 5.0) The number of angstroms to pad around a binding pocket's atoms to get a binding pocket box. """ self.scoring_model = scoring_model self.pad = pad - def find_all_pockets(self, protein_file): + def find_all_pockets(self, protein_file: str) -> List[CoordinateBox]: """Find list of binding pockets on protein. 
- + Parameters ---------- protein_file: str Protein to load in. + + Returns + ------- + List[CoordinateBox] + List of binding pockets on protein. Each pocket is a `CoordinateBox` """ - coords, _ = rdkit_util.load_molecule(protein_file) - return box_utils.get_face_boxes(coords, self.pad) + coords, _ = load_molecule(protein_file) + return get_face_boxes(coords, self.pad) - def find_pockets(self, macromolecule_file): + def find_pockets(self, macromolecule_file: str) -> List[CoordinateBox]: """Find list of suitable binding pockets on protein. This function computes putative binding pockets on this protein. @@ -116,17 +123,18 @@ class ConvexHullPocketFinder(BindingPocketFinder): face of the hull is converted into a coordinate box used for binding. - Params - ------ + Parameters + ---------- macromolecule_file: str Location of the macromolecule file to load Returns ------- - List of pockets. Each pocket is a `CoordinateBox` + List[CoordinateBox] + List of pockets. Each pocket is a `CoordinateBox` """ - coords = rdkit_util.load_molecule( - macromolecule_file, add_hydrogens=False, calc_charges=False)[0] - boxes = box_utils.get_face_boxes(coords, self.pad) - boxes = box_utils.merge_overlapping_boxes(boxes) + coords, _ = load_molecule( + macromolecule_file, add_hydrogens=False, calc_charges=False) + boxes = get_face_boxes(coords, self.pad) + boxes = merge_overlapping_boxes(boxes) return boxes diff --git a/deepchem/dock/docking.py b/deepchem/dock/docking.py index 2dc21bfa61979a638ce86444a9db6359b772feba..5cd5700de012f477169bcdc0411d347a84d0d47f 100644 --- a/deepchem/dock/docking.py +++ b/deepchem/dock/docking.py @@ -1,14 +1,19 @@ """ -Docks Molecular Complexes +Docks Molecular Complexes """ import logging -import numpy as np -import os import tempfile -from subprocess import call +from typing import Generator, Optional, Tuple, Union +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.models import Model +from deepchem.feat import ComplexFeaturizer 
from deepchem.data import NumpyDataset +from deepchem.dock import PoseGenerator logger = logging.getLogger(__name__) +POSED_COMPLEX = Tuple[RDKitMol, RDKitMol] class Docker(object): @@ -25,16 +30,19 @@ class Docker(object): generation and scoring classes that are provided to this class. """ - def __init__(self, pose_generator, featurizer=None, scoring_model=None): + def __init__(self, + pose_generator: PoseGenerator, + featurizer: Optional[ComplexFeaturizer] = None, + scoring_model: Optional[Model] = None): """Builds model. Parameters ---------- - pose_generator: `PoseGenerator` + pose_generator: PoseGenerator The pose generator to use for this model - featurizer: `ComplexFeaturizer` + featurizer: ComplexFeaturizer, optional (default None) Featurizer associated with `scoring_model` - scoring_model: `Model` + scoring_model: Model, optional (default None) Should make predictions on molecular complex. """ if ((featurizer is not None and scoring_model is None) or @@ -47,24 +55,32 @@ class Docker(object): self.scoring_model = scoring_model def dock(self, - molecular_complex, - centroid=None, - box_dims=None, - exhaustiveness=10, - num_modes=9, - num_pockets=None, - out_dir=None, - use_pose_generator_scores=False): + molecular_complex: Tuple[str, str], + centroid: Optional[np.ndarray] = None, + box_dims: Optional[np.ndarray] = None, + exhaustiveness: int = 10, + num_modes: int = 9, + num_pockets: Optional[int] = None, + out_dir: Optional[str] = None, + use_pose_generator_scores: bool = False + ) -> Union[Generator[POSED_COMPLEX, None, None], Generator[Tuple[ + POSED_COMPLEX, float], None, None]]: """Generic docking function. This docking function uses this object's featurizer, pose generator, and scoring model to make docking predictions. This - function is written in generic style so + function is written in generic style so Parameters ---------- - molecular_complex: Object - Some representation of a molecular complex. 
+ molecular_complex: Tuple[str, str] + A representation of a molecular complex. This tuple is + (protein_file, ligand_file). + centroid: np.ndarray, optional (default None) + The centroid to dock against. Is computed if not specified. + box_dims: np.ndarray, optional (default None) + A numpy array of shape `(3,)` holding the size of the box to dock. If not + specified is set to size of molecular complex plus 5 angstroms. exhaustiveness: int, optional (default 10) Tells pose generator how exhaustive it should be with pose generation. @@ -80,18 +96,21 @@ class Docker(object): use_pose_generator_scores: bool, optional (default False) If `True`, ask pose generator to generate scores. This cannot be `True` if `self.featurizer` and `self.scoring_model` are set - since those will be used to generate scores in that case. + since those will be used to generate scores in that case. Returns ------- - A generator. If `use_pose_generator_scores==True` or - `self.scoring_model` is set, then will yield tuples - `(posed_complex, score)`. Else will yield `posed_complex`. + Generator[Tuple[`posed_complex`, `score`]] or Generator[`posed_complex`] + A generator. If `use_pose_generator_scores==True` or + `self.scoring_model` is set, then will yield tuples + `(posed_complex, score)`. Else will yield `posed_complex`. """ if self.scoring_model is not None and use_pose_generator_scores: raise ValueError( - "Cannot set use_pose_generator_scores=True when self.scoring_model is set (since both generator scores for complexes)." + "Cannot set use_pose_generator_scores=True " + "when self.scoring_model is set (since both generator scores for complexes)." 
) + outputs = self.pose_generator.generate_poses( molecular_complex, centroid=centroid, @@ -105,11 +124,15 @@ class Docker(object): complexes, scores = outputs else: complexes = outputs + # We know use_pose_generator_scores == False in this case if self.scoring_model is not None: for posed_complex in complexes: + # check whether self.featurizer is instance of ComplexFeaturizer or not + assert isinstance(self.featurizer, ComplexFeaturizer) # TODO: How to handle the failure here? - features, _ = self.featurizer.featurize_complexes([molecular_complex]) + (protein_file, ligand_file) = molecular_complex + features, _ = self.featurizer.featurize([protein_file], [ligand_file]) dataset = NumpyDataset(X=features) score = self.scoring_model.predict(dataset) yield (posed_complex, score) diff --git a/deepchem/dock/pose_generation.py b/deepchem/dock/pose_generation.py index 2075291de73428ddda1f0e4215ec953f5525f7d4..a9226c2beee912f0ce65250dd13d3c4fa9816c47 100644 --- a/deepchem/dock/pose_generation.py +++ b/deepchem/dock/pose_generation.py @@ -2,22 +2,24 @@ Generates protein-ligand docked poses. 
""" import platform -import deepchem import logging -import numpy as np import os import tempfile import tarfile +import numpy as np from subprocess import call -from deepchem.utils.rdkit_util import add_hydrogens_to_mol from subprocess import check_output -from deepchem.utils import rdkit_util -from deepchem.utils import mol_xyz_util -from deepchem.utils import geometry_utils -from deepchem.utils import vina_utils -from deepchem.utils import download_url +from typing import List, Optional, Tuple, Union + +from deepchem.dock.binding_pocket import BindingPocketFinder +from deepchem.utils.data_utils import download_url, get_data_dir +from deepchem.utils.typing import RDKitMol +from deepchem.utils.geometry_utils import compute_centroid, compute_protein_range +from deepchem.utils.rdkit_utils import load_molecule, write_molecule +from deepchem.utils.vina_utils import load_docked_ligands, write_vina_conf logger = logging.getLogger(__name__) +DOCKED_POSES = List[Tuple[RDKitMol, RDKitMol]] class PoseGenerator(object): @@ -33,24 +35,25 @@ class PoseGenerator(object): """ def generate_poses(self, - molecular_complex, - centroid=None, - box_dims=None, - exhaustiveness=10, - num_modes=9, - num_pockets=None, - out_dir=None, - generate_scores=False): + molecular_complex: Tuple[str, str], + centroid: Optional[np.ndarray] = None, + box_dims: Optional[np.ndarray] = None, + exhaustiveness: int = 10, + num_modes: int = 9, + num_pockets: Optional[int] = None, + out_dir: Optional[str] = None, + generate_scores: bool = False): """Generates a list of low energy poses for molecular complex Parameters ---------- - molecular_complexes: list - A representation of a molecular complex. - centroid: np.ndarray, optional + molecular_complexes: Tuple[str, str] + A representation of a molecular complex. This tuple is + (protein_file, ligand_file). + centroid: np.ndarray, optional (default None) The centroid to dock against. Is computed if not specified. 
- box_dims: np.ndarray, optional - Of shape `(3,)` holding the size of the box to dock. If not + box_dims: np.ndarray, optional (default None) + A numpy array of shape `(3,)` holding the size of the box to dock. If not specified is set to size of molecular complex plus 5 angstroms. exhaustiveness: int, optional (default 10) Tells pose generator how exhaustive it should be with pose @@ -62,7 +65,7 @@ class PoseGenerator(object): If specified, `self.pocket_finder` must be set. Will only generate poses for the first `num_pockets` returned by `self.pocket_finder`. - out_dir: str, optional + out_dir: str, optional (default None) If specified, write generated poses to this directory. generate_score: bool, optional (default False) If `True`, the pose generator will return scores for complexes. @@ -90,19 +93,21 @@ class VinaPoseGenerator(PoseGenerator): This class requires RDKit to be installed. """ - def __init__(self, sixty_four_bits=True, pocket_finder=None): + def __init__(self, + sixty_four_bits: bool = True, + pocket_finder: Optional[BindingPocketFinder] = None): """Initializes Vina Pose Generator - Params - ------ + Parameters + ---------- sixty_four_bits: bool, optional (default True) Specifies whether this is a 64-bit machine. Needed to download the correct executable. - pocket_finder: object, optional (default None) + pocket_finder: BindingPocketFinder, optional (default None) If specified should be an instance of `dc.dock.BindingPocketFinder`. """ - data_dir = deepchem.utils.get_data_dir() + data_dir = get_data_dir() if platform.system() == 'Linux': url = "http://vina.scripps.edu/download/autodock_vina_1_1_2_linux_x86.tgz" filename = "autodock_vina_1_1_2_linux_x86.tgz" @@ -127,7 +132,7 @@ class VinaPoseGenerator(PoseGenerator): self.vina_cmd = os.path.join(self.vina_dir, "vina.exe") else: raise ValueError( - "Unknown operating system. Try using a cloud platform to run this code instead." + "Unknown operating system. 
Try using a cloud platform to run this code instead." ) self.pocket_finder = pocket_finder if not os.path.exists(self.vina_dir): @@ -145,26 +150,29 @@ class VinaPoseGenerator(PoseGenerator): os.remove(downloaded_file) def generate_poses(self, - molecular_complex, - centroid=None, - box_dims=None, - exhaustiveness=10, - num_modes=9, - num_pockets=None, - out_dir=None, - generate_scores=False): + molecular_complex: Tuple[str, str], + centroid: Optional[np.ndarray] = None, + box_dims: Optional[np.ndarray] = None, + exhaustiveness: int = 10, + num_modes: int = 9, + num_pockets: Optional[int] = None, + out_dir: Optional[str] = None, + generate_scores: bool = False + ) -> Union[Tuple[DOCKED_POSES, List[float]], DOCKED_POSES]: """Generates the docked complex and outputs files for docked complex. - TODO: How can this work on Windows? We need to install a .msi file and invoke it correctly from Python for this to work. + TODO: How can this work on Windows? We need to install a .msi file and + invoke it correctly from Python for this to work. Parameters ---------- - molecular_complexes: list - A representation of a molecular complex. + molecular_complexes: Tuple[str, str] + A representation of a molecular complex. This tuple is + (protein_file, ligand_file). centroid: np.ndarray, optional The centroid to dock against. Is computed if not specified. box_dims: np.ndarray, optional - Of shape `(3,)` holding the size of the box to dock. If not + A numpy array of shape `(3,)` holding the size of the box to dock. If not specified is set to size of molecular complex plus 5 angstroms. exhaustiveness: int, optional (default 10) Tells Autodock Vina how exhaustive it should be with pose @@ -185,10 +193,11 @@ class VinaPoseGenerator(PoseGenerator): Returns ------- - Tuple of `(docked_poses, scores)`. `docked_poses` is a list of - docked molecular complexes. Each entry in this list contains a - `(protein_mol, ligand_mol)` pair of RDKit molecules. 
`scores` is a - list of binding free energies predicted by Vina. + Tuple[`docked_poses`, `scores`] or `docked_poses` + Tuple of `(docked_poses, scores)` or `docked_poses`. `docked_poses` + is a list of docked molecular complexes. Each entry in this list + contains a `(protein_mol, ligand_mol)` pair of RDKit molecules. + `scores` is a list of binding free energies predicted by Vina. Raises ------ @@ -214,10 +223,10 @@ class VinaPoseGenerator(PoseGenerator): protein_name = os.path.basename(protein_file).split(".")[0] protein_hyd = os.path.join(out_dir, "%s_hyd.pdb" % protein_name) protein_pdbqt = os.path.join(out_dir, "%s.pdbqt" % protein_name) - protein_mol = rdkit_util.load_molecule( + protein_mol = load_molecule( protein_file, calc_charges=True, add_hydrogens=True) - rdkit_util.write_molecule(protein_mol[1], protein_hyd, is_protein=True) - rdkit_util.write_molecule(protein_mol[1], protein_pdbqt, is_protein=True) + write_molecule(protein_mol[1], protein_hyd, is_protein=True) + write_molecule(protein_mol[1], protein_pdbqt, is_protein=True) # Get protein centroid and range if centroid is not None and box_dims is not None: @@ -226,8 +235,8 @@ class VinaPoseGenerator(PoseGenerator): else: if self.pocket_finder is None: logger.info("Pockets not specified. 
Will use whole protein to dock") - protein_centroid = geometry_utils.compute_centroid(protein_mol[0]) - protein_range = mol_xyz_util.get_molecule_range(protein_mol[0]) + protein_centroid = compute_centroid(protein_mol[0]) + protein_range = compute_protein_range(protein_mol[0]) box_dims = protein_range + 5.0 centroids, dimensions = [protein_centroid], [box_dims] else: @@ -258,9 +267,9 @@ class VinaPoseGenerator(PoseGenerator): ligand_name = os.path.basename(ligand_file).split(".")[0] ligand_pdbqt = os.path.join(out_dir, "%s.pdbqt" % ligand_name) - ligand_mol = rdkit_util.load_molecule( + ligand_mol = load_molecule( ligand_file, calc_charges=True, add_hydrogens=True) - rdkit_util.write_molecule(ligand_mol[1], ligand_pdbqt) + write_molecule(ligand_mol[1], ligand_pdbqt) docked_complexes = [] all_scores = [] @@ -271,7 +280,7 @@ class VinaPoseGenerator(PoseGenerator): logger.info("Box dimensions: %s" % str(box_dims)) # Write Vina conf file conf_file = os.path.join(out_dir, "conf.txt") - vina_utils.write_vina_conf( + write_vina_conf( protein_pdbqt, ligand_pdbqt, protein_centroid, @@ -292,10 +301,12 @@ class VinaPoseGenerator(PoseGenerator): else: # I'm not sure why specifying the args as a list fails on other platforms, # but for some reason it only works if I pass it as a string. 
- args = "%s --config %s --log %s --out %s" % (self.vina_cmd, conf_file, - log_file, out_pdbqt) + # FIXME: Incompatible types in assignment + args = "%s --config %s --log %s --out %s" % ( # type: ignore + self.vina_cmd, conf_file, log_file, out_pdbqt) + # FIXME: We should use `subprocess.run` instead of `call` call(args, shell=True) - ligands, scores = vina_utils.load_docked_ligands(out_pdbqt) + ligands, scores = load_docked_ligands(out_pdbqt) docked_complexes += [(protein_mol[1], ligand) for ligand in ligands] all_scores += scores diff --git a/deepchem/dock/pose_scoring.py b/deepchem/dock/pose_scoring.py index 307f505a7099a8e4081764f9f0942fc431296e5e..e6a0114128ee2cc0bc5eedf25adb0baf9749d031 100644 --- a/deepchem/dock/pose_scoring.py +++ b/deepchem/dock/pose_scoring.py @@ -4,50 +4,51 @@ Utilities to score protein-ligand poses using DeepChem. import numpy as np -def pairwise_distances(coords1, coords2): +def pairwise_distances(coords1: np.ndarray, coords2: np.ndarray) -> np.ndarray: """Returns matrix of pairwise Euclidean distances. Parameters ---------- coords1: np.ndarray - Of shape `(N, 3)` + A numpy array of shape `(N, 3)` coords2: np.ndarray - Of shape `(M, 3)` + A numpy array of shape `(M, 3)` Returns ------- - A `(N,M)` array with pairwise distances. + np.ndarray + A `(N,M)` array with pairwise distances. """ return np.sum((coords1[None, :] - coords2[:, None])**2, -1)**0.5 -def cutoff_filter(d, x, cutoff=8.0): +def cutoff_filter(d: np.ndarray, x: np.ndarray, cutoff=8.0) -> np.ndarray: """Applies a cutoff filter on pairwise distances Parameters ---------- d: np.ndarray - Pairwise distances matrix. Of shape `(N, M)` + Pairwise distances matrix. A numpy array of shape `(N, M)` x: np.ndarray - Matrix of shape `(N, M)` + Matrix of shape `(N, M)` cutoff: float, optional (default 8) Cutoff for selection in Angstroms Returns ------- - A `(N,M)` array with values where distance is too large thresholded - to 0. 
+ np.ndarray + A `(N,M)` array with values where distance is too large thresholded to 0. """ return np.where(d < cutoff, x, np.zeros_like(x)) -def vina_nonlinearity(c, w, Nrot): +def vina_nonlinearity(c: np.ndarray, w: float, Nrot: int) -> np.ndarray: """Computes non-linearity used in Vina. Parameters ---------- - c: np.ndarray - Of shape `(N, M)` + c: np.ndarray + A numpy array of shape `(N, M)` w: float Weighting term Nrot: int @@ -55,65 +56,75 @@ def vina_nonlinearity(c, w, Nrot): Returns ------- - A `(N, M)` array with activations under a nonlinearity. + np.ndarray + A `(N, M)` array with activations under a nonlinearity. """ out_tensor = c / (1 + w * Nrot) return out_tensor -def vina_repulsion(d): +def vina_repulsion(d: np.ndarray) -> np.ndarray: """Computes Autodock Vina's repulsion interaction term. Parameters ---------- d: np.ndarray - Of shape `(N, M)`. + A numpy array of shape `(N, M)`. Returns ------- - A `(N, M)` array with repulsion terms. + np.ndarray + A `(N, M)` array with repulsion terms. """ return np.where(d < 0, d**2, np.zeros_like(d)) -def vina_hydrophobic(d): +def vina_hydrophobic(d: np.ndarray) -> np.ndarray: """Computes Autodock Vina's hydrophobic interaction term. - Here, d is the set of surface distances as defined in: - - Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: a continuous differentiable function tuned to compute binding affinities." Journal of computer-aided molecular design 10.5 (1996): 427-440. + Here, d is the set of surface distances as defined in [1]_ Parameters ---------- d: np.ndarray - Of shape `(N, M)`. + A numpy array of shape `(N, M)`. Returns ------- - A `(N, M)` array of hydrophoboic interactions in a piecewise linear - curve. + np.ndarray + A `(N, M)` array of hydrophoboic interactions in a piecewise linear curve. + + References + ---------- + .. [1] Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: + a continuous differentiable function tuned to compute binding affinities." 
+ Journal of computer-aided molecular design 10.5 (1996): 427-440. """ out_tensor = np.where(d < 0.5, np.ones_like(d), np.where(d < 1.5, 1.5 - d, np.zeros_like(d))) return out_tensor -def vina_hbond(d): +def vina_hbond(d: np.ndarray) -> np.ndarray: """Computes Autodock Vina's hydrogen bond interaction term. - Here, d is the set of surface distances as defined in: - - Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: a continuous differentiable function tuned to compute binding affinities." Journal of computer-aided molecular design 10.5 (1996): 427-440. + Here, d is the set of surface distances as defined in [1]_ Parameters ---------- d: np.ndarray - Of shape `(N, M)`. + A numpy array of shape `(N, M)`. Returns ------- - A `(N, M)` array of hydrophoboic interactions in a piecewise linear - curve. + np.ndarray + A `(N, M)` array of hydrophoboic interactions in a piecewise linear curve. + + References + ---------- + .. [1] Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: + a continuous differentiable function tuned to compute binding affinities." + Journal of computer-aided molecular design 10.5 (1996): 427-440. """ out_tensor = np.where( d < -0.7, np.ones_like(d), @@ -121,70 +132,88 @@ def vina_hbond(d): return out_tensor -def vina_gaussian_first(d): +def vina_gaussian_first(d: np.ndarray) -> np.ndarray: """Computes Autodock Vina's first Gaussian interaction term. - Here, d is the set of surface distances as defined in: - - Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: a continuous differentiable function tuned to compute binding affinities." Journal of computer-aided molecular design 10.5 (1996): 427-440. + Here, d is the set of surface distances as defined in [1]_ Parameters ---------- d: np.ndarray - Of shape `(N, M)`. + A numpy array of shape `(N, M)`. Returns ------- - A `(N, M)` array of gaussian interaction terms. + np.ndarray + A `(N, M)` array of gaussian interaction terms. + + References + ---------- + .. 
[1] Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: + a continuous differentiable function tuned to compute binding affinities." + Journal of computer-aided molecular design 10.5 (1996): 427-440. """ out_tensor = np.exp(-(d / 0.5)**2) return out_tensor -def vina_gaussian_second(d): +def vina_gaussian_second(d: np.ndarray) -> np.ndarray: """Computes Autodock Vina's second Gaussian interaction term. - Here, d is the set of surface distances as defined in: - - Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: a continuous differentiable function tuned to compute binding affinities." Journal of computer-aided molecular design 10.5 (1996): 427-440. + Here, d is the set of surface distances as defined in [1]_ Parameters ---------- d: np.ndarray - Of shape `(N, M)`. + A numpy array of shape `(N, M)`. Returns ------- - A `(N, M)` array of gaussian interaction terms. + np.ndarray + A `(N, M)` array of gaussian interaction terms. + + References + ---------- + .. [1] Jain, Ajay N. "Scoring noncovalent protein-ligand interactions: + a continuous differentiable function tuned to compute binding affinities." + Journal of computer-aided molecular design 10.5 (1996): 427-440. """ out_tensor = np.exp(-((d - 3) / 2)**2) return out_tensor -def weighted_linear_sum(w, x): +def weighted_linear_sum(w: np.ndarray, x: np.ndarray) -> np.ndarray: """Computes weighted linear sum. 
Parameters ---------- w: np.ndarray - Of shape `(N,)` + A numpy array of shape `(N,)` x: np.ndarray - Of shape `(N,)` + A numpy array of shape `(N, M, L)` + + Returns + ------- + np.ndarray + A numpy array of shape `(M, L)` """ - return np.sum(np.dot(w, x)) + return np.tensordot(w, x, axes=1) -def vina_energy_term(coords1, coords2, weights, wrot, Nrot): +def vina_energy_term(coords1: np.ndarray, coords2: np.ndarray, + weights: np.ndarray, wrot: float, Nrot: int) -> np.ndarray: """Computes the Vina Energy function for two molecular conformations Parameters ---------- - coords1: np.ndarray + coords1: np.ndarray Molecular coordinates of shape `(N, 3)` - coords2: np.ndarray + coords2: np.ndarray Molecular coordinates of shape `(M, 3)` weights: np.ndarray - Of shape `(5,)` + A numpy array of shape `(5,)`. The 5 values are weights for repulsion interaction term, + hydrophobic interaction term, hydrogen bond interaction term, + first Gaussian interaction term and second Gaussian interaction term. wrot: float The scaling factor for nonlinearity Nrot: int @@ -192,9 +221,11 @@ def vina_energy_term(coords1, coords2, weights, wrot, Nrot): Returns ------- - Scalar with energy + np.ndarray + A scalar value with free energy """ - # TODO(rbharath): The autodock vina source computes surface distances which take into account the van der Waals radius of each atom type. + # TODO(rbharath): The autodock vina source computes surface distances + # which take into account the van der Waals radius of each atom type. dists = pairwise_distances(coords1, coords2) repulsion = vina_repulsion(dists) hydrophobic = vina_hydrophobic(dists) diff --git a/deepchem/dock/tests/test_binding_pocket.py b/deepchem/dock/tests/test_binding_pocket.py index 1b20b341e8bd67db36e805429b26bc6aa4cdcdd3..e2deb2a64f9eaeace7ba873de66a6879f22a062a 100644 --- a/deepchem/dock/tests/test_binding_pocket.py +++ b/deepchem/dock/tests/test_binding_pocket.py @@ -1,15 +1,13 @@ """ Tests for binding pocket detection. 
""" -import sys +import os import logging import unittest -import os import numpy as np -import pytest import deepchem as dc -from deepchem.utils import rdkit_util +from deepchem.utils import rdkit_utils from deepchem.utils import coordinate_box_utils as box_utils logger = logging.getLogger(__name__) @@ -22,14 +20,13 @@ class TestBindingPocket(unittest.TestCase): def test_convex_init(self): """Tests that ConvexHullPocketFinder can be initialized.""" - finder = dc.dock.ConvexHullPocketFinder() + dc.dock.ConvexHullPocketFinder() def test_get_face_boxes_for_protein(self): """Tests that binding pockets are detected.""" current_dir = os.path.dirname(os.path.realpath(__file__)) protein_file = os.path.join(current_dir, "1jld_protein.pdb") - ligand_file = os.path.join(current_dir, "1jld_ligand.sdf") - coords = rdkit_util.load_molecule(protein_file)[0] + coords = rdkit_utils.load_molecule(protein_file)[0] boxes = box_utils.get_face_boxes(coords) assert isinstance(boxes, list) @@ -41,16 +38,11 @@ class TestBindingPocket(unittest.TestCase): """Test that some pockets are filtered out.""" current_dir = os.path.dirname(os.path.realpath(__file__)) protein_file = os.path.join(current_dir, "1jld_protein.pdb") - ligand_file = os.path.join(current_dir, "1jld_ligand.sdf") - - import mdtraj as md - protein = md.load(protein_file) finder = dc.dock.ConvexHullPocketFinder() all_pockets = finder.find_all_pockets(protein_file) pockets = finder.find_pockets(protein_file) # Test that every atom in pocket maps exists - n_protein_atoms = protein.xyz.shape[1] for pocket in pockets: assert isinstance(pocket, box_utils.CoordinateBox) @@ -62,9 +54,8 @@ class TestBindingPocket(unittest.TestCase): protein_file = os.path.join(current_dir, "1jld_protein.pdb") ligand_file = os.path.join(current_dir, "1jld_ligand.sdf") - active_site_box, active_site_coords = ( - dc.dock.binding_pocket.extract_active_site(protein_file, ligand_file)) - finder = dc.dock.ConvexHullPocketFinder() - pockets = 
finder.find_pockets(protein_file) + active_site_box, active_site_coords = \ + dc.dock.binding_pocket.extract_active_site(protein_file, ligand_file) - assert len(pockets) > 0 + assert isinstance(active_site_box, box_utils.CoordinateBox) + assert isinstance(active_site_coords, np.ndarray) diff --git a/deepchem/dock/tests/test_docking.py b/deepchem/dock/tests/test_docking.py index 7db21e575a98e1409195fea2d27a1b936fe48ccd..e164f395a8c90e106dfe1886ae7814ff509002e6 100644 --- a/deepchem/dock/tests/test_docking.py +++ b/deepchem/dock/tests/test_docking.py @@ -1,14 +1,12 @@ """ -Tests for Docking +Tests for Docking """ import os -import sys import unittest import pytest import logging import numpy as np import deepchem as dc -from deepchem.dock.binding_pocket import ConvexHullPocketFinder from deepchem.feat import ComplexFeaturizer from deepchem.models import Model from deepchem.dock.pose_generation import PoseGenerator @@ -28,7 +26,7 @@ class TestDocking(unittest.TestCase): def test_docker_init(self): """Test that Docker can be initialized.""" vpg = dc.dock.VinaPoseGenerator() - docker = dc.dock.Docker(vpg) + dc.dock.Docker(vpg) @pytest.mark.slow def test_docker_dock(self): @@ -86,7 +84,7 @@ class TestDocking(unittest.TestCase): """Test that Docker can find pockets and dock dock.""" # Let's turn on logging since this test will run for a while logging.basicConfig(level=logging.INFO) - pocket_finder = ConvexHullPocketFinder() + pocket_finder = dc.dock.ConvexHullPocketFinder() vpg = dc.dock.VinaPoseGenerator(pocket_finder=pocket_finder) docker = dc.dock.Docker(vpg) docked_outputs = docker.dock( @@ -105,7 +103,7 @@ class TestDocking(unittest.TestCase): class DummyFeaturizer(ComplexFeaturizer): - def featurize_complexes(self, complexes, *args, **kwargs): + def featurize(self, complexes, *args, **kwargs): return np.zeros((len(complexes), 5)), None class DummyModel(Model): diff --git a/deepchem/dock/tests/test_pose_generation.py b/deepchem/dock/tests/test_pose_generation.py 
index 6edca2c7db17488fa49e1343af34dc61475df900..5d047c708524c948bede8b3ec923c1116eb2e50b 100644 --- a/deepchem/dock/tests/test_pose_generation.py +++ b/deepchem/dock/tests/test_pose_generation.py @@ -2,14 +2,12 @@ Tests for Pose Generation """ import os -import sys import tempfile import unittest import logging import numpy as np import deepchem as dc import pytest -from deepchem.dock.binding_pocket import ConvexHullPocketFinder class TestPoseGeneration(unittest.TestCase): @@ -19,12 +17,12 @@ class TestPoseGeneration(unittest.TestCase): def test_vina_initialization(self): """Test that VinaPoseGenerator can be initialized.""" - vpg = dc.dock.VinaPoseGenerator() + dc.dock.VinaPoseGenerator() def test_pocket_vina_initialization(self): """Test that VinaPoseGenerator can be initialized.""" - pocket_finder = ConvexHullPocketFinder() - vpg = dc.dock.VinaPoseGenerator(pocket_finder=pocket_finder) + pocket_finder = dc.dock.ConvexHullPocketFinder() + dc.dock.VinaPoseGenerator(pocket_finder=pocket_finder) @pytest.mark.slow def test_vina_poses_and_scores(self): diff --git a/deepchem/dock/tests/test_pose_scoring.py b/deepchem/dock/tests/test_pose_scoring.py index 15e652ceb6116b77917b64c3a095f99cce57c51c..9c510804541823e64d37cdb8a17a1b3208fb7efc 100644 --- a/deepchem/dock/tests/test_pose_scoring.py +++ b/deepchem/dock/tests/test_pose_scoring.py @@ -1,17 +1,11 @@ """ Tests for Pose Scoring """ -import sys + import logging import unittest -import tempfile -import os -import shutil import numpy as np -import pytest -import deepchem as dc -from subprocess import call from deepchem.dock.pose_scoring import vina_nonlinearity from deepchem.dock.pose_scoring import vina_hydrophobic from deepchem.dock.pose_scoring import vina_gaussian_first @@ -24,7 +18,6 @@ from deepchem.dock.pose_scoring import vina_energy_term logger = logging.getLogger(__name__) -@pytest.mark.linux_only class TestPoseScoring(unittest.TestCase): """ Does sanity checks on pose generation. 
diff --git a/deepchem/feat/__init__.py b/deepchem/feat/__init__.py index 2e5e6870bfbf823e8ded59a05d1164ce1a797b2b..c0996c928681a5a20b39b813e6acbc82ae05d876 100644 --- a/deepchem/feat/__init__.py +++ b/deepchem/feat/__init__.py @@ -1,26 +1,64 @@ """ Making it easy to import in classes. """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" +# flake8: noqa +# base classes for featurizers from deepchem.feat.base_classes import Featurizer +from deepchem.feat.base_classes import MolecularFeaturizer +from deepchem.feat.base_classes import MaterialStructureFeaturizer +from deepchem.feat.base_classes import MaterialCompositionFeaturizer from deepchem.feat.base_classes import ComplexFeaturizer from deepchem.feat.base_classes import UserDefinedFeaturizer + from deepchem.feat.graph_features import ConvMolFeaturizer from deepchem.feat.graph_features import WeaveFeaturizer -from deepchem.feat.fingerprints import CircularFingerprint -from deepchem.feat.basic import RDKitDescriptors -from deepchem.feat.coulomb_matrices import CoulombMatrix -from deepchem.feat.coulomb_matrices import CoulombMatrixEig -from deepchem.feat.coulomb_matrices import BPSymmetryFunctionInput -from deepchem.feat.rdkit_grid_featurizer import RdkitGridFeaturizer from deepchem.feat.binding_pocket_features import BindingPocketFeaturizer -from deepchem.feat.one_hot import OneHotFeaturizer -from deepchem.feat.raw_featurizer import RawFeaturizer -from deepchem.feat.atomic_coordinates import AtomicCoordinates -from deepchem.feat.atomic_coordinates import NeighborListComplexAtomicCoordinates -from deepchem.feat.adjacency_fingerprints import AdjacencyFingerprint -from deepchem.feat.smiles_featurizers import SmilesToSeq, SmilesToImage -from deepchem.feat.materials_featurizers import ElementPropertyFingerprint, SineCoulombMatrix, StructureGraphFeaturizer + +# molecule featurizers +from deepchem.feat.molecule_featurizers import AtomicCoordinates +from 
deepchem.feat.molecule_featurizers import BPSymmetryFunctionInput +from deepchem.feat.molecule_featurizers import CircularFingerprint +from deepchem.feat.molecule_featurizers import CoulombMatrix +from deepchem.feat.molecule_featurizers import CoulombMatrixEig +from deepchem.feat.molecule_featurizers import MACCSKeysFingerprint +from deepchem.feat.molecule_featurizers import MordredDescriptors +from deepchem.feat.molecule_featurizers import Mol2VecFingerprint +from deepchem.feat.molecule_featurizers import MolGraphConvFeaturizer +from deepchem.feat.molecule_featurizers import OneHotFeaturizer +from deepchem.feat.molecule_featurizers import PubChemFingerprint +from deepchem.feat.molecule_featurizers import RawFeaturizer +from deepchem.feat.molecule_featurizers import RDKitDescriptors +from deepchem.feat.molecule_featurizers import SmilesToImage +from deepchem.feat.molecule_featurizers import SmilesToSeq, create_char_to_idx + +# complex featurizers +from deepchem.feat.complex_featurizers import RdkitGridFeaturizer +from deepchem.feat.complex_featurizers import NeighborListAtomicCoordinates +from deepchem.feat.complex_featurizers import NeighborListComplexAtomicCoordinates +from deepchem.feat.complex_featurizers import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat.complex_featurizers import ContactCircularFingerprint +from deepchem.feat.complex_featurizers import ContactCircularVoxelizer +from deepchem.feat.complex_featurizers import SplifFingerprint +from deepchem.feat.complex_featurizers import SplifVoxelizer +from deepchem.feat.complex_featurizers import ChargeVoxelizer +from deepchem.feat.complex_featurizers import SaltBridgeVoxelizer +from deepchem.feat.complex_featurizers import CationPiVoxelizer +from deepchem.feat.complex_featurizers import PiStackVoxelizer +from deepchem.feat.complex_featurizers import HydrogenBondVoxelizer +from deepchem.feat.complex_featurizers import HydrogenBondCounter + +# material featurizers +from 
deepchem.feat.material_featurizers import ElementPropertyFingerprint +from deepchem.feat.material_featurizers import SineCoulombMatrix +from deepchem.feat.material_featurizers import CGCNNFeaturizer +from deepchem.feat.material_featurizers import ElemNetFeaturizer + +try: + import transformers + from transformers import BertTokenizer + + from deepchem.feat.smiles_tokenizer import SmilesTokenizer + from deepchem.feat.smiles_tokenizer import BasicSmilesTokenizer +except ModuleNotFoundError: + pass diff --git a/deepchem/feat/base_classes.py b/deepchem/feat/base_classes.py index f7585f0fa4de05484ab23180aa19789eb7c21413..4e99aaf9685c2705ef841ab38b41b9b370c69066 100644 --- a/deepchem/feat/base_classes.py +++ b/deepchem/feat/base_classes.py @@ -1,19 +1,153 @@ """ Feature calculations. """ +import inspect import logging -import types import numpy as np import multiprocessing +from typing import Any, Dict, List, Iterable, Sequence, Tuple, Union -__author__ = "Steven Kearnes" -__copyright__ = "Copyright 2014, Stanford University" -__license__ = "BSD 3-clause" +from deepchem.utils import get_print_threshold +from deepchem.utils.typing import PymatgenStructure +logger = logging.getLogger(__name__) -def _featurize_complex(featurizer, mol_pdb_file, protein_pdb_file, log_message): - logging.info(log_message) - return featurizer._featurize_complex(mol_pdb_file, protein_pdb_file) + +class Featurizer(object): + """Abstract class for calculating a set of features for a datapoint. + + This class is abstract and cannot be invoked directly. You'll + likely only interact with this class if you're a developer. In + that case, you might want to make a child class which + implements the `_featurize` method for calculating features for + a single datapoints if you'd like to make a featurizer for a + new datatype. + """ + + def featurize(self, datapoints: Iterable[Any], + log_every_n: int = 1000) -> np.ndarray: + """Calculate features for datapoints. 
+ + Parameters + ---------- + datapoints: Iterable[Any] + A sequence of objects that you'd like to featurize. Subclassses of + `Featurizer` should instantiate the `_featurize` method that featurizes + objects in the sequence. + log_every_n: int, default 1000 + Logs featurization progress every `log_every_n` steps. + + Returns + ------- + np.ndarray + A numpy array containing a featurized representation of `datapoints`. + """ + datapoints = list(datapoints) + features = [] + for i, point in enumerate(datapoints): + if i % log_every_n == 0: + logger.info("Featurizing datapoint %i" % i) + try: + features.append(self._featurize(point)) + except: + logger.warning( + "Failed to featurize datapoint %d. Appending empty array") + features.append(np.array([])) + + features = np.asarray(features) + return features + + def __call__(self, datapoints: Iterable[Any]): + """Calculate features for datapoints. + + Parameters + ---------- + datapoints: Iterable[Any] + Any blob of data you like. Subclasss should instantiate this. + """ + return self.featurize(datapoints) + + def _featurize(self, datapoint: Any): + """Calculate features for a single datapoint. + + Parameters + ---------- + datapoint: Any + Any blob of data you like. Subclass should instantiate this. + """ + raise NotImplementedError('Featurizer is not defined.') + + def __repr__(self) -> str: + """Convert self to repr representation. + + Returns + ------- + str + The string represents the class. 
+ + Examples + -------- + >>> import deepchem as dc + >>> dc.feat.CircularFingerprint(size=1024, radius=4) + CircularFingerprint[radius=4, size=1024, chiral=False, bonds=True, features=False, sparse=False, smiles=False] + >>> dc.feat.CGCNNFeaturizer() + CGCNNFeaturizer[radius=8.0, max_neighbors=12, step=0.2] + """ + args_spec = inspect.getfullargspec(self.__init__) # type: ignore + args_names = [arg for arg in args_spec.args if arg != 'self'] + args_info = '' + for arg_name in args_names: + value = self.__dict__[arg_name] + # for str + if isinstance(value, str): + value = "'" + value + "'" + # for list + if isinstance(value, list): + threshold = get_print_threshold() + value = np.array2string(np.array(value), threshold=threshold) + args_info += arg_name + '=' + str(value) + ', ' + return self.__class__.__name__ + '[' + args_info[:-2] + ']' + + def __str__(self) -> str: + """Convert self to str representation. + + Returns + ------- + str + The string represents the class. + + Examples + -------- + >>> import deepchem as dc + >>> str(dc.feat.CircularFingerprint(size=1024, radius=4)) + 'CircularFingerprint_radius_4_size_1024' + >>> str(dc.feat.CGCNNFeaturizer()) + 'CGCNNFeaturizer' + """ + args_spec = inspect.getfullargspec(self.__init__) # type: ignore + args_names = [arg for arg in args_spec.args if arg != 'self'] + args_num = len(args_names) + args_default_values = [None for _ in range(args_num)] + if args_spec.defaults is not None: + defaults = list(args_spec.defaults) + args_default_values[-len(defaults):] = defaults + + override_args_info = '' + for arg_name, default in zip(args_names, args_default_values): + if arg_name in self.__dict__: + arg_value = self.__dict__[arg_name] + # validation + # skip list + if isinstance(arg_value, list): + continue + if isinstance(arg_value, str): + # skip path string + if "\\/." in arg_value or "/" in arg_value or '.' 
in arg_value: + continue + # main logic + if default != arg_value: + override_args_info += '_' + arg_name + '_' + str(arg_value) + return self.__class__.__name__ + override_args_info class ComplexFeaturizer(object): @@ -21,30 +155,32 @@ class ComplexFeaturizer(object): Abstract class for calculating features for mol/protein complexes. """ - def featurize_complexes(self, mol_files, protein_pdbs): + def featurize(self, mol_files: Sequence[str], + protein_pdbs: Sequence[str]) -> Tuple[np.ndarray, List]: """ Calculate features for mol/protein complexes. Parameters ---------- - mols: list + mols: List[str] List of PDB filenames for molecules. - protein_pdbs: list + protein_pdbs: List[str] List of PDB filenames for proteins. Returns ------- - features: np.array + features: np.ndarray Array of features - failures: list + failures: List Indices of complexes that failed to featurize. """ + pool = multiprocessing.Pool() results = [] for i, (mol_file, protein_pdb) in enumerate(zip(mol_files, protein_pdbs)): log_message = "Featurizing %d / %d" % (i, len(mol_files)) results.append( - pool.apply_async(_featurize_complex, + pool.apply_async(ComplexFeaturizer._featurize_callback, (self, mol_file, protein_pdb, log_message))) pool.close() features = [] @@ -59,69 +195,228 @@ class ComplexFeaturizer(object): features = np.asarray(features) return features, failures - def _featurize_complex(self, mol_pdb, complex_pdb): + def _featurize(self, mol_pdb: str, complex_pdb: str): """ Calculate features for single mol/protein complex. Parameters ---------- - mol_pdb: list - Should be a list of lines of the PDB file. - complex_pdb: list - Should be a list of lines of the PDB file. + mol_pdb : str + The PDB filename. + complex_pdb : str + The PDB filename. 
""" raise NotImplementedError('Featurizer is not defined.') + @staticmethod + def _featurize_callback(featurizer, mol_pdb_file, protein_pdb_file, + log_message): + logging.info(log_message) + return featurizer._featurize(mol_pdb_file, protein_pdb_file) -class Featurizer(object): - """ - Abstract class for calculating a set of features for a molecule. - Child classes implement the _featurize method for calculating features - for a single molecule. +class MolecularFeaturizer(Featurizer): + """Abstract class for calculating a set of features for a + molecule. + + The defining feature of a `MolecularFeaturizer` is that it + uses SMILES strings and RDKit molecule objects to represent + small molecules. All other featurizers which are subclasses of + this class should plan to process input which comes as smiles + strings or RDKit molecules. + + Child classes need to implement the _featurize method for + calculating features for a single molecule. + + Notes + ----- + The subclasses of this class require RDKit to be installed. """ - def featurize(self, mols, verbose=True, log_every_n=1000): - """ - Calculate features for molecules. + def featurize(self, molecules, log_every_n=1000) -> np.ndarray: + """Calculate features for molecules. Parameters ---------- - mols : iterable - RDKit Mol objects. + molecules: rdkit.Chem.rdchem.Mol / SMILES string / iterable + RDKit Mol, or SMILES string or iterable sequence of RDKit mols/SMILES + strings. + log_every_n: int, default 1000 + Logging messages reported every `log_every_n` samples. + + Returns + ------- + features: np.ndarray + A numpy array containing a featurized representation of `datapoints`. 
""" - mols = list(mols) + try: + from rdkit import Chem + from rdkit.Chem import rdmolfiles + from rdkit.Chem import rdmolops + from rdkit.Chem.rdchem import Mol + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + # Special case handling of single molecule + if isinstance(molecules, str) or isinstance(molecules, Mol): + molecules = [molecules] + else: + # Convert iterables to list + molecules = list(molecules) + features = [] - for i, mol in enumerate(mols): - if mol is not None: + for i, mol in enumerate(molecules): + if i % log_every_n == 0: + logger.info("Featurizing datapoint %i" % i) + + try: + if isinstance(mol, str): + # mol must be a RDKit Mol object, so parse a SMILES + mol = Chem.MolFromSmiles(mol) + # SMILES is unique, so set a canonical order of atoms + new_order = rdmolfiles.CanonicalRankAtoms(mol) + mol = rdmolops.RenumberAtoms(mol, new_order) + features.append(self._featurize(mol)) - else: + except Exception as e: + if isinstance(mol, Chem.rdchem.Mol): + mol = Chem.MolToSmiles(mol) + logger.warning( + "Failed to featurize datapoint %d, %s. Appending empty array", i, + mol) + logger.warning("Exception message: {}".format(e)) features.append(np.array([])) features = np.asarray(features) return features - def _featurize(self, mol): - """ - Calculate features for a single molecule. + +class MaterialStructureFeaturizer(Featurizer): + """ + Abstract class for calculating a set of features for an + inorganic crystal structure. + + The defining feature of a `MaterialStructureFeaturizer` is that it + operates on 3D crystal structures with periodic boundary conditions. + Inorganic crystal structures are represented by Pymatgen structure + objects. Featurizers for inorganic crystal structures that are subclasses of + this class should plan to process input which comes as pymatgen + structure objects. + + This class is abstract and cannot be invoked directly. 
You'll + likely only interact with this class if you're a developer. Child + classes need to implement the _featurize method for calculating + features for a single crystal structure. + + Notes + ----- + Some subclasses of this class will require pymatgen and matminer to be + installed. + """ + + def featurize(self, + structures: Iterable[Union[Dict[str, Any], PymatgenStructure]], + log_every_n: int = 1000) -> np.ndarray: + """Calculate features for crystal structures. Parameters ---------- - mol : RDKit Mol - Molecule. - """ - raise NotImplementedError('Featurizer is not defined.') + structures: Iterable[Union[Dict, pymatgen.Structure]] + Iterable sequence of pymatgen structure dictionaries + or pymatgen.Structure. Please confirm the dictionary representations + of pymatgen.Structure from https://pymatgen.org/pymatgen.core.structure.html. + log_every_n: int, default 1000 + Logging messages reported every `log_every_n` samples. - def __call__(self, mols): + Returns + ------- + features: np.ndarray + A numpy array containing a featurized representation of + `structures`. """ - Calculate features for molecules. + try: + from pymatgen import Structure + except ModuleNotFoundError: + raise ImportError("This class requires pymatgen to be installed.") + + structures = list(structures) + features = [] + for idx, structure in enumerate(structures): + if idx % log_every_n == 0: + logger.info("Featurizing datapoint %i" % idx) + try: + if isinstance(structure, Dict): + structure = Structure.from_dict(structure) + features.append(self._featurize(structure)) + except: + logger.warning( + "Failed to featurize datapoint %i. Appending empty array" % idx) + features.append(np.array([])) + + features = np.asarray(features) + return features + + +class MaterialCompositionFeaturizer(Featurizer): + """ + Abstract class for calculating a set of features for an + inorganic crystal composition. 
+ + The defining feature of a `MaterialCompositionFeaturizer` is that it + operates on 3D crystal chemical compositions. + Inorganic crystal compositions are represented by Pymatgen composition + objects. Featurizers for inorganic crystal compositions that are + subclasses of this class should plan to process input which comes as + Pymatgen composition objects. + + This class is abstract and cannot be invoked directly. You'll + likely only interact with this class if you're a developer. Child + classes need to implement the _featurize method for calculating + features for a single crystal composition. + + Notes + ----- + Some subclasses of this class will require pymatgen and matminer to be + installed. + """ + + def featurize(self, compositions: Iterable[str], + log_every_n: int = 1000) -> np.ndarray: + """Calculate features for crystal compositions. Parameters ---------- - mols : iterable - RDKit Mol objects. + compositions: Iterable[str] + Iterable sequence of composition strings, e.g. "MoS2". + log_every_n: int, default 1000 + Logging messages reported every `log_every_n` samples. + + Returns + ------- + features: np.ndarray + A numpy array containing a featurized representation of + `compositions`. """ - return self.featurize(mols) + try: + from pymatgen import Composition + except ModuleNotFoundError: + raise ImportError("This class requires pymatgen to be installed.") + + compositions = list(compositions) + features = [] + for idx, composition in enumerate(compositions): + if idx % log_every_n == 0: + logger.info("Featurizing datapoint %i" % idx) + try: + c = Composition(composition) + features.append(self._featurize(c)) + except: + logger.warning( + "Failed to featurize datapoint %i. 
Appending empty array" % idx) + features.append(np.array([])) + + features = np.asarray(features) + return features class UserDefinedFeaturizer(Featurizer): diff --git a/deepchem/feat/basic.py b/deepchem/feat/basic.py deleted file mode 100644 index 7afea234da95ee7e30ce1f73c53122c7bcd0fa4f..0000000000000000000000000000000000000000 --- a/deepchem/feat/basic.py +++ /dev/null @@ -1,92 +0,0 @@ -""" -Basic molecular features. -""" -__author__ = "Steven Kearnes" -__copyright__ = "Copyright 2014, Stanford University" -__license__ = "MIT" - -from deepchem.feat import Featurizer - - -class MolecularWeight(Featurizer): - """ - Molecular weight. - """ - name = ['mw', 'molecular_weight'] - - def _featurize(self, mol): - """ - Calculate molecular weight. - - Parameters - ---------- - mol : RDKit Mol - Molecule. - """ - from rdkit.Chem import Descriptors - wt = Descriptors.ExactMolWt(mol) - wt = [wt] - return wt - - -class RDKitDescriptors(Featurizer): - """ - RDKit descriptors. - - See http://rdkit.org/docs/GettingStartedInPython.html - #list-of-available-descriptors. - """ - name = 'descriptors' - - # (ytz): This is done to avoid future compatibility issues like inclusion of - # the 3D descriptors or changing the feature size. 
- allowedDescriptors = set([ - 'MaxAbsPartialCharge', 'MinPartialCharge', 'MinAbsPartialCharge', - 'HeavyAtomMolWt', 'MaxAbsEStateIndex', 'NumRadicalElectrons', - 'NumValenceElectrons', 'MinAbsEStateIndex', 'MaxEStateIndex', - 'MaxPartialCharge', 'MinEStateIndex', 'ExactMolWt', 'MolWt', 'BalabanJ', - 'BertzCT', 'Chi0', 'Chi0n', 'Chi0v', 'Chi1', 'Chi1n', 'Chi1v', 'Chi2n', - 'Chi2v', 'Chi3n', 'Chi3v', 'Chi4n', 'Chi4v', 'HallKierAlpha', 'Ipc', - 'Kappa1', 'Kappa2', 'Kappa3', 'LabuteASA', 'PEOE_VSA1', 'PEOE_VSA10', - 'PEOE_VSA11', 'PEOE_VSA12', 'PEOE_VSA13', 'PEOE_VSA14', 'PEOE_VSA2', - 'PEOE_VSA3', 'PEOE_VSA4', 'PEOE_VSA5', 'PEOE_VSA6', 'PEOE_VSA7', - 'PEOE_VSA8', 'PEOE_VSA9', 'SMR_VSA1', 'SMR_VSA10', 'SMR_VSA2', 'SMR_VSA3', - 'SMR_VSA4', 'SMR_VSA5', 'SMR_VSA6', 'SMR_VSA7', 'SMR_VSA8', 'SMR_VSA9', - 'SlogP_VSA1', 'SlogP_VSA10', 'SlogP_VSA11', 'SlogP_VSA12', 'SlogP_VSA2', - 'SlogP_VSA3', 'SlogP_VSA4', 'SlogP_VSA5', 'SlogP_VSA6', 'SlogP_VSA7', - 'SlogP_VSA8', 'SlogP_VSA9', 'TPSA', 'EState_VSA1', 'EState_VSA10', - 'EState_VSA11', 'EState_VSA2', 'EState_VSA3', 'EState_VSA4', - 'EState_VSA5', 'EState_VSA6', 'EState_VSA7', 'EState_VSA8', 'EState_VSA9', - 'VSA_EState1', 'VSA_EState10', 'VSA_EState2', 'VSA_EState3', - 'VSA_EState4', 'VSA_EState5', 'VSA_EState6', 'VSA_EState7', 'VSA_EState8', - 'VSA_EState9', 'FractionCSP3', 'HeavyAtomCount', 'NHOHCount', 'NOCount', - 'NumAliphaticCarbocycles', 'NumAliphaticHeterocycles', - 'NumAliphaticRings', 'NumAromaticCarbocycles', 'NumAromaticHeterocycles', - 'NumAromaticRings', 'NumHAcceptors', 'NumHDonors', 'NumHeteroatoms', - 'NumRotatableBonds', 'NumSaturatedCarbocycles', - 'NumSaturatedHeterocycles', 'NumSaturatedRings', 'RingCount', 'MolLogP', - 'MolMR' - ]) - - def __init__(self): - self.descriptors = [] - self.descList = [] - from rdkit.Chem import Descriptors - for descriptor, function in Descriptors.descList: - if descriptor in self.allowedDescriptors: - self.descriptors.append(descriptor) - self.descList.append((descriptor, 
function)) - - def _featurize(self, mol): - """ - Calculate RDKit descriptors. - - Parameters - ---------- - mol : RDKit Mol - Molecule. - """ - rval = [] - for desc_name, function in self.descList: - rval.append(function(mol)) - return rval diff --git a/deepchem/feat/binding_pocket_features.py b/deepchem/feat/binding_pocket_features.py index afb7100dd98fc8665831c65298d7b1013db4c07d..5920be555955d1d737d877a0c1269e2a1420f3cf 100644 --- a/deepchem/feat/binding_pocket_features.py +++ b/deepchem/feat/binding_pocket_features.py @@ -3,13 +3,17 @@ Featurizes proposed binding pockets. """ import numpy as np import logging -from deepchem.utils import rdkit_util +from typing import Dict, List + from deepchem.feat import Featurizer +from deepchem.utils.coordinate_box_utils import CoordinateBox +from deepchem.utils.rdkit_utils import load_molecule logger = logging.getLogger(__name__) -def boxes_to_atoms(coords, boxes): +def boxes_to_atoms(coords: np.ndarray, boxes: List[CoordinateBox] + ) -> Dict[CoordinateBox, List[int]]: """Maps each box to a list of atoms in that box. Given the coordinates of a macromolecule, and a collection of boxes, @@ -19,13 +23,14 @@ def boxes_to_atoms(coords, boxes): Parameters ---------- coords: np.ndarray - Of shape `(N, 3) + A numpy array of shape `(N, 3)` boxes: list - list of `CoordinateBox` objects. + List of `CoordinateBox` objects. Returns ------- - dictionary mapping `CoordinateBox` objects to lists of atom coordinates + Dict[CoordinateBox, List[int]] + A dictionary mapping `CoordinateBox` objects to lists of atom indices. """ mapping = {} for box_ind, box in enumerate(boxes): @@ -56,6 +61,10 @@ class BindingPocketFeaturizer(Featurizer): implementation for more sophisticated downstream usecases. Note that this class's implementation will only work for proteins and not for other macromolecules + + Notes + ----- + This class requires mdtraj to be installed. 
""" residues = [ @@ -66,23 +75,30 @@ class BindingPocketFeaturizer(Featurizer): n_features = len(residues) - def featurize(self, protein_file, pockets): + # FIXME: Signature of "featurize" incompatible with supertype "Featurizer" + def featurize( # type: ignore[override] + self, protein_file: str, pockets: List[CoordinateBox]) -> np.ndarray: """ Calculate atomic coodinates. - Params - ------ + Parameters + ---------- protein_file: str Location of PDB file. Will be loaded by MDTraj - pockets: list[CoordinateBox] + pockets: List[CoordinateBox] List of `dc.utils.CoordinateBox` objects. Returns ------- - A numpy array of shale `(len(pockets), n_residues)` + np.ndarray + A numpy array of shale `(len(pockets), n_residues)` """ - import mdtraj - protein_coords = rdkit_util.load_molecule( + try: + import mdtraj + except ModuleNotFoundError: + raise ImportError("This class requires mdtraj to be installed.") + + protein_coords = load_molecule( protein_file, add_hydrogens=False, calc_charges=False)[0] mapping = boxes_to_atoms(protein_coords, pockets) protein = mdtraj.load(protein_file) @@ -100,6 +116,5 @@ class BindingPocketFeaturizer(Featurizer): if residue not in res_map: logger.info("Warning: Non-standard residue in PDB file") continue - atomtype = atom_name.split("-")[1] all_features[pocket_num, res_map[residue]] += 1 return all_features diff --git a/deepchem/feat/complex_featurizers/__init__.py b/deepchem/feat/complex_featurizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fb9ea61f04492878434261f9b9ddddb2b5ca4ada --- /dev/null +++ b/deepchem/feat/complex_featurizers/__init__.py @@ -0,0 +1,18 @@ +""" +Featurizers for complex. 
+""" +# flake8: noqa +from deepchem.feat.complex_featurizers.rdkit_grid_featurizer import RdkitGridFeaturizer +from deepchem.feat.complex_featurizers.complex_atomic_coordinates import NeighborListAtomicCoordinates +from deepchem.feat.complex_featurizers.complex_atomic_coordinates import NeighborListComplexAtomicCoordinates +from deepchem.feat.complex_featurizers.complex_atomic_coordinates import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat.complex_featurizers.contact_fingerprints import ContactCircularFingerprint +from deepchem.feat.complex_featurizers.contact_fingerprints import ContactCircularVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import ChargeVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import SaltBridgeVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import CationPiVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import PiStackVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import HydrogenBondVoxelizer +from deepchem.feat.complex_featurizers.grid_featurizers import HydrogenBondCounter +from deepchem.feat.complex_featurizers.splif_fingerprints import SplifFingerprint +from deepchem.feat.complex_featurizers.splif_fingerprints import SplifVoxelizer diff --git a/deepchem/feat/atomic_coordinates.py b/deepchem/feat/complex_featurizers/complex_atomic_coordinates.py similarity index 76% rename from deepchem/feat/atomic_coordinates.py rename to deepchem/feat/complex_featurizers/complex_atomic_coordinates.py index ae1db807f523cf08a939e1170f9dfd32ea2041d4..14db9db065d099471cd56839c47e290f9946ea57 100644 --- a/deepchem/feat/atomic_coordinates.py +++ b/deepchem/feat/complex_featurizers/complex_atomic_coordinates.py @@ -1,53 +1,15 @@ """ Atomic coordinate featurizer. 
""" -__author__ = "Joseph Gomes and Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import logging -import numpy as np -from deepchem.utils.save import log -from deepchem.feat import Featurizer -from deepchem.feat import ComplexFeaturizer -from deepchem.utils import rdkit_util, pad_array -from deepchem.utils.rdkit_util import MoleculeLoadException - - -class AtomicCoordinates(Featurizer): - """ - Nx3 matrix of Cartesian coordinates [Angstrom] - """ - name = ['atomic_coordinates'] - def _featurize(self, mol): - """ - Calculate atomic coodinates. - - Parameters - ---------- - mol : RDKit Mol - Molecule. - """ - - N = mol.GetNumAtoms() - coords = np.zeros((N, 3)) - - # RDKit stores atomic coordinates in Angstrom. Atomic unit of length is the - # bohr (1 bohr = 0.529177 Angstrom). Converting units makes gradient calculation - # consistent with most QM software packages. - coords_in_bohr = [ - mol.GetConformer(0).GetAtomPosition(i).__idiv__(0.52917721092) - for i in range(N) - ] - - for atom in range(N): - coords[atom, 0] = coords_in_bohr[atom].x - coords[atom, 1] = coords_in_bohr[atom].y - coords[atom, 2] = coords_in_bohr[atom].z +import numpy as np - coords = [coords] - return coords +from deepchem.feat.base_classes import Featurizer, ComplexFeaturizer +from deepchem.feat.molecule_featurizers import AtomicCoordinates +from deepchem.utils.data_utils import pad_array +from deepchem.utils.rdkit_utils import MoleculeLoadException, get_xyz_from_mol, \ + load_molecule, merge_molecules_xyz, merge_molecules def compute_neighbor_list(coords, neighbor_cutoff, max_num_neighbors, @@ -80,21 +42,6 @@ def compute_neighbor_list(coords, neighbor_cutoff, max_num_neighbors, return neighbor_list -def get_coords(mol): - """ - Gets coordinates in Angstrom for RDKit mol. 
- """ - N = mol.GetNumAtoms() - coords = np.zeros((N, 3)) - - coords_raw = [mol.GetConformer(0).GetAtomPosition(i) for i in range(N)] - for atom in range(N): - coords[atom, 0] = coords_raw[atom].x - coords[atom, 1] = coords_raw[atom].y - coords[atom, 2] = coords_raw[atom].z - return coords - - class NeighborListAtomicCoordinates(Featurizer): """ Adjacency List of neighbors in 3-space @@ -126,7 +73,8 @@ class NeighborListAtomicCoordinates(Featurizer): self.periodic_box_size = periodic_box_size # Type of data created by this featurizer self.dtype = object - self.coordinates_featurizer = AtomicCoordinates() + self.bohr_coords_featurizer = AtomicCoordinates(use_bohr=True) + self.coords_featurizer = AtomicCoordinates(use_bohr=False) def _featurize(self, mol): """ @@ -137,10 +85,9 @@ class NeighborListAtomicCoordinates(Featurizer): mol: rdkit Mol To be featurized. """ - N = mol.GetNumAtoms() # TODO(rbharath): Should this return a list? - bohr_coords = self.coordinates_featurizer._featurize(mol)[0] - coords = get_coords(mol) + bohr_coords = self.bohr_coords_featurizer._featurize(mol) + coords = self.coords_featurizer._featurize(mol) neighbor_list = compute_neighbor_list(coords, self.neighbor_cutoff, self.max_num_neighbors, self.periodic_box_size) @@ -164,22 +111,21 @@ class NeighborListComplexAtomicCoordinates(ComplexFeaturizer): self.neighbor_cutoff = neighbor_cutoff # Type of data created by this featurizer self.dtype = object - self.coordinates_featurizer = AtomicCoordinates() - def _featurize_complex(self, mol_pdb_file, protein_pdb_file): + def _featurize(self, mol_pdb_file, protein_pdb_file): """ Compute neighbor list for complex. Parameters ---------- - mol_pdb_file: Str - Filename for ligand pdb file. - protein_pdb_file: Str - Filename for protein pdb file. + mol_pdb_file: str + Filename for ligand pdb file. + protein_pdb_file: str + Filename for protein pdb file. 
""" - mol_coords, ob_mol = rdkit_util.load_molecule(mol_pdb_file) - protein_coords, protein_mol = rdkit_util.load_molecule(protein_pdb_file) - system_coords = rdkit_util.merge_molecules_xyz([mol_coords, protein_coords]) + mol_coords, ob_mol = load_molecule(mol_pdb_file) + protein_coords, protein_mol = load_molecule(protein_pdb_file) + system_coords = merge_molecules_xyz([mol_coords, protein_coords]) system_neighbor_list = compute_neighbor_list( system_coords, self.neighbor_cutoff, self.max_num_neighbors, None) @@ -222,19 +168,19 @@ class ComplexNeighborListFragmentAtomicCoordinates(ComplexFeaturizer): self.neighborlist_featurizer = NeighborListComplexAtomicCoordinates( self.max_num_neighbors, self.neighbor_cutoff) - def _featurize_complex(self, mol_pdb_file, protein_pdb_file): + def _featurize(self, mol_pdb_file, protein_pdb_file): try: - frag1_coords, frag1_mol = rdkit_util.load_molecule( + frag1_coords, frag1_mol = load_molecule( mol_pdb_file, is_protein=False, sanitize=True, add_hydrogens=False) - frag2_coords, frag2_mol = rdkit_util.load_molecule( + frag2_coords, frag2_mol = load_molecule( protein_pdb_file, is_protein=True, sanitize=True, add_hydrogens=False) except MoleculeLoadException: # Currently handles loading failures by returning None # TODO: Is there a better handling procedure? logging.warning("Some molecules cannot be loaded by Rdkit. 
Skipping") return None - system_mol = rdkit_util.merge_molecules([frag1_mol, frag2_mol]) - system_coords = rdkit_util.get_xyz_from_mol(system_mol) + system_mol = merge_molecules([frag1_mol, frag2_mol]) + system_coords = get_xyz_from_mol(system_mol) frag1_coords, frag1_mol = self._strip_hydrogens(frag1_coords, frag1_mol) frag2_coords, frag2_mol = self._strip_hydrogens(frag2_coords, frag2_mol) @@ -249,7 +195,7 @@ class ComplexNeighborListFragmentAtomicCoordinates(ComplexFeaturizer): system_coords, system_neighbor_list, system_z = self.featurize_mol( system_coords, system_mol, self.complex_num_atoms) - except ValueError as e: + except ValueError: logging.warning( "max_atoms was set too low. Some complexes too large and skipped") return None diff --git a/deepchem/feat/complex_featurizers/contact_fingerprints.py b/deepchem/feat/complex_featurizers/contact_fingerprints.py new file mode 100644 index 0000000000000000000000000000000000000000..e5ead91fd9173b17a0a31f8df5fe467966aa548d --- /dev/null +++ b/deepchem/feat/complex_featurizers/contact_fingerprints.py @@ -0,0 +1,235 @@ +""" +Topological fingerprints for macromolecular structures. 
+""" +import numpy as np +import logging +import itertools +from deepchem.utils.hash_utils import hash_ecfp +from deepchem.feat import ComplexFeaturizer +from deepchem.utils.rdkit_utils import load_complex +from deepchem.utils.hash_utils import vectorize +from deepchem.utils.voxel_utils import voxelize +from deepchem.utils.voxel_utils import convert_atom_to_voxel +from deepchem.utils.rdkit_utils import compute_all_ecfp +from deepchem.utils.rdkit_utils import compute_contact_centroid +from deepchem.utils.rdkit_utils import MoleculeLoadException +from deepchem.utils.geometry_utils import compute_pairwise_distances +from deepchem.utils.geometry_utils import subtract_centroid + +from typing import Tuple, Dict, List + +logger = logging.getLogger(__name__) + + +def featurize_contacts_ecfp( + frag1: Tuple, + frag2: Tuple, + pairwise_distances: np.ndarray = None, + cutoff: float = 4.5, + ecfp_degree: int = 2) -> Tuple[Dict[int, str], Dict[int, str]]: + """Computes ECFP dicts for pairwise interaction between two molecular fragments. + + Parameters + ---------- + frag1: Tuple + A tuple of (coords, mol) returned by `load_molecule`. + frag2: Tuple + A tuple of (coords, mol) returned by `load_molecule`. + pairwise_distances: np.ndarray + Array of pairwise fragment-fragment distances (Angstroms) + cutoff: float + Cutoff distance for contact consideration + ecfp_degree: int + ECFP radius + + Returns + ------- + Tuple of dictionaries of ECFP contact fragments + """ + if pairwise_distances is None: + pairwise_distances = compute_pairwise_distances(frag1[0], frag2[0]) + # contacts is of form (x_coords, y_coords), a tuple of 2 lists + contacts = np.nonzero((pairwise_distances < cutoff)) + # contacts[0] is the x_coords, that is the frag1 atoms that have + # nonzero contact. 
+ frag1_atoms = set([int(c) for c in contacts[0].tolist()]) + # contacts[1] is the y_coords, the frag2 atoms with nonzero contacts + frag2_atoms = set([int(c) for c in contacts[1].tolist()]) + + frag1_ecfp_dict = compute_all_ecfp( + frag1[1], indices=frag1_atoms, degree=ecfp_degree) + frag2_ecfp_dict = compute_all_ecfp( + frag2[1], indices=frag2_atoms, degree=ecfp_degree) + + return (frag1_ecfp_dict, frag2_ecfp_dict) + + +class ContactCircularFingerprint(ComplexFeaturizer): + """Compute (Morgan) fingerprints near contact points of macromolecular complexes. + + Given a macromolecular complex made up of multiple + constituent molecules, first compute the contact points where + atoms from different molecules come close to one another. For + atoms within "contact regions," compute radial "ECFP" + fragments which are sub-molecules centered at atoms in the + contact region. + + For a macromolecular complex, returns a vector of shape + `(2*size,)` + """ + + def __init__(self, cutoff: float = 4.5, radius: int = 2, size: int = 8): + """ + Parameters + ---------- + cutoff: float (default 4.5) + Distance cutoff in angstroms for molecules in complex. + radius: int, optional (default 2) + Fingerprint radius. + size: int, optional (default 8) + Length of generated bit vector. + """ + self.cutoff = cutoff + self.radius = radius + self.size = size + + def _featurize(self, mol_pdb: str, protein_pdb: str): + """ + Compute featurization for a molecular complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + try: + fragments = load_complex((mol_pdb, protein_pdb), add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + for (frag1, frag2) in itertools.combinations(fragments, 2): + # Get coordinates + distances = compute_pairwise_distances(frag1[0], frag2[0]) + vector = [ + vectorize(hash_ecfp, feature_dict=ecfp_dict, size=self.size) + for ecfp_dict in featurize_contacts_ecfp( + frag1, + frag2, + distances, + cutoff=self.cutoff, + ecfp_degree=self.radius) + ] + pairwise_features += vector + + pairwise_features = np.concatenate(pairwise_features) + return pairwise_features + + +class ContactCircularVoxelizer(ComplexFeaturizer): + """Computes ECFP fingerprints on a voxel grid. + + Given a macromolecular complex made up of multiple + constituent molecules, first compute the contact points where + atoms from different molecules come close to one another. For + atoms within "contact regions," compute radial "ECFP" + fragments which are sub-molecules centered at atoms in the + contact region. Localize these ECFP fingeprints at the voxel + in which they originated. + + Featurizes a macromolecular complex into a tensor of shape + `(voxels_per_edge, voxels_per_edge, voxels_per_edge, size)` where + `voxels_per_edge = int(box_width/voxel_width)`. If `flatten==True`, + then returns a flattened version of this tensor of length + `size*voxels_per_edge**3` + """ + + def __init__(self, + cutoff: float = 4.5, + radius: int = 2, + size: int = 8, + box_width: float = 16.0, + voxel_width: float = 1.0, + flatten: bool = False): + """ + Parameters + ---------- + cutoff: float (default 4.5) + Distance cutoff in angstroms for molecules in complex. + radius : int, optional (default 2) + Fingerprint radius. + size : int, optional (default 8) + Length of generated bit vector. + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. 
+ flatten: bool, optional (default False) + If True, then returns a flat feature vector rather than voxel grid. This + feature vector is constructed by flattening the usual voxel grid. + """ + self.cutoff = cutoff + self.radius = radius + self.size = size + self.box_width = box_width + self.voxel_width = voxel_width + self.voxels_per_edge = int(self.box_width / self.voxel_width) + self.flatten = flatten + + def _featurize(self, mol_pdb: str, protein_pdb: str): + """ + Compute featurization for a molecular complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = load_complex(molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. Returning None") + return None + pairwise_features: List[np.ndarray] = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + for (frag1, frag2) in itertools.combinations(fragments, 2): + distances = compute_pairwise_distances(frag1[0], frag2[0]) + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + pairwise_features.append( + sum([ + voxelize( + convert_atom_to_voxel, + xyz, + self.box_width, + self.voxel_width, + hash_function=hash_ecfp, + feature_dict=ecfp_dict, + nb_channel=self.size) for xyz, ecfp_dict in zip( + xyzs, + featurize_contacts_ecfp( + frag1, + frag2, + distances, + cutoff=self.cutoff, + ecfp_degree=self.radius)) + ])) + if self.flatten: + return np.concatenate( + [features.flatten() for features in pairwise_features]) + else: + # Features are of shape (voxels_per_edge, voxels_per_edge, + # voxels_per_edge, num_feat) so we should concatenate on the last + # axis. 
+ return np.concatenate(pairwise_features, axis=-1) diff --git a/deepchem/feat/complex_featurizers/grid_featurizers.py b/deepchem/feat/complex_featurizers/grid_featurizers.py new file mode 100644 index 0000000000000000000000000000000000000000..cb704bfd5d9165734b603cd18d63c67869dd2343 --- /dev/null +++ b/deepchem/feat/complex_featurizers/grid_featurizers.py @@ -0,0 +1,633 @@ +""" +Compute various spatial fingerprints for macromolecular complexes. +""" +import itertools +import logging +import numpy as np +from deepchem.utils import rdkit_utils +from deepchem.feat import ComplexFeaturizer +from deepchem.utils.voxel_utils import voxelize +from deepchem.utils.voxel_utils import convert_atom_to_voxel +from deepchem.utils.voxel_utils import convert_atom_pair_to_voxel +from deepchem.utils.noncovalent_utils import compute_salt_bridges +from deepchem.utils.noncovalent_utils import compute_binding_pocket_cation_pi +from deepchem.utils.noncovalent_utils import compute_pi_stack +from deepchem.utils.noncovalent_utils import compute_hydrogen_bonds +from deepchem.utils.rdkit_utils import MoleculeLoadException +from deepchem.utils.rdkit_utils import compute_contact_centroid +from deepchem.utils.geometry_utils import compute_pairwise_distances +from deepchem.utils.geometry_utils import subtract_centroid +from deepchem.utils.fragment_utils import get_partial_charge +from deepchem.utils.fragment_utils import reduce_molecular_complex_to_contacts +from typing import List, Tuple, Optional + +logger = logging.getLogger(__name__) + +HBOND_DIST_BINS = [(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)] +HBOND_ANGLE_CUTOFFS = [5., 50., 90.] + + +def compute_charge_dictionary(molecule): + """Create a dictionary with partial charges for each atom in the molecule. 
+ + This function assumes that the charges for the molecule are + already computed (it can be done with + rdkit_util.compute_charges(molecule)) + """ + + charge_dictionary = {} + for i, atom in enumerate(molecule.GetAtoms()): + charge_dictionary[i] = get_partial_charge(atom) + return charge_dictionary + + +class ChargeVoxelizer(ComplexFeaturizer): + """Localize partial charges of atoms in macromolecular complexes. + + Given a macromolecular complex made up of multiple + constitutent molecules, compute the partial (Gasteiger + charge) on each molecule. For each atom, localize this + partial charge in the voxel in which it originated to create + a local charge array. Sum contributions to get an effective + charge at each voxel. + + Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a + tensor output of shape `(voxels_per_edge, voxels_per_edge, + voxels_per_edge, 1)` for each macromolecular complex that computes + the effective charge at each voxel. + """ + + def __init__(self, + cutoff: float = 4.5, + box_width: float = 16.0, + voxel_width: float = 1.0, + reduce_to_contacts: bool = True): + """ + Parameters + ---------- + cutoff: float (default 4.5) + Distance cutoff in angstroms for molecules in complex. + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. + reduce_to_contacts: bool, optional + If True, reduce the atoms in the complex to those near a contact + region. 
+ """ + self.cutoff = cutoff + self.box_width = box_width + self.voxel_width = voxel_width + self.reduce_to_contacts = reduce_to_contacts + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + if self.reduce_to_contacts: + fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff) + # We compute pairwise contact fingerprints + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + rdks = [frag1[1], frag2[1]] + pairwise_features.append( + sum([ + voxelize( + convert_atom_to_voxel, + hash_function=None, + coordinates=xyz, + box_width=self.box_width, + voxel_width=self.voxel_width, + feature_dict=compute_charge_dictionary(mol), + nb_channel=1, + dtype="np.float16") for xyz, mol in zip(xyzs, rdks) + ])) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis. + return np.concatenate(pairwise_features, axis=-1) + + +class SaltBridgeVoxelizer(ComplexFeaturizer): + """Localize salt bridges between atoms in macromolecular complexes. 
+ + Given a macromolecular complex made up of multiple + constitutent molecules, compute salt bridges between atoms in + the macromolecular complex. For each atom, localize this salt + bridge in the voxel in which it originated to create a local + salt bridge array. Note that if atoms in two different voxels + interact in a salt-bridge, the interaction is double counted + in both voxels. + + Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a + tensor output of shape `(voxels_per_edge, voxels_per_edge, + voxels_per_edge, 1)` for each macromolecular the number of salt + bridges at each voxel. + """ + + def __init__(self, + cutoff: float = 5.0, + box_width: float = 16.0, + voxel_width: float = 1.0, + reduce_to_contacts: bool = True): + """ + Parameters + ---------- + cutoff: float, optional (default 5.0) + The distance in angstroms within which atoms must be to + be considered for a salt bridge between them. + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. + reduce_to_contacts: bool, optional + If True, reduce the atoms in the complex to those near a contact + region. + """ + self.cutoff = cutoff + self.box_width = box_width + self.voxel_width = voxel_width + self.reduce_to_contacts = reduce_to_contacts + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + if self.reduce_to_contacts: + fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff) + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + distances = compute_pairwise_distances(frag1[0], frag2[0]) + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + # rdks = [frag1[1], frag2[1]] + pairwise_features.append( + sum([ + voxelize( + convert_atom_pair_to_voxel, + hash_function=None, + coordinates=xyz, + box_width=self.box_width, + voxel_width=self.voxel_width, + feature_list=compute_salt_bridges( + frag1[1], frag2[1], distances, cutoff=self.cutoff), + nb_channel=1) for xyz in xyzs + ])) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis. + return np.concatenate(pairwise_features, axis=-1) + + +class CationPiVoxelizer(ComplexFeaturizer): + """Localize cation-Pi interactions between atoms in macromolecular complexes. + + Given a macromolecular complex made up of multiple + constitutent molecules, compute cation-Pi between atoms in + the macromolecular complex. For each atom, localize this salt + bridge in the voxel in which it originated to create a local + cation-pi array. + + Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a + tensor output of shape `(voxels_per_edge, voxels_per_edge, + voxels_per_edge, 1)` for each macromolecular complex that counts the + number of cation-pi interactions at each voxel. 
+ """ + + def __init__(self, + cutoff: float = 6.5, + angle_cutoff: float = 30.0, + box_width: float = 16.0, + voxel_width: float = 1.0): + """ + Parameters + ---------- + cutoff: float, optional (default 6.5) + The distance in angstroms within which atoms must be to + be considered for a cation-pi interaction between them. + angle_cutoff: float, optional (default 30.0) + Angle cutoff. Max allowed deviation from the ideal (0deg) + angle between ring normal and vector pointing from ring + center to cation (in degrees). + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. + """ + self.cutoff = cutoff + self.angle_cutoff = angle_cutoff + self.box_width = box_width + self.voxel_width = voxel_width + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + # distances = compute_pairwise_distances(frag1[0], frag2[0]) + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + # rdks = [frag1[1], frag2[1]] + pairwise_features.append( + sum([ + voxelize( + convert_atom_to_voxel, + hash_function=None, + box_width=self.box_width, + voxel_width=self.voxel_width, + coordinates=xyz, + feature_dict=cation_pi_dict, + nb_channel=1) for xyz, cation_pi_dict in zip( + xyzs, + compute_binding_pocket_cation_pi( + frag1[1], + frag2[1], + dist_cutoff=self.cutoff, + angle_cutoff=self.angle_cutoff, + )) + ])) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis. + return np.concatenate(pairwise_features, axis=-1) + + +class PiStackVoxelizer(ComplexFeaturizer): + """Localize Pi stacking interactions between atoms in macromolecular complexes. + + Given a macromolecular complex made up of multiple + constitutent molecules, compute pi-stacking interactions + between atoms in the macromolecular complex. For each atom, + localize this salt bridge in the voxel in which it originated + to create a local pi-stacking array. + + Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a + tensor output of shape `(voxels_per_edge, voxels_per_edge, + voxels_per_edge, 2)` for each macromolecular complex. Each voxel has + 2 fields, with the first tracking the number of pi-pi parallel + interactions, and the second tracking the number of pi-T + interactions. 
+ """ + + def __init__(self, + cutoff: float = 4.4, + angle_cutoff: float = 30.0, + box_width: float = 16.0, + voxel_width: float = 1.0): + """ + Parameters + ---------- + cutoff: float, optional (default 4.4) + The distance in angstroms within which atoms must be to + be considered for a cation-pi interaction between them. + angle_cutoff: float, optional (default 30.0) + Angle cutoff. Max allowed deviation from the ideal (0 deg) + angle between ring normal and vector pointing from ring + center to other ring center (in degrees). + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. + """ + self.cutoff = cutoff + self.angle_cutoff = angle_cutoff + self.box_width = box_width + self.voxel_width = voxel_width + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + distances = compute_pairwise_distances(frag1[0], frag2[0]) + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + # rdks = [frag1[1], frag2[1]] + protein_pi_t, protein_pi_parallel, ligand_pi_t, ligand_pi_parallel = ( + compute_pi_stack( + frag1[1], + frag2[1], + distances, + dist_cutoff=self.cutoff, + angle_cutoff=self.angle_cutoff)) + pi_parallel_tensor = sum([ + voxelize( + convert_atom_to_voxel, + hash_function=None, + box_width=self.box_width, + voxel_width=self.voxel_width, + coordinates=xyz, + feature_dict=feature_dict, + nb_channel=1) + for (xyz, feature_dict + ) in zip(xyzs, [ligand_pi_parallel, protein_pi_parallel]) + ]) + + pi_t_tensor = sum([ + voxelize( + convert_atom_to_voxel, + hash_function=None, + box_width=self.box_width, + voxel_width=self.voxel_width, + coordinates=frag1_xyz, + feature_dict=protein_pi_t, + nb_channel=1) + for (xyz, feature_dict) in zip(xyzs, [ligand_pi_t, protein_pi_t]) + ]) + + pairwise_features.append( + np.concatenate([pi_parallel_tensor, pi_t_tensor], axis=-1)) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 2) so we should concatenate on the last axis. + return np.concatenate(pairwise_features, axis=-1) + + +class HydrogenBondCounter(ComplexFeaturizer): + """Counts hydrogen bonds between atoms in macromolecular complexes. + + Given a macromolecular complex made up of multiple + constitutent molecules, count the number of hydrogen bonds + between atoms in the macromolecular complex. 
+ + Creates a scalar output of shape `(3,)` (assuming the default value + ofor `distance_bins` with 3 bins) for each macromolecular complex + that computes the total number of hydrogen bonds. + """ + + def __init__( + self, + cutoff: float = 4.5, + reduce_to_contacts: bool = True, + distance_bins: Optional[List[Tuple[float, float]]] = None, + angle_cutoffs: Optional[List[float]] = None, + ): + """ + Parameters + ---------- + cutoff: float (default 4.5) + Distance cutoff in angstroms for molecules in complex. + reduce_to_contacts: bool, optional + If True, reduce the atoms in the complex to those near a contact + region. + distance_bins: list[tuple] + List of hydgrogen bond distance bins. If not specified is + set to default + `[(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)]`. + angle_cutoffs: list[float] + List of hydrogen bond angle cutoffs. Max allowed + deviation from the ideal (180 deg) angle between + hydrogen-atom1, hydrogen-atom2 vectors.If not specified + is set to default `[5, 50, 90]` + """ + self.cutoff = cutoff + if distance_bins is None: + self.distance_bins = HBOND_DIST_BINS + else: + self.distance_bins = distance_bins + if angle_cutoffs is None: + self.angle_cutoffs = HBOND_ANGLE_CUTOFFS + else: + self.angle_cutoffs = angle_cutoffs + self.reduce_to_contacts = reduce_to_contacts + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + # centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + if self.reduce_to_contacts: + fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff) + # We compute pairwise contact fingerprints + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + distances = compute_pairwise_distances(frag1[0], frag2[0]) + # frag1_xyz = subtract_centroid(frag1[0], centroid) + # frag2_xyz = subtract_centroid(frag2[0], centroid) + # xyzs = [frag1_xyz, frag2_xyz] + # rdks = [frag1[1], frag2[1]] + pairwise_features.append( + np.concatenate( + [ + np.array([len(hbond_list)]) + for hbond_list in compute_hydrogen_bonds( + frag1, frag2, distances, self.distance_bins, + self.angle_cutoffs) + ], + axis=-1)) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis. + return np.concatenate(pairwise_features, axis=-1) + + +class HydrogenBondVoxelizer(ComplexFeaturizer): + """Localize hydrogen bonds between atoms in macromolecular complexes. + + Given a macromolecular complex made up of multiple + constitutent molecules, compute hydrogen bonds between atoms + in the macromolecular complex. For each atom, localize this + hydrogen bond in the voxel in which it originated to create a + local hydrogen bond array. Note that if atoms in two + different voxels interact in a hydrogen bond, the interaction + is double counted in both voxels. + + Let `voxels_per_edge = int(box_width/voxel_width)`. Creates a + tensor output of shape `(voxels_per_edge, voxels_per_edge, + voxels_per_edge, 3)` (assuming the default for `distance_bins` which + has 3 bins) for each macromolecular complex that counts the number + of hydrogen bonds at each voxel. 
+ """ + + def __init__( + self, + cutoff: float = 4.5, + box_width: float = 16.0, + voxel_width: float = 1.0, + reduce_to_contacts: bool = True, + distance_bins: Optional[List[Tuple[float, float]]] = None, + angle_cutoffs: Optional[List[float]] = None, + ): + """ + Parameters + ---------- + cutoff: float (default 4.5) + Distance cutoff in angstroms for contact atoms in complex. + box_width: float, optional (default 16.0) + Size of a box in which voxel features are calculated. Box + is centered on a ligand centroid. + voxel_width: float, optional (default 1.0) + Size of a 3D voxel in a grid. + reduce_to_contacts: bool, optional + If True, reduce the atoms in the complex to those near a contact + region. + distance_bins: list[tuple] + List of hydgrogen bond distance bins. If not specified is + set to default + `[(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)]`. + angle_cutoffs: list[float] + List of hydrogen bond angle cutoffs. Max allowed + deviation from the ideal (180 deg) angle between + hydrogen-atom1, hydrogen-atom2 vectors.If not specified + is set to default `[5, 50, 90]` + """ + self.cutoff = cutoff + if distance_bins is None: + self.distance_bins = HBOND_DIST_BINS + else: + self.distance_bins = distance_bins + if angle_cutoffs is None: + self.angle_cutoffs = HBOND_ANGLE_CUTOFFS + else: + self.angle_cutoffs = angle_cutoffs + self.box_width = box_width + self.voxel_width = voxel_width + self.reduce_to_contacts = reduce_to_contacts + + def _featurize(self, mol_pdb: str, protein_pdb: str) -> np.ndarray: + """ + Compute featurization for a single mol/protein complex + + Parameters + ---------- + mol_pdb: str + Filename for ligand molecule + protein_pdb: str + Filename for protein molecule + """ + molecular_complex = (mol_pdb, protein_pdb) + try: + fragments = rdkit_utils.load_complex( + molecular_complex, add_hydrogens=False) + + except MoleculeLoadException: + logger.warning("This molecule cannot be loaded by Rdkit. 
Returning None") + return None + pairwise_features = [] + # We compute pairwise contact fingerprints + centroid = compute_contact_centroid(fragments, cutoff=self.cutoff) + if self.reduce_to_contacts: + fragments = reduce_molecular_complex_to_contacts(fragments, self.cutoff) + for (frag1_ind, frag2_ind) in itertools.combinations( + range(len(fragments)), 2): + frag1, frag2 = fragments[frag1_ind], fragments[frag2_ind] + distances = compute_pairwise_distances(frag1[0], frag2[0]) + frag1_xyz = subtract_centroid(frag1[0], centroid) + frag2_xyz = subtract_centroid(frag2[0], centroid) + xyzs = [frag1_xyz, frag2_xyz] + # rdks = [frag1[1], frag2[1]] + pairwise_features.append( + np.concatenate( + [ + sum([ + voxelize( + convert_atom_pair_to_voxel, + hash_function=None, + box_width=self.box_width, + voxel_width=self.voxel_width, + coordinates=xyz, + feature_list=hbond_list, + nb_channel=1) for xyz in xyzs + ]) for hbond_list in compute_hydrogen_bonds( + frag1, frag2, distances, self.distance_bins, + self.angle_cutoffs) + ], + axis=-1)) + # Features are of shape (voxels_per_edge, voxels_per_edge, voxels_per_edge, 1) so we should concatenate on the last axis. 
+ return np.concatenate(pairwise_features, axis=-1) diff --git a/deepchem/feat/rdkit_grid_featurizer.py b/deepchem/feat/complex_featurizers/rdkit_grid_featurizer.py similarity index 83% rename from deepchem/feat/rdkit_grid_featurizer.py rename to deepchem/feat/complex_featurizers/rdkit_grid_featurizer.py index 4bd7faa6b6c6f7d95ca438efc4b6e7a125d3e559..f56faade94af23dff98a284a3218f2e65cdb4656 100644 --- a/deepchem/feat/rdkit_grid_featurizer.py +++ b/deepchem/feat/complex_featurizers/rdkit_grid_featurizer.py @@ -1,27 +1,17 @@ -__author__ = "Bharath Ramsundar, Evan Feinberg, and Karl Leswing" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - +# flake8: noqa import logging -import os -import shutil -from warnings import warn import time -import tempfile import hashlib -import multiprocessing from collections import Counter -from deepchem.utils.rdkit_util import load_molecule -from deepchem.utils.rdkit_util import MoleculeLoadException + +from deepchem.utils.rdkit_utils import MoleculeLoadException, load_molecule import numpy as np from scipy.spatial.distance import cdist from copy import deepcopy from deepchem.feat import ComplexFeaturizer -from deepchem.utils.save import log -""" -TODO(LESWING) add sanitization with rdkit upgrade to 2017.* -""" + +logger = logging.getLogger(__name__) def compute_centroid(coordinates): @@ -35,7 +25,7 @@ def compute_centroid(coordinates): def generate_random__unit_vector(): - """Generate a random unit vector on the 3-sphere. + r"""Generate a random unit vector on the 3-sphere. citation: http://mathworld.wolfram.com/SpherePointPicking.html @@ -53,22 +43,23 @@ def generate_random__unit_vector(): def generate_random_rotation_matrix(): - """ - 1. Generate a random unit vector u, randomly sampled from the unit - 3-sphere (see function generate_random__unit_vector() for details) - 2. Generate a second random unit vector v - a. If absolute value of u \dot v > 0.99, repeat. 
+ r"""Generate a random rotation matrix in 3D. + + 1. Generate a random unit vector u, randomly sampled from the unit + 3-sphere (see function generate_random__unit_vector() for details) + 2. Generate a second random unit vector v + a. If absolute value of u \dot v > 0.99, repeat. (This is important for numerical stability. Intuition: we want them to - be as linearly independent as possible or else the orthogonalized - version of v will be much shorter in magnitude compared to u. I assume - in Stack they took this from Gram-Schmidt orthogonalization?) - b. v" = v - (u \dot v)*u, i.e. subtract out the component of v that's in + be as linearly independent as possible or else the orthogonalized + version of v will be much shorter in magnitude compared to u. I assume + in Stack they took this from Gram-Schmidt orthogonalization?) + b. v" = v - (u \dot v)*u, i.e. subtract out the component of v that's in u's direction - c. normalize v" (this isn"t in Stack but I assume it must be done) - 3. find w = u \cross v" - 4. u, v", and w will form the columns of a rotation matrix, R. The - intuition is that u, v" and w are, respectively, what the standard basis - vectors e1, e2, and e3 will be mapped to under the transformation. + c. normalize v" (this isn"t in Stack but I assume it must be done) + 3. find w = u \cross v" + 4. u, v", and w will form the columns of a rotation matrix, R. The + intuition is that u, v" and w are, respectively, what the standard basis + vectors e1, e2, and e3 will be mapped to under the transformation. """ u = generate_random__unit_vector() v = generate_random__unit_vector() @@ -204,21 +195,22 @@ def compute_all_ecfp(mol, indices=None, degree=2): def compute_ecfp_features(mol, ecfp_degree=2, ecfp_power=11): """Computes ECFP features for provided rdkit molecule. - Parameters: - ----------- - mol: rdkit molecule - Molecule to featurize. 
- ecfp_degree: int - ECFP radius - ecfp_power: int - Number of bits to store ECFP features (2^ecfp_power will be length of - ECFP array) - Returns: - -------- - ecfp_array: np.ndarray - Returns an array of size 2^ecfp_power where array at index i has a 1 if - that ECFP fragment is found in the molecule and array at index j has a 0 - if ECFP fragment not in molecule. + Parameters + ---------- + mol: rdkit molecule + Molecule to featurize. + ecfp_degree: int + ECFP radius + ecfp_power: int + Number of bits to store ECFP features (2^ecfp_power will be length of + ECFP array) + + Returns + ------- + ecfp_array: np.ndarray + Returns an array of size 2^ecfp_power where array at index i has a 1 if + that ECFP fragment is found in the molecule and array at index j has a 0 + if ECFP fragment not in molecule. """ from rdkit.Chem import AllChem bv = AllChem.GetMorganFingerprintAsBitVect( @@ -353,17 +345,17 @@ def featurize_splif(protein_xyz, protein, ligand_xyz, ligand, contact_bins, def compute_ring_center(mol, ring_indices): """Computes 3D coordinates of a center of a given ring. - Parameters: - ----------- - mol: rdkit.rdchem.Mol - Molecule containing a ring - ring_indices: array-like - Indices of atoms forming a ring - - Returns: - -------- - ring_centroid: np.ndarray - Position of a ring center + Parameters + ---------- + mol: rdkit.rdchem.Mol + Molecule containing a ring + ring_indices: array-like + Indices of atoms forming a ring + + Returns + ------- + ring_centroid: np.ndarray + Position of a ring center """ conformer = mol.GetConformer() ring_xyz = np.zeros((len(ring_indices), 3)) @@ -377,17 +369,17 @@ def compute_ring_center(mol, ring_indices): def compute_ring_normal(mol, ring_indices): """Computes normal to a plane determined by a given ring. 
- Parameters: - ----------- - mol: rdkit.rdchem.Mol - Molecule containing a ring - ring_indices: array-like - Indices of atoms forming a ring - - Returns: - -------- - normal: np.ndarray - Normal vector + Parameters + ---------- + mol: rdkit.rdchem.Mol + Molecule containing a ring + ring_indices: array-like + Indices of atoms forming a ring + + Returns + ------- + normal: np.ndarray + Normal vector """ conformer = mol.GetConformer() points = np.zeros((3, 3)) @@ -409,19 +401,19 @@ def is_pi_parallel(ring1_center, angle_cutoff=30.0): """Check if two aromatic rings form a parallel pi-pi contact. - Parameters: - ----------- - ring1_center, ring2_center: np.ndarray - Positions of centers of the two rings. Can be computed with the - compute_ring_center function. - ring1_normal, ring2_normal: np.ndarray - Normals of the two rings. Can be computed with the compute_ring_normal - function. - dist_cutoff: float - Distance cutoff. Max allowed distance between the ring center (Angstroms). - angle_cutoff: float - Angle cutoff. Max allowed deviation from the ideal (0deg) angle between - the rings (in degrees). + Parameters + ---------- + ring1_center, ring2_center: np.ndarray + Positions of centers of the two rings. Can be computed with the + compute_ring_center function. + ring1_normal, ring2_normal: np.ndarray + Normals of the two rings. Can be computed with the compute_ring_normal + function. + dist_cutoff: float + Distance cutoff. Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (0deg) angle between + the rings (in degrees). """ dist = np.linalg.norm(ring1_center - ring2_center) @@ -440,19 +432,19 @@ def is_pi_t(ring1_center, angle_cutoff=30.0): """Check if two aromatic rings form a T-shaped pi-pi contact. - Parameters: - ----------- - ring1_center, ring2_center: np.ndarray - Positions of centers of the two rings. Can be computed with the - compute_ring_center function. 
- ring1_normal, ring2_normal: np.ndarray - Normals of the two rings. Can be computed with the compute_ring_normal - function. - dist_cutoff: float - Distance cutoff. Max allowed distance between the ring center (Angstroms). - angle_cutoff: float - Angle cutoff. Max allowed deviation from the ideal (90deg) angle between - the rings (in degrees). + Parameters + ---------- + ring1_center, ring2_center: np.ndarray + Positions of centers of the two rings. Can be computed with the + compute_ring_center function. + ring1_normal, ring2_normal: np.ndarray + Normals of the two rings. Can be computed with the compute_ring_normal + function. + dist_cutoff: float + Distance cutoff. Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (90deg) angle between + the rings (in degrees). """ dist = np.linalg.norm(ring1_center - ring2_center) angle = angle_between(ring1_normal, ring2_normal) * 180 / np.pi @@ -482,23 +474,23 @@ def compute_pi_stack(protein, if it counts as pi-T: count interacting atoms - Parameters: - ----------- - protein, ligand: rdkit.rdchem.Mol - Two interacting molecules. - pairwise_distances: np.ndarray (optional) - Array of pairwise protein-ligand distances (Angstroms) - dist_cutoff: float - Distance cutoff. Max allowed distance between the ring center (Angstroms). - angle_cutoff: float - Angle cutoff. Max allowed deviation from the ideal angle between rings. - - Returns: - -------- - protein_pi_t, protein_pi_parallel, ligand_pi_t, ligand_pi_parallel: dict - Dictionaries mapping atom indices to number of atoms they interact with. - Separate dictionary is created for each type of pi stacking (parallel and - T-shaped) and each molecule (protein and ligand). + Parameters + ---------- + protein, ligand: rdkit.rdchem.Mol + Two interacting molecules. + pairwise_distances: np.ndarray (optional) + Array of pairwise protein-ligand distances (Angstroms) + dist_cutoff: float + Distance cutoff. 
Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal angle between rings. + + Returns + ------- + protein_pi_t, protein_pi_parallel, ligand_pi_t, ligand_pi_parallel: dict + Dictionaries mapping atom indices to number of atoms they interact with. + Separate dictionary is created for each type of pi stacking (parallel and + T-shaped) and each molecule (protein and ligand). """ protein_pi_parallel = Counter() @@ -575,19 +567,19 @@ def is_cation_pi(cation_position, angle_cutoff=30.0): """Check if a cation and an aromatic ring form contact. - Parameters: - ----------- - ring_center: np.ndarray - Positions of ring center. Can be computed with the compute_ring_center - function. - ring_normal: np.ndarray - Normal of ring. Can be computed with the compute_ring_normal function. - dist_cutoff: float - Distance cutoff. Max allowed distance between ring center and cation - (in Angstroms). - angle_cutoff: float - Angle cutoff. Max allowed deviation from the ideal (0deg) angle between - ring normal and vector pointing from ring center to cation (in degrees). + Parameters + ---------- + ring_center: np.ndarray + Positions of ring center. Can be computed with the compute_ring_center + function. + ring_normal: np.ndarray + Normal of ring. Can be computed with the compute_ring_normal function. + dist_cutoff: float + Distance cutoff. Max allowed distance between ring center and cation + (in Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (0deg) angle between + ring normal and vector pointing from ring center to cation (in degrees). """ cation_to_ring_vec = cation_position - ring_center dist = np.linalg.norm(cation_to_ring_vec) @@ -602,26 +594,26 @@ def compute_cation_pi(mol1, mol2, charge_tolerance=0.01, **kwargs): """Finds aromatic rings in mo1 and cations in mol2 that interact with each other. 
- Parameters: - ----------- - mol1: rdkit.rdchem.Mol - Molecule to look for interacting rings - mol2: rdkit.rdchem.Mol - Molecule to look for interacting cations - charge_tolerance: float - Atom is considered a cation if its formal charge is greater than - 1 - charge_tolerance - **kwargs: - Arguments that are passed to is_cation_pi function - - Returns: - -------- - mol1_pi: dict - Dictionary that maps atom indices (from mol1) to the number of cations - (in mol2) they interact with - mol2_cation: dict - Dictionary that maps atom indices (from mol2) to the number of aromatic - atoms (in mol1) they interact with + Parameters + ---------- + mol1: rdkit.rdchem.Mol + Molecule to look for interacting rings + mol2: rdkit.rdchem.Mol + Molecule to look for interacting cations + charge_tolerance: float + Atom is considered a cation if its formal charge is greater than + 1 - charge_tolerance + **kwargs: + Arguments that are passed to is_cation_pi function + + Returns + ------- + mol1_pi: dict + Dictionary that maps atom indices (from mol1) to the number of cations + (in mol2) they interact with + mol2_cation: dict + Dictionary that maps atom indices (from mol2) to the number of aromatic + atoms (in mol1) they interact with """ mol1_pi = Counter() mol2_cation = Counter() @@ -652,18 +644,18 @@ def compute_cation_pi(mol1, mol2, charge_tolerance=0.01, **kwargs): def compute_binding_pocket_cation_pi(protein, ligand, **kwargs): """Finds cation-pi interactions between protein and ligand. 
- Parameters: - ----------- - protein, ligand: rdkit.rdchem.Mol - Interacting molecules - **kwargs: - Arguments that are passed to compute_cation_pi function - - Returns: - -------- - protein_cation_pi, ligand_cation_pi: dict - Dictionaries that maps atom indices to the number of cations/aromatic - atoms they interact with + Parameters + ---------- + protein, ligand: rdkit.rdchem.Mol + Interacting molecules + **kwargs: + Arguments that are passed to compute_cation_pi function + + Returns + ------- + protein_cation_pi, ligand_cation_pi: dict + Dictionaries that maps atom indices to the number of cations/aromatic + atoms they interact with """ # find interacting rings from protein and cations from ligand protein_pi, ligand_cation = compute_cation_pi(protein, ligand, **kwargs) @@ -694,7 +686,7 @@ def get_partial_charge(atom): def get_formal_charge(atom): - warn( + logger.warning( 'get_formal_charge function is deprecated and will be removed' ' in version 1.4, use get_partial_charge instead', DeprecationWarning) return get_partial_charge(atom) @@ -716,8 +708,8 @@ def compute_salt_bridges(protein_xyz, cutoff=5.0): """Find salt bridge contacts between protein and lingand. - Parameters: - ----------- + Parameters + ---------- protein_xyz, ligand_xyz: np.ndarray Arrays with atomic coordinates protein, ligand: rdkit.rdchem.Mol @@ -729,9 +721,9 @@ def compute_salt_bridges(protein_xyz, Returns: -------- - salt_bridge_contacts: list of tuples - List of contacts. Tuple (i, j) indicates that atom i from protein - interacts with atom j from ligand. + salt_bridge_contacts: list of tuples + List of contacts. Tuple (i, j) indicates that atom i from protein + interacts with atom j from ligand. """ salt_bridge_contacts = [] @@ -809,27 +801,27 @@ def convert_atom_to_voxel(molecule_xyz, verbose=False): """Converts atom coordinates to an i,j,k grid index. 
- Parameters: - ----------- - molecule_xyz: np.ndarray - Array with coordinates of all atoms in the molecule, shape (N, 3) - atom_index: int - Index of an atom - box_width: float - Size of a box - voxel_width: float - Size of a voxel - verbose: bool - Print warnings when atom is outside of a box + Parameters + ---------- + molecule_xyz: np.ndarray + Array with coordinates of all atoms in the molecule, shape (N, 3) + atom_index: int + Index of an atom + box_width: float + Size of a box + voxel_width: float + Size of a voxel + verbose: bool + Print warnings when atom is outside of a box """ indices = np.floor( (molecule_xyz[atom_index] + box_width / 2.0) / voxel_width).astype(int) if ((indices < 0) | (indices >= box_width / voxel_width)).any(): if verbose: - warn('Coordinates are outside of the box (atom id = %s,' - ' coords xyz = %s, coords in box = %s' % - (atom_index, molecule_xyz[atom_index], indices)) + logger.warning('Coordinates are outside of the box (atom id = %s,' + ' coords xyz = %s, coords in box = %s' % + (atom_index, molecule_xyz[atom_index], indices)) return ([indices]) @@ -852,7 +844,7 @@ def compute_charge_dictionary(molecule): """Create a dictionary with partial charges for each atom in the molecule. This function assumes that the charges for the molecule are already - computed (it can be done with rdkit_util.compute_charges(molecule)) + computed (it can be done with rdkit_utils.compute_charges(molecule)) """ charge_dictionary = {} @@ -889,19 +881,19 @@ class RdkitGridFeaturizer(ComplexFeaturizer): verbose=True, sanitize=False, **kwargs): - """Parameters: - ----------- + """ + Parameters + ---------- nb_rotations: int, optional (default 0) Number of additional random rotations of a complex to generate. feature_types: list, optional (default ['ecfp']) - Types of features to calculate. 
Available types are: - flat features: 'ecfp_ligand', 'ecfp_hashed', 'splif_hashed', 'hbond_count' - voxel features: 'ecfp', 'splif', 'sybyl', 'salt_bridge', 'charge', 'hbond', - 'pi_stack, 'cation_pi' - There are also 3 predefined sets of features: 'flat_combined', - 'voxel_combined', and 'all_combined'. Calculated features are concatenated - and their order is preserved (features in predefined sets are in - alphabetical order). + Types of features to calculate. Available types are + flat features -> 'ecfp_ligand', 'ecfp_hashed', 'splif_hashed', 'hbond_count' + voxel features -> 'ecfp', 'splif', 'sybyl', 'salt_bridge', 'charge', 'hbond', 'pi_stack, 'cation_pi' + There are also 3 predefined sets of features + 'flat_combined', 'voxel_combined', and 'all_combined'. + Calculated features are concatenated and their order is preserved + (features in predefined sets are in alphabetical order). ecfp_degree: int, optional (default 2) ECFP radius. ecfp_power: int, optional (default 3) @@ -927,18 +919,18 @@ class RdkitGridFeaturizer(ComplexFeaturizer): Keyword arguments can be usaed to specify custom cutoffs and bins (see default values below). 
- Default cutoffs and bins: - ------------------------- - hbond_dist_bins: [(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)] - hbond_angle_cutoffs: [5, 50, 90] - splif_contact_bins: [(0, 2.0), (2.0, 3.0), (3.0, 4.5)] - ecfp_cutoff: 4.5 - sybyl_cutoff: 7.0 - salt_bridges_cutoff: 5.0 - pi_stack_dist_cutoff: 4.4 - pi_stack_angle_cutoff: 30.0 - cation_pi_dist_cutoff: 6.5 - cation_pi_angle_cutoff: 30.0 + Default cutoffs and bins + ------------------------ + hbond_dist_bins: [(2.2, 2.5), (2.5, 3.2), (3.2, 4.0)] + hbond_angle_cutoffs: [5, 50, 90] + splif_contact_bins: [(0, 2.0), (2.0, 3.0), (3.0, 4.5)] + ecfp_cutoff: 4.5 + sybyl_cutoff: 7.0 + salt_bridges_cutoff: 5.0 + pi_stack_dist_cutoff: 4.4 + pi_stack_angle_cutoff: 30.0 + cation_pi_dist_cutoff: 6.5 + cation_pi_angle_cutoff: 30.0 """ # check if user tries to set removed arguments @@ -955,7 +947,7 @@ class RdkitGridFeaturizer(ComplexFeaturizer): for arg in deprecated_args: if arg in kwargs and verbose: - warn( + logger.warning( '%s argument was removed and it is ignored,' ' using it will result in error in version 1.4' % arg, DeprecationWarning) @@ -1026,20 +1018,21 @@ class RdkitGridFeaturizer(ComplexFeaturizer): for feature_type in feature_types: if self.sanitize is False and feature_type in require_sanitized: if self.verbose: - warn('sanitize is set to False, %s feature will be ignored' % - feature_type) + logger.warning('sanitize is set to False, %s feature will be ignored' + % feature_type) continue if feature_type in not_implemented: if self.verbose: - warn('%s feature is not implemented yet and will be ignored' % - feature_type) + logger.warning('%s feature is not implemented yet and will be ignored' + % feature_type) continue if feature_type in self.FLAT_FEATURES: self.feature_types.append((True, feature_type)) if self.flatten is False: if self.verbose: - warn('%s feature is used, output will be flattened' % feature_type) + logger.warning( + '%s feature is used, output will be flattened' % feature_type) self.flatten = True 
elif feature_type in self.VOXEL_FEATURES: @@ -1051,7 +1044,7 @@ class RdkitGridFeaturizer(ComplexFeaturizer): if ftype not in ignored_features] if self.flatten is False: if self.verbose: - warn('Flat features are used, output will be flattened') + logger.warning('Flat features are used, output will be flattened') self.flatten = True elif feature_type == 'voxel_combined': @@ -1067,10 +1060,10 @@ class RdkitGridFeaturizer(ComplexFeaturizer): if ftype not in ignored_features] if self.flatten is False: if self.verbose: - warn('Flat feature are used, output will be flattened') + logger.warning('Flat feature are used, output will be flattened') self.flatten = True elif self.verbose: - warn('Ignoring unknown feature %s' % feature_type) + logger.warning('Ignoring unknown feature %s' % feature_type) def _compute_feature(self, feature_name, prot_xyz, prot_rdk, lig_xyz, lig_rdk, distances): @@ -1213,7 +1206,7 @@ class RdkitGridFeaturizer(ComplexFeaturizer): ] raise ValueError('Unknown feature type "%s"' % feature_name) - def _featurize_complex(self, mol_pdb_file, protein_pdb_file): + def _featurize(self, mol_pdb_file, protein_pdb_file): """Computes grid featurization of protein/ligand complex. Takes as input filenames pdb of the protein, pdb of the ligand. 
@@ -1240,7 +1233,8 @@ class RdkitGridFeaturizer(ComplexFeaturizer): protein_pdb_file, calc_charges=True, sanitize=self.sanitize) ############################################################## TIMING time2 = time.time() - log("TIMING: Loading protein coordinates took %0.3f s" % (time2 - time1), + logger.info( + "TIMING: Loading protein coordinates took %0.3f s" % (time2 - time1), self.verbose) ############################################################## TIMING ############################################################## TIMING @@ -1250,11 +1244,12 @@ class RdkitGridFeaturizer(ComplexFeaturizer): mol_pdb_file, calc_charges=True, sanitize=self.sanitize) ############################################################## TIMING time2 = time.time() - log("TIMING: Loading ligand coordinates took %0.3f s" % (time2 - time1), + logger.info( + "TIMING: Loading ligand coordinates took %0.3f s" % (time2 - time1), self.verbose) ############################################################## TIMING except MoleculeLoadException: - logging.warning("Some molecules cannot be loaded by Rdkit. Skipping") + logger.warning("Some molecules cannot be loaded by Rdkit. 
Skipping") return None ############################################################## TIMING @@ -1265,8 +1260,8 @@ class RdkitGridFeaturizer(ComplexFeaturizer): protein_xyz = subtract_centroid(protein_xyz, centroid) ############################################################## TIMING time2 = time.time() - log("TIMING: Centroid processing took %0.3f s" % (time2 - time1), - self.verbose) + logger.info("TIMING: Centroid processing took %0.3f s" % (time2 - time1), + self.verbose) ############################################################## TIMING pairwise_distances = compute_pairwise_distances(protein_xyz, ligand_xyz) diff --git a/deepchem/feat/complex_featurizers/splif_fingerprints.py b/deepchem/feat/complex_featurizers/splif_fingerprints.py new file mode 100644 index 0000000000000000000000000000000000000000..08a0a8812c9fc8fcdbd653ab6e3a7f4eaeba71f3 --- /dev/null +++ b/deepchem/feat/complex_featurizers/splif_fingerprints.py @@ -0,0 +1,284 @@ +""" +SPLIF Fingerprints for molecular complexes. 
def compute_splif_features_in_range(frag1: Tuple,
                                    frag2: Tuple,
                                    pairwise_distances: np.ndarray,
                                    contact_bin: List,
                                    ecfp_degree: int = 2) -> Dict:
  """Computes SPLIF features for close atoms in molecular complexes.

  Finds all frag1 atoms that are > contact_bin[0] and <
  contact_bin[1] away from frag2 atoms. Then, finds the ECFP
  fingerprints for the contacting atoms. Returns a dictionary
  mapping (frag1_index_i, frag2_index_j) --> (frag1_ecfp_i,
  frag2_ecfp_j)

  Parameters
  ----------
  frag1: Tuple
    A tuple of (coords, mol) returned by `load_molecule`.
  frag2: Tuple
    A tuple of (coords, mol) returned by `load_molecule`.
  pairwise_distances: np.ndarray
    Array of pairwise fragment-fragment distances (Angstroms)
  contact_bin: List
    A single (min_dist, max_dist) pair; only atom pairs whose distance
    falls strictly inside this open interval count as contacts.
  ecfp_degree: int
    ECFP radius

  Returns
  -------
  Dict
    Maps (frag1_atom_index, frag2_atom_index) contact pairs to
    (frag1_ecfp, frag2_ecfp) fragment identifiers.
  """
  # Indices of atom pairs whose distance lies strictly inside the bin.
  contacts = np.nonzero((pairwise_distances > contact_bin[0]) &
                        (pairwise_distances < contact_bin[1]))
  frag1_atoms = set([int(c) for c in contacts[0].tolist()])
  contacts = zip(contacts[0], contacts[1])

  # ECFP environments are only needed for frag1 atoms that actually make
  # contact; for frag2 every atom's environment is computed.
  frag1_ecfp_dict = compute_all_ecfp(
      frag1[1], indices=frag1_atoms, degree=ecfp_degree)
  frag2_ecfp_dict = compute_all_ecfp(frag2[1], degree=ecfp_degree)
  splif_dict = {
      contact: (frag1_ecfp_dict[contact[0]], frag2_ecfp_dict[contact[1]])
      for contact in contacts
  }
  return splif_dict


def featurize_splif(frag1, frag2, contact_bins, pairwise_distances,
                    ecfp_degree):
  """Computes SPLIF featurization of fragment interactions binding pocket.

  For each contact range (i.e. 1 A to 2 A, 2 A to 3 A, etc.)
  compute a dictionary mapping (frag1_index_i, frag2_index_j)
  tuples --> (frag1_ecfp_i, frag2_ecfp_j) tuples. Return a
  list of such splif dictionaries.

  Parameters
  ----------
  frag1: Tuple
    A tuple of (coords, mol) returned by `load_molecule`.
  frag2: Tuple
    A tuple of (coords, mol) returned by `load_molecule`.
  contact_bins: np.ndarray
    Ranges of pair distances which are placed in separate bins.
  pairwise_distances: np.ndarray
    Array of pairwise fragment-fragment distances (Angstroms)
  ecfp_degree: int
    ECFP radius, the graph distance at which fragments are computed.

  Returns
  -------
  Dictionaries of SPLIF interactions suitable for `vectorize` or
  `voxelize`.
  """
  # One SPLIF dictionary per contact range; order follows contact_bins.
  return [
      compute_splif_features_in_range(frag1, frag2, pairwise_distances,
                                      contact_bin, ecfp_degree)
      for contact_bin in contact_bins
  ]
class SplifFingerprint(ComplexFeaturizer):
  """Computes SPLIF Fingerprints for a macromolecular complex.

  SPLIF fingerprints are based on a technique introduced in the
  following paper.

  Da, C., and D. Kireev. "Structural protein–ligand interaction
  fingerprints (SPLIF) for structure-based virtual screening:
  method and benchmark study." Journal of chemical information
  and modeling 54.9 (2014): 2555-2561.

  SPLIF fingerprints are a subclass of `ComplexFeaturizer`. It
  requires 3D coordinates for a molecular complex. For each ligand
  atom, it identifies close pairs of atoms from different molecules.
  These atom pairs are expanded to 2D circular fragments and a
  fingerprint for the union is turned on in the bit vector. Note that
  we slightly generalize the original paper by not requiring the
  interacting molecules to be proteins or ligands.

  This is conceptually pretty similar to
  `ContactCircularFingerprint` but computes ECFP fragments only
  for direct contacts instead of the entire contact region.

  For a macromolecular complex, returns a vector of shape
  `(len(contact_bins)*size,)`
  """

  def __init__(self, contact_bins=None, radius=2, size=8):
    """
    Parameters
    ----------
    contact_bins: list[tuple]
      List of contact bins. If not specified is set to default
      `[(0, 2.0), (2.0, 3.0), (3.0, 4.5)]`.
    radius : int, optional (default 2)
      Fingerprint radius used for circular fingerprints.
    size: int, optional (default 8)
      Length of generated bit vector.
    """
    if contact_bins is None:
      self.contact_bins = SPLIF_CONTACT_BINS
    else:
      self.contact_bins = contact_bins
    self.size = size
    self.radius = radius

  def _featurize(self, mol_pdb: str, complex_pdb: str):
    """
    Compute featurization for a molecular complex

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    complex_pdb: str
      Filename for protein molecule

    Returns
    -------
    np.ndarray or None
      Concatenated hashed SPLIF bit vectors, one per (fragment pair,
      contact bin); None if RDKit fails to load the complex.
    """
    molecular_complex = (mol_pdb, complex_pdb)
    try:
      fragments = load_complex(molecular_complex, add_hydrogens=False)
    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    # We compute pairwise contact fingerprints: for each pair of fragments,
    # one hashed bit vector per contact bin, all joined into a flat vector.
    pairwise_features = []
    for (frag1, frag2) in itertools.combinations(fragments, 2):
      distances = compute_pairwise_distances(frag1[0], frag2[0])
      pairwise_features.extend(
          vectorize(hash_ecfp_pair, feature_dict=splif_dict, size=self.size)
          for splif_dict in featurize_splif(frag1, frag2, self.contact_bins,
                                            distances, self.radius))
    return np.concatenate(pairwise_features)
class SplifVoxelizer(ComplexFeaturizer):
  """Computes SPLIF voxel grid for a macromolecular complex.

  SPLIF fingerprints are based on a technique introduced in the
  following paper [1]_.

  The SPLIF voxelizer localizes local SPLIF descriptors in
  space, by assigning features to the voxel in which they
  originated. This technique may be useful for downstream
  learning methods such as convolutional networks.

  Featurizes a macromolecular complex into a tensor of shape
  `(voxels_per_edge, voxels_per_edge, voxels_per_edge, size)`
  where `voxels_per_edge = int(box_width/voxel_width)`.

  References
  ----------
  .. [1] Da, C., and D. Kireev. "Structural protein–ligand interaction
    fingerprints (SPLIF) for structure-based virtual screening:
    method and benchmark study." Journal of chemical information
    and modeling 54.9 (2014): 2555-2561.
  """

  def __init__(self,
               cutoff: float = 4.5,
               contact_bins: List = None,
               radius: int = 2,
               size: int = 8,
               box_width: float = 16.0,
               voxel_width: float = 1.0):
    """
    Parameters
    ----------
    cutoff: float (default 4.5)
      Distance cutoff in angstroms for molecules in complex.
    contact_bins: list[tuple]
      List of contact bins. If not specified is set to default
      `[(0, 2.0), (2.0, 3.0), (3.0, 4.5)]`.
    radius : int, optional (default 2)
      Fingerprint radius used for circular fingerprints.
    size: int, optional (default 8)
      Length of generated bit vector.
    box_width: float, optional (default 16.0)
      Size of a box in which voxel features are calculated. Box
      is centered on a ligand centroid.
    voxel_width: float, optional (default 1.0)
      Size of a 3D voxel in a grid.
    """
    self.cutoff = cutoff
    if contact_bins is None:
      self.contact_bins = SPLIF_CONTACT_BINS
    else:
      self.contact_bins = contact_bins
    self.size = size
    self.radius = radius
    self.box_width = box_width
    self.voxel_width = voxel_width
    # Number of voxels along each edge of the cubic grid.
    self.voxels_per_edge = int(self.box_width / self.voxel_width)

  def _featurize(self, mol_pdb: str, complex_pdb: str):
    """
    Compute featurization for a molecular complex

    Parameters
    ----------
    mol_pdb: str
      Filename for ligand molecule
    complex_pdb: str
      Filename for protein molecule

    Returns
    -------
    np.ndarray or None
      Voxel grid of SPLIF features concatenated on the channel (last)
      axis, or None if RDKit fails to load the complex.
      NOTE(review): channels accumulate across contact bins and fragment
      pairs, so the last dimension may exceed the `size` advertised in
      the class docstring — confirm against downstream consumers.
    """
    molecular_complex = (mol_pdb, complex_pdb)
    try:
      fragments = load_complex(molecular_complex, add_hydrogens=False)

    except MoleculeLoadException:
      logger.warning("This molecule cannot be loaded by Rdkit. Returning None")
      return None
    pairwise_features = []
    # We compute pairwise contact fingerprints
    # The grid is centered on the centroid of the contact region.
    centroid = compute_contact_centroid(fragments, cutoff=self.cutoff)
    for (frag1, frag2) in itertools.combinations(fragments, 2):
      distances = compute_pairwise_distances(frag1[0], frag2[0])
      # Shift both fragments into the box's centroid-centered frame.
      frag1_xyz = subtract_centroid(frag1[0], centroid)
      frag2_xyz = subtract_centroid(frag2[0], centroid)
      xyzs = [frag1_xyz, frag2_xyz]
      pairwise_features.append(
          np.concatenate(
              [
                  voxelize(
                      convert_atom_pair_to_voxel,
                      hash_function=hash_ecfp_pair,
                      coordinates=xyzs,
                      box_width=self.box_width,
                      voxel_width=self.voxel_width,
                      feature_dict=splif_dict,
                      nb_channel=self.size)
                  for splif_dict in featurize_splif(
                      frag1, frag2, self.contact_bins, distances, self.radius)
              ],
              axis=-1))
    # Each voxelize call is given nb_channel=self.size, so per-bin grids are
    # (voxels_per_edge, voxels_per_edge, voxels_per_edge, size); we join them
    # (and then the per-pair results) along the channel (last) axis.
    return np.concatenate(pairwise_features, axis=-1)
- upper_tri : bool, optional (default False) - Whether to return the upper triangular portion of the Coulomb matrix. - n_samples : int, optional (default 1) - Number of random Coulomb matrices to generate if randomize is True. - seed : int, optional - Random seed. - - Example: - - >>> featurizers = dc.feat.CoulombMatrix(max_atoms=23) - >>> input_file = 'deepchem/feat/tests/data/water.sdf' # really backed by water.sdf.csv - >>> tasks = ["atomization_energy"] - >>> loader = dc.data.SDFLoader(tasks, featurizer=featurizers) - >>> dataset = loader.create_dataset(input_file) #doctest: +ELLIPSIS - Reading structures from deepchem/feat/tests/data/water.sdf. - """ - conformers = True - name = 'coulomb_matrix' - - def __init__(self, - max_atoms, - remove_hydrogens=False, - randomize=False, - upper_tri=False, - n_samples=1, - seed=None): - self.max_atoms = int(max_atoms) - self.remove_hydrogens = remove_hydrogens - self.randomize = randomize - self.upper_tri = upper_tri - self.n_samples = n_samples - if seed is not None: - seed = int(seed) - self.seed = seed - - def _featurize(self, mol): - """ - Calculate Coulomb matrices for molecules. If extra randomized - matrices are generated, they are treated as if they are features - for additional conformers. - - Since Coulomb matrices are symmetric, only the (flattened) upper - triangular portion is returned. - - Parameters - ---------- - mol : RDKit Mol - Molecule. - """ - features = self.coulomb_matrix(mol) - if self.upper_tri: - features = [f[np.triu_indices_from(f)] for f in features] - features = np.asarray(features) - return features - - def coulomb_matrix(self, mol): - """ - Generate Coulomb matrices for each conformer of the given molecule. - - Parameters - ---------- - mol : RDKit Mol - Molecule. 
- """ - from rdkit import Chem - if self.remove_hydrogens: - mol = Chem.RemoveHs(mol) - n_atoms = mol.GetNumAtoms() - z = [atom.GetAtomicNum() for atom in mol.GetAtoms()] - rval = [] - for conf in mol.GetConformers(): - d = self.get_interatomic_distances(conf) - m = np.zeros((n_atoms, n_atoms)) - for i in range(mol.GetNumAtoms()): - for j in range(mol.GetNumAtoms()): - if i == j: - m[i, j] = 0.5 * z[i]**2.4 - elif i < j: - m[i, j] = (z[i] * z[j]) / d[i, j] - m[j, i] = m[i, j] - else: - continue - if self.randomize: - for random_m in self.randomize_coulomb_matrix(m): - random_m = pad_array(random_m, self.max_atoms) - rval.append(random_m) - else: - m = pad_array(m, self.max_atoms) - rval.append(m) - rval = np.asarray(rval) - return rval - - def randomize_coulomb_matrix(self, m): - """ - Randomize a Coulomb matrix as decribed in Montavon et al., _New Journal - of Physics_ __15__ (2013) 095003: - - 1. Compute row norms for M in a vector row_norms. - 2. Sample a zero-mean unit-variance noise vector e with dimension - equal to row_norms. - 3. Permute the rows and columns of M with the permutation that - sorts row_norms + e. - - Parameters - ---------- - m : ndarray - Coulomb matrix. - n_samples : int, optional (default 1) - Number of random matrices to generate. - seed : int, optional - Random seed. - """ - rval = [] - row_norms = np.asarray([np.linalg.norm(row) for row in m], dtype=float) - rng = np.random.RandomState(self.seed) - for i in range(self.n_samples): - e = rng.normal(size=row_norms.size) - p = np.argsort(row_norms + e) - new = m[p][:, p] # permute rows first, then columns - rval.append(new) - return rval - - @staticmethod - def get_interatomic_distances(conf): - """ - Get interatomic distances for atoms in a molecular conformer. - - Parameters - ---------- - conf : RDKit Conformer - Molecule conformer. 
- """ - n_atoms = conf.GetNumAtoms() - coords = [ - conf.GetAtomPosition(i).__idiv__(0.52917721092) for i in range(n_atoms) - ] # Convert AtomPositions from Angstrom to bohr (atomic units) - d = np.zeros((n_atoms, n_atoms), dtype=float) - for i in range(n_atoms): - for j in range(n_atoms): - if i < j: - d[i, j] = coords[i].Distance(coords[j]) - d[j, i] = d[i, j] - else: - continue - return d - - -class CoulombMatrixEig(CoulombMatrix): - """ - Calculate the eigenvales of Coulomb matrices for molecules. - - Parameters - ---------- - max_atoms : int - Maximum number of atoms for any molecule in the dataset. Used to - pad the Coulomb matrix. - remove_hydrogens : bool, optional (default False) - Whether to remove hydrogens before constructing Coulomb matrix. - randomize : bool, optional (default False) - Whether to randomize Coulomb matrices to remove dependence on atom - index order. - n_samples : int, optional (default 1) - Number of random Coulomb matrices to generate if randomize is True. - seed : int, optional - Random seed. - - Example: - - >>> featurizers = dc.feat.CoulombMatrixEig(max_atoms=23) - >>> input_file = 'deepchem/feat/tests/data/water.sdf' # really backed by water.sdf.csv - >>> tasks = ["atomization_energy"] - >>> loader = dc.data.SDFLoader(tasks, featurizer=featurizers) - >>> dataset = loader.create_dataset(input_file) #doctest: +ELLIPSIS - Reading structures from deepchem/feat/tests/data/water.sdf. - """ - - conformers = True - name = 'coulomb_matrix' - - def __init__(self, - max_atoms, - remove_hydrogens=False, - randomize=False, - n_samples=1, - seed=None): - self.max_atoms = int(max_atoms) - self.remove_hydrogens = remove_hydrogens - self.randomize = randomize - self.n_samples = n_samples - if seed is not None: - seed = int(seed) - self.seed = seed - - def _featurize(self, mol): - """ - Calculate eigenvalues of Coulomb matrix for molecules. Eigenvalues - are returned sorted by absolute value in descending order and padded - by max_atoms. 
- - Parameters - ---------- - mol : RDKit Mol - Molecule. - """ - cmat = self.coulomb_matrix(mol) - features = [] - for f in cmat: - w, v = np.linalg.eig(f) - w_abs = np.abs(w) - sortidx = np.argsort(w_abs) - sortidx = sortidx[::-1] - w = w[sortidx] - f = pad_array(w, self.max_atoms) - features.append(f) - features = np.asarray(features) - return features diff --git a/deepchem/feat/graph_data.py b/deepchem/feat/graph_data.py new file mode 100644 index 0000000000000000000000000000000000000000..bbfd74be137df55c54165173c9b2ff0d5add2624 --- /dev/null +++ b/deepchem/feat/graph_data.py @@ -0,0 +1,254 @@ +from typing import Optional, Sequence +import numpy as np + + +class GraphData: + """GraphData class + + This data class is almost same as `torch_geometric.data.Data + `_. + + Attributes + ---------- + node_features: np.ndarray + Node feature matrix with shape [num_nodes, num_node_features] + edge_index: np.ndarray, dtype int + Graph connectivity in COO format with shape [2, num_edges] + edge_features: np.ndarray, optional (default None) + Edge feature matrix with shape [num_edges, num_edge_features] + node_pos_features: np.ndarray, optional (default None) + Node position matrix with shape [num_nodes, num_dimensions]. 
+  num_nodes: int
+    The number of nodes in the graph
+  num_node_features: int
+    The number of features per node in the graph
+  num_edges: int
+    The number of edges in the graph
+  num_edge_features: int, optional (default None)
+    The number of features per edge in the graph
+
+  Examples
+  --------
+  >>> import numpy as np
+  >>> node_features = np.random.rand(5, 10)
+  >>> edge_index = np.array([[0, 1, 2, 3, 4], [1, 2, 3, 4, 0]], dtype=np.int64)
+  >>> graph = GraphData(node_features=node_features, edge_index=edge_index)
+  """
+
+  def __init__(
+      self,
+      node_features: np.ndarray,
+      edge_index: np.ndarray,
+      edge_features: Optional[np.ndarray] = None,
+      node_pos_features: Optional[np.ndarray] = None,
+  ):
+    """
+    Parameters
+    ----------
+    node_features: np.ndarray
+      Node feature matrix with shape [num_nodes, num_node_features]
+    edge_index: np.ndarray, dtype int
+      Graph connectivity in COO format with shape [2, num_edges]
+    edge_features: np.ndarray, optional (default None)
+      Edge feature matrix with shape [num_edges, num_edge_features]
+    node_pos_features: np.ndarray, optional (default None)
+      Node position matrix with shape [num_nodes, num_dimensions].
+ """ + # validate params + if isinstance(node_features, np.ndarray) is False: + raise ValueError('node_features must be np.ndarray.') + + if isinstance(edge_index, np.ndarray) is False: + raise ValueError('edge_index must be np.ndarray.') + elif edge_index.dtype != np.int: + raise ValueError('edge_index.dtype must be np.int.') + elif edge_index.shape[0] != 2: + raise ValueError('The shape of edge_index is [2, num_edges].') + elif np.max(edge_index) >= len(node_features): + raise ValueError('edge_index contains the invalid node number.') + + if edge_features is not None: + if isinstance(edge_features, np.ndarray) is False: + raise ValueError('edge_features must be np.ndarray or None.') + elif edge_index.shape[1] != edge_features.shape[0]: + raise ValueError('The first dimension of edge_features must be the \ + same as the second dimension of edge_index.') + + if node_pos_features is not None: + if isinstance(node_pos_features, np.ndarray) is False: + raise ValueError('node_pos_features must be np.ndarray or None.') + elif node_pos_features.shape[0] != node_features.shape[0]: + raise ValueError( + 'The length of node_pos_features must be the same as the \ + length of node_features.') + + self.node_features = node_features + self.edge_index = edge_index + self.edge_features = edge_features + self.node_pos_features = node_pos_features + self.num_nodes, self.num_node_features = self.node_features.shape + self.num_edges = edge_index.shape[1] + if self.edge_features is not None: + self.num_edge_features = self.edge_features.shape[1] + + def to_pyg_graph(self): + """Convert to PyTorch Geometric graph data instance + + Returns + ------- + torch_geometric.data.Data + Graph data for PyTorch Geometric + + Notes + ----- + This method requires PyTorch Geometric to be installed. 
+ """ + try: + import torch + from torch_geometric.data import Data + except ModuleNotFoundError: + raise ImportError( + "This function requires PyTorch Geometric to be installed.") + + edge_features = self.edge_features + if edge_features is not None: + edge_features = torch.from_numpy(self.edge_features).float() + node_pos_features = self.node_pos_features + if node_pos_features is not None: + node_pos_features = torch.from_numpy(self.node_pos_features).float() + + return Data( + x=torch.from_numpy(self.node_features).float(), + edge_index=torch.from_numpy(self.edge_index).long(), + edge_attr=edge_features, + pos=node_pos_features) + + def to_dgl_graph(self, self_loop: bool = False): + """Convert to DGL graph data instance + + Returns + ------- + dgl.DGLGraph + Graph data for DGL + self_loop: bool + Whether to add self loops for the nodes, i.e. edges from nodes + to themselves. Default to False. + + Notes + ----- + This method requires DGL to be installed. + """ + try: + import dgl + import torch + except ModuleNotFoundError: + raise ImportError("This function requires DGL to be installed.") + + src = self.edge_index[0] + dst = self.edge_index[1] + + g = dgl.graph( + (torch.from_numpy(src).long(), torch.from_numpy(dst).long()), + num_nodes=self.num_nodes) + g.ndata['x'] = torch.from_numpy(self.node_features).float() + + if self.node_pos_features is not None: + g.ndata['pos'] = torch.from_numpy(self.node_pos_features).float() + + if self.edge_features is not None: + g.edata['edge_attr'] = torch.from_numpy(self.edge_features).float() + + if self_loop: + # This assumes that the edge features for self loops are full-zero tensors + # In the future we may want to support featurization for self loops + g.add_edges(np.arange(self.num_nodes), np.arange(self.num_nodes)) + + return g + + +class BatchGraphData(GraphData): + """Batch GraphData class + + Attributes + ---------- + node_features: np.ndarray + Concatenated node feature matrix with shape [num_nodes, 
num_node_features].
+    `num_nodes` is total number of nodes in the batch graph.
+  edge_index: np.ndarray, dtype int
+    Concatenated graph connectivity in COO format with shape [2, num_edges].
+    `num_edges` is total number of edges in the batch graph.
+  edge_features: np.ndarray, optional (default None)
+    Concatenated edge feature matrix with shape [num_edges, num_edge_features].
+    `num_edges` is total number of edges in the batch graph.
+  node_pos_features: np.ndarray, optional (default None)
+    Concatenated node position matrix with shape [num_nodes, num_dimensions].
+    `num_nodes` is total number of nodes in the batch graph.
+  num_nodes: int
+    The number of nodes in the batch graph.
+  num_node_features: int
+    The number of features per node in the graph.
+  num_edges: int
+    The number of edges in the batch graph.
+  num_edge_features: int, optional (default None)
+    The number of features per edge in the graph.
+  graph_index: np.ndarray, dtype int
+    This vector indicates which graph the node belongs with shape [num_nodes,].
+
+  Examples
+  --------
+  >>> import numpy as np
+  >>> from deepchem.feat.graph_data import GraphData
+  >>> node_features_list = np.random.rand(2, 5, 10)
+  >>> edge_index_list = np.array([
+  ...   [[0, 1, 2, 3, 4], [1, 2, 3, 4, 0]],
+  ...   [[0, 1, 2, 3, 4], [1, 2, 3, 4, 0]],
+  ...   ], dtype=np.int)
+  >>> graph_list = [GraphData(node_features, edge_index) for node_features, edge_index
+  ...
in zip(node_features_list, edge_index_list)] + >>> batch_graph = BatchGraphData(graph_list=graph_list) + """ + + def __init__(self, graph_list: Sequence[GraphData]): + """ + Parameters + ---------- + graph_list: Sequence[GraphData] + List of GraphData + """ + # stack features + batch_node_features = np.vstack( + [graph.node_features for graph in graph_list]) + + # before stacking edge_features or node_pos_features, + # we should check whether these are None or not + if graph_list[0].edge_features is not None: + batch_edge_features = np.vstack( + [graph.edge_features for graph in graph_list]) + else: + batch_edge_features = None + + if graph_list[0].node_pos_features is not None: + batch_node_pos_features = np.vstack( + [graph.node_pos_features for graph in graph_list]) + else: + batch_node_pos_features = None + + # create new edge index + num_nodes_list = [graph.num_nodes for graph in graph_list] + batch_edge_index = np.hstack([ + graph.edge_index + prev_num_node + for prev_num_node, graph in zip([0] + num_nodes_list[:-1], graph_list) + ]) + + # graph_index indicates which nodes belong to which graph + graph_index = [] + for i, num_nodes in enumerate(num_nodes_list): + graph_index.extend([i] * num_nodes) + self.graph_index = np.array(graph_index) + + super().__init__( + node_features=batch_node_features, + edge_index=batch_edge_index, + edge_features=batch_edge_features, + node_pos_features=batch_node_pos_features, + ) diff --git a/deepchem/feat/graph_features.py b/deepchem/feat/graph_features.py index f6ce39de78476f0fd4b349d635f8517d782ea695..c4e92f0d31cb948296e92976f95577acafb4c611 100644 --- a/deepchem/feat/graph_features.py +++ b/deepchem/feat/graph_features.py @@ -1,35 +1,84 @@ -import numpy as np +# flake8: noqa +import numpy as np import deepchem as dc -from deepchem.feat import Featurizer -from deepchem.feat.atomic_coordinates import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat.base_classes import MolecularFeaturizer +from 
deepchem.feat.complex_featurizers import ComplexNeighborListFragmentAtomicCoordinates from deepchem.feat.mol_graphs import ConvMol, WeaveMol from deepchem.data import DiskDataset -import multiprocessing import logging - - -def _featurize_complex(featurizer, mol_pdb_file, protein_pdb_file, log_message): - logging.info(log_message) - return featurizer._featurize_complex(mol_pdb_file, protein_pdb_file) +from typing import Optional, List +from deepchem.utils.typing import RDKitMol, RDKitAtom def one_of_k_encoding(x, allowable_set): + """Encodes elements of a provided set as integers. + + Parameters + ---------- + x: object + Must be present in `allowable_set`. + allowable_set: list + List of allowable quantities. + + Example + ------- + >>> import deepchem as dc + >>> dc.feat.graph_features.one_of_k_encoding("a", ["a", "b", "c"]) + [True, False, False] + + Raises + ------ + `ValueError` if `x` is not in `allowable_set`. + """ if x not in allowable_set: - raise Exception("input {0} not in allowable set{1}:".format( + raise ValueError("input {0} not in allowable set{1}:".format( x, allowable_set)) return list(map(lambda s: x == s, allowable_set)) def one_of_k_encoding_unk(x, allowable_set): - """Maps inputs not in the allowable set to the last element.""" + """Maps inputs not in the allowable set to the last element. + + Unlike `one_of_k_encoding`, if `x` is not in `allowable_set`, this method + pretends that `x` is the last element of `allowable_set`. + + Parameters + ---------- + x: object + Must be present in `allowable_set`. + allowable_set: list + List of allowable quantities. 
+ + Examples + -------- + >>> dc.feat.graph_features.one_of_k_encoding_unk("s", ["a", "b", "c"]) + [False, False, True] + """ if x not in allowable_set: x = allowable_set[-1] return list(map(lambda s: x == s, allowable_set)) def get_intervals(l): - """For list of lists, gets the cumulative products of the lengths""" + """For list of lists, gets the cumulative products of the lengths + + Note that we add 1 to the lengths of all lists (to avoid an empty list + propagating a 0). + + Parameters + ---------- + l: list of lists + Returns the cumulative product of these lengths. + + Examples + -------- + >>> dc.feat.graph_features.get_intervals([[1], [1, 2], [1, 2, 3]]) + [1, 3, 12] + + >>> dc.feat.graph_features.get_intervals([[1], [], [1, 2], [1, 2, 3]]) + [1, 1, 3, 12] + """ intervals = len(l) * [0] # Initalize with 1 intervals[0] = 1 @@ -40,37 +89,59 @@ def get_intervals(l): def safe_index(l, e): - """Gets the index of e in l, providing an index of len(l) if not found""" + """Gets the index of e in l, providing an index of len(l) if not found + + Parameters + ---------- + l: list + List of values + e: object + Object to check whether `e` is in `l` + + Examples + -------- + >>> dc.feat.graph_features.safe_index([1, 2, 3], 1) + 0 + >>> dc.feat.graph_features.safe_index([1, 2, 3], 7) + 3 + """ try: return l.index(e) except: return len(l) -possible_atom_list = [ - 'C', 'N', 'O', 'S', 'F', 'P', 'Cl', 'Mg', 'Na', 'Br', 'Fe', 'Ca', 'Cu', - 'Mc', 'Pd', 'Pb', 'K', 'I', 'Al', 'Ni', 'Mn' -] -possible_numH_list = [0, 1, 2, 3, 4] -possible_valence_list = [0, 1, 2, 3, 4, 5, 6] -possible_formal_charge_list = [-3, -2, -1, 0, 1, 2, 3] -# To avoid importing rdkit, this is a placeholder list of the correct -# length. 
These will be replaced with rdkit HybridizationType below -possible_hybridization_list = ["SP", "SP2", "SP3", "SP3D", "SP3D2"] -possible_number_radical_e_list = [0, 1, 2] -possible_chirality_list = ['R', 'S'] - -reference_lists = [ - possible_atom_list, possible_numH_list, possible_valence_list, - possible_formal_charge_list, possible_number_radical_e_list, - possible_hybridization_list, possible_chirality_list -] - -intervals = get_intervals(reference_lists) -# We use E-Z notation for stereochemistry -# https://en.wikipedia.org/wiki/E%E2%80%93Z_notation -possible_bond_stereo = ["STEREONONE", "STEREOANY", "STEREOZ", "STEREOE"] -bond_fdim_base = 6 +class GraphConvConstants(object): + """This class defines a collection of constants which are useful for graph convolutions on molecules.""" + possible_atom_list = [ + 'C', 'N', 'O', 'S', 'F', 'P', 'Cl', 'Mg', 'Na', 'Br', 'Fe', 'Ca', 'Cu', + 'Mc', 'Pd', 'Pb', 'K', 'I', 'Al', 'Ni', 'Mn' + ] + """Allowed Numbers of Hydrogens""" + possible_numH_list = [0, 1, 2, 3, 4] + """Allowed Valences for Atoms""" + possible_valence_list = [0, 1, 2, 3, 4, 5, 6] + """Allowed Formal Charges for Atoms""" + possible_formal_charge_list = [-3, -2, -1, 0, 1, 2, 3] + """This is a placeholder for documentation. These will be replaced with corresponding values of the rdkit HybridizationType""" + possible_hybridization_list = ["SP", "SP2", "SP3", "SP3D", "SP3D2"] + """Allowed number of radical electrons.""" + possible_number_radical_e_list = [0, 1, 2] + """Allowed types of Chirality""" + possible_chirality_list = ['R', 'S'] + """The set of all values allowed.""" + reference_lists = [ + possible_atom_list, possible_numH_list, possible_valence_list, + possible_formal_charge_list, possible_number_radical_e_list, + possible_hybridization_list, possible_chirality_list + ] + """The number of different values that can be taken. See `get_intervals()`""" + intervals = get_intervals(reference_lists) + """Possible stereochemistry. 
We use E-Z notation for stereochemistry + https://en.wikipedia.org/wiki/E%E2%80%93Z_notation""" + possible_bond_stereo = ["STEREONONE", "STEREOANY", "STEREOZ", "STEREOE"] + """Number of different bond types not counting stereochemistry.""" + bond_fdim_base = 6 def get_feature_list(atom): @@ -80,10 +151,39 @@ def get_feature_list(atom): ---------- atom: RDKit.rdchem.Atom Atom to get features for + + Examples + -------- + >>> from rdkit import Chem + >>> mol = Chem.MolFromSmiles("C") + >>> atom = mol.GetAtoms()[0] + >>> dc.feat.graph_features.get_feature_list(atom) + [0, 4, 4, 3, 0, 2] + + Note + ---- + This method requires RDKit to be installed. + + Returns + ------- + features: list + List of length 6. The i-th value in this list provides the index of the + atom in the corresponding feature value list. The 6 feature values lists + for this function are `[GraphConvConstants.possible_atom_list, + GraphConvConstants.possible_numH_list, + GraphConvConstants.possible_valence_list, + GraphConvConstants.possible_formal_charge_list, + GraphConvConstants.possible_num_radical_e_list]`. 
""" + possible_atom_list = GraphConvConstants.possible_atom_list + possible_numH_list = GraphConvConstants.possible_numH_list + possible_valence_list = GraphConvConstants.possible_valence_list + possible_formal_charge_list = GraphConvConstants.possible_formal_charge_list + possible_number_radical_e_list = GraphConvConstants.possible_number_radical_e_list + possible_hybridization_list = GraphConvConstants.possible_hybridization_list # Replace the hybridization from rdkit import Chem - global possible_hybridization_list + #global possible_hybridization_list possible_hybridization_list = [ Chem.rdchem.HybridizationType.SP, Chem.rdchem.HybridizationType.SP2, Chem.rdchem.HybridizationType.SP3, Chem.rdchem.HybridizationType.SP3D, @@ -102,7 +202,20 @@ def get_feature_list(atom): def features_to_id(features, intervals): - """Convert list of features into index using spacings provided in intervals""" + """Convert list of features into index using spacings provided in intervals + + Parameters + ---------- + features: list + List of features as returned by `get_feature_list()` + intervals: list + List of intervals as returned by `get_intervals()` + + Returns + ------- + id: int + The index in a feature vector given by the given set of features. + """ id = 0 for k in range(len(intervals)): id += features[k] * intervals[k] @@ -113,6 +226,20 @@ def features_to_id(features, intervals): def id_to_features(id, intervals): + """Given an index in a feature vector, return the original set of features. + + Parameters + ---------- + id: int + The index in a feature vector given by the given set of features. + intervals: list + List of intervals as returned by `get_intervals()` + + Returns + ------- + features: list + List of features as returned by `get_feature_list()` + """ features = 6 * [0] # Correct for null @@ -134,6 +261,11 @@ def atom_to_id(atom): ---------- atom: RDKit.rdchem.Atom Atom to convert to ids. 
+ + Returns + ------- + id: int + The index in a feature vector given by the given set of features. """ features = get_feature_list(atom) return features_to_id(features, intervals) @@ -155,6 +287,10 @@ def atom_features(atom, If true, model hydrogens explicitly use_chirality: bool, optional If true, use chirality information. + + Returns + ------- + np.ndarray of per-atom features. """ if bool_id_feat: return np.array([atom_to_id(atom)]) @@ -242,8 +378,21 @@ def bond_features(bond, use_chirality=False): ---------- use_chirality: bool, optional If true, use chirality information. + + Note + ---- + This method requires RDKit to be installed. + + Returns + ------- + bond_feats: np.ndarray + Array of bond features. This is a 1-D array of length 6 if `use_chirality` + is `False` else of length 10 with chirality encoded. """ - from rdkit import Chem + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This method requires RDKit to be installed.") bt = bond.GetBondType() bond_feats = [ bt == Chem.rdchem.BondType.SINGLE, bt == Chem.rdchem.BondType.DOUBLE, @@ -253,12 +402,75 @@ def bond_features(bond, use_chirality=False): ] if use_chirality: bond_feats = bond_feats + one_of_k_encoding_unk( - str(bond.GetStereo()), possible_bond_stereo) + str(bond.GetStereo()), GraphConvConstants.possible_bond_stereo) return np.array(bond_feats) -def pair_features(mol, edge_list, canon_adj_list, bt_len=6, - graph_distance=True): +def max_pair_distance_pairs(mol: RDKitMol, + max_pair_distance: Optional[int]) -> np.ndarray: + """Helper method which finds atom pairs within max_pair_distance graph distance. + + This helper method is used to find atoms which are within max_pair_distance + graph_distance of one another. This is done by using the fact that the + powers of an adjacency matrix encode path connectivity information. 
In + particular, if `adj` is the adjacency matrix, then `adj**k` has a nonzero + value at `(i, j)` if and only if there exists a path of graph distance `k` + between `i` and `j`. To find all atoms within `max_pair_distance` of each + other, we can compute the adjacency matrix powers `[adj, adj**2, + ...,adj**max_pair_distance]` and find pairs which are nonzero in any of + these matrices. Since adjacency matrices and their powers are positive + numbers, this is simply the nonzero elements of `adj + adj**2 + ... + + adj**max_pair_distance`. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit molecules + max_pair_distance: Optional[int], (default None) + This value can be a positive integer or None. This + parameter determines the maximum graph distance at which pair + features are computed. For example, if `max_pair_distance==2`, + then pair features are computed only for atoms at most graph + distance 2 apart. If `max_pair_distance` is `None`, all pairs are + considered (effectively infinite `max_pair_distance`) + + + Returns + ------- + np.ndarray + Of shape `(2, num_pairs)` where `num_pairs` is the total number of pairs + within `max_pair_distance` of one another. 
+ """ + from rdkit import Chem + from rdkit.Chem import rdmolops + N = len(mol.GetAtoms()) + if (max_pair_distance is None or max_pair_distance >= N): + max_distance = N + elif max_pair_distance is not None and max_pair_distance <= 0: + raise ValueError( + "max_pair_distance must either be a positive integer or None") + elif max_pair_distance is not None: + max_distance = max_pair_distance + adj = rdmolops.GetAdjacencyMatrix(mol) + # Handle edge case of self-pairs (i, i) + sum_adj = np.eye(N) + for i in range(max_distance): + # Increment by 1 since we don't want 0-indexing + power = i + 1 + sum_adj += np.linalg.matrix_power(adj, power) + nonzero_locs = np.where(sum_adj != 0) + num_pairs = len(nonzero_locs[0]) + # This creates a matrix of shape (2, num_pairs) + pair_edges = np.reshape(np.array(list(zip(nonzero_locs))), (2, num_pairs)) + return pair_edges + + +def pair_features(mol: RDKitMol, + bond_features_map: dict, + bond_adj_list: List, + bt_len: int = 6, + graph_distance: bool = True, + max_pair_distance: Optional[int] = None) -> np.ndarray: """Helper method used to compute atom pair feature vectors. Many different featurization methods compute atom pair features @@ -268,40 +480,95 @@ def pair_features(mol, edge_list, canon_adj_list, bt_len=6, Parameters ---------- - mol: TODO - TODO - edge_list: list - List of edges t oconsider - canon_adj_list: list - TODO - bt_len: int, optional - TODO - graph_distance: bool, optional - TODO + mol: RDKit Mol + Molecule to compute features on. + bond_features_map: dict + Dictionary that maps pairs of atom ids (say `(2, 3)` for a bond between + atoms 2 and 3) to the features for the bond between them. + bond_adj_list: list of lists + `bond_adj_list[i]` is a list of the atom indices that atom `i` shares a + bond with . This list is symmetrical so if `j in bond_adj_list[i]` then `i + in bond_adj_list[j]`. + bt_len: int, optional (default 6) + The number of different bond types to consider. 
+ graph_distance: bool, optional (default True) + If true, use graph distance between molecules. Else use euclidean + distance. The specified `mol` must have a conformer. Atomic + positions will be retrieved by calling `mol.getConformer(0)`. + max_pair_distance: Optional[int], (default None) + This value can be a positive integer or None. This + parameter determines the maximum graph distance at which pair + features are computed. For example, if `max_pair_distance==2`, + then pair features are computed only for atoms at most graph + distance 2 apart. If `max_pair_distance` is `None`, all pairs are + considered (effectively infinite `max_pair_distance`) + + Note + ---- + This method requires RDKit to be installed. + + Returns + ------- + features: np.ndarray + Of shape `(N_edges, bt_len + max_distance + 1)`. This is the array + of pairwise features for all atom pairs, where N_edges is the + number of edges within max_pair_distance of one another in this + molecules. + pair_edges: np.ndarray + Of shape `(2, num_pairs)` where `num_pairs` is the total number of + pairs within `max_pair_distance` of one another. 
""" if graph_distance: max_distance = 7 else: max_distance = 1 N = mol.GetNumAtoms() - features = np.zeros((N, N, bt_len + max_distance + 1)) + pair_edges = max_pair_distance_pairs(mol, max_pair_distance) + num_pairs = pair_edges.shape[1] + N_edges = pair_edges.shape[1] + features = np.zeros((N_edges, bt_len + max_distance + 1)) + # Get mapping + mapping = {} + for n in range(N_edges): + a1, a2 = pair_edges[:, n] + mapping[(int(a1), int(a2))] = n num_atoms = mol.GetNumAtoms() rings = mol.GetRingInfo().AtomRings() for a1 in range(num_atoms): - for a2 in canon_adj_list[a1]: + for a2 in bond_adj_list[a1]: # first `bt_len` features are bond features(if applicable) - features[a1, a2, :bt_len] = np.asarray( - edge_list[tuple(sorted((a1, a2)))], dtype=float) + if (int(a1), int(a2)) not in mapping: + raise ValueError( + "Malformed molecule with bonds not in specified graph distance.") + else: + n = mapping[(int(a1), int(a2))] + features[n, :bt_len] = np.asarray( + bond_features_map[tuple(sorted((a1, a2)))], dtype=float) for ring in rings: if a1 in ring: - # `bt_len`-th feature is if the pair of atoms are in the same ring - features[a1, ring, bt_len] = 1 - features[a1, a1, bt_len] = 0. 
+ for a2 in ring: + if (int(a1), int(a2)) not in mapping: + # For ring pairs outside max pairs distance continue + continue + else: + n = mapping[(int(a1), int(a2))] + # `bt_len`-th feature is if the pair of atoms are in the same ring + if a2 == a1: + features[n, bt_len] = 0 + else: + features[n, bt_len] = 1 # graph distance between two atoms if graph_distance: + # distance is a matrix of 1-hot encoded distances for all atoms distance = find_distance( - a1, num_atoms, canon_adj_list, max_distance=max_distance) - features[a1, :, bt_len + 1:] = distance + a1, num_atoms, bond_adj_list, max_distance=max_distance) + for a2 in range(num_atoms): + if (int(a1), int(a2)) not in mapping: + # For ring pairs outside max pairs distance continue + continue + else: + n = mapping[(int(a1), int(a2))] + features[n, bt_len + 1:] = distance[a2] # Euclidean distance between atoms if not graph_distance: coords = np.zeros((N, 3)) @@ -312,14 +579,37 @@ def pair_features(mol, edge_list, canon_adj_list, bt_len=6, np.stack([coords] * N, axis=1) - \ np.stack([coords] * N, axis=0)), axis=2)) - return features + return features, pair_edges + +def find_distance(a1: RDKitAtom, num_atoms: int, bond_adj_list, + max_distance=7) -> np.ndarray: + """Computes distances from provided atom. -def find_distance(a1, num_atoms, canon_adj_list, max_distance=7): + Parameters + ---------- + a1: RDKit atom + The source atom to compute distances from. + num_atoms: int + The total number of atoms. + bond_adj_list: list of lists + `bond_adj_list[i]` is a list of the atom indices that atom `i` shares a + bond with. This list is symmetrical so if `j in bond_adj_list[i]` then `i in + bond_adj_list[j]`. + max_distance: int, optional (default 7) + The max distance to search. + + Returns + ------- + distances: np.ndarray + Of shape `(num_atoms, max_distance)`. Provides a one-hot encoding of the + distances. That is, `distances[i]` is a one-hot encoding of the distance + from `a1` to atom `i`. 
+ """ distance = np.zeros((num_atoms, max_distance)) radial = 0 # atoms `radial` bonds away from `a1` - adj_list = set(canon_adj_list[a1]) + adj_list = set(bond_adj_list[a1]) # atoms less than `radial` bonds away all_list = set([a1]) while radial < max_distance: @@ -328,16 +618,28 @@ def find_distance(a1, num_atoms, canon_adj_list, max_distance=7): # find atoms `radial`+1 bonds away next_adj = set() for adj in adj_list: - next_adj.update(canon_adj_list[adj]) + next_adj.update(bond_adj_list[adj]) adj_list = next_adj - all_list radial = radial + 1 return distance -class ConvMolFeaturizer(Featurizer): - """This class implements the featurization to implement graph convolutions from the Duvenaud graph convolution paper +class ConvMolFeaturizer(MolecularFeaturizer): + """This class implements the featurization to implement Duvenaud graph convolutions. + + Duvenaud graph convolutions [1]_ construct a vector of descriptors for each + atom in a molecule. The featurizer computes that vector of local descriptors. -Duvenaud, David K., et al. "Convolutional networks on graphs for learning molecular fingerprints." Advances in neural information processing systems. 2015. + References + --------- + + .. [1] Duvenaud, David K., et al. "Convolutional networks on graphs for + learning molecular fingerprints." Advances in neural information + processing systems. 2015. + + Note + ---- + This class requires RDKit to be installed. """ name = ['conv_mol'] @@ -448,26 +750,60 @@ Duvenaud, David K., et al. "Convolutional networks on graphs for learning molecu tuple(self.atom_properties) == tuple(other.atom_properties) -class WeaveFeaturizer(Featurizer): - """This class implements the featurization to implement Weave convolutions from the Google graph convolution paper. +class WeaveFeaturizer(MolecularFeaturizer): + """This class implements the featurization to implement Weave convolutions. + + Weave convolutions were introduced in [1]_. 
Unlike Duvenaud graph + convolutions, weave convolutions require a quadratic matrix of interaction + descriptors for each pair of atoms. These extra descriptors may provide for + additional descriptive power but at the cost of a larger featurized dataset. + + + Examples + -------- + >>> import deepchem as dc + >>> mols = ["C", "CCC"] + >>> featurizer = dc.feat.WeaveFeaturizer() + >>> X = featurizer.featurize(mols) - Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." Journal of computer-aided molecular design 30.8 (2016): 595-608. + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond + fingerprints." Journal of computer-aided molecular design 30.8 (2016): + 595-608. + + Note + ---- + This class requires RDKit to be installed. """ name = ['weave_mol'] - def __init__(self, graph_distance=True, explicit_H=False, - use_chirality=False): - """ + def __init__(self, + graph_distance: bool = True, + explicit_H: bool = False, + use_chirality: bool = False, + max_pair_distance: Optional[int] = None): + """Initialize this featurizer with set parameters. + Parameters ---------- - graph_distance: bool, optional - If true, use graph distance. Otherwise, use Euclidean - distance. - explicit_H: bool, optional + graph_distance: bool, (default True) + If True, use graph distance for distance features. Otherwise, use + Euclidean distance. Note that this means that molecules that this + featurizer is invoked on must have valid conformer information if this + option is set. + explicit_H: bool, (default False) If true, model hydrogens in the molecule. - use_chirality: bool, optional + use_chirality: bool, (default False) If true, use chiral information in the featurization + max_pair_distance: Optional[int], (default None) + This value can be a positive integer or None. This + parameter determines the maximum graph distance at which pair + features are computed. 
For example, if `max_pair_distance==2`, + then pair features are computed only for atoms at most graph + distance 2 apart. If `max_pair_distance` is `None`, all pairs are + considered (effectively infinite `max_pair_distance`) """ # Distance is either graph distance(True) or Euclidean distance(False, # only support datasets providing Cartesian coordinates) @@ -478,10 +814,15 @@ class WeaveFeaturizer(Featurizer): self.explicit_H = explicit_H # If uses use_chirality self.use_chirality = use_chirality + if isinstance(max_pair_distance, int) and max_pair_distance <= 0: + raise ValueError( + "max_pair_distance must either be a positive integer or None") + self.max_pair_distance = max_pair_distance if self.use_chirality: - self.bt_len = bond_fdim_base + len(possible_bond_stereo) + self.bt_len = int(GraphConvConstants.bond_fdim_base) + len( + GraphConvConstants.possible_bond_stereo) else: - self.bt_len = bond_fdim_base + self.bt_len = int(GraphConvConstants.bond_fdim_base) def _featurize(self, mol): """Encodes mol as a WeaveMol object.""" @@ -499,27 +840,28 @@ class WeaveFeaturizer(Featurizer): nodes = np.vstack(nodes) # Get bond lists - edge_list = {} + bond_features_map = {} for b in mol.GetBonds(): - edge_list[tuple(sorted([b.GetBeginAtomIdx(), - b.GetEndAtomIdx()]))] = bond_features( - b, use_chirality=self.use_chirality) + bond_features_map[tuple(sorted([b.GetBeginAtomIdx(), + b.GetEndAtomIdx()]))] = bond_features( + b, use_chirality=self.use_chirality) # Get canonical adjacency list - canon_adj_list = [[] for mol_id in range(len(nodes))] - for edge in edge_list.keys(): - canon_adj_list[edge[0]].append(edge[1]) - canon_adj_list[edge[1]].append(edge[0]) + bond_adj_list = [[] for mol_id in range(len(nodes))] + for bond in bond_features_map.keys(): + bond_adj_list[bond[0]].append(bond[1]) + bond_adj_list[bond[1]].append(bond[0]) # Calculate pair features - pairs = pair_features( + pairs, pair_edges = pair_features( mol, - edge_list, - canon_adj_list, + 
bond_features_map, + bond_adj_list, bt_len=self.bt_len, - graph_distance=self.graph_distance) + graph_distance=self.graph_distance, + max_pair_distance=self.max_pair_distance) - return WeaveMol(nodes, pairs) + return WeaveMol(nodes, pairs, pair_edges) class AtomicConvFeaturizer(ComplexNeighborListFragmentAtomicCoordinates): @@ -603,12 +945,12 @@ class AtomicConvFeaturizer(ComplexNeighborListFragmentAtomicCoordinates): self.epochs = epochs self.labels = labels - def featurize_complexes(self, mol_files, protein_files): + def featurize(self, mol_files, protein_files): features = [] failures = [] for i, (mol_file, protein_pdb) in enumerate(zip(mol_files, protein_files)): logging.info("Featurizing %d / %d" % (i, len(mol_files))) - new_features = self._featurize_complex(mol_file, protein_pdb) + new_features = self._featurize(mol_file, protein_pdb) # Handle loading failures which return None if new_features is not None: features.append(new_features) diff --git a/deepchem/feat/material_featurizers/__init__.py b/deepchem/feat/material_featurizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2b0e181d2f8f7e72aea84fe5b671d47ffcab6edb --- /dev/null +++ b/deepchem/feat/material_featurizers/__init__.py @@ -0,0 +1,8 @@ +""" +Featurizers for inorganic crystals. 
+""" +# flake8: noqa +from deepchem.feat.material_featurizers.element_property_fingerprint import ElementPropertyFingerprint +from deepchem.feat.material_featurizers.sine_coulomb_matrix import SineCoulombMatrix +from deepchem.feat.material_featurizers.cgcnn_featurizer import CGCNNFeaturizer +from deepchem.feat.material_featurizers.elemnet_featurizer import ElemNetFeaturizer diff --git a/deepchem/feat/material_featurizers/cgcnn_featurizer.py b/deepchem/feat/material_featurizers/cgcnn_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..78bf880952461ca151f5caf228ccb49b75af159f --- /dev/null +++ b/deepchem/feat/material_featurizers/cgcnn_featurizer.py @@ -0,0 +1,189 @@ +import os +import json +import numpy as np +from typing import Tuple + +from deepchem.utils.data_utils import download_url, get_data_dir +from deepchem.utils.typing import PymatgenStructure +from deepchem.feat import MaterialStructureFeaturizer +from deepchem.feat.graph_data import GraphData + +ATOM_INIT_JSON_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/atom_init.json' + + +class CGCNNFeaturizer(MaterialStructureFeaturizer): + """ + Calculate structure graph features for crystals. + + Based on the implementation in Crystal Graph Convolutional + Neural Networks (CGCNN). The method constructs a crystal graph + representation including atom features and bond features (neighbor + distances). Neighbors are determined by searching in a sphere around + atoms in the unit cell. A Gaussian filter is applied to neighbor distances. + All units are in angstrom. + + This featurizer requires the optional dependency pymatgen. It may + be useful when 3D coordinates are available and when using graph + network models and crystal graph convolutional networks. + + See [1]_ for more details. + + References + ---------- + .. [1] T. Xie and J. C. Grossman, Phys. Rev. Lett. 120, 2018. 
+ + Examples + -------- + >>> import pymatgen as mg + >>> lattice = mg.Lattice.cubic(4.2) + >>> structure = mg.Structure(lattice, ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]]) + >>> featurizer = CGCNNFeaturizer() + >>> features = featurizer.featurize([structure]) + >>> feature = features[0] + >>> print(type(feature)) + + + Notes + ----- + This class requires Pymatgen to be installed. + """ + + def __init__(self, + radius: float = 8.0, + max_neighbors: float = 12, + step: float = 0.2): + """ + Parameters + ---------- + radius: float (default 8.0) + Radius of sphere for finding neighbors of atoms in unit cell. + max_neighbors: int (default 12) + Maximum number of neighbors to consider when constructing graph. + step: float (default 0.2) + Step size for Gaussian filter. This value is used when building edge features. + """ + + self.radius = radius + self.max_neighbors = int(max_neighbors) + self.step = step + + # load atom_init.json + data_dir = get_data_dir() + download_url(ATOM_INIT_JSON_URL, data_dir) + atom_init_json_path = os.path.join(data_dir, 'atom_init.json') + with open(atom_init_json_path, 'r') as f: + atom_init_json = json.load(f) + + self.atom_features = { + int(key): np.array(value, dtype=np.float32) + for key, value in atom_init_json.items() + } + self.valid_atom_number = set(self.atom_features.keys()) + + def _featurize(self, struct: PymatgenStructure) -> GraphData: + """ + Calculate crystal graph features from pymatgen structure. + + Parameters + ---------- + struct: pymatgen.Structure + A periodic crystal composed of a lattice and a sequence of atomic + sites with 3D coordinates and elements. + + Returns + ------- + graph: GraphData + A crystal graph with CGCNN style features. 
+ """ + + node_features = self._get_node_features(struct) + edge_index, edge_features = self._get_edge_features_and_index(struct) + graph = GraphData(node_features, edge_index, edge_features) + return graph + + def _get_node_features(self, struct: PymatgenStructure) -> np.ndarray: + """ + Get the node feature from `atom_init.json`. The `atom_init.json` was collected + from `data/sample-regression/atom_init.json` in the CGCNN repository. + + Parameters + ---------- + struct: pymatgen.Structure + A periodic crystal composed of a lattice and a sequence of atomic + sites with 3D coordinates and elements. + + Returns + ------- + node_features: np.ndarray + A numpy array of shape `(num_nodes, 92)`. + """ + node_features = [] + for site in struct: + # check whether the atom feature exists or not + assert site.specie.number in self.valid_atom_number + node_features.append(self.atom_features[site.specie.number]) + node_features = np.vstack(node_features).astype(np.float) + return node_features + + def _get_edge_features_and_index( + self, struct: PymatgenStructure) -> Tuple[np.ndarray, np.ndarray]: + """ + Calculate the edge feature and edge index from pymatgen structure. + + Parameters + ---------- + struct: pymatgen.Structure + A periodic crystal composed of a lattice and a sequence of atomic + sites with 3D coordinates and elements. + + Returns + ------- + edge_idx np.ndarray, dtype int + A numpy array of shape with `(2, num_edges)`. + edge_features: np.ndarray + A numpy array of shape with `(num_edges, filter_length)`. The `filter_length` is + (self.radius / self.step) + 1. The edge features were built by applying gaussian + filter to the distance between nodes. 
+ """ + + neighbors = struct.get_all_neighbors(self.radius, include_index=True) + neighbors = [sorted(n, key=lambda x: x[1]) for n in neighbors] + + # construct bi-directed graph + src_idx, dest_idx = [], [] + edge_distances = [] + for node_idx, neighbor in enumerate(neighbors): + neighbor = neighbor[:self.max_neighbors] + src_idx.extend([node_idx] * len(neighbor)) + dest_idx.extend([site[2] for site in neighbor]) + edge_distances.extend([site[1] for site in neighbor]) + + edge_idx = np.array([src_idx, dest_idx], dtype=np.int) + edge_distances = np.array(edge_distances, dtype=np.float) + edge_features = self._gaussian_filter(edge_distances) + return edge_idx, edge_features + + def _gaussian_filter(self, distances: np.ndarray) -> np.ndarray: + """ + Apply Gaussian filter to an array of interatomic distances. + + Parameters + ---------- + distances : np.ndarray + A numpy array of the shape `(num_edges, )`. + + Returns + ------- + expanded_distances: np.ndarray + Expanded distance tensor after Gaussian filtering. + The shape is `(num_edges, filter_length)`. The `filter_length` is + (self.radius / self.step) + 1. 
+ """ + + filt = np.arange(0, self.radius + self.step, self.step) + + # Increase dimension of distance tensor and apply filter + expanded_distances = np.exp( + -(distances[..., np.newaxis] - filt)**2 / self.step**2) + + return expanded_distances diff --git a/deepchem/feat/material_featurizers/element_property_fingerprint.py b/deepchem/feat/material_featurizers/element_property_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..07eda39f77bdb1b2eeb47b30fb8af51651107828 --- /dev/null +++ b/deepchem/feat/material_featurizers/element_property_fingerprint.py @@ -0,0 +1,84 @@ +import numpy as np + +from deepchem.utils.typing import PymatgenComposition +from deepchem.feat import MaterialCompositionFeaturizer +from typing import Any + + +class ElementPropertyFingerprint(MaterialCompositionFeaturizer): + """ + Fingerprint of elemental properties from composition. + + Based on the data source chosen, returns properties and statistics + (min, max, range, mean, standard deviation, mode) for a compound + based on elemental stoichiometry. E.g., the average electronegativity + of atoms in a crystal structure. The chemical fingerprint is a + vector of these statistics. For a full list of properties and statistics, + see ``matminer.featurizers.composition.ElementProperty(data_source).feature_labels()``. + + This featurizer requires the optional dependencies pymatgen and + matminer. It may be useful when only crystal compositions are available + (and not 3D coordinates). + + See references [1]_ [2]_ [3]_ [4]_ for more details. + + References + ---------- + .. [1] MagPie data: Ward, L. et al. npj Comput Mater 2, 16028 (2016). + https://doi.org/10.1038/npjcompumats.2016.28 + .. [2] Deml data: Deml, A. et al. Physical Review B 93, 085142 (2016). + 10.1103/PhysRevB.93.085142 + .. [3] Matminer: Ward, L. et al. Comput. Mater. Sci. 152, 60-69 (2018). + .. [4] Pymatgen: Ong, S.P. et al. Comput. Mater. Sci. 68, 314-319 (2013). 
+ + Examples + -------- + >>> import pymatgen as mg + >>> comp = mg.Composition("Fe2O3") + >>> featurizer = ElementPropertyFingerprint() + >>> features = featurizer.featurize([comp]) + + Notes + ----- + This class requires matminer and Pymatgen to be installed. + `NaN` feature values are automatically converted to 0 by this featurizer. + """ + + def __init__(self, data_source: str = 'matminer'): + """ + Parameters + ---------- + data_source: str of "matminer", "magpie" or "deml" (default "matminer") + Source for element property data. + """ + self.data_source = data_source + self.ep_featurizer: Any = None + + def _featurize(self, composition: PymatgenComposition) -> np.ndarray: + """ + Calculate chemical fingerprint from crystal composition. + + Parameters + ---------- + composition: pymatgen.Composition object + Composition object. + + Returns + ------- + feats: np.ndarray + Vector of properties and statistics derived from chemical + stoichiometry. Some values may be NaN. + """ + if self.ep_featurizer is None: + try: + from matminer.featurizers.composition import ElementProperty + self.ep_featurizer = ElementProperty.from_preset(self.data_source) + except ModuleNotFoundError: + raise ImportError("This class requires matminer to be installed.") + + try: + feats = self.ep_featurizer.featurize(composition) + except: + feats = [] + + return np.nan_to_num(np.array(feats)) diff --git a/deepchem/feat/material_featurizers/elemnet_featurizer.py b/deepchem/feat/material_featurizers/elemnet_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..c4c3d6ad8798099648b9ae5a8a39c2a9761e145d --- /dev/null +++ b/deepchem/feat/material_featurizers/elemnet_featurizer.py @@ -0,0 +1,83 @@ +import numpy as np +from typing import DefaultDict, Union + +from deepchem.utils.typing import PymatgenComposition +from deepchem.feat import MaterialCompositionFeaturizer + +elements_tl = [ + 'H', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Na', 'Mg', 'Al', 'Si', 'P', 'S', + 
'Cl', 'K', 'Ca', 'Sc', 'Ti', 'V', 'Cr', 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', + 'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr', 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', + 'Ru', 'Rh', 'Pd', 'Ag', 'Cd', 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', + 'La', 'Ce', 'Pr', 'Nd', 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', + 'Tm', 'Yb', 'Lu', 'Hf', 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', 'Au', 'Hg', 'Tl', + 'Pb', 'Bi', 'Ac', 'Th', 'Pa', 'U', 'Np', 'Pu' +] + + +class ElemNetFeaturizer(MaterialCompositionFeaturizer): + """ + Fixed size vector of length 86 containing raw fractional elemental + compositions in the compound. The 86 chosen elements are based on the + original implementation at https://github.com/NU-CUCIS/ElemNet. + + Returns a vector containing fractional compositions of each element + in the compound. + + References + ---------- + .. [1] Jha, D., Ward, L., Paul, A. et al. Sci Rep 8, 17593 (2018). + https://doi.org/10.1038/s41598-018-35934-y + + Examples + -------- + >>> import pymatgen as mg + >>> comp = "Fe2O3" + >>> featurizer = ElemNetFeaturizer() + >>> features = featurizer.featurize([comp]) + + Notes + ----- + This class requires Pymatgen to be installed. + """ + + def get_vector(self, comp: DefaultDict) -> Union[np.ndarray, None]: + """ + Converts a dictionary containing element names and corresponding + compositional fractions into a vector of fractions. + + Parameters + ---------- + comp: collections.defaultdict object + Dictionary mapping element names to fractional compositions. + + Returns + ------- + fractions: np.ndarray + Vector of fractional compositions of each element. + """ + if all(e in elements_tl for e in comp): + fractions = np.array([comp[e] if e in comp else 0 for e in elements_tl], + np.float32) + else: + fractions = None + return fractions + + def _featurize(self, composition: PymatgenComposition) -> np.ndarray: + """ + Calculate 86 dimensional vector containing fractional compositions of + each element in the compound. 
+ + Parameters + ---------- + composition: pymatgen.Composition object + Composition object. + + Returns + ------- + feats: np.ndarray + 86 dimensional vector containing fractional compositions of elements. + """ + fractions = composition.fractional_composition.get_el_amt_dict() + feat = self.get_vector(fractions) + return feat diff --git a/deepchem/feat/material_featurizers/sine_coulomb_matrix.py b/deepchem/feat/material_featurizers/sine_coulomb_matrix.py new file mode 100644 index 0000000000000000000000000000000000000000..1330afa94bc46d0f167b18547137286daca484fe --- /dev/null +++ b/deepchem/feat/material_featurizers/sine_coulomb_matrix.py @@ -0,0 +1,98 @@ +import numpy as np + +from deepchem.utils.typing import PymatgenStructure +from deepchem.feat import MaterialStructureFeaturizer +from deepchem.utils.data_utils import pad_array +from typing import Any + + +class SineCoulombMatrix(MaterialStructureFeaturizer): + """ + Calculate sine Coulomb matrix for crystals. + + A variant of Coulomb matrix for periodic crystals. + + The sine Coulomb matrix is identical to the Coulomb matrix, except + that the inverse distance function is replaced by the inverse of + sin**2 of the vector between sites which are periodic in the + dimensions of the crystal lattice. + + Features are flattened into a vector of matrix eigenvalues by default + for ML-readiness. To ensure that all feature vectors are equal + length, the maximum number of atoms (eigenvalues) in the input + dataset must be specified. + + This featurizer requires the optional dependencies pymatgen and + matminer. It may be useful when crystal structures with 3D coordinates + are available. + + See [1]_ for more details. + + References + ---------- + .. [1] Faber et al. Inter. J. Quantum Chem. 115, 16, 2015. 
+ + Examples + -------- + >>> import pymatgen as mg + >>> lattice = mg.Lattice.cubic(4.2) + >>> structure = mg.Structure(lattice, ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]]) + >>> featurizer = SineCoulombMatrix(max_atoms=2) + >>> features = featurizer.featurize([structure]) + + Notes + ----- + This class requires matminer and Pymatgen to be installed. + """ + + def __init__(self, max_atoms: int = 100, flatten: bool = True): + """ + Parameters + ---------- + max_atoms: int (default 100) + Maximum number of atoms for any crystal in the dataset. Used to + pad the Coulomb matrix. + flatten: bool (default True) + Return flattened vector of matrix eigenvalues. + """ + self.max_atoms = max_atoms + self.flatten = flatten + self.scm: Any = None + + def _featurize(self, struct: PymatgenStructure) -> np.ndarray: + """ + Calculate sine Coulomb matrix from pymatgen structure. + + Parameters + ---------- + struct: pymatgen.Structure + A periodic crystal composed of a lattice and a sequence of atomic + sites with 3D coordinates and elements. + + Returns + ------- + features: np.ndarray + 2D sine Coulomb matrix with shape (max_atoms, max_atoms), + or 1D matrix eigenvalues with shape (max_atoms,). 
+ """ + if self.scm is None: + try: + from matminer.featurizers.structure import SineCoulombMatrix as SCM + self.scm = SCM(flatten=False) + except ModuleNotFoundError: + raise ImportError("This class requires matminer to be installed.") + + # Get full N x N SCM + sine_mat = self.scm.featurize(struct) + + if self.flatten: + eigs, _ = np.linalg.eig(sine_mat) + zeros = np.zeros(self.max_atoms) + zeros[:len(eigs[0])] = eigs[0] + features = zeros + else: + features = pad_array(sine_mat, self.max_atoms) + + features = np.asarray(features) + + return features diff --git a/deepchem/feat/materials_featurizers.py b/deepchem/feat/materials_featurizers.py deleted file mode 100644 index 05797130bab78f3109232cf125f832f17be91183..0000000000000000000000000000000000000000 --- a/deepchem/feat/materials_featurizers.py +++ /dev/null @@ -1,299 +0,0 @@ -""" -Featurizers for inorganic crystals. -""" - -import numpy as np - -from deepchem.feat import Featurizer -from deepchem.utils import pad_array - - -class ElementPropertyFingerprint(Featurizer): - """ - Fingerprint of elemental properties from composition. - - Based on the data source chosen, returns properties and statistics - (min, max, range, mean, standard deviation, mode) for a compound - based on elemental stoichiometry. E.g., the average electronegativity - of atoms in a crystal structure. The chemical fingerprint is a - vector of these statistics. For a full list of properties and statistics, - see ``matminer.featurizers.composition.ElementProperty(data_source).feature_labels()``. - - This featurizer requires the optional dependencies pymatgen and - matminer. It may be useful when only crystal compositions are available - (and not 3D coordinates). - - References - ---------- - MagPie data: Ward, L. et al. npj Comput Mater 2, 16028 (2016). - https://doi.org/10.1038/npjcompumats.2016.28 - - Deml data: Deml, A. et al. Physical Review B 93, 085142 (2016). - 10.1103/PhysRevB.93.085142 - - Matminer: Ward, L. et al. Comput. Mater. 
Sci. 152, 60-69 (2018). - - Pymatgen: Ong, S.P. et al. Comput. Mater. Sci. 68, 314-319 (2013). - - """ - - def __init__(self, data_source='matminer'): - """ - Parameters - ---------- - data_source : {"matminer", "magpie", "deml"} - Source for element property data. - - """ - - self.data_source = data_source - - def _featurize(self, comp): - """ - Calculate chemical fingerprint from crystal composition. - - Parameters - ---------- - comp : str - Reduced formula of crystal. - - Returns - ------- - feats: np.ndarray - Vector of properties and statistics derived from chemical - stoichiometry. Some values may be NaN. - - """ - - from pymatgen import Composition - from matminer.featurizers.composition import ElementProperty - - # Get pymatgen Composition object - c = Composition(comp) - - ep = ElementProperty.from_preset(self.data_source) - - try: - feats = ep.featurize(c) - except: - feats = [] - - return np.array(feats) - - -class SineCoulombMatrix(Featurizer): - """ - Calculate sine Coulomb matrix for crystals. - - A variant of Coulomb matrix for periodic crystals. - - The sine Coulomb matrix is identical to the Coulomb matrix, except - that the inverse distance function is replaced by the inverse of - sin**2 of the vector between sites which are periodic in the - dimensions of the crystal lattice. - - Features are flattened into a vector of matrix eigenvalues by default - for ML-readiness. To ensure that all feature vectors are equal - length, the maximum number of atoms (eigenvalues) in the input - dataset must be specified. - - This featurizer requires the optional dependencies pymatgen and - matminer. It may be useful when crystal structures with 3D coordinates - are available. - - References - ---------- - Faber et al. Inter. J. Quantum Chem. 115, 16, 2015. - - """ - - def __init__(self, max_atoms, flatten=True): - """ - Parameters - ---------- - max_atoms : int - Maximum number of atoms for any crystal in the dataset. Used to - pad the Coulomb matrix. 
- flatten : bool (default True) - Return flattened vector of matrix eigenvalues. - - """ - - self.max_atoms = int(max_atoms) - self.flatten = flatten - - def _featurize(self, struct): - """ - Calculate sine Coulomb matrix from pymatgen structure. - - Parameters - ---------- - struct : dict - Json-serializable dictionary representation of pymatgen.core.structure - https://pymatgen.org/pymatgen.core.structure.html - - Returns - ------- - features: np.ndarray - 2D sine Coulomb matrix with shape (max_atoms, max_atoms), - or 1D matrix eigenvalues with shape (max_atoms,). - - """ - - from pymatgen import Structure - from matminer.featurizers.structure import SineCoulombMatrix as SCM - - s = Structure.from_dict(struct) - - # Get full N x N SCM - scm = SCM(flatten=False) - sine_mat = scm.featurize(s) - - if self.flatten: - eigs, _ = np.linalg.eig(sine_mat) - zeros = np.zeros((self.max_atoms,)) - zeros[:len(eigs)] = eigs - features = zeros - else: - features = pad_array(sine_mat, self.max_atoms) - - features = np.asarray(features) - - return features - - -class StructureGraphFeaturizer(Featurizer): - """ - Calculate structure graph features for crystals. - - Based on the implementation in Crystal Graph Convolutional - Neural Networks (CGCNN). The method constructs a crystal graph - representation including atom features (atomic numbers) and bond - features (neighbor distances). Neighbors are determined by searching - in a sphere around atoms in the unit cell. A Gaussian filter is - applied to neighbor distances. All units are in angstrom. - - This featurizer requires the optional dependency pymatgen. It may - be useful when 3D coordinates are available and when using graph - network models and crystal graph convolutional networks. - - References - ---------- - T. Xie and J. C. Grossman, Phys. Rev. Lett. 120, 2018. 
- - """ - - def __init__(self, radius=8.0, max_neighbors=12, step=0.2): - """ - Parameters - ---------- - radius : float (default 8.0) - Radius of sphere for finding neighbors of atoms in unit cell. - max_neighbors : int (default 12) - Maximum number of neighbors to consider when constructing graph. - step : float (default 0.2) - Step size for Gaussian filter. - - """ - - self.radius = radius - self.max_neighbors = int(max_neighbors) - self.step = step - - def _featurize(self, struct): - """ - Calculate crystal graph features from pymatgen structure. - - Parameters - ---------- - struct : dict - Json-serializable dictionary representation of pymatgen.core.structure - https://pymatgen.org/pymatgen.core.structure.html - - Returns - ------- - feats: np.array - Atomic and bond features. Atomic features are atomic numbers - and bond features are Gaussian filtered interatomic distances. - - """ - - from pymatgen import Structure - - # Get pymatgen structure object - s = Structure.from_dict(struct) - - features = self._get_structure_graph_features(s) - features = np.array(features) - - return features - - def _get_structure_graph_features(self, struct): - """ - Calculate structure graph features from pymatgen structure. - - Parameters - ---------- - struct : pymatgen.core.structure - A periodic crystal composed of a lattice and a sequence of atomic - sites with 3D coordinates and elements. 
- - Returns - ------- - feats: tuple[np.array] - atomic numbers, filtered interatomic distance tensor, and neighbor ids - - """ - - atom_features = np.array([site.specie.Z for site in struct], dtype='int32') - - neighbors = struct.get_all_neighbors(self.radius, include_index=True) - neighbors = [sorted(n, key=lambda x: x[1]) for n in neighbors] - - # Get list of lists of neighbor distances - neighbor_features, neighbor_idx = [], [] - for neighbor in neighbors: - if len(neighbor) < self.max_neighbors: - neighbor_idx.append( - list(map(lambda x: x[2], neighbor)) + - [0] * (self.max_neighbors - len(neighbor))) - neighbor_features.append( - list(map(lambda x: x[1], neighbor)) + - [self.radius + 1.] * (self.max_neighbors - len(neighbor))) - else: - neighbor_idx.append( - list(map(lambda x: x[2], neighbor[:self.max_neighbors]))) - neighbor_features.append( - list(map(lambda x: x[1], neighbor[:self.max_neighbors]))) - - neighbor_features = np.array(neighbor_features) - neighbor_idx = np.array(neighbor_idx) - neighbor_features = self._gaussian_filter(neighbor_features) - neighbor_features = np.vstack(neighbor_features) - - return (atom_features, neighbor_features, neighbor_idx) - - def _gaussian_filter(self, distances): - """ - Apply Gaussian filter to an array of interatomic distances. - - Parameters - ---------- - distances : np.array - Matrix of distances of dimension (num atoms) x (max neighbors). - - Returns - ------- - expanded_distances: np.array - Expanded distance tensor after Gaussian filtering. 
Dimensionality - is (num atoms) x (max neighbors) x (len(filt)) - - """ - - filt = np.arange(0, self.radius + self.step, self.step) - - # Increase dimension of distance tensor and apply filter - expanded_distances = np.exp( - -(distances[..., np.newaxis] - filt)**2 / self.step**2) - - return expanded_distances diff --git a/deepchem/feat/mol_graphs.py b/deepchem/feat/mol_graphs.py index cb269be78d64049eb3d9e5dc84f1293f644605ac..6335e6c57dcd97aff0b568cfe238fe964ce1f893 100644 --- a/deepchem/feat/mol_graphs.py +++ b/deepchem/feat/mol_graphs.py @@ -1,9 +1,7 @@ """ Data Structures used to represented molecules for convolutions. """ -__author__ = "Han Altae-Tran and Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" +# flake8: noqa import csv import random @@ -375,16 +373,23 @@ class WeaveMol(object): """Molecular featurization object for weave convolutions. These objects are produced by WeaveFeaturizer, and feed into - WeaveModel. The underlying implementation is inspired by: + WeaveModel. The underlying implementation is inspired by [1]_. - Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." Journal of computer-aided molecular design 30.8 (2016): 595-608. + + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." Journal of computer-aided molecular design 30.8 (2016): 595-608. 
""" - def __init__(self, nodes, pairs): + def __init__(self, nodes, pairs, pair_edges): self.nodes = nodes self.pairs = pairs self.num_atoms = self.nodes.shape[0] self.n_features = self.nodes.shape[1] + self.pair_edges = pair_edges + + def get_pair_edges(self): + return self.pair_edges def get_pair_features(self): return self.pairs diff --git a/deepchem/feat/molecule_featurizers/__init__.py b/deepchem/feat/molecule_featurizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4b63f8aeaf345ec65df54fbcd769ee3609822424 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/__init__.py @@ -0,0 +1,16 @@ +# flake8: noqa +from deepchem.feat.molecule_featurizers.atomic_coordinates import AtomicCoordinates +from deepchem.feat.molecule_featurizers.bp_symmetry_function_input import BPSymmetryFunctionInput +from deepchem.feat.molecule_featurizers.circular_fingerprint import CircularFingerprint +from deepchem.feat.molecule_featurizers.coulomb_matrices import CoulombMatrix +from deepchem.feat.molecule_featurizers.coulomb_matrices import CoulombMatrixEig +from deepchem.feat.molecule_featurizers.maccs_keys_fingerprint import MACCSKeysFingerprint +from deepchem.feat.molecule_featurizers.mordred_descriptors import MordredDescriptors +from deepchem.feat.molecule_featurizers.mol2vec_fingerprint import Mol2VecFingerprint +from deepchem.feat.molecule_featurizers.one_hot_featurizer import OneHotFeaturizer +from deepchem.feat.molecule_featurizers.pubchem_fingerprint import PubChemFingerprint +from deepchem.feat.molecule_featurizers.raw_featurizer import RawFeaturizer +from deepchem.feat.molecule_featurizers.rdkit_descriptors import RDKitDescriptors +from deepchem.feat.molecule_featurizers.smiles_to_image import SmilesToImage +from deepchem.feat.molecule_featurizers.smiles_to_seq import SmilesToSeq, create_char_to_idx +from deepchem.feat.molecule_featurizers.mol_graph_conv_featurizer import MolGraphConvFeaturizer diff --git 
a/deepchem/feat/molecule_featurizers/atomic_coordinates.py b/deepchem/feat/molecule_featurizers/atomic_coordinates.py new file mode 100644 index 0000000000000000000000000000000000000000..6a73a1dd27c6edc7630169d5bcd713de5d64ae5d --- /dev/null +++ b/deepchem/feat/molecule_featurizers/atomic_coordinates.py @@ -0,0 +1,75 @@ +""" +Atomic coordinate featurizer. +""" +import numpy as np + +from deepchem.feat.base_classes import MolecularFeaturizer +from deepchem.utils.typing import RDKitMol + + +class AtomicCoordinates(MolecularFeaturizer): + """Calculate atomic coordinates. + + Notes + ---- + This class requires RDKit to be installed. + """ + + def __init__(self, use_bohr: bool = False): + """ + Parameters + ---------- + use_bohr: bool, optional (default False) + Whether to use bohr or angstrom as a coordinate unit. + """ + try: + from rdkit import Chem # noqa + from rdkit.Chem import AllChem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.use_bohr = use_bohr + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Calculate atomic coordinates. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + A numpy array of atomic coordinates. The shape is `(n_atoms, 3)`. + """ + from rdkit import Chem + from rdkit.Chem import AllChem + + # Check whether num_confs >=1 or not + num_confs = len(mol.GetConformers()) + if num_confs == 0: + mol = Chem.AddHs(mol) + AllChem.EmbedMolecule(mol, AllChem.ETKDG()) + mol = Chem.RemoveHs(mol) + + N = mol.GetNumAtoms() + coords = np.zeros((N, 3)) + + # RDKit stores atomic coordinates in Angstrom. Atomic unit of length is the + # bohr (1 bohr = 0.529177 Angstrom). Converting units makes gradient calculation + # consistent with most QM software packages. 
+ if self.use_bohr: + coords_list = [ + mol.GetConformer(0).GetAtomPosition(i).__idiv__(0.52917721092) + for i in range(N) + ] + else: + coords_list = [mol.GetConformer(0).GetAtomPosition(i) for i in range(N)] + + for atom in range(N): + coords[atom, 0] = coords_list[atom].x + coords[atom, 1] = coords_list[atom].y + coords[atom, 2] = coords_list[atom].z + + return coords diff --git a/deepchem/feat/molecule_featurizers/bp_symmetry_function_input.py b/deepchem/feat/molecule_featurizers/bp_symmetry_function_input.py new file mode 100644 index 0000000000000000000000000000000000000000..1e44a4692ab4ffb4dd6207fdf517d3ee52fa952b --- /dev/null +++ b/deepchem/feat/molecule_featurizers/bp_symmetry_function_input.py @@ -0,0 +1,55 @@ +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.utils.data_utils import pad_array +from deepchem.feat.base_classes import MolecularFeaturizer +from deepchem.feat.molecule_featurizers.atomic_coordinates import AtomicCoordinates + + +class BPSymmetryFunctionInput(MolecularFeaturizer): + """Calculate symmetry function for each atom in the molecules + + This method is described in [1]_ + + References + ---------- + .. [1] Behler, Jörg, and Michele Parrinello. "Generalized neural-network + representation of high-dimensional potential-energy surfaces." Physical + review letters 98.14 (2007): 146401. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, max_atoms: int): + """Initialize this featurizer. + + Parameters + ---------- + max_atoms: int + The maximum number of atoms expected for molecules this featurizer will + process. + """ + self.max_atoms = max_atoms + self.coordfeat = AtomicCoordinates(use_bohr=True) + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Calculate symmetry function. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + A numpy array of symmetry function. The shape is `(max_atoms, 4)`. 
+ """ + coordinates = self.coordfeat._featurize(mol) + atom_numbers = np.array([atom.GetAtomicNum() for atom in mol.GetAtoms()]) + atom_numbers = np.expand_dims(atom_numbers, axis=1) + assert atom_numbers.shape[0] == coordinates.shape[0] + features = np.concatenate([atom_numbers, coordinates], axis=1) + return pad_array(features, (self.max_atoms, 4)) diff --git a/deepchem/feat/fingerprints.py b/deepchem/feat/molecule_featurizers/circular_fingerprint.py similarity index 57% rename from deepchem/feat/fingerprints.py rename to deepchem/feat/molecule_featurizers/circular_fingerprint.py index 75b62c4ac55b68dc451a700344458d5cf326440a..732402400eef8f2320103c17b2c29a0d80741b97 100644 --- a/deepchem/feat/fingerprints.py +++ b/deepchem/feat/molecule_featurizers/circular_fingerprint.py @@ -1,47 +1,66 @@ """ Topological fingerprints. """ -__author__ = "Steven Kearnes" -__copyright__ = "Copyright 2014, Stanford University" -__license__ = "MIT" +from typing import Dict -from deepchem.feat import Featurizer +import numpy as np +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer -class CircularFingerprint(Featurizer): - """ - Circular (Morgan) fingerprints. - Parameters +class CircularFingerprint(MolecularFeaturizer): + """Circular (Morgan) fingerprints. + + Extended Connectivity Circular Fingerprints compute a bag-of-words style + representation of a molecule by breaking it into local neighborhoods and + hashing into a bit vector of the specified size. See [1]_ for more details. + + References ---------- - radius : int, optional (default 2) + .. [1] Rogers, David, and Mathew Hahn. "Extended-connectivity fingerprints." + Journal of chemical information and modeling 50.5 (2010): 742-754. + + Notes + ----- + This class requires RDKit to be installed. 
+ """ + + def __init__(self, + radius: int = 2, + size: int = 2048, + chiral: bool = False, + bonds: bool = True, + features: bool = False, + sparse: bool = False, + smiles: bool = False): + """ + Parameters + ---------- + radius: int, optional (default 2) Fingerprint radius. - size : int, optional (default 2048) + size: int, optional (default 2048) Length of generated bit vector. - chiral : bool, optional (default False) + chiral: bool, optional (default False) Whether to consider chirality in fingerprint generation. - bonds : bool, optional (default True) + bonds: bool, optional (default True) Whether to consider bond order in fingerprint generation. - features : bool, optional (default False) + features: bool, optional (default False) Whether to use feature information instead of atom information; see RDKit docs for more info. - sparse : bool, optional (default False) + sparse: bool, optional (default False) Whether to return a dict for each molecule containing the sparse fingerprint. - smiles : bool, optional (default False) + smiles: bool, optional (default False) Whether to calculate SMILES strings for fragment IDs (only applicable when calculating sparse fingerprints). - """ - name = 'circular' + """ + try: + from rdkit import Chem # noqa + from rdkit.Chem import rdMolDescriptors # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") - def __init__(self, - radius=2, - size=2048, - chiral=False, - bonds=True, - features=False, - sparse=False, - smiles=False): self.radius = radius self.size = size self.chiral = chiral @@ -50,19 +69,24 @@ class CircularFingerprint(Featurizer): self.sparse = sparse self.smiles = smiles - def _featurize(self, mol): - """ - Calculate circular fingerprint. + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Calculate circular fingerprint. Parameters ---------- - mol : RDKit Mol - Molecule. 
+ mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + A numpy array of circular fingerprint. """ from rdkit import Chem from rdkit.Chem import rdMolDescriptors + if self.sparse: - info = {} + info: Dict = {} fp = rdMolDescriptors.GetMorganFingerprint( mol, self.radius, @@ -90,6 +114,7 @@ class CircularFingerprint(Featurizer): useChirality=self.chiral, useBondTypes=self.bonds, useFeatures=self.features) + fp = np.asarray(fp, dtype=np.float) return fp def __hash__(self): diff --git a/deepchem/feat/molecule_featurizers/coulomb_matrices.py b/deepchem/feat/molecule_featurizers/coulomb_matrices.py new file mode 100644 index 0000000000000000000000000000000000000000..bf32db30892597568b1389b9c792eb5161c56df7 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/coulomb_matrices.py @@ -0,0 +1,298 @@ +""" +Generate coulomb matrices for molecules. + +See Montavon et al., _New Journal of Physics_ __15__ (2013) 095003. +""" +import numpy as np +from typing import Any, List, Optional + +from deepchem.utils.typing import RDKitMol +from deepchem.utils.data_utils import pad_array +from deepchem.feat.base_classes import MolecularFeaturizer + + +class CoulombMatrix(MolecularFeaturizer): + """Calculate Coulomb matrices for molecules. + + Coulomb matrices provide a representation of the electronic structure of a + molecule. This method is described in [1]_. + + Examples + -------- + >>> import deepchem as dc + >>> featurizers = dc.feat.CoulombMatrix(max_atoms=23) + >>> input_file = 'deepchem/feat/tests/data/water.sdf' # really backed by water.sdf.csv + >>> tasks = ["atomization_energy"] + >>> loader = dc.data.SDFLoader(tasks, featurizer=featurizers) + >>> dataset = loader.create_dataset(input_file) + + + References + ---------- + .. [1] Montavon, Grégoire, et al. "Learning invariant representations of + molecules for atomization energy prediction." Advances in neural information + processing systems. 2012. 
+ + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, + max_atoms: int, + remove_hydrogens: bool = False, + randomize: bool = False, + upper_tri: bool = False, + n_samples: int = 1, + seed: Optional[int] = None): + """Initialize this featurizer. + + Parameters + ---------- + max_atoms: int + The maximum number of atoms expected for molecules this featurizer will + process. + remove_hydrogens: bool, optional (default False) + If True, remove hydrogens before processing them. + randomize: bool, optional (default False) + If True, use method `randomize_coulomb_matrices` to randomize Coulomb matrices. + upper_tri: bool, optional (default False) + Generate only upper triangle part of Coulomb matrices. + n_samples: int, optional (default 1) + If `randomize` is set to True, the number of random samples to draw. + seed: int, optional (default None) + Random seed to use. + """ + try: + from rdkit import Chem # noqa + from rdkit.Chem import AllChem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.max_atoms = int(max_atoms) + self.remove_hydrogens = remove_hydrogens + self.randomize = randomize + self.upper_tri = upper_tri + self.n_samples = n_samples + if seed is not None: + seed = int(seed) + self.seed = seed + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate Coulomb matrices for molecules. If extra randomized + matrices are generated, they are treated as if they are features + for additional conformers. + + Since Coulomb matrices are symmetric, only the (flattened) upper + triangular portion is returned. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + The coulomb matrices of the given molecule. + The default shape is `(num_confs, max_atoms, max_atoms)`. + If num_confs == 1, the shape is `(max_atoms, max_atoms)`. 
+ """ + features = self.coulomb_matrix(mol) + if self.upper_tri: + features = [f[np.triu_indices_from(f)] for f in features] + features = np.asarray(features) + if features.shape[0] == 1: + # `(1, max_atoms, max_atoms)` -> `(max_atoms, max_atoms)` + features = np.squeeze(features, axis=0) + return features + + def coulomb_matrix(self, mol: RDKitMol) -> np.ndarray: + """ + Generate Coulomb matrices for each conformer of the given molecule. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + The coulomb matrices of the given molecule + """ + from rdkit import Chem + from rdkit.Chem import AllChem + + # Check whether num_confs >=1 or not + num_confs = len(mol.GetConformers()) + if num_confs == 0: + mol = Chem.AddHs(mol) + AllChem.EmbedMolecule(mol, AllChem.ETKDG()) + + if self.remove_hydrogens: + mol = Chem.RemoveHs(mol) + n_atoms = mol.GetNumAtoms() + z = [atom.GetAtomicNum() for atom in mol.GetAtoms()] + rval = [] + for conf in mol.GetConformers(): + d = self.get_interatomic_distances(conf) + m = np.outer(z, z) / d + m[range(n_atoms), range(n_atoms)] = 0.5 * np.array(z)**2.4 + if self.randomize: + for random_m in self.randomize_coulomb_matrix(m): + random_m = pad_array(random_m, self.max_atoms) + rval.append(random_m) + else: + m = pad_array(m, self.max_atoms) + rval.append(m) + rval = np.asarray(rval) + return rval + + def randomize_coulomb_matrix(self, m: np.ndarray) -> List[np.ndarray]: + """Randomize a Coulomb matrix as decribed in [1]_: + + 1. Compute row norms for M in a vector row_norms. + 2. Sample a zero-mean unit-variance noise vector e with dimension + equal to row_norms. + 3. Permute the rows and columns of M with the permutation that + sorts row_norms + e. + + Parameters + ---------- + m: np.ndarray + Coulomb matrix. + + Returns + ------- + List[np.ndarray] + List of the random coulomb matrix + + References + ---------- + .. 
[1] Montavon et al., New Journal of Physics, 15, (2013), 095003 + """ + rval = [] + row_norms = np.asarray([np.linalg.norm(row) for row in m], dtype=float) + rng = np.random.RandomState(self.seed) + for i in range(self.n_samples): + e = rng.normal(size=row_norms.size) + p = np.argsort(row_norms + e) + new = m[p][:, p] # permute rows first, then columns + rval.append(new) + return rval + + @staticmethod + def get_interatomic_distances(conf: Any) -> np.ndarray: + """ + Get interatomic distances for atoms in a molecular conformer. + + Parameters + ---------- + conf: rdkit.Chem.rdchem.Conformer + Molecule conformer. + + Returns + ------- + np.ndarray + The distances matrix for all atoms in a molecule + """ + n_atoms = conf.GetNumAtoms() + coords = [ + # Convert AtomPositions from Angstrom to bohr (atomic units) + conf.GetAtomPosition(i).__idiv__(0.52917721092) for i in range(n_atoms) + ] + d = np.zeros((n_atoms, n_atoms), dtype=float) + for i in range(n_atoms): + for j in range(i): + d[i, j] = coords[i].Distance(coords[j]) + d[j, i] = d[i, j] + return d + + +class CoulombMatrixEig(CoulombMatrix): + """Calculate the eigenvalues of Coulomb matrices for molecules. + + This featurizer computes the eigenvalues of the Coulomb matrices for provided + molecules. Coulomb matrices are described in [1]_. + + Examples + -------- + >>> import deepchem as dc + >>> featurizers = dc.feat.CoulombMatrixEig(max_atoms=23) + >>> input_file = 'deepchem/feat/tests/data/water.sdf' # really backed by water.sdf.csv + >>> tasks = ["atomization_energy"] + >>> loader = dc.data.SDFLoader(tasks, featurizer=featurizers) + >>> dataset = loader.create_dataset(input_file) + + References + ---------- + .. [1] Montavon, Grégoire, et al. "Learning invariant representations of + molecules for atomization energy prediction." Advances in neural information + processing systems. 2012. 
+ """ + + def __init__(self, + max_atoms: int, + remove_hydrogens: bool = False, + randomize: bool = False, + n_samples: int = 1, + seed: Optional[int] = None): + """Initialize this featurizer. + + Parameters + ---------- + max_atoms: int + The maximum number of atoms expected for molecules this featurizer will + process. + remove_hydrogens: bool, optional (default False) + If True, remove hydrogens before processing them. + randomize: bool, optional (default False) + If True, use method `randomize_coulomb_matrices` to randomize Coulomb matrices. + n_samples: int, optional (default 1) + If `randomize` is set to True, the number of random samples to draw. + seed: int, optional (default None) + Random seed to use. + """ + self.max_atoms = int(max_atoms) + self.remove_hydrogens = remove_hydrogens + self.randomize = randomize + self.n_samples = n_samples + if seed is not None: + seed = int(seed) + self.seed = seed + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate eigenvalues of Coulomb matrix for molecules. Eigenvalues + are returned sorted by absolute value in descending order and padded + by max_atoms. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + The eigenvalues of Coulomb matrix for molecules. + The default shape is `(num_confs, max_atoms)`. + If num_confs == 1, the shape is `(max_atoms,)`. 
+ """ + cmat = self.coulomb_matrix(mol) + features_list = [] + for f in cmat: + w, v = np.linalg.eig(f) + w_abs = np.abs(w) + sortidx = np.argsort(w_abs) + sortidx = sortidx[::-1] + w = w[sortidx] + f = pad_array(w, self.max_atoms) + features_list.append(f) + features = np.asarray(features_list) + if features.shape[0] == 1: + # `(1, max_atoms)` -> `(max_atoms,)` + features = np.squeeze(features, axis=0) + return features diff --git a/deepchem/feat/molecule_featurizers/maccs_keys_fingerprint.py b/deepchem/feat/molecule_featurizers/maccs_keys_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..a70c0441852aebf8f31b0658c666718e38c2e3f1 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/maccs_keys_fingerprint.py @@ -0,0 +1,47 @@ +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class MACCSKeysFingerprint(MolecularFeaturizer): + """MACCS Keys Fingerprint. + + The MACCS (Molecular ACCess System) keys are one of the most commonly used structural keys. + Please confirm the details in [1]_, [2]_. + + References + ---------- + .. [1] Durant, Joseph L., et al. "Reoptimization of MDL keys for use in drug discovery." + Journal of chemical information and computer sciences 42.6 (2002): 1273-1280. + .. [2] https://github.com/rdkit/rdkit/blob/master/rdkit/Chem/MACCSkeys.py + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self): + """Initialize this featurizer.""" + try: + from rdkit.Chem.AllChem import GetMACCSKeysFingerprint # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.calculator = GetMACCSKeysFingerprint + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate MACCS keys fingerprint. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + 1D array of RDKit descriptors for `mol`. 
The length is 167. + """ + return self.calculator(mol) diff --git a/deepchem/feat/molecule_featurizers/mol2vec_fingerprint.py b/deepchem/feat/molecule_featurizers/mol2vec_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..2085d8a37c9015fa69edc5a4e104a5a2b9d9c5c0 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/mol2vec_fingerprint.py @@ -0,0 +1,97 @@ +from os import path +from typing import Optional + +import numpy as np + +from deepchem.utils import download_url, get_data_dir, untargz_file +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + +DEFAULT_PRETRAINED_MODEL_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/trained_models/mol2vec_model_300dim.tar.gz' + + +class Mol2VecFingerprint(MolecularFeaturizer): + """Mol2Vec fingerprints. + + This class convert molecules to vector representations by using Mol2Vec. + Mol2Vec is an unsupervised machine learning approach to learn vector representations + of molecular substructures and the algorithm is based on Word2Vec, which is + one of the most popular technique to learn word embeddings using neural network in NLP. + Please see the details from [1]_. + + The Mol2Vec requires the pretrained model, so we use the model which is put on the mol2vec + github repository [2]_. The default model was trained on 20 million compounds downloaded + from ZINC using the following paramters. + + - radius 1 + - UNK to replace all identifiers that appear less than 4 times + - skip-gram and window size of 10 + - embeddings size 300 + + References + ---------- + .. [1] Jaeger, Sabrina, Simone Fulle, and Samo Turk. "Mol2vec: unsupervised machine learning + approach with chemical intuition." Journal of chemical information and modeling 58.1 (2018): 27-35. + .. [2] https://github.com/samoturk/mol2vec/ + + Notes + ----- + This class requires mol2vec to be installed. 
+ """ + + def __init__(self, + pretrain_model_path: Optional[str] = None, + radius: int = 1, + unseen: str = 'UNK'): + """ + Parameters + ---------- + pretrain_file: str, optional + The path for pretrained model. If this value is None, we use the model which is put on + github repository (https://github.com/samoturk/mol2vec/tree/master/examples/models). + The model is trained on 20 million compounds downloaded from ZINC. + radius: int, optional (default 1) + The fingerprint radius. The default value was used to train the model which is put on + github repository. + unseen: str, optional (default 'UNK') + The string to used to replace uncommon words/identifiers while training. + """ + try: + from gensim.models import word2vec + from mol2vec.features import mol2alt_sentence, sentences2vec + except ModuleNotFoundError: + raise ImportError("This class requires mol2vec to be installed.") + + self.radius = radius + self.unseen = unseen + self.sentences2vec = sentences2vec + self.mol2alt_sentence = mol2alt_sentence + if pretrain_model_path is None: + data_dir = get_data_dir() + pretrain_model_path = path.join(data_dir, 'mol2vec_model_300dim.pkl') + if not path.exists(pretrain_model_path): + targz_file = path.join(data_dir, 'mol2vec_model_300dim.tar.gz') + if not path.exists(targz_file): + download_url(DEFAULT_PRETRAINED_MODEL_URL, data_dir) + untargz_file( + path.join(data_dir, 'mol2vec_model_300dim.tar.gz'), data_dir) + # load pretrained models + self.model = word2vec.Word2Vec.load(pretrain_model_path) + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate Mordred descriptors. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + 1D array of mol2vec fingerprint. The default length is 300. 
+ """ + sentence = self.mol2alt_sentence(mol, self.radius) + feature = self.sentences2vec([sentence], self.model, unseen=self.unseen)[0] + return feature diff --git a/deepchem/feat/molecule_featurizers/mol_graph_conv_featurizer.py b/deepchem/feat/molecule_featurizers/mol_graph_conv_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..e24b7239e4fad80bd761e17d8b372fa48a88d13d --- /dev/null +++ b/deepchem/feat/molecule_featurizers/mol_graph_conv_featurizer.py @@ -0,0 +1,221 @@ +from typing import List, Tuple +import numpy as np + +from deepchem.utils.typing import RDKitAtom, RDKitBond, RDKitMol +from deepchem.feat.graph_data import GraphData +from deepchem.feat.base_classes import MolecularFeaturizer +from deepchem.utils.molecule_feature_utils import get_atom_type_one_hot +from deepchem.utils.molecule_feature_utils import construct_hydrogen_bonding_info +from deepchem.utils.molecule_feature_utils import get_atom_hydrogen_bonding_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_hybridization_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_total_num_Hs_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_is_in_aromatic_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_chirality_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_formal_charge +from deepchem.utils.molecule_feature_utils import get_atom_partial_charge +from deepchem.utils.molecule_feature_utils import get_atom_total_degree_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_type_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_in_same_ring_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_conjugated_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_stereo_one_hot + + +def _construct_atom_feature( + atom: RDKitAtom, h_bond_infos: List[Tuple[int, str]], use_chirality: bool, + use_partial_charge: bool) -> np.ndarray: 
+ """Construct an atom feature from a RDKit atom object. + + Parameters + ---------- + atom: rdkit.Chem.rdchem.Atom + RDKit atom object + h_bond_infos: List[Tuple[int, str]] + A list of tuple `(atom_index, hydrogen_bonding_type)`. + Basically, it is expected that this value is the return value of + `construct_hydrogen_bonding_info`. The `hydrogen_bonding_type` + value is "Acceptor" or "Donor". + use_chirality: bool + Whether to use chirality information or not. + use_partial_charge: bool + Whether to use partial charge data or not. + + Returns + ------- + np.ndarray + A one-hot vector of the atom feature. + """ + atom_type = get_atom_type_one_hot(atom) + formal_charge = get_atom_formal_charge(atom) + hybridization = get_atom_hybridization_one_hot(atom) + acceptor_donor = get_atom_hydrogen_bonding_one_hot(atom, h_bond_infos) + aromatic = get_atom_is_in_aromatic_one_hot(atom) + degree = get_atom_total_degree_one_hot(atom) + total_num_Hs = get_atom_total_num_Hs_one_hot(atom) + atom_feat = np.concatenate([ + atom_type, formal_charge, hybridization, acceptor_donor, aromatic, degree, + total_num_Hs + ]) + + if use_chirality: + chirality = get_atom_chirality_one_hot(atom) + atom_feat = np.concatenate([atom_feat, chirality]) + + if use_partial_charge: + partial_charge = get_atom_partial_charge(atom) + atom_feat = np.concatenate([atom_feat, partial_charge]) + return atom_feat + + +def _construct_bond_feature(bond: RDKitBond) -> np.ndarray: + """Construct a bond feature from a RDKit bond object. + + Parameters + --------- + bond: rdkit.Chem.rdchem.Bond + RDKit bond object + + Returns + ------- + np.ndarray + A one-hot vector of the bond feature. 
+ """ + bond_type = get_bond_type_one_hot(bond) + same_ring = get_bond_is_in_same_ring_one_hot(bond) + conjugated = get_bond_is_conjugated_one_hot(bond) + stereo = get_bond_stereo_one_hot(bond) + return np.concatenate([bond_type, same_ring, conjugated, stereo]) + + +class MolGraphConvFeaturizer(MolecularFeaturizer): + """This class is a featurizer of general graph convolution networks for molecules. + + The default node(atom) and edge(bond) representations are based on + `WeaveNet paper `_. If you want to use your own representations, + you could use this class as a guide to define your original Featurizer. In many cases, it's enough + to modify return values of `construct_atom_feature` or `construct_bond_feature`. + + The default node representation are constructed by concatenating the following values, + and the feature length is 30. + + - Atom type: A one-hot vector of this atom, "C", "N", "O", "F", "P", "S", "Cl", "Br", "I", "other atoms". + - Formal charge: Integer electronic charge. + - Hybridization: A one-hot vector of "sp", "sp2", "sp3". + - Hydrogen bonding: A one-hot vector of whether this atom is a hydrogen bond donor or acceptor. + - Aromatic: A one-hot vector of whether the atom belongs to an aromatic ring. + - Degree: A one-hot vector of the degree (0-5) of this atom. + - Number of Hydrogens: A one-hot vector of the number of hydrogens (0-4) that this atom connected. + - Chirality: A one-hot vector of the chirality, "R" or "S". (Optional) + - Partial charge: Calculated partial charge. (Optional) + + The default edge representation are constructed by concatenating the following values, + and the feature length is 11. + + - Bond type: A one-hot vector of the bond type, "single", "double", "triple", or "aromatic". + - Same ring: A one-hot vector of whether the atoms in the pair are in the same ring. + - Conjugated: A one-hot vector of whether this bond is conjugated or not. + - Stereo: A one-hot vector of the stereo configuration of a bond. 
+ + If you want to know more details about features, please check the paper [1]_ and + utilities in deepchem.utils.molecule_feature_utils.py. + + Examples + -------- + >>> smiles = ["C1CCC1", "C1=CC=CN=C1"] + >>> featurizer = MolGraphConvFeaturizer(use_edges=True) + >>> out = featurizer.featurize(smiles) + >>> type(out[0]) + + >>> out[0].num_node_features + 30 + >>> out[0].num_edge_features + 11 + + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." + Journal of computer-aided molecular design 30.8 (2016):595-608. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, + use_edges: bool = False, + use_chirality: bool = False, + use_partial_charge: bool = False): + """ + Parameters + ---------- + use_edges: bool, default False + Whether to use edge features or not. + use_chirality: bool, default False + Whether to use chirality information or not. + If True, featurization becomes slow. + use_partial_charge: bool, default False + Whether to use partial charge data or not. + If True, this featurizer computes gasteiger charges. + Therefore, there is a possibility to fail to featurize for some molecules + and featurization becomes slow. + """ + try: + from rdkit.Chem import AllChem # noqa + except ModuleNotFoundError: + raise ImportError("This method requires RDKit to be installed.") + + self.use_edges = use_edges + self.use_partial_charge = use_partial_charge + self.use_chirality = use_chirality + + def _featurize(self, mol: RDKitMol) -> GraphData: + """Calculate molecule graph features from RDKit mol object. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit mol object. + + Returns + ------- + graph: GraphData + A molecule graph with some features. 
+ """ + if self.use_partial_charge: + try: + mol.GetAtomWithIdx(0).GetProp('_GasteigerCharge') + except: + # If partial charges were not computed + from rdkit.Chem import AllChem + AllChem.ComputeGasteigerCharges(mol) + + # construct atom (node) feature + h_bond_infos = construct_hydrogen_bonding_info(mol) + atom_features = np.asarray( + [ + _construct_atom_feature(atom, h_bond_infos, self.use_chirality, + self.use_partial_charge) + for atom in mol.GetAtoms() + ], + dtype=np.float, + ) + + # construct edge (bond) index + src, dest = [], [] + for bond in mol.GetBonds(): + # add edge list considering a directed graph + start, end = bond.GetBeginAtomIdx(), bond.GetEndAtomIdx() + src += [start, end] + dest += [end, start] + + # construct edge (bond) feature + bond_features = None # deafult None + if self.use_edges: + bond_features = [] + for bond in mol.GetBonds(): + bond_features += 2 * [_construct_bond_feature(bond)] + bond_features = np.asarray(bond_features, dtype=np.float) + + return GraphData( + node_features=atom_features, + edge_index=np.asarray([src, dest], dtype=np.int), + edge_features=bond_features) diff --git a/deepchem/feat/molecule_featurizers/mordred_descriptors.py b/deepchem/feat/molecule_featurizers/mordred_descriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..3d32c343fcc2fcd36cc78668e6fbec85da07600d --- /dev/null +++ b/deepchem/feat/molecule_featurizers/mordred_descriptors.py @@ -0,0 +1,67 @@ +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class MordredDescriptors(MolecularFeaturizer): + """Mordred descriptors. + + This class computes a list of chemical descriptors using Mordred. + Please see the details about all descriptors from [1]_, [2]_. + + Attributes + ---------- + descriptors: List[str] + List of Mordred descriptor names used in this class. + + References + ---------- + .. [1] Moriwaki, Hirotomo, et al. 
"Mordred: a molecular descriptor calculator." + Journal of cheminformatics 10.1 (2018): 4. + .. [2] http://mordred-descriptor.github.io/documentation/master/descriptors.html + + Notes + ----- + This class requires Mordred to be installed. + """ + + def __init__(self, ignore_3D: bool = True): + """ + Parameters + ---------- + ignore_3D: bool, optional (default True) + Whether to use 3D information or not. + """ + try: + from mordred import Calculator, descriptors, is_missing + except ModuleNotFoundError: + raise ImportError("This class requires Mordred to be installed.") + + self.calc = Calculator(descriptors, ignore_3D=ignore_3D) + self.is_missing = is_missing + self.descriptors = list(descriptors.__all__) + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate Mordred descriptors. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + 1D array of Mordred descriptors for `mol`. + If ignore_3D is True, the length is 1613. + If ignore_3D is False, the length is 1826. 
+ """ + feature = self.calc(mol) + # convert errors to zero + feature = [ + 0.0 if self.is_missing(val) or isinstance(val, str) else val + for val in feature + ] + return np.asarray(feature) diff --git a/deepchem/feat/molecule_featurizers/one_hot_featurizer.py b/deepchem/feat/molecule_featurizers/one_hot_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..46366b9488549d21f730df5eb3ec1fe1cecea7c9 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/one_hot_featurizer.py @@ -0,0 +1,116 @@ +import logging +from typing import List + +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.utils.molecule_feature_utils import one_hot_encode +from deepchem.feat.base_classes import MolecularFeaturizer + +logger = logging.getLogger(__name__) + +ZINC_CHARSET = [ + '#', ')', '(', '+', '-', '/', '1', '3', '2', '5', '4', '7', '6', '8', '=', + '@', 'C', 'B', 'F', 'I', 'H', 'O', 'N', 'S', '[', ']', '\\', 'c', 'l', 'o', + 'n', 'p', 's', 'r' +] + + +class OneHotFeaturizer(MolecularFeaturizer): + """Encodes SMILES as a one-hot array. + + This featurizer encodes SMILES string as a one-hot array. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, charset: List[str] = ZINC_CHARSET, max_length: int = 100): + """Initialize featurizer. + + Parameters + ---------- + charset: List[str], optional (default ZINC_CHARSET) + A list of strings, where each string is length 1 and unique. + max_length: int, optional (default 100) + The max length for SMILES string. If the length of SMILES string is + shorter than max_length, the SMILES is padded using space. 
+ """ + try: + from rdkit import Chem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + if len(charset) != len(set(charset)): + raise ValueError("All values in charset must be unique.") + self.charset = charset + self.max_length = max_length + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Compute one-hot featurization of this molecule. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + An one hot vector encoded from SMILES. + The shape is `(max_length, len(charset) + 1)`. + The index of unknown character is `len(charset)`. + """ + from rdkit import Chem + + smiles = Chem.MolToSmiles(mol) + # validation + if len(smiles) > self.max_length: + logger.info( + "The length of {} is longer than `max_length`. So we return an empty array." + ) + return np.array([]) + + smiles = self.pad_smile(smiles) + return np.array([ + one_hot_encode(val, self.charset, include_unknown_set=True) + for val in smiles + ]) + + def pad_smile(self, smiles: str) -> str: + """Pad SMILES string to `self.pad_length` + + Parameters + ---------- + smiles: str + The smiles string to be padded. + + Returns + ------- + str + SMILES string space padded to self.pad_length + """ + return smiles.ljust(self.max_length) + + def untransform(self, one_hot_vectors: np.ndarray) -> str: + """Convert from one hot representation back to SMILES + + Parameters + ---------- + one_hot_vectors: np.ndarray + An array of one hot encoded features. + + Returns + ------- + str + SMILES string for an one hot encoded array. 
+ """ + smiles = "" + for one_hot in one_hot_vectors: + try: + idx = np.argmax(one_hot) + smiles += self.charset[idx] + except IndexError: + smiles += "" + return smiles diff --git a/deepchem/feat/molecule_featurizers/pubchem_fingerprint.py b/deepchem/feat/molecule_featurizers/pubchem_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..5180234eff793b2d2843796c3c049b2561403365 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/pubchem_fingerprint.py @@ -0,0 +1,52 @@ +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class PubChemFingerprint(MolecularFeaturizer): + """PubChem Fingerprint. + + The PubChem fingerprint is a 881 bit structural key, + which is used by PubChem for similarity searching. + Please confirm the details in [1]_. + + References + ---------- + .. [1] ftp://ftp.ncbi.nlm.nih.gov/pubchem/specifications/pubchem_fingerprints.pdf + + Notes + ----- + This class requires RDKit and PubChemPy to be installed. + PubChemPy use REST API to get the fingerprint, so you need the internet access. + """ + + def __init__(self): + """Initialize this featurizer.""" + try: + from rdkit import Chem # noqa + import pubchempy as pcp # noqa + except ModuleNotFoundError: + raise ImportError("This class requires PubChemPy to be installed.") + + self.get_pubchem_compounds = pcp.get_compounds + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate PubChem fingerprint. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + 1D array of RDKit descriptors for `mol`. The length is 881. 
+ """ + from rdkit import Chem + smiles = Chem.MolToSmiles(mol) + pubchem_compound = self.get_pubchem_compounds(smiles, 'smiles')[0] + feature = [int(bit) for bit in pubchem_compound.cactvs_fingerprint] + return np.asarray(feature) diff --git a/deepchem/feat/molecule_featurizers/raw_featurizer.py b/deepchem/feat/molecule_featurizers/raw_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..8341e07306139ff5c1a27d10b375ec3a94c1dd2e --- /dev/null +++ b/deepchem/feat/molecule_featurizers/raw_featurizer.py @@ -0,0 +1,52 @@ +from typing import Union + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class RawFeaturizer(MolecularFeaturizer): + """Encodes a molecule as a SMILES string or RDKit mol. + + This featurizer can be useful when you're trying to transform a large + collection of RDKit mol objects as Smiles strings, or alternatively as a + "no-op" featurizer in your molecular pipeline. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, smiles: bool = False): + """Initialize this featurizer. + + Parameters + ---------- + smiles: bool, optional (default False) + If True, encode this molecule as a SMILES string. Else as a RDKit mol. + """ + try: + from rdkit import Chem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.smiles = smiles + + def _featurize(self, mol: RDKitMol) -> Union[str, RDKitMol]: + """Calculate either smiles string or pass through raw molecule. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + str or rdkit.Chem.rdchem.Mol + SMILES string or RDKit Mol object. 
+ """ + from rdkit import Chem + + if self.smiles: + return Chem.MolToSmiles(mol) + else: + return mol diff --git a/deepchem/feat/molecule_featurizers/rdkit_descriptors.py b/deepchem/feat/molecule_featurizers/rdkit_descriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..debc834fb503d8d95229b3de592914a889dd0a80 --- /dev/null +++ b/deepchem/feat/molecule_featurizers/rdkit_descriptors.py @@ -0,0 +1,74 @@ +""" +Basic molecular features. +""" + +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class RDKitDescriptors(MolecularFeaturizer): + """RDKit descriptors. + + This class computes a list of chemical descriptors using RDKit. + + Attributes + ---------- + descriptors: List[str] + List of RDKit descriptor names used in this class. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, use_fragment=True, ipc_avg=True): + """Initialize this featurizer. + + Parameters + ---------- + use_fragment: bool, optional (default True) + If True, the return value includes the fragment binary descriptors like 'fr_XXX'. + ipc_avg: bool, optional (default True) + If True, the IPC descriptor calculates with avg=True option. + Please see this issue: https://github.com/rdkit/rdkit/issues/1527. + """ + try: + from rdkit.Chem import Descriptors + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.use_fragment = use_fragment + self.ipc_avg = ipc_avg + self.descriptors = [] + self.descList = [] + for descriptor, function in Descriptors.descList: + if self.use_fragment is False and descriptor.startswith('fr_'): + continue + self.descriptors.append(descriptor) + self.descList.append((descriptor, function)) + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """ + Calculate RDKit descriptors. 
+ + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + 1D array of RDKit descriptors for `mol`. + The length is `len(self.descriptors)`. + """ + features = [] + for desc_name, function in self.descList: + if desc_name == 'Ipc' and self.ipc_avg: + feature = function(mol, avg=True) + else: + feature = function(mol) + features.append(feature) + return np.asarray(features) diff --git a/deepchem/feat/molecule_featurizers/smiles_to_image.py b/deepchem/feat/molecule_featurizers/smiles_to_image.py new file mode 100644 index 0000000000000000000000000000000000000000..809934e155fa8d52eed56f56cc60c0b55b05c2eb --- /dev/null +++ b/deepchem/feat/molecule_featurizers/smiles_to_image.py @@ -0,0 +1,168 @@ +""" +Featurizer implementations used in ChemCeption models. +SmilesToImage featurizer for ChemCeption models taken from https://arxiv.org/abs/1710.02238 +""" +import numpy as np + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + + +class SmilesToImage(MolecularFeaturizer): + """Convert SMILES string to an image. + + SmilesToImage Featurizer takes a SMILES string, and turns it into an image. + Details taken from [1]_. + + The default size of for the image is 80 x 80. Two image modes are currently + supported - std & engd. std is the gray scale specification, + with atomic numbers as pixel values for atom positions and a constant value of + 2 for bond positions. engd is a 4-channel specification, which uses atom + properties like hybridization, valency, charges in addition to atomic number. + Bond type is also used for the bonds. + + The coordinates of all atoms are computed, and lines are drawn between atoms + to indicate bonds. For the respective channels, the atom and bond positions are + set to the property values as mentioned in the paper. + + References + ---------- + .. [1] Goh, Garrett B., et al. 
"Using rule-based labels for weak supervised + learning: a ChemNet for transferable chemical property prediction." + Proceedings of the 24th ACM SIGKDD International Conference on Knowledge + Discovery & Data Mining. 2018. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, + img_size: int = 80, + res: float = 0.5, + max_len: int = 250, + img_spec: str = "std"): + """ + Parameters + ---------- + img_size: int, default 80 + Size of the image tensor + res: float, default 0.5 + Displays the resolution of each pixel in Angstrom + max_len: int, default 250 + Maximum allowed length of SMILES string + img_spec: str, default std + Indicates the channel organization of the image tensor + """ + try: + from rdkit import Chem # noqa + from rdkit.Chem import AllChem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + if img_spec not in ["std", "engd"]: + raise ValueError( + "Image mode must be one of std or engd. {} is not supported".format( + img_spec)) + + self.img_size = img_size + self.max_len = max_len + self.res = res + self.img_spec = img_spec + self.embed = int(img_size * res / 2) + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Featurizes a single SMILE into an image. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + A 3D array of image, the shape is `(img_size, img_size, 1)`. + If the length of SMILES is longer than `max_len`, this value is an empty array. 
+ """ + from rdkit import Chem + from rdkit.Chem import AllChem + + smile = Chem.MolToSmiles(mol) + if len(smile) > self.max_len: + return np.array([]) + + cmol = Chem.Mol(mol.ToBinary()) + cmol.ComputeGasteigerCharges() + AllChem.Compute2DCoords(cmol) + atom_coords = cmol.GetConformer(0).GetPositions() + + if self.img_spec == "std": + # Setup image + img = np.zeros((self.img_size, self.img_size, 1)) + # Compute bond properties + bond_props = np.array( + [[2.0, bond.GetBeginAtomIdx(), + bond.GetEndAtomIdx()] for bond in mol.GetBonds()]) + # Compute atom properties + atom_props = np.array([[atom.GetAtomicNum()] for atom in cmol.GetAtoms()]) + + bond_props = bond_props.astype(np.float32) + atom_props = atom_props.astype(np.float32) + + else: + # Setup image + img = np.zeros((self.img_size, self.img_size, 4)) + # Compute bond properties + bond_props = np.array([[ + bond.GetBondTypeAsDouble(), + bond.GetBeginAtomIdx(), + bond.GetEndAtomIdx() + ] for bond in mol.GetBonds()]) + # Compute atom properties + atom_props = np.array([[ + atom.GetAtomicNum(), + atom.GetProp("_GasteigerCharge"), + atom.GetExplicitValence(), + atom.GetHybridization().real, + ] for atom in cmol.GetAtoms()]) + + bond_props = bond_props.astype(np.float32) + atom_props = atom_props.astype(np.float32) + + partial_charges = atom_props[:, 1] + if np.any(np.isnan(partial_charges)): + return np.array([]) + + frac = np.linspace(0, 1, int(1 / self.res * 2)) + # Reshape done for proper broadcast + frac = frac.reshape(-1, 1, 1) + + bond_begin_idxs = bond_props[:, 1].astype(int) + bond_end_idxs = bond_props[:, 2].astype(int) + + # Reshapes, and axes manipulations to facilitate vector processing. + begin_coords = atom_coords[bond_begin_idxs] + begin_coords = np.expand_dims(begin_coords.T, axis=0) + end_coords = atom_coords[bond_end_idxs] + end_coords = np.expand_dims(end_coords.T, axis=0) + + # Draw a line between the two atoms. 
+ # The coordinates of this line, are indicated in line_coords + line_coords = frac * begin_coords + (1 - frac) * end_coords + # Turn the line coordinates into image positions + bond_line_idxs = np.ceil( + (line_coords[:, 0] + self.embed) / self.res).astype(int) + bond_line_idys = np.ceil( + (line_coords[:, 1] + self.embed) / self.res).astype(int) + # Set the bond line coordinates to the bond property used. + img[bond_line_idxs, bond_line_idys, 0] = bond_props[:, 0] + + # Turn atomic coordinates into image positions + atom_idxs = np.round( + (atom_coords[:, 0] + self.embed) / self.res).astype(int) + atom_idys = np.round( + (atom_coords[:, 1] + self.embed) / self.res).astype(int) + # Set the atom positions in image to different atomic properties in channels + img[atom_idxs, atom_idys, :] = atom_props + return img diff --git a/deepchem/feat/molecule_featurizers/smiles_to_seq.py b/deepchem/feat/molecule_featurizers/smiles_to_seq.py new file mode 100644 index 0000000000000000000000000000000000000000..afe35c4c84f2c5c37875675da6aab4672dcfae4a --- /dev/null +++ b/deepchem/feat/molecule_featurizers/smiles_to_seq.py @@ -0,0 +1,153 @@ +""" +Featurizer implementations used in Smiles2Vec models. +SmilesToSeq featurizer for Smiles2Vec models taken from https://arxiv.org/abs/1712.02734 +""" +from typing import Dict, List +import numpy as np +import pandas as pd + +from deepchem.utils.typing import RDKitMol +from deepchem.feat.base_classes import MolecularFeaturizer + +PAD_TOKEN = "" +OUT_OF_VOCAB_TOKEN = "" + + +def create_char_to_idx(filename: str, + max_len: int = 250, + smiles_field: str = "smiles") -> Dict[str, int]: + """Creates a dictionary with character to index mapping. + + Parameters + ---------- + filename: str + Name of the file containing the SMILES strings + max_len: int, default 250 + Maximum allowed length of the SMILES string + smiles_field: str, default "smiles" + Field indicating the SMILES strings int the file. 
+ + Returns + ------- + Dict[str, int] + A dictionary mapping characters to their integer indexes. + """ + smiles_df = pd.read_csv(filename) + char_set = set() + for smile in smiles_df[smiles_field]: + if len(smile) <= max_len: + char_set.update(set(smile)) + + unique_char_list = list(char_set) + unique_char_list += [PAD_TOKEN, OUT_OF_VOCAB_TOKEN] + char_to_idx = {letter: idx for idx, letter in enumerate(unique_char_list)} + return char_to_idx + + +class SmilesToSeq(MolecularFeaturizer): + """ + SmilesToSeq Featurizer takes a SMILES string, and turns it into a sequence. + Details taken from [1]_. + + SMILES strings smaller than a specified max length (max_len) are padded using + the PAD token while those larger than the max length are not considered. Based + on the paper, there is also the option to add extra padding (pad_len) on both + sides of the string after length normalization. Using a character to index (char_to_idx) + mapping, the SMILES characters are turned into indices and the + resulting sequence of indices serves as the input for an embedding layer. + + References + ---------- + .. [1] Goh, Garrett B., et al. "Using rule-based labels for weak supervised + learning: a ChemNet for transferable chemical property prediction." + Proceedings of the 24th ACM SIGKDD International Conference on Knowledge + Discovery & Data Mining. 2018. + + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, + char_to_idx: Dict[str, int], + max_len: int = 250, + pad_len: int = 10): + """Initialize this class. + + Parameters + ---------- + char_to_idx: Dict + Dictionary containing character to index mappings for unique characters + max_len: int, default 250 + Maximum allowed length of the SMILES string. 
+ pad_len: int, default 10 + Amount of padding to add on either side of the SMILES seq + """ + try: + from rdkit import Chem # noqa + except ModuleNotFoundError: + raise ImportError("This class requires RDKit to be installed.") + + self.max_len = max_len + self.char_to_idx = char_to_idx + self.idx_to_char = {idx: letter for letter, idx in self.char_to_idx.items()} + self.pad_len = pad_len + + def to_seq(self, smile: List[str]) -> np.ndarray: + """Turns list of smiles characters into array of indices""" + out_of_vocab_idx = self.char_to_idx[OUT_OF_VOCAB_TOKEN] + seq = [ + self.char_to_idx.get(character, out_of_vocab_idx) for character in smile + ] + return np.array(seq) + + def remove_pad(self, characters: List[str]) -> List[str]: + """Removes PAD_TOKEN from the character list.""" + characters = characters[self.pad_len:] + characters = characters[:-self.pad_len] + chars = list() + + for char in characters: + if char != PAD_TOKEN: + chars.append(char) + return chars + + def smiles_from_seq(self, seq: List[int]) -> str: + """Reconstructs SMILES string from sequence.""" + characters = [self.idx_to_char[i] for i in seq] + + characters = self.remove_pad(characters) + smile = "".join([letter for letter in characters]) + return smile + + def _featurize(self, mol: RDKitMol) -> np.ndarray: + """Featurizes a SMILES sequence. + + Parameters + ---------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + np.ndarray + A 1D array of a SMILES sequence. + If the length of SMILES is longer than `max_len`, this value is an empty array. 
+ """ + from rdkit import Chem + + smile = Chem.MolToSmiles(mol) + if len(smile) > self.max_len: + return np.array([]) + + smile_list = list(smile) + # Extend shorter strings with padding + if len(smile) < self.max_len: + smile_list.extend([PAD_TOKEN] * (self.max_len - len(smile))) + + # Padding before and after + smile_list += [PAD_TOKEN] * self.pad_len + smile_list = [PAD_TOKEN] * self.pad_len + smile_list + + smile_seq = self.to_seq(smile_list) + return smile_seq diff --git a/deepchem/feat/one_hot.py b/deepchem/feat/one_hot.py deleted file mode 100644 index ffb82d6745ea362818f8b68cd34c2c4127e5774c..0000000000000000000000000000000000000000 --- a/deepchem/feat/one_hot.py +++ /dev/null @@ -1,148 +0,0 @@ -import numpy as np -from deepchem.feat import Featurizer - -zinc_charset = [ - ' ', '#', ')', '(', '+', '-', '/', '1', '3', '2', '5', '4', '7', '6', '8', - '=', '@', 'C', 'B', 'F', 'I', 'H', 'O', 'N', 'S', '[', ']', '\\', 'c', 'l', - 'o', 'n', 'p', 's', 'r' -] - - -class OneHotFeaturizer(Featurizer): - """ - NOTE(LESWING) Not Thread Safe in initialization of charset - """ - - def __init__(self, charset=None, padlength=120): - """ - Parameters - ---------- - charset: obj:`list` of obj:`str` - Each string is length 1 - padlength: int - length to pad the smile strings to - """ - self.charset = charset - self.pad_length = padlength - - def featurize(self, mols, verbose=True, log_every_n=1000): - """ - Parameters - ---------- - mols: obj - List of rdkit Molecule Objects - verbose: bool - How much logging - log_every_n: - How often to log - Returns - - ------- - obj - numpy array of features - """ - from rdkit import Chem - smiles = [Chem.MolToSmiles(mol) for mol in mols] - if self.charset is None: - self.charset = self._create_charset(smiles) - return np.array([self.one_hot_encoded(smile) for smile in smiles]) - - def one_hot_array(self, i): - """ - Create a one hot array with bit i set to 1 - Parameters - ---------- - i: int - bit to set to 1 - Returns - ------- - 
obj:`list` of obj:`int` - length len(self.charset) - """ - return [int(x) for x in [ix == i for ix in range(len(self.charset))]] - - def one_hot_index(self, c): - """ - TODO(LESWING) replace with map lookup vs linear scan - Parameters - ---------- - c - character whose index we want - Returns - ------- - int - index of c in self.charset - """ - return self.charset.index(c) - - def pad_smile(self, smile): - """ - Pad A Smile String to self.pad_length - Parameters - ---------- - smile: str - - Returns - ------- - str - smile string space padded to self.pad_length - """ - - return smile.ljust(self.pad_length) - - def one_hot_encoded(self, smile): - """ - One Hot Encode an entire SMILE string - Parameters - ---------- - smile: str - smile string to encode - - Returns - ------- - object - np.array of one hot encoded arrays for each character in smile - """ - return np.array([ - self.one_hot_array(self.one_hot_index(x)) for x in self.pad_smile(smile) - ]) - - def untransform(self, z): - """ - Convert from one hot representation back to SMILE - Parameters - ---------- - z: obj:`list` - list of one hot encoded features - - Returns - ------- - Smile Strings picking MAX for each one hot encoded array - """ - z1 = [] - for i in range(len(z)): - s = "" - for j in range(len(z[i])): - oh = np.argmax(z[i][j]) - s += self.charset[oh] - z1.append([s.strip()]) - return z1 - - def _create_charset(self, smiles): - """ - create the charset from smiles - Parameters - ---------- - smiles: obj:`list` of obj:`str` - list of smile strings - - Returns - ------- - obj:`list` of obj:`str` - List of length one strings that are characters in smiles. 
No duplicates - """ - s = set() - for smile in smiles: - for c in smile: - s.add(c) - return [' '] + sorted(list(s)) diff --git a/deepchem/feat/raw_featurizer.py b/deepchem/feat/raw_featurizer.py deleted file mode 100644 index e0980e7113b814de22f811c2da15025768a8e100..0000000000000000000000000000000000000000 --- a/deepchem/feat/raw_featurizer.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- -from deepchem.feat import Featurizer - - -class RawFeaturizer(Featurizer): - - def __init__(self, smiles=False): - self.smiles = smiles - - def _featurize(self, mol): - from rdkit import Chem - if self.smiles: - return Chem.MolToSmiles(mol) - else: - return mol diff --git a/deepchem/feat/smiles_featurizers.py b/deepchem/feat/smiles_featurizers.py deleted file mode 100644 index 86451ec8b94207156c33a2a340bcaee9b80e0a6a..0000000000000000000000000000000000000000 --- a/deepchem/feat/smiles_featurizers.py +++ /dev/null @@ -1,263 +0,0 @@ -""" -Featurizer implementations used in ChemCeption and Smiles2Vec models. -SmilesToSeq featurizer for Smiles2Vec models taken from https://arxiv.org/abs/1712.02734 -SmilesToImage featurizer for ChemCeption models taken from https://arxiv.org/abs/1710.02238 -""" - -__author__ = "Vignesh Ram Somnath" -__license__ = "MIT" - -import numpy as np -import pandas as pd -from deepchem.feat import Featurizer - -PAD_TOKEN = "" -OUT_OF_VOCAB_TOKEN = "" - - -def create_char_to_idx(filename, - max_len=250, - smiles_field="smiles", - verbose=False): - """Creates a dictionary with character to index mapping. - - Parameters - ---------- - filename: str, - Name of the file containing the SMILES strings - max_len: int, default 250 - Maximum allowed length of the SMILES string - smiles_field: str, default smiles - Field indicating the SMILES strings int the file. 
- verbose: bool, default True - Whether to print the progress - """ - smiles_df = pd.read_csv(filename) - char_set = set() - for smile in smiles_df[smiles_field]: - if len(smile) <= max_len: - char_set.update(set(smile)) - - unique_char_list = list(char_set) - unique_char_list += [PAD_TOKEN, OUT_OF_VOCAB_TOKEN] - if verbose: - print("Number of unique characters: ", len(unique_char_list)) - - char_to_idx = {letter: idx for idx, letter in enumerate(unique_char_list)} - - if verbose: - print(unique_char_list) - return char_to_idx - - -class SmilesToSeq(Featurizer): - """ - SmilesToSeq Featurizer takes a SMILES string, and turns it into a sequence. - Details taken from https://arxiv.org/abs/1712.02734. - - SMILES strings smaller than a specified max length (max_len) are padded using - the PAD token while those larger than the max length are not considered. Based - on the paper, there is also the option to add extra padding (pad_len) on both - sides of the string after length normalization. Using a character to index (char_to_idx) - mapping, the SMILES characters are turned into indices and the - resulting sequence of indices serves as the input for an embedding layer. 
- - """ - - def __init__(self, char_to_idx, max_len=250, pad_len=10, **kwargs): - """ - Parameters - ---------- - char_to_idx: dict - Dictionary containing character to index mappings for unique characters - max_len: int, default 250 - Maximum allowed length of the SMILES string - pad_len: int, default 10 - Amount of padding to add on either side of the SMILES seq - """ - self.max_len = max_len - self.char_to_idx = char_to_idx - self.idx_to_char = {idx: letter for letter, idx in self.char_to_idx.items()} - self.pad_len = pad_len - super(SmilesToSeq, self).__init__(**kwargs) - - def to_seq(self, smile): - """Turns list of smiles characters into array of indices""" - out_of_vocab_idx = self.char_to_idx[OUT_OF_VOCAB_TOKEN] - seq = [ - self.char_to_idx.get(character, out_of_vocab_idx) for character in smile - ] - return np.array(seq) - - def remove_pad(self, characters): - """Removes PAD_TOKEN from the character list.""" - characters = characters[self.pad_len:] - characters = characters[:-self.pad_len] - chars = list() - - for char in characters: - if char != PAD_TOKEN: - chars.append(char) - return chars - - def smiles_from_seq(self, seq): - """Reconstructs SMILES string from sequence.""" - characters = [self.idx_to_char[i] for i in seq] - - characters = self.remove_pad(characters) - smile = "".join([letter for letter in characters]) - return smile - - def _featurize(self, mol): - """Featurizes a SMILES sequence.""" - from rdkit import Chem - smile = Chem.MolToSmiles(mol) - if len(smile) > self.max_len: - return list() - - smile_list = list(smile) - # Extend shorter strings with padding - if len(smile) < self.max_len: - smile_list.extend([PAD_TOKEN] * (self.max_len - len(smile))) - - # Padding before and after - smile_list += [PAD_TOKEN] * self.pad_len - smile_list = [PAD_TOKEN] * self.pad_len + smile_list - - smile_seq = self.to_seq(smile_list) - return smile_seq - - -class SmilesToImage(Featurizer): - """ - SmilesToImage Featurizer takes a SMILES string, and turns 
it into an image. - Details taken from https://arxiv.org/abs/1712.02734. - - The default size of for the image is 80 x 80. Two image modes are currently - supported - std & engd. std is the gray scale specification, - with atomic numbers as pixel values for atom positions and a constant value of - 2 for bond positions. engd is a 4-channel specification, which uses atom - properties like hybridization, valency, charges in addition to atomic number. - Bond type is also used for the bonds. - - The coordinates of all atoms are computed, and lines are drawn between atoms - to indicate bonds. For the respective channels, the atom and bond positions are - set to the property values as mentioned in the paper. - """ - - def __init__(self, - img_size=80, - res=0.5, - max_len=250, - img_spec="std", - **kwargs): - """ - Parameters - ---------- - img_size: int, default 80 - Size of the image tensor - res: float, default 0.5 - Displays the resolution of each pixel in Angstrom - max_len: int, default 250 - Maximum allowed length of SMILES string - img_spec: str, default std - Indicates the channel organization of the image tensor - """ - if img_spec not in ["std", "engd"]: - raise ValueError( - "Image mode must be one of std or engd. 
{} is not supported".format( - img_spec)) - self.img_size = img_size - self.max_len = max_len - self.res = res - self.img_spec = img_spec - self.embed = int(img_size * res / 2) - super(SmilesToImage, self).__init__() - - def _featurize(self, mol): - """Featurizes a single SMILE sequence.""" - from rdkit import Chem - from rdkit.Chem import AllChem - - smile = Chem.MolToSmiles(mol) - if len(smile) > self.max_len: - return list() - - cmol = Chem.Mol(mol.ToBinary()) - cmol.ComputeGasteigerCharges() - AllChem.Compute2DCoords(cmol) - atom_coords = cmol.GetConformer(0).GetPositions() - - if self.img_spec == "std": - # Setup image - img = np.zeros((self.img_size, self.img_size, 1)) - # Compute bond properties - bond_props = np.array( - [[2.0, bond.GetBeginAtomIdx(), - bond.GetEndAtomIdx()] for bond in mol.GetBonds()]) - # Compute atom properties - atom_props = np.array([[atom.GetAtomicNum()] for atom in cmol.GetAtoms()]) - - bond_props = bond_props.astype(np.float32) - atom_props = atom_props.astype(np.float32) - - else: - # Setup image - img = np.zeros((self.img_size, self.img_size, 4)) - # Compute bond properties - bond_props = np.array([[ - bond.GetBondTypeAsDouble(), - bond.GetBeginAtomIdx(), - bond.GetEndAtomIdx() - ] for bond in mol.GetBonds()]) - # Compute atom properties - atom_props = np.array([[ - atom.GetAtomicNum(), - atom.GetProp("_GasteigerCharge"), - atom.GetExplicitValence(), - atom.GetHybridization().real, - ] for atom in cmol.GetAtoms()]) - - bond_props = bond_props.astype(np.float32) - atom_props = atom_props.astype(np.float32) - - partial_charges = atom_props[:, 1] - if np.any(np.isnan(partial_charges)): - return [] - - frac = np.linspace(0, 1, int(1 / self.res * 2)) - # Reshape done for proper broadcast - frac = frac.reshape(-1, 1, 1) - - try: - bond_begin_idxs = bond_props[:, 1].astype(int) - bond_end_idxs = bond_props[:, 2].astype(int) - - # Reshapes, and axes manipulations to facilitate vector processing. 
- begin_coords = atom_coords[bond_begin_idxs] - begin_coords = np.expand_dims(begin_coords.T, axis=0) - end_coords = atom_coords[bond_end_idxs] - end_coords = np.expand_dims(end_coords.T, axis=0) - - # Draw a line between the two atoms. - # The coordinates of this line, are indicated in line_coords - line_coords = frac * begin_coords + (1 - frac) * end_coords - # Turn the line coordinates into image positions - bond_line_idxs = np.ceil( - (line_coords[:, 0] + self.embed) / self.res).astype(int) - bond_line_idys = np.ceil( - (line_coords[:, 1] + self.embed) / self.res).astype(int) - # Set the bond line coordinates to the bond property used. - img[bond_line_idxs, bond_line_idys, 0] = bond_props[:, 0] - - # Turn atomic coordinates into image positions - atom_idxs = np.round( - (atom_coords[:, 0] + self.embed) / self.res).astype(int) - atom_idys = np.round( - (atom_coords[:, 1] + self.embed) / self.res).astype(int) - # Set the atom positions in image to different atomic properties in channels - img[atom_idxs, atom_idys, :] = atom_props - return img - - except IndexError as e: - return [] diff --git a/deepchem/feat/smiles_tokenizer.py b/deepchem/feat/smiles_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..1c3134fd41be915b03b8899512c41b8f42be8099 --- /dev/null +++ b/deepchem/feat/smiles_tokenizer.py @@ -0,0 +1,334 @@ +# Requriments - transformers, tokenizers +# Right now, the Smiles Tokenizer uses an exiesting vocab file from rxnfp that is fairly comprehensive and from the USPTO dataset. +# The vocab may be expanded in the near future + +import collections +import os +import re +import pkg_resources +from typing import List +from transformers import BertTokenizer +from logging import getLogger + +logger = getLogger(__name__) +""" +SMI_REGEX_PATTERN: str + SMILES regex pattern for tokenization. Designed by Schwaller et. al. + +References + +.. [1] Philippe Schwaller, Teodoro Laino, Théophile Gaudin, Peter Bolgar, Christopher A. 
Hunter, Costas Bekas, and Alpha A. Lee + ACS Central Science 2019 5 (9): Molecular Transformer: A Model for Uncertainty-Calibrated Chemical Reaction Prediction + 1572-1583 DOI: 10.1021/acscentsci.9b00576 + +""" + +SMI_REGEX_PATTERN = r"""(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=| +#|-|\+|\\|\/|:|~|@|\?|>>?|\*|\$|\%[0-9]{2}|[0-9])""" + +# add vocab_file dict +VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"} + + +def get_default_tokenizer(): + default_vocab_path = (pkg_resources.resource_filename("deepchem", + "feat/tests/vocab.txt")) + return SmilesTokenizer(default_vocab_path) + + +class SmilesTokenizer(BertTokenizer): + """ + Creates the SmilesTokenizer class. The tokenizer heavily inherits from the BertTokenizer + implementation found in Huggingface's transformers library. It runs a WordPiece tokenization + algorithm over SMILES strings using the tokenisation SMILES regex developed by Schwaller et. al. + + Please see https://github.com/huggingface/transformers + and https://github.com/rxn4chemistry/rxnfp for more details. + + Examples + -------- + >>> from deepchem.feat.smiles_tokenizer import SmilesTokenizer + >>> current_dir = os.path.dirname(os.path.realpath(__file__)) + >>> vocab_path = os.path.join(current_dir, 'tests/data', 'vocab.txt') + >>> tokenizer = SmilesTokenizer(vocab_path) + >>> print(tokenizer.encode("CC(=O)OC1=CC=CC=C1C(=O)O")) + [12, 16, 16, 17, 22, 19, 18, 19, 16, 20, 22, 16, 16, 22, 16, 16, 22, 16, 20, 16, 17, 22, 19, 18, 19, 13] + + + References + ---------- + .. [1] Schwaller, Philippe; Probst, Daniel; Vaucher, Alain C.; Nair, Vishnu H; Kreutter, David; + Laino, Teodoro; et al. (2019): Mapping the Space of Chemical Reactions using Attention-Based Neural + Networks. ChemRxiv. Preprint. https://doi.org/10.26434/chemrxiv.9897365.v3 + + Notes + ---- + This class requires huggingface's transformers and tokenizers libraries to be installed. 
+ """ + vocab_files_names = VOCAB_FILES_NAMES + + def __init__( + self, + vocab_file: str = '', + # unk_token="[UNK]", + # sep_token="[SEP]", + # pad_token="[PAD]", + # cls_token="[CLS]", + # mask_token="[MASK]", + **kwargs): + """Constructs a SmilesTokenizer. + + Parameters + ---------- + vocab_file: str + Path to a SMILES character per line vocabulary file. + Default vocab file is found in deepchem/feat/tests/data/vocab.txt + """ + + super().__init__(vocab_file, **kwargs) + # take into account special tokens in max length + self.max_len_single_sentence = self.max_len - 2 + self.max_len_sentences_pair = self.max_len - 3 + + if not os.path.isfile(vocab_file): + raise ValueError( + "Can't find a vocab file at path '{}'.".format(vocab_file)) + self.vocab = load_vocab(vocab_file) + self.highest_unused_index = max( + [i for i, v in enumerate(self.vocab.keys()) if v.startswith("[unused")]) + self.ids_to_tokens = collections.OrderedDict( + [(ids, tok) for tok, ids in self.vocab.items()]) + self.basic_tokenizer = BasicSmilesTokenizer() + self.init_kwargs["max_len"] = self.max_len + + @property + def vocab_size(self): + return len(self.vocab) + + @property + def vocab_list(self): + return list(self.vocab.keys()) + + def _tokenize(self, text: str): + """ + Tokenize a string into a list of tokens. + + Parameters + ---------- + text: str + Input string sequence to be tokenized. + """ + + split_tokens = [token for token in self.basic_tokenizer.tokenize(text)] + return split_tokens + + def _convert_token_to_id(self, token): + """ + Converts a token (str/unicode) in an id using the vocab. + + Parameters + ---------- + token: str + String token from a larger sequence to be converted to a numerical id. + """ + + return self.vocab.get(token, self.vocab.get(self.unk_token)) + + def _convert_id_to_token(self, index): + """ + Converts an index (integer) in a token (string/unicode) using the vocab. 
+ + Parameters + ---------- + index: int + Integer index to be converted back to a string-based token as part of a larger sequence. + """ + + return self.ids_to_tokens.get(index, self.unk_token) + + def convert_tokens_to_string(self, tokens: List[str]): + """ Converts a sequence of tokens (string) in a single string. + + Parameters + ---------- + tokens: List[str] + List of tokens for a given string sequence. + + Returns + ------- + out_string: str + Single string from combined tokens. + """ + + out_string: str = " ".join(tokens).replace(" ##", "").strip() + return out_string + + def add_special_tokens_ids_single_sequence(self, token_ids: List[int]): + """ + Adds special tokens to the a sequence for sequence classification tasks. + A BERT sequence has the following format: [CLS] X [SEP] + + Parameters + ---------- + + token_ids: list[int] + list of tokenized input ids. Can be obtained using the encode or encode_plus methods. + """ + + return [self.cls_token_id] + token_ids + [self.sep_token_id] + + def add_special_tokens_single_sequence(self, tokens: List[str]): + """ + Adds special tokens to the a sequence for sequence classification tasks. + A BERT sequence has the following format: [CLS] X [SEP] + + Parameters + ---------- + tokens: List[str] + List of tokens for a given string sequence. + + """ + return [self.cls_token] + tokens + [self.sep_token] + + def add_special_tokens_ids_sequence_pair(self, token_ids_0: List[int], + token_ids_1: List[int]) -> List[int]: + """ + Adds special tokens to a sequence pair for sequence classification tasks. + A BERT sequence pair has the following format: [CLS] A [SEP] B [SEP] + + Parameters + ---------- + token_ids_0: List[int] + List of ids for the first string sequence in the sequence pair (A). + + token_ids_1: List[int] + List of tokens for the second string sequence in the sequence pair (B). 
+ """ + + sep = [self.sep_token_id] + cls = [self.cls_token_id] + + return cls + token_ids_0 + sep + token_ids_1 + sep + + def add_padding_tokens(self, + token_ids: List[int], + length: int, + right: bool = True) -> List[int]: + """ + Adds padding tokens to return a sequence of length max_length. + By default padding tokens are added to the right of the sequence. + + Parameters + ---------- + token_ids: list[int] + list of tokenized input ids. Can be obtained using the encode or encode_plus methods. + + length: int + + right: bool (True by default) + + Returns + ---------- + token_ids : + list of tokenized input ids. Can be obtained using the encode or encode_plus methods. + + padding: int + Integer to be added as padding token + + """ + padding = [self.pad_token_id] * (length - len(token_ids)) + + if right: + return token_ids + padding + else: + return padding + token_ids + + def save_vocabulary( + self, vocab_path: str + ): # -> tuple[str]: doctest issue raised with this return type annotation + """ + Save the tokenizer vocabulary to a file. + + Parameters + ---------- + vocab_path: obj: str + The directory in which to save the SMILES character per line vocabulary file. + Default vocab file is found in deepchem/feat/tests/data/vocab.txt + + Returns + ---------- + vocab_file: :obj:`Tuple(str)`: + Paths to the files saved. + typle with string to a SMILES character per line vocabulary file. + Default vocab file is found in deepchem/feat/tests/data/vocab.txt + + """ + index = 0 + if os.path.isdir(vocab_path): + vocab_file = os.path.join(vocab_path, VOCAB_FILES_NAMES["vocab_file"]) + else: + vocab_file = vocab_path + with open(vocab_file, "w", encoding="utf-8") as writer: + for token, token_index in sorted( + self.vocab.items(), key=lambda kv: kv[1]): + if index != token_index: + logger.warning( + "Saving vocabulary to {}: vocabulary indices are not consecutive." 
+ " Please check that the vocabulary is not corrupted!".format( + vocab_file)) + index = token_index + writer.write(token + "\n") + index += 1 + return (vocab_file,) + + +class BasicSmilesTokenizer(object): + """ + + Run basic SMILES tokenization using a regex pattern developed by Schwaller et. al. This tokenizer is to be used + when a tokenizer that does not require the transformers library by HuggingFace is required. + + Examples + -------- + >>> from deepchem.feat.smiles_tokenizer import BasicSmilesTokenizer + >>> tokenizer = BasicSmilesTokenizer() + >>> print(tokenizer.tokenize("CC(=O)OC1=CC=CC=C1C(=O)O")) + ['C', 'C', '(', '=', 'O', ')', 'O', 'C', '1', '=', 'C', 'C', '=', 'C', 'C', '=', 'C', '1', 'C', '(', '=', 'O', ')', 'O'] + + + References + ---------- + .. [1] Philippe Schwaller, Teodoro Laino, Théophile Gaudin, Peter Bolgar, Christopher A. Hunter, Costas Bekas, and Alpha A. Lee + ACS Central Science 2019 5 (9): Molecular Transformer: A Model for Uncertainty-Calibrated Chemical Reaction Prediction + 1572-1583 DOI: 10.1021/acscentsci.9b00576 + + """ + + def __init__(self, regex_pattern: str = SMI_REGEX_PATTERN): + """ Constructs a BasicSMILESTokenizer. + Parameters + ---------- + + regex: string + SMILES token regex + + """ + self.regex_pattern = regex_pattern + self.regex = re.compile(self.regex_pattern) + + def tokenize(self, text): + """ Basic Tokenization of a SMILES. 
+ """ + tokens = [token for token in self.regex.findall(text)] + return tokens + + +def load_vocab(vocab_file): + """Loads a vocabulary file into a dictionary.""" + vocab = collections.OrderedDict() + with open(vocab_file, "r", encoding="utf-8") as reader: + tokens = reader.readlines() + for index, token in enumerate(tokens): + token = token.rstrip("\n") + vocab[token] = index + return vocab diff --git a/deepchem/feat/tests/3ws9_ligand.sdf b/deepchem/feat/tests/data/3ws9_ligand.sdf similarity index 100% rename from deepchem/feat/tests/3ws9_ligand.sdf rename to deepchem/feat/tests/data/3ws9_ligand.sdf diff --git a/deepchem/feat/tests/3ws9_protein_fixer_rdkit.pdb b/deepchem/feat/tests/data/3ws9_protein_fixer_rdkit.pdb similarity index 100% rename from deepchem/feat/tests/3ws9_protein_fixer_rdkit.pdb rename to deepchem/feat/tests/data/3ws9_protein_fixer_rdkit.pdb diff --git a/deepchem/feat/tests/data/vocab.txt b/deepchem/feat/tests/data/vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..6a7cad14fa33ceb18fb9fba55d10239f1fc5260b --- /dev/null +++ b/deepchem/feat/tests/data/vocab.txt @@ -0,0 +1,591 @@ +[PAD] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[UNK] +[CLS] +[SEP] +[MASK] +c +C +( +) +O +1 +2 += +N +. 
+n +3 +F +Cl +>> +~ +- +4 +[C@H] +S +[C@@H] +[O-] +Br +# +/ +[nH] +[N+] +s +5 +o +P +[Na+] +[Si] +I +[Na] +[Pd] +[K+] +[K] +[P] +B +[C@] +[C@@] +[Cl-] +6 +[OH-] +\ +[N-] +[Li] +[H] +[2H] +[NH4+] +[c-] +[P-] +[Cs+] +[Li+] +[Cs] +[NaH] +[H-] +[O+] +[BH4-] +[Cu] +7 +[Mg] +[Fe+2] +[n+] +[Sn] +[BH-] +[Pd+2] +[CH] +[I-] +[Br-] +[C-] +[Zn] +[B-] +[F-] +[Al] +[P+] +[BH3-] +[Fe] +[C] +[AlH4] +[Ni] +[SiH] +8 +[Cu+2] +[Mn] +[AlH] +[nH+] +[AlH4-] +[O-2] +[Cr] +[Mg+2] +[NH3+] +[S@] +[Pt] +[Al+3] +[S@@] +[S-] +[Ti] +[Zn+2] +[PH] +[NH2+] +[Ru] +[Ag+] +[S+] +[I+3] +[NH+] +[Ca+2] +[Ag] +9 +[Os] +[Se] +[SiH2] +[Ca] +[Ti+4] +[Ac] +[Cu+] +[S] +[Rh] +[Cl+3] +[cH-] +[Zn+] +[O] +[Cl+] +[SH] +[H+] +[Pd+] +[se] +[PH+] +[I] +[Pt+2] +[C+] +[Mg+] +[Hg] +[W] +[SnH] +[SiH3] +[Fe+3] +[NH] +[Mo] +[CH2+] +%10 +[CH2-] +[CH2] +[n-] +[Ce+4] +[NH-] +[Co] +[I+] +[PH2] +[Pt+4] +[Ce] +[B] +[Sn+2] +[Ba+2] +%11 +[Fe-3] +[18F] +[SH-] +[Pb+2] +[Os-2] +[Zr+4] +[N] +[Ir] +[Bi] +[Ni+2] +[P@] +[Co+2] +[s+] +[As] +[P+3] +[Hg+2] +[Yb+3] +[CH-] +[Zr+2] +[Mn+2] +[CH+] +[In] +[KH] +[Ce+3] +[Zr] +[AlH2-] +[OH2+] +[Ti+3] +[Rh+2] +[Sb] +[S-2] +%12 +[P@@] +[Si@H] +[Mn+4] +p +[Ba] +[NH2-] +[Ge] +[Pb+4] +[Cr+3] +[Au] +[LiH] +[Sc+3] +[o+] +[Rh-3] +%13 +[Br] +[Sb-] +[S@+] +[I+2] +[Ar] +[V] +[Cu-] +[Al-] +[Te] +[13c] +[13C] +[Cl] +[PH4+] +[SiH4] +[te] +[CH3-] +[S@@+] +[Rh+3] +[SH+] +[Bi+3] +[Br+2] +[La] +[La+3] +[Pt-2] +[N@@] +[PH3+] +[N@] +[Si+4] +[Sr+2] +[Al+] +[Pb] +[SeH] +[Si-] +[V+5] +[Y+3] +[Re] +[Ru+] +[Sm] +* +[3H] +[NH2] +[Ag-] +[13CH3] +[OH+] +[Ru+3] +[OH] +[Gd+3] +[13CH2] +[In+3] +[Si@@] +[Si@] +[Ti+2] +[Sn+] +[Cl+2] +[AlH-] +[Pd-2] +[SnH3] +[B+3] +[Cu-2] +[Nd+3] +[Pb+3] +[13cH] +[Fe-4] +[Ga] +[Sn+4] +[Hg+] +[11CH3] +[Hf] +[Pr] +[Y] +[S+2] +[Cd] +[Cr+6] +[Zr+3] +[Rh+] +[CH3] +[N-3] +[Hf+2] +[Th] +[Sb+3] +%14 +[Cr+2] +[Ru+2] +[Hf+4] +[14C] +[Ta] +[Tl+] +[B+] +[Os+4] +[PdH2] +[Pd-] +[Cd+2] +[Co+3] +[S+4] +[Nb+5] +[123I] +[c+] +[Rb+] +[V+2] +[CH3+] +[Ag+2] +[cH+] +[Mn+3] +[Se-] +[As-] +[Eu+3] +[SH2] +[Sm+3] +[IH+] 
+%15 +[OH3+] +[PH3] +[IH2+] +[SH2+] +[Ir+3] +[AlH3] +[Sc] +[Yb] +[15NH2] +[Lu] +[sH+] +[Gd] +[18F-] +[SH3+] +[SnH4] +[TeH] +[Si@@H] +[Ga+3] +[CaH2] +[Tl] +[Ta+5] +[GeH] +[Br+] +[Sr] +[Tl+3] +[Sm+2] +[PH5] +%16 +[N@@+] +[Au+3] +[C-4] +[Nd] +[Ti+] +[IH] +[N@+] +[125I] +[Eu] +[Sn+3] +[Nb] +[Er+3] +[123I-] +[14c] +%17 +[SnH2] +[YH] +[Sb+5] +[Pr+3] +[Ir+] +[N+3] +[AlH2] +[19F] +%18 +[Tb] +[14CH] +[Mo+4] +[Si+] +[BH] +[Be] +[Rb] +[pH] +%19 +%20 +[Xe] +[Ir-] +[Be+2] +[C+4] +[RuH2] +[15NH] +[U+2] +[Au-] +%21 +%22 +[Au+] +[15n] +[Al+2] +[Tb+3] +[15N] +[V+3] +[W+6] +[14CH3] +[Cr+4] +[ClH+] +b +[Ti+6] +[Nd+] +[Zr+] +[PH2+] +[Fm] +[N@H+] +[RuH] +[Dy+3] +%23 +[Hf+3] +[W+4] +[11C] +[13CH] +[Er] +[124I] +[LaH] +[F] +[siH] +[Ga+] +[Cm] +[GeH3] +[IH-] +[U+6] +[SeH+] +[32P] +[SeH-] +[Pt-] +[Ir+2] +[se+] +[U] +[F+] +[BH2] +[As+] +[Cf] +[ClH2+] +[Ni+] +[TeH3] +[SbH2] +[Ag+3] +%24 +[18O] +[PH4] +[Os+2] +[Na-] +[Sb+2] +[V+4] +[Ho+3] +[68Ga] +[PH-] +[Bi+2] +[Ce+2] +[Pd+3] +[99Tc] +[13C@@H] +[Fe+6] +[c] +[GeH2] +[10B] +[Cu+3] +[Mo+2] +[Cr+] +[Pd+4] +[Dy] +[AsH] +[Ba+] +[SeH2] +[In+] +[TeH2] +[BrH+] +[14cH] +[W+] +[13C@H] +[AsH2] +[In+2] +[N+2] +[N@@H+] +[SbH] +[60Co] +[AsH4+] +[AsH3] +[18OH] +[Ru-2] +[Na-2] +[CuH2] +[31P] +[Ti+5] +[35S] +[P@@H] +[ArH] +[Co+] +[Zr-2] +[BH2-] +[131I] +[SH5] +[VH] +[B+2] +[Yb+2] +[14C@H] +[211At] +[NH3+2] +[IrH] +[IrH2] +[Rh-] +[Cr-] +[Sb+] +[Ni+3] +[TaH3] +[Tl+2] +[64Cu] +[Tc] +[Cd+] +[1H] +[15nH] +[AlH2+] +[FH+2] +[BiH3] +[Ru-] +[Mo+6] +[AsH+] +[BaH2] +[BaH] +[Fe+4] +[229Th] +[Th+4] +[As+3] +[NH+3] +[P@H] +[Li-] +[7NaH] +[Bi+] +[PtH+2] +[p-] +[Re+5] +[NiH] +[Ni-] +[Xe+] +[Ca+] +[11c] +[Rh+4] +[AcH] +[HeH] +[Sc+2] +[Mn+] +[UH] +[14CH2] +[SiH4+] +[18OH2] +[Ac-] +[Re+4] +[118Sn] +[153Sm] +[P+2] +[9CH] +[9CH3] +[Y-] +[NiH2] +[Si+2] +[Mn+6] +[ZrH2] +[C-2] +[Bi+5] +[24NaH] +[Fr] +[15CH] +[Se+] +[At] +[P-3] +[124I-] +[CuH2-] +[Nb+4] +[Nb+3] +[MgH] +[Ir+4] +[67Ga+3] +[67Ga] +[13N] +[15OH2] +[2NH] +[Ho] +[Cn] \ No newline at end of file diff --git 
a/deepchem/feat/tests/test_atomic_coordinates.py b/deepchem/feat/tests/test_atomic_coordinates.py index e1ba142e292c80367fa90137247ea50327492e3d..2ec894e377184ec3d34e7b892d1bae41f6d5b057 100644 --- a/deepchem/feat/tests/test_atomic_coordinates.py +++ b/deepchem/feat/tests/test_atomic_coordinates.py @@ -6,11 +6,9 @@ import logging import numpy as np import unittest from deepchem.utils import conformers -from deepchem.feat.atomic_coordinates import get_coords -from deepchem.feat.atomic_coordinates import AtomicCoordinates -from deepchem.feat.atomic_coordinates import NeighborListAtomicCoordinates -from deepchem.feat.atomic_coordinates import NeighborListComplexAtomicCoordinates -from deepchem.feat.atomic_coordinates import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat import AtomicCoordinates +from deepchem.feat import NeighborListAtomicCoordinates +from deepchem.feat import NeighborListComplexAtomicCoordinates logger = logging.getLogger(__name__) @@ -29,6 +27,7 @@ class TestAtomicCoordinates(unittest.TestCase): mol = Chem.MolFromSmiles(smiles) engine = conformers.ConformerGenerator(max_conformers=1) self.mol = engine.generate_conformers(mol) + self.get_angstrom_coords = AtomicCoordinates()._featurize assert self.mol.GetNumConformers() > 0 def test_atomic_coordinates(self): @@ -37,9 +36,7 @@ class TestAtomicCoordinates(unittest.TestCase): """ N = self.mol.GetNumAtoms() atomic_coords_featurizer = AtomicCoordinates() - # TODO(rbharath, joegomes): Why does AtomicCoordinates return a list? Is - # this expected behavior? Need to think about API. 
- coords = atomic_coords_featurizer._featurize(self.mol)[0] + coords = atomic_coords_featurizer._featurize(self.mol) assert isinstance(coords, np.ndarray) assert coords.shape == (N, 3) @@ -49,7 +46,7 @@ class TestAtomicCoordinates(unittest.TestCase): """ nblist_featurizer = NeighborListAtomicCoordinates() N = self.mol.GetNumAtoms() - coords = get_coords(self.mol) + coords = self.get_angstrom_coords(self.mol) nblist_featurizer = NeighborListAtomicCoordinates() nblist = nblist_featurizer._featurize(self.mol)[1] @@ -103,7 +100,7 @@ class TestAtomicCoordinates(unittest.TestCase): # Do a manual distance computation and ensure that selected neighbor is # closest since we set max_num_neighbors = 1 - coords = get_coords(self.mol) + coords = self.get_angstrom_coords(self.mol) for i in range(N): closest_dist = np.inf closest_nbr = None @@ -127,7 +124,7 @@ class TestAtomicCoordinates(unittest.TestCase): cutoff = 4.0 box_size = np.array([10.0, 8.0, 9.0]) N = self.mol.GetNumAtoms() - coords = get_coords(self.mol) + coords = self.get_angstrom_coords(self.mol) featurizer = NeighborListAtomicCoordinates( neighbor_cutoff=cutoff, periodic_box_size=box_size) neighborlist = featurizer._featurize(self.mol)[1] @@ -150,7 +147,7 @@ class TestAtomicCoordinates(unittest.TestCase): max_num_neighbors = 4 complex_featurizer = NeighborListComplexAtomicCoordinates(max_num_neighbors) - system_coords, system_neighbor_list = complex_featurizer._featurize_complex( + system_coords, system_neighbor_list = complex_featurizer._featurize( ligand_file, protein_file) N = system_coords.shape[0] diff --git a/deepchem/feat/tests/test_basic.py b/deepchem/feat/tests/test_basic.py deleted file mode 100644 index 6c78387159a9ffe62df0ca5a3f3ad5c454cc65a4..0000000000000000000000000000000000000000 --- a/deepchem/feat/tests/test_basic.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Test basic molecular features. 
-""" -import numpy as np -import unittest - -from deepchem.feat.basic import MolecularWeight, RDKitDescriptors - - -class TestMolecularWeight(unittest.TestCase): - """ - Test MolecularWeight. - """ - - def setUp(self): - """ - Set up tests. - """ - smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' - from rdkit import Chem - self.mol = Chem.MolFromSmiles(smiles) - self.engine = MolecularWeight() - - def testMW(self): - """ - Test MW. - """ - assert np.allclose(self.engine([self.mol]), 180, atol=0.1) - - -class TestRDKitDescriptors(unittest.TestCase): - """ - Test RDKitDescriptors. - """ - - def setUp(self): - """ - Set up tests. - """ - smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' - from rdkit import Chem - self.mol = Chem.MolFromSmiles(smiles) - self.engine = RDKitDescriptors() - - def testRDKitDescriptors(self): - """ - Test simple descriptors. - """ - descriptors = self.engine([self.mol]) - assert np.allclose( - descriptors[0, self.engine.descriptors.index('ExactMolWt')], - 180, - atol=0.1) diff --git a/deepchem/feat/tests/test_binding_pocket_features.py b/deepchem/feat/tests/test_binding_pocket_features.py index 9d122e85bbe7c27a8b86b7dae35fd1a76244c5c7..4a9ee4c03f8406b338d5ba42251271eb476ac6f8 100644 --- a/deepchem/feat/tests/test_binding_pocket_features.py +++ b/deepchem/feat/tests/test_binding_pocket_features.py @@ -1,5 +1,5 @@ """ -Test Binding Pocket Features. +Test Binding Pocket Features. 
""" import os import numpy as np @@ -19,7 +19,6 @@ class TestBindingPocketFeatures(unittest.TestCase): current_dir = os.path.dirname(os.path.realpath(__file__)) protein_file = os.path.join(current_dir, "../../dock/tests/1jld_protein.pdb") - ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") finder = dc.dock.ConvexHullPocketFinder() pocket_featurizer = dc.feat.BindingPocketFeaturizer() diff --git a/deepchem/feat/tests/test_fingerprints.py b/deepchem/feat/tests/test_circular_fingerprints.py similarity index 65% rename from deepchem/feat/tests/test_fingerprints.py rename to deepchem/feat/tests/test_circular_fingerprints.py index 070165ec39287c5c40f9aca6e6d35329579e8a5a..d2843c367b2e015f3e4e907232edf55086a3c564 100644 --- a/deepchem/feat/tests/test_fingerprints.py +++ b/deepchem/feat/tests/test_circular_fingerprints.py @@ -2,7 +2,7 @@ Test topological fingerprints. """ import unittest -from deepchem.feat import fingerprints as fp +from deepchem.feat import CircularFingerprint class TestCircularFingerprint(unittest.TestCase): @@ -14,24 +14,32 @@ class TestCircularFingerprint(unittest.TestCase): """ Set up tests. """ - smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' self.mol = Chem.MolFromSmiles(smiles) - self.engine = fp.CircularFingerprint() def test_circular_fingerprints(self): """ Test CircularFingerprint. """ - rval = self.engine([self.mol]) - assert rval.shape == (1, self.engine.size) + featurizer = CircularFingerprint() + rval = featurizer([self.mol]) + assert rval.shape == (1, 2048) + + def test_circular_fingerprints_with_1024(self): + """ + Test CircularFingerprint with 1024 size. + """ + featurizer = CircularFingerprint(size=1024) + rval = featurizer([self.mol]) + assert rval.shape == (1, 1024) def test_sparse_circular_fingerprints(self): """ Test CircularFingerprint with sparse encoding. 
""" - self.engine = fp.CircularFingerprint(sparse=True) - rval = self.engine([self.mol]) + featurizer = CircularFingerprint(sparse=True) + rval = featurizer([self.mol]) assert rval.shape == (1,) assert isinstance(rval[0], dict) assert len(rval[0]) @@ -41,8 +49,8 @@ class TestCircularFingerprint(unittest.TestCase): Test CircularFingerprint with sparse encoding and SMILES for each fragment. """ - self.engine = fp.CircularFingerprint(sparse=True, smiles=True) - rval = self.engine([self.mol]) + featurizer = CircularFingerprint(sparse=True, smiles=True) + rval = featurizer([self.mol]) assert rval.shape == (1,) assert isinstance(rval[0], dict) assert len(rval[0]) diff --git a/deepchem/feat/tests/test_contact_fingerprints.py b/deepchem/feat/tests/test_contact_fingerprints.py new file mode 100644 index 0000000000000000000000000000000000000000..6b3f4af561a5c61f55302a1ca9608b71c13a8432 --- /dev/null +++ b/deepchem/feat/tests/test_contact_fingerprints.py @@ -0,0 +1,45 @@ +import os +import unittest +import deepchem as dc + + +class TestContactFeaturizers(unittest.TestCase): + """Test Contact Fingerprints and Voxelizers.""" + + def setUp(self): + # TODO test more formats for ligand + current_dir = os.path.dirname(os.path.realpath(__file__)) + self.protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + self.ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + self.complex_files = [(self.protein_file, self.ligand_file)] + + def test_contact_fingerprint_shape(self): + size = 8 + featurizer = dc.feat.ContactCircularFingerprint(size=size) + features, failures = featurizer.featurize([self.ligand_file], + [self.protein_file]) + assert features.shape == (1, 2 * size) + + def test_contact_voxels_shape(self): + box_width = 48 + voxel_width = 2 + voxels_per_edge = box_width / voxel_width + size = 8 + voxelizer = dc.feat.ContactCircularVoxelizer( + box_width=box_width, voxel_width=voxel_width, size=size) + features, failures = 
voxelizer.featurize([self.ligand_file], + [self.protein_file]) + assert features.shape == (1, voxels_per_edge, voxels_per_edge, + voxels_per_edge, size) + + def test_contact_voxels_flattened(self): + box_width = 48 + voxel_width = 2 + voxels_per_edge = box_width / voxel_width + size = 8 + voxelizer = dc.feat.ContactCircularVoxelizer( + box_width=box_width, voxel_width=voxel_width, size=size, flatten=True) + features, failures = voxelizer.featurize([self.ligand_file], + [self.protein_file]) + assert features.shape == (1, int(size * voxels_per_edge**3)) diff --git a/deepchem/feat/tests/test_convmol.py b/deepchem/feat/tests/test_convmol.py index 399d59134ca9d707c6bce98d140514102e936ab1..50cb53e7d5fa8c74ad429d76c6d49a3b30f11d83 100644 --- a/deepchem/feat/tests/test_convmol.py +++ b/deepchem/feat/tests/test_convmol.py @@ -1,41 +1,37 @@ -from unittest import TestCase - import numpy as np from deepchem.feat import ConvMolFeaturizer from deepchem.feat.mol_graphs import ConvMol from deepchem.molnet import load_bace_classification -class TestConvMol(TestCase): +def get_molecules(): + tasks, all_dataset, transformers = load_bace_classification(featurizer="Raw") + return all_dataset[0].X - def get_molecules(self): - tasks, all_dataset, transformers = load_bace_classification( - featurizer="Raw") - return all_dataset[0].X - def test_mol_ordering(self): - mols = self.get_molecules() - featurizer = ConvMolFeaturizer() - featurized_mols = featurizer.featurize(mols) - for i in range(len(featurized_mols)): - atom_features = featurized_mols[i].atom_features - degree_list = np.expand_dims(featurized_mols[i].degree_list, axis=1) - atom_features = np.concatenate([degree_list, atom_features], axis=1) - featurized_mols[i].atom_features = atom_features +def test_mol_ordering(): + mols = get_molecules() + featurizer = ConvMolFeaturizer() + featurized_mols = featurizer.featurize(mols) + for i in range(len(featurized_mols)): + atom_features = featurized_mols[i].atom_features + degree_list = 
np.expand_dims(featurized_mols[i].degree_list, axis=1) + atom_features = np.concatenate([degree_list, atom_features], axis=1) + featurized_mols[i].atom_features = atom_features - conv_mol = ConvMol.agglomerate_mols(featurized_mols) + conv_mol = ConvMol.agglomerate_mols(featurized_mols) - for start, end in conv_mol.deg_slice.tolist(): - members = conv_mol.membership[start:end] - sorted_members = np.array(sorted(members)) - members = np.array(members) - self.assertTrue(np.all(sorted_members == members)) + for start, end in conv_mol.deg_slice.tolist(): + members = conv_mol.membership[start:end] + sorted_members = np.array(sorted(members)) + members = np.array(members) + assert np.all(sorted_members == members) - conv_mol_atom_features = conv_mol.get_atom_features() + conv_mol_atom_features = conv_mol.get_atom_features() - adj_number = 0 - for start, end in conv_mol.deg_slice.tolist(): - deg_features = conv_mol_atom_features[start:end] - adj_number_array = deg_features[:, 0] - self.assertTrue(np.all(adj_number_array == adj_number)) - adj_number += 1 + adj_number = 0 + for start, end in conv_mol.deg_slice.tolist(): + deg_features = conv_mol_atom_features[start:end] + adj_number_array = deg_features[:, 0] + assert np.all(adj_number_array == adj_number) + adj_number += 1 diff --git a/deepchem/feat/tests/test_coulomb_matrices.py b/deepchem/feat/tests/test_coulomb_matrices.py index b8fbb848ddca3293705a78e6391162d765eef591..0d950a20063719e783be444dd5065976befaec99 100644 --- a/deepchem/feat/tests/test_coulomb_matrices.py +++ b/deepchem/feat/tests/test_coulomb_matrices.py @@ -4,119 +4,176 @@ Tests for Coulomb matrix calculation. import numpy as np import unittest -from deepchem.feat import coulomb_matrices as cm +from deepchem.feat import CoulombMatrix, CoulombMatrixEig from deepchem.utils import conformers class TestCoulombMatrix(unittest.TestCase): """ - Tests for CoulombMatrix. - """ + Tests for CoulombMatrix. + """ def setUp(self): """ - Set up tests. 
- """ - smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + Set up tests. + """ from rdkit import Chem + from rdkit.Chem import AllChem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' mol = Chem.MolFromSmiles(smiles) - engine = conformers.ConformerGenerator(max_conformers=1) - self.mol = engine.generate_conformers(mol) - assert self.mol.GetNumConformers() > 0 + self.mol_with_no_conf = mol + + # with one conformer + mol_with_one_conf = Chem.AddHs(mol) + AllChem.EmbedMolecule(mol_with_one_conf, AllChem.ETKDG()) + self.mol_with_one_conf = mol_with_one_conf + + # with multiple conformers + self.num_confs = 4 + engine = conformers.ConformerGenerator(max_conformers=self.num_confs) + self.mol_with_multi_conf = engine.generate_conformers(mol) + + # include explicit hydrogens + self.num_atoms = mol_with_one_conf.GetNumAtoms() + assert self.num_atoms == 21 + assert self.mol_with_one_conf.GetNumConformers() == 1 + assert self.mol_with_multi_conf.GetNumConformers() == self.num_confs def test_coulomb_matrix(self): """ - Test CoulombMatrix. - """ - f = cm.CoulombMatrix(self.mol.GetNumAtoms()) - rval = f([self.mol]) - assert rval.shape == (1, self.mol.GetNumConformers(), - self.mol.GetNumAtoms(), self.mol.GetNumAtoms()) + Test CoulombMatrix. + """ + f = CoulombMatrix(self.num_atoms) + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, self.num_atoms, self.num_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, self.num_atoms, self.num_atoms) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, self.num_atoms, self.num_atoms) def test_coulomb_matrix_padding(self): """ - Test CoulombMatrix with padding. - """ - max_atoms = self.mol.GetNumAtoms() * 2 - f = cm.CoulombMatrix(max_atoms=max_atoms) - rval = f([self.mol]) - assert rval.shape == (1, self.mol.GetNumConformers(), max_atoms, max_atoms) + Test CoulombMatrix with padding. 
+ """ + max_atoms = self.num_atoms * 2 + f = CoulombMatrix(max_atoms=max_atoms) + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, max_atoms, max_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, max_atoms, max_atoms) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, max_atoms, max_atoms) def test_upper_tri_coulomb_matrix(self): """ - Test upper triangular CoulombMatrix. - """ - f = cm.CoulombMatrix(self.mol.GetNumAtoms(), upper_tri=True) - rval = f([self.mol]) - size = np.triu_indices(self.mol.GetNumAtoms())[0].size - assert rval.shape == (1, self.mol.GetNumConformers(), size) + Test upper triangular CoulombMatrix. + """ + f = CoulombMatrix(self.num_atoms, upper_tri=True) + size = np.triu_indices(self.num_atoms)[0].size + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, size) def test_upper_tri_coulomb_matrix_padding(self): """ Test upper triangular CoulombMatrix with padding. """ - f = cm.CoulombMatrix(max_atoms=self.mol.GetNumAtoms() * 2, upper_tri=True) - rval = f([self.mol]) - size = np.triu_indices(self.mol.GetNumAtoms() * 2)[0].size - assert rval.shape == (1, self.mol.GetNumConformers(), size) + max_atoms = self.num_atoms * 2 + f = CoulombMatrix(max_atoms=max_atoms, upper_tri=True) + size = np.triu_indices(max_atoms)[0].size + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, size) def test_coulomb_matrix_no_hydrogens(self): """ - Test hydrogen removal. 
- """ - from rdkit import Chem - mol = Chem.RemoveHs(self.mol) - assert mol.GetNumAtoms() < self.mol.GetNumAtoms() - f = cm.CoulombMatrix( - max_atoms=mol.GetNumAtoms(), remove_hydrogens=True, upper_tri=True) - rval = f([self.mol]) # use the version with hydrogens - size = np.triu_indices(mol.GetNumAtoms())[0].size - assert rval.shape == (1, mol.GetNumConformers(), size) + Test hydrogen removal. + """ + num_atoms_with_no_H = self.mol_with_no_conf.GetNumAtoms() + assert num_atoms_with_no_H < self.num_atoms + f = CoulombMatrix( + max_atoms=num_atoms_with_no_H, remove_hydrogens=True, upper_tri=True) + size = np.triu_indices(num_atoms_with_no_H)[0].size + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, size) def test_coulomb_matrix_hydrogens(self): """ - Test no hydrogen removal. - """ - f = cm.CoulombMatrix( - max_atoms=self.mol.GetNumAtoms(), - remove_hydrogens=False, - upper_tri=True) - rval = f([self.mol]) - size = np.triu_indices(self.mol.GetNumAtoms())[0].size - assert rval.shape == (1, self.mol.GetNumConformers(), size) + Test no hydrogen removal. + """ + f = CoulombMatrix( + max_atoms=self.num_atoms, remove_hydrogens=False, upper_tri=True) + size = np.triu_indices(self.num_atoms)[0].size + rval = f([self.mol_with_no_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, size) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, size) class TestCoulombMatrixEig(unittest.TestCase): """ - Tests for CoulombMatrixEig. - """ + Tests for CoulombMatrixEig. + """ def setUp(self): """ - Set up tests. - """ - smiles = '[H]C([H])([H])[H]' + Set up tests. 
+ """ from rdkit import Chem + from rdkit.Chem import AllChem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' mol = Chem.MolFromSmiles(smiles) - mol = Chem.AddHs(mol) - engine = conformers.ConformerGenerator(max_conformers=1) - self.mol = engine.generate_conformers(mol) - assert self.mol.GetNumConformers() > 0 + self.mol_with_no_conf = mol + + # with one conformer + mol_with_one_conf = Chem.AddHs(mol) + AllChem.EmbedMolecule(mol_with_one_conf, AllChem.ETKDG()) + self.mol_with_one_conf = mol_with_one_conf + + # with multiple conformers + self.num_confs = 4 + engine = conformers.ConformerGenerator(max_conformers=self.num_confs) + self.mol_with_multi_conf = engine.generate_conformers(mol) + + # include explicit hydrogens + self.num_atoms = mol_with_one_conf.GetNumAtoms() + assert self.num_atoms == 21 + assert self.mol_with_one_conf.GetNumConformers() == 1 + assert self.mol_with_multi_conf.GetNumConformers() == self.num_confs def test_coulomb_matrix_eig(self): """ - Test CoulombMatrixEig. - """ - f = cm.CoulombMatrixEig(self.mol.GetNumAtoms()) - rval = f([self.mol]) - assert rval.shape == (1, self.mol.GetNumConformers(), - self.mol.GetNumAtoms()) + Test CoulombMatrixEig. 
+ """ + f = CoulombMatrixEig(self.num_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, self.num_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, self.num_atoms) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, self.num_atoms) def test_coulomb_matrix_eig_padding(self): """ Test padding of CoulombMatixEig """ - self.max_atoms = 29 - f = cm.CoulombMatrixEig(self.max_atoms) - rval = f([self.mol]) - assert rval.shape == (1, self.mol.GetNumConformers(), self.max_atoms) + max_atoms = 2 * self.num_atoms + f = CoulombMatrixEig(max_atoms=max_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, max_atoms) + rval = f([self.mol_with_one_conf]) + assert rval.shape == (1, max_atoms) + rval = f([self.mol_with_multi_conf]) + assert rval.shape == (1, self.num_confs, max_atoms) diff --git a/deepchem/feat/tests/test_features.py b/deepchem/feat/tests/test_features.py deleted file mode 100644 index 3888b15a09fde1be9e8a6d0bbd751cfc08861165..0000000000000000000000000000000000000000 --- a/deepchem/feat/tests/test_features.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -Test featurizer class. -""" -import unittest - -from deepchem.feat import ConvMolFeaturizer, CircularFingerprint -from deepchem.feat.basic import MolecularWeight - - -class TestFeaturizer(unittest.TestCase): - """ - Tests for Featurizer. - """ - - def setUp(self): - """ - Set up tests. - """ - smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' - from rdkit import Chem - self.mol = Chem.MolFromSmiles(smiles) - - def test_featurizer(self): - """ - Test basic functionality of Featurizer. - """ - f = MolecularWeight() - rval = f([self.mol]) - assert rval.shape == (1, 1) - - def test_flatten_conformers(self): - """ - Calculate molecule-level features for a multiconformer molecule. 
- """ - f = MolecularWeight() - rval = f([self.mol]) - assert rval.shape == (1, 1) - - def test_convmol_hashable(self): - featurizer1 = ConvMolFeaturizer(atom_properties=['feature']) - featurizer2 = ConvMolFeaturizer(atom_properties=['feature']) - featurizer3 = ConvMolFeaturizer() - - d = set() - d.add(featurizer1) - d.add(featurizer2) - d.add(featurizer3) - - self.assertEqual(2, len(d)) - featurizers = [featurizer1, featurizer2, featurizer3] - - for featurizer in featurizers: - self.assertTrue(featurizer in featurizers) - - def test_circularfingerprint_hashable(self): - featurizer1 = CircularFingerprint() - featurizer2 = CircularFingerprint() - featurizer3 = CircularFingerprint(size=5) - - d = set() - d.add(featurizer1) - d.add(featurizer2) - d.add(featurizer3) - - self.assertEqual(2, len(d)) - featurizers = [featurizer1, featurizer2, featurizer3] - - for featurizer in featurizers: - self.assertTrue(featurizer in featurizers) diff --git a/deepchem/feat/tests/test_graph_data.py b/deepchem/feat/tests/test_graph_data.py new file mode 100644 index 0000000000000000000000000000000000000000..b087c8aafd586721fcf231ebb202f377a4409322 --- /dev/null +++ b/deepchem/feat/tests/test_graph_data.py @@ -0,0 +1,102 @@ +import unittest +import numpy as np +from deepchem.feat.graph_data import GraphData, BatchGraphData + + +class TestGraph(unittest.TestCase): + + def test_graph_data(self): + num_nodes, num_node_features = 5, 32 + num_edges, num_edge_features = 6, 32 + node_features = np.random.random_sample((num_nodes, num_node_features)) + edge_features = np.random.random_sample((num_edges, num_edge_features)) + edge_index = np.array([ + [0, 1, 2, 2, 3, 4], + [1, 2, 0, 3, 4, 0], + ]) + node_pos_features = None + + graph = GraphData( + node_features=node_features, + edge_index=edge_index, + edge_features=edge_features, + node_pos_features=node_pos_features) + + assert graph.num_nodes == num_nodes + assert graph.num_node_features == num_node_features + assert graph.num_edges == 
num_edges + assert graph.num_edge_features == num_edge_features + + # check convert function + pyg_graph = graph.to_pyg_graph() + from torch_geometric.data import Data + assert isinstance(pyg_graph, Data) + + dgl_graph = graph.to_dgl_graph() + from dgl import DGLGraph + assert isinstance(dgl_graph, DGLGraph) + + def test_invalid_graph_data(self): + with self.assertRaises(ValueError): + invalid_node_features_type = list(np.random.random_sample((5, 32))) + edge_index = np.array([ + [0, 1, 2, 2, 3, 4], + [1, 2, 0, 3, 4, 0], + ]) + _ = GraphData( + node_features=invalid_node_features_type, + edge_index=edge_index, + ) + + with self.assertRaises(ValueError): + node_features = np.random.random_sample((5, 32)) + invalid_edge_index_shape = np.array([ + [0, 1, 2, 2, 3, 4], + [1, 2, 0, 3, 4, 5], + ]) + _ = GraphData( + node_features=node_features, + edge_index=invalid_edge_index_shape, + ) + + with self.assertRaises(ValueError): + node_features = np.random.random_sample((5, 5)) + invalid_edge_index_shape = np.array([ + [0, 1, 2, 2, 3, 4], + [1, 2, 0, 3, 4, 0], + [2, 2, 1, 4, 0, 3], + ],) + _ = GraphData( + node_features=node_features, + edge_index=invalid_edge_index_shape, + ) + + with self.assertRaises(TypeError): + node_features = np.random.random_sample((5, 32)) + _ = GraphData(node_features=node_features) + + def test_batch_graph_data(self): + num_nodes_list, num_edge_list = [3, 4, 5], [2, 4, 5] + num_node_features, num_edge_features = 32, 32 + edge_index_list = [ + np.array([[0, 1], [1, 2]]), + np.array([[0, 1, 2, 3], [1, 2, 0, 2]]), + np.array([[0, 1, 2, 3, 4], [1, 2, 3, 4, 0]]), + ] + + graph_list = [ + GraphData( + node_features=np.random.random_sample((num_nodes_list[i], + num_node_features)), + edge_index=edge_index_list[i], + edge_features=np.random.random_sample((num_edge_list[i], + num_edge_features)), + node_pos_features=None) for i in range(len(num_edge_list)) + ] + batch = BatchGraphData(graph_list) + + assert batch.num_nodes == sum(num_nodes_list) + assert 
batch.num_node_features == num_node_features + assert batch.num_edges == sum(num_edge_list) + assert batch.num_edge_features == num_edge_features + assert batch.graph_index.shape == (sum(num_nodes_list),) diff --git a/deepchem/feat/tests/test_graph_features.py b/deepchem/feat/tests/test_graph_features.py index 76bb4a432433fb9e47b2a77dd2ddcb7a88b678d5..ddcb3dd67c3b82f815e5874dcb83cd3db4a3601f 100644 --- a/deepchem/feat/tests/test_graph_features.py +++ b/deepchem/feat/tests/test_graph_features.py @@ -1,10 +1,6 @@ """ -Tests for ConvMolFeaturizer. +Tests for ConvMolFeaturizer. """ -__author__ = "Han Altae-Tran and Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import unittest import os import numpy as np @@ -123,5 +119,5 @@ class TestAtomicConvFeaturizer(unittest.TestCase): max_num_neighbors=max_num_neighbors, neighbor_cutoff=neighbor_cutoff) - features, _ = featurizer.featurize_complexes([ligand_file, ligand_file], - [protein_file, protein_file]) + features, _ = featurizer.featurize([ligand_file, ligand_file], + [protein_file, protein_file]) diff --git a/deepchem/feat/tests/test_grid_featurizers.py b/deepchem/feat/tests/test_grid_featurizers.py new file mode 100644 index 0000000000000000000000000000000000000000..3cc96eeba8338b986a1d534fc1ec7f0cc77634c9 --- /dev/null +++ b/deepchem/feat/tests/test_grid_featurizers.py @@ -0,0 +1,91 @@ +import os +import deepchem as dc + + +def test_charge_voxelizer(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + + cutoff = 4.5 + box_width = 16 + voxel_width = 1.0 + voxelizer = dc.feat.ChargeVoxelizer( + cutoff=cutoff, box_width=box_width, voxel_width=voxel_width) + features, failures = voxelizer.featurize([ligand_file], [protein_file]) + # TODO: Add shape test + + +def test_salt_bridge_voxelizer(): + current_dir = 
os.path.dirname(os.path.realpath(__file__)) + protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + + cutoff = 4.5 + box_width = 16 + voxel_width = 1.0 + voxelizer = dc.feat.SaltBridgeVoxelizer( + cutoff=cutoff, box_width=box_width, voxel_width=voxel_width) + features, failures = voxelizer.featurize([ligand_file], [protein_file]) + # TODO: Add shape test + + +def test_cation_pi_voxelizer(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + + cutoff = 4.5 + box_width = 16 + voxel_width = 1.0 + voxelizer = dc.feat.CationPiVoxelizer( + cutoff=cutoff, box_width=box_width, voxel_width=voxel_width) + features, failures = voxelizer.featurize([ligand_file], [protein_file]) + # TODO: Add shape test + + +def test_pi_stack_voxelizer(): + current_dir = os.path.dirname(os.path.realpath(__file__)) + protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + + cutoff = 4.5 + box_width = 16 + voxel_width = 1.0 + voxelizer = dc.feat.PiStackVoxelizer( + cutoff=cutoff, box_width=box_width, voxel_width=voxel_width) + features, failures = voxelizer.featurize([ligand_file], [protein_file]) + # TODO: Add shape test + + +# # TODO: This is failing, something about the hydrogen bond counting? 
+# def test_hydrogen_bond_counter(): +# current_dir = os.path.dirname(os.path.realpath(__file__)) +# protein_file = os.path.join(current_dir, 'data', +# '3ws9_protein_fixer_rdkit.pdb') +# ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') +# +# cutoff = 4.5 +# featurizer = dc.feat.HydrogenBondCounter(cutoff=cutoff) +# features, failures = featurizer.featurize([ligand_file], [protein_file]) +# # TODO: Add shape test +# +# +# # TODO: This is failing, something about the hydrogen bond counting? +# def test_hydrogen_bond_voxelizer(): +# current_dir = os.path.dirname(os.path.realpath(__file__)) +# protein_file = os.path.join(current_dir, 'data', +# '3ws9_protein_fixer_rdkit.pdb') +# ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') +# +# cutoff = 4.5 +# box_width = 16 +# voxel_width = 1.0 +# voxelizer = dc.feat.HydrogenBondVoxelizer( +# cutoff=cutoff, box_width=box_width, voxel_width=voxel_width) +# features, failures = voxelizer.featurize([ligand_file], [protein_file]) +# # TODO: Add shape test diff --git a/deepchem/feat/tests/test_maccs_keys_fingerprint.py b/deepchem/feat/tests/test_maccs_keys_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..c2a443806f55ff2cef5789c2ebc0342bfae257f5 --- /dev/null +++ b/deepchem/feat/tests/test_maccs_keys_fingerprint.py @@ -0,0 +1,25 @@ +import unittest + +from deepchem.feat import MACCSKeysFingerprint + + +class TestMACCSKeysFingerprint(unittest.TestCase): + """ + Test MACCSKeysFingerprint. + """ + + def setUp(self): + """ + Set up tests. + """ + from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + self.mol = Chem.MolFromSmiles(smiles) + + def test_maccs_key_fingerprint(self): + """ + Test simple fingerprint.
+ """ + featurizer = MACCSKeysFingerprint() + feature_sum = featurizer([self.mol]) + assert feature_sum.shape == (1, 167) diff --git a/deepchem/feat/tests/test_materials_featurizers.py b/deepchem/feat/tests/test_materials_featurizers.py index a8e877d0274c2eea2ae917a5c5e346921ad843d2..b714e0c8a50808a5c03aa6ee112ad1412d5ad9dd 100644 --- a/deepchem/feat/tests/test_materials_featurizers.py +++ b/deepchem/feat/tests/test_materials_featurizers.py @@ -1,10 +1,10 @@ """ Test featurizers for inorganic crystals. """ -import numpy as np import unittest +import numpy as np -from deepchem.feat.materials_featurizers import ElementPropertyFingerprint, SineCoulombMatrix, StructureGraphFeaturizer +from deepchem.feat import ElementPropertyFingerprint, SineCoulombMatrix, CGCNNFeaturizer, ElemNetFeaturizer class TestMaterialFeaturizers(unittest.TestCase): @@ -63,20 +63,36 @@ class TestMaterialFeaturizers(unittest.TestCase): Test SCM featurizer. """ - featurizer = SineCoulombMatrix(max_atoms=1) + featurizer = SineCoulombMatrix(max_atoms=3) features = featurizer.featurize([self.struct_dict]) assert len(features) == 1 - assert np.isclose(features[0], 1244, atol=.5) + assert features.shape == (1, 3) + assert np.isclose(features[0][0], 1244, atol=.5) - def test_structure_graph_featurizer(self): + def test_cgcnn_featurizer(self): """ - Test StructureGraphFeaturizer. + Test CGCNNFeaturizer. """ - featurizer = StructureGraphFeaturizer(radius=3.0, max_neighbors=6) - features = featurizer.featurize([self.struct_dict]) + featurizer = CGCNNFeaturizer(radius=3.0, max_neighbors=6, step=0.3) + graph_features = featurizer.featurize([self.struct_dict]) + + assert graph_features[0].num_nodes == 1 + assert graph_features[0].num_edges == 6 + assert graph_features[0].node_features.shape == (1, 92) + assert graph_features[0].edge_index.shape == (2, 6) + assert graph_features[0].edge_features.shape == (6, 11) + + def test_elemnet_featurizer(self): + """ + Test ElemNetFeaturizer. 
+ """ + + featurizer = ElemNetFeaturizer() + features = featurizer.featurize([self.formula]) - assert len(features[0]) == 3 - assert features[0][0] == 26 - assert features[0][1].shape == (6, 16) + assert features.shape[1] == 86 + assert np.isclose(features[0][13], 0.6666667, atol=0.01) + assert np.isclose(features[0][38], 0.33333334, atol=0.01) + assert np.isclose(features.sum(), 1.0, atol=0.01) diff --git a/deepchem/feat/tests/test_mol2vec_fingerprint.py b/deepchem/feat/tests/test_mol2vec_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..80c73fd2d00a7506f0b1da26348f72b2b37feba7 --- /dev/null +++ b/deepchem/feat/tests/test_mol2vec_fingerprint.py @@ -0,0 +1,25 @@ +import unittest + +from deepchem.feat import Mol2VecFingerprint + + +class TestMol2VecFingerprint(unittest.TestCase): + """ + Test Mol2VecFingerprint. + """ + + def setUp(self): + """ + Set up tests. + """ + from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + self.mol = Chem.MolFromSmiles(smiles) + + def test_mol2vec_fingerprint(self): + """ + Test simple fingerprint. 
+ """ + featurizer = Mol2VecFingerprint() + feature = featurizer([self.mol]) + assert feature.shape == (1, 300) diff --git a/deepchem/feat/tests/test_mol_graph_conv_featurizer.py b/deepchem/feat/tests/test_mol_graph_conv_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..f28a4bb8b39ac78487ca20036a9979468248d4b8 --- /dev/null +++ b/deepchem/feat/tests/test_mol_graph_conv_featurizer.py @@ -0,0 +1,72 @@ +import unittest + +from deepchem.feat import MolGraphConvFeaturizer + + +class TestMolGraphConvFeaturizer(unittest.TestCase): + + def test_default_featurizer(self): + smiles = ["C1=CC=CN=C1", "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C"] + featurizer = MolGraphConvFeaturizer() + graph_feat = featurizer.featurize(smiles) + assert len(graph_feat) == 2 + + # assert "C1=CC=CN=C1" + assert graph_feat[0].num_nodes == 6 + assert graph_feat[0].num_node_features == 30 + assert graph_feat[0].num_edges == 12 + + # assert "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C" + assert graph_feat[1].num_nodes == 22 + assert graph_feat[1].num_node_features == 30 + assert graph_feat[1].num_edges == 44 + + def test_featurizer_with_use_edge(self): + smiles = ["C1=CC=CN=C1", "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C"] + featurizer = MolGraphConvFeaturizer(use_edges=True) + graph_feat = featurizer.featurize(smiles) + assert len(graph_feat) == 2 + + # assert "C1=CC=CN=C1" + assert graph_feat[0].num_nodes == 6 + assert graph_feat[0].num_node_features == 30 + assert graph_feat[0].num_edges == 12 + assert graph_feat[0].num_edge_features == 11 + + # assert "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C" + assert graph_feat[1].num_nodes == 22 + assert graph_feat[1].num_node_features == 30 + assert graph_feat[1].num_edges == 44 + assert graph_feat[1].num_edge_features == 11 + + def test_featurizer_with_use_chirality(self): + smiles = ["C1=CC=CN=C1", "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C"] + featurizer = MolGraphConvFeaturizer(use_chirality=True) + graph_feat = featurizer.featurize(smiles) + 
assert len(graph_feat) == 2 + + # assert "C1=CC=CN=C1" + assert graph_feat[0].num_nodes == 6 + assert graph_feat[0].num_node_features == 32 + assert graph_feat[0].num_edges == 12 + + # assert "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C" + assert graph_feat[1].num_nodes == 22 + assert graph_feat[1].num_node_features == 32 + assert graph_feat[1].num_edges == 44 + + def test_featurizer_with_use_partial_charge(self): + smiles = ["C1=CC=CN=C1", "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C"] + featurizer = MolGraphConvFeaturizer(use_partial_charge=True) + graph_feat = featurizer.featurize(smiles) + assert len(graph_feat) == 2 + + # assert "C1=CC=CN=C1" + assert graph_feat[0].num_nodes == 6 + assert graph_feat[0].num_node_features == 31 + assert graph_feat[0].num_edges == 12 + + # assert "O=C(NCc1cc(OC)c(O)cc1)CCCC/C=C/C(C)C" + assert graph_feat[1].num_nodes == 22 + assert graph_feat[1].num_node_features == 31 + assert graph_feat[1].num_edges == 44 diff --git a/deepchem/feat/tests/test_mol_graphs.py b/deepchem/feat/tests/test_mol_graphs.py index e8c96806b8822c07c84abcf76d4045f9c87644f4..a63008a6d1d041bd16b623e2f5ebe60e9d6bd249 100644 --- a/deepchem/feat/tests/test_mol_graphs.py +++ b/deepchem/feat/tests/test_mol_graphs.py @@ -1,16 +1,9 @@ """ -Tests for Molecular Graph data structures. +Tests for Molecular Graph data structures. """ -__author__ = "Han Altae-Tran and Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import unittest -import os -import sys import numpy as np from deepchem.feat.mol_graphs import ConvMol -from deepchem.feat.mol_graphs import MultiConvMol class TestMolGraphs(unittest.TestCase): @@ -20,11 +13,10 @@ class TestMolGraphs(unittest.TestCase): def test_construct_conv_mol(self): """Tests that ConvMols can be constructed without crash.""" - N_feat = 4 # Artificial feature array. 
atom_features = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) adj_list = [[1], [0, 2], [1]] - mol = ConvMol(atom_features, adj_list) + _ = ConvMol(atom_features, adj_list) def test_conv_mol_deg_slice(self): """Tests that deg_slice works properly.""" @@ -86,20 +78,19 @@ class TestMolGraphs(unittest.TestCase): """Test AggrMol.agglomerate_mols.""" molecules = [] - #### First example molecule - N_feat = 4 + # First example molecule # Artificial feature array. atom_features = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) adj_list = [[1], [0, 2], [1]] molecules.append(ConvMol(atom_features, adj_list)) - #### Second example molecule + # Second example molecule atom_features = np.array([[20, 21, 22, 23], [24, 25, 26, 27], [28, 29, 30, 31], [32, 33, 34, 35]]) adj_list = [[1, 2], [0, 3], [0, 3], [1, 2]] molecules.append(ConvMol(atom_features, adj_list)) - ### Third example molecule + # Third example molecule atom_features = np.array([[40, 41, 42, 43], [44, 45, 46, 47], [48, 49, 50, 51], [52, 53, 54, 55], [56, 57, 58, 59]]) @@ -137,7 +128,6 @@ class TestMolGraphs(unittest.TestCase): def test_null_conv_mol(self): """Running Null AggrMol Test. Only works when max_deg=6 and min_deg=0""" num_feat = 4 - min_deg = 0 null_mol = ConvMol.get_null_mol(num_feat) deg_adj_lists = null_mol.get_deg_adjacency_lists() diff --git a/deepchem/feat/tests/test_mordred_descriptors.py b/deepchem/feat/tests/test_mordred_descriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..5cf2e4f613c24a9665eedc1de55786dbc760797d --- /dev/null +++ b/deepchem/feat/tests/test_mordred_descriptors.py @@ -0,0 +1,49 @@ +import numpy as np +import unittest + +from deepchem.feat import MordredDescriptors + + +class TestMordredDescriptors(unittest.TestCase): + """ + Test MordredDescriptors. + """ + + def setUp(self): + """ + Set up tests. 
+ """ + from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + self.mol = Chem.MolFromSmiles(smiles) + + def test_mordred_descriptors(self): + """ + Test simple descriptors. + """ + featurizer = MordredDescriptors() + descriptors = featurizer([self.mol]) + assert descriptors.shape == (1, 1613) + assert np.allclose(descriptors[0][0:3], + np.array([9.54906713, 9.03919229, 1.0])) + + def test_mordred_descriptors_with_3D_info(self): + """ + Test simple descriptors with 3D info + """ + from rdkit import Chem + from rdkit.Chem import AllChem + featurizer = MordredDescriptors(ignore_3D=False) + descriptors = featurizer([self.mol]) + assert descriptors.shape == (1, 1826) + assert np.allclose(descriptors[0][780:784], np.array([0.0, 0.0, 0.0, 0.0])) + + # calculate coordinates + mol = self.mol + mol_with_conf = Chem.AddHs(mol) + AllChem.EmbedMolecule(mol_with_conf, AllChem.ETKDG()) + descriptors = featurizer([mol_with_conf]) + assert descriptors.shape == (1, 1826) + # not zero values + assert not np.allclose(descriptors[0][780:784], + np.array([0.0, 0.0, 0.0, 0.0])) diff --git a/deepchem/feat/tests/test_one_hot.py b/deepchem/feat/tests/test_one_hot.py deleted file mode 100644 index 9706fc48bd62ed834ebf0c2e6182ac74c00411cd..0000000000000000000000000000000000000000 --- a/deepchem/feat/tests/test_one_hot.py +++ /dev/null @@ -1,18 +0,0 @@ -from unittest import TestCase - -import deepchem as dc - - -class TestOneHotFeaturizer(TestCase): - """Tests for the one-hot featurizer.""" - - def test_featurize(self): - from rdkit import Chem - smiles = ["Cn1c(=O)c2c(ncn2C)n(C)c1=O", "CC(=O)N1CN(C(C)=O)C(O)C1O"] - mols = [Chem.MolFromSmiles(smile) for smile in smiles] - featurizer = dc.feat.one_hot.OneHotFeaturizer(dc.feat.one_hot.zinc_charset) - one_hots = featurizer.featurize(mols) - untransformed = featurizer.untransform(one_hots) - assert len(smiles) == len(untransformed) - for i in range(len(smiles)): - assert smiles[i] == untransformed[i][0] diff --git 
a/deepchem/feat/tests/test_one_hot_featurizer.py b/deepchem/feat/tests/test_one_hot_featurizer.py new file mode 100644 index 0000000000000000000000000000000000000000..d526a43cb308fbf6437ad3f2d89855cc791a7b27 --- /dev/null +++ b/deepchem/feat/tests/test_one_hot_featurizer.py @@ -0,0 +1,64 @@ +import unittest + +import numpy as np + +from deepchem.feat import OneHotFeaturizer +from deepchem.feat.molecule_featurizers.one_hot_featurizer import ZINC_CHARSET + + +class TestOneHotFeaturizer(unittest.TestCase): + """ + Test OneHotFeaturizer. + """ + + def test_onehot_featurizer(self): + """ + Test simple one hot encoding. + """ + from rdkit import Chem + length = len(ZINC_CHARSET) + 1 + smiles = 'CC(=O)Oc1ccccc1C(=O)O' + mol = Chem.MolFromSmiles(smiles) + featurizer = OneHotFeaturizer() + feature = featurizer([mol]) + assert feature.shape == (1, 100, length) + + # untransform + undo_smiles = featurizer.untransform(feature[0]) + assert smiles == undo_smiles + + def test_onehot_featurizer_with_max_length(self): + """ + Test one hot encoding with max_length. + """ + from rdkit import Chem + length = len(ZINC_CHARSET) + 1 + smiles = 'CC(=O)Oc1ccccc1C(=O)O' + mol = Chem.MolFromSmiles(smiles) + featurizer = OneHotFeaturizer(max_length=120) + feature = featurizer([mol]) + assert feature.shape == (1, 120, length) + + # untransform + undo_smiles = featurizer.untransform(feature[0]) + assert smiles == undo_smiles + + def test_correct_transformation(self): + """ + Test correct one hot encoding.
+ """ + from rdkit import Chem + charset = ['C', 'N', '=', ')', '(', 'O'] + smiles = 'CN=C=O' + mol = Chem.MolFromSmiles(smiles) + featurizer = OneHotFeaturizer(charset=charset, max_length=100) + feature = featurizer([mol]) + assert np.allclose(feature[0][0], np.array([1, 0, 0, 0, 0, 0, 0])) + assert np.allclose(feature[0][1], np.array([0, 1, 0, 0, 0, 0, 0])) + assert np.allclose(feature[0][2], np.array([0, 0, 1, 0, 0, 0, 0])) + assert np.allclose(feature[0][3], np.array([1, 0, 0, 0, 0, 0, 0])) + assert np.allclose(feature[0][4], np.array([0, 0, 1, 0, 0, 0, 0])) + assert np.allclose(feature[0][5], np.array([0, 0, 0, 0, 0, 1, 0])) + # untranform + undo_smiles = featurizer.untransform(feature[0]) + assert smiles == undo_smiles diff --git a/deepchem/feat/tests/test_puchem_fingerprint.py b/deepchem/feat/tests/test_puchem_fingerprint.py new file mode 100644 index 0000000000000000000000000000000000000000..10f3ed46fb4a01de1dd18be6ce3abd38c56921d1 --- /dev/null +++ b/deepchem/feat/tests/test_puchem_fingerprint.py @@ -0,0 +1,25 @@ +import unittest + +from deepchem.feat import PubChemFingerprint + + +class TestPubChemFingerprint(unittest.TestCase): + """ + Test PubChemFingerprint. + """ + + def setUp(self): + """ + Set up tests. + """ + from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + self.mol = Chem.MolFromSmiles(smiles) + + def test_pubchem_fingerprint(self): + """ + Test simple fingerprint. + """ + featurizer = PubChemFingerprint() + feature_sum = featurizer([self.mol]) + assert feature_sum.shape == (1, 881) diff --git a/deepchem/feat/tests/test_rdkit_descriptors.py b/deepchem/feat/tests/test_rdkit_descriptors.py new file mode 100644 index 0000000000000000000000000000000000000000..ad90eaefb72e4e1cd2950e5932b9e2a0eb1eae06 --- /dev/null +++ b/deepchem/feat/tests/test_rdkit_descriptors.py @@ -0,0 +1,61 @@ +""" +Test basic molecular features. 
+""" +import numpy as np +import unittest + +from deepchem.feat import RDKitDescriptors + + +class TestRDKitDescriptors(unittest.TestCase): + """ + Test RDKitDescriptors. + """ + + def setUp(self): + """ + Set up tests. + """ + from rdkit import Chem + smiles = 'CC(=O)OC1=CC=CC=C1C(=O)O' + self.mol = Chem.MolFromSmiles(smiles) + self.featurizer = RDKitDescriptors() + + def test_rdkit_descriptors(self): + """ + Test simple descriptors. + """ + featurizer = RDKitDescriptors() + descriptors = featurizer([self.mol]) + assert descriptors.shape == (1, len(featurizer.descriptors)) + assert np.allclose( + descriptors[0, featurizer.descriptors.index('ExactMolWt')], + 180, + atol=0.1) + + def test_rdkit_descriptors_on_smiles(self): + """ + Test invocation on raw smiles. + """ + featurizer = RDKitDescriptors() + descriptors = featurizer('CC(=O)OC1=CC=CC=C1C(=O)O') + assert descriptors.shape == (1, len(featurizer.descriptors)) + assert np.allclose( + descriptors[0, featurizer.descriptors.index('ExactMolWt')], + 180, + atol=0.1) + + def test_rdkit_descriptors_with_use_fragment(self): + """ + Test with use_fragment + """ + from rdkit.Chem import Descriptors + featurizer = RDKitDescriptors(use_fragment=False) + descriptors = featurizer(self.mol) + assert descriptors.shape == (1, len(featurizer.descriptors)) + all_descriptors = Descriptors.descList + assert len(featurizer.descriptors) < len(all_descriptors) + assert np.allclose( + descriptors[0, featurizer.descriptors.index('ExactMolWt')], + 180, + atol=0.1) diff --git a/deepchem/feat/tests/test_rdkit_grid_features.py b/deepchem/feat/tests/test_rdkit_grid_features.py index 5bd449e6dd52ecea1a79c4de68ba217094149f13..25ae86e9697895ae00b147e66781db3e13a7d127 100644 --- a/deepchem/feat/tests/test_rdkit_grid_features.py +++ b/deepchem/feat/tests/test_rdkit_grid_features.py @@ -7,8 +7,9 @@ import unittest import numpy as np import pytest +from deepchem.feat.complex_featurizers import rdkit_grid_featurizer as rgf + np.random.seed(123) 
-from deepchem.feat import rdkit_grid_featurizer as rgf def random_string(length, chars=None): @@ -26,9 +27,9 @@ class TestHelperFunctions(unittest.TestCase): def setUp(self): # TODO test more formats for ligand current_dir = os.path.dirname(os.path.realpath(__file__)) - self.protein_file = os.path.join(current_dir, + self.protein_file = os.path.join(current_dir, 'data', '3ws9_protein_fixer_rdkit.pdb') - self.ligand_file = os.path.join(current_dir, '3ws9_ligand.sdf') + self.ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') def test_load_molecule(self): # adding hydrogens and charges is tested in dc.utils @@ -194,13 +195,13 @@ class TestPiInteractions(unittest.TestCase): # load and sanitize two real molecules _, self.prot = rgf.load_molecule( - os.path.join(current_dir, '3ws9_protein_fixer_rdkit.pdb'), + os.path.join(current_dir, 'data', '3ws9_protein_fixer_rdkit.pdb'), add_hydrogens=False, calc_charges=False, sanitize=True) _, self.lig = rgf.load_molecule( - os.path.join(current_dir, '3ws9_ligand.sdf'), + os.path.join(current_dir, 'data', '3ws9_ligand.sdf'), add_hydrogens=False, calc_charges=False, sanitize=True) @@ -290,8 +291,8 @@ class TestPiInteractions(unittest.TestCase): def test_compute_cation_pi(self): # TODO find better example, currently dicts are empty - dicts1 = rgf.compute_cation_pi(self.prot, self.lig) - dicts2 = rgf.compute_cation_pi(self.lig, self.prot) + _ = rgf.compute_cation_pi(self.prot, self.lig) + _ = rgf.compute_cation_pi(self.lig, self.prot) def test_compute_binding_pocket_cation_pi(self): # TODO find better example, currently dicts are empty @@ -319,9 +320,9 @@ class TestFeaturizationFunctions(unittest.TestCase): def setUp(self): current_dir = os.path.dirname(os.path.realpath(__file__)) - self.protein_file = os.path.join(current_dir, + self.protein_file = os.path.join(current_dir, 'data', '3ws9_protein_fixer_rdkit.pdb') - self.ligand_file = os.path.join(current_dir, '3ws9_ligand.sdf') + self.ligand_file = 
os.path.join(current_dir, 'data', '3ws9_ligand.sdf') def test_compute_all_ecfp(self): _, mol = rgf.load_molecule(self.ligand_file) @@ -470,8 +471,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): # test if default parameters work featurizer = rgf.RdkitGridFeaturizer() self.assertIsInstance(featurizer, rgf.RdkitGridFeaturizer) - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) def test_example_featurizer(self): @@ -482,8 +483,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): ecfp_power=9, splif_power=9, flatten=True) - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) def test_force_flatten(self): @@ -491,8 +492,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): featurizer = rgf.RdkitGridFeaturizer( feature_types=['ecfp_hashed'], flatten=False) featurizer.flatten = True # False should be ignored with ecfp_hashed - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) self.assertEqual(feature_tensor.shape, (1, 2 * 2**featurizer.ecfp_power)) @@ -508,8 +509,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): splif_power=splif_power, flatten=False, sanitize=True) - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) voxel_total_len = ( 2**ecfp_power + @@ -524,8 +525,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): ecfp_power=ecfp_power, 
splif_power=splif_power, sanitize=True) - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) flat_total_len = ( 3 * 2**ecfp_power + @@ -544,8 +545,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): self.assertTrue('pi_stack' not in featurizer.feature_types) self.assertTrue('cation_pi' not in featurizer.feature_types) - feature_tensor, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensor, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertIsInstance(feature_tensor, np.ndarray) total_len = voxel_total_len + flat_total_len - 3 - 2**ecfp_power self.assertEqual(feature_tensor.shape, (1, total_len)) @@ -572,8 +573,8 @@ class TestRdkitGridFeaturizer(unittest.TestCase): feature_types=['voxel_combined'], flatten=False, sanitize=True) - feature_tensors, _ = featurizer.featurize_complexes([self.ligand_file], - [self.protein_file]) + feature_tensors, _ = featurizer.featurize([self.ligand_file], + [self.protein_file]) self.assertEqual(feature_tensors.shape, (1, 4, 16, 16, 16, 40)) def test_voxelize(self): diff --git a/deepchem/feat/tests/test_sdf_reader.py b/deepchem/feat/tests/test_sdf_reader.py deleted file mode 100644 index cd6b2dfbbfdb3ddcc95ff6888dd51ea7acb8792d..0000000000000000000000000000000000000000 --- a/deepchem/feat/tests/test_sdf_reader.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Tests for importing .sdf files -""" -__author__ = "Joseph Gomes" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import os -import unittest -import tempfile -import shutil -import deepchem as dc - - -class TestFeaturizedSamples(unittest.TestCase): - """ - Test Featurized Samples class. 
- """ - - def random_test_train_valid_test_split_from_sdf(self): - """Test of singletask CoulombMatrixEig regression on .sdf file.""" - splittype = "random" - input_transforms = [] - output_transforms = ["normalize"] - model_params = {} - tasks = ["atomization_energy"] - task_type = "regression" - task_types = {task: task_type for task in tasks} - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "data/water.sdf") - - featurizer = dc.feat.CoulombMatrixEig(6, remove_hydrogens=False) - loader = dc.data.SDFLoader(tasks=tasks, featurizer=featurizer) - - dataset = loader.featurize(input_file) - - # Splits featurized samples into train/test - splitter = dc.splits.RandomSplitter() - train_dataset, valid_dataset, test_dataset = \ - splitter.train_valid_test_split(dataset) - assert len(train_dataset) == 8 - assert len(valid_dataset) == 1 - assert len(test_dataset) == 1 diff --git a/deepchem/feat/tests/test_smiles_featurizers.py b/deepchem/feat/tests/test_smiles_featurizers.py index 3f12621ca6c73525f4a4b123a4dc7b6b7409d0bf..c7245bba2776e7ce60be41077ac8127c79c4e596 100644 --- a/deepchem/feat/tests/test_smiles_featurizers.py +++ b/deepchem/feat/tests/test_smiles_featurizers.py @@ -1,12 +1,13 @@ -from unittest import TestCase -import numpy as np -from deepchem.feat import SmilesToSeq, SmilesToImage -from deepchem.feat.smiles_featurizers import create_char_to_idx import os +import unittest + +import numpy as np + +from deepchem.feat import create_char_to_idx, SmilesToSeq, SmilesToImage -class TestSmilesFeaturizers(TestCase): - """Tests for SmilesToSeq and SmilesToImage featurizers.""" +class TestSmilesToSeq(unittest.TestCase): + """Tests for SmilesToSeq featurizers.""" def setUp(self): """Setup.""" @@ -20,21 +21,64 @@ class TestSmilesFeaturizers(TestCase): def test_smiles_to_seq_featurize(self): """Test SmilesToSeq featurization.""" - from rdkit import Chem smiles = ["Cn1c(=O)c2c(ncn2C)n(C)c1=O", "CC(=O)N1CN(C(C)=O)C(O)C1O"] - mols 
= [Chem.MolFromSmiles(smile) for smile in smiles] expected_seq_len = self.feat.max_len + 2 * self.feat.pad_len - features = self.feat.featurize(mols) + features = self.feat.featurize(smiles) assert features.shape[0] == len(smiles) assert features.shape[-1] == expected_seq_len def test_reconstruct_from_seq(self): """Test SMILES reconstruction from features.""" smiles = ["Cn1c(=O)c2c(ncn2C)n(C)c1=O"] - from rdkit import Chem - mols = [Chem.MolFromSmiles(smile) for smile in smiles] - features = self.feat.featurize(mols) - + features = self.feat.featurize(smiles) + # not support array style inputs reconstructed_smile = self.feat.smiles_from_seq(features[0]) assert smiles[0] == reconstructed_smile + + +class TestSmilesToImage(unittest.TestCase): + """Tests for SmilesToImage featurizers.""" + + def setUp(self): + """Setup.""" + self.smiles = ["Cn1c(=O)c2c(ncn2C)n(C)c1=O", "CC(=O)N1CN(C(C)=O)C(O)C1O"] + + def test_smiles_to_image(self): + """Test default SmilesToImage""" + featurizer = SmilesToImage() + features = featurizer.featurize(self.smiles) + assert features.shape == (2, 80, 80, 1) + + def test_smiles_to_image_with_res(self): + """Test SmilesToImage with res""" + featurizer = SmilesToImage() + base_features = featurizer.featurize(self.smiles) + featurizer = SmilesToImage(res=0.6) + features = featurizer.featurize(self.smiles) + assert features.shape == (2, 80, 80, 1) + assert not np.allclose(base_features, features) + + def test_smiles_to_image_with_image_size(self): + """Test SmilesToImage with image_size""" + featurizer = SmilesToImage(img_size=100) + features = featurizer.featurize(self.smiles) + assert features.shape == (2, 100, 100, 1) + + def test_smiles_to_image_with_max_len(self): + """Test SmilesToImage with max_len""" + smiles_length = [len(s) for s in self.smiles] + assert smiles_length == [26, 25] + featurizer = SmilesToImage(max_len=25) + features = featurizer.featurize(self.smiles) + assert features[0].shape == (0,) + assert features[1].shape == (80, 
80, 1) + + def test_smiles_to_image_with_img_spec(self): + """Test SmilesToImage with img_spec""" + featurizer = SmilesToImage() + base_features = featurizer.featurize(self.smiles) + featurizer = SmilesToImage(img_spec='engd') + features = featurizer.featurize(self.smiles) + assert features.shape == (2, 80, 80, 4) + assert not np.allclose(base_features, features) diff --git a/deepchem/feat/tests/test_smiles_tokenizer.py b/deepchem/feat/tests/test_smiles_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..b9390985cd4c2375099fd6a7b4ee2afbb435bc52 --- /dev/null +++ b/deepchem/feat/tests/test_smiles_tokenizer.py @@ -0,0 +1,31 @@ +# Requirements - transformers, tokenizers +import os +from unittest import TestCase +from deepchem.feat.smiles_tokenizer import SmilesTokenizer +from transformers import RobertaForMaskedLM + + +class TestSmilesTokenizer(TestCase): + """Tests the SmilesTokenizer to load the USPTO vocab file and a ChemBERTa Masked LM model with pre-trained weights..""" + + def test_tokenize(self): + current_dir = os.path.dirname(os.path.realpath(__file__)) + vocab_path = os.path.join(current_dir, 'data', 'vocab.txt') + tokenized_smiles = [ + 12, 16, 16, 16, 17, 16, 16, 18, 16, 19, 16, 17, 22, 19, 18, 33, 17, 16, + 18, 23, 181, 17, 22, 19, 18, 17, 19, 16, 33, 20, 19, 55, 17, 16, 23, 18, + 17, 33, 17, 19, 18, 35, 20, 19, 18, 16, 20, 22, 16, 16, 22, 16, 21, 23, + 20, 23, 22, 16, 23, 22, 16, 21, 23, 18, 19, 16, 20, 22, 16, 16, 22, 16, + 16, 22, 16, 20, 13 + ] + + model = RobertaForMaskedLM.from_pretrained( + 'seyonec/SMILES_tokenized_PubChem_shard00_50k') + model.num_parameters() + + tokenizer = SmilesTokenizer( + vocab_path, max_len=model.config.max_position_embeddings) + + assert tokenized_smiles == tokenizer.encode( + "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=C1" + ) diff --git a/deepchem/feat/tests/test_splif_fingerprints.py b/deepchem/feat/tests/test_splif_fingerprints.py new 
file mode 100644 index 0000000000000000000000000000000000000000..9b04746940480e01f07305be8c1fad110a88c617 --- /dev/null +++ b/deepchem/feat/tests/test_splif_fingerprints.py @@ -0,0 +1,34 @@ +import unittest +import os +import deepchem as dc + + +class TestSplifFingerprints(unittest.TestCase): + """Test Splif Fingerprint and Voxelizer.""" + + def setUp(self): + # TODO test more formats for ligand + current_dir = os.path.dirname(os.path.realpath(__file__)) + self.protein_file = os.path.join(current_dir, 'data', + '3ws9_protein_fixer_rdkit.pdb') + self.ligand_file = os.path.join(current_dir, 'data', '3ws9_ligand.sdf') + self.complex_files = [(self.protein_file, self.ligand_file)] + + def test_splif_shape(self): + size = 8 + featurizer = dc.feat.SplifFingerprint(size=size) + features, failures = featurizer.featurize([self.ligand_file], + [self.protein_file]) + assert features.shape == (1, 3 * size) + + def test_splif_voxels_shape(self): + box_width = 48 + voxel_width = 2 + voxels_per_edge = int(box_width / voxel_width) + size = 8 + voxelizer = dc.feat.SplifVoxelizer( + box_width=box_width, voxel_width=voxel_width, size=size) + features, failures = voxelizer.featurize([self.ligand_file], + [self.protein_file]) + assert features.shape == (1, voxels_per_edge, voxels_per_edge, + voxels_per_edge, size * 3) diff --git a/deepchem/feat/tests/test_weave.py b/deepchem/feat/tests/test_weave.py new file mode 100644 index 0000000000000000000000000000000000000000..f13a3e577cf5f852ea9cbeec3ea04f4b3e576143 --- /dev/null +++ b/deepchem/feat/tests/test_weave.py @@ -0,0 +1,141 @@ +""" +Tests for weave featurizer. 
+""" +import numpy as np +import deepchem as dc +from deepchem.feat.graph_features import max_pair_distance_pairs + + +def test_max_pair_distance_pairs(): + """Test that max pair distance pairs are computed properly.""" + from rdkit import Chem + # Carbon + mol = Chem.MolFromSmiles('C') + # Test distance 1 + pair_edges = max_pair_distance_pairs(mol, 1) + assert pair_edges.shape == (2, 1) + assert np.all(pair_edges.flatten() == np.array([0, 0])) + # Test distance 2 + pair_edges = max_pair_distance_pairs(mol, 2) + assert pair_edges.shape == (2, 1) + assert np.all(pair_edges.flatten() == np.array([0, 0])) + + # Test alkane + mol = Chem.MolFromSmiles('CCC') + # Test distance 1 + pair_edges = max_pair_distance_pairs(mol, 1) + # 3 self connections and 2 bonds which are both counted twice because of + # symmetry for 7 total + assert pair_edges.shape == (2, 7) + # Test distance 2 + pair_edges = max_pair_distance_pairs(mol, 2) + # Everything is connected at this distance + assert pair_edges.shape == (2, 9) + + +def test_max_pair_distance_infinity(): + """Test that max pair distance pairs are computed properly with infinity distance.""" + from rdkit import Chem + # Test alkane + mol = Chem.MolFromSmiles('CCC') + # Test distance infinity + pair_edges = max_pair_distance_pairs(mol, None) + # Everything is connected at this distance + assert pair_edges.shape == (2, 9) + + # Test pentane + mol = Chem.MolFromSmiles('CCCCC') + # Test distance infinity + pair_edges = max_pair_distance_pairs(mol, None) + # Everything is connected at this distance + assert pair_edges.shape == (2, 25) + + +def test_weave_single_carbon(): + """Test that single carbon atom is featurized properly.""" + mols = ['C'] + featurizer = dc.feat.WeaveFeaturizer() + mol_list = featurizer.featurize(mols) + mol = mol_list[0] + + # Only one carbon + assert mol.get_num_atoms() == 1 + + # Test feature sizes + assert mol.get_num_features() == 75 + + # No bonds, so only 1 pair feature (for the self interaction) + assert 
mol.get_pair_features().shape == (1 * 1, 14) + + +def test_chiral_weave(): + """Test weave features on a molecule with chiral structure.""" + mols = [r"F\C=C\F"] + featurizer = dc.feat.WeaveFeaturizer(use_chirality=True) + mol_list = featurizer.featurize(mols) + mol = mol_list[0] + + # Only 4 atoms + assert mol.get_num_atoms() == 4 + + # Test feature sizes for chirality + assert mol.get_num_features() == 78 + + +def test_weave_alkane(): + """Test on simple alkane""" + mols = ['CCC'] + featurizer = dc.feat.WeaveFeaturizer() + mol_list = featurizer.featurize(mols) + mol = mol_list[0] + + # 3 carbonds in alkane + assert mol.get_num_atoms() == 3 + + # Test feature sizes + assert mol.get_num_features() == 75 + + # Should be a 3x3 interaction grid + assert mol.get_pair_features().shape == (3 * 3, 14) + + +def test_weave_alkane_max_pairs(): + """Test on simple alkane with max pairs distance cutoff""" + mols = ['CCC'] + featurizer = dc.feat.WeaveFeaturizer(max_pair_distance=1) + # mol_list = featurizer.featurize(mols) + # mol = mol_list[0] + from rdkit import Chem + mol = featurizer._featurize(Chem.MolFromSmiles(mols[0])) + + # 3 carbonds in alkane + assert mol.get_num_atoms() == 3 + + # Test feature sizes + assert mol.get_num_features() == 75 + + # Should be a 7x14 interaction grid since there are 7 pairs within graph + # distance 1 (3 self interactions plus 2 bonds counted twice because of + # symmetry) + assert mol.get_pair_features().shape == (7, 14) + + +def test_carbon_nitrogen(): + """Test on carbon nitrogen molecule""" + # Note there is a central nitrogen of degree 4, with 4 carbons + # of degree 1 (connected only to central nitrogen). 
+ mols = ['C[N+](C)(C)C'] + # import rdkit.Chem + # mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.WeaveFeaturizer() + mols = featurizer.featurize(mols) + mol = mols[0] + + # 5 atoms in compound + assert mol.get_num_atoms() == 5 + + # Test feature sizes + assert mol.get_num_features() == 75 + + # Should be a 3x3 interaction grid + assert mol.get_pair_features().shape == (5 * 5, 14) diff --git a/deepchem/hyper/__init__.py b/deepchem/hyper/__init__.py index c383293143969054e338037585b3d3f6ceba224c..3d66948f2ad2537a268390dbc3819a2b2f61e6b0 100644 --- a/deepchem/hyper/__init__.py +++ b/deepchem/hyper/__init__.py @@ -1,2 +1,4 @@ -from deepchem.hyper.grid_search import HyperparamOpt +# flake8: noqa +from deepchem.hyper.base_classes import HyperparamOpt +from deepchem.hyper.grid_search import GridHyperparamOpt from deepchem.hyper.gaussian_process import GaussianProcessHyperparamOpt diff --git a/deepchem/hyper/base_classes.py b/deepchem/hyper/base_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..f446c1995317f3307c2ac3354925c3f6d52805a0 --- /dev/null +++ b/deepchem/hyper/base_classes.py @@ -0,0 +1,129 @@ +import logging +from typing import Any, Callable, Dict, List, Optional, Tuple + +from deepchem.data import Dataset +from deepchem.trans import Transformer +from deepchem.models import Model +from deepchem.metrics import Metric + +logger = logging.getLogger(__name__) + + +def _convert_hyperparam_dict_to_filename(hyper_params: Dict[str, Any]) -> str: + """Helper function that converts a dictionary of hyperparameters to a string that can be a filename. + + Parameters + ---------- + hyper_params: Dict + Maps string of hyperparameter name to int/float/string/list etc. + + Returns + ------- + filename: str + A filename of form "_key1_value1_value2_..._key2..." 
+ """ + filename = "" + keys = sorted(hyper_params.keys()) + for key in keys: + filename += "_%s" % str(key) + value = hyper_params[key] + if isinstance(value, int): + filename += "_%s" % str(value) + elif isinstance(value, float): + filename += "_%f" % value + else: + filename += "%s" % str(value) + return filename + + +class HyperparamOpt(object): + """Abstract superclass for hyperparameter search classes. + + This class is an abstract base class for hyperparameter search + classes in DeepChem. Hyperparameter search is performed on + `dc.models.Model` classes. Each hyperparameter object accepts a + `dc.models.Model` class upon construct. When the `hyperparam_search` + class is invoked, this class is used to construct many different + concrete models which are trained on the specified training set and + evaluated on a given validation set. + + Different subclasses of `HyperparamOpt` differ in the choice of + strategy for searching the hyperparameter evaluation space. This + class itself is an abstract superclass and should never be directly + instantiated. + """ + + def __init__(self, model_builder: Callable[..., Model]): + """Initialize Hyperparameter Optimizer. + + Note this is an abstract constructor which should only be used by + subclasses. + + Parameters + ---------- + model_builder: constructor function. + This parameter must be constructor function which returns an + object which is an instance of `dc.models.Model`. This function + must accept two arguments, `model_params` of type `dict` and + `model_dir`, a string specifying a path to a model directory. + See the example. + """ + if self.__class__.__name__ == "HyperparamOpt": + raise ValueError( + "HyperparamOpt is an abstract superclass and cannot be directly instantiated. 
\ + You probably want to instantiate a concrete subclass instead.") + self.model_builder = model_builder + + def hyperparam_search(self, + params_dict: Dict, + train_dataset: Dataset, + valid_dataset: Dataset, + metric: Metric, + output_transformers: List[Transformer] = [], + use_max: bool = True, + logdir: Optional[str] = None, + **kwargs) -> Tuple[Model, Dict, Dict]: + """Conduct Hyperparameter search. + + This method defines the common API shared by all hyperparameter + optimization subclasses. Different classes will implement + different search methods but they must all follow this common API. + + Parameters + ---------- + params_dict: Dict + Dictionary mapping strings to values. Note that the + precise semantics of `params_dict` will change depending on the + optimizer that you're using. Depending on the type of + hyperparameter optimization, these values can be + ints/floats/strings/lists/etc. Read the documentation for the + concrete hyperparameter optimization subclass you're using to + learn more about what's expected. + train_dataset: Dataset + dataset used for training + valid_dataset: Dataset + dataset used for validation(optimization on valid scores) + metric: Metric + metric used for evaluation + output_transformers: list[Transformer] + Transformers for evaluation. This argument is needed since + `train_dataset` and `valid_dataset` may have been transformed + for learning and need the transform to be inverted before + the metric can be evaluated on a model. + use_max: bool, optional + If True, return the model with the highest score. Else return + model with the minimum score. + logdir: str, optional + The directory in which to store created models. If not set, will + use a temporary directory. 
+ + Returns + ------- + Tuple[`best_model`, `best_hyperparams`, `all_scores`] + `(best_model, best_hyperparams, all_scores)` where `best_model` is + an instance of `dc.models.Model`, `best_hyperparams` is a + dictionary of parameters, and `all_scores` is a dictionary mapping + string representations of hyperparameter sets to validation + scores. + """ + raise NotImplementedError diff --git a/deepchem/hyper/gaussian_process.py b/deepchem/hyper/gaussian_process.py index 251869fb37e130d78b85fb73aa7c5f911dc26bab..307b944d2bdb9d89b03d2042ac68ff8b3e77a047 100644 --- a/deepchem/hyper/gaussian_process.py +++ b/deepchem/hyper/gaussian_process.py @@ -1,307 +1,338 @@ """ Contains class for gaussian process hyperparameter optimizations. """ +import os import logging -import numpy as np import tempfile -import os -from deepchem.hyper.grid_search import HyperparamOpt -from deepchem.utils.evaluate import Evaluator -from deepchem.molnet.run_benchmark_models import benchmark_classification, benchmark_regression +from typing import Dict, List, Optional, Tuple, Union + +from deepchem.data import Dataset +from deepchem.trans import Transformer +from deepchem.metrics import Metric +from deepchem.hyper.base_classes import HyperparamOpt +from deepchem.hyper.base_classes import _convert_hyperparam_dict_to_filename logger = logging.getLogger(__name__) +def compute_parameter_range(params_dict: Dict, + search_range: Union[int, float, Dict] + ) -> Dict[str, Tuple[str, List[float]]]: + """Convenience Function to compute parameter search space. + + Parameters + ---------- + params_dict: Dict + Dictionary mapping strings to Ints/Floats. An explicit list of + parameters is computed with `search_range`. The optimization range + computed is specified in the documentation for `search_range` + below. + search_range: int/float/Dict (default 4) + The `search_range` specifies the range of parameter values to + search for. 
If `search_range` is an int/float, it is used as the + global search range for parameters. This creates a search + problem on the following space: + + optimization on [initial value / search_range, + initial value * search_range] + + If `search_range` is a dict, it must contain the same keys as + for `params_dict`. In this case, `search_range` specifies a + per-parameter search range. This is useful in case some + parameters have a larger natural range than others. For a given + hyperparameter `hp` this would create the following search + range: + + optimization on hp on [initial value[hp] / search_range[hp], + initial value[hp] * search_range[hp]] + + Returns + ------- + param_range: Dict + Dictionary mapping hyperparameter names to tuples. Each tuple is + of form `(value_type, value_range)` where `value_type` is a string + that is either "int" or "cont" and `value_range` is a list of two + elements of the form `[low, hi]`. This format is expected by + pyGPGO which `GaussianProcessHyperparamOpt` uses to perform + optimization. + """ + # Range of optimization + param_range = {} + if isinstance(search_range, dict): + if sorted(params_dict.keys()) != sorted(search_range.keys()): + raise ValueError( + "If search_range is provided as a dictionary, it must have the same keys as params_dict." 
+ ) + elif (not isinstance(search_range, int)) and (not isinstance( + search_range, float)): + raise ValueError("search_range must be a dict or int or float.") + for hp, value in params_dict.items(): + if isinstance(search_range, dict): + hp_search_range = search_range[hp] + else: + # We know from guard above that this is an int/float + hp_search_range = search_range + if isinstance(value, int): + value_range = [value // hp_search_range, value * hp_search_range] + param_range[hp] = ("int", value_range) + elif isinstance(value, float): + value_range = [value / hp_search_range, value * hp_search_range] + param_range[hp] = ("cont", value_range) + return param_range + + class GaussianProcessHyperparamOpt(HyperparamOpt): """ Gaussian Process Global Optimization(GPGO) + + This class uses Gaussian Process optimization to select + hyperparameters. Underneath the hood it uses pyGPGO to optimize + models. If you don't have pyGPGO installed, you won't be able to use + this class. + + Note that `params_dict` has a different semantics than for + `GridHyperparamOpt`. `param_dict[hp]` must be an int/float and is + used as the center of a search range. + + Examples + -------- + This example shows the type of constructor function expected. + + >>> import sklearn + >>> import deepchem as dc + >>> optimizer = dc.hyper.GaussianProcessHyperparamOpt(lambda **p: dc.models.GraphConvModel(n_tasks=1, **p)) + + Here's a more sophisticated example that shows how to optimize only + some parameters of a model. In this case, we have some parameters we + want to optimize, and others which we don't. To handle this type of + search, we create a `model_builder` which hard codes some arguments + (in this case, `n_tasks` and `n_features` which are properties of a + dataset and not hyperparameters to search over.) + + >>> def model_builder(**model_params): + ... n_layers = model_params['layers'] + ... layer_width = model_params['width'] + ... dropout = model_params['dropout'] + ... 
return dc.models.MultitaskClassifier( + ... n_tasks=5, + ... n_features=100, + ... layer_sizes=[layer_width]*n_layers, + ... dropouts=dropout + ... ) + >>> optimizer = dc.hyper.GaussianProcessHyperparamOpt(model_builder) + + Notes + ----- + This class requires pyGPGO to be installed. """ - def hyperparam_search( - self, - params_dict, - train_dataset, - valid_dataset, - output_transformers, - metric, - direction=True, - n_features=1024, - n_tasks=1, - max_iter=20, - search_range=4, - hp_invalid_list=[ - 'seed', 'nb_epoch', 'penalty_type', 'dropouts', 'bypass_dropouts', - 'n_pair_feat', 'fit_transformers', 'min_child_weight', - 'max_delta_step', 'subsample', 'colsample_bylevel', - 'colsample_bytree', 'reg_alpha', 'reg_lambda', 'scale_pos_weight', - 'base_score' - ], - log_file='GPhypersearch.log'): - """Perform hyperparams search using a gaussian process assumption - - params_dict include single-valued parameters being optimized, - which should only contain int, float and list of int(float) - - parameters with names in hp_invalid_list will not be changed. - - For Molnet models, self.model_class is model name in string, - params_dict = dc.molnet.preset_hyper_parameters.hps[self.model_class] + def hyperparam_search(self, + params_dict: Dict, + train_dataset: Dataset, + valid_dataset: Dataset, + metric: Metric, + output_transformers: List[Transformer] = [], + use_max: bool = True, + logdir: Optional[str] = None, + max_iter: int = 20, + search_range: Union[int, float, Dict] = 4, + logfile: Optional[str] = None, + **kwargs): + """Perform hyperparameter search using a gaussian process. Parameters ---------- - params_dict: dict - dict including parameters and their initial values - parameters not suitable for optimization can be added to hp_invalid_list - train_dataset: dc.data.Dataset struct + params_dict: Dict + Maps hyperparameter names (strings) to possible parameter + values. The semantics of this list are different than for + `GridHyperparamOpt`. 
`params_dict[hp]` must map to an int/float, + which is used as the center of a search with radius + `search_range` since pyGPGO can only optimize numerical + hyperparameters. + train_dataset: Dataset dataset used for training - valid_dataset: dc.data.Dataset struct + valid_dataset: Dataset dataset used for validation(optimization on valid scores) - output_transformers: list of dc.trans.Transformer - transformers for evaluation - metric: list of dc.metrics.Metric + metric: Metric metric used for evaluation - direction: bool + output_transformers: list[Transformer] + Transformers for evaluation. This argument is needed since + `train_dataset` and `valid_dataset` may have been transformed + for learning and need the transform to be inverted before + the metric can be evaluated on a model. + use_max: bool, (default True) + Specifies whether to maximize or minimize `metric`. maximization(True) or minimization(False) - n_features: int - number of input features - n_tasks: int - number of tasks - max_iter: int + logdir: str, optional, (default None) + The directory in which to store created models. If not set, will + use a temporary directory. + max_iter: int, (default 20) number of optimization trials - search_range: int(float) - optimization on [initial values / search_range, - initial values * search_range] - hp_invalid_list: list - names of parameters that should not be optimized - logfile: string - name of log file, hyperparameters and results for each trial will be recorded + search_range: int/float/Dict (default 4) + The `search_range` specifies the range of parameter values to + search for. If `search_range` is an int/float, it is used as the + global search range for parameters. This creates a search + problem on the following space: + + optimization on [initial value / search_range, + initial value * search_range] + + If `search_range` is a dict, it must contain the same keys as + for `params_dict`. 
In this case, `search_range` specifies a + per-parameter search range. This is useful in case some + parameters have a larger natural range than others. For a given + hyperparameter `hp` this would create the following search + range: + + optimization on hp on [initial value[hp] / search_range[hp], + initial value[hp] * search_range[hp]] + logfile: str, optional (default None) + Name of logfile to write results to. If specified, this is must + be a valid file. If not specified, results of hyperparameter + search will be written to `logdir/.txt`. + Returns ------- - hyper_parameters: dict - params_dict with all optimized values - valid_performance_opt: float - best performance on valid dataset - + Tuple[`best_model`, `best_hyperparams`, `all_scores`] + `(best_model, best_hyperparams, all_scores)` where `best_model` is + an instance of `dc.model.Model`, `best_hyperparams` is a + dictionary of parameters, and `all_scores` is a dictionary mapping + string representations of hyperparameter sets to validation + scores. """ + try: + from pyGPGO.covfunc import matern32 + from pyGPGO.acquisition import Acquisition + from pyGPGO.surrogates.GaussianProcess import GaussianProcess + from pyGPGO.GPGO import GPGO + except ModuleNotFoundError: + raise ImportError("This class requires pyGPGO to be installed.") + + # Specify logfile + log_file = None + if logfile: + log_file = logfile + elif logdir is not None: + # Make logdir if it doesn't exist. 
+ if not os.path.exists(logdir): + os.makedirs(logdir, exist_ok=True) + log_file = os.path.join(logdir, "results.txt") + + # setup range + param_range = compute_parameter_range(params_dict, search_range) + param_keys = list(param_range.keys()) + + # Stores all results + all_results = {} + # Store all model references so we don't have to reload + all_models = {} + # Stores all model locations + model_locations = {} + + # Demarcating internal function for readability + def optimizing_function(**placeholders): + """Private Optimizing function - assert len(metric) == 1, 'Only use one metric' - hyper_parameters = params_dict - hp_list = list(hyper_parameters.keys()) - for hp in hp_invalid_list: - if hp in hp_list: - hp_list.remove(hp) - - hp_list_class = [hyper_parameters[hp].__class__ for hp in hp_list] - assert set(hp_list_class) <= set([list, int, float]) - # Float or int hyper parameters(ex. batch_size, learning_rate) - hp_list_single = [ - hp_list[i] for i in range(len(hp_list)) if not hp_list_class[i] is list - ] - # List of float or int hyper parameters(ex. 
layer_sizes) - hp_list_multiple = [(hp_list[i], len(hyper_parameters[hp_list[i]])) - for i in range(len(hp_list)) - if hp_list_class[i] is list] - - # Number of parameters - n_param = len(hp_list_single) - if len(hp_list_multiple) > 0: - n_param = n_param + sum([hp[1] for hp in hp_list_multiple]) - # Range of optimization - param_range = [] - for hp in hp_list_single: - if hyper_parameters[hp].__class__ is int: - param_range.append((('int'), [ - hyper_parameters[hp] // search_range, - hyper_parameters[hp] * search_range - ])) - else: - param_range.append((('cont'), [ - hyper_parameters[hp] / search_range, - hyper_parameters[hp] * search_range - ])) - for hp in hp_list_multiple: - if hyper_parameters[hp[0]][0].__class__ is int: - param_range.extend([(('int'), [ - hyper_parameters[hp[0]][i] // search_range, - hyper_parameters[hp[0]][i] * search_range - ]) for i in range(hp[1])]) - else: - param_range.extend([(('cont'), [ - hyper_parameters[hp[0]][i] / search_range, - hyper_parameters[hp[0]][i] * search_range - ]) for i in range(hp[1])]) - - # Dummy names - param_name = ['l' + format(i, '02d') for i in range(20)] - param = dict(zip(param_name[:n_param], param_range)) - - data_dir = os.environ['DEEPCHEM_DATA_DIR'] - log_file = os.path.join(data_dir, log_file) - - def f(l00=0, - l01=0, - l02=0, - l03=0, - l04=0, - l05=0, - l06=0, - l07=0, - l08=0, - l09=0, - l10=0, - l11=0, - l12=0, - l13=0, - l14=0, - l15=0, - l16=0, - l17=0, - l18=0, - l19=0): - """ Optimizing function Take in hyper parameter values and return valid set performances Parameters ---------- - l00~l19: int or float - placeholders for hyperparameters being optimized, - hyper_parameters dict is rebuilt based on input values of placeholders + placeholders: keyword arguments + Should be various hyperparameters as specified in `param_keys` above. 
Returns: -------- valid_scores: float valid set performances """ - args = locals() - # Input hyper parameters - i = 0 - for hp in hp_list_single: - hyper_parameters[hp] = float(args[param_name[i]]) - if param_range[i][0] == 'int': - hyper_parameters[hp] = int(hyper_parameters[hp]) - i = i + 1 - for hp in hp_list_multiple: - hyper_parameters[hp[0]] = [ - float(args[param_name[j]]) for j in range(i, i + hp[1]) - ] - if param_range[i][0] == 'int': - hyper_parameters[hp[0]] = map(int, hyper_parameters[hp[0]]) - i = i + hp[1] - - logger.info(hyper_parameters) - # Run benchmark - with open(log_file, 'a') as f: - # Record hyperparameters - f.write(str(hyper_parameters)) - f.write('\n') - if isinstance(self.model_class, str) or isinstance( - self.model_class, unicode): + hyper_parameters = {} + for hp in param_keys: + if param_range[hp][0] == "int": + # param values are always float in BO, so this line converts float to int + # see : https://github.com/josejimenezluna/pyGPGO/issues/10 + hyper_parameters[hp] = int(placeholders[hp]) + else: + hyper_parameters[hp] = float(placeholders[hp]) + logger.info("Running hyperparameter set: %s" % str(hyper_parameters)) + if log_file: + with open(log_file, 'w+') as f: + # Record hyperparameters + f.write("Parameters: %s" % str(hyper_parameters)) + f.write('\n') + + hp_str = _convert_hyperparam_dict_to_filename(hyper_parameters) + if logdir is not None: + filename = "model%s" % hp_str + model_dir = os.path.join(logdir, filename) + logger.info("model_dir is %s" % model_dir) try: - train_scores, valid_scores, _ = benchmark_classification( - train_dataset, - valid_dataset, - valid_dataset, ['task_placeholder'] * n_tasks, - output_transformers, - n_features, - metric, - self.model_class, - hyper_parameters=hyper_parameters) - except AssertionError: - train_scores, valid_scores, _ = benchmark_regression( - train_dataset, - valid_dataset, - valid_dataset, ['task_placeholder'] * n_tasks, - output_transformers, - n_features, - metric, - 
self.model_class, - hyper_parameters=hyper_parameters) - score = valid_scores[self.model_class][metric[0].name] + os.makedirs(model_dir) + except OSError: + if not os.path.isdir(model_dir): + logger.info("Error creating model_dir, using tempfile directory") + model_dir = tempfile.mkdtemp() else: model_dir = tempfile.mkdtemp() - model = self.model_class(hyper_parameters, model_dir) - model.fit(train_dataset, **hyper_parameters) + # Add it on to the information needed for the constructor + hyper_parameters["model_dir"] = model_dir + model = self.model_builder(**hyper_parameters) + model.fit(train_dataset) + try: model.save() - evaluator = Evaluator(model, valid_dataset, output_transformers) - multitask_scores = evaluator.compute_model_performance(metric) - score = multitask_scores[metric[0].name] + # Some models autosave + except NotImplementedError: + pass - with open(log_file, 'a') as f: - # Record performances - f.write(str(score)) - f.write('\n') - # GPGO maximize performance by default, set performance to its negative value for minimization - if direction: + multitask_scores = model.evaluate(valid_dataset, [metric], + output_transformers) + score = multitask_scores[metric.name] + + if log_file: + with open(log_file, 'a') as f: + # Record performances + f.write("Score: %s" % str(score)) + f.write('\n') + # Store all results + all_results[hp_str] = score + # Store reference to model + all_models[hp_str] = model + model_locations[hp_str] = model_dir + # GPGO maximize performance by default + # set performance to its negative value for minimization + if use_max: return score else: return -score - import pyGPGO - from pyGPGO.covfunc import matern32 - from pyGPGO.acquisition import Acquisition - from pyGPGO.surrogates.GaussianProcess import GaussianProcess - from pyGPGO.GPGO import GPGO + # execute GPGO cov = matern32() gp = GaussianProcess(cov) acq = Acquisition(mode='ExpectedImprovement') - gpgo = GPGO(gp, acq, f, param) + gpgo = GPGO(gp, acq, optimizing_function, 
param_range) logger.info("Max number of iteration: %i" % max_iter) gpgo.run(max_iter=max_iter) hp_opt, valid_performance_opt = gpgo.getResult() - # Readout best hyper parameters - i = 0 - for hp in hp_list_single: - hyper_parameters[hp] = float(hp_opt[param_name[i]]) - if param_range[i][0] == 'int': - hyper_parameters[hp] = int(hyper_parameters[hp]) - i = i + 1 - for hp in hp_list_multiple: - hyper_parameters[hp[0]] = [ - float(hp_opt[param_name[j]]) for j in range(i, i + hp[1]) - ] - if param_range[i][0] == 'int': - hyper_parameters[hp[0]] = map(int, hyper_parameters[hp[0]]) - i = i + hp[1] + hyper_parameters = {} + for hp in param_keys: + if param_range[hp][0] == "int": + hyper_parameters[hp] = int(hp_opt[hp]) + else: + # FIXME: Incompatible types in assignment + hyper_parameters[hp] = float(hp_opt[hp]) # type: ignore + hp_str = _convert_hyperparam_dict_to_filename(hyper_parameters) + + # Let's fetch the model with the best parameters + best_model = all_models[hp_str] # Compare best model to default hyperparameters - with open(log_file, 'a') as f: - # Record hyperparameters - f.write(str(params_dict)) - f.write('\n') - if isinstance(self.model_class, str) or isinstance(self.model_class, - unicode): - try: - train_scores, valid_scores, _ = benchmark_classification( - train_dataset, - valid_dataset, - valid_dataset, ['task_placeholder'] * n_tasks, - output_transformers, - n_features, - metric, - self.model_class, - hyper_parameters=params_dict) - except AssertionError: - train_scores, valid_scores, _ = benchmark_regression( - train_dataset, - valid_dataset, - valid_dataset, ['task_placeholder'] * n_tasks, - output_transformers, - n_features, - metric, - self.model_class, - hyper_parameters=params_dict) - score = valid_scores[self.model_class][metric[0].name] + if log_file: with open(log_file, 'a') as f: - # Record performances - f.write(str(score)) + # Record hyperparameters + f.write("params_dict:") + f.write(str(params_dict)) f.write('\n') - if not direction: - 
score = -score - if score > valid_performance_opt: - # Optimized model is better, return hyperparameters - return params_dict, score # Return default hyperparameters - return hyper_parameters, valid_performance_opt + return best_model, hyper_parameters, all_results diff --git a/deepchem/hyper/grid_search.py b/deepchem/hyper/grid_search.py index 59a0d1ffabc59462a549269e76f5ee5eeeecf986..851c2355b465d47599797ffd1d610c617e286eed 100644 --- a/deepchem/hyper/grid_search.py +++ b/deepchem/hyper/grid_search.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python2 -# -*- coding: utf-8 -*- """ Contains basic hyperparameter optimizations. """ @@ -9,42 +7,110 @@ import itertools import tempfile import shutil import collections +import logging from functools import reduce from operator import mul -from deepchem.utils.evaluate import Evaluator -from deepchem.utils.save import log +from typing import Dict, List, Optional +from deepchem.data import Dataset +from deepchem.trans import Transformer +from deepchem.metrics import Metric +from deepchem.hyper.base_classes import HyperparamOpt +from deepchem.hyper.base_classes import _convert_hyperparam_dict_to_filename -class HyperparamOpt(object): +logger = logging.getLogger(__name__) + + +class GridHyperparamOpt(HyperparamOpt): """ - Provides simple hyperparameter search capabilities. + Provides simple grid hyperparameter search capabilities. + + This class performs a grid hyperparameter search over the specified + hyperparameter space. This implementation is simple and simply does + a direct iteration over all possible hyperparameters and doesn't use + parallelization to speed up the search. + + Examples + -------- + This example shows the type of constructor function expected. + + >>> import sklearn + >>> import deepchem as dc + >>> optimizer = dc.hyper.GridHyperparamOpt(lambda **p: dc.models.GraphConvModel(**p)) + + Here's a more sophisticated example that shows how to optimize only + some parameters of a model. 
In this case, we have some parameters we + want to optimize, and others which we don't. To handle this type of + search, we create a `model_builder` which hard codes some arguments + (in this case, `n_tasks` and `n_features` which are properties of a + dataset and not hyperparameters to search over.) + + >>> def model_builder(**model_params): + ... n_layers = model_params['layers'] + ... layer_width = model_params['width'] + ... dropout = model_params['dropout'] + ... return dc.models.MultitaskClassifier( + ... n_tasks=5, + ... n_features=100, + ... layer_sizes=[layer_width]*n_layers, + ... dropouts=dropout + ... ) + >>> optimizer = dc.hyper.GridHyperparamOpt(model_builder) + """ - def __init__(self, model_class, verbose=True): - self.model_class = model_class - self.verbose = verbose - - # TODO(rbharath): This function is complicated and monolithic. Is there a nice - # way to refactor this? - def hyperparam_search(self, - params_dict, - train_dataset, - valid_dataset, - output_transformers, - metric, - use_max=True, - logdir=None): + def hyperparam_search( + self, + params_dict: Dict, + train_dataset: Dataset, + valid_dataset: Dataset, + metric: Metric, + output_transformers: List[Transformer] = [], + use_max: bool = True, + logdir: Optional[str] = None, + **kwargs, + ): """Perform hyperparams search according to params_dict. - Each key to hyperparams_dict is a model_param. The values should be a list - of potential values for that hyperparam. + Each key to hyperparams_dict is a model_param. The values should + be a list of potential values for that hyperparam. - TODO(rbharath): This shouldn't be stored in a temporary directory. + Parameters + ---------- + params_dict: Dict + Maps hyperparameter names (strings) to lists of possible + parameter values. 
+ train_dataset: Dataset + dataset used for training + valid_dataset: Dataset + dataset used for validation(optimization on valid scores) + metric: Metric + metric used for evaluation + output_transformers: list[Transformer] + Transformers for evaluation. This argument is needed since + `train_dataset` and `valid_dataset` may have been transformed + for learning and need the transform to be inverted before + the metric can be evaluated on a model. + use_max: bool, optional + If True, return the model with the highest score. Else return + model with the minimum score. + logdir: str, optional + The directory in which to store created models. If not set, will + use a temporary directory. + + Returns + ------- + Tuple[`best_model`, `best_hyperparams`, `all_scores`] + `(best_model, best_hyperparams, all_scores)` where `best_model` is + an instance of `dc.model.Model`, `best_hyperparams` is a + dictionary of parameters, and `all_scores` is a dictionary mapping + string representations of hyperparameter sets to validation + scores. 
""" hyperparams = params_dict.keys() hyperparam_vals = params_dict.values() for hyperparam_list in params_dict.values(): - assert isinstance(hyperparam_list, collections.Iterable) + assert isinstance(hyperparam_list, collections.abc.Iterable) number_combinations = reduce(mul, [len(vals) for vals in hyperparam_vals]) @@ -58,31 +124,38 @@ class HyperparamOpt(object): for ind, hyperparameter_tuple in enumerate( itertools.product(*hyperparam_vals)): model_params = {} - log("Fitting model %d/%d" % (ind + 1, number_combinations), self.verbose) + logger.info("Fitting model %d/%d" % (ind + 1, number_combinations)) + # Construction dictionary mapping hyperparameter names to values + hyper_params = dict(zip(hyperparams, hyperparameter_tuple)) for hyperparam, hyperparam_val in zip(hyperparams, hyperparameter_tuple): model_params[hyperparam] = hyperparam_val - log("hyperparameters: %s" % str(model_params), self.verbose) + logger.info("hyperparameters: %s" % str(model_params)) if logdir is not None: model_dir = os.path.join(logdir, str(ind)) - log("model_dir is %s" % model_dir, self.verbose) + logger.info("model_dir is %s" % model_dir) try: os.makedirs(model_dir) except OSError: if not os.path.isdir(model_dir): - log("Error creating model_dir, using tempfile directory", - self.verbose) + logger.info("Error creating model_dir, using tempfile directory") model_dir = tempfile.mkdtemp() else: model_dir = tempfile.mkdtemp() - - model = self.model_class(model_params, model_dir) + model_params['model_dir'] = model_dir + model = self.model_builder(**model_params) model.fit(train_dataset) + try: + model.save() + # Some models autosave + except NotImplementedError: + pass - evaluator = Evaluator(model, valid_dataset, output_transformers) - multitask_scores = evaluator.compute_model_performance([metric]) + multitask_scores = model.evaluate(valid_dataset, [metric], + output_transformers) valid_score = multitask_scores[metric.name] - all_scores[str(hyperparameter_tuple)] = valid_score + 
hp_str = _convert_hyperparam_dict_to_filename(hyper_params) + all_scores[hp_str] = valid_score if (use_max and valid_score >= best_validation_score) or ( not use_max and valid_score <= best_validation_score): @@ -95,21 +168,18 @@ class HyperparamOpt(object): else: shutil.rmtree(model_dir) - log( - "Model %d/%d, Metric %s, Validation set %s: %f" % - (ind + 1, number_combinations, metric.name, ind, valid_score), - self.verbose) - log("\tbest_validation_score so far: %f" % best_validation_score, - self.verbose) + logger.info("Model %d/%d, Metric %s, Validation set %s: %f" % + (ind + 1, number_combinations, metric.name, ind, valid_score)) + logger.info("\tbest_validation_score so far: %f" % best_validation_score) if best_model is None: - log("No models trained correctly.", self.verbose) + logger.info("No models trained correctly.") # arbitrarily return last model best_model, best_hyperparams = model, hyperparameter_tuple return best_model, best_hyperparams, all_scores - train_evaluator = Evaluator(best_model, train_dataset, output_transformers) - multitask_scores = train_evaluator.compute_model_performance([metric]) + multitask_scores = best_model.evaluate(train_dataset, [metric], + output_transformers) train_score = multitask_scores[metric.name] - log("Best hyperparameters: %s" % str(best_hyperparams), self.verbose) - log("train_score: %f" % train_score, self.verbose) - log("validation_score: %f" % best_validation_score, self.verbose) + logger.info("Best hyperparameters: %s" % str(best_hyperparams)) + logger.info("train_score: %f" % train_score) + logger.info("validation_score: %f" % best_validation_score) return best_model, best_hyperparams, all_scores diff --git a/deepchem/hyper/tests/test_gaussian_hyperparam_opt.py b/deepchem/hyper/tests/test_gaussian_hyperparam_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..d147004aaeb735a6ce8ac6ad00dc5d202b6098da --- /dev/null +++ b/deepchem/hyper/tests/test_gaussian_hyperparam_opt.py @@ -0,0 +1,173 
@@ +""" +Tests for Gaussian Process Hyperparameter Optimization. + +These tests fails every so often. I think it's when the Gaussian +process optimizer doesn't find an optimal point. This is still a +valuable test suite so leaving it in despite the flakiness. +""" +import numpy as np +import sklearn +import deepchem as dc +import unittest +import tempfile +from flaky import flaky + + +class TestGaussianHyperparamOpt(unittest.TestCase): + """ + Test Gaussian Hyperparameter Optimization. + """ + + def setUp(self): + """Set up common resources.""" + + def rf_model_builder(**model_params): + rf_params = {k: v for (k, v) in model_params.items() if k != 'model_dir'} + model_dir = model_params['model_dir'] + sklearn_model = sklearn.ensemble.RandomForestRegressor(**rf_params) + return dc.models.SklearnModel(sklearn_model, model_dir) + + self.rf_model_builder = rf_model_builder + self.train_dataset = dc.data.NumpyDataset( + X=np.random.rand(50, 5), y=np.random.rand(50, 1)) + self.valid_dataset = dc.data.NumpyDataset( + X=np.random.rand(20, 5), y=np.random.rand(20, 1)) + + def test_rf_example(self): + """Test a simple example of optimizing a RF model with a gaussian process.""" + + optimizer = dc.hyper.GaussianProcessHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": 10} + transformers = [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, self.train_dataset, self.valid_dataset, metric, max_iter=2) + + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + assert valid_score["pearson_r2_score"] == max(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + def test_rf_example_min(self): + """Test a simple example of optimizing a RF model with a gaussian process looking for minimum score.""" + + optimizer = dc.hyper.GaussianProcessHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": 10} + transformers 
= [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + self.train_dataset, + self.valid_dataset, + metric, + transformers, + use_max=False, + max_iter=2) + + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + assert valid_score["pearson_r2_score"] == min(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + def test_rf_with_logdir(self): + """Test that using a logdir can work correctly.""" + optimizer = dc.hyper.GaussianProcessHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": 10} + transformers = [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + with tempfile.TemporaryDirectory() as tmpdirname: + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + self.train_dataset, + self.valid_dataset, + metric, + transformers, + logdir=tmpdirname, + max_iter=2) + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + assert valid_score["pearson_r2_score"] == max(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + @flaky + def test_multitask_example(self): + """Test a simple example of optimizing a multitask model with a gaussian process search.""" + # Generate dummy dataset + np.random.seed(123) + train_dataset = dc.data.NumpyDataset( + np.random.rand(10, 3), np.zeros((10, 2)), np.ones((10, 2)), + np.arange(10)) + valid_dataset = dc.data.NumpyDataset( + np.random.rand(5, 3), np.zeros((5, 2)), np.ones((5, 2)), np.arange(5)) + transformers = [] + + optimizer = dc.hyper.GaussianProcessHyperparamOpt( + lambda **params: dc.models.MultitaskRegressor(n_tasks=2, + n_features=3, dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + learning_rate=0.003, **params)) + + params_dict = {"batch_size": 10} + metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean) + + best_model, 
best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + train_dataset, + valid_dataset, + metric, + transformers, + max_iter=1, + use_max=False) + + valid_score = best_model.evaluate(valid_dataset, [metric], transformers) + assert valid_score["mean-mean_squared_error"] == min(all_results.values()) + assert valid_score["mean-mean_squared_error"] > 0 + + @flaky + def test_multitask_example_different_search_range(self): + """Test a simple example of optimizing a multitask model with a gaussian process search with per-parameter search range.""" + # Generate dummy dataset + np.random.seed(123) + train_dataset = dc.data.NumpyDataset( + np.random.rand(10, 3), np.zeros((10, 2)), np.ones((10, 2)), + np.arange(10)) + valid_dataset = dc.data.NumpyDataset( + np.random.rand(5, 3), np.zeros((5, 2)), np.ones((5, 2)), np.arange(5)) + transformers = [] + + optimizer = dc.hyper.GaussianProcessHyperparamOpt( + lambda **params: dc.models.MultitaskRegressor( + n_tasks=2, + n_features=3, + dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + **params)) + + params_dict = {"learning_rate": 0.003, "batch_size": 10} + # These are per-example multiplier + search_range = {"learning_rate": 10, "batch_size": 4} + metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean) + + with tempfile.TemporaryDirectory() as tmpdirname: + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + train_dataset, + valid_dataset, + metric, + transformers, + max_iter=2, + logdir=tmpdirname, + search_range=search_range, + use_max=False) + valid_score = best_model.evaluate(valid_dataset, [metric], transformers) + # Test that 2 parameters were optimized + for hp_str in all_results.keys(): + # Recall that the key is a string of the form _batch_size_39_learning_rate_0.01 for example + assert "batch_size" in hp_str + assert "learning_rate" in hp_str + assert valid_score["mean-mean_squared_error"] == 
min(all_results.values()) + assert valid_score["mean-mean_squared_error"] > 0 diff --git a/deepchem/hyper/tests/test_grid_hyperparam_opt.py b/deepchem/hyper/tests/test_grid_hyperparam_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..37422489f6216311181a7efa3d4ad4d9cb645ca1 --- /dev/null +++ b/deepchem/hyper/tests/test_grid_hyperparam_opt.py @@ -0,0 +1,160 @@ +""" +Tests for hyperparam optimization. +""" +import unittest +import tempfile +import numpy as np +import deepchem as dc +import sklearn + + +class TestGridHyperparamOpt(unittest.TestCase): + """ + Test grid hyperparameter optimization API. + """ + + def setUp(self): + """Set up common resources.""" + + def rf_model_builder(**model_params): + rf_params = {k: v for (k, v) in model_params.items() if k != 'model_dir'} + model_dir = model_params['model_dir'] + sklearn_model = sklearn.ensemble.RandomForestRegressor(**rf_params) + return dc.models.SklearnModel(sklearn_model, model_dir) + + self.rf_model_builder = rf_model_builder + self.train_dataset = dc.data.NumpyDataset( + X=np.random.rand(50, 5), y=np.random.rand(50, 1)) + self.valid_dataset = dc.data.NumpyDataset( + X=np.random.rand(20, 5), y=np.random.rand(20, 1)) + + def test_rf_hyperparam(self): + """Test of hyperparam_opt with singletask RF ECFP regression API.""" + optimizer = dc.hyper.GridHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": [10, 100]} + transformers = [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, self.train_dataset, self.valid_dataset, metric, + transformers) + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + + assert valid_score["pearson_r2_score"] == max(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + def test_rf_hyperparam_min(self): + """Test of hyperparam_opt with singletask RF ECFP regression API.""" + optimizer = 
dc.hyper.GridHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": [10, 100]} + transformers = [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + self.train_dataset, + self.valid_dataset, + metric, + transformers, + use_max=False) + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + + assert valid_score["pearson_r2_score"] == min(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + def test_rf_with_logdir(self): + """Test that using a logdir can work correctly.""" + optimizer = dc.hyper.GridHyperparamOpt(self.rf_model_builder) + params_dict = {"n_estimators": [10, 5]} + transformers = [] + metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + with tempfile.TemporaryDirectory() as tmpdirname: + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + self.train_dataset, + self.valid_dataset, + metric, + transformers, + logdir=tmpdirname) + valid_score = best_model.evaluate(self.valid_dataset, [metric], + transformers) + assert valid_score["pearson_r2_score"] == max(all_results.values()) + assert valid_score["pearson_r2_score"] > 0 + + def test_multitask_example(self): + """Test a simple example of optimizing a multitask model with a grid search.""" + # Generate dummy dataset + np.random.seed(123) + train_dataset = dc.data.NumpyDataset( + np.random.rand(10, 3), np.zeros((10, 2)), np.ones((10, 2)), + np.arange(10)) + valid_dataset = dc.data.NumpyDataset( + np.random.rand(5, 3), np.zeros((5, 2)), np.ones((5, 2)), np.arange(5)) + + optimizer = dc.hyper.GridHyperparamOpt( + lambda **params: dc.models.MultitaskRegressor(n_tasks=2, + n_features=3, dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + learning_rate=0.003, **params)) + + params_dict = {"batch_size": [10, 20]} + transformers = [] + metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, 
task_averager=np.mean) + + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + train_dataset, + valid_dataset, + metric, + transformers, + use_max=False) + + valid_score = best_model.evaluate(valid_dataset, [metric]) + assert valid_score["mean-mean_squared_error"] == min(all_results.values()) + assert valid_score["mean-mean_squared_error"] > 0 + + def test_multitask_example_multiple_params(self): + """Test a simple example of optimizing a multitask model with a grid search with multiple parameters to optimize.""" + # Generate dummy dataset + np.random.seed(123) + train_dataset = dc.data.NumpyDataset( + np.random.rand(10, 3), np.zeros((10, 2)), np.ones((10, 2)), + np.arange(10)) + valid_dataset = dc.data.NumpyDataset( + np.random.rand(5, 3), np.zeros((5, 2)), np.ones((5, 2)), np.arange(5)) + + optimizer = dc.hyper.GridHyperparamOpt( + lambda **params: dc.models.MultitaskRegressor( + n_tasks=2, + n_features=3, + dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + **params)) + + params_dict = {"learning_rate": [0.003, 0.03], "batch_size": [10, 50]} + # These are per-example multiplier + transformers = [] + metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean) + + with tempfile.TemporaryDirectory() as tmpdirname: + best_model, best_hyperparams, all_results = optimizer.hyperparam_search( + params_dict, + train_dataset, + valid_dataset, + metric, + transformers, + logdir=tmpdirname, + use_max=False) + valid_score = best_model.evaluate(valid_dataset, [metric]) + # Test that 2 parameters were optimized + for hp_str in all_results.keys(): + # Recall that the key is a string of the form _batch_size_39_learning_rate_0.01 for example + assert "batch_size" in hp_str + assert "learning_rate" in hp_str + + assert valid_score["mean-mean_squared_error"] == min(all_results.values()) + assert valid_score["mean-mean_squared_error"] > 0 diff --git a/deepchem/hyper/tests/test_hyperparam_opt.py 
b/deepchem/hyper/tests/test_hyperparam_opt.py index 41b4e2ac76f1f2ca696278b4a66b5ef5c2d1a276..fe356e69c92891da0d4972f495a3af6128c0e6df 100644 --- a/deepchem/hyper/tests/test_hyperparam_opt.py +++ b/deepchem/hyper/tests/test_hyperparam_opt.py @@ -1,154 +1,27 @@ """ -Integration tests for hyperparam optimization. +Tests for hyperparam optimization. """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" -import os import unittest -import tempfile -import shutil -import numpy as np -import tensorflow as tf +import sklearn import deepchem as dc -from sklearn.ensemble import RandomForestClassifier -from sklearn.ensemble import RandomForestRegressor class TestHyperparamOpt(unittest.TestCase): """ - Test hyperparameter optimization API. + Test abstract superclass behavior. """ - def test_singletask_sklearn_rf_ECFP_regression_hyperparam_opt(self): - """Test of hyperparam_opt with singletask RF ECFP regression API.""" - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "../../models/tests/example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.ScaffoldSplitter() - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset) - - transformers = [ - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - for dataset in [train_dataset, test_dataset]: - for transformer in transformers: - dataset = transformer.transform(dataset) - - params_dict = {"n_estimators": [10, 100]} - metric = dc.metrics.Metric(dc.metrics.r2_score) + def test_cant_be_initialized(self): + """Test HyperparamOpt can't be initialized.""" + initialized = True def rf_model_builder(model_params, model_dir): - sklearn_model = 
RandomForestRegressor(**model_params) - return dc.models.SklearnModel(sklearn_model, model_dir) - - optimizer = dc.hyper.HyperparamOpt(rf_model_builder) - best_model, best_hyperparams, all_results = optimizer.hyperparam_search( - params_dict, - train_dataset, - valid_dataset, - transformers, - metric, - logdir=None) - - def test_singletask_to_multitask_sklearn_hyperparam_opt(self): - """Test of hyperparam_opt with singletask_to_multitask.""" - tasks = [ - "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", - "task8", "task9", "task10", "task11", "task12", "task13", "task14", - "task15", "task16" - ] - input_file = "multitask_example.csv" - - n_features = 10 - n_tasks = len(tasks) - # Define train dataset - n_train = 100 - X_train = np.random.rand(n_train, n_features) - y_train = np.random.randint(2, size=(n_train, n_tasks)) - w_train = np.ones_like(y_train) - ids_train = ["C"] * n_train - - train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train, w_train, - ids_train, tasks) - - # Define validation dataset - n_valid = 10 - X_valid = np.random.rand(n_valid, n_features) - y_valid = np.random.randint(2, size=(n_valid, n_tasks)) - w_valid = np.ones_like(y_valid) - ids_valid = ["C"] * n_valid - valid_dataset = dc.data.DiskDataset.from_numpy(X_valid, y_valid, w_valid, - ids_valid, tasks) - - transformers = [] - classification_metric = dc.metrics.Metric( - dc.metrics.matthews_corrcoef, np.mean, mode="classification") - params_dict = {"n_estimators": [1, 10]} - - def multitask_model_builder(model_params, model_dir): - - def model_builder(model_dir): - sklearn_model = RandomForestClassifier(**model_params) - return dc.models.SklearnModel(sklearn_model, model_dir) - - return dc.models.SingletaskToMultitask(tasks, model_builder, model_dir) - - optimizer = dc.hyper.HyperparamOpt(multitask_model_builder) - best_model, best_hyperparams, all_results = optimizer.hyperparam_search( - params_dict, - train_dataset, - valid_dataset, - transformers, - 
classification_metric, - logdir=None) - - def test_multitask_tf_mlp_ECFP_classification_hyperparam_opt(self): - """Straightforward test of Tensorflow multitask deepchem classification API.""" - task_type = "classification" - - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, - "../../models/tests/multitask_example.csv") - tasks = [ - "task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", - "task8", "task9", "task10", "task11", "task12", "task13", "task14", - "task15", "task16" - ] - - n_features = 1024 - featurizer = dc.feat.CircularFingerprint(size=n_features) - - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.ScaffoldSplitter() - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset) - - transformers = [] - metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, np.mean, mode="classification") - params_dict = {"layer_sizes": [(10,), (100,)]} - - def model_builder(model_params, model_dir): - return dc.models.MultitaskClassifier( - len(tasks), n_features, model_dir=model_dir, **model_params) - - optimizer = dc.hyper.HyperparamOpt(model_builder) - best_model, best_hyperparams, all_results = optimizer.hyperparam_search( - params_dict, - train_dataset, - valid_dataset, - transformers, - metric, - logdir=None) + sklearn_model = sklearn.ensemble.RandomForestRegressor(**model_params) + return dc.model.SklearnModel(sklearn_model, model_dir) + + try: + _ = dc.hyper.HyperparamOpt(rf_model_builder) + except ValueError: + initialized = False + assert not initialized diff --git a/deepchem/metalearning/__init__.py b/deepchem/metalearning/__init__.py index 751b3cb5f4fcd540a3a537908f5d1ea58825ae3e..ef1b7473efb0d5ff0c309c96d8901f93a7cc16d7 100644 --- a/deepchem/metalearning/__init__.py +++ b/deepchem/metalearning/__init__.py @@ -1 +1,3 @@ +# flake8: noqa + from 
deepchem.metalearning.maml import MAML, MetaLearner diff --git a/deepchem/metalearning/maml.py b/deepchem/metalearning/maml.py index 587cd4653d59540eb5d272ed8cae040990f5ea90..c1f41af97354efa95cbcff5fbe3d61ee931a890a 100644 --- a/deepchem/metalearning/maml.py +++ b/deepchem/metalearning/maml.py @@ -1,13 +1,14 @@ """Model-Agnostic Meta-Learning (MAML) algorithm for low data learning.""" -from deepchem.models.optimizers import Adam, GradientDescent -import numpy as np import os import shutil import tempfile -import tensorflow as tf import time +import tensorflow as tf + +from deepchem.models.optimizers import Adam, GradientDescent + class MetaLearner(object): """Model and data to which the MAML algorithm can be applied. @@ -37,12 +38,12 @@ class MetaLearner(object): (loss, outputs) where loss is the value of the model's loss function, and outputs is a list of the model's outputs """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") @property def variables(self): """Get the list of Tensorflow variables to train.""" - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") def select_task(self): """Select a new task to train on. @@ -51,7 +52,7 @@ class MetaLearner(object): If there are infinitely many training tasks, this can simply select a new one each time it is called. """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") def get_batch(self): """Get a batch of data for training. @@ -60,7 +61,7 @@ class MetaLearner(object): inputs. This will usually be called twice for each task, and should return a different batch on each call. 
""" - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") class MAML(object): @@ -131,9 +132,9 @@ class MAML(object): # Create the optimizers for meta-optimization and task optimization. self._global_step = tf.Variable(0, trainable=False) - self._tf_optimizer = optimizer._create_optimizer(self._global_step) + self._tf_optimizer = optimizer._create_tf_optimizer(self._global_step) task_optimizer = GradientDescent(learning_rate=self.learning_rate) - self._tf_task_optimizer = task_optimizer._create_optimizer( + self._tf_task_optimizer = task_optimizer._create_tf_optimizer( self._global_step) # Create a Checkpoint for saving. diff --git a/deepchem/metalearning/tests/test_maml.py b/deepchem/metalearning/tests/test_maml.py index 11c903b428c163d36c3ed4ddeeda36341dd48270..24c30d06689c629ee6133e6407ab4694da73eccd 100644 --- a/deepchem/metalearning/tests/test_maml.py +++ b/deepchem/metalearning/tests/test_maml.py @@ -1,9 +1,10 @@ -from flaky import flaky +import unittest -import deepchem as dc import numpy as np import tensorflow as tf -import unittest +from flaky import flaky + +import deepchem as dc class TestMAML(unittest.TestCase): diff --git a/deepchem/metalearning/tests/test_maml_reload.py b/deepchem/metalearning/tests/test_maml_reload.py new file mode 100644 index 0000000000000000000000000000000000000000..799b9320f70600e281f58d3b3c25d6397e612f0a --- /dev/null +++ b/deepchem/metalearning/tests/test_maml_reload.py @@ -0,0 +1,61 @@ +"""Test that MAML models can be reloaded.""" + +import deepchem as dc +import numpy as np +import tensorflow as tf + + +class SineLearner(dc.metalearning.MetaLearner): + + def __init__(self): + self.batch_size = 10 + self.w1 = tf.Variable(np.random.normal(size=[1, 40], scale=1.0)) + self.w2 = tf.Variable( + np.random.normal(size=[40, 40], scale=np.sqrt(1 / 40))) + self.w3 = tf.Variable(np.random.normal(size=[40, 1], scale=np.sqrt(1 / 40))) + self.b1 = tf.Variable(np.zeros(40)) + 
self.b2 = tf.Variable(np.zeros(40)) + self.b3 = tf.Variable(np.zeros(1)) + + def compute_model(self, inputs, variables, training): + x, y = inputs + w1, w2, w3, b1, b2, b3 = variables + dense1 = tf.nn.relu(tf.matmul(x, w1) + b1) + dense2 = tf.nn.relu(tf.matmul(dense1, w2) + b2) + output = tf.matmul(dense2, w3) + b3 + loss = tf.reduce_mean(tf.square(output - y)) + return loss, [output] + + @property + def variables(self): + return [self.w1, self.w2, self.w3, self.b1, self.b2, self.b3] + + def select_task(self): + self.amplitude = 5.0 * np.random.random() + self.phase = np.pi * np.random.random() + + def get_batch(self): + x = np.random.uniform(-5.0, 5.0, (self.batch_size, 1)) + return [x, self.amplitude * np.sin(x + self.phase)] + + +def test_reload(): + """Test that a Metalearner can be reloaded.""" + learner = SineLearner() + optimizer = dc.models.optimizers.Adam(learning_rate=5e-3) + maml = dc.metalearning.MAML(learner, meta_batch_size=4, optimizer=optimizer) + maml.fit(900) + + learner.select_task() + batch = learner.get_batch() + loss, outputs = maml.predict_on_batch(batch) + + reloaded = dc.metalearning.MAML(SineLearner(), model_dir=maml.model_dir) + reloaded.restore() + reloaded_loss, reloaded_outputs = maml.predict_on_batch(batch) + + assert loss == reloaded_loss + + assert len(outputs) == len(reloaded_outputs) + for output, reloaded_output in zip(outputs, reloaded_outputs): + assert np.all(output == reloaded_output) diff --git a/deepchem/metrics/__init__.py b/deepchem/metrics/__init__.py index d5af28822ec85f09db2b737d1c14d3200784046f..0be35fb3a60ab2ace5e627bf7787690685a9cb82 100644 --- a/deepchem/metrics/__init__.py +++ b/deepchem/metrics/__init__.py @@ -1,445 +1,40 @@ -"""Evaluation metrics.""" - -import numpy as np -import warnings -import sklearn.metrics -import logging -from sklearn.metrics import matthews_corrcoef -from sklearn.metrics import recall_score -from sklearn.metrics import r2_score -from sklearn.metrics import mean_squared_error -from 
sklearn.metrics import mean_absolute_error -from sklearn.metrics import precision_score -from sklearn.metrics import precision_recall_curve -from sklearn.metrics import auc -from sklearn.metrics import jaccard_score -from sklearn.metrics import f1_score -from scipy.stats import pearsonr - -logger = logging.getLogger(__name__) - - -def to_one_hot(y, n_classes=2): - """Transforms label vector into one-hot encoding. - - Turns y into vector of shape `(n_samples, n_classes)` with a one-hot - encoding. - - Parameters - ---------- - y: np.ndarray - A vector of shape `(n_samples, 1)` - - Returns - ------- - A numpy.ndarray of shape `(n_samples, n_classes)`. - """ - n_samples = np.shape(y)[0] - y_hot = np.zeros((n_samples, n_classes)) - y_hot[np.arange(n_samples), y.astype(np.int64)] = 1 - return y_hot - - -def from_one_hot(y, axis=1): - """Transorms label vector from one-hot encoding. - - Parameters - ---------- - y: np.ndarray - A vector of shape `(n_samples, num_classes)` - axis: int, optional (default 1) - The axis with one-hot encodings to reduce on. - - Returns - ------- - A numpy.ndarray of shape `(n_samples,)` - """ - return np.argmax(y, axis=axis) - - -def _ensure_one_hot(y): - """If neceessary, convert class labels to one-hot encoding.""" - if len(y.shape) == 1: - return to_one_hot(y) - return y - - -def _ensure_class_labels(y): - """If necessary, convert one-hot encoding to class labels.""" - if len(y.shape) == 2: - return from_one_hot(y) - return y - - -def roc_auc_score(y, y_pred): - """Area under the receiver operating characteristic curve.""" - if y.shape != y_pred.shape: - y = _ensure_one_hot(y) - return sklearn.metrics.roc_auc_score(y, y_pred) - - -def accuracy_score(y, y_pred): - """Compute accuracy score - - Computes accuracy score for classification tasks. Works for both - binary and multiclass classification. 
- - Parameters - ---------- - y: np.ndarray - Of shape `(N_samples,)` - y_pred: np.ndarray - Of shape `(N_samples,)` - - Returns - ------- - score: float - The fraction of correctly classified samples. A number between 0 - and 1. - """ - y = _ensure_class_labels(y) - y_pred = _ensure_class_labels(y_pred) - return sklearn.metrics.accuracy_score(y, y_pred) - - -def balanced_accuracy_score(y, y_pred): - """Computes balanced accuracy score.""" - num_positive = float(np.count_nonzero(y)) - num_negative = float(len(y) - num_positive) - pos_weight = num_negative / num_positive - weights = np.ones_like(y) - weights[y != 0] = pos_weight - return sklearn.metrics.balanced_accuracy_score( - y, y_pred, sample_weight=weights) - - -def pearson_r2_score(y, y_pred): - """Computes Pearson R^2 (square of Pearson correlation).""" - return pearsonr(y, y_pred)[0]**2 - - -def jaccard_index(y, y_pred): - """Computes Jaccard Index which is the Intersection Over Union metric which is commonly used in image segmentation tasks - - Parameters - ---------- - y: ground truth array - y_pred: predicted array - """ - return jaccard_score(y, y_pred) - - -def pixel_error(y, y_pred): - """An error metric in case y, y_pred are images. - - Defined as 1 - the maximal F-score of pixel similarity, or squared - Euclidean distance between the original and the result labels. 
- - Parameters - ---------- - y: np.ndarray - ground truth array - y_pred: np.ndarray - predicted array - """ - return 1 - f1_score(y, y_pred) - - -def prc_auc_score(y, y_pred): - """Compute area under precision-recall curve""" - if y.shape != y_pred.shape: - y = _ensure_one_hot(y) - assert y_pred.shape == y.shape - assert y_pred.shape[1] == 2 - precision, recall, _ = precision_recall_curve(y[:, 1], y_pred[:, 1]) - return auc(recall, precision) - - -def rms_score(y_true, y_pred): - """Computes RMS error.""" - return np.sqrt(mean_squared_error(y_true, y_pred)) - - -def mae_score(y_true, y_pred): - """Computes MAE.""" - return mean_absolute_error(y_true, y_pred) - - -def kappa_score(y_true, y_pred): - """Calculate Cohen's kappa for classification tasks. - - See https://en.wikipedia.org/wiki/Cohen%27s_kappa - - Note that this implementation of Cohen's kappa expects binary labels. - - Parameters - ---------- - y_true: np.ndarray - Numpy array containing true values. - y_pred: np.ndarray - Numpy array containing predicted values. - - Returns - ------- - kappa: np.ndarray - Numpy array containing kappa for each classification task. - - Raises - ------ - AssertionError: If y_true and y_pred are not the same size, or if - class labels are not in [0, 1]. - """ - assert len(y_true) == len(y_pred), 'Number of examples does not match.' 
- yt = np.asarray(y_true, dtype=int) - yp = np.asarray(y_pred, dtype=int) - assert np.array_equal( - np.unique(yt), - [0, 1]), ('Class labels must be binary: %s' % np.unique(yt)) - observed_agreement = np.true_divide( - np.count_nonzero(np.equal(yt, yp)), len(yt)) - expected_agreement = np.true_divide( - np.count_nonzero(yt == 1) * np.count_nonzero(yp == 1) + - np.count_nonzero(yt == 0) * np.count_nonzero(yp == 0), - len(yt)**2) - kappa = np.true_divide(observed_agreement - expected_agreement, - 1.0 - expected_agreement) - return kappa - - -def bedroc_score(y_true, y_pred, alpha=20.0): - """BEDROC metric implemented according to Truchon and Bayley that modifies - the ROC score by allowing for a factor of early recognition - - Parameters - ---------- - y_true (array_like): - Binary class labels. 1 for positive class, 0 otherwise - y_pred (array_like): - Predicted labels - alpha (float), default 20.0: - Early recognition parameter - - Returns - ------- - float: Value in [0, 1] that indicates the degree of early recognition - - Notes - ----- - The original paper by Truchon et al. is located at - https://pubs.acs.org/doi/pdf/10.1021/ci600426e - """ - - assert len(y_true) == len(y_pred), 'Number of examples do not match' - - assert np.array_equal( - np.unique(y_true).astype(int), - [0, 1]), ('Class labels must be binary: %s' % np.unique(y_true)) - - from rdkit.ML.Scoring.Scoring import CalcBEDROC - - yt = np.asarray(y_true) - yp = np.asarray(y_pred) - - yt = yt.flatten() - yp = yp[:, 1].flatten() # Index 1 because one_hot predictions - - scores = list(zip(yt, yp)) - scores = sorted(scores, key=lambda pair: pair[1], reverse=True) - - return CalcBEDROC(scores, 0, alpha) - - -class Metric(object): - """Wrapper class for computing user-defined metrics. - - There are a variety of different metrics this class aims to support. - At the most simple, metrics for classification and regression that - assume that values to compare are scalars. 
More complicated, there - may perhaps be two image arrays that need to be compared. - - The `Metric` class provides a wrapper for standardizing the API - around different classes of metrics that may be useful for DeepChem - models. The implementation provides a few non-standard conveniences - such as built-in support for multitask and multiclass metrics, and - support for multidimensional outputs. - """ - - def __init__(self, - metric, - task_averager=None, - name=None, - threshold=None, - mode=None, - compute_energy_metric=False): - """ - Parameters - ---------- - metric: function - function that takes args y_true, y_pred (in that order) and - computes desired score. - task_averager: function, optional - If not None, should be a function that averages metrics across - tasks. For example, task_averager=np.mean. If task_averager is - provided, this task will be inherited as a multitask metric. - name: str, optional - Name of this metric - threshold: float, optional - Used for binary metrics and is the threshold for the positive - class - mode: str, optional - Must be either classification or regression. - compute_energy_metric: TODO(rbharath): Should this be removed? 
- """ - self.metric = metric - self.task_averager = task_averager - self.is_multitask = (self.task_averager is not None) - if name is None: - if not self.is_multitask: - self.name = self.metric.__name__ - else: - self.name = self.task_averager.__name__ + "-" + self.metric.__name__ - else: - self.name = name - self.threshold = threshold - if mode is None: - if self.metric.__name__ in [ - "roc_auc_score", "matthews_corrcoef", "recall_score", - "accuracy_score", "kappa_score", "precision_score", - "balanced_accuracy_score", "prc_auc_score", "f1_score", "bedroc_score" - ]: - mode = "classification" - elif self.metric.__name__ in [ - "pearson_r2_score", "r2_score", "mean_squared_error", - "mean_absolute_error", "rms_score", "mae_score", "pearsonr" - ]: - mode = "regression" - else: - raise ValueError("Must specify mode for new metric.") - assert mode in ["classification", "regression"] - if self.metric.__name__ in [ - "accuracy_score", "balanced_accuracy_score", "recall_score", - "matthews_corrcoef", "precision_score", "f1_score" - ] and threshold is None: - self.threshold = 0.5 - self.mode = mode - # The convention used is that the first task is the metric. - # TODO(rbharath, joegomes): This doesn't seem like it should be hard-coded as - # an option in the Metric class. Instead, this should be possible to move into - # user-space as a custom task_averager function. - self.compute_energy_metric = compute_energy_metric - - def compute_metric(self, - y_true, - y_pred, - w=None, - n_classes=2, - filter_nans=True, - per_task_metrics=False): - """Compute a performance metric for each task. - - Parameters - ---------- - y_true: np.ndarray - An np.ndarray containing true values for each task. - y_pred: np.ndarray - An np.ndarray containing predicted values for each task. - w: np.ndarray, optional - An np.ndarray containing weights for each datapoint. - n_classes: int, optional - Number of classes in data for classification tasks. 
- filter_nans: bool, optional - Remove NaN values in computed metrics - per_task_metrics: bool, optional - If true, return computed metric for each task on multitask dataset. - - Returns - ------- - A numpy nd.array containing metric values for each task. - """ - n_samples = y_true.shape[0] - expected_dims = (3 if self.mode == "classification" else 2) - if len(y_pred.shape) < expected_dims: - n_tasks = 1 - y_true = np.expand_dims(y_true, 1) - y_pred = np.expand_dims(y_pred, 1) - else: - n_tasks = y_pred.shape[1] - if w is None or len(w) == 0: - w = np.ones((n_samples, n_tasks)) - computed_metrics = [] - for task in range(n_tasks): - y_task = y_true[:, task] - y_pred_task = y_pred[:, task] - if len(w.shape) == 1: - w_task = w - elif w.shape[1] == 1: - w_task = w[:, 0] - else: - w_task = w[:, task] - - metric_value = self.compute_singletask_metric(y_task, y_pred_task, w_task) - computed_metrics.append(metric_value) - logger.info("computed_metrics: %s" % str(computed_metrics)) - if n_tasks == 1: - computed_metrics = computed_metrics[0] - if not self.is_multitask: - return computed_metrics - else: - if filter_nans: - computed_metrics = np.array(computed_metrics) - computed_metrics = computed_metrics[~np.isnan(computed_metrics)] - if self.compute_energy_metric: - # TODO(rbharath, joegomes): What is this magic number? - force_error = self.task_averager(computed_metrics[1:]) * 4961.47596096 - print("Force error (metric: np.mean(%s)): %f kJ/mol/A" % (self.name, - force_error)) - return computed_metrics[0] - elif not per_task_metrics: - return self.task_averager(computed_metrics) - else: - return self.task_averager(computed_metrics), computed_metrics - - def compute_singletask_metric(self, y_true, y_pred, w): - """Compute a metric value. - - Parameters - ---------- - y_true: list - A list of arrays containing true values for each task. - y_pred: list - A list of arrays containing predicted values for each task. - - Returns - ------- - Float metric value. 
- - Raises - ------ - NotImplementedError: If metric_str is not in METRICS. - """ - - y_true = np.array(np.squeeze(y_true[w != 0])) - y_pred = np.array(np.squeeze(y_pred[w != 0])) - - if len(y_true.shape) == 0: - n_samples = 1 - else: - n_samples = y_true.shape[0] - # If there are no nonzero examples, metric is ill-defined. - if not y_true.size: - return np.nan - if self.threshold is not None and len(y_pred.shape) == 1: - y_pred = np.expand_dims(y_pred, 0) - if self.threshold is not None: - y_pred = y_pred[:, 1] - y_pred = np.greater(y_pred, self.threshold) - if len(y_true.shape) == 0: - y_true = np.expand_dims(y_true, 0) - if len(y_pred.shape) == 0: - y_pred = np.expand_dims(y_pred, 0) - try: - metric_value = self.metric(y_true, y_pred) - except (AssertionError, ValueError) as e: - warnings.warn("Error calculating metric %s: %s" % (self.name, e)) - metric_value = np.nan - return metric_value +# flake8: noqa + +# metric class +from deepchem.metrics.metric import Metric +# metrics utils +from deepchem.metrics.metric import threshold_predictions +from deepchem.metrics.metric import normalize_weight_shape +from deepchem.metrics.metric import normalize_labels_shape +from deepchem.metrics.metric import normalize_prediction_shape +from deepchem.metrics.metric import handle_classification_mode +from deepchem.metrics.metric import to_one_hot +from deepchem.metrics.metric import from_one_hot + +# sklearn & scipy score function +from deepchem.metrics.score_function import matthews_corrcoef +from deepchem.metrics.score_function import recall_score +from deepchem.metrics.score_function import kappa_score +from deepchem.metrics.score_function import cohen_kappa_score +from deepchem.metrics.score_function import r2_score +from deepchem.metrics.score_function import mean_squared_error +from deepchem.metrics.score_function import mean_absolute_error +from deepchem.metrics.score_function import precision_score +from deepchem.metrics.score_function import precision_recall_curve 
+from deepchem.metrics.score_function import auc +from deepchem.metrics.score_function import jaccard_score +from deepchem.metrics.score_function import f1_score +from deepchem.metrics.score_function import roc_auc_score +from deepchem.metrics.score_function import accuracy_score +from deepchem.metrics.score_function import balanced_accuracy_score +from deepchem.metrics.score_function import pearsonr + +# original score function +from deepchem.metrics.score_function import pearson_r2_score +from deepchem.metrics.score_function import jaccard_index +from deepchem.metrics.score_function import pixel_error +from deepchem.metrics.score_function import prc_auc_score +from deepchem.metrics.score_function import rms_score +from deepchem.metrics.score_function import mae_score +from deepchem.metrics.score_function import bedroc_score +from deepchem.metrics.score_function import concordance_index diff --git a/deepchem/metrics/genomic_metrics.py b/deepchem/metrics/genomic_metrics.py index 4535c7bd6ea1539e3f67bb42ba9780e7129a1fb7..587f950b6f2cc7b7513f237d4371c7deb7179750 100644 --- a/deepchem/metrics/genomic_metrics.py +++ b/deepchem/metrics/genomic_metrics.py @@ -1,35 +1,51 @@ """Evaluation Metrics for Genomics Datasets.""" +from typing import List, Optional import numpy as np -from deepchem.data import NumpyDataset from scipy.signal import correlate2d +from deepchem.models import Model +from deepchem.data import NumpyDataset + -def get_motif_scores(encoded_sequences, - motif_names, - max_scores=None, - return_positions=False, - GC_fraction=0.4): +def get_motif_scores(encoded_sequences: np.ndarray, + motif_names: List[str], + max_scores: Optional[int] = None, + return_positions: bool = False, + GC_fraction: float = 0.4) -> np.ndarray: """Computes pwm log odds. 
Parameters ---------- - encoded_sequences : 4darray - (N_sequences, N_letters, sequence_length, 1) array - motif_names : list of strings - max_scores : int, optional - return_positions : boolean, optional - GC_fraction : float, optional + encoded_sequences: np.ndarray + A numpy array of shape `(N_sequences, N_letters, sequence_length, 1)`. + motif_names: List[str] + List of motif file names. + max_scores: int, optional + Get top `max_scores` scores. + return_positions: bool, default False + Whether to return postions or not. + GC_fraction: float, default 0.4 + GC fraction in background sequence. Returns ------- - (N_sequences, num_motifs, seq_length) complete score array by default. - If max_scores, (N_sequences, num_motifs*max_scores) max score array. - If max_scores and return_positions, (N_sequences, 2*num_motifs*max_scores) - array with max scores and their positions. + np.ndarray + A numpy array of complete score. The shape is `(N_sequences, num_motifs, seq_length)` by default. + If max_scores, the shape of score array is `(N_sequences, num_motifs*max_scores)`. + If max_scores and return_positions, the shape of score array with max scores and their positions. + is `(N_sequences, 2*num_motifs*max_scores)`. + + Notes + ----- + This method requires simdna to be installed. """ - import simdna - from simdna import synthetic + try: + import simdna + from simdna import synthetic + except ModuleNotFoundError: + raise ImportError("This function requires simdna to be installed.") + loaded_motifs = synthetic.LoadedEncodeMotifs( simdna.ENCODE_MOTIFS_PATH, pseudocountProb=0.001) num_samples, _, seq_length, _ = encoded_sequences.shape @@ -59,22 +75,23 @@ def get_motif_scores(encoded_sequences, return scores -def get_pssm_scores(encoded_sequences, pssm): +def get_pssm_scores(encoded_sequences: np.ndarray, + pssm: np.ndarray) -> np.ndarray: """ Convolves pssm and its reverse complement with encoded sequences and returns the maximum score at each position of each sequence. 
Parameters ---------- - encoded_sequences: 3darray - (N_sequences, N_letters, sequence_length, 1) array - pssm: 2darray - (4, pssm_length) array + encoded_sequences: np.ndarray + A numpy array of shape `(N_sequences, N_letters, sequence_length, 1)`. + pssm: np.ndarray + A numpy array of shape `(4, pssm_length)`. Returns ------- - scores: 2darray - (N_sequences, sequence_length) + scores: np.ndarray + A numpy array of shape `(N_sequences, sequence_length)`. """ encoded_sequences = encoded_sequences.squeeze(axis=3) # initialize fwd and reverse scores to -infinity @@ -97,36 +114,41 @@ def get_pssm_scores(encoded_sequences, pssm): return scores -def in_silico_mutagenesis(model, X): +def in_silico_mutagenesis(model: Model, + encoded_sequences: np.ndarray) -> np.ndarray: """Computes in-silico-mutagenesis scores Parameters ---------- model: Model This can be any model that accepts inputs of the required shape and produces - an output of shape (N_sequences, N_tasks). - X: ndarray - Shape (N_sequences, N_letters, sequence_length, 1) + an output of shape `(N_sequences, N_tasks)`. + encoded_sequences: np.ndarray + A numpy array of shape `(N_sequences, N_letters, sequence_length, 1)` Returns ------- - (num_task, N_sequences, N_letters, sequence_length, 1) ISM score array. + np.ndarray + A numpy array of ISM scores. The shape is `(num_task, N_sequences, N_letters, sequence_length, 1)`. 
""" # Shape (N_sequences, num_tasks) - wild_type_predictions = model.predict(NumpyDataset(X)) + wild_type_predictions = model.predict(NumpyDataset(encoded_sequences)) + # check whether wild_type_predictions is np.ndarray or not + assert isinstance(wild_type_predictions, np.ndarray) num_tasks = wild_type_predictions.shape[1] - #Shape (N_sequences, N_letters, sequence_length, 1, num_tasks) - mutagenesis_scores = np.empty(X.shape + (num_tasks,), dtype=np.float32) + # Shape (N_sequences, N_letters, sequence_length, 1, num_tasks) + mutagenesis_scores = np.empty( + encoded_sequences.shape + (num_tasks,), dtype=np.float32) # Shape (N_sequences, num_tasks, 1, 1, 1) wild_type_predictions = wild_type_predictions[:, np.newaxis, np.newaxis, np.newaxis] for sequence_index, (sequence, wild_type_prediction) in enumerate( - zip(X, wild_type_predictions)): + zip(encoded_sequences, wild_type_predictions)): # Mutates every position of the sequence to every letter # Shape (N_letters * sequence_length, N_letters, sequence_length, 1) # Breakdown: - # Shape of sequence[np.newaxis] (1, N_letters, sequence_length, 1) + # Shape of sequence[np.newaxis] (1, N_letters, sequence_length, 1) mutated_sequences = np.repeat( sequence[np.newaxis], np.prod(sequence.shape), axis=0) @@ -142,6 +164,8 @@ def in_silico_mutagenesis(model, X): mutated_sequences[arange, vertical_repeat, horizontal_cycle, :] = 1 # make mutant predictions mutated_predictions = model.predict(NumpyDataset(mutated_sequences)) + # check whether wild_type_predictions is np.ndarray or not + assert isinstance(mutated_predictions, np.ndarray) mutated_predictions = mutated_predictions.reshape(sequence.shape + (num_tasks,)) mutagenesis_scores[ diff --git a/deepchem/metrics/metric.py b/deepchem/metrics/metric.py new file mode 100644 index 0000000000000000000000000000000000000000..10b5092390df3f4a75a5a0a94acd75643bfc1743 --- /dev/null +++ b/deepchem/metrics/metric.py @@ -0,0 +1,731 @@ +import logging +from typing import Any, Callable, 
Optional + +import numpy as np + +logger = logging.getLogger(__name__) + + +def threshold_predictions(y: np.ndarray, + threshold: Optional[float] = None) -> np.ndarray: + """Threshold predictions from classification model. + + Parameters + ---------- + y: np.ndarray + Must have shape `(N, n_classes)` and be class probabilities. + threshold: float, default None + The threshold probability for the positive class. Note that this + threshold will only be applied for binary classifiers (where + `n_classes==2`). If specified for multiclass problems, will be + ignored. If `threshold` is None, and `n_classes==2` then a default + threshold of 0.5 will be applied. + + Returns + ------- + y_out: np.ndarray + A numpy array of shape `(N,)` with class predictions as integers ranging from 0 + to `n_classes-1`. + """ + if not isinstance(y, np.ndarray) or not len(y.shape) == 2: + raise ValueError("y must be a ndarray of shape (N, n_classes)") + N = y.shape[0] + n_classes = y.shape[1] + if threshold is None and n_classes == 2: + logger.info("Using default threshold of 0.5 for binary dataset.") + threshold = 0.5 + if not np.allclose(np.sum(y, axis=1), np.ones(N)): + raise ValueError( + "y must be a class probability matrix with rows summing to 1.") + if n_classes != 2: + y_out = np.argmax(y, axis=1) + return y_out + else: + y_out = np.where(y[:, 1] >= threshold, np.ones(N), np.zeros(N)) + return y_out + + +def normalize_weight_shape(w: np.ndarray, n_samples: int, + n_tasks: int) -> np.ndarray: + """A utility function to correct the shape of the weight array. + + This utility function is used to normalize the shapes of a given + weight array. + + Parameters + ---------- + w: np.ndarray + `w` can be `None` or a scalar or a `np.ndarray` of shape + `(n_samples,)` or of shape `(n_samples, n_tasks)`. If `w` is a + scalar, it's assumed to be the same weight for all samples/tasks. + n_samples: int + The number of samples in the dataset. 
If `w` is not None, we should + have `n_samples = w.shape[0]` if `w` is a ndarray + n_tasks: int + The number of tasks. If `w` is 2d ndarray, then we should have + `w.shape[1] == n_tasks`. + + Examples + -------- + >>> import numpy as np + >>> w_out = normalize_weight_shape(None, n_samples=10, n_tasks=1) + >>> (w_out == np.ones((10, 1))).all() + True + + Returns + ------- + w_out: np.ndarray + Array of shape `(n_samples, n_tasks)` + """ + if w is None: + w_out = np.ones((n_samples, n_tasks)) + elif isinstance(w, np.ndarray): + if len(w.shape) == 0: + # scalar case + w_out = w * np.ones((n_samples, n_tasks)) + elif len(w.shape) == 1: + if len(w) != n_samples: + raise ValueError("Length of w isn't n_samples") + # per-example case + # This is a little arcane but it repeats w across tasks. + w_out = np.tile(w, (n_tasks, 1)).T + elif len(w.shape) == 2: + if w.shape == (n_samples, 1): + # If w.shape == (n_samples, 1) handle it as 1D + w = np.squeeze(w, axis=1) + w_out = np.tile(w, (n_tasks, 1)).T + elif w.shape != (n_samples, n_tasks): + raise ValueError("Shape for w doens't match (n_samples, n_tasks)") + else: + # w.shape == (n_samples, n_tasks) + w_out = w + else: + raise ValueError("w must be of dimension 1, 2, or 3") + else: + # scalar case + w_out = w * np.ones((n_samples, n_tasks)) + return w_out + + +def normalize_labels_shape(y: np.ndarray, + mode: Optional[str] = None, + n_tasks: Optional[int] = None, + n_classes: Optional[int] = None) -> np.ndarray: + """A utility function to correct the shape of the labels. + + Parameters + ---------- + y: np.ndarray + `y` is an array of shape `(N,)` or `(N, n_tasks)` or `(N, n_tasks, 1)`. + mode: str, default None + If `mode` is "classification" or "regression", attempts to apply + data transformations. + n_tasks: int, default None + The number of tasks this class is expected to handle. + n_classes: int, default None + If specified use this as the number of classes. 
Else will try to + impute it as `n_classes = max(y) + 1` for arrays and as + `n_classes=2` for the case of scalars. Note this parameter only + has value if `mode=="classification"` + + Returns + ------- + y_out: np.ndarray + If `mode=="classification"`, `y_out` is an array of shape `(N, + n_tasks, n_classes)`. If `mode=="regression"`, `y_out` is an array + of shape `(N, n_tasks)`. + """ + if n_tasks is None: + raise ValueError("n_tasks must be specified") + if mode not in ["classification", "regression"]: + raise ValueError("mode must be either classification or regression.") + if mode == "classification" and n_classes is None: + raise ValueError("n_classes must be specified") + if not isinstance(y, np.ndarray): + raise ValueError("y must be a np.ndarray") + # Handle n_classes/n_task shape ambiguity + if mode == "classification" and len(y.shape) == 2: + if n_classes == y.shape[1] and n_tasks != 1 and n_classes != n_tasks: + raise ValueError("Shape of input doesn't match expected n_tasks=1") + elif n_classes == y.shape[1] and n_tasks == 1: + # Add in task dimension + y = np.expand_dims(y, 1) + if len(y.shape) == 1 and n_tasks != 1: + raise ValueError("n_tasks must equal 1 for a 1D set of labels.") + if (len(y.shape) == 2 or len(y.shape) == 3) and n_tasks != y.shape[1]: + raise ValueError( + "Shape of input doesn't match expected n_tasks=%d" % n_tasks) + if len(y.shape) >= 4: + raise ValueError( + "Labels y must be a float scalar or a ndarray of shape `(N,)` or " + "`(N, n_tasks)` or `(N, n_tasks, 1)` for regression problems and " + "of shape `(N,)` or `(N, n_tasks)` or `(N, n_tasks, 1)` for classification problems" + ) + if len(y.shape) == 1: + # Insert a task dimension (we know n_tasks=1 from above0 + y_out = np.expand_dims(y, 1) + elif len(y.shape) == 2: + y_out = y + elif len(y.shape) == 3: + # If 3D and last dimension isn't 1, assume this is one-hot encoded and return as-is. 
+ if y.shape[-1] != 1: + return y + y_out = np.squeeze(y, axis=-1) + # Handle classification. We need to convert labels into one-hot representation. + if mode == "classification": + all_y_task = [] + for task in range(n_tasks): + y_task = y_out[:, task] + # check whether n_classes is int or not + assert isinstance(n_classes, int) + y_hot = to_one_hot(y_task, n_classes=n_classes) + y_hot = np.expand_dims(y_hot, 1) + all_y_task.append(y_hot) + y_out = np.concatenate(all_y_task, axis=1) + return y_out + + +def normalize_prediction_shape(y: np.ndarray, + mode: Optional[str] = None, + n_tasks: Optional[int] = None, + n_classes: Optional[int] = None): + """A utility function to correct the shape of provided predictions. + + The metric computation classes expect that inputs for classification + have the uniform shape `(N, n_tasks, n_classes)` and inputs for + regression have the uniform shape `(N, n_tasks)`. This function + normalizes the provided input array to have the desired shape. + + Examples + -------- + >>> import numpy as np + >>> y = np.random.rand(10) + >>> y_out = normalize_prediction_shape(y, "regression", n_tasks=1) + >>> y_out.shape + (10, 1) + + Parameters + ---------- + y: np.ndarray + If `mode=="classification"`, `y` is an array of shape `(N,)` or + `(N, n_tasks)` or `(N, n_tasks, n_classes)`. If + `mode=="regression"`, `y` is an array of shape `(N,)` or `(N, + n_tasks)`or `(N, n_tasks, 1)`. + mode: str, default None + If `mode` is "classification" or "regression", attempts to apply + data transformations. + n_tasks: int, default None + The number of tasks this class is expected to handle. + n_classes: int, default None + If specified use this as the number of classes. Else will try to + impute it as `n_classes = max(y) + 1` for arrays and as + `n_classes=2` for the case of scalars. 
Note this parameter only + has value if `mode=="classification"` + + Returns + ------- + y_out: np.ndarray + If `mode=="classification"`, `y_out` is an array of shape `(N, + n_tasks, n_classes)`. If `mode=="regression"`, `y_out` is an array + of shape `(N, n_tasks)`. + """ + if n_tasks is None: + raise ValueError("n_tasks must be specified") + if mode == "classification" and n_classes is None: + raise ValueError("n_classes must be specified") + if not isinstance(y, np.ndarray): + raise ValueError("y must be a np.ndarray") + # Handle n_classes/n_task shape ambiguity + if mode == "classification" and len(y.shape) == 2: + if n_classes == y.shape[1] and n_tasks != 1 and n_classes != n_tasks: + raise ValueError("Shape of input doesn't match expected n_tasks=1") + elif n_classes == y.shape[1] and n_tasks == 1: + # Add in task dimension + y = np.expand_dims(y, 1) + if (len(y.shape) == 2 or len(y.shape) == 3) and n_tasks != y.shape[1]: + raise ValueError( + "Shape of input doesn't match expected n_tasks=%d" % n_tasks) + if len(y.shape) >= 4: + raise ValueError( + "Predictions y must be a float scalar or a ndarray of shape `(N,)` or " + "`(N, n_tasks)` or `(N, n_tasks, 1)` for regression problems and " + "of shape `(N,)` or `(N, n_tasks)` or `(N, n_tasks, n_classes)` for classification problems" + ) + if mode == "classification": + if n_classes is None: + raise ValueError("n_classes must be specified.") + if len(y.shape) == 1 or len(y.shape) == 2: + # Make everything 2D so easy to handle + if len(y.shape) == 1: + y = y[:, np.newaxis] + # Handle each task separately. + all_y_task = [] + for task in range(n_tasks): + y_task = y[:, task] + if len(np.unique(y_task)) > n_classes: + # Handle continuous class probabilites of positive class for binary + if n_classes > 2: + raise ValueError( + "Cannot handle continuous probabilities for multiclass problems." 
+ "Need a per-class probability") + # Fill in class 0 probabilities + y_task = np.array([1 - y_task, y_task]).T + # Add a task dimension to concatenate on + y_task = np.expand_dims(y_task, 1) + all_y_task.append(y_task) + else: + # Handle binary labels + # make y_hot of shape (N, n_classes) + y_task = to_one_hot(y_task, n_classes=n_classes) + # Add a task dimension to concatenate on + y_task = np.expand_dims(y_task, 1) + all_y_task.append(y_task) + y_out = np.concatenate(all_y_task, axis=1) + elif len(y.shape) == 3: + y_out = y + elif mode == "regression": + if len(y.shape) == 1: + # Insert a task dimension + y_out = np.expand_dims(y, 1) + elif len(y.shape) == 2: + y_out = y + elif len(y.shape) == 3: + if y.shape[-1] != 1: + raise ValueError( + "y must be a float scalar or a ndarray of shape `(N,)` or " + "`(N, n_tasks)` or `(N, n_tasks, 1)` for regression problems.") + y_out = np.squeeze(y, axis=-1) + else: + raise ValueError("mode must be either classification or regression.") + return y_out + + +def handle_classification_mode( + y: np.ndarray, + classification_handling_mode: Optional[str] = None, + threshold_value: Optional[float] = None) -> np.ndarray: + """Handle classification mode. + + Transform predictions so that they have the correct classification mode. + + Parameters + ---------- + y: np.ndarray + Must be of shape `(N, n_tasks, n_classes)` + classification_handling_mode: str, default None + DeepChem models by default predict class probabilities for + classification problems. This means that for a given singletask + prediction, after shape normalization, the DeepChem prediction will be a + numpy array of shape `(N, n_classes)` with class probabilities. + `classification_handling_mode` is a string that instructs this method + how to handle transforming these probabilities. It can take on the + following values: + - None: default value. Pass in `y_pred` directy into `self.metric`. + - "threshold": Use `threshold_predictions` to threshold `y_pred`. 
Use + `threshold_value` as the desired threshold. + - "threshold-one-hot": Use `threshold_predictions` to threshold `y_pred` + using `threshold_values`, then apply `to_one_hot` to output. + threshold_value: float, default None + If set, and `classification_handling_mode` is "threshold" or + "threshold-one-hot" apply a thresholding operation to values with this + threshold. This option isj only sensible on binary classification tasks. + If float, this will be applied as a binary classification value. + + Returns + ------- + y_out: np.ndarray + If `classification_handling_mode` is None, then of shape `(N, n_tasks, n_classes)`. + If `classification_handling_mode` is "threshold", then of shape `(N, n_tasks)`. + If `classification_handling_mode is "threshold-one-hot", then of shape `(N, n_tasks, n_classes)" + """ + if len(y.shape) != 3: + raise ValueError("y must be of shape (N, n_tasks, n_classes)") + N, n_tasks, n_classes = y.shape + if classification_handling_mode is None: + return y + elif classification_handling_mode == "threshold": + thresholded = [] + for task in range(n_tasks): + task_array = y[:, task, :] + # Now of shape (N,) + task_array = threshold_predictions(task_array, threshold_value) + # Now of shape (N, 1) + task_array = np.expand_dims(task_array, 1) + thresholded.append(task_array) + # Returns shape (N, n_tasks) + return np.concatenate(thresholded, axis=1) + elif classification_handling_mode == "threshold-one-hot": + thresholded = [] + for task in range(n_tasks): + task_array = y[:, task, :] + # Now of shape (N,) + task_array = threshold_predictions(task_array, threshold_value) + # Now of shape (N, n_classes) + task_array = to_one_hot(task_array, n_classes=n_classes) + # Now of shape (N, 1, n_classes) + task_array = np.expand_dims(task_array, 1) + thresholded.append(task_array) + # Returns shape (N, n_tasks, n_classes) + return np.concatenate(thresholded, axis=1) + else: + raise ValueError( + "classification_handling_mode must be one of None, 
threshold, threshold-one-hot" + ) + + +def to_one_hot(y: np.ndarray, n_classes: int = 2) -> np.ndarray: + """Transforms label vector into one-hot encoding. + + Turns y into vector of shape `(N, n_classes)` with a one-hot + encoding. Assumes that `y` takes values from `0` to `n_classes - 1`. + + Parameters + ---------- + y: np.ndarray + A vector of shape `(N,)` or `(N, 1)` + n_classes: int, default 2 + If specified use this as the number of classes. Else will try to + impute it as `n_classes = max(y) + 1` for arrays and as + `n_classes=2` for the case of scalars. Note this parameter only + has value if `mode=="classification"` + + Returns + ------- + np.ndarray + A numpy array of shape `(N, n_classes)`. + """ + if len(y.shape) > 2: + raise ValueError("y must be a vector of shape (N,) or (N, 1)") + if len(y.shape) == 2 and y.shape[1] != 1: + raise ValueError("y must be a vector of shape (N,) or (N, 1)") + if len(np.unique(y)) > n_classes: + raise ValueError("y has more than n_class unique elements.") + N = np.shape(y)[0] + y_hot = np.zeros((N, n_classes)) + y_hot[np.arange(N), y.astype(np.int64)] = 1 + return y_hot + + +def from_one_hot(y: np.ndarray, axis: int = 1) -> np.ndarray: + """Transforms label vector from one-hot encoding. + + Parameters + ---------- + y: np.ndarray + A vector of shape `(n_samples, num_classes)` + axis: int, optional (default 1) + The axis with one-hot encodings to reduce on. + + Returns + ------- + np.ndarray + A numpy array of shape `(n_samples,)` + """ + return np.argmax(y, axis=axis) + + +class Metric(object): + """Wrapper class for computing user-defined metrics. + + The `Metric` class provides a wrapper for standardizing the API + around different classes of metrics that may be useful for DeepChem + models. The implementation provides a few non-standard conveniences + such as built-in support for multitask and multiclass metrics. + + There are a variety of different metrics this class aims to support. 
+ Metrics for classification and regression that assume that values to + compare are scalars are supported. + + At present, this class doesn't support metric computation on models + which don't present scalar outputs. For example, if you have a + generative model which predicts images or molecules, you will need + to write a custom evaluation and metric setup. + """ + + def __init__(self, + metric: Callable[..., float], + task_averager: Optional[Callable[..., Any]] = None, + name: Optional[str] = None, + threshold: Optional[float] = None, + mode: Optional[str] = None, + n_tasks: Optional[int] = None, + classification_handling_mode: Optional[str] = None, + threshold_value: Optional[float] = None, + compute_energy_metric: Optional[bool] = None): + """ + Parameters + ---------- + metric: function + Function that takes args y_true, y_pred (in that order) and + computes desired score. If sample weights are to be considered, + `metric` may take in an additional keyword argument + `sample_weight`. + task_averager: function, default None + If not None, should be a function that averages metrics across + tasks. + name: str, default None + Name of this metric + threshold: float, default None (DEPRECATED) + Used for binary metrics and is the threshold for the positive + class. + mode: str, default None + Should usually be "classification" or "regression." + n_tasks: int, default None + The number of tasks this class is expected to handle. + classification_handling_mode: str, default None + DeepChem models by default predict class probabilities for + classification problems. This means that for a given singletask + prediction, after shape normalization, the DeepChem prediction will be a + numpy array of shape `(N, n_classes)` with class probabilities. + `classification_handling_mode` is a string that instructs this method + how to handle transforming these probabilities. It can take on the + following values: + - None: default value. 
Pass in `y_pred` directy into `self.metric`. + - "threshold": Use `threshold_predictions` to threshold `y_pred`. Use + `threshold_value` as the desired threshold. + - "threshold-one-hot": Use `threshold_predictions` to threshold `y_pred` + using `threshold_values`, then apply `to_one_hot` to output. + threshold_value: float, default None + If set, and `classification_handling_mode` is "threshold" or + "threshold-one-hot" apply a thresholding operation to values with this + threshold. This option is only sensible on binary classification tasks. + If float, this will be applied as a binary classification value. + compute_energy_metric: bool, default None (DEPRECATED) + Deprecated metric. Will be removed in a future version of + DeepChem. Do not use. + """ + if threshold is not None: + logger.warn( + "threshold is deprecated and will be removed in a future version of DeepChem." + "Set threshold in compute_metric instead.") + if compute_energy_metric is not None: + self.compute_energy_metric = compute_energy_metric + logger.warn( + "compute_energy_metric is deprecated and will be removed in a future version of DeepChem." 
+ ) + else: + self.compute_energy_metric = False + + self.metric = metric + if task_averager is None: + self.task_averager = np.mean + else: + self.task_averager = task_averager + if name is None: + if task_averager is None: + if hasattr(self.metric, '__name__'): + self.name = self.metric.__name__ + else: + self.name = "unknown metric" + else: + if hasattr(self.metric, '__name__'): + self.name = task_averager.__name__ + "-" + self.metric.__name__ + else: + self.name = "unknown metric" + else: + self.name = name + + if mode is None: + # These are some smart defaults + if self.metric.__name__ in [ + "roc_auc_score", "matthews_corrcoef", "recall_score", + "accuracy_score", "kappa_score", "cohen_kappa_score", + "precision_score", "balanced_accuracy_score", "prc_auc_score", + "f1_score", "bedroc_score", "jaccard_score", "jaccard_index", + "pixel_error" + ]: + mode = "classification" + # These are some smart defaults corresponding to sklearn's required + # behavior + if classification_handling_mode is None: + if self.metric.__name__ in [ + "matthews_corrcoef", "cohen_kappa_score", "kappa_score", + "balanced_accuracy_score", "recall_score", "jaccard_score", + "jaccard_index", "pixel_error", "f1_score" + ]: + classification_handling_mode = "threshold" + elif self.metric.__name__ in [ + "accuracy_score", "precision_score", "bedroc_score" + ]: + classification_handling_mode = "threshold-one-hot" + elif self.metric.__name__ in ["roc_auc_score", "prc_auc_score"]: + classification_handling_mode = None + elif self.metric.__name__ in [ + "pearson_r2_score", "r2_score", "mean_squared_error", + "mean_absolute_error", "rms_score", "mae_score", "pearsonr", + "concordance_index" + ]: + mode = "regression" + else: + raise ValueError( + "Please specify the mode of this metric. 
mode must be 'regression' or 'classification'" + ) + + self.mode = mode + self.n_tasks = n_tasks + if classification_handling_mode not in [ + None, "threshold", "threshold-one-hot" + ]: + raise ValueError( + "classification_handling_mode must be one of None, 'threshold', 'threshold_one_hot'" + ) + self.classification_handling_mode = classification_handling_mode + self.threshold_value = threshold_value + + def compute_metric(self, + y_true: np.ndarray, + y_pred: np.ndarray, + w: Optional[np.ndarray] = None, + n_tasks: Optional[int] = None, + n_classes: int = 2, + filter_nans: bool = False, + per_task_metrics: bool = False, + use_sample_weights: bool = False, + **kwargs) -> np.ndarray: + """Compute a performance metric for each task. + + Parameters + ---------- + y_true: np.ndarray + An np.ndarray containing true values for each task. Must be of shape + `(N,)` or `(N, n_tasks)` or `(N, n_tasks, n_classes)` if a + classification metric. If of shape `(N, n_tasks)` values can either be + class-labels or probabilities of the positive class for binary + classification problems. If a regression problem, must be of shape + `(N,)` or `(N, n_tasks)` or `(N, n_tasks, 1)` if a regression metric. + y_pred: np.ndarray + An np.ndarray containing predicted values for each task. Must be + of shape `(N, n_tasks, n_classes)` if a classification metric, + else must be of shape `(N, n_tasks)` if a regression metric. + w: np.ndarray, default None + An np.ndarray containing weights for each datapoint. If + specified, must be of shape `(N, n_tasks)`. + n_tasks: int, default None + The number of tasks this class is expected to handle. + n_classes: int, default 2 + Number of classes in data for classification tasks. + filter_nans: bool, default False (DEPRECATED) + Remove NaN values in computed metrics + per_task_metrics: bool, default False + If true, return computed metric for each task on multitask dataset. + use_sample_weights: bool, default False + If set, use per-sample weights `w`. 
+ kwargs: dict + Will be passed on to self.metric + + Returns + ------- + np.ndarray + A numpy array containing metric values for each task. + """ + # Attempt some limited shape imputation to find n_tasks + if n_tasks is None: + if self.n_tasks is None and isinstance(y_true, np.ndarray): + if len(y_true.shape) == 1: + n_tasks = 1 + elif len(y_true.shape) >= 2: + n_tasks = y_true.shape[1] + else: + n_tasks = self.n_tasks + # check whether n_tasks is int or not + # This is because `normalize_weight_shape` require int value. + assert isinstance(n_tasks, int) + + y_true = normalize_labels_shape( + y_true, mode=self.mode, n_tasks=n_tasks, n_classes=n_classes) + y_pred = normalize_prediction_shape( + y_pred, mode=self.mode, n_tasks=n_tasks, n_classes=n_classes) + if self.mode == "classification": + y_true = handle_classification_mode( + y_true, self.classification_handling_mode, self.threshold_value) + y_pred = handle_classification_mode( + y_pred, self.classification_handling_mode, self.threshold_value) + n_samples = y_true.shape[0] + w = normalize_weight_shape(w, n_samples, n_tasks) + computed_metrics = [] + for task in range(n_tasks): + y_task = y_true[:, task] + y_pred_task = y_pred[:, task] + w_task = w[:, task] + + metric_value = self.compute_singletask_metric( + y_task, + y_pred_task, + w_task, + use_sample_weights=use_sample_weights, + **kwargs) + computed_metrics.append(metric_value) + logger.info("computed_metrics: %s" % str(computed_metrics)) + if n_tasks == 1: + # FIXME: Incompatible types in assignment + computed_metrics = computed_metrics[0] # type: ignore + + # DEPRECATED. WILL BE REMOVED IN NEXT DEEPCHEM VERSION + if filter_nans: + computed_metrics = np.array(computed_metrics) + computed_metrics = computed_metrics[~np.isnan(computed_metrics)] + # DEPRECATED. 
WILL BE REMOVED IN NEXT DEEPCHEM VERSION + if self.compute_energy_metric: + force_error = self.task_averager(computed_metrics[1:]) * 4961.47596096 + logger.info("Force error (metric: np.mean(%s)): %f kJ/mol/A" % + (self.name, force_error)) + return computed_metrics[0] + elif not per_task_metrics: + return self.task_averager(computed_metrics) + else: + return self.task_averager(computed_metrics), computed_metrics + + def compute_singletask_metric(self, + y_true: np.ndarray, + y_pred: np.ndarray, + w: Optional[np.ndarray] = None, + n_samples: Optional[int] = None, + use_sample_weights: bool = False, + **kwargs) -> float: + """Compute a metric value. + + Parameters + ---------- + y_true: `np.ndarray` + True values array. This array must be of shape `(N, + n_classes)` if classification and `(N,)` if regression. + y_pred: `np.ndarray` + Predictions array. This array must be of shape `(N, n_classes)` + if classification and `(N,)` if regression. + w: `np.ndarray`, default None + Sample weight array. This array must be of shape `(N,)` + n_samples: int, default None (DEPRECATED) + The number of samples in the dataset. This is `N`. This argument is + ignored. + use_sample_weights: bool, default False + If set, use per-sample weights `w`. + kwargs: dict + Will be passed on to self.metric + + Returns + ------- + metric_value: float + The computed value of the metric. 
+ """ + if n_samples is not None: + logger.warning("n_samples is a deprecated argument which is ignored.") + # Attempt to convert both into the same type + if self.mode == "regression": + if len(y_true.shape) != 1 or len( + y_pred.shape) != 1 or len(y_true) != len(y_pred): + raise ValueError( + "For regression metrics, y_true and y_pred must both be of shape (N,)" + ) + elif self.mode == "classification": + pass + # if len(y_true.shape) != 2 or len(y_pred.shape) != 2 or y_true.shape != y_pred.shape: + # raise ValueError("For classification metrics, y_true and y_pred must both be of shape (N, n_classes)") + else: + raise ValueError( + "Only classification and regression are supported for metrics calculations." + ) + if use_sample_weights: + metric_value = self.metric(y_true, y_pred, sample_weight=w, **kwargs) + else: + metric_value = self.metric(y_true, y_pred, **kwargs) + return metric_value diff --git a/deepchem/metrics/score_function.py b/deepchem/metrics/score_function.py new file mode 100644 index 0000000000000000000000000000000000000000..ec8e31686e89b2568d0c53fa5866cc7aeb0648b0 --- /dev/null +++ b/deepchem/metrics/score_function.py @@ -0,0 +1,216 @@ +"""Evaluation metrics.""" + +import numpy as np +from sklearn.metrics import matthews_corrcoef # noqa +from sklearn.metrics import recall_score # noqa +from sklearn.metrics import cohen_kappa_score +from sklearn.metrics import r2_score # noqa +from sklearn.metrics import mean_squared_error +from sklearn.metrics import mean_absolute_error +from sklearn.metrics import precision_score # noqa +from sklearn.metrics import precision_recall_curve +from sklearn.metrics import auc +from sklearn.metrics import jaccard_score +from sklearn.metrics import f1_score +from sklearn.metrics import roc_auc_score # noqa +from sklearn.metrics import accuracy_score # noqa +from sklearn.metrics import balanced_accuracy_score # noqa +from scipy.stats import pearsonr + +# kappa_score is an alias for `sklearn.metrics.cohen_kappa_score` 
+kappa_score = cohen_kappa_score + + +def pearson_r2_score(y: np.ndarray, y_pred: np.ndarray) -> float: + """Computes Pearson R^2 (square of Pearson correlation). + + Parameters + ---------- + y: np.ndarray + ground truth array + y_pred: np.ndarray + predicted array + + Returns + ------- + float + The Pearson-R^2 score. + """ + return pearsonr(y, y_pred)[0]**2 + + +def jaccard_index(y: np.ndarray, y_pred: np.ndarray) -> float: + """Computes Jaccard Index which is the Intersection Over Union metric + which is commonly used in image segmentation tasks. + + DEPRECATED: WILL BE REMOVED IN A FUTURE VERSION OF DEEEPCHEM. USE `jaccard_score` instead. + + Parameters + ---------- + y: np.ndarray + ground truth array + y_pred: np.ndarray + predicted array + + Returns + ------- + score: float + The jaccard index. A number between 0 and 1. + """ + return jaccard_score(y, y_pred) + + +def pixel_error(y: np.ndarray, y_pred: np.ndarray) -> float: + """An error metric in case y, y_pred are images. + + Defined as 1 - the maximal F-score of pixel similarity, or squared + Euclidean distance between the original and the result labels. + + Parameters + ---------- + y: np.ndarray + ground truth array + y_pred: np.ndarray + predicted array + + Returns + ------- + score: float + The pixel-error. A number between 0 and 1. + """ + return 1 - f1_score(y, y_pred) + + +def prc_auc_score(y: np.ndarray, y_pred: np.ndarray) -> float: + """Compute area under precision-recall curve + + Parameters + ---------- + y: np.ndarray + A numpy array of shape `(N, n_classes)` or `(N,)` with true labels + y_pred: np.ndarray + Of shape `(N, n_classes)` with class probabilities. + + Returns + ------- + float + The area under the precision-recall curve. A number between 0 and 1. 
+ """ + precision, recall, _ = precision_recall_curve(y[:, 1], y_pred[:, 1]) + return auc(recall, precision) + + +def rms_score(y_true: np.ndarray, y_pred: np.ndarray) -> float: + """Computes RMS error.""" + return np.sqrt(mean_squared_error(y_true, y_pred)) + + +def mae_score(y_true: np.ndarray, y_pred: np.ndarray) -> float: + """Computes MAE.""" + return mean_absolute_error(y_true, y_pred) + + +def bedroc_score(y_true: np.ndarray, y_pred: np.ndarray, alpha: float = 20.0): + """Compute BEDROC metric. + + BEDROC metric implemented according to Truchon and Bayley that modifies + the ROC score by allowing for a factor of early recognition. + Please confirm details from [1]_. + + Parameters + ---------- + y_true: np.ndarray + Binary class labels. 1 for positive class, 0 otherwise + y_pred: np.ndarray + Predicted labels + alpha: float, default 20.0 + Early recognition parameter + + Returns + ------- + float + Value in [0, 1] that indicates the degree of early recognition + + Notes + ----- + This function requires RDKit to be installed. + + References + ---------- + .. [1] Truchon et al. "Evaluating virtual screening methods: good and bad metrics + for the “early recognition” problem." Journal of chemical information and modeling + 47.2 (2007): 488-508. 
+ """ + try: + from rdkit.ML.Scoring.Scoring import CalcBEDROC + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + + # validation + assert len(y_true) == len(y_pred), 'Number of examples do not match' + assert np.array_equal( + np.unique(y_true).astype(int), + [0, 1]), ('Class labels must be binary: %s' % np.unique(y_true)) + + yt = np.asarray(y_true) + yp = np.asarray(y_pred) + + yt = yt.flatten() + yp = yp[:, 1].flatten() # Index 1 because one_hot predictions + + scores = list(zip(yt, yp)) + scores = sorted(scores, key=lambda pair: pair[1], reverse=True) + + return CalcBEDROC(scores, 0, alpha) + + +def concordance_index(y_true: np.ndarray, y_pred: np.ndarray) -> float: + """Compute Concordance index. + + Statistical metric indicates the quality of the predicted ranking. + Please confirm details from [1]_. + + Parameters + ---------- + y_true: np.ndarray + continous value + y_pred: np.ndarray + Predicted value + + Returns + ------- + float + score between [0,1] + + References + ---------- + .. [1] Steck, Harald, et al. "On ranking in survival analysis: + Bounds on the concordance index." Advances in neural information processing systems (2008): 1209-1216. 
+ """ + + idx = np.argsort(y_true) + y_true = y_true[idx] + y_pred = y_pred[idx] + + pairs = 0 + correct_pairs = 0.0 + + for i in range(len(y_true)): + true_a = y_true[i] + pred_a = y_pred[i] + + for j in range(i + 1, len(y_true)): + true_b = y_true[j] + pred_b = y_pred[j] + if true_a != true_b: + pairs += 1 + if pred_a == pred_b: + correct_pairs += 0.5 + elif pred_a < pred_b: + correct_pairs += true_a < true_b + else: + correct_pairs += true_a > true_b + + assert pairs > 0, 'No pairs for comparision' + + return correct_pairs / pairs diff --git a/deepchem/metrics/tests/metrics_test.py b/deepchem/metrics/tests/metrics_test.py deleted file mode 100644 index e36fd6709180567a9ad37486743a53e2f6e01018..0000000000000000000000000000000000000000 --- a/deepchem/metrics/tests/metrics_test.py +++ /dev/null @@ -1,89 +0,0 @@ -""" -Tests for metricsT. -""" -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import numpy as np -import deepchem as dc -from tensorflow.python.platform import googletest -from deepchem import metrics - - -class MetricsTest(googletest.TestCase): - - def test_kappa_score(self): - y_true = [1, 0, 1, 0] - y_pred = [0.8, 0.2, 0.3, 0.4] # [1, 0, 0, 0] with 0.5 threshold - kappa = dc.metrics.kappa_score(y_true, np.greater(y_pred, 0.5)) - observed_agreement = 3.0 / 4.0 - expected_agreement = ((2 * 1) + (2 * 3)) / 4.0**2 - expected_kappa = np.true_divide(observed_agreement - expected_agreement, - 1.0 - expected_agreement) - self.assertAlmostEqual(kappa, expected_kappa) - - def test_one_sample(self): - """Test that the metrics won't raise error even in an extreme condition - where there is only one sample with w > 0. 
- """ - np.random.seed(123) - n_samples = 2 - y_true = np.array([0, 0]) - y_pred = np.random.rand(n_samples, 2) - w = np.array([0, 1]) - all_metrics = [ - dc.metrics.Metric(dc.metrics.recall_score), - dc.metrics.Metric(dc.metrics.matthews_corrcoef), - dc.metrics.Metric(dc.metrics.roc_auc_score) - ] - for metric in all_metrics: - score = metric.compute_singletask_metric(y_true, y_pred, w) - self.assertTrue(np.isnan(score) or score == 0) - - def test_r2_score(self): - """Test that R^2 metric passes basic sanity tests""" - np.random.seed(123) - n_samples = 10 - y_true = np.random.rand(n_samples,) - y_pred = np.random.rand(n_samples,) - regression_metric = dc.metrics.Metric(dc.metrics.r2_score) - assert np.isclose( - dc.metrics.r2_score(y_true, y_pred), - regression_metric.compute_metric(y_true, y_pred)) - - def test_one_hot(self): - y = np.array([0, 0, 1, 0, 1, 1, 0]) - y_hot = metrics.to_one_hot(y) - expected = np.array([[1, 0], [1, 0], [0, 1], [1, 0], [0, 1], [0, 1], [1, - 0]]) - yp = metrics.from_one_hot(y_hot) - assert np.array_equal(expected, y_hot) - assert np.array_equal(y, yp) - - def test_bedroc_score(self): - - num_actives = 20 - num_total = 400 - - y_true_actives = np.ones(num_actives) - y_true_inactives = np.zeros(num_total - num_actives) - y_true = np.concatenate([y_true_actives, y_true_inactives]) - - # Best score case - y_pred_best = dc.metrics.to_one_hot( - np.concatenate([y_true_actives, y_true_inactives])) - best_score = dc.metrics.bedroc_score(y_true, y_pred_best) - self.assertAlmostEqual(best_score, 1.0) - - # Worst score case - worst_pred_actives = np.zeros(num_actives) - worst_pred_inactives = np.ones(num_total - num_actives) - y_pred_worst = dc.metrics.to_one_hot( - np.concatenate([worst_pred_actives, worst_pred_inactives])) - worst_score = dc.metrics.bedroc_score(y_true, y_pred_worst) - self.assertAlmostEqual(worst_score, 0.0, 4) - - -if __name__ == '__main__': - googletest.main() diff --git a/deepchem/metrics/tests/test_genomics.py 
b/deepchem/metrics/tests/test_genomics.py index d5abf314c3500005f8010c246da8d49828fc41db..b96fd805559e97cd9539c585c7e553c02fa6c07b 100644 --- a/deepchem/metrics/tests/test_genomics.py +++ b/deepchem/metrics/tests/test_genomics.py @@ -2,18 +2,17 @@ Test that genomic metrics work. """ import unittest -import os import numpy as np import deepchem as dc import tensorflow as tf -LETTERS = "ACGT" - from deepchem.metrics.genomic_metrics import get_motif_scores from deepchem.metrics.genomic_metrics import get_pssm_scores from deepchem.metrics.genomic_metrics import in_silico_mutagenesis +LETTERS = "ACGT" + class TestGenomicMetrics(unittest.TestCase): """ @@ -25,7 +24,8 @@ class TestGenomicMetrics(unittest.TestCase): # Encode motif motif_name = "TAL1_known4" sequences = np.array(["ACGTA", "GATAG", "CGCGC"]) - sequences = dc.utils.genomics.seq_one_hot_encode(sequences, letters=LETTERS) + sequences = dc.utils.genomics_utils.seq_one_hot_encode( + sequences, letters=LETTERS) # sequences now has shape (3, 4, 5, 1) self.assertEqual(sequences.shape, (3, 4, 5, 1)) @@ -34,9 +34,9 @@ class TestGenomicMetrics(unittest.TestCase): def test_get_pssm_scores(self): """Test get_pssm_scores returns correct shape.""" - motif_name = "TAL1_known4" sequences = np.array(["ACGTA", "GATAG", "CGCGC"]) - sequences = dc.utils.genomics.seq_one_hot_encode(sequences, letters=LETTERS) + sequences = dc.utils.genomics_utils.seq_one_hot_encode( + sequences, letters=LETTERS) # sequences now has shape (3, 4, 5, 1) self.assertEqual(sequences.shape, (3, 4, 5, 1)) pssm = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]) @@ -58,14 +58,12 @@ class TestGenomicMetrics(unittest.TestCase): """Test in-silico mutagenesis returns correct shape.""" # Construct and train SequenceDNN model sequences = np.array(["ACGTA", "GATAG", "CGCGC"]) - sequences = dc.utils.genomics.seq_one_hot_encode(sequences, letters=LETTERS) + sequences = dc.utils.genomics_utils.seq_one_hot_encode( + sequences, letters=LETTERS) 
labels = np.array([1, 0, 0]) labels = np.reshape(labels, (3, 1)) self.assertEqual(sequences.shape, (3, 4, 5, 1)) - #X = np.random.rand(10, 1, 4, 50) - #y = np.random.randint(0, 2, size=(10, 1)) - #dataset = dc.data.NumpyDataset(X, y) dataset = dc.data.NumpyDataset(sequences, labels) model = self.create_model_for_mutagenesis() model.fit(dataset, nb_epoch=1) @@ -78,14 +76,12 @@ class TestGenomicMetrics(unittest.TestCase): """Test in-silico mutagenesis returns nonzero output.""" # Construct and train SequenceDNN model sequences = np.array(["ACGTA", "GATAG", "CGCGC"]) - sequences = dc.utils.genomics.seq_one_hot_encode(sequences, letters=LETTERS) + sequences = dc.utils.genomics_utils.seq_one_hot_encode( + sequences, letters=LETTERS) labels = np.array([1, 0, 0]) labels = np.reshape(labels, (3, 1)) self.assertEqual(sequences.shape, (3, 4, 5, 1)) - #X = np.random.rand(10, 1, 4, 50) - #y = np.random.randint(0, 2, size=(10, 1)) - #dataset = dc.data.NumpyDataset(X, y) dataset = dc.data.NumpyDataset(sequences, labels) model = self.create_model_for_mutagenesis() model.fit(dataset, nb_epoch=1) diff --git a/deepchem/metrics/tests/test_metrics.py b/deepchem/metrics/tests/test_metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..5cd665174a83a0847b747aa7b378739bc9ff2c13 --- /dev/null +++ b/deepchem/metrics/tests/test_metrics.py @@ -0,0 +1,90 @@ +""" +Tests for metricsT. 
+""" +import numpy as np +import deepchem as dc + + +def test_kappa_score(): + y_true = [1, 0, 1, 0] + y_pred = [0.8, 0.2, 0.3, 0.4] # [1, 0, 0, 0] with 0.5 threshold + kappa = dc.metrics.kappa_score(y_true, np.greater(y_pred, 0.5)) + observed_agreement = 3.0 / 4.0 + expected_agreement = ((2 * 1) + (2 * 3)) / 4.0**2 + expected_kappa = np.true_divide(observed_agreement - expected_agreement, + 1.0 - expected_agreement) + np.testing.assert_almost_equal(kappa, expected_kappa) + + +def test_one_sample(): + """Test that the metrics won't raise error even in an extreme condition + where there is only one sample with w > 0. + """ + np.random.seed(123) + n_samples = 2 + y_true = np.random.randint(2, size=(n_samples,)) + y_pred = np.random.randint(2, size=(n_samples,)) + w = np.array([0, 1]) + all_metrics = [ + dc.metrics.Metric(dc.metrics.recall_score), + dc.metrics.Metric(dc.metrics.matthews_corrcoef), + dc.metrics.Metric(dc.metrics.roc_auc_score) + ] + for metric in all_metrics: + _ = metric.compute_singletask_metric(y_true, y_pred, w) + + +def test_r2_score(): + """Test that R^2 metric passes basic sanity tests""" + np.random.seed(123) + n_samples = 10 + y_true = np.random.rand(n_samples,) + y_pred = np.random.rand(n_samples,) + regression_metric = dc.metrics.Metric(dc.metrics.r2_score, n_tasks=1) + assert np.isclose( + dc.metrics.r2_score(y_true, y_pred), + regression_metric.compute_metric(y_true, y_pred)) + + +def test_bedroc_score(): + """Test BEDROC.""" + num_actives = 20 + num_total = 400 + + y_true_actives = np.ones(num_actives) + y_true_inactives = np.zeros(num_total - num_actives) + y_true = np.concatenate([y_true_actives, y_true_inactives]) + + # Best score case + y_pred_best = dc.metrics.to_one_hot( + np.concatenate([y_true_actives, y_true_inactives])) + best_score = dc.metrics.bedroc_score(y_true, y_pred_best) + np.testing.assert_almost_equal(best_score, 1.0) + + # Worst score case + worst_pred_actives = np.zeros(num_actives) + worst_pred_inactives = 
np.ones(num_total - num_actives) + y_pred_worst = dc.metrics.to_one_hot( + np.concatenate([worst_pred_actives, worst_pred_inactives])) + worst_score = dc.metrics.bedroc_score(y_true, y_pred_worst) + np.testing.assert_almost_equal(worst_score, 0.0, 4) + + +def test_concordance_index(): + """Test concordance index.""" + + metric = dc.metrics.Metric(dc.metrics.concordance_index) + + y_true = np.array([1, 3, 5, 4, 2]) + y_pred = np.array([3, 1, 5, 4, 2]) + assert metric.compute_singletask_metric(y_true, y_pred) == 0.7 + + # best case + y_true = np.array([1, 3, 5, 4, 2]) + y_pred = np.array([1, 3, 5, 4, 2]) + assert metric.compute_singletask_metric(y_true, y_pred) == 1.0 + + # duplicate prediction value + y_true = np.array([1, 3, 5, 4, 2]) + y_pred = np.array([1, 3, 4, 4, 2]) + assert metric.compute_singletask_metric(y_true, y_pred) == 0.95 diff --git a/deepchem/metrics/tests/test_normalize.py b/deepchem/metrics/tests/test_normalize.py new file mode 100644 index 0000000000000000000000000000000000000000..d4caac5d8a5f4a63df03cd6c8cef7b8b06dcd82f --- /dev/null +++ b/deepchem/metrics/tests/test_normalize.py @@ -0,0 +1,194 @@ +"""Test normalization of input.""" + +import numpy as np + +import deepchem as dc +from deepchem.metrics import to_one_hot +from deepchem.metrics import from_one_hot +from deepchem.metrics import threshold_predictions +from deepchem.metrics import handle_classification_mode +from deepchem.metrics import normalize_prediction_shape +from deepchem.metrics import normalize_weight_shape + + +def test_one_hot(): + """Test the one hot encoding.""" + y = np.array([0, 0, 1, 0, 1, 1, 0]) + y_hot = to_one_hot(y) + expected = np.array([[1, 0], [1, 0], [0, 1], [1, 0], [0, 1], [0, 1], [1, 0]]) + yp = from_one_hot(y_hot) + assert np.array_equal(expected, y_hot) + assert np.array_equal(y, yp) + + +def test_handle_classification_mode_none(): + """Test proper thresholding.""" + y = np.random.rand(10, 2) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y = np.expand_dims(y, 
1) + y_expected = y + y_out = handle_classification_mode(y, None) + assert y_out.shape == (10, 1, 2) + assert np.array_equal(y_out, y_expected) + + +def test_handle_classification_mode_threshold(): + """Test proper thresholding.""" + y = np.random.rand(10, 2) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y = np.expand_dims(y, 1) + y_expected = np.argmax(np.squeeze(y), axis=1)[:, np.newaxis] + y_out = handle_classification_mode(y, "threshold", threshold_value=0.5) + assert y_out.shape == (10, 1) + assert np.array_equal(y_out, y_expected) + + +def test_handle_classification_mode_threshold_nonstandard(): + """Test proper thresholding.""" + y = np.random.rand(10, 2) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y_expected = np.where(y[:, 1] >= 0.3, np.ones(10), + np.zeros(10))[:, np.newaxis] + y = np.expand_dims(y, 1) + y_out = handle_classification_mode(y, "threshold", threshold_value=0.3) + assert y_out.shape == (10, 1) + assert np.array_equal(y_out, y_expected) + + +def test_handle_classification_mode_threshold_one_hot(): + """Test proper thresholding.""" + y = np.random.rand(10, 2) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y = np.expand_dims(y, 1) + y_expected = np.expand_dims( + to_one_hot(np.argmax(np.squeeze(y), axis=1), n_classes=2), 1) + y_out = handle_classification_mode( + y, "threshold-one-hot", threshold_value=0.5) + assert y_out.shape == (10, 1, 2) + assert np.array_equal(y_out, y_expected) + + +def test_threshold_predictions_binary(): + """Test thresholding of binary predictions.""" + # Get a random prediction matrix + y = np.random.rand(10, 2) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y_thresh = threshold_predictions(y, 0.5) + assert y_thresh.shape == (10,) + assert (y_thresh == np.argmax(y, axis=1)).all() + + +def test_threshold_predictions_multiclass(): + """Test thresholding of multiclass predictions.""" + y = np.random.rand(10, 5) + y = y / np.sum(y, axis=1)[:, np.newaxis] + y_thresh = threshold_predictions(y) + assert y_thresh.shape == (10,) + 
assert (y_thresh == np.argmax(y, axis=1)).all() + + +def test_normalize_1d_classification_binary(): + """Tests 1d classification normalization.""" + y = np.array([0, 0, 1, 0, 1, 1, 0]) + expected = np.array([[[1., 0.]], [[1., 0.]], [[0., 1.]], [[1., 0.]], + [[0., 1.]], [[0., 1.]], [[1., 0.]]]) + y_out = normalize_prediction_shape( + y, mode="classification", n_tasks=1, n_classes=2) + assert y_out.shape == (7, 1, 2) + assert np.array_equal(expected, y_out) + + +def test_normalize_1d_classification_multiclass(): + """Tests 1d classification normalization.""" + y = np.random.randint(5, size=(200,)) + y_expected = np.expand_dims(to_one_hot(y, n_classes=5), 1) + y_out = normalize_prediction_shape( + y, mode="classification", n_tasks=1, n_classes=5) + assert y_out.shape == (200, 1, 5) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_1d_classification_multiclass_explicit_nclasses(): + """Tests 1d classification normalization.""" + y = np.random.randint(5, size=(10,)) + y_expected = np.expand_dims(to_one_hot(y, n_classes=10), 1) + y_out = normalize_prediction_shape( + y, mode="classification", n_classes=10, n_tasks=1) + assert y_out.shape == (10, 1, 10) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_2d_classification_binary(): + """Tests 2d classification normalization.""" + # Of shape (N, n_classes) + y = np.random.randint(2, size=(10, 1)) + y_expected = np.expand_dims(dc.metrics.to_one_hot(np.squeeze(y)), 1) + y_out = normalize_prediction_shape( + y, mode="classification", n_tasks=1, n_classes=2) + assert y_out.shape == (10, 1, 2) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_3d_classification_binary(): + """Tests 1d classification normalization.""" + # Of shape (N, 1, n_classes) + y = np.random.randint(2, size=(10,)) + y = dc.metrics.to_one_hot(y, n_classes=2) + y = np.expand_dims(y, 1) + y_expected = y + y_out = normalize_prediction_shape( + y, mode="classification", n_tasks=1, n_classes=2) + assert y_out.shape 
== (10, 1, 2) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_1d_regression(): + """Tests 1d regression normalization.""" + y = np.random.rand(10) + y_expected = y[:, np.newaxis] + y_out = normalize_prediction_shape(y, mode="regression", n_tasks=1) + assert y_out.shape == (10, 1) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_2d_regression(): + """Tests 2d regression normalization.""" + y = np.random.rand(10, 5) + y_expected = y + y_out = normalize_prediction_shape(y, mode="regression", n_tasks=5) + assert y_out.shape == (10, 5) + assert np.array_equal(y_expected, y_out) + + +def test_normalize_3d_regression(): + """Tests 3d regression normalization.""" + y = np.random.rand(10, 5, 1) + y_expected = np.squeeze(y) + y_out = normalize_prediction_shape(y, mode="regression", n_tasks=5) + assert y_out.shape == (10, 5) + assert np.array_equal(y_expected, y_out) + + +def test_scalar_weight_normalization(): + """Test normalization of weights.""" + w_out = normalize_weight_shape(w=5, n_samples=10, n_tasks=5) + assert w_out.shape == (10, 5) + assert np.all(w_out == 5 * np.ones((10, 5))) + + +def test_1d_weight_normalization(): + """Test normalization of weights.""" + w = np.random.rand(10) + # This has w for each task. 
+ w_expected = np.array([w, w, w, w, w]).T + w_out = normalize_weight_shape(w, n_samples=10, n_tasks=5) + assert w_out.shape == (10, 5) + assert np.all(w_out == w_expected) + + +def test_2d_weight_normalization(): + """Test normalization of weights.""" + w = np.random.rand(10, 5) + w_out = normalize_weight_shape(w, n_samples=10, n_tasks=5) + assert w_out.shape == (10, 5) + assert np.all(w_out == w) diff --git a/deepchem/models/IRV.py b/deepchem/models/IRV.py index efb40e9d711120287272719d0cd987b812e18d37..92aed4e4510d0fc8096b9d89141a4aa34c5ee871 100644 --- a/deepchem/models/IRV.py +++ b/deepchem/models/IRV.py @@ -2,7 +2,6 @@ import logging import numpy as np import tensorflow as tf -from deepchem.utils.save import log from deepchem.models import KerasModel, layers from deepchem.models.losses import SigmoidCrossEntropy from deepchem.trans import undo_transforms @@ -79,7 +78,7 @@ class Slice(Layer): return tf.slice(inputs, [0] * axis + [slice_num], [-1] * axis + [1]) -class TensorflowMultitaskIRVClassifier(KerasModel): +class MultitaskIRVClassifier(KerasModel): def __init__(self, n_tasks, @@ -87,7 +86,7 @@ class TensorflowMultitaskIRVClassifier(KerasModel): penalty=0.0, mode="classification", **kwargs): - """Initialize TensorflowMultitaskIRVClassifier + """Initialize MultitaskIRVClassifier Parameters ---------- @@ -119,8 +118,19 @@ class TensorflowMultitaskIRVClassifier(KerasModel): if len(logits) == 1 else Concatenate(axis=1)(logits) ] model = tf.keras.Model(inputs=[mol_features], outputs=outputs) - super(TensorflowMultitaskIRVClassifier, self).__init__( + super(MultitaskIRVClassifier, self).__init__( model, SigmoidCrossEntropy(), output_types=['prediction', 'loss'], **kwargs) + + +class TensorflowMultitaskIRVClassifier(MultitaskIRVClassifier): + + def __init__(self, *args, **kwargs): + + warnings.warn( + "TensorflowMultitaskIRVClassifier is deprecated and has been renamed to MultitaskIRVClassifier", + FutureWarning) + + super(TensorflowMultitaskIRVClassifier, 
self).__init__(*args, **kwargs) diff --git a/deepchem/models/__init__.py b/deepchem/models/__init__.py index 0c928d2dc7b2c46583ff2734f2031645ec3af336..d034e2a5aac92e0de08929b8d7b4b0402f11fe7d 100644 --- a/deepchem/models/__init__.py +++ b/deepchem/models/__init__.py @@ -1,17 +1,16 @@ """ Gathers all models in one place for convenient imports """ +# flake8: noqa from deepchem.models.models import Model from deepchem.models.keras_model import KerasModel -from deepchem.models.sklearn_models import SklearnModel -from deepchem.models.xgboost_models import XGBoostModel from deepchem.models.multitask import SingletaskToMultitask from deepchem.models.callbacks import ValidationCallback from deepchem.models.fcnet import MultitaskRegressor from deepchem.models.fcnet import MultitaskClassifier from deepchem.models.fcnet import MultitaskFitTransformRegressor -from deepchem.models.IRV import TensorflowMultitaskIRVClassifier +from deepchem.models.IRV import MultitaskIRVClassifier from deepchem.models.robust_multitask import RobustMultitaskClassifier from deepchem.models.robust_multitask import RobustMultitaskRegressor from deepchem.models.progressive_multitask import ProgressiveMultitaskRegressor, ProgressiveMultitaskClassifier @@ -25,7 +24,30 @@ from deepchem.models.text_cnn import TextCNNModel from deepchem.models.atomic_conv import AtomicConvModel from deepchem.models.chemnet_models import Smiles2Vec, ChemCeption -#################### Compatibility imports for renamed TensorGraph models. Remove below with DeepChem 3.0. 
#################### +# scikit-learn model +from deepchem.models.sklearn_models import SklearnModel +from deepchem.models.gbdt_models import GBDTModel + +# PyTorch models +try: + from deepchem.models.torch_models import TorchModel + from deepchem.models.torch_models import AttentiveFP, AttentiveFPModel + from deepchem.models.torch_models import CGCNN, CGCNNModel + from deepchem.models.torch_models import GAT, GATModel + from deepchem.models.torch_models import GCN, GCNModel +except ModuleNotFoundError: + pass + +##################################################################################### +# Compatibility imports for renamed XGBoost models. Remove below with DeepChem 3.0. +##################################################################################### + +from deepchem.models.gbdt_models.gbdt_model import XGBoostModel + +######################################################################################## +# Compatibility imports for renamed TensorGraph models. Remove below with DeepChem 3.0. 
+######################################################################################## from deepchem.models.text_cnn import TextCNNTensorGraph from deepchem.models.graph_models import WeaveTensorGraph, DTNNTensorGraph, DAGTensorGraph, GraphConvTensorGraph, MPNNTensorGraph +from deepchem.models.IRV import TensorflowMultitaskIRVClassifier diff --git a/deepchem/models/atomic_conv.py b/deepchem/models/atomic_conv.py index a422f726d90e114ab6a3c818d85e300e037fab36..16320687d82ed5fd57ef5ebf44c1b6e925a208e1 100644 --- a/deepchem/models/atomic_conv.py +++ b/deepchem/models/atomic_conv.py @@ -178,8 +178,8 @@ class AtomicConvModel(KerasModel): learning_rate=0.001, **kwargs): """ - Params - ------ + Parameters + ---------- frag1_num_atoms: int Number of atoms in first fragment frag2_num_atoms: int diff --git a/deepchem/models/callbacks.py b/deepchem/models/callbacks.py index 0ea0759dba04fa54cc3284bf1104f6b6bd6fba1e..40679583e95c8249d0a12fb2bdc222562278cbe8 100644 --- a/deepchem/models/callbacks.py +++ b/deepchem/models/callbacks.py @@ -1,8 +1,6 @@ """ Callback functions that can be invoked while fitting a KerasModel. 
""" - -import tensorflow as tf import sys @@ -80,7 +78,11 @@ class ValidationCallback(object): print(message, file=self.output_file) if model.tensorboard: for key in scores: - model._log_value_to_tensorboard(tag=key, simple_value=scores[key]) + model._log_scalar_to_tensorboard(key, scores[key], + model.get_global_step()) + if model.wandb: + import wandb + wandb.log(scores, step=step) if self.save_dir is not None: score = scores[self.metrics[self.save_metric].name] if not self.save_on_minimum: diff --git a/deepchem/models/cnn.py b/deepchem/models/cnn.py index 1d098cf6cddbc91c181e719eb1576dcae01416b1..f0c400f26f4e75b646ebe7b4c862d38b8fb45374 100644 --- a/deepchem/models/cnn.py +++ b/deepchem/models/cnn.py @@ -6,7 +6,10 @@ from deepchem.models.layers import SwitchedDropout from deepchem.metrics import to_one_hot from tensorflow.keras.layers import Input, Dense, Reshape, Softmax, Dropout, Activation, Lambda import tensorflow.keras.layers as layers -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection class CNN(KerasModel): @@ -128,15 +131,15 @@ class CNN(KerasModel): n_layers = len(layer_filters) if not isinstance(kernel_size, list): kernel_size = [kernel_size] * n_layers - if not isinstance(strides, collections.Sequence): + if not isinstance(strides, SequenceCollection): strides = [strides] * n_layers - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1) - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * (n_layers + 1) - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not 
isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers if weight_decay_penalty != 0.0: if weight_decay_penalty_type == 'l1': diff --git a/deepchem/models/fcnet.py b/deepchem/models/fcnet.py index a37e42e38565492d21bf1e85c08d624a7452b83c..1308d5da3b56da5eff59044edf7c20c89b22fcd0 100644 --- a/deepchem/models/fcnet.py +++ b/deepchem/models/fcnet.py @@ -6,15 +6,20 @@ import time import numpy as np import tensorflow as tf import threading -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection import deepchem as dc from deepchem.models import KerasModel from deepchem.models.layers import SwitchedDropout -from deepchem.utils.save import log from deepchem.metrics import to_one_hot from tensorflow.keras.layers import Input, Dense, Reshape, Softmax, Dropout, Activation, Lambda +from typing import Any, Callable, Iterable, List, Optional, Sequence, Tuple, Union +from deepchem.utils.typing import KerasActivationFn, LossFn, OneOrMany + logger = logging.getLogger(__name__) @@ -34,18 +39,18 @@ class MultitaskClassifier(KerasModel): """ def __init__(self, - n_tasks, - n_features, - layer_sizes=[1000], - weight_init_stddevs=0.02, - bias_init_consts=1.0, - weight_decay_penalty=0.0, - weight_decay_penalty_type="l2", - dropouts=0.5, - activation_fns=tf.nn.relu, - n_classes=2, - residual=False, - **kwargs): + n_tasks: int, + n_features: int, + layer_sizes: Sequence[int] = [1000], + weight_init_stddevs: OneOrMany[float] = 0.02, + bias_init_consts: OneOrMany[float] = 1.0, + weight_decay_penalty: float = 0.0, + weight_decay_penalty_type: str = "l2", + dropouts: OneOrMany[float] = 0.5, + activation_fns: OneOrMany[KerasActivationFn] = tf.nn.relu, + n_classes: int = 2, + residual: bool = False, + **kwargs) -> None: """Create a MultitaskClassifier. 
In addition to the following arguments, this class also accepts @@ -66,7 +71,7 @@ class MultitaskClassifier(KerasModel): equal len(layer_sizes). Alternatively this may be a single value instead of a list, in which case the same value is used for every layer. - bias_init_consts: list or loat + bias_init_consts: list or float the value to initialize the biases in each layer to. The length of this list should equal len(layer_sizes). Alternatively this may be a single value instead of a list, in @@ -92,13 +97,13 @@ class MultitaskClassifier(KerasModel): self.n_features = n_features self.n_classes = n_classes n_layers = len(layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * n_layers - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * n_layers - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers if weight_decay_penalty != 0.0: if weight_decay_penalty_type == 'l1': @@ -150,12 +155,13 @@ class MultitaskClassifier(KerasModel): output_types=['prediction', 'loss'], **kwargs) - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): + def default_generator( + self, + dataset: dc.data.Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: for epoch in range(epochs): for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( batch_size=self.batch_size, @@ -183,18 +189,18 @@ class MultitaskRegressor(KerasModel): """ def __init__(self, - n_tasks, - n_features, - 
layer_sizes=[1000], - weight_init_stddevs=0.02, - bias_init_consts=1.0, - weight_decay_penalty=0.0, - weight_decay_penalty_type="l2", - dropouts=0.5, - activation_fns=tf.nn.relu, - uncertainty=False, - residual=False, - **kwargs): + n_tasks: int, + n_features: int, + layer_sizes: Sequence[int] = [1000], + weight_init_stddevs: OneOrMany[float] = 0.02, + bias_init_consts: OneOrMany[float] = 1.0, + weight_decay_penalty: float = 0.0, + weight_decay_penalty_type: str = "l2", + dropouts: OneOrMany[float] = 0.5, + activation_fns: OneOrMany[KerasActivationFn] = tf.nn.relu, + uncertainty: bool = False, + residual: bool = False, + **kwargs) -> None: """Create a MultitaskRegressor. In addition to the following arguments, this class also accepts all the keywork arguments @@ -237,13 +243,13 @@ class MultitaskRegressor(KerasModel): self.n_tasks = n_tasks self.n_features = n_features n_layers = len(layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * (n_layers + 1) - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * (n_layers + 1) - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers if weight_decay_penalty != 0.0: if weight_decay_penalty_type == 'l1': @@ -296,6 +302,7 @@ class MultitaskRegressor(KerasModel): stddev=weight_init_stddevs[-1]), bias_initializer=tf.constant_initializer( value=bias_init_consts[-1]))(prev_layer)) + loss: Union[dc.models.losses.Loss, LossFn] if uncertainty: log_var = Reshape((n_tasks, 1))(Dense( n_tasks, @@ -318,12 +325,13 @@ class MultitaskRegressor(KerasModel): 
super(MultitaskRegressor, self).__init__( model, loss, output_types=output_types, **kwargs) - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): + def default_generator( + self, + dataset: dc.data.Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: for epoch in range(epochs): for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( batch_size=self.batch_size, @@ -339,8 +347,8 @@ class MultitaskRegressor(KerasModel): class MultitaskFitTransformRegressor(MultitaskRegressor): """Implements a MultitaskRegressor that performs on-the-fly transformation during fit/predict. - Example: - + Examples + -------- >>> n_samples = 10 >>> n_features = 3 >>> n_tasks = 1 @@ -358,10 +366,10 @@ class MultitaskFitTransformRegressor(MultitaskRegressor): """ def __init__(self, - n_tasks, - n_features, - fit_transformers=[], - batch_size=50, + n_tasks: int, + n_features: int, + fit_transformers: Sequence[dc.trans.Transformer] = [], + batch_size: int = 50, **kwargs): """Create a MultitaskFitTransformRegressor. 
@@ -388,18 +396,21 @@ class MultitaskFitTransformRegressor(MultitaskRegressor): else: raise ValueError("n_features should be list or int") for transformer in fit_transformers: - X_b = transformer.X_transform(X_b) + assert transformer.transform_X and not (transformer.transform_y or + transformer.transform_w) + X_b, _, _, _ = transformer.transform_array(X_b, None, None, None) n_features = X_b.shape[1] logger.info("n_features after fit_transform: %d", int(n_features)) super(MultitaskFitTransformRegressor, self).__init__( n_tasks, n_features, batch_size=batch_size, **kwargs) - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): + def default_generator( + self, + dataset: dc.data.Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: for epoch in range(epochs): for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( batch_size=self.batch_size, @@ -410,18 +421,19 @@ class MultitaskFitTransformRegressor(MultitaskRegressor): if X_b is not None: if mode == 'fit': for transformer in self.fit_transformers: - X_b = transformer.X_transform(X_b) + X_b, _, _, _ = transformer.transform_array(X_b, None, None, None) if mode == 'predict': dropout = np.array(0.0) else: dropout = np.array(1.0) yield ([X_b, dropout], [y_b], [w_b]) - def predict_on_generator(self, - generator, - transformers=[], - outputs=None, - output_types=None): + def predict_on_generator( + self, + generator: Iterable[Tuple[Any, Any, Any]], + transformers: List[dc.trans.Transformer] = [], + outputs: Optional[OneOrMany[tf.Tensor]] = None, + output_types: Optional[OneOrMany[str]] = None) -> OneOrMany[np.ndarray]: def transform_generator(): for inputs, labels, weights in generator: diff --git a/deepchem/models/gan.py b/deepchem/models/gan.py index 17071773037352d50d7bd7cbbe081095edea1afb..78cf8e007aa7fc09e0c890a9dfb5d556c5760dca 100644 --- a/deepchem/models/gan.py +++ 
b/deepchem/models/gan.py @@ -2,7 +2,10 @@ from deepchem.models import KerasModel, layers, losses from tensorflow.keras.layers import Input, Lambda, Layer, Softmax, Reshape, Multiply -from collections import Sequence +try: + from collections.abc import Sequence +except: + from collections import Sequence import numpy as np import tensorflow as tf import time @@ -83,13 +86,11 @@ class GAN(KerasModel): self.data_input_layers = [] for shape in self.get_data_input_shapes(): self.data_input_layers.append(Input(shape=shape)) - self.data_inputs = [i.experimental_ref() for i in self.data_input_layers] + self.data_inputs = [i.ref() for i in self.data_input_layers] self.conditional_input_layers = [] for shape in self.get_conditional_input_shapes(): self.conditional_input_layers.append(Input(shape=shape)) - self.conditional_inputs = [ - i.experimental_ref() for i in self.conditional_input_layers - ] + self.conditional_inputs = [i.ref() for i in self.conditional_input_layers] # Create the generators. @@ -344,9 +345,9 @@ class GAN(KerasModel): inputs = [self.get_noise_batch(self.batch_size)] for input in self.data_input_layers: - inputs.append(feed_dict[input.experimental_ref()]) + inputs.append(feed_dict[input.ref()]) for input in self.conditional_input_layers: - inputs.append(feed_dict[input.experimental_ref()]) + inputs.append(feed_dict[input.ref()]) discrim_error += self.fit_generator( [(inputs, [], [])], variables=self.discrim_variables, @@ -373,7 +374,7 @@ class GAN(KerasModel): # Write checkpoints and report progress. 
if discrim_average_steps == checkpoint_interval: - self._exec_with_session(lambda: manager.save()) + manager.save() discrim_loss = discrim_error / max(1, discrim_average_steps) gen_loss = gen_error / max(1, gen_average_steps) print( @@ -393,7 +394,7 @@ class GAN(KerasModel): print( 'Ending global_step %d: generator average loss %g, discriminator average loss %g' % (global_step, gen_loss, discrim_loss)) - self._exec_with_session(lambda: manager.save()) + manager.save() time2 = time.time() print("TIMING: model fitting took %0.3f s" % (time2 - time1)) diff --git a/deepchem/models/gbdt_models/__init__.py b/deepchem/models/gbdt_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..971dc34d3c4d3dac10fd1fd8cd3685a72848c8ef --- /dev/null +++ b/deepchem/models/gbdt_models/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa +from deepchem.models.gbdt_models.gbdt_model import GBDTModel \ No newline at end of file diff --git a/deepchem/models/gbdt_models/gbdt_model.py b/deepchem/models/gbdt_models/gbdt_model.py new file mode 100644 index 0000000000000000000000000000000000000000..4f10f0da8f529bb5d01646b9f4378796a711fccd --- /dev/null +++ b/deepchem/models/gbdt_models/gbdt_model.py @@ -0,0 +1,158 @@ +""" +Gradient Boosting Decision Tree wrapper interface +""" + +import os +import logging +import tempfile +import warnings +from typing import Callable, Optional, Union + +import numpy as np +from sklearn.base import BaseEstimator +from sklearn.model_selection import train_test_split + +from deepchem.data import Dataset +from deepchem.models.sklearn_models import SklearnModel + +logger = logging.getLogger(__name__) + + +class GBDTModel(SklearnModel): + """Wrapper class that wraps GBDT models as DeepChem models. + + This class supports LightGBM/XGBoost models. 
+ """ + + def __init__(self, + model: BaseEstimator, + model_dir: Optional[str] = None, + early_stopping_rounds: int = 50, + eval_metric: Optional[Union[str, Callable]] = None, + **kwargs): + """ + Parameters + ---------- + model: BaseEstimator + The model instance of scikit-learn wrapper LightGBM/XGBoost models. + model_dir: str, optional (default None) + Path to directory where model will be stored. + early_stopping_rounds: int, optional (default 50) + Activates early stopping. Validation metric needs to improve at least once + in every early_stopping_rounds round(s) to continue training. + eval_metric: Union[str, Callbale] + If string, it should be a built-in evaluation metric to use. + If callable, it should be a custom evaluation metric, see official note for more details. + """ + if model_dir is not None: + if not os.path.exists(model_dir): + os.makedirs(model_dir) + else: + model_dir = tempfile.mkdtemp() + self.model_dir = model_dir + self.model = model + self.model_class = model.__class__ + self.early_stopping_rounds = early_stopping_rounds + self.model_type = self._check_model_type() + + if eval_metric is None: + if self.model_type == 'classification': + self.eval_metric: Optional[Union[str, Callable]] = 'auc' + elif self.model_type == 'regression': + self.eval_metric = 'mae' + else: + self.eval_metric = eval_metric + else: + self.eval_metric = eval_metric + + def _check_model_type(self) -> str: + class_name = self.model.__class__.__name__ + if class_name.endswith('Classifier'): + return 'classification' + elif class_name.endswith('Regressor'): + return 'regression' + elif class_name == 'NoneType': + return 'none' + else: + raise ValueError( + '{} is not a supported model instance.'.format(class_name)) + + def fit(self, dataset: Dataset): + """Fits GDBT model with all data. + + First, this function splits all data into train and valid data (8:2), + and finds the best n_estimators. And then, we retrain all data using + best n_estimators * 1.25. 
+ + Parameters + ---------- + dataset: Dataset + The `Dataset` to train this model on. + """ + X = dataset.X + y = np.squeeze(dataset.y) + + # GDBT doesn't support multi-output(task) + if len(y.shape) != 1: + raise ValueError("GDBT model doesn't support multi-output(task)") + + seed = self.model.random_state + stratify = None + if self.model_type == 'classification': + stratify = y + + # Find optimal n_estimators based on original learning_rate and early_stopping_rounds + X_train, X_test, y_train, y_test = train_test_split( + X, y, test_size=0.2, random_state=seed, stratify=stratify) + self.model.fit( + X_train, + y_train, + early_stopping_rounds=self.early_stopping_rounds, + eval_metric=self.eval_metric, + eval_set=[(X_test, y_test)]) + + # retrain model to whole data using best n_estimators * 1.25 + if self.model.__class__.__name__.startswith('XGB'): + estimated_best_round = np.round(self.model.best_ntree_limit * 1.25) + else: + estimated_best_round = np.round(self.model.best_iteration_ * 1.25) + self.model.n_estimators = np.int64(estimated_best_round) + self.model.fit(X, y, eval_metric=self.eval_metric) + + def fit_with_eval(self, train_dataset: Dataset, valid_dataset: Dataset): + """Fits GDBT model with valid data. + + Parameters + ---------- + train_dataset: Dataset + The `Dataset` to train this model on. + valid_dataset: Dataset + The `Dataset` to validate this model on. 
+ """ + X_train, X_valid = train_dataset.X, valid_dataset.X + y_train, y_valid = np.squeeze(train_dataset.y), np.squeeze(valid_dataset.y) + + # GDBT doesn't support multi-output(task) + if len(y_train.shape) != 1 or len(y_valid.shape) != 1: + raise ValueError("GDBT model doesn't support multi-output(task)") + + self.model.fit( + X_train, + y_train, + early_stopping_rounds=self.early_stopping_rounds, + eval_metric=self.eval_metric, + eval_set=[(X_valid, y_valid)]) + + +######################################### +# Deprecation warnings for XGBoostModel +######################################### + + +class XGBoostModel(GBDTModel): + + def __init__(self, *args, **kwargs): + warnings.warn( + "XGBoostModel is deprecated and has been renamed to GBDTModel.", + FutureWarning) + super(XGBoostModel, self).__init__(*args, **kwargs) diff --git a/deepchem/models/graph_models.py b/deepchem/models/graph_models.py index cf72262b5f222c0d1834a7fbc7cab9be29cf1476..0dc5a5e50d8bcf9cd5f73b5b25a91e4aa04ae701 100644 --- a/deepchem/models/graph_models.py +++ b/deepchem/models/graph_models.py @@ -1,15 +1,20 @@ -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection import deepchem as dc import numpy as np import tensorflow as tf -from deepchem.data import NumpyDataset, pad_features +from typing import List, Union, Tuple, Iterable, Dict, Optional +from deepchem.utils.typing import OneOrMany, LossFn, KerasActivationFn +from deepchem.data import Dataset, NumpyDataset, pad_features from deepchem.feat.graph_features import ConvMolFeaturizer from deepchem.feat.mol_graphs import ConvMol from deepchem.metrics import to_one_hot from deepchem.models import KerasModel, layers -from deepchem.models.losses import L2Loss, SoftmaxCrossEntropy +from deepchem.models.losses import L2Loss, SoftmaxCrossEntropy, Loss from deepchem.trans import undo_transforms from tensorflow.keras.layers import Input, Dense, Reshape, 
Softmax, Dropout, Activation, BatchNormalization @@ -33,9 +38,7 @@ class WeaveModel(KerasModel): """Implements Google-style Weave Graph Convolutions This model implements the Weave style graph convolutions - from the following paper. - - Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." Journal of computer-aided molecular design 30.8 (2016): 595-608. + from [1]_. The biggest difference between WeaveModel style convolutions and GraphConvModel style convolutions is that Weave @@ -44,38 +47,166 @@ class WeaveModel(KerasModel): explicitly to model bond interactions. This may cause scaling issues, but may possibly allow for better modeling of subtle bond effects. + + Note that [1]_ introduces a whole variety of different architectures for + Weave models. The default settings in this class correspond to the W2N2 + variant from [1]_ which is the most commonly used variant.. + + Examples + -------- + + Here's an example of how to fit a `WeaveModel` on a tiny sample dataset. + + >>> import numpy as np + >>> import deepchem as dc + >>> featurizer = dc.feat.WeaveFeaturizer() + >>> X = featurizer(["C", "CC"]) + >>> y = np.array([1, 0]) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> model = dc.models.WeaveModel(n_tasks=1, n_weave=2, fully_connected_layer_sizes=[2000, 1000], mode="classification") + >>> loss = model.fit(dataset) + + Note + ---- + In general, the use of batch normalization can cause issues with NaNs. If + you're having trouble with NaNs while using this model, consider setting + `batch_normalize_kwargs={"trainable": False}` or turning off batch + normalization entirely with `batch_normalize=False`. + + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond + fingerprints." Journal of computer-aided molecular design 30.8 (2016): + 595-608. 
+ """ - def __init__(self, - n_tasks, - n_atom_feat=75, - n_pair_feat=14, - n_hidden=50, - n_graph_feat=128, - mode="classification", - n_classes=2, - batch_size=100, - **kwargs): + def __init__( + self, + n_tasks: int, + n_atom_feat: OneOrMany[int] = 75, + n_pair_feat: OneOrMany[int] = 14, + n_hidden: int = 50, + n_graph_feat: int = 128, + n_weave: int = 2, + fully_connected_layer_sizes: List[int] = [2000, 100], + conv_weight_init_stddevs: OneOrMany[float] = 0.03, + weight_init_stddevs: OneOrMany[float] = 0.01, + bias_init_consts: OneOrMany[float] = 0.0, + weight_decay_penalty: float = 0.0, + weight_decay_penalty_type: str = "l2", + dropouts: OneOrMany[float] = 0.25, + final_conv_activation_fn: Optional[KerasActivationFn] = tf.nn.tanh, + activation_fns: OneOrMany[KerasActivationFn] = tf.nn.relu, + batch_normalize: bool = True, + batch_normalize_kwargs: Dict = { + "renorm": True, + "fused": False + }, + gaussian_expand: bool = True, + compress_post_gaussian_expansion: bool = False, + mode: str = "classification", + n_classes: int = 2, + batch_size: int = 100, + **kwargs): """ Parameters ---------- n_tasks: int Number of tasks - n_atom_feat: int, optional - Number of features per atom. - n_pair_feat: int, optional + n_atom_feat: int, optional (default 75) + Number of features per atom. Note this is 75 by default and should be 78 + if chirality is used by `WeaveFeaturizer`. + n_pair_feat: int, optional (default 14) Number of features per pair of atoms. - n_hidden: int, optional + n_hidden: int, optional (default 50) Number of units(convolution depths) in corresponding hidden layer - n_graph_feat: int, optional + n_graph_feat: int, optional (default 128) Number of output features for each molecule(graph) - mode: str + n_weave: int, optional (default 2) + The number of weave layers in this model. + fully_connected_layer_sizes: list (default `[2000, 100]`) + The size of each dense layer in the network. The length of + this list determines the number of layers. 
+ conv_weight_init_stddevs: list or float (default 0.03) + The standard deviation of the distribution to use for weight + initialization of each convolutional layer. The length of this lisst + should equal `n_weave`. Alternatively, this may be a single value instead + of a list, in which case the same value is used for each layer. + weight_init_stddevs: list or float (default 0.01) + The standard deviation of the distribution to use for weight + initialization of each fully connected layer. The length of this list + should equal len(layer_sizes). Alternatively this may be a single value + instead of a list, in which case the same value is used for every layer. + bias_init_consts: list or float (default 0.0) + The value to initialize the biases in each fully connected layer. The + length of this list should equal len(layer_sizes). + Alternatively this may be a single value instead of a list, in + which case the same value is used for every layer. + weight_decay_penalty: float (default 0.0) + The magnitude of the weight decay penalty to use + weight_decay_penalty_type: str (default "l2") + The type of penalty to use for weight decay, either 'l1' or 'l2' + dropouts: list or float (default 0.25) + The dropout probablity to use for each fully connected layer. The length of this list + should equal len(layer_sizes). Alternatively this may be a single value + instead of a list, in which case the same value is used for every layer. + final_conv_activation_fn: Optional[KerasActivationFn] (default `tf.nn.tanh`) + The Tensorflow activation funcntion to apply to the final + convolution at the end of the weave convolutions. If `None`, then no + activate is applied (hence linear). + activation_fns: list or object (default `tf.nn.relu`) + The Tensorflow activation function to apply to each fully connected layer. The length + of this list should equal len(layer_sizes). 
Alternatively this may be a + single value instead of a list, in which case the same value is used for + every layer. + batch_normalize: bool, optional (default True) + If this is turned on, apply batch normalization before applying + activation functions on convolutional and fully connected layers. + batch_normalize_kwargs: Dict, optional (default `{"renorm"=True, "fused": False}`) + Batch normalization is a complex layer which has many potential + argumentswhich change behavior. This layer accepts user-defined + parameters which are passed to all `BatchNormalization` layers in + `WeaveModel`, `WeaveLayer`, and `WeaveGather`. + gaussian_expand: boolean, optional (default True) + Whether to expand each dimension of atomic features by gaussian + histogram + compress_post_gaussian_expansion: bool, optional (default False) + If True, compress the results of the Gaussian expansion back to the + original dimensions of the input. + mode: str (default "classification") Either "classification" or "regression" for type of model. - n_classes: int + n_classes: int (default 2) Number of classes to predict (only used in classification mode) + batch_size: int (default 100) + Batch size used by this model for training. 
""" if mode not in ['classification', 'regression']: raise ValueError("mode must be either 'classification' or 'regression'") + + if not isinstance(n_atom_feat, SequenceCollection): + n_atom_feat = [n_atom_feat] * n_weave + if not isinstance(n_pair_feat, SequenceCollection): + n_pair_feat = [n_pair_feat] * n_weave + n_layers = len(fully_connected_layer_sizes) + if not isinstance(conv_weight_init_stddevs, SequenceCollection): + conv_weight_init_stddevs = [conv_weight_init_stddevs] * n_weave + if not isinstance(weight_init_stddevs, SequenceCollection): + weight_init_stddevs = [weight_init_stddevs] * n_layers + if not isinstance(bias_init_consts, SequenceCollection): + bias_init_consts = [bias_init_consts] * n_layers + if not isinstance(dropouts, SequenceCollection): + dropouts = [dropouts] * n_layers + if not isinstance(activation_fns, SequenceCollection): + activation_fns = [activation_fns] * n_layers + if weight_decay_penalty != 0.0: + if weight_decay_penalty_type == 'l1': + regularizer = tf.keras.regularizers.l1(weight_decay_penalty) + else: + regularizer = tf.keras.regularizers.l2(weight_decay_penalty) + else: + regularizer = None + self.n_tasks = n_tasks self.n_atom_feat = n_atom_feat self.n_pair_feat = n_pair_feat @@ -85,44 +216,78 @@ class WeaveModel(KerasModel): self.n_classes = n_classes # Build the model. 
- - atom_features = Input(shape=(self.n_atom_feat,)) - pair_features = Input(shape=(self.n_pair_feat,)) + atom_features = Input(shape=(self.n_atom_feat[0],)) + pair_features = Input(shape=(self.n_pair_feat[0],)) pair_split = Input(shape=tuple(), dtype=tf.int32) atom_split = Input(shape=tuple(), dtype=tf.int32) atom_to_pair = Input(shape=(2,), dtype=tf.int32) - weave_layer1A, weave_layer1P = layers.WeaveLayer( - n_atom_input_feat=self.n_atom_feat, - n_pair_input_feat=self.n_pair_feat, - n_atom_output_feat=self.n_hidden, - n_pair_output_feat=self.n_hidden)( - [atom_features, pair_features, pair_split, atom_to_pair]) - weave_layer2A, weave_layer2P = layers.WeaveLayer( - n_atom_input_feat=self.n_hidden, - n_pair_input_feat=self.n_hidden, - n_atom_output_feat=self.n_hidden, - n_pair_output_feat=self.n_hidden, - update_pair=False)( - [weave_layer1A, weave_layer1P, pair_split, atom_to_pair]) - dense1 = Dense(self.n_graph_feat, activation=tf.nn.tanh)(weave_layer2A) - # Batch normalization causes issues, spitting out NaNs if - # allowed to train - batch_norm1 = BatchNormalization(epsilon=1e-5, trainable=False)(dense1) + inputs = [atom_features, pair_features, pair_split, atom_to_pair] + for ind in range(n_weave): + n_atom = self.n_atom_feat[ind] + n_pair = self.n_pair_feat[ind] + if ind < n_weave - 1: + n_atom_next = self.n_atom_feat[ind + 1] + n_pair_next = self.n_pair_feat[ind + 1] + else: + n_atom_next = n_hidden + n_pair_next = n_hidden + weave_layer_ind_A, weave_layer_ind_P = layers.WeaveLayer( + n_atom_input_feat=n_atom, + n_pair_input_feat=n_pair, + n_atom_output_feat=n_atom_next, + n_pair_output_feat=n_pair_next, + init=tf.keras.initializers.TruncatedNormal( + stddev=conv_weight_init_stddevs[ind]), + batch_normalize=batch_normalize)(inputs) + inputs = [weave_layer_ind_A, weave_layer_ind_P, pair_split, atom_to_pair] + # Final atom-layer convolution. Note this differs slightly from the paper + # since we use a tanh activation as default. 
This seems necessary for numerical + # stability. + dense1 = Dense( + self.n_graph_feat, + activation=final_conv_activation_fn)(weave_layer_ind_A) + if batch_normalize: + dense1 = BatchNormalization(**batch_normalize_kwargs)(dense1) weave_gather = layers.WeaveGather( - batch_size, n_input=self.n_graph_feat, - gaussian_expand=True)([batch_norm1, atom_split]) + batch_size, + n_input=self.n_graph_feat, + gaussian_expand=gaussian_expand, + compress_post_gaussian_expansion=compress_post_gaussian_expansion)( + [dense1, atom_split]) + + if n_layers > 0: + # Now fully connected layers + input_layer = weave_gather + for layer_size, weight_stddev, bias_const, dropout, activation_fn in zip( + fully_connected_layer_sizes, weight_init_stddevs, bias_init_consts, + dropouts, activation_fns): + layer = Dense( + layer_size, + kernel_initializer=tf.keras.initializers.TruncatedNormal( + stddev=weight_stddev), + bias_initializer=tf.constant_initializer(value=bias_const), + kernel_regularizer=regularizer)(input_layer) + if dropout > 0.0: + layer = Dropout(rate=dropout)(layer) + if batch_normalize: + # Should this allow for training? 
+ layer = BatchNormalization(**batch_normalize_kwargs)(layer) + layer = Activation(activation_fn)(layer) + input_layer = layer + output = input_layer + else: + output = weave_gather n_tasks = self.n_tasks if self.mode == 'classification': n_classes = self.n_classes - logits = Reshape((n_tasks, - n_classes))(Dense(n_tasks * n_classes)(weave_gather)) + logits = Reshape((n_tasks, n_classes))(Dense(n_tasks * n_classes)(output)) output = Softmax()(logits) outputs = [output, logits] output_types = ['prediction', 'loss'] - loss = SoftmaxCrossEntropy() + loss: Loss = SoftmaxCrossEntropy() else: - output = Dense(n_tasks)(weave_gather) + output = Dense(n_tasks)(output) outputs = [output] output_types = ['prediction'] loss = L2Loss() @@ -134,12 +299,98 @@ class WeaveModel(KerasModel): super(WeaveModel, self).__init__( model, loss, output_types=output_types, batch_size=batch_size, **kwargs) - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): + def compute_features_on_batch(self, X_b): + """Compute tensors that will be input into the model from featurized representation. + + The featurized input to `WeaveModel` is instances of `WeaveMol` created by + `WeaveFeaturizer`. This method converts input `WeaveMol` objects into + tensors used by the Keras implementation to compute `WeaveModel` outputs. + + Parameters + ---------- + X_b: np.ndarray + A numpy array with dtype=object where elements are `WeaveMol` objects. + + Returns + ------- + atom_feat: np.ndarray + Of shape `(N_atoms, N_atom_feat)`. + pair_feat: np.ndarray + Of shape `(N_pairs, N_pair_feat)`. Note that `N_pairs` will depend on + the number of pairs being considered. If `max_pair_distance` is + `None`, then this will be `N_atoms**2`. Else it will be the number + of pairs within the specifed graph distance. + pair_split: np.ndarray + Of shape `(N_pairs,)`. The i-th entry in this array will tell you the + originating atom for this pair (the "source"). 
Note that pairs are + symmetric so for a pair `(a, b)`, both `a` and `b` will separately be + sources at different points in this array. + atom_split: np.ndarray + Of shape `(N_atoms,)`. The i-th entry in this array will be the molecule + with the i-th atom belongs to. + atom_to_pair: np.ndarray + Of shape `(N_pairs, 2)`. The i-th row in this array will be the array + `[a, b]` if `(a, b)` is a pair to be considered. (Note by symmetry, this + implies some other row will contain `[b, a]`. + """ + atom_feat = [] + pair_feat = [] + atom_split = [] + atom_to_pair = [] + pair_split = [] + start = 0 + for im, mol in enumerate(X_b): + n_atoms = mol.get_num_atoms() + # pair_edges is of shape (2, N) + pair_edges = mol.get_pair_edges() + N_pairs = pair_edges[1] + # number of atoms in each molecule + atom_split.extend([im] * n_atoms) + # index of pair features + C0, C1 = np.meshgrid(np.arange(n_atoms), np.arange(n_atoms)) + atom_to_pair.append(pair_edges.T + start) + # Get starting pair atoms + pair_starts = pair_edges.T[:, 0] + # number of pairs for each atom + pair_split.extend(pair_starts + start) + start = start + n_atoms + + # atom features + atom_feat.append(mol.get_atom_features()) + # pair features + pair_feat.append(mol.get_pair_features()) + + return (np.concatenate(atom_feat, axis=0), np.concatenate( + pair_feat, axis=0), np.array(pair_split), np.array(atom_split), + np.concatenate(atom_to_pair, axis=0)) + + def default_generator( + self, + dataset: Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: + """Convert a dataset into the tensors needed for learning. + + Parameters + ---------- + dataset: `dc.data.Dataset` + Dataset to convert + epochs: int, optional (Default 1) + Number of times to walk over `dataset` + mode: str, optional (Default 'fit') + Ignored in this implementation. 
+ deterministic: bool, optional (Default True) + Whether the dataset should be walked in a deterministic fashion + pad_batches: bool, optional (Default True) + If true, each returned batch will have size `self.batch_size`. + + Returns + ------- + Iterator which walks over the batches + """ + for epoch in range(epochs): for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( batch_size=self.batch_size, @@ -149,49 +400,19 @@ class WeaveModel(KerasModel): if self.mode == 'classification': y_b = to_one_hot(y_b.flatten(), self.n_classes).reshape( -1, self.n_tasks, self.n_classes) - atom_feat = [] - pair_feat = [] - atom_split = [] - atom_to_pair = [] - pair_split = [] - start = 0 - for im, mol in enumerate(X_b): - n_atoms = mol.get_num_atoms() - # number of atoms in each molecule - atom_split.extend([im] * n_atoms) - # index of pair features - C0, C1 = np.meshgrid(np.arange(n_atoms), np.arange(n_atoms)) - atom_to_pair.append( - np.transpose( - np.array([C1.flatten() + start, - C0.flatten() + start]))) - # number of pairs for each atom - pair_split.extend(C1.flatten() + start) - start = start + n_atoms - - # atom features - atom_feat.append(mol.get_atom_features()) - # pair features - pair_feat.append( - np.reshape(mol.get_pair_features(), - (n_atoms * n_atoms, self.n_pair_feat))) - - inputs = [ - np.concatenate(atom_feat, axis=0), - np.concatenate(pair_feat, axis=0), - np.array(pair_split), - np.array(atom_split), - np.concatenate(atom_to_pair, axis=0) - ] + inputs = self.compute_features_on_batch(X_b) yield (inputs, [y_b], [w_b]) class DTNNModel(KerasModel): """Deep Tensor Neural Networks - This class implements deep tensor neural networks as first defined in + This class implements deep tensor neural networks as first defined in [1]_ - Schütt, Kristof T., et al. "Quantum-chemical insights from deep tensor neural networks." Nature communications 8.1 (2017): 1-8. + References + ---------- + .. [1] Schütt, Kristof T., et al. 
"Quantum-chemical insights from deep + tensor neural networks." Nature communications 8.1 (2017): 1-8. """ def __init__(self, @@ -347,7 +568,7 @@ class DTNNModel(KerasModel): class DAGModel(KerasModel): """Directed Acyclic Graph models for molecular property prediction. - This model is based on the following paper: + This model is based on the following paper: Lusci, Alessandro, Gianluca Pollastri, and Pierre Baldi. "Deep architectures and deep learning in chemoinformatics: the prediction of aqueous solubility for drug-like molecules." Journal of chemical information and modeling 53.7 (2013): 1563-1575. @@ -361,7 +582,7 @@ class DAGModel(KerasModel): This model accepts ConvMols as input, just as GraphConvModel does, but these ConvMol objects must be transformed by - dc.trans.DAGTransformer. + dc.trans.DAGTransformer. As a note, performance of this model can be a little sensitive to initialization. It might be worth training a few @@ -382,7 +603,7 @@ class DAGModel(KerasModel): uncertainty=False, batch_size=100, **kwargs): - """ + """ Parameters ---------- n_tasks: int @@ -430,10 +651,6 @@ class DAGModel(KerasModel): if dropout is None or dropout == 0.0: raise ValueError('Dropout must be included to predict uncertainty') - ############################################ - print("self.dropout") - print(self.dropout) - ############################################ # Build the model. 
atom_features = Input(shape=(self.n_atom_feat,)) @@ -501,7 +718,7 @@ class DAGModel(KerasModel): mode='fit', deterministic=True, pad_batches=True): - """TensorGraph style implementation""" + """Convert a dataset into the tensors needed for learning""" for epoch in range(epochs): for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( batch_size=self.batch_size, @@ -576,7 +793,7 @@ class _GraphConvKerasModel(tf.keras.Model): self.mode = mode self.uncertainty = uncertainty - if not isinstance(dropout, collections.Sequence): + if not isinstance(dropout, SequenceCollection): dropout = [dropout] * (len(graph_conv_layers) + 1) if len(dropout) != len(graph_conv_layers) + 1: raise ValueError('Wrong number of dropout probabilities provided') @@ -635,7 +852,7 @@ class _GraphConvKerasModel(tf.keras.Model): if self.batch_norms[-1] is not None: dense = self.batch_norms[-1](dense, training=training) if training and self.dropouts[-1] is not None: - dense = self.dropouts[1](dense, training=training) + dense = self.dropouts[-1](dense, training=training) neural_fingerprint = self.graph_gather([dense, degree_slice, membership] + deg_adjs) if self.mode == 'classification': @@ -661,30 +878,36 @@ class GraphConvModel(KerasModel): """Graph Convolutional Models. This class implements the graph convolutional model from the - following paper: - + following paper [1]_. These graph convolutions start with a per-atom set of + descriptors for each atom in a molecule, then combine and recombine these + descriptors over convolutional layers. + following [1]_. - Duvenaud, David K., et al. "Convolutional networks on graphs for learning molecular fingerprints." Advances in neural information processing systems. 2015. + References + ---------- + .. [1] Duvenaud, David K., et al. "Convolutional networks on graphs for + learning molecular fingerprints." Advances in neural information processing + systems. 2015. 
""" def __init__(self, - n_tasks, - graph_conv_layers=[64, 64], - dense_layer_size=128, - dropout=0.0, - mode="classification", - number_atom_features=75, - n_classes=2, - batch_size=100, - batch_normalize=True, - uncertainty=False, + n_tasks: int, + graph_conv_layers: List[int] = [64, 64], + dense_layer_size: int = 128, + dropout: float = 0.0, + mode: str = "classification", + number_atom_features: int = 75, + n_classes: int = 2, + batch_size: int = 100, + batch_normalize: bool = True, + uncertainty: bool = False, **kwargs): """The wrapper class for graph convolutions. Note that since the underlying _GraphConvKerasModel class is specified using imperative subclassing style, this model - cannout make predictions for arbitrary outputs. + cannout make predictions for arbitrary outputs. Parameters ---------- @@ -695,16 +918,17 @@ class GraphConvModel(KerasModel): dense_layer_size: int Width of channels for Atom Level Dense Layer before GraphPool dropout: list or float - the dropout probablity to use for each layer. The length of this list should equal - len(graph_conv_layers)+1 (one value for each convolution layer, and one for the - dense layer). Alternatively this may be a single value instead of a list, in which - case the same value is used for every layer. + the dropout probablity to use for each layer. The length of this list + should equal len(graph_conv_layers)+1 (one value for each convolution + layer, and one for the dense layer). Alternatively this may be a single + value instead of a list, in which case the same value is used for every + layer. 
mode: str Either "classification" or "regression" number_atom_features: int - 75 is the default number of atom features created, but - this can vary if various options are passed to the - function atom_features in graph_features + 75 is the default number of atom features created, but + this can vary if various options are passed to the + function atom_features in graph_features n_classes: int the number of classes to predict (only used in classification mode) batch_normalize: True @@ -731,7 +955,7 @@ class GraphConvModel(KerasModel): batch_size=batch_size) if mode == "classification": output_types = ['prediction', 'loss', 'embedding'] - loss = SoftmaxCrossEntropy() + loss: Union[Loss, LossFn] = SoftmaxCrossEntropy() else: if self.uncertainty: output_types = ['prediction', 'variance', 'loss', 'loss', 'embedding'] @@ -779,11 +1003,12 @@ class MPNNModel(KerasModel): nodes in a graph send each other "messages" and update their internal state as a consequence of these messages. - Ordering structures in this model are built according to - - -Vinyals, Oriol, Samy Bengio, and Manjunath Kudlur. "Order matters: Sequence to sequence for sets." arXiv preprint arXiv:1511.06391 (2015). + Ordering structures in this model are built according to [1]_ + References + ---------- + .. [1] Vinyals, Oriol, Samy Bengio, and Manjunath Kudlur. "Order matters: + Sequence to sequence for sets." arXiv preprint arXiv:1511.06391 (2015). 
""" def __init__(self, diff --git a/deepchem/models/keras_model.py b/deepchem/models/keras_model.py index 7d059cdcb69a1695dbbdedd8c7ad263deb510bdb..9294abfe88d8df16df4f7c0a96f8cecd2e511337 100644 --- a/deepchem/models/keras_model.py +++ b/deepchem/models/keras_model.py @@ -4,19 +4,36 @@ import time import logging import os try: - from collections.abc import Sequence + from collections.abc import Sequence as SequenceCollection except: - from collections import Sequence + from collections import Sequence as SequenceCollection -logger = logging.getLogger(__name__) - -from deepchem.data import NumpyDataset +from deepchem.data import Dataset, NumpyDataset +from deepchem.metrics import Metric from deepchem.models.losses import Loss from deepchem.models.models import Model -from deepchem.models.optimizers import Adam -from deepchem.trans import undo_transforms +from deepchem.models.optimizers import Adam, Optimizer, LearningRateSchedule +from deepchem.trans import Transformer, undo_transforms from deepchem.utils.evaluate import GeneratorEvaluator +from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union +from deepchem.utils.typing import LossFn, OneOrMany + +try: + import wandb + wandb.ensure_configured() + if wandb.api.api_key is None: + _has_wandb = False + wandb.termwarn( + "W&B installed but not logged in. Run `wandb login` or set the WANDB_API_KEY env variable." + ) + else: + _has_wandb = True +except (ImportError, AttributeError): + _has_wandb = False + +logger = logging.getLogger(__name__) + class KerasModel(Model): """This is a DeepChem model implemented by a Keras model. @@ -36,6 +53,16 @@ class KerasModel(Model): Keras Model class, such as uncertainty prediction and saliency mapping. + Here is a simple example of code that uses KerasModel to train + a Keras model on a DeepChem dataset. 
+ + >> keras_model = tf.keras.Sequential([ + >> tf.keras.layers.Dense(1000, activation='tanh'), + >> tf.keras.layers.Dense(1) + >> ]) + >> model = KerasModel(keras_model, loss=dc.models.losses.L2Loss()) + >> model.fit(dataset) + The loss function for a model can be defined in two different ways. For models that have only a single output and use a standard loss function, you can simply provide a @@ -96,16 +123,17 @@ class KerasModel(Model): """ def __init__(self, - model, - loss, - output_types=None, - batch_size=100, - model_dir=None, - learning_rate=0.001, - optimizer=None, - tensorboard=False, - log_frequency=100, - **kwargs): + model: tf.keras.Model, + loss: Union[Loss, LossFn], + output_types: Optional[List[str]] = None, + batch_size: int = 100, + model_dir: Optional[str] = None, + learning_rate: Union[float, LearningRateSchedule] = 0.001, + optimizer: Optional[Optimizer] = None, + tensorboard: bool = False, + wandb: bool = False, + log_frequency: int = 100, + **kwargs) -> None: """Create a new KerasModel. Parameters @@ -130,27 +158,37 @@ class KerasModel(Model): ignored. tensorboard: bool whether to log progress to TensorBoard during training + wandb: bool + whether to log progress to Weights & Biases during training log_frequency: int The frequency at which to log data. Data is logged using `logging` by default. If `tensorboard` is set, data is also - logged to TensorBoard. Logging happens at global steps. Roughly, + logged to TensorBoard. If `wandb` is set, data is also logged + to Weights & Biases. Logging happens at global steps. Roughly, a global step corresponds to one batch of training. If you'd like a printout every 10 batch steps, you'd set `log_frequency=10` for example. 
""" - super(KerasModel, self).__init__( - model_instance=model, model_dir=model_dir, **kwargs) - self.model = model + super(KerasModel, self).__init__(model=model, model_dir=model_dir, **kwargs) if isinstance(loss, Loss): - self._loss_fn = _StandardLoss(model, loss) + self._loss_fn: LossFn = _StandardLoss(model, loss) else: self._loss_fn = loss self.batch_size = batch_size if optimizer is None: - self.optimizer = Adam(learning_rate=learning_rate) + self.optimizer: Optimizer = Adam(learning_rate=learning_rate) else: self.optimizer = optimizer self.tensorboard = tensorboard + + # W&B logging + if wandb and not _has_wandb: + logger.warning( + "You set wandb to True but W&B is not installed. To use wandb logging, " + "run `pip install wandb; wandb login` see https://docs.wandb.com/huggingface." + ) + self.wandb = wandb and _has_wandb + # Backwards compatibility if "tensorboard_log_frequency" in kwargs: logger.warning( @@ -185,20 +223,20 @@ class KerasModel(Model): self._built = False self._inputs_built = False self._training_ops_built = False - self._output_functions = {} - self._gradient_fn_for_vars = {} + self._output_functions: Dict[Any, Any] = {} + self._gradient_fn_for_vars: Dict[Any, Any] = {} - def _ensure_built(self): + def _ensure_built(self) -> None: """The first time this is called, create internal data structures.""" if self._built: return self._built = True self._global_step = tf.Variable(0, trainable=False) - self._tf_optimizer = self.optimizer._create_optimizer(self._global_step) + self._tf_optimizer = self.optimizer._create_tf_optimizer(self._global_step) self._checkpoint = tf.train.Checkpoint( optimizer=self._tf_optimizer, model=self.model) - def _create_inputs(self, example_inputs): + def _create_inputs(self, example_inputs: List) -> None: """The first time this is called, create tensors representing the inputs and outputs.""" if self._inputs_built: return @@ -214,7 +252,8 @@ class KerasModel(Model): for x in example_inputs ] - def 
_create_training_ops(self, example_batch): + def _create_training_ops(self, + example_batch: Tuple[List, List, List]) -> None: """The first time this is called, create tensors used in optimization.""" if self._training_ops_built: return @@ -230,15 +269,16 @@ class KerasModel(Model): ] def fit(self, - dataset, - nb_epoch=10, - max_checkpoints_to_keep=5, - checkpoint_interval=1000, - deterministic=False, - restore=False, - variables=None, - loss=None, - callbacks=[]): + dataset: Dataset, + nb_epoch: int = 10, + max_checkpoints_to_keep: int = 5, + checkpoint_interval: int = 1000, + deterministic: bool = False, + restore: bool = False, + variables: Optional[List[tf.Variable]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + all_losses: Optional[List[float]] = None) -> float: """Train this model on a dataset. Parameters @@ -268,21 +308,30 @@ class KerasModel(Model): callbacks: function or list of functions one or more functions of the form f(model, step) that will be invoked after every step. This can be used to perform validation, logging, etc. + all_losses: Optional[List[float]], optional (default None) + If specified, all logged losses are appended into this list. Note that + you can call `fit()` repeatedly with the same list and losses will + continue to be appended. 
+ + Returns + ------- + The average loss over the most recent checkpoint interval """ return self.fit_generator( self.default_generator( dataset, epochs=nb_epoch, deterministic=deterministic), max_checkpoints_to_keep, - checkpoint_interval, restore, variables, loss, callbacks) + checkpoint_interval, restore, variables, loss, callbacks, all_losses) def fit_generator(self, - generator, - max_checkpoints_to_keep=5, - checkpoint_interval=1000, - restore=False, - variables=None, - loss=None, - callbacks=[]): + generator: Iterable[Tuple[Any, Any, Any]], + max_checkpoints_to_keep: int = 5, + checkpoint_interval: int = 1000, + restore: bool = False, + variables: Optional[List[tf.Variable]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + all_losses: Optional[List[float]] = None) -> float: """Train this model on data from a generator. Parameters @@ -308,20 +357,24 @@ class KerasModel(Model): callbacks: function or list of functions one or more functions of the form f(model, step) that will be invoked after every step. This can be used to perform validation, logging, etc. + all_losses: Optional[List[float]], optional (default None) + If specified, all logged losses are appended into this list. Note that + you can call `fit()` repeatedly with the same list and losses will + continue to be appended. 
Returns ------- - the average loss over the most recent checkpoint interval + The average loss over the most recent checkpoint interval """ - if not isinstance(callbacks, Sequence): + if not isinstance(callbacks, SequenceCollection): callbacks = [callbacks] self._ensure_built() if checkpoint_interval > 0: manager = tf.train.CheckpointManager(self._checkpoint, self.model_dir, max_checkpoints_to_keep) avg_loss = 0.0 + last_avg_loss = 0.0 averaged_batches = 0 - train_op = None if loss is None: loss = self._loss_fn var_key = None @@ -365,6 +418,11 @@ class KerasModel(Model): avg_loss = float(avg_loss) / averaged_batches logger.info( 'Ending global_step %d: Average loss %g' % (current_step, avg_loss)) + if all_losses is not None: + all_losses.append(avg_loss) + # Capture the last avg_loss in case of return since we're resetting to + # 0 now + last_avg_loss = avg_loss avg_loss = 0.0 averaged_batches = 0 @@ -373,23 +431,28 @@ class KerasModel(Model): for c in callbacks: c(self, current_step) if self.tensorboard and should_log: - with self._summary_writer.as_default(): - tf.summary.scalar('loss', batch_loss, current_step) + self._log_scalar_to_tensorboard('loss', batch_loss, current_step) + if self.wandb and should_log: + wandb.log({'loss': batch_loss}, step=current_step) # Report final results. if averaged_batches > 0: avg_loss = float(avg_loss) / averaged_batches logger.info( 'Ending global_step %d: Average loss %g' % (current_step, avg_loss)) + if all_losses is not None: + all_losses.append(avg_loss) + last_avg_loss = avg_loss if checkpoint_interval > 0: manager.save() time2 = time.time() logger.info("TIMING: model fitting took %0.3f s" % (time2 - time1)) - return avg_loss + return last_avg_loss - def _create_gradient_fn(self, variables): + def _create_gradient_fn(self, + variables: Optional[List[tf.Variable]]) -> Callable: """Create a function that computes gradients and applies them to the model. 
Because of the way TensorFlow function tracing works, we need to create a separate function for each new set of variables. @@ -416,14 +479,14 @@ class KerasModel(Model): return apply_gradient_for_batch def fit_on_batch(self, - X, - y, - w, - variables=None, - loss=None, - callbacks=[], - checkpoint=True, - max_checkpoints_to_keep=5): + X: Sequence, + y: Sequence, + w: Sequence, + variables: Optional[List[tf.Variable]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + checkpoint: bool = True, + max_checkpoints_to_keep: int = 5) -> float: """Perform a single step of training. Parameters @@ -448,6 +511,10 @@ class KerasModel(Model): if true, save a checkpoint after performing the training step max_checkpoints_to_keep: int the maximum number of checkpoints to keep. Older checkpoints are discarded. + + Returns + ------- + the loss on the batch """ self._ensure_built() dataset = NumpyDataset(X, y, w) @@ -460,8 +527,11 @@ class KerasModel(Model): loss=loss, callbacks=callbacks) - def _predict(self, generator, transformers, outputs, uncertainty, - other_output_types): + def _predict( + self, generator: Iterable[Tuple[Any, Any, Any]], + transformers: List[Transformer], outputs: Optional[OneOrMany[tf.Tensor]], + uncertainty: bool, + other_output_types: Optional[OneOrMany[str]]) -> OneOrMany[np.ndarray]: """ Predict outputs for data provided by a generator. @@ -488,20 +558,22 @@ class KerasModel(Model): returns the values of the uncertainty outputs. other_output_types: list, optional Provides a list of other output_types (strings) to predict from model. 
- Returns: - a NumPy array of the model produces a single output, or a list of arrays - if it produces multiple outputs + + Returns + ------- + a NumPy array of the model produces a single output, or a list of arrays + if it produces multiple outputs """ - results = None - variances = None + results: Optional[List[np.ndarray]] = None + variances: Optional[List[np.ndarray]] = None if (outputs is not None) and (other_output_types is not None): raise ValueError( - 'This model cannot compute outputs and other output_types simultaneously. Please invoke one at a time.' - ) + 'This model cannot compute outputs and other output_types simultaneously.' + 'Please invoke one at a time.') if uncertainty and (other_output_types is not None): raise ValueError( - 'This model cannot compute uncertainties and other output types simultaneously. Please invoke one at a time.' - ) + 'This model cannot compute uncertainties and other output types simultaneously.' + 'Please invoke one at a time.') if uncertainty: assert outputs is None if self._variance_outputs is None or len(self._variance_outputs) == 0: @@ -518,7 +590,8 @@ class KerasModel(Model): if (outputs is not None and self.model.inputs is not None and len(self.model.inputs) == 0): raise ValueError( - "Cannot use 'outputs' argument with a model that does not specify its inputs. Note models defined in imperative subclassing style cannot specify outputs" + "Cannot use 'outputs' argument with a model that does not specify its inputs." + "Note models defined in imperative subclassing style cannot specify outputs" ) if isinstance(outputs, tf.Tensor): outputs = [outputs] @@ -575,9 +648,10 @@ class KerasModel(Model): # Concatenate arrays to create the final results. 
final_results = [] final_variances = [] - for r in results: - final_results.append(np.concatenate(r, axis=0)) - if uncertainty: + if results is not None: + for r in results: + final_results.append(np.concatenate(r, axis=0)) + if uncertainty and variances is not None: for v in variances: final_variances.append(np.concatenate(v, axis=0)) return zip(final_results, final_variances) @@ -587,15 +661,16 @@ class KerasModel(Model): return final_results @tf.function(experimental_relax_shapes=True) - def _compute_model(self, inputs): + def _compute_model(self, inputs: Sequence): """Evaluate the model for a set of inputs.""" return self.model(inputs, training=False) - def predict_on_generator(self, - generator, - transformers=[], - outputs=None, - output_types=None): + def predict_on_generator( + self, + generator: Iterable[Tuple[Any, Any, Any]], + transformers: List[Transformer] = [], + outputs: Optional[OneOrMany[tf.Tensor]] = None, + output_types: Optional[OneOrMany[str]] = None) -> OneOrMany[np.ndarray]: """ Parameters ---------- @@ -622,7 +697,11 @@ class KerasModel(Model): """ return self._predict(generator, transformers, outputs, False, output_types) - def predict_on_batch(self, X, transformers=[], outputs=None): + def predict_on_batch( + self, + X: Sequence, + transformers: List[Transformer] = [], + outputs: Optional[OneOrMany[tf.Tensor]] = None) -> OneOrMany[np.ndarray]: """Generates predictions for input samples, processing samples in a batch. Parameters @@ -646,7 +725,8 @@ class KerasModel(Model): dataset = NumpyDataset(X=X, y=None) return self.predict(dataset, transformers, outputs) - def predict_uncertainty_on_batch(self, X, masks=50): + def predict_uncertainty_on_batch(self, X: Sequence, masks: int = 50 + ) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]: """ Predict the model's outputs, along with the uncertainty in each one. 
@@ -673,7 +753,12 @@ class KerasModel(Model): dataset = NumpyDataset(X=X, y=None) return self.predict_uncertainty(dataset, masks) - def predict(self, dataset, transformers=[], outputs=None, output_types=None): + def predict( + self, + dataset: Dataset, + transformers: List[Transformer] = [], + outputs: Optional[OneOrMany[tf.Tensor]] = None, + output_types: Optional[List[str]] = None) -> OneOrMany[np.ndarray]: """ Uses self to make predictions on provided Dataset object. @@ -689,8 +774,10 @@ class KerasModel(Model): outputs will be returned. Alternatively one or more Tensors within the model may be specified, in which case the output of those Tensors will be returned. - output_types: list of Strings - The output types to return. Will retrieve all outputs of these types from the model. + output_types: String or list of Strings + If specified, all outputs of this type will be retrieved + from the model. If output_types is specified, outputs must + be None. Returns ------- @@ -698,14 +785,14 @@ class KerasModel(Model): if it produces multiple outputs """ generator = self.default_generator( - dataset, mode='predict', pad_batches=False) + dataset, mode='predict', deterministic=True, pad_batches=False) return self.predict_on_generator( generator, transformers=transformers, outputs=outputs, output_types=output_types) - def predict_embedding(self, dataset): + def predict_embedding(self, dataset: Dataset) -> OneOrMany[np.ndarray]: """ Predicts embeddings created by underlying model if any exist. An embedding must be specified to have `output_type` of @@ -725,7 +812,8 @@ class KerasModel(Model): dataset, mode='predict', pad_batches=False) return self._predict(generator, [], None, False, ['embedding']) - def predict_uncertainty(self, dataset, masks=50): + def predict_uncertainty(self, dataset: Dataset, masks: int = 50 + ) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]: """ Predict the model's outputs, along with the uncertainty in each one. 
@@ -749,9 +837,9 @@ class KerasModel(Model): value of the output, and each element of y_std estimates the standard deviation of the corresponding element of y_pred """ - sum_pred = [] - sum_sq_pred = [] - sum_var = [] + sum_pred: List[np.ndarray] = [] + sum_sq_pred: List[np.ndarray] = [] + sum_var: List[np.ndarray] = [] for i in range(masks): generator = self.default_generator( dataset, mode='uncertainty', pad_batches=False) @@ -775,13 +863,13 @@ class KerasModel(Model): if len(output) == 1: return (output[0], std[0]) else: - return zip(output, std) + return list(zip(output, std)) def evaluate_generator(self, - generator, - metrics, - transformers=[], - per_task_metrics=False): + generator: Iterable[Tuple[Any, Any, Any]], + metrics: List[Metric], + transformers: List[Transformer] = [], + per_task_metrics: bool = False): """Evaluate the performance of this model on the data produced by a generator. Parameters @@ -789,7 +877,7 @@ class KerasModel(Model): generator: generator this should generate batches, each represented as a tuple of the form (inputs, labels, weights). - metric: deepchem.metrics.Metric + metric: list of deepchem.metrics.Metric Evaluation metric transformers: list of dc.trans.Transformers Transformers that the input data has been transformed by. The output @@ -805,7 +893,7 @@ class KerasModel(Model): evaluator = GeneratorEvaluator(self, generator, transformers) return evaluator.compute_model_performance(metrics, per_task_metrics) - def compute_saliency(self, X): + def compute_saliency(self, X: np.ndarray) -> OneOrMany[np.ndarray]: """Compute the saliency map for an input sample. 
This computes the Jacobian matrix with the derivative of each output element @@ -854,7 +942,8 @@ class KerasModel(Model): return final_result[0] return final_result - def _prepare_batch(self, batch): + def _prepare_batch(self, + batch: Tuple[Any, Any, Any]) -> Tuple[List, List, List]: inputs, labels, weights = batch inputs = [ x if x.dtype == t else x.astype(t) @@ -880,12 +969,13 @@ class KerasModel(Model): inputs[i] = inputs[i].reshape(shape[:expected_dims]) return (inputs, labels, weights) - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): + def default_generator( + self, + dataset: Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: """Create a generator that iterates batches for a dataset. Subclasses may override this method to customize how model inputs are @@ -919,7 +1009,9 @@ class KerasModel(Model): pad_batches=pad_batches): yield ([X_b], [y_b], [w_b]) - def save_checkpoint(self, max_checkpoints_to_keep=5, model_dir=None): + def save_checkpoint(self, + max_checkpoints_to_keep: int = 5, + model_dir: Optional[str] = None) -> None: """Save a checkpoint to disk. Usually you do not need to call this method, since fit() saves checkpoints @@ -942,7 +1034,7 @@ class KerasModel(Model): max_checkpoints_to_keep) manager.save() - def get_checkpoints(self, model_dir=None): + def get_checkpoints(self, model_dir: Optional[str] = None): """Get a list of all available checkpoint files. Parameters @@ -955,7 +1047,9 @@ class KerasModel(Model): model_dir = self.model_dir return tf.train.get_checkpoint_state(model_dir).all_model_checkpoint_paths - def restore(self, checkpoint=None, model_dir=None, session=None): + def restore(self, + checkpoint: Optional[str] = None, + model_dir: Optional[str] = None) -> None: """Reload the values of all variables from a checkpoint file. 
Parameters @@ -966,8 +1060,6 @@ class KerasModel(Model): list of all available checkpoints. model_dir: str, default None Directory to restore checkpoint from. If None, use self.model_dir. - session: tf.Session(), default None - Session to run restore ops under. If None, self.session is used. """ self._ensure_built() if model_dir is None: @@ -978,11 +1070,19 @@ class KerasModel(Model): raise ValueError('No checkpoint found') self._checkpoint.restore(checkpoint) - def get_global_step(self): + def get_global_step(self) -> int: """Get the number of steps of fitting that have been performed.""" return int(self._global_step) - def _create_assignment_map(self, source_model, include_top=True, **kwargs): + def _log_scalar_to_tensorboard(self, name: str, value: Any, step: int): + """Log a scalar value to Tensorboard.""" + with self._summary_writer.as_default(): + tf.summary.scalar(name, value, step) + + def _create_assignment_map(self, + source_model: "KerasModel", + include_top: bool = True, + **kwargs) -> Dict[Any, Any]: """ Creates a default assignment map between variables of source and current model. This is used only when a custom assignment map is missing. This assumes the @@ -998,7 +1098,7 @@ class KerasModel(Model): include_top: bool, default True if true, copies the last dense layer """ - assignment_map = {} + assignment_map: Dict[Any, Any] = {} source_vars = source_model.model.trainable_variables dest_vars = self.model.trainable_variables @@ -1011,7 +1111,8 @@ class KerasModel(Model): return assignment_map - def _create_value_map(self, source_model, **kwargs): + def _create_value_map(self, source_model: "KerasModel", + **kwargs) -> Dict[Any, Any]: """ Creates a value map between variables in the source model and their current values. 
This is used only when a custom value map is missing, and @@ -1022,7 +1123,7 @@ class KerasModel(Model): source_model: dc.models.KerasModel Source model to create value map from """ - value_map = {} + value_map: Dict[Any, Any] = {} source_vars = source_model.model.trainable_variables for source_var in source_vars: @@ -1031,14 +1132,14 @@ class KerasModel(Model): return value_map def load_from_pretrained(self, - source_model, - assignment_map=None, - value_map=None, - checkpoint=None, - model_dir=None, - include_top=True, - inputs=None, - **kwargs): + source_model: "KerasModel", + assignment_map: Optional[Dict[Any, Any]] = None, + value_map: Optional[Dict[Any, Any]] = None, + checkpoint: Optional[str] = None, + model_dir: Optional[str] = None, + include_top: bool = True, + inputs: Optional[Sequence[Any]] = None, + **kwargs) -> None: """Copies variable values from a pretrained model. `source_model` can either be a pretrained model or a model with the same architecture. `value_map` is a variable-value dictionary. 
If no `value_map` is provided, the variable @@ -1104,16 +1205,16 @@ class KerasModel(Model): class _StandardLoss(object): """The implements the loss function for models that use a dc.models.losses.Loss.""" - def __init__(self, model, loss): + def __init__(self, model: tf.keras.Model, loss: Loss) -> None: self.model = model self.loss = loss - def __call__(self, outputs, labels, weights): + def __call__(self, outputs: List, labels: List, weights: List) -> float: if len(outputs) != 1 or len(labels) != 1 or len(weights) != 1: raise ValueError( "Loss functions expects exactly one each of outputs, labels, and weights" ) - losses = self.loss(outputs[0], labels[0]) + losses = self.loss._compute_tf_loss(outputs[0], labels[0]) w = weights[0] if len(w.shape) < len(losses.shape): if isinstance(w, tf.Tensor): diff --git a/deepchem/models/layers.py b/deepchem/models/layers.py index 81ce38e238ccbd42ab87d922cee5834694e95009..29706898574ae0e28ee9b6c147e11a9457e0e2b4 100644 --- a/deepchem/models/layers.py +++ b/deepchem/models/layers.py @@ -1,22 +1,54 @@ # -*- coding: utf-8 -*- import tensorflow as tf -import tensorflow_probability as tfp import numpy as np -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection +from typing import Callable, Dict, List from tensorflow.keras import activations, initializers, backend -from tensorflow.keras.layers import Dropout +from tensorflow.keras.layers import Dropout, BatchNormalization class InteratomicL2Distances(tf.keras.layers.Layer): - """Compute (squared) L2 Distances between atoms given neighbors.""" + """Compute (squared) L2 Distances between atoms given neighbors. + + This class computes pairwise distances between its inputs. 
+ + Examples + -------- + >>> import numpy as np + >>> import deepchem as dc + >>> atoms = 5 + >>> neighbors = 2 + >>> coords = np.random.rand(atoms, 3) + >>> neighbor_list = np.random.randint(0, atoms, size=(atoms, neighbors)) + >>> layer = InteratomicL2Distances(atoms, neighbors, 3) + >>> result = np.array(layer([coords, neighbor_list])) + >>> result.shape + (5, 2) - def __init__(self, N_atoms, M_nbrs, ndim, **kwargs): + """ + + def __init__(self, N_atoms: int, M_nbrs: int, ndim: int, **kwargs): + """Constructor for this layer. + + Parameters + ---------- + N_atoms: int + Number of atoms in the system total. + M_nbrs: int + Number of neighbors to consider when computing distances. + n_dim: int + Number of descriptors for each atom. + """ super(InteratomicL2Distances, self).__init__(**kwargs) self.N_atoms = N_atoms self.M_nbrs = M_nbrs self.ndim = ndim - def get_config(self): + def get_config(self) -> Dict: + """Returns config dictionary for this layer.""" config = super(InteratomicL2Distances, self).get_config() config['N_atoms'] = self.N_atoms config['M_nbrs'] = self.M_nbrs @@ -29,7 +61,12 @@ class InteratomicL2Distances(tf.keras.layers.Layer): Parameters ---------- inputs: list - Should be of form `inputs=[coords, nbr_list]` where `coords` is a tensor of shape `(None, N, 3)` and `nbr_list` is a list. + Should be of form `inputs=[coords, nbr_list]` where `coords` is a + tensor of shape `(None, N, 3)` and `nbr_list` is a list. + + Returns + ------- + Tensor of shape `(N_atoms, M_nbrs)` with interatomic distances. """ if len(inputs) != 2: raise ValueError("InteratomicDistances requires coords,nbr_list") @@ -47,20 +84,22 @@ class InteratomicL2Distances(tf.keras.layers.Layer): class GraphConv(tf.keras.layers.Layer): """Graph Convolutional Layers - This layer implements the graph convolution introduced in + This layer implements the graph convolution introduced in [1]_. 
The graph + convolution combines per-node feature vectures in a nonlinear fashion with + the feature vectors for neighboring nodes. This "blends" information in + local neighborhoods of a graph. - Duvenaud, David K., et al. "Convolutional networks on graphs for learning molecular fingerprints." Advances in neural information processing systems. 2015. https://arxiv.org/abs/1509.09292 + References + ---------- + .. [1] Duvenaud, David K., et al. "Convolutional networks on graphs for learning molecular fingerprints." Advances in neural information processing systems. 2015. https://arxiv.org/abs/1509.09292 - The graph convolution combines per-node feature vectures in a - nonlinear fashion with the feature vectors for neighboring nodes. - This "blends" information in local neighborhoods of a graph. """ def __init__(self, - out_channel, - min_deg=0, - max_deg=10, - activation_fn=None, + out_channel: int, + min_deg: int = 0, + max_deg: int = 10, + activation_fn: Callable = None, **kwargs): """Initialize a graph convolutional layer. @@ -91,14 +130,14 @@ class GraphConv(tf.keras.layers.Layer): num_deg = 2 * self.max_degree + (1 - self.min_degree) self.W_list = [ self.add_weight( - name='kernel', + name='kernel' + str(k), shape=(int(input_shape[0][-1]), self.out_channel), initializer='glorot_uniform', trainable=True) for k in range(num_deg) ] self.b_list = [ self.add_weight( - name='bias', + name='bias' + str(k), shape=(self.out_channel,), initializer='zeros', trainable=True) for k in range(num_deg) @@ -181,8 +220,16 @@ class GraphPool(tf.keras.layers.Layer): """A GraphPool gathers data from local neighborhoods of a graph. This layer does a max-pooling over the feature vectors of atoms in a - neighborhood. You can think of this layer as analogous to a max-pooling layer - for 2D convolutions but which operates on graphs instead. + neighborhood. You can think of this layer as analogous to a max-pooling + layer for 2D convolutions but which operates on graphs instead. 
This + technique is described in [1]_. + + References + ---------- + .. [1] Duvenaud, David K., et al. "Convolutional networks on graphs for + learning molecular fingerprints." Advances in neural information processing + systems. 2015. https://arxiv.org/abs/1509.09292 + """ def __init__(self, min_degree=0, max_degree=10, **kwargs): @@ -264,6 +311,12 @@ class GraphGather(tf.keras.layers.Layer): `GraphConv`, and `GraphPool` layers pool all nodes from all graphs in a batch that's being processed. The `GraphGather` reassembles these jumbled node feature vectors into per-graph feature vectors. + + References + ---------- + .. [1] Duvenaud, David K., et al. "Convolutional networks on graphs for + learning molecular fingerprints." Advances in neural information processing + systems. 2015. https://arxiv.org/abs/1509.09292 """ def __init__(self, batch_size, activation_fn=None, **kwargs): @@ -420,19 +473,92 @@ class LSTMStep(tf.keras.layers.Layer): return h, [h, c] -def _cosine_dist(x, y): - """Computes the inner product (cosine distance) between two tensors. +def cosine_dist(x, y): + """Computes the inner product (cosine similarity) between two tensors. + + This assumes that the two input tensors contain rows of vectors where + each column represents a different feature. The output tensor will have + elements that represent the inner product between pairs of normalized vectors + in the rows of `x` and `y`. The two tensors need to have the same number of + columns, because one cannot take the dot product between vectors of different + lengths. For example, in sentence similarity and sentence classification tasks, + the number of columns is the embedding size. In these tasks, the rows of the + input tensors would be different test vectors or sentences. The input tensors + themselves could be different batches. Using vectors or tensors of all 0s + should be avoided. 
+ + Methods + ------- + The vectors in the input tensors are first l2-normalized such that each vector + has length or magnitude of 1. The inner product (dot product) is then taken + between corresponding pairs of row vectors in the input tensors and returned. + + Examples + -------- + The cosine similarity between two equivalent vectors will be 1. The cosine + similarity between two equivalent tensors (tensors where all the elements are + the same) will be a tensor of 1s. In this scenario, if the input tensors `x` and + `y` are each of shape `(n,p)`, where each element in `x` and `y` is the same, then + the output tensor would be a tensor of shape `(n,n)` with 1 in every entry. + + >>> import tensorflow as tf + >>> import deepchem.models.layers as layers + >>> x = tf.ones((6, 4), dtype=tf.dtypes.float32, name=None) + >>> y_same = tf.ones((6, 4), dtype=tf.dtypes.float32, name=None) + >>> cos_sim_same = layers.cosine_dist(x,y_same) + + `x` and `y_same` are the same tensor (equivalent at every element, in this + case 1). As such, the pairwise inner product of the rows in `x` and `y` will + always be 1. The output tensor will be of shape (6,6). + + >>> diff = cos_sim_same - tf.ones((6, 6), dtype=tf.dtypes.float32, name=None) + >>> tf.reduce_sum(diff) == 0 # True + + >>> cos_sim_same.shape + TensorShape([6, 6]) + + The cosine similarity between two orthogonal vectors will be 0 (by definition). + If every row in `x` is orthogonal to every row in `y`, then the output will be a + tensor of 0s. In the following example, each row in the tensor `x1` is orthogonal + to each row in `x2` because they are halves of an identity matrix. + + >>> identity_tensor = tf.eye(512, dtype=tf.dtypes.float32) + >>> x1 = identity_tensor[0:256,:] + >>> x2 = identity_tensor[256:512,:] + >>> cos_sim_orth = layers.cosine_dist(x1,x2) + + Each row in `x1` is orthogonal to each row in `x2`. As such, the pairwise inner + product of the rows in `x1`and `x2` will always be 0. 
Furthermore, because the + shape of the input tensors are both of shape `(256,512)`, the output tensor will + be of shape `(256,256)`. + + >>> tf.reduce_sum(cos_sim_orth) == 0 # True + + >>> cos_sim_orth.shape + TensorShape([256, 256]) Parameters ---------- x: tf.Tensor - Input Tensor + Input Tensor of shape `(n, p)`. + The shape of this input tensor should be `n` rows by `p` columns. + Note that `n` need not equal `m` (the number of rows in `y`). y: tf.Tensor - Input Tensor + Input Tensor of shape `(m, p)` + The shape of this input tensor should be `m` rows by `p` columns. + Note that `m` need not equal `n` (the number of rows in `x`). + + Returns + ------- + tf.Tensor + Returns a tensor of shape `(n, m)`, that is, `n` rows by `m` columns. + Each `i,j`-th entry of this output tensor is the inner product between + the l2-normalized `i`-th row of the input tensor `x` and the + the l2-normalized `j`-th row of the output tensor `y`. """ - denom = (backend.sqrt(backend.sum(tf.square(x)) * backend.sum(tf.square(y))) + - backend.epsilon()) - return backend.dot(x, tf.transpose(y)) / denom + x_norm = tf.math.l2_normalize(x, axis=1) + y_norm = tf.math.l2_normalize(y, axis=1) + return backend.dot(x_norm, tf.transpose(y_norm)) class AttnLSTMEmbedding(tf.keras.layers.Layer): @@ -448,12 +574,14 @@ class AttnLSTMEmbedding(tf.keras.layers.Layer): metric that allows a network to modify its internal notion of distance. - References: - Matching Networks for One Shot Learning - https://arxiv.org/pdf/1606.04080v1.pdf + See references [1]_ [2]_ for more details. - Order Matters: Sequence to sequence for sets - https://arxiv.org/abs/1511.06391 + References + ---------- + .. [1] Vinyals, Oriol, et al. "Matching networks for one shot learning." + Advances in neural information processing systems. 2016. + .. [2] Vinyals, Oriol, Samy Bengio, and Manjunath Kudlur. "Order matters: + Sequence to sequence for sets." arXiv preprint arXiv:1511.06391 (2015). 
""" def __init__(self, n_test, n_support, n_feat, max_depth, **kwargs): @@ -520,7 +648,7 @@ class AttnLSTMEmbedding(tf.keras.layers.Layer): for d in range(self.max_depth): # Process using attention # Eqn (4), appendix A.1 of Matching Networks paper - e = _cosine_dist(x + q, xp) + e = cosine_dist(x + q, xp) a = tf.nn.softmax(e) r = backend.dot(a, xp) @@ -622,13 +750,13 @@ class IterRefLSTMEmbedding(tf.keras.layers.Layer): for d in range(self.max_depth): # Process support xp using attention - e = _cosine_dist(z + q, xp) + e = cosine_dist(z + q, xp) a = tf.nn.softmax(e) # Get linear combination of support set r = backend.dot(a, xp) # Process test x using attention - x_e = _cosine_dist(x + p, z) + x_e = cosine_dist(x + p, z) x_a = tf.nn.softmax(x_e) s = backend.dot(x_a, z) @@ -973,7 +1101,7 @@ class NeighborList(tf.keras.layers.Layer): return config def call(self, inputs): - if isinstance(inputs, collections.Sequence): + if isinstance(inputs, SequenceCollection): if len(inputs) != 1: raise ValueError("NeighborList can only have one input") inputs = inputs[0] @@ -1757,7 +1885,7 @@ class ANIFeat(tf.keras.layers.Layer): class GraphEmbedPoolLayer(tf.keras.layers.Layer): - """ + r""" GraphCNNPool Layer from Robust Spatial Filtering with Graph Convolutional Neural Networks https://arxiv.org/abs/1703.00792 @@ -1850,7 +1978,7 @@ class GraphEmbedPoolLayer(tf.keras.layers.Layer): class GraphCNN(tf.keras.layers.Layer): - """ + r""" GraphCNN Layer from Robust Spatial Filtering with Graph Convolutional Neural Networks https://arxiv.org/abs/1703.00792 @@ -1993,7 +2121,7 @@ class Highway(tf.keras.layers.Layer): return config def build(self, input_shape): - if isinstance(input_shape, collections.Sequence): + if isinstance(input_shape, SequenceCollection): input_shape = input_shape[0] out_channels = input_shape[1] @@ -2015,7 +2143,7 @@ class Highway(tf.keras.layers.Layer): self.built = True def call(self, inputs): - if isinstance(inputs, collections.Sequence): + if isinstance(inputs, 
SequenceCollection): parent = inputs[0] else: parent = inputs @@ -2026,49 +2154,152 @@ class Highway(tf.keras.layers.Layer): class WeaveLayer(tf.keras.layers.Layer): """This class implements the core Weave convolution from the - Google graph convolution paper. - - Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond fingerprints." Journal of computer-aided molecular design 30.8 (2016): 595-608. + Google graph convolution paper [1]_ This model contains atom features and bond features separately.Here, bond features are also called pair features. There are 2 types of transformation, atom->atom, atom->pair, pair->atom, pair->pair that this model implements. + + Examples + -------- + This layer expects 4 inputs in a list of the form `[atom_features, + pair_features, pair_split, atom_to_pair]`. We'll walk through the structure + of these inputs. Let's start with some basic definitions. + + >>> import deepchem as dc + >>> import numpy as np + + Suppose you have a batch of molecules + + >>> smiles = ["CCC", "C"] + + Note that there are 4 atoms in total in this system. This layer expects its + input molecules to be batched together. + + >>> total_n_atoms = 4 + + Let's suppose that we have a featurizer that computes `n_atom_feat` features + per atom. + + >>> n_atom_feat = 75 + + Then conceptually, `atom_feat` is the array of shape `(total_n_atoms, + n_atom_feat)` of atomic features. For simplicity, let's just go with a + random such matrix. + + >>> atom_feat = np.random.rand(total_n_atoms, n_atom_feat) + + Let's suppose we have `n_pair_feat` pairwise features + + >>> n_pair_feat = 14 + + For each molecule, we compute a matrix of shape `(n_atoms*n_atoms, + n_pair_feat)` of pairwise features for each pair of atoms in the molecule. + Let's construct this conceptually for our example. 
+ + >>> pair_feat = [np.random.rand(3*3, n_pair_feat), np.random.rand(1*1, n_pair_feat)] + >>> pair_feat = np.concatenate(pair_feat, axis=0) + >>> pair_feat.shape + (10, 14) + + `pair_split` is an index into `pair_feat` which tells us which atom each row belongs to. In our case, we hve + + >>> pair_split = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3]) + + That is, the first 9 entries belong to "CCC" and the last entry to "C". The + final entry `atom_to_pair` goes in a little more in-depth than `pair_split` + and tells us the precise pair each pair feature belongs to. In our case + + >>> atom_to_pair = np.array([[0, 0], + ... [0, 1], + ... [0, 2], + ... [1, 0], + ... [1, 1], + ... [1, 2], + ... [2, 0], + ... [2, 1], + ... [2, 2], + ... [3, 3]]) + + Let's now define the actual layer + + >>> layer = WeaveLayer() + + And invoke it + + >>> [A, P] = layer([atom_feat, pair_feat, pair_split, atom_to_pair]) + + The weave layer produces new atom/pair features. Let's check their shapes + + >>> A = np.array(A) + >>> A.shape + (4, 50) + >>> P = np.array(P) + >>> P.shape + (10, 50) + + The 4 is `total_num_atoms` and the 10 is the total number of pairs. Where + does `50` come from? It's from the default arguments `n_atom_input_feat` and + `n_pair_input_feat`. + + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond + fingerprints." Journal of computer-aided molecular design 30.8 (2016): + 595-608. 
+ """ def __init__(self, - n_atom_input_feat=75, - n_pair_input_feat=14, - n_atom_output_feat=50, - n_pair_output_feat=50, - n_hidden_AA=50, - n_hidden_PA=50, - n_hidden_AP=50, - n_hidden_PP=50, - update_pair=True, - init='glorot_uniform', - activation='relu', + n_atom_input_feat: int = 75, + n_pair_input_feat: int = 14, + n_atom_output_feat: int = 50, + n_pair_output_feat: int = 50, + n_hidden_AA: int = 50, + n_hidden_PA: int = 50, + n_hidden_AP: int = 50, + n_hidden_PP: int = 50, + update_pair: bool = True, + init: str = 'glorot_uniform', + activation: str = 'relu', + batch_normalize: bool = True, + batch_normalize_kwargs: Dict = {"renorm": True}, **kwargs): """ Parameters ---------- - n_atom_input_feat: int, optional + n_atom_input_feat: int, optional (default 75) Number of features for each atom in input. - n_pair_input_feat: int, optional + n_pair_input_feat: int, optional (default 14) Number of features for each pair of atoms in input. - n_atom_output_feat: int, optional + n_atom_output_feat: int, optional (default 50) Number of features for each atom in output. - n_pair_output_feat: int, optional + n_pair_output_feat: int, optional (default 50) Number of features for each pair of atoms in output. - n_hidden_XX: int, optional + n_hidden_AA: int, optional (default 50) + Number of units(convolution depths) in corresponding hidden layer + n_hidden_PA: int, optional (default 50) Number of units(convolution depths) in corresponding hidden layer - update_pair: bool, optional + n_hidden_AP: int, optional (default 50) + Number of units(convolution depths) in corresponding hidden layer + n_hidden_PP: int, optional (default 50) + Number of units(convolution depths) in corresponding hidden layer + update_pair: bool, optional (default True) Whether to calculate for pair features, could be turned off for last layer - init: str, optional + init: str, optional (default 'glorot_uniform') Weight initialization for filters. 
- activation: str, optional + activation: str, optional (default 'relu') Activation function applied + batch_normalize: bool, optional (default True) + If this is turned on, apply batch normalization before applying + activation functions on convolutional layers. + batch_normalize_kwargs: Dict, optional (default `{renorm=True}`) + Batch normalization is a complex layer which has many potential + argumentswhich change behavior. This layer accepts user-defined + parameters which are passed to all `BatchNormalization` layers in + `WeaveModel`, `WeaveLayer`, and `WeaveGather`. """ super(WeaveLayer, self).__init__(**kwargs) self.init = init # Set weight initialization @@ -2081,6 +2312,8 @@ class WeaveLayer(tf.keras.layers.Layer): self.n_hidden_PP = n_hidden_PP self.n_hidden_A = n_hidden_AA + n_hidden_PA self.n_hidden_P = n_hidden_AP + n_hidden_PP + self.batch_normalize = batch_normalize + self.batch_normalize_kwargs = batch_normalize_kwargs self.n_atom_input_feat = n_atom_input_feat self.n_pair_input_feat = n_pair_input_feat @@ -2088,7 +2321,8 @@ class WeaveLayer(tf.keras.layers.Layer): self.n_pair_output_feat = n_pair_output_feat self.W_AP, self.b_AP, self.W_PP, self.b_PP, self.W_P, self.b_P = None, None, None, None, None, None - def get_config(self): + def get_config(self) -> Dict: + """Returns config dictionary for this layer.""" config = super(WeaveLayer, self).get_config() config['n_atom_input_feat'] = self.n_atom_input_feat config['n_pair_input_feat'] = self.n_pair_input_feat @@ -2098,51 +2332,75 @@ class WeaveLayer(tf.keras.layers.Layer): config['n_hidden_PA'] = self.n_hidden_PA config['n_hidden_AP'] = self.n_hidden_AP config['n_hidden_PP'] = self.n_hidden_PP + config['batch_normalize'] = self.batch_normalize + config['batch_normalize_kwargs'] = self.batch_normalize_kwargs config['update_pair'] = self.update_pair config['init'] = self.init config['activation'] = self.activation return config def build(self, input_shape): - """ Construct internal trainable 
weights.""" - init = initializers.get(self.init) # Set weight initialization + """ Construct internal trainable weights. + + Parameters + ---------- + input_shape: tuple + Ignored since we don't need the input shape to create internal weights. + """ + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) self.W_AA = init([self.n_atom_input_feat, self.n_hidden_AA]) self.b_AA = backend.zeros(shape=[ self.n_hidden_AA, ]) + self.AA_bn = BatchNormalization(**self.batch_normalize_kwargs) self.W_PA = init([self.n_pair_input_feat, self.n_hidden_PA]) self.b_PA = backend.zeros(shape=[ self.n_hidden_PA, ]) + self.PA_bn = BatchNormalization(**self.batch_normalize_kwargs) self.W_A = init([self.n_hidden_A, self.n_atom_output_feat]) self.b_A = backend.zeros(shape=[ self.n_atom_output_feat, ]) + self.A_bn = BatchNormalization(**self.batch_normalize_kwargs) if self.update_pair: self.W_AP = init([self.n_atom_input_feat * 2, self.n_hidden_AP]) self.b_AP = backend.zeros(shape=[ self.n_hidden_AP, ]) + self.AP_bn = BatchNormalization(**self.batch_normalize_kwargs) self.W_PP = init([self.n_pair_input_feat, self.n_hidden_PP]) self.b_PP = backend.zeros(shape=[ self.n_hidden_PP, ]) + self.PP_bn = BatchNormalization(**self.batch_normalize_kwargs) self.W_P = init([self.n_hidden_P, self.n_pair_output_feat]) self.b_P = backend.zeros(shape=[ self.n_pair_output_feat, ]) + self.P_bn = BatchNormalization(**self.batch_normalize_kwargs) self.built = True - def call(self, inputs): + def call(self, inputs: List) -> List: """Creates weave tensors. 
- inputs: [atom_features, pair_features, pair_split, atom_to_pair] + Parameters + ---------- + inputs: List + Should contain 4 tensors [atom_features, pair_features, pair_split, + atom_to_pair] """ atom_features = inputs[0] pair_features = inputs[1] @@ -2153,29 +2411,45 @@ class WeaveLayer(tf.keras.layers.Layer): activation = self.activation_fn AA = tf.matmul(atom_features, self.W_AA) + self.b_AA + if self.batch_normalize: + AA = self.AA_bn(AA) AA = activation(AA) PA = tf.matmul(pair_features, self.W_PA) + self.b_PA + if self.batch_normalize: + PA = self.PA_bn(PA) PA = activation(PA) PA = tf.math.segment_sum(PA, pair_split) A = tf.matmul(tf.concat([AA, PA], 1), self.W_A) + self.b_A + if self.batch_normalize: + A = self.A_bn(A) A = activation(A) if self.update_pair: + # Note that AP_ij and AP_ji share the same self.AP_bn batch + # normalization AP_ij = tf.matmul( tf.reshape( tf.gather(atom_features, atom_to_pair), [-1, 2 * self.n_atom_input_feat]), self.W_AP) + self.b_AP + if self.batch_normalize: + AP_ij = self.AP_bn(AP_ij) AP_ij = activation(AP_ij) AP_ji = tf.matmul( tf.reshape( tf.gather(atom_features, tf.reverse(atom_to_pair, [1])), [-1, 2 * self.n_atom_input_feat]), self.W_AP) + self.b_AP + if self.batch_normalize: + AP_ji = self.AP_bn(AP_ji) AP_ji = activation(AP_ji) PP = tf.matmul(pair_features, self.W_PP) + self.b_PP + if self.batch_normalize: + PP = self.PP_bn(PP) PP = activation(PP) P = tf.matmul(tf.concat([AP_ij + AP_ji, PP], 1), self.W_P) + self.b_P + if self.batch_normalize: + P = self.P_bn(P) P = activation(P) else: P = pair_features @@ -2184,39 +2458,109 @@ class WeaveLayer(tf.keras.layers.Layer): class WeaveGather(tf.keras.layers.Layer): + """Implements the weave-gathering section of weave convolutions. + + Implements the gathering layer from [1]_. The weave gathering layer gathers + per-atom features to create a molecule-level fingerprint in a weave + convolutional network. 
This layer can also perform Gaussian histogram + expansion as detailed in [1]_. Note that the gathering function here is + simply addition as in [1]_. + + Examples + -------- + This layer expects 2 inputs in a list of the form `[atom_features, + pair_features]`. We'll walk through the structure + of these inputs. Let's start with some basic definitions. + + >>> import deepchem as dc + >>> import numpy as np + + Suppose you have a batch of molecules + + >>> smiles = ["CCC", "C"] + + Note that there are 4 atoms in total in this system. This layer expects its + input molecules to be batched together. + + >>> total_n_atoms = 4 + + Let's suppose that we have `n_atom_feat` features per atom. + + >>> n_atom_feat = 75 + + Then conceptually, `atom_feat` is the array of shape `(total_n_atoms, + n_atom_feat)` of atomic features. For simplicity, let's just go with a + random such matrix. + + >>> atom_feat = np.random.rand(total_n_atoms, n_atom_feat) + + We then need to provide a mapping of indices to the atoms they belong to. In + our case this would be + + >>> atom_split = np.array([0, 0, 0, 1]) + + Let's now define the actual layer + + >>> gather = WeaveGather(batch_size=2, n_input=n_atom_feat) + >>> output_molecules = gather([atom_feat, atom_split]) + >>> len(output_molecules) + 2 + + References + ---------- + .. [1] Kearnes, Steven, et al. "Molecular graph convolutions: moving beyond + fingerprints." Journal of computer-aided molecular design 30.8 (2016): + 595-608. + + Note + ---- + This class requires `tensorflow_probability` to be installed. 
+ """ def __init__(self, - batch_size, - n_input=128, - gaussian_expand=False, - init='glorot_uniform', - activation='tanh', - epsilon=1e-3, - momentum=0.99, + batch_size: int, + n_input: int = 128, + gaussian_expand: bool = True, + compress_post_gaussian_expansion: bool = False, + init: str = 'glorot_uniform', + activation: str = 'tanh', **kwargs): """ Parameters ---------- batch_size: int number of molecules in a batch - n_input: int, optional + n_input: int, optional (default 128) number of features for each input molecule - gaussian_expand: boolean. optional + gaussian_expand: boolean, optional (default True) Whether to expand each dimension of atomic features by gaussian histogram - init: str, optional - Weight initialization for filters. - activation: str, optional - Activation function applied - """ + compress_post_gaussian_expansion: bool, optional (default False) + If True, compress the results of the Gaussian expansion back to the + original dimensions of the input by using a linear layer with specified + activation function. Note that this compression was not in the original + paper, but was present in the original DeepChem implementation so is + left present for backwards compatibility. + init: str, optional (default 'glorot_uniform') + Weight initialization for filters if `compress_post_gaussian_expansion` + is True. + activation: str, optional (default 'tanh') + Activation function applied for filters if + `compress_post_gaussian_expansion` is True. Should be recognizable by + `tf.keras.activations`. 
+ """ + try: + import tensorflow_probability as tfp + except ModuleNotFoundError: + raise ImportError( + "This class requires tensorflow-probability to be installed.") super(WeaveGather, self).__init__(**kwargs) self.n_input = n_input self.batch_size = batch_size self.gaussian_expand = gaussian_expand + self.compress_post_gaussian_expansion = compress_post_gaussian_expansion self.init = init # Set weight initialization self.activation = activation # Get activations self.activation_fn = activations.get(activation) - self.epsilon = epsilon - self.momentum = momentum def get_config(self): config = super(WeaveGather, self).get_config() @@ -2225,18 +2569,38 @@ class WeaveGather(tf.keras.layers.Layer): config['gaussian_expand'] = self.gaussian_expand config['init'] = self.init config['activation'] = self.activation - config['epsilon'] = self.epsilon - config['momentum'] = self.momentum + config[ + 'compress_post_gaussian_expansion'] = self.compress_post_gaussian_expansion return config def build(self, input_shape): - if self.gaussian_expand: - init = initializers.get(self.init) + if self.compress_post_gaussian_expansion: + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + self.W = init([self.n_input * 11, self.n_input]) self.b = backend.zeros(shape=[self.n_input]) self.built = True - def call(self, inputs): + def call(self, inputs: List) -> List: + """Creates weave tensors. 
+ + Parameters + ---------- + inputs: List + Should contain 2 tensors [atom_features, atom_split] + + Returns + ------- + output_molecules: List + Each entry in this list is of shape `(self.n_inputs,)` + + """ outputs = inputs[0] atom_split = inputs[1] @@ -2245,13 +2609,43 @@ class WeaveGather(tf.keras.layers.Layer): output_molecules = tf.math.segment_sum(outputs, atom_split) - if self.gaussian_expand: + if self.compress_post_gaussian_expansion: output_molecules = tf.matmul(output_molecules, self.W) + self.b output_molecules = self.activation_fn(output_molecules) return output_molecules def gaussian_histogram(self, x): + """Expands input into a set of gaussian histogram bins. + + Parameters + ---------- + x: tf.Tensor + Of shape `(N, n_feat)` + + Examples + -------- + This method uses 11 bins spanning portions of a Gaussian with zero mean + and unit standard deviation. + + >>> gaussian_memberships = [(-1.645, 0.283), (-1.080, 0.170), + ... (-0.739, 0.134), (-0.468, 0.118), + ... (-0.228, 0.114), (0., 0.114), + ... (0.228, 0.114), (0.468, 0.118), + ... (0.739, 0.134), (1.080, 0.170), + ... (1.645, 0.283)] + + We construct a Gaussian at `gaussian_memberships[i][0]` with standard + deviation `gaussian_memberships[i][1]`. Each feature in `x` is assigned + the probability of falling in each Gaussian, and probabilities are + normalized across the 11 different Gaussians. 
+ + Returns + ------- + outputs: tf.Tensor + Of shape `(N, 11*n_feat)` + """ + import tensorflow_probability as tfp gaussian_memberships = [(-1.645, 0.283), (-1.080, 0.170), (-0.739, 0.134), (-0.468, 0.118), (-0.228, 0.114), (0., 0.114), (0.228, 0.114), (0.468, 0.118), (0.739, 0.134), @@ -2295,7 +2689,14 @@ class DTNNEmbedding(tf.keras.layers.Layer): return config def build(self, input_shape): - init = initializers.get(self.init) + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + self.embedding_list = init([self.periodic_table_length, self.n_embedding]) self.built = True @@ -2348,7 +2749,14 @@ class DTNNStep(tf.keras.layers.Layer): return config def build(self, input_shape): - init = initializers.get(self.init) + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + self.W_cf = init([self.n_embedding, self.n_hidden]) self.W_df = init([self.n_distance, self.n_hidden]) self.W_fc = init([self.n_hidden, self.n_embedding]) @@ -2433,7 +2841,14 @@ class DTNNGather(tf.keras.layers.Layer): def build(self, input_shape): self.W_list = [] self.b_list = [] - init = initializers.get(self.init) + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + prev_layer_size = self.n_embedding for i, layer_size in enumerate(self.layer_sizes): self.W_list.append(init([prev_layer_size, layer_size])) @@ -2557,22 +2972,37 @@ class DAGLayer(tf.keras.layers.Layer): self.W_list = [] self.b_list = [] self.dropouts = [] - init = initializers.get(self.init) prev_layer_size = self.n_inputs for layer_size in self.layer_sizes: - self.W_list.append(init([prev_layer_size, layer_size])) - self.b_list.append(backend.zeros(shape=[ - layer_size, - ])) + self.W_list.append( + self.add_weight( + 
name='kernel', + shape=(prev_layer_size, layer_size), + initializer=self.init, + trainable=True)) + self.b_list.append( + self.add_weight( + name='bias', + shape=(layer_size,), + initializer='zeros', + trainable=True)) if self.dropout is not None and self.dropout > 0.0: self.dropouts.append(Dropout(rate=self.dropout)) else: self.dropouts.append(None) prev_layer_size = layer_size - self.W_list.append(init([prev_layer_size, self.n_outputs])) - self.b_list.append(backend.zeros(shape=[ - self.n_outputs, - ])) + self.W_list.append( + self.add_weight( + name='kernel', + shape=(prev_layer_size, self.n_outputs), + initializer=self.init, + trainable=True)) + self.b_list.append( + self.add_weight( + name='bias', + shape=(self.n_outputs,), + initializer='zeros', + trainable=True)) if self.dropout is not None and self.dropout > 0.0: self.dropouts.append(Dropout(rate=self.dropout)) else: @@ -2690,22 +3120,37 @@ class DAGGather(tf.keras.layers.Layer): self.W_list = [] self.b_list = [] self.dropouts = [] - init = initializers.get(self.init) prev_layer_size = self.n_graph_feat for layer_size in self.layer_sizes: - self.W_list.append(init([prev_layer_size, layer_size])) - self.b_list.append(backend.zeros(shape=[ - layer_size, - ])) + self.W_list.append( + self.add_weight( + name='kernel', + shape=(prev_layer_size, layer_size), + initializer=self.init, + trainable=True)) + self.b_list.append( + self.add_weight( + name='bias', + shape=(layer_size,), + initializer='zeros', + trainable=True)) if self.dropout is not None and self.dropout > 0.0: self.dropouts.append(Dropout(rate=self.dropout)) else: self.dropouts.append(None) prev_layer_size = layer_size - self.W_list.append(init([prev_layer_size, self.n_outputs])) - self.b_list.append(backend.zeros(shape=[ - self.n_outputs, - ])) + self.W_list.append( + self.add_weight( + name='kernel', + shape=(prev_layer_size, self.n_outputs), + initializer=self.init, + trainable=True)) + self.b_list.append( + self.add_weight( + name='bias', + 
shape=(self.n_outputs,), + initializer='zeros', + trainable=True)) if self.dropout is not None and self.dropout > 0.0: self.dropouts.append(Dropout(rate=self.dropout)) else: @@ -2809,9 +3254,16 @@ class EdgeNetwork(tf.keras.layers.Layer): return config def build(self, input_shape): + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + n_pair_features = self.n_pair_features n_hidden = self.n_hidden - init = initializers.get(self.init) self.W = init([n_pair_features, n_hidden * n_hidden]) self.b = backend.zeros(shape=(n_hidden * n_hidden,)) self.built = True @@ -2841,7 +3293,14 @@ class GatedRecurrentUnit(tf.keras.layers.Layer): def build(self, input_shape): n_hidden = self.n_hidden - init = initializers.get(self.init) + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + self.Wz = init([n_hidden, n_hidden]) self.Wr = init([n_hidden, n_hidden]) self.Wh = init([n_hidden, n_hidden]) @@ -2896,7 +3355,14 @@ class SetGather(tf.keras.layers.Layer): return config def build(self, input_shape): - init = initializers.get(self.init) + + def init(input_shape): + return self.add_weight( + name='kernel', + shape=(input_shape[0], input_shape[1]), + initializer=self.init, + trainable=True) + self.U = init((2 * self.n_hidden, 4 * self.n_hidden)) self.b = tf.Variable( np.concatenate((np.zeros(self.n_hidden), np.ones(self.n_hidden), diff --git a/deepchem/models/losses.py b/deepchem/models/losses.py index cfa714560d7cb883071d6516aa0541c95f3221f2..5dccae8aea33450ba1182fdf58fb7cf2f520f867 100644 --- a/deepchem/models/losses.py +++ b/deepchem/models/losses.py @@ -1,11 +1,8 @@ -import tensorflow as tf - - class Loss: """A loss function for use in training models.""" - def __call__(self, output, labels): - """Compute the loss function. 
+ def _compute_tf_loss(self, output, labels): + """Compute the loss function for TensorFlow tensors. The inputs are tensors containing the model's outputs and the labels for a batch. The return value should be a tensor of shape (batch_size) or @@ -25,24 +22,38 @@ class Loss: """ raise NotImplementedError("Subclasses must implement this") + def _create_pytorch_loss(self): + """Create a PyTorch loss function.""" + raise NotImplementedError("Subclasses must implement this") + class L1Loss(Loss): """The absolute difference between the true and predicted values.""" - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.abs(output - labels) + def _create_pytorch_loss(self): + import torch + return torch.nn.L1Loss(reduction='none') + class L2Loss(Loss): """The squared difference between the true and predicted values.""" - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.square(output - labels) + def _create_pytorch_loss(self): + import torch + return torch.nn.MSELoss(reduction='none') + class HingeLoss(Loss): """The hinge loss function. @@ -51,10 +62,20 @@ class HingeLoss(Loss): should equal 0 or 1. 
""" - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) return tf.keras.losses.hinge(labels, output) + def _create_pytorch_loss(self): + import torch + + def loss(output, labels): + output, labels = _make_pytorch_shapes_consistent(output, labels) + return torch.mean(torch.clamp(1 - labels * output, min=0), dim=-1) + + return loss + class BinaryCrossEntropy(Loss): """The cross entropy between pairs of probabilities. @@ -63,11 +84,22 @@ class BinaryCrossEntropy(Loss): contain probabilities. """ - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.keras.losses.binary_crossentropy(labels, output) + def _create_pytorch_loss(self): + import torch + bce = torch.nn.BCELoss(reduction='none') + + def loss(output, labels): + output, labels = _make_pytorch_shapes_consistent(output, labels) + return torch.mean(bce(output, labels), dim=-1) + + return loss + class CategoricalCrossEntropy(Loss): """The cross entropy between two probability distributions. @@ -77,11 +109,21 @@ class CategoricalCrossEntropy(Loss): classes. 
""" - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.keras.losses.categorical_crossentropy(labels, output) + def _create_pytorch_loss(self): + import torch + + def loss(output, labels): + output, labels = _make_pytorch_shapes_consistent(output, labels) + return -torch.sum(labels * torch.log(output), dim=-1) + + return loss + class SigmoidCrossEntropy(Loss): """The cross entropy between pairs of probabilities. @@ -91,11 +133,22 @@ class SigmoidCrossEntropy(Loss): converted to probabilities using a sigmoid function. """ - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.nn.sigmoid_cross_entropy_with_logits(labels, output) + def _create_pytorch_loss(self): + import torch + bce = torch.nn.BCEWithLogitsLoss(reduction='none') + + def loss(output, labels): + output, labels = _make_pytorch_shapes_consistent(output, labels) + return bce(output, labels) + + return loss + class SoftmaxCrossEntropy(Loss): """The cross entropy between two probability distributions. @@ -106,11 +159,22 @@ class SoftmaxCrossEntropy(Loss): function. 
""" - def __call__(self, output, labels): - output, labels = _make_shapes_consistent(output, labels) + def _compute_tf_loss(self, output, labels): + import tensorflow as tf + output, labels = _make_tf_shapes_consistent(output, labels) output, labels = _ensure_float(output, labels) return tf.nn.softmax_cross_entropy_with_logits(labels, output) + def _create_pytorch_loss(self): + import torch + ls = torch.nn.LogSoftmax(dim=1) + + def loss(output, labels): + output, labels = _make_pytorch_shapes_consistent(output, labels) + return -torch.sum(labels * ls(output), dim=-1) + + return loss + class SparseSoftmaxCrossEntropy(Loss): """The cross entropy between two probability distributions. @@ -121,13 +185,200 @@ class SparseSoftmaxCrossEntropy(Loss): using a softmax function. """ - def __call__(self, output, labels): + def _compute_tf_loss(self, output, labels): + import tensorflow as tf labels = tf.cast(labels, tf.int32) return tf.nn.sparse_softmax_cross_entropy_with_logits(labels, output) + def _create_pytorch_loss(self): + import torch + ce_loss = torch.nn.CrossEntropyLoss(reduction='none') + + def loss(output, labels): + # Convert (batch_size, tasks, classes) to (batch_size, classes, tasks) + # CrossEntropyLoss only supports (batch_size, classes, tasks) + # This is for API consistency + if len(output.shape) == 3: + output = output.permute(0, 2, 1) + return ce_loss(output, labels.long()) + + return loss + + +class VAE_ELBO(Loss): + """The Variational AutoEncoder loss, KL Divergence Regularize + marginal log-likelihood. + + This losses based on _[1]. + ELBO(Evidence lower bound) lexically replaced Variational lower bound. + BCE means marginal log-likelihood, and KLD means KL divergence with normal distribution. + Added hyper parameter 'kl_scale' for KLD. + + The logvar and mu should have shape (batch_size, hidden_space). + The x and reconstruction_x should have (batch_size, attribute). + The kl_scale should be float. 
+ + Examples + -------- + Examples for calculating loss using constant tensor. + + batch_size = 2, + hidden_space = 2, + num of original attribute = 3 + >>> import numpy as np + >>> import torch + >>> import tensorflow as tf + >>> logvar = np.array([[1.0,1.3],[0.6,1.2]]) + >>> mu = np.array([[0.2,0.7],[1.2,0.4]]) + >>> x = np.array([[0.9,0.4,0.8],[0.3,0,1]]) + >>> reconstruction_x = np.array([[0.8,0.3,0.7],[0.2,0,0.9]]) + + Case tensorflow + >>> VAE_ELBO()._compute_tf_loss(tf.constant(logvar), tf.constant(mu), tf.constant(x), tf.constant(reconstruction_x)) + + + Case pytorch + >>> (VAE_ELBO()._create_pytorch_loss())(torch.tensor(logvar), torch.tensor(mu), torch.tensor(x), torch.tensor(reconstruction_x)) + tensor([0.7017, 0.7624], dtype=torch.float64) + + + References + ---------- + .. [1] Kingma, Diederik P., and Max Welling. "Auto-encoding variational bayes." arXiv preprint arXiv:1312.6114 (2013). + + """ + + def _compute_tf_loss(self, logvar, mu, x, reconstruction_x, kl_scale=1): + import tensorflow as tf + x, reconstruction_x = _make_tf_shapes_consistent(x, reconstruction_x) + x, reconstruction_x = _ensure_float(x, reconstruction_x) + BCE = tf.keras.losses.binary_crossentropy(x, reconstruction_x) + KLD = VAE_KLDivergence()._compute_tf_loss(logvar, mu) + return BCE + kl_scale * KLD + + def _create_pytorch_loss(self): + import torch + bce = torch.nn.BCELoss(reduction='none') + + def loss(logvar, mu, x, reconstruction_x, kl_scale=1): + x, reconstruction_x = _make_pytorch_shapes_consistent(x, reconstruction_x) + BCE = torch.mean(bce(reconstruction_x, x), dim=-1) + KLD = (VAE_KLDivergence()._create_pytorch_loss())(logvar, mu) + return BCE + kl_scale * KLD + + return loss + + +class VAE_KLDivergence(Loss): + """The KL_divergence between hidden distribution and normal distribution. + + This loss represents KL divergence losses between normal distribution(using parameter of distribution) + based on _[1]. 
+ + The logvar should have shape (batch_size, hidden_space) and each term represents + standard deviation of hidden distribution. The mean should have + (batch_size, hidden_space) and each term represents mean of hidden distribution. + + Examples + -------- + Examples for calculating loss using constant tensor. + + batch_size = 2, + hidden_space = 2, + >>> import numpy as np + >>> import torch + >>> import tensorflow as tf + >>> logvar = np.array([[1.0,1.3],[0.6,1.2]]) + >>> mu = np.array([[0.2,0.7],[1.2,0.4]]) + + Case tensorflow + >>> VAE_KLDivergence()._compute_tf_loss(tf.constant(logvar), tf.constant(mu)) + + + Case pytorch + >>> (VAE_KLDivergence()._create_pytorch_loss())(torch.tensor(logvar), torch.tensor(mu)) + tensor([0.1738, 0.5143], dtype=torch.float64) + + References + ---------- + .. [1] Kingma, Diederik P., and Max Welling. "Auto-encoding variational bayes." arXiv preprint arXiv:1312.6114 (2013). + + """ + + def _compute_tf_loss(self, logvar, mu): + import tensorflow as tf + logvar, mu = _make_tf_shapes_consistent(logvar, mu) + logvar, mu = _ensure_float(logvar, mu) + return 0.5 * tf.reduce_mean( + tf.square(mu) + tf.square(logvar) - + tf.math.log(1e-20 + tf.square(logvar)) - 1, -1) + + def _create_pytorch_loss(self): + import torch + + def loss(logvar, mu): + logvar, mu = _make_pytorch_shapes_consistent(logvar, mu) + return 0.5 * torch.mean( + torch.square(mu) + torch.square(logvar) - + torch.log(1e-20 + torch.square(logvar)) - 1, -1) + + return loss + + +class ShannonEntropy(Loss): + """The Shannon entropy of a discrete distribution. + + This loss represents Shannon entropy based on [1]_. + + The inputs should have shape (batch size, num of variable) and represent + a probability distribution. + + Examples + -------- + Examples for calculating loss using constant tensor. 
+ + batch_size = 2, + num_of variable = variable, + >>> import numpy as np + >>> import torch + >>> import tensorflow as tf + >>> inputs = np.array([[0.7,0.3],[0.9,0.1]]) + + Case tensorflow + >>> ShannonEntropy()._compute_tf_loss(tf.constant(inputs)) + + + Case pytorch + >>> (ShannonEntropy()._create_pytorch_loss())(torch.tensor(inputs)) + tensor([0.3054, 0.1625], dtype=torch.float64) + + References + ---------- + .. [1] Chen, Ricky Xiaofeng. "A Brief Introduction to Shannon’s Information Theory." arXiv preprint arXiv:1612.09316 (2016). + + """ + + def _compute_tf_loss(self, inputs): + import tensorflow as tf + #extended one of probabilites to binary distribution + if inputs.shape[-1] == 1: + inputs = tf.concat([inputs, 1 - inputs], axis=-1) + return tf.reduce_mean(-inputs * tf.math.log(1e-20 + inputs), -1) + + def _create_pytorch_loss(self): + import torch + + def loss(inputs): + #extended one of probabilites to binary distribution + if inputs.shape[-1] == 1: + inputs = torch.cat((inputs, 1 - inputs), dim=-1) + return torch.mean(-inputs * torch.log(1e-20 + inputs), -1) + + return loss -def _make_shapes_consistent(output, labels): + +def _make_tf_shapes_consistent(output, labels): """Try to make inputs have the same shape by adding dimensions of size 1.""" + import tensorflow as tf shape1 = output.shape shape2 = labels.shape len1 = len(shape1) @@ -150,8 +401,32 @@ def _make_shapes_consistent(output, labels): (str(shape1), str(shape2))) +def _make_pytorch_shapes_consistent(output, labels): + """Try to make inputs have the same shape by adding dimensions of size 1.""" + import torch + shape1 = output.shape + shape2 = labels.shape + len1 = len(shape1) + len2 = len(shape2) + if len1 == len2: + return (output, labels) + shape1 = tuple(shape1) + shape2 = tuple(shape2) + if len1 > len2 and all(i == 1 for i in shape1[len2:]): + for i in range(len1 - len2): + labels = torch.unsqueeze(labels, -1) + return (output, labels) + if len2 > len1 and all(i == 1 for i in 
shape2[len1:]): + for i in range(len2 - len1): + output = torch.unsqueeze(output, -1) + return (output, labels) + raise ValueError("Incompatible shapes for outputs and labels: %s versus %s" % + (str(shape1), str(shape2))) + + def _ensure_float(output, labels): """Make sure the outputs and labels are both floating point types.""" + import tensorflow as tf if output.dtype not in (tf.float32, tf.float64): output = tf.cast(output, tf.float32) if labels.dtype not in (tf.float32, tf.float64): diff --git a/deepchem/models/models.py b/deepchem/models/models.py index 993d915054f66096fbbf49b0860f2ed1b0cf282e..35b69885afc8e18b57bfa2ab8dc68575c8c797a8 100644 --- a/deepchem/models/models.py +++ b/deepchem/models/models.py @@ -1,46 +1,49 @@ """ Contains an abstract base class that supports different ML models. """ -__author__ = "Bharath Ramsundar and Joseph Gomes" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" -import sys -import numpy as np -import pandas as pd -import joblib import os import shutil import tempfile -import sklearn +import logging +from typing import List, Optional, Sequence + +import numpy as np from sklearn.base import BaseEstimator -from deepchem.data import Dataset, pad_features -from deepchem.trans import undo_transforms -from deepchem.utils.save import load_from_disk -from deepchem.utils.save import save_to_disk -from deepchem.utils.save import log +from deepchem.data import Dataset +from deepchem.metrics import Metric +from deepchem.trans import Transformer, undo_transforms from deepchem.utils.evaluate import Evaluator +logger = logging.getLogger(__name__) + class Model(BaseEstimator): """ - Abstract base class for different ML models. + Abstract base class for DeepChem models. """ - def __init__(self, - model_instance=None, - model_dir=None, - verbose=True, - **kwargs): + def __init__(self, model=None, model_dir: Optional[str] = None, + **kwargs) -> None: """Abstract class for all models. 
- Parameters: - ----------- - model_instance: object + + This is intended only for convenience of subclass implementations + and should not be invoked directly. + + Parameters + ---------- + model: object Wrapper around ScikitLearn/Keras/Tensorflow model object. - model_dir: str - Path to directory where model will be stored. + model_dir: str, optional (default None) + Path to directory where model will be stored. If not specified, + model will be stored in a temporary directory. """ + if self.__class__.__name__ == "Model": + raise ValueError( + "This constructor is for an abstract class and should never be called directly." + "Can only call from subclass constructors.") + self.model_dir_is_temp = False if model_dir is not None: if not os.path.exists(model_dir): @@ -49,23 +52,29 @@ class Model(BaseEstimator): model_dir = tempfile.mkdtemp() self.model_dir_is_temp = True self.model_dir = model_dir - self.model_instance = model_instance - self.model_class = model_instance.__class__ - - self.verbose = verbose + self.model = model + self.model_class = model.__class__ def __del__(self): if 'model_dir_is_temp' in dir(self) and self.model_dir_is_temp: shutil.rmtree(self.model_dir) - def fit_on_batch(self, X, y, w): - """ - Updates existing model with new information. + def fit_on_batch(self, X: Sequence, y: Sequence, w: Sequence): + """Perform a single step of training. + + Parameters + ---------- + X: np.ndarray + the inputs for the batch + y: np.ndarray + the labels for the batch + w: np.ndarray + the weights for the batch """ raise NotImplementedError( "Each model is responsible for its own fit_on_batch method.") - def predict_on_batch(self, X, **kwargs): + def predict_on_batch(self, X: Sequence): """ Makes predictions on given batch of new data. @@ -77,7 +86,7 @@ class Model(BaseEstimator): raise NotImplementedError( "Each model is responsible for its own predict_on_batch method.") - def reload(self): + def reload(self) -> None: """ Reload trained model from disk. 
""" @@ -85,54 +94,58 @@ class Model(BaseEstimator): "Each model is responsible for its own reload method.") @staticmethod - def get_model_filename(model_dir): + def get_model_filename(model_dir: str) -> str: """ Given model directory, obtain filename for the model itself. """ return os.path.join(model_dir, "model.joblib") @staticmethod - def get_params_filename(model_dir): + def get_params_filename(model_dir: str) -> str: """ Given model directory, obtain filename for the model itself. """ return os.path.join(model_dir, "model_params.joblib") - def save(self): + def save(self) -> None: """Dispatcher function for saving. Each subclass is responsible for overriding this method. """ raise NotImplementedError - def fit(self, dataset, nb_epoch=10, batch_size=50, **kwargs): + def fit(self, dataset: Dataset): """ Fits a model on data in a Dataset object. + + Parameters + ---------- + dataset: Dataset + the Dataset to train on """ - # TODO(rbharath/enf): We need a structured way to deal with potential GPU - # memory overflows. - for epoch in range(nb_epoch): - log("Starting epoch %s" % str(epoch + 1), self.verbose) - losses = [] - for (X_batch, y_batch, w_batch, - ids_batch) in dataset.iterbatches(batch_size): - losses.append(self.fit_on_batch(X_batch, y_batch, w_batch)) - log("Avg loss for epoch %d: %f" % (epoch + 1, np.array(losses).mean()), - self.verbose) + raise NotImplementedError( + "Each model is responsible for its own fit method.") - def predict(self, dataset, transformers=[], batch_size=None): + def predict(self, dataset: Dataset, + transformers: List[Transformer] = []) -> np.ndarray: """ Uses self to make predictions on provided Dataset object. - Returns: - y_pred: numpy ndarray of shape (n_samples,) + Parameters + ---------- + dataset: Dataset + Dataset to make prediction on + transformers: List[Transformer] + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. 
+ + Returns + ------- + np.ndarray + A numpy array of predictions the model produces. """ y_preds = [] - n_tasks = self.get_num_tasks() - ind = 0 - - for (X_batch, _, _, ids_batch) in dataset.iterbatches( - batch_size, deterministic=True): + for (X_batch, _, _, ids_batch) in dataset.iterbatches(deterministic=True): n_samples = len(X_batch) y_pred_batch = self.predict_on_batch(X_batch) # Discard any padded predictions @@ -142,42 +155,76 @@ class Model(BaseEstimator): y_pred = np.concatenate(y_preds) return y_pred - def evaluate(self, dataset, metrics, transformers=[], per_task_metrics=False): + def evaluate(self, + dataset: Dataset, + metrics: List[Metric], + transformers: List[Transformer] = [], + per_task_metrics: bool = False, + use_sample_weights: bool = False, + n_classes: int = 2): """ Evaluates the performance of this model on specified dataset. + This function uses `Evaluator` under the hood to perform model + evaluation. As a result, it inherits the same limitations of + `Evaluator`. Namely, that only regression and classification + models can be evaluated in this fashion. For generator models, you + will need to overwrite this method to perform a custom evaluation. + + Keyword arguments specified here will be passed to + `Evaluator.compute_model_performance`. + Parameters ---------- - dataset: dc.data.Dataset + dataset: Dataset Dataset object. - metric: deepchem.metrics.Metric - Evaluation metric - transformers: list - List of deepchem.transformers.Transformer - per_task_metrics: bool - If True, return per-task scores. + metrics: Metric / List[Metric] / function + The set of metrics provided. This class attempts to do some + intelligent handling of input. If a single `dc.metrics.Metric` + object is provided or a list is provided, it will evaluate + `self.model` on these metrics. If a function is provided, it is + assumed to be a metric function that this method will attempt to + wrap in a `dc.metrics.Metric` object. 
A metric function must + accept two arguments, `y_true, y_pred` both of which are + `np.ndarray` objects and return a floating point score. The + metric function may also accept a keyword argument + `sample_weight` to account for per-sample weights. + transformers: List[Transformer] + List of `dc.trans.Transformer` objects. These transformations + must have been applied to `dataset` previously. The dataset will + be untransformed for metric evaluation. + per_task_metrics: bool, optional (default False) + If true, return computed metric for each task on multitask dataset. + use_sample_weights: bool, optional (default False) + If set, use per-sample weights `w`. + n_classes: int, optional (default None) + If specified, will use `n_classes` as the number of unique classes + in `self.dataset`. Note that this argument will be ignored for + regression metrics. Returns ------- - dict - Maps tasks to scores under metric. + multitask_scores: dict + Dictionary mapping names of metrics to metric scores. + all_task_scores: dict, optional + If `per_task_metrics == True` is passed as a keyword argument, + then returns a second dictionary of scores for each task + separately. """ evaluator = Evaluator(self, dataset, transformers) - if not per_task_metrics: - scores = evaluator.compute_model_performance(metrics) - return scores - else: - scores, per_task_scores = evaluator.compute_model_performance( - metrics, per_task_metrics=per_task_metrics) - return scores, per_task_scores + return evaluator.compute_model_performance( + metrics, + per_task_metrics=per_task_metrics, + use_sample_weights=use_sample_weights, + n_classes=n_classes) - def get_task_type(self): + def get_task_type(self) -> str: """ Currently models can only be classifiers or regressors. """ raise NotImplementedError - def get_num_tasks(self): + def get_num_tasks(self) -> int: """ Get number of tasks. 
""" diff --git a/deepchem/models/multitask.py b/deepchem/models/multitask.py index a6e4510d41ab61cefa63e895dcd40129b548811b..4a0e61c22b82753627ea864e6e0fe50ee65208dd 100644 --- a/deepchem/models/multitask.py +++ b/deepchem/models/multitask.py @@ -15,10 +15,15 @@ logger = logging.getLogger(__name__) class SingletaskToMultitask(Model): - """ - Convenience class to let singletask models be fit on multitask data. + """Convenience class to let singletask models be fit on multitask data. + + This wrapper class groups a set of singletask `SklearnModel` objects to + create a multitask model. This class exists primarily to facilitate + benchmarking. - Warning: This current implementation is only functional for sklearn models. + Note + ---- + This current implementation is only functional for sklearn models. """ def __init__(self, tasks, model_builder, model_dir=None): @@ -89,7 +94,9 @@ class SingletaskToMultitask(Model): """ Updates all singletask models with new information. - Warning: This current implementation is only functional for sklearn models. + Note + ---- + This current implementation is only functional for sklearn models. """ if not isinstance(dataset, DiskDataset): raise ValueError('SingletaskToMultitask only works with DiskDatasets') diff --git a/deepchem/models/normalizing_flows.py b/deepchem/models/normalizing_flows.py new file mode 100644 index 0000000000000000000000000000000000000000..ac9a7940f827aa451e22b674ab7338d2f15358c0 --- /dev/null +++ b/deepchem/models/normalizing_flows.py @@ -0,0 +1,349 @@ +""" +Normalizing flows for transforming probability distributions. 
+""" + +import numpy as np +import logging +from typing import List, Iterable, Optional, Tuple, Sequence, Any, Callable + +import tensorflow as tf +from tensorflow.keras.layers import Lambda + +import deepchem as dc +from deepchem.models.losses import Loss +from deepchem.models.models import Model +from deepchem.models.keras_model import KerasModel +from deepchem.models.optimizers import Optimizer, Adam +from deepchem.utils.typing import OneOrMany +from deepchem.utils.data_utils import load_from_disk, save_to_disk + +logger = logging.getLogger(__name__) + + +class NormalizingFlow(tf.keras.models.Model): + """Base class for normalizing flow. + + The purpose of a normalizing flow is to map a simple distribution (that is + easy to sample from and evaluate probability densities for) to a more + complex distribution that is learned from data. The base distribution + p(x) is transformed by the associated normalizing flow y=g(x) to model the + distribution p(y). + + Normalizing flows combine the advantages of autoregressive models + (which provide likelihood estimation but do not learn features) and + variational autoencoders (which learn feature representations but + do not provide marginal likelihoods). + + """ + + def __init__(self, + base_distribution, + flow_layers: Sequence, + event_shape: Optional[List[int]] = None, + **kwargs) -> None: + """Create a new NormalizingFlow. + + Parameters + ---------- + base_distribution: tfd.Distribution + Probability distribution to be transformed. + Typically an N dimensional multivariate Gaussian. + flow_layers: Sequence[tfb.Bijector] + An iterable of bijectors that comprise the flow. + event_shape: Optional[List[int]] + Dimensionality of inputs, e.g. [2] for 2D inputs. 
+ **kwargs + + """ + + try: + import tensorflow_probability as tfp + tfd = tfp.distributions + tfb = tfp.bijectors + except ModuleNotFoundError: + raise ImportError( + "This class requires tensorflow-probability to be installed.") + + self.base_distribution = base_distribution + self.flow_layers = flow_layers + self.event_shape = event_shape + + # Chain of flows is also a normalizing flow + bijector = tfb.Chain(list(reversed(self.flow_layers))) + + # An instance of tfd.TransformedDistribution + self.flow = tfd.TransformedDistribution( + distribution=self.base_distribution, + bijector=bijector, + event_shape=self.event_shape) + + super(NormalizingFlow, self).__init__(**kwargs) + + def __call__(self, *inputs, training=True): + return self.flow.bijector.forward(*inputs) + + +class NormalizingFlowModel(KerasModel): + """A base distribution and normalizing flow for applying transformations. + + Normalizing flows are effective for any application requiring + a probabilistic model that can both sample from a distribution and + compute marginal likelihoods, e.g. generative modeling, + unsupervised learning, or probabilistic inference. For a thorough review + of normalizing flows, see [1]_. + + A distribution implements two main operations: + 1. Sampling from the transformed distribution + 2. Calculating log probabilities + + A normalizing flow implements three main operations: + 1. Forward transformation + 2. Inverse transformation + 3. Calculating the Jacobian + + Deep Normalizing Flow models require normalizing flow layers where + input and output dimensions are the same, the transformation is invertible, + and the determinant of the Jacobian is efficient to compute and + differentiable. The determinant of the Jacobian of the transformation + gives a factor that preserves the probability volume to 1 when transforming + between probability densities of different random variables. + + References + ---------- + .. [1] Papamakarios, George et al. 
"Normalizing Flows for Probabilistic Modeling and Inference." (2019). https://arxiv.org/abs/1912.02762. + + """ + + def __init__(self, model: NormalizingFlow, **kwargs) -> None: + """Creates a new NormalizingFlowModel. + + In addition to the following arguments, this class also accepts all the keyword arguments from KerasModel. + + Parameters + ---------- + model: NormalizingFlow + An instance of NormalizingFlow. + + Examples + -------- + >> import tensorflow_probability as tfp + >> tfd = tfp.distributions + >> tfb = tfp.bijectors + >> flow_layers = [ + .. tfb.RealNVP( + .. num_masked=2, + .. shift_and_log_scale_fn=tfb.real_nvp_default_template( + .. hidden_layers=[8, 8])) + ..] + >> base_distribution = tfd.MultivariateNormalDiag(loc=[0., 0., 0.]) + >> nf = NormalizingFlow(base_distribution, flow_layers) + >> nfm = NormalizingFlowModel(nf) + >> dataset = NumpyDataset( + .. X=np.random.rand(5, 3).astype(np.float32), + .. y=np.random.rand(5,), + .. ids=np.arange(5)) + >> nfm.fit(dataset) + + """ + + try: + import tensorflow_probability as tfp + tfd = tfp.distributions + tfb = tfp.bijectors + except ModuleNotFoundError: + raise ImportError( + "This class requires tensorflow-probability to be installed.") + + self.nll_loss_fn = lambda input, labels, weights: self.create_nll(input) + + super(NormalizingFlowModel, self).__init__( + model=model, loss=self.nll_loss_fn, **kwargs) + + self.flow = self.model.flow # normalizing flow + + # TODO: Incompability between TF and TFP means that TF doesn't track + # trainable variables in the flow; must override `_create_gradient_fn` + # self._variables = self.flow.trainable_variables + + def create_nll(self, input: OneOrMany[tf.Tensor]) -> tf.Tensor: + """Create the negative log likelihood loss function. + + The default implementation is appropriate for most cases. Subclasses can + override this if there is a need to customize it. + + Parameters + ---------- + input: OneOrMany[tf.Tensor] + A batch of data. 
+ + Returns + ------- + A Tensor equal to the loss function to use for optimization. + + """ + + return -tf.reduce_mean(self.flow.log_prob(input, training=True)) + + def save(self): + """Saves model to disk using joblib.""" + save_to_disk(self.model, self.get_model_filename(self.model_dir)) + + def reload(self): + """Loads model from joblib file on disk.""" + self.model = load_from_disk(self.get_model_filename(self.model_dir)) + + def _create_gradient_fn(self, + variables: Optional[List[tf.Variable]]) -> Callable: + """Create a function that computes gradients and applies them to the model. + + Because of the way TensorFlow function tracing works, we need to create a + separate function for each new set of variables. + + Parameters + ---------- + variables: Optional[List[tf.Variable]] + Variables to track during training. + + Returns + ------- + Callable function that applies gradients for batch of training data. + + """ + + @tf.function(experimental_relax_shapes=True) + def apply_gradient_for_batch(inputs, labels, weights, loss): + with tf.GradientTape() as tape: + tape.watch(self.flow.trainable_variables) + if isinstance(inputs, tf.Tensor): + inputs = [inputs] + if self._loss_outputs is not None: + inputs = [inputs[i] for i in self._loss_outputs] + batch_loss = loss(inputs, labels, weights) + if variables is None: + vars = self.flow.trainable_variables + else: + vars = variables + grads = tape.gradient(batch_loss, vars) + self._tf_optimizer.apply_gradients(zip(grads, vars)) + self._global_step.assign_add(1) + return batch_loss + + return apply_gradient_for_batch + + +class NormalizingFlowLayer(object): + """Base class for normalizing flow layers. + + This is an abstract base class for implementing new normalizing flow + layers that are not available in tfb. It should not be called directly. + + A normalizing flow transforms random variables into new random variables. 
+ Each learnable layer is a bijection, an invertible + transformation between two probability distributions. A simple initial + density is pushed through the normalizing flow to produce a richer, + more multi-modal distribution. Normalizing flows have three main operations: + + 1. Forward + Transform a distribution. Useful for generating new samples. + 2. Inverse + Reverse a transformation, useful for computing conditional probabilities. + 3. Log(|det(Jacobian)|) [LDJ] + Compute the determinant of the Jacobian of the transformation, + which is a scaling that conserves the probability "volume" to equal 1. + + For examples of customized normalizing flows applied to toy problems, + see [1]_. + + References + ---------- + .. [1] Saund, Brad. "Normalizing Flows." (2020). https://github.com/bsaund/normalizing_flows. + + Notes + ----- + - A sequence of normalizing flows is a normalizing flow. + - The Jacobian is the matrix of first-order derivatives of the transform. + + """ + + def __init__(self, **kwargs): + """Create a new NormalizingFlowLayer.""" + + pass + + def _forward(self, x: tf.Tensor) -> tf.Tensor: + """Forward transformation. + + x = g(y) + + Parameters + ---------- + x: tf.Tensor + Input tensor. + + Returns + ------- + fwd_x: tf.Tensor + Transformed tensor. + + """ + + raise NotImplementedError("Forward transform must be defined.") + + def _inverse(self, y: tf.Tensor) -> tf.Tensor: + """Inverse transformation. + + x = g^{-1}(y) + + Parameters + ---------- + y: tf.Tensor + Input tensor. + + Returns + ------- + inv_y: tf.Tensor + Inverted tensor. + + """ + + raise NotImplementedError("Inverse transform must be defined.") + + def _forward_log_det_jacobian(self, x: tf.Tensor) -> tf.Tensor: + """Log |Determinant(Jacobian(x)| + + Note x = g^{-1}(y) + + Parameters + ---------- + x: tf.Tensor + Input tensor. + + Returns + ------- + ldj: tf.Tensor + Log of absolute value of determinant of Jacobian of x. 
+ + """ + + raise NotImplementedError("LDJ must be defined.") + + def _inverse_log_det_jacobian(self, y: tf.Tensor) -> tf.Tensor: + """Inverse LDJ. + + The ILDJ = -LDJ. + + Note x = g^{-1}(y) + + Parameters + ---------- + y: tf.Tensor + Input tensor. + + Returns + ------- + ildj: tf.Tensor + Log of absolute value of determinant of Jacobian of y. + + """ + + return -self._forward_log_det_jacobian(self._inverse(y)) diff --git a/deepchem/models/optimizers.py b/deepchem/models/optimizers.py index db7c1ddec355063a820672c5fdf1816b85b4512f..12e531927c3a372d8cd6d901c198270278cf01a4 100644 --- a/deepchem/models/optimizers.py +++ b/deepchem/models/optimizers.py @@ -1,16 +1,28 @@ """Optimizers and related classes for use with TensorGraph.""" -import tensorflow as tf +import math + +from typing import Union class Optimizer(object): - """An algorithm for optimizing a TensorGraph based model. + """An algorithm for optimizing a model. This is an abstract class. Subclasses represent specific optimization algorithms. """ - def _create_optimizer(self, global_step): - """Construct the TensorFlow optimizer. + def __init__(self, learning_rate: "Union[float, LearningRateSchedule]"): + """This constructor should only be called by subclasses. + + Parameters + ---------- + learning_rate: float or LearningRateSchedule + the learning rate to use for optimization + """ + self.learning_rate = learning_rate + + def _create_tf_optimizer(self, global_step): + """Construct a TensorFlow optimizer. Parameters ---------- @@ -21,7 +33,21 @@ class Optimizer(object): ------- a new TensorFlow optimizer implementing the algorithm """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") + + def _create_pytorch_optimizer(self, params): + """Construct a PyTorch optimizer. 
+ + Parameters + ---------- + params: Iterable + the model parameters to optimize + + Returns + ------- + a new PyTorch optimizer implementing the algorithm + """ + raise NotImplementedError("Subclasses must implement this") class LearningRateSchedule(object): @@ -30,7 +56,7 @@ class LearningRateSchedule(object): This is an abstract class. Subclasses represent specific schedules. """ - def _create_tensor(self, global_step): + def _create_tf_tensor(self, global_step): """Construct a tensor that equals the learning rate. Parameters @@ -42,14 +68,89 @@ class LearningRateSchedule(object): ------- a tensor that equals the learning rate """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") + + def _create_pytorch_schedule(self, optimizer): + """Construct a PyTorch learning rate scheduler. + + Parameters + ---------- + optimizer: torch.optim.Optimizer + the Optimizer whose learning rate will be modified + + Returns + ------- + a PyTorch scheduler implementing the schedule + """ + raise NotImplementedError("Subclasses must implement this") + + +class AdaGrad(Optimizer): + """The AdaGrad optimization algorithm. + + Adagrad is an optimizer with parameter-specific learning rates, which are + adapted relative to how frequently a parameter gets updated during training. + The more updates a parameter receives, the smaller the updates. See [1]_ for + a full reference for the algorithm. + + References + ---------- + .. [1] Duchi, John, Elad Hazan, and Yoram Singer. "Adaptive subgradient + methods for online learning and stochastic optimization." Journal of machine + learning research 12.7 (2011). + """ + + def __init__(self, + learning_rate: Union[float, LearningRateSchedule] = 0.001, + initial_accumulator_value: float = 0.1, + epsilon: float = 1e-07): + """Construct an AdaGrad optimizer. 
+ Parameters + ---------- + learning_rate: float or LearningRateSchedule + the learning rate to use for optimization + initial_accumulator_value: float + a parameter of the AdaGrad algorithm + epsilon: float + a parameter of the AdaGrad algorithm + + """ + super(AdaGrad, self).__init__(learning_rate) + self.initial_accumulator_value = initial_accumulator_value + self.epsilon = epsilon + + def _create_tf_optimizer(self, global_step): + import tensorflow as tf + if isinstance(self.learning_rate, LearningRateSchedule): + learning_rate = self.learning_rate._create_tf_tensor(global_step) + else: + learning_rate = self.learning_rate + return tf.keras.optimizers.Adagrad( + learning_rate=learning_rate, + initial_accumulator_value=self.initial_accumulator_value, + epsilon=self.epsilon) + + def _create_pytorch_optimizer(self, params): + import torch + if isinstance(self.learning_rate, LearningRateSchedule): + lr = self.learning_rate.initial_rate + else: + lr = self.learning_rate + return torch.optim.Adagrad( + params, + lr, + initial_accumulator_value=self.initial_accumulator_value, + eps=self.epsilon) class Adam(Optimizer): """The Adam optimization algorithm.""" - def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, - epsilon=1e-08): + def __init__(self, + learning_rate: Union[float, LearningRateSchedule] = 0.001, + beta1: float = 0.9, + beta2: float = 0.999, + epsilon: float = 1e-08): """Construct an Adam optimizer. 
Parameters @@ -63,14 +164,15 @@ class Adam(Optimizer): epsilon: float a parameter of the Adam algorithm """ - self.learning_rate = learning_rate + super(Adam, self).__init__(learning_rate) self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon - def _create_optimizer(self, global_step): + def _create_tf_optimizer(self, global_step): + import tensorflow as tf if isinstance(self.learning_rate, LearningRateSchedule): - learning_rate = self.learning_rate._create_tensor(global_step) + learning_rate = self.learning_rate._create_tf_tensor(global_step) else: learning_rate = self.learning_rate return tf.keras.optimizers.Adam( @@ -79,15 +181,23 @@ class Adam(Optimizer): beta_2=self.beta2, epsilon=self.epsilon) + def _create_pytorch_optimizer(self, params): + import torch + if isinstance(self.learning_rate, LearningRateSchedule): + lr = self.learning_rate.initial_rate + else: + lr = self.learning_rate + return torch.optim.Adam(params, lr, (self.beta1, self.beta2), self.epsilon) + class RMSProp(Optimizer): """RMSProp Optimization algorithm.""" def __init__(self, - learning_rate=0.001, - momentum=0.0, - decay=0.9, - epsilon=1e-10): + learning_rate: Union[float, LearningRateSchedule] = 0.001, + momentum: float = 0.0, + decay: float = 0.9, + epsilon: float = 1e-10): """Construct an RMSProp Optimizer. 
Parameters @@ -101,14 +211,15 @@ class RMSProp(Optimizer): epsilon: float, default 1e-10 a parameter of the RMSProp algorithm """ - self.learning_rate = learning_rate + super(RMSProp, self).__init__(learning_rate) self.momentum = momentum self.decay = decay self.epsilon = epsilon - def _create_optimizer(self, global_step): + def _create_tf_optimizer(self, global_step): + import tensorflow as tf if isinstance(self.learning_rate, LearningRateSchedule): - learning_rate = self.learning_rate._create_tensor(global_step) + learning_rate = self.learning_rate._create_tf_tensor(global_step) else: learning_rate = self.learning_rate return tf.keras.optimizers.RMSprop( @@ -117,11 +228,20 @@ class RMSProp(Optimizer): rho=self.decay, epsilon=self.epsilon) + def _create_pytorch_optimizer(self, params): + import torch + if isinstance(self.learning_rate, LearningRateSchedule): + lr = self.learning_rate.initial_rate + else: + lr = self.learning_rate + return torch.optim.RMSprop( + params, lr, alpha=self.decay, eps=self.epsilon, momentum=self.momentum) + class GradientDescent(Optimizer): """The gradient descent optimization algorithm.""" - def __init__(self, learning_rate=0.001): + def __init__(self, learning_rate: Union[float, LearningRateSchedule] = 0.001): """Construct a gradient descent optimizer. 
Parameters @@ -129,20 +249,33 @@ class GradientDescent(Optimizer): learning_rate: float or LearningRateSchedule the learning rate to use for optimization """ - self.learning_rate = learning_rate + super(GradientDescent, self).__init__(learning_rate) - def _create_optimizer(self, global_step): + def _create_tf_optimizer(self, global_step): + import tensorflow as tf if isinstance(self.learning_rate, LearningRateSchedule): - learning_rate = self.learning_rate._create_tensor(global_step) + learning_rate = self.learning_rate._create_tf_tensor(global_step) else: learning_rate = self.learning_rate return tf.keras.optimizers.SGD(learning_rate=learning_rate) + def _create_pytorch_optimizer(self, params): + import torch + if isinstance(self.learning_rate, LearningRateSchedule): + lr = self.learning_rate.initial_rate + else: + lr = self.learning_rate + return torch.optim.SGD(params, lr) + class ExponentialDecay(LearningRateSchedule): """A learning rate that decreases exponentially with the number of training steps.""" - def __init__(self, initial_rate, decay_rate, decay_steps, staircase=True): + def __init__(self, + initial_rate: float, + decay_rate: float, + decay_steps: int, + staircase: bool = True): """Create an exponentially decaying learning rate. The learning rate starts as initial_rate. Every decay_steps training steps, it is multiplied by decay_rate. 
@@ -164,18 +297,31 @@ class ExponentialDecay(LearningRateSchedule): self.decay_steps = decay_steps self.staircase = staircase - def _create_tensor(self, global_step): + def _create_tf_tensor(self, global_step): + import tensorflow as tf return tf.keras.optimizers.schedules.ExponentialDecay( initial_learning_rate=self.initial_rate, decay_rate=self.decay_rate, decay_steps=self.decay_steps, staircase=self.staircase)(global_step) + def _create_pytorch_schedule(self, optimizer): + import torch + if self.staircase: + return torch.optim.lr_scheduler.StepLR(optimizer, self.decay_steps, + self.decay_rate) + return torch.optim.lr_scheduler.ExponentialLR( + optimizer, math.pow(self.decay_rate, 1 / self.decay_steps)) + class PolynomialDecay(LearningRateSchedule): """A learning rate that decreases from an initial value to a final value over a fixed number of training steps.""" - def __init__(self, initial_rate, final_rate, decay_steps, power=1.0): + def __init__(self, + initial_rate: float, + final_rate: float, + decay_steps: int, + power: float = 1.0): """Create a smoothly decaying learning rate. The learning rate starts as initial_rate. It smoothly decreases to final_rate over decay_steps training steps. 
@@ -198,23 +344,34 @@ class PolynomialDecay(LearningRateSchedule): self.decay_steps = decay_steps self.power = power - def _create_tensor(self, global_step): + def _create_tf_tensor(self, global_step): + import tensorflow as tf return tf.keras.optimizers.schedules.PolynomialDecay( initial_learning_rate=self.initial_rate, end_learning_rate=self.final_rate, decay_steps=self.decay_steps, power=self.power)(global_step) + def _create_pytorch_schedule(self, optimizer): + + def f(step): + t = min(step, self.decay_steps) / self.decay_steps + return ((self.initial_rate - self.final_rate) * + (1 - t)**self.power) + self.final_rate + + import torch + return torch.optim.lr_scheduler.LambdaLR(optimizer, f) + class LinearCosineDecay(LearningRateSchedule): """Applies linear cosine decay to the learning rate""" def __init__(self, - initial_rate, - decay_steps, - alpha=0.0, - beta=0.001, - num_periods=0.5): + initial_rate: float, + decay_steps: int, + alpha: float = 0.0, + beta: float = 0.001, + num_periods: float = 0.5): """ Parameters ---------- @@ -231,7 +388,8 @@ class LinearCosineDecay(LearningRateSchedule): self.beta = beta self.num_periods = num_periods - def _create_tensor(self, global_step): + def _create_tf_tensor(self, global_step): + import tensorflow as tf return tf.compat.v1.train.linear_cosine_decay( learning_rate=self.initial_rate, global_step=global_step, @@ -239,3 +397,15 @@ class LinearCosineDecay(LearningRateSchedule): alpha=self.alpha, beta=self.beta, num_periods=self.num_periods) + + def _create_pytorch_schedule(self, optimizer): + + def f(step): + t = min(step, self.decay_steps) / self.decay_steps + linear_decay = 1 - t + cosine_decay = 0.5 * (1 + math.cos(math.pi * 2 * self.num_periods * t)) + decayed = (self.alpha + linear_decay) * cosine_decay + self.beta + return self.initial_rate * decayed + + import torch + return torch.optim.lr_scheduler.LambdaLR(optimizer, f) diff --git a/deepchem/models/progressive_multitask.py 
b/deepchem/models/progressive_multitask.py index b8c751ee8c76b3327ea49415f63f97d066c4081a..4131000bf0326f39d194076e3af71a4e02815926 100644 --- a/deepchem/models/progressive_multitask.py +++ b/deepchem/models/progressive_multitask.py @@ -1,9 +1,12 @@ import time import numpy as np import tensorflow as tf -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection -from deepchem.utils.save import log +import logging from deepchem.metrics import to_one_hot from deepchem.metrics import from_one_hot from deepchem.models import KerasModel, layers @@ -11,16 +14,22 @@ from deepchem.models.losses import L2Loss, SparseSoftmaxCrossEntropy from deepchem.models.keras_model import _StandardLoss from tensorflow.keras.layers import Input, Dense, Dropout, ReLU, Concatenate, Add, Multiply, Softmax +logger = logging.getLogger(__name__) + class ProgressiveMultitaskRegressor(KerasModel): """Implements a progressive multitask neural network for regression. - Progressive Networks: https://arxiv.org/pdf/1606.04671v3.pdf - Progressive networks allow for multitask learning where each task gets a new column of weights. As a result, there is no exponential forgetting where previous tasks are ignored. + References + ---------- + See [1]_ for a full description of the progressive architecture + + .. [1] Rusu, Andrei A., et al. "Progressive neural networks." arXiv preprint + arXiv:1606.04671 (2016). 
""" def __init__(self, @@ -84,15 +93,15 @@ class ProgressiveMultitaskRegressor(KerasModel): self.n_outputs = n_outputs n_layers = len(layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): self.weight_init_stddevs = [weight_init_stddevs] * n_layers - if not isinstance(alpha_init_stddevs, collections.Sequence): + if not isinstance(alpha_init_stddevs, SequenceCollection): self.alpha_init_stddevs = [alpha_init_stddevs] * n_layers - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): self.bias_init_consts = [bias_init_consts] * n_layers - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): self.dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): self.activation_fns = [activation_fns] * n_layers # Add the input features. diff --git a/deepchem/models/robust_multitask.py b/deepchem/models/robust_multitask.py index f7bf16f5d2493f992ae26d57594f3449a4fe3d4f..4b01ae071a8eed2f1787e895014a3f7157f5bfd2 100644 --- a/deepchem/models/robust_multitask.py +++ b/deepchem/models/robust_multitask.py @@ -1,18 +1,34 @@ import numpy as np import tensorflow as tf -import collections +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection +import logging +import deepchem as dc from deepchem.metrics import to_one_hot from deepchem.models import KerasModel from deepchem.models.layers import Stack from deepchem.models.losses import SoftmaxCrossEntropy, L2Loss +from typing import Tuple, Iterable, List + +logger = logging.getLogger(__name__) class RobustMultitaskClassifier(KerasModel): """Implements a neural network for robust multitasking. - Key idea is to have bypass layers that feed directly from features to task - output. 
Hopefully will allow tasks to route around bad multitasking. + The key idea of this model is to have bypass layers that feed + directly from features to task output. This might provide some + flexibility toroute around challenges in multitasking with + destructive interference. + + References + ---------- + This technique was introduced in [1]_ + + .. [1] Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. """ @@ -78,13 +94,13 @@ class RobustMultitaskClassifier(KerasModel): self.n_features = n_features self.n_classes = n_classes n_layers = len(layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * n_layers - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * n_layers - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers if weight_decay_penalty != 0.0: if weight_decay_penalty_type == 'l1': @@ -95,12 +111,12 @@ class RobustMultitaskClassifier(KerasModel): regularizer = None n_bypass_layers = len(bypass_layer_sizes) - if not isinstance(bypass_weight_init_stddevs, collections.Sequence): + if not isinstance(bypass_weight_init_stddevs, SequenceCollection): bypass_weight_init_stddevs = [bypass_weight_init_stddevs ] * n_bypass_layers - if not isinstance(bypass_bias_init_consts, collections.Sequence): + if not isinstance(bypass_bias_init_consts, SequenceCollection): bypass_bias_init_consts = [bypass_bias_init_consts] * n_bypass_layers - if not isinstance(bypass_dropouts, collections.Sequence): + if not 
isinstance(bypass_dropouts, SequenceCollection): bypass_dropouts = [bypass_dropouts] * n_bypass_layers bypass_activation_fns = [activation_fns[0]] * n_bypass_layers @@ -194,8 +210,14 @@ class RobustMultitaskClassifier(KerasModel): class RobustMultitaskRegressor(KerasModel): """Implements a neural network for robust multitasking. - Key idea is to have bypass layers that feed directly from features to task - output. Hopefully will allow tasks to route around bad multitasking. + The key idea of this model is to have bypass layers that feed + directly from features to task output. This might provide some + flexibility toroute around challenges in multitasking with + destructive interference. + + References + ---------- + .. [1] Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. """ @@ -257,13 +279,13 @@ class RobustMultitaskRegressor(KerasModel): self.n_tasks = n_tasks self.n_features = n_features n_layers = len(layer_sizes) - if not isinstance(weight_init_stddevs, collections.Sequence): + if not isinstance(weight_init_stddevs, SequenceCollection): weight_init_stddevs = [weight_init_stddevs] * n_layers - if not isinstance(bias_init_consts, collections.Sequence): + if not isinstance(bias_init_consts, SequenceCollection): bias_init_consts = [bias_init_consts] * n_layers - if not isinstance(dropouts, collections.Sequence): + if not isinstance(dropouts, SequenceCollection): dropouts = [dropouts] * n_layers - if not isinstance(activation_fns, collections.Sequence): + if not isinstance(activation_fns, SequenceCollection): activation_fns = [activation_fns] * n_layers if weight_decay_penalty != 0.0: if weight_decay_penalty_type == 'l1': @@ -274,12 +296,12 @@ class RobustMultitaskRegressor(KerasModel): regularizer = None n_bypass_layers = len(bypass_layer_sizes) - if not isinstance(bypass_weight_init_stddevs, collections.Sequence): + if not isinstance(bypass_weight_init_stddevs, 
SequenceCollection): bypass_weight_init_stddevs = [bypass_weight_init_stddevs ] * n_bypass_layers - if not isinstance(bypass_bias_init_consts, collections.Sequence): + if not isinstance(bypass_bias_init_consts, SequenceCollection): bypass_bias_init_consts = [bypass_bias_init_consts] * n_bypass_layers - if not isinstance(bypass_dropouts, collections.Sequence): + if not isinstance(bypass_dropouts, SequenceCollection): bypass_dropouts = [bypass_dropouts] * n_bypass_layers bypass_activation_fns = [activation_fns[0]] * n_bypass_layers @@ -331,6 +353,21 @@ class RobustMultitaskRegressor(KerasModel): task_out = tf.keras.layers.Dense(1)(task_layer) task_outputs.append(task_out) - outputs = tf.keras.layers.Concatenate(axis=1)(task_outputs) + outputs = Stack(axis=1)(task_outputs) model = tf.keras.Model(inputs=mol_features, outputs=outputs) - super(RobustMultitaskRegressor, self).__init__(model, L2Loss(), **kwargs) + super(RobustMultitaskRegressor, self).__init__( + model, L2Loss(), output_types=['prediction'], **kwargs) + + def default_generator( + self, + dataset: dc.data.Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: + for epoch in range(epochs): + for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( + batch_size=self.batch_size, + deterministic=deterministic, + pad_batches=pad_batches): + yield ([X_b], [y_b], [w_b]) diff --git a/deepchem/models/sklearn_models/__init__.py b/deepchem/models/sklearn_models/__init__.py index dfcbe28209683d39698ccc46200173e3304da306..944bf3e593485d1ffadbeaf43d72665da3484601 100644 --- a/deepchem/models/sklearn_models/__init__.py +++ b/deepchem/models/sklearn_models/__init__.py @@ -1,102 +1,2 @@ -""" -Code for processing datasets using scikit-learn. 
-""" -import numpy as np -from sklearn.cross_decomposition import PLSRegression -from sklearn.ensemble import RandomForestClassifier -from sklearn.ensemble import RandomForestRegressor -from sklearn.gaussian_process import GaussianProcessRegressor -from sklearn.linear_model import LogisticRegression, BayesianRidge -from sklearn.linear_model import LinearRegression -from sklearn.linear_model import RidgeCV -from sklearn.linear_model import LassoCV -from sklearn.linear_model import ElasticNetCV -from sklearn.linear_model import LassoLarsCV -from deepchem.models import Model -from deepchem.utils.save import load_from_disk -from deepchem.utils.save import save_to_disk - -NON_WEIGHTED_MODELS = [ - LogisticRegression, PLSRegression, GaussianProcessRegressor, ElasticNetCV, - LassoCV, BayesianRidge -] - - -class SklearnModel(Model): - """ - Abstract base class for different ML models. - """ - - def __init__(self, - model_instance=None, - model_dir=None, - verbose=True, - **kwargs): - """ - Parameters - ---------- - model_instance: sklearn model - model_dir: str - verbose: bool - kwargs: dict - kwargs['use_weights'] is a bool which determines if we pass weights into - self.model_instance.fit() - """ - super(SklearnModel, self).__init__(model_instance, model_dir, verbose, - **kwargs) - if 'use_weights' in kwargs: - self.use_weights = kwargs['use_weights'] - else: - self.use_weights = True - for model_instance in NON_WEIGHTED_MODELS: - if isinstance(self.model_instance, model_instance): - self.use_weights = False - - def fit(self, dataset, **kwargs): - """ - Fits SKLearn model to data. - """ - X = dataset.X - y = np.squeeze(dataset.y) - w = np.squeeze(dataset.w) - # Logistic regression doesn't support weights - if self.use_weights: - self.model_instance.fit(X, y, w) - return - self.model_instance.fit(X, y) - - def predict_on_batch(self, X, pad_batch=False): - """ - Makes predictions on batch of data. 
- - Parameters - ---------- - X: np.ndarray - Features - pad_batch: bool, optional - Ignored for Sklearn Model. Only used for Tensorflow models - with rigid batch-size requirements. - """ - try: - return self.model_instance.predict_proba(X) - except AttributeError: - return self.model_instance.predict(X) - - def predict(self, X, transformers=[]): - """ - Makes predictions on dataset. - """ - return super(SklearnModel, self).predict(X, transformers) - - def save(self): - """Saves sklearn model to disk using joblib.""" - save_to_disk(self.model_instance, self.get_model_filename(self.model_dir)) - - def reload(self): - """Loads sklearn model from joblib file on disk.""" - self.model_instance = load_from_disk( - Model.get_model_filename(self.model_dir)) - - def get_num_tasks(self): - """Number of tasks for this model. Defaults to 1""" - return 1 +# flake8: ignore +from deepchem.models.sklearn_models.sklearn_model import SklearnModel diff --git a/deepchem/models/sklearn_models/sklearn_model.py b/deepchem/models/sklearn_models/sklearn_model.py new file mode 100644 index 0000000000000000000000000000000000000000..df0b61ac254b4383a42dc0bc773ba61d3c590332 --- /dev/null +++ b/deepchem/models/sklearn_models/sklearn_model.py @@ -0,0 +1,141 @@ +""" +Code for processing datasets using scikit-learn. 
+""" +import logging +from typing import List, Optional + +import numpy as np +from sklearn.base import BaseEstimator +from sklearn.cross_decomposition import PLSRegression +from sklearn.gaussian_process import GaussianProcessRegressor +from sklearn.linear_model import LogisticRegression, BayesianRidge +from sklearn.linear_model import LassoCV +from sklearn.linear_model import ElasticNetCV + +from deepchem.models import Model +from deepchem.data import Dataset +from deepchem.trans import Transformer +from deepchem.utils.data_utils import load_from_disk, save_to_disk + +NON_WEIGHTED_MODELS = [ + LogisticRegression, PLSRegression, GaussianProcessRegressor, ElasticNetCV, + LassoCV, BayesianRidge +] + +logger = logging.getLogger(__name__) + + +class SklearnModel(Model): + """Wrapper class that wraps scikit-learn models as DeepChem models. + + When you're working with scikit-learn and DeepChem, at times it can + be useful to wrap a scikit-learn model as a DeepChem model. The + reason for this might be that you want to do an apples-to-apples + comparison of a scikit-learn model to another DeepChem model, or + perhaps you want to use the hyperparameter tuning capabilities in + `dc.hyper`. The `SklearnModel` class provides a wrapper around scikit-learn + models that allows scikit-learn models to be trained on `Dataset` objects + and evaluated with the same metrics as other DeepChem models. + + Notes + ----- + All `SklearnModels` perform learning solely in memory. This means that it + may not be possible to train `SklearnModel` on large `Dataset`s. + """ + + def __init__(self, + model: BaseEstimator, + model_dir: Optional[str] = None, + **kwargs): + """ + Parameters + ---------- + model: BaseEstimator + The model instance which inherits a scikit-learn `BaseEstimator` Class. + model_dir: str, optional (default None) + If specified the model will be stored in this directory. Else, a + temporary directory will be used. 
+ model_instance: BaseEstimator (DEPRECATED) + The model instance which inherits a scikit-learn `BaseEstimator` Class. + kwargs: dict + kwargs['use_weights'] is a bool which determines if we pass weights into + self.model.fit(). + """ + if 'model_instance' in kwargs: + model_instance = kwargs['model_instance'] + if model is not None: + raise ValueError( + "Can not use both model and model_instance argument at the same time." + ) + logger.warning( + "model_instance argument is deprecated and will be removed in a future version of DeepChem." + "Use model argument instead.") + model = model_instance + + super(SklearnModel, self).__init__(model, model_dir, **kwargs) + if 'use_weights' in kwargs: + self.use_weights = kwargs['use_weights'] + else: + self.use_weights = True + for model in NON_WEIGHTED_MODELS: + if isinstance(self.model, model): + self.use_weights = False + + def fit(self, dataset: Dataset) -> None: + """Fits scikit-learn model to data. + + Parameters + ---------- + dataset: Dataset + The `Dataset` to train this model on. + """ + X = dataset.X + y = np.squeeze(dataset.y) + w = np.squeeze(dataset.w) + # Some scikit-learn models don't use weights. + if self.use_weights: + self.model.fit(X, y, w) + return + self.model.fit(X, y) + + def predict_on_batch(self, X: np.ndarray) -> np.ndarray: + """Makes predictions on batch of data. + + Parameters + ---------- + X: np.ndarray + A numpy array of features. + + Returns + ------- + np.ndarray + The value is a return value of `predict_proba` or `predict` method + of the scikit-learn model. If the scikit-learn model has both methods, + the value is always a return value of `predict_proba`. + """ + try: + return self.model.predict_proba(X) + except AttributeError: + return self.model.predict(X) + + def predict(self, X: Dataset, + transformers: List[Transformer] = []) -> np.ndarray: + """Makes predictions on dataset. + + Parameters + ---------- + dataset: Dataset + Dataset to make prediction on. 
+ transformers: List[Transformer] + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. + """ + return super(SklearnModel, self).predict(X, transformers) + + def save(self): + """Saves scikit-learn model to disk using joblib.""" + save_to_disk(self.model, self.get_model_filename(self.model_dir)) + + def reload(self): + """Loads scikit-learn model from joblib file on disk.""" + self.model = load_from_disk(self.get_model_filename(self.model_dir)) diff --git a/deepchem/models/tests/mp_is_metal.tar.gz b/deepchem/models/tests/mp_is_metal.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..861c588433721739440c5e293deeec493b2d920a Binary files /dev/null and b/deepchem/models/tests/mp_is_metal.tar.gz differ diff --git a/deepchem/models/tests/perovskite.tar.gz b/deepchem/models/tests/perovskite.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..bd35a926ff92a816e9570204eb83f6fbcfbef3b7 Binary files /dev/null and b/deepchem/models/tests/perovskite.tar.gz differ diff --git a/deepchem/models/tests/test_api.py b/deepchem/models/tests/test_api.py index 1b090cbecabbfe14612dbd27fc5eb718964c103b..a4bc027c8be111cf69dfd47c110857e8694ebb0c 100644 --- a/deepchem/models/tests/test_api.py +++ b/deepchem/models/tests/test_api.py @@ -1,185 +1,160 @@ """ Integration tests for singletask vector feature models. """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import os -import unittest -import tempfile -import shutil -import tensorflow as tf import deepchem as dc +import numpy as np from sklearn.ensemble import RandomForestRegressor -class TestAPI(unittest.TestCase): - """ - Test top-level API for ML models. 
- """ - - def test_singletask_sklearn_rf_ECFP_regression_API(self): - """Test of singletask RF ECFP regression API.""" - splittype = "scaffold" - featurizer = dc.feat.CircularFingerprint(size=1024) - tasks = ["log-solubility"] - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.ScaffoldSplitter() - train_dataset, test_dataset = splitter.train_test_split(dataset) - - transformers = [ - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - regression_metrics = [ - dc.metrics.Metric(dc.metrics.r2_score), - dc.metrics.Metric(dc.metrics.mean_squared_error), - dc.metrics.Metric(dc.metrics.mean_absolute_error) - ] - - sklearn_model = RandomForestRegressor() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on train - _ = model.evaluate(train_dataset, regression_metrics, transformers) - _ = model.evaluate(test_dataset, regression_metrics, transformers) - - def test_singletask_sklearn_rf_user_specified_regression_API(self): - """Test of singletask RF USF regression API.""" - splittype = "specified" - featurizer = dc.feat.UserDefinedFeaturizer( - ["user-specified1", "user-specified2"]) - tasks = ["log-solubility"] - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "user_specified_example.csv") - loader = dc.data.UserCSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.SpecifiedSplitter(input_file, "split") - train_dataset, test_dataset = splitter.train_test_split(dataset) - - transformers = [ - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - for dataset in [train_dataset, 
test_dataset]: - for transformer in transformers: - dataset = transformer.transform(dataset) - - regression_metrics = [ - dc.metrics.Metric(dc.metrics.r2_score), - dc.metrics.Metric(dc.metrics.mean_squared_error), - dc.metrics.Metric(dc.metrics.mean_absolute_error) - ] - - sklearn_model = RandomForestRegressor() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on train/test - _ = model.evaluate(train_dataset, regression_metrics, transformers) - _ = model.evaluate(test_dataset, regression_metrics, transformers) - - def test_singletask_sklearn_rf_RDKIT_descriptor_regression_API(self): - """Test of singletask RF RDKIT-descriptor regression API.""" - splittype = "scaffold" - featurizer = dc.feat.RDKitDescriptors() - tasks = ["log-solubility"] - - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "example.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.ScaffoldSplitter() - train_dataset, test_dataset = splitter.train_test_split(dataset) - - transformers = [ - dc.trans.NormalizationTransformer( - transform_X=True, dataset=train_dataset), - dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset), - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - for dataset in [train_dataset, test_dataset]: - for transformer in transformers: - dataset = transformer.transform(dataset) - - regression_metrics = [ - dc.metrics.Metric(dc.metrics.r2_score), - dc.metrics.Metric(dc.metrics.mean_squared_error), - dc.metrics.Metric(dc.metrics.mean_absolute_error) - ] - - sklearn_model = RandomForestRegressor() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on train/test - _ = model.evaluate(train_dataset, regression_metrics, 
transformers) - _ = model.evaluate(test_dataset, regression_metrics, transformers) - - def test_singletask_tg_mlp_ECFP_classification_API(self): - """Test of TensorGraph singletask deepchem classification API.""" - n_features = 1024 - featurizer = dc.feat.CircularFingerprint(size=n_features) - - tasks = ["outcome"] - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "example_classification.csv") - - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - splitter = dc.splits.ScaffoldSplitter() - train_dataset, test_dataset = splitter.train_test_split(dataset) - - transformers = [ - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - - for dataset in [train_dataset, test_dataset]: - for transformer in transformers: - dataset = transformer.transform(dataset) - - classification_metrics = [ - dc.metrics.Metric(dc.metrics.roc_auc_score), - dc.metrics.Metric(dc.metrics.matthews_corrcoef), - dc.metrics.Metric(dc.metrics.recall_score), - dc.metrics.Metric(dc.metrics.accuracy_score) - ] - - model = dc.models.MultitaskClassifier(len(tasks), n_features) - - # Test Parameter getting and setting - param, value = 'weight_decay_penalty_type', 'l2' - assert model.get_params()[param] is None - model.set_params(**{param: value}) - assert model.get_params()[param] == value - - # Fit trained model - model.fit(train_dataset) - - # Eval model on train/test - _ = model.evaluate(train_dataset, classification_metrics, transformers) - _ = model.evaluate(test_dataset, classification_metrics, transformers) +def test_singletask_sklearn_rf_ECFP_regression_API(): + """Test of singletask RF ECFP regression API.""" + X = np.random.rand(100, 5) + y = np.random.rand(100,) + dataset = dc.data.NumpyDataset(X, y) + + splitter = dc.splits.RandomSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + + transformer = 
dc.trans.NormalizationTransformer( + transform_y=True, dataset=train_dataset) + train_dataset = transformer.transform(train_dataset) + test_dataset = transformer.transform(test_dataset) + + regression_metrics = [ + dc.metrics.Metric(dc.metrics.r2_score), + dc.metrics.Metric(dc.metrics.mean_squared_error), + dc.metrics.Metric(dc.metrics.mean_absolute_error) + ] + + sklearn_model = RandomForestRegressor() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on train + _ = model.evaluate(train_dataset, regression_metrics, [transformer]) + _ = model.evaluate(test_dataset, regression_metrics, [transformer]) + + +def test_singletask_sklearn_rf_user_specified_regression_API(): + """Test of singletask RF USF regression API.""" + featurizer = dc.feat.UserDefinedFeaturizer( + ["user-specified1", "user-specified2"]) + tasks = ["log-solubility"] + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, "user_specified_example.csv") + loader = dc.data.UserCSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + splitter = dc.splits.RandomSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + + transformers = [ + dc.trans.NormalizationTransformer( + transform_y=True, dataset=train_dataset) + ] + for dataset in [train_dataset, test_dataset]: + for transformer in transformers: + dataset = transformer.transform(dataset) + + regression_metrics = [ + dc.metrics.Metric(dc.metrics.r2_score), + dc.metrics.Metric(dc.metrics.mean_squared_error), + dc.metrics.Metric(dc.metrics.mean_absolute_error) + ] + + sklearn_model = RandomForestRegressor() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on train/test + _ = model.evaluate(train_dataset, regression_metrics, transformers) + _ = 
model.evaluate(test_dataset, regression_metrics, transformers) + + +def test_singletask_sklearn_rf_RDKIT_descriptor_regression_API(): + """Test of singletask RF RDKIT-descriptor regression API.""" + splittype = "scaffold" + featurizer = dc.feat.RDKitDescriptors() + tasks = ["log-solubility"] + + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, "example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + splitter = dc.splits.ScaffoldSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + + transformers = [ + dc.trans.NormalizationTransformer( + transform_X=True, dataset=train_dataset), + dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset), + dc.trans.NormalizationTransformer( + transform_y=True, dataset=train_dataset) + ] + for dataset in [train_dataset, test_dataset]: + for transformer in transformers: + dataset = transformer.transform(dataset) + + regression_metrics = [ + dc.metrics.Metric(dc.metrics.r2_score), + dc.metrics.Metric(dc.metrics.mean_squared_error), + dc.metrics.Metric(dc.metrics.mean_absolute_error) + ] + + sklearn_model = RandomForestRegressor() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on train/test + _ = model.evaluate(train_dataset, regression_metrics, transformers) + _ = model.evaluate(test_dataset, regression_metrics, transformers) + + +def test_singletask_mlp_ECFP_classification_API(): + """Test of singletask MLP classification API.""" + np.random.seed(123) + + X = np.random.rand(100, 5) + y = np.random.randint(2, size=(100,)) + dataset = dc.data.NumpyDataset(X, y) + + splitter = dc.splits.RandomSplitter() + train_dataset, test_dataset = splitter.train_test_split(dataset) + + transformers = [] + + classification_metrics = [ + dc.metrics.Metric(dc.metrics.roc_auc_score), + 
dc.metrics.Metric(dc.metrics.prc_auc_score), + dc.metrics.Metric(dc.metrics.matthews_corrcoef), + dc.metrics.Metric(dc.metrics.recall_score), + dc.metrics.Metric(dc.metrics.accuracy_score), + dc.metrics.Metric(dc.metrics.balanced_accuracy_score), + dc.metrics.Metric(dc.metrics.jaccard_score), + dc.metrics.Metric(dc.metrics.f1_score), + dc.metrics.Metric(dc.metrics.pixel_error), + dc.metrics.Metric(dc.metrics.kappa_score), + dc.metrics.Metric(dc.metrics.bedroc_score), + ] + + model = dc.models.MultitaskClassifier(1, 5) + + # Fit trained model + model.fit(train_dataset) + + # Eval model on train/test + _ = model.evaluate(train_dataset, classification_metrics, transformers) + _ = model.evaluate(test_dataset, classification_metrics, transformers) diff --git a/deepchem/models/tests/test_atomic_conv.py b/deepchem/models/tests/test_atomic_conv.py index 59375eba50d381fd119b7c33acc48dc59687f443..3430b6de8fff48e4fa5be3dfbfdebac481af9af0 100644 --- a/deepchem/models/tests/test_atomic_conv.py +++ b/deepchem/models/tests/test_atomic_conv.py @@ -13,7 +13,7 @@ import unittest import numpy as np from deepchem.models import atomic_conv from deepchem.data import NumpyDataset -from deepchem.feat.atomic_coordinates import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat import ComplexNeighborListFragmentAtomicCoordinates class TestAtomicConv(unittest.TestCase): @@ -123,8 +123,7 @@ class TestAtomicConv(unittest.TestCase): neighbor_cutoff) # arbitrary label labels = np.array([0]) - features, _ = complex_featurizer.featurize_complexes([ligand_file], - [protein_file]) + features, _ = complex_featurizer.featurize([ligand_file], [protein_file]) dataset = deepchem.data.DiskDataset.from_numpy(features, labels) batch_size = 1 diff --git a/deepchem/models/tests/test_attentivefp.py b/deepchem/models/tests/test_attentivefp.py new file mode 100644 index 0000000000000000000000000000000000000000..cc9d18cc9879e6b5bfef974e575e63802b223cb7 --- /dev/null +++ 
b/deepchem/models/tests/test_attentivefp.py @@ -0,0 +1,95 @@ +import unittest +import tempfile + +import numpy as np + +import deepchem as dc +from deepchem.feat import MolGraphConvFeaturizer +from deepchem.models import AttentiveFPModel +from deepchem.models.tests.test_graph_models import get_dataset + +try: + import dgl + import dgllife + import torch + has_torch_and_dgl = True +except: + has_torch_and_dgl = False + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_attentivefp_regression(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'regression', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = AttentiveFPModel(mode='regression', n_tasks=n_tasks, batch_size=10) + + # overfit test + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.5 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_attentivefp_classification(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = AttentiveFPModel( + mode='classification', + n_tasks=n_tasks, + batch_size=10, + learning_rate=0.001) + + # overfit test + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_attentivefp_reload(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model_dir = 
tempfile.mkdtemp() + model = AttentiveFPModel( + mode='classification', + n_tasks=n_tasks, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + reloaded_model = AttentiveFPModel( + mode='classification', + n_tasks=n_tasks, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + reloaded_model.restore() + + pred_mols = ["CCCC", "CCCCCO", "CCCCC"] + X_pred = featurizer(pred_mols) + random_dataset = dc.data.NumpyDataset(X_pred) + original_pred = model.predict(random_dataset) + reload_pred = reloaded_model.predict(random_dataset) + assert np.all(original_pred == reload_pred) diff --git a/deepchem/models/tests/test_cgcnn.py b/deepchem/models/tests/test_cgcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..085e30cda1221ba15dec97de5fe15cf450ff598d --- /dev/null +++ b/deepchem/models/tests/test_cgcnn.py @@ -0,0 +1,153 @@ +import unittest +import tempfile +from os import path, remove + +import numpy as np + +from deepchem.feat import CGCNNFeaturizer +from deepchem.molnet import load_perovskite, load_mp_metallicity +from deepchem.metrics import Metric, mae_score, roc_auc_score +from deepchem.models import CGCNNModel + +try: + import dgl # noqa + import torch # noqa + has_pytorch_and_dgl = True +except: + has_pytorch_and_dgl = False + + +@unittest.skipIf(not has_pytorch_and_dgl, 'PyTorch and DGL are not installed') +def test_cgcnn_regression(): + # load datasets + current_dir = path.dirname(path.abspath(__file__)) + config = { + "reload": False, + "featurizer": CGCNNFeaturizer(), + # disable transformer + "transformers": [], + "data_dir": current_dir + } + tasks, datasets, transformers = load_perovskite(**config) + train, valid, test = datasets + + n_tasks = len(tasks) + model = CGCNNModel( + n_tasks=n_tasks, mode='regression', batch_size=4, learning_rate=0.001) + + # check train + 
model.fit(train, nb_epoch=20) + + # check predict shape + valid_preds = model.predict_on_batch(valid.X) + assert valid_preds.shape == (2, n_tasks) + test_preds = model.predict(test) + assert test_preds.shape == (3, n_tasks) + + # check overfit + regression_metric = Metric(mae_score, n_tasks=n_tasks) + scores = model.evaluate(train, [regression_metric], transformers) + assert scores[regression_metric.name] < 0.6 + + if path.exists(path.join(current_dir, 'perovskite.json')): + remove(path.join(current_dir, 'perovskite.json')) + + +@unittest.skipIf(not has_pytorch_and_dgl, 'PyTorch and DGL are not installed') +def test_cgcnn_classification(): + # load datasets + current_dir = path.dirname(path.abspath(__file__)) + config = { + "reload": False, + "featurizer": CGCNNFeaturizer(), + # disable transformer + "transformers": [], + "data_dir": current_dir + } + tasks, datasets, transformers = load_mp_metallicity(**config) + train, valid, test = datasets + + n_tasks = len(tasks) + n_classes = 2 + model = CGCNNModel( + n_tasks=n_tasks, + n_classes=n_classes, + mode='classification', + batch_size=4, + learning_rate=0.001) + + # check train + model.fit(train, nb_epoch=20) + + # check predict shape + valid_preds = model.predict_on_batch(valid.X) + assert valid_preds.shape == (2, n_classes) + test_preds = model.predict(test) + assert test_preds.shape == (3, n_classes) + + # check overfit + classification_metric = Metric(roc_auc_score, n_tasks=n_tasks) + scores = model.evaluate( + train, [classification_metric], transformers, n_classes=n_classes) + assert scores[classification_metric.name] > 0.8 + + if path.exists(path.join(current_dir, 'mp_is_metal.json')): + remove(path.join(current_dir, 'mp_is_metal.json')) + + +@unittest.skipIf(not has_pytorch_and_dgl, 'PyTorch and DGL are not installed') +def test_cgcnn_reload(): + # load datasets + current_dir = path.dirname(path.abspath(__file__)) + config = { + "reload": False, + "featurizer": CGCNNFeaturizer(), + # disable transformer + 
"transformers": [], + "data_dir": current_dir + } + tasks, datasets, transformers = load_mp_metallicity(**config) + train, valid, test = datasets + + n_tasks = len(tasks) + n_classes = 2 + model_dir = tempfile.mkdtemp() + model = CGCNNModel( + n_tasks=n_tasks, + n_classes=n_classes, + mode='classification', + model_dir=model_dir, + batch_size=4, + learning_rate=0.001) + + # check train + model.fit(train, nb_epoch=20) + + # check predict shape + valid_preds = model.predict_on_batch(valid.X) + assert valid_preds.shape == (2, n_classes) + test_preds = model.predict(test) + assert test_preds.shape == (3, n_classes) + + # check overfit + classification_metric = Metric(roc_auc_score, n_tasks=n_tasks) + scores = model.evaluate( + train, [classification_metric], transformers, n_classes=n_classes) + assert scores[classification_metric.name] > 0.8 + + # reload + reloaded_model = CGCNNModel( + n_tasks=n_tasks, + n_classes=n_classes, + mode='classification', + model_dir=model_dir, + batch_size=4, + learning_rate=0.001) + reloaded_model.restore() + + original_pred = model.predict(test) + reload_pred = reloaded_model.predict(test) + assert np.all(original_pred == reload_pred) + + if path.exists(path.join(current_dir, 'mp_is_metal.json')): + remove(path.join(current_dir, 'mp_is_metal.json')) diff --git a/deepchem/models/tests/test_chemnet_models.py b/deepchem/models/tests/test_chemnet_models.py index 3b820d277087ea73de9b58520476c1237925b0ce..628ebd24c7dbdd90362dcdb7408f4eaa1da29802 100644 --- a/deepchem/models/tests/test_chemnet_models.py +++ b/deepchem/models/tests/test_chemnet_models.py @@ -4,137 +4,139 @@ import numpy as np import tempfile import pytest - import deepchem as dc -from deepchem.data import NumpyDataset from deepchem.models import Smiles2Vec, ChemCeption -from deepchem.feat import SmilesToSeq, SmilesToImage -from deepchem.molnet.load_function.chembl25_datasets import chembl25_tasks -from deepchem.feat.smiles_featurizers import create_char_to_idx - -from flaky 
import flaky - - -@pytest.mark.skip(reason="Unknown") -class TestChemnetModel(unittest.TestCase): - - def setUp(self): - self.max_seq_len = 20 - self.data_points = 10 - self.n_tasks = 5 - - def get_dataset(self, mode="classification", featurizer="smiles2seq"): - dataset_file = os.path.join( - os.path.dirname(__file__), "chembl_25_small.csv") - - if featurizer == "smiles2seq": - max_len = 250 - pad_len = 10 - self.char_to_idx = create_char_to_idx( - dataset_file, max_len=max_len, smiles_field="smiles") - featurizer = SmilesToSeq( - char_to_idx=self.char_to_idx, max_len=max_len, pad_len=pad_len) - - elif featurizer == "smiles2img": - img_size = 80 - img_spec = "engd" - res = 0.5 - featurizer = SmilesToImage(img_size=img_size, img_spec=img_spec, res=res) - - loader = dc.data.CSVLoader( - tasks=chembl25_tasks, smiles_field='smiles', featurizer=featurizer) - dataset = loader.featurize( - input_files=[dataset_file], - shard_size=10000, - data_dir=tempfile.mkdtemp()) - - w = np.ones(shape=(self.data_points, self.n_tasks)) - - if mode == 'classification': - y = np.random.randint(0, 2, size=(self.data_points, self.n_tasks)) - metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, np.mean, mode="classification") - else: - y = np.random.normal(size=(self.data_points, self.n_tasks)) - metric = dc.metrics.Metric( - dc.metrics.mean_absolute_error, mode="regression") - - if featurizer == "smiles2seq": - dataset = dc.data.NumpyDataset( - dataset.X[:self.data_points, :self.max_seq_len], y, w, - dataset.ids[:self.data_points]) - else: - dataset = dc.data.NumpyDataset(dataset.X[:self.data_points], y, w, - dataset.ids[:self.data_points]) - +from deepchem.feat import create_char_to_idx, SmilesToSeq, SmilesToImage +from deepchem.molnet.load_function.chembl25_datasets import CHEMBL25_TASKS + + +def get_dataset(mode="classification", + featurizer="smiles2seq", + max_seq_len=20, + data_points=10, + n_tasks=5): + dataset_file = os.path.join(os.path.dirname(__file__), "chembl_25_small.csv") 
+ + if featurizer == "smiles2seq": + max_len = 250 + pad_len = 10 + char_to_idx = create_char_to_idx( + dataset_file, max_len=max_len, smiles_field="smiles") + feat = SmilesToSeq( + char_to_idx=char_to_idx, max_len=max_len, pad_len=pad_len) + + elif featurizer == "smiles2img": + img_size = 80 + img_spec = "engd" + res = 0.5 + feat = SmilesToImage(img_size=img_size, img_spec=img_spec, res=res) + + loader = dc.data.CSVLoader( + tasks=CHEMBL25_TASKS, smiles_field='smiles', featurizer=feat) + dataset = loader.create_dataset( + inputs=[dataset_file], shard_size=10000, data_dir=tempfile.mkdtemp()) + + w = np.ones(shape=(data_points, n_tasks)) + + if mode == 'classification': + y = np.random.randint(0, 2, size=(data_points, n_tasks)) + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + else: + y = np.random.normal(size=(data_points, n_tasks)) + metric = dc.metrics.Metric( + dc.metrics.mean_absolute_error, mode="regression") + + if featurizer == "smiles2seq": + dataset = dc.data.NumpyDataset(dataset.X[:data_points, :max_seq_len], y, w, + dataset.ids[:data_points]) + else: + dataset = dc.data.NumpyDataset(dataset.X[:data_points], y, w, + dataset.ids[:data_points]) + + if featurizer == "smiles2seq": + return dataset, metric, char_to_idx + else: return dataset, metric - @pytest.mark.slow - def test_smiles_to_vec_regression(self): - dataset, metric = self.get_dataset( - mode="regression", featurizer="smiles2seq") - model = Smiles2Vec( - char_to_idx=self.char_to_idx, - max_seq_len=self.max_seq_len, - use_conv=True, - n_tasks=self.n_tasks, - model_dir=None, - mode="regression") - model.fit(dataset, nb_epoch=500) - scores = model.evaluate(dataset, [metric], []) - assert all(s < 0.1 for s in scores['mean_absolute_error']) - - @pytest.mark.slow - def test_smiles_to_vec_classification(self): - dataset, metric = self.get_dataset( - mode="classification", featurizer="smiles2seq") - model = Smiles2Vec( - char_to_idx=self.char_to_idx, - 
max_seq_len=self.max_seq_len, - use_conv=True, - n_tasks=self.n_tasks, - model_dir=None, - mode="classification") - model.fit(dataset, nb_epoch=500) - scores = model.evaluate(dataset, [metric], []) - assert scores['mean-roc_auc_score'] >= 0.9 - - @pytest.mark.slow - def test_chemception_regression(self): - dataset, metric = self.get_dataset( - mode="regression", featurizer="smiles2img") - model = ChemCeption( - n_tasks=self.n_tasks, - img_spec="engd", - model_dir=None, - mode="regression") - model.fit(dataset, nb_epoch=300) - scores = model.evaluate(dataset, [metric], []) - assert all(s < 0.1 for s in scores['mean_absolute_error']) - - @pytest.mark.slow - def test_chemception_classification(self): - dataset, metric = self.get_dataset( - mode="classification", featurizer="smiles2img") - model = ChemCeption( - n_tasks=self.n_tasks, - img_spec="engd", - model_dir=None, - mode="classification") - model.fit(dataset, nb_epoch=300) - scores = model.evaluate(dataset, [metric], []) - assert scores['mean-roc_auc_score'] >= 0.9 - - @pytest.mark.slow - def test_chemception_fit_with_augmentation(self): - dataset, metric = self.get_dataset( - mode="classification", featurizer="smiles2img") - model = ChemCeption( - n_tasks=self.n_tasks, - img_spec="engd", - model_dir=None, - augment=True, - mode="classification") - model.fit(dataset, nb_epoch=300) - scores = model.evaluate(dataset, [metric], []) - assert scores['mean-roc_auc_score'] >= 0.9 + +@pytest.mark.slow +def test_chemception_regression(): + n_tasks = 5 + dataset, metric = get_dataset( + mode="regression", featurizer="smiles2img", n_tasks=n_tasks) + model = ChemCeption( + n_tasks=n_tasks, img_spec="engd", model_dir=None, mode="regression") + model.fit(dataset, nb_epoch=300) + scores = model.evaluate(dataset, [metric], []) + assert scores['mean_absolute_error'] < 0.1 + + +@pytest.mark.slow +def test_chemception_classification(): + n_tasks = 5 + dataset, metric = get_dataset( + mode="classification", featurizer="smiles2img", 
n_tasks=n_tasks) + model = ChemCeption( + n_tasks=n_tasks, img_spec="engd", model_dir=None, mode="classification") + model.fit(dataset, nb_epoch=300) + scores = model.evaluate(dataset, [metric], []) + assert scores['mean-roc_auc_score'] >= 0.9 + + +@pytest.mark.slow +def test_smiles_to_vec_regression(): + n_tasks = 5 + max_seq_len = 20 + dataset, metric, char_to_idx = get_dataset( + mode="regression", + featurizer="smiles2seq", + n_tasks=n_tasks, + max_seq_len=max_seq_len) + model = Smiles2Vec( + char_to_idx=char_to_idx, + max_seq_len=max_seq_len, + use_conv=True, + n_tasks=n_tasks, + model_dir=None, + mode="regression") + model.fit(dataset, nb_epoch=500) + scores = model.evaluate(dataset, [metric], []) + assert scores['mean_absolute_error'] < 0.1 + + +@pytest.mark.slow +def test_smiles_to_vec_classification(): + n_tasks = 5 + max_seq_len = 20 + dataset, metric, char_to_idx, = get_dataset( + mode="classification", + featurizer="smiles2seq", + n_tasks=n_tasks, + max_seq_len=max_seq_len) + model = Smiles2Vec( + char_to_idx=char_to_idx, + max_seq_len=max_seq_len, + use_conv=True, + n_tasks=n_tasks, + model_dir=None, + mode="classification") + model.fit(dataset, nb_epoch=500) + scores = model.evaluate(dataset, [metric], []) + assert scores['mean-roc_auc_score'] >= 0.9 + + +@pytest.mark.slow +def test_chemception_fit_with_augmentation(): + n_tasks = 5 + dataset, metric = get_dataset( + mode="classification", featurizer="smiles2img", n_tasks=n_tasks) + model = ChemCeption( + n_tasks=n_tasks, + img_spec="engd", + model_dir=None, + augment=True, + mode="classification") + model.fit(dataset, nb_epoch=300) + scores = model.evaluate(dataset, [metric], []) + assert scores['mean-roc_auc_score'] >= 0.9 diff --git a/deepchem/models/tests/test_gan.py b/deepchem/models/tests/test_gan.py index 959dd14ba881da260d4d07b8347c5b0561ce7950..99e1f18c2c9bbf31b11744146ebb0603b3442693 100644 --- a/deepchem/models/tests/test_gan.py +++ b/deepchem/models/tests/test_gan.py @@ -2,6 +2,7 @@ import 
deepchem as dc import numpy as np import tensorflow as tf import unittest +import tempfile from tensorflow.keras.layers import Input, Concatenate, Dense from flaky import flaky @@ -49,94 +50,204 @@ class ExampleGAN(dc.models.GAN): return tf.keras.Model(inputs=inputs, outputs=output) -class TestGAN(unittest.TestCase): - - @flaky - def test_cgan(self): - """Test fitting a conditional GAN.""" - - gan = ExampleGAN(learning_rate=0.01) - gan.fit_gan( - generate_data(gan, 500, 100), - generator_steps=0.5, - checkpoint_interval=0) - - # See if it has done a plausible job of learning the distribution. - - means = 10 * np.random.random([1000, 1]) - values = gan.predict_gan_generator(conditional_inputs=[means]) - deltas = values - means - assert abs(np.mean(deltas)) < 1.0 - assert np.std(deltas) > 1.0 - assert gan.get_global_step() == 500 - - @flaky - def test_mix_gan(self): - """Test a GAN with multiple generators and discriminators.""" - - gan = ExampleGAN(n_generators=2, n_discriminators=2, learning_rate=0.01) - gan.fit_gan( - generate_data(gan, 1000, 100), - generator_steps=0.5, - checkpoint_interval=0) - - # See if it has done a plausible job of learning the distribution. 
- - means = 10 * np.random.random([1000, 1]) - for i in range(2): - values = gan.predict_gan_generator( - conditional_inputs=[means], generator_index=i) - deltas = values - means - assert abs(np.mean(deltas)) < 1.0 - assert np.std(deltas) > 1.0 - assert gan.get_global_step() == 1000 - - @flaky - def test_wgan(self): - """Test fitting a conditional WGAN.""" - - class ExampleWGAN(dc.models.WGAN): - - def get_noise_input_shape(self): - return (2,) - - def get_data_input_shapes(self): - return [(1,)] - - def get_conditional_input_shapes(self): - return [(1,)] - - def create_generator(self): - noise_input = Input(self.get_noise_input_shape()) - conditional_input = Input(self.get_conditional_input_shapes()[0]) - inputs = [noise_input, conditional_input] - gen_in = Concatenate(axis=1)(inputs) - output = Dense(1)(gen_in) - return tf.keras.Model(inputs=inputs, outputs=output) - - def create_discriminator(self): - data_input = Input(self.get_data_input_shapes()[0]) - conditional_input = Input(self.get_conditional_input_shapes()[0]) - inputs = [data_input, conditional_input] - discrim_in = Concatenate(axis=1)(inputs) - dense = Dense(10, activation=tf.nn.relu)(discrim_in) - output = Dense(1)(dense) - return tf.keras.Model(inputs=inputs, outputs=output) - - # We have to set the gradient penalty very small because the generator's - # output is only a single number, so the default penalty would constrain - # it far too much. - - gan = ExampleWGAN(learning_rate=0.01, gradient_penalty=0.1) - gan.fit_gan( - generate_data(gan, 1000, 100), - generator_steps=0.1, - checkpoint_interval=0) - - # See if it has done a plausible job of learning the distribution. 
- - means = 10 * np.random.random([1000, 1]) - values = gan.predict_gan_generator(conditional_inputs=[means]) +@flaky +def test_cgan(): + """Test fitting a conditional GAN.""" + + gan = ExampleGAN(learning_rate=0.01) + gan.fit_gan( + generate_data(gan, 500, 100), generator_steps=0.5, checkpoint_interval=0) + + # See if it has done a plausible job of learning the distribution. + + means = 10 * np.random.random([1000, 1]) + values = gan.predict_gan_generator(conditional_inputs=[means]) + deltas = values - means + assert abs(np.mean(deltas)) < 1.0 + assert np.std(deltas) > 1.0 + assert gan.get_global_step() == 500 + + +@flaky +def test_cgan_reload(): + """Test reloading a conditional GAN.""" + + model_dir = tempfile.mkdtemp() + gan = ExampleGAN(learning_rate=0.01, model_dir=model_dir) + gan.fit_gan(generate_data(gan, 500, 100), generator_steps=0.5) + + # See if it has done a plausible job of learning the distribution. + means = 10 * np.random.random([1000, 1]) + batch_size = len(means) + noise_input = gan.get_noise_batch(batch_size=batch_size) + values = gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means]) + deltas = values - means + assert abs(np.mean(deltas)) < 1.0 + assert np.std(deltas) > 1.0 + assert gan.get_global_step() == 500 + + reloaded_gan = ExampleGAN(learning_rate=0.01, model_dir=model_dir) + reloaded_gan.restore() + reloaded_values = reloaded_gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means]) + + assert np.all(values == reloaded_values) + + +@flaky +def test_mix_gan_reload(): + """Test reloading a GAN with multiple generators and discriminators.""" + + model_dir = tempfile.mkdtemp() + gan = ExampleGAN( + n_generators=2, + n_discriminators=2, + learning_rate=0.01, + model_dir=model_dir) + gan.fit_gan(generate_data(gan, 1000, 100), generator_steps=0.5) + + reloaded_gan = ExampleGAN( + n_generators=2, + n_discriminators=2, + learning_rate=0.01, + model_dir=model_dir) + reloaded_gan.restore() + # 
See if it has done a plausible job of learning the distribution. + + means = 10 * np.random.random([1000, 1]) + batch_size = len(means) + noise_input = gan.get_noise_batch(batch_size=batch_size) + for i in range(2): + values = gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means], generator_index=i) + reloaded_values = reloaded_gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means], generator_index=i) + assert np.all(values == reloaded_values) + assert gan.get_global_step() == 1000 + # No training has been done after reload + assert reloaded_gan.get_global_step() == 0 + + +@flaky +def test_mix_gan(): + """Test a GAN with multiple generators and discriminators.""" + + gan = ExampleGAN(n_generators=2, n_discriminators=2, learning_rate=0.01) + gan.fit_gan( + generate_data(gan, 1000, 100), generator_steps=0.5, checkpoint_interval=0) + + # See if it has done a plausible job of learning the distribution. + + means = 10 * np.random.random([1000, 1]) + for i in range(2): + values = gan.predict_gan_generator( + conditional_inputs=[means], generator_index=i) deltas = values - means assert abs(np.mean(deltas)) < 1.0 assert np.std(deltas) > 1.0 + assert gan.get_global_step() == 1000 + + +@flaky +def test_wgan(): + """Test fitting a conditional WGAN.""" + + class ExampleWGAN(dc.models.WGAN): + + def get_noise_input_shape(self): + return (2,) + + def get_data_input_shapes(self): + return [(1,)] + + def get_conditional_input_shapes(self): + return [(1,)] + + def create_generator(self): + noise_input = Input(self.get_noise_input_shape()) + conditional_input = Input(self.get_conditional_input_shapes()[0]) + inputs = [noise_input, conditional_input] + gen_in = Concatenate(axis=1)(inputs) + output = Dense(1)(gen_in) + return tf.keras.Model(inputs=inputs, outputs=output) + + def create_discriminator(self): + data_input = Input(self.get_data_input_shapes()[0]) + conditional_input = Input(self.get_conditional_input_shapes()[0]) + 
inputs = [data_input, conditional_input] + discrim_in = Concatenate(axis=1)(inputs) + dense = Dense(10, activation=tf.nn.relu)(discrim_in) + output = Dense(1)(dense) + return tf.keras.Model(inputs=inputs, outputs=output) + + # We have to set the gradient penalty very small because the generator's + # output is only a single number, so the default penalty would constrain + # it far too much. + + gan = ExampleWGAN(learning_rate=0.01, gradient_penalty=0.1) + gan.fit_gan(generate_data(gan, 1000, 100), generator_steps=0.1) + + # See if it has done a plausible job of learning the distribution. + + means = 10 * np.random.random([1000, 1]) + values = gan.predict_gan_generator(conditional_inputs=[means]) + deltas = values - means + assert abs(np.mean(deltas)) < 1.0 + assert np.std(deltas) > 1.0 + + +@flaky +def test_wgan_reload(): + """Test fitting a conditional WGAN.""" + + class ExampleWGAN(dc.models.WGAN): + + def get_noise_input_shape(self): + return (2,) + + def get_data_input_shapes(self): + return [(1,)] + + def get_conditional_input_shapes(self): + return [(1,)] + + def create_generator(self): + noise_input = Input(self.get_noise_input_shape()) + conditional_input = Input(self.get_conditional_input_shapes()[0]) + inputs = [noise_input, conditional_input] + gen_in = Concatenate(axis=1)(inputs) + output = Dense(1)(gen_in) + return tf.keras.Model(inputs=inputs, outputs=output) + + def create_discriminator(self): + data_input = Input(self.get_data_input_shapes()[0]) + conditional_input = Input(self.get_conditional_input_shapes()[0]) + inputs = [data_input, conditional_input] + discrim_in = Concatenate(axis=1)(inputs) + dense = Dense(10, activation=tf.nn.relu)(discrim_in) + output = Dense(1)(dense) + return tf.keras.Model(inputs=inputs, outputs=output) + + # We have to set the gradient penalty very small because the generator's + # output is only a single number, so the default penalty would constrain + # it far too much. 
+ + model_dir = tempfile.mkdtemp() + gan = ExampleWGAN( + learning_rate=0.01, gradient_penalty=0.1, model_dir=model_dir) + gan.fit_gan(generate_data(gan, 1000, 100), generator_steps=0.1) + + reloaded_gan = ExampleWGAN( + learning_rate=0.01, gradient_penalty=0.1, model_dir=model_dir) + reloaded_gan.restore() + + # See if it has done a plausible job of learning the distribution. + means = 10 * np.random.random([1000, 1]) + batch_size = len(means) + noise_input = gan.get_noise_batch(batch_size=batch_size) + values = gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means]) + reloaded_values = reloaded_gan.predict_gan_generator( + noise_input=noise_input, conditional_inputs=[means]) + assert np.all(values == reloaded_values) diff --git a/deepchem/models/tests/test_gat.py b/deepchem/models/tests/test_gat.py new file mode 100644 index 0000000000000000000000000000000000000000..155349f7958f400415854be6e985020797eec76d --- /dev/null +++ b/deepchem/models/tests/test_gat.py @@ -0,0 +1,103 @@ +import unittest +import tempfile + +import numpy as np + +import deepchem as dc +from deepchem.feat import MolGraphConvFeaturizer +from deepchem.models import GATModel +from deepchem.models.tests.test_graph_models import get_dataset + +try: + import dgl + import dgllife + import torch + has_torch_and_dgl = True +except: + has_torch_and_dgl = False + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gat_regression(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 'regression', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = GATModel( + mode='regression', + n_tasks=n_tasks, + number_atom_features=30, + batch_size=10, + learning_rate=0.001) + + # overfit test + model.fit(dataset, nb_epoch=500) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.5 + + 
+@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gat_classification(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = GATModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + batch_size=10, + learning_rate=0.001) + + # overfit test + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gat_reload(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model_dir = tempfile.mkdtemp() + model = GATModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + reloaded_model = GATModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + reloaded_model.restore() + + pred_mols = ["CCCC", "CCCCCO", "CCCCC"] + X_pred = featurizer(pred_mols) + random_dataset = dc.data.NumpyDataset(X_pred) + original_pred = model.predict(random_dataset) + reload_pred = reloaded_model.predict(random_dataset) + assert np.all(original_pred == reload_pred) diff --git a/deepchem/models/tests/test_gbdt_model.py b/deepchem/models/tests/test_gbdt_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c695427bf5c2daea0fb6682e0a1600ca3be576f3 --- /dev/null +++ 
b/deepchem/models/tests/test_gbdt_model.py @@ -0,0 +1,267 @@ +""" +Tests to make sure deepchem models can fit models on easy datasets. +""" + +import tempfile + +import numpy as np +import xgboost +import lightgbm +from sklearn.datasets import load_diabetes, load_digits +from sklearn.model_selection import train_test_split + +import deepchem as dc + + +def test_singletask_regression_with_xgboost(): + np.random.seed(123) + + # prepare dataset + dataset = load_diabetes() + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + params = {'early_stopping_rounds': 25} + + # xgboost test + xgb_model = xgboost.XGBRegressor( + n_estimators=50, random_state=123, verbose=False) + model = dc.models.GBDTModel(xgb_model, **params) + # fit trained model + model.fit(train_dataset) + model.save() + # eval model on test + scores = model.evaluate(test_dataset, [regression_metric]) + assert scores[regression_metric.name] < 55 + + +def test_singletask_regression_with_lightgbm(): + np.random.seed(123) + + # prepare dataset + dataset = load_diabetes() + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + params = {'early_stopping_rounds': 25} + + # lightgbm test + lgbm_model = lightgbm.LGBMRegressor( + n_estimators=50, random_state=123, silent=True) + model = dc.models.GBDTModel(lgbm_model, **params) + # fit trained model + model.fit(train_dataset) + model.save() + # eval model on test + scores = model.evaluate(test_dataset, 
[regression_metric]) + assert scores[regression_metric.name] < 55 + + +def test_multitask_regression_with_xgboost(): + np.random.seed(123) + + # prepare dataset + n_tasks = 4 + tasks = range(n_tasks) + dataset = load_diabetes() + X, y = dataset.data, dataset.target + y = np.reshape(y, (len(y), 1)) + y = np.hstack([y] * n_tasks) + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) + test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + params = {'early_stopping_rounds': 25} + + # xgboost test + def xgboost_builder(model_dir): + xgb_model = xgboost.XGBRegressor(n_estimators=50, seed=123, verbose=False) + return dc.models.GBDTModel(xgb_model, model_dir, **params) + + model = dc.models.SingletaskToMultitask(tasks, xgboost_builder) + # fit trained model + model.fit(train_dataset) + model.save() + # eval model on test + scores = model.evaluate(test_dataset, [regression_metric]) + score = scores[regression_metric.name] + assert score < 55 + + +def test_multitask_regression_with_lightgbm(): + np.random.seed(123) + + # prepare dataset + n_tasks = 4 + tasks = range(n_tasks) + dataset = load_diabetes() + X, y = dataset.data, dataset.target + y = np.reshape(y, (len(y), 1)) + y = np.hstack([y] * n_tasks) + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) + test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + params = {'early_stopping_rounds': 25} + + # lightgbm test + def lightgbm_builder(model_dir): + lgbm_model = lightgbm.LGBMRegressor(n_estimators=50, seed=123, silent=False) + return dc.models.GBDTModel(lgbm_model, model_dir, **params) + + model = 
dc.models.SingletaskToMultitask(tasks, lightgbm_builder) + # fit trained model + model.fit(train_dataset) + model.save() + # eval model on test + scores = model.evaluate(test_dataset, [regression_metric]) + score = scores[regression_metric.name] + assert score < 55 + + +def test_classification_with_xgboost(): + """Test that sklearn models can learn on simple classification datasets.""" + np.random.seed(123) + + # prepare dataset + dataset = load_digits(n_class=2) + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + params = {'early_stopping_rounds': 25} + + # xgboost test + xgb_model = xgboost.XGBClassifier(n_estimators=50, seed=123, verbose=False) + model = dc.models.GBDTModel(xgb_model, **params) + # fit trained model + model.fit(train_dataset) + model.save() + # eval model on test + scores = model.evaluate(test_dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_classification_with_lightgbm(): + """Test that sklearn models can learn on simple classification datasets.""" + np.random.seed(123) + + # prepare dataset + dataset = load_digits(n_class=2) + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + params = {'early_stopping_rounds': 25} + + # lightgbm test + lgbm_model = lightgbm.LGBMClassifier(n_estimators=50, seed=123, silent=True) + model = dc.models.GBDTModel(lgbm_model, **params) + # fit trained model + model.fit(train_dataset) + 
model.save() + # eval model on test + scores = model.evaluate(test_dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_reload_with_xgboost(): + np.random.seed(123) + + # prepare dataset + dataset = load_diabetes() + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + model_dir = tempfile.mkdtemp() + params = {'early_stopping_rounds': 25, 'model_dir': model_dir} + + # xgboost test + xgb_model = xgboost.XGBRegressor( + n_estimators=50, random_state=123, verbose=False) + model = dc.models.GBDTModel(xgb_model, **params) + # fit trained model + model.fit(train_dataset) + model.save() + # reload + reloaded_model = dc.models.GBDTModel(None, model_dir) + reloaded_model.reload() + # check predictions match on test dataset + original_pred = model.predict(test_dataset) + reload_pred = reloaded_model.predict(test_dataset) + assert np.all(original_pred == reload_pred) + # eval model on test + scores = reloaded_model.evaluate(test_dataset, [regression_metric]) + assert scores[regression_metric.name] < 55 + + +def test_reload_with_lightgbm(): + np.random.seed(123) + + # prepare dataset + dataset = load_diabetes() + X, y = dataset.data, dataset.target + frac_train = .7 + X_train, X_test, y_train, y_test = \ + train_test_split(X, y, train_size=frac_train) + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # global setting + regression_metric = dc.metrics.Metric(dc.metrics.mae_score) + model_dir = tempfile.mkdtemp() + params = {'early_stopping_rounds': 25, 'model_dir': model_dir} + + # lightgbm test + lgbm_model = lightgbm.LGBMRegressor( + n_estimators=50, random_state=123, silent=True) + model = 
dc.models.GBDTModel(lgbm_model, **params) + # fit trained model + model.fit(train_dataset) + model.save() + # reload + reloaded_model = dc.models.GBDTModel(None, model_dir) + reloaded_model.reload() + # check predictions match on test dataset + original_pred = model.predict(test_dataset) + reload_pred = reloaded_model.predict(test_dataset) + assert np.all(original_pred == reload_pred) + # eval model on test + scores = reloaded_model.evaluate(test_dataset, [regression_metric]) + assert scores[regression_metric.name] < 55 diff --git a/deepchem/models/tests/test_gcn.py b/deepchem/models/tests/test_gcn.py new file mode 100644 index 0000000000000000000000000000000000000000..1548615d197ff93491223f171830b6197989c672 --- /dev/null +++ b/deepchem/models/tests/test_gcn.py @@ -0,0 +1,103 @@ +import unittest +import tempfile + +import numpy as np + +import deepchem as dc +from deepchem.feat import MolGraphConvFeaturizer +from deepchem.models import GCNModel +from deepchem.models.tests.test_graph_models import get_dataset + +try: + import dgl + import dgllife + import torch + has_torch_and_dgl = True +except: + has_torch_and_dgl = False + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gcn_regression(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 'regression', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = GCNModel( + mode='regression', + n_tasks=n_tasks, + number_atom_features=30, + batch_size=10, + learning_rate=0.003) + + # overfit test + model.fit(dataset, nb_epoch=300) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.5 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gcn_classification(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 
'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = GCNModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + batch_size=10, + learning_rate=0.0003) + + # overfit test + model.fit(dataset, nb_epoch=70) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_gcn_reload(): + # load datasets + featurizer = MolGraphConvFeaturizer() + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model_dir = tempfile.mkdtemp() + model = GCNModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + model_dir=model_dir, + batch_size=10, + learning_rate=0.0003) + + model.fit(dataset, nb_epoch=70) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + reloaded_model = GCNModel( + mode='classification', + n_tasks=n_tasks, + number_atom_features=30, + model_dir=model_dir, + batch_size=10, + learning_rate=0.0003) + reloaded_model.restore() + + pred_mols = ["CCCC", "CCCCCO", "CCCCC"] + X_pred = featurizer(pred_mols) + random_dataset = dc.data.NumpyDataset(X_pred) + original_pred = model.predict(random_dataset) + reload_pred = reloaded_model.predict(random_dataset) + assert np.all(original_pred == reload_pred) diff --git a/deepchem/models/tests/test_generalize.py b/deepchem/models/tests/test_generalize.py deleted file mode 100644 index da1e12c317081c40ded2ce3ef93e0d1c14380b8f..0000000000000000000000000000000000000000 --- a/deepchem/models/tests/test_generalize.py +++ /dev/null @@ -1,281 +0,0 @@ -""" -Tests to make sure deepchem models can fit models on easy datasets. 
-""" - -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import sklearn -import sklearn.datasets -import numpy as np -import unittest -import tempfile -import deepchem as dc -from sklearn.ensemble import RandomForestRegressor -from sklearn.linear_model import LinearRegression -from sklearn.linear_model import LogisticRegression - - -class TestGeneralize(unittest.TestCase): - """ - Test that models can learn generalizable models on simple datasets. - """ - - def test_sklearn_regression(self): - """Test that sklearn models can learn on simple regression datasets.""" - np.random.seed(123) - - dataset = sklearn.datasets.load_diabetes() - X, y = dataset.data, dataset.target - y = np.expand_dims(y, 1) - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.NumpyDataset(X_train, y_train) - test_dataset = dc.data.NumpyDataset(X_test, y_test) - - regression_metric = dc.metrics.Metric(dc.metrics.r2_score) - - sklearn_model = LinearRegression() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on test - scores = model.evaluate(test_dataset, [regression_metric]) - assert scores[regression_metric.name] > .5 - - def test_sklearn_transformed_regression(self): - """Test that sklearn models can learn on simple transformed regression datasets.""" - np.random.seed(123) - dataset = sklearn.datasets.load_diabetes() - X, y = dataset.data, dataset.target - y = np.expand_dims(y, 1) - - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.NumpyDataset(X_train, y_train) - test_dataset = dc.data.NumpyDataset(X_test, y_test) - - # Eval model on train - transformers = [ - 
dc.trans.NormalizationTransformer( - transform_X=True, dataset=train_dataset), - dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset), - dc.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset) - ] - for data in [train_dataset, test_dataset]: - for transformer in transformers: - data = transformer.transform(data) - - regression_metric = dc.metrics.Metric(dc.metrics.r2_score) - sklearn_model = LinearRegression() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(train_dataset) - model.save() - - train_scores = model.evaluate(train_dataset, [regression_metric], - transformers) - assert train_scores[regression_metric.name] > .5 - - # Eval model on test - test_scores = model.evaluate(test_dataset, [regression_metric], - transformers) - assert test_scores[regression_metric.name] > .5 - - def test_sklearn_multitask_regression(self): - """Test that sklearn models can learn on simple multitask regression.""" - np.random.seed(123) - n_tasks = 4 - tasks = range(n_tasks) - dataset = sklearn.datasets.load_diabetes() - X, y = dataset.data, dataset.target - y = np.reshape(y, (len(y), 1)) - y = np.hstack([y] * n_tasks) - - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) - test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) - - regression_metric = dc.metrics.Metric(dc.metrics.r2_score) - - def model_builder(model_dir): - sklearn_model = LinearRegression() - return dc.models.SklearnModel(sklearn_model, model_dir) - - model = dc.models.SingletaskToMultitask(tasks, model_builder) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on test - scores = model.evaluate(test_dataset, [regression_metric]) - for score in scores[regression_metric.name]: - assert score > .5 - - #def 
test_sklearn_classification(self): - # """Test that sklearn models can learn on simple classification datasets.""" - # np.random.seed(123) - # dataset = sklearn.datasets.load_digits(n_class=2) - # X, y = dataset.data, dataset.target - - # frac_train = .7 - # n_samples = len(X) - # n_train = int(frac_train*n_samples) - # X_train, y_train = X[:n_train], y[:n_train] - # X_test, y_test = X[n_train:], y[n_train:] - # train_dataset = dc.data.NumpyDataset(X_train, y_train) - # test_dataset = dc.data.NumpyDataset(X_test, y_test) - - # classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - # sklearn_model = LogisticRegression() - # model = dc.models.SklearnModel(sklearn_model) - - # # Fit trained model - # model.fit(train_dataset) - # model.save() - - # # Eval model on test - # scores = model.evaluate(test_dataset, [classification_metric]) - # assert scores[classification_metric.name] > .5 - - #def test_sklearn_multitask_classification(self): - # """Test that sklearn models can learn on simple multitask classification.""" - # np.random.seed(123) - # n_tasks = 4 - # tasks = range(n_tasks) - # dataset = sklearn.datasets.load_digits(n_class=2) - # X, y = dataset.data, dataset.target - # y = np.reshape(y, (len(y), 1)) - # y = np.hstack([y] * n_tasks) - # - # frac_train = .7 - # n_samples = len(X) - # n_train = int(frac_train*n_samples) - # X_train, y_train = X[:n_train], y[:n_train] - # X_test, y_test = X[n_train:], y[n_train:] - # train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) - # test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) - - # classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - # def model_builder(model_dir): - # sklearn_model = LogisticRegression() - # return dc.models.SklearnModel(sklearn_model, model_dir) - # model = dc.models.SingletaskToMultitask(tasks, model_builder) - - # # Fit trained model - # model.fit(train_dataset) - # model.save() - # # Eval model on test - # scores = 
model.evaluate(test_dataset, [classification_metric]) - # for score in scores[classification_metric.name]: - # assert score > .5 - - def test_xgboost_regression(self): - import xgboost - np.random.seed(123) - - dataset = sklearn.datasets.load_diabetes() - X, y = dataset.data, dataset.target - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.NumpyDataset(X_train, y_train) - test_dataset = dc.data.NumpyDataset(X_test, y_test) - - regression_metric = dc.metrics.Metric(dc.metrics.mae_score) - # Set early stopping round = n_estimators so that esr won't work - esr = {'early_stopping_rounds': 50} - - xgb_model = xgboost.XGBRegressor(n_estimators=50, random_state=123) - model = dc.models.XGBoostModel(xgb_model, verbose=False, **esr) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on test - scores = model.evaluate(test_dataset, [regression_metric]) - assert scores[regression_metric.name] < 55 - - def test_xgboost_multitask_regression(self): - import xgboost - np.random.seed(123) - n_tasks = 4 - tasks = range(n_tasks) - dataset = sklearn.datasets.load_diabetes() - X, y = dataset.data, dataset.target - y = np.reshape(y, (len(y), 1)) - y = np.hstack([y] * n_tasks) - - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) - test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) - - regression_metric = dc.metrics.Metric(dc.metrics.mae_score) - esr = {'early_stopping_rounds': 50} - - def model_builder(model_dir): - xgb_model = xgboost.XGBRegressor(n_estimators=50, seed=123) - return dc.models.XGBoostModel(xgb_model, model_dir, verbose=False, **esr) - - model = dc.models.SingletaskToMultitask(tasks, model_builder) - - # 
Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on test - scores = model.evaluate(test_dataset, [regression_metric]) - for score in scores[regression_metric.name]: - assert score < 50 - - def test_xgboost_classification(self): - """Test that sklearn models can learn on simple classification datasets.""" - import xgboost - np.random.seed(123) - dataset = sklearn.datasets.load_digits(n_class=2) - X, y = dataset.data, dataset.target - - frac_train = .7 - n_samples = len(X) - n_train = int(frac_train * n_samples) - X_train, y_train = X[:n_train], y[:n_train] - X_test, y_test = X[n_train:], y[n_train:] - train_dataset = dc.data.NumpyDataset(X_train, y_train) - test_dataset = dc.data.NumpyDataset(X_test, y_test) - - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - esr = {'early_stopping_rounds': 50} - xgb_model = xgboost.XGBClassifier(n_estimators=50, seed=123) - model = dc.models.XGBoostModel(xgb_model, verbose=False, **esr) - - # Fit trained model - model.fit(train_dataset) - model.save() - - # Eval model on test - scores = model.evaluate(test_dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 diff --git a/deepchem/models/tests/test_graph_models.py b/deepchem/models/tests/test_graph_models.py index f3e88f3a23dd298f383ce211250ab7ef77a7a1f2..d0166f470dd0cc0026b77b69331582f0c42db156 100644 --- a/deepchem/models/tests/test_graph_models.py +++ b/deepchem/models/tests/test_graph_models.py @@ -5,7 +5,6 @@ import pytest import scipy import deepchem as dc -import tensorflow as tf from deepchem.data import NumpyDataset from deepchem.models import GraphConvModel, DAGModel, WeaveModel, MPNNModel from deepchem.molnet import load_bace_classification, load_delaney @@ -14,332 +13,311 @@ from deepchem.feat import ConvMolFeaturizer from flaky import flaky -class TestGraphModels(unittest.TestCase): - - def get_dataset(self, - mode='classification', - featurizer='GraphConv', - num_tasks=2): - data_points = 
10 - if mode == 'classification': - tasks, all_dataset, transformers = load_bace_classification(featurizer) - else: - tasks, all_dataset, transformers = load_delaney(featurizer) - - train, valid, test = all_dataset - for i in range(1, num_tasks): - tasks.append("random_task") - w = np.ones(shape=(data_points, len(tasks))) - - if mode == 'classification': - y = np.random.randint(0, 2, size=(data_points, len(tasks))) - metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, np.mean, mode="classification") - else: - y = np.random.normal(size=(data_points, len(tasks))) - metric = dc.metrics.Metric( - dc.metrics.mean_absolute_error, mode="regression") - - ds = NumpyDataset(train.X[:data_points], y, w, train.ids[:data_points]) - - return tasks, ds, transformers, metric - - def test_graph_conv_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'classification', 'GraphConv') - - batch_size = 10 - model = GraphConvModel( - len(tasks), - batch_size=batch_size, - batch_normalize=False, - mode='classification') - - model.fit(dataset, nb_epoch=10) - scores = model.evaluate(dataset, [metric], transformers) - assert scores['mean-roc_auc_score'] >= 0.9 - - def test_neural_fingerprint_retrieval(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'classification', 'GraphConv') - - fp_size = 3 - - batch_size = 50 - model = GraphConvModel( - len(tasks), - batch_size=batch_size, - dense_layer_size=3, - mode='classification') - - model.fit(dataset, nb_epoch=1) - neural_fingerprints = model.predict_embedding(dataset) - neural_fingerprints = np.array(neural_fingerprints)[:len(dataset)] - self.assertEqual((len(dataset), fp_size * 2), neural_fingerprints.shape) - - def test_graph_conv_regression_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'GraphConv') - - batch_size = 10 - model = GraphConvModel( - len(tasks), - batch_size=batch_size, - batch_normalize=False, - mode='regression') - - model.fit(dataset, 
nb_epoch=100) - scores = model.evaluate(dataset, [metric], transformers) - assert all(s < 0.1 for s in scores['mean_absolute_error']) - - def test_graph_conv_regression_uncertainty(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'GraphConv') - - batch_size = 10 - model = GraphConvModel( - len(tasks), - batch_size=batch_size, - batch_normalize=False, - mode='regression', - dropout=0.1, - uncertainty=True) - - model.fit(dataset, nb_epoch=100) - - # Predict the output and uncertainty. - pred, std = model.predict_uncertainty(dataset) - mean_error = np.mean(np.abs(dataset.y - pred)) - mean_value = np.mean(np.abs(dataset.y)) - mean_std = np.mean(std) - assert mean_error < 0.5 * mean_value - assert mean_std > 0.5 * mean_error - assert mean_std < mean_value - - def test_graph_conv_atom_features(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'Raw', num_tasks=1) - - atom_feature_name = 'feature' - y = [] - for mol in dataset.X: - atom_features = [] - for atom in mol.GetAtoms(): - val = np.random.normal() - mol.SetProp("atom %08d %s" % (atom.GetIdx(), atom_feature_name), - str(val)) - atom_features.append(np.random.normal()) - y.append([np.sum(atom_features)]) - - featurizer = ConvMolFeaturizer(atom_properties=[atom_feature_name]) - X = featurizer.featurize(dataset.X) - dataset = dc.data.NumpyDataset(X, np.array(y)) - batch_size = 50 - model = GraphConvModel( - len(tasks), - number_atom_features=featurizer.feature_length(), - batch_size=batch_size, - mode='regression') - - model.fit(dataset, nb_epoch=1) - y_pred1 = model.predict(dataset) - - @pytest.mark.slow - def test_weave_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'classification', 'Weave') - - batch_size = 10 - model = WeaveModel(len(tasks), batch_size=batch_size, mode='classification') - model.fit(dataset, nb_epoch=50) - scores = model.evaluate(dataset, [metric], transformers) - assert scores['mean-roc_auc_score'] >= 0.9 
- - @flaky - def test_weave_regression_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'Weave') - - batch_size = 10 - model = WeaveModel(len(tasks), batch_size=batch_size, mode='regression') - model.fit(dataset, nb_epoch=80) - scores = model.evaluate(dataset, [metric], transformers) - assert all(s < 0.1 for s in scores['mean_absolute_error']) - - @pytest.mark.slow - def test_dag_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'classification', 'GraphConv') - - batch_size = 10 - max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) - transformer = dc.trans.DAGTransformer(max_atoms=max_atoms) - dataset = transformer.transform(dataset) - - model = DAGModel( - len(tasks), - max_atoms=max_atoms, - mode='classification', - learning_rate=0.03, - batch_size=batch_size, - use_queue=False) - - model.fit(dataset, nb_epoch=40) - scores = model.evaluate(dataset, [metric], transformers) - assert scores['mean-roc_auc_score'] >= 0.9 - - @pytest.mark.slow - def test_dag_regression_model(self): - np.random.seed(1234) - tf.random.set_seed(1234) - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'GraphConv') - - batch_size = 10 - max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) - transformer = dc.trans.DAGTransformer(max_atoms=max_atoms) - dataset = transformer.transform(dataset) - - model = DAGModel( - len(tasks), - max_atoms=max_atoms, - mode='regression', - learning_rate=0.03, - batch_size=batch_size, - use_queue=False) - - model.fit(dataset, nb_epoch=1200) - scores = model.evaluate(dataset, [metric], transformers) - assert all(s < 0.15 for s in scores['mean_absolute_error']) - - @pytest.mark.slow - def test_dag_regression_uncertainty(self): - np.random.seed(1234) - tf.random.set_seed(1234) - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'GraphConv') - - batch_size = 10 - max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) - transformer = 
dc.trans.DAGTransformer(max_atoms=max_atoms) - dataset = transformer.transform(dataset) - - model = DAGModel( - len(tasks), - max_atoms=max_atoms, - mode='regression', - learning_rate=0.003, - batch_size=batch_size, - use_queue=False, - dropout=0.05, - uncertainty=True) - - model.fit(dataset, nb_epoch=750) - - # Predict the output and uncertainty. - pred, std = model.predict_uncertainty(dataset) - mean_error = np.mean(np.abs(dataset.y - pred)) - mean_value = np.mean(np.abs(dataset.y)) - mean_std = np.mean(std) - # The DAG models have high error with dropout - # Despite a lot of effort tweaking it , there appears to be - # a limit to how low the error can go with dropout. - #assert mean_error < 0.5 * mean_value - assert mean_error < .7 * mean_value - assert mean_std > 0.5 * mean_error - assert mean_std < mean_value - - @pytest.mark.slow - def test_mpnn_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'classification', 'Weave') - - batch_size = 10 - model = MPNNModel( - len(tasks), - mode='classification', - n_hidden=75, - n_atom_feat=75, - n_pair_feat=14, - T=1, - M=1, - batch_size=batch_size) - - model.fit(dataset, nb_epoch=40) - scores = model.evaluate(dataset, [metric], transformers) - assert scores['mean-roc_auc_score'] >= 0.9 - - @pytest.mark.slow - def test_mpnn_regression_model(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'Weave') - - batch_size = 10 - model = MPNNModel( - len(tasks), - mode='regression', - n_hidden=75, - n_atom_feat=75, - n_pair_feat=14, - T=1, - M=1, - batch_size=batch_size) - - model.fit(dataset, nb_epoch=60) - scores = model.evaluate(dataset, [metric], transformers) - assert all(s < 0.1 for s in scores['mean_absolute_error']) - - @pytest.mark.slow - def test_mpnn_regression_uncertainty(self): - tasks, dataset, transformers, metric = self.get_dataset( - 'regression', 'Weave') - - batch_size = 10 - model = MPNNModel( - len(tasks), - mode='regression', - n_hidden=75, - 
n_atom_feat=75, - n_pair_feat=14, - T=1, - M=1, - dropout=0.1, - batch_size=batch_size, - uncertainty=True) - - model.fit(dataset, nb_epoch=40) - - # Predict the output and uncertainty. - pred, std = model.predict_uncertainty(dataset) - mean_error = np.mean(np.abs(dataset.y - pred)) - mean_value = np.mean(np.abs(dataset.y)) - mean_std = np.mean(std) - assert mean_error < 0.5 * mean_value - assert mean_std > 0.5 * mean_error - assert mean_std < mean_value - - @flaky - def test_dtnn_regression_model(self): - current_dir = os.path.dirname(os.path.abspath(__file__)) - input_file = os.path.join(current_dir, "example_DTNN.mat") - dataset = scipy.io.loadmat(input_file) - X = dataset['X'] - y = dataset['T'] - w = np.ones_like(y) - dataset = dc.data.NumpyDataset(X, y, w, ids=None) - n_tasks = y.shape[1] - - model = dc.models.DTNNModel( - n_tasks, - n_embedding=20, - n_distance=100, - learning_rate=1.0, - mode="regression") - - # Fit trained model - model.fit(dataset, nb_epoch=250) - - # Eval model on train - pred = model.predict(dataset) - mean_rel_error = np.mean(np.abs(1 - pred / y)) - assert mean_rel_error < 0.1 +def get_dataset(mode='classification', featurizer='GraphConv', num_tasks=2): + data_points = 20 + if mode == 'classification': + tasks, all_dataset, transformers = load_bace_classification(featurizer) + else: + tasks, all_dataset, transformers = load_delaney(featurizer) + + train, valid, test = all_dataset + for i in range(1, num_tasks): + tasks.append("random_task") + w = np.ones(shape=(data_points, len(tasks))) + + if mode == 'classification': + y = np.random.randint(0, 2, size=(data_points, len(tasks))) + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + else: + y = np.random.normal(size=(data_points, len(tasks))) + metric = dc.metrics.Metric( + dc.metrics.mean_absolute_error, mode="regression") + + ds = NumpyDataset(train.X[:data_points], y, w, train.ids[:data_points]) + + return tasks, ds, transformers, metric + + 
+def test_graph_conv_model(): + tasks, dataset, transformers, metric = get_dataset('classification', + 'GraphConv') + + batch_size = 10 + model = GraphConvModel( + len(tasks), + batch_size=batch_size, + batch_normalize=False, + mode='classification') + + model.fit(dataset, nb_epoch=20) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.9 + + +def test_neural_fingerprint_retrieval(): + tasks, dataset, transformers, metric = get_dataset('classification', + 'GraphConv') + + fp_size = 3 + + batch_size = 50 + model = GraphConvModel( + len(tasks), + batch_size=batch_size, + dense_layer_size=3, + mode='classification') + + model.fit(dataset, nb_epoch=1) + neural_fingerprints = model.predict_embedding(dataset) + neural_fingerprints = np.array(neural_fingerprints)[:len(dataset)] + assert (len(dataset), fp_size * 2) == neural_fingerprints.shape + + +def test_graph_conv_regression_model(): + tasks, dataset, transformers, metric = get_dataset('regression', 'GraphConv') + + batch_size = 10 + model = GraphConvModel( + len(tasks), + batch_size=batch_size, + batch_normalize=False, + mode='regression') + + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.1 + + +def test_graph_conv_regression_uncertainty(): + tasks, dataset, transformers, metric = get_dataset('regression', 'GraphConv') + + batch_size = 10 + model = GraphConvModel( + len(tasks), + batch_size=batch_size, + batch_normalize=False, + mode='regression', + dropout=0.1, + uncertainty=True) + + model.fit(dataset, nb_epoch=100) + + # Predict the output and uncertainty. 
+ pred, std = model.predict_uncertainty(dataset) + mean_error = np.mean(np.abs(dataset.y - pred)) + mean_value = np.mean(np.abs(dataset.y)) + mean_std = np.mean(std) + assert mean_error < 0.5 * mean_value + assert mean_std > 0.5 * mean_error + assert mean_std < mean_value + + +def test_graph_conv_atom_features(): + tasks, dataset, transformers, metric = get_dataset( + 'regression', 'Raw', num_tasks=1) + + atom_feature_name = 'feature' + y = [] + for mol in dataset.X: + atom_features = [] + for atom in mol.GetAtoms(): + val = np.random.normal() + mol.SetProp("atom %08d %s" % (atom.GetIdx(), atom_feature_name), str(val)) + atom_features.append(np.random.normal()) + y.append([np.sum(atom_features)]) + + featurizer = ConvMolFeaturizer(atom_properties=[atom_feature_name]) + X = featurizer.featurize(dataset.X) + dataset = dc.data.NumpyDataset(X, np.array(y)) + batch_size = 50 + model = GraphConvModel( + len(tasks), + number_atom_features=featurizer.feature_length(), + batch_size=batch_size, + mode='regression') + + model.fit(dataset, nb_epoch=1) + y_pred1 = model.predict(dataset) + + +@pytest.mark.slow +def test_dag_model(): + tasks, dataset, transformers, metric = get_dataset('classification', + 'GraphConv') + + batch_size = 10 + max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) + transformer = dc.trans.DAGTransformer(max_atoms=max_atoms) + dataset = transformer.transform(dataset) + + model = DAGModel( + len(tasks), + max_atoms=max_atoms, + mode='classification', + learning_rate=0.03, + batch_size=batch_size, + use_queue=False) + + model.fit(dataset, nb_epoch=40) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.9 + + +@pytest.mark.slow +def test_dag_regression_model(): + import tensorflow as tf + np.random.seed(1234) + tf.random.set_seed(1234) + tasks, dataset, transformers, metric = get_dataset('regression', 'GraphConv') + + batch_size = 10 + max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) + 
transformer = dc.trans.DAGTransformer(max_atoms=max_atoms) + dataset = transformer.transform(dataset) + + model = DAGModel( + len(tasks), + max_atoms=max_atoms, + mode='regression', + learning_rate=0.03, + batch_size=batch_size, + use_queue=False) + + model.fit(dataset, nb_epoch=1200) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.15 + + +@pytest.mark.slow +def test_dag_regression_uncertainty(): + import tensorflow as tf + np.random.seed(1234) + tf.random.set_seed(1234) + tasks, dataset, transformers, metric = get_dataset('regression', 'GraphConv') + + batch_size = 10 + max_atoms = max([mol.get_num_atoms() for mol in dataset.X]) + transformer = dc.trans.DAGTransformer(max_atoms=max_atoms) + dataset = transformer.transform(dataset) + + model = DAGModel( + len(tasks), + max_atoms=max_atoms, + mode='regression', + learning_rate=0.003, + batch_size=batch_size, + use_queue=False, + dropout=0.05, + uncertainty=True) + + model.fit(dataset, nb_epoch=750) + + # Predict the output and uncertainty. + pred, std = model.predict_uncertainty(dataset) + mean_error = np.mean(np.abs(dataset.y - pred)) + mean_value = np.mean(np.abs(dataset.y)) + mean_std = np.mean(std) + # The DAG models have high error with dropout + # Despite a lot of effort tweaking it , there appears to be + # a limit to how low the error can go with dropout. 
+ #assert mean_error < 0.5 * mean_value + assert mean_error < .7 * mean_value + assert mean_std > 0.5 * mean_error + assert mean_std < mean_value + + +@pytest.mark.slow +def test_mpnn_model(): + tasks, dataset, transformers, metric = get_dataset('classification', 'Weave') + + batch_size = 10 + model = MPNNModel( + len(tasks), + mode='classification', + n_hidden=75, + n_atom_feat=75, + n_pair_feat=14, + T=1, + M=1, + batch_size=batch_size) + + model.fit(dataset, nb_epoch=40) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.9 + + +@pytest.mark.slow +def test_mpnn_regression_model(): + tasks, dataset, transformers, metric = get_dataset('regression', 'Weave') + + batch_size = 10 + model = MPNNModel( + len(tasks), + mode='regression', + n_hidden=75, + n_atom_feat=75, + n_pair_feat=14, + T=1, + M=1, + batch_size=batch_size) + + model.fit(dataset, nb_epoch=60) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.1 + + +@pytest.mark.slow +def test_mpnn_regression_uncertainty(): + tasks, dataset, transformers, metric = get_dataset('regression', 'Weave') + + batch_size = 10 + model = MPNNModel( + len(tasks), + mode='regression', + n_hidden=75, + n_atom_feat=75, + n_pair_feat=14, + T=1, + M=1, + dropout=0.1, + batch_size=batch_size, + uncertainty=True) + + model.fit(dataset, nb_epoch=40) + + # Predict the output and uncertainty. 
+ pred, std = model.predict_uncertainty(dataset) + mean_error = np.mean(np.abs(dataset.y - pred)) + mean_value = np.mean(np.abs(dataset.y)) + mean_std = np.mean(std) + assert mean_error < 0.5 * mean_value + assert mean_std > 0.5 * mean_error + assert mean_std < mean_value + + +@flaky +def test_dtnn_regression_model(): + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, "example_DTNN.mat") + dataset = scipy.io.loadmat(input_file) + X = dataset['X'] + y = dataset['T'] + w = np.ones_like(y) + dataset = dc.data.NumpyDataset(X, y, w, ids=None) + n_tasks = y.shape[1] + + model = dc.models.DTNNModel( + n_tasks, + n_embedding=20, + n_distance=100, + learning_rate=1.0, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=250) + + # Eval model on train + pred = model.predict(dataset) + mean_rel_error = np.mean(np.abs(1 - pred / y)) + assert mean_rel_error < 0.1 diff --git a/deepchem/models/tests/test_kerasmodel.py b/deepchem/models/tests/test_kerasmodel.py index 2fcdc43140fd538a021e85770e12222f2bdff1f3..d84213662e7a30f15da8f5b7a96329065360dc10 100644 --- a/deepchem/models/tests/test_kerasmodel.py +++ b/deepchem/models/tests/test_kerasmodel.py @@ -5,333 +5,363 @@ import numpy as np import tensorflow as tf -class TestKerasModel(unittest.TestCase): - - def test_overfit_graph_model(self): - """Test fitting a KerasModel defined as a graph.""" - n_data_points = 10 - n_features = 2 - np.random.seed(1234) - X = np.random.rand(n_data_points, n_features) - y = (X[:, 0] > X[:, 1]).astype(np.float32) - dataset = dc.data.NumpyDataset(X, y) - inputs = tf.keras.Input(shape=(n_features,)) - hidden = tf.keras.layers.Dense(10, activation='relu')(inputs) - logits = tf.keras.layers.Dense(1)(hidden) - outputs = tf.keras.layers.Activation('sigmoid')(logits) - keras_model = tf.keras.Model(inputs=inputs, outputs=[outputs, logits]) - model = dc.models.KerasModel( - keras_model, - dc.models.losses.SigmoidCrossEntropy(), - 
output_types=['prediction', 'loss'], - learning_rate=0.005) - model.fit(dataset, nb_epoch=1000) - prediction = np.squeeze(model.predict_on_batch(X)) - assert np.array_equal(y, np.round(prediction)) - metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - scores = model.evaluate(dataset, [metric]) - assert scores[metric.name] > 0.9 - - # Check that predicting internal layers works. - pred_logits = np.squeeze(model.predict_on_batch(X, outputs=logits)) - pred_from_logits = 1.0 / (1.0 + np.exp(-pred_logits)) - assert np.allclose(prediction, pred_from_logits, atol=1e-4) - - def test_overfit_sequential_model(self): - """Test fitting a KerasModel defined as a sequential model.""" - n_data_points = 10 - n_features = 2 - X = np.random.rand(n_data_points, n_features) - y = (X[:, 0] > X[:, 1]).astype(np.float32) - dataset = dc.data.NumpyDataset(X, y) - keras_model = tf.keras.Sequential([ - tf.keras.layers.Dense(10, activation='relu'), - tf.keras.layers.Dense(1, activation='sigmoid') - ]) - model = dc.models.KerasModel( - keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) - model.fit(dataset, nb_epoch=1000) - prediction = np.squeeze(model.predict_on_batch(X)) - assert np.array_equal(y, np.round(prediction)) - metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - generator = model.default_generator(dataset, pad_batches=False) - scores = model.evaluate_generator(generator, [metric]) - assert scores[metric.name] > 0.9 - - def test_fit_on_batch(self): - """Test fitting a KerasModel to individual batches.""" - n_data_points = 10 - n_features = 2 - X = np.random.rand(n_data_points, n_features) - y = (X[:, 0] > X[:, 1]).astype(np.float32) - dataset = dc.data.NumpyDataset(X, y) - keras_model = tf.keras.Sequential([ - tf.keras.layers.Dense(10, activation='relu'), - tf.keras.layers.Dense(1, activation='sigmoid') - ]) - model = dc.models.KerasModel( - keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) - i = 0 - for X, y, w, ids in 
dataset.iterbatches(model.batch_size, 500): - i += 1 - model.fit_on_batch(X, y, w, checkpoint=False) - prediction = np.squeeze(model.predict_on_batch(X)) - assert np.array_equal(y, np.round(prediction)) - metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - generator = model.default_generator(dataset, pad_batches=False) - scores = model.evaluate_generator(generator, [metric]) - assert scores[metric.name] > 0.9 - - def test_checkpointing(self): - """Test loading and saving checkpoints with KerasModel.""" - # Create two models using the same model directory. - - keras_model1 = tf.keras.Sequential([tf.keras.layers.Dense(10)]) - keras_model2 = tf.keras.Sequential([tf.keras.layers.Dense(10)]) - model1 = dc.models.KerasModel(keras_model1, dc.models.losses.L2Loss()) - model2 = dc.models.KerasModel( - keras_model2, dc.models.losses.L2Loss(), model_dir=model1.model_dir) - - # Check that they produce different results. - - X = np.random.rand(5, 5) - y1 = model1.predict_on_batch(X) - y2 = model2.predict_on_batch(X) - assert not np.array_equal(y1, y2) - - # Save a checkpoint from the first model and load it into the second one, - # and make sure they now match. - - model1.save_checkpoint() - model2.restore() - y3 = model1.predict_on_batch(X) - y4 = model2.predict_on_batch(X) - assert np.array_equal(y1, y3) - assert np.array_equal(y1, y4) - - def test_fit_restore(self): - """Test specifying restore=True when calling fit().""" - n_data_points = 10 - n_features = 2 - X = np.random.rand(n_data_points, n_features) - y = (X[:, 0] > X[:, 1]).astype(np.float32) - dataset = dc.data.NumpyDataset(X, y) - - # Train a model to overfit the dataset. 
- - keras_model = tf.keras.Sequential([ - tf.keras.layers.Dense(10, activation='relu'), - tf.keras.layers.Dense(1, activation='sigmoid') - ]) - model = dc.models.KerasModel( - keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) - model.fit(dataset, nb_epoch=1000) - prediction = np.squeeze(model.predict_on_batch(X)) - assert np.array_equal(y, np.round(prediction)) - - # Create an identical model, do a single step of fitting with restore=True, - # and make sure it got restored correctly. - - keras_model2 = tf.keras.Sequential([ - tf.keras.layers.Dense(10, activation='relu'), - tf.keras.layers.Dense(1, activation='sigmoid') - ]) - model2 = dc.models.KerasModel( - keras_model2, - dc.models.losses.BinaryCrossEntropy(), - model_dir=model.model_dir) - model2.fit(dataset, nb_epoch=1, restore=True) - prediction = np.squeeze(model2.predict_on_batch(X)) - assert np.array_equal(y, np.round(prediction)) - - def test_uncertainty(self): - """Test estimating uncertainty a KerasModel.""" - n_samples = 30 - n_features = 1 - noise = 0.1 - X = np.random.rand(n_samples, n_features) - y = (10 * X + np.random.normal(scale=noise, size=(n_samples, n_features))) - dataset = dc.data.NumpyDataset(X, y) - - # Build a model that predicts uncertainty. 
- - inputs = tf.keras.Input(shape=(n_features,)) - switch = tf.keras.Input(shape=tuple()) - hidden = tf.keras.layers.Dense(200, activation='relu')(inputs) - dropout = dc.models.layers.SwitchedDropout(rate=0.1)([hidden, switch]) - output = tf.keras.layers.Dense(n_features)(dropout) - log_var = tf.keras.layers.Dense(n_features)(dropout) - var = tf.keras.layers.Activation(tf.exp)(log_var) - keras_model = tf.keras.Model( - inputs=[inputs, switch], outputs=[output, var, output, log_var]) - - def loss(outputs, labels, weights): - diff = labels[0] - outputs[0] - log_var = outputs[1] - var = tf.exp(log_var) - return tf.reduce_mean(diff * diff / var + log_var) - - class UncertaintyModel(dc.models.KerasModel): - - def default_generator(self, - dataset, - epochs=1, - mode='fit', - deterministic=True, - pad_batches=True): - for epoch in range(epochs): - for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( - batch_size=self.batch_size, - deterministic=deterministic, - pad_batches=pad_batches): - if mode == 'predict': - dropout = np.array(0.0) - else: - dropout = np.array(1.0) - yield ([X_b, dropout], [y_b], [w_b]) - - model = UncertaintyModel( - keras_model, - loss, - output_types=['prediction', 'variance', 'loss', 'loss'], - learning_rate=0.003) - - # Fit the model and see if its predictions are correct. 
- - model.fit(dataset, nb_epoch=2500) - pred, std = model.predict_uncertainty(dataset) - assert np.mean(np.abs(y - pred)) < 1.0 - assert noise < np.mean(std) < 1.0 - - def test_saliency_mapping(self): - """Test computing a saliency map.""" - n_tasks = 3 - n_features = 5 - keras_model = tf.keras.Sequential([ - tf.keras.layers.Dense(20, activation='tanh'), - tf.keras.layers.Dense(n_tasks) - ]) - model = dc.models.KerasModel(keras_model, dc.models.losses.L2Loss()) - x = np.random.random(n_features) - s = model.compute_saliency(x) - assert s.shape[0] == n_tasks - assert s.shape[1] == n_features - - # Take a tiny step in the direction of s and see if the output changes by - # the expected amount. - - delta = 0.01 - for task in range(n_tasks): - norm = np.sqrt(np.sum(s[task]**2)) - step = 0.5 * delta / norm - pred1 = model.predict_on_batch((x + s[task] * step).reshape( - (1, n_features))).flatten() - pred2 = model.predict_on_batch((x - s[task] * step).reshape( - (1, n_features))).flatten() - self.assertAlmostEqual( - pred1[task], (pred2 + norm * delta)[task], places=4) - - def test_saliency_shapes(self): - """Test computing saliency maps for multiple outputs with multiple dimensions.""" - inputs = tf.keras.Input(shape=(2, 3)) - flatten = tf.keras.layers.Flatten()(inputs) - output1 = tf.keras.layers.Reshape((4, 1))(tf.keras.layers.Dense(4)(flatten)) - output2 = tf.keras.layers.Reshape((1, 5))(tf.keras.layers.Dense(5)(flatten)) - keras_model = tf.keras.Model(inputs=inputs, outputs=[output1, output2]) - model = dc.models.KerasModel(keras_model, dc.models.losses.L2Loss()) - x = np.random.random((2, 3)) - s = model.compute_saliency(x) - assert len(s) == 2 - assert s[0].shape == (4, 1, 2, 3) - assert s[1].shape == (1, 5, 2, 3) - - def test_tensorboard(self): - """Test logging to Tensorboard.""" - n_data_points = 20 - n_features = 2 - X = np.random.rand(n_data_points, n_features) - y = [[0.0, 1.0] for x in range(n_data_points)] - dataset = dc.data.NumpyDataset(X, y) - 
keras_model = tf.keras.Sequential([ - tf.keras.layers.Dense(2, activation='softmax'), - ]) - model = dc.models.KerasModel( - keras_model, - dc.models.losses.CategoricalCrossEntropy(), - tensorboard=True, - log_frequency=1) - model.fit(dataset, nb_epoch=10) - files_in_dir = os.listdir(model.model_dir) - event_file = list(filter(lambda x: x.startswith("events"), files_in_dir)) - assert len(event_file) > 0 - event_file = os.path.join(model.model_dir, event_file[0]) - file_size = os.stat(event_file).st_size - assert file_size > 0 - - def test_fit_variables(self): - """Test training a subset of the variables in a model.""" - - class VarModel(tf.keras.Model): - - def __init__(self, **kwargs): - super(VarModel, self).__init__(**kwargs) - self.var1 = tf.Variable([0.5]) - self.var2 = tf.Variable([0.5]) - - def call(self, inputs, training=False): - return [self.var1, self.var2] - - def loss(outputs, labels, weights): - return (outputs[0] * outputs[1] - labels[0])**2 - - keras_model = VarModel() - model = dc.models.KerasModel(keras_model, loss, learning_rate=0.01) - x = np.ones((1, 1)) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 0.5) - assert np.allclose(vars[1], 0.5) - model.fit_generator([(x, x, x)] * 300) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 1.0) - assert np.allclose(vars[1], 1.0) - model.fit_generator([(x, 2 * x, x)] * 300, variables=[keras_model.var1]) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 2.0) - assert np.allclose(vars[1], 1.0) - model.fit_generator([(x, x, x)] * 300, variables=[keras_model.var2]) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 2.0) - assert np.allclose(vars[1], 0.5) - - def test_fit_loss(self): - """Test specifying a different loss function when calling fit().""" - - class VarModel(tf.keras.Model): - - def __init__(self, **kwargs): - super(VarModel, self).__init__(**kwargs) - self.var1 = tf.Variable([0.5]) - self.var2 = tf.Variable([0.5]) - - def call(self, 
inputs, training=False): - return [self.var1, self.var2] - - def loss1(outputs, labels, weights): - return (outputs[0] * outputs[1] - labels[0])**2 - - def loss2(outputs, labels, weights): - return (outputs[0] + outputs[1] - labels[0])**2 - - keras_model = VarModel() - model = dc.models.KerasModel(keras_model, loss1, learning_rate=0.01) - x = np.ones((1, 1)) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 0.5) - assert np.allclose(vars[1], 0.5) - model.fit_generator([(x, x, x)] * 300) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0], 1.0) - assert np.allclose(vars[1], 1.0) - model.fit_generator([(x, 3 * x, x)] * 300, loss=loss2) - vars = model.predict_on_batch(x) - assert np.allclose(vars[0] + vars[1], 3.0) +def test_overfit_graph_model(): + """Test fitting a KerasModel defined as a graph.""" + n_data_points = 10 + n_features = 2 + np.random.seed(1234) + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + inputs = tf.keras.Input(shape=(n_features,)) + hidden = tf.keras.layers.Dense(10, activation='relu')(inputs) + logits = tf.keras.layers.Dense(1)(hidden) + outputs = tf.keras.layers.Activation('sigmoid')(logits) + keras_model = tf.keras.Model(inputs=inputs, outputs=[outputs, logits]) + model = dc.models.KerasModel( + keras_model, + dc.models.losses.SigmoidCrossEntropy(), + output_types=['prediction', 'loss'], + learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + scores = model.evaluate(dataset, [metric]) + assert scores[metric.name] > 0.9 + + # Check that predicting internal layers works. 
+ pred_logits = np.squeeze(model.predict_on_batch(X, outputs=logits)) + pred_from_logits = 1.0 / (1.0 + np.exp(-pred_logits)) + assert np.allclose(prediction, pred_from_logits, atol=1e-4) + + +def test_overfit_sequential_model(): + """Test fitting a KerasModel defined as a sequential model.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + keras_model = tf.keras.Sequential([ + tf.keras.layers.Dense(10, activation='relu'), + tf.keras.layers.Dense(1, activation='sigmoid') + ]) + model = dc.models.KerasModel( + keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + generator = model.default_generator(dataset, pad_batches=False) + scores = model.evaluate_generator(generator, [metric]) + assert scores[metric.name] > 0.9 + + +def test_fit_use_all_losses(): + """Test fitting a KerasModel and getting a loss curve back.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + keras_model = tf.keras.Sequential([ + tf.keras.layers.Dense(10, activation='relu'), + tf.keras.layers.Dense(1, activation='sigmoid') + ]) + model = dc.models.KerasModel( + keras_model, + dc.models.losses.BinaryCrossEntropy(), + learning_rate=0.005, + log_frequency=10) + losses = [] + model.fit(dataset, nb_epoch=1000, all_losses=losses) + # Each epoch is a single step for this model + assert len(losses) == 100 + assert np.count_nonzero(np.array(losses)) == 100 + + +def test_fit_on_batch(): + """Test fitting a KerasModel to individual batches.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 
1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + keras_model = tf.keras.Sequential([ + tf.keras.layers.Dense(10, activation='relu'), + tf.keras.layers.Dense(1, activation='sigmoid') + ]) + model = dc.models.KerasModel( + keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + i = 0 + for X, y, w, ids in dataset.iterbatches(model.batch_size, 500): + i += 1 + model.fit_on_batch(X, y, w, checkpoint=False) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + generator = model.default_generator(dataset, pad_batches=False) + scores = model.evaluate_generator(generator, [metric]) + assert scores[metric.name] > 0.9 + + +def test_checkpointing(): + """Test loading and saving checkpoints with KerasModel.""" + # Create two models using the same model directory. + + keras_model1 = tf.keras.Sequential([tf.keras.layers.Dense(10)]) + keras_model2 = tf.keras.Sequential([tf.keras.layers.Dense(10)]) + model1 = dc.models.KerasModel(keras_model1, dc.models.losses.L2Loss()) + model2 = dc.models.KerasModel( + keras_model2, dc.models.losses.L2Loss(), model_dir=model1.model_dir) + + # Check that they produce different results. + + X = np.random.rand(5, 5) + y1 = model1.predict_on_batch(X) + y2 = model2.predict_on_batch(X) + assert not np.array_equal(y1, y2) + + # Save a checkpoint from the first model and load it into the second one, + # and make sure they now match. + + model1.save_checkpoint() + model2.restore() + y3 = model1.predict_on_batch(X) + y4 = model2.predict_on_batch(X) + assert np.array_equal(y1, y3) + assert np.array_equal(y1, y4) + + +def test_fit_restore(): + """Test specifying restore=True when calling fit().""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + + # Train a model to overfit the dataset. 
+ + keras_model = tf.keras.Sequential([ + tf.keras.layers.Dense(10, activation='relu'), + tf.keras.layers.Dense(1, activation='sigmoid') + ]) + model = dc.models.KerasModel( + keras_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + + # Create an identical model, do a single step of fitting with restore=True, + # and make sure it got restored correctly. + + keras_model2 = tf.keras.Sequential([ + tf.keras.layers.Dense(10, activation='relu'), + tf.keras.layers.Dense(1, activation='sigmoid') + ]) + model2 = dc.models.KerasModel( + keras_model2, + dc.models.losses.BinaryCrossEntropy(), + model_dir=model.model_dir) + model2.fit(dataset, nb_epoch=1, restore=True) + prediction = np.squeeze(model2.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + + +def test_uncertainty(): + """Test estimating uncertainty a KerasModel.""" + n_samples = 30 + n_features = 1 + noise = 0.1 + X = np.random.rand(n_samples, n_features) + y = (10 * X + np.random.normal(scale=noise, size=(n_samples, n_features))) + dataset = dc.data.NumpyDataset(X, y) + + # Build a model that predicts uncertainty. 
+ + inputs = tf.keras.Input(shape=(n_features,)) + switch = tf.keras.Input(shape=tuple()) + hidden = tf.keras.layers.Dense(200, activation='relu')(inputs) + dropout = dc.models.layers.SwitchedDropout(rate=0.1)([hidden, switch]) + output = tf.keras.layers.Dense(n_features)(dropout) + log_var = tf.keras.layers.Dense(n_features)(dropout) + var = tf.keras.layers.Activation(tf.exp)(log_var) + keras_model = tf.keras.Model( + inputs=[inputs, switch], outputs=[output, var, output, log_var]) + + def loss(outputs, labels, weights): + diff = labels[0] - outputs[0] + log_var = outputs[1] + var = tf.exp(log_var) + return tf.reduce_mean(diff * diff / var + log_var) + + class UncertaintyModel(dc.models.KerasModel): + + def default_generator(self, + dataset, + epochs=1, + mode='fit', + deterministic=True, + pad_batches=True): + for epoch in range(epochs): + for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( + batch_size=self.batch_size, + deterministic=deterministic, + pad_batches=pad_batches): + if mode == 'predict': + dropout = np.array(0.0) + else: + dropout = np.array(1.0) + yield ([X_b, dropout], [y_b], [w_b]) + + model = UncertaintyModel( + keras_model, + loss, + output_types=['prediction', 'variance', 'loss', 'loss'], + learning_rate=0.003) + + # Fit the model and see if its predictions are correct. 
+ + model.fit(dataset, nb_epoch=2500) + pred, std = model.predict_uncertainty(dataset) + assert np.mean(np.abs(y - pred)) < 1.0 + assert noise < np.mean(std) < 1.0 + + +def test_saliency_mapping(): + """Test computing a saliency map.""" + n_tasks = 3 + n_features = 5 + keras_model = tf.keras.Sequential([ + tf.keras.layers.Dense(20, activation='tanh'), + tf.keras.layers.Dense(n_tasks) + ]) + model = dc.models.KerasModel(keras_model, dc.models.losses.L2Loss()) + x = np.random.random(n_features) + s = model.compute_saliency(x) + assert s.shape[0] == n_tasks + assert s.shape[1] == n_features + + # Take a tiny step in the direction of s and see if the output changes by + # the expected amount. + + delta = 0.01 + for task in range(n_tasks): + norm = np.sqrt(np.sum(s[task]**2)) + step = 0.5 * delta / norm + pred1 = model.predict_on_batch((x + s[task] * step).reshape( + (1, n_features))).flatten() + pred2 = model.predict_on_batch((x - s[task] * step).reshape( + (1, n_features))).flatten() + assert np.allclose(pred1[task], (pred2 + norm * delta)[task]) + + +def test_saliency_shapes(): + """Test computing saliency maps for multiple outputs with multiple dimensions.""" + inputs = tf.keras.Input(shape=(2, 3)) + flatten = tf.keras.layers.Flatten()(inputs) + output1 = tf.keras.layers.Reshape((4, 1))(tf.keras.layers.Dense(4)(flatten)) + output2 = tf.keras.layers.Reshape((1, 5))(tf.keras.layers.Dense(5)(flatten)) + keras_model = tf.keras.Model(inputs=inputs, outputs=[output1, output2]) + model = dc.models.KerasModel(keras_model, dc.models.losses.L2Loss()) + x = np.random.random((2, 3)) + s = model.compute_saliency(x) + assert len(s) == 2 + assert s[0].shape == (4, 1, 2, 3) + assert s[1].shape == (1, 5, 2, 3) + + +def test_tensorboard(): + """Test logging to Tensorboard.""" + n_data_points = 20 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = [[0.0, 1.0] for x in range(n_data_points)] + dataset = dc.data.NumpyDataset(X, y) + keras_model = tf.keras.Sequential([ 
+ tf.keras.layers.Dense(2, activation='softmax'), + ]) + model = dc.models.KerasModel( + keras_model, + dc.models.losses.CategoricalCrossEntropy(), + tensorboard=True, + log_frequency=1) + model.fit(dataset, nb_epoch=10) + files_in_dir = os.listdir(model.model_dir) + event_file = list(filter(lambda x: x.startswith("events"), files_in_dir)) + assert len(event_file) > 0 + event_file = os.path.join(model.model_dir, event_file[0]) + file_size = os.stat(event_file).st_size + assert file_size > 0 + + +def test_fit_variables(): + """Test training a subset of the variables in a model.""" + + class VarModel(tf.keras.Model): + + def __init__(self, **kwargs): + super(VarModel, self).__init__(**kwargs) + self.var1 = tf.Variable([0.5]) + self.var2 = tf.Variable([0.5]) + + def call(self, inputs, training=False): + return [self.var1, self.var2] + + def loss(outputs, labels, weights): + return (outputs[0] * outputs[1] - labels[0])**2 + + keras_model = VarModel() + model = dc.models.KerasModel(keras_model, loss, learning_rate=0.01) + x = np.ones((1, 1)) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 0.5) + assert np.allclose(vars[1], 0.5) + model.fit_generator([(x, x, x)] * 300) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 1.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, 2 * x, x)] * 300, variables=[keras_model.var1]) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 2.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, x, x)] * 300, variables=[keras_model.var2]) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 2.0) + assert np.allclose(vars[1], 0.5) + + +def test_fit_loss(): + """Test specifying a different loss function when calling fit().""" + + class VarModel(tf.keras.Model): + + def __init__(self, **kwargs): + super(VarModel, self).__init__(**kwargs) + self.var1 = tf.Variable([0.5]) + self.var2 = tf.Variable([0.5]) + + def call(self, inputs, training=False): + return [self.var1, 
self.var2] + + def loss1(outputs, labels, weights): + return (outputs[0] * outputs[1] - labels[0])**2 + + def loss2(outputs, labels, weights): + return (outputs[0] + outputs[1] - labels[0])**2 + + keras_model = VarModel() + model = dc.models.KerasModel(keras_model, loss1, learning_rate=0.01) + x = np.ones((1, 1)) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 0.5) + assert np.allclose(vars[1], 0.5) + model.fit_generator([(x, x, x)] * 300) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 1.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, 3 * x, x)] * 300, loss=loss2) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0] + vars[1], 3.0) diff --git a/deepchem/models/tests/test_layers.py b/deepchem/models/tests/test_layers.py index 01bc88dcee3a4c6ee03d38fc0d02f60fc5ef1ca3..5c7e424351e723ecbea901234e4d29d20d330cf5 100644 --- a/deepchem/models/tests/test_layers.py +++ b/deepchem/models/tests/test_layers.py @@ -5,448 +5,564 @@ import deepchem.models.layers as layers from tensorflow.python.framework import test_util -class TestLayers(test_util.TensorFlowTestCase): - - def test_highway(self): - """Test invoking Highway.""" - width = 5 - batch_size = 10 - input = np.random.rand(batch_size, width).astype(np.float32) - layer = layers.Highway() - result = layer(input) - assert result.shape == (batch_size, width) - assert len(layer.trainable_variables) == 4 - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.Highway() - result2 = layer2(input) - assert not np.allclose(result, result2) - - # But evaluating the first layer again should produce the same result as before. 
- - result3 = layer(input) - assert np.allclose(result, result3) - - def test_combine_mean_std(self): - """Test invoking CombineMeanStd.""" - mean = np.random.rand(5, 3).astype(np.float32) - std = np.random.rand(5, 3).astype(np.float32) - layer = layers.CombineMeanStd(training_only=True, noise_epsilon=0.01) - result1 = layer([mean, std], training=False) - assert np.array_equal(result1, mean) # No noise in test mode - result2 = layer([mean, std], training=True) - assert not np.array_equal(result2, mean) - assert np.allclose(result2, mean, atol=0.1) - - def test_stack(self): - """Test invoking Stack.""" - input1 = np.random.rand(5, 4).astype(np.float32) - input2 = np.random.rand(5, 4).astype(np.float32) - result = layers.Stack()([input1, input2]) - assert result.shape == (5, 2, 4) - assert np.array_equal(input1, result[:, 0, :]) - assert np.array_equal(input2, result[:, 1, :]) - - def test_variable(self): - """Test invoking Variable.""" - value = np.random.rand(5, 4).astype(np.float32) - layer = layers.Variable(value) - layer.build([]) - result = layer.call([]).numpy() - assert np.allclose(result, value) - assert len(layer.trainable_variables) == 1 - - def test_interatomic_l2_distances(self): - """Test invoking InteratomicL2Distances.""" - atoms = 5 - neighbors = 2 - coords = np.random.rand(atoms, 3) - neighbor_list = np.random.randint(0, atoms, size=(atoms, neighbors)) - layer = layers.InteratomicL2Distances(atoms, neighbors, 3) - result = layer([coords, neighbor_list]) - assert result.shape == (atoms, neighbors) - for atom in range(atoms): - for neighbor in range(neighbors): - delta = coords[atom] - coords[neighbor_list[atom, neighbor]] - dist2 = np.dot(delta, delta) - assert np.allclose(dist2, result[atom, neighbor]) - - def test_weave_layer(self): - """Test invoking WeaveLayer.""" - out_channels = 2 - n_atoms = 4 # In CCC and C, there are 4 atoms - raw_smiles = ['CCC', 'C'] - import rdkit - mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles] - featurizer = 
dc.feat.WeaveFeaturizer() - mols = featurizer.featurize(mols) - weave = layers.WeaveLayer() - atom_feat = [] - pair_feat = [] - atom_to_pair = [] - pair_split = [] - start = 0 - n_pair_feat = 14 - for im, mol in enumerate(mols): - n_atoms = mol.get_num_atoms() - # index of pair features - C0, C1 = np.meshgrid(np.arange(n_atoms), np.arange(n_atoms)) - atom_to_pair.append( - np.transpose(np.array([C1.flatten() + start, - C0.flatten() + start]))) - # number of pairs for each atom - pair_split.extend(C1.flatten() + start) - start = start + n_atoms - - # atom features - atom_feat.append(mol.get_atom_features()) - # pair features - pair_feat.append( - np.reshape(mol.get_pair_features(), (n_atoms * n_atoms, n_pair_feat))) - inputs = [ - np.array(np.concatenate(atom_feat, axis=0), dtype=np.float32), - np.concatenate(pair_feat, axis=0), - np.array(pair_split), - np.concatenate(atom_to_pair, axis=0) - ] - # Outputs should be [A, P] - outputs = weave(inputs) - assert len(outputs) == 2 - - def test_graph_conv(self): - """Test invoking GraphConv.""" - out_channels = 2 - n_atoms = 4 # In CCC and C, there are 4 atoms - raw_smiles = ['CCC', 'C'] - import rdkit - mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles] - featurizer = dc.feat.graph_features.ConvMolFeaturizer() - mols = featurizer.featurize(mols) - multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) - atom_features = multi_mol.get_atom_features().astype(np.float32) - degree_slice = multi_mol.deg_slice - membership = multi_mol.membership - deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] - args = [atom_features, degree_slice, membership] + deg_adjs - layer = layers.GraphConv(out_channels) - result = layer(args) - assert result.shape == (n_atoms, out_channels) - num_deg = 2 * layer.max_degree + (1 - layer.min_degree) - assert len(layer.trainable_variables) == 2 * num_deg - - def test_graph_pool(self): - """Test invoking GraphPool.""" - n_atoms = 4 # In CCC and C, there are 4 atoms - raw_smiles = ['CCC', 
'C'] - import rdkit - mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles] - featurizer = dc.feat.graph_features.ConvMolFeaturizer() - mols = featurizer.featurize(mols) - multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) - atom_features = multi_mol.get_atom_features().astype(np.float32) - degree_slice = multi_mol.deg_slice - membership = multi_mol.membership - deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] - args = [atom_features, degree_slice, membership] + deg_adjs - result = layers.GraphPool()(args) - assert result.shape[0] == n_atoms - # TODO What should shape[1] be? It's not documented. - - def test_graph_gather(self): - """Test invoking GraphGather.""" - batch_size = 2 - n_features = 75 - n_atoms = 4 # In CCC and C, there are 4 atoms - raw_smiles = ['CCC', 'C'] - import rdkit - mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles] - featurizer = dc.feat.graph_features.ConvMolFeaturizer() - mols = featurizer.featurize(mols) - multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) - atom_features = multi_mol.get_atom_features().astype(np.float32) - degree_slice = multi_mol.deg_slice - membership = multi_mol.membership - deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] - args = [atom_features, degree_slice, membership] + deg_adjs - result = layers.GraphGather(batch_size)(args) - # TODO(rbharath): Why is it 2*n_features instead of n_features? 
- assert result.shape == (batch_size, 2 * n_features) - - def test_lstm_step(self): - """Test invoking LSTMStep.""" - max_depth = 5 - n_test = 5 - n_feat = 10 - y = np.random.rand(n_test, 2 * n_feat).astype(np.float32) - state_zero = np.random.rand(n_test, n_feat).astype(np.float32) - state_one = np.random.rand(n_test, n_feat).astype(np.float32) - layer = layers.LSTMStep(n_feat, 2 * n_feat) - result = layer([y, state_zero, state_one]) - h_out, h_copy_out, c_out = (result[0], result[1][0], result[1][1]) - assert h_out.shape == (n_test, n_feat) - assert h_copy_out.shape == (n_test, n_feat) - assert c_out.shape == (n_test, n_feat) - assert len(layer.trainable_variables) == 1 - - def test_attn_lstm_embedding(self): - """Test invoking AttnLSTMEmbedding.""" - max_depth = 5 - n_test = 5 - n_support = 11 - n_feat = 10 - test = np.random.rand(n_test, n_feat).astype(np.float32) - support = np.random.rand(n_support, n_feat).astype(np.float32) - layer = layers.AttnLSTMEmbedding(n_test, n_support, n_feat, max_depth) - test_out, support_out = layer([test, support]) - assert test_out.shape == (n_test, n_feat) - assert support_out.shape == (n_support, n_feat) - assert len(layer.trainable_variables) == 4 - - def test_iter_ref_lstm_embedding(self): - """Test invoking IterRefLSTMEmbedding.""" - max_depth = 5 - n_test = 5 - n_support = 11 - n_feat = 10 - test = np.random.rand(n_test, n_feat).astype(np.float32) - support = np.random.rand(n_support, n_feat).astype(np.float32) - layer = layers.IterRefLSTMEmbedding(n_test, n_support, n_feat, max_depth) - test_out, support_out = layer([test, support]) - assert test_out.shape == (n_test, n_feat) - assert support_out.shape == (n_support, n_feat) - assert len(layer.trainable_variables) == 8 - - def test_vina_free_energy(self): - """Test invoking VinaFreeEnergy.""" - n_atoms = 5 - m_nbrs = 1 - ndim = 3 - nbr_cutoff = 1 - start = 0 - stop = 4 - X = np.random.rand(n_atoms, ndim).astype(np.float32) - Z = np.random.randint(0, 2, 
(n_atoms)).astype(np.float32) - layer = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start, - stop) - result = layer([X, Z]) - assert len(layer.trainable_variables) == 6 - assert result.shape == tuple() - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start, - stop) - result2 = layer2([X, Z]) - assert not np.allclose(result, result2) - - # But evaluating the first layer again should produce the same result as before. - - result3 = layer([X, Z]) - assert np.allclose(result, result3) - - def test_weighted_linear_combo(self): - """Test invoking WeightedLinearCombo.""" - input1 = np.random.rand(5, 10).astype(np.float32) - input2 = np.random.rand(5, 10).astype(np.float32) - layer = layers.WeightedLinearCombo() - result = layer([input1, input2]) - assert len(layer.trainable_variables) == 2 - expected = input1 * layer.trainable_variables[0] + input2 * layer.trainable_variables[1] - assert np.allclose(result, expected) - - def test_neighbor_list(self): - """Test invoking NeighborList.""" - N_atoms = 5 - start = 0 - stop = 12 - nbr_cutoff = 3 - ndim = 3 - M_nbrs = 2 - coords = start + np.random.rand(N_atoms, ndim) * (stop - start) - coords = tf.cast(tf.stack(coords), tf.float32) - layer = layers.NeighborList(N_atoms, M_nbrs, ndim, nbr_cutoff, start, stop) - result = layer(coords) - assert result.shape == (N_atoms, M_nbrs) - - def test_atomic_convolution(self): - """Test invoking AtomicConvolution.""" - batch_size = 4 - max_atoms = 5 - max_neighbors = 2 - dimensions = 3 - params = [[5.0, 2.0, 0.5], [10.0, 2.0, 0.5]] - input1 = np.random.rand(batch_size, max_atoms, - dimensions).astype(np.float32) - input2 = np.random.randint( - max_atoms, size=(batch_size, max_atoms, max_neighbors)) - input3 = np.random.randint( - 1, 10, size=(batch_size, max_atoms, max_neighbors)) - layer = layers.AtomicConvolution(radial_params=params) - result = 
layer([input1, input2, input3]) - assert result.shape == (batch_size, max_atoms, len(params)) - assert len(layer.trainable_variables) == 3 - - def test_alpha_share_layer(self): - """Test invoking AlphaShareLayer.""" - batch_size = 10 - length = 6 - input1 = np.random.rand(batch_size, length).astype(np.float32) - input2 = np.random.rand(batch_size, length).astype(np.float32) - layer = layers.AlphaShareLayer() - result = layer([input1, input2]) - assert input1.shape == result[0].shape - assert input2.shape == result[1].shape - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.AlphaShareLayer() - result2 = layer2([input1, input2]) - assert not np.allclose(result[0], result2[0]) - assert not np.allclose(result[1], result2[1]) - - # But evaluating the first layer again should produce the same result as before. - - result3 = layer([input1, input2]) - assert np.allclose(result[0], result3[0]) - assert np.allclose(result[1], result3[1]) - - def test_sluice_loss(self): - """Test invoking SluiceLoss.""" - input1 = np.ones((3, 4)).astype(np.float32) - input2 = np.ones((2, 2)).astype(np.float32) - result = layers.SluiceLoss()([input1, input2]) - assert np.allclose(result, 40.0) - - def test_beta_share(self): - """Test invoking BetaShare.""" - batch_size = 10 - length = 6 - input1 = np.random.rand(batch_size, length).astype(np.float32) - input2 = np.random.rand(batch_size, length).astype(np.float32) - layer = layers.BetaShare() - result = layer([input1, input2]) - assert input1.shape == result.shape - assert input2.shape == result.shape - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.BetaShare() - result2 = layer2([input1, input2]) - assert not np.allclose(result, result2) - - # But evaluating the first layer again should produce the same result as before. 
- - result3 = layer([input1, input2]) - assert np.allclose(result, result3) - - def test_ani_feat(self): - """Test invoking ANIFeat.""" - batch_size = 10 - max_atoms = 5 - input = np.random.rand(batch_size, max_atoms, 4).astype(np.float32) - layer = layers.ANIFeat(max_atoms=max_atoms) - result = layer(input) - # TODO What should the output shape be? It's not documented, and there - # are no other test cases for it. - - def test_graph_embed_pool_layer(self): - """Test invoking GraphEmbedPoolLayer.""" - V = np.random.uniform(size=(10, 100, 50)).astype(np.float32) - adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32) - layer = layers.GraphEmbedPoolLayer(num_vertices=6) - result = layer([V, adjs]) - assert result[0].shape == (10, 6, 50) - assert result[1].shape == (10, 6, 5, 6) - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.GraphEmbedPoolLayer(num_vertices=6) - result2 = layer2([V, adjs]) - assert not np.allclose(result[0], result2[0]) - assert not np.allclose(result[1], result2[1]) - - # But evaluating the first layer again should produce the same result as before. - - result3 = layer([V, adjs]) - assert np.allclose(result[0], result3[0]) - assert np.allclose(result[1], result3[1]) - - def test_graph_cnn(self): - """Test invoking GraphCNN.""" - V = np.random.uniform(size=(10, 100, 50)).astype(np.float32) - adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32) - layer = layers.GraphCNN(num_filters=6) - result = layer([V, adjs]) - assert result.shape == (10, 100, 6) - - # Creating a second layer should produce different results, since it has - # different random weights. - - layer2 = layers.GraphCNN(num_filters=6) - result2 = layer2([V, adjs]) - assert not np.allclose(result, result2) - - # But evaluating the first layer again should produce the same result as before. 
- - result3 = layer([V, adjs]) - assert np.allclose(result, result3) - - def test_DAG_layer(self): - """Test invoking DAGLayer.""" - batch_size = 10 - n_graph_feat = 30 - n_atom_feat = 75 - max_atoms = 50 - layer_sizes = [100] - atom_features = np.random.rand(batch_size, n_atom_feat) - parents = np.random.randint( - 0, max_atoms, size=(batch_size, max_atoms, max_atoms)) - calculation_orders = np.random.randint( - 0, batch_size, size=(batch_size, max_atoms)) - calculation_masks = np.random.randint(0, 2, size=(batch_size, max_atoms)) - # Recall that the DAG layer expects a MultiConvMol as input, - # so the "batch" is a pooled set of atoms from all the - # molecules in the batch, just as it is for the graph conv. - # This means that n_atoms is the batch-size - n_atoms = batch_size - #dropout_switch = False - layer = layers.DAGLayer( - n_graph_feat=n_graph_feat, - n_atom_feat=n_atom_feat, - max_atoms=max_atoms, - layer_sizes=layer_sizes) - outputs = layer([ - atom_features, - parents, - calculation_orders, - calculation_masks, - n_atoms, - #dropout_switch - ]) - ## TODO(rbharath): What is the shape of outputs supposed to be? - ## I'm getting (7, 30) here. Where does 7 come from?? - - def test_DAG_gather(self): - """Test invoking DAGGather.""" - # TODO(rbharath): We need more documentation about why - # these numbers work. 
- batch_size = 10 - n_graph_feat = 30 - n_atom_feat = 30 - n_outputs = 75 - max_atoms = 50 - layer_sizes = [100] - layer = layers.DAGGather( - n_graph_feat=n_graph_feat, - n_outputs=n_outputs, - max_atoms=max_atoms, - layer_sizes=layer_sizes) - atom_features = np.random.rand(batch_size, n_atom_feat) - membership = np.sort(np.random.randint(0, batch_size, size=(batch_size))) - outputs = layer([atom_features, membership]) +def test_cosine_dist(): + """Test invoking cosine_dist.""" + x = tf.ones((5, 4), dtype=tf.dtypes.float32, name=None) + y_same = tf.ones((5, 4), dtype=tf.dtypes.float32, name=None) + # x and y are the same tensor (equivalent at every element) + # the pairwise inner product of the rows in x and y will always be 1 + # the output tensor will be of shape (5,5) + cos_sim_same = layers.cosine_dist(x, y_same) + diff = cos_sim_same - tf.ones((5, 5), dtype=tf.dtypes.float32, name=None) + assert tf.reduce_sum(diff) == 0 # True + + identity_tensor = tf.eye( + 512, dtype=tf.dtypes.float32) # identity matrix of shape (512,512) + x1 = identity_tensor[0:256, :] + x2 = identity_tensor[256:512, :] + # each row in x1 is orthogonal to each row in x2 + # the pairwise inner product of the rows in x and y will always be 0 + # the output tensor will be of shape (256,256) + cos_sim_orth = layers.cosine_dist(x1, x2) + assert tf.reduce_sum(cos_sim_orth) == 0 # True + assert all([cos_sim_orth.shape[dim] == 256 for dim in range(2)]) # True + + +def test_highway(): + """Test invoking Highway.""" + width = 5 + batch_size = 10 + input = np.random.rand(batch_size, width).astype(np.float32) + layer = layers.Highway() + result = layer(input) + assert result.shape == (batch_size, width) + assert len(layer.trainable_variables) == 4 + + # Creating a second layer should produce different results, since it has + # different random weights. 
+ + layer2 = layers.Highway() + result2 = layer2(input) + assert not np.allclose(result, result2) + + # But evaluating the first layer again should produce the same result as before. + + result3 = layer(input) + assert np.allclose(result, result3) + + +def test_combine_mean_std(): + """Test invoking CombineMeanStd.""" + mean = np.random.rand(5, 3).astype(np.float32) + std = np.random.rand(5, 3).astype(np.float32) + layer = layers.CombineMeanStd(training_only=True, noise_epsilon=0.01) + result1 = layer([mean, std], training=False) + assert np.array_equal(result1, mean) # No noise in test mode + result2 = layer([mean, std], training=True) + assert not np.array_equal(result2, mean) + assert np.allclose(result2, mean, atol=0.1) + + +def test_stack(): + """Test invoking Stack.""" + input1 = np.random.rand(5, 4).astype(np.float32) + input2 = np.random.rand(5, 4).astype(np.float32) + result = layers.Stack()([input1, input2]) + assert result.shape == (5, 2, 4) + assert np.array_equal(input1, result[:, 0, :]) + assert np.array_equal(input2, result[:, 1, :]) + + +def test_variable(): + """Test invoking Variable.""" + value = np.random.rand(5, 4).astype(np.float32) + layer = layers.Variable(value) + layer.build([]) + result = layer.call([]).numpy() + assert np.allclose(result, value) + assert len(layer.trainable_variables) == 1 + + +def test_interatomic_l2_distances(): + """Test invoking InteratomicL2Distances.""" + atoms = 5 + neighbors = 2 + coords = np.random.rand(atoms, 3) + neighbor_list = np.random.randint(0, atoms, size=(atoms, neighbors)) + layer = layers.InteratomicL2Distances(atoms, neighbors, 3) + result = layer([coords, neighbor_list]) + assert result.shape == (atoms, neighbors) + for atom in range(atoms): + for neighbor in range(neighbors): + delta = coords[atom] - coords[neighbor_list[atom, neighbor]] + dist2 = np.dot(delta, delta) + assert np.allclose(dist2, result[atom, neighbor]) + + +def test_weave_layer(): + """Test invoking WeaveLayer.""" + out_channels = 
2 + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + from rdkit import Chem + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.WeaveFeaturizer() + mols = featurizer.featurize(mols) + weave = layers.WeaveLayer( + init=tf.keras.initializers.TruncatedNormal(stddev=0.03)) + atom_feat = [] + pair_feat = [] + atom_to_pair = [] + pair_split = [] + start = 0 + n_pair_feat = 14 + for im, mol in enumerate(mols): + n_atoms = mol.get_num_atoms() + # index of pair features + C0, C1 = np.meshgrid(np.arange(n_atoms), np.arange(n_atoms)) + atom_to_pair.append( + np.transpose(np.array([C1.flatten() + start, + C0.flatten() + start]))) + # number of pairs for each atom + pair_split.extend(C1.flatten() + start) + start = start + n_atoms + + # atom features + atom_feat.append(mol.get_atom_features()) + # pair features + pair_feat.append( + np.reshape(mol.get_pair_features(), (n_atoms * n_atoms, n_pair_feat))) + inputs = [ + np.array(np.concatenate(atom_feat, axis=0), dtype=np.float32), + np.concatenate(pair_feat, axis=0), + np.array(pair_split), + np.concatenate(atom_to_pair, axis=0) + ] + # Outputs should be [A, P] + outputs = weave(inputs) + assert len(outputs) == 2 + + +def test_weave_gather(): + """Test invoking WeaveGather.""" + out_channels = 2 + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + from rdkit import Chem + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.WeaveFeaturizer() + mols = featurizer.featurize(mols) + atom_feat = [] + atom_split = [] + for im, mol in enumerate(mols): + n_atoms = mol.get_num_atoms() + atom_split.extend([im] * n_atoms) + + # atom features + atom_feat.append(mol.get_atom_features()) + inputs = [ + np.array(np.concatenate(atom_feat, axis=0), dtype=np.float32), + np.array(atom_split) + ] + # Try without compression + gather = layers.WeaveGather(batch_size=2, n_input=75, gaussian_expand=True) + # Outputs should be [mol1_vec, mol2_vec) + outputs = 
gather(inputs) + assert len(outputs) == 2 + assert np.array(outputs[0]).shape == (11 * 75,) + assert np.array(outputs[1]).shape == (11 * 75,) + + # Try with compression + gather = layers.WeaveGather( + batch_size=2, + n_input=75, + gaussian_expand=True, + compress_post_gaussian_expansion=True) + # Outputs should be [mol1_vec, mol2_vec) + outputs = gather(inputs) + assert len(outputs) == 2 + assert np.array(outputs[0]).shape == (75,) + assert np.array(outputs[1]).shape == (75,) + + +def test_weave_gather_gaussian_histogram(): + """Test Gaussian Histograms.""" + import tensorflow as tf + from rdkit import Chem + out_channels = 2 + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.WeaveFeaturizer() + mols = featurizer.featurize(mols) + gather = layers.WeaveGather(batch_size=2, n_input=75) + atom_feat = [] + atom_split = [] + for im, mol in enumerate(mols): + n_atoms = mol.get_num_atoms() + atom_split.extend([im] * n_atoms) + + # atom features + atom_feat.append(mol.get_atom_features()) + inputs = [ + np.array(np.concatenate(atom_feat, axis=0), dtype=np.float32), + np.array(atom_split) + ] + #per_mol_features = tf.math.segment_sum(inputs[0], inputs[1]) + outputs = gather.gaussian_histogram(inputs[0]) + # Gaussian histograms expands into 11 Gaussian buckets. 
+ assert np.array(outputs).shape == ( + 4, + 11 * 75, + ) + #assert np.array(outputs[1]).shape == (11 * 75,) + + +def test_graph_conv(): + """Test invoking GraphConv.""" + out_channels = 2 + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + from rdkit import Chem + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.graph_features.ConvMolFeaturizer() + mols = featurizer.featurize(mols) + multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) + atom_features = multi_mol.get_atom_features().astype(np.float32) + degree_slice = multi_mol.deg_slice + membership = multi_mol.membership + deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] + args = [atom_features, degree_slice, membership] + deg_adjs + layer = layers.GraphConv(out_channels) + result = layer(args) + assert result.shape == (n_atoms, out_channels) + num_deg = 2 * layer.max_degree + (1 - layer.min_degree) + assert len(layer.trainable_variables) == 2 * num_deg + + +def test_graph_pool(): + """Test invoking GraphPool.""" + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + from rdkit import Chem + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.graph_features.ConvMolFeaturizer() + mols = featurizer.featurize(mols) + multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) + atom_features = multi_mol.get_atom_features().astype(np.float32) + degree_slice = multi_mol.deg_slice + membership = multi_mol.membership + deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] + args = [atom_features, degree_slice, membership] + deg_adjs + result = layers.GraphPool()(args) + assert result.shape[0] == n_atoms + # TODO What should shape[1] be? It's not documented. 
+ + +def test_graph_gather(): + """Test invoking GraphGather.""" + batch_size = 2 + n_features = 75 + n_atoms = 4 # In CCC and C, there are 4 atoms + raw_smiles = ['CCC', 'C'] + from rdkit import Chem + mols = [Chem.MolFromSmiles(s) for s in raw_smiles] + featurizer = dc.feat.graph_features.ConvMolFeaturizer() + mols = featurizer.featurize(mols) + multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols) + atom_features = multi_mol.get_atom_features().astype(np.float32) + degree_slice = multi_mol.deg_slice + membership = multi_mol.membership + deg_adjs = multi_mol.get_deg_adjacency_lists()[1:] + args = [atom_features, degree_slice, membership] + deg_adjs + result = layers.GraphGather(batch_size)(args) + # TODO(rbharath): Why is it 2*n_features instead of n_features? + assert result.shape == (batch_size, 2 * n_features) + + +def test_lstm_step(): + """Test invoking LSTMStep.""" + max_depth = 5 + n_test = 5 + n_feat = 10 + y = np.random.rand(n_test, 2 * n_feat).astype(np.float32) + state_zero = np.random.rand(n_test, n_feat).astype(np.float32) + state_one = np.random.rand(n_test, n_feat).astype(np.float32) + layer = layers.LSTMStep(n_feat, 2 * n_feat) + result = layer([y, state_zero, state_one]) + h_out, h_copy_out, c_out = (result[0], result[1][0], result[1][1]) + assert h_out.shape == (n_test, n_feat) + assert h_copy_out.shape == (n_test, n_feat) + assert c_out.shape == (n_test, n_feat) + assert len(layer.trainable_variables) == 1 + + +def test_attn_lstm_embedding(): + """Test invoking AttnLSTMEmbedding.""" + max_depth = 5 + n_test = 5 + n_support = 11 + n_feat = 10 + test = np.random.rand(n_test, n_feat).astype(np.float32) + support = np.random.rand(n_support, n_feat).astype(np.float32) + layer = layers.AttnLSTMEmbedding(n_test, n_support, n_feat, max_depth) + test_out, support_out = layer([test, support]) + assert test_out.shape == (n_test, n_feat) + assert support_out.shape == (n_support, n_feat) + assert len(layer.trainable_variables) == 4 + + +def 
test_iter_ref_lstm_embedding(): + """Test invoking IterRefLSTMEmbedding.""" + max_depth = 5 + n_test = 5 + n_support = 11 + n_feat = 10 + test = np.random.rand(n_test, n_feat).astype(np.float32) + support = np.random.rand(n_support, n_feat).astype(np.float32) + layer = layers.IterRefLSTMEmbedding(n_test, n_support, n_feat, max_depth) + test_out, support_out = layer([test, support]) + assert test_out.shape == (n_test, n_feat) + assert support_out.shape == (n_support, n_feat) + assert len(layer.trainable_variables) == 8 + + +def test_vina_free_energy(): + """Test invoking VinaFreeEnergy.""" + n_atoms = 5 + m_nbrs = 1 + ndim = 3 + nbr_cutoff = 1 + start = 0 + stop = 4 + X = np.random.rand(n_atoms, ndim).astype(np.float32) + Z = np.random.randint(0, 2, (n_atoms)).astype(np.float32) + layer = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start, stop) + result = layer([X, Z]) + assert len(layer.trainable_variables) == 6 + assert result.shape == tuple() + + # Creating a second layer should produce different results, since it has + # different random weights. + + layer2 = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start, stop) + result2 = layer2([X, Z]) + assert not np.allclose(result, result2) + + # But evaluating the first layer again should produce the same result as before. 
+ + result3 = layer([X, Z]) + assert np.allclose(result, result3) + + +def test_weighted_linear_combo(): + """Test invoking WeightedLinearCombo.""" + input1 = np.random.rand(5, 10).astype(np.float32) + input2 = np.random.rand(5, 10).astype(np.float32) + layer = layers.WeightedLinearCombo() + result = layer([input1, input2]) + assert len(layer.trainable_variables) == 2 + expected = input1 * layer.trainable_variables[0] + input2 * layer.trainable_variables[1] + assert np.allclose(result, expected) + + +def test_neighbor_list(): + """Test invoking NeighborList.""" + N_atoms = 5 + start = 0 + stop = 12 + nbr_cutoff = 3 + ndim = 3 + M_nbrs = 2 + coords = start + np.random.rand(N_atoms, ndim) * (stop - start) + coords = tf.cast(tf.stack(coords), tf.float32) + layer = layers.NeighborList(N_atoms, M_nbrs, ndim, nbr_cutoff, start, stop) + result = layer(coords) + assert result.shape == (N_atoms, M_nbrs) + + +def test_atomic_convolution(): + """Test invoking AtomicConvolution.""" + batch_size = 4 + max_atoms = 5 + max_neighbors = 2 + dimensions = 3 + params = [[5.0, 2.0, 0.5], [10.0, 2.0, 0.5]] + input1 = np.random.rand(batch_size, max_atoms, dimensions).astype(np.float32) + input2 = np.random.randint( + max_atoms, size=(batch_size, max_atoms, max_neighbors)) + input3 = np.random.randint(1, 10, size=(batch_size, max_atoms, max_neighbors)) + layer = layers.AtomicConvolution(radial_params=params) + result = layer([input1, input2, input3]) + assert result.shape == (batch_size, max_atoms, len(params)) + assert len(layer.trainable_variables) == 3 + + +def test_alpha_share_layer(): + """Test invoking AlphaShareLayer.""" + batch_size = 10 + length = 6 + input1 = np.random.rand(batch_size, length).astype(np.float32) + input2 = np.random.rand(batch_size, length).astype(np.float32) + layer = layers.AlphaShareLayer() + result = layer([input1, input2]) + assert input1.shape == result[0].shape + assert input2.shape == result[1].shape + + # Creating a second layer should produce different 
results, since it has + # different random weights. + + layer2 = layers.AlphaShareLayer() + result2 = layer2([input1, input2]) + assert not np.allclose(result[0], result2[0]) + assert not np.allclose(result[1], result2[1]) + + # But evaluating the first layer again should produce the same result as before. + + result3 = layer([input1, input2]) + assert np.allclose(result[0], result3[0]) + assert np.allclose(result[1], result3[1]) + + +def test_sluice_loss(): + """Test invoking SluiceLoss.""" + input1 = np.ones((3, 4)).astype(np.float32) + input2 = np.ones((2, 2)).astype(np.float32) + result = layers.SluiceLoss()([input1, input2]) + assert np.allclose(result, 40.0) + + +def test_beta_share(): + """Test invoking BetaShare.""" + batch_size = 10 + length = 6 + input1 = np.random.rand(batch_size, length).astype(np.float32) + input2 = np.random.rand(batch_size, length).astype(np.float32) + layer = layers.BetaShare() + result = layer([input1, input2]) + assert input1.shape == result.shape + assert input2.shape == result.shape + + # Creating a second layer should produce different results, since it has + # different random weights. + + layer2 = layers.BetaShare() + result2 = layer2([input1, input2]) + assert not np.allclose(result, result2) + + # But evaluating the first layer again should produce the same result as before. + + result3 = layer([input1, input2]) + assert np.allclose(result, result3) + + +def test_ani_feat(): + """Test invoking ANIFeat.""" + batch_size = 10 + max_atoms = 5 + input = np.random.rand(batch_size, max_atoms, 4).astype(np.float32) + layer = layers.ANIFeat(max_atoms=max_atoms) + result = layer(input) + # TODO What should the output shape be? It's not documented, and there + # are no other test cases for it. 
+ + +def test_graph_embed_pool_layer(): + """Test invoking GraphEmbedPoolLayer.""" + V = np.random.uniform(size=(10, 100, 50)).astype(np.float32) + adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32) + layer = layers.GraphEmbedPoolLayer(num_vertices=6) + result = layer([V, adjs]) + assert result[0].shape == (10, 6, 50) + assert result[1].shape == (10, 6, 5, 6) + + # Creating a second layer should produce different results, since it has + # different random weights. + + layer2 = layers.GraphEmbedPoolLayer(num_vertices=6) + result2 = layer2([V, adjs]) + assert not np.allclose(result[0], result2[0]) + assert not np.allclose(result[1], result2[1]) + + # But evaluating the first layer again should produce the same result as before. + + result3 = layer([V, adjs]) + assert np.allclose(result[0], result3[0]) + assert np.allclose(result[1], result3[1]) + + +def test_graph_cnn(): + """Test invoking GraphCNN.""" + V = np.random.uniform(size=(10, 100, 50)).astype(np.float32) + adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32) + layer = layers.GraphCNN(num_filters=6) + result = layer([V, adjs]) + assert result.shape == (10, 100, 6) + + # Creating a second layer should produce different results, since it has + # different random weights. + + layer2 = layers.GraphCNN(num_filters=6) + result2 = layer2([V, adjs]) + assert not np.allclose(result, result2) + + # But evaluating the first layer again should produce the same result as before. 
+ + result3 = layer([V, adjs]) + assert np.allclose(result, result3) + + +def test_DAG_layer(): + """Test invoking DAGLayer.""" + batch_size = 10 + n_graph_feat = 30 + n_atom_feat = 75 + max_atoms = 50 + layer_sizes = [100] + atom_features = np.random.rand(batch_size, n_atom_feat) + parents = np.random.randint( + 0, max_atoms, size=(batch_size, max_atoms, max_atoms)) + calculation_orders = np.random.randint( + 0, batch_size, size=(batch_size, max_atoms)) + calculation_masks = np.random.randint(0, 2, size=(batch_size, max_atoms)) + # Recall that the DAG layer expects a MultiConvMol as input, + # so the "batch" is a pooled set of atoms from all the + # molecules in the batch, just as it is for the graph conv. + # This means that n_atoms is the batch-size + n_atoms = batch_size + #dropout_switch = False + layer = layers.DAGLayer( + n_graph_feat=n_graph_feat, + n_atom_feat=n_atom_feat, + max_atoms=max_atoms, + layer_sizes=layer_sizes) + outputs = layer([ + atom_features, + parents, + calculation_orders, + calculation_masks, + n_atoms, + #dropout_switch + ]) + ## TODO(rbharath): What is the shape of outputs supposed to be? + ## I'm getting (7, 30) here. Where does 7 come from?? + + +def test_DAG_gather(): + """Test invoking DAGGather.""" + # TODO(rbharath): We need more documentation about why + # these numbers work. 
+ batch_size = 10 + n_graph_feat = 30 + n_atom_feat = 30 + n_outputs = 75 + max_atoms = 50 + layer_sizes = [100] + layer = layers.DAGGather( + n_graph_feat=n_graph_feat, + n_outputs=n_outputs, + max_atoms=max_atoms, + layer_sizes=layer_sizes) + atom_features = np.random.rand(batch_size, n_atom_feat) + membership = np.sort(np.random.randint(0, batch_size, size=(batch_size))) + outputs = layer([atom_features, membership]) diff --git a/deepchem/models/tests/test_layers_from_config.py b/deepchem/models/tests/test_layers_from_config.py index 0c8c33609bbf8093001d393fc38d2160853b6323..b81a1fbf3a7cd54b226730273968a144f741561e 100644 --- a/deepchem/models/tests/test_layers_from_config.py +++ b/deepchem/models/tests/test_layers_from_config.py @@ -6,475 +6,504 @@ import tensorflow as tf from tensorflow.python.eager import context -class TestLayer(unittest.TestCase): - - def test_interatomic_l2_distance(self): - N_atoms = 10 - M_nbrs = 15 - ndim = 20 - - layer = dc.models.layers.InteratomicL2Distances( - N_atoms=N_atoms, M_nbrs=M_nbrs, ndim=ndim) - config = layer.get_config() - layer_copied = dc.models.layers.InteratomicL2Distances.from_config(config) - - assert layer_copied.N_atoms == layer.N_atoms - assert layer_copied.M_nbrs == layer.M_nbrs - assert layer_copied.ndim == layer.ndim - - def test_graph_conv(self): - out_channel = 10 - min_deg = 0, - max_deg = 10, - activation_fn = 'relu' - - layer = dc.models.layers.GraphConv( - out_channel=out_channel, - min_deg=min_deg, - max_deg=max_deg, - activation_fn=activation_fn) - config = layer.get_config() - layer_copied = dc.models.layers.GraphConv.from_config(config) - - assert layer_copied.out_channel == layer.out_channel - assert layer_copied.activation_fn == layer.activation_fn - assert layer_copied.max_degree == layer.max_degree - assert layer_copied.min_degree == layer.min_degree - - def test_graph_gather(self): - batch_size = 10 - activation_fn = 'relu' - - layer_copied = dc.models.layers.GraphGather( - 
batch_size=batch_size, activation_fn=activation_fn) - config = layer_copied.get_config() - layer_copied = dc.models.layers.GraphGather.from_config(config) - - assert layer_copied.batch_size == layer_copied.batch_size - assert layer_copied.activation_fn == layer_copied.activation_fn - - def test_graph_pool(self): - min_degree = 0 - max_degree = 10 - - layer_copied = dc.models.layers.GraphPool( - min_degree=min_degree, max_degree=max_degree) - config = layer_copied.get_config() - layer_copied = dc.models.layers.GraphPool.from_config(config) - - assert layer_copied.max_degree == layer_copied.max_degree - assert layer_copied.min_degree == layer_copied.min_degree - - def test_lstmstep(self): - output_dim = 100 - input_dim = 50 - init_fn = 'glorot_uniform' - inner_init_fn = 'orthogonal' - activation_fn = 'tanh' - inner_activation_fn = 'hard_sigmoid' - - layer = dc.models.layers.LSTMStep(output_dim, input_dim, init_fn, - inner_init_fn, activation_fn, - inner_activation_fn) - config = layer.get_config() - layer_copied = dc.models.layers.LSTMStep.from_config(config) - - assert layer_copied.output_dim == layer.output_dim - assert layer_copied.input_dim == layer.input_dim - assert layer_copied.init == layer.init - assert layer_copied.inner_init == layer.inner_init - assert layer_copied.activation == layer.activation - assert layer_copied.inner_activation == layer.inner_activation - - def test_attn_lstm_embedding(self): - n_test = 10 - n_support = 100 - n_feat = 20 - max_depth = 3 - - layer = dc.models.layers.AttnLSTMEmbedding(n_test, n_support, n_feat, - max_depth) - config = layer.get_config() - layer_copied = dc.models.layers.AttnLSTMEmbedding.from_config(config) - - assert layer_copied.n_test == layer.n_test - assert layer_copied.n_support == layer.n_support - assert layer_copied.n_feat == layer.n_feat - assert layer_copied.max_depth == layer.max_depth - - def test_iterref_lstm_embedding(self): - n_test = 10 - n_support = 100 - n_feat = 20 - max_depth = 3 - - layer = 
dc.models.layers.IterRefLSTMEmbedding(n_test, n_support, n_feat, - max_depth) - config = layer.get_config() - layer_copied = dc.models.layers.IterRefLSTMEmbedding.from_config(config) - - assert layer_copied.n_test == layer.n_test - assert layer_copied.n_support == layer.n_support - assert layer_copied.n_feat == layer.n_feat - assert layer_copied.max_depth == layer.max_depth - - def test_switched_dropout(self): - rate = 0.1 - layer = dc.models.layers.SwitchedDropout(rate=rate) - config = layer.get_config() - layer_copied = dc.models.layers.SwitchedDropout.from_config(config) - - assert layer_copied.rate == layer.rate - - def test_weighted_linearcombo(self): - std = 0.1 - layer = dc.models.layers.WeightedLinearCombo(std=std) - - config = layer.get_config() - layer_copied = dc.models.layers.WeightedLinearCombo.from_config(config) - - assert layer_copied.std == layer.std - - def test_combine_mean_std(self): - training_only = True - noise_epsilon = 0.001 - - layer = dc.models.layers.CombineMeanStd(training_only, noise_epsilon) - config = layer.get_config() - layer_copied = dc.models.layers.CombineMeanStd.from_config(config) - - assert layer_copied.training_only == layer.training_only - assert layer_copied.noise_epsilon == layer.noise_epsilon - - def test_stack(self): - axis = 2 - layer = dc.models.layers.Stack(axis=axis) - config = layer.get_config() - layer_copied = dc.models.layers.Stack.from_config(config) - - assert layer_copied.axis == layer.axis - - def test_variable(self): - initial_value = 10 - layer = dc.models.layers.Variable(initial_value) - config = layer.get_config() - layer_copied = dc.models.layers.Variable.from_config(config) - - assert layer_copied.initial_value == layer.initial_value - - def test_vina_free_energy(self): - N_atoms = 10 - M_nbrs = 15 - ndim = 20 - nbr_cutoff = 5 - start = 1 - stop = 7 - stddev = 0.3 - Nrot = 1 - - layer = dc.models.layers.VinaFreeEnergy(N_atoms, M_nbrs, ndim, nbr_cutoff, - start, stop, stddev, Nrot) - config = 
layer.get_config() - layer_copied = dc.models.layers.VinaFreeEnergy.from_config(config) - - assert layer_copied.N_atoms == layer.N_atoms - assert layer_copied.M_nbrs == layer.M_nbrs - assert layer_copied.ndim == layer.ndim - assert layer_copied.nbr_cutoff == layer.nbr_cutoff - assert layer_copied.start == layer.start - assert layer_copied.stop == layer.stop - assert layer_copied.stddev == layer.stddev - assert layer_copied.Nrot == layer_copied.Nrot - - def test_neighbor_list(self): - N_atoms = 10 - M_nbrs = 15 - ndim = 20 - nbr_cutoff = 5 - start = 1 - stop = 7 - - layer = dc.models.layers.NeighborList(N_atoms, M_nbrs, ndim, nbr_cutoff, - start, stop) - config = layer.get_config() - layer_copied = dc.models.layers.VinaFreeEnergy.from_config(config) - - assert layer_copied.N_atoms == layer.N_atoms - assert layer_copied.M_nbrs == layer.M_nbrs - assert layer_copied.ndim == layer.ndim - assert layer_copied.nbr_cutoff == layer.nbr_cutoff - assert layer_copied.start == layer.start - assert layer_copied.stop == layer.stop - - def test_atomic_convolution(self): - atom_types = None - radial_params = list() - boxsize = None - - layer = dc.models.layers.AtomicConvolution(atom_types, radial_params, - boxsize) - config = layer.get_config() - layer_copied = dc.models.layers.AtomicConvolution.from_config(config) - - assert layer_copied.atom_types == layer.atom_types - assert layer_copied.radial_params == layer.radial_params - assert layer_copied.boxsize == layer.boxsize - - def test_ani_feat(self): - max_atoms = 23 - radial_cutoff = 4.6 - angular_cutoff = 3.1 - radial_length = 32 - angular_length = 8 - atom_cases = [1, 6, 7, 8, 16] - atomic_number_differentiated = True - coordinates_in_bohr = True - - layer = dc.models.layers.ANIFeat( - max_atoms, radial_cutoff, angular_cutoff, radial_length, angular_length, - atom_cases, atomic_number_differentiated, coordinates_in_bohr) - config = layer.get_config() - layer_copied = dc.models.layers.ANIFeat.from_config(config) - - assert 
layer_copied.max_atoms == layer.max_atoms - assert layer_copied.radial_cutoff == layer.radial_cutoff - assert layer_copied.angular_cutoff == layer.angular_cutoff - assert layer_copied.radial_length == layer.radial_length - assert layer_copied.angular_length == layer.angular_length - assert layer_copied.atom_cases == layer.atom_cases - assert layer_copied.atomic_number_differentiated == layer.atomic_number_differentiated - assert layer_copied.coordinates_in_bohr == layer.coordinates_in_bohr - - def test_graph_embed_pool(self): - num_vertices = 100 - layer = dc.models.layers.GraphEmbedPoolLayer(num_vertices) - config = layer.get_config() - layer_copied = dc.models.layers.GraphEmbedPoolLayer.from_config(config) - - assert layer_copied.num_vertices == layer.num_vertices - - def test_graph_cnn(self): - num_filters = 20 - layer = dc.models.layers.GraphCNN(num_filters) - config = layer.get_config() - layer_copied = dc.models.layers.GraphCNN.from_config(config) - - assert layer_copied.num_filters == layer.num_filters - - def test_highway(self): - activation_fn = 'relu' - biases_initializer = 'zeros' - weights_initializer = None - - layer = dc.models.layers.Highway(activation_fn, biases_initializer, - weights_initializer) - config = layer.get_config() - layer_copied = dc.models.layers.Highway.from_config(config) - - assert layer_copied.activation_fn == layer.activation_fn - assert layer_copied.biases_initializer == layer.biases_initializer - assert layer_copied.weights_initializer == layer.weights_initializer - - def test_weave(self): - n_atom_input_feat = 75 - n_pair_input_feat = 14 - n_atom_output_feat = 50 - n_pair_output_feat = 50 - n_hidden_AA = 50 - n_hidden_PA = 50 - n_hidden_AP = 50 - n_hidden_PP = 50 - update_pair = True - init = 'glorot_uniform' - activation = 'relu' - - layer = dc.models.layers.WeaveLayer( - n_atom_input_feat, n_pair_input_feat, n_atom_output_feat, - n_pair_output_feat, n_hidden_AA, n_hidden_PA, n_hidden_AP, n_hidden_PP, - update_pair, init, 
activation) - config = layer.get_config() - layer_copied = dc.models.layers.WeaveLayer.from_config(config) - - assert layer_copied.n_atom_input_feat == layer.n_atom_input_feat - assert layer_copied.n_pair_input_feat == layer.n_pair_input_feat - assert layer_copied.n_atom_output_feat == layer.n_atom_output_feat - assert layer_copied.n_pair_output_feat == layer.n_pair_output_feat - assert layer_copied.n_hidden_AA == layer.n_hidden_AA - assert layer_copied.n_hidden_PA == layer.n_hidden_PA - assert layer_copied.n_hidden_AP == layer.n_hidden_AP - assert layer_copied.n_hidden_PP == layer.n_hidden_PP - assert layer_copied.update_pair == layer.update_pair - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - - def test_weave_gather(self): - batch_size = 32 - n_input = 128 - gaussian_expand = False - init = 'glorot_uniform' - activation = 'tanh' - epsilon = 1e-3 - momentum = 0.99 - - layer = dc.models.layers.WeaveGather(batch_size, n_input, gaussian_expand, - init, activation, epsilon, momentum) - config = layer.get_config() - layer_copied = dc.models.layers.WeaveGather.from_config(config) - - assert layer_copied.batch_size == layer.batch_size - assert layer_copied.n_input == layer.n_input - assert layer_copied.gaussian_expand == layer.gaussian_expand - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - assert layer_copied.epsilon == layer.epsilon - assert layer_copied.momentum == layer.momentum - - def test_dtnn_embedding(self): - n_embedding = 30 - periodic_table_length = 30 - init = 'glorot_uniform' - - layer = dc.models.layers.DTNNEmbedding(n_embedding, periodic_table_length, - init) - config = layer.get_config() - layer_copied = dc.models.layers.DTNNEmbedding.from_config(config) - - assert layer_copied.n_embedding == layer.n_embedding - assert layer_copied.periodic_table_length == layer.periodic_table_length - assert layer_copied.init == layer.init - - def test_dtnn_step(self): - 
n_embedding = 30 - n_distance = 100 - n_hidden = 60 - init = 'glorot_uniform' - activation = 'tanh' - - layer = dc.models.layers.DTNNStep(n_embedding, n_distance, n_hidden, init, - activation) - config = layer.get_config() - layer_copied = dc.models.layers.DTNNStep.from_config(config) - - assert layer_copied.n_embedding == layer.n_embedding - assert layer_copied.n_distance == layer.n_distance - assert layer_copied.n_hidden == layer.n_hidden - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - - def test_dtnn_gather(self): - n_embedding = 30 - n_outputs = 100 - layer_sizes = [100] - output_activation = True - init = 'glorot_uniform' - activation = 'tanh' - - layer = dc.models.layers.DTNNGather(n_embedding, n_outputs, layer_sizes, - output_activation, init, activation) - config = layer.get_config() - layer_copied = dc.models.layers.DTNNGather.from_config(config) - - assert layer_copied.n_embedding == layer.n_embedding - assert layer_copied.n_outputs == layer.n_outputs - assert layer_copied.layer_sizes == layer.layer_sizes - assert layer_copied.output_activation == layer.output_activation - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - - def test_dag(self): - n_graph_feat = 30 - n_atom_feat = 75 - max_atoms = 50 - layer_sizes = [100] - init = 'glorot_uniform' - activation = 'relu' - dropout = None - batch_size = 64 - - layer = dc.models.layers.DAGLayer(n_graph_feat, n_atom_feat, max_atoms, - layer_sizes, init, activation, dropout, - batch_size) - config = layer.get_config() - layer_copied = dc.models.layers.DAGLayer.from_config(config) - - assert layer_copied.n_graph_feat == layer.n_graph_feat - assert layer_copied.n_atom_feat == layer.n_atom_feat - assert layer_copied.max_atoms == layer.max_atoms - assert layer_copied.layer_sizes == layer.layer_sizes - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - assert layer_copied.dropout == 
layer.dropout - assert layer_copied.batch_size == layer.batch_size - - def test_dag_gather(self): - n_graph_feat = 30 - n_outputs = 30 - max_atoms = 50 - layer_sizes = [100] - init = 'glorot_uniform' - activation = 'relu' - dropout = None - - layer = dc.models.layers.DAGGather(n_graph_feat, n_outputs, max_atoms, - layer_sizes, init, activation, dropout) - config = layer.get_config() - layer_copied = dc.models.layers.DAGGather.from_config(config) - - assert layer_copied.n_graph_feat == layer.n_graph_feat - assert layer_copied.n_outputs == layer.n_outputs - assert layer_copied.max_atoms == layer.max_atoms - assert layer_copied.layer_sizes == layer.layer_sizes - assert layer_copied.init == layer.init - assert layer_copied.activation == layer.activation - assert layer_copied.dropout == layer.dropout - - def test_message_passing(self): - T = 20 - message_fn = 'enn' - update_fn = 'gru' - n_hidden = 100 - layer = dc.models.layers.MessagePassing(T, message_fn, update_fn, n_hidden) - config = layer.get_config() - layer_copied = dc.models.layers.MessagePassing.from_config(config) - - assert layer_copied.T == layer.T - assert layer_copied.message_fn == layer.message_fn - assert layer_copied.update_fn == layer.update_fn - assert layer_copied.n_hidden == layer.n_hidden - - def test_edge_network(self): - n_pair_features = 8 - n_hidden = 100 - init = 'glorot_uniform' - layer = dc.models.layers.EdgeNetwork(n_pair_features, n_hidden, init) - config = layer.get_config() - layer_copied = dc.models.layers.EdgeNetwork.from_config(config) - - assert layer_copied.n_pair_features == layer.n_pair_features - assert layer_copied.n_hidden == layer.n_hidden - assert layer_copied.init == layer.init - - def test_gru(self): - n_hidden = 100 - init = 'glorot_uniform' - layer = dc.models.layers.GatedRecurrentUnit(n_hidden, init) - config = layer.get_config() - layer_copied = dc.models.layers.GatedRecurrentUnit.from_config(config) - - assert layer_copied.n_hidden == layer.n_hidden - assert 
layer_copied.init == layer.init - - def test_set_gather(self): - M = 10 - batch_size = 16 - n_hidden = 100 - init = 'orthogonal' - - layer = dc.models.layers.SetGather(M, batch_size, n_hidden, init) - config = layer.get_config() - layer_copied = dc.models.layers.SetGather.from_config(config) - - assert layer_copied.M == layer.M - assert layer_copied.batch_size == layer.batch_size - assert layer_copied.n_hidden == layer.n_hidden - assert layer_copied.init == layer.init +def test_interatomic_l2_distance(): + N_atoms = 10 + M_nbrs = 15 + ndim = 20 + + layer = dc.models.layers.InteratomicL2Distances( + N_atoms=N_atoms, M_nbrs=M_nbrs, ndim=ndim) + config = layer.get_config() + layer_copied = dc.models.layers.InteratomicL2Distances.from_config(config) + + assert layer_copied.N_atoms == layer.N_atoms + assert layer_copied.M_nbrs == layer.M_nbrs + assert layer_copied.ndim == layer.ndim + + +def test_graph_conv(): + out_channel = 10 + min_deg = 0 + max_deg = 10 + activation_fn = 'relu' + + layer = dc.models.layers.GraphConv( + out_channel=out_channel, + min_deg=min_deg, + max_deg=max_deg, + activation_fn=activation_fn) + config = layer.get_config() + layer_copied = dc.models.layers.GraphConv.from_config(config) + + assert layer_copied.out_channel == layer.out_channel + assert layer_copied.activation_fn == layer.activation_fn + assert layer_copied.max_degree == layer.max_degree + assert layer_copied.min_degree == layer.min_degree + + +def test_graph_gather(): + batch_size = 10 + activation_fn = 'relu' + + layer = dc.models.layers.GraphGather( + batch_size=batch_size, activation_fn=activation_fn) + config = layer.get_config() + layer_copied = dc.models.layers.GraphGather.from_config(config) + + assert layer_copied.batch_size == layer.batch_size + assert layer_copied.activation_fn == layer.activation_fn + + +def test_graph_pool(): + min_degree = 0 + max_degree = 10 + + layer_copied = dc.models.layers.GraphPool( + min_degree=min_degree,
max_degree=max_degree) + config = layer_copied.get_config() + layer_copied = dc.models.layers.GraphPool.from_config(config) + + assert layer_copied.max_degree == max_degree + assert layer_copied.min_degree == min_degree + + +def test_lstmstep(): + output_dim = 100 + input_dim = 50 + init_fn = 'glorot_uniform' + inner_init_fn = 'orthogonal' + activation_fn = 'tanh' + inner_activation_fn = 'hard_sigmoid' + + layer = dc.models.layers.LSTMStep(output_dim, input_dim, init_fn, + inner_init_fn, activation_fn, + inner_activation_fn) + config = layer.get_config() + layer_copied = dc.models.layers.LSTMStep.from_config(config) + + assert layer_copied.output_dim == layer.output_dim + assert layer_copied.input_dim == layer.input_dim + assert layer_copied.init == layer.init + assert layer_copied.inner_init == layer.inner_init + assert layer_copied.activation == layer.activation + assert layer_copied.inner_activation == layer.inner_activation + + +def test_attn_lstm_embedding(): + n_test = 10 + n_support = 100 + n_feat = 20 + max_depth = 3 + + layer = dc.models.layers.AttnLSTMEmbedding(n_test, n_support, n_feat, + max_depth) + config = layer.get_config() + layer_copied = dc.models.layers.AttnLSTMEmbedding.from_config(config) + + assert layer_copied.n_test == layer.n_test + assert layer_copied.n_support == layer.n_support + assert layer_copied.n_feat == layer.n_feat + assert layer_copied.max_depth == layer.max_depth + + +def test_iterref_lstm_embedding(): + n_test = 10 + n_support = 100 + n_feat = 20 + max_depth = 3 + + layer = dc.models.layers.IterRefLSTMEmbedding(n_test, n_support, n_feat, + max_depth) + config = layer.get_config() + layer_copied = dc.models.layers.IterRefLSTMEmbedding.from_config(config) + + assert layer_copied.n_test == layer.n_test + assert layer_copied.n_support == layer.n_support + assert layer_copied.n_feat == layer.n_feat + assert layer_copied.max_depth == layer.max_depth + + +def test_switched_dropout(): + rate = 0.1 + layer = 
dc.models.layers.SwitchedDropout(rate=rate) + config = layer.get_config() + layer_copied = dc.models.layers.SwitchedDropout.from_config(config) + + assert layer_copied.rate == layer.rate + + +def test_weighted_linearcombo(): + std = 0.1 + layer = dc.models.layers.WeightedLinearCombo(std=std) + + config = layer.get_config() + layer_copied = dc.models.layers.WeightedLinearCombo.from_config(config) + + assert layer_copied.std == layer.std + + +def test_combine_mean_std(): + training_only = True + noise_epsilon = 0.001 + + layer = dc.models.layers.CombineMeanStd(training_only, noise_epsilon) + config = layer.get_config() + layer_copied = dc.models.layers.CombineMeanStd.from_config(config) + + assert layer_copied.training_only == layer.training_only + assert layer_copied.noise_epsilon == layer.noise_epsilon + + +def test_stack(): + axis = 2 + layer = dc.models.layers.Stack(axis=axis) + config = layer.get_config() + layer_copied = dc.models.layers.Stack.from_config(config) + + assert layer_copied.axis == layer.axis + + +def test_variable(): + initial_value = 10 + layer = dc.models.layers.Variable(initial_value) + config = layer.get_config() + layer_copied = dc.models.layers.Variable.from_config(config) + + assert layer_copied.initial_value == layer.initial_value + + +def test_vina_free_energy(): + N_atoms = 10 + M_nbrs = 15 + ndim = 20 + nbr_cutoff = 5 + start = 1 + stop = 7 + stddev = 0.3 + Nrot = 1 + + layer = dc.models.layers.VinaFreeEnergy(N_atoms, M_nbrs, ndim, nbr_cutoff, + start, stop, stddev, Nrot) + config = layer.get_config() + layer_copied = dc.models.layers.VinaFreeEnergy.from_config(config) + + assert layer_copied.N_atoms == layer.N_atoms + assert layer_copied.M_nbrs == layer.M_nbrs + assert layer_copied.ndim == layer.ndim + assert layer_copied.nbr_cutoff == layer.nbr_cutoff + assert layer_copied.start == layer.start + assert layer_copied.stop == layer.stop + assert layer_copied.stddev == layer.stddev + assert layer_copied.Nrot == layer.Nrot + + +def 
test_neighbor_list(): + N_atoms = 10 + M_nbrs = 15 + ndim = 20 + nbr_cutoff = 5 + start = 1 + stop = 7 + + layer = dc.models.layers.NeighborList(N_atoms, M_nbrs, ndim, nbr_cutoff, + start, stop) + config = layer.get_config() + layer_copied = dc.models.layers.NeighborList.from_config(config) + + assert layer_copied.N_atoms == layer.N_atoms + assert layer_copied.M_nbrs == layer.M_nbrs + assert layer_copied.ndim == layer.ndim + assert layer_copied.nbr_cutoff == layer.nbr_cutoff + assert layer_copied.start == layer.start + assert layer_copied.stop == layer.stop + + +def test_atomic_convolution(): + atom_types = None + radial_params = list() + boxsize = None + + layer = dc.models.layers.AtomicConvolution(atom_types, radial_params, boxsize) + config = layer.get_config() + layer_copied = dc.models.layers.AtomicConvolution.from_config(config) + + assert layer_copied.atom_types == layer.atom_types + assert layer_copied.radial_params == layer.radial_params + assert layer_copied.boxsize == layer.boxsize + + +def test_ani_feat(): + max_atoms = 23 + radial_cutoff = 4.6 + angular_cutoff = 3.1 + radial_length = 32 + angular_length = 8 + atom_cases = [1, 6, 7, 8, 16] + atomic_number_differentiated = True + coordinates_in_bohr = True + + layer = dc.models.layers.ANIFeat( + max_atoms, radial_cutoff, angular_cutoff, radial_length, angular_length, + atom_cases, atomic_number_differentiated, coordinates_in_bohr) + config = layer.get_config() + layer_copied = dc.models.layers.ANIFeat.from_config(config) + + assert layer_copied.max_atoms == layer.max_atoms + assert layer_copied.radial_cutoff == layer.radial_cutoff + assert layer_copied.angular_cutoff == layer.angular_cutoff + assert layer_copied.radial_length == layer.radial_length + assert layer_copied.angular_length == layer.angular_length + assert layer_copied.atom_cases == layer.atom_cases + assert layer_copied.atomic_number_differentiated == layer.atomic_number_differentiated + assert layer_copied.coordinates_in_bohr == 
layer.coordinates_in_bohr + + +def test_graph_embed_pool(): + num_vertices = 100 + layer = dc.models.layers.GraphEmbedPoolLayer(num_vertices) + config = layer.get_config() + layer_copied = dc.models.layers.GraphEmbedPoolLayer.from_config(config) + + assert layer_copied.num_vertices == layer.num_vertices + + +def test_graph_cnn(): + num_filters = 20 + layer = dc.models.layers.GraphCNN(num_filters) + config = layer.get_config() + layer_copied = dc.models.layers.GraphCNN.from_config(config) + + assert layer_copied.num_filters == layer.num_filters + + +def test_highway(): + activation_fn = 'relu' + biases_initializer = 'zeros' + weights_initializer = None + + layer = dc.models.layers.Highway(activation_fn, biases_initializer, + weights_initializer) + config = layer.get_config() + layer_copied = dc.models.layers.Highway.from_config(config) + + assert layer_copied.activation_fn == layer.activation_fn + assert layer_copied.biases_initializer == layer.biases_initializer + assert layer_copied.weights_initializer == layer.weights_initializer + + +def test_weave(): + n_atom_input_feat = 75 + n_pair_input_feat = 14 + n_atom_output_feat = 50 + n_pair_output_feat = 50 + n_hidden_AA = 50 + n_hidden_PA = 50 + n_hidden_AP = 50 + n_hidden_PP = 50 + update_pair = True + init = 'glorot_uniform' + activation = 'relu' + batch_normalize = True + batch_normalize_kwargs = {"renorm": True} + + layer = dc.models.layers.WeaveLayer( + n_atom_input_feat, n_pair_input_feat, n_atom_output_feat, + n_pair_output_feat, n_hidden_AA, n_hidden_PA, n_hidden_AP, n_hidden_PP, + update_pair, init, activation, batch_normalize, batch_normalize_kwargs) + config = layer.get_config() + layer_copied = dc.models.layers.WeaveLayer.from_config(config) + + assert layer_copied.n_atom_input_feat == layer.n_atom_input_feat + assert layer_copied.n_pair_input_feat == layer.n_pair_input_feat + assert layer_copied.n_atom_output_feat == layer.n_atom_output_feat + assert layer_copied.n_pair_output_feat == 
layer.n_pair_output_feat + assert layer_copied.n_hidden_AA == layer.n_hidden_AA + assert layer_copied.n_hidden_PA == layer.n_hidden_PA + assert layer_copied.n_hidden_AP == layer.n_hidden_AP + assert layer_copied.n_hidden_PP == layer.n_hidden_PP + assert layer_copied.update_pair == layer.update_pair + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + assert layer_copied.batch_normalize == layer.batch_normalize + assert layer_copied.batch_normalize_kwargs == layer.batch_normalize_kwargs + + +def test_weave_gather(): + batch_size = 32 + n_input = 128 + gaussian_expand = True + compress_post_gaussian_expansion = False + init = 'glorot_uniform' + activation = 'tanh' + + layer = dc.models.layers.WeaveGather(batch_size, n_input, gaussian_expand, + compress_post_gaussian_expansion, init, + activation) + config = layer.get_config() + layer_copied = dc.models.layers.WeaveGather.from_config(config) + + assert layer_copied.batch_size == layer.batch_size + assert layer_copied.n_input == layer.n_input + assert layer_copied.gaussian_expand == layer.gaussian_expand + assert layer_copied.compress_post_gaussian_expansion == layer.compress_post_gaussian_expansion + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + + +def test_dtnn_embedding(): + n_embedding = 30 + periodic_table_length = 30 + init = 'glorot_uniform' + + layer = dc.models.layers.DTNNEmbedding(n_embedding, periodic_table_length, + init) + config = layer.get_config() + layer_copied = dc.models.layers.DTNNEmbedding.from_config(config) + + assert layer_copied.n_embedding == layer.n_embedding + assert layer_copied.periodic_table_length == layer.periodic_table_length + assert layer_copied.init == layer.init + + +def test_dtnn_step(): + n_embedding = 30 + n_distance = 100 + n_hidden = 60 + init = 'glorot_uniform' + activation = 'tanh' + + layer = dc.models.layers.DTNNStep(n_embedding, n_distance, n_hidden, init, + activation) + config = 
layer.get_config() + layer_copied = dc.models.layers.DTNNStep.from_config(config) + + assert layer_copied.n_embedding == layer.n_embedding + assert layer_copied.n_distance == layer.n_distance + assert layer_copied.n_hidden == layer.n_hidden + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + + +def test_dtnn_gather(): + n_embedding = 30 + n_outputs = 100 + layer_sizes = [100] + output_activation = True + init = 'glorot_uniform' + activation = 'tanh' + + layer = dc.models.layers.DTNNGather(n_embedding, n_outputs, layer_sizes, + output_activation, init, activation) + config = layer.get_config() + layer_copied = dc.models.layers.DTNNGather.from_config(config) + + assert layer_copied.n_embedding == layer.n_embedding + assert layer_copied.n_outputs == layer.n_outputs + assert layer_copied.layer_sizes == layer.layer_sizes + assert layer_copied.output_activation == layer.output_activation + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + + +def test_dag(): + n_graph_feat = 30 + n_atom_feat = 75 + max_atoms = 50 + layer_sizes = [100] + init = 'glorot_uniform' + activation = 'relu' + dropout = None + batch_size = 64 + + layer = dc.models.layers.DAGLayer(n_graph_feat, n_atom_feat, max_atoms, + layer_sizes, init, activation, dropout, + batch_size) + config = layer.get_config() + layer_copied = dc.models.layers.DAGLayer.from_config(config) + + assert layer_copied.n_graph_feat == layer.n_graph_feat + assert layer_copied.n_atom_feat == layer.n_atom_feat + assert layer_copied.max_atoms == layer.max_atoms + assert layer_copied.layer_sizes == layer.layer_sizes + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + assert layer_copied.dropout == layer.dropout + assert layer_copied.batch_size == layer.batch_size + + +def test_dag_gather(): + n_graph_feat = 30 + n_outputs = 30 + max_atoms = 50 + layer_sizes = [100] + init = 'glorot_uniform' + activation = 
'relu' + dropout = None + + layer = dc.models.layers.DAGGather(n_graph_feat, n_outputs, max_atoms, + layer_sizes, init, activation, dropout) + config = layer.get_config() + layer_copied = dc.models.layers.DAGGather.from_config(config) + + assert layer_copied.n_graph_feat == layer.n_graph_feat + assert layer_copied.n_outputs == layer.n_outputs + assert layer_copied.max_atoms == layer.max_atoms + assert layer_copied.layer_sizes == layer.layer_sizes + assert layer_copied.init == layer.init + assert layer_copied.activation == layer.activation + assert layer_copied.dropout == layer.dropout + + +def test_message_passing(): + T = 20 + message_fn = 'enn' + update_fn = 'gru' + n_hidden = 100 + layer = dc.models.layers.MessagePassing(T, message_fn, update_fn, n_hidden) + config = layer.get_config() + layer_copied = dc.models.layers.MessagePassing.from_config(config) + + assert layer_copied.T == layer.T + assert layer_copied.message_fn == layer.message_fn + assert layer_copied.update_fn == layer.update_fn + assert layer_copied.n_hidden == layer.n_hidden + + +def test_edge_network(): + n_pair_features = 8 + n_hidden = 100 + init = 'glorot_uniform' + layer = dc.models.layers.EdgeNetwork(n_pair_features, n_hidden, init) + config = layer.get_config() + layer_copied = dc.models.layers.EdgeNetwork.from_config(config) + + assert layer_copied.n_pair_features == layer.n_pair_features + assert layer_copied.n_hidden == layer.n_hidden + assert layer_copied.init == layer.init + + +def test_gru(): + n_hidden = 100 + init = 'glorot_uniform' + layer = dc.models.layers.GatedRecurrentUnit(n_hidden, init) + config = layer.get_config() + layer_copied = dc.models.layers.GatedRecurrentUnit.from_config(config) + + assert layer_copied.n_hidden == layer.n_hidden + assert layer_copied.init == layer.init + + +def test_set_gather(): + M = 10 + batch_size = 16 + n_hidden = 100 + init = 'orthogonal' + + layer = dc.models.layers.SetGather(M, batch_size, n_hidden, init) + config = layer.get_config() + 
layer_copied = dc.models.layers.SetGather.from_config(config) + + assert layer_copied.M == layer.M + assert layer_copied.batch_size == layer.batch_size + assert layer_copied.n_hidden == layer.n_hidden + assert layer_copied.init == layer.init diff --git a/deepchem/models/tests/test_losses.py b/deepchem/models/tests/test_losses.py new file mode 100644 index 0000000000000000000000000000000000000000..37155a4efaeaac00d44df4a2b47db6ad9c357eb0 --- /dev/null +++ b/deepchem/models/tests/test_losses.py @@ -0,0 +1,310 @@ +import deepchem.models.losses as losses +import unittest +import numpy as np + +try: + import tensorflow as tf + has_tensorflow = True +except: + has_tensorflow = False + +try: + import torch + has_pytorch = True +except: + has_pytorch = False + + +class TestLosses(unittest.TestCase): + """Test loss functions.""" + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_l1_loss_tf(self): + """Test L1Loss.""" + loss = losses.L1Loss() + outputs = tf.constant([[0.1, 0.8], [0.4, 0.6]]) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + expected = [[0.1, 0.2], [0.6, 0.6]] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_l1_loss_pytorch(self): + """Test L1Loss.""" + loss = losses.L1Loss() + outputs = torch.tensor([[0.1, 0.8], [0.4, 0.6]]) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + expected = [[0.1, 0.2], [0.6, 0.6]] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_l2_loss_tf(self): + """Test L2Loss.""" + loss = losses.L2Loss() + outputs = tf.constant([[0.1, 0.8], [0.4, 0.6]]) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + expected = [[0.1**2, 0.2**2], [0.6**2, 0.6**2]] + assert np.allclose(expected, result) + + 
@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_l2_loss_pytorch(self): + """Test L2Loss.""" + loss = losses.L2Loss() + outputs = torch.tensor([[0.1, 0.8], [0.4, 0.6]]) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + expected = [[0.1**2, 0.2**2], [0.6**2, 0.6**2]] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_hinge_loss_tf(self): + """Test HingeLoss.""" + loss = losses.HingeLoss() + outputs = tf.constant([[0.1, 0.8], [0.4, 0.6]]) + labels = tf.constant([[1.0, -1.0], [-1.0, 1.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + expected = [np.mean([0.9, 1.8]), np.mean([1.4, 0.4])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_hinge_loss_pytorch(self): + """Test HingeLoss.""" + loss = losses.HingeLoss() + outputs = torch.tensor([[0.1, 0.8], [0.4, 0.6]]) + labels = torch.tensor([[1.0, -1.0], [-1.0, 1.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + expected = [np.mean([0.9, 1.8]), np.mean([1.4, 0.4])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_binary_cross_entropy_tf(self): + """Test BinaryCrossEntropy.""" + loss = losses.BinaryCrossEntropy() + outputs = tf.constant([[0.1, 0.8], [0.4, 0.6]]) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + expected = [ + -np.mean([np.log(0.9), np.log(0.8)]), + -np.mean([np.log(0.4), np.log(0.4)]) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_binary_cross_entropy_pytorch(self): + """Test BinaryCrossEntropy.""" + loss = losses.BinaryCrossEntropy() + outputs = torch.tensor([[0.1, 0.8], [0.4, 0.6]]) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = 
loss._create_pytorch_loss()(outputs, labels).numpy() + expected = [ + -np.mean([np.log(0.9), np.log(0.8)]), + -np.mean([np.log(0.4), np.log(0.4)]) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_categorical_cross_entropy_tf(self): + """Test CategoricalCrossEntropy.""" + loss = losses.CategoricalCrossEntropy() + outputs = tf.constant([[0.2, 0.8], [0.4, 0.6]]) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + expected = [-np.log(0.8), -np.log(0.4)] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_categorical_cross_entropy_pytorch(self): + """Test CategoricalCrossEntropy.""" + loss = losses.CategoricalCrossEntropy() + outputs = torch.tensor([[0.2, 0.8], [0.4, 0.6]]) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + expected = [-np.log(0.8), -np.log(0.4)] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_sigmoid_cross_entropy_tf(self): + """Test SigmoidCrossEntropy.""" + loss = losses.SigmoidCrossEntropy() + y = [[0.1, 0.8], [0.4, 0.6]] + outputs = tf.constant(y) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + sigmoid = 1.0 / (1.0 + np.exp(-np.array(y))) + expected = [[-np.log(1 - sigmoid[0, 0]), -np.log(sigmoid[0, 1])], + [-np.log(sigmoid[1, 0]), -np.log(1 - sigmoid[1, 1])]] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_sigmoid_cross_entropy_pytorch(self): + """Test SigmoidCrossEntropy.""" + loss = losses.SigmoidCrossEntropy() + y = [[0.1, 0.8], [0.4, 0.6]] + outputs = torch.tensor(y) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + 
sigmoid = 1.0 / (1.0 + np.exp(-np.array(y))) + expected = [[-np.log(1 - sigmoid[0, 0]), -np.log(sigmoid[0, 1])], + [-np.log(sigmoid[1, 0]), -np.log(1 - sigmoid[1, 1])]] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_softmax_cross_entropy_tf(self): + """Test SoftmaxCrossEntropy.""" + loss = losses.SoftmaxCrossEntropy() + y = np.array([[0.1, 0.8], [0.4, 0.6]]) + outputs = tf.constant(y) + labels = tf.constant([[0.0, 1.0], [1.0, 0.0]]) + result = loss._compute_tf_loss(outputs, labels).numpy() + softmax = np.exp(y) / np.expand_dims(np.sum(np.exp(y), axis=1), 1) + expected = [-np.log(softmax[0, 1]), -np.log(softmax[1, 0])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_softmax_cross_entropy_pytorch(self): + """Test SoftmaxCrossEntropy.""" + loss = losses.SoftmaxCrossEntropy() + y = np.array([[0.1, 0.8], [0.4, 0.6]]) + outputs = torch.tensor(y) + labels = torch.tensor([[0.0, 1.0], [1.0, 0.0]]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + softmax = np.exp(y) / np.expand_dims(np.sum(np.exp(y), axis=1), 1) + expected = [-np.log(softmax[0, 1]), -np.log(softmax[1, 0])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_sparse_softmax_cross_entropy_tf(self): + """Test SparseSoftmaxCrossEntropy.""" + loss = losses.SparseSoftmaxCrossEntropy() + y = np.array([[0.1, 0.8], [0.4, 0.6]]) + outputs = tf.constant(y) + labels = tf.constant([1, 0]) + result = loss._compute_tf_loss(outputs, labels).numpy() + softmax = np.exp(y) / np.expand_dims(np.sum(np.exp(y), axis=1), 1) + expected = [-np.log(softmax[0, 1]), -np.log(softmax[1, 0])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_sparse_softmax_cross_entropy_pytorch(self): + """Test SparseSoftmaxCrossEntropy.""" + loss = 
losses.SparseSoftmaxCrossEntropy() + y = np.array([[0.1, 0.8], [0.4, 0.6]]) + outputs = torch.tensor(y) + labels = torch.tensor([1, 0]) + result = loss._create_pytorch_loss()(outputs, labels).numpy() + softmax = np.exp(y) / np.expand_dims(np.sum(np.exp(y), axis=1), 1) + expected = [-np.log(softmax[0, 1]), -np.log(softmax[1, 0])] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_VAE_ELBO_tf(self): + """.""" + loss = losses.VAE_ELBO() + logvar = tf.constant([[1.0, 1.3], [0.6, 1.2]]) + mu = tf.constant([[0.2, 0.7], [1.2, 0.4]]) + x = tf.constant([[0.9, 0.4, 0.8], [0.3, 0, 1]]) + reconstruction_x = tf.constant([[0.8, 0.3, 0.7], [0.2, 0, 0.9]]) + result = loss._compute_tf_loss(logvar, mu, x, reconstruction_x).numpy() + expected = [ + 0.5 * np.mean([ + 0.04 + 1.0 - np.log(1e-20 + 1.0) - 1, + 0.49 + 1.69 - np.log(1e-20 + 1.69) - 1 + ]) - np.mean( + np.array([0.9, 0.4, 0.8]) * np.log([0.8, 0.3, 0.7]) + + np.array([0.1, 0.6, 0.2]) * np.log([0.2, 0.7, 0.3])), + 0.5 * np.mean([ + 1.44 + 0.36 - np.log(1e-20 + 0.36) - 1, + 0.16 + 1.44 - np.log(1e-20 + 1.44) - 1 + ]) - np.mean( + np.array([0.3, 0, 1]) * np.log([0.2, 1e-20, 0.9]) + + np.array([0.7, 1, 0]) * np.log([0.8, 1, 0.1])) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_VAE_ELBO_pytorch(self): + """.""" + loss = losses.VAE_ELBO() + logvar = torch.tensor([[1.0, 1.3], [0.6, 1.2]]) + mu = torch.tensor([[0.2, 0.7], [1.2, 0.4]]) + x = torch.tensor([[0.9, 0.4, 0.8], [0.3, 0, 1]]) + reconstruction_x = torch.tensor([[0.8, 0.3, 0.7], [0.2, 0, 0.9]]) + result = loss._create_pytorch_loss()(logvar, mu, x, + reconstruction_x).numpy() + expected = [ + 0.5 * np.mean([ + 0.04 + 1.0 - np.log(1e-20 + 1.0) - 1, + 0.49 + 1.69 - np.log(1e-20 + 1.69) - 1 + ]) - np.mean( + np.array([0.9, 0.4, 0.8]) * np.log([0.8, 0.3, 0.7]) + + np.array([0.1, 0.6, 0.2]) * np.log([0.2, 0.7, 0.3])), + 0.5 * np.mean([ + 
1.44 + 0.36 - np.log(1e-20 + 0.36) - 1, + 0.16 + 1.44 - np.log(1e-20 + 1.44) - 1 + ]) - np.mean( + np.array([0.3, 0, 1]) * np.log([0.2, 1e-20, 0.9]) + + np.array([0.7, 1, 0]) * np.log([0.8, 1, 0.1])) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_VAE_KLDivergence_tf(self): + """.""" + loss = losses.VAE_KLDivergence() + logvar = tf.constant([[1.0, 1.3], [0.6, 1.2]]) + mu = tf.constant([[0.2, 0.7], [1.2, 0.4]]) + result = loss._compute_tf_loss(logvar, mu).numpy() + expected = [ + 0.5 * np.mean([ + 0.04 + 1.0 - np.log(1e-20 + 1.0) - 1, + 0.49 + 1.69 - np.log(1e-20 + 1.69) - 1 + ]), 0.5 * np.mean([ + 1.44 + 0.36 - np.log(1e-20 + 0.36) - 1, + 0.16 + 1.44 - np.log(1e-20 + 1.44) - 1 + ]) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_VAE_KLDivergence_pytorch(self): + """.""" + loss = losses.VAE_KLDivergence() + logvar = torch.tensor([[1.0, 1.3], [0.6, 1.2]]) + mu = torch.tensor([[0.2, 0.7], [1.2, 0.4]]) + result = loss._create_pytorch_loss()(logvar, mu).numpy() + expected = [ + 0.5 * np.mean([ + 0.04 + 1.0 - np.log(1e-20 + 1.0) - 1, + 0.49 + 1.69 - np.log(1e-20 + 1.69) - 1 + ]), 0.5 * np.mean([ + 1.44 + 0.36 - np.log(1e-20 + 0.36) - 1, + 0.16 + 1.44 - np.log(1e-20 + 1.44) - 1 + ]) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_ShannonEntropy_tf(self): + """.""" + loss = losses.ShannonEntropy() + inputs = tf.constant([[0.7, 0.3], [0.9, 0.1]]) + result = loss._compute_tf_loss(inputs).numpy() + expected = [ + -np.mean([0.7 * np.log(0.7), 0.3 * np.log(0.3)]), + -np.mean([0.9 * np.log(0.9), 0.1 * np.log(0.1)]) + ] + assert np.allclose(expected, result) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_ShannonEntropy_pytorch(self): + """.""" + loss = losses.ShannonEntropy() + inputs = torch.tensor([[0.7, 0.3], [0.9, 0.1]]) + 
result = loss._create_pytorch_loss()(inputs).numpy() + expected = [ + -np.mean([0.7 * np.log(0.7), 0.3 * np.log(0.3)]), + -np.mean([0.9 * np.log(0.9), 0.1 * np.log(0.1)]) + ] + assert np.allclose(expected, result) diff --git a/deepchem/models/tests/test_mpnn.py b/deepchem/models/tests/test_mpnn.py new file mode 100644 index 0000000000000000000000000000000000000000..ff29680b3361a0b6d0e5df624ad8b6cd9e8f6f24 --- /dev/null +++ b/deepchem/models/tests/test_mpnn.py @@ -0,0 +1,95 @@ +import unittest +import tempfile + +import numpy as np + +import deepchem as dc +from deepchem.feat import MolGraphConvFeaturizer +from deepchem.models.torch_models import MPNNModel +from deepchem.models.tests.test_graph_models import get_dataset + +try: + import dgl + import dgllife + import torch + has_torch_and_dgl = True +except: + has_torch_and_dgl = False + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_mpnn_regression(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'regression', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = MPNNModel(mode='regression', n_tasks=n_tasks, batch_size=10) + + # overfit test + model.fit(dataset, nb_epoch=400) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.5 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_mpnn_classification(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model = MPNNModel( + mode='classification', + n_tasks=n_tasks, + batch_size=10, + learning_rate=0.001) + + # overfit test + model.fit(dataset, nb_epoch=200) + scores = model.evaluate(dataset, [metric], transformers) + assert 
scores['mean-roc_auc_score'] >= 0.85 + + +@unittest.skipIf(not has_torch_and_dgl, + 'PyTorch, DGL, or DGL-LifeSci are not installed') +def test_mpnn_reload(): + # load datasets + featurizer = MolGraphConvFeaturizer(use_edges=True) + tasks, dataset, transformers, metric = get_dataset( + 'classification', featurizer=featurizer) + + # initialize models + n_tasks = len(tasks) + model_dir = tempfile.mkdtemp() + model = MPNNModel( + mode='classification', + n_tasks=n_tasks, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + + model.fit(dataset, nb_epoch=200) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.85 + + reloaded_model = MPNNModel( + mode='classification', + n_tasks=n_tasks, + model_dir=model_dir, + batch_size=10, + learning_rate=0.001) + reloaded_model.restore() + + pred_mols = ["CCCC", "CCCCCO", "CCCCC"] + X_pred = featurizer(pred_mols) + random_dataset = dc.data.NumpyDataset(X_pred) + original_pred = model.predict(random_dataset) + reload_pred = reloaded_model.predict(random_dataset) + assert np.all(original_pred == reload_pred) diff --git a/deepchem/models/tests/test_multitask.py b/deepchem/models/tests/test_multitask.py index c1e51d69765e743dfcddb74e7050b6de7de766e5..d9bb35e754f5dbc5ce12c1cad7a8d4c97dc82a2b 100644 --- a/deepchem/models/tests/test_multitask.py +++ b/deepchem/models/tests/test_multitask.py @@ -34,8 +34,8 @@ class TestMultitask(unittest.TestCase): featurizer = dc.feat.CircularFingerprint(size=1024) loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) splitter = dc.splits.ScaffoldSplitter() train_dataset, test_dataset = splitter.train_test_split(dataset) diff --git a/deepchem/models/tests/test_normalizing_flows.py b/deepchem/models/tests/test_normalizing_flows.py new file mode 100644 index 
0000000000000000000000000000000000000000..16aa90d500538410a22b5f4995843fc3d3178d36 --- /dev/null +++ b/deepchem/models/tests/test_normalizing_flows.py @@ -0,0 +1,56 @@ +""" +Tests for Normalizing Flows. +""" + +import os +import sys +import pytest + +import deepchem +import numpy as np +import tensorflow as tf +import tensorflow_probability as tfp +import unittest +import numpy as np + +from deepchem.data import NumpyDataset +from deepchem.models.normalizing_flows import NormalizingFlow, NormalizingFlowModel + +tfd = tfp.distributions +tfb = tfp.bijectors + + +def test_normalizing_flow(): + + flow_layers = [ + tfb.RealNVP( + num_masked=1, + shift_and_log_scale_fn=tfb.real_nvp_default_template( + hidden_layers=[8, 8])) + ] + # 3D Multivariate Gaussian base distribution + nf = NormalizingFlow( + base_distribution=tfd.MultivariateNormalDiag(loc=[0., 0.]), + flow_layers=flow_layers) + + nfm = NormalizingFlowModel(nf) + + # Must be float32 for RealNVP + target_distribution = tfd.MultivariateNormalDiag(loc=[1., 0.]) + dataset = NumpyDataset(X=target_distribution.sample(96)) + + # Tests a simple flow of one RealNVP layer. 
+ + X = nfm.flow.sample() + x1 = tf.zeros([2]) + x2 = dataset.X[0] + + # log likelihoods should be negative + assert nfm.flow.log_prob(X).numpy() < 0 + assert nfm.flow.log_prob(x1).numpy() < 0 + assert nfm.flow.log_prob(x2).numpy() < 0 + + # # Fit model + final = nfm.fit(dataset, nb_epoch=5) + print(final) + assert final > 0 diff --git a/deepchem/models/tests/test_optimizers.py b/deepchem/models/tests/test_optimizers.py index b9e665a289b73d91da13c404a0782ca31db6bf0e..ed03f45a3e6dfce0d433e227e6d8b335b756039b 100644 --- a/deepchem/models/tests/test_optimizers.py +++ b/deepchem/models/tests/test_optimizers.py @@ -1,57 +1,137 @@ import deepchem.models.optimizers as optimizers -import tensorflow as tf -from tensorflow.python.framework import test_util +import unittest +try: + import tensorflow as tf + has_tensorflow = True +except: + has_tensorflow = False -class TestLayers(test_util.TensorFlowTestCase): +try: + import torch + has_pytorch = True +except: + has_pytorch = False + + +class TestOptimizers(unittest.TestCase): """Test optimizers and related classes.""" - def test_adam(self): + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_adam_tf(self): """Test creating an Adam optimizer.""" opt = optimizers.Adam(learning_rate=0.01) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) - assert isinstance(tfopt, tf.keras.optimizers.Adam) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) + assert isinstance(tfopt, tf.keras.optimizers.Adam) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_adam_pytorch(self): + """Test creating an Adam optimizer.""" + opt = optimizers.Adam(learning_rate=0.01) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + assert isinstance(torchopt, torch.optim.Adam) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def 
test_adagrad_tf(self): + """Test creating an AdaGrad optimizer.""" + opt = optimizers.AdaGrad(learning_rate=0.01) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) + assert isinstance(tfopt, tf.keras.optimizers.Adagrad) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_adagrad_pytorch(self): + """Test creating an AdaGrad optimizer.""" + opt = optimizers.AdaGrad(learning_rate=0.01) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + assert isinstance(torchopt, torch.optim.Adagrad) - def test_rmsprop(self): + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_rmsprop_tf(self): """Test creating an RMSProp Optimizer.""" opt = optimizers.RMSProp(learning_rate=0.01) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) - assert isinstance(tfopt, tf.keras.optimizers.RMSprop) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) + assert isinstance(tfopt, tf.keras.optimizers.RMSprop) - def test_gradient_descent(self): + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_rmsprop_pytorch(self): + """Test creating an RMSProp Optimizer.""" + opt = optimizers.RMSProp(learning_rate=0.01) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + assert isinstance(torchopt, torch.optim.RMSprop) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_gradient_descent_tf(self): """Test creating a Gradient Descent optimizer.""" opt = optimizers.GradientDescent(learning_rate=0.01) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) - assert isinstance(tfopt, tf.keras.optimizers.SGD) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) + assert isinstance(tfopt, tf.keras.optimizers.SGD) - def 
test_exponential_decay(self): + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_gradient_descent_pytorch(self): + """Test creating a Gradient Descent optimizer.""" + opt = optimizers.GradientDescent(learning_rate=0.01) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + assert isinstance(torchopt, torch.optim.SGD) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_exponential_decay_tf(self): """Test creating an optimizer with an exponentially decaying learning rate.""" rate = optimizers.ExponentialDecay( initial_rate=0.001, decay_rate=0.99, decay_steps=10000) opt = optimizers.Adam(learning_rate=rate) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) - def test_polynomial_decay(self): + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_exponential_decay_pytorch(self): + """Test creating an optimizer with an exponentially decaying learning rate.""" + rate = optimizers.ExponentialDecay( + initial_rate=0.001, decay_rate=0.99, decay_steps=10000) + opt = optimizers.Adam(learning_rate=rate) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + schedule = rate._create_pytorch_schedule(torchopt) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_polynomial_decay_tf(self): """Test creating an optimizer with a polynomially decaying learning rate.""" rate = optimizers.PolynomialDecay( initial_rate=0.001, final_rate=0.0001, decay_steps=10000) opt = optimizers.Adam(learning_rate=rate) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) + + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') 
+ def test_polynomial_decay_pytorch(self): + """Test creating an optimizer with a polynomially decaying learning rate.""" + rate = optimizers.PolynomialDecay( + initial_rate=0.001, final_rate=0.0001, decay_steps=10000) + opt = optimizers.Adam(learning_rate=rate) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + schedule = rate._create_pytorch_schedule(torchopt) + + @unittest.skipIf(not has_tensorflow, 'TensorFlow is not installed') + def test_linearCosine_decay_tf(self): + """test creating an optimizer with a linear cosine decay to the learning rate""" + rate = optimizers.LinearCosineDecay(initial_rate=0.1, decay_steps=10000) + opt = optimizers.Adam(learning_rate=rate) + global_step = tf.Variable(0) + tfopt = opt._create_tf_optimizer(global_step) - def test_linearCosine_decay(self): + @unittest.skipIf(not has_pytorch, 'PyTorch is not installed') + def test_linearCosine_decay_pytorch(self): """test creating an optimizer with a linear cosine decay to the learning rate""" rate = optimizers.LinearCosineDecay(initial_rate=0.1, decay_steps=10000) opt = optimizers.Adam(learning_rate=rate) - with self.session() as sess: - global_step = tf.Variable(0) - tfopt = opt._create_optimizer(global_step) + params = [torch.nn.Parameter(torch.Tensor([1.0]))] + torchopt = opt._create_pytorch_optimizer(params) + schedule = rate._create_pytorch_schedule(torchopt) diff --git a/deepchem/models/tests/test_overfit.py b/deepchem/models/tests/test_overfit.py index 1e54c354775852f4bf63d56fa28eb15a119d1c90..9371ff5656803d4b454608637ae533723b2b428b 100644 --- a/deepchem/models/tests/test_overfit.py +++ b/deepchem/models/tests/test_overfit.py @@ -2,10 +2,6 @@ Tests to make sure deepchem models can overfit on tiny datasets. 
""" -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import os import numpy as np @@ -20,883 +16,895 @@ import deepchem as dc from deepchem.models.optimizers import Adam -class TestOverfit(test_util.TensorFlowTestCase): - """ - Test that models can overfit simple datasets. +def test_sklearn_regression_overfit(): + """Test that sklearn models can overfit simple regression datasets.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + regression_metric = dc.metrics.Metric(dc.metrics.r2_score) + sklearn_model = RandomForestRegressor() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(dataset) + model.save() + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .7 + + +def test_sklearn_classification_overfit(): + """Test that sklearn models can overfit simple classification datasets.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + sklearn_model = RandomForestClassifier() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(dataset) + model.save() + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_sklearn_skewed_classification_overfit(): + """Test sklearn models can overfit 0/1 datasets with few 
actives.""" + n_samples = 100 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + p = .05 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.binomial(1, p, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + sklearn_model = RandomForestClassifier() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(dataset) + model.save() + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_regression_overfit(): + """Test that MultitaskRegressor can overfit simple regression datasets.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + # TODO(rbharath): This breaks with optimizer="momentum". Why? 
+ model = dc.models.MultitaskRegressor( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + batch_size=n_samples, + learning_rate=0.003) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .1 + + +def test_classification_overfit(): + """Test that MultitaskClassifier can overfit simple classification datasets.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[.1], + batch_size=n_samples, + optimizer=Adam(learning_rate=0.0003, beta1=0.9, beta2=0.999)) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_residual_classification_overfit(): + """Test that a residual network can overfit simple classification datasets.""" + n_samples = 10 + n_features = 5 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[20] * 10, + dropouts=0.0, + batch_size=n_samples, + residual=True) + + # Fit trained model + model.fit(dataset, nb_epoch=500) + 
+ # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +@flaky +def test_fittransform_regression_overfit(): + """Test that MultitaskFitTransformRegressor can overfit simple regression datasets.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + tf.random.set_seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + fit_transformers = [dc.trans.CoulombFitTransformer(dataset)] + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + model = dc.models.MultitaskFitTransformRegressor( + n_tasks, [n_features, n_features], + dropouts=[0.01], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + batch_size=n_samples, + fit_transformers=fit_transformers, + n_evals=1, + optimizer=Adam(learning_rate=0.003, beta1=0.9, beta2=0.999)) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .1 + + +def test_skewed_classification_overfit(): + """Test MultitaskClassifier can overfit 0/1 datasets with few actives.""" + #n_samples = 100 + n_samples = 100 + n_features = 3 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + p = .05 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.binomial(1, p, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[.1], + batch_size=n_samples, + learning_rate=0.003) + + # Fit trained model + model.fit(dataset, 
nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .75 + + +def test_skewed_missing_classification_overfit(): + """MultitaskClassifier, skewed data, few actives + + Test MultitaskClassifier overfit 0/1 datasets with missing data and few + actives. This is intended to be as close to singletask MUV datasets as + possible. """ - - def setUp(self): - super(TestOverfit, self).setUp() - self.current_dir = os.path.dirname(os.path.abspath(__file__)) - - def test_sklearn_regression_overfit(self): - """Test that sklearn models can overfit simple regression datasets.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.rand(n_samples, n_tasks) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - regression_metric = dc.metrics.Metric(dc.metrics.r2_score) - sklearn_model = RandomForestRegressor() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(dataset) - model.save() - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] > .7 - - def test_sklearn_classification_overfit(self): - """Test that sklearn models can overfit simple classification datasets.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - sklearn_model = RandomForestClassifier() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(dataset) - model.save() - - # Eval model on train - scores = 
model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_sklearn_skewed_classification_overfit(self): - """Test sklearn models can overfit 0/1 datasets with few actives.""" - n_samples = 100 - n_features = 3 - n_tasks = 1 - - # Generate dummy dataset - np.random.seed(123) - p = .05 - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.binomial(1, p, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + n_samples = 5120 + n_features = 6 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + p = .002 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.binomial(1, p, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + y_flat, w_flat = np.squeeze(y), np.squeeze(w) + y_nonzero = y_flat[w_flat != 0] + num_nonzero = np.count_nonzero(y_nonzero) + weight_nonzero = len(y_nonzero) / num_nonzero + w_flat[y_flat != 0] = weight_nonzero + w = np.reshape(w_flat, (n_samples, n_tasks)) + + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[1.], + batch_size=n_samples, + learning_rate=0.003) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .7 + + +def test_sklearn_multitask_classification_overfit(): + """Test SKLearn singletask-to-multitask overfits tiny data.""" + n_tasks = 10 + tasks = ["task%d" % task for task in range(n_tasks)] + n_samples = 10 + n_features = 3 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = 
np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, task_averager=np.mean) + + def model_builder(model_dir): sklearn_model = RandomForestClassifier() - model = dc.models.SklearnModel(sklearn_model) - - # Fit trained model - model.fit(dataset) - model.save() - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_regression_overfit(self): - """Test that MultitaskRegressor can overfit simple regression datasets.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) - # TODO(rbharath): This breaks with optimizer="momentum". Why? 
- model = dc.models.MultitaskRegressor( - n_tasks, - n_features, - dropouts=[0.], - weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], - batch_size=n_samples, - learning_rate=0.003) - - # Fit trained model - model.fit(dataset, nb_epoch=100) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] < .1 - - def test_classification_overfit(self): - """Test that MultitaskClassifier can overfit simple classification datasets.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) - model = dc.models.MultitaskClassifier( - n_tasks, - n_features, - dropouts=[0.], - weight_init_stddevs=[.1], - batch_size=n_samples, - optimizer=Adam(learning_rate=0.0003, beta1=0.9, beta2=0.999)) - - # Fit trained model - model.fit(dataset, nb_epoch=100) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_residual_classification_overfit(self): - """Test that a residual network can overfit simple classification datasets.""" - n_samples = 10 - n_features = 5 - n_tasks = 1 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) - model = dc.models.MultitaskClassifier( - n_tasks, - n_features, - layer_sizes=[20] * 10, - dropouts=0.0, - batch_size=n_samples, - residual=True) - - # Fit trained model - model.fit(dataset, 
nb_epoch=500) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_fittransform_regression_overfit(self): - """Test that MultitaskFitTransformRegressor can overfit simple regression datasets.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - fit_transformers = [dc.trans.CoulombFitTransformer(dataset)] - regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) - model = dc.models.MultitaskFitTransformRegressor( - n_tasks, [n_features, n_features], - dropouts=[0.01], - weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], - batch_size=n_samples, - fit_transformers=fit_transformers, - n_evals=1, - optimizer=Adam(learning_rate=0.003, beta1=0.9, beta2=0.999)) - - # Fit trained model - model.fit(dataset, nb_epoch=100) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] < .1 - - def test_skewed_classification_overfit(self): - """Test MultitaskClassifier can overfit 0/1 datasets with few actives.""" - #n_samples = 100 - n_samples = 100 - n_features = 3 - n_tasks = 1 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - p = .05 - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.binomial(1, p, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - model = dc.models.MultitaskClassifier( - n_tasks, - n_features, - dropouts=[0.], - weight_init_stddevs=[.1], - batch_size=n_samples, - learning_rate=0.003) - - # Fit trained model - model.fit(dataset, nb_epoch=100) - - # Eval 
model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .75 - - def test_skewed_missing_classification_overfit(self): - """TG, skewed data, few actives - - Test MultitaskClassifier overfit 0/1 datasets with missing data and few - actives. This is intended to be as close to singletask MUV datasets as - possible. - """ - n_samples = 5120 - n_features = 6 - n_tasks = 1 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - p = .002 - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.binomial(1, p, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - y_flat, w_flat = np.squeeze(y), np.squeeze(w) - y_nonzero = y_flat[w_flat != 0] - num_nonzero = np.count_nonzero(y_nonzero) - weight_nonzero = len(y_nonzero) / num_nonzero - w_flat[y_flat != 0] = weight_nonzero - w = np.reshape(w_flat, (n_samples, n_tasks)) - - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) - model = dc.models.MultitaskClassifier( - n_tasks, - n_features, - dropouts=[0.], - weight_init_stddevs=[1.], - batch_size=n_samples, - learning_rate=0.003) - - # Fit trained model - model.fit(dataset, nb_epoch=100) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .7 - - def test_sklearn_multitask_classification_overfit(self): - """Test SKLearn singletask-to-multitask overfits tiny data.""" - n_tasks = 10 - tasks = ["task%d" % task for task in range(n_tasks)] - n_samples = 10 - n_features = 3 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - - classification_metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, 
task_averager=np.mean) - - def model_builder(model_dir): - sklearn_model = RandomForestClassifier() - return dc.models.SklearnModel(sklearn_model, model_dir) - - model = dc.models.SingletaskToMultitask(tasks, model_builder) - - # Fit trained model - model.fit(dataset) - model.save() - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - @flaky - def test_multitask_classification_overfit(self): - """Test MultitaskClassifier overfits tiny data.""" - n_tasks = 10 - n_samples = 10 - n_features = 3 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric( - dc.metrics.accuracy_score, task_averager=np.mean) - model = dc.models.MultitaskClassifier( - n_tasks, - n_features, - dropouts=[0.], - weight_init_stddevs=[.1], - batch_size=n_samples, - optimizer=Adam(learning_rate=0.0003, beta1=0.9, beta2=0.999)) - - # Fit trained model - model.fit(dataset) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_tf_robust_multitask_classification_overfit(self): - """Test tf robust multitask overfits tiny data.""" - n_tasks = 10 - n_samples = 10 - n_features = 3 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - - classification_metric = dc.metrics.Metric( - dc.metrics.accuracy_score, task_averager=np.mean) - model = dc.models.RobustMultitaskClassifier( - n_tasks, - n_features, - layer_sizes=[50], - bypass_layer_sizes=[10], - dropouts=[0.], - 
learning_rate=0.003, - weight_init_stddevs=[.1], - batch_size=n_samples) - - # Fit trained model - model.fit(dataset, nb_epoch=25) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_IRV_multitask_classification_overfit(self): - """Test IRV classifier overfits tiny data.""" - n_tasks = 5 - n_samples = 10 - n_features = 128 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.randint(2, size=(n_samples, n_features)) - y = np.ones((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - IRV_transformer = dc.trans.IRVTransformer(5, n_tasks, dataset) - dataset_trans = IRV_transformer.transform(dataset) - classification_metric = dc.metrics.Metric( - dc.metrics.accuracy_score, task_averager=np.mean) - model = dc.models.TensorflowMultitaskIRVClassifier( - n_tasks, K=5, learning_rate=0.01, batch_size=n_samples) - - # Fit trained model - model.fit(dataset_trans) - - # Eval model on train - scores = model.evaluate(dataset_trans, [classification_metric]) - assert scores[classification_metric.name] > .9 - - def test_sklearn_multitask_regression_overfit(self): - """Test SKLearn singletask-to-multitask overfits tiny regression data.""" - n_tasks = 2 - tasks = ["task%d" % task for task in range(n_tasks)] - n_samples = 10 - n_features = 3 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.rand(n_samples, n_tasks) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) - - regression_metric = dc.metrics.Metric( - dc.metrics.r2_score, task_averager=np.mean) - - def model_builder(model_dir): - sklearn_model = RandomForestRegressor() - return dc.models.SklearnModel(sklearn_model, model_dir) - - model = dc.models.SingletaskToMultitask(tasks, model_builder) - 
- # Fit trained model - model.fit(dataset) - model.save() - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] > .7 - - def test_multitask_regression_overfit(self): - """Test MultitaskRegressor overfits tiny data.""" - n_tasks = 10 - n_samples = 10 - n_features = 10 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.rand(n_samples, n_tasks) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - regression_metric = dc.metrics.Metric( - dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") - model = dc.models.MultitaskRegressor( - n_tasks, n_features, dropouts=0.0, batch_size=n_samples) - - # Fit trained model - model.fit(dataset, nb_epoch=1000) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] < .02 - - def test_residual_regression_overfit(self): - """Test that a residual multitask network can overfit tiny data.""" - n_tasks = 10 - n_samples = 10 - n_features = 10 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.rand(n_samples, n_tasks) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - regression_metric = dc.metrics.Metric( - dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") - model = dc.models.MultitaskRegressor( - n_tasks, - n_features, - layer_sizes=[20] * 10, - dropouts=0.0, - batch_size=n_samples, - residual=True) - - # Fit trained model - model.fit(dataset, nb_epoch=1000) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] < .02 - - def test_tf_robust_multitask_regression_overfit(self): - """Test tf robust multitask overfits tiny 
data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 10 - n_samples = 10 - n_features = 3 - n_classes = 2 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - regression_metric = dc.metrics.Metric( - dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") - model = dc.models.RobustMultitaskRegressor( - n_tasks, - n_features, - layer_sizes=[50], - bypass_layer_sizes=[10], - dropouts=[0.], - learning_rate=0.003, - weight_init_stddevs=[.1], - batch_size=n_samples) - - # Fit trained model - model.fit(dataset, nb_epoch=25) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - assert scores[regression_metric.name] < .2 - - @pytest.mark.slow - def test_DAG_singletask_regression_overfit(self): - """Test DAG regressor multitask overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. 
- featurizer = dc.feat.ConvMolFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_regression.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - regression_metric = dc.metrics.Metric( - dc.metrics.pearson_r2_score, task_averager=np.mean) - - n_feat = 75 - batch_size = 10 - transformer = dc.trans.DAGTransformer(max_atoms=50) - dataset = transformer.transform(dataset) - - model = dc.models.DAGModel( - n_tasks, - max_atoms=50, - n_atom_feat=n_feat, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="regression") - - # Fit trained model - model.fit(dataset, nb_epoch=1200) - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - - assert scores[regression_metric.name] > .8 - - def test_weave_singletask_classification_overfit(self): - """Test weave model overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. 
- featurizer = dc.feat.WeaveFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_classification.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) - - n_atom_feat = 75 - n_pair_feat = 14 - n_feat = 128 - batch_size = 10 - - model = dc.models.WeaveModel( - n_tasks, - n_atom_feat=n_atom_feat, - n_pair_feat=n_pair_feat, - n_graph_feat=n_feat, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="classification") - - # Fit trained model - model.fit(dataset, nb_epoch=20) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - - assert scores[classification_metric.name] > .65 - - def test_weave_singletask_regression_overfit(self): - """Test weave model overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. 
- featurizer = dc.feat.WeaveFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_regression.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - regression_metric = dc.metrics.Metric( - dc.metrics.pearson_r2_score, task_averager=np.mean) - - n_atom_feat = 75 - n_pair_feat = 14 - n_feat = 128 - batch_size = 10 - - model = dc.models.WeaveModel( - n_tasks, - n_atom_feat=n_atom_feat, - n_pair_feat=n_pair_feat, - n_graph_feat=n_feat, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="regression") - - # Fit trained model - model.fit(dataset, nb_epoch=120) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - - assert scores[regression_metric.name] > .8 - - @pytest.mark.slow - def test_MPNN_singletask_regression_overfit(self): - """Test MPNN overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. 
- featurizer = dc.feat.WeaveFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_regression.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - regression_metric = dc.metrics.Metric( - dc.metrics.pearson_r2_score, task_averager=np.mean) - - n_atom_feat = 75 - n_pair_feat = 14 - batch_size = 10 - model = dc.models.MPNNModel( - n_tasks, - n_atom_feat=n_atom_feat, - n_pair_feat=n_pair_feat, - T=2, - M=3, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="regression") - - # Fit trained model - model.fit(dataset, nb_epoch=50) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - - assert scores[regression_metric.name] > .8 - - def test_textCNN_singletask_classification_overfit(self): - """Test textCNN model overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - featurizer = dc.feat.RawFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_classification.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) - - char_dict, length = dc.models.TextCNNModel.build_char_dict(dataset) - batch_size = 10 - - model = dc.models.TextCNNModel( - n_tasks, - char_dict, - seq_length=length, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="classification") - - # Fit trained model - model.fit(dataset, nb_epoch=200) - - # Eval model on train - scores = model.evaluate(dataset, [classification_metric]) - - assert scores[classification_metric.name] > .8 - - @flaky() - def test_textCNN_singletask_regression_overfit(self): - """Test textCNN model overfits tiny data.""" - np.random.seed(123) - tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. 
- featurizer = dc.feat.RawFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, "example_regression.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - - regression_metric = dc.metrics.Metric( - dc.metrics.pearson_r2_score, task_averager=np.mean) - - char_dict, length = dc.models.TextCNNModel.build_char_dict(dataset) - batch_size = 10 - - model = dc.models.TextCNNModel( - n_tasks, - char_dict, - seq_length=length, - batch_size=batch_size, - learning_rate=0.001, - use_queue=False, - mode="regression") - - # Fit trained model - model.fit(dataset, nb_epoch=200) - - # Eval model on train - scores = model.evaluate(dataset, [regression_metric]) - - assert scores[regression_metric.name] > .9 - - def test_progressive_classification_overfit(self): - """Test progressive multitask overfits tiny data.""" - np.random.seed(123) - n_tasks = 5 - n_samples = 10 - n_features = 6 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - metric = dc.metrics.Metric(dc.metrics.accuracy_score, task_averager=np.mean) - model = dc.models.ProgressiveMultitaskClassifier( - n_tasks, - n_features, - layer_sizes=[50], - bypass_layer_sizes=[10], - dropouts=[0.], - learning_rate=0.001, - weight_init_stddevs=[.1], - alpha_init_stddevs=[.02], - batch_size=n_samples) - - # Fit trained model - model.fit(dataset, nb_epoch=300) - - # Eval model on train - scores = model.evaluate(dataset, [metric]) - assert scores[metric.name] > .9 - - def test_progressive_regression_overfit(self): - """Test progressive multitask overfits tiny data.""" - np.random.seed(123) - n_tasks = 5 - n_samples = 10 - n_features = 6 - - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - 
X = np.random.rand(n_samples, n_features) - y = np.random.rand(n_samples, n_tasks) - w = np.ones((n_samples, n_tasks)) - - dataset = dc.data.NumpyDataset(X, y, w, ids) - - metric = dc.metrics.Metric(dc.metrics.rms_score, task_averager=np.mean) - model = dc.models.ProgressiveMultitaskRegressor( - n_tasks, - n_features, - layer_sizes=[50], - bypass_layer_sizes=[10], - dropouts=[0.], - learning_rate=0.002, - weight_init_stddevs=[.1], - alpha_init_stddevs=[.02], - batch_size=n_samples) - - # Fit trained model - model.fit(dataset, nb_epoch=200) - - # Eval model on train - scores = model.evaluate(dataset, [metric]) - assert scores[metric.name] < .2 - - def test_multitask_regressor_uncertainty(self): - """Test computing uncertainty for a MultitaskRegressor.""" - n_tasks = 1 - n_samples = 30 - n_features = 1 - noise = 0.1 - - # Generate dummy dataset - X = np.random.rand(n_samples, n_features, 1) - y = 10 * X + np.random.normal(scale=noise, size=(n_samples, n_tasks, 1)) - dataset = dc.data.NumpyDataset(X, y) - - model = dc.models.MultitaskRegressor( - n_tasks, - n_features, - layer_sizes=[200], - weight_init_stddevs=[.1], - batch_size=n_samples, - dropouts=0.1, - learning_rate=0.003, - uncertainty=True) - - # Fit trained model - model.fit(dataset, nb_epoch=2500) - - # Predict the output and uncertainty. 
- pred, std = model.predict_uncertainty(dataset) - assert np.mean(np.abs(y - pred)) < 1.0 - assert noise < np.mean(std) < 1.0 + return dc.models.SklearnModel(sklearn_model, model_dir) + + model = dc.models.SingletaskToMultitask(tasks, model_builder) + + # Fit trained model + model.fit(dataset) + model.save() + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +@flaky +def test_multitask_classification_overfit(): + """Test MultitaskClassifier overfits tiny data.""" + n_tasks = 10 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean, n_tasks=n_tasks) + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[.1], + batch_size=n_samples, + optimizer=Adam(learning_rate=0.0003, beta1=0.9, beta2=0.999)) + + # Fit trained model + model.fit(dataset) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_robust_multitask_classification_overfit(): + """Test robust multitask overfits tiny data.""" + n_tasks = 10 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model = dc.models.RobustMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + 
dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=25) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_IRV_multitask_classification_overfit(): + """Test IRV classifier overfits tiny data.""" + n_tasks = 5 + n_samples = 10 + n_features = 128 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.randint(2, size=(n_samples, n_features)) + y = np.ones((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + IRV_transformer = dc.trans.IRVTransformer(5, n_tasks, dataset) + dataset_trans = IRV_transformer.transform(dataset) + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model = dc.models.MultitaskIRVClassifier( + n_tasks, K=5, learning_rate=0.01, batch_size=n_samples) + + # Fit trained model + model.fit(dataset_trans) + + # Eval model on train + scores = model.evaluate(dataset_trans, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_sklearn_multitask_regression_overfit(): + """Test SKLearn singletask-to-multitask overfits tiny regression data.""" + n_tasks = 2 + tasks = ["task%d" % task for task in range(n_tasks)] + n_samples = 10 + n_features = 3 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) + + regression_metric = dc.metrics.Metric( + dc.metrics.r2_score, task_averager=np.mean) + + def model_builder(model_dir): + sklearn_model = RandomForestRegressor() + return dc.models.SklearnModel(sklearn_model, model_dir) + + model = dc.models.SingletaskToMultitask(tasks, model_builder) + 
+ # Fit trained model + model.fit(dataset) + model.save() + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .7 + + +def test_multitask_regression_overfit(): + """Test MultitaskRegressor overfits tiny data.""" + n_tasks = 10 + n_samples = 10 + n_features = 10 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + regression_metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") + model = dc.models.MultitaskRegressor( + n_tasks, n_features, dropouts=0.0, batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=1000) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .02 + + +def test_residual_regression_overfit(): + """Test that a residual multitask network can overfit tiny data.""" + n_tasks = 10 + n_samples = 10 + n_features = 10 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + regression_metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") + model = dc.models.MultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[20] * 10, + dropouts=0.0, + batch_size=n_samples, + residual=True) + + # Fit trained model + model.fit(dataset, nb_epoch=1000) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .02 + + +def test_robust_multitask_regression_overfit(): + """Test robust multitask overfits tiny data.""" + 
np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 10 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + regression_metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") + model = dc.models.RobustMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=25) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .2 + + +def test_progressive_classification_overfit(): + """Test progressive multitask overfits tiny data.""" + np.random.seed(123) + n_tasks = 5 + n_samples = 10 + n_features = 6 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + metric = dc.metrics.Metric(dc.metrics.accuracy_score, task_averager=np.mean) + model = dc.models.ProgressiveMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.001, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=300) + + # Eval model on train + scores = model.evaluate(dataset, [metric]) + assert scores[metric.name] > .9 + + +def test_progressive_regression_overfit(): + """Test progressive multitask overfits tiny data.""" + np.random.seed(123) + n_tasks = 5 + n_samples = 10 + n_features = 6 + + # Generate dummy 
dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + metric = dc.metrics.Metric(dc.metrics.rms_score, task_averager=np.mean) + model = dc.models.ProgressiveMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.002, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=200) + + # Eval model on train + scores = model.evaluate(dataset, [metric]) + assert scores[metric.name] < .2 + + +def test_multitask_regressor_uncertainty(): + """Test computing uncertainty for a MultitaskRegressor.""" + n_tasks = 1 + n_samples = 30 + n_features = 1 + noise = 0.1 + + # Generate dummy dataset + X = np.random.rand(n_samples, n_features, 1) + y = 10 * X + np.random.normal(scale=noise, size=(n_samples, n_tasks, 1)) + dataset = dc.data.NumpyDataset(X, y) + + model = dc.models.MultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[200], + weight_init_stddevs=[.1], + batch_size=n_samples, + dropouts=0.1, + learning_rate=0.003, + uncertainty=True) + + # Fit trained model + model.fit(dataset, nb_epoch=2500) + + # Predict the output and uncertainty. + pred, std = model.predict_uncertainty(dataset) + assert np.mean(np.abs(y - pred)) < 1.0 + assert noise < np.mean(std) < 1.0 + + +@pytest.mark.slow +def test_DAG_singletask_regression_overfit(): + """Test DAG regressor multitask overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Load mini log-solubility dataset. 
+ featurizer = dc.feat.ConvMolFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_regression.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + n_feat = 75 + batch_size = 10 + transformer = dc.trans.DAGTransformer(max_atoms=50) + dataset = transformer.transform(dataset) + + model = dc.models.DAGModel( + n_tasks, + max_atoms=50, + n_atom_feat=n_feat, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=1200) + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + + assert scores[regression_metric.name] > .8 + + +def test_weave_singletask_classification_overfit(): + """Test weave model overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Load mini log-solubility dataset. 
+ featurizer = dc.feat.WeaveFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_classification.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + + batch_size = 10 + model = dc.models.WeaveModel( + n_tasks, + batch_size=batch_size, + learning_rate=0.0003, + dropout=0.0, + mode="classification") + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + + assert scores[classification_metric.name] > .65 + + +@pytest.mark.slow +def test_weave_singletask_regression_overfit(): + """Test weave model overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Load mini log-solubility dataset. + featurizer = dc.feat.WeaveFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_regression.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + batch_size = 10 + + model = dc.models.WeaveModel( + n_tasks, + batch_size=batch_size, + learning_rate=0.0003, + dropout=0.0, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=120) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + + assert scores[regression_metric.name] > .8 + + +@pytest.mark.slow +def test_MPNN_singletask_regression_overfit(): + """Test MPNN overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Load mini log-solubility dataset. 
+ featurizer = dc.feat.WeaveFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_regression.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + n_atom_feat = 75 + n_pair_feat = 14 + batch_size = 10 + model = dc.models.MPNNModel( + n_tasks, + n_atom_feat=n_atom_feat, + n_pair_feat=n_pair_feat, + T=2, + M=3, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=50) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + + assert scores[regression_metric.name] > .8 + + +def test_textCNN_singletask_classification_overfit(): + """Test textCNN model overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + featurizer = dc.feat.RawFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_classification.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + + char_dict, length = dc.models.TextCNNModel.build_char_dict(dataset) + batch_size = 10 + + model = dc.models.TextCNNModel( + n_tasks, + char_dict, + seq_length=length, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="classification") + + # Fit trained model + model.fit(dataset, nb_epoch=200) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + + assert scores[classification_metric.name] > .8 + + +@flaky() +def test_textCNN_singletask_regression_overfit(): + """Test textCNN model overfits tiny data.""" + np.random.seed(123) + tf.random.set_seed(123) 
+ n_tasks = 1 + current_dir = os.path.dirname(os.path.abspath(__file__)) + + # Load mini log-solubility dataset. + featurizer = dc.feat.RawFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, "example_regression.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + char_dict, length = dc.models.TextCNNModel.build_char_dict(dataset) + batch_size = 10 + + model = dc.models.TextCNNModel( + n_tasks, + char_dict, + seq_length=length, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=200) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + + assert scores[regression_metric.name] > .9 diff --git a/deepchem/models/tests/test_pretrained.py b/deepchem/models/tests/test_pretrained_keras.py similarity index 100% rename from deepchem/models/tests/test_pretrained.py rename to deepchem/models/tests/test_pretrained_keras.py diff --git a/deepchem/models/tests/test_pretrained_torch.py b/deepchem/models/tests/test_pretrained_torch.py new file mode 100644 index 0000000000000000000000000000000000000000..2475516ce739b709473f1a4290939895da6e44d9 --- /dev/null +++ b/deepchem/models/tests/test_pretrained_torch.py @@ -0,0 +1,93 @@ +import os +import unittest +import deepchem as dc +import numpy as np +from deepchem.models.losses import L2Loss +from deepchem.feat.mol_graphs import ConvMol + +try: + import torch + has_pytorch = True +except: + has_pytorch = False + + +class MLP(dc.models.TorchModel): + + def __init__(self, n_tasks=1, feature_dim=100, hidden_layer_size=64, + **kwargs): + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(feature_dim, hidden_layer_size), torch.nn.ReLU(), + torch.nn.Linear(hidden_layer_size, n_tasks), torch.nn.Sigmoid()) + 
loss = dc.models.losses.BinaryCrossEntropy() + super(MLP, self).__init__(model=pytorch_model, loss=loss, **kwargs) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +class TestPretrainedTorch(unittest.TestCase): + + def setUp(self): + self.feature_dim = 2 + self.hidden_layer_size = 10 + data_points = 10 + + X = np.random.randn(data_points, self.feature_dim) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + + self.dataset = dc.data.NumpyDataset(X, y) + + def test_load_from_pretrained(self): + """Tests loading pretrained model.""" + source_model = MLP( + hidden_layer_size=self.hidden_layer_size, + feature_dim=self.feature_dim, + batch_size=10) + + source_model.fit(self.dataset, nb_epoch=1000, checkpoint_interval=0) + + dest_model = MLP( + feature_dim=self.feature_dim, + hidden_layer_size=self.hidden_layer_size, + n_tasks=10) + + assignment_map = dict() + value_map = dict() + source_vars = list(source_model.model.parameters()) + dest_vars = list(dest_model.model.parameters())[:-2] + + for idx, dest_var in enumerate(dest_vars): + source_var = source_vars[idx] + assignment_map[source_var] = dest_var + value_map[source_var] = source_var.detach().numpy() + + dest_model.load_from_pretrained( + source_model=source_model, + assignment_map=assignment_map, + value_map=value_map) + + for source_var, dest_var in assignment_map.items(): + source_val = source_var.detach().numpy() + dest_val = dest_var.detach().numpy() + np.testing.assert_array_almost_equal(source_val, dest_val) + + def test_restore_equivalency(self): + """Test for restore based pretrained model loading.""" + source_model = MLP( + feature_dim=self.feature_dim, + hidden_layer_size=self.hidden_layer_size, + learning_rate=0.003) + + source_model.fit(self.dataset, nb_epoch=1000) + + dest_model = MLP( + feature_dim=self.feature_dim, hidden_layer_size=self.hidden_layer_size) + + dest_model.load_from_pretrained( + source_model=source_model, + assignment_map=None, + value_map=None, + model_dir=None, + 
include_top=True) + + predictions = np.squeeze(dest_model.predict_on_batch(self.dataset.X)) + np.testing.assert_array_almost_equal(self.dataset.y, np.round(predictions)) diff --git a/deepchem/models/tests/test_reload.py b/deepchem/models/tests/test_reload.py index 01a8fde38231bc15645d1a02cd06248cf57b038f..e7f466707c0189623d2f98c1f7153e84e59951c6 100644 --- a/deepchem/models/tests/test_reload.py +++ b/deepchem/models/tests/test_reload.py @@ -1,48 +1,1167 @@ """ Test reload for trained models. """ -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - +import os +import pytest import unittest import tempfile import numpy as np import deepchem as dc import tensorflow as tf +import scipy +from flaky import flaky from sklearn.ensemble import RandomForestClassifier +from deepchem.molnet.load_function.chembl25_datasets import CHEMBL25_TASKS +from deepchem.feat import create_char_to_idx + + +def test_sklearn_classifier_reload(): + """Test that trained model can be reloaded correctly.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + + sklearn_model = RandomForestClassifier() + model_dir = tempfile.mkdtemp() + model = dc.models.SklearnModel(sklearn_model, model_dir) + + # Fit trained model + model.fit(dataset) + model.save() + + # Load trained model + reloaded_model = dc.models.SklearnModel(None, model_dir) + reloaded_model.reload() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + 
+ # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_multitaskregressor_reload(): + """Test that MultitaskRegressor can be reloaded correctly.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + + model_dir = tempfile.mkdtemp() + model = dc.models.MultitaskRegressor( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + batch_size=n_samples, + learning_rate=0.003, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .1 + + # Reload trained model + reloaded_model = dc.models.MultitaskRegressor( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[np.sqrt(6) / np.sqrt(1000)], + batch_size=n_samples, + learning_rate=0.003, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < 0.1 + + +def test_multitaskclassification_reload(): + """Test that MultitaskClassifier can be reloaded correctly.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, 
n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + model_dir = tempfile.mkdtemp() + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[.1], + batch_size=n_samples, + optimizer=dc.models.optimizers.Adam( + learning_rate=0.0003, beta1=0.9, beta2=0.999), + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Reload trained model + reloaded_model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + dropouts=[0.], + weight_init_stddevs=[.1], + batch_size=n_samples, + optimizer=dc.models.optimizers.Adam( + learning_rate=0.0003, beta1=0.9, beta2=0.999), + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_residual_classification_reload(): + """Test that a residual network can reload correctly.""" + n_samples = 10 + n_features = 5 + n_tasks = 1 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric(dc.metrics.accuracy_score) + model_dir = tempfile.mkdtemp() + model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[20] * 10, + dropouts=0.0, + batch_size=n_samples, + residual=True, + model_dir=model_dir) + + # Fit trained model + 
model.fit(dataset, nb_epoch=500) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + # Reload trained model + reloaded_model = dc.models.MultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[20] * 10, + dropouts=0.0, + batch_size=n_samples, + residual=True, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_robust_multitask_classification_reload(): + """Test robust multitask overfits tiny data.""" + n_tasks = 10 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model_dir = tempfile.mkdtemp() + model = dc.models.RobustMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=25) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + # Reloaded Trained Model + reloaded_model = dc.models.RobustMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + 
weight_init_stddevs=[.1], + batch_size=n_samples, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_normalizing_flow_model_reload(): + """Test that NormalizingFlowModel can be reloaded correctly.""" + from deepchem.models.normalizing_flows import NormalizingFlow, NormalizingFlowModel + import tensorflow_probability as tfp + tfd = tfp.distributions + tfb = tfp.bijectors + tfk = tf.keras + + model_dir = tempfile.mkdtemp() + + Made = tfb.AutoregressiveNetwork( + params=2, hidden_units=[512, 512], activation='relu', dtype='float64') + + flow_layers = [tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=Made)] + # 3D Multivariate Gaussian base distribution + nf = NormalizingFlow( + base_distribution=tfd.MultivariateNormalDiag( + loc=np.zeros(2), scale_diag=np.ones(2)), + flow_layers=flow_layers) + + nfm = NormalizingFlowModel(nf, model_dir=model_dir) + + target_distribution = tfd.MultivariateNormalDiag(loc=np.array([1., 0.])) + dataset = dc.data.NumpyDataset(X=target_distribution.sample(96)) + final = nfm.fit(dataset, nb_epoch=1) + + x = np.zeros(2) + lp1 = nfm.flow.log_prob(x).numpy() + + assert nfm.flow.sample().numpy().shape == (2,) + + reloaded_model = NormalizingFlowModel(nf, model_dir=model_dir) + reloaded_model.restore() + + # Check that reloaded model can sample from the distribution + assert reloaded_model.flow.sample().numpy().shape == (2,) + + lp2 = reloaded_model.flow.log_prob(x).numpy() + + # Check that density estimation is same for reloaded model + assert np.all(lp1 == lp2) + + +def test_robust_multitask_regressor_reload(): + """Test that 
RobustMultitaskRegressor can be reloaded correctly.""" + n_tasks = 10 + n_samples = 10 + n_features = 3 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + + model_dir = tempfile.mkdtemp() + model = dc.models.RobustMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .1 + + # Reload trained model + reloaded_model = dc.models.RobustMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < 0.1 + + +def test_IRV_multitask_classification_reload(): + """Test IRV classifier can be reloaded.""" + n_tasks = 5 + n_samples = 10 + n_features = 128 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.randint(2, size=(n_samples, n_features)) + y = np.ones((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + 
IRV_transformer = dc.trans.IRVTransformer(5, n_tasks, dataset) + dataset_trans = IRV_transformer.transform(dataset) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model_dir = tempfile.mkdtemp() + model = dc.models.MultitaskIRVClassifier( + n_tasks, + K=5, + learning_rate=0.01, + batch_size=n_samples, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset_trans) + + # Eval model on train + scores = model.evaluate(dataset_trans, [classification_metric]) + assert scores[classification_metric.name] > .9 + + # Reload Trained Model + reloaded_model = dc.models.MultitaskIRVClassifier( + n_tasks, + K=5, + learning_rate=0.01, + batch_size=n_samples, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +@flaky +def test_progressive_classification_reload(): + """Test progressive multitask can reload.""" + np.random.seed(123) + n_tasks = 5 + n_samples = 10 + n_features = 6 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model_dir = tempfile.mkdtemp() + model = dc.models.ProgressiveMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.001, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples, + 
model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=400) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + # Reload Trained Model + reloaded_model = dc.models.ProgressiveMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.001, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .9 + + +def test_progressivemultitaskregressor_reload(): + """Test that ProgressiveMultitaskRegressor can be reloaded correctly.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.rand(n_samples, n_tasks) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + + model_dir = tempfile.mkdtemp() + model = dc.models.ProgressiveMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.001, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < .1 + + # Reload trained model + reloaded_model = 
dc.models.ProgressiveMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.001, + weight_init_stddevs=[.1], + alpha_init_stddevs=[.02], + batch_size=n_samples, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < 0.1 + + +def test_DAG_regression_reload(): + """Test DAG regressor reloads.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + + # Load mini log-solubility dataset. + featurizer = dc.feat.ConvMolFeaturizer() + tasks = ["outcome"] + mols = ["CC", "CCO", "CC", "CCC", "CCCCO", "CO", "CC", "CCCCC", "CCC", "CCCO"] + n_samples = len(mols) + X = featurizer(mols) + y = np.random.rand(n_samples, n_tasks) + dataset = dc.data.NumpyDataset(X, y) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + n_feat = 75 + batch_size = 10 + transformer = dc.trans.DAGTransformer(max_atoms=50) + dataset = transformer.transform(dataset) + + model_dir = tempfile.mkdtemp() + model = dc.models.DAGModel( + n_tasks, + max_atoms=50, + n_atom_feat=n_feat, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression", + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .1 + + reloaded_model = dc.models.DAGModel( + n_tasks, + max_atoms=50, + n_atom_feat=n_feat, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression", + model_dir=model_dir) + + reloaded_model.restore() + 
+ # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred) + predset = transformer.transform(predset) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .1 + + +def test_weave_classification_reload(): + """Test weave model can be reloaded.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + + # Load mini log-solubility dataset. + featurizer = dc.feat.WeaveFeaturizer() + tasks = ["outcome"] + mols = ["CC", "CCCCC", "CCCCC", "CCC", "COOO", "COO", "OO"] + n_samples = len(mols) + X = featurizer(mols) + y = [1, 1, 1, 1, 0, 0, 0] + dataset = dc.data.NumpyDataset(X, y) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + + batch_size = 5 + + model_dir = tempfile.mkdtemp() + model = dc.models.WeaveModel( + n_tasks, + batch_size=batch_size, + learning_rate=0.01, + mode="classification", + dropouts=0.0, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=100) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .6 + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + + reloaded_model = dc.models.WeaveModel( + n_tasks, + batch_size=batch_size, + learning_rate=0.003, + mode="classification", + dropouts=0.0, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + 
assert np.all(origpred == reloadpred) + + #Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .6 + + +def test_MPNN_regression_reload(): + """Test MPNN can reload datasets.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + + # Load mini log-solubility dataset. + featurizer = dc.feat.WeaveFeaturizer() + tasks = ["outcome"] + mols = ["C", "CO", "CC"] + n_samples = len(mols) + X = featurizer(mols) + y = np.random.rand(n_samples, n_tasks) + dataset = dc.data.NumpyDataset(X, y) + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + n_atom_feat = 75 + n_pair_feat = 14 + batch_size = 10 + model_dir = tempfile.mkdtemp() + model = dc.models.MPNNModel( + n_tasks, + n_atom_feat=n_atom_feat, + n_pair_feat=n_pair_feat, + T=2, + M=3, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression", + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=50) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .8 + + # Reload trained model + reloaded_model = dc.models.MPNNModel( + n_tasks, + n_atom_feat=n_atom_feat, + n_pair_feat=n_pair_feat, + T=2, + M=3, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="regression", + model_dir=model_dir) + reloaded_model.restore() + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] > .8 + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + +def test_textCNN_classification_reload(): + """Test textCNN model reloadinng.""" + np.random.seed(123) + 
tf.random.set_seed(123) + n_tasks = 1 + + featurizer = dc.feat.RawFeaturizer() + tasks = ["outcome"] + mols = ["C", "CO", "CC"] + n_samples = len(mols) + X = featurizer(mols) + y = np.random.randint(2, size=(n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, ids=mols) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + + char_dict, length = dc.models.TextCNNModel.build_char_dict(dataset) + batch_size = 3 + + model_dir = tempfile.mkdtemp() + model = dc.models.TextCNNModel( + n_tasks, + char_dict, + seq_length=length, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="classification", + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=200) + + # Eval model on train + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .8 + + # Reload trained model + reloaded_model = dc.models.TextCNNModel( + n_tasks, + char_dict, + seq_length=length, + batch_size=batch_size, + learning_rate=0.001, + use_queue=False, + mode="classification", + model_dir=model_dir) + reloaded_model.restore() + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .8 + + assert len(reloaded_model.model.get_weights()) == len( + model.model.get_weights()) + for (reloaded, orig) in zip(reloaded_model.model.get_weights(), + model.model.get_weights()): + assert np.all(reloaded == orig) + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred, ids=predmols) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + assert len(model.model.layers) == len(reloaded_model.model.layers) + + +def test_1d_cnn_regression_reload(): + """Test that a 1D CNN can reload.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + 
np.random.seed(123) + X = np.random.rand(n_samples, 10, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + + regression_metric = dc.metrics.Metric(dc.metrics.mean_squared_error) + model_dir = tempfile.mkdtemp() + + model = dc.models.CNN( + n_tasks, + n_features, + dims=1, + dropouts=0, + kernel_size=3, + mode='regression', + learning_rate=0.003, + model_dir=model_dir) + + # Fit trained model + model.fit(dataset, nb_epoch=200) + + # Eval model on train + scores = model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < 0.1 + + # Reload trained model + reloaded_model = dc.models.CNN( + n_tasks, + n_features, + dims=1, + dropouts=0, + kernel_size=3, + mode='regression', + learning_rate=0.003, + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + Xpred = np.random.rand(n_samples, 10, n_features) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [regression_metric]) + assert scores[regression_metric.name] < 0.1 + + +def test_graphconvmodel_reload(): + featurizer = dc.feat.ConvMolFeaturizer() + tasks = ["outcome"] + n_tasks = len(tasks) + mols = ["C", "CO", "CC"] + n_samples = len(mols) + X = featurizer(mols) + y = np.array([0, 1, 0]) + dataset = dc.data.NumpyDataset(X, y) + + classification_metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + + batch_size = 10 + model_dir = tempfile.mkdtemp() + model = dc.models.GraphConvModel( + len(tasks), + batch_size=batch_size, + batch_normalize=False, + mode='classification', + model_dir=model_dir) + + model.fit(dataset, nb_epoch=10) + scores = model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] >= 0.6 + + # Reload trained 
Model + reloaded_model = dc.models.GraphConvModel( + len(tasks), + batch_size=batch_size, + batch_normalize=False, + mode='classification', + model_dir=model_dir) + reloaded_model.restore() + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + # Eval model on train + scores = reloaded_model.evaluate(dataset, [classification_metric]) + assert scores[classification_metric.name] > .6 + + +def test_chemception_reload(): + """Test that chemception models can be saved and reloaded.""" + img_size = 80 + img_spec = "engd" + res = 0.5 + n_tasks = 1 + featurizer = dc.feat.SmilesToImage( + img_size=img_size, img_spec=img_spec, res=res) + + data_points = 10 + mols = ["CCCCCCCC"] * data_points + X = featurizer(mols) + + y = np.random.randint(0, 2, size=(data_points, n_tasks)) + w = np.ones(shape=(data_points, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, mols) + classsification_metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + + model_dir = tempfile.mkdtemp() + model = dc.models.ChemCeption( + n_tasks=n_tasks, + img_spec="engd", + model_dir=model_dir, + mode="classification") + model.fit(dataset, nb_epoch=3) + + # Reload Trained Model + reloaded_model = dc.models.ChemCeption( + n_tasks=n_tasks, + img_spec="engd", + model_dir=model_dir, + mode="classification") + reloaded_model.restore() + + # Check predictions match on random sample + predmols = ["CCCC", "CCCCCO", "CCCCC"] + Xpred = featurizer(predmols) + predset = dc.data.NumpyDataset(Xpred) + origpred = model.predict(predset) + reloadpred = reloaded_model.predict(predset) + assert np.all(origpred == reloadpred) + + +# TODO: This test is a little awkward. The Smiles2Vec model awkwardly depends on a dataset_file being available on disk. 
This needs to be cleaned up to match the standard model handling API. +def test_smiles2vec_reload(): + """Test that smiles2vec models can be saved and reloaded.""" + dataset_file = os.path.join(os.path.dirname(__file__), "chembl_25_small.csv") + max_len = 250 + pad_len = 10 + max_seq_len = 20 + char_to_idx = create_char_to_idx( + dataset_file, max_len=max_len, smiles_field="smiles") + feat = dc.feat.SmilesToSeq( + char_to_idx=char_to_idx, max_len=max_len, pad_len=pad_len) + + n_tasks = 5 + data_points = 10 + + loader = dc.data.CSVLoader( + tasks=CHEMBL25_TASKS, smiles_field='smiles', featurizer=feat) + dataset = loader.create_dataset( + inputs=[dataset_file], shard_size=10000, data_dir=tempfile.mkdtemp()) + y = np.random.randint(0, 2, size=(data_points, n_tasks)) + w = np.ones(shape=(data_points, n_tasks)) + dataset = dc.data.NumpyDataset(dataset.X[:data_points, :max_seq_len], y, w, + dataset.ids[:data_points]) + + classsification_metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + + model_dir = tempfile.mkdtemp() + model = dc.models.Smiles2Vec( + char_to_idx=char_to_idx, + max_seq_len=max_seq_len, + use_conv=True, + n_tasks=n_tasks, + model_dir=model_dir, + mode="classification") + model.fit(dataset, nb_epoch=3) + + # Reload Trained Model + reloaded_model = dc.models.Smiles2Vec( + char_to_idx=char_to_idx, + max_seq_len=max_seq_len, + use_conv=True, + n_tasks=n_tasks, + model_dir=model_dir, + mode="classification") + reloaded_model.restore() + + # Check predictions match on original dataset + origpred = model.predict(dataset) + reloadpred = reloaded_model.predict(dataset) + assert np.all(origpred == reloadpred) + + +# TODO: We need a cleaner usage example for this +def test_DTNN_regression_reload(): + """Test DTNN can reload datasets.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + + current_dir = os.path.dirname(os.path.abspath(__file__)) + input_file = os.path.join(current_dir, "example_DTNN.mat") + dataset 
= scipy.io.loadmat(input_file) + X = dataset['X'] + y = dataset['T'] + w = np.ones_like(y) + dataset = dc.data.NumpyDataset(X, y, w, ids=None) + n_tasks = y.shape[1] + + regression_metric = dc.metrics.Metric( + dc.metrics.pearson_r2_score, task_averager=np.mean) + + model_dir = tempfile.mkdtemp() + model = dc.models.DTNNModel( + n_tasks, + n_embedding=20, + n_distance=100, + learning_rate=1.0, + model_dir=model_dir, + mode="regression") + + # Fit trained model + model.fit(dataset, nb_epoch=250) + + # Eval model on train + pred = model.predict(dataset) + mean_rel_error = np.mean(np.abs(1 - pred / y)) + assert mean_rel_error < 0.2 + + reloaded_model = dc.models.DTNNModel( + n_tasks, + n_embedding=20, + n_distance=100, + learning_rate=1.0, + model_dir=model_dir, + mode="regression") + reloaded_model.restore() + + # Check predictions match on random sample + origpred = model.predict(dataset) + reloadpred = reloaded_model.predict(dataset) + assert np.all(origpred == reloadpred) + + +def generate_sequences(sequence_length, num_sequences): + for i in range(num_sequences): + seq = [ + np.random.randint(10) + for x in range(np.random.randint(1, sequence_length + 1)) + ] + yield (seq, seq) + + +def test_seq2seq_reload(): + """Test reloading for seq2seq models.""" + + sequence_length = 8 + tokens = list(range(10)) + model_dir = tempfile.mkdtemp() + s = dc.models.SeqToSeq( + tokens, + tokens, + sequence_length, + encoder_layers=2, + decoder_layers=2, + embedding_dimension=150, + learning_rate=0.01, + dropout=0.1, + model_dir=model_dir) + + # Train the model on random sequences. We aren't training long enough to + # really make it reliable, but I want to keep this test fast, and it should + # still be able to reproduce a reasonable fraction of input sequences. + s.fit_sequences(generate_sequences(sequence_length, 25000)) -class TestReload(unittest.TestCase): + # Test it out. 
- def test_sklearn_reload(self): - """Test that trained model can be reloaded correctly.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 + tests = [seq for seq, target in generate_sequences(sequence_length, 50)] + pred1 = s.predict_from_sequences(tests, beam_width=1) + pred4 = s.predict_from_sequences(tests, beam_width=4) - # Generate dummy dataset - np.random.seed(123) - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features) - y = np.random.randint(2, size=(n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) + reloaded_s = dc.models.SeqToSeq( + tokens, + tokens, + sequence_length, + encoder_layers=2, + decoder_layers=2, + embedding_dimension=150, + learning_rate=0.01, + dropout=0.1, + model_dir=model_dir) + reloaded_s.restore() - dataset = dc.data.NumpyDataset(X, y, w, ids) - classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + reloaded_pred1 = reloaded_s.predict_from_sequences(tests, beam_width=1) + assert len(pred1) == len(reloaded_pred1) + for (p1, r1) in zip(pred1, reloaded_pred1): + assert p1 == r1 + reloaded_pred4 = reloaded_s.predict_from_sequences(tests, beam_width=4) + assert len(pred4) == len(reloaded_pred4) + for (p4, r4) in zip(pred4, reloaded_pred4): + assert p4 == r4 + embeddings = s.predict_embeddings(tests) + pred1e = s.predict_from_embeddings(embeddings, beam_width=1) + pred4e = s.predict_from_embeddings(embeddings, beam_width=4) - sklearn_model = RandomForestClassifier() - model_dir = tempfile.mkdtemp() - model = dc.models.SklearnModel(sklearn_model, model_dir) + reloaded_embeddings = reloaded_s.predict_embeddings(tests) + reloaded_pred1e = reloaded_s.predict_from_embeddings( + reloaded_embeddings, beam_width=1) + reloaded_pred4e = reloaded_s.predict_from_embeddings( + reloaded_embeddings, beam_width=4) - # Fit trained model - model.fit(dataset) - model.save() + assert np.all(embeddings == reloaded_embeddings) - # Load trained model - reloaded_model = dc.models.SklearnModel(None, model_dir) - 
reloaded_model.reload() + assert len(pred1e) == len(reloaded_pred1e) + for (p1e, r1e) in zip(pred1e, reloaded_pred1e): + assert p1e == r1e - # Eval model on train - scores = reloaded_model.evaluate(dataset, [classification_metric]) - assert scores[classification_metric.name] > .9 + assert len(pred4e) == len(reloaded_pred4e) + for (p4e, r4e) in zip(pred4e, reloaded_pred4e): + assert p4e == r4e diff --git a/deepchem/models/tests/test_robust.py b/deepchem/models/tests/test_robust.py new file mode 100644 index 0000000000000000000000000000000000000000..49e50aaf6363c6afe169c5974cff2c12834e761d --- /dev/null +++ b/deepchem/models/tests/test_robust.py @@ -0,0 +1,68 @@ +import numpy as np +import tensorflow as tf +import deepchem as dc + + +def test_singletask_robust_multitask_classification(): + """Test robust multitask singletask classification.""" + n_tasks = 1 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + + classification_metric = dc.metrics.Metric( + dc.metrics.accuracy_score, task_averager=np.mean) + model = dc.models.RobustMultitaskClassifier( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=1) + + +def test_singletask_robust_multitask_regression(): + """Test singletask robust multitask regression.""" + np.random.seed(123) + tf.random.set_seed(123) + n_tasks = 1 + n_samples = 10 + n_features = 3 + n_classes = 2 + + # Generate dummy dataset + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + + dataset = dc.data.NumpyDataset(X, y, w, ids) + + 
regression_metric = dc.metrics.Metric( + dc.metrics.mean_squared_error, task_averager=np.mean, mode="regression") + model = dc.models.RobustMultitaskRegressor( + n_tasks, + n_features, + layer_sizes=[50], + bypass_layer_sizes=[10], + dropouts=[0.], + learning_rate=0.003, + weight_init_stddevs=[.1], + batch_size=n_samples) + + # Fit trained model + model.fit(dataset, nb_epoch=1) diff --git a/deepchem/models/tests/test_scscore.py b/deepchem/models/tests/test_scscore.py index 87461b1ab2395d463f95095744855b45ba40b8d0..1576ae86d40c2750e835f5377e7f753391102ebd 100644 --- a/deepchem/models/tests/test_scscore.py +++ b/deepchem/models/tests/test_scscore.py @@ -1,6 +1,6 @@ import unittest - -import deepchem +import tempfile +import deepchem as dc import numpy as np @@ -16,10 +16,37 @@ class TestScScoreModel(unittest.TestCase): X = np.random.rand(n_samples, 2, n_features) y = np.random.randint(2, size=(n_samples, n_tasks)) - dataset = deepchem.data.NumpyDataset(X, y) + dataset = dc.data.NumpyDataset(X, y) - model = deepchem.models.ScScoreModel(n_features, dropouts=0) + model = dc.models.ScScoreModel(n_features, dropouts=0) model.fit(dataset, nb_epoch=100) pred = model.predict(dataset) assert np.array_equal(y, pred[0] > pred[1]) + + +def test_scscore_reload(): + """Test reloading of ScScoreModel""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + + # Create a dataset and an input function for processing it. 
+ + X = np.random.rand(n_samples, 2, n_features) + y = np.random.randint(2, size=(n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y) + + model_dir = tempfile.mkdtemp() + model = dc.models.ScScoreModel(n_features, dropouts=0, model_dir=model_dir) + model.fit(dataset, nb_epoch=100) + pred = model.predict(dataset) + assert np.array_equal(y, pred[0] > pred[1]) + + reloaded_model = dc.models.ScScoreModel( + n_features, dropouts=0, model_dir=model_dir) + reloaded_model.restore() + reloaded_pred = reloaded_model.predict(dataset) + assert len(pred) == len(reloaded_pred) + for p, r in zip(pred, reloaded_pred): + assert np.all(p == r) diff --git a/deepchem/models/tests/test_sklearn_model.py b/deepchem/models/tests/test_sklearn_model.py new file mode 100644 index 0000000000000000000000000000000000000000..6ec9b0287fbf0414cb59a1f62906b87f6f6e4473 --- /dev/null +++ b/deepchem/models/tests/test_sklearn_model.py @@ -0,0 +1,180 @@ +""" +Tests to make sure deepchem models can fit models on easy datasets. 
+""" + +import sklearn +import sklearn.datasets +import numpy as np +import deepchem as dc +from sklearn.linear_model import LinearRegression +from sklearn.linear_model import LogisticRegression + + +def test_sklearn_regression(): + """Test that sklearn models can learn on simple regression datasets.""" + np.random.seed(123) + + dataset = sklearn.datasets.load_diabetes() + X, y = dataset.data, dataset.target + y = np.expand_dims(y, 1) + frac_train = .7 + n_samples = len(X) + n_train = int(frac_train * n_samples) + X_train, y_train = X[:n_train], y[:n_train] + X_test, y_test = X[n_train:], y[n_train:] + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + regression_metric = dc.metrics.Metric(dc.metrics.r2_score) + + sklearn_model = LinearRegression() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on test + scores = model.evaluate(test_dataset, [regression_metric]) + assert scores[regression_metric.name] > .5 + + +def test_sklearn_transformed_regression(): + """Test that sklearn models can learn on simple transformed regression datasets.""" + np.random.seed(123) + dataset = sklearn.datasets.load_diabetes() + X, y = dataset.data, dataset.target + y = np.expand_dims(y, 1) + + frac_train = .7 + n_samples = len(X) + n_train = int(frac_train * n_samples) + X_train, y_train = X[:n_train], y[:n_train] + X_test, y_test = X[n_train:], y[n_train:] + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + # Eval model on train + transformers = [ + dc.trans.NormalizationTransformer( + transform_X=True, dataset=train_dataset), + dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset), + dc.trans.NormalizationTransformer( + transform_y=True, dataset=train_dataset) + ] + for data in [train_dataset, test_dataset]: + for transformer in transformers: + data = 
transformer.transform(data) + + regression_metric = dc.metrics.Metric(dc.metrics.r2_score) + sklearn_model = LinearRegression() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + train_scores = model.evaluate(train_dataset, [regression_metric], + transformers) + assert train_scores[regression_metric.name] > .5 + + # Eval model on test + test_scores = model.evaluate(test_dataset, [regression_metric], transformers) + assert test_scores[regression_metric.name] > .5 + + +def test_sklearn_multitask_regression(): + """Test that sklearn models can learn on simple multitask regression.""" + np.random.seed(123) + n_tasks = 4 + tasks = range(n_tasks) + dataset = sklearn.datasets.load_diabetes() + X, y = dataset.data, dataset.target + y = np.reshape(y, (len(y), 1)) + y = np.hstack([y] * n_tasks) + + frac_train = .7 + n_samples = len(X) + n_train = int(frac_train * n_samples) + X_train, y_train = X[:n_train], y[:n_train] + X_test, y_test = X[n_train:], y[n_train:] + train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) + test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) + + regression_metric = dc.metrics.Metric(dc.metrics.r2_score) + + def model_builder(model_dir): + sklearn_model = LinearRegression() + return dc.models.SklearnModel(sklearn_model, model_dir) + + model = dc.models.SingletaskToMultitask(tasks, model_builder) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on test + scores = model.evaluate(test_dataset, [regression_metric]) + score = scores[regression_metric.name] + assert score > .5 + + +def test_sklearn_classification(): + """Test that sklearn models can learn on simple classification datasets.""" + np.random.seed(123) + dataset = sklearn.datasets.load_digits(n_class=2) + X, y = dataset.data, dataset.target + + frac_train = .7 + n_samples = len(X) + n_train = int(frac_train * n_samples) + X_train, y_train = X[:n_train], y[:n_train] + 
X_test, y_test = X[n_train:], y[n_train:] + train_dataset = dc.data.NumpyDataset(X_train, y_train) + test_dataset = dc.data.NumpyDataset(X_test, y_test) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + sklearn_model = LogisticRegression() + model = dc.models.SklearnModel(sklearn_model) + + # Fit trained model + model.fit(train_dataset) + model.save() + + # Eval model on test + scores = model.evaluate(test_dataset, [classification_metric]) + assert scores[classification_metric.name] > .5 + + +def test_sklearn_multitask_classification(): + """Test that sklearn models can learn on simple multitask classification.""" + np.random.seed(123) + n_tasks = 4 + tasks = range(n_tasks) + dataset = sklearn.datasets.load_digits(n_class=2) + X, y = dataset.data, dataset.target + y = np.reshape(y, (len(y), 1)) + y = np.hstack([y] * n_tasks) + + frac_train = .7 + n_samples = len(X) + n_train = int(frac_train * n_samples) + X_train, y_train = X[:n_train], y[:n_train] + X_test, y_test = X[n_train:], y[n_train:] + train_dataset = dc.data.DiskDataset.from_numpy(X_train, y_train) + test_dataset = dc.data.DiskDataset.from_numpy(X_test, y_test) + + classification_metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + + def model_builder(model_dir): + sklearn_model = LogisticRegression() + return dc.models.SklearnModel(sklearn_model, model_dir) + + model = dc.models.SingletaskToMultitask(tasks, model_builder) + + # Fit trained model + model.fit(train_dataset) + model.save() + # Eval model on test + scores = model.evaluate(test_dataset, [classification_metric]) + assert scores[classification_metric.name] > .5 diff --git a/deepchem/models/tests/test_torch_model.py b/deepchem/models/tests/test_torch_model.py new file mode 100644 index 0000000000000000000000000000000000000000..2ce37b96649cd26a16a01f74d8ac2b6eac35a63b --- /dev/null +++ b/deepchem/models/tests/test_torch_model.py @@ -0,0 +1,406 @@ +import os +import unittest +import deepchem as dc +import numpy as np + 
+try: + import torch + import torch.nn.functional as F + has_pytorch = True +except: + has_pytorch = False + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_overfit_subclass_model(): + """Test fitting a TorchModel defined by subclassing Module.""" + n_data_points = 10 + n_features = 2 + np.random.seed(1234) + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + + class ExampleModel(torch.nn.Module): + + def __init__(self, layer_sizes): + super(ExampleModel, self).__init__() + self.layers = torch.nn.ModuleList() + in_size = n_features + for out_size in layer_sizes: + self.layers.append(torch.nn.Linear(in_size, out_size)) + in_size = out_size + + def forward(self, x): + for i, layer in enumerate(self.layers): + x = layer(x) + if i < len(self.layers) - 1: + x = F.relu(x) + return torch.sigmoid(x), x + + pytorch_model = ExampleModel([10, 1]) + model = dc.models.TorchModel( + pytorch_model, + dc.models.losses.SigmoidCrossEntropy(), + output_types=['prediction', 'loss'], + learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + scores = model.evaluate(dataset, [metric]) + assert scores[metric.name] > 0.9 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_overfit_sequential_model(): + """Test fitting a TorchModel defined as a sequential model.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(2, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1), + torch.nn.Sigmoid()) + model = dc.models.TorchModel( + pytorch_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + 
prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + generator = model.default_generator(dataset, pad_batches=False) + scores = model.evaluate_generator(generator, [metric]) + assert scores[metric.name] > 0.9 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_fit_use_all_losses(): + """Test fitting a TorchModel and getting a loss curve back.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(2, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1), + torch.nn.Sigmoid()) + model = dc.models.TorchModel( + pytorch_model, + dc.models.losses.BinaryCrossEntropy(), + learning_rate=0.005, + log_frequency=10) + losses = [] + model.fit(dataset, nb_epoch=1000, all_losses=losses) + # Each epoch is a single step for this model + assert len(losses) == 100 + assert np.count_nonzero(np.array(losses)) == 100 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_fit_on_batch(): + """Test fitting a TorchModel to individual batches.""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(2, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1), + torch.nn.Sigmoid()) + model = dc.models.TorchModel( + pytorch_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + i = 0 + for X, y, w, ids in dataset.iterbatches(model.batch_size, 500): + i += 1 + model.fit_on_batch(X, y, w, checkpoint=False) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + metric = dc.metrics.Metric(dc.metrics.roc_auc_score) + generator = model.default_generator(dataset, 
pad_batches=False) + scores = model.evaluate_generator(generator, [metric]) + assert scores[metric.name] > 0.9 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_checkpointing(): + """Test loading and saving checkpoints with TorchModel.""" + # Create two models using the same model directory. + + pytorch_model1 = torch.nn.Sequential(torch.nn.Linear(5, 10)) + pytorch_model2 = torch.nn.Sequential(torch.nn.Linear(5, 10)) + model1 = dc.models.TorchModel(pytorch_model1, dc.models.losses.L2Loss()) + model2 = dc.models.TorchModel( + pytorch_model2, dc.models.losses.L2Loss(), model_dir=model1.model_dir) + + # Check that they produce different results. + + X = np.random.rand(5, 5) + y1 = model1.predict_on_batch(X) + y2 = model2.predict_on_batch(X) + assert not np.array_equal(y1, y2) + + # Save a checkpoint from the first model and load it into the second one, + # and make sure they now match. + + model1.save_checkpoint() + model2.restore() + y3 = model1.predict_on_batch(X) + y4 = model2.predict_on_batch(X) + assert np.array_equal(y1, y3) + assert np.array_equal(y1, y4) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_fit_restore(): + """Test specifying restore=True when calling fit().""" + n_data_points = 10 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = (X[:, 0] > X[:, 1]).astype(np.float32) + dataset = dc.data.NumpyDataset(X, y) + + # Train a model to overfit the dataset. + + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(2, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1), + torch.nn.Sigmoid()) + model = dc.models.TorchModel( + pytorch_model, dc.models.losses.BinaryCrossEntropy(), learning_rate=0.005) + model.fit(dataset, nb_epoch=1000) + prediction = np.squeeze(model.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + + # Create an identical model, do a single step of fitting with restore=True, + # and make sure it got restored correctly. 
+ + pytorch_model2 = torch.nn.Sequential( + torch.nn.Linear(2, 10), torch.nn.ReLU(), torch.nn.Linear(10, 1), + torch.nn.Sigmoid()) + model2 = dc.models.TorchModel( + pytorch_model2, + dc.models.losses.BinaryCrossEntropy(), + model_dir=model.model_dir) + model2.fit(dataset, nb_epoch=1, restore=True) + prediction = np.squeeze(model2.predict_on_batch(X)) + assert np.array_equal(y, np.round(prediction)) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_uncertainty(): + """Test estimating uncertainty a TorchModel.""" + n_samples = 30 + n_features = 1 + noise = 0.1 + X = np.random.rand(n_samples, n_features) + y = (10 * X + np.random.normal(scale=noise, size=(n_samples, n_features))) + dataset = dc.data.NumpyDataset(X, y) + + # Build a model that predicts uncertainty. + + class PyTorchUncertainty(torch.nn.Module): + + def __init__(self): + super(PyTorchUncertainty, self).__init__() + self.hidden = torch.nn.Linear(n_features, 200) + self.output = torch.nn.Linear(200, n_features) + self.log_var = torch.nn.Linear(200, n_features) + + def forward(self, inputs): + x, use_dropout = inputs + x = self.hidden(x) + if use_dropout: + x = F.dropout(x, 0.1) + output = self.output(x) + log_var = self.log_var(x) + var = torch.exp(log_var) + return (output, var, output, log_var) + + def loss(outputs, labels, weights): + diff = labels[0] - outputs[0] + log_var = outputs[1] + var = torch.exp(log_var) + return torch.mean(diff * diff / var + log_var) + + class UncertaintyModel(dc.models.TorchModel): + + def default_generator(self, + dataset, + epochs=1, + mode='fit', + deterministic=True, + pad_batches=True): + for epoch in range(epochs): + for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( + batch_size=self.batch_size, + deterministic=deterministic, + pad_batches=pad_batches): + if mode == 'predict': + dropout = np.array(False) + else: + dropout = np.array(True) + yield ([X_b, dropout], [y_b], [w_b]) + + pytorch_model = PyTorchUncertainty() + model = 
UncertaintyModel( + pytorch_model, + loss, + output_types=['prediction', 'variance', 'loss', 'loss'], + learning_rate=0.003) + + # Fit the model and see if its predictions are correct. + + model.fit(dataset, nb_epoch=2500) + pred, std = model.predict_uncertainty(dataset) + assert np.mean(np.abs(y - pred)) < 1.0 + assert noise < np.mean(std) < 1.0 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_saliency_mapping(): + """Test computing a saliency map.""" + n_tasks = 3 + n_features = 5 + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(n_features, 20), torch.nn.Tanh(), + torch.nn.Linear(20, n_tasks)) + model = dc.models.TorchModel(pytorch_model, dc.models.losses.L2Loss()) + x = np.random.random(n_features) + s = model.compute_saliency(x) + assert s.shape[0] == n_tasks + assert s.shape[1] == n_features + + # Take a tiny step in the direction of s and see if the output changes by + # the expected amount. + + delta = 0.01 + for task in range(n_tasks): + norm = np.sqrt(np.sum(s[task]**2)) + step = 0.5 * delta / norm + pred1 = model.predict_on_batch((x + s[task] * step).reshape( + (1, n_features))).flatten() + pred2 = model.predict_on_batch((x - s[task] * step).reshape( + (1, n_features))).flatten() + assert np.allclose(pred1[task], (pred2 + norm * delta)[task], atol=1e-6) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_saliency_shapes(): + """Test computing saliency maps for multiple outputs with multiple dimensions.""" + + class SaliencyModel(torch.nn.Module): + + def __init__(self): + super(SaliencyModel, self).__init__() + self.layer1 = torch.nn.Linear(6, 4) + self.layer2 = torch.nn.Linear(6, 5) + + def forward(self, x): + x = torch.flatten(x) + output1 = self.layer1(x).reshape(1, 4, 1) + output2 = self.layer2(x).reshape(1, 1, 5) + return output1, output2 + + pytorch_model = SaliencyModel() + model = dc.models.TorchModel(pytorch_model, dc.models.losses.L2Loss()) + x = np.random.random((2, 3)) + s = 
model.compute_saliency(x) + assert len(s) == 2 + assert s[0].shape == (4, 1, 2, 3) + assert s[1].shape == (1, 5, 2, 3) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_tensorboard(): + """Test logging to Tensorboard.""" + n_data_points = 20 + n_features = 2 + X = np.random.rand(n_data_points, n_features) + y = [[0.0, 1.0] for x in range(n_data_points)] + dataset = dc.data.NumpyDataset(X, y) + pytorch_model = torch.nn.Sequential( + torch.nn.Linear(n_features, 2), torch.nn.Softmax(dim=1)) + model = dc.models.TorchModel( + pytorch_model, + dc.models.losses.CategoricalCrossEntropy(), + tensorboard=True, + log_frequency=1) + model.fit(dataset, nb_epoch=10) + files_in_dir = os.listdir(model.model_dir) + event_file = list(filter(lambda x: x.startswith("events"), files_in_dir)) + assert len(event_file) > 0 + event_file = os.path.join(model.model_dir, event_file[0]) + file_size = os.stat(event_file).st_size + assert file_size > 0 + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_fit_variables(): + """Test training a subset of the variables in a model.""" + + class VarModel(torch.nn.Module): + + def __init__(self, **kwargs): + super(VarModel, self).__init__(**kwargs) + self.var1 = torch.nn.Parameter(torch.Tensor([0.5])) + self.var2 = torch.nn.Parameter(torch.Tensor([0.5])) + + def forward(self, inputs): + return [self.var1, self.var2] + + def loss(outputs, labels, weights): + return (outputs[0] * outputs[1] - labels[0])**2 + + pytorch_model = VarModel() + model = dc.models.TorchModel(pytorch_model, loss, learning_rate=0.02) + x = np.ones((1, 1)) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 0.5) + assert np.allclose(vars[1], 0.5) + model.fit_generator([(x, x, x)] * 300) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 1.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, 2 * x, x)] * 300, variables=[pytorch_model.var1]) + vars = model.predict_on_batch(x) + assert 
np.allclose(vars[0], 2.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, x, x)] * 300, variables=[pytorch_model.var2]) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 2.0) + assert np.allclose(vars[1], 0.5) + + +@unittest.skipIf(not has_pytorch, 'PyTorch is not installed') +def test_fit_loss(): + """Test specifying a different loss function when calling fit().""" + + class VarModel(torch.nn.Module): + + def __init__(self): + super(VarModel, self).__init__() + self.var1 = torch.nn.Parameter(torch.Tensor([0.5])) + self.var2 = torch.nn.Parameter(torch.Tensor([0.5])) + + def forward(self, inputs): + return [self.var1, self.var2] + + def loss1(outputs, labels, weights): + return (outputs[0] * outputs[1] - labels[0])**2 + + def loss2(outputs, labels, weights): + return (outputs[0] + outputs[1] - labels[0])**2 + + pytorch_model = VarModel() + model = dc.models.TorchModel(pytorch_model, loss1, learning_rate=0.01) + x = np.ones((1, 1)) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 0.5) + assert np.allclose(vars[1], 0.5) + model.fit_generator([(x, x, x)] * 300) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0], 1.0) + assert np.allclose(vars[1], 1.0) + model.fit_generator([(x, 3 * x, x)] * 300, loss=loss2) + vars = model.predict_on_batch(x) + assert np.allclose(vars[0] + vars[1], 3.0) diff --git a/deepchem/models/tests/test_weave_models.py b/deepchem/models/tests/test_weave_models.py new file mode 100644 index 0000000000000000000000000000000000000000..aa36787c345f153eb39460116edf7cccfbe90e0c --- /dev/null +++ b/deepchem/models/tests/test_weave_models.py @@ -0,0 +1,214 @@ +import unittest +import os +import numpy as np +import pytest +import scipy + +import deepchem as dc +from deepchem.data import NumpyDataset +from deepchem.models import GraphConvModel, DAGModel, WeaveModel, MPNNModel +from deepchem.molnet import load_bace_classification, load_delaney +from deepchem.feat import ConvMolFeaturizer + +from flaky 
import flaky + + +def get_dataset(mode='classification', + featurizer='GraphConv', + num_tasks=2, + data_points=20): + if mode == 'classification': + tasks, all_dataset, transformers = load_bace_classification( + featurizer, reload=False) + else: + tasks, all_dataset, transformers = load_delaney(featurizer, reload=False) + + train, valid, test = all_dataset + for i in range(1, num_tasks): + tasks.append("random_task") + w = np.ones(shape=(data_points, len(tasks))) + + if mode == 'classification': + y = np.random.randint(0, 2, size=(data_points, len(tasks))) + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + else: + y = np.random.normal(size=(data_points, len(tasks))) + metric = dc.metrics.Metric( + dc.metrics.mean_absolute_error, mode="regression") + + ds = NumpyDataset(train.X[:data_points], y, w, train.ids[:data_points]) + + return tasks, ds, transformers, metric + + +def test_compute_features_on_infinity_distance(): + """Test that WeaveModel correctly transforms WeaveMol objects into tensors with infinite max_pair_distance.""" + featurizer = dc.feat.WeaveFeaturizer(max_pair_distance=None) + X = featurizer(["C", "CCC"]) + batch_size = 20 + model = WeaveModel( + 1, + batch_size=batch_size, + mode='classification', + fully_connected_layer_sizes=[2000, 1000], + batch_normalize=True, + batch_normalize_kwargs={ + "fused": False, + "trainable": True, + "renorm": True + }, + learning_rage=0.0005) + atom_feat, pair_feat, pair_split, atom_split, atom_to_pair = model.compute_features_on_batch( + X) + + # There are 4 atoms each of which have 75 atom features + assert atom_feat.shape == (4, 75) + # There are 10 pairs with infinity distance and 14 pair features + assert pair_feat.shape == (10, 14) + # 4 atoms in total + assert atom_split.shape == (4,) + assert np.all(atom_split == np.array([0, 1, 1, 1])) + # 10 pairs in total + assert pair_split.shape == (10,) + assert np.all(pair_split == np.array([0, 1, 1, 1, 2, 2, 2, 3, 3, 3])) + # 
10 pairs in total each with start/finish + assert atom_to_pair.shape == (10, 2) + assert np.all( + atom_to_pair == np.array([[0, 0], [1, 1], [1, 2], [1, 3], [2, 1], [2, 2], + [2, 3], [3, 1], [3, 2], [3, 3]])) + + +def test_compute_features_on_distance_1(): + """Test that WeaveModel correctly transforms WeaveMol objects into tensors with finite max_pair_distance.""" + featurizer = dc.feat.WeaveFeaturizer(max_pair_distance=1) + X = featurizer(["C", "CCC"]) + batch_size = 20 + model = WeaveModel( + 1, + batch_size=batch_size, + mode='classification', + fully_connected_layer_sizes=[2000, 1000], + batch_normalize=True, + batch_normalize_kwargs={ + "fused": False, + "trainable": True, + "renorm": True + }, + learning_rage=0.0005) + atom_feat, pair_feat, pair_split, atom_split, atom_to_pair = model.compute_features_on_batch( + X) + + # There are 4 atoms each of which have 75 atom features + assert atom_feat.shape == (4, 75) + # There are 8 pairs with distance 1 and 14 pair features. (To see why 8, + # there's the self pair for "C". For "CCC" there are 7 pairs including self + # connections and accounting for symmetry.) + assert pair_feat.shape == (8, 14) + # 4 atoms in total + assert atom_split.shape == (4,) + assert np.all(atom_split == np.array([0, 1, 1, 1])) + # 10 pairs in total + assert pair_split.shape == (8,) + # The center atom is self connected and to both neighbors so it appears + # thrice. The canonical ranking used in MolecularFeaturizer means this + # central atom is ranked last in ordering. 
+ assert np.all(pair_split == np.array([0, 1, 1, 2, 2, 3, 3, 3])) + # 10 pairs in total each with start/finish + assert atom_to_pair.shape == (8, 2) + assert np.all(atom_to_pair == np.array([[0, 0], [1, 1], [1, 3], [2, 2], + [2, 3], [3, 1], [3, 2], [3, 3]])) + + +@flaky +@pytest.mark.slow +def test_weave_model(): + tasks, dataset, transformers, metric = get_dataset( + 'classification', 'Weave', data_points=10) + + batch_size = 10 + model = WeaveModel( + len(tasks), + batch_size=batch_size, + mode='classification', + final_conv_activation_fn=None, + dropouts=0, + learning_rage=0.0003) + model.fit(dataset, nb_epoch=100) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.9 + + +@pytest.mark.slow +def test_weave_regression_model(): + import numpy as np + import tensorflow as tf + tf.random.set_seed(123) + np.random.seed(123) + tasks, dataset, transformers, metric = get_dataset( + 'regression', 'Weave', data_points=10) + + batch_size = 10 + model = WeaveModel( + len(tasks), + batch_size=batch_size, + mode='regression', + dropouts=0, + learning_rate=0.00003) + model.fit(dataset, nb_epoch=400) + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean_absolute_error'] < 0.1 + + +# def test_weave_fit_simple_infinity_distance(): +# featurizer = dc.feat.WeaveFeaturizer(max_pair_distance=None) +# X = featurizer(["C", "CCC"]) +# y = np.array([0, 1.]) +# dataset = dc.data.NumpyDataset(X, y) + +# batch_size = 20 +# model = WeaveModel( +# 1, +# batch_size=batch_size, +# mode='classification', +# fully_connected_layer_sizes=[2000, 1000], +# batch_normalize=True, +# batch_normalize_kwargs={ +# "fused": False, +# "trainable": True, +# "renorm": True +# }, +# learning_rage=0.0005) +# model.fit(dataset, nb_epoch=200) +# transformers = [] +# metric = dc.metrics.Metric( +# dc.metrics.roc_auc_score, np.mean, mode="classification") +# scores = model.evaluate(dataset, [metric], transformers) +# assert 
scores['mean-roc_auc_score'] >= 0.9 + + +def test_weave_fit_simple_distance_1(): + featurizer = dc.feat.WeaveFeaturizer(max_pair_distance=1) + X = featurizer(["C", "CCC"]) + y = np.array([0, 1.]) + dataset = dc.data.NumpyDataset(X, y) + + batch_size = 20 + model = WeaveModel( + 1, + batch_size=batch_size, + mode='classification', + fully_connected_layer_sizes=[2000, 1000], + batch_normalize=True, + batch_normalize_kwargs={ + "fused": False, + "trainable": True, + "renorm": True + }, + learning_rage=0.0005) + model.fit(dataset, nb_epoch=200) + transformers = [] + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + scores = model.evaluate(dataset, [metric], transformers) + assert scores['mean-roc_auc_score'] >= 0.9 diff --git a/deepchem/models/text_cnn.py b/deepchem/models/text_cnn.py index 30ee965f5e696139427999b414aa12751e881316..e99917ec605866adbb8478278064b7f5c61859a3 100644 --- a/deepchem/models/text_cnn.py +++ b/deepchem/models/text_cnn.py @@ -54,24 +54,36 @@ default_dict = { class TextCNNModel(KerasModel): """ A Convolutional neural network on smiles strings - Reimplementation of the discriminator module in ORGAN: https://arxiv.org/abs/1705.10843 - Originated from: http://emnlp2014.org/papers/pdf/EMNLP2014181.pdf - This model applies multiple 1D convolutional filters to the padded strings, - then max-over-time pooling is applied on all filters, extracting one feature per filter. - All features are concatenated and transformed through several hidden layers to form predictions. + Reimplementation of the discriminator module in ORGAN [1]_ . + Originated from [2]_. - This model is initially developed for sentence-level classification tasks, with - words represented as vectors. In this implementation, SMILES strings are dissected - into characters and transformed to one-hot vectors in a similar way. The model can - be used for general molecular-level classification or regression tasks. 
It is also - used in the ORGAN model as discriminator. + This model applies multiple 1D convolutional filters to + the padded strings, then max-over-time pooling is applied on + all filters, extracting one feature per filter. All + features are concatenated and transformed through several + hidden layers to form predictions. - Training of the model only requires SMILES strings input, all featurized datasets - that include SMILES in the `ids` attribute are accepted. PDBbind, QM7 and QM7b - are not supported. To use the model, `build_char_dict` should be called first - before defining the model to build character dict of input dataset, example can - be found in examples/delaney/delaney_textcnn.py + This model is initially developed for sentence-level + classification tasks, with words represented as vectors. In + this implementation, SMILES strings are dissected into + characters and transformed to one-hot vectors in a similar + way. The model can be used for general molecular-level + classification or regression tasks. It is also used in the + ORGAN model as discriminator. + + Training of the model only requires SMILES strings input, + all featurized datasets that include SMILES in the `ids` + attribute are accepted. PDBbind, QM7 and QM7b are not + supported. To use the model, `build_char_dict` should be + called first before defining the model to build character + dict of input dataset, example can be found in + examples/delaney/delaney_textcnn.py + + References + ---------- + .. [1] Guimaraes, Gabriel Lima, et al. "Objective-reinforced generative adversarial networks (ORGAN) for sequence generation models." arXiv preprint arXiv:1705.10843 (2017). + .. [2] Kim, Yoon. "Convolutional neural networks for sentence classification." arXiv preprint arXiv:1408.5882 (2014). 
""" diff --git a/deepchem/models/torch_models/__init__.py b/deepchem/models/torch_models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c27901b4446cfd997bb20cd581bca49368d800b2 --- /dev/null +++ b/deepchem/models/torch_models/__init__.py @@ -0,0 +1,7 @@ +# flake8:noqa +from deepchem.models.torch_models.torch_model import TorchModel +from deepchem.models.torch_models.attentivefp import AttentiveFP, AttentiveFPModel +from deepchem.models.torch_models.cgcnn import CGCNN, CGCNNModel +from deepchem.models.torch_models.gat import GAT, GATModel +from deepchem.models.torch_models.gcn import GCN, GCNModel +from deepchem.models.torch_models.mpnn import MPNN, MPNNModel diff --git a/deepchem/models/torch_models/attentivefp.py b/deepchem/models/torch_models/attentivefp.py new file mode 100644 index 0000000000000000000000000000000000000000..648336ce6d1dd4772982838ec2c5ed4d81a1188d --- /dev/null +++ b/deepchem/models/torch_models/attentivefp.py @@ -0,0 +1,309 @@ +""" +DGL-based AttentiveFP for graph property prediction. +""" +import torch.nn as nn +import torch.nn.functional as F + +from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy +from deepchem.models.torch_models.torch_model import TorchModel + + +class AttentiveFP(nn.Module): + """Model for Graph Property Prediction. + + This model proceeds as follows: + + * Combine node features and edge features for initializing node representations, + which involves a round of message passing + * Update node representations with multiple rounds of message passing + * For each graph, compute its representation by combining the representations + of all nodes in it, which involves a gated recurrent unit (GRU). 
+ * Perform the final prediction using a linear layer + + Examples + -------- + + >>> import deepchem as dc + >>> import dgl + >>> from deepchem.models import AttentiveFP + >>> smiles = ["C1CCC1", "C1=CC=CN=C1"] + >>> featurizer = dc.feat.MolGraphConvFeaturizer(use_edges=True) + >>> graphs = featurizer.featurize(smiles) + >>> print(type(graphs[0])) + + >>> dgl_graphs = [graphs[i].to_dgl_graph(self_loop=True) for i in range(len(graphs))] + >>> # Batch two graphs into a graph of two connected components + >>> batch_dgl_graph = dgl.batch(dgl_graphs) + >>> model = AttentiveFP(n_tasks=1, mode='regression') + >>> preds = model(batch_dgl_graph) + >>> print(type(preds)) + + >>> preds.shape == (2, 1) + True + + References + ---------- + .. [1] Zhaoping Xiong, Dingyan Wang, Xiaohong Liu, Feisheng Zhong, Xiaozhe Wan, Xutong Li, + Zhaojun Li, Xiaomin Luo, Kaixian Chen, Hualiang Jiang, and Mingyue Zheng. "Pushing + the Boundaries of Molecular Representation for Drug Discovery with the Graph Attention + Mechanism." Journal of Medicinal Chemistry. 2020, 63, 16, 8749–8760. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + num_layers: int = 2, + num_timesteps: int = 2, + graph_feat_size: int = 200, + dropout: float = 0., + mode: str = 'regression', + number_atom_features: int = 30, + number_bond_features: int = 11, + n_classes: int = 2, + nfeat_name: str = 'x', + efeat_name: str = 'edge_attr'): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + num_layers: int + Number of graph neural network layers, i.e. number of rounds of message passing. + Default to 2. + num_timesteps: int + Number of time steps for updating graph representations with a GRU. Default to 2. + graph_feat_size: int + Size for graph representations. Default to 200. + dropout: float + Dropout probability. Default to 0. 
+ mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + number_bond_features: int + The length of the initial bond feature vectors. Default to 11. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + nfeat_name: str + For an input graph ``g``, the model assumes that it stores node features in + ``g.ndata[nfeat_name]`` and will retrieve input node features from that. + Default to 'x'. + efeat_name: str + For an input graph ``g``, the model assumes that it stores edge features in + ``g.edata[efeat_name]`` and will retrieve input edge features from that. + Default to 'edge_attr'. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + try: + import dgllife + except: + raise ImportError('This class requires dgllife.') + + if mode not in ['classification', 'regression']: + raise ValueError("mode must be either 'classification' or 'regression'") + + super(AttentiveFP, self).__init__() + + self.n_tasks = n_tasks + self.mode = mode + self.n_classes = n_classes + self.nfeat_name = nfeat_name + self.efeat_name = efeat_name + if mode == 'classification': + out_size = n_tasks * n_classes + else: + out_size = n_tasks + + from dgllife.model import AttentiveFPPredictor as DGLAttentiveFPPredictor + + self.model = DGLAttentiveFPPredictor( + node_feat_size=number_atom_features, + edge_feat_size=number_bond_features, + num_layers=num_layers, + num_timesteps=num_timesteps, + graph_feat_size=graph_feat_size, + n_tasks=out_size, + dropout=dropout) + + def forward(self, g): + """Predict graph labels + + Parameters + ---------- + g: DGLGraph + A DGLGraph for a batch of graphs. It stores the node features in + ``dgl_graph.ndata[self.nfeat_name]`` and edge features in + ``dgl_graph.edata[self.efeat_name]``. + + Returns + ------- + torch.Tensor + The model output. 
+ + * When self.mode = 'regression', + its shape will be ``(dgl_graph.batch_size, self.n_tasks)``. + * When self.mode = 'classification', the output consists of probabilities + for classes. Its shape will be + ``(dgl_graph.batch_size, self.n_tasks, self.n_classes)`` if self.n_tasks > 1; + its shape will be ``(dgl_graph.batch_size, self.n_classes)`` if self.n_tasks is 1. + torch.Tensor, optional + This is only returned when self.mode = 'classification', the output consists of the + logits for classes before softmax. + """ + node_feats = g.ndata[self.nfeat_name] + edge_feats = g.edata[self.efeat_name] + out = self.model(g, node_feats, edge_feats) + + if self.mode == 'classification': + if self.n_tasks == 1: + logits = out.view(-1, self.n_classes) + softmax_dim = 1 + else: + logits = out.view(-1, self.n_tasks, self.n_classes) + softmax_dim = 2 + proba = F.softmax(logits, dim=softmax_dim) + return proba, logits + else: + return out + + +class AttentiveFPModel(TorchModel): + """Model for Graph Property Prediction. + + This model proceeds as follows: + + * Combine node features and edge features for initializing node representations, + which involves a round of message passing + * Update node representations with multiple rounds of message passing + * For each graph, compute its representation by combining the representations + of all nodes in it, which involves a gated recurrent unit (GRU). + * Perform the final prediction using a linear layer + + Examples + -------- + + >>> + >> import deepchem as dc + >> from deepchem.models import AttentiveFPModel + >> featurizer = dc.feat.MolGraphConvFeaturizer(use_edges=True) + >> tasks, datasets, transformers = dc.molnet.load_tox21( + .. reload=False, featurizer=featurizer, transformers=[]) + >> train, valid, test = datasets + >> model = AttentiveFPModel(mode='classification', n_tasks=len(tasks), + .. batch_size=32, learning_rate=0.001) + >> model.fit(train, nb_epoch=50) + + References + ---------- + .. 
[1] Zhaoping Xiong, Dingyan Wang, Xiaohong Liu, Feisheng Zhong, Xiaozhe Wan, Xutong Li, + Zhaojun Li, Xiaomin Luo, Kaixian Chen, Hualiang Jiang, and Mingyue Zheng. "Pushing + the Boundaries of Molecular Representation for Drug Discovery with the Graph + Attention Mechanism." Journal of Medicinal Chemistry. 2020, 63, 16, 8749–8760. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + num_layers: int = 2, + num_timesteps: int = 2, + graph_feat_size: int = 200, + dropout: float = 0., + mode: str = 'regression', + number_atom_features: int = 30, + number_bond_features: int = 11, + n_classes: int = 2, + self_loop: bool = True, + **kwargs): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + num_layers: int + Number of graph neural network layers, i.e. number of rounds of message passing. + Default to 2. + num_timesteps: int + Number of time steps for updating graph representations with a GRU. Default to 2. + graph_feat_size: int + Size for graph representations. Default to 200. + dropout: float + Dropout probability. Default to 0. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + number_bond_features: int + The length of the initial bond feature vectors. Default to 11. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + self_loop: bool + Whether to add self loops for the nodes, i.e. edges from nodes to themselves. + When input graphs have isolated nodes, self loops allow preserving the original feature + of them in message passing. Default to True. + kwargs + This can include any keyword argument of TorchModel. 
+ """ + model = AttentiveFP( + n_tasks=n_tasks, + num_layers=num_layers, + num_timesteps=num_timesteps, + graph_feat_size=graph_feat_size, + dropout=dropout, + mode=mode, + number_atom_features=number_atom_features, + number_bond_features=number_bond_features, + n_classes=n_classes) + if mode == 'regression': + loss: Loss = L2Loss() + output_types = ['prediction'] + else: + loss = SparseSoftmaxCrossEntropy() + output_types = ['prediction', 'loss'] + super(AttentiveFPModel, self).__init__( + model, loss=loss, output_types=output_types, **kwargs) + + self._self_loop = self_loop + + def _prepare_batch(self, batch): + """Create batch data for AttentiveFP. + + Parameters + ---------- + batch: tuple + The tuple is ``(inputs, labels, weights)``. + + Returns + ------- + inputs: DGLGraph + DGLGraph for a batch of graphs. + labels: list of torch.Tensor or None + The graph labels. + weights: list of torch.Tensor or None + The weights for each sample or sample/task pair converted to torch.Tensor. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + + inputs, labels, weights = batch + dgl_graphs = [ + graph.to_dgl_graph(self_loop=self._self_loop) for graph in inputs[0] + ] + inputs = dgl.batch(dgl_graphs).to(self.device) + _, labels, weights = super(AttentiveFPModel, self)._prepare_batch( + ([], labels, weights)) + return inputs, labels, weights diff --git a/deepchem/models/torch_models/cgcnn.py b/deepchem/models/torch_models/cgcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..bd1c5d090cac2b0964ddc4659997bcfea9181978 --- /dev/null +++ b/deepchem/models/torch_models/cgcnn.py @@ -0,0 +1,359 @@ +""" +This is a sample implementation for working DGL with DeepChem! 
+""" +import torch +import torch.nn as nn +import torch.nn.functional as F + +from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy +from deepchem.models.torch_models.torch_model import TorchModel + + +class CGCNNLayer(nn.Module): + """The convolutional layer of CGCNN. + + This class was implemented using DGLGraph methods. + Please confirm how to use DGLGraph methods from below link. + See: https://docs.dgl.ai/en/0.4.x/tutorials/models/1_gnn/9_gat.html + + Examples + -------- + >>> import deepchem as dc + >>> import pymatgen as mg + >>> lattice = mg.Lattice.cubic(4.2) + >>> structure = mg.Structure(lattice, ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]]) + >>> featurizer = dc.feat.CGCNNFeaturizer() + >>> cgcnn_graph = featurizer.featurize([structure])[0] + >>> cgcnn_graph.num_node_features + 92 + >>> cgcnn_graph.num_edge_features + 41 + >>> cgcnn_dgl_graph = cgcnn_graph.to_dgl_graph() + >>> print(type(cgcnn_dgl_graph)) + + >>> layer = CGCNNLayer(hidden_node_dim=92, edge_dim=41) + >>> node_feats = cgcnn_dgl_graph.ndata.pop('x') + >>> edge_feats = cgcnn_dgl_graph.edata.pop('edge_attr') + >>> new_node_feats, new_edge_feats = layer(cgcnn_dgl_graph, node_feats, edge_feats) + + Notes + ----- + This class requires DGL and PyTorch to be installed. + """ + + def __init__(self, + hidden_node_dim: int, + edge_dim: int, + batch_norm: bool = True): + """ + Parameters + ---------- + hidden_node_dim: int + The length of the hidden node feature vectors. + edge_dim: int + The length of the edge feature vectors. + batch_norm: bool, default True + Whether to apply batch normalization or not. 
+ """ + super(CGCNNLayer, self).__init__() + z_dim = 2 * hidden_node_dim + edge_dim + liner_out_dim = 2 * hidden_node_dim + self.linear = nn.Linear(z_dim, liner_out_dim) + self.batch_norm = nn.BatchNorm1d(liner_out_dim) if batch_norm else None + + def message_func(self, edges): + z = torch.cat( + [edges.src['x'], edges.dst['x'], edges.data['edge_attr']], dim=1) + z = self.linear(z) + if self.batch_norm is not None: + z = self.batch_norm(z) + gated_z, message_z = z.chunk(2, dim=1) + gated_z = torch.sigmoid(gated_z) + message_z = F.softplus(message_z) + return {'message': gated_z * message_z} + + def reduce_func(self, nodes): + nbr_sumed = torch.sum(nodes.mailbox['message'], dim=1) + new_x = F.softplus(nodes.data['x'] + nbr_sumed) + return {'new_x': new_x} + + def forward(self, dgl_graph, node_feats, edge_feats): + """Update node representations. + + Parameters + ---------- + dgl_graph: DGLGraph + DGLGraph for a batch of graphs. + node_feats: torch.Tensor + The node features. The shape is `(N, hidden_node_dim)`. + edge_feats: torch.Tensor + The edge features. The shape is `(N, hidden_node_dim)`. + + Returns + ------- + node_feats: torch.Tensor + The updated node features. The shape is `(N, hidden_node_dim)`. + """ + dgl_graph.ndata['x'] = node_feats + dgl_graph.edata['edge_attr'] = edge_feats + dgl_graph.update_all(self.message_func, self.reduce_func) + node_feats = dgl_graph.ndata.pop('new_x') + return node_feats + + +class CGCNN(nn.Module): + """Crystal Graph Convolutional Neural Network (CGCNN). + + This model takes arbitary crystal structures as an input, and predict material properties + using the element information and connection of atoms in the crystal. If you want to get + some material properties which has a high computational cost like band gap in the case + of DFT, this model may be useful. This model is one of variants of Graph Convolutional + Networks. 
The main differences between other GCN models are how to construct graphs and + how to update node representations. This model defines the crystal graph from structures + using distances between atoms. The crystal graph is an undirected multigraph which is defined + by nodes representing atom properties and edges representing connections between atoms + in a crystal. And, this model updates the node representations using both neighbor node + and edge representations. Please confirm the detail algorithms from [1]_. + + Examples + -------- + >>> import deepchem as dc + >>> import pymatgen as mg + >>> lattice = mg.Lattice.cubic(4.2) + >>> structure = mg.Structure(lattice, ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]]) + >>> featurizer = dc.feat.CGCNNFeaturizer() + >>> cgcnn_feat = featurizer.featurize([structure])[0] + >>> print(type(cgcnn_feat)) + + >>> cgcnn_dgl_feat = cgcnn_feat.to_dgl_graph() + >>> print(type(cgcnn_dgl_feat)) + + >>> model = dc.models.CGCNN(mode='regression', n_tasks=2) + >>> out = model(cgcnn_dgl_feat) + >>> print(type(out)) + + >>> out.shape == (1, 2) + True + + References + ---------- + .. [1] Xie, Tian, and Jeffrey C. Grossman. "Crystal graph convolutional neural networks + for an accurate and interpretable prediction of material properties." Physical review letters + 120.14 (2018): 145301. + + Notes + ----- + This class requires DGL and PyTorch to be installed. + """ + + def __init__( + self, + in_node_dim: int = 92, + hidden_node_dim: int = 64, + in_edge_dim: int = 41, + num_conv: int = 3, + predictor_hidden_feats: int = 128, + n_tasks: int = 1, + mode: str = 'regression', + n_classes: int = 2, + ): + """ + Parameters + ---------- + in_node_dim: int, default 92 + The length of the initial node feature vectors. The 92 is + based on length of vectors in the atom_init.json. + hidden_node_dim: int, default 64 + The length of the hidden node feature vectors. + in_edge_dim: int, default 41 + The length of the initial edge feature vectors. 
The 41 is + based on default setting of CGCNNFeaturizer. + num_conv: int, default 3 + The number of convolutional layers. + predictor_hidden_feats: int, default 128 + The size for hidden representations in the output MLP predictor. + n_tasks: int, default 1 + The number of the output size. + mode: str, default 'regression' + The model type, 'classification' or 'regression'. + n_classes: int, default 2 + The number of classes to predict (only used in classification mode). + """ + try: + import dgl + except: + raise ImportError("This class requires DGL to be installed.") + super(CGCNN, self).__init__() + if mode not in ['classification', 'regression']: + raise ValueError("mode must be either 'classification' or 'regression'") + + self.n_tasks = n_tasks + self.mode = mode + self.n_classes = n_classes + self.embedding = nn.Linear(in_node_dim, hidden_node_dim) + self.conv_layers = nn.ModuleList([ + CGCNNLayer( + hidden_node_dim=hidden_node_dim, + edge_dim=in_edge_dim, + batch_norm=True) for _ in range(num_conv) + ]) + self.pooling = dgl.mean_nodes + self.fc = nn.Linear(hidden_node_dim, predictor_hidden_feats) + if self.mode == 'regression': + self.out = nn.Linear(predictor_hidden_feats, n_tasks) + else: + self.out = nn.Linear(predictor_hidden_feats, n_tasks * n_classes) + + def forward(self, dgl_graph): + """Predict labels + + Parameters + ---------- + dgl_graph: DGLGraph + DGLGraph for a batch of graphs. The graph expects that the node features + are stored in `ndata['x']`, and the edge features are stored in `edata['edge_attr']`. + + Returns + ------- + out: torch.Tensor + The output values of this model. + If mode == 'regression', the shape is `(batch_size, n_tasks)`. + If mode == 'classification', the shape is `(batch_size, n_tasks, n_classes)` (n_tasks > 1) + or `(batch_size, n_classes)` (n_tasks == 1) and the output values are probabilities of each class label. 
+ """ + graph = dgl_graph + # embedding node features + node_feats = graph.ndata.pop('x') + edge_feats = graph.edata.pop('edge_attr') + node_feats = self.embedding(node_feats) + + # convolutional layer + for conv in self.conv_layers: + node_feats = conv(graph, node_feats, edge_feats) + + # pooling + graph.ndata['updated_x'] = node_feats + graph_feat = F.softplus(self.pooling(graph, 'updated_x')) + graph_feat = F.softplus(self.fc(graph_feat)) + out = self.out(graph_feat) + + if self.mode == 'regression': + return out + else: + logits = out.view(-1, self.n_tasks, self.n_classes) + # for n_tasks == 1 case + logits = torch.squeeze(logits) + proba = F.softmax(logits) + return proba, logits + + +class CGCNNModel(TorchModel): + """Crystal Graph Convolutional Neural Network (CGCNN). + + Here is a simple example of code that uses the CGCNNModel with + materials dataset. + + >> import deepchem as dc + >> dataset_config = {"reload": False, "featurizer": dc.feat.CGCNNFeaturizer, "transformers": []} + >> tasks, datasets, transformers = dc.molnet.load_perovskite(**dataset_config) + >> train, valid, test = datasets + >> model = dc.models.CGCNNModel(mode='regression', batch_size=32, learning_rate=0.001) + >> model.fit(train, nb_epoch=50) + + This model takes arbitary crystal structures as an input, and predict material properties + using the element information and connection of atoms in the crystal. If you want to get + some material properties which has a high computational cost like band gap in the case + of DFT, this model may be useful. This model is one of variants of Graph Convolutional + Networks. The main differences between other GCN models are how to construct graphs and + how to update node representations. This model defines the crystal graph from structures + using distances between atoms. The crystal graph is an undirected multigraph which is defined + by nodes representing atom properties and edges representing connections between atoms + in a crystal. 
And, this model updates the node representations using both neighbor node + and edge representations. Please confirm the detail algorithms from [1]_. + + References + ---------- + .. [1] Xie, Tian, and Jeffrey C. Grossman. "Crystal graph convolutional neural networks + for an accurate and interpretable prediction of material properties." Physical review letters + 120.14 (2018): 145301. + + Notes + ----- + This class requires DGL and PyTorch to be installed. + """ + + def __init__(self, + in_node_dim: int = 92, + hidden_node_dim: int = 64, + in_edge_dim: int = 41, + num_conv: int = 3, + predictor_hidden_feats: int = 128, + n_tasks: int = 1, + mode: str = 'regression', + n_classes: int = 2, + **kwargs): + """ + This class accepts all the keyword arguments from TorchModel. + + Parameters + ---------- + in_node_dim: int, default 92 + The length of the initial node feature vectors. The 92 is + based on length of vectors in the atom_init.json. + hidden_node_dim: int, default 64 + The length of the hidden node feature vectors. + in_edge_dim: int, default 41 + The length of the initial edge feature vectors. The 41 is + based on default setting of CGCNNFeaturizer. + num_conv: int, default 3 + The number of convolutional layers. + predictor_hidden_feats: int, default 128 + The size for hidden representations in the output MLP predictor. + n_tasks: int, default 1 + The number of the output size. + mode: str, default 'regression' + The model type, 'classification' or 'regression'. + n_classes: int, default 2 + The number of classes to predict (only used in classification mode). + kwargs: Dict + This class accepts all the keyword arguments from TorchModel. 
+ """ + model = CGCNN(in_node_dim, hidden_node_dim, in_edge_dim, num_conv, + predictor_hidden_feats, n_tasks, mode, n_classes) + if mode == "regression": + loss: Loss = L2Loss() + output_types = ['prediction'] + else: + loss = SparseSoftmaxCrossEntropy() + output_types = ['prediction', 'loss'] + super(CGCNNModel, self).__init__( + model, loss=loss, output_types=output_types, **kwargs) + + def _prepare_batch(self, batch): + """Create batch data for CGCNN. + + Parameters + ---------- + batch: Tuple + The tuple are `(inputs, labels, weights)`. + + Returns + ------- + inputs: DGLGraph + DGLGraph for a batch of graphs. + labels: List[torch.Tensor] or None + The labels converted to torch.Tensor + weights: List[torch.Tensor] or None + The weights for each sample or sample/task pair converted to torch.Tensor + """ + try: + import dgl + except: + raise ImportError("This class requires DGL to be installed.") + + inputs, labels, weights = batch + dgl_graphs = [graph.to_dgl_graph() for graph in inputs[0]] + inputs = dgl.batch(dgl_graphs).to(self.device) + _, labels, weights = super(CGCNNModel, self)._prepare_batch(([], labels, + weights)) + return inputs, labels, weights diff --git a/deepchem/models/torch_models/gat.py b/deepchem/models/torch_models/gat.py new file mode 100644 index 0000000000000000000000000000000000000000..cecc8f03f39b3f921813defdc060099120282fce --- /dev/null +++ b/deepchem/models/torch_models/gat.py @@ -0,0 +1,370 @@ +""" +DGL-based GAT for graph property prediction. +""" +import torch.nn as nn +import torch.nn.functional as F + +from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy +from deepchem.models.torch_models.torch_model import TorchModel + + +class GAT(nn.Module): + """Model for Graph Property Prediction Based on Graph Attention Networks (GAT). 
+ + This model proceeds as follows: + + * Update node representations in graphs with a variant of GAT + * For each graph, compute its representation by 1) a weighted sum of the node + representations in the graph, where the weights are computed by applying a + gating function to the node representations 2) a max pooling of the node + representations 3) concatenating the output of 1) and 2) + * Perform the final prediction using an MLP + + Examples + -------- + + >>> import deepchem as dc + >>> import dgl + >>> from deepchem.models import GAT + >>> smiles = ["C1CCC1", "C1=CC=CN=C1"] + >>> featurizer = dc.feat.MolGraphConvFeaturizer() + >>> graphs = featurizer.featurize(smiles) + >>> print(type(graphs[0])) + + >>> dgl_graphs = [graphs[i].to_dgl_graph(self_loop=True) for i in range(len(graphs))] + >>> # Batch two graphs into a graph of two connected components + >>> batch_dgl_graph = dgl.batch(dgl_graphs) + >>> model = GAT(n_tasks=1, mode='regression') + >>> preds = model(batch_dgl_graph) + >>> print(type(preds)) + + >>> preds.shape == (2, 1) + True + + References + ---------- + .. [1] Petar Veličković, Guillem Cucurull, Arantxa Casanova, Adriana Romero, Pietro Liò, + and Yoshua Bengio. "Graph Attention Networks." ICLR 2018. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + graph_attention_layers: list = None, + n_attention_heads: int = 8, + agg_modes: list = None, + activation=F.elu, + residual: bool = True, + dropout: float = 0., + alpha: float = 0.2, + predictor_hidden_feats: int = 128, + predictor_dropout: float = 0., + mode: str = 'regression', + number_atom_features: int = 30, + n_classes: int = 2, + nfeat_name: str = 'x'): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + graph_attention_layers: list of int + Width of channels per attention head for GAT layers. 
graph_attention_layers[i] + gives the width of channel for each attention head for the i-th GAT layer. If + both ``graph_attention_layers`` and ``agg_modes`` are specified, they should have + equal length. If not specified, the default value will be [8, 8]. + n_attention_heads: int + Number of attention heads in each GAT layer. + agg_modes: list of str + The way to aggregate multi-head attention results for each GAT layer, which can be + either 'flatten' for concatenating all-head results or 'mean' for averaging all-head + results. ``agg_modes[i]`` gives the way to aggregate multi-head attention results for + the i-th GAT layer. If both ``graph_attention_layers`` and ``agg_modes`` are + specified, they should have equal length. If not specified, the model will flatten + multi-head results for intermediate GAT layers and compute mean of multi-head results + for the last GAT layer. + activation: activation function or None + The activation function to apply to the aggregated multi-head results for each GAT + layer. If not specified, the default value will be ELU. + residual: bool + Whether to add a residual connection within each GAT layer. Default to True. + dropout: float + The dropout probability within each GAT layer. Default to 0. + alpha: float + A hyperparameter in LeakyReLU, which is the slope for negative values. Default to 0.2. + predictor_hidden_feats: int + The size for hidden representations in the output MLP predictor. Default to 128. + predictor_dropout: float + The dropout probability in the output MLP predictor. Default to 0. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. 
+ nfeat_name: str + For an input graph ``g``, the model assumes that it stores node features in + ``g.ndata[nfeat_name]`` and will retrieve input node features from that. + Default to 'x'. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + try: + import dgllife + except: + raise ImportError('This class requires dgllife.') + + if mode not in ['classification', 'regression']: + raise ValueError("mode must be either 'classification' or 'regression'") + + super(GAT, self).__init__() + + self.n_tasks = n_tasks + self.mode = mode + self.n_classes = n_classes + self.nfeat_name = nfeat_name + if mode == 'classification': + out_size = n_tasks * n_classes + else: + out_size = n_tasks + + from dgllife.model import GATPredictor as DGLGATPredictor + + if isinstance(graph_attention_layers, list) and isinstance(agg_modes, list): + assert len(graph_attention_layers) == len(agg_modes), \ + 'Expect graph_attention_layers and agg_modes to have equal length, ' \ + 'got {:d} and {:d}'.format(len(graph_attention_layers), len(agg_modes)) + + # Decide first number of GAT layers + if graph_attention_layers is not None: + num_gnn_layers = len(graph_attention_layers) + elif agg_modes is not None: + num_gnn_layers = len(agg_modes) + else: + num_gnn_layers = 2 + + if graph_attention_layers is None: + graph_attention_layers = [8] * num_gnn_layers + if agg_modes is None: + agg_modes = ['flatten' for _ in range(num_gnn_layers - 1)] + agg_modes.append('mean') + + if activation is not None: + activation = [activation] * num_gnn_layers + + self.model = DGLGATPredictor( + in_feats=number_atom_features, + hidden_feats=graph_attention_layers, + num_heads=[n_attention_heads] * num_gnn_layers, + feat_drops=[dropout] * num_gnn_layers, + attn_drops=[dropout] * num_gnn_layers, + alphas=[alpha] * num_gnn_layers, + residuals=[residual] * num_gnn_layers, + agg_modes=agg_modes, + activations=activation, + n_tasks=out_size, + predictor_hidden_feats=predictor_hidden_feats, + 
predictor_dropout=predictor_dropout) + + def forward(self, g): + """Predict graph labels + + Parameters + ---------- + g: DGLGraph + A DGLGraph for a batch of graphs. It stores the node features in + ``dgl_graph.ndata[self.nfeat_name]``. + + Returns + ------- + torch.Tensor + The model output. + + * When self.mode = 'regression', + its shape will be ``(dgl_graph.batch_size, self.n_tasks)``. + * When self.mode = 'classification', the output consists of probabilities + for classes. Its shape will be + ``(dgl_graph.batch_size, self.n_tasks, self.n_classes)`` if self.n_tasks > 1; + its shape will be ``(dgl_graph.batch_size, self.n_classes)`` if self.n_tasks is 1. + torch.Tensor, optional + This is only returned when self.mode = 'classification', the output consists of the + logits for classes before softmax. + """ + node_feats = g.ndata[self.nfeat_name] + out = self.model(g, node_feats) + + if self.mode == 'classification': + if self.n_tasks == 1: + logits = out.view(-1, self.n_classes) + softmax_dim = 1 + else: + logits = out.view(-1, self.n_tasks, self.n_classes) + softmax_dim = 2 + proba = F.softmax(logits, dim=softmax_dim) + return proba, logits + else: + return out + + +class GATModel(TorchModel): + """Model for Graph Property Prediction Based on Graph Attention Networks (GAT). + + This model proceeds as follows: + + * Update node representations in graphs with a variant of GAT + * For each graph, compute its representation by 1) a weighted sum of the node + representations in the graph, where the weights are computed by applying a + gating function to the node representations 2) a max pooling of the node + representations 3) concatenating the output of 1) and 2) + * Perform the final prediction using an MLP + + Examples + -------- + + >>> + >> import deepchem as dc + >> from deepchem.models import GATModel + >> featurizer = dc.feat.MolGraphConvFeaturizer() + >> tasks, datasets, transformers = dc.molnet.load_tox21( + .. 
reload=False, featurizer=featurizer, transformers=[]) + >> train, valid, test = datasets + >> model = GATModel(mode='classification', n_tasks=len(tasks), + .. batch_size=32, learning_rate=0.001) + >> model.fit(train, nb_epoch=50) + + References + ---------- + .. [1] Petar Veličković, Guillem Cucurull, Arantxa Casanova, Adriana Romero, Pietro Liò, + and Yoshua Bengio. "Graph Attention Networks." ICLR 2018. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + graph_attention_layers: list = None, + n_attention_heads: int = 8, + agg_modes: list = None, + activation=F.elu, + residual: bool = True, + dropout: float = 0., + alpha: float = 0.2, + predictor_hidden_feats: int = 128, + predictor_dropout: float = 0., + mode: str = 'regression', + number_atom_features: int = 30, + n_classes: int = 2, + self_loop: bool = True, + **kwargs): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + graph_attention_layers: list of int + Width of channels per attention head for GAT layers. graph_attention_layers[i] + gives the width of channel for each attention head for the i-th GAT layer. If + both ``graph_attention_layers`` and ``agg_modes`` are specified, they should have + equal length. If not specified, the default value will be [8, 8]. + n_attention_heads: int + Number of attention heads in each GAT layer. + agg_modes: list of str + The way to aggregate multi-head attention results for each GAT layer, which can be + either 'flatten' for concatenating all-head results or 'mean' for averaging all-head + results. ``agg_modes[i]`` gives the way to aggregate multi-head attention results for + the i-th GAT layer. If both ``graph_attention_layers`` and ``agg_modes`` are + specified, they should have equal length. 
If not specified, the model will flatten + multi-head results for intermediate GAT layers and compute mean of multi-head results + for the last GAT layer. + activation: activation function or None + The activation function to apply to the aggregated multi-head results for each GAT + layer. If not specified, the default value will be ELU. + residual: bool + Whether to add a residual connection within each GAT layer. Default to True. + dropout: float + The dropout probability within each GAT layer. Default to 0. + alpha: float + A hyperparameter in LeakyReLU, which is the slope for negative values. Default to 0.2. + predictor_hidden_feats: int + The size for hidden representations in the output MLP predictor. Default to 128. + predictor_dropout: float + The dropout probability in the output MLP predictor. Default to 0. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + self_loop: bool + Whether to add self loops for the nodes, i.e. edges from nodes to themselves. + When input graphs have isolated nodes, self loops allow preserving the original feature + of them in message passing. Default to True. + kwargs + This can include any keyword argument of TorchModel. 
+ """ + model = GAT( + n_tasks=n_tasks, + graph_attention_layers=graph_attention_layers, + n_attention_heads=n_attention_heads, + agg_modes=agg_modes, + activation=activation, + residual=residual, + dropout=dropout, + alpha=alpha, + predictor_hidden_feats=predictor_hidden_feats, + predictor_dropout=predictor_dropout, + mode=mode, + number_atom_features=number_atom_features, + n_classes=n_classes) + if mode == 'regression': + loss: Loss = L2Loss() + output_types = ['prediction'] + else: + loss = SparseSoftmaxCrossEntropy() + output_types = ['prediction', 'loss'] + super(GATModel, self).__init__( + model, loss=loss, output_types=output_types, **kwargs) + + self._self_loop = self_loop + + def _prepare_batch(self, batch): + """Create batch data for GAT. + + Parameters + ---------- + batch: tuple + The tuple is ``(inputs, labels, weights)``. + + Returns + ------- + inputs: DGLGraph + DGLGraph for a batch of graphs. + labels: list of torch.Tensor or None + The graph labels. + weights: list of torch.Tensor or None + The weights for each sample or sample/task pair converted to torch.Tensor. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + + inputs, labels, weights = batch + dgl_graphs = [ + graph.to_dgl_graph(self_loop=self._self_loop) for graph in inputs[0] + ] + inputs = dgl.batch(dgl_graphs).to(self.device) + _, labels, weights = super(GATModel, self)._prepare_batch(([], labels, + weights)) + return inputs, labels, weights diff --git a/deepchem/models/torch_models/gcn.py b/deepchem/models/torch_models/gcn.py new file mode 100644 index 0000000000000000000000000000000000000000..f5193503813a57913e89876e3bda642b31e522a5 --- /dev/null +++ b/deepchem/models/torch_models/gcn.py @@ -0,0 +1,354 @@ +""" +DGL-based GCN for graph property prediction. 
+""" +import torch.nn as nn +import torch.nn.functional as F + +from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy +from deepchem.models.torch_models.torch_model import TorchModel + + +class GCN(nn.Module): + """Model for Graph Property Prediction Based on Graph Convolution Networks (GCN). + + This model proceeds as follows: + + * Update node representations in graphs with a variant of GCN + * For each graph, compute its representation by 1) a weighted sum of the node + representations in the graph, where the weights are computed by applying a + gating function to the node representations 2) a max pooling of the node + representations 3) concatenating the output of 1) and 2) + * Perform the final prediction using an MLP + + Examples + -------- + + >>> import deepchem as dc + >>> import dgl + >>> from deepchem.models import GCN + >>> smiles = ["C1CCC1", "C1=CC=CN=C1"] + >>> featurizer = dc.feat.MolGraphConvFeaturizer() + >>> graphs = featurizer.featurize(smiles) + >>> print(type(graphs[0])) + + >>> dgl_graphs = [graphs[i].to_dgl_graph(self_loop=True) for i in range(len(graphs))] + >>> # Batch two graphs into a graph of two connected components + >>> batch_dgl_graph = dgl.batch(dgl_graphs) + >>> model = GCN(n_tasks=1, mode='regression') + >>> preds = model(batch_dgl_graph) + >>> print(type(preds)) + + >>> preds.shape == (2, 1) + True + + References + ---------- + .. [1] Thomas N. Kipf and Max Welling. "Semi-Supervised Classification with Graph + Convolutional Networks." ICLR 2017. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + + This model is different from deepchem.models.GraphConvModel as follows: + + * For each graph convolution, the learnable weight in this model is shared across all nodes. + ``GraphConvModel`` employs separate learnable weights for nodes of different degrees. 
A + learnable weight is shared across all nodes of a particular degree. + * For ``GraphConvModel``, there is an additional GraphPool operation after each + graph convolution. The operation updates the representation of a node by applying an + element-wise maximum over the representations of its neighbors and itself. + * For computing graph-level representations, this model computes a weighted sum and an + element-wise maximum of the representations of all nodes in a graph and concatenates them. + The node weights are obtained by using a linear/dense layer followd by a sigmoid function. + For ``GraphConvModel``, the sum over node representations is unweighted. + * There are various minor differences in using dropout, skip connection and batch + normalization. + """ + + def __init__(self, + n_tasks: int, + graph_conv_layers: list = None, + activation=None, + residual: bool = True, + batchnorm: bool = False, + dropout: float = 0., + predictor_hidden_feats: int = 128, + predictor_dropout: float = 0., + mode: str = 'regression', + number_atom_features: int = 30, + n_classes: int = 2, + nfeat_name: str = 'x'): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + graph_conv_layers: list of int + Width of channels for GCN layers. graph_conv_layers[i] gives the width of channel + for the i-th GCN layer. If not specified, the default value will be [64, 64]. + activation: callable + The activation function to apply to the output of each GCN layer. + By default, no activation function will be applied. + residual: bool + Whether to add a residual connection within each GCN layer. Default to True. + batchnorm: bool + Whether to apply batch normalization to the output of each GCN layer. + Default to False. + dropout: float + The dropout probability for the output of each GCN layer. Default to 0. + predictor_hidden_feats: int + The size for hidden representations in the output MLP predictor. Default to 128. 
+ predictor_dropout: float + The dropout probability in the output MLP predictor. Default to 0. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + nfeat_name: str + For an input graph ``g``, the model assumes that it stores node features in + ``g.ndata[nfeat_name]`` and will retrieve input node features from that. + Default to 'x'. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + try: + import dgllife + except: + raise ImportError('This class requires dgllife.') + + if mode not in ['classification', 'regression']: + raise ValueError("mode must be either 'classification' or 'regression'") + + super(GCN, self).__init__() + + self.n_tasks = n_tasks + self.mode = mode + self.n_classes = n_classes + self.nfeat_name = nfeat_name + if mode == 'classification': + out_size = n_tasks * n_classes + else: + out_size = n_tasks + + from dgllife.model import GCNPredictor as DGLGCNPredictor + + if graph_conv_layers is None: + graph_conv_layers = [64, 64] + num_gnn_layers = len(graph_conv_layers) + + if activation is not None: + activation = [activation] * num_gnn_layers + + self.model = DGLGCNPredictor( + in_feats=number_atom_features, + hidden_feats=graph_conv_layers, + activation=activation, + residual=[residual] * num_gnn_layers, + batchnorm=[batchnorm] * num_gnn_layers, + dropout=[dropout] * num_gnn_layers, + n_tasks=out_size, + predictor_hidden_feats=predictor_hidden_feats, + predictor_dropout=predictor_dropout) + + def forward(self, g): + """Predict graph labels + + Parameters + ---------- + g: DGLGraph + A DGLGraph for a batch of graphs. It stores the node features in + ``dgl_graph.ndata[self.nfeat_name]``. + + Returns + ------- + torch.Tensor + The model output. 
+ + * When self.mode = 'regression', + its shape will be ``(dgl_graph.batch_size, self.n_tasks)``. + * When self.mode = 'classification', the output consists of probabilities + for classes. Its shape will be ``(dgl_graph.batch_size, self.n_tasks, self.n_classes)`` + if self.n_tasks > 1; its shape will be ``(dgl_graph.batch_size, self.n_classes)`` if + self.n_tasks is 1. + torch.Tensor, optional + This is only returned when self.mode = 'classification', the output consists of the + logits for classes before softmax. + """ + node_feats = g.ndata[self.nfeat_name] + out = self.model(g, node_feats) + + if self.mode == 'classification': + if self.n_tasks == 1: + logits = out.view(-1, self.n_classes) + softmax_dim = 1 + else: + logits = out.view(-1, self.n_tasks, self.n_classes) + softmax_dim = 2 + proba = F.softmax(logits, dim=softmax_dim) + return proba, logits + else: + return out + + +class GCNModel(TorchModel): + """Model for Graph Property Prediction Based on Graph Convolution Networks (GCN). + + This model proceeds as follows: + + * Update node representations in graphs with a variant of GCN + * For each graph, compute its representation by 1) a weighted sum of the node + representations in the graph, where the weights are computed by applying a + gating function to the node representations 2) a max pooling of the node + representations 3) concatenating the output of 1) and 2) + * Perform the final prediction using an MLP + + Examples + -------- + + >>> + >> import deepchem as dc + >> from deepchem.models import GCNModel + >> featurizer = dc.feat.MolGraphConvFeaturizer() + >> tasks, datasets, transformers = dc.molnet.load_tox21( + .. reload=False, featurizer=featurizer, transformers=[]) + >> train, valid, test = datasets + >> model = GCNModel(mode='classification', n_tasks=len(tasks), + .. batch_size=32, learning_rate=0.001) + >> model.fit(train, nb_epoch=50) + + References + ---------- + .. [1] Thomas N. Kipf and Max Welling. 
"Semi-Supervised Classification with Graph + Convolutional Networks." ICLR 2017. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + + This model is different from deepchem.models.GraphConvModel as follows: + + * For each graph convolution, the learnable weight in this model is shared across all nodes. + ``GraphConvModel`` employs separate learnable weights for nodes of different degrees. A + learnable weight is shared across all nodes of a particular degree. + * For ``GraphConvModel``, there is an additional GraphPool operation after each + graph convolution. The operation updates the representation of a node by applying an + element-wise maximum over the representations of its neighbors and itself. + * For computing graph-level representations, this model computes a weighted sum and an + element-wise maximum of the representations of all nodes in a graph and concatenates them. + The node weights are obtained by using a linear/dense layer followd by a sigmoid function. + For ``GraphConvModel``, the sum over node representations is unweighted. + * There are various minor differences in using dropout, skip connection and batch + normalization. + """ + + def __init__(self, + n_tasks: int, + graph_conv_layers: list = None, + activation=None, + residual: bool = True, + batchnorm: bool = False, + dropout: float = 0., + predictor_hidden_feats: int = 128, + predictor_dropout: float = 0., + mode: str = 'regression', + number_atom_features=30, + n_classes: int = 2, + self_loop: bool = True, + **kwargs): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + graph_conv_layers: list of int + Width of channels for GCN layers. graph_conv_layers[i] gives the width of channel + for the i-th GCN layer. If not specified, the default value will be [64, 64]. + activation: callable + The activation function to apply to the output of each GCN layer. 
+ By default, no activation function will be applied. + residual: bool + Whether to add a residual connection within each GCN layer. Default to True. + batchnorm: bool + Whether to apply batch normalization to the output of each GCN layer. + Default to False. + dropout: float + The dropout probability for the output of each GCN layer. Default to 0. + predictor_hidden_feats: int + The size for hidden representations in the output MLP predictor. Default to 128. + predictor_dropout: float + The dropout probability in the output MLP predictor. Default to 0. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + self_loop: bool + Whether to add self loops for the nodes, i.e. edges from nodes to themselves. + When input graphs have isolated nodes, self loops allow preserving the original feature + of them in message passing. Default to True. + kwargs + This can include any keyword argument of TorchModel. + """ + model = GCN( + n_tasks=n_tasks, + graph_conv_layers=graph_conv_layers, + activation=activation, + residual=residual, + batchnorm=batchnorm, + dropout=dropout, + predictor_hidden_feats=predictor_hidden_feats, + predictor_dropout=predictor_dropout, + mode=mode, + number_atom_features=number_atom_features, + n_classes=n_classes) + if mode == 'regression': + loss: Loss = L2Loss() + output_types = ['prediction'] + else: + loss = SparseSoftmaxCrossEntropy() + output_types = ['prediction', 'loss'] + super(GCNModel, self).__init__( + model, loss=loss, output_types=output_types, **kwargs) + + self._self_loop = self_loop + + def _prepare_batch(self, batch): + """Create batch data for GCN. + + Parameters + ---------- + batch: tuple + The tuple is ``(inputs, labels, weights)``. 
+ + Returns + ------- + inputs: DGLGraph + DGLGraph for a batch of graphs. + labels: list of torch.Tensor or None + The graph labels. + weights: list of torch.Tensor or None + The weights for each sample or sample/task pair converted to torch.Tensor. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + + inputs, labels, weights = batch + dgl_graphs = [ + graph.to_dgl_graph(self_loop=self._self_loop) for graph in inputs[0] + ] + inputs = dgl.batch(dgl_graphs).to(self.device) + _, labels, weights = super(GCNModel, self)._prepare_batch(([], labels, + weights)) + return inputs, labels, weights diff --git a/deepchem/models/torch_models/mpnn.py b/deepchem/models/torch_models/mpnn.py new file mode 100644 index 0000000000000000000000000000000000000000..689d3195bcd76d86081019602591e5692c78db64 --- /dev/null +++ b/deepchem/models/torch_models/mpnn.py @@ -0,0 +1,308 @@ +""" +DGL-based MPNN for graph property prediction. +""" +import torch.nn as nn +import torch.nn.functional as F + +from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy +from deepchem.models.torch_models.torch_model import TorchModel + + +class MPNN(nn.Module): + """Model for Graph Property Prediction. + + This model proceeds as follows: + + * Combine latest node representations and edge features in updating node representations, + which involves multiple rounds of message passing + * For each graph, compute its representation by combining the representations + of all nodes in it, which involves a Set2Set layer. 
+ * Perform the final prediction using an MLP + + Examples + -------- + + >>> import deepchem as dc + >>> import dgl + >>> from deepchem.models.torch_models import MPNN + >>> smiles = ["C1CCC1", "C1=CC=CN=C1"] + >>> featurizer = dc.feat.MolGraphConvFeaturizer(use_edges=True) + >>> graphs = featurizer.featurize(smiles) + >>> print(type(graphs[0])) + + >>> dgl_graphs = [graphs[i].to_dgl_graph(self_loop=True) for i in range(len(graphs))] + >>> # Batch two graphs into a graph of two connected components + >>> batch_dgl_graph = dgl.batch(dgl_graphs) + >>> model = MPNN(n_tasks=1, mode='regression') + >>> preds = model(batch_dgl_graph) + >>> print(type(preds)) + + >>> preds.shape == (2, 1) + True + + References + ---------- + .. [1] Justin Gilmer, Samuel S. Schoenholz, Patrick F. Riley, Oriol Vinyals, George E. Dahl. + "Neural Message Passing for Quantum Chemistry." ICML 2017. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + node_out_feats: int = 64, + edge_hidden_feats: int = 128, + num_step_message_passing: int = 3, + num_step_set2set: int = 6, + num_layer_set2set: int = 3, + mode: str = 'regression', + number_atom_features: int = 30, + number_bond_features: int = 11, + n_classes: int = 2, + nfeat_name: str = 'x', + efeat_name: str = 'edge_attr'): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + node_out_feats: int + The length of the final node representation vectors. Default to 64. + edge_hidden_feats: int + The length of the hidden edge representation vectors. Default to 128. + num_step_message_passing: int + The number of rounds of message passing. Default to 3. + num_step_set2set: int + The number of set2set steps. Default to 6. + num_layer_set2set: int + The number of set2set layers. Default to 3. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. 
+ number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + number_bond_features: int + The length of the initial bond feature vectors. Default to 11. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + nfeat_name: str + For an input graph ``g``, the model assumes that it stores node features in + ``g.ndata[nfeat_name]`` and will retrieve input node features from that. + Default to 'x'. + efeat_name: str + For an input graph ``g``, the model assumes that it stores edge features in + ``g.edata[efeat_name]`` and will retrieve input edge features from that. + Default to 'edge_attr'. + """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + try: + import dgllife + except: + raise ImportError('This class requires dgllife.') + + if mode not in ['classification', 'regression']: + raise ValueError("mode must be either 'classification' or 'regression'") + + super(MPNN, self).__init__() + + self.n_tasks = n_tasks + self.mode = mode + self.n_classes = n_classes + self.nfeat_name = nfeat_name + self.efeat_name = efeat_name + if mode == 'classification': + out_size = n_tasks * n_classes + else: + out_size = n_tasks + + from dgllife.model import MPNNPredictor as DGLMPNNPredictor + + self.model = DGLMPNNPredictor( + node_in_feats=number_atom_features, + edge_in_feats=number_bond_features, + node_out_feats=node_out_feats, + edge_hidden_feats=edge_hidden_feats, + n_tasks=out_size, + num_step_message_passing=num_step_message_passing, + num_step_set2set=num_step_set2set, + num_layer_set2set=num_layer_set2set) + + def forward(self, g): + """Predict graph labels + + Parameters + ---------- + g: DGLGraph + A DGLGraph for a batch of graphs. It stores the node features in + ``dgl_graph.ndata[self.nfeat_name]`` and edge features in + ``dgl_graph.edata[self.efeat_name]``. + + Returns + ------- + torch.Tensor + The model output. 
+ + * When self.mode = 'regression', + its shape will be ``(dgl_graph.batch_size, self.n_tasks)``. + * When self.mode = 'classification', the output consists of probabilities + for classes. Its shape will be + ``(dgl_graph.batch_size, self.n_tasks, self.n_classes)`` if self.n_tasks > 1; + its shape will be ``(dgl_graph.batch_size, self.n_classes)`` if self.n_tasks is 1. + torch.Tensor, optional + This is only returned when self.mode = 'classification', the output consists of the + logits for classes before softmax. + """ + node_feats = g.ndata[self.nfeat_name] + edge_feats = g.edata[self.efeat_name] + out = self.model(g, node_feats, edge_feats) + + if self.mode == 'classification': + if self.n_tasks == 1: + logits = out.view(-1, self.n_classes) + softmax_dim = 1 + else: + logits = out.view(-1, self.n_tasks, self.n_classes) + softmax_dim = 2 + proba = F.softmax(logits, dim=softmax_dim) + return proba, logits + else: + return out + + +class MPNNModel(TorchModel): + """Model for graph property prediction + + This model proceeds as follows: + + * Combine latest node representations and edge features in updating node representations, + which involves multiple rounds of message passing + * For each graph, compute its representation by combining the representations + of all nodes in it, which involves a Set2Set layer. + * Perform the final prediction using an MLP + + Examples + -------- + + >>> + >> import deepchem as dc + >> from deepchem.models.torch_models import MPNNModel + >> featurizer = dc.feat.MolGraphConvFeaturizer(use_edges=True) + >> tasks, datasets, transformers = dc.molnet.load_tox21( + .. reload=False, featurizer=featurizer, transformers=[]) + >> train, valid, test = datasets + >> model = MPNNModel(mode='classification', n_tasks=len(tasks), + .. batch_size=32, learning_rate=0.001) + >> model.fit(train, nb_epoch=50) + + References + ---------- + .. [1] Justin Gilmer, Samuel S. Schoenholz, Patrick F. Riley, Oriol Vinyals, George E. Dahl. 
+ "Neural Message Passing for Quantum Chemistry." ICML 2017. + + Notes + ----- + This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci + (https://github.com/awslabs/dgl-lifesci) to be installed. + """ + + def __init__(self, + n_tasks: int, + node_out_feats: int = 64, + edge_hidden_feats: int = 128, + num_step_message_passing: int = 3, + num_step_set2set: int = 6, + num_layer_set2set: int = 3, + mode: str = 'regression', + number_atom_features: int = 30, + number_bond_features: int = 11, + n_classes: int = 2, + self_loop: bool = False, + **kwargs): + """ + Parameters + ---------- + n_tasks: int + Number of tasks. + node_out_feats: int + The length of the final node representation vectors. Default to 64. + edge_hidden_feats: int + The length of the hidden edge representation vectors. Default to 128. + num_step_message_passing: int + The number of rounds of message passing. Default to 3. + num_step_set2set: int + The number of set2set steps. Default to 6. + num_layer_set2set: int + The number of set2set layers. Default to 3. + mode: str + The model type, 'classification' or 'regression'. Default to 'regression'. + number_atom_features: int + The length of the initial atom feature vectors. Default to 30. + number_bond_features: int + The length of the initial bond feature vectors. Default to 11. + n_classes: int + The number of classes to predict per task + (only used when ``mode`` is 'classification'). Default to 2. + self_loop: bool + Whether to add self loops for the nodes, i.e. edges from nodes to themselves. + Generally, an MPNNModel does not require self loops. Default to False. + kwargs + This can include any keyword argument of TorchModel. 
+ """ + model = MPNN( + n_tasks=n_tasks, + node_out_feats=node_out_feats, + edge_hidden_feats=edge_hidden_feats, + num_step_message_passing=num_step_message_passing, + num_step_set2set=num_step_set2set, + num_layer_set2set=num_layer_set2set, + mode=mode, + number_atom_features=number_atom_features, + number_bond_features=number_bond_features, + n_classes=n_classes) + if mode == 'regression': + loss: Loss = L2Loss() + output_types = ['prediction'] + else: + loss = SparseSoftmaxCrossEntropy() + output_types = ['prediction', 'loss'] + super(MPNNModel, self).__init__( + model, loss=loss, output_types=output_types, **kwargs) + + self._self_loop = self_loop + + def _prepare_batch(self, batch): + """Create batch data for MPNN. + + Parameters + ---------- + batch: tuple + The tuple is ``(inputs, labels, weights)``. + + Returns + ------- + inputs: DGLGraph + DGLGraph for a batch of graphs. + labels: list of torch.Tensor or None + The graph labels. + weights: list of torch.Tensor or None + The weights for each sample or sample/task pair converted to torch.Tensor. 
+ """ + try: + import dgl + except: + raise ImportError('This class requires dgl.') + + inputs, labels, weights = batch + dgl_graphs = [ + graph.to_dgl_graph(self_loop=self._self_loop) for graph in inputs[0] + ] + inputs = dgl.batch(dgl_graphs).to(self.device) + _, labels, weights = super(MPNNModel, self)._prepare_batch(([], labels, + weights)) + return inputs, labels, weights diff --git a/deepchem/models/torch_models/torch_model.py b/deepchem/models/torch_models/torch_model.py new file mode 100644 index 0000000000000000000000000000000000000000..fe7b89d7a4994201b0444c6ccb63d4fc3ff4b761 --- /dev/null +++ b/deepchem/models/torch_models/torch_model.py @@ -0,0 +1,1135 @@ +import numpy as np +import torch +import torch.utils.tensorboard +import time +import logging +import os +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection + +from deepchem.data import Dataset, NumpyDataset +from deepchem.metrics import Metric +from deepchem.models.losses import Loss +from deepchem.models.models import Model +from deepchem.models.optimizers import Adam, Optimizer, LearningRateSchedule +from deepchem.trans import Transformer, undo_transforms +from deepchem.utils.evaluate import GeneratorEvaluator + +from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union +from deepchem.utils.typing import LossFn, OneOrMany + +try: + import wandb + wandb.ensure_configured() + if wandb.api.api_key is None: + _has_wandb = False + wandb.termwarn( + "W&B installed but not logged in. Run `wandb login` or set the WANDB_API_KEY env variable." + ) + else: + _has_wandb = True +except (ImportError, AttributeError): + _has_wandb = False + +logger = logging.getLogger(__name__) + + +class TorchModel(Model): + """This is a DeepChem model implemented by a PyTorch model. + + Here is a simple example of code that uses TorchModel to train + a PyTorch model on a DeepChem dataset. 
+ + >> pytorch_model = torch.nn.Sequential( + >> torch.nn.Linear(100, 1000), + >> torch.nn.Tanh(), + >> torch.nn.Linear(1000, 1)) + >> model = TorchModel(pytorch_model, loss=dc.models.losses.L2Loss()) + >> model.fit(dataset) + + The loss function for a model can be defined in two different + ways. For models that have only a single output and use a + standard loss function, you can simply provide a + dc.models.losses.Loss object. This defines the loss for each + sample or sample/task pair. The result is automatically + multiplied by the weights and averaged over the batch. + + For more complicated cases, you can instead provide a function + that directly computes the total loss. It must be of the form + f(outputs, labels, weights), taking the list of outputs from + the model, the expected values, and any weight matrices. It + should return a scalar equal to the value of the loss function + for the batch. No additional processing is done to the + result; it is up to you to do any weighting, averaging, adding + of penalty terms, etc. + + You can optionally provide an output_types argument, which + describes how to interpret the model's outputs. This should + be a list of strings, one for each output. You can use an + arbitrary output_type for a output, but some output_types are + special and will undergo extra processing: + + - 'prediction': This is a normal output, and will be returned by predict(). + If output types are not specified, all outputs are assumed + to be of this type. + + - 'loss': This output will be used in place of the normal + outputs for computing the loss function. For example, + models that output probability distributions usually do it + by computing unbounded numbers (the logits), then passing + them through a softmax function to turn them into + probabilities. When computing the cross entropy, it is more + numerically stable to use the logits directly rather than + the probabilities. 
You can do this by having the model + produce both probabilities and logits as outputs, then + specifying output_types=['prediction', 'loss']. When + predict() is called, only the first output (the + probabilities) will be returned. But during training, it is + the second output (the logits) that will be passed to the + loss function. + + - 'variance': This output is used for estimating the + uncertainty in another output. To create a model that can + estimate uncertainty, there must be the same number of + 'prediction' and 'variance' outputs. Each variance output + must have the same shape as the corresponding prediction + output, and each element is an estimate of the variance in + the corresponding prediction. Also be aware that if a model + supports uncertainty, it MUST use dropout on every layer, + and dropout most be enabled during uncertainty prediction. + Otherwise, the uncertainties it computes will be inaccurate. + + - other: Arbitrary output_types can be used to extract outputs + produced by the model, but will have no additional + processing performed. + """ + + def __init__(self, + model: torch.nn.Module, + loss: Union[Loss, LossFn], + output_types: Optional[List[str]] = None, + batch_size: int = 100, + model_dir: Optional[str] = None, + learning_rate: Union[float, LearningRateSchedule] = 0.001, + optimizer: Optional[Optimizer] = None, + tensorboard: bool = False, + wandb: bool = False, + log_frequency: int = 100, + device: Optional[torch.device] = None, + **kwargs) -> None: + """Create a new TorchModel. 
+ + Parameters + ---------- + model: torch.nn.Module + the PyTorch model implementing the calculation + loss: dc.models.losses.Loss or function + a Loss or function defining how to compute the training loss for each + batch, as described above + output_types: list of strings, optional (default None) + the type of each output from the model, as described above + batch_size: int, optional (default 100) + default batch size for training and evaluating + model_dir: str, optional (default None) + the directory on disk where the model will be stored. If this is None, + a temporary directory is created. + learning_rate: float or LearningRateSchedule, optional (default 0.001) + the learning rate to use for fitting. If optimizer is specified, this is + ignored. + optimizer: Optimizer, optional (default None) + the optimizer to use for fitting. If this is specified, learning_rate is + ignored. + tensorboard: bool, optional (default False) + whether to log progress to TensorBoard during training + wandb: bool, optional (default False) + whether to log progress to Weights & Biases during training + log_frequency: int, optional (default 100) + The frequency at which to log data. Data is logged using + `logging` by default. If `tensorboard` is set, data is also + logged to TensorBoard. If `wandb` is set, data is also logged + to Weights & Biases. Logging happens at global steps. Roughly, + a global step corresponds to one batch of training. If you'd + like a printout every 10 batch steps, you'd set + `log_frequency=10` for example. + device: torch.device, optional (default None) + the device on which to run computations. If None, a device is + chosen automatically. 
+ """ + super(TorchModel, self).__init__(model=model, model_dir=model_dir, **kwargs) + if isinstance(loss, Loss): + self._loss_fn: LossFn = _StandardLoss(model, loss) + else: + self._loss_fn = loss + self.batch_size = batch_size + if optimizer is None: + self.optimizer: Optimizer = Adam(learning_rate=learning_rate) + else: + self.optimizer = optimizer + self.tensorboard = tensorboard + + # Select a device. + + if device is None: + if torch.cuda.is_available(): + device = torch.device('cuda') + else: + device = torch.device('cpu') + self.device = device + self.model = model.to(device) + + # W&B logging + if wandb and not _has_wandb: + logger.warning( + "You set wandb to True but W&B is not installed. To use wandb logging, " + "run `pip install wandb; wandb login` see https://docs.wandb.com/huggingface." + ) + self.wandb = wandb and _has_wandb + + self.log_frequency = log_frequency + if self.tensorboard: + self._summary_writer = torch.utils.tensorboard.SummaryWriter( + self.model_dir) + if output_types is None: + self._prediction_outputs = None + self._loss_outputs = None + self._variance_outputs = None + self._other_outputs = None + else: + self._prediction_outputs = [] + self._loss_outputs = [] + self._variance_outputs = [] + self._other_outputs = [] + for i, type in enumerate(output_types): + if type == 'prediction': + self._prediction_outputs.append(i) + elif type == 'loss': + self._loss_outputs.append(i) + elif type == 'variance': + self._variance_outputs.append(i) + else: + self._other_outputs.append(i) + if len(self._loss_outputs) == 0: + self._loss_outputs = self._prediction_outputs + self._built = False + self._output_functions: Dict[Any, Any] = {} + self._optimizer_for_vars: Dict[Any, Any] = {} + + def _ensure_built(self) -> None: + """The first time this is called, create internal data structures.""" + if self._built: + return + self._built = True + self._global_step = 0 + self._pytorch_optimizer = self.optimizer._create_pytorch_optimizer( + 
self.model.parameters()) + if isinstance(self.optimizer.learning_rate, LearningRateSchedule): + self._lr_schedule = self.optimizer.learning_rate._create_pytorch_schedule( + self._pytorch_optimizer) + else: + self._lr_schedule = None + + def fit(self, + dataset: Dataset, + nb_epoch: int = 10, + max_checkpoints_to_keep: int = 5, + checkpoint_interval: int = 1000, + deterministic: bool = False, + restore: bool = False, + variables: Optional[List[torch.nn.Parameter]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + all_losses: Optional[List[float]] = None) -> float: + """Train this model on a dataset. + + Parameters + ---------- + dataset: Dataset + the Dataset to train on + nb_epoch: int + the number of epochs to train for + max_checkpoints_to_keep: int + the maximum number of checkpoints to keep. Older checkpoints are discarded. + checkpoint_interval: int + the frequency at which to write checkpoints, measured in training steps. + Set this to 0 to disable automatic checkpointing. + deterministic: bool + if True, the samples are processed in order. If False, a different random + order is used for each epoch. + restore: bool + if True, restore the model from the most recent checkpoint and continue training + from there. If False, retrain the model from scratch. + variables: list of torch.nn.Parameter + the variables to train. If None (the default), all trainable variables in + the model are used. + loss: function + a function of the form f(outputs, labels, weights) that computes the loss + for each batch. If None (the default), the model's standard loss function + is used. + callbacks: function or list of functions + one or more functions of the form f(model, step) that will be invoked after + every step. This can be used to perform validation, logging, etc. + all_losses: Optional[List[float]], optional (default None) + If specified, all logged losses are appended into this list. 
Note that + you can call `fit()` repeatedly with the same list and losses will + continue to be appended. + + Returns + ------- + The average loss over the most recent checkpoint interval + """ + return self.fit_generator( + self.default_generator( + dataset, epochs=nb_epoch, + deterministic=deterministic), max_checkpoints_to_keep, + checkpoint_interval, restore, variables, loss, callbacks, all_losses) + + def fit_generator(self, + generator: Iterable[Tuple[Any, Any, Any]], + max_checkpoints_to_keep: int = 5, + checkpoint_interval: int = 1000, + restore: bool = False, + variables: Optional[List[torch.nn.Parameter]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + all_losses: Optional[List[float]] = None) -> float: + """Train this model on data from a generator. + + Parameters + ---------- + generator: generator + this should generate batches, each represented as a tuple of the form + (inputs, labels, weights). + max_checkpoints_to_keep: int + the maximum number of checkpoints to keep. Older checkpoints are discarded. + checkpoint_interval: int + the frequency at which to write checkpoints, measured in training steps. + Set this to 0 to disable automatic checkpointing. + restore: bool + if True, restore the model from the most recent checkpoint and continue training + from there. If False, retrain the model from scratch. + variables: list of torch.nn.Parameter + the variables to train. If None (the default), all trainable variables in + the model are used. + loss: function + a function of the form f(outputs, labels, weights) that computes the loss + for each batch. If None (the default), the model's standard loss function + is used. + callbacks: function or list of functions + one or more functions of the form f(model, step) that will be invoked after + every step. This can be used to perform validation, logging, etc. 
+ all_losses: Optional[List[float]], optional (default None) + If specified, all logged losses are appended into this list. Note that + you can call `fit()` repeatedly with the same list and losses will + continue to be appended. + + Returns + ------- + The average loss over the most recent checkpoint interval + """ + if not isinstance(callbacks, SequenceCollection): + callbacks = [callbacks] + self._ensure_built() + self.model.train() + avg_loss = 0.0 + last_avg_loss = 0.0 + averaged_batches = 0 + if loss is None: + loss = self._loss_fn + if variables is None: + optimizer = self._pytorch_optimizer + lr_schedule = self._lr_schedule + else: + var_key = tuple(variables) + if var_key in self._optimizer_for_vars: + optimizer, lr_schedule = self._optimizer_for_vars[var_key] + else: + optimizer = self.optimizer._create_pytorch_optimizer(variables) + if isinstance(self.optimizer.learning_rate, LearningRateSchedule): + lr_schedule = self.optimizer.learning_rate._create_pytorch_schedule( + optimizer) + else: + lr_schedule = None + self._optimizer_for_vars[var_key] = (optimizer, lr_schedule) + time1 = time.time() + + # Main training loop. + + for batch in generator: + if restore: + self.restore() + restore = False + inputs, labels, weights = self._prepare_batch(batch) + + # Execute the loss function, accumulating the gradients. + + if len(inputs) == 1: + inputs = inputs[0] + + optimizer.zero_grad() + outputs = self.model(inputs) + if isinstance(outputs, torch.Tensor): + outputs = [outputs] + if self._loss_outputs is not None: + outputs = [outputs[i] for i in self._loss_outputs] + batch_loss = loss(outputs, labels, weights) + batch_loss.backward() + optimizer.step() + if lr_schedule is not None: + lr_schedule.step() + self._global_step += 1 + current_step = self._global_step + + avg_loss += batch_loss + + # Report progress and write checkpoints. 
+ averaged_batches += 1 + should_log = (current_step % self.log_frequency == 0) + if should_log: + avg_loss = float(avg_loss) / averaged_batches + logger.info( + 'Ending global_step %d: Average loss %g' % (current_step, avg_loss)) + if all_losses is not None: + all_losses.append(avg_loss) + # Capture the last avg_loss in case of return since we're resetting to 0 now + last_avg_loss = avg_loss + avg_loss = 0.0 + averaged_batches = 0 + + if checkpoint_interval > 0 and current_step % checkpoint_interval == checkpoint_interval - 1: + self.save_checkpoint(max_checkpoints_to_keep) + for c in callbacks: + c(self, current_step) + if self.tensorboard and should_log: + self._log_scalar_to_tensorboard('loss', batch_loss, current_step) + if self.wandb and should_log: + wandb.log({'loss': batch_loss}, step=current_step) + + # Report final results. + if averaged_batches > 0: + avg_loss = float(avg_loss) / averaged_batches + logger.info( + 'Ending global_step %d: Average loss %g' % (current_step, avg_loss)) + if all_losses is not None: + all_losses.append(avg_loss) + last_avg_loss = avg_loss + + if checkpoint_interval > 0: + self.save_checkpoint(max_checkpoints_to_keep) + + time2 = time.time() + logger.info("TIMING: model fitting took %0.3f s" % (time2 - time1)) + return last_avg_loss + + def fit_on_batch(self, + X: Sequence, + y: Sequence, + w: Sequence, + variables: Optional[List[torch.nn.Parameter]] = None, + loss: Optional[LossFn] = None, + callbacks: Union[Callable, List[Callable]] = [], + checkpoint: bool = True, + max_checkpoints_to_keep: int = 5) -> float: + """Perform a single step of training. + + Parameters + ---------- + X: ndarray + the inputs for the batch + y: ndarray + the labels for the batch + w: ndarray + the weights for the batch + variables: list of torch.nn.Parameter + the variables to train. If None (the default), all trainable variables in + the model are used. 
+ loss: function + a function of the form f(outputs, labels, weights) that computes the loss + for each batch. If None (the default), the model's standard loss function + is used. + callbacks: function or list of functions + one or more functions of the form f(model, step) that will be invoked after + every step. This can be used to perform validation, logging, etc. + checkpoint: bool + if true, save a checkpoint after performing the training step + max_checkpoints_to_keep: int + the maximum number of checkpoints to keep. Older checkpoints are discarded. + + Returns + ------- + the loss on the batch + """ + self._ensure_built() + dataset = NumpyDataset(X, y, w) + return self.fit( + dataset, + nb_epoch=1, + max_checkpoints_to_keep=max_checkpoints_to_keep, + checkpoint_interval=self._global_step + 2 if checkpoint else 0, + variables=variables, + loss=loss, + callbacks=callbacks) + + def _predict( + self, generator: Iterable[Tuple[Any, Any, Any]], + transformers: List[Transformer], uncertainty: bool, + other_output_types: Optional[OneOrMany[str]]) -> OneOrMany[np.ndarray]: + """ + Predict outputs for data provided by a generator. + + This is the private implementation of prediction. Do not + call it directly. Instead call one of the public prediction + methods. + + Parameters + ---------- + generator: generator + this should generate batches, each represented as a tuple of the form + (inputs, labels, weights). + transformers: list of dc.trans.Transformers + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. + uncertainty: bool + specifies whether this is being called as part of estimating uncertainty. + If True, it sets the training flag so that dropout will be enabled, and + returns the values of the uncertainty outputs. + other_output_types: list, optional + Provides a list of other output_types (strings) to predict from model. 
+ Returns: + a NumPy array of the model produces a single output, or a list of arrays + if it produces multiple outputs + """ + results: Optional[List[np.ndarray]] = None + variances: Optional[List[np.ndarray]] = None + if uncertainty and (other_output_types is not None): + raise ValueError( + 'This model cannot compute uncertainties and other output types simultaneously. Please invoke one at a time.' + ) + if uncertainty: + if self._variance_outputs is None or len(self._variance_outputs) == 0: + raise ValueError('This model cannot compute uncertainties') + if len(self._variance_outputs) != len(self._prediction_outputs): + raise ValueError( + 'The number of variances must exactly match the number of outputs') + if other_output_types: + if self._other_outputs is None or len(self._other_outputs) == 0: + raise ValueError( + 'This model cannot compute other outputs since no other output_types were specified.' + ) + self._ensure_built() + self.model.eval() + for batch in generator: + inputs, labels, weights = batch + inputs, _, _ = self._prepare_batch((inputs, None, None)) + + # Invoke the model. + if len(inputs) == 1: + inputs = inputs[0] + output_values = self.model(inputs) + if isinstance(output_values, torch.Tensor): + output_values = [output_values] + output_values = [t.detach().cpu().numpy() for t in output_values] + + # Apply tranformers and record results. + if uncertainty: + var = [output_values[i] for i in self._variance_outputs] + if variances is None: + variances = [var] + else: + for i, t in enumerate(var): + variances[i].append(t) + access_values = [] + if other_output_types: + access_values += self._other_outputs + elif self._prediction_outputs is not None: + access_values += self._prediction_outputs + + if len(access_values) > 0: + output_values = [output_values[i] for i in access_values] + + if len(transformers) > 0: + if len(output_values) > 1: + raise ValueError( + "predict() does not support Transformers for models with multiple outputs." 
+ ) + elif len(output_values) == 1: + output_values = [undo_transforms(output_values[0], transformers)] + if results is None: + results = [[] for i in range(len(output_values))] + for i, t in enumerate(output_values): + results[i].append(t) + + # Concatenate arrays to create the final results. + final_results = [] + final_variances = [] + if results is not None: + for r in results: + final_results.append(np.concatenate(r, axis=0)) + if uncertainty and variances is not None: + for v in variances: + final_variances.append(np.concatenate(v, axis=0)) + return zip(final_results, final_variances) + if len(final_results) == 1: + return final_results[0] + else: + return final_results + + def predict_on_generator( + self, + generator: Iterable[Tuple[Any, Any, Any]], + transformers: List[Transformer] = [], + output_types: Optional[OneOrMany[str]] = None) -> OneOrMany[np.ndarray]: + """ + Parameters + ---------- + generator: generator + this should generate batches, each represented as a tuple of the form + (inputs, labels, weights). + transformers: list of dc.trans.Transformers + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. + output_types: String or list of Strings + If specified, all outputs of this type will be retrieved + from the model. If output_types is specified, outputs must + be None. + Returns: + a NumPy array of the model produces a single output, or a list of arrays + if it produces multiple outputs + """ + return self._predict(generator, transformers, False, output_types) + + def predict_on_batch(self, X: Sequence, transformers: List[Transformer] = [] + ) -> OneOrMany[np.ndarray]: + """Generates predictions for input samples, processing samples in a batch. + + Parameters + ---------- + X: ndarray + the input data, as a Numpy array. + transformers: list of dc.trans.Transformers + Transformers that the input data has been transformed by. 
The output + is passed through these transformers to undo the transformations. + + Returns + ------- + a NumPy array of the model produces a single output, or a list of arrays + if it produces multiple outputs + """ + dataset = NumpyDataset(X=X, y=None) + return self.predict(dataset, transformers) + + def predict_uncertainty_on_batch(self, X: Sequence, masks: int = 50 + ) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]: + """ + Predict the model's outputs, along with the uncertainty in each one. + + The uncertainty is computed as described in https://arxiv.org/abs/1703.04977. + It involves repeating the prediction many times with different dropout masks. + The prediction is computed as the average over all the predictions. The + uncertainty includes both the variation among the predicted values (epistemic + uncertainty) and the model's own estimates for how well it fits the data + (aleatoric uncertainty). Not all models support uncertainty prediction. + + Parameters + ---------- + X: ndarray + the input data, as a Numpy array. + masks: int + the number of dropout masks to average over + + Returns + ------- + for each output, a tuple (y_pred, y_std) where y_pred is the predicted + value of the output, and each element of y_std estimates the standard + deviation of the corresponding element of y_pred + """ + dataset = NumpyDataset(X=X, y=None) + return self.predict_uncertainty(dataset, masks) + + def predict( + self, + dataset: Dataset, + transformers: List[Transformer] = [], + output_types: Optional[List[str]] = None) -> OneOrMany[np.ndarray]: + """ + Uses self to make predictions on provided Dataset object. + + Parameters + ---------- + dataset: dc.data.Dataset + Dataset to make prediction on + transformers: list of dc.trans.Transformers + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. 
+ output_types: String or list of Strings + If specified, all outputs of this type will be retrieved + from the model. If output_types is specified, outputs must + be None. + + Returns + ------- + a NumPy array of the model produces a single output, or a list of arrays + if it produces multiple outputs + """ + generator = self.default_generator( + dataset, mode='predict', pad_batches=False) + return self.predict_on_generator( + generator, transformers=transformers, output_types=output_types) + + def predict_embedding(self, dataset: Dataset) -> OneOrMany[np.ndarray]: + """ + Predicts embeddings created by underlying model if any exist. + An embedding must be specified to have `output_type` of + `'embedding'` in the model definition. + + Parameters + ---------- + dataset: dc.data.Dataset + Dataset to make prediction on + + Returns + ------- + a NumPy array of the embeddings model produces, or a list + of arrays if it produces multiple embeddings + """ + generator = self.default_generator( + dataset, mode='predict', pad_batches=False) + return self._predict(generator, [], False, ['embedding']) + + def predict_uncertainty(self, dataset: Dataset, masks: int = 50 + ) -> OneOrMany[Tuple[np.ndarray, np.ndarray]]: + """ + Predict the model's outputs, along with the uncertainty in each one. + + The uncertainty is computed as described in https://arxiv.org/abs/1703.04977. + It involves repeating the prediction many times with different dropout masks. + The prediction is computed as the average over all the predictions. The + uncertainty includes both the variation among the predicted values (epistemic + uncertainty) and the model's own estimates for how well it fits the data + (aleatoric uncertainty). Not all models support uncertainty prediction. 
+ + Parameters + ---------- + dataset: dc.data.Dataset + Dataset to make prediction on + masks: int + the number of dropout masks to average over + + Returns + ------- + for each output, a tuple (y_pred, y_std) where y_pred is the predicted + value of the output, and each element of y_std estimates the standard + deviation of the corresponding element of y_pred + """ + sum_pred: List[np.ndarray] = [] + sum_sq_pred: List[np.ndarray] = [] + sum_var: List[np.ndarray] = [] + for i in range(masks): + generator = self.default_generator( + dataset, mode='uncertainty', pad_batches=False) + results = self._predict(generator, [], True, None) + if len(sum_pred) == 0: + for p, v in results: + sum_pred.append(p) + sum_sq_pred.append(p * p) + sum_var.append(v) + else: + for j, (p, v) in enumerate(results): + sum_pred[j] += p + sum_sq_pred[j] += p * p + sum_var[j] += v + output = [] + std = [] + for i in range(len(sum_pred)): + p = sum_pred[i] / masks + output.append(p) + std.append(np.sqrt(sum_sq_pred[i] / masks - p * p + sum_var[i] / masks)) + if len(output) == 1: + return (output[0], std[0]) + else: + return list(zip(output, std)) + + def evaluate_generator(self, + generator: Iterable[Tuple[Any, Any, Any]], + metrics: List[Metric], + transformers: List[Transformer] = [], + per_task_metrics: bool = False): + """Evaluate the performance of this model on the data produced by a generator. + + Parameters + ---------- + generator: generator + this should generate batches, each represented as a tuple of the form + (inputs, labels, weights). + metric: list of deepchem.metrics.Metric + Evaluation metric + transformers: list of dc.trans.Transformers + Transformers that the input data has been transformed by. The output + is passed through these transformers to undo the transformations. + per_task_metrics: bool + If True, return per-task scores. + + Returns + ------- + dict + Maps tasks to scores under metric. 
+ """ + evaluator = GeneratorEvaluator(self, generator, transformers) + return evaluator.compute_model_performance(metrics, per_task_metrics) + + def compute_saliency(self, X: np.ndarray) -> OneOrMany[np.ndarray]: + """Compute the saliency map for an input sample. + + This computes the Jacobian matrix with the derivative of each output element + with respect to each input element. More precisely, + + - If this model has a single output, it returns a matrix of shape + (output_shape, input_shape) with the derivatives. + - If this model has multiple outputs, it returns a list of matrices, one + for each output. + + This method cannot be used on models that take multiple inputs. + + Parameters + ---------- + X: ndarray + the input data for a single sample + + Returns + ------- + the Jacobian matrix, or a list of matrices + """ + input_shape = X.shape + X = np.reshape(X, [1] + list(X.shape)) + self._ensure_built() + X, _, _ = self._prepare_batch(([X], None, None)) + + # Compute the gradients. 
+ + X = X[0] + X.requires_grad_(True) + outputs = self.model(X) + if isinstance(outputs, torch.Tensor): + outputs = [outputs] + final_result = [] + for output in outputs: + output_shape = tuple(output.shape[1:]) + output = output.reshape([-1]) + result = [] + grad_output = torch.zeros(output.shape[0], device=self.device) + for i in range(output.shape[0]): + grad_output.zero_() + grad_output[i] = 1 + output.backward(grad_output, retain_graph=True) + result.append(X.grad.clone()) + X.grad.zero_() + final_result.append( + torch.reshape(torch.stack(result), + output_shape + input_shape).cpu().numpy()) + if len(final_result) == 1: + return final_result[0] + return final_result + + def _prepare_batch(self, + batch: Tuple[Any, Any, Any]) -> Tuple[List, List, List]: + inputs, labels, weights = batch + inputs = [ + x.astype(np.float32) if x.dtype == np.float64 else x for x in inputs + ] + inputs = [torch.as_tensor(x, device=self.device) for x in inputs] + if labels is not None: + labels = [ + x.astype(np.float32) if x.dtype == np.float64 else x for x in labels + ] + labels = [torch.as_tensor(x, device=self.device) for x in labels] + if weights is not None: + weights = [ + x.astype(np.float32) if x.dtype == np.float64 else x for x in weights + ] + weights = [torch.as_tensor(x, device=self.device) for x in weights] + + return (inputs, labels, weights) + + def default_generator( + self, + dataset: Dataset, + epochs: int = 1, + mode: str = 'fit', + deterministic: bool = True, + pad_batches: bool = True) -> Iterable[Tuple[List, List, List]]: + """Create a generator that iterates batches for a dataset. + + Subclasses may override this method to customize how model inputs are + generated from the data. 
+ + Parameters + ---------- + dataset: Dataset + the data to iterate + epochs: int + the number of times to iterate over the full dataset + mode: str + allowed values are 'fit' (called during training), 'predict' (called + during prediction), and 'uncertainty' (called during uncertainty + prediction) + deterministic: bool + whether to iterate over the dataset in order, or randomly shuffle the + data for each epoch + pad_batches: bool + whether to pad each batch up to this model's preferred batch size + + Returns + ------- + a generator that iterates batches, each represented as a tuple of lists: + ([inputs], [outputs], [weights]) + """ + for epoch in range(epochs): + for (X_b, y_b, w_b, ids_b) in dataset.iterbatches( + batch_size=self.batch_size, + deterministic=deterministic, + pad_batches=pad_batches): + yield ([X_b], [y_b], [w_b]) + + def save_checkpoint(self, + max_checkpoints_to_keep: int = 5, + model_dir: Optional[str] = None) -> None: + """Save a checkpoint to disk. + + Usually you do not need to call this method, since fit() saves checkpoints + automatically. If you have disabled automatic checkpointing during fitting, + this can be called to manually write checkpoints. + + Parameters + ---------- + max_checkpoints_to_keep: int + the maximum number of checkpoints to keep. Older checkpoints are discarded. + model_dir: str, default None + Model directory to save checkpoint to. If None, revert to self.model_dir + """ + self._ensure_built() + if model_dir is None: + model_dir = self.model_dir + if not os.path.exists(model_dir): + os.makedirs(model_dir) + + # Save the checkpoint to a file. + + data = { + 'model_state_dict': self.model.state_dict(), + 'optimizer_state_dict': self._pytorch_optimizer.state_dict(), + 'global_step': self._global_step + } + temp_file = os.path.join(model_dir, 'temp_checkpoint.pt') + torch.save(data, temp_file) + + # Rename and delete older files. 
+ + paths = [ + os.path.join(model_dir, 'checkpoint%d.pt' % (i + 1)) + for i in range(max_checkpoints_to_keep) + ] + if os.path.exists(paths[-1]): + os.remove(paths[-1]) + for i in reversed(range(max_checkpoints_to_keep - 1)): + if os.path.exists(paths[i]): + os.rename(paths[i], paths[i + 1]) + os.rename(temp_file, paths[0]) + + def get_checkpoints(self, model_dir: Optional[str] = None): + """Get a list of all available checkpoint files. + + Parameters + ---------- + model_dir: str, default None + Directory to get list of checkpoints from. Reverts to self.model_dir if None + + """ + if model_dir is None: + model_dir = self.model_dir + files = sorted(os.listdir(model_dir)) + files = [ + f for f in files if f.startswith('checkpoint') and f.endswith('.pt') + ] + return [os.path.join(model_dir, f) for f in files] + + def restore(self, + checkpoint: Optional[str] = None, + model_dir: Optional[str] = None) -> None: + """Reload the values of all variables from a checkpoint file. + + Parameters + ---------- + checkpoint: str + the path to the checkpoint file to load. If this is None, the most recent + checkpoint will be chosen automatically. Call get_checkpoints() to get a + list of all available checkpoints. + model_dir: str, default None + Directory to restore checkpoint from. If None, use self.model_dir. If + checkpoint is not None, this is ignored. 
+ """ + self._ensure_built() + if checkpoint is None: + checkpoints = sorted(self.get_checkpoints(model_dir)) + if len(checkpoints) == 0: + raise ValueError('No checkpoint found') + checkpoint = checkpoints[0] + data = torch.load(checkpoint) + self.model.load_state_dict(data['model_state_dict']) + self._pytorch_optimizer.load_state_dict(data['optimizer_state_dict']) + self._global_step = data['global_step'] + + def get_global_step(self) -> int: + """Get the number of steps of fitting that have been performed.""" + return self._global_step + + def _log_scalar_to_tensorboard(self, name: str, value: Any, step: int): + """Log a scalar value to Tensorboard.""" + self._summary_writer.add_scalar(name, value, step) + + def _create_assignment_map(self, + source_model: "TorchModel", + include_top: bool = True, + **kwargs) -> Dict[Any, Any]: + """ + Creates a default assignment map between parameters of source and current model. + This is used only when a custom assignment map is missing. This assumes the + model is made of different layers followed by a dense layer for mapping to + output tasks. include_top is used to control whether or not the final dense + layer is used. The default assignment map is useful in cases where the type + of task is different (classification vs regression) and/or number of tasks. + + Parameters + ---------- + source_model: dc.models.TorchModel + Source model to copy parameter values from. 
+ include_top: bool, default True + if true, copies the last dense layer + """ + assignment_map: Dict[Any, Any] = {} + source_vars = list(source_model.model.parameters()) + dest_vars = list(self.model.parameters()) + + if not include_top: + source_vars = source_vars[:-2] + dest_vars = dest_vars[:-2] + + for source_var, dest_var in zip(source_vars, dest_vars): + assignment_map[source_var] = dest_var + + return assignment_map + + def _create_value_map(self, source_model: "TorchModel", + **kwargs) -> Dict[Any, Any]: + """ + Creates a value map between parameters in the source model and their + current values. This is used only when a custom value map is missing, and + assumes the restore method has been called. + + Parameters + ---------- + source_model: dc.models.TorchModel + Source model to create value map from + """ + value_map: Dict[Any, Any] = {} + source_vars = list(source_model.model.parameters()) + + for source_var in source_vars: + value_map[source_var] = source_var.detach().cpu().numpy() + + return value_map + + def load_from_pretrained(self, + source_model: "TorchModel", + assignment_map: Optional[Dict[Any, Any]] = None, + value_map: Optional[Dict[Any, Any]] = None, + checkpoint: Optional[str] = None, + model_dir: Optional[str] = None, + include_top: bool = True, + inputs: Optional[Sequence[Any]] = None, + **kwargs) -> None: + """Copies parameter values from a pretrained model. `source_model` can either + be a pretrained model or a model with the same architecture. `value_map` + is a parameter-value dictionary. If no `value_map` is provided, the parameter + values are restored to the `source_model` from a checkpoint and a default + `value_map` is created. `assignment_map` is a dictionary mapping parameters + from the `source_model` to the current model. If no `assignment_map` is + provided, one is made from scratch and assumes the model is composed of + several different layers, with the final one being a dense layer. 
include_top + is used to control whether or not the final dense layer is used. The default + assignment map is useful in cases where the type of task is different + (classification vs regression) and/or number of tasks in the setting. + + Parameters + ---------- + source_model: dc.TorchModel, required + source_model can either be the pretrained model or a dc.TorchModel with + the same architecture as the pretrained model. It is used to restore from + a checkpoint, if value_map is None and to create a default assignment map + if assignment_map is None + assignment_map: Dict, default None + Dictionary mapping the source_model parameters and current model parameters + value_map: Dict, default None + Dictionary containing source_model trainable parameters mapped to numpy + arrays. If value_map is None, the values are restored and a default + parameter map is created using the restored values + checkpoint: str, default None + the path to the checkpoint file to load. If this is None, the most recent + checkpoint will be chosen automatically. Call get_checkpoints() to get a + list of all available checkpoints + model_dir: str, default None + Restore model from custom model directory if needed + include_top: bool, default True + if True, copies the weights and bias associated with the final dense + layer. Used only when assignment map is None + inputs: List, input tensors for model + if not None, then the weights are built for both the source and self. + """ + if inputs is not None: + # Ensure weights for both models are built. + source_model.model(inputs) + self.model(inputs) + + self._ensure_built() + if value_map is None: + logger.info( + "No value map provided. Creating default value map from restored model." + ) + source_model.restore(model_dir=model_dir, checkpoint=checkpoint) + value_map = self._create_value_map(source_model=source_model) + + if assignment_map is None: + logger.info("No assignment map provided. 
Creating custom assignment map.") + assignment_map = self._create_assignment_map( + source_model=source_model, include_top=include_top) + + for source_var, dest_var in assignment_map.items(): + assert source_var.shape == dest_var.shape + dest_var.data = torch.as_tensor(value_map[source_var], device=self.device) + + +class _StandardLoss(object): + """The implements the loss function for models that use a dc.models.losses.Loss.""" + + def __init__(self, model: torch.nn.Module, loss: Loss) -> None: + self.model = model # not used + self.loss = loss # not used + self.criterion = loss._create_pytorch_loss() + + def __call__(self, outputs: List, labels: List, weights: List) -> float: + if len(outputs) != 1 or len(labels) != 1 or len(weights) != 1: + raise ValueError( + "Loss functions expects exactly one each of outputs, labels, and weights" + ) + losses = self.criterion(outputs[0], labels[0]) + w = weights[0] + if len(w.shape) < len(losses.shape): + if isinstance(w, torch.Tensor): + shape = tuple(w.shape) + else: + shape = w.shape + shape = tuple(-1 if x is None else x for x in shape) + w = w.reshape(shape + (1,) * (len(losses.shape) - len(w.shape))) + + loss = losses * w + return loss.mean() diff --git a/deepchem/models/xgboost_models/__init__.py b/deepchem/models/xgboost_models/__init__.py deleted file mode 100644 index 1d9e9b502a04426826cd2fa30c63f3b80fb2e00a..0000000000000000000000000000000000000000 --- a/deepchem/models/xgboost_models/__init__.py +++ /dev/null @@ -1,130 +0,0 @@ -""" -Scikit-learn wrapper interface of xgboost -""" - -import numpy as np -import os -from deepchem.models import Model -from deepchem.models.sklearn_models import SklearnModel -from deepchem.utils.save import load_from_disk -from deepchem.utils.save import save_to_disk -from sklearn.model_selection import train_test_split, GridSearchCV -import tempfile - - -class XGBoostModel(SklearnModel): - """ - Abstract base class for XGBoost model. 
- """ - - def __init__(self, - model_instance=None, - model_dir=None, - verbose=False, - **kwargs): - """Abstract class for XGBoost models. - Parameters: - ----------- - model_instance: object - Scikit-learn wrapper interface of xgboost - model_dir: str - Path to directory where model will be stored. - """ - if model_dir is not None: - if not os.path.exists(model_dir): - os.makedirs(model_dir) - else: - model_dir = tempfile.mkdtemp() - self.model_dir = model_dir - self.model_instance = model_instance - self.model_class = model_instance.__class__ - - self.verbose = verbose - if 'early_stopping_rounds' in kwargs: - self.early_stopping_rounds = kwargs['early_stopping_rounds'] - else: - self.early_stopping_rounds = 50 - - def fit(self, dataset, **kwargs): - """ - Fits XGBoost model to data. - """ - X = dataset.X - y = np.squeeze(dataset.y) - w = np.squeeze(dataset.w) - seed = self.model_instance.random_state - import xgboost as xgb - if isinstance(self.model_instance, xgb.XGBClassifier): - xgb_metric = "auc" - sklearn_metric = "roc_auc" - stratify = y - elif isinstance(self.model_instance, xgb.XGBRegressor): - xgb_metric = "mae" - sklearn_metric = "neg_mean_absolute_error" - stratify = None - best_param = self._search_param(sklearn_metric, X, y) - # update model with best param - self.model_instance = self.model_class(**best_param) - - # Find optimal n_estimators based on original learning_rate - # and early_stopping_rounds - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=0.2, random_state=seed, stratify=stratify) - - self.model_instance.fit( - X_train, - y_train, - early_stopping_rounds=self.early_stopping_rounds, - eval_metric=xgb_metric, - eval_set=[(X_train, y_train), (X_test, y_test)], - verbose=self.verbose) - # Since test size is 20%, when retrain model to whole data, expect - # n_estimator increased to 1/0.8 = 1.25 time. 
- estimated_best_round = np.round(self.model_instance.best_ntree_limit * 1.25) - self.model_instance.n_estimators = np.int64(estimated_best_round) - self.model_instance.fit(X, y, eval_metric=xgb_metric, verbose=self.verbose) - - def _search_param(self, metric, X, y): - ''' - Find best potential parameters set using few n_estimators - ''' - - # Make sure user specified params are in the grid. - - def unique_not_none(values): - return list(np.unique([x for x in values if x is not None])) - - max_depth_grid = unique_not_none([self.model_instance.max_depth, 5, 7]) - colsample_bytree_grid = unique_not_none( - [self.model_instance.colsample_bytree, 0.66, 0.9]) - reg_lambda_grid = unique_not_none([self.model_instance.reg_lambda, 1, 5]) - learning_rate = 0.3 - if self.model_instance.learning_rate is not None: - learning_rate = max(learning_rate, self.model_instance.learning_rate) - n_estimators = 60 - if self.model_instance.n_estimators is not None: - n_estimators = min(n_estimators, self.model_instance.n_estimators) - param_grid = { - 'max_depth': max_depth_grid, - 'learning_rate': [learning_rate], - 'n_estimators': [n_estimators], - 'gamma': [self.model_instance.gamma], - 'min_child_weight': [self.model_instance.min_child_weight], - 'max_delta_step': [self.model_instance.max_delta_step], - 'subsample': [self.model_instance.subsample], - 'colsample_bytree': colsample_bytree_grid, - 'colsample_bylevel': [self.model_instance.colsample_bylevel], - 'reg_alpha': [self.model_instance.reg_alpha], - 'reg_lambda': reg_lambda_grid, - 'scale_pos_weight': [self.model_instance.scale_pos_weight], - 'base_score': [self.model_instance.base_score], - 'seed': [self.model_instance.random_state] - } - grid_search = GridSearchCV( - self.model_instance, param_grid, cv=2, refit=False, scoring=metric) - grid_search.fit(X, y) - best_params = grid_search.best_params_ - # Change params back original params - best_params['learning_rate'] = self.model_instance.learning_rate - 
best_params['n_estimators'] = self.model_instance.n_estimators - return best_params diff --git a/deepchem/molnet/__init__.py b/deepchem/molnet/__init__.py index 7763775773fa4283a5af4b6a9c2997e143d0c876..0f3dd0f2af9d16aa3ff28e10f9e18b1d36a35169 100644 --- a/deepchem/molnet/__init__.py +++ b/deepchem/molnet/__init__.py @@ -12,11 +12,10 @@ from deepchem.molnet.load_function.kaggle_datasets import load_kaggle from deepchem.molnet.load_function.lipo_datasets import load_lipo from deepchem.molnet.load_function.muv_datasets import load_muv from deepchem.molnet.load_function.nci_datasets import load_nci -from deepchem.molnet.load_function.pcba_datasets import load_pcba, load_pcba_146, load_pcba_2475 +from deepchem.molnet.load_function.pcba_datasets import load_pcba from deepchem.molnet.load_function.pdbbind_datasets import load_pdbbind_grid, load_pdbbind, load_pdbbind_from_dir from deepchem.molnet.load_function.ppb_datasets import load_ppb from deepchem.molnet.load_function.qm7_datasets import load_qm7 -from deepchem.molnet.load_function.qm7_datasets import load_qm7_from_mat, load_qm7b_from_mat from deepchem.molnet.load_function.qm8_datasets import load_qm8 from deepchem.molnet.load_function.qm9_datasets import load_qm9 from deepchem.molnet.load_function.sampl_datasets import load_sampl @@ -31,6 +30,13 @@ from deepchem.molnet.load_function.kinase_datasets import load_kinase from deepchem.molnet.load_function.thermosol_datasets import load_thermosol from deepchem.molnet.load_function.hppb_datasets import load_hppb from deepchem.molnet.load_function.chembl25_datasets import load_chembl25 +from deepchem.molnet.load_function.zinc15_datasets import load_zinc15 +from deepchem.molnet.load_function.material_datasets.load_bandgap import load_bandgap +from deepchem.molnet.load_function.material_datasets.load_perovskite import load_perovskite +from deepchem.molnet.load_function.material_datasets.load_mp_formation_energy import load_mp_formation_energy +from 
deepchem.molnet.load_function.material_datasets.load_mp_metallicity import load_mp_metallicity + +from deepchem.molnet.load_function.molnet_loader import featurizers, splitters, transformers, TransformerGenerator, _MolnetLoader from deepchem.molnet.dnasim import simulate_motif_density_localization from deepchem.molnet.dnasim import simulate_motif_counting diff --git a/deepchem/molnet/check_availability.py b/deepchem/molnet/check_availability.py index 12758c4db56edcb2be7485699fc4165404686cb0..be6c31fccb77de148313e0efc1d6f2fa8755aae7 100644 --- a/deepchem/molnet/check_availability.py +++ b/deepchem/molnet/check_availability.py @@ -1,3 +1,5 @@ +import deepchem as dc + CheckFeaturizer = { ('bace_c', 'logreg'): ['ECFP', 1024], ('bace_c', 'tf'): ['ECFP', 1024], @@ -202,23 +204,20 @@ CheckFeaturizer = { ('qm7', 'tf_regression'): ['ECFP', 1024], ('qm7', 'rf_regression'): ['ECFP', 1024], ('qm7', 'krr'): ['ECFP', 1024], - ('qm7', 'krr_ft'): ['CoulombMatrix', 1024], + ('qm7', 'krr_ft'): [dc.feat.CoulombMatrix(23), 1024], ('qm7', 'textcnn_regression'): ['Raw', None], ('qm7', 'graphconvreg'): ['GraphConv', 75], ('qm7', 'weave_regression'): ['Weave', 75], - ('qm7', 'tf_regression_ft'): ['CoulombMatrix', [23, 23]], - ('qm7', 'dtnn'): ['CoulombMatrix', [23, 23]], + ('qm7', 'tf_regression_ft'): [dc.feat.CoulombMatrix(23), [23, 23]], + ('qm7', 'dtnn'): [dc.feat.CoulombMatrix(23), [23, 23]], ('qm7', 'ani'): ['BPSymmetryFunctionInput', [23, 4]], - ('qm7b', 'tf_regression_ft'): ['CoulombMatrix', [23, 23]], - ('qm7b', 'krr_ft'): ['CoulombMatrix', 1024], - ('qm7b', 'dtnn'): ['CoulombMatrix', [23, 23]], ('qm8', 'tf_regression'): ['ECFP', 1024], ('qm8', 'rf_regression'): ['ECFP', 1024], ('qm8', 'krr'): ['ECFP', 1024], ('qm8', 'graphconvreg'): ['GraphConv', 75], - ('qm8', 'tf_regression_ft'): ['CoulombMatrix', [26, 26]], - ('qm8', 'krr_ft'): ['CoulombMatrix', 1024], - ('qm8', 'dtnn'): ['CoulombMatrix', [26, 26]], + ('qm8', 'tf_regression_ft'): [dc.feat.CoulombMatrix(26), [26, 26]], + 
('qm8', 'krr_ft'): [dc.feat.CoulombMatrix(26), 1024], + ('qm8', 'dtnn'): [dc.feat.CoulombMatrix(26), [26, 26]], ('qm8', 'ani'): ['BPSymmetryFunctionInput', [26, 4]], ('qm8', 'mpnn'): ['MP', [70, 8]], ('qm8', 'weave_regression'): ['Weave', 75], @@ -227,9 +226,9 @@ CheckFeaturizer = { ('qm9', 'rf_regression'): ['ECFP', 1024], ('qm9', 'krr'): ['ECFP', 1024], ('qm9', 'graphconvreg'): ['GraphConv', 75], - ('qm9', 'tf_regression_ft'): ['CoulombMatrix', [29, 29]], - ('qm9', 'krr_ft'): ['CoulombMatrix', 1024], - ('qm9', 'dtnn'): ['CoulombMatrix', [29, 29]], + ('qm9', 'tf_regression_ft'): [dc.feat.CoulombMatrix(29), [29, 29]], + ('qm9', 'krr_ft'): [dc.feat.CoulombMatrix(29), 1024], + ('qm9', 'dtnn'): [dc.feat.CoulombMatrix(29), [29, 29]], ('qm9', 'ani'): ['BPSymmetryFunctionInput', [29, 4]], ('qm9', 'mpnn'): ['MP', [70, 8]], ('qm9', 'weave_regression'): ['Weave', 75], @@ -256,7 +255,6 @@ CheckSplit = { 'pdbbind': ['index', 'random', 'time'], 'ppb': ['index', 'random', 'scaffold'], 'qm7': ['index', 'random', 'stratified'], - 'qm7b': ['index', 'random', 'stratified'], 'qm8': ['index', 'random', 'stratified'], 'qm9': ['index', 'random', 'stratified'], 'sampl': ['index', 'random', 'scaffold'], diff --git a/deepchem/molnet/defaults.py b/deepchem/molnet/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..519086919c7920d8d85de5fdea7ff4f184e42803 --- /dev/null +++ b/deepchem/molnet/defaults.py @@ -0,0 +1,67 @@ +""" +Featurizers, transformers, and splitters for MolNet. +""" + +import os +import importlib +import inspect +import logging +import json +from typing import Dict, List, Any + +import deepchem as dc +from deepchem.feat.base_classes import Featurizer +from deepchem.trans.transformers import Transformer +from deepchem.splits.splitters import Splitter + +logger = logging.getLogger(__name__) + + +def get_defaults(module_name: str = None) -> Dict[str, Any]: + """Get featurizers, transformers, and splitters. 
+ + This function returns a dictionary with class names as keys and classes + as values. All MolNet ``load_x`` functions should specify which + featurizers, transformers, and splitters the dataset supports and + provide sensible defaults. + + Parameters + ---------- + module_name : {"feat", "trans", "splits"} + Default classes from deepchem.`module_name` will be returned. + + Returns + ------- + defaults : Dict[str, Any] + Keys are class names and values are class constructors. + + Examples + -------- + >> splitter = get_defaults('splits')['RandomSplitter']() + >> transformer = get_defaults('trans')['BalancingTransformer'](dataset, {"transform_X": True}) + >> featurizer = get_defaults('feat')["CoulombMatrix"](max_atoms=5) + + """ + + if module_name not in ["feat", "trans", "splits"]: + raise ValueError( + "Input argument must be either 'feat', 'trans', or 'splits'.") + + if module_name == "feat": + sc: Any = Featurizer + elif module_name == "trans": + sc = Transformer + elif module_name == "splits": + sc = Splitter + + module_name = "deepchem." + module_name + + module = importlib.import_module(module_name, package="deepchem") + + defaults = { + x[0]: x[1] + for x in inspect.getmembers(module, inspect.isclass) + if issubclass(x[1], sc) + } + + return defaults diff --git a/deepchem/molnet/load_function/bace_datasets.py b/deepchem/molnet/load_function/bace_datasets.py index e9700d397d6ff419cbc0ee88409c56dc7636a130..e1afacce4fe66be99b537712bda4d445b1add5ad 100644 --- a/deepchem/molnet/load_function/bace_datasets.py +++ b/deepchem/molnet/load_function/bace_datasets.py @@ -2,219 +2,122 @@ bace dataset loader. 
""" import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union from deepchem.molnet.load_function.bace_features import bace_user_specified_features -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -BACE_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/bace.csv' - - -def load_bace_regression(featurizer='ECFP', - split='random', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load bace datasets.""" - # Featurize bace dataset - logger.info("About to featurize bace dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - bace_tasks = ["pIC50"] - - if reload: - save_folder = os.path.join(save_dir, "bace_r-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return bace_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "bace.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=BACE_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'UserDefined': - featurizer = 
deepchem.feat.UserDefinedFeaturizer( - bace_user_specified_features) - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=bace_tasks, smiles_field="mol", featurizer=featurizer) - - dataset = loader.featurize(dataset_file, shard_size=8192) - if split is None: - # Initialize transformers - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset, move_mean=move_mean) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return bace_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split data using {} splitter".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train, move_mean=move_mean) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return bace_tasks, (train, valid, test), transformers - - -def load_bace_classification(featurizer='ECFP', - split='random', - reload=True, - data_dir=None, - 
save_dir=None, - **kwargs): - """Load bace datasets.""" - # Featurize bace dataset - logger.info("About to featurize bace dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - bace_tasks = ["Class"] - - if reload: - save_folder = os.path.join(save_dir, "bace_c-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return bace_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "bace.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=BACE_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'UserDefined': - featurizer = deepchem.feat.UserDefinedFeaturizer( - bace_user_specified_features) - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=bace_tasks, smiles_field="mol", featurizer=featurizer) - - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split is None: - # Initialize transformers - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return bace_tasks, (dataset, None, None), transformers - - 
splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - - splitter = splitters[split] - logger.info("About to split data using {} splitter".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return bace_tasks, (train, valid, test), transformers +BACE_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/bace.csv" +BACE_REGRESSION_TASKS = ["pIC50"] +BACE_CLASSIFICATION_TASKS = ["Class"] + + +class _BaceLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "bace.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=BACE_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="mol", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_bace_regression( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], 
List[dc.trans.Transformer]]: + """ Load BACE dataset, regression labels + + The BACE dataset provides quantitative IC50 and qualitative (binary label) + binding results for a set of inhibitors of human beta-secretase 1 (BACE-1). + + All data are experimental values reported in scientific literature over the + past decade, some with detailed crystal structures available. A collection + of 1522 compounds is provided, along with the regression labels of IC50. + + Scaffold splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "mol" - SMILES representation of the molecular structure + - "pIC50" - Negative log of the IC50 binding affinity + - "class" - Binary labels for inhibitor + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Subramanian, Govindan, et al. "Computational modeling of β-secretase 1 + (BACE-1) inhibitors using ligand based approaches." Journal of chemical + information and modeling 56.10 (2016): 1936-1949. 
+ """ + loader = _BaceLoader(featurizer, splitter, transformers, + BACE_REGRESSION_TASKS, data_dir, save_dir, **kwargs) + return loader.load_dataset('bace_r', reload) + + +def load_bace_classification( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """ Load BACE dataset, classification labels + + BACE dataset with classification labels ("class"). + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _BaceLoader(featurizer, splitter, transformers, + BACE_CLASSIFICATION_TASKS, data_dir, save_dir, **kwargs) + return loader.load_dataset('bace_c', reload) diff --git a/deepchem/molnet/load_function/bbbc_datasets.py b/deepchem/molnet/load_function/bbbc_datasets.py index a500d582238cda288358d392208fc00196ff94ee..9f02dcbbf72f9d2396a460ebf755fda693d9c2a3 100644 --- a/deepchem/molnet/load_function/bbbc_datasets.py +++ b/deepchem/molnet/load_function/bbbc_datasets.py @@ -4,106 +4,98 @@ BBBC Dataset loader. This file contains image loaders for the BBBC dataset collection (https://data.broadinstitute.org/bbbc/image_sets.html). """ import os -import numpy as np -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() BBBC1_IMAGE_URL = 'https://data.broadinstitute.org/bbbc/BBBC001/BBBC001_v1_images_tif.zip' BBBC1_LABEL_URL = 'https://data.broadinstitute.org/bbbc/BBBC001/BBBC001_v1_counts.txt' +BBBC1_TASKS = ["cell-count"] BBBC2_IMAGE_URL = 'https://data.broadinstitute.org/bbbc/BBBC002/BBBC002_v1_images.zip' BBBC2_LABEL_URL = 'https://data.broadinstitute.org/bbbc/BBBC002/BBBC002_v1_counts.txt' - - -def load_bbbc001(split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): +BBBC2_TASKS = ["cell-count"] + + +class _BBBC001Loader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "BBBC001_v1_images_tif.zip") + labels_file = os.path.join(self.data_dir, "BBBC001_v1_counts.txt") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=BBBC1_IMAGE_URL, dest_dir=self.data_dir) + if not os.path.exists(labels_file): 
+ dc.utils.data_utils.download_url( + url=BBBC1_LABEL_URL, dest_dir=self.data_dir) + loader = dc.data.ImageLoader() + return loader.create_dataset(dataset_file, in_memory=False) + + +def load_bbbc001( + splitter: Union[dc.splits.Splitter, str, None] = 'index', + transformers: List[Union[TransformerGenerator, str]] = [], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: """Load BBBC001 dataset This dataset contains 6 images of human HT29 colon cancer cells. The task is to learn to predict the cell counts in these images. This dataset is too small - to serve to train algorithms, but might serve as a good test dataset. - https://data.broadinstitute.org/bbbc/BBBC001/ + to serve to train algorithms, but might serve as a good test dataset. + https://data.broadinstitute.org/bbbc/BBBC001/ + + Parameters + ---------- + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in """ - # Featurize BBBC001 dataset - bbbc001_tasks = ["cell-count"] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "bbbc001-featurized", str(split)) - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return bbbc001_tasks, all_dataset, transformers - dataset_file = os.path.join(data_dir, "BBBC001_v1_images_tif.zip") - labels_file = os.path.join(data_dir, "BBBC001_v1_counts.txt") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=BBBC1_IMAGE_URL, dest_dir=data_dir) - if not os.path.exists(labels_file): - deepchem.utils.download_url(url=BBBC1_LABEL_URL, dest_dir=data_dir) - # Featurize Images into NumpyArrays - loader = deepchem.data.ImageLoader() - dataset = loader.featurize(dataset_file, in_memory=False) - - # Load text file with labels - with open(labels_file) as f: - content = f.readlines() - # Strip the first line which holds field labels - lines = [x.strip() for x in content][1:] - # Format is: Image_name count1 count2 - lines = [x.split("\t") for x in lines] - counts = [(float(x[1]) + float(x[2])) / 2.0 for x in lines] - y = np.array(counts) - - # This is kludgy way to add y to dataset. Can be done better? 
-  dataset = deepchem.data.DiskDataset.from_numpy(dataset.X, y)
-
-  if split == None:
-    transformers = []
-    logger.info("Split is None, no transformers used for the dataset.")
-    return bbbc001_tasks, (dataset, None, None), transformers
-
-  splitters = {
-      'index': deepchem.splits.IndexSplitter(),
-      'random': deepchem.splits.RandomSplitter(),
-  }
-  if split not in splitters:
-    raise ValueError("Only index and random splits supported.")
-  splitter = splitters[split]
-
-  logger.info("About to split dataset with {} splitter.".format(split))
-  frac_train = kwargs.get("frac_train", 0.8)
-  frac_valid = kwargs.get('frac_valid', 0.1)
-  frac_test = kwargs.get('frac_test', 0.1)
-
-  train, valid, test = splitter.train_valid_test_split(
-      dataset,
-      frac_train=frac_train,
-      frac_valid=frac_valid,
-      frac_test=frac_test)
-  transformers = []
-  all_dataset = (train, valid, test)
-  if reload:
-    deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test,
-                                             transformers)
-  return bbbc001_tasks, all_dataset, transformers
-
-
-def load_bbbc002(split='index',
-                 reload=True,
-                 data_dir=None,
-                 save_dir=None,
-                 **kwargs):
+  featurizer = dc.feat.UserDefinedFeaturizer([])  # Not actually used
+  loader = _BBBC001Loader(featurizer, splitter, transformers, BBBC1_TASKS,
+                          data_dir, save_dir, **kwargs)
+  return loader.load_dataset('bbbc001', reload)
+
+
+class _BBBC002Loader(_MolnetLoader):
+
+  def create_dataset(self) -> Dataset:
+    dataset_file = os.path.join(self.data_dir, "BBBC002_v1_images.zip")
+    labels_file = os.path.join(self.data_dir, "BBBC002_v1_counts.txt")
+    if not os.path.exists(dataset_file):
+      dc.utils.data_utils.download_url(
+          url=BBBC2_IMAGE_URL, dest_dir=self.data_dir)
+    if not os.path.exists(labels_file):
+      dc.utils.data_utils.download_url(
+          url=BBBC2_LABEL_URL, dest_dir=self.data_dir)
+    loader = dc.data.ImageLoader()
+    return loader.create_dataset(dataset_file, in_memory=False)
+
+
+def load_bbbc002(
+    splitter: Union[dc.splits.Splitter, str, None] = 'index',
+ transformers: List[Union[TransformerGenerator, str]] = [], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: """Load BBBC002 dataset This dataset contains data corresponding to 5 samples of Drosophilia Kc167 @@ -111,72 +103,27 @@ def load_bbbc002(split='index', 512x512. Ground truth labels contain cell counts for this dataset. Full details about this dataset are present at https://data.broadinstitute.org/bbbc/BBBC002/. + + Parameters + ---------- + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in """ - # Featurize BBBC002 dataset - bbbc002_tasks = ["cell-count"] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "bbbc002-featurized", str(split)) - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return bbbc002_tasks, all_dataset, transformers - dataset_file = os.path.join(data_dir, "BBBC002_v1_images.zip") - labels_file = os.path.join(data_dir, "BBBC002_v1_counts.txt") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=BBBC2_IMAGE_URL, dest_dir=data_dir) - if not os.path.exists(labels_file): - deepchem.utils.download_url(url=BBBC2_LABEL_URL, dest_dir=data_dir) - # Featurize Images into NumpyArrays - loader = deepchem.data.ImageLoader() - dataset = loader.featurize(dataset_file, in_memory=False) - - # Load text file with labels - with open(labels_file) as f: - content = f.readlines() - # Strip the first line which holds field labels - lines = [x.strip() for x in content][1:] - # Format is: Image_name count1 count2 - lines = [x.split("\t") for x in lines] - counts = [(float(x[1]) + float(x[2])) / 2.0 for x in lines] - y = np.reshape(np.array(counts), (len(counts), 1)) - ids = [x[0] for x in lines] - - # This is kludgy way to add y to dataset. Can be done better? 
- dataset = deepchem.data.DiskDataset.from_numpy(dataset.X, y, ids=ids) - - if split == None: - transformers = [] - logger.info("Split is None, no transformers used for the dataset.") - return bbbc002_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - } - if split not in splitters: - raise ValueError("Only index and random splits supported.") - splitter = splitters[split] - - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - all_dataset = (train, valid, test) - transformers = [] - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return bbbc002_tasks, all_dataset, transformers + featurizer = dc.feat.UserDefinedFeaturizer([]) # Not actually used + loader = _BBBC002Loader(featurizer, splitter, transformers, BBBC2_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('bbbc002', reload) diff --git a/deepchem/molnet/load_function/bbbp_datasets.py b/deepchem/molnet/load_function/bbbp_datasets.py index 97241e803ae6ced2f6e00fd63e3911d102f769f9..230560959b6dfad8ef5dd74d05b00c30c27b1e6f 100644 --- a/deepchem/molnet/load_function/bbbp_datasets.py +++ b/deepchem/molnet/load_function/bbbp_datasets.py @@ -2,105 +2,83 @@ Blood-Brain Barrier Penetration dataset loader. 
""" import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -BBBP_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/BBBP.csv' - - -def load_bbbp(featurizer='ECFP', - split='random', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load blood-brain barrier penetration datasets """ - # Featurize bbb dataset - logger.info("About to featurize bbbp dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - bbbp_tasks = ["p_np"] - - if reload: - save_folder = os.path.join(save_dir, "bbbp-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return bbbp_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "BBBP.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=BBBP_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=bbbp_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split is None: - # Initialize transformers - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, 
dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return bbbp_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter() - } - splitter = splitters[split] - logger.info("About to split data with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - # Initialize transformers - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return bbbp_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +BBBP_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/BBBP.csv" +BBBP_TASKS = ["p_np"] + + +class _BBBPLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "BBBP.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=BBBP_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_bbbp( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: 
Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load BBBP dataset + + The blood-brain barrier penetration (BBBP) dataset is designed for the + modeling and prediction of barrier permeability. As a membrane separating + circulating blood and brain extracellular fluid, the blood-brain barrier + blocks most drugs, hormones and neurotransmitters. Thus penetration of the + barrier forms a long-standing issue in development of drugs targeting + central nervous system. + + This dataset includes binary labels for over 2000 compounds on their + permeability properties. + + Scaffold splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "name" - Name of the compound + - "smiles" - SMILES representation of the molecular structure + - "p_np" - Binary labels for penetration/non-penetration + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Martins, Ines Filipa, et al. "A Bayesian approach to in silico + blood-brain barrier penetration modeling." Journal of chemical + information and modeling 52.6 (2012): 1686-1697. + """ + loader = _BBBPLoader(featurizer, splitter, transformers, BBBP_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('bbbp', reload) diff --git a/deepchem/molnet/load_function/cell_counting_datasets.py b/deepchem/molnet/load_function/cell_counting_datasets.py index a92d70968b43cf53c5e9a009d7836c6a293fcab5..efbd098f7a4671617ba8e147d8ab8c7e49922df4 100644 --- a/deepchem/molnet/load_function/cell_counting_datasets.py +++ b/deepchem/molnet/load_function/cell_counting_datasets.py @@ -6,72 +6,59 @@ http://www.robots.ox.ac.uk/~vgg/research/counting/index_org.html. Labels aren't available for this dataset, so only raw images are provided. """ import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +CELL_COUNTING_URL = 'http://www.robots.ox.ac.uk/~vgg/research/counting/cells.zip' +CELL_COUNTING_TASKS: List[str] = [] -DEFAULT_DIR = deepchem.utils.get_data_dir() -DATASET_URL = 'http://www.robots.ox.ac.uk/~vgg/research/counting/cells.zip' +class _CellCountingLoader(_MolnetLoader): -def load_cell_counting(split=None, - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load Cell Counting dataset. - - Loads the cell counting dataset from http://www.robots.ox.ac.uk/~vgg/research/counting/index_org.html. - """ - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - # No tasks since no labels provided. 
- cell_counting_tasks = [] - # For now images are loaded directly by ImageLoader - featurizer = "" - if reload: - save_folder = os.path.join(save_dir, "cell_counting-featurized", str(split)) - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return cell_counting_tasks, all_dataset, transformers - dataset_file = os.path.join(data_dir, "cells.zip") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=DATASET_URL, dest_dir=data_dir) - - loader = deepchem.data.ImageLoader() - dataset = loader.featurize(dataset_file) + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "cells.zip") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=CELL_COUNTING_URL, dest_dir=self.data_dir) + loader = dc.data.ImageLoader() + return loader.featurize(dataset_file) - transformers = [] - if split == None: - logger.info("Split is None, no transformers used.") - return cell_counting_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - } - if split not in splitters: - raise ValueError("Only index and random splits supported.") - splitter = splitters[split] +def load_cell_counting( + splitter: Union[dc.splits.Splitter, str, None] = None, + transformers: List[Union[TransformerGenerator, str]] = [], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load Cell Counting dataset. - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) + Loads the cell counting dataset from http://www.robots.ox.ac.uk/~vgg/research/counting/index_org.html. 
- train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - transformers = [] - all_dataset = (train, valid, test) - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return cell_counting_tasks, all_dataset, transformers + Parameters + ---------- + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + featurizer = dc.feat.UserDefinedFeaturizer([]) # Not actually used + loader = _CellCountingLoader(featurizer, splitter, transformers, + CELL_COUNTING_TASKS, data_dir, save_dir, + **kwargs) + return loader.load_dataset('cell_counting', reload) diff --git a/deepchem/molnet/load_function/chembl25_datasets.py b/deepchem/molnet/load_function/chembl25_datasets.py index f908c83ded95f67155cbb336b1d54a621eab745e..ae82451889123273e2a8aab81900ef37ec8420ae 100644 --- a/deepchem/molnet/load_function/chembl25_datasets.py +++ b/deepchem/molnet/load_function/chembl25_datasets.py @@ -2,22 +2,13 @@ ChEMBL dataset loader, for training ChemNet """ import os -import numpy as np -import logging -import gzip -import shutil import deepchem as dc -import pickle +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -from deepchem.feat import SmilesToSeq, SmilesToImage -from deepchem.feat.smiles_featurizers import create_char_to_idx - -CHEMBL_URL = "https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/chembl_25.csv.gz" -DEFAULT_DIR = dc.utils.get_data_dir() - -logger = logging.getLogger(__name__) - -chembl25_tasks = [ +CHEMBL25_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/chembl_25.csv.gz" +CHEMBL25_TASKS = [ "MolWt", "HeavyAtomMolWt", "MolLogP", "MolMR", "TPSA", "LabuteASA", "HeavyAtomCount", "NHOHCount", "NOCount", "NumHAcceptors", "NumHDonors", "NumHeteroatoms", "NumRotatableBonds", "NumRadicalElectrons", @@ -42,144 +33,50 @@ chembl25_tasks = [ ] -def load_chembl25(featurizer="smiles2seq", - split="random", - data_dir=None, - save_dir=None, - split_seed=None, - reload=True, - transformer_type='minmax', - **kwargs): - """Loads the ChEMBL25 dataset, featurizes it, and does a split. 
- Parameters - ---------- - featurizer: str, default smiles2seq - Featurizer to use - split: str, default None - Splitter to use - data_dir: str, default None - Directory to download data to, or load dataset from. (TODO: If None, make tmp) - save_dir: str, default None - Directory to save the featurized dataset to. (TODO: If None, make tmp) - split_seed: int, default None - Seed to be used for splitting the dataset - reload: bool, default True - Whether to reload saved dataset - transformer_type: str, default minmax: - Transformer to use - """ - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - save_folder = os.path.join(save_dir, "chembl_25-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - - if reload: - if not os.path.exists(save_folder): - logger.warning( - "{} does not exist. Reconstructing dataset.".format(save_folder)) - else: - logger.info("{} exists. Restoring dataset.".format(save_folder)) - loaded, dataset, transformers = dc.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return chembl25_tasks, dataset, transformers - - dataset_file = os.path.join(data_dir, "chembl_25.csv.gz") - - if not os.path.exists(dataset_file): - logger.warning("File {} not found. Downloading dataset. 
(~555 MB)".format( - dataset_file)) - dc.utils.download_url(url=CHEMBL_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2seq": - max_len = kwargs.get('max_len', 250) - pad_len = kwargs.get('pad_len', 10) - char_to_idx = create_char_to_idx( - dataset_file, max_len=max_len, smiles_field="smiles") - featurizer = SmilesToSeq( - char_to_idx=char_to_idx, max_len=max_len, pad_len=pad_len) - elif featurizer == "smiles2img": - img_size = kwargs.get("img_size", 80) - img_spec = kwargs.get("img_spec", "engd") - res = kwargs.get("res", 0.5) - featurizer = SmilesToImage(img_size=img_size, img_spec=img_spec, res=res) +class _Chembl25Loader(_MolnetLoader): - else: - raise ValueError( - "Featurizer of type {} is not supported".format(featurizer)) + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "chembl_25.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=CHEMBL25_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) - loader = dc.data.CSVLoader( - tasks=chembl25_tasks, smiles_field='smiles', featurizer=featurizer) - dataset = loader.featurize( - input_files=[dataset_file], shard_size=10000, data_dir=save_folder) - if split is None: - if transformer_type == "minmax": - transformers = [ - dc.trans.MinMaxTransformer( - transform_X=False, transform_y=True, dataset=dataset) - ] - else: - transformers = [ - dc.trans.NormalizationTransformer( - transform_X=False, transform_y=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform dataset.") 
- for transformer in transformers: - dataset = transformer.transform(dataset) - return chembl25_tasks, (dataset, None, None), transformers - - splitters = { - 'index': dc.splits.IndexSplitter(), - 'random': dc.splits.RandomSplitter(), - 'scaffold': dc.splits.ScaffoldSplitter(), - } - - logger.info("About to split data with {} splitter.".format(split)) - splitter = splitters[split] - - frac_train = kwargs.get('frac_train', 4 / 6) - frac_valid = kwargs.get('frac_valid', 1 / 6) - frac_test = kwargs.get('frac_test', 1 / 6) - - train, valid, test = splitter.train_valid_test_split( - dataset, - seed=split_seed, - frac_train=frac_train, - frac_test=frac_test, - frac_valid=frac_valid) - if transformer_type == "minmax": - transformers = [ - dc.trans.MinMaxTransformer( - transform_X=False, transform_y=True, dataset=train) - ] - else: - transformers = [ - dc.trans.NormalizationTransformer( - transform_X=False, transform_y=True, dataset=train) - ] - - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - dc.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) +def load_chembl25( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Loads the ChEMBL25 dataset, featurizes it, and does a split. - return chembl25_tasks, (train, valid, test), transformers + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. 
+ splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _Chembl25Loader(featurizer, splitter, transformers, CHEMBL25_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('chembl25', reload) diff --git a/deepchem/molnet/load_function/chembl_datasets.py b/deepchem/molnet/load_function/chembl_datasets.py index 6fea5de6e1e40f41e2bf3312122d2fef981dc113..4c73dba4ad245494349d4afd333f096949d73dab 100644 --- a/deepchem/molnet/load_function/chembl_datasets.py +++ b/deepchem/molnet/load_function/chembl_datasets.py @@ -2,157 +2,84 @@ ChEMBL dataset loader. 
""" import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset from deepchem.molnet.load_function.chembl_tasks import chembl_tasks -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() - - -def load_chembl(shard_size=2000, - featurizer="ECFP", - set="5thresh", - split="random", - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - logger.info("About to load ChEMBL dataset.") - - if reload: - save_folder = os.path.join(save_dir, "chembl-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return chembl_tasks, all_dataset, transformers - - dataset_path = os.path.join(data_dir, "chembl_%s.csv.gz" % set) - if not os.path.exists(dataset_path): - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_5thresh.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_sparse.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_5thresh_ts_test.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_5thresh_ts_train.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_5thresh_ts_valid.csv.gz', - 
dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_sparse_ts_test.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_sparse_ts_train.csv.gz', - dest_dir=data_dir) - deepchem.utils.download_url( - url= - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/chembl_year_sets/chembl_sparse_ts_valid.csv.gz', - dest_dir=data_dir) - - if split == "year": - train_files = os.path.join( - data_dir, "./chembl_year_sets/chembl_%s_ts_train.csv.gz" % set) - valid_files = os.path.join( - data_dir, "./chembl_year_sets/chembl_%s_ts_valid.csv.gz" % set) - test_files = os.path.join( - data_dir, "./chembl_year_sets/chembl_%s_ts_test.csv.gz" % set) - - # Featurize ChEMBL dataset - logger.info("About to featurize ChEMBL dataset.") - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=chembl_tasks, smiles_field="smiles", featurizer=featurizer) - - if split == "year": - logger.info("Featurizing train datasets") - train = loader.featurize(train_files, shard_size=shard_size) - logger.info("Featurizing valid datasets") - valid = loader.featurize(valid_files, shard_size=shard_size) - logger.info("Featurizing test datasets") - test = loader.featurize(test_files, shard_size=shard_size) - else: - dataset = loader.featurize(dataset_path, shard_size=shard_size) - - if split is None: 
- transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data.") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return chembl_tasks, (dataset, None, None), transformers - - if split != "year": - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - } - - splitter = splitters[split] - logger.info("Performing new split.") - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer(transform_y=True, dataset=train) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return chembl_tasks, (train, valid, test), transformers +from typing import List, Optional, Tuple, Union + +CHEMBL_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/chembl_%s.csv.gz" + + +class _ChemblLoader(_MolnetLoader): + + def __init__(self, *args, set: str, **kwargs): + super(_ChemblLoader, self).__init__(*args, **kwargs) + self.set = set + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "chembl_%s.csv.gz" % self.set) + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=CHEMBL_URL % self.set, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, 
shard_size=8192) + + +def load_chembl( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + set: str = "5thresh", + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load the ChEMBL dataset. + + This dataset is based on release 22.1 of the data from https://www.ebi.ac.uk/chembl/. + Two subsets of the data are available, depending on the "set" argument. "sparse" + is a large dataset with 244,245 compounds. As the name suggests, the data is + extremely sparse, with most compounds having activity data for only one target. + "5thresh" is a much smaller set (23,871 compounds) that includes only compounds + with activity data for at least five targets. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + set: str + the subset to load, either "sparse" or "5thresh" + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + if set not in ("5thresh", "sparse"): + raise ValueError("set must be either '5thresh' or 'sparse'") + loader = _ChemblLoader( + featurizer, + splitter, + transformers, + chembl_tasks, + data_dir, + save_dir, + set=set, + **kwargs) + return loader.load_dataset('chembl-%s' % set, reload) diff --git a/deepchem/molnet/load_function/clearance_datasets.py b/deepchem/molnet/load_function/clearance_datasets.py index b7c34d10eb268c13ec2bc83728c3730b1c26b94d..5c6cf4c1e1380de89343be1f019e0a580367dee6 100644 --- a/deepchem/molnet/load_function/clearance_datasets.py +++ b/deepchem/molnet/load_function/clearance_datasets.py @@ -2,115 +2,61 @@ clearance dataset loader. """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -CLEARANCE_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/clearance.csv' - - -def load_clearance(featurizer='ECFP', - split='random', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load clearance datasets.""" - # Featurize clearance dataset - logger.info("About to featurize clearance dataset.") - logger.info("About to load clearance dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - clearance_tasks = ['exp'] - - if reload: - save_folder = os.path.join(save_dir, "clearance-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return 
clearance_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "clearance.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=CLEARANCE_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=clearance_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split is None: - # Initialize transformers - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset, move_mean=move_mean) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return clearance_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split data with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train, move_mean=move_mean) - ] 
- - logger.info("About to transform data") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return clearance_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +CLEARANCE_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/clearance.csv" +CLEARANCE_TASKS = ['target'] + + +class _ClearanceLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "clearance.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=CLEARANCE_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_clearance( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """ + Load clearance datasets. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. 
If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _ClearanceLoader(featurizer, splitter, transformers, CLEARANCE_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('clearance', reload) diff --git a/deepchem/molnet/load_function/clintox_datasets.py b/deepchem/molnet/load_function/clintox_datasets.py index 3b603d5e51c679b55ce0d4b0465e4190435f862f..a83628e52edcd3d1381bf0f93f71983466435355 100644 --- a/deepchem/molnet/load_function/clintox_datasets.py +++ b/deepchem/molnet/load_function/clintox_datasets.py @@ -3,129 +3,95 @@ Clinical Toxicity (clintox) dataset loader. @author Caleb Geniesse """ import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +CLINTOX_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/clintox.csv.gz" +CLINTOX_TASKS = ['FDA_APPROVED', 'CT_TOX'] -DEFAULT_DIR = deepchem.utils.get_data_dir() -CLINTOX_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/clintox.csv.gz' +class _ClintoxLoader(_MolnetLoader): -def load_clintox(featurizer='ECFP', - split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load clintox datasets. 
+ def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "clintox.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=CLINTOX_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_clintox( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load ClinTox dataset The ClinTox dataset compares drugs approved by the FDA and drugs that have failed clinical trials for toxicity reasons. The dataset includes two classification tasks for 1491 drug - compounds with known chemical structures: (1) clinical trial - toxicity (or absence of toxicity) and (2) FDA approval status. + compounds with known chemical structures: + + #. clinical trial toxicity (or absence of toxicity) + #. FDA approval status. + List of FDA-approved drugs are compiled from the SWEETLEAD database, and list of drugs that failed clinical trials for toxicity reasons are compiled from the Aggregate Analysis of ClinicalTrials.gov(AACT) database. - The data file contains a csv table, in which columns below are - used: - "smiles" - SMILES representation of the molecular structure - "FDA_APPROVED" - FDA approval status - "CT_TOX" - Clinical trial results - -References: - Gayvert, Kaitlyn M., Neel S. Madhukar, and Olivier Elemento. "A data-driven approach to predicting successes and failures of clinical trials." Cell chemical biology 23.10 (2016): 1294-1301. - - Artemov, Artem V., et al. "Integrated deep learned transcriptomic and structure-based predictor of clinical trials outcomes." 
bioRxiv (2016): 095653. - - Novick, Paul A., et al. "SWEETLEAD: an in silico database of approved drugs, regulated chemicals, and herbal isolates for computer-aided drug discovery." PloS one 8.11 (2013): e79568. - - Aggregate Analysis of ClincalTrials.gov (AACT) Database. https://www.ctti-clinicaltrials.org/aact-database + Random splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "smiles" - SMILES representation of the molecular structure + - "FDA_APPROVED" - FDA approval status + - "CT_TOX" - Clinical trial results + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Gayvert, Kaitlyn M., Neel S. Madhukar, and Olivier Elemento. + "A data-driven approach to predicting successes and failures of clinical + trials." + Cell chemical biology 23.10 (2016): 1294-1301. + .. [2] Artemov, Artem V., et al. "Integrated deep learned transcriptomic and + structure-based predictor of clinical trials outcomes." bioRxiv (2016): + 095653. + .. [3] Novick, Paul A., et al. 
"SWEETLEAD: an in silico database of approved + drugs, regulated chemicals, and herbal isolates for computer-aided drug + discovery." PloS one 8.11 (2013): e79568. + .. [4] Aggregate Analysis of ClincalTrials.gov (AACT) Database. + https://www.ctti-clinicaltrials.org/aact-database """ - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "clintox-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, "clintox.csv.gz") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=CLINTOX_URL, dest_dir=data_dir) - - logger.info("About to load clintox dataset.") - dataset = deepchem.utils.save.load_from_disk(dataset_file) - clintox_tasks = dataset.columns.values[1:].tolist() - logger.info("Tasks in dataset: %s" % (clintox_tasks)) - logger.info("Number of tasks in dataset: %s" % str(len(clintox_tasks))) - logger.info("Number of examples in dataset: %s" % str(dataset.shape[0])) - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return clintox_tasks, all_dataset, transformers - # Featurize clintox dataset - logger.info("About to featurize clintox dataset.") - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = 
deepchem.data.CSVLoader( - tasks=clintox_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - # Transform clintox dataset - if split is None: - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data.") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return clintox_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split data with {} splitter.".format(split)) - train, valid, test = splitter.train_valid_test_split(dataset) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - - return clintox_tasks, (train, valid, test), transformers + loader = _ClintoxLoader(featurizer, splitter, transformers, CLINTOX_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('clintox', reload) diff --git a/deepchem/molnet/load_function/delaney_datasets.py b/deepchem/molnet/load_function/delaney_datasets.py index 056c35840c8fa8e50ac0b6e9ecedd7edf141a410..ba818d7a1df433720eb0406f2667992b4ade54ca 100644 --- a/deepchem/molnet/load_function/delaney_datasets.py +++ b/deepchem/molnet/load_function/delaney_datasets.py @@ -2,115 +2,79 @@ Delaney dataset loader. 
""" import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -DELANEY_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/delaney-processed.csv' - - -def load_delaney(featurizer='ECFP', - split='index', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load delaney datasets. - - The Delaney datasets are extracted from the following paper - - Delaney, John S. "ESOL: estimating aqueous solubility directly from molecular structure." Journal of chemical information and computer sciences 44.3 (2004): 1000-1005. - - This dataset contains 2874 measured aqueous solubility - values. The source dataset is available in the supplemental - material of the original paper. +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +DELANEY_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/delaney-processed.csv" +DELANEY_TASKS = ['measured log solubility in mols per litre'] + + +class _DelaneyLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "delaney-processed.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=DELANEY_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_delaney( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load Delaney 
dataset + + The Delaney (ESOL) dataset a regression dataset containing structures and + water solubility data for 1128 compounds. The dataset is widely used to + validate machine learning models on estimating solubility directly from + molecular structures (as encoded in SMILES strings). + + Scaffold splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "Compound ID" - Name of the compound + - "smiles" - SMILES representation of the molecular structure + - "measured log solubility in mols per litre" - Log-scale water solubility + of the compound, used as label + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Delaney, John S. "ESOL: estimating aqueous solubility directly from + molecular structure." Journal of chemical information and computer + sciences 44.3 (2004): 1000-1005. 
""" - # Featurize Delaney dataset - logger.info("About to featurize Delaney dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - if reload: - save_folder = os.path.join(save_dir, "delaney-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, "delaney-processed.csv") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=DELANEY_URL, dest_dir=data_dir) - - delaney_tasks = ['measured log solubility in mols per litre'] - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return delaney_tasks, all_dataset, transformers - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - res = kwargs.get("res", 0.5) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec, res=res) - - loader = deepchem.data.CSVLoader( - tasks=delaney_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split is None: - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset, move_mean=move_mean) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = 
transformer.transform(dataset) - - return delaney_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - train, valid, test = splitter.train_valid_test_split(dataset) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train, move_mean=move_mean) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return delaney_tasks, (train, valid, test), transformers + loader = _DelaneyLoader(featurizer, splitter, transformers, DELANEY_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('delaney', reload) diff --git a/deepchem/molnet/load_function/factors_datasets.py b/deepchem/molnet/load_function/factors_datasets.py index e234b3e9ee74ce373d4c375dd41db7fa16d8863a..012605100ef79b49888b051304218931f89d1962 100644 --- a/deepchem/molnet/load_function/factors_datasets.py +++ b/deepchem/molnet/load_function/factors_datasets.py @@ -11,9 +11,9 @@ from deepchem.molnet.load_function.kaggle_features import merck_descriptors logger = logging.getLogger(__name__) -TRAIN_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/FACTORS_training_disguised_combined_full.csv.gz' -VALID_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/FACTORS_test1_disguised_combined_full.csv.gz' -TEST_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/FACTORS_test2_disguised_combined_full.csv.gz' +TRAIN_URL = 
"https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/FACTORS_training_disguised_combined_full.csv.gz" +VALID_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/FACTORS_test1_disguised_combined_full.csv.gz" +TEST_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/FACTORS_test2_disguised_combined_full.csv.gz" TRAIN_FILENAME = "FACTORS_training_disguised_combined_full.csv.gz" VALID_FILENAME = "FACTORS_test1_disguised_combined_full.csv.gz" @@ -62,15 +62,15 @@ def gen_factors(FACTORS_tasks, if not os.path.exists(train_files): logger.info("Downloading train file...") - deepchem.utils.download_url(url=TRAIN_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TRAIN_URL, dest_dir=data_dir) logger.info("Training file download complete.") logger.info("Downloading validation file...") - deepchem.utils.download_url(url=VALID_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=VALID_URL, dest_dir=data_dir) logger.info("Validation file download complete.") logger.info("Downloading test file...") - deepchem.utils.download_url(url=TEST_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TEST_URL, dest_dir=data_dir) logger.info("Test file download complete") # Featurize the FACTORS dataset @@ -138,8 +138,7 @@ def load_factors(shard_size=2000, featurizer=None, split=None, reload=True): """Loads FACTOR dataset; does not do train/test split The Factors dataset is an in-house dataset from Merck that was first introduced in the following paper: - -Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. + Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. It contains 1500 Merck in-house compounds that were measured for IC50 of inhibition on 12 serine proteases. Unlike most of @@ -173,7 +172,7 @@ Ramsundar, Bharath, et al. 
"Is multitask deep learning practical for pharma?." J 'T_00007', 'T_00008', 'T_00009', 'T_00010', 'T_00011', 'T_00012' ] - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() data_dir = os.path.join(data_dir, "factors") if not os.path.exists(data_dir): diff --git a/deepchem/molnet/load_function/hiv_datasets.py b/deepchem/molnet/load_function/hiv_datasets.py index f91e14c0efc276591c8a6090aca97ace50f0dc2a..f00aee024025384ba3318c962e4aa4283ba8eac8 100644 --- a/deepchem/molnet/load_function/hiv_datasets.py +++ b/deepchem/molnet/load_function/hiv_datasets.py @@ -2,22 +2,36 @@ hiv dataset loader. """ import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +HIV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/HIV.csv" +HIV_TASKS = ["HIV_active"] -HIV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/HIV.csv' -DEFAULT_DIR = deepchem.utils.get_data_dir() +class _HIVLoader(_MolnetLoader): -def load_hiv(featurizer='ECFP', - split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load hiv datasets. 
Does not do train/test split + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "HIV.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=HIV_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_hiv( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load HIV dataset The HIV dataset was introduced by the Drug Therapeutics Program (DTP) AIDS Antiviral Screen, which tested the ability @@ -28,101 +42,41 @@ def load_hiv(featurizer='ECFP', latter two labels, making it a classification task between inactive (CI) and active (CA and CM). - The data file contains a csv table, in which columns below - are used: - - "smiles": SMILES representation of the molecular structure - - "activity": Three-class labels for screening results: CI/CM/CA - - "HIV_active": Binary labels for screening results: 1 (CA/CM) and 0 (CI) - - References: - AIDS Antiviral Screen Data. https://wiki.nci.nih.gov/display/NCIDTPdata/AIDS+Antiviral+Screen+Data + Scaffold splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "smiles": SMILES representation of the molecular structure + - "activity": Three-class labels for screening results: CI/CM/CA + - "HIV_active": Binary labels for screening results: 1 (CA/CM) and 0 (CI) + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. 
+ splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] AIDS Antiviral Screen Data. + https://wiki.nci.nih.gov/display/NCIDTPdata/AIDS+Antiviral+Screen+Data """ - # Featurize hiv dataset - logger.info("About to featurize hiv dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - hiv_tasks = ["HIV_active"] - - if reload: - save_folder = os.path.join(save_dir, "hiv-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return hiv_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "HIV.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=HIV_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = 
deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=hiv_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split is None: - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return hiv_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'butina': deepchem.splits.ButinaSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - train, valid, test = splitter.train_valid_test_split(dataset) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return hiv_tasks, (train, valid, test), transformers + loader = _HIVLoader(featurizer, splitter, transformers, HIV_TASKS, data_dir, + save_dir, **kwargs) + return 
loader.load_dataset('hiv', reload) diff --git a/deepchem/molnet/load_function/hopv_datasets.py b/deepchem/molnet/load_function/hopv_datasets.py index 8b228966037a8bf04bae1e4b7c551c40bb26738d..ee82a56cebf5db60b52f5540143b8b1cc53c4407 100644 --- a/deepchem/molnet/load_function/hopv_datasets.py +++ b/deepchem/molnet/load_function/hopv_datasets.py @@ -2,21 +2,40 @@ HOPV dataset loader. """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -HOPV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/hopv.tar.gz' -DEFAULT_DIR = deepchem.utils.get_data_dir() - - -def load_hopv(featurizer='ECFP', - split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +HOPV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/hopv.tar.gz" +HOPV_TASKS = [ + 'HOMO', 'LUMO', 'electrochemical_gap', 'optical_gap', 'PCE', 'V_OC', 'J_SC', + 'fill_factor' +] + + +class _HOPVLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "hopv.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=HOPV_URL, dest_dir=self.data_dir) + dc.utils.data_utils.untargz_file( + os.path.join(self.data_dir, 'hopv.tar.gz'), self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_hopv( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], 
List[dc.trans.Transformer]]: """Load HOPV datasets. Does not do train/test split The HOPV datasets consist of the "Harvard Organic @@ -30,95 +49,29 @@ def load_hopv(featurizer='ECFP', removed (for now). Lopez, Steven A., et al. "The Harvard organic photovoltaic dataset." Scientific data 3.1 (2016): 1-7. - """ - # Featurize HOPV dataset - logger.info("About to featurize HOPV dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - hopv_tasks = [ - 'HOMO', 'LUMO', 'electrochemical_gap', 'optical_gap', 'PCE', 'V_OC', - 'J_SC', 'fill_factor' - ] - - if reload: - save_folder = os.path.join(save_dir, "hopv-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return hopv_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "hopv.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=HOPV_URL, dest_dir=data_dir) - deepchem.utils.untargz_file(os.path.join(data_dir, 'hopv.tar.gz'), data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - loader = deepchem.data.CSVLoader( - tasks=hopv_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split == None: - transformers = [ - 
deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return hopv_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'butina': deepchem.splits.ButinaSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer(transform_y=True, dataset=train) - ] - - logger.info("About to transform data.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return hopv_tasks, (train, valid, test), transformers + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. 
Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _HOPVLoader(featurizer, splitter, transformers, HOPV_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('hopv', reload) diff --git a/deepchem/molnet/load_function/hppb_datasets.py b/deepchem/molnet/load_function/hppb_datasets.py index e886621982ad26a3fd7cbd9bac47ad993ea58f10..a19da873924b2e54ae0bb5c27ab774dac97d9832 100644 --- a/deepchem/molnet/load_function/hppb_datasets.py +++ b/deepchem/molnet/load_function/hppb_datasets.py @@ -2,14 +2,13 @@ HPPB Dataset Loader. """ import os -import logging -import deepchem -import numpy as np +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) - -HPPB_URL = "http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/hppb.csv" -DEFAULT_DATA_DIR = deepchem.utils.get_data_dir() +HPPB_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/hppb.csv" +HPPB_TASKS = ["target"] #Task is solubility in pH 7.4 buffer def remove_missing_entries(dataset): @@ -20,8 +19,6 @@ def remove_missing_entries(dataset): """ for i, (X, y, w, ids) in enumerate(dataset.itershards()): available_rows = X.any(axis=1) - logger.info("Shard %d has %d missing entries." 
% - (i, np.count_nonzero(~available_rows))) X = X[available_rows] y = y[available_rows] w = w[available_rows] @@ -29,103 +26,52 @@ def remove_missing_entries(dataset): dataset.set_shard(i, X, y, w, ids) -def load_hppb(featurizer="ECFP", - data_dir=None, - save_dir=None, - split=None, - split_seed=None, - reload=True, - **kwargs): - """Loads the thermodynamic solubility datasets.""" - # Featurizer hppb dataset - logger.info("About to featurize hppb dataset...") - hppb_tasks = ["target"] #Task is solubility in pH 7.4 buffer - - if data_dir is None: - data_dir = DEFAULT_DATA_DIR - if save_dir is None: - save_dir = DEFAULT_DATA_DIR - - if reload: - save_folder = os.path.join(save_dir, "hppb-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return hppb_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "hppb.csv") - if not os.path.exists(dataset_file): - logger.info("{} does not exist. 
Downloading it.".format(dataset_file)) - deepchem.utils.download_url(url=hppb_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'AdjacencyConv': - featurizer = deepchem.feat.AdjacencyFingerprint( - max_n_atoms=150, max_valence=6) - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - logger.info("Featurizing datasets.") - loader = deepchem.data.CSVLoader( - tasks=hppb_tasks, smiles_field='smile', featurizer=featurizer) - dataset = loader.featurize(input_files=[dataset_file], shard_size=2000) - - logger.info("Removing missing entries...") - remove_missing_entries(dataset) - - if split == None: - logger.info("About to transform the data...") - transformers = [] - for transformer in transformers: - logger.info("Transforming the dataset with transformer ", - transformer.__class__.__name__) - dataset = transformer.transform(dataset) - return hppb_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'butina': deepchem.splits.ButinaSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - transformers = [] - - logger.info("About to transform 
the data...") - for transformer in transformers: - logger.info("Transforming the data with transformer ", - transformer.__class__.__name__) - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - logger.info("Saving file to {}.".format(save_folder)) - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return hppb_tasks, (train, valid, test), transformers +class _HPPBLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "hppb.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=HPPB_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smile", featurizer=self.featurizer) + dataset = loader.create_dataset(dataset_file, shard_size=2000) + remove_missing_entries(dataset) + return dataset + + +def load_hppb( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = [], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Loads the thermodynamic solubility datasets. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. 
Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _HPPBLoader(featurizer, splitter, transformers, HPPB_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('hppb', reload) diff --git a/deepchem/molnet/load_function/kaggle_datasets.py b/deepchem/molnet/load_function/kaggle_datasets.py index 8c6c319013b18cdfe795d2580fc305b751fe94b2..561864f16486951b1f784c1d73ffa38945e51f17 100644 --- a/deepchem/molnet/load_function/kaggle_datasets.py +++ b/deepchem/molnet/load_function/kaggle_datasets.py @@ -58,14 +58,14 @@ def gen_kaggle(KAGGLE_tasks, test_files = os.path.join(data_dir, "KAGGLE_test2_disguised_combined_full.csv.gz") if not os.path.exists(train_files): - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/KAGGLE_training_disguised_combined_full.csv.gz', + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KAGGLE_training_disguised_combined_full.csv.gz", dest_dir=data_dir) - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/KAGGLE_test1_disguised_combined_full.csv.gz', + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KAGGLE_test1_disguised_combined_full.csv.gz", dest_dir=data_dir) - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/KAGGLE_test2_disguised_combined_full.csv.gz', + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KAGGLE_test2_disguised_combined_full.csv.gz", dest_dir=data_dir) # Featurize KAGGLE dataset 
@@ -153,7 +153,7 @@ def load_kaggle(shard_size=2000, featurizer=None, split=None, reload=True): '3A4', 'CB1', 'DPP4', 'HIVINT', 'HIV_PROT', 'LOGD', 'METAB', 'NK1', 'OX1', 'OX2', 'PGP', 'PPB', 'RAT_F', 'TDI', 'THROMBIN' ] - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() data_dir = os.path.join(data_dir, "kaggle") if not os.path.exists(data_dir): diff --git a/deepchem/molnet/load_function/kinase_datasets.py b/deepchem/molnet/load_function/kinase_datasets.py index 95c4a1ba88eb4b9d0dacdcc8c88174da90d67518..b948a1ba1d07e8fad9f9264367f40bcd9fb31ee3 100644 --- a/deepchem/molnet/load_function/kinase_datasets.py +++ b/deepchem/molnet/load_function/kinase_datasets.py @@ -9,9 +9,9 @@ import numpy as np import deepchem from deepchem.molnet.load_function.kaggle_features import merck_descriptors -TRAIN_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/KINASE_training_disguised_combined_full.csv.gz' -VALID_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/KINASE_test1_disguised_combined_full.csv.gz' -TEST_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/KINASE_test2_disguised_combined_full.csv.gz' +TRAIN_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KINASE_training_disguised_combined_full.csv.gz" +VALID_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KINASE_test1_disguised_combined_full.csv.gz" +TEST_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/KINASE_test2_disguised_combined_full.csv.gz" TRAIN_FILENAME = "KINASE_training_disguised_combined_full.csv.gz" VALID_FILENAME = "KINASE_test1_disguised_combined_full.csv.gz" @@ -64,15 +64,15 @@ def gen_kinase(KINASE_tasks, if not os.path.exists(train_files): logger.info("Downloading training file...") - deepchem.utils.download_url(url=TRAIN_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TRAIN_URL, dest_dir=data_dir) logger.info("Training file download complete.")
logger.info("Downloading validation file...") - deepchem.utils.download_url(url=VALID_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=VALID_URL, dest_dir=data_dir) logger.info("Validation file download complete.") logger.info("Downloading test file...") - deepchem.utils.download_url(url=TEST_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TEST_URL, dest_dir=data_dir) logger.info("Test file download complete") # Featurize the KINASE dataset @@ -144,8 +144,7 @@ def load_kinase(shard_size=2000, featurizer=None, split=None, reload=True): """Loads Kinase datasets, does not do train/test split The Kinase dataset is an in-house dataset from Merck that was first introduced in the following paper: - -Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. + Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. It contains 2500 Merck in-house compounds that were measured for IC50 of inhibition on 99 protein kinases. Unlike most of @@ -193,7 +192,7 @@ Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." J 'T_00109', 'T_00110', 'T_00111' ] - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() data_dir = os.path.join(data_dir, "kinase") if not os.path.exists(data_dir): diff --git a/deepchem/molnet/load_function/lipo_datasets.py b/deepchem/molnet/load_function/lipo_datasets.py index d0f01b08a1875b7bebdd5c7051c45b7f816852db..41a04e0a4eab155bb0bdb1826d0e9a7599210e92 100644 --- a/deepchem/molnet/load_function/lipo_datasets.py +++ b/deepchem/molnet/load_function/lipo_datasets.py @@ -2,115 +2,77 @@ Lipophilicity dataset loader. 
""" import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -LIPO_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/Lipophilicity.csv' - - -def load_lipo(featurizer='ECFP', - split='index', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load Lipophilicity datasets.""" - # Featurize Lipophilicity dataset - logger.info("About to featurize Lipophilicity dataset.") - logger.info("About to load Lipophilicity dataset.") - - Lipo_tasks = ['exp'] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "lipo-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return Lipo_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "Lipophilicity.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=LIPO_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=Lipo_tasks, 
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union

LIPO_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/Lipophilicity.csv"
LIPO_TASKS = ['exp']


class _LipoLoader(_MolnetLoader):
  """Loader that downloads and featurizes the raw Lipophilicity CSV."""

  def create_dataset(self) -> Dataset:
    # Only fetch the raw CSV when it is not already cached on disk.
    dataset_file = os.path.join(self.data_dir, "Lipophilicity.csv")
    if not os.path.exists(dataset_file):
      dc.utils.data_utils.download_url(url=LIPO_URL, dest_dir=self.data_dir)
    loader = dc.data.CSVLoader(
        tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer)
    return loader.create_dataset(dataset_file, shard_size=8192)


def load_lipo(
    featurizer: Union[dc.feat.Featurizer, str] = 'ECFP',
    splitter: Union[dc.splits.Splitter, str, None] = 'scaffold',
    transformers: List[Union[TransformerGenerator, str]] = ['normalization'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    **kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
  """Load Lipophilicity dataset

  Lipophilicity is an important feature of drug molecules that affects both
  membrane permeability and solubility. The lipophilicity dataset, curated
  from ChEMBL database, provides experimental results of octanol/water
  distribution coefficient (logD at pH 7.4) of 4200 compounds.

  Scaffold splitting is recommended for this dataset.

  The raw data csv file contains columns below:

  - "smiles" - SMILES representation of the molecular structure
  - "exp" - Measured octanol/water distribution coefficient (logD) of the
    compound, used as label

  Parameters
  ----------
  featurizer: Featurizer or str
    the featurizer to use for processing the data. Alternatively you can pass
    one of the names from dc.molnet.featurizers as a shortcut.
  splitter: Splitter or str
    the splitter to use for splitting the data into training, validation, and
    test sets. Alternatively you can pass one of the names from
    dc.molnet.splitters as a shortcut. If this is None, all the data
    will be included in a single dataset.
  transformers: list of TransformerGenerators or strings
    the Transformers to apply to the data. Each one is specified by a
    TransformerGenerator or, as a shortcut, one of the names from
    dc.molnet.transformers.
  reload: bool
    if True, the first call for a particular featurizer and splitter will cache
    the datasets to disk, and subsequent calls will reload the cached datasets.
  data_dir: str
    a directory to save the raw data in
  save_dir: str
    a directory to save the dataset in

  References
  ----------
  .. [1] Hersey, A. ChEMBL Deposited Data Set - AZ dataset; 2015.
     https://doi.org/10.6019/chembl3301361
  """
  # All featurize/split/transform/cache mechanics live in _MolnetLoader;
  # this function only wires up the dataset-specific pieces.
  loader = _LipoLoader(featurizer, splitter, transformers, LIPO_TASKS, data_dir,
                       save_dir, **kwargs)
  return loader.load_dataset('lipo', reload)


# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/load_dataset_template.py
# ---------------------------------------------------------------------------
"""
Short docstring description of dataset.
"""
import os
import logging
import deepchem
from deepchem.feat import Featurizer
from deepchem.trans import Transformer
from deepchem.splits.splitters import Splitter
from deepchem.molnet.defaults import get_defaults

from typing import List, Tuple, Dict, Optional

logger = logging.getLogger(__name__)

DEFAULT_DIR = deepchem.utils.data_utils.get_data_dir()
MYDATASET_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/mydataset.tar.gz"
MYDATASET_CSV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/mydataset.csv"

# dict of accepted featurizers for this dataset
# modify the returned dicts for your dataset
DEFAULT_FEATURIZERS = get_defaults("feat")

# Names of supported featurizers
mydataset_featurizers = ['CircularFingerprint', 'ConvMolFeaturizer']
DEFAULT_FEATURIZERS = {k: DEFAULT_FEATURIZERS[k] for k in mydataset_featurizers}

# dict of accepted transformers
DEFAULT_TRANSFORMERS = get_defaults("trans")

# dict of accepted splitters
DEFAULT_SPLITTERS = get_defaults("splits")

# names of supported splitters
mydataset_splitters = ['RandomSplitter', 'RandomStratifiedSplitter']
DEFAULT_SPLITTERS = {k: DEFAULT_SPLITTERS[k] for k in mydataset_splitters}


def load_mydataset(
    featurizer: Featurizer = DEFAULT_FEATURIZERS['CircularFingerprint'],
    transformers: List[Transformer] = [
        DEFAULT_TRANSFORMERS['NormalizationTransformer']
    ],
    splitter: Splitter = DEFAULT_SPLITTERS['RandomSplitter'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    featurizer_kwargs: Dict[str, object] = {},
    splitter_kwargs: Dict[str, object] = {},
    transformer_kwargs: Dict[str, Dict[str, object]] = {},
    **kwargs) -> Tuple[List, Tuple, List]:
  """Load mydataset.

  This is a template for adding a function to load a dataset from
  MoleculeNet. Adjust the global variable URL strings, default parameters,
  default featurizers, transformers, and splitters, and variable names as
  needed. All available featurizers, transformers, and
  splitters are in the `DEFAULTS_X` global variables.

  If `reload = True` and `data_dir` (`save_dir`) is specified, the loader
  will attempt to load the raw dataset (featurized dataset) from disk.
  Otherwise, the dataset will be downloaded from the DeepChem AWS bucket.

  The dataset will be featurized with `featurizer` and separated into
  train/val/test sets according to `splitter`. Some transformers (e.g.
  `NormalizationTransformer`) must be initialized with a dataset.
  Set up kwargs to enable these transformations. Additional kwargs may
  be given for specific featurizers, transformers, and splitters.

  The load function must be modified with the appropriate DataLoaders
  for all supported featurizers for your dataset.

  Please refer to the MoleculeNet documentation for further information
  https://deepchem.readthedocs.io/en/latest/moleculenet.html.

  Parameters
  ----------
  featurizer : allowed featurizers for this dataset
    A featurizer that inherits from deepchem.feat.Featurizer.
  transformers : List of allowed transformers for this dataset
    A transformer that inherits from deepchem.trans.Transformer.
  splitter : allowed splitters for this dataset
    A splitter that inherits from deepchem.splits.splitters.Splitter.
  reload : bool (default True)
    Try to reload dataset from disk if already downloaded. Save to disk
    after featurizing.
  data_dir : str, optional (default None)
    Path to datasets.
  save_dir : str, optional (default None)
    Path to featurized datasets.
  featurizer_kwargs : dict
    Specify parameters to featurizer, e.g. {"size": 1024}
  splitter_kwargs : dict
    Specify parameters to splitter, e.g. {"seed": 42}
  transformer_kwargs : dict
    Maps transformer names to constructor arguments, e.g.
    {"BalancingTransformer": {"transform_x":True, "transform_y":False}}
  **kwargs : additional optional arguments.

  Returns
  -------
  tasks, datasets, transformers : tuple
    tasks : list
      Column names corresponding to machine learning target variables.
    datasets : tuple
      train, validation, test splits of data as
      ``deepchem.data.datasets.Dataset`` instances.
    transformers : list
      ``deepchem.trans.transformers.Transformer`` instances applied
      to dataset.

  References
  ----------
  MLA style references for this dataset. The example is like this.
  Last, First et al. "Article title." Journal name, vol. #, no. #, year, pp. page range, DOI.
  .. [1] Wu, Zhenqin et al. "MoleculeNet: a benchmark for molecular machine learning."
     Chemical Science, vol. 9, 2018, pp. 513-530, 10.1039/c7sc02664a.

  Examples
  --------
  >> import deepchem as dc
  >> tasks, datasets, transformers = dc.molnet.load_tox21(reload=False)
  >> train_dataset, val_dataset, test_dataset = datasets
  >> n_tasks = len(tasks)
  >> n_features = train_dataset.get_data_shape()[0]
  >> model = dc.models.MultitaskClassifier(n_tasks, n_features)
  """

  # Warning message about this template
  raise ValueError("""
  This is a template function and it doesn't do anything!
  Use this function as a reference when implementing new
  loaders for MoleculeNet datasets.
  """)

  # Featurize mydataset
  logger.info("About to featurize mydataset.")
  my_tasks = ["task1", "task2", "task3"]  # machine learning targets

  # Get DeepChem data directory if needed
  if data_dir is None:
    data_dir = DEFAULT_DIR
  if save_dir is None:
    save_dir = DEFAULT_DIR

  # Check for str args to featurizer and splitter
  if isinstance(featurizer, str):
    featurizer = DEFAULT_FEATURIZERS[featurizer](**featurizer_kwargs)
  elif issubclass(featurizer, Featurizer):
    featurizer = featurizer(**featurizer_kwargs)

  if isinstance(splitter, str):
    splitter = DEFAULT_SPLITTERS[splitter]()
  elif issubclass(splitter, Splitter):
    splitter = splitter()

  # Reload from disk
  if reload:
    featurizer_name = str(featurizer.__class__.__name__)
    splitter_name = str(splitter.__class__.__name__)
    save_folder = os.path.join(save_dir, "mydataset-featurized",
                               featurizer_name, splitter_name)

    loaded, all_dataset, transformers = deepchem.utils.data_utils.load_dataset_from_disk(
        save_folder)
    if loaded:
      return my_tasks, all_dataset, transformers

  # First type of supported featurizers
  supported_featurizers = []  # type: List[Featurizer]

  # If featurizer requires a non-CSV file format, load .tar.gz file
  if featurizer in supported_featurizers:
    dataset_file = os.path.join(data_dir, 'mydataset.filetype')

    if not os.path.exists(dataset_file):
      deepchem.utils.data_utils.download_url(
          url=MYDATASET_URL, dest_dir=data_dir)
      deepchem.utils.data_utils.untargz_file(
          os.path.join(data_dir, 'mydataset.tar.gz'), data_dir)

    # Change loader to match featurizer and data file type
    loader = deepchem.data.DataLoader(
        tasks=my_tasks,
        id_field="id",  # column name holding sample identifier
        featurizer=featurizer)

  else:  # only load CSV file
    dataset_file = os.path.join(data_dir, "mydataset.csv")
    if not os.path.exists(dataset_file):
      deepchem.utils.data_utils.download_url(
          url=MYDATASET_CSV_URL, dest_dir=data_dir)

    loader = deepchem.data.CSVLoader(
        tasks=my_tasks, smiles_field="smiles", featurizer=featurizer)

  # Featurize dataset
  dataset = loader.create_dataset(dataset_file)

  train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(
      dataset, **splitter_kwargs)

  # Initialize transformers. Str entries are looked up by name; class entries
  # are instantiated directly. Note: for a class `t`, the kwargs key is
  # `t.__name__` (the original `t.__class__.__name__` would have produced the
  # metaclass name, e.g. 'type'). `.get(..., {})` tolerates transformers with
  # no extra kwargs instead of raising KeyError.
  transformers = [
      DEFAULT_TRANSFORMERS[t](dataset=dataset, **transformer_kwargs.get(t, {}))
      if isinstance(t, str) else t(
          dataset=dataset, **transformer_kwargs.get(t.__name__, {}))
      for t in transformers
  ]

  for transformer in transformers:
    train_dataset = transformer.transform(train_dataset)
    valid_dataset = transformer.transform(valid_dataset)
    test_dataset = transformer.transform(test_dataset)

  if reload:  # save to disk
    deepchem.utils.data_utils.save_dataset_to_disk(
        save_folder, train_dataset, valid_dataset, test_dataset, transformers)

  return my_tasks, (train_dataset, valid_dataset, test_dataset), transformers
# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/material_datasets/load_bandgap.py
# ---------------------------------------------------------------------------
"""
Experimental bandgaps for inorganic crystals.
"""
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union

BANDGAP_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/expt_gap.tar.gz'
BANDGAP_TASKS = ['experimental_bandgap']


class _BandgapLoader(_MolnetLoader):
  """Loader that downloads and featurizes the experimental band gap data."""

  def create_dataset(self) -> Dataset:
    # The raw data ships as a .tar.gz containing a JSON file; download and
    # extract only what is missing from the local cache.
    dataset_file = os.path.join(self.data_dir, 'expt_gap.json')
    targz_file = os.path.join(self.data_dir, 'expt_gap.tar.gz')
    if not os.path.exists(dataset_file):
      if not os.path.exists(targz_file):
        dc.utils.data_utils.download_url(
            url=BANDGAP_URL, dest_dir=self.data_dir)
      dc.utils.data_utils.untargz_file(targz_file, self.data_dir)
    loader = dc.data.JsonLoader(
        tasks=self.tasks,
        feature_field="composition",
        label_field="experimental_bandgap",
        featurizer=self.featurizer)
    return loader.create_dataset(dataset_file)


def load_bandgap(
    featurizer: Union[dc.feat.Featurizer,
                      str] = dc.feat.ElementPropertyFingerprint(),
    splitter: Union[dc.splits.Splitter, str, None] = 'random',
    transformers: List[Union[TransformerGenerator, str]] = ['normalization'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    **kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
  """Load band gap dataset.

  Contains 4604 experimentally measured band gaps for inorganic
  crystal structure compositions. In benchmark studies, random forest
  models achieved a mean average error of 0.45 eV during five-fold
  nested cross validation on this dataset.

  For more details on the dataset see [1]_. For more details
  on previous benchmarks for this dataset, see [2]_.

  Parameters
  ----------
  featurizer: Featurizer or str
    the featurizer to use for processing the data. Alternatively you can pass
    one of the names from dc.molnet.featurizers as a shortcut.
  splitter: Splitter or str
    the splitter to use for splitting the data into training, validation, and
    test sets. Alternatively you can pass one of the names from
    dc.molnet.splitters as a shortcut. If this is None, all the data
    will be included in a single dataset.
  transformers: list of TransformerGenerators or strings
    the Transformers to apply to the data. Each one is specified by a
    TransformerGenerator or, as a shortcut, one of the names from
    dc.molnet.transformers.
  reload: bool
    if True, the first call for a particular featurizer and splitter will cache
    the datasets to disk, and subsequent calls will reload the cached datasets.
  data_dir: str
    a directory to save the raw data in
  save_dir: str
    a directory to save the dataset in

  Returns
  -------
  tasks, datasets, transformers : tuple
    tasks : list
      Column names corresponding to machine learning target variables.
    datasets : tuple
      train, validation, test splits of data as
      ``deepchem.data.datasets.Dataset`` instances.
    transformers : list
      ``deepchem.trans.transformers.Transformer`` instances applied
      to dataset.

  References
  ----------
  .. [1] Zhuo, Y. et al. "Predicting the Band Gaps of Inorganic Solids by Machine Learning."
     J. Phys. Chem. Lett. (2018) DOI: 10.1021/acs.jpclett.8b00124.
  .. [2] Dunn, A. et al. "Benchmarking Materials Property Prediction Methods: The Matbench Test Set
     and Automatminer Reference Algorithm." https://arxiv.org/abs/2005.00707 (2020)

  Examples
  --------
  >>>
  >> import deepchem as dc
  >> tasks, datasets, transformers = dc.molnet.load_bandgap()
  >> train_dataset, val_dataset, test_dataset = datasets
  >> n_tasks = len(tasks)
  >> n_features = train_dataset.get_data_shape()[0]
  >> model = dc.models.MultitaskRegressor(n_tasks, n_features)

  """
  loader = _BandgapLoader(featurizer, splitter, transformers, BANDGAP_TASKS,
                          data_dir, save_dir, **kwargs)
  return loader.load_dataset('bandgap', reload)


# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/material_datasets/load_mp_formation_energy.py
# ---------------------------------------------------------------------------
"""
Calculated formation energies for inorganic crystals from Materials Project.
"""
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union

MPFORME_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/mp_formation_energy.tar.gz'
MPFORME_TASKS = ['formation_energy']


class _MPFormationLoader(_MolnetLoader):
  """Loader that downloads and featurizes the MP formation energy data."""

  def create_dataset(self) -> Dataset:
    # Same download/extract pattern as the other materials datasets.
    dataset_file = os.path.join(self.data_dir, 'mp_formation_energy.json')
    targz_file = os.path.join(self.data_dir, 'mp_formation_energy.tar.gz')
    if not os.path.exists(dataset_file):
      if not os.path.exists(targz_file):
        dc.utils.data_utils.download_url(
            url=MPFORME_URL, dest_dir=self.data_dir)
      dc.utils.data_utils.untargz_file(targz_file, self.data_dir)
    loader = dc.data.JsonLoader(
        tasks=self.tasks,
        feature_field="structure",
        label_field="formation_energy",
        featurizer=self.featurizer)
    return loader.create_dataset(dataset_file)


def load_mp_formation_energy(
    featurizer: Union[dc.feat.Featurizer, str] = dc.feat.SineCoulombMatrix(),
    splitter: Union[dc.splits.Splitter, str, None] = 'random',
    transformers: List[Union[TransformerGenerator, str]] = ['normalization'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    **kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
  """Load mp formation energy dataset.

  Contains 132752 calculated formation energies and inorganic
  crystal structures from the Materials Project database. In benchmark
  studies, random forest models achieved a mean average error of
  0.116 eV/atom during five-fold nested cross validation on this
  dataset.

  For more details on the dataset see [1]_. For more details
  on previous benchmarks for this dataset, see [2]_.

  Parameters
  ----------
  featurizer: Featurizer or str
    the featurizer to use for processing the data. Alternatively you can pass
    one of the names from dc.molnet.featurizers as a shortcut.
  splitter: Splitter or str
    the splitter to use for splitting the data into training, validation, and
    test sets. Alternatively you can pass one of the names from
    dc.molnet.splitters as a shortcut. If this is None, all the data
    will be included in a single dataset.
  transformers: list of TransformerGenerators or strings
    the Transformers to apply to the data. Each one is specified by a
    TransformerGenerator or, as a shortcut, one of the names from
    dc.molnet.transformers.
  reload: bool
    if True, the first call for a particular featurizer and splitter will cache
    the datasets to disk, and subsequent calls will reload the cached datasets.
  data_dir: str
    a directory to save the raw data in
  save_dir: str
    a directory to save the dataset in

  Returns
  -------
  tasks, datasets, transformers : tuple
    tasks : list
      Column names corresponding to machine learning target variables.
    datasets : tuple
      train, validation, test splits of data as
      ``deepchem.data.datasets.Dataset`` instances.
    transformers : list
      ``deepchem.trans.transformers.Transformer`` instances applied
      to dataset.

  References
  ----------
  .. [1] A. Jain*, S.P. Ong*, et al. (*=equal contributions) The Materials Project:
     A materials genome approach to accelerating materials innovation APL Materials,
     2013, 1(1), 011002. doi:10.1063/1.4812323 (2013).
  .. [2] Dunn, A. et al. "Benchmarking Materials Property Prediction Methods: The Matbench
     Test Set and Automatminer Reference Algorithm." https://arxiv.org/abs/2005.00707 (2020)

  Examples
  --------
  >>>
  >> import deepchem as dc
  >> tasks, datasets, transformers = dc.molnet.load_mp_formation_energy()
  >> train_dataset, val_dataset, test_dataset = datasets
  >> n_tasks = len(tasks)
  >> n_features = train_dataset.get_data_shape()[0]
  >> model = dc.models.MultitaskRegressor(n_tasks, n_features)

  """
  loader = _MPFormationLoader(featurizer, splitter, transformers, MPFORME_TASKS,
                              data_dir, save_dir, **kwargs)
  return loader.load_dataset('mp-forme', reload)
# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/material_datasets/load_mp_metallicity.py
# ---------------------------------------------------------------------------
"""
Metal vs non-metal classification for inorganic crystals from Materials Project.
"""
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union

MPMETAL_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/mp_is_metal.tar.gz'
MPMETAL_TASKS = ['is_metal']


class _MPMetallicityLoader(_MolnetLoader):
  """Loader that downloads and featurizes the MP metal/non-metal labels."""

  def create_dataset(self) -> Dataset:
    # Download the .tar.gz and extract the JSON only when missing locally.
    dataset_file = os.path.join(self.data_dir, 'mp_is_metal.json')
    targz_file = os.path.join(self.data_dir, 'mp_is_metal.tar.gz')
    if not os.path.exists(dataset_file):
      if not os.path.exists(targz_file):
        dc.utils.data_utils.download_url(
            url=MPMETAL_URL, dest_dir=self.data_dir)
      dc.utils.data_utils.untargz_file(targz_file, self.data_dir)
    loader = dc.data.JsonLoader(
        tasks=self.tasks,
        feature_field="structure",
        label_field="is_metal",
        featurizer=self.featurizer)
    return loader.create_dataset(dataset_file)


def load_mp_metallicity(
    featurizer: Union[dc.feat.Featurizer, str] = dc.feat.SineCoulombMatrix(),
    splitter: Union[dc.splits.Splitter, str, None] = 'random',
    transformers: List[Union[TransformerGenerator, str]] = ['balancing'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    **kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
  """Load mp metallicity dataset.

  Contains 106113 inorganic crystal structures from the Materials
  Project database labeled as metals or nonmetals. In benchmark
  studies, random forest models achieved a mean ROC-AUC of
  0.9 during five-fold nested cross validation on this
  dataset.

  For more details on the dataset see [1]_. For more details
  on previous benchmarks for this dataset, see [2]_.

  Parameters
  ----------
  featurizer: Featurizer or str
    the featurizer to use for processing the data. Alternatively you can pass
    one of the names from dc.molnet.featurizers as a shortcut.
  splitter: Splitter or str
    the splitter to use for splitting the data into training, validation, and
    test sets. Alternatively you can pass one of the names from
    dc.molnet.splitters as a shortcut. If this is None, all the data
    will be included in a single dataset.
  transformers: list of TransformerGenerators or strings
    the Transformers to apply to the data. Each one is specified by a
    TransformerGenerator or, as a shortcut, one of the names from
    dc.molnet.transformers.
  reload: bool
    if True, the first call for a particular featurizer and splitter will cache
    the datasets to disk, and subsequent calls will reload the cached datasets.
  data_dir: str
    a directory to save the raw data in
  save_dir: str
    a directory to save the dataset in

  Returns
  -------
  tasks, datasets, transformers : tuple
    tasks : list
      Column names corresponding to machine learning target variables.
    datasets : tuple
      train, validation, test splits of data as
      ``deepchem.data.datasets.Dataset`` instances.
    transformers : list
      ``deepchem.trans.transformers.Transformer`` instances applied
      to dataset.

  References
  ----------
  .. [1] A. Jain*, S.P. Ong*, et al. (*=equal contributions) The Materials Project:
     A materials genome approach to accelerating materials innovation APL Materials,
     2013, 1(1), 011002. doi:10.1063/1.4812323 (2013).
  .. [2] Dunn, A. et al. "Benchmarking Materials Property Prediction Methods: The Matbench
     Test Set and Automatminer Reference Algorithm." https://arxiv.org/abs/2005.00707 (2020)

  Examples
  --------
  >>>
  >> import deepchem as dc
  >> tasks, datasets, transformers = dc.molnet.load_mp_metallicity()
  >> train_dataset, val_dataset, test_dataset = datasets
  >> n_tasks = len(tasks)
  >> n_features = train_dataset.get_data_shape()[0]
  >> model = dc.models.MultitaskRegressor(n_tasks, n_features)

  """
  loader = _MPMetallicityLoader(featurizer, splitter, transformers,
                                MPMETAL_TASKS, data_dir, save_dir, **kwargs)
  return loader.load_dataset('mp-metallicity', reload)


# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/material_datasets/load_perovskite.py
# ---------------------------------------------------------------------------
"""
Perovskite crystal structures and formation energies.
"""
import os
import deepchem as dc
from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader
from deepchem.data import Dataset
from typing import List, Optional, Tuple, Union

PEROVSKITE_URL = 'https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/perovskite.tar.gz'
PEROVSKITE_TASKS = ['formation_energy']


class _PerovskiteLoader(_MolnetLoader):
  """Loader that downloads and featurizes the perovskite structures."""

  def create_dataset(self) -> Dataset:
    dataset_file = os.path.join(self.data_dir, 'perovskite.json')
    targz_file = os.path.join(self.data_dir, 'perovskite.tar.gz')
    if not os.path.exists(dataset_file):
      if not os.path.exists(targz_file):
        dc.utils.data_utils.download_url(
            url=PEROVSKITE_URL, dest_dir=self.data_dir)
      dc.utils.data_utils.untargz_file(targz_file, self.data_dir)
    loader = dc.data.JsonLoader(
        tasks=self.tasks,
        feature_field="structure",
        label_field="formation_energy",
        featurizer=self.featurizer)
    return loader.create_dataset(dataset_file)


def load_perovskite(
    featurizer: Union[dc.feat.Featurizer, str] = dc.feat.SineCoulombMatrix(),
    splitter: Union[dc.splits.Splitter, str, None] = 'random',
    transformers: List[Union[TransformerGenerator, str]] = ['normalization'],
    reload: bool = True,
    data_dir: Optional[str] = None,
    save_dir: Optional[str] = None,
    **kwargs
) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
  """Load perovskite dataset.

  Contains 18928 perovskite structures and their formation energies.
  In benchmark studies, random forest models and crystal graph
  neural networks achieved mean average error of 0.23 and 0.05 eV/atom,
  respectively, during five-fold nested cross validation on this
  dataset.

  For more details on the dataset see [1]_. For more details
  on previous benchmarks for this dataset, see [2]_.

  Parameters
  ----------
  featurizer: Featurizer or str
    the featurizer to use for processing the data. Alternatively you can pass
    one of the names from dc.molnet.featurizers as a shortcut.
  splitter: Splitter or str
    the splitter to use for splitting the data into training, validation, and
    test sets. Alternatively you can pass one of the names from
    dc.molnet.splitters as a shortcut. If this is None, all the data
    will be included in a single dataset.
  transformers: list of TransformerGenerators or strings
    the Transformers to apply to the data. Each one is specified by a
    TransformerGenerator or, as a shortcut, one of the names from
    dc.molnet.transformers.
  reload: bool
    if True, the first call for a particular featurizer and splitter will cache
    the datasets to disk, and subsequent calls will reload the cached datasets.
  data_dir: str
    a directory to save the raw data in
  save_dir: str
    a directory to save the dataset in

  Returns
  -------
  tasks, datasets, transformers : tuple
    tasks : list
      Column names corresponding to machine learning target variables.
    datasets : tuple
      train, validation, test splits of data as
      ``deepchem.data.datasets.Dataset`` instances.
    transformers : list
      ``deepchem.trans.transformers.Transformer`` instances applied
      to dataset.

  References
  ----------
  .. [1] Castelli, I. et al. "New cubic perovskites for one- and two-photon water splitting
     using the computational materials repository." Energy Environ. Sci., (2012), 5,
     9034-9043 DOI: 10.1039/C2EE22341D.
  .. [2] Dunn, A. et al. "Benchmarking Materials Property Prediction Methods:
     The Matbench Test Set and Automatminer Reference Algorithm." https://arxiv.org/abs/2005.00707 (2020)

  Examples
  --------
  >>>
  >> import deepchem as dc
  >> tasks, datasets, transformers = dc.molnet.load_perovskite()
  >> train_dataset, val_dataset, test_dataset = datasets
  >> n_tasks = len(tasks)
  >> n_features = train_dataset.get_data_shape()[0]
  >> model = dc.models.MultitaskRegressor(n_tasks, n_features)

  """
  loader = _PerovskiteLoader(featurizer, splitter, transformers,
                             PEROVSKITE_TASKS, data_dir, save_dir, **kwargs)
  return loader.load_dataset('perovskite', reload)


# ---------------------------------------------------------------------------
# deepchem/molnet/load_function/molnet_loader.py
# ---------------------------------------------------------------------------
"""
Common code for loading MoleculeNet datasets.
"""
import os
import logging
import deepchem as dc
from deepchem.data import Dataset, DiskDataset
from typing import List, Optional, Tuple, Type, Union

logger = logging.getLogger(__name__)
class TransformerGenerator(object):
  """Create Transformers for Datasets.

  When loading molnet datasets, you cannot directly pass in Transformers
  to use because many Transformers require the Dataset they will be applied to
  as a constructor argument. Instead you pass in TransformerGenerator objects
  which can create the Transformers once the Dataset is loaded.
  """

  def __init__(self, transformer_class: Type[dc.trans.Transformer], **kwargs):
    """Construct an object for creating Transformers.

    Parameters
    ----------
    transformer_class: Type[Transformer]
      the class of Transformer to create
    kwargs:
      any additional arguments are passed to the Transformer's constructor
    """
    self.transformer_class = transformer_class
    self.kwargs = kwargs

  def create_transformer(self, dataset: Dataset) -> dc.trans.Transformer:
    """Construct a Transformer for a Dataset."""
    return self.transformer_class(dataset=dataset, **self.kwargs)

  def get_directory_name(self) -> str:
    """Get a name for directories on disk describing this Transformer."""
    name = self.transformer_class.__name__
    for key, value in self.kwargs.items():
      # List-valued kwargs are skipped: they would produce unwieldy (and
      # potentially filesystem-unsafe) directory names.
      if isinstance(value, list):
        continue
      name += '_' + key + '_' + str(value)
    return name


# Registry of featurizer shortcuts accepted by loader functions (keys are the
# lowercase names users may pass instead of Featurizer instances).
featurizers = {
    'graphconv': dc.feat.ConvMolFeaturizer(),
    'weave': dc.feat.WeaveFeaturizer(),
}

# some featurizers require soft dependencies to instantiate
try:
  featurizers['ecfp'] = dc.feat.CircularFingerprint(size=1024)
except ImportError:
  pass

try:
  featurizers['raw'] = dc.feat.RawFeaturizer()
except ImportError:
  pass

try:
  featurizers['smiles2img'] = dc.feat.SmilesToImage(img_size=80, img_spec='std')
except ImportError:
  pass

try:
  featurizers['onehot'] = dc.feat.OneHotFeaturizer()
except ImportError:
  pass

# Registry of splitter shortcuts accepted by loader functions.
splitters = {
    'index': dc.splits.IndexSplitter(),
    'random': dc.splits.RandomSplitter(),
    'scaffold': dc.splits.ScaffoldSplitter(),
    'butina': dc.splits.ButinaSplitter(),
    'task': dc.splits.TaskSplitter(),
    'stratified': dc.splits.RandomStratifiedSplitter()
}

# Registry of transformer shortcuts accepted by loader functions.
transformers = {
    'balancing':
        TransformerGenerator(dc.trans.BalancingTransformer),
    'normalization':
        TransformerGenerator(dc.trans.NormalizationTransformer, transform_y=True),
    'minmax':
        TransformerGenerator(dc.trans.MinMaxTransformer, transform_y=True),
    'clipping':
        TransformerGenerator(dc.trans.ClippingTransformer, transform_y=True),
    'log':
        TransformerGenerator(dc.trans.LogTransformer, transform_y=True)
}


class _MolnetLoader(object):
  """The class provides common functionality used by many molnet loader functions.
  It is an abstract class. Subclasses implement loading of particular datasets.
  """

  def __init__(self, featurizer: Union[dc.feat.Featurizer, str],
               splitter: Union[dc.splits.Splitter, str, None],
               transformer_generators: List[Union[TransformerGenerator, str]],
               tasks: List[str], data_dir: Optional[str],
               save_dir: Optional[str], **kwargs):
    """Construct an object for loading a dataset.

    Parameters
    ----------
    featurizer: Featurizer or str
      the featurizer to use for processing the data. Alternatively you can pass
      one of the names from dc.molnet.featurizers as a shortcut.
    splitter: Splitter or str
      the splitter to use for splitting the data into training, validation, and
      test sets. Alternatively you can pass one of the names from
      dc.molnet.splitters as a shortcut. If this is None, all the data
      will be included in a single dataset.
    transformer_generators: list of TransformerGenerators or strings
      the Transformers to apply to the data. Each one is specified by a
      TransformerGenerator or, as a shortcut, one of the names from
      dc.molnet.transformers.
    tasks: List[str]
      the names of the tasks in the dataset
    data_dir: str
      a directory to save the raw data in
    save_dir: str
      a directory to save the dataset in
    """
    # Backward compatibility with the legacy 'split' keyword argument.
    if 'split' in kwargs:
      splitter = kwargs['split']
      logger.warning("'split' is deprecated. Use 'splitter' instead.")
    # Resolve string shortcuts against the registries defined above.
    if isinstance(featurizer, str):
      featurizer = featurizers[featurizer.lower()]
    if isinstance(splitter, str):
      splitter = splitters[splitter.lower()]
    if data_dir is None:
      data_dir = dc.utils.data_utils.get_data_dir()
    if save_dir is None:
      save_dir = dc.utils.data_utils.get_data_dir()
    self.featurizer = featurizer
    self.splitter = splitter
    self.transformers = [
        transformers[t.lower()] if isinstance(t, str) else t
        for t in transformer_generators
    ]
    self.tasks = list(tasks)
    self.data_dir = data_dir
    self.save_dir = save_dir
    self.args = kwargs

  def load_dataset(
      self, name: str, reload: bool
  ) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]:
    """Load the dataset.

    Parameters
    ----------
    name: str
      the name of the dataset, used to identify the directory on disk
    reload: bool
      if True, the first call for a particular featurizer and splitter will cache
      the datasets to disk, and subsequent calls will reload the cached datasets.
    """
    # Build the path to the dataset on disk. The cache path encodes the
    # featurizer, splitter, and transformer configuration so different
    # combinations do not collide.

    featurizer_name = str(self.featurizer)
    splitter_name = 'None' if self.splitter is None else str(self.splitter)
    save_folder = os.path.join(self.save_dir, name + "-featurized",
                               featurizer_name, splitter_name)
    if len(self.transformers) > 0:
      transformer_name = '_'.join(
          t.get_directory_name() for t in self.transformers)
      save_folder = os.path.join(save_folder, transformer_name)

    # Try to reload cached datasets.

    if reload:
      if self.splitter is None:
        # Unsplit datasets are cached as a single DiskDataset directory.
        if os.path.exists(save_folder):
          transformers = dc.utils.data_utils.load_transformers(save_folder)
          return self.tasks, (DiskDataset(save_folder),), transformers
      else:
        loaded, all_dataset, transformers = dc.utils.data_utils.load_dataset_from_disk(
            save_folder)
        if all_dataset is not None:
          return self.tasks, all_dataset, transformers

    # Create the dataset

    logger.info("About to featurize %s dataset." % name)
    dataset = self.create_dataset()

    # Split and transform the dataset.

    if self.splitter is None:
      transformer_dataset: Dataset = dataset
    else:
      logger.info("About to split dataset with {} splitter.".format(
          self.splitter.__class__.__name__))
      train, valid, test = self.splitter.train_valid_test_split(dataset)
      # Transformers are fit on the training split only, so statistics
      # (e.g. normalization means) do not leak from valid/test data.
      transformer_dataset = train
    transformers = [
        t.create_transformer(transformer_dataset) for t in self.transformers
    ]
    logger.info("About to transform data.")
    if self.splitter is None:
      for transformer in transformers:
        dataset = transformer.transform(dataset)
      if reload and isinstance(dataset, DiskDataset):
        dataset.move(save_folder)
        dc.utils.data_utils.save_transformers(save_folder, transformers)
      return self.tasks, (dataset,), transformers

    for transformer in transformers:
      train = transformer.transform(train)
      valid = transformer.transform(valid)
      test = transformer.transform(test)
    # Only DiskDatasets can be persisted; in-memory datasets are returned
    # without caching.
    if reload and isinstance(train, DiskDataset) and isinstance(
        valid, DiskDataset) and isinstance(test, DiskDataset):
      dc.utils.data_utils.save_dataset_to_disk(save_folder, train, valid, test,
                                               transformers)
    return self.tasks, (train, valid, test), transformers

  def create_dataset(self) -> Dataset:
    """Subclasses must implement this to load the dataset."""
    raise NotImplementedError()
""" import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -MUV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/muv.csv.gz' - - -def load_muv(featurizer='ECFP', - split='index', - reload=True, - K=4, - data_dir=None, - save_dir=None, - **kwargs): - """Load MUV datasets. Does not do train/test split""" - # Load MUV dataset - logger.info("About to load MUV dataset.") - - MUV_tasks = sorted([ - 'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548', - 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858', - 'MUV-713', 'MUV-733', 'MUV-652', 'MUV-466', 'MUV-832' - ]) - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "muv-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return MUV_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "muv.csv.gz") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=MUV_URL, dest_dir=data_dir) - - # Featurize MUV dataset - logger.info("About to featurize MUV dataset.") - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = 
deepchem.data.CSVLoader( - tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - if split == None: - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return MUV_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'task': deepchem.splits.TaskSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - if split == 'task': - fold_datasets = splitter.k_fold_split(dataset, K) - all_dataset = fold_datasets - logger.info( - "K-Fold split complete. Use the transformers for this dataset on the returned folds." - ) - return MUV_tasks, all_dataset, [] - - else: - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - all_dataset = (train, valid, test) - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return MUV_tasks, all_dataset, transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +MUV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/muv.csv.gz" +MUV_TASKS = sorted([ + 'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548', 'MUV-852', + 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', + 'MUV-652', 'MUV-466', 'MUV-832' +]) + + +class _MuvLoader(_MolnetLoader): + 
+ def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "muv.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=MUV_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_muv( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load MUV dataset + + The Maximum Unbiased Validation (MUV) group is a benchmark dataset selected + from PubChem BioAssay by applying a refined nearest neighbor analysis. + + The MUV dataset contains 17 challenging tasks for around 90 thousand + compounds and is specifically designed for validation of virtual screening + techniques. + + Scaffold splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "mol_id" - PubChem CID of the compound + - "smiles" - SMILES representation of the molecular structure + - "MUV-XXX" - Measured results (Active/Inactive) for bioassays + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. 
Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Rohrer, Sebastian G., and Knut Baumann. "Maximum unbiased validation + (MUV) data sets for virtual screening based on PubChem bioactivity data." + Journal of chemical information and modeling 49.2 (2009): 169-184. + """ + loader = _MuvLoader(featurizer, splitter, transformers, MUV_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('muv', reload) diff --git a/deepchem/molnet/load_function/nci_datasets.py b/deepchem/molnet/load_function/nci_datasets.py index ee4110b1422c9e0874abdedf6ea158b766fa2dd1..c594147e8961ee9c28e08ac0ceaa3c5be28ab7e3 100644 --- a/deepchem/molnet/load_function/nci_datasets.py +++ b/deepchem/molnet/load_function/nci_datasets.py @@ -4,119 +4,69 @@ Original Author - Bharath Ramsundar Author - Aneesh Pappu """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -NCI_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/nci_unique.csv' - - -def load_nci(featurizer='ECFP', - shard_size=1000, - split='random', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - - # Load nci dataset - logger.info("About to load NCI dataset.") - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - all_nci_tasks = [ - 'CCRF-CEM', 'HL-60(TB)', 'K-562', 'MOLT-4', 'RPMI-8226', 'SR', - 'A549/ATCC', 'EKVX', 'HOP-62', 'HOP-92', 'NCI-H226', 'NCI-H23', - 'NCI-H322M', 'NCI-H460', 'NCI-H522', 'COLO 205', 'HCC-2998', 'HCT-116', - 'HCT-15', 'HT29', 'KM12', 'SW-620', 'SF-268', 'SF-295', 
'SF-539', - 'SNB-19', 'SNB-75', 'U251', 'LOX IMVI', 'MALME-3M', 'M14', 'MDA-MB-435', - 'SK-MEL-2', 'SK-MEL-28', 'SK-MEL-5', 'UACC-257', 'UACC-62', 'IGR-OV1', - 'OVCAR-3', 'OVCAR-4', 'OVCAR-5', 'OVCAR-8', 'NCI/ADR-RES', 'SK-OV-3', - '786-0', 'A498', 'ACHN', 'CAKI-1', 'RXF 393', 'SN12C', 'TK-10', 'UO-31', - 'PC-3', 'DU-145', 'MCF7', 'MDA-MB-231/ATCC', 'MDA-MB-468', 'HS 578T', - 'BT-549', 'T-47D' - ] - - if reload: - save_folder = os.path.join(save_dir, "nci-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return all_nci_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "nci_unique.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=NCI_URL, dest_dir=data_dir) - - # Featurize nci dataset - logger.info("About to featurize nci dataset.") - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=all_nci_tasks, smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(dataset_file, shard_size=shard_size) - - if split == None: - logger.info("Split is None, about to transform data") - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset) - ] - for transformer in transformers: - dataset = 
transformer.transform(dataset) - return all_nci_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter() - } - splitter = splitters[split] - logger.info("About to split data with {} splitter.".format(splitter)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer(transform_y=True, dataset=train) - ] - - logger.info("About to transform dataset.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return all_nci_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +NCI_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/nci_unique.csv" +NCI_TASKS = [ + 'CCRF-CEM', 'HL-60(TB)', 'K-562', 'MOLT-4', 'RPMI-8226', 'SR', 'A549/ATCC', + 'EKVX', 'HOP-62', 'HOP-92', 'NCI-H226', 'NCI-H23', 'NCI-H322M', 'NCI-H460', + 'NCI-H522', 'COLO 205', 'HCC-2998', 'HCT-116', 'HCT-15', 'HT29', 'KM12', + 'SW-620', 'SF-268', 'SF-295', 'SF-539', 'SNB-19', 'SNB-75', 'U251', + 'LOX IMVI', 'MALME-3M', 'M14', 'MDA-MB-435', 'SK-MEL-2', 'SK-MEL-28', + 'SK-MEL-5', 'UACC-257', 'UACC-62', 'IGR-OV1', 'OVCAR-3', 'OVCAR-4', + 'OVCAR-5', 'OVCAR-8', 'NCI/ADR-RES', 'SK-OV-3', '786-0', 'A498', 'ACHN', + 'CAKI-1', 'RXF 393', 'SN12C', 'TK-10', 'UO-31', 'PC-3', 'DU-145', 'MCF7', + 
'MDA-MB-231/ATCC', 'MDA-MB-468', 'HS 578T', 'BT-549', 'T-47D' +] + + +class _NCILoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "nci_unique.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=NCI_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_nci( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'random', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load NCI dataset. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _NCILoader(featurizer, splitter, transformers, NCI_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('nci', reload) diff --git a/deepchem/molnet/load_function/pcba_datasets.py b/deepchem/molnet/load_function/pcba_datasets.py index 428e6064c1b5da93066c405979f4ebadf1dd3f92..952915eee57e2473a5e7a1b9ec65c17350d6ac70 100644 --- a/deepchem/molnet/load_function/pcba_datasets.py +++ b/deepchem/molnet/load_function/pcba_datasets.py @@ -2,168 +2,148 @@ PCBA dataset loader. """ import os -import logging -import deepchem -import gzip - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() - - -def load_pcba(featurizer='ECFP', - split='random', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - return load_pcba_dataset( - featurizer=featurizer, - split=split, - reload=reload, - assay_file_name="pcba.csv.gz", - data_dir=data_dir, - save_dir=save_dir, - **kwargs) - - -def load_pcba_146(featurizer='ECFP', - split='random', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - return load_pcba_dataset( - featurizer=featurizer, - split=split, - reload=reload, - assay_file_name="pcba_146.csv.gz", - data_dir=data_dir, - save_dir=save_dir, - **kwargs) - - -def load_pcba_2475(featurizer='ECFP', - split='random', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - return load_pcba_dataset( - featurizer=featurizer, - split=split, - reload=reload, - assay_file_name="pcba_2475.csv.gz", - data_dir=data_dir, - save_dir=save_dir, - **kwargs) - - -def load_pcba_dataset(featurizer='ECFP', - split='random', - reload=True, - assay_file_name="pcba.csv.gz", - data_dir=None, - save_dir=None, - **kwargs): - """Load PCBA datasets. 
Does not do train/test split""" - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, - assay_file_name.split(".")[0] + "-featurized", - featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, assay_file_name) - - if not os.path.exists(dataset_file): - deepchem.utils.download_url( - url="http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/{0}". - format(assay_file_name), - dest_dir=data_dir) - - # Featurize PCBA dataset - logger.info("About to featurize PCBA dataset.") - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - with gzip.GzipFile(dataset_file, "r") as fin: - header = fin.readline().rstrip().decode("utf-8") - columns = header.split(",") - columns.remove("mol_id") - columns.remove("smiles") - PCBA_tasks = columns - - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return PCBA_tasks, all_dataset, transformers - - loader = deepchem.data.CSVLoader( - tasks=PCBA_tasks, smiles_field="smiles", featurizer=featurizer) - - dataset = loader.featurize(dataset_file) - - if split == None: - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("Split is None, about to transform data") - for 
transformer in transformers: - dataset = transformer.transform(dataset) - - return PCBA_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset using {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform dataset.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - - return PCBA_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +PCBA_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/%s" +PCBA_TASKS = [ + 'PCBA-1030', 'PCBA-1379', 'PCBA-1452', 'PCBA-1454', 'PCBA-1457', + 'PCBA-1458', 'PCBA-1460', 'PCBA-1461', 'PCBA-1468', 'PCBA-1469', + 'PCBA-1471', 'PCBA-1479', 'PCBA-1631', 'PCBA-1634', 'PCBA-1688', + 'PCBA-1721', 'PCBA-2100', 'PCBA-2101', 'PCBA-2147', 'PCBA-2242', + 'PCBA-2326', 'PCBA-2451', 'PCBA-2517', 'PCBA-2528', 'PCBA-2546', + 'PCBA-2549', 'PCBA-2551', 'PCBA-2662', 'PCBA-2675', 'PCBA-2676', 'PCBA-411', + 'PCBA-463254', 'PCBA-485281', 'PCBA-485290', 'PCBA-485294', 'PCBA-485297', + 'PCBA-485313', 
'PCBA-485314', 'PCBA-485341', 'PCBA-485349', 'PCBA-485353', + 'PCBA-485360', 'PCBA-485364', 'PCBA-485367', 'PCBA-492947', 'PCBA-493208', + 'PCBA-504327', 'PCBA-504332', 'PCBA-504333', 'PCBA-504339', 'PCBA-504444', + 'PCBA-504466', 'PCBA-504467', 'PCBA-504706', 'PCBA-504842', 'PCBA-504845', + 'PCBA-504847', 'PCBA-504891', 'PCBA-540276', 'PCBA-540317', 'PCBA-588342', + 'PCBA-588453', 'PCBA-588456', 'PCBA-588579', 'PCBA-588590', 'PCBA-588591', + 'PCBA-588795', 'PCBA-588855', 'PCBA-602179', 'PCBA-602233', 'PCBA-602310', + 'PCBA-602313', 'PCBA-602332', 'PCBA-624170', 'PCBA-624171', 'PCBA-624173', + 'PCBA-624202', 'PCBA-624246', 'PCBA-624287', 'PCBA-624288', 'PCBA-624291', + 'PCBA-624296', 'PCBA-624297', 'PCBA-624417', 'PCBA-651635', 'PCBA-651644', + 'PCBA-651768', 'PCBA-651965', 'PCBA-652025', 'PCBA-652104', 'PCBA-652105', + 'PCBA-652106', 'PCBA-686970', 'PCBA-686978', 'PCBA-686979', 'PCBA-720504', + 'PCBA-720532', 'PCBA-720542', 'PCBA-720551', 'PCBA-720553', 'PCBA-720579', + 'PCBA-720580', 'PCBA-720707', 'PCBA-720708', 'PCBA-720709', 'PCBA-720711', + 'PCBA-743255', 'PCBA-743266', 'PCBA-875', 'PCBA-881', 'PCBA-883', + 'PCBA-884', 'PCBA-885', 'PCBA-887', 'PCBA-891', 'PCBA-899', 'PCBA-902', + 'PCBA-903', 'PCBA-904', 'PCBA-912', 'PCBA-914', 'PCBA-915', 'PCBA-924', + 'PCBA-925', 'PCBA-926', 'PCBA-927', 'PCBA-938', 'PCBA-995' +] + + +class _PCBALoader(_MolnetLoader): + + def __init__(self, assay_file_name: str, + featurizer: Union[dc.feat.Featurizer, str], + splitter: Union[dc.splits.Splitter, str, None], + transformer_generators: List[Union[TransformerGenerator, str]], + tasks: List[str], data_dir: Optional[str], + save_dir: Optional[str], **kwargs): + super(_PCBALoader, self).__init__( + featurizer, splitter, transformer_generators, tasks, data_dir, save_dir) + self.assay_file_name = assay_file_name + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, self.assay_file_name) + if not os.path.exists(dataset_file): + 
dc.utils.data_utils.download_url( + url=PCBA_URL % self.assay_file_name, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file) + + +def load_pcba( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load PCBA dataset + + PubChem BioAssay (PCBA) is a database consisting of biological activities of + small molecules generated by high-throughput screening. We use a subset of + PCBA, containing 128 bioassays measured over 400 thousand compounds, + used by previous work to benchmark machine learning methods. + + Random splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "mol_id" - PubChem CID of the compound + - "smiles" - SMILES representation of the molecular structure + - "PCBA-XXX" - Measured results (Active/Inactive) for bioassays: + search for the assay ID at + https://pubchem.ncbi.nlm.nih.gov/search/#collection=bioassays + for details + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. 
Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Wang, Yanli, et al. "PubChem's BioAssay database." + Nucleic acids research 40.D1 (2011): D400-D412. + """ + loader = _PCBALoader('pcba.csv.gz', featurizer, splitter, transformers, + PCBA_TASKS, data_dir, save_dir, **kwargs) + return loader.load_dataset('pcba', reload) + + +# def load_pcba_146(featurizer='ECFP', +# split='random', +# reload=True, +# data_dir=None, +# save_dir=None, +# **kwargs): +# return load_pcba_dataset( +# featurizer=featurizer, +# split=split, +# reload=reload, +# assay_file_name="pcba_146.csv.gz", +# data_dir=data_dir, +# save_dir=save_dir, +# **kwargs) + +# def load_pcba_2475(featurizer='ECFP', +# split='random', +# reload=True, +# data_dir=None, +# save_dir=None, +# **kwargs): +# return load_pcba_dataset( +# featurizer=featurizer, +# split=split, +# reload=reload, +# assay_file_name="pcba_2475.csv.gz", +# data_dir=data_dir, +# save_dir=save_dir, +# **kwargs) diff --git a/deepchem/molnet/load_function/pdbbind_datasets.py b/deepchem/molnet/load_function/pdbbind_datasets.py index eded4b1926cf2345a852d054e2d00f1ae84283a1..b13d09f07361a036c3c012e9a66cfdc9095ab470 100644 --- a/deepchem/molnet/load_function/pdbbind_datasets.py +++ b/deepchem/molnet/load_function/pdbbind_datasets.py @@ -11,38 +11,38 @@ import deepchem import numpy as np import pandas as pd import tarfile -from deepchem.feat import rdkit_grid_featurizer as rgf -from deepchem.feat.atomic_coordinates import ComplexNeighborListFragmentAtomicCoordinates +from deepchem.feat import RdkitGridFeaturizer +from deepchem.feat import 
ComplexNeighborListFragmentAtomicCoordinates from deepchem.feat.graph_features import AtomicConvFeaturizer logger = logging.getLogger(__name__) -DEFAULT_DATA_DIR = deepchem.utils.get_data_dir() +DEFAULT_DATA_DIR = deepchem.utils.data_utils.get_data_dir() def featurize_pdbbind(data_dir=None, feat="grid", subset="core"): """Featurizes pdbbind according to provided featurization""" tasks = ["-logKd/Ki"] - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() pdbbind_dir = os.path.join(data_dir, "pdbbind") dataset_dir = os.path.join(pdbbind_dir, "%s_%s" % (subset, feat)) if not os.path.exists(dataset_dir): - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/featurized_datasets/core_grid.tar.gz' + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/featurized_datasets/core_grid.tar.gz" ) - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/featurized_datasets/full_grid.tar.gz' + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/featurized_datasets/full_grid.tar.gz" ) - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/featurized_datasets/refined_grid.tar.gz' + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/featurized_datasets/refined_grid.tar.gz" ) if not os.path.exists(pdbbind_dir): os.system('mkdir ' + pdbbind_dir) - deepchem.utils.untargz_file( + deepchem.utils.data_utils.untargz_file( os.path.join(data_dir, 'core_grid.tar.gz'), pdbbind_dir) - deepchem.utils.untargz_file( + deepchem.utils.data_utils.untargz_file( os.path.join(data_dir, 'full_grid.tar.gz'), pdbbind_dir) - deepchem.utils.untargz_file( + deepchem.utils.data_utils.untargz_file( os.path.join(data_dir, 'refined_grid.tar.gz'), pdbbind_dir) return deepchem.data.DiskDataset(dataset_dir), tasks @@ -76,7 +76,7 @@ def 
load_pdbbind_grid(split="random", return tasks, all_dataset, transformers else: - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() if reload: save_dir = os.path.join( data_dir, "pdbbind_" + subset + "/" + featurizer + "/" + str(split)) @@ -84,13 +84,13 @@ def load_pdbbind_grid(split="random", dataset_file = os.path.join(data_dir, subset + "_smiles_labels.csv") if not os.path.exists(dataset_file): - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/' + - subset + "_smiles_labels.csv") + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/" + subset + + "_smiles_labels.csv") tasks = ["-logKd/Ki"] if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( + loaded, all_dataset, transformers = deepchem.utils.data_utils.load_dataset_from_disk( save_dir) if loaded: return tasks, all_dataset, transformers @@ -124,7 +124,6 @@ def load_pdbbind_grid(split="random", 'index': deepchem.splits.IndexSplitter(), 'random': deepchem.splits.RandomSplitter(), 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'time': deepchem.splits.TimeSplitterPDBbind(np.array(df['id'])) } splitter = splitters[split] logger.info("About to split dataset with {} splitter.".format(split)) @@ -142,8 +141,8 @@ def load_pdbbind_grid(split="random", test = transformer.transform(test) if reload: - deepchem.utils.save.save_dataset_to_disk(save_dir, train, valid, test, - transformers) + deepchem.utils.data_utils.save_dataset_to_disk(save_dir, train, valid, + test, transformers) return tasks, (train, valid, test), transformers @@ -185,7 +184,7 @@ def load_pdbbind(reload=True, pdbbind_tasks = ["-logKd/Ki"] - deepchem_dir = deepchem.utils.get_data_dir() + deepchem_dir = deepchem.utils.data_utils.get_data_dir() if data_dir == None: data_dir = DEFAULT_DATA_DIR @@ -203,7 +202,7 @@ def load_pdbbind(reload=True, if save_timestamp: save_folder = 
"%s-%s-%s" % (save_folder, time.strftime("%Y%m%d", time.localtime()), - re.search("\.(.*)", str(time.time())).group(1)) + re.search(r"\.(.*)", str(time.time())).group(1)) if reload: if not os.path.exists(save_folder): @@ -212,7 +211,7 @@ def load_pdbbind(reload=True, else: print( "\nLoading featurized and splitted dataset from:\n%s\n" % save_folder) - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( + loaded, all_dataset, transformers = deepchem.utils.data_utils.load_dataset_from_disk( save_folder) if loaded: return pdbbind_tasks, all_dataset, transformers @@ -220,15 +219,14 @@ def load_pdbbind(reload=True, dataset_file = os.path.join(data_dir, "pdbbind_v2015.tar.gz") if not os.path.exists(dataset_file): logger.warning("About to download PDBBind full dataset. Large file, 2GB") - deepchem.utils.download_url( - 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/' + - "pdbbind_v2015.tar.gz", + deepchem.utils.data_utils.download_url( + "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/pdbbind_v2015.tar.gz", dest_dir=data_dir) if os.path.exists(data_folder): logger.info("PDBBind full dataset already exists.") else: print("Untarring full dataset...") - deepchem.utils.untargz_file( + deepchem.utils.data_utils.untargz_file( dataset_file, dest_dir=os.path.join(data_dir, "pdbbind")) print("\nRaw dataset:\n%s" % data_folder) @@ -267,7 +265,7 @@ def load_pdbbind(reload=True, # Featurize Data if featurizer == "grid": - featurizer = rgf.RdkitGridFeaturizer( + featurizer = RdkitGridFeaturizer( voxel_width=2.0, feature_types=[ 'ecfp', 'splif', 'hbond', 'salt_bridge', 'pi_stack', 'cation_pi', @@ -308,8 +306,7 @@ def load_pdbbind(reload=True, print("\nFeaturizing Complexes for \"%s\" ...\n" % data_folder) feat_t1 = time.time() - features, failures = featurizer.featurize_complexes(ligand_files, - protein_files) + features, failures = featurizer.featurize(ligand_files, protein_files) feat_t2 = time.time() print("\nFeaturization 
finished, took %0.3f s." % (feat_t2 - feat_t1)) @@ -340,8 +337,8 @@ def load_pdbbind(reload=True, all_dataset = (train, valid, test) print("\nSaving dataset to \"%s\" ..." % save_folder) - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) + deepchem.utils.data_utils.save_dataset_to_disk(save_folder, train, valid, + test, transformers) return pdbbind_tasks, all_dataset, transformers @@ -414,7 +411,7 @@ def load_pdbbind_from_dir(data_folder, print(labels) # Featurize Data if featurizer == "grid": - featurizer = rgf.RdkitGridFeaturizer( + featurizer = RdkitGridFeaturizer( voxel_width=2.0, feature_types=[ 'ecfp', 'splif', 'hbond', 'salt_bridge', 'pi_stack', 'cation_pi', @@ -437,8 +434,7 @@ def load_pdbbind_from_dir(data_folder, else: raise ValueError("Featurizer not supported") print("Featurizing Complexes") - features, failures = featurizer.featurize_complexes(ligand_files, - protein_files) + features, failures = featurizer.featurize(ligand_files, protein_files) # Delete labels for failing elements labels = np.delete(labels, failures) dataset = deepchem.data.DiskDataset.from_numpy(features, labels) @@ -457,6 +453,6 @@ def load_pdbbind_from_dir(data_folder, train, valid, test = splitter.train_valid_test_split(dataset) all_dataset = (train, valid, test) if save_dir: - deepchem.utils.save.save_dataset_to_disk(save_dir, train, valid, test, - transformers) + deepchem.utils.data_utils.save_dataset_to_disk(save_dir, train, valid, test, + transformers) return pdbbind_tasks, all_dataset, transformers diff --git a/deepchem/molnet/load_function/ppb_datasets.py b/deepchem/molnet/load_function/ppb_datasets.py index 38ae43a137f31683e1e70000c676e54d225c13e7..11e91dfd042980c84b6819d465d0ecfdc30876b0 100644 --- a/deepchem/molnet/load_function/ppb_datasets.py +++ b/deepchem/molnet/load_function/ppb_datasets.py @@ -2,108 +2,59 @@ PPB dataset loader. 
""" import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -PPB_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/PPB.csv' - - -def load_ppb(featurizer='ECFP', - split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load PPB datasets.""" - # Featurize PPB dataset - logger.info("About to featurize PPB dataset.") - logger.info("About to load PPB dataset.") - - PPB_tasks = ['exp'] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "ppb-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return PPB_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "PPB.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=PPB_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=PPB_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split == None: - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset) - ] - 
- logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return PPB_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer(transform_y=True, dataset=train) - ] - - logger.info("About to transform dataset.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return PPB_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +PPB_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/PPB.csv" +PPB_TASKS = ['exp'] + + +class _PPBLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "PPB.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=PPB_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_ppb( + featurizer: 
Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load PPB datasets. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + """ + loader = _PPBLoader(featurizer, splitter, transformers, PPB_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('ppb', reload) diff --git a/deepchem/molnet/load_function/qm7_datasets.py b/deepchem/molnet/load_function/qm7_datasets.py index ab444fad145b21f51069e40c5c045c30438805b6..fb5c87501039c60f34c58eaac233fa7c509c3819 100644 --- a/deepchem/molnet/load_function/qm7_datasets.py +++ b/deepchem/molnet/load_function/qm7_datasets.py @@ -2,296 +2,94 @@ qm7 dataset loader. 
""" import os -import numpy as np -import deepchem -import scipy.io -import logging +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +QM7_MAT_UTL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm7.mat" +QM7_CSV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm7.csv" +QM7B_MAT_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm7b.mat" +GDB7_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb7.tar.gz" +QM7_TASKS = ["u0_atom"] -DEFAULT_DIR = deepchem.utils.get_data_dir() -QM7_MAT_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm7.mat' -QM7_CSV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm7.csv' -QM7B_MAT_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm7b.mat' -GDB7_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/gdb7.tar.gz' +class _QM7Loader(_MolnetLoader): -def load_qm7_from_mat(featurizer='CoulombMatrix', - split='stratified', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - - qm7_tasks = ["u0_atom"] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "qm7-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return qm7_tasks, all_dataset, transformers - - if featurizer == 
'CoulombMatrix': - dataset_file = os.path.join(data_dir, "qm7.mat") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM7_MAT_URL, dest_dir=data_dir) - - dataset = scipy.io.loadmat(dataset_file) - X = dataset['X'] - y = dataset['T'].T - w = np.ones_like(y) - dataset = deepchem.data.DiskDataset.from_numpy(X, y, w, ids=None) - elif featurizer == 'BPSymmetryFunctionInput': - dataset_file = os.path.join(data_dir, "qm7.mat") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM7_MAT_URL, dest_dir=data_dir) - dataset = scipy.io.loadmat(dataset_file) - X = np.concatenate([np.expand_dims(dataset['Z'], 2), dataset['R']], axis=2) - y = dataset['T'].reshape(-1, 1) # scipy.io.loadmat puts samples on axis 1 - w = np.ones_like(y) - dataset = deepchem.data.DiskDataset.from_numpy(X, y, w, ids=None) - else: - dataset_file = os.path.join(data_dir, "qm7.csv") + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "gdb7.sdf") if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM7_CSV_URL, dest_dir=data_dir) - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - loader = deepchem.data.CSVLoader( - tasks=qm7_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - if split == None: - raise ValueError() - else: - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'stratified': - 
deepchem.splits.SingletaskStratifiedSplitter(task_number=0) - } - - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset, move_mean=move_mean) - ] - - for transformer in transformers: - train_dataset = transformer.transform(train_dataset) - valid_dataset = transformer.transform(valid_dataset) - test_dataset = transformer.transform(test_dataset) - if reload: - deepchem.utils.save.save_dataset_to_disk( - save_folder, train_dataset, valid_dataset, test_dataset, transformers) - - return qm7_tasks, (train_dataset, valid_dataset, test_dataset), transformers - - -def load_qm7b_from_mat(featurizer='CoulombMatrix', - split='stratified', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load QM7B dataset - - QM7b is an extension for the QM7 dataset with additional properties predicted at different levels (ZINDO, SCS, PBE0, GW). In total 14 tasks are included for 7211 molecules with up to 7 heavy atoms. 
- - The dataset in .mat format(for python users, we recommend using `scipy.io.loadmat`) includes two arrays: - "X" - (7211 x 23 x 23), Coulomb matrices - "T" - (7211 x 14), properties - Atomization energies E (PBE0, unit: kcal/mol) - Excitation of maximal optimal absorption E_max (ZINDO, unit: eV) - Absorption Intensity at maximal absorption I_max (ZINDO) - Highest occupied molecular orbital HOMO (ZINDO, unit: eV) - Lowest unoccupied molecular orbital LUMO (ZINDO, unit: eV) - First excitation energy E_1st (ZINDO, unit: eV) - Ionization potential IP (ZINDO, unit: eV) - Electron affinity EA (ZINDO, unit: eV) - Highest occupied molecular orbital HOMO (PBE0, unit: eV) - Lowest unoccupied molecular orbital LUMO (PBE0, unit: eV) - Highest occupied molecular orbital HOMO (GW, unit: eV) - Lowest unoccupied molecular orbital LUMO (GW, unit: eV) - Polarizabilities α (PBE0, unit: Å^3) - Polarizabilities α (SCS, unit: Å^3) - - Reference: - Blum, Lorenz C., and Jean-Louis Reymond. "970 million druglike small molecules for virtual screening in the chemical universe database GDB-13." Journal of the American Chemical Society 131.25 (2009): 8732-8733. - Montavon, Grégoire, et al. "Machine learning of molecular electronic properties in chemical compound space." New Journal of Physics 15.9 (2013): 095003. 
- """ - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - dataset_file = os.path.join(data_dir, "qm7b.mat") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM7B_MAT_URL, dest_dir=data_dir) - dataset = scipy.io.loadmat(dataset_file) - - X = dataset['X'] - y = dataset['T'] - w = np.ones_like(y) - dataset = deepchem.data.DiskDataset.from_numpy(X, y, w, ids=None) - - if split == None: - raise ValueError() - else: - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'stratified': - deepchem.splits.SingletaskStratifiedSplitter(task_number=0) - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset, move_mean=move_mean) - ] - - for transformer in transformers: - train_dataset = transformer.transform(train_dataset) - valid_dataset = transformer.transform(valid_dataset) - test_dataset = transformer.transform(test_dataset) - - qm7_tasks = np.arange(y.shape[1]) - return qm7_tasks, (train_dataset, valid_dataset, test_dataset), transformers - - -def load_qm7(featurizer='CoulombMatrix', - split='random', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load qm7 datasets. 
+ dc.utils.data_utils.download_url(url=GDB7_URL, dest_dir=self.data_dir) + dc.utils.data_utils.untargz_file( + os.path.join(self.data_dir, "gdb7.tar.gz"), self.data_dir) + loader = dc.data.SDFLoader(tasks=self.tasks, featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_qm7( + featurizer: Union[dc.feat.Featurizer, str] = dc.feat.CoulombMatrix(23), + splitter: Union[dc.splits.Splitter, str, None] = 'random', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load QM7 dataset QM7 is a subset of GDB-13 (a database of nearly 1 billion stable and synthetically accessible organic molecules) containing up to 7 heavy atoms C, N, O, and S. The 3D Cartesian coordinates of the most stable conformations and their atomization energies were determined using ab-initio - density functional theory (PBE0/tier2 basis set).This dataset + density functional theory (PBE0/tier2 basis set). This dataset also provided Coulomb matrices as calculated in [Rupp et al. PRL, 2012]: - C_ii = 0.5 * Z^2.4 - C_ij = Z_i * Z_j/abs(R_i − R_j) - Z_i - nuclear charge of atom i - R_i - cartesian coordinates of atom i - - The data file (.mat format, we recommend using `scipy.io.loadmat` for python users to load this original data) contains five arrays: - "X" - (7165 x 23 x 23), Coulomb matrices - "T" - (7165), atomization energies (unit: kcal/mol) - "P" - (5 x 1433), cross-validation splits as used in [Montavon et al. NIPS, 2012] - "Z" - (7165 x 23), atomic charges - "R" - (7165 x 23 x 3), cartesian coordinate (unit: Bohr) of each atom in the molecules - - Reference: - Rupp, Matthias, et al. "Fast and accurate modeling of molecular atomization energies with machine learning." Physical review letters 108.5 (2012): 058301. - Montavon, Grégoire, et al. 
"Learning invariant representations of molecules for atomization energy prediction." Advances in Neural Information Processing Systems. 2012. + Stratified splitting is recommended for this dataset. + + The data file (.mat format, we recommend using `scipy.io.loadmat` + for python users to load this original data) contains five arrays: + + - "X" - (7165 x 23 x 23), Coulomb matrices + - "T" - (7165), atomization energies (unit: kcal/mol) + - "P" - (5 x 1433), cross-validation splits as used in [Montavon et al. + NIPS, 2012] + - "Z" - (7165 x 23), atomic charges + - "R" - (7165 x 23 x 3), cartesian coordinate (unit: Bohr) of each atom in + the molecules + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Rupp, Matthias, et al. "Fast and accurate modeling of molecular + atomization energies with machine learning." Physical review letters + 108.5 (2012): 058301. + .. [2] Montavon, Grégoire, et al. "Learning invariant representations of + molecules for atomization energy prediction." Advances in Neural + Information Proccessing Systems. 
2012. """ - # Featurize qm7 dataset - logger.info("About to featurize qm7 dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - dataset_file = os.path.join(data_dir, "gdb7.sdf") - - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=GDB7_URL, dest_dir=data_dir) - deepchem.utils.untargz_file(os.path.join(data_dir, 'gdb7.tar.gz'), data_dir) - - qm7_tasks = ["u0_atom"] - if featurizer == 'CoulombMatrix': - featurizer = deepchem.feat.CoulombMatrixEig(23) - loader = deepchem.data.SDFLoader( - tasks=qm7_tasks, - smiles_field="smiles", - mol_field="mol", - featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - if split == None: - raise ValueError() - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter(task_number=0) - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset, move_mean=move_mean) - ] - - for transformer in transformers: - train_dataset = transformer.transform(train_dataset) - valid_dataset = transformer.transform(valid_dataset) - test_dataset = transformer.transform(test_dataset) - - return qm7_tasks, (train_dataset, valid_dataset, test_dataset), transformers + loader = _QM7Loader(featurizer, splitter, transformers, QM7_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('qm7', reload) diff --git a/deepchem/molnet/load_function/qm8_datasets.py b/deepchem/molnet/load_function/qm8_datasets.py index 
82dd8b3d97240b554a6a85473e96d3f165290d9c..4271cea5d5c40c736e2a9a40a596fc87ae5ed2cc 100644 --- a/deepchem/molnet/load_function/qm8_datasets.py +++ b/deepchem/molnet/load_function/qm8_datasets.py @@ -2,26 +2,44 @@ qm8 dataset loader. """ import os -import deepchem -import logging +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +GDB8_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb8.tar.gz" +QM8_CSV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm8.csv" +QM8_TASKS = [ + "E1-CC2", "E2-CC2", "f1-CC2", "f2-CC2", "E1-PBE0", "E2-PBE0", "f1-PBE0", + "f2-PBE0", "E1-PBE0", "E2-PBE0", "f1-PBE0", "f2-PBE0", "E1-CAM", "E2-CAM", + "f1-CAM", "f2-CAM" +] -DEFAULT_DIR = deepchem.utils.get_data_dir() -GDB8_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/gdb8.tar.gz' -QM8_CSV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm8.csv' +class _QM8Loader(_MolnetLoader): -def load_qm8(featurizer='CoulombMatrix', - split='random', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load QM8 Datasets - - The QM8 is the dataset used in a study on modeling quantum + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "qm8.sdf") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=GDB8_URL, dest_dir=self.data_dir) + dc.utils.data_utils.untargz_file( + os.path.join(self.data_dir, "gdb8.tar.gz"), self.data_dir) + loader = dc.data.SDFLoader(tasks=self.tasks, featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_qm8( + featurizer: Union[dc.feat.Featurizer, str] = dc.feat.CoulombMatrix(26), + splitter: Union[dc.splits.Splitter, str, None] = 'random', + transformers: List[Union[TransformerGenerator, 
str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load QM8 dataset + + QM8 is the dataset used in a study on modeling quantum mechanical calculations of electronic spectra and excited state energy of small molecules. Multiple methods, including time-dependent density functional theories (TDDFT) and @@ -31,115 +49,57 @@ def load_qm8(featurizer='CoulombMatrix', there are four excited state properties calculated by four different methods on 22 thousand samples: - S_0 -> S_1 transition energy E_1 and the corresponding oscillator strength f_1 - S_0 -> S_2 transition energy E_2 and the corresponding oscillator strength f_2 - - The source data files (downloadable from moleculenet.ai): - qm8.sdf: molecular structures - qm8.sdf.csv: tables for molecular properties - Column 1: Molecule ID (gdb9 index) mapping to the .sdf file - Columns 2-5: RI-CC2/def2TZVP; E1, E2, f1, f2 in atomic units. f1, f2 in length representation - Columns 6-9: LR-TDPBE0/def2SVP; E1, E2, f1, f2 in atomic units. f1, f2 in length representation - Columns 10-13: LR-TDPBE0/def2TZVP; E1, E2, f1, f2 in atomic units. f1, f2 in length representation - Columns 14-17: LR-TDCAM-B3LYP/def2TZVP; E1, E2, f1, f2 in atomic units. f1, f2 in length representation - - Reference: - Blum, Lorenz C., and Jean-Louis Reymond. "970 million druglike small molecules for virtual screening in the chemical universe database GDB-13." Journal of the American Chemical Society 131.25 (2009): 8732-8733. - Ramakrishnan, Raghunathan, et al. "Electronic spectra from TDDFT and machine learning in chemical space." The Journal of chemical physics 143.8 (2015): 084111. + S0 -> S1 transition energy E1 and the corresponding oscillator strength f1 + + S0 -> S2 transition energy E2 and the corresponding oscillator strength f2 + + E1, E2, f1, f2 are in atomic units. 
f1, f2 are in length representation + + Random splitting is recommended for this dataset. + + The source data contain: + + - qm8.sdf: molecular structures + - qm8.sdf.csv: tables for molecular properties + + - Column 1: Molecule ID (gdb9 index) mapping to the .sdf file + - Columns 2-5: RI-CC2/def2TZVP + - Columns 6-9: LR-TDPBE0/def2SVP + - Columns 10-13: LR-TDPBE0/def2TZVP + - Columns 14-17: LR-TDCAM-B3LYP/def2TZVP + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Blum, Lorenz C., and Jean-Louis Reymond. "970 million druglike + small molecules for virtual screening in the chemical universe database + GDB-13." Journal of the American Chemical Society 131.25 (2009): + 8732-8733. + .. [2] Ramakrishnan, Raghunathan, et al. "Electronic spectra from TDDFT + and machine learning in chemical space." The Journal of chemical physics + 143.8 (2015): 084111. 
""" - qm8_tasks = [ - "E1-CC2", "E2-CC2", "f1-CC2", "f2-CC2", "E1-PBE0", "E2-PBE0", "f1-PBE0", - "f2-PBE0", "E1-PBE0", "E2-PBE0", "f1-PBE0", "f2-PBE0", "E1-CAM", "E2-CAM", - "f1-CAM", "f2-CAM" - ] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "qm8-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return qm8_tasks, all_dataset, transformers - - if featurizer in ['CoulombMatrix', 'BPSymmetryFunctionInput', 'MP', 'Raw']: - dataset_file = os.path.join(data_dir, "qm8.sdf") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=GDB8_URL, dest_dir=data_dir) - deepchem.utils.untargz_file( - os.path.join(data_dir, 'gdb8.tar.gz'), data_dir) - else: - dataset_file = os.path.join(data_dir, "qm8.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM8_CSV_URL, dest_dir=data_dir) - - if featurizer in ['CoulombMatrix', 'BPSymmetryFunctionInput', 'MP', 'Raw']: - if featurizer == 'CoulombMatrix': - featurizer = deepchem.feat.CoulombMatrix(26) - elif featurizer == 'BPSymmetryFunctionInput': - featurizer = deepchem.feat.BPSymmetryFunctionInput(26) - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'MP': - featurizer = deepchem.feat.WeaveFeaturizer( - graph_distance=False, explicit_H=True) - loader = deepchem.data.SDFLoader(tasks=qm8_tasks, featurizer=featurizer) - else: - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 
'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - loader = deepchem.data.CSVLoader( - tasks=qm8_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - if split == None: - raise ValueError() - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter(task_number=0) - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset, move_mean=move_mean) - ] - for transformer in transformers: - train_dataset = transformer.transform(train_dataset) - valid_dataset = transformer.transform(valid_dataset) - test_dataset = transformer.transform(test_dataset) - if reload: - deepchem.utils.save.save_dataset_to_disk( - save_folder, train_dataset, valid_dataset, test_dataset, transformers) - return qm8_tasks, (train_dataset, valid_dataset, test_dataset), transformers + loader = _QM8Loader(featurizer, splitter, transformers, QM8_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('qm8', reload) diff --git a/deepchem/molnet/load_function/qm9_datasets.py b/deepchem/molnet/load_function/qm9_datasets.py index 299dd0f6f7cfb56b0623e0da867e84da7e8b5bb5..f894f4addf247f883c9d312288498a994c6c5ce8 100644 --- a/deepchem/molnet/load_function/qm9_datasets.py +++ 
b/deepchem/molnet/load_function/qm9_datasets.py @@ -2,124 +2,111 @@ qm9 dataset loader. """ import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +GDB9_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb9.tar.gz" +QM9_CSV_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/qm9.csv" +QM9_TASKS = [ + "mu", "alpha", "homo", "lumo", "gap", "r2", "zpve", "cv", "u0", "u298", + "h298", "g298" +] -DEFAULT_DIR = deepchem.utils.get_data_dir() -GDB9_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/gdb9.tar.gz' -QM9_CSV_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm9.csv' +class _QM9Loader(_MolnetLoader): -def load_qm9(featurizer='CoulombMatrix', - split='random', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load qm9 datasets.""" - # Featurize qm9 dataset - logger.info("About to featurize qm9 dataset.") - qm9_tasks = [ - "mu", "alpha", "homo", "lumo", "gap", "r2", "zpve", "cv", "u0", "u298", - "h298", "g298" - ] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "gdb9.sdf") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=GDB9_URL, dest_dir=self.data_dir) + dc.utils.data_utils.untargz_file( + os.path.join(self.data_dir, "gdb9.tar.gz"), self.data_dir) + loader = dc.data.SDFLoader(tasks=self.tasks, featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) - if reload: - save_folder = os.path.join(save_dir, "qm9-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = 
os.path.join(save_folder, str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) +def load_qm9( + featurizer: Union[dc.feat.Featurizer, str] = dc.feat.CoulombMatrix(29), + splitter: Union[dc.splits.Splitter, str, None] = 'random', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load QM9 dataset - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return qm9_tasks, all_dataset, transformers + QM9 is a comprehensive dataset that provides geometric, energetic, + electronic and thermodynamic properties for a subset of GDB-17 database, + comprising 134 thousand stable organic molecules with up to 9 heavy atoms. + All molecules are modeled using density functional theory + (B3LYP/6-31G(2df,p) based DFT). - if featurizer in ['CoulombMatrix', 'BPSymmetryFunctionInput', 'MP', 'Raw']: - dataset_file = os.path.join(data_dir, "gdb9.sdf") + Random splitting is recommended for this dataset. 
- if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=GDB9_URL, dest_dir=data_dir) - deepchem.utils.untargz_file( - os.path.join(data_dir, 'gdb9.tar.gz'), data_dir) - else: - dataset_file = os.path.join(data_dir, "qm9.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=QM9_CSV_URL, dest_dir=data_dir) + The source data contain: - if featurizer in ['CoulombMatrix', 'BPSymmetryFunctionInput', 'MP', 'Raw']: - if featurizer == 'CoulombMatrix': - featurizer = deepchem.feat.CoulombMatrix(29) - elif featurizer == 'BPSymmetryFunctionInput': - featurizer = deepchem.feat.BPSymmetryFunctionInput(29) - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'MP': - featurizer = deepchem.feat.WeaveFeaturizer( - graph_distance=False, explicit_H=True) - loader = deepchem.data.SDFLoader( - tasks=qm9_tasks, - smiles_field="smiles", - mol_field="mol", - featurizer=featurizer) - else: - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - loader = deepchem.data.CSVLoader( - tasks=qm9_tasks, smiles_field="smiles", featurizer=featurizer) + - qm9.sdf: molecular structures + - qm9.sdf.csv: tables for molecular properties - dataset = loader.featurize(dataset_file) - if split == None: - raise ValueError() + - "mol_id" - Molecule ID (gdb9 index) mapping to the .sdf file + - "A" - Rotational constant (unit: GHz) + - "B" - Rotational constant (unit: GHz) + - "C" - Rotational constant (unit: GHz) + - "mu" - Dipole moment (unit: D) + - "alpha" - Isotropic polarizability (unit: Bohr^3) + - "homo" - Highest occupied molecular 
orbital energy (unit: Hartree) + - "lumo" - Lowest unoccupied molecular orbital energy (unit: Hartree) + - "gap" - Gap between HOMO and LUMO (unit: Hartree) + - "r2" - Electronic spatial extent (unit: Bohr^2) + - "zpve" - Zero point vibrational energy (unit: Hartree) + - "u0" - Internal energy at 0K (unit: Hartree) + - "u298" - Internal energy at 298.15K (unit: Hartree) + - "h298" - Enthalpy at 298.15K (unit: Hartree) + - "g298" - Free energy at 298.15K (unit: Hartree) + - "cv" - Heat capacity at 298.15K (unit: cal/(mol*K)) + - "u0_atom" - Atomization energy at 0K (unit: kcal/mol) + - "u298_atom" - Atomization energy at 298.15K (unit: kcal/mol) + - "h298_atom" - Atomization enthalpy at 298.15K (unit: kcal/mol) + - "g298_atom" - Atomization free energy at 298.15K (unit: kcal/mol) - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter(task_number=11) - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) + "u0_atom" ~ "g298_atom" (used in MoleculeNet) are calculated from the + differences between "u0" ~ "g298" and sum of reference energies of all + atoms in the molecules, as given in + https://figshare.com/articles/Atomref%3A_Reference_thermochemical_energies_of_H%2C_C%2C_N%2C_O%2C_F_atoms./1057643 - train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train_dataset, move_mean=move_mean) - ] - for transformer in transformers: - train_dataset = transformer.transform(train_dataset) - valid_dataset = transformer.transform(valid_dataset) - test_dataset = transformer.transform(test_dataset) + Parameters + ---------- + featurizer: Featurizer or str + the 
featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in - if reload: - deepchem.utils.save.save_dataset_to_disk( - save_folder, train_dataset, valid_dataset, test_dataset, transformers) - return qm9_tasks, (train_dataset, valid_dataset, test_dataset), transformers + References + ---------- + .. [1] Blum, Lorenz C., and Jean-Louis Reymond. "970 million druglike small + molecules for virtual screening in the chemical universe database GDB-13." + Journal of the American Chemical Society 131.25 (2009): 8732-8733. + .. [2] Ramakrishnan, Raghunathan, et al. "Quantum chemistry structures and + properties of 134 kilo molecules." Scientific data 1 (2014): 140022. 
+ """ + loader = _QM9Loader(featurizer, splitter, transformers, QM9_TASKS, data_dir, + save_dir, **kwargs) + return loader.load_dataset('qm9', reload) diff --git a/deepchem/molnet/load_function/sampl_datasets.py b/deepchem/molnet/load_function/sampl_datasets.py index 1b93848491f4f6e7c7be6f65e745a6b6061eb597..90e8574cbedb087ee0af6a17958d80d6029c5054 100644 --- a/deepchem/molnet/load_function/sampl_datasets.py +++ b/deepchem/molnet/load_function/sampl_datasets.py @@ -2,118 +2,81 @@ SAMPL dataset loader. """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -SAMPL_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/SAMPL.csv' -DEFAULT_DIR = deepchem.utils.get_data_dir() - - -def load_sampl(featurizer='ECFP', - split='index', - reload=True, - move_mean=True, - data_dir=None, - save_dir=None, - **kwargs): - """Load SAMPL datasets.""" - # Featurize SAMPL dataset - logger.info("About to featurize SAMPL dataset.") - logger.info("About to load SAMPL dataset.") - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "sampl-featurized") - if not move_mean: - save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") - else: - save_folder = os.path.join(save_folder, str(featurizer)) - - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, "SAMPL.csv") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=SAMPL_URL, dest_dir=data_dir) - - SAMPL_tasks = ['expt'] - - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return SAMPL_tasks, all_dataset, transformers - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif 
featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'smiles2img': - img_size = kwargs.get("img_size", 80) - img_spec = kwargs.get("img_spec", "std") - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=SAMPL_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split == None: - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=dataset, move_mean=move_mean) - ] - - logger.info("Split is None, about to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - return SAMPL_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter(task_number=0) - } - - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - train, valid, test = splitter.train_valid_test_split(dataset) - - transformers = [ - deepchem.trans.NormalizationTransformer( - transform_y=True, dataset=train, move_mean=move_mean) - ] - - logger.info("About to transform dataset.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - 
deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return SAMPL_tasks, (train, valid, test), transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +SAMPL_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/SAMPL.csv" +SAMPL_TASKS = ['expt'] + + +class _SAMPLLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "SAMPL.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=SAMPL_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_sampl( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load SAMPL(FreeSolv) dataset + + The Free Solvation Database, FreeSolv(SAMPL), provides experimental and + calculated hydration free energy of small molecules in water. The calculated + values are derived from alchemical free energy calculations using molecular + dynamics simulations. The experimental values are included in the benchmark + collection. + + Random splitting is recommended for this dataset. 
+ + The raw data csv file contains columns below: + + - "iupac" - IUPAC name of the compound + - "smiles" - SMILES representation of the molecular structure + - "expt" - Measured solvation energy (unit: kcal/mol) of the compound, + used as label + - "calc" - Calculated solvation energy (unit: kcal/mol) of the compound + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Mobley, David L., and J. Peter Guthrie. "FreeSolv: a database of + experimental and calculated hydration free energies, with input files." + Journal of computer-aided molecular design 28.7 (2014): 711-720. 
+ """ + loader = _SAMPLLoader(featurizer, splitter, transformers, SAMPL_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('sampl', reload) diff --git a/deepchem/molnet/load_function/sider_datasets.py b/deepchem/molnet/load_function/sider_datasets.py index 09f95d1395c9199c992d596bf38ba475fe2c25ab..82c40d1a00f65d592c741af4222261b80285caf7 100644 --- a/deepchem/molnet/load_function/sider_datasets.py +++ b/deepchem/molnet/load_function/sider_datasets.py @@ -2,23 +2,53 @@ SIDER dataset loader. """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = deepchem.utils.get_data_dir() -SIDER_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/sider.csv.gz' - - -def load_sider(featurizer='ECFP', - split='index', - reload=True, - K=4, - data_dir=None, - save_dir=None, - **kwargs): - """Load SIDER datasets +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +SIDER_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/sider.csv.gz" +SIDER_TASKS = [ + 'Hepatobiliary disorders', 'Metabolism and nutrition disorders', + 'Product issues', 'Eye disorders', 'Investigations', + 'Musculoskeletal and connective tissue disorders', + 'Gastrointestinal disorders', 'Social circumstances', + 'Immune system disorders', 'Reproductive system and breast disorders', + 'Neoplasms benign, malignant and unspecified (incl cysts and polyps)', + 'General disorders and administration site conditions', + 'Endocrine disorders', 'Surgical and medical procedures', + 'Vascular disorders', 'Blood and lymphatic system disorders', + 'Skin and subcutaneous tissue disorders', + 'Congenital, familial and genetic disorders', 'Infections and infestations', + 'Respiratory, thoracic and mediastinal disorders', 'Psychiatric disorders', + 'Renal and urinary disorders', + 'Pregnancy, 
puerperium and perinatal conditions', + 'Ear and labyrinth disorders', 'Cardiac disorders', + 'Nervous system disorders', 'Injury, poisoning and procedural complications' +] + + +class _SiderLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "sider.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=SIDER_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_sider( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load SIDER dataset The Side Effect Resource (SIDER) is a database of marketed drugs and adverse drug reactions (ADR). The version of the @@ -26,106 +56,45 @@ def load_sider(featurizer='ECFP', 27 system organ classes following MedDRA classifications measured for 1427 approved drugs. - The data file contains a csv table, in which columns below - are used: - - - "smiles": SMILES representation of the molecular structure - - "Hepatobiliary disorders" ~ "Injury, poisoning and procedural complications": Recorded side effects for the drug + Random splitting is recommended for this dataset. - Please refer to http://sideeffects.embl.de/se/?page=98 for details on ADRs. + The raw data csv file contains columns below: - References: - Kuhn, Michael, et al. "The SIDER database of drugs and side effects." Nucleic acids research 44.D1 (2015): D1075-D1079. - Altae-Tran, Han, et al. "Low data drug discovery with one-shot learning." ACS central science 3.4 (2017): 283-293. 
- Medical Dictionary for Regulatory Activities. http://www.meddra.org/ + - "smiles": SMILES representation of the molecular structure + - "Hepatobiliary disorders" ~ "Injury, poisoning and procedural + complications": Recorded side effects for the drug. Please refer + to http://sideeffects.embl.de/se/?page=98 for details on ADRs. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Kuhn, Michael, et al. "The SIDER database of drugs and side effects." + Nucleic acids research 44.D1 (2015): D1075-D1079. + .. [2] Altae-Tran, Han, et al. "Low data drug discovery with one-shot + learning." ACS central science 3.4 (2017): 283-293. + .. [3] Medical Dictionary for Regulatory Activities. 
http://www.meddra.org/ """ - - logger.info("About to load SIDER dataset.") - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "sider-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, "sider.csv.gz") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=SIDER_URL, dest_dir=data_dir) - - dataset = deepchem.utils.save.load_from_disk(dataset_file) - logger.info("Columns of dataset: %s" % str(dataset.columns.values)) - logger.info("Number of examples in dataset: %s" % str(dataset.shape[0])) - SIDER_tasks = dataset.columns.values[1:].tolist() - - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return SIDER_tasks, all_dataset, transformers - - # Featurize SIDER dataset - logger.info("About to featurize SIDER dataset.") - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - logger.info("SIDER tasks: %s" % str(SIDER_tasks)) - logger.info("%d tasks in total" % len(SIDER_tasks)) - - loader = deepchem.data.CSVLoader( - tasks=SIDER_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - logger.info("%d datapoints in SIDER dataset" % len(dataset)) - - # Initialize transformers - 
transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - logger.info("About to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - if split == None: - return SIDER_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'task': deepchem.splits.TaskSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - if split == 'task': - fold_datasets = splitter.k_fold_split(dataset, K) - all_dataset = fold_datasets - else: - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - all_dataset = (train, valid, test) - return SIDER_tasks, all_dataset, transformers + loader = _SiderLoader(featurizer, splitter, transformers, SIDER_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('sider', reload) diff --git a/deepchem/molnet/load_function/sweetlead_datasets.py b/deepchem/molnet/load_function/sweetlead_datasets.py index d71351de0cde88ceda7d961538fa585635117dba..d7b2b6b4de30d7d21676da81dd8dfa4013a18faf 100644 --- a/deepchem/molnet/load_function/sweetlead_datasets.py +++ b/deepchem/molnet/load_function/sweetlead_datasets.py @@ -2,102 +2,70 @@ SWEET dataset loader. 
""" import os -import numpy as np -import shutil -import logging import deepchem as dc - -logger = logging.getLogger(__name__) - -DEFAULT_DIR = dc.utils.get_data_dir() -SWEETLEAD_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/sweet.csv.gz' - - -def load_sweet(featurizer='ECFP', - split='index', - reload=True, - frac_train=.8, - data_dir=None, - save_dir=None, - **kwargs): +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +SWEETLEAD_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/sweet.csv.gz" +SWEETLEAD_TASKS = ["task"] + + +class _SweetLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "sweet.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=SWEETLEAD_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_sweet( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: """Load sweet datasets. - - Sweetlead is a dataset of chemical structures for approved drugs, chemical isolates from traditional medicinal herbs, and regulated chemicals. Resulting structures are filtered for the active pharmaceutical ingredient, standardized, and differing formulations of the same drug were combined in the final database. - Novick, Paul A., et al. 
"SWEETLEAD: an in silico database of approved drugs, regulated chemicals, and herbal isolates for computer-aided drug discovery." PLoS One 8.11 (2013). + Sweetlead is a dataset of chemical structures for approved drugs, chemical isolates + from traditional medicinal herbs, and regulated chemicals. Resulting structures are + filtered for the active pharmaceutical ingredient, standardized, and differing + formulations of the same drug were combined in the final database. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + Novick, Paul A., et al. "SWEETLEAD: an in silico database of approved drugs, regulated + chemicals, and herbal isolates for computer-aided drug discovery." PLoS One 8.11 (2013). 
""" - # Load Sweetlead dataset - logger.info("About to load Sweetlead dataset.") - SWEET_tasks = ["task"] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "sweet-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = dc.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return SWEET_tasks, all_dataset, transformers - - # Featurize SWEET dataset - logger.info("About to featurize SWEET dataset.") - if featurizer == 'ECFP': - featurizer = dc.feat.CircularFingerprint(size=1024) - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - else: - raise ValueError("Other featurizations not supported") - - dataset_file = os.path.join(data_dir, "sweet.csv.gz") - if not os.path.exists(dataset_file): - dc.utils.download_url(SWEETLEAD_URL) - loader = dc.data.CSVLoader( - tasks=SWEET_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - logger.info("About to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - - if split == None: - return SWEET_tasks, (dataset, None, None), transformers - - splitters = { - 'index': dc.splits.IndexSplitter(), - 'random': dc.splits.RandomSplitter(), - 'scaffold': dc.splits.ScaffoldSplitter(), - 'task': dc.splits.TaskSplitter(), - 'stratified': dc.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 
0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - if reload: - dc.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - all_dataset = (train, valid, test) - - return SWEET_tasks, (train, valid, test), transformers + loader = _SweetLoader(featurizer, splitter, transformers, SWEETLEAD_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('sweet', reload) diff --git a/deepchem/molnet/load_function/tests/__init__.py b/deepchem/molnet/load_function/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/deepchem/molnet/load_function/tests/qm9.csv b/deepchem/molnet/load_function/tests/qm9.csv new file mode 100644 index 0000000000000000000000000000000000000000..5e4d7f55a45046ae3967efe78a4474c1dd645765 --- /dev/null +++ b/deepchem/molnet/load_function/tests/qm9.csv @@ -0,0 +1,11 @@ +mol_id,smiles,A,B,C,mu,alpha,homo,lumo,gap,r2,zpve,u0,u298,h298,g298,cv,u0_atom,u298_atom,h298_atom,g298_atom +gdb_1,C,157.7118,157.70997,157.70699,0.0,13.21,-0.3877,0.1171,0.5048,35.3641,0.044749000000000004,-40.47893,-40.476062,-40.475117,-40.498597,6.468999999999999,-395.99959459400003,-398.643290011,-401.01464652199996,-372.471772148 +gdb_2,N,293.60975,293.54111,191.39397,1.6256,9.46,-0.257,0.0829,0.3399,26.1563,0.034358,-56.525887,-56.523026,-56.522082,-56.544961,6.316,-276.861363363,-278.62027109,-280.399259105,-259.338802047 +gdb_3,O,799.58812,437.90385999999995,282.94545,1.8511,6.31,-0.2928,0.0687,0.3615,19.0002,0.021375,-76.404702,-76.40186700000001,-76.400922,-76.422349,6.002000000000001,-213.08762369299998,-213.97429391,-215.15965841099998,-201.407171167 
+gdb_4,C#C,0.0,35.6100361,35.6100361,0.0,16.28,-0.2845,0.0506,0.3351,59.5248,0.026841000000000004,-77.30842700000001,-77.305527,-77.304583,-77.32742900000001,8.574,-385.501996533,-387.23768642699997,-389.01604693300004,-365.800723969 +gdb_5,C#N,0.0,44.593883,44.593883,2.8937,12.99,-0.3604,0.0191,0.3796,48.7476,0.016600999999999998,-93.411888,-93.40937,-93.408425,-93.431246,6.278,-301.820533838,-302.906751917,-304.091488909,-288.720028445 +gdb_6,C=O,285.48839,38.9823,34.29892,2.1089,14.18,-0.267,-0.0406,0.2263,59.9891,0.026602999999999998,-114.48361299999999,-114.480746,-114.479802,-114.50526799999999,6.412999999999999,-358.756935444,-360.512705626,-362.29106613199997,-340.464420585 +gdb_7,CC,80.46225,19.906489999999998,19.90633,0.0,23.95,-0.3385,0.1041,0.4426,109.5031,0.074542,-79.764152,-79.760666,-79.759722,-79.787269,10.097999999999999,-670.78829573,-675.7104763259999,-679.860820852,-626.927299157 +gdb_8,CO,127.83497,24.85872,23.978720000000003,1.5258,16.97,-0.2653,0.0784,0.3437,83.794,0.051208000000000004,-115.67913600000001,-115.675816,-115.674872,-115.701876,8.751,-481.10675773699995,-484.35537183,-487.319724346,-450.124128371 +gdb_9,CC#C,160.28041000000002,8.59323,8.593210000000001,0.7156,28.78,-0.2609,0.0613,0.3222,177.1963,0.05541,-116.609549,-116.60555,-116.604606,-116.633775,12.482000000000001,-670.268090769,-673.980434013,-677.537155025,-631.346845044 +gdb_10,CC#N,159.03566999999998,9.22327,9.223239999999999,3.8266,24.45,-0.3264,0.0376,0.364,160.7223,0.045286,-132.71815,-132.714563,-132.713619,-132.742149,10.287,-589.8120243340001,-592.893721033,-595.85744604,-557.125708033 diff --git a/deepchem/molnet/load_function/tests/test_load_zinc15.py b/deepchem/molnet/load_function/tests/test_load_zinc15.py new file mode 100644 index 0000000000000000000000000000000000000000..92a8d522a93957f587a1c777935d87df4319c39b --- /dev/null +++ b/deepchem/molnet/load_function/tests/test_load_zinc15.py @@ -0,0 +1,34 @@ +""" +Tests for zinc15 loader. 
+""" + +import os +import numpy as np +from deepchem.molnet import load_zinc15 + +# def test_zinc15_loader(): +# current_dir = os.path.dirname(os.path.abspath(__file__)) +# +# tasks, datasets, transformers = load_zinc15( +# reload=False, +# data_dir=current_dir, +# splitter_kwargs={ +# 'seed': 42, +# 'frac_train': 0.6, +# 'frac_valid': 0.2, +# 'frac_test': 0.2 +# }) +# +# test_vec = np.array([ +# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, +# 0.0, -1.224744871391589, 0.0, 0.0, 0.0, 0.0, 2.0, -0.5, 0.0, 0.0, 0.0, +# 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 +# ]) +# +# train, val, test = datasets +# assert tasks == ['mwt', 'logp', 'reactive'] +# assert train.X.shape == (3, 100, 35) +# assert np.allclose(train.X[0][0], test_vec, atol=0.01) +# +# if os.path.exists(os.path.join(current_dir, 'zinc15_250K_2D.csv')): +# os.remove(os.path.join(current_dir, 'zinc15_250K_2D.csv')) diff --git a/deepchem/molnet/load_function/tests/test_qm9_loader.py b/deepchem/molnet/load_function/tests/test_qm9_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..286c6457450d169b90aa0f99b7d2cbb80452e211 --- /dev/null +++ b/deepchem/molnet/load_function/tests/test_qm9_loader.py @@ -0,0 +1,24 @@ +""" +Tests for qm9 loader. 
+""" + +import os +import numpy as np +from deepchem.molnet import load_qm9 + +# def test_qm9_loader(): +# current_dir = os.path.dirname(os.path.abspath(__file__)) +# tasks, datasets, transformers = load_qm9( +# reload=False, +# data_dir=current_dir, +# featurizer='ECFP', +# splitter_kwargs={ +# 'seed': 42, +# 'frac_train': 0.6, +# 'frac_valid': 0.2, +# 'frac_test': 0.2 +# }) +# +# assert len(tasks) == 12 +# assert tasks[0] == 'mu' +# assert datasets[0].X.shape == (8, 1024) diff --git a/deepchem/molnet/load_function/tests/zinc15_250K_2D.tar.gz b/deepchem/molnet/load_function/tests/zinc15_250K_2D.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..0177fb25b0454a498e2fa41d768efe5ecd5d84e7 Binary files /dev/null and b/deepchem/molnet/load_function/tests/zinc15_250K_2D.tar.gz differ diff --git a/deepchem/molnet/load_function/thermosol_datasets.py b/deepchem/molnet/load_function/thermosol_datasets.py index cc2b022c70a2d413e8b3861bc72365320eea239b..2858adaa958d6e45f238c8b26dd6311888d033cf 100644 --- a/deepchem/molnet/load_function/thermosol_datasets.py +++ b/deepchem/molnet/load_function/thermosol_datasets.py @@ -2,130 +2,60 @@ Thermodynamic Solubility Dataset Loader """ import os -import logging -import deepchem -import numpy as np - -logger = logging.getLogger(__name__) - -THERMOSOL_URL = "http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/thermosol.csv" -DEFAULT_DATA_DIR = deepchem.utils.get_data_dir() - - -def remove_missing_entries(dataset): - """Remove missing entries. - - Some of the datasets have missing entries that sneak in as zero'd out - feature vectors. Get rid of them. 
+import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +THERMOSOL_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/thermosol.csv" +THERMOSOL_TASKS = ["target"] #Task is solubility in pH 7.4 buffer + + +class _ThermosolLoader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "thermosol.csv") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url( + url=THERMOSOL_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smile", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_thermosol( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = [], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Loads the thermodynamic solubility datasets. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. 
+ reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in """ - for i, (X, y, w, ids) in enumerate(dataset.itershards()): - available_rows = X.any(axis=1) - logger.info("Shard %d has %d missing entries." % - (i, np.count_nonzero(~available_rows))) - X = X[available_rows] - y = y[available_rows] - w = w[available_rows] - ids = ids[available_rows] - dataset.set_shard(i, X, y, w, ids) - - -def load_thermosol(featurizer="ECFP", - data_dir=None, - save_dir=None, - split=None, - split_seed=None, - reload=True): - """Loads the thermodynamic solubility datasets.""" - # Featurizer thermosol dataset - logger.info("About to featurize thermosol dataset...") - thermosol_tasks = ["target"] #Task is solubility in pH 7.4 buffer - - if data_dir is None: - data_dir = DEFAULT_DATA_DIR - if save_dir is None: - save_dir = DEFAULT_DATA_DIR - - if reload: - save_folder = os.path.join(save_dir, "thermosol-featurized", featurizer) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return thermosol_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "thermosol.csv") - if not os.path.exists(dataset_file): - logger.info("{} does not exist. 
Downloading it.".format(dataset_file)) - deepchem.utils.download_url(url=THERMOSOL_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'AdjacencyConv': - featurizer = deepchem.feat.AdjacencyFingerprint( - max_n_atoms=150, max_valence=6) - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - logger.info("Featurizing datasets.") - loader = deepchem.data.CSVLoader( - tasks=thermosol_tasks, smiles_field='smile', featurizer=featurizer) - dataset = loader.featurize(input_files=[dataset_file], shard_size=2000) - - logger.info("Removing missing entries...") - remove_missing_entries(dataset) - - if split == None: - logger.info("About to transform the data...") - transformers = [] - for transformer in transformers: - logger.info("Transforming the dataset with transformer ", - transformer.__class__.__name__) - dataset = transformer.transform(dataset) - return thermosol_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'butina': deepchem.splits.ButinaSplitter(), - 'stratified': deepchem.splits.SingletaskStratifiedSplitter() - } - splitter = splitters[split] - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test, - seed=split_seed) - transformers = [] - - 
logger.info("About to transform the data...") - for transformer in transformers: - logger.info("Transforming the data with transformer ", - transformer.__class__.__name__) - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - logger.info("Saving file to {}.".format(save_folder)) - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return thermosol_tasks, (train, valid, test), transformers + loader = _ThermosolLoader(featurizer, splitter, transformers, THERMOSOL_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('thermosol', reload) diff --git a/deepchem/molnet/load_function/tox21_datasets.py b/deepchem/molnet/load_function/tox21_datasets.py index 08cab09b3c7ce7801e75f6eae452c74a4b3a08dc..33210ea5766fb7cdf9e920e1b246a3189fdfaafc 100644 --- a/deepchem/molnet/load_function/tox21_datasets.py +++ b/deepchem/molnet/load_function/tox21_datasets.py @@ -2,119 +2,82 @@ Tox21 dataset loader. """ import os -import logging -import deepchem - -logger = logging.getLogger(__name__) - -TOX21_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/tox21.csv.gz' -DEFAULT_DIR = deepchem.utils.get_data_dir() - - -def load_tox21(featurizer='ECFP', - split='index', - reload=True, - K=4, - data_dir=None, - save_dir=None, - **kwargs): - """Load Tox21 datasets. 
Does not do train/test split""" - # Featurize Tox21 dataset - - tox21_tasks = [ - 'NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', - 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53' - ] - - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "tox21-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return tox21_tasks, all_dataset, transformers - - dataset_file = os.path.join(data_dir, "tox21.csv.gz") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=TOX21_URL, dest_dir=data_dir) - - if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == 'AdjacencyConv': - featurizer = deepchem.feat.AdjacencyFingerprint( - max_n_atoms=150, max_valence=6) - elif featurizer == "smiles2img": - img_size = kwargs.get("img_size", 80) - img_spec = kwargs.get("img_spec", "std") - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) - - loader = deepchem.data.CSVLoader( - tasks=tox21_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file, shard_size=8192) - - if split == None: - # Initialize transformers - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - - logger.info("About to transform data") - for transformer in transformers: - dataset = transformer.transform(dataset) - 
- return tox21_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'butina': deepchem.splits.ButinaSplitter(), - 'task': deepchem.splits.TaskSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - if split == 'task': - fold_datasets = splitter.k_fold_split(dataset, K) - all_dataset = fold_datasets - else: - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - all_dataset = (train, valid, test) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform data") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - return tox21_tasks, all_dataset, transformers +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +TOX21_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/tox21.csv.gz" +TOX21_TASKS = [ + 'NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', + 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53' +] + + +class _Tox21Loader(_MolnetLoader): + + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "tox21.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=TOX21_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( 
+ tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_tox21( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load Tox21 dataset + + The "Toxicology in the 21st Century" (Tox21) initiative created a public + database measuring toxicity of compounds, which has been used in the 2014 + Tox21 Data Challenge. This dataset contains qualitative toxicity measurements + for 8k compounds on 12 different targets, including nuclear receptors and + stress response pathways. + + Random splitting is recommended for this dataset. + + The raw data csv file contains columns below: + + - "smiles" - SMILES representation of the molecular structure + - "NR-XXX" - Nuclear receptor signaling bioassays results + - "SR-XXX" - Stress response bioassays results + + please refer to https://tripod.nih.gov/tox21/challenge/data.jsp for details. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. 
+ reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + + References + ---------- + .. [1] Tox21 Challenge. https://tripod.nih.gov/tox21/challenge/ + """ + loader = _Tox21Loader(featurizer, splitter, transformers, TOX21_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('tox21', reload) diff --git a/deepchem/molnet/load_function/toxcast_datasets.py b/deepchem/molnet/load_function/toxcast_datasets.py index fe4bbb2312163afd997856de1308490450263d59..6280c652699d7b6fbcef0984c1b4e618a8e1234b 100644 --- a/deepchem/molnet/load_function/toxcast_datasets.py +++ b/deepchem/molnet/load_function/toxcast_datasets.py @@ -2,22 +2,259 @@ TOXCAST dataset loader. """ import os -import logging -import deepchem +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union -logger = logging.getLogger(__name__) +TOXCAST_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/toxcast_data.csv.gz" +TOXCAST_TASKS = [ + 'ACEA_T47D_80hr_Negative', 'ACEA_T47D_80hr_Positive', + 'APR_HepG2_CellCycleArrest_24h_dn', 'APR_HepG2_CellCycleArrest_24h_up', + 'APR_HepG2_CellCycleArrest_72h_dn', 'APR_HepG2_CellLoss_24h_dn', + 'APR_HepG2_CellLoss_72h_dn', 'APR_HepG2_MicrotubuleCSK_24h_dn', + 'APR_HepG2_MicrotubuleCSK_24h_up', 'APR_HepG2_MicrotubuleCSK_72h_dn', + 'APR_HepG2_MicrotubuleCSK_72h_up', 'APR_HepG2_MitoMass_24h_dn', + 'APR_HepG2_MitoMass_24h_up', 'APR_HepG2_MitoMass_72h_dn', + 'APR_HepG2_MitoMass_72h_up', 'APR_HepG2_MitoMembPot_1h_dn', + 'APR_HepG2_MitoMembPot_24h_dn', 'APR_HepG2_MitoMembPot_72h_dn', + 'APR_HepG2_MitoticArrest_24h_up', 'APR_HepG2_MitoticArrest_72h_up', + 'APR_HepG2_NuclearSize_24h_dn', 
'APR_HepG2_NuclearSize_72h_dn', + 'APR_HepG2_NuclearSize_72h_up', 'APR_HepG2_OxidativeStress_24h_up', + 'APR_HepG2_OxidativeStress_72h_up', 'APR_HepG2_StressKinase_1h_up', + 'APR_HepG2_StressKinase_24h_up', 'APR_HepG2_StressKinase_72h_up', + 'APR_HepG2_p53Act_24h_up', 'APR_HepG2_p53Act_72h_up', + 'APR_Hepat_Apoptosis_24hr_up', 'APR_Hepat_Apoptosis_48hr_up', + 'APR_Hepat_CellLoss_24hr_dn', 'APR_Hepat_CellLoss_48hr_dn', + 'APR_Hepat_DNADamage_24hr_up', 'APR_Hepat_DNADamage_48hr_up', + 'APR_Hepat_DNATexture_24hr_up', 'APR_Hepat_DNATexture_48hr_up', + 'APR_Hepat_MitoFxnI_1hr_dn', 'APR_Hepat_MitoFxnI_24hr_dn', + 'APR_Hepat_MitoFxnI_48hr_dn', 'APR_Hepat_NuclearSize_24hr_dn', + 'APR_Hepat_NuclearSize_48hr_dn', 'APR_Hepat_Steatosis_24hr_up', + 'APR_Hepat_Steatosis_48hr_up', 'ATG_AP_1_CIS_dn', 'ATG_AP_1_CIS_up', + 'ATG_AP_2_CIS_dn', 'ATG_AP_2_CIS_up', 'ATG_AR_TRANS_dn', 'ATG_AR_TRANS_up', + 'ATG_Ahr_CIS_dn', 'ATG_Ahr_CIS_up', 'ATG_BRE_CIS_dn', 'ATG_BRE_CIS_up', + 'ATG_CAR_TRANS_dn', 'ATG_CAR_TRANS_up', 'ATG_CMV_CIS_dn', 'ATG_CMV_CIS_up', + 'ATG_CRE_CIS_dn', 'ATG_CRE_CIS_up', 'ATG_C_EBP_CIS_dn', 'ATG_C_EBP_CIS_up', + 'ATG_DR4_LXR_CIS_dn', 'ATG_DR4_LXR_CIS_up', 'ATG_DR5_CIS_dn', + 'ATG_DR5_CIS_up', 'ATG_E2F_CIS_dn', 'ATG_E2F_CIS_up', 'ATG_EGR_CIS_up', + 'ATG_ERE_CIS_dn', 'ATG_ERE_CIS_up', 'ATG_ERRa_TRANS_dn', + 'ATG_ERRg_TRANS_dn', 'ATG_ERRg_TRANS_up', 'ATG_ERa_TRANS_up', + 'ATG_E_Box_CIS_dn', 'ATG_E_Box_CIS_up', 'ATG_Ets_CIS_dn', 'ATG_Ets_CIS_up', + 'ATG_FXR_TRANS_up', 'ATG_FoxA2_CIS_dn', 'ATG_FoxA2_CIS_up', + 'ATG_FoxO_CIS_dn', 'ATG_FoxO_CIS_up', 'ATG_GAL4_TRANS_dn', + 'ATG_GATA_CIS_dn', 'ATG_GATA_CIS_up', 'ATG_GLI_CIS_dn', 'ATG_GLI_CIS_up', + 'ATG_GRE_CIS_dn', 'ATG_GRE_CIS_up', 'ATG_GR_TRANS_dn', 'ATG_GR_TRANS_up', + 'ATG_HIF1a_CIS_dn', 'ATG_HIF1a_CIS_up', 'ATG_HNF4a_TRANS_dn', + 'ATG_HNF4a_TRANS_up', 'ATG_HNF6_CIS_dn', 'ATG_HNF6_CIS_up', + 'ATG_HSE_CIS_dn', 'ATG_HSE_CIS_up', 'ATG_IR1_CIS_dn', 'ATG_IR1_CIS_up', + 'ATG_ISRE_CIS_dn', 'ATG_ISRE_CIS_up', 'ATG_LXRa_TRANS_dn', + 
'ATG_LXRa_TRANS_up', 'ATG_LXRb_TRANS_dn', 'ATG_LXRb_TRANS_up', + 'ATG_MRE_CIS_up', 'ATG_M_06_TRANS_up', 'ATG_M_19_CIS_dn', + 'ATG_M_19_TRANS_dn', 'ATG_M_19_TRANS_up', 'ATG_M_32_CIS_dn', + 'ATG_M_32_CIS_up', 'ATG_M_32_TRANS_dn', 'ATG_M_32_TRANS_up', + 'ATG_M_61_TRANS_up', 'ATG_Myb_CIS_dn', 'ATG_Myb_CIS_up', 'ATG_Myc_CIS_dn', + 'ATG_Myc_CIS_up', 'ATG_NFI_CIS_dn', 'ATG_NFI_CIS_up', 'ATG_NF_kB_CIS_dn', + 'ATG_NF_kB_CIS_up', 'ATG_NRF1_CIS_dn', 'ATG_NRF1_CIS_up', + 'ATG_NRF2_ARE_CIS_dn', 'ATG_NRF2_ARE_CIS_up', 'ATG_NURR1_TRANS_dn', + 'ATG_NURR1_TRANS_up', 'ATG_Oct_MLP_CIS_dn', 'ATG_Oct_MLP_CIS_up', + 'ATG_PBREM_CIS_dn', 'ATG_PBREM_CIS_up', 'ATG_PPARa_TRANS_dn', + 'ATG_PPARa_TRANS_up', 'ATG_PPARd_TRANS_up', 'ATG_PPARg_TRANS_up', + 'ATG_PPRE_CIS_dn', 'ATG_PPRE_CIS_up', 'ATG_PXRE_CIS_dn', 'ATG_PXRE_CIS_up', + 'ATG_PXR_TRANS_dn', 'ATG_PXR_TRANS_up', 'ATG_Pax6_CIS_up', + 'ATG_RARa_TRANS_dn', 'ATG_RARa_TRANS_up', 'ATG_RARb_TRANS_dn', + 'ATG_RARb_TRANS_up', 'ATG_RARg_TRANS_dn', 'ATG_RARg_TRANS_up', + 'ATG_RORE_CIS_dn', 'ATG_RORE_CIS_up', 'ATG_RORb_TRANS_dn', + 'ATG_RORg_TRANS_dn', 'ATG_RORg_TRANS_up', 'ATG_RXRa_TRANS_dn', + 'ATG_RXRa_TRANS_up', 'ATG_RXRb_TRANS_dn', 'ATG_RXRb_TRANS_up', + 'ATG_SREBP_CIS_dn', 'ATG_SREBP_CIS_up', 'ATG_STAT3_CIS_dn', + 'ATG_STAT3_CIS_up', 'ATG_Sox_CIS_dn', 'ATG_Sox_CIS_up', 'ATG_Sp1_CIS_dn', + 'ATG_Sp1_CIS_up', 'ATG_TAL_CIS_dn', 'ATG_TAL_CIS_up', 'ATG_TA_CIS_dn', + 'ATG_TA_CIS_up', 'ATG_TCF_b_cat_CIS_dn', 'ATG_TCF_b_cat_CIS_up', + 'ATG_TGFb_CIS_dn', 'ATG_TGFb_CIS_up', 'ATG_THRa1_TRANS_dn', + 'ATG_THRa1_TRANS_up', 'ATG_VDRE_CIS_dn', 'ATG_VDRE_CIS_up', + 'ATG_VDR_TRANS_dn', 'ATG_VDR_TRANS_up', 'ATG_XTT_Cytotoxicity_up', + 'ATG_Xbp1_CIS_dn', 'ATG_Xbp1_CIS_up', 'ATG_p53_CIS_dn', 'ATG_p53_CIS_up', + 'BSK_3C_Eselectin_down', 'BSK_3C_HLADR_down', 'BSK_3C_ICAM1_down', + 'BSK_3C_IL8_down', 'BSK_3C_MCP1_down', 'BSK_3C_MIG_down', + 'BSK_3C_Proliferation_down', 'BSK_3C_SRB_down', + 'BSK_3C_Thrombomodulin_down', 'BSK_3C_Thrombomodulin_up', + 
'BSK_3C_TissueFactor_down', 'BSK_3C_TissueFactor_up', 'BSK_3C_VCAM1_down', + 'BSK_3C_Vis_down', 'BSK_3C_uPAR_down', 'BSK_4H_Eotaxin3_down', + 'BSK_4H_MCP1_down', 'BSK_4H_Pselectin_down', 'BSK_4H_Pselectin_up', + 'BSK_4H_SRB_down', 'BSK_4H_VCAM1_down', 'BSK_4H_VEGFRII_down', + 'BSK_4H_uPAR_down', 'BSK_4H_uPAR_up', 'BSK_BE3C_HLADR_down', + 'BSK_BE3C_IL1a_down', 'BSK_BE3C_IP10_down', 'BSK_BE3C_MIG_down', + 'BSK_BE3C_MMP1_down', 'BSK_BE3C_MMP1_up', 'BSK_BE3C_PAI1_down', + 'BSK_BE3C_SRB_down', 'BSK_BE3C_TGFb1_down', 'BSK_BE3C_tPA_down', + 'BSK_BE3C_uPAR_down', 'BSK_BE3C_uPAR_up', 'BSK_BE3C_uPA_down', + 'BSK_CASM3C_HLADR_down', 'BSK_CASM3C_IL6_down', 'BSK_CASM3C_IL6_up', + 'BSK_CASM3C_IL8_down', 'BSK_CASM3C_LDLR_down', 'BSK_CASM3C_LDLR_up', + 'BSK_CASM3C_MCP1_down', 'BSK_CASM3C_MCP1_up', 'BSK_CASM3C_MCSF_down', + 'BSK_CASM3C_MCSF_up', 'BSK_CASM3C_MIG_down', + 'BSK_CASM3C_Proliferation_down', 'BSK_CASM3C_Proliferation_up', + 'BSK_CASM3C_SAA_down', 'BSK_CASM3C_SAA_up', 'BSK_CASM3C_SRB_down', + 'BSK_CASM3C_Thrombomodulin_down', 'BSK_CASM3C_Thrombomodulin_up', + 'BSK_CASM3C_TissueFactor_down', 'BSK_CASM3C_VCAM1_down', + 'BSK_CASM3C_VCAM1_up', 'BSK_CASM3C_uPAR_down', 'BSK_CASM3C_uPAR_up', + 'BSK_KF3CT_ICAM1_down', 'BSK_KF3CT_IL1a_down', 'BSK_KF3CT_IP10_down', + 'BSK_KF3CT_IP10_up', 'BSK_KF3CT_MCP1_down', 'BSK_KF3CT_MCP1_up', + 'BSK_KF3CT_MMP9_down', 'BSK_KF3CT_SRB_down', 'BSK_KF3CT_TGFb1_down', + 'BSK_KF3CT_TIMP2_down', 'BSK_KF3CT_uPA_down', 'BSK_LPS_CD40_down', + 'BSK_LPS_Eselectin_down', 'BSK_LPS_Eselectin_up', 'BSK_LPS_IL1a_down', + 'BSK_LPS_IL1a_up', 'BSK_LPS_IL8_down', 'BSK_LPS_IL8_up', + 'BSK_LPS_MCP1_down', 'BSK_LPS_MCSF_down', 'BSK_LPS_PGE2_down', + 'BSK_LPS_PGE2_up', 'BSK_LPS_SRB_down', 'BSK_LPS_TNFa_down', + 'BSK_LPS_TNFa_up', 'BSK_LPS_TissueFactor_down', 'BSK_LPS_TissueFactor_up', + 'BSK_LPS_VCAM1_down', 'BSK_SAg_CD38_down', 'BSK_SAg_CD40_down', + 'BSK_SAg_CD69_down', 'BSK_SAg_Eselectin_down', 'BSK_SAg_Eselectin_up', + 'BSK_SAg_IL8_down', 'BSK_SAg_IL8_up', 
'BSK_SAg_MCP1_down', + 'BSK_SAg_MIG_down', 'BSK_SAg_PBMCCytotoxicity_down', + 'BSK_SAg_PBMCCytotoxicity_up', 'BSK_SAg_Proliferation_down', + 'BSK_SAg_SRB_down', 'BSK_hDFCGF_CollagenIII_down', 'BSK_hDFCGF_EGFR_down', + 'BSK_hDFCGF_EGFR_up', 'BSK_hDFCGF_IL8_down', 'BSK_hDFCGF_IP10_down', + 'BSK_hDFCGF_MCSF_down', 'BSK_hDFCGF_MIG_down', 'BSK_hDFCGF_MMP1_down', + 'BSK_hDFCGF_MMP1_up', 'BSK_hDFCGF_PAI1_down', + 'BSK_hDFCGF_Proliferation_down', 'BSK_hDFCGF_SRB_down', + 'BSK_hDFCGF_TIMP1_down', 'BSK_hDFCGF_VCAM1_down', 'CEETOX_H295R_11DCORT_dn', + 'CEETOX_H295R_ANDR_dn', 'CEETOX_H295R_CORTISOL_dn', 'CEETOX_H295R_DOC_dn', + 'CEETOX_H295R_DOC_up', 'CEETOX_H295R_ESTRADIOL_dn', + 'CEETOX_H295R_ESTRADIOL_up', 'CEETOX_H295R_ESTRONE_dn', + 'CEETOX_H295R_ESTRONE_up', 'CEETOX_H295R_OHPREG_up', + 'CEETOX_H295R_OHPROG_dn', 'CEETOX_H295R_OHPROG_up', 'CEETOX_H295R_PROG_up', + 'CEETOX_H295R_TESTO_dn', 'CLD_ABCB1_48hr', 'CLD_ABCG2_48hr', + 'CLD_CYP1A1_24hr', 'CLD_CYP1A1_48hr', 'CLD_CYP1A1_6hr', 'CLD_CYP1A2_24hr', + 'CLD_CYP1A2_48hr', 'CLD_CYP1A2_6hr', 'CLD_CYP2B6_24hr', 'CLD_CYP2B6_48hr', + 'CLD_CYP2B6_6hr', 'CLD_CYP3A4_24hr', 'CLD_CYP3A4_48hr', 'CLD_CYP3A4_6hr', + 'CLD_GSTA2_48hr', 'CLD_SULT2A_24hr', 'CLD_SULT2A_48hr', 'CLD_UGT1A1_24hr', + 'CLD_UGT1A1_48hr', 'NCCT_HEK293T_CellTiterGLO', 'NCCT_QuantiLum_inhib_2_dn', + 'NCCT_QuantiLum_inhib_dn', 'NCCT_TPO_AUR_dn', 'NCCT_TPO_GUA_dn', + 'NHEERL_ZF_144hpf_TERATOSCORE_up', 'NVS_ADME_hCYP19A1', 'NVS_ADME_hCYP1A1', + 'NVS_ADME_hCYP1A2', 'NVS_ADME_hCYP2A6', 'NVS_ADME_hCYP2B6', + 'NVS_ADME_hCYP2C19', 'NVS_ADME_hCYP2C9', 'NVS_ADME_hCYP2D6', + 'NVS_ADME_hCYP3A4', 'NVS_ADME_hCYP4F12', 'NVS_ADME_rCYP2C12', + 'NVS_ENZ_hAChE', 'NVS_ENZ_hAMPKa1', 'NVS_ENZ_hAurA', 'NVS_ENZ_hBACE', + 'NVS_ENZ_hCASP5', 'NVS_ENZ_hCK1D', 'NVS_ENZ_hDUSP3', 'NVS_ENZ_hES', + 'NVS_ENZ_hElastase', 'NVS_ENZ_hFGFR1', 'NVS_ENZ_hGSK3b', 'NVS_ENZ_hMMP1', + 'NVS_ENZ_hMMP13', 'NVS_ENZ_hMMP2', 'NVS_ENZ_hMMP3', 'NVS_ENZ_hMMP7', + 'NVS_ENZ_hMMP9', 'NVS_ENZ_hPDE10', 'NVS_ENZ_hPDE4A1', 
'NVS_ENZ_hPDE5', + 'NVS_ENZ_hPI3Ka', 'NVS_ENZ_hPTEN', 'NVS_ENZ_hPTPN11', 'NVS_ENZ_hPTPN12', + 'NVS_ENZ_hPTPN13', 'NVS_ENZ_hPTPN9', 'NVS_ENZ_hPTPRC', 'NVS_ENZ_hSIRT1', + 'NVS_ENZ_hSIRT2', 'NVS_ENZ_hTrkA', 'NVS_ENZ_hVEGFR2', 'NVS_ENZ_oCOX1', + 'NVS_ENZ_oCOX2', 'NVS_ENZ_rAChE', 'NVS_ENZ_rCNOS', 'NVS_ENZ_rMAOAC', + 'NVS_ENZ_rMAOAP', 'NVS_ENZ_rMAOBC', 'NVS_ENZ_rMAOBP', 'NVS_ENZ_rabI2C', + 'NVS_GPCR_bAdoR_NonSelective', 'NVS_GPCR_bDR_NonSelective', + 'NVS_GPCR_g5HT4', 'NVS_GPCR_gH2', 'NVS_GPCR_gLTB4', 'NVS_GPCR_gLTD4', + 'NVS_GPCR_gMPeripheral_NonSelective', 'NVS_GPCR_gOpiateK', + 'NVS_GPCR_h5HT2A', 'NVS_GPCR_h5HT5A', 'NVS_GPCR_h5HT6', 'NVS_GPCR_h5HT7', + 'NVS_GPCR_hAT1', 'NVS_GPCR_hAdoRA1', 'NVS_GPCR_hAdoRA2a', + 'NVS_GPCR_hAdra2A', 'NVS_GPCR_hAdra2C', 'NVS_GPCR_hAdrb1', + 'NVS_GPCR_hAdrb2', 'NVS_GPCR_hAdrb3', 'NVS_GPCR_hDRD1', 'NVS_GPCR_hDRD2s', + 'NVS_GPCR_hDRD4.4', 'NVS_GPCR_hH1', 'NVS_GPCR_hLTB4_BLT1', 'NVS_GPCR_hM1', + 'NVS_GPCR_hM2', 'NVS_GPCR_hM3', 'NVS_GPCR_hM4', 'NVS_GPCR_hNK2', + 'NVS_GPCR_hOpiate_D1', 'NVS_GPCR_hOpiate_mu', 'NVS_GPCR_hTXA2', + 'NVS_GPCR_p5HT2C', 'NVS_GPCR_r5HT1_NonSelective', + 'NVS_GPCR_r5HT_NonSelective', 'NVS_GPCR_rAdra1B', + 'NVS_GPCR_rAdra1_NonSelective', 'NVS_GPCR_rAdra2_NonSelective', + 'NVS_GPCR_rAdrb_NonSelective', 'NVS_GPCR_rNK1', 'NVS_GPCR_rNK3', + 'NVS_GPCR_rOpiate_NonSelective', 'NVS_GPCR_rOpiate_NonSelectiveNa', + 'NVS_GPCR_rSST', 'NVS_GPCR_rTRH', 'NVS_GPCR_rV1', 'NVS_GPCR_rabPAF', + 'NVS_GPCR_rmAdra2B', 'NVS_IC_hKhERGCh', 'NVS_IC_rCaBTZCHL', + 'NVS_IC_rCaDHPRCh_L', 'NVS_IC_rNaCh_site2', 'NVS_LGIC_bGABARa1', + 'NVS_LGIC_h5HT3', 'NVS_LGIC_hNNR_NBungSens', 'NVS_LGIC_rGABAR_NonSelective', + 'NVS_LGIC_rNNR_BungSens', 'NVS_MP_hPBR', 'NVS_MP_rPBR', 'NVS_NR_bER', + 'NVS_NR_bPR', 'NVS_NR_cAR', 'NVS_NR_hAR', 'NVS_NR_hCAR_Antagonist', + 'NVS_NR_hER', 'NVS_NR_hFXR_Agonist', 'NVS_NR_hFXR_Antagonist', 'NVS_NR_hGR', + 'NVS_NR_hPPARa', 'NVS_NR_hPPARg', 'NVS_NR_hPR', 'NVS_NR_hPXR', + 'NVS_NR_hRAR_Antagonist', 'NVS_NR_hRARa_Agonist', 
'NVS_NR_hTRa_Antagonist', + 'NVS_NR_mERa', 'NVS_NR_rAR', 'NVS_NR_rMR', 'NVS_OR_gSIGMA_NonSelective', + 'NVS_TR_gDAT', 'NVS_TR_hAdoT', 'NVS_TR_hDAT', 'NVS_TR_hNET', 'NVS_TR_hSERT', + 'NVS_TR_rNET', 'NVS_TR_rSERT', 'NVS_TR_rVMAT2', 'OT_AR_ARELUC_AG_1440', + 'OT_AR_ARSRC1_0480', 'OT_AR_ARSRC1_0960', 'OT_ER_ERaERa_0480', + 'OT_ER_ERaERa_1440', 'OT_ER_ERaERb_0480', 'OT_ER_ERaERb_1440', + 'OT_ER_ERbERb_0480', 'OT_ER_ERbERb_1440', 'OT_ERa_EREGFP_0120', + 'OT_ERa_EREGFP_0480', 'OT_FXR_FXRSRC1_0480', 'OT_FXR_FXRSRC1_1440', + 'OT_NURR1_NURR1RXRa_0480', 'OT_NURR1_NURR1RXRa_1440', + 'TOX21_ARE_BLA_Agonist_ch1', 'TOX21_ARE_BLA_Agonist_ch2', + 'TOX21_ARE_BLA_agonist_ratio', 'TOX21_ARE_BLA_agonist_viability', + 'TOX21_AR_BLA_Agonist_ch1', 'TOX21_AR_BLA_Agonist_ch2', + 'TOX21_AR_BLA_Agonist_ratio', 'TOX21_AR_BLA_Antagonist_ch1', + 'TOX21_AR_BLA_Antagonist_ch2', 'TOX21_AR_BLA_Antagonist_ratio', + 'TOX21_AR_BLA_Antagonist_viability', 'TOX21_AR_LUC_MDAKB2_Agonist', + 'TOX21_AR_LUC_MDAKB2_Antagonist', 'TOX21_AR_LUC_MDAKB2_Antagonist2', + 'TOX21_AhR_LUC_Agonist', 'TOX21_Aromatase_Inhibition', + 'TOX21_AutoFluor_HEK293_Cell_blue', 'TOX21_AutoFluor_HEK293_Media_blue', + 'TOX21_AutoFluor_HEPG2_Cell_blue', 'TOX21_AutoFluor_HEPG2_Cell_green', + 'TOX21_AutoFluor_HEPG2_Media_blue', 'TOX21_AutoFluor_HEPG2_Media_green', + 'TOX21_ELG1_LUC_Agonist', 'TOX21_ERa_BLA_Agonist_ch1', + 'TOX21_ERa_BLA_Agonist_ch2', 'TOX21_ERa_BLA_Agonist_ratio', + 'TOX21_ERa_BLA_Antagonist_ch1', 'TOX21_ERa_BLA_Antagonist_ch2', + 'TOX21_ERa_BLA_Antagonist_ratio', 'TOX21_ERa_BLA_Antagonist_viability', + 'TOX21_ERa_LUC_BG1_Agonist', 'TOX21_ERa_LUC_BG1_Antagonist', + 'TOX21_ESRE_BLA_ch1', 'TOX21_ESRE_BLA_ch2', 'TOX21_ESRE_BLA_ratio', + 'TOX21_ESRE_BLA_viability', 'TOX21_FXR_BLA_Antagonist_ch1', + 'TOX21_FXR_BLA_Antagonist_ch2', 'TOX21_FXR_BLA_agonist_ch2', + 'TOX21_FXR_BLA_agonist_ratio', 'TOX21_FXR_BLA_antagonist_ratio', + 'TOX21_FXR_BLA_antagonist_viability', 'TOX21_GR_BLA_Agonist_ch1', + 'TOX21_GR_BLA_Agonist_ch2', 
'TOX21_GR_BLA_Agonist_ratio', + 'TOX21_GR_BLA_Antagonist_ch2', 'TOX21_GR_BLA_Antagonist_ratio', + 'TOX21_GR_BLA_Antagonist_viability', 'TOX21_HSE_BLA_agonist_ch1', + 'TOX21_HSE_BLA_agonist_ch2', 'TOX21_HSE_BLA_agonist_ratio', + 'TOX21_HSE_BLA_agonist_viability', 'TOX21_MMP_ratio_down', + 'TOX21_MMP_ratio_up', 'TOX21_MMP_viability', 'TOX21_NFkB_BLA_agonist_ch1', + 'TOX21_NFkB_BLA_agonist_ch2', 'TOX21_NFkB_BLA_agonist_ratio', + 'TOX21_NFkB_BLA_agonist_viability', 'TOX21_PPARd_BLA_Agonist_viability', + 'TOX21_PPARd_BLA_Antagonist_ch1', 'TOX21_PPARd_BLA_agonist_ch1', + 'TOX21_PPARd_BLA_agonist_ch2', 'TOX21_PPARd_BLA_agonist_ratio', + 'TOX21_PPARd_BLA_antagonist_ratio', 'TOX21_PPARd_BLA_antagonist_viability', + 'TOX21_PPARg_BLA_Agonist_ch1', 'TOX21_PPARg_BLA_Agonist_ch2', + 'TOX21_PPARg_BLA_Agonist_ratio', 'TOX21_PPARg_BLA_Antagonist_ch1', + 'TOX21_PPARg_BLA_antagonist_ratio', 'TOX21_PPARg_BLA_antagonist_viability', + 'TOX21_TR_LUC_GH3_Agonist', 'TOX21_TR_LUC_GH3_Antagonist', + 'TOX21_VDR_BLA_Agonist_viability', 'TOX21_VDR_BLA_Antagonist_ch1', + 'TOX21_VDR_BLA_agonist_ch2', 'TOX21_VDR_BLA_agonist_ratio', + 'TOX21_VDR_BLA_antagonist_ratio', 'TOX21_VDR_BLA_antagonist_viability', + 'TOX21_p53_BLA_p1_ch1', 'TOX21_p53_BLA_p1_ch2', 'TOX21_p53_BLA_p1_ratio', + 'TOX21_p53_BLA_p1_viability', 'TOX21_p53_BLA_p2_ch1', 'TOX21_p53_BLA_p2_ch2', + 'TOX21_p53_BLA_p2_ratio', 'TOX21_p53_BLA_p2_viability', + 'TOX21_p53_BLA_p3_ch1', 'TOX21_p53_BLA_p3_ch2', 'TOX21_p53_BLA_p3_ratio', + 'TOX21_p53_BLA_p3_viability', 'TOX21_p53_BLA_p4_ch1', 'TOX21_p53_BLA_p4_ch2', + 'TOX21_p53_BLA_p4_ratio', 'TOX21_p53_BLA_p4_viability', + 'TOX21_p53_BLA_p5_ch1', 'TOX21_p53_BLA_p5_ch2', 'TOX21_p53_BLA_p5_ratio', + 'TOX21_p53_BLA_p5_viability', 'Tanguay_ZF_120hpf_AXIS_up', + 'Tanguay_ZF_120hpf_ActivityScore', 'Tanguay_ZF_120hpf_BRAI_up', + 'Tanguay_ZF_120hpf_CFIN_up', 'Tanguay_ZF_120hpf_CIRC_up', + 'Tanguay_ZF_120hpf_EYE_up', 'Tanguay_ZF_120hpf_JAW_up', + 'Tanguay_ZF_120hpf_MORT_up', 'Tanguay_ZF_120hpf_OTIC_up', 
+ 'Tanguay_ZF_120hpf_PE_up', 'Tanguay_ZF_120hpf_PFIN_up', + 'Tanguay_ZF_120hpf_PIG_up', 'Tanguay_ZF_120hpf_SNOU_up', + 'Tanguay_ZF_120hpf_SOMI_up', 'Tanguay_ZF_120hpf_SWIM_up', + 'Tanguay_ZF_120hpf_TRUN_up', 'Tanguay_ZF_120hpf_TR_up', + 'Tanguay_ZF_120hpf_YSE_up' +] -DEFAULT_DIR = deepchem.utils.get_data_dir() -TOXCAST_URL = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/toxcast_data.csv.gz' +class _ToxcastLoader(_MolnetLoader): -def load_toxcast(featurizer='ECFP', - split='index', - reload=True, - data_dir=None, - save_dir=None, - **kwargs): - """Loads the Toxcast datasets. + def create_dataset(self) -> Dataset: + dataset_file = os.path.join(self.data_dir, "toxcast_data.csv.gz") + if not os.path.exists(dataset_file): + dc.utils.data_utils.download_url(url=TOXCAST_URL, dest_dir=self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, feature_field="smiles", featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_toxcast( + featurizer: Union[dc.feat.Featurizer, str] = 'ECFP', + splitter: Union[dc.splits.Splitter, str, None] = 'scaffold', + transformers: List[Union[TransformerGenerator, str]] = ['balancing'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load Toxcast dataset ToxCast is an extended data collection from the same initiative as Tox21, providing toxicology data for a large @@ -25,102 +262,44 @@ def load_toxcast(featurizer='ECFP', screening. The processed collection includes qualitative results of over 600 experiments on 8k compounds. + Random splitting is recommended for this dataset. - The source data file contains a csv table, in which columns - below are used: + The raw data csv file contains columns below: - "smiles": SMILES representation of the molecular structure - - "ACEA_T47D_80hr_Negative" ~ "Tanguay_ZF_120hpf_YSE_up": Bioassays results. 
Please refer to the section "high-throughput assay information" at https://www.epa.gov/chemical-research/toxicity-forecaster-toxcasttm-data for details. - - Richard, Ann M., et al. "ToxCast chemical landscape: paving the road to 21st century toxicology." Chemical research in toxicology 29.8 (2016): 1225-1251. - """ - if data_dir is None: - data_dir = DEFAULT_DIR - if save_dir is None: - save_dir = DEFAULT_DIR - - if reload: - save_folder = os.path.join(save_dir, "toxcast-featurized", str(featurizer)) - if featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - save_folder = os.path.join(save_folder, img_spec) - save_folder = os.path.join(save_folder, str(split)) - - dataset_file = os.path.join(data_dir, "toxcast_data.csv.gz") - if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=TOXCAST_URL, dest_dir=data_dir) - - dataset = deepchem.utils.save.load_from_disk(dataset_file) - logger.info("Columns of dataset: %s" % str(dataset.columns.values)) - logger.info("Number of examples in dataset: %s" % str(dataset.shape[0])) - TOXCAST_tasks = dataset.columns.values[1:].tolist() - - if reload: - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( - save_folder) - if loaded: - return TOXCAST_tasks, all_dataset, transformers - - # Featurize TOXCAST dataset - logger.info("About to featurize TOXCAST dataset.") + - "ACEA_T47D_80hr_Negative" ~ "Tanguay_ZF_120hpf_YSE_up": Bioassays results. + Please refer to the section "high-throughput assay information" at + https://www.epa.gov/chemical-research/toxicity-forecaster-toxcasttm-data + for details. 
- if featurizer == 'ECFP': - featurizer = deepchem.feat.CircularFingerprint(size=1024) - elif featurizer == 'GraphConv': - featurizer = deepchem.feat.ConvMolFeaturizer() - elif featurizer == 'Weave': - featurizer = deepchem.feat.WeaveFeaturizer() - elif featurizer == 'Raw': - featurizer = deepchem.feat.RawFeaturizer() - elif featurizer == "smiles2img": - img_spec = kwargs.get("img_spec", "std") - img_size = kwargs.get("img_size", 80) - featurizer = deepchem.feat.SmilesToImage( - img_size=img_size, img_spec=img_spec) + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. 
+ data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in - loader = deepchem.data.CSVLoader( - tasks=TOXCAST_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(dataset_file) - - if split == None: - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] - logger.info("Split is None, about to transform data.") - for transformer in transformers: - dataset = transformer.transform(dataset) - return TOXCAST_tasks, (dataset, None, None), transformers - - splitters = { - 'index': deepchem.splits.IndexSplitter(), - 'random': deepchem.splits.RandomSplitter(), - 'scaffold': deepchem.splits.ScaffoldSplitter(), - 'stratified': deepchem.splits.RandomStratifiedSplitter() - } - splitter = splitters[split] - logger.info("About to split dataset with {} splitter.".format(split)) - frac_train = kwargs.get("frac_train", 0.8) - frac_valid = kwargs.get('frac_valid', 0.1) - frac_test = kwargs.get('frac_test', 0.1) - - train, valid, test = splitter.train_valid_test_split( - dataset, - frac_train=frac_train, - frac_valid=frac_valid, - frac_test=frac_test) - - transformers = [ - deepchem.trans.BalancingTransformer(transform_w=True, dataset=train) - ] - - logger.info("About to transform dataset.") - for transformer in transformers: - train = transformer.transform(train) - valid = transformer.transform(valid) - test = transformer.transform(test) - - if reload: - deepchem.utils.save.save_dataset_to_disk(save_folder, train, valid, test, - transformers) - - return TOXCAST_tasks, (train, valid, test), transformers + References + ---------- + .. [1] Richard, Ann M., et al. "ToxCast chemical landscape: paving the road + to 21st century toxicology." Chemical research in toxicology 29.8 (2016): + 1225-1251. 
+ """ + loader = _ToxcastLoader(featurizer, splitter, transformers, TOXCAST_TASKS, + data_dir, save_dir, **kwargs) + return loader.load_dataset('toxcast', reload) diff --git a/deepchem/molnet/load_function/uspto_datasets.py b/deepchem/molnet/load_function/uspto_datasets.py index a7cb78b22f6da5183bacd241329db9dddb9ef733..0f3ba2ab142d4e6a505c049be5b769951ea0ceea 100644 --- a/deepchem/molnet/load_function/uspto_datasets.py +++ b/deepchem/molnet/load_function/uspto_datasets.py @@ -12,7 +12,7 @@ from deepchem.data import DiskDataset logger = logging.getLogger(__name__) -DEFAULT_DIR = deepchem.utils.get_data_dir() +DEFAULT_DIR = deepchem.utils.data_utils.get_data_dir() USPTO_URL = "https://bitbucket.org/dan2097/patent-reaction-extraction/downloads/2008-2011_USPTO_reactionSmiles_filtered.zip" @@ -52,7 +52,7 @@ def load_uspto(featurizer="plain", save_folder = os.path.join(save_folder, img_spec) save_folder = os.path.join(save_folder, str(split)) - loaded, all_dataset, transformers = deepchem.utils.save.load_dataset_from_disk( + loaded, all_dataset, transformers = deepchem.utils.data_utils.load_dataset_from_disk( save_folder) if loaded: return uspto_tasks, all_dataset, transformers @@ -60,12 +60,12 @@ def load_uspto(featurizer="plain", dataset_file = os.path.join(data_dir, "2008-2011_USPTO_reactionSmiles_filtered.zip") if not os.path.exists(dataset_file): - deepchem.utils.download_url(url=USPTO_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=USPTO_URL, dest_dir=data_dir) # Unzip unzip_dir = os.path.join(data_dir, "2008-2011_USPTO_reactionSmiles_filtered") if not os.path.exists(unzip_dir): - deepchem.utils.unzip_file(dataset_file, dest_dir=unzip_dir) + deepchem.utils.data_utils.unzip_file(dataset_file, dest_dir=unzip_dir) # Unzipped file is a tap seperated values file (despite the .txt) filename = os.path.join(unzip_dir, "2008-2011_USPTO_reactionSmiles_filtered.txt") diff --git a/deepchem/molnet/load_function/uv_datasets.py 
b/deepchem/molnet/load_function/uv_datasets.py index 89e2e880be8cb92bc9b252b1a9a9310f0ddd3575..ab000a3bd56f4b177ac14106ff769419b0ab3cad 100644 --- a/deepchem/molnet/load_function/uv_datasets.py +++ b/deepchem/molnet/load_function/uv_datasets.py @@ -12,9 +12,9 @@ from deepchem.molnet.load_function.uv_tasks import UV_tasks logger = logging.getLogger(__name__) -TRAIN_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/UV_training_disguised_combined_full.csv.gz' -VALID_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/UV_test1_disguised_combined_full.csv.gz' -TEST_URL = 'https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/UV_test2_disguised_combined_full.csv.gz' +TRAIN_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/UV_training_disguised_combined_full.csv.gz" +VALID_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/UV_test1_disguised_combined_full.csv.gz" +TEST_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/UV_test2_disguised_combined_full.csv.gz" TRAIN_FILENAME = "UV_training_disguised_combined_full.csv.gz" VALID_FILENAME = "UV_test1_disguised_combined_full.csv.gz" @@ -61,15 +61,15 @@ def gen_uv(UV_tasks, data_dir, train_dir, valid_dir, test_dir, shard_size=2000): if not os.path.exists(train_files): logger.info("Downloading training file...") - deepchem.utils.download_url(url=TRAIN_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TRAIN_URL, dest_dir=data_dir) logger.info("Training file download complete.") logger.info("Downloading validation file...") - deepchem.utils.download_url(url=VALID_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=VALID_URL, dest_dir=data_dir) logger.info("Validation file download complete.") logger.info("Downloading test file...") - deepchem.utils.download_url(url=TEST_URL, dest_dir=data_dir) + deepchem.utils.data_utils.download_url(url=TEST_URL, dest_dir=data_dir) logger.info("Test file download complete") # Featurizing datasets 
@@ -140,8 +140,7 @@ def load_uv(shard_size=2000, featurizer=None, split=None, reload=True): """Load UV dataset; does not do train/test split The UV dataset is an in-house dataset from Merck that was first introduced in the following paper: - -Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. + Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." Journal of chemical information and modeling 57.8 (2017): 2068-2076. The UV dataset tests 10,000 of Merck's internal compounds on 190 absorption wavelengths between 210 and 400 nm. Unlike @@ -169,7 +168,7 @@ Ramsundar, Bharath, et al. "Is multitask deep learning practical for pharma?." J Whether to automatically re-load from disk """ - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() data_dir = os.path.join(data_dir, "UV") if not os.path.exists(data_dir): diff --git a/deepchem/molnet/load_function/zinc15_datasets.py b/deepchem/molnet/load_function/zinc15_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..986c02b5724ad46483a54abd4a6aad48c52a1056 --- /dev/null +++ b/deepchem/molnet/load_function/zinc15_datasets.py @@ -0,0 +1,148 @@ +""" +ZINC15 commercially-available compounds for virtual screening. 
+""" +import os +import deepchem as dc +from deepchem.molnet.load_function.molnet_loader import TransformerGenerator, _MolnetLoader +from deepchem.data import Dataset +from typing import List, Optional, Tuple, Union + +ZINC15_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/" +ZINC15_TASKS = ['mwt', 'logp', 'reactive'] + + +class _Zinc15Loader(_MolnetLoader): + + def __init__(self, *args, dataset_size: str, dataset_dimension: str, + **kwargs): + super(_Zinc15Loader, self).__init__(*args, **kwargs) + self.dataset_size = dataset_size + self.dataset_dimension = dataset_dimension + self.name = 'zinc15_' + dataset_size + '_' + dataset_dimension + + def create_dataset(self) -> Dataset: + if self.dataset_size not in ['250K', '1M', '10M', '270M']: + raise ValueError( + "Only '250K', '1M', '10M', and '270M' are supported for dataset_size." + ) + if self.dataset_dimension != '2D': + raise ValueError( + "Currently, only '2D' is supported for dataset_dimension.") + if self.dataset_size == '270M': + answer = '' + while answer not in ['y', 'n']: + answer = input("""You're about to download 270M SMILES strings. + This dataset is 23GB. Are you sure you want to continue? 
(Y/N)""" + ).lower() + if answer == 'n': + raise ValueError('Choose a smaller dataset_size.') + filename = self.name + '.csv' + dataset_file = os.path.join(self.data_dir, filename) + if not os.path.exists(dataset_file): + compressed_file = self.name + '.tar.gz' + if not os.path.exists(compressed_file): + dc.utils.download_url( + url=ZINC15_URL + compressed_file, dest_dir=self.data_dir) + dc.utils.untargz_file( + os.path.join(self.data_dir, compressed_file), self.data_dir) + loader = dc.data.CSVLoader( + tasks=self.tasks, + feature_field="smiles", + id_field="zinc_id", + featurizer=self.featurizer) + return loader.create_dataset(dataset_file, shard_size=8192) + + +def load_zinc15( + featurizer: Union[dc.feat.Featurizer, str] = 'OneHot', + splitter: Union[dc.splits.Splitter, str, None] = 'random', + transformers: List[Union[TransformerGenerator, str]] = ['normalization'], + reload: bool = True, + data_dir: Optional[str] = None, + save_dir: Optional[str] = None, + dataset_size: str = '250K', + dataset_dimension: str = '2D', + **kwargs +) -> Tuple[List[str], Tuple[Dataset, ...], List[dc.trans.Transformer]]: + """Load zinc15. + + ZINC15 is a dataset of over 230 million purchasable compounds for + virtual screening of small molecules to identify structures that + are likely to bind to drug targets. ZINC15 data is currently available + in 2D (SMILES string) format. + + MolNet provides subsets of 250K, 1M, and 10M "lead-like" compounds + from ZINC15. The full dataset of 270M "goldilocks" compounds is also + available. Compounds in ZINC15 are labeled by their molecular weight + and LogP (solubility) values. Each compound also has information about how + readily available (purchasable) it is and its reactivity. Lead-like + compounds have molecular weight between 300 and 350 Daltons and LogP + between -1 and 3.5. Goldilocks compounds are lead-like compounds with + LogP values further restricted to between 2 and 3. 
+ + If `reload = True` and `data_dir` (`save_dir`) is specified, the loader + will attempt to load the raw dataset (featurized dataset) from disk. + Otherwise, the dataset will be downloaded from the DeepChem AWS bucket. + + For more information on ZINC15, please see [1]_ and + https://zinc15.docking.org/. + + Parameters + ---------- + featurizer: Featurizer or str + the featurizer to use for processing the data. Alternatively you can pass + one of the names from dc.molnet.featurizers as a shortcut. + splitter: Splitter or str + the splitter to use for splitting the data into training, validation, and + test sets. Alternatively you can pass one of the names from + dc.molnet.splitters as a shortcut. If this is None, all the data + will be included in a single dataset. + transformers: list of TransformerGenerators or strings + the Transformers to apply to the data. Each one is specified by a + TransformerGenerator or, as a shortcut, one of the names from + dc.molnet.transformers. + reload: bool + if True, the first call for a particular featurizer and splitter will cache + the datasets to disk, and subsequent calls will reload the cached datasets. + data_dir: str + a directory to save the raw data in + save_dir: str + a directory to save the dataset in + size : str (default '250K') + Size of dataset to download. '250K', '1M', '10M', and '270M' are supported. + format : str (default '2D') + Format of data to download. 2D SMILES strings or 3D SDF files. + + Returns + ------- + tasks, datasets, transformers : tuple + tasks : list + Column names corresponding to machine learning target variables. + datasets : tuple + train, validation, test splits of data as + ``deepchem.data.datasets.Dataset`` instances. + transformers : list + ``deepchem.trans.transformers.Transformer`` instances applied + to dataset. + + Notes + ----- + The total ZINC dataset with SMILES strings contains hundreds of millions + of compounds and is over 100GB! 
ZINC250K is recommended for experimentation. + The full set of 270M goldilocks compounds is 23GB. + + References + ---------- + .. [1] Sterling and Irwin. J. Chem. Inf. Model, 2015 http://pubs.acs.org/doi/abs/10.1021/acs.jcim.5b00559. + """ + loader = _Zinc15Loader( + featurizer, + splitter, + transformers, + ZINC15_TASKS, + data_dir, + save_dir, + dataset_size=dataset_size, + dataset_dimension=dataset_dimension, + **kwargs) + return loader.load_dataset(loader.name, reload) diff --git a/deepchem/molnet/run_benchmark.py b/deepchem/molnet/run_benchmark.py index c9cb3d7df845edf18387d2ced6e82ddbd1861607..3278e51450b51e0c062d6fbf242d2a36cf43b1f5 100644 --- a/deepchem/molnet/run_benchmark.py +++ b/deepchem/molnet/run_benchmark.py @@ -128,12 +128,9 @@ def run_benchmark(datasets, 'muv': deepchem.molnet.load_muv, 'nci': deepchem.molnet.load_nci, 'pcba': deepchem.molnet.load_pcba, - 'pcba_146': deepchem.molnet.load_pcba_146, - 'pcba_2475': deepchem.molnet.load_pcba_2475, 'pdbbind': deepchem.molnet.load_pdbbind_grid, 'ppb': deepchem.molnet.load_ppb, - 'qm7': deepchem.molnet.load_qm7_from_mat, - 'qm7b': deepchem.molnet.load_qm7b_from_mat, + 'qm7': deepchem.molnet.load_qm7, 'qm8': deepchem.molnet.load_qm8, 'qm9': deepchem.molnet.load_qm9, 'sampl': deepchem.molnet.load_sampl, @@ -284,8 +281,7 @@ def load_dataset(dataset, featurizer, split='random'): 'pcba_2475': deepchem.molnet.load_pcba_2475, 'pdbbind': deepchem.molnet.load_pdbbind_grid, 'ppb': deepchem.molnet.load_ppb, - 'qm7': deepchem.molnet.load_qm7_from_mat, - 'qm7b': deepchem.molnet.load_qm7b_from_mat, + 'qm7': deepchem.molnet.load_qm7, 'qm8': deepchem.molnet.load_qm8, 'qm9': deepchem.molnet.load_qm9, 'sampl': deepchem.molnet.load_sampl, diff --git a/deepchem/molnet/run_benchmark_models.py b/deepchem/molnet/run_benchmark_models.py index 37c80a06699c3bba3647d7582431fc04f9396425..6b86d4743124f017a8b491e46be54c25d453aa9a 100644 --- a/deepchem/molnet/run_benchmark_models.py +++ b/deepchem/molnet/run_benchmark_models.py @@ 
-145,14 +145,14 @@ def benchmark_classification(train_dataset, nb_epoch = None # Building scikit logistic regression model - def model_builder(model_dir_logreg): + def model_builder(model_dir): sklearn_model = LogisticRegression( penalty=penalty_type, C=1. / penalty, class_weight="balanced", n_jobs=-1) return deepchem.models.sklearn_models.SklearnModel( - sklearn_model, model_dir_logreg) + sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -300,11 +300,11 @@ def benchmark_classification(train_dataset, nb_epoch = None # Building scikit random forest model - def model_builder(model_dir_rf): + def model_builder(model_dir): sklearn_model = RandomForestClassifier( class_weight="balanced", n_estimators=n_estimators, n_jobs=-1) return deepchem.models.sklearn_models.SklearnModel( - sklearn_model, model_dir_rf) + sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -315,10 +315,10 @@ def benchmark_classification(train_dataset, nb_epoch = None # Building scikit learn Kernel SVM model - def model_builder(model_dir_kernelsvm): + def model_builder(model_dir): sklearn_model = SVC( C=C, gamma=gamma, class_weight="balanced", probability=True) - return deepchem.models.SklearnModel(sklearn_model, model_dir_kernelsvm) + return deepchem.models.SklearnModel(sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -344,7 +344,7 @@ def benchmark_classification(train_dataset, esr = {'early_stopping_rounds': early_stopping_rounds} # Building xgboost classification model - def model_builder(model_dir_xgb): + def model_builder(model_dir): import xgboost xgboost_model = xgboost.XGBClassifier( max_depth=max_depth, @@ -362,7 +362,7 @@ def benchmark_classification(train_dataset, base_score=base_score, seed=seed) return deepchem.models.xgboost_models.XGBoostModel( - xgboost_model, model_dir_xgb, **esr) + xgboost_model, model_dir, **esr) 
model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -673,11 +673,11 @@ def benchmark_regression(train_dataset, nb_epoch = None # Building scikit random forest model - def model_builder(model_dir_rf_regression): + def model_builder(model_dir): sklearn_model = RandomForestRegressor( n_estimators=n_estimators, n_jobs=-1) return deepchem.models.sklearn_models.SklearnModel( - sklearn_model, model_dir_rf_regression) + sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -687,9 +687,9 @@ def benchmark_regression(train_dataset, nb_epoch = None # Building scikit learn Kernel Ridge Regression model - def model_builder(model_dir_krr): + def model_builder(model_dir): sklearn_model = KernelRidge(kernel="rbf", alpha=alpha) - return deepchem.models.SklearnModel(sklearn_model, model_dir_krr) + return deepchem.models.SklearnModel(sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -704,9 +704,9 @@ def benchmark_regression(train_dataset, test_dataset = ft_transformer.transform(test_dataset) # Building scikit learn Kernel Ridge Regression model - def model_builder(model_dir_krr): + def model_builder(model_dir): sklearn_model = KernelRidge(kernel="rbf", alpha=alpha) - return deepchem.models.SklearnModel(sklearn_model, model_dir_krr) + return deepchem.models.SklearnModel(sklearn_model, model_dir) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) @@ -732,7 +732,7 @@ def benchmark_regression(train_dataset, esr = {'early_stopping_rounds': early_stopping_rounds} # Building xgboost regression model - def model_builder(model_dir_xgb): + def model_builder(model_dir): xgboost_model = xgboost.XGBRegressor( max_depth=max_depth, learning_rate=learning_rate, @@ -749,7 +749,7 @@ def benchmark_regression(train_dataset, base_score=base_score, seed=seed) return deepchem.models.xgboost_models.XGBoostModel( - xgboost_model, 
model_dir_xgb, **esr) + xgboost_model, model_dir, **esr) model = deepchem.models.multitask.SingletaskToMultitask( tasks, model_builder) diff --git a/deepchem/molnet/tests/test_defaults.py b/deepchem/molnet/tests/test_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..1e59283eb7a3471efeff233af40721d11027d6cd --- /dev/null +++ b/deepchem/molnet/tests/test_defaults.py @@ -0,0 +1,40 @@ +""" +Tests for getting featurizer, transformer, and splitter classes. +""" +import csv +import tempfile +import unittest + +import numpy as np +import os +import pytest + +import deepchem as dc +from deepchem.feat.base_classes import Featurizer +from deepchem.trans.transformers import Transformer +from deepchem.splits.splitters import Splitter +from deepchem.molnet.defaults import get_defaults + + +class TestDefaults(unittest.TestCase): + """ + Tests for getting featurizer, transformer, and splitter classes. + """ + + def test_defaults(self): + """Test getting defaults for MolNet loaders.""" + feats = get_defaults("feat") + trans = get_defaults("trans") + splits = get_defaults("splits") + + fkey = next(iter(feats)) + assert type(fkey) == str + assert issubclass(feats[fkey], Featurizer) + + tkey = next(iter(trans)) + assert type(tkey) == str + assert issubclass(trans[tkey], Transformer) + + skey = next(iter(splits)) + assert type(skey) == str + assert issubclass(splits[skey], Splitter) diff --git a/deepchem/molnet/tests/test_molnet.py b/deepchem/molnet/tests/test_molnet.py index bdfce28f95848e18bc6371caab85d3cb5da7c1e5..719f6e5fcdc3e2575fc5a7000e408b4c4f348081 100644 --- a/deepchem/molnet/tests/test_molnet.py +++ b/deepchem/molnet/tests/test_molnet.py @@ -30,18 +30,13 @@ class TestMolnet(unittest.TestCase): out_path = tempfile.mkdtemp() metric = [dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean)] dc.molnet.run_benchmark( - datasets, - str(model), - metric=metric, - split=split, - out_path=out_path, - reload=False) + datasets, str(model), metric=metric, 
split=split, out_path=out_path) with open(os.path.join(out_path, 'results.csv'), newline='\n') as f: reader = csv.reader(f) for lastrow in reader: pass assert lastrow[-4] == 'valid' - assert float(lastrow[-3]) > 0.75 + assert float(lastrow[-3]) > 0.65 os.remove(os.path.join(out_path, 'results.csv')) @pytest.mark.slow @@ -53,18 +48,13 @@ class TestMolnet(unittest.TestCase): out_path = tempfile.mkdtemp() metric = [dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean)] dc.molnet.run_benchmark( - datasets, - str(model), - metric=metric, - split=split, - out_path=out_path, - reload=False) + datasets, str(model), metric=metric, split=split, out_path=out_path) with open(os.path.join(out_path, 'results.csv'), newline='\n') as f: reader = csv.reader(f) for lastrow in reader: pass assert lastrow[-4] == 'valid' - assert float(lastrow[-3]) > 0.95 + assert float(lastrow[-3]) > 0.75 os.remove(os.path.join(out_path, 'results.csv')) def test_clintox_multitask(self): @@ -80,8 +70,7 @@ class TestMolnet(unittest.TestCase): metric=metric, split=split, out_path=out_path, - test=True, - reload=False) + test=True) with open(os.path.join(out_path, 'results.csv'), newline='\n') as f: reader = csv.reader(f) for lastrow in reader: diff --git a/deepchem/rl/__init__.py b/deepchem/rl/__init__.py index 0c887505e8812ea43ee3dd1cb009863e4da1cf23..33028d808885b6fd3c705f96a1e4d10d5dc5cc3e 100644 --- a/deepchem/rl/__init__.py +++ b/deepchem/rl/__init__.py @@ -1,7 +1,7 @@ """Interface for reinforcement learning.""" -from deepchem.rl.a2c import A2C -from deepchem.rl.ppo import PPO +from deepchem.rl.a2c import A2C # noqa: F401 +from deepchem.rl.ppo import PPO # noqa: F401 class Environment(object): @@ -55,8 +55,11 @@ class Environment(object): if state_dtype is None: # Assume all arrays are float32. 
import numpy - import collections - if isinstance(state_shape[0], collections.Sequence): + try: + from collections.abc import Sequence as SequenceCollection + except: + from collections import Sequence as SequenceCollection + if isinstance(state_shape[0], SequenceCollection): self._state_dtype = [numpy.float32] * len(state_shape) else: self._state_dtype = numpy.float32 @@ -120,7 +123,7 @@ class Environment(object): This must be called before calling step() or querying the state. You can call it again later to reset the environment back to its original state. """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") def step(self, action): """Take a time step by performing an action. @@ -137,7 +140,7 @@ class Environment(object): the reward earned by taking the action, represented as a floating point number (higher values are better) """ - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") class GymEnvironment(Environment): @@ -225,4 +228,4 @@ class Policy(object): Depending on the algorithm being used, other inputs might get passed as well. It is up to each algorithm to document that. 
""" - raise NotImplemented("Subclasses must implement this") + raise NotImplementedError("Subclasses must implement this") diff --git a/deepchem/rl/a2c.py b/deepchem/rl/a2c.py index 6f3016f73e76db5a896c1dea8df58c5ae9c095de..d571a5d700421bd5993ff9b983900f06dcbba3ad 100644 --- a/deepchem/rl/a2c.py +++ b/deepchem/rl/a2c.py @@ -1,17 +1,14 @@ """Advantage Actor-Critic (A2C) algorithm for reinforcement learning.""" +import time +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection +import numpy as np +import tensorflow as tf from deepchem.models import KerasModel from deepchem.models.optimizers import Adam -import numpy as np -import tensorflow as tf -import tensorflow_probability as tfp -import collections -import copy -import multiprocessing -import os -import re -import threading -import time class A2CLossDiscrete(object): @@ -40,10 +37,20 @@ class A2CLossDiscrete(object): class A2CLossContinuous(object): - """This class computes the loss function for A2C with continuous action spaces.""" + """This class computes the loss function for A2C with continuous action spaces. + + Note + ---- + This class requires tensorflow-probability to be installed. + """ def __init__(self, value_weight, entropy_weight, mean_index, std_index, value_index): + try: + import tensorflow_probability as tfp # noqa: F401 + except ModuleNotFoundError: + raise ValueError( + "This class requires tensorflow-probability to be installed.") self.value_weight = value_weight self.entropy_weight = entropy_weight self.mean_index = mean_index @@ -51,6 +58,7 @@ class A2CLossContinuous(object): self.value_index = value_index def __call__(self, outputs, labels, weights): + import tensorflow_probability as tfp mean = outputs[self.mean_index] std = outputs[self.std_index] value = outputs[self.value_index] @@ -112,6 +120,11 @@ class A2C(object): except specifying the new goal. 
It should return that list of states, and the rewards that would have been received for taking the specified actions from those states. The output arrays may be shorter than the input ones, if the modified rollout would have terminated sooner. + + + Note + ---- + Using this class on continuous action spaces requires that `tensorflow_probability` be installed. """ def __init__(self, @@ -160,7 +173,7 @@ class A2C(object): self.value_weight = value_weight self.entropy_weight = entropy_weight self.use_hindsight = use_hindsight - self._state_is_list = isinstance(env.state_shape[0], collections.Sequence) + self._state_is_list = isinstance(env.state_shape[0], SequenceCollection) if optimizer is None: self._optimizer = Adam(learning_rate=0.001, beta1=0.9, beta2=0.999) else: @@ -369,7 +382,6 @@ class A2C(object): def _create_rollout(self, rnn_states): """Generate a rollout.""" - n_actions = self._env.n_actions states = [] actions = [] rewards = [] diff --git a/deepchem/rl/envs/tictactoe.py b/deepchem/rl/envs/tictactoe.py index d021c1658959a91ab1a8849f852220cf47e10a83..92c09265b2cf9f08fb0585d5320b20dc55476750 100644 --- a/deepchem/rl/envs/tictactoe.py +++ b/deepchem/rl/envs/tictactoe.py @@ -9,7 +9,7 @@ class TicTacToeEnvironment(deepchem.rl.Environment): Play tictactoe against a randomly acting opponent """ X = np.array([1.0, 0.0]) - O = np.array([0.0, 1.0]) + O = np.array([0.0, 1.0]) # noqa: E741 EMPTY = np.array([0.0, 0.0]) ILLEGAL_MOVE_PENALTY = -3.0 diff --git a/deepchem/rl/ppo.py b/deepchem/rl/ppo.py index 68c72b6f5d0befd52a60028d0238e728a9f9b393..8db1c28f13f7d051468f9b79d92b782491bfd39d 100644 --- a/deepchem/rl/ppo.py +++ b/deepchem/rl/ppo.py @@ -1,16 +1,17 @@ """Proximal Policy Optimization (PPO) algorithm for reinforcement learning.""" +import copy +import time +try: + from collections.abc import Sequence as SequenceCollection +except: + from collections import Sequence as SequenceCollection +from multiprocessing.dummy import Pool -from deepchem.models import 
KerasModel -from deepchem.models.optimizers import Adam import numpy as np import tensorflow as tf -import collections -import copy -import multiprocessing -from multiprocessing.dummy import Pool -import os -import re -import time + +from deepchem.models import KerasModel +from deepchem.models.optimizers import Adam class PPOLoss(object): @@ -151,7 +152,7 @@ class PPO(object): self.value_weight = value_weight self.entropy_weight = entropy_weight self.use_hindsight = use_hindsight - self._state_is_list = isinstance(env.state_shape[0], collections.Sequence) + self._state_is_list = isinstance(env.state_shape[0], SequenceCollection) if optimizer is None: self._optimizer = Adam(learning_rate=0.001, beta1=0.9, beta2=0.999) else: @@ -209,7 +210,6 @@ class PPO(object): """ step_count = 0 workers = [] - threads = [] for i in range(self.optimization_rollouts): workers.append(_Worker(self, i)) if restore: @@ -435,7 +435,6 @@ class _Worker(object): def create_rollout(self): """Generate a rollout.""" - n_actions = self.env.n_actions states = [] action_prob = [] actions = [] diff --git a/deepchem/rl/tests/test_a2c.py b/deepchem/rl/tests/test_a2c.py index e930f448eeae5f434920d3f6e1b3e95ba3cc9919..187b3e970875504fff595ebc7e662493dd83435d 100644 --- a/deepchem/rl/tests/test_a2c.py +++ b/deepchem/rl/tests/test_a2c.py @@ -1,12 +1,13 @@ +import unittest + +import pytest +import numpy as np +import tensorflow as tf from flaky import flaky +from tensorflow.keras.layers import Input, Dense, GRU, Reshape, Softmax import deepchem as dc from deepchem.models.optimizers import Adam, PolynomialDecay -from tensorflow.keras.layers import Input, Dense, GRU, Reshape, Softmax -import numpy as np -import tensorflow as tf -import unittest -import pytest class TestA2C(unittest.TestCase): diff --git a/deepchem/rl/tests/test_ppo.py b/deepchem/rl/tests/test_ppo.py index dc202681e20cad4b51bd0631dc63bafda0371b99..2b9fa4ed79e0e231e4575bcbbd7afb00b02780e0 100644 --- a/deepchem/rl/tests/test_ppo.py +++ 
b/deepchem/rl/tests/test_ppo.py @@ -1,12 +1,13 @@ +import unittest + import pytest +import numpy as np +import tensorflow as tf from flaky import flaky +from tensorflow.keras.layers import Input, Dense, GRU, Reshape, Softmax import deepchem as dc -from deepchem.models.optimizers import Adam, PolynomialDecay -from tensorflow.keras.layers import Input, Dense, GRU, Reshape, Softmax -import numpy as np -import tensorflow as tf -import unittest +from deepchem.models.optimizers import Adam class TestPPO(unittest.TestCase): @@ -72,6 +73,7 @@ class TestPPO(unittest.TestCase): env, TestPolicy(), max_rollout_length=20, + optimization_epochs=8, optimizer=Adam(learning_rate=0.003)) ppo.fit(80000) @@ -229,8 +231,6 @@ class TestPPO(unittest.TestCase): # Optimize it. env = TestEnvironment() - learning_rate = PolynomialDecay( - initial_rate=0.0001, final_rate=0.00005, decay_steps=1500000) ppo = dc.rl.PPO( env, TestPolicy(), diff --git a/deepchem/rl/tests/test_rl_reload.py b/deepchem/rl/tests/test_rl_reload.py new file mode 100644 index 0000000000000000000000000000000000000000..06f17d924f6e4fef1e3ff6d6e899009f04b2c3e9 --- /dev/null +++ b/deepchem/rl/tests/test_rl_reload.py @@ -0,0 +1,90 @@ +import deepchem as dc +import tensorflow as tf +import numpy as np +from deepchem.models.optimizers import Adam + + +class RouletteEnvironment(dc.rl.Environment): + + def __init__(self): + super(RouletteEnvironment, self).__init__([(1,)], 38) + self._state = [np.array([0])] + + def step(self, action): + if action == 37: + self._terminated = True # Walk away. + return 0.0 + wheel = np.random.randint(37) + if wheel == 0: + if action == 0: + return 35.0 + return -1.0 + if action != 0 and wheel % 2 == action % 2: + return 1.0 + return -1.0 + + def reset(self): + self._terminated = False + + +# This policy just learns a constant probability for each action, and a constant for the value. 
+ + +class TestPolicy(dc.rl.Policy): + + def __init__(self, env): + super(TestPolicy, self).__init__(['action_prob', 'value']) + self.env = env + + def create_model(self, **kwargs): + env = self.env + + class TestModel(tf.keras.Model): + + def __init__(self): + super(TestModel, self).__init__(**kwargs) + self.action = tf.Variable(np.ones(env.n_actions, np.float32)) + self.value = tf.Variable([0.0], tf.float32) + + def call(self, inputs, **kwargs): + prob = tf.nn.softmax(tf.reshape(self.action, (-1, env.n_actions))) + return (prob, self.value) + + return TestModel() + + +def test_a2c_reload(): + env = RouletteEnvironment() + policy = TestPolicy(env) + + a2c = dc.rl.A2C( + env, policy, max_rollout_length=20, optimizer=Adam(learning_rate=0.001)) + a2c.fit(1000) + action_prob, value = a2c.predict([[0]]) + + new_a2c = dc.rl.A2C(env, policy, model_dir=a2c._model.model_dir) + new_a2c.restore() + action_prob2, value2 = new_a2c.predict([[0]]) + + assert np.all(action_prob == action_prob2) + assert value == value2 + + +def test_ppo_reload(): + env = RouletteEnvironment() + policy = TestPolicy(env) + ppo = dc.rl.PPO( + env, + policy, + max_rollout_length=20, + optimization_epochs=8, + optimizer=Adam(learning_rate=0.003)) + ppo.fit(1000) + action_prob, value = ppo.predict([[0]]) + + new_ppo = dc.rl.PPO(env, policy, model_dir=ppo._model.model_dir) + new_ppo.restore() + action_prob2, value2 = new_ppo.predict([[0]]) + + assert np.all(action_prob == action_prob2) + assert value == value2 diff --git a/deepchem/splits/__init__.py b/deepchem/splits/__init__.py index 97088cc53d31733674b2c98c4ab03f3cfce25fc9..ea5dcc8538d1e78b574a93724a94dc2e0ccc70a2 100644 --- a/deepchem/splits/__init__.py +++ b/deepchem/splits/__init__.py @@ -1,12 +1,38 @@ """ Gathers all splitters in one place for convenient imports """ -# TODO(rbharath): Get rid of * import -from deepchem.splits.splitters import * -from deepchem.splits.splitters import ScaffoldSplitter -from deepchem.splits.splitters import 
SpecifiedSplitter -from deepchem.splits.splitters import IndexSplitter -from deepchem.splits.splitters import IndiceSplitter +# flake8: noqa + +# basic splitter +from deepchem.splits.splitters import Splitter +from deepchem.splits.splitters import RandomSplitter +from deepchem.splits.splitters import RandomStratifiedSplitter from deepchem.splits.splitters import RandomGroupSplitter +from deepchem.splits.splitters import SingletaskStratifiedSplitter +from deepchem.splits.splitters import IndexSplitter +from deepchem.splits.splitters import SpecifiedSplitter + +# molecule splitter +from deepchem.splits.splitters import ScaffoldSplitter +from deepchem.splits.splitters import MolecularWeightSplitter +from deepchem.splits.splitters import MaxMinSplitter +from deepchem.splits.splitters import FingerprintSplitter +from deepchem.splits.splitters import ButinaSplitter + +# other splitter from deepchem.splits.task_splitter import merge_fold_datasets from deepchem.splits.task_splitter import TaskSplitter + +################################################################# +# Removed API +################################################################# + +import logging +logger = logging.getLogger(__name__) + + +class IndiceSplitter: + + def __init__(self, valid_indices=None, test_indices=None): + raise ImportError("IndiceSplitter was renamed to SpecifiedSplitter.\n" + "Please use SpecifiedSplitter instead of IndiceSplitter.") diff --git a/deepchem/splits/splitters.py b/deepchem/splits/splitters.py index fb893f32cf6413bb0eb99e3d4be2fbe7d36dca3e..a3ca67aec0b8833e81c004a84a86873b522d6ee4 100644 --- a/deepchem/splits/splitters.py +++ b/deepchem/splits/splitters.py @@ -1,32 +1,22 @@ """ Contains an abstract base class that supports chemically aware data splits. 
""" +import inspect +import os import random - -__author__ = "Bharath Ramsundar, Aneesh Pappu " -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import tempfile +import itertools +import logging +from typing import Any, Dict, List, Iterator, Optional, Sequence, Tuple + import numpy as np import pandas as pd -import itertools -import os -import deepchem as dc -from deepchem.data import DiskDataset -from deepchem.utils import ScaffoldGenerator -from deepchem.utils.save import log -from deepchem.data import NumpyDataset -from deepchem.utils.save import load_data +import deepchem as dc +from deepchem.data import Dataset, DiskDataset +from deepchem.utils import get_print_threshold -def generate_scaffold(smiles, include_chirality=False): - """Compute the Bemis-Murcko scaffold for a SMILES string.""" - from rdkit import Chem - mol = Chem.MolFromSmiles(smiles) - engine = ScaffoldGenerator(include_chirality=include_chirality) - scaffold = engine.get_scaffold(mol) - return scaffold +logger = logging.getLogger(__name__) def randomize_arrays(array_list): @@ -40,42 +30,40 @@ def randomize_arrays(array_list): class Splitter(object): - """ - Abstract base class for chemically aware splits.. - """ + """Splitters split up Datasets into pieces for training/validation/testing. + + In machine learning applications, it's often necessary to split up a dataset + into training/validation/test sets. Or to k-fold split a dataset (that is, + divide into k equal subsets) for cross-validation. The `Splitter` class is + an abstract superclass for all splitters that captures the common API across + splitter classes. - def __init__(self, verbose=False): - """Creates splitter object.""" - self.verbose = verbose + Note that `Splitter` is an abstract superclass. You won't want to + instantiate this class directly. Rather you will want to use a concrete + subclass for your application. 
+ """ - def k_fold_split(self, dataset, k, directories=None, **kwargs): + def k_fold_split(self, + dataset: Dataset, + k: int, + directories: Optional[List[str]] = None, + **kwargs) -> List[Tuple[Dataset, Dataset]]: """ Parameters ---------- dataset: Dataset - Dataset to do a k-fold split - + Dataset to do a k-fold split k: int - number of folds - - directories: list of str - list of length 2*k filepaths to save the result disk-datasets - - kwargs + Number of folds to split `dataset` into. + directories: List[str], optional (default None) + List of length 2*k filepaths to save the result disk-datasets. Returns ------- - list of length k tuples of (train, cv) - - """ + List[Tuple[Dataset, Dataset]] + List of length k tuples of (train, cv) where `train` and `cv` are both `Dataset`. """ - :param dataset: - :param k: - :param directories: - :param kwargs: - :return: list of length k tuples of (train, cv) - """ - log("Computing K-fold split", self.verbose) + logger.info("Computing K-fold split") if directories is None: directories = [tempfile.mkdtemp() for _ in range(2 * k)] else: @@ -89,6 +77,7 @@ class Splitter(object): else: rem_dataset = DiskDataset.from_numpy(dataset.X, dataset.y, dataset.w, dataset.ids) + for fold in range(k): # Note starts as 1/k since fold starts at 0. Ends at 1 since fold goes up # to k-1. 
@@ -102,45 +91,78 @@ class Splitter(object): **kwargs) cv_dataset = rem_dataset.select(fold_inds, select_dir=cv_dir) cv_datasets.append(cv_dataset) - rem_dataset = rem_dataset.select(rem_inds) + # FIXME: Incompatible types in assignment (expression has type "Dataset", variable has type "DiskDataset") + rem_dataset = rem_dataset.select(rem_inds) # type: ignore - train_ds_to_merge = filter(lambda x: x is not None, - [train_ds_base, rem_dataset]) + train_ds_to_merge: Iterator[Dataset] = filter( + None, [train_ds_base, rem_dataset]) train_ds_to_merge = filter(lambda x: len(x) > 0, train_ds_to_merge) train_dataset = DiskDataset.merge(train_ds_to_merge, merge_dir=train_dir) train_datasets.append(train_dataset) - update_train_base_merge = filter(lambda x: x is not None, - [train_ds_base, cv_dataset]) + update_train_base_merge: Iterator[Dataset] = filter( + None, [train_ds_base, cv_dataset]) train_ds_base = DiskDataset.merge(update_train_base_merge) return list(zip(train_datasets, cv_datasets)) def train_valid_test_split(self, - dataset, - train_dir=None, - valid_dir=None, - test_dir=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - seed=None, - log_every_n=1000, - verbose=True, - **kwargs): - """ - Splits self into train/validation/test sets. + dataset: Dataset, + train_dir: Optional[str] = None, + valid_dir: Optional[str] = None, + test_dir: Optional[str] = None, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: int = 1000, + **kwargs) -> Tuple[Dataset, Dataset, Dataset]: + """ Splits self into train/validation/test sets. + + Returns Dataset objects for train, valid, test. - Returns Dataset objects. - """ - log("Computing train/valid/test indices", self.verbose) + Parameters + ---------- + dataset: Dataset + Dataset to be split. + train_dir: str, optional (default None) + If specified, the directory in which the generated + training dataset should be stored. 
This is only + considered if `isinstance(dataset, dc.data.DiskDataset)` + valid_dir: str, optional (default None) + If specified, the directory in which the generated + valid dataset should be stored. This is only + considered if `isinstance(dataset, dc.data.DiskDataset)` + is True. + test_dir: str, optional (default None) + If specified, the directory in which the generated + test dataset should be stored. This is only + considered if `isinstance(dataset, dc.data.DiskDataset)` + is True. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default 1000) + Controls the logger by dictating how often logger outputs + will be produced. + + Returns + ------- + Tuple[Dataset, Optional[Dataset], Dataset] + A tuple of train, valid and test datasets as dc.data.Dataset objects. 
+ """ + logger.info("Computing train/valid/test indices") train_inds, valid_inds, test_inds = self.split( dataset, - seed=seed, frac_train=frac_train, frac_test=frac_test, frac_valid=frac_valid, - log_every_n=log_every_n, - **kwargs) + seed=seed, + log_every_n=log_every_n) if train_dir is None: train_dir = tempfile.mkdtemp() if valid_dir is None: @@ -148,10 +170,7 @@ class Splitter(object): if test_dir is None: test_dir = tempfile.mkdtemp() train_dataset = dataset.select(train_inds, train_dir) - if frac_valid != 0: - valid_dataset = dataset.select(valid_inds, valid_dir) - else: - valid_dataset = None + valid_dataset = dataset.select(valid_inds, valid_dir) test_dataset = dataset.select(test_inds, test_dir) if isinstance(train_dataset, DiskDataset): train_dataset.memory_cache_size = 40 * (1 << 20) # 40 MB @@ -159,17 +178,40 @@ class Splitter(object): return train_dataset, valid_dataset, test_dataset def train_test_split(self, - dataset, - train_dir=None, - test_dir=None, - seed=None, - frac_train=.8, - verbose=True, - **kwargs): + dataset: Dataset, + train_dir: Optional[str] = None, + test_dir: Optional[str] = None, + frac_train: float = 0.8, + seed: Optional[int] = None, + **kwargs) -> Tuple[Dataset, Dataset]: + """Splits self into train/test sets. + + Returns Dataset objects for train/test. + + Parameters + ---------- + dataset: data like object + Dataset to be split. + train_dir: str, optional (default None) + If specified, the directory in which the generated + training dataset should be stored. This is only + considered if `isinstance(dataset, dc.data.DiskDataset)` + is True. + test_dir: str, optional (default None) + If specified, the directory in which the generated + test dataset should be stored. This is only + considered if `isinstance(dataset, dc.data.DiskDataset)` + is True. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + seed: int, optional (default None) + Random seed to use. 
+ + Returns + ------- + Tuple[Dataset, Dataset] + A tuple of train and test datasets as dc.data.Dataset objects. """ - Splits self into train/test sets. - Returns Dataset objects. - """ valid_dir = tempfile.mkdtemp() train_dataset, _, test_dataset = self.train_valid_test_split( dataset, @@ -180,72 +222,235 @@ class Splitter(object): frac_test=1 - frac_train, frac_valid=0., seed=seed, - verbose=verbose, **kwargs) return train_dataset, test_dataset def split(self, - dataset, - seed=None, - frac_train=None, - frac_valid=None, - frac_test=None, - log_every_n=None, - verbose=False, - **kwargs): - """ - Stub to be filled in by child classes. + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None) -> Tuple: + """Return indices for specified split + + Parameters + ---------- + dataset: dc.data.Dataset + Dataset to be split. + seed: int, optional (default None) + Random seed to use. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + log_every_n: int, optional (default None) + Controls the logger by dictating how often logger outputs + will be produced. + + Returns + ------- + Tuple + A tuple `(train_inds, valid_inds, test_inds)` of the indices (integers) for + the various splits. """ raise NotImplementedError + def __str__(self) -> str: + """Convert self to str representation. -class RandomGroupSplitter(Splitter): + Returns + ------- + str + The string represents the class. 
+ + Examples + -------- + >>> import deepchem as dc + >>> str(dc.splits.RandomSplitter()) + 'RandomSplitter' + """ + args_spec = inspect.getfullargspec(self.__init__) # type: ignore + args_names = [arg for arg in args_spec.args if arg != 'self'] + args_num = len(args_names) + args_default_values = [None for _ in range(args_num)] + if args_spec.defaults is not None: + defaults = list(args_spec.defaults) + args_default_values[-len(defaults):] = defaults + + override_args_info = '' + for arg_name, default in zip(args_names, args_default_values): + if arg_name in self.__dict__: + arg_value = self.__dict__[arg_name] + # validation + # skip list + if isinstance(arg_value, list): + continue + if isinstance(arg_value, str): + # skip path string + if "\\/." in arg_value or "/" in arg_value or '.' in arg_value: + continue + # main logic + if default != arg_value: + override_args_info += '_' + arg_name + '_' + str(arg_value) + return self.__class__.__name__ + override_args_info + + def __repr__(self) -> str: + """Convert self to repr representation. - def __init__(self, groups, *args, **kwargs): + Returns + ------- + str + The string represents the class. + + Examples + -------- + >>> import deepchem as dc + >>> dc.splits.RandomSplitter() + RandomSplitter[] """ - A splitter class that splits on groupings. An example use case is when there - are multiple conformations of the same molecule that share the same topology. - This splitter subsequently guarantees that resulting splits preserve groupings. 
+ args_spec = inspect.getfullargspec(self.__init__) # type: ignore + args_names = [arg for arg in args_spec.args if arg != 'self'] + args_info = '' + for arg_name in args_names: + value = self.__dict__[arg_name] + # for str + if isinstance(value, str): + value = "'" + value + "'" + # for list + if isinstance(value, list): + threshold = get_print_threshold() + value = np.array2string(np.array(value), threshold=threshold) + args_info += arg_name + '=' + str(value) + ', ' + return self.__class__.__name__ + '[' + args_info[:-2] + ']' + - Note that it doesn't do any dynamic programming or something fancy to try to - maximize the choice such that frac_train, frac_valid, or frac_test is maximized. - It simply permutes the groups themselves. As such, use with caution if the number - of elements per group varies significantly. +class RandomSplitter(Splitter): + """Class for doing random data splits.""" + + def split(self, + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Splits internal compounds randomly into train/validation/test. Parameters ---------- - groups: array like list of hashables - An auxiliary array indicating the group of each item. + dataset: Dataset + Dataset to be split. + seed: int, optional (default None) + Random seed to use. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). 
- Eg: - g: 3 2 2 0 1 1 2 4 3 - X: 0 1 2 3 4 5 6 7 8 + Returns + ------- + Tuple[np.ndarray, np.ndarray, np.ndarray] + A tuple of train indices, valid indices, and test indices. + Each indices is a numpy array. + """ + np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) + if seed is not None: + np.random.seed(seed) + num_datapoints = len(dataset) + train_cutoff = int(frac_train * num_datapoints) + valid_cutoff = int((frac_train + frac_valid) * num_datapoints) + shuffled = np.random.permutation(range(num_datapoints)) + return (shuffled[:train_cutoff], shuffled[train_cutoff:valid_cutoff], + shuffled[valid_cutoff:]) + + +class RandomGroupSplitter(Splitter): + """Random split based on groupings. + + A splitter class that splits on groupings. An example use case is when + there are multiple conformations of the same molecule that share the same + topology. This splitter subsequently guarantees that resulting splits + preserve groupings. + + Note that it doesn't do any dynamic programming or something fancy to try + to maximize the choice such that frac_train, frac_valid, or frac_test is + maximized. It simply permutes the groups themselves. As such, use with + caution if the number of elements per group varies significantly. + """ + + def __init__(self, groups: Sequence): + """Initialize this object. + + Parameters + ---------- + groups: Sequence + An array indicating the group of each item. + The length is equals to `len(dataset.X)` - Eg: - g: a b b e q x a a r - X: 0 1 2 3 4 5 6 7 8 + Note + ---- + The examples of groups is the following. 
+ | groups : 3 2 2 0 1 1 2 4 3 + | dataset.X : 0 1 2 3 4 5 6 7 8 + + | groups : a b b e q x a a r + | dataset.X : 0 1 2 3 4 5 6 7 8 """ self.groups = groups - super(RandomGroupSplitter, self).__init__(*args, **kwargs) def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[List[int], List[int], List[int]]: + """Return indices for specified split + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple `(train_inds, valid_inds, test_inds` of the indices (integers) for + the various splits. + """ assert len(self.groups) == dataset.X.shape[0] np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) - if not seed is None: + if seed is not None: np.random.seed(seed) # dict is needed in case groups aren't strictly flattened or # hashed by something non-integer like - group_dict = {} + group_dict: Dict[Any, List[int]] = {} for idx, g in enumerate(self.groups): if g not in group_dict: group_dict[g] = [] @@ -274,212 +479,183 @@ class RandomGroupSplitter(Splitter): class RandomStratifiedSplitter(Splitter): - """ - RandomStratified Splitter class. + """RandomStratified Splitter class. For sparse multitask datasets, a standard split offers no guarantees - that the splits will have any activate compounds. 
This class guarantees - that each task will have a proportional split of the activates in a - split. TO do this, a ragged split is performed with different numbers - of compounds taken from each task. Thus, the length of the split arrays - may exceed the split of the original array. That said, no datapoint is - copied to more than one split, so correctness is still ensured. - - Note that this splitter is only valid for boolean label data. - - TODO(rbharath): This splitter should be refactored to match style of - other splitter classes. + that the splits will have any active compounds. This class tries to + arrange that each split has a proportional number of the actives for each + task. This is strictly guaranteed only for single-task datasets, but for + sparse multitask datasets it usually manages to produces a fairly accurate + division of the actives for each task. + + Note + ---- + This splitter is primarily designed for boolean labeled data. It considers + only whether a label is zero or non-zero. When labels can take on multiple + non-zero values, it does not try to give each split a proportional fraction + of the samples with each value. """ - def __generate_required_hits(self, w, frac_split): - # returns list of per column sum of non zero elements - required_hits = (w != 0).sum(axis=0) - for col_hits in required_hits: - col_hits = int(frac_split * col_hits) - return required_hits - - def get_task_split_indices(self, y, w, frac_split): - """Returns num datapoints needed per task to split properly.""" - w_present = (w != 0) - y_present = y * w_present - - # Compute number of actives needed per task. 
- task_actives = np.sum(y_present, axis=0) - task_split_actives = (frac_split * task_actives).astype(int) - - # loop through each column and obtain index required to splice out for - # required fraction of hits - split_indices = [] - n_tasks = np.shape(y)[1] - for task in range(n_tasks): - actives_count = task_split_actives[task] - cum_task_actives = np.cumsum(y_present[:, task]) - # Find the first index where the cumulative number of actives equals - # the actives_count - split_index = np.amin(np.where(cum_task_actives >= actives_count)[0]) - # Note that np.where tells us last index required to exceed - # actives_count, so we actually want the following location - split_indices.append(split_index + 1) - return split_indices - - # TODO(rbharath): Refactor this split method to match API of other - # splits (or potentially refactor those to match this). - def split(self, dataset, frac_split, split_dirs=None): - """ - Method that does bulk of splitting dataset. - """ - if split_dirs is not None: - assert len(split_dirs) == 2 - else: - split_dirs = [tempfile.mkdtemp(), tempfile.mkdtemp()] - - # Handle edge case where frac_split is 1 - if frac_split == 1: - dataset_1 = DiskDataset.from_numpy(dataset.X, dataset.y, dataset.w, - dataset.ids) - dataset_2 = None - return dataset_1, dataset_2 - X, y, w, ids = randomize_arrays((dataset.X, dataset.y, dataset.w, - dataset.ids)) - if len(y.shape) == 1: - y = np.expand_dims(y, 1) - if len(w.shape) == 1: - w = np.expand_dims(w, 1) - split_indices = self.get_task_split_indices(y, w, frac_split) - - # Create weight matrices fpor two haves. 
- w_1, w_2 = np.zeros_like(w), np.zeros_like(w) - for task, split_index in enumerate(split_indices): - # copy over up to required index for weight first_split - w_1[:split_index, task] = w[:split_index, task] - w_2[split_index:, task] = w[split_index:, task] - - # check out if any rows in either w_1 or w_2 are just zeros - rows_1 = w_1.any(axis=1) - X_1, y_1, w_1, ids_1 = X[rows_1], y[rows_1], w_1[rows_1], ids[rows_1] - dataset_1 = DiskDataset.from_numpy(X_1, y_1, w_1, ids_1) - - rows_2 = w_2.any(axis=1) - X_2, y_2, w_2, ids_2 = X[rows_2], y[rows_2], w_2[rows_2], ids[rows_2] - dataset_2 = DiskDataset.from_numpy(X_2, y_2, w_2, ids_2) - - return dataset_1, dataset_2 + def split(self, + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None) -> Tuple: + """Return indices for specified split - def train_valid_test_split(self, - dataset, - train_dir=None, - valid_dir=None, - test_dir=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - seed=None, - log_every_n=1000): - """Custom split due to raggedness in original split. - """ - if train_dir is None: - train_dir = tempfile.mkdtemp() - if valid_dir is None: - valid_dir = tempfile.mkdtemp() - if test_dir is None: - test_dir = tempfile.mkdtemp() - rem_dir = tempfile.mkdtemp() - train_dataset, rem_dataset = self.split(dataset, frac_train, - [train_dir, rem_dir]) + Parameters + ---------- + dataset: dc.data.Dataset + Dataset to be split. + seed: int, optional (default None) + Random seed to use. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. 
+ log_every_n: int, optional (default None) + Controls the logger by dictating how often logger outputs + will be produced. - # calculate percent split for valid (out of test and valid) - if frac_valid + frac_test > 0: - valid_percentage = frac_valid / (frac_valid + frac_test) - else: - return train_dataset, None, None - # split remaining data into valid and test, treating sub test set also as sparse - valid_dataset, test_dataset = self.split(rem_dataset, valid_percentage, - [valid_dir, test_dir]) + Returns + ------- + Tuple + A tuple `(train_inds, valid_inds, test_inds)` of the indices (integers) for + the various splits. + """ + y_present = (dataset.y != 0) * (dataset.w != 0) + if len(y_present.shape) == 1: + y_present = np.expand_dims(y_present, 1) + elif len(y_present.shape) > 2: + raise ValueError( + 'RandomStratifiedSplitter cannot be applied when y has more than two dimensions' + ) + if seed is not None: + np.random.seed(seed) - return train_dataset, valid_dataset, test_dataset + # Figure out how many positive samples we want for each task in each dataset. - def k_fold_split(self, dataset, k, directories=None, **kwargs): - """Needs custom implementation due to ragged splits for stratification.""" - log("Computing K-fold split", self.verbose) - if directories is None: - directories = [tempfile.mkdtemp() for _ in range(k)] - else: - assert len(directories) == k - fold_datasets = [] - # rem_dataset is remaining portion of dataset - rem_dataset = dataset - for fold in range(k): - # Note starts as 1/k since fold starts at 0. Ends at 1 since fold goes up - # to k-1. - frac_fold = 1. 
/ (k - fold) - fold_dir = directories[fold] - rem_dir = tempfile.mkdtemp() - fold_dataset, rem_dataset = self.split(rem_dataset, frac_fold, - [fold_dir, rem_dir]) - fold_datasets.append(fold_dataset) - return fold_datasets + n_tasks = y_present.shape[1] + indices_for_task = [ + np.random.permutation(np.nonzero(y_present[:, i])[0]) + for i in range(n_tasks) + ] + count_for_task = np.array([len(x) for x in indices_for_task]) + train_target = np.round(frac_train * count_for_task).astype(np.int) + valid_target = np.round(frac_valid * count_for_task).astype(np.int) + test_target = np.round(frac_test * count_for_task).astype(np.int) + + # Assign the positive samples to datasets. Since a sample may be positive + # on more than one task, we need to keep track of the effect of each added + # sample on each task. To try to keep everything balanced, we cycle through + # tasks, assigning one positive sample for each one. + + train_counts = np.zeros(n_tasks, np.int) + valid_counts = np.zeros(n_tasks, np.int) + test_counts = np.zeros(n_tasks, np.int) + set_target = [train_target, valid_target, test_target] + set_counts = [train_counts, valid_counts, test_counts] + set_inds: List[List[int]] = [[], [], []] + assigned = set() + max_count = np.max(count_for_task) + for i in range(max_count): + for task in range(n_tasks): + indices = indices_for_task[task] + if i < len(indices) and indices[i] not in assigned: + # We have a sample that hasn't been assigned yet. Assign it to + # whichever set currently has the lowest fraction of its target for + # this task. + + index = indices[i] + set_frac = [ + 1 if set_target[i][task] == 0 else + set_counts[i][task] / set_target[i][task] for i in range(3) + ] + s = np.argmin(set_frac) + set_inds[s].append(index) + assigned.add(index) + set_counts[s] += y_present[index] + + # The remaining samples are negative for all tasks. Add them to fill out + # each set to the correct total number. 
+ + n_samples = y_present.shape[0] + set_size = [ + int(np.round(n_samples * f)) + for f in (frac_train, frac_valid, frac_test) + ] + s = 0 + for i in np.random.permutation(range(n_samples)): + if i not in assigned: + while s < 2 and len(set_inds[s]) >= set_size[s]: + s += 1 + set_inds[s].append(i) + return tuple(sorted(x) for x in set_inds) class SingletaskStratifiedSplitter(Splitter): + """Class for doing data splits by stratification on a single task. + + Examples + -------- + >>> n_samples = 100 + >>> n_features = 10 + >>> n_tasks = 10 + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.random.rand(n_samples, n_tasks) + >>> w = np.ones_like(y) + >>> dataset = DiskDataset.from_numpy(np.ones((100,n_tasks)), np.ones((100,n_tasks))) + >>> splitter = SingletaskStratifiedSplitter(task_number=5) + >>> train_dataset, test_dataset = splitter.train_test_split(dataset) """ - Class for doing data splits by stratification on a single task. - - Example: - - >>> n_samples = 100 - >>> n_features = 10 - >>> n_tasks = 10 - >>> X = np.random.rand(n_samples, n_features) - >>> y = np.random.rand(n_samples, n_tasks) - >>> w = np.ones_like(y) - >>> dataset = DiskDataset.from_numpy(np.ones((100,n_tasks)), np.ones((100,n_tasks))) - >>> splitter = SingletaskStratifiedSplitter(task_number=5, verbose=False) - >>> train_dataset, test_dataset = splitter.train_test_split(dataset) + def __init__(self, task_number: int = 0): """ + Creates splitter object. - def __init__(self, task_number=0, verbose=False): + Parameters + ---------- + task_number: int, optional (default 0) + Task number for stratification. """ - Creates splitter object. - - Parameters - ---------- - task_number: int (Optional, Default 0) - Task number for stratification. - verbose: bool (Optional, Default False) - Controls logging frequency. 
- """ self.task_number = task_number - self.verbose = verbose - def k_fold_split(self, - dataset, - k, - directories=None, - seed=None, - log_every_n=None, - **kwargs): + # FIXME: Signature of "k_fold_split" incompatible with supertype "Splitter" + def k_fold_split( # type: ignore [override] + self, + dataset: Dataset, + k: int, + directories: Optional[List[str]] = None, + seed: Optional[int] = None, + log_every_n: Optional[int] = None, + **kwargs) -> List[Dataset]: """ - Splits compounds into k-folds using stratified sampling. - Overriding base class k_fold_split. - - Parameters - ---------- - dataset: dc.data.Dataset object - Dataset. - k: int - Number of folds. - seed: int (Optional, Default None) - Random seed. - log_every_n: int (Optional, Default None) - Log every n examples (not currently used). - - Returns - ------- - fold_datasets: List - List containing dc.data.Dataset objects - """ - log("Computing K-fold split", self.verbose) + Splits compounds into k-folds using stratified sampling. + Overriding base class k_fold_split. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + k: int + Number of folds to split `dataset` into. + directories: List[str], optional (default None) + List of length k filepaths to save the result disk-datasets. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). 
+ + Returns + ------- + fold_datasets: List[Dataset] + List of dc.data.Dataset objects + """ + logger.info("Computing K-fold split") if directories is None: directories = [tempfile.mkdtemp() for _ in range(k)] else: @@ -498,35 +674,37 @@ class SingletaskStratifiedSplitter(Splitter): return fold_datasets def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Splits compounds into train/validation/test using stratified sampling. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + Fraction of dataset put into training data. + frac_valid: float, optional (default 0.1) + Fraction of dataset put into validation data. + frac_test: float, optional (default 0.1) + Fraction of dataset put into test data. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[np.ndarray, np.ndarray, np.ndarray] + A tuple of train indices, valid indices, and test indices. + Each indices is a numpy array. """ - Splits compounds into train/validation/test using stratified sampling. - - Parameters - ---------- - dataset: dc.data.Dataset object - Dataset. - seed: int (Optional, Default None) - Random seed. - frac_train: float (Optional, Default .8) - Fraction of dataset put into training data. - frac_valid: float (Optional, Default .1) - Fraction of dataset put into validation data. - frac_test: float (Optional, Default .1) - Fraction of dataset put into test data. - log_every_n: int (Optional, Default None) - Log every n examples (not currently used). 
- - Returns - ------- - retval: Tuple - Tuple containing train indices, valid indices, and test indices - """ # JSG Assert that split fractions can be written as proper fractions over 10. # This can be generalized in the future with some common demoninator determination. # This will work for 80/20 train/test or 80/10/10 train/valid/test (most use cases). @@ -534,7 +712,7 @@ class SingletaskStratifiedSplitter(Splitter): np.testing.assert_equal(10 * frac_train + 10 * frac_valid + 10 * frac_test, 10.) - if not seed is None: + if seed is not None: np.random.seed(seed) y_s = dataset.y[:, self.task_number] @@ -543,7 +721,6 @@ class SingletaskStratifiedSplitter(Splitter): split_cd = 10 train_cutoff = int(np.round(frac_train * split_cd)) valid_cutoff = int(np.round(frac_valid * split_cd)) + train_cutoff - test_cutoff = int(np.round(frac_test * split_cd)) + valid_cutoff train_idx = np.array([]) valid_idx = np.array([]) @@ -558,34 +735,193 @@ class SingletaskStratifiedSplitter(Splitter): test_idx = np.hstack([test_idx, sortidx_split[shuffled[valid_cutoff:]]]) # Append remaining examples to train - if sortidx.shape[0] > 0: np.hstack([train_idx, sortidx]) + if sortidx.shape[0] > 0: + np.hstack([train_idx, sortidx]) return (train_idx, valid_idx, test_idx) -class MolecularWeightSplitter(Splitter): +class IndexSplitter(Splitter): + """Class for simple order based splits. + + Use this class when the `Dataset` you have is already ordered sa you would + like it to be processed. Then the first `frac_train` proportion is used for + training, the next `frac_valid` for validation, and the final `frac_test` for + testing. This class may make sense to use your `Dataset` is already time + ordered (for example). 
+ """ + + def split(self, + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """Splits internal compounds into train/validation/test in provided order. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional + Log every n examples (not currently used). + + Returns + ------- + Tuple[np.ndarray, np.ndarray, np.ndarray] + A tuple of train indices, valid indices, and test indices. + Each indices is a numpy array. + """ + np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) + num_datapoints = len(dataset) + train_cutoff = int(frac_train * num_datapoints) + valid_cutoff = int((frac_train + frac_valid) * num_datapoints) + indices = range(num_datapoints) + return (indices[:train_cutoff], indices[train_cutoff:valid_cutoff], + indices[valid_cutoff:]) + + +class SpecifiedSplitter(Splitter): + """Split data in the fashion specified by user. + + For some applications, you will already know how you'd like to split the + dataset. In this splitter, you simplify specify `valid_indices` and + `test_indices` and the datapoints at those indices are pulled out of the + dataset. Note that this is different from `IndexSplitter` which only splits + based on the existing dataset ordering, while this `SpecifiedSplitter` can + split on any specified ordering. """ - Class for doing data splits by molecular weight. 
+ + def __init__(self, + valid_indices: Optional[List[int]] = None, + test_indices: Optional[List[int]] = None): + """ + Parameters + ----------- + valid_indices: List[int] + List of indices of samples in the valid set + test_indices: List[int] + List of indices of samples in the test set + """ + self.valid_indices = valid_indices + self.test_indices = test_indices + + def split(self, + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Splits internal compounds into train/validation/test in designated order. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + Fraction of dataset put into training data. + frac_valid: float, optional (default 0.1) + Fraction of dataset put into validation data. + frac_test: float, optional (default 0.1) + Fraction of dataset put into test data. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[np.ndarray, np.ndarray, np.ndarray] + A tuple of train indices, valid indices, and test indices. + Each indices is a numpy array. 
""" + num_datapoints = len(dataset) + indices = np.arange(num_datapoints).tolist() + train_indices = [] + if self.valid_indices is None: + self.valid_indices = [] + if self.test_indices is None: + self.test_indices = [] + valid_test = list(self.valid_indices) + valid_test.extend(self.test_indices) + for indice in indices: + if indice not in valid_test: + train_indices.append(indice) + + return (train_indices, self.valid_indices, self.test_indices) + + +################################################################# +# Splitter for molecule datasets +################################################################# + + +class MolecularWeightSplitter(Splitter): + """ + Class for doing data splits by molecular weight. + + Note + ---- + This class requires RDKit to be installed. + """ def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """Splits on molecular weight. + + Splits internal compounds into train/validation/test using the MW + calculated by SMILES string. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[np.ndarray, np.ndarray, np.ndarray] + A tuple of train indices, valid indices, and test indices. + Each indices is a numpy array. 
""" - Splits internal compounds into train/validation/test using the MW calculated - by SMILES string. - """ + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) - if not seed is None: + if seed is not None: np.random.seed(seed) mws = [] - from rdkit import Chem for smiles in dataset.ids: mol = Chem.MolFromSmiles(smiles) mw = Chem.rdMolDescriptors.CalcExactMolWt(mol) @@ -595,31 +931,65 @@ class MolecularWeightSplitter(Splitter): mws = np.array(mws) sortidx = np.argsort(mws) - train_cutoff = frac_train * len(sortidx) - valid_cutoff = (frac_train + frac_valid) * len(sortidx) + train_cutoff = int(frac_train * len(sortidx)) + valid_cutoff = int((frac_train + frac_valid) * len(sortidx)) return (sortidx[:train_cutoff], sortidx[train_cutoff:valid_cutoff], sortidx[valid_cutoff:]) class MaxMinSplitter(Splitter): - """ + """Chemical diversity splitter. + Class for doing splits based on the MaxMin diversity algorithm. Intuitively, the test set is comprised of the most diverse compounds of the entire dataset. Furthermore, the validation set is comprised of diverse compounds under the test set. + + Note + ---- + This class requires RDKit to be installed. """ def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[List[int], List[int], List[int]]: """ - Splits internal compounds randomly into train/validation/test. + Splits internal compounds into train/validation/test using the MaxMin diversity algorithm. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. 
+ frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple of train indices, valid indices, and test indices. + Each indices is a list of integers. """ + try: + from rdkit import Chem, DataStructs + from rdkit.Chem import AllChem + from rdkit.SimDivFilters.rdSimDivPickers import MaxMinPicker + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) if seed is None: seed = random.randint(0, 2**30) @@ -630,24 +1000,18 @@ class MaxMinSplitter(Splitter): train_cutoff = int(frac_train * num_datapoints) valid_cutoff = int((frac_train + frac_valid) * num_datapoints) - num_train = train_cutoff num_valid = valid_cutoff - train_cutoff num_test = num_datapoints - valid_cutoff all_mols = [] - from rdkit import Chem for ind, smiles in enumerate(dataset.ids): all_mols.append(Chem.MolFromSmiles(smiles)) - from rdkit.Chem import AllChem fps = [AllChem.GetMorganFingerprintAsBitVect(x, 2, 1024) for x in all_mols] - from rdkit import DataStructs - def distance(i, j): return 1 - DataStructs.DiceSimilarity(fps[i], fps[j]) - from rdkit.SimDivFilters.rdSimDivPickers import MaxMinPicker picker = MaxMinPicker() testIndices = picker.LazyPick( distFunc=distance, @@ -676,197 +1040,198 @@ class MaxMinSplitter(Splitter): return sorted(list(trainSet)), sorted(list(validSet)), sorted(list(testSet)) -class RandomSplitter(Splitter): - """ - Class for doing random data splits. - """ +class ButinaSplitter(Splitter): + """Class for doing data splits based on the butina clustering of a bulk tanimoto + fingerprint matrix. 
- def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): - """ - Splits internal compounds randomly into train/validation/test. - """ - np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) - if not seed is None: - np.random.seed(seed) - num_datapoints = len(dataset) - train_cutoff = int(frac_train * num_datapoints) - valid_cutoff = int((frac_train + frac_valid) * num_datapoints) - shuffled = np.random.permutation(range(num_datapoints)) - return (shuffled[:train_cutoff], shuffled[train_cutoff:valid_cutoff], - shuffled[valid_cutoff:]) + Note + ---- + This class requires RDKit to be installed. + """ + def __init__(self, cutoff: float = 0.6): + """Create a ButinaSplitter. -class IndexSplitter(Splitter): - """ - Class for simple order based splits. + Parameters + ---------- + cutoff: float (default 0.6) + The cutoff value for tanimoto similarity. Molecules that are more similar + than this will tend to be put in the same dataset. """ - - def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + super(ButinaSplitter, self).__init__() + self.cutoff = cutoff + + def split( + self, + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None) -> Tuple[List[int], List[int], List]: """ - Splits internal compounds into train/validation/test in provided order. - """ - np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) - num_datapoints = len(dataset) - train_cutoff = int(frac_train * num_datapoints) - valid_cutoff = int((frac_train + frac_valid) * num_datapoints) - indices = range(num_datapoints) - return (indices[:train_cutoff], indices[train_cutoff:valid_cutoff], - indices[valid_cutoff:]) + Splits internal compounds into train and validation based on the butina + clustering algorithm. 
This splitting algorithm has an O(N^2) run time, where N + is the number of elements in the dataset. The dataset is expected to be a classification + dataset. + This algorithm is designed to generate validation data that are novel chemotypes. + Setting a small cutoff value will generate smaller, finer clusters of high similarity, + whereas setting a large cutoff value will generate larger, coarser clusters of low similarity. -class IndiceSplitter(Splitter): - """ - Class for splits based on input order. - """ + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split (not currently used). + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split (not currently used). + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split (not currently used). + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). - def __init__(self, verbose=False, valid_indices=None, test_indices=None): + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple of train indices, valid indices, and test indices. + Each indices is a list of integers and test indices is always an empty list. 
""" - Parameters - ----------- - valid_indices: list of int - indices of samples in the valid set - test_indices: list of int - indices of samples in the test set - """ - self.verbose = verbose - self.valid_indices = valid_indices - self.test_indices = test_indices + try: + from rdkit import Chem, DataStructs + from rdkit.Chem import AllChem + from rdkit.ML.Cluster import Butina + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + + logger.info("Performing butina clustering with cutoff of", self.cutoff) + mols = [] + for ind, smiles in enumerate(dataset.ids): + mols.append(Chem.MolFromSmiles(smiles)) + fps = [AllChem.GetMorganFingerprintAsBitVect(x, 2, 1024) for x in mols] - def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): - """ - Splits internal compounds into train/validation/test in designated order. - """ - num_datapoints = len(dataset) - indices = np.arange(num_datapoints).tolist() - train_indices = [] - if self.valid_indices is None: - self.valid_indices = [] - if self.test_indices is None: - self.test_indices = [] - valid_test = list(self.valid_indices) - valid_test.extend(self.test_indices) - for indice in indices: - if not indice in valid_test: - train_indices.append(indice) + # calcaulate scaffold sets + # (ytz): this is directly copypasta'd from Greg Landrum's clustering example. 
+ dists = [] + nfps = len(fps) + for i in range(1, nfps): + sims = DataStructs.BulkTanimotoSimilarity(fps[i], fps[:i]) + dists.extend([1 - x for x in sims]) + scaffold_sets = Butina.ClusterData( + dists, nfps, self.cutoff, isDistData=True) + scaffold_sets = sorted(scaffold_sets, key=lambda x: -len(x)) - return (train_indices, self.valid_indices, self.test_indices) + train_cutoff = frac_train * len(dataset) + valid_cutoff = (frac_train + frac_valid) * len(dataset) + train_inds: List[int] = [] + valid_inds: List[int] = [] + test_inds: List[int] = [] + logger.info("About to sort in scaffold sets") + for scaffold_set in scaffold_sets: + if len(train_inds) + len(scaffold_set) > train_cutoff: + if len(train_inds) + len(valid_inds) + len(scaffold_set) > valid_cutoff: + test_inds += scaffold_set + else: + valid_inds += scaffold_set + else: + train_inds += scaffold_set + return train_inds, valid_inds, test_inds -def ClusterFps(fps, cutoff=0.2): - # (ytz): this is directly copypasta'd from Greg Landrum's clustering example. - dists = [] - nfps = len(fps) - from rdkit import DataStructs - for i in range(1, nfps): - sims = DataStructs.BulkTanimotoSimilarity(fps[i], fps[:i]) - dists.extend([1 - x for x in sims]) - from rdkit.ML.Cluster import Butina - cs = Butina.ClusterData(dists, nfps, cutoff, isDistData=True) - return cs +def _generate_scaffold(smiles: str, include_chirality: bool = False) -> str: + """Compute the Bemis-Murcko scaffold for a SMILES string. -class ButinaSplitter(Splitter): - """ - Class for doing data splits based on the butina clustering of a bulk tanimoto - fingerprint matrix. - """ + Bemis-Murcko scaffolds are described in DOI: 10.1021/jm9602928. + They are essentially that part of the molecule consisting of + rings and the linker atoms between them. 
- def split(self, - dataset, - seed=None, - frac_train=None, - frac_valid=None, - frac_test=None, - log_every_n=1000, - cutoff=0.18): - """ - Splits internal compounds into train and validation based on the butina - clustering algorithm. This splitting algorithm has an O(N^2) run time, where N - is the number of elements in the dataset. The dataset is expected to be a classification - dataset. + Paramters + --------- + smiles: str + SMILES + include_chirality: bool, default False + Whether to include chirality in scaffolds or not. - This algorithm is designed to generate validation data that are novel chemotypes. + Returns + ------- + str + The MurckScaffold SMILES from the original SMILES - Note that this function entirely disregards the ratios for frac_train, frac_valid, - and frac_test. Furthermore, it does not generate a test set, only a train and valid set. + References + ---------- + .. [1] Bemis, Guy W., and Mark A. Murcko. "The properties of known drugs. + 1. Molecular frameworks." Journal of medicinal chemistry 39.15 (1996): 2887-2893. - Setting a small cutoff value will generate smaller, finer clusters of high similarity, - whereas setting a large cutoff value will generate larger, coarser clusters of low similarity. - """ - print("Performing butina clustering with cutoff of", cutoff) - mols = [] + Note + ---- + This function requires RDKit to be installed. 
+ """ + try: from rdkit import Chem - for ind, smiles in enumerate(dataset.ids): - mols.append(Chem.MolFromSmiles(smiles)) - n_mols = len(mols) - from rdkit.Chem import AllChem - fps = [AllChem.GetMorganFingerprintAsBitVect(x, 2, 1024) for x in mols] + from rdkit.Chem.Scaffolds.MurckoScaffold import MurckoScaffoldSmiles + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") - scaffold_sets = ClusterFps(fps, cutoff=cutoff) - scaffold_sets = sorted(scaffold_sets, key=lambda x: -len(x)) - - ys = dataset.y - valid_inds = [] - for c_idx, cluster in enumerate(scaffold_sets): - # for m_idx in cluster: - valid_inds.extend(cluster) - # continue until we find an active in all the tasks, otherwise we can't - # compute a meaningful AUC - # TODO (ytz): really, we want at least one active and inactive in both scenarios. - # TODO (Ytz): for regression tasks we'd stop after only one cluster. - active_populations = np.sum(ys[valid_inds], axis=0) - if np.all(active_populations): - print("# of actives per task in valid:", active_populations) - print("Total # of validation points:", len(valid_inds)) - break - - train_inds = list(itertools.chain.from_iterable(scaffold_sets[c_idx + 1:])) - test_inds = [] - - return train_inds, valid_inds, [] + mol = Chem.MolFromSmiles(smiles) + scaffold = MurckoScaffoldSmiles(mol=mol, includeChirality=include_chirality) + return scaffold class ScaffoldSplitter(Splitter): - """ - Class for doing data splits based on the scaffold of small molecules. + """Class for doing data splits based on the scaffold of small molecules. + + Note + ---- + This class requires RDKit to be installed. 
""" def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=1000): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = 1000 + ) -> Tuple[List[int], List[int], List[int]]: """ Splits internal compounds into train/validation/test by scaffold. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default 1000) + Controls the logger by dictating how often logger outputs + will be produced. + + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple of train indices, valid indices, and test indices. + Each indices is a list of integers. """ np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) 
scaffold_sets = self.generate_scaffolds(dataset) train_cutoff = frac_train * len(dataset) valid_cutoff = (frac_train + frac_valid) * len(dataset) - train_inds, valid_inds, test_inds = [], [], [] + train_inds: List[int] = [] + valid_inds: List[int] = [] + test_inds: List[int] = [] - log("About to sort in scaffold sets", self.verbose) + logger.info("About to sort in scaffold sets") for scaffold_set in scaffold_sets: if len(train_inds) + len(scaffold_set) > train_cutoff: if len(train_inds) + len(valid_inds) + len(scaffold_set) > valid_cutoff: @@ -877,18 +1242,31 @@ class ScaffoldSplitter(Splitter): train_inds += scaffold_set return train_inds, valid_inds, test_inds - def generate_scaffolds(self, dataset, log_every_n=1000): - """ - Returns all scaffolds from the dataset + def generate_scaffolds(self, dataset: Dataset, + log_every_n: int = 1000) -> List[List[int]]: + """Returns all scaffolds from the dataset. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + log_every_n: int, optional (default 1000) + Controls the logger by dictating how often logger outputs + will be produced. + + Returns + ------- + scaffold_sets: List[List[int]] + List of indices of each scaffold in the dataset. """ scaffolds = {} data_len = len(dataset) - log("About to generate scaffolds", self.verbose) + logger.info("About to generate scaffolds") for ind, smiles in enumerate(dataset.ids): if ind % log_every_n == 0: - log("Generating scaffold %d/%d" % (ind, data_len), self.verbose) - scaffold = generate_scaffold(smiles) + logger.info("Generating scaffold %d/%d" % (ind, data_len)) + scaffold = _generate_scaffold(smiles) if scaffold not in scaffolds: scaffolds[scaffold] = [ind] else: @@ -904,27 +1282,56 @@ class ScaffoldSplitter(Splitter): class FingerprintSplitter(Splitter): - """ - Class for doing data splits based on the fingerprints of small molecules - O(N**2) algorithm + """Class for doing data splits based on the fingerprints of small + molecules O(N**2) algorithm. 
+ + Note + ---- + This class requires RDKit to be installed. """ def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=1000): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[List[int], List[int], List[int]]: """ - Splits internal compounds into train/validation/test by fingerprint. + Splits internal compounds into train/validation/test by fingerprint. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple of train indices, valid indices, and test indices. + Each indices is a list of integers. """ + try: + from rdkit import Chem, DataStructs + from rdkit.Chem.Fingerprints import FingerprintMols + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + np.testing.assert_almost_equal(frac_train + frac_valid + frac_test, 1.) 
data_len = len(dataset) mols, fingerprints = [], [] train_inds, valid_inds, test_inds = [], [], [] - from rdkit import Chem - from rdkit.Chem.Fingerprints import FingerprintMols for ind, smiles in enumerate(dataset.ids): mol = Chem.MolFromSmiles(smiles, sanitize=False) mols.append(mol) @@ -932,7 +1339,6 @@ class FingerprintSplitter(Splitter): fingerprints.append(fp) distances = np.ones(shape=(data_len, data_len)) - from rdkit import DataStructs for i in range(data_len): for j in range(data_len): distances[i][j] = 1 - DataStructs.FingerprintSimilarity( @@ -990,84 +1396,56 @@ class FingerprintSplitter(Splitter): cur_distances[i] = new_dist -class SpecifiedSplitter(Splitter): - """ - Class that splits data according to user specification. - """ - - def __init__(self, input_file, split_field, verbose=False): - """Provide input information for splits.""" - raw_df = next(load_data([input_file], shard_size=None)) - self.splits = raw_df[split_field].values - self.verbose = verbose - - def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=1000): - """ - Splits internal compounds into train/validation/test by user-specification. 
- """ - train_inds, valid_inds, test_inds = [], [], [] - for ind, split in enumerate(self.splits): - split = split.lower() - if split == "train": - train_inds.append(ind) - elif split in ["valid", "validation"]: - valid_inds.append(ind) - elif split == "test": - test_inds.append(ind) - else: - raise ValueError("Missing required split information.") - return train_inds, valid_inds, test_inds - +################################################################# +# Not well supported splitters +################################################################# -class SpecifiedIndexSplitter(Splitter): - """ - Class that splits data according to user index specification - """ - def __init__(self, train_inds, valid_inds, test_inds, verbose=False): - """Provide input information for splits.""" - self.train_inds = train_inds - self.valid_inds = valid_inds - self.test_inds = test_inds - self.verbose = verbose - super(SpecifiedIndexSplitter, self).__init__(verbose) +class TimeSplitterPDBbind(Splitter): - def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=1000, - verbose=False): + def __init__(self, ids: Sequence[int], year_file: Optional[str] = None): """ - Splits internal compounds into train/validation/test by user-specification. 
+ Parameters + ---------- + ids: Sequence[int] + The PDB ids to be selected + year_file: str, optional (default None) + The filepath for the PDBBind year selection """ - return self.train_inds, self.valid_inds, self.test_inds - - -class TimeSplitterPDBbind(Splitter): - - def __init__(self, ids, year_file=None, verbose=False): self.ids = ids self.year_file = year_file - self.verbose = verbose def split(self, - dataset, - seed=None, - frac_train=.8, - frac_valid=.1, - frac_test=.1, - log_every_n=None): + dataset: Dataset, + frac_train: float = 0.8, + frac_valid: float = 0.1, + frac_test: float = 0.1, + seed: Optional[int] = None, + log_every_n: Optional[int] = None + ) -> Tuple[List[int], List[int], List[int]]: """ Splits protein-ligand pairs in PDBbind into train/validation/test in time order. + + Parameters + ---------- + dataset: Dataset + Dataset to be split. + frac_train: float, optional (default 0.8) + The fraction of data to be used for the training split. + frac_valid: float, optional (default 0.1) + The fraction of data to be used for the validation split. + frac_test: float, optional (default 0.1) + The fraction of data to be used for the test split. + seed: int, optional (default None) + Random seed to use. + log_every_n: int, optional (default None) + Log every n examples (not currently used). + + Returns + ------- + Tuple[List[int], List[int], List[int]] + A tuple of train indices, valid indices, and test indices. + Each indices is a list of integers. """ if self.year_file is None: try: diff --git a/deepchem/splits/task_splitter.py b/deepchem/splits/task_splitter.py index 6fe63ac6a2dfab1c063a86434ef30ce2e2e416df..d633629e74310557bcb7df2c5de55d4a432e4d8a 100644 --- a/deepchem/splits/task_splitter.py +++ b/deepchem/splits/task_splitter.py @@ -1,16 +1,8 @@ """ Contains an abstract base class that supports chemically aware data splits. 
""" -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import tempfile import numpy as np -from deepchem.utils import ScaffoldGenerator -from deepchem.utils.save import log from deepchem.data import NumpyDataset -from deepchem.utils.save import load_data from deepchem.splits import Splitter @@ -75,7 +67,6 @@ class TaskSplitter(Splitter): n_tasks = len(dataset.get_task_names()) n_train = int(np.round(frac_train * n_tasks)) n_valid = int(np.round(frac_valid * n_tasks)) - n_test = n_tasks - n_train - n_valid X, y, w, ids = dataset.X, dataset.y, dataset.w, dataset.ids diff --git a/deepchem/splits/test_specified_index_splitter.py b/deepchem/splits/test_specified_index_splitter.py deleted file mode 100644 index 02063d76f851fb5d97b62e898ab76f82399826c0..0000000000000000000000000000000000000000 --- a/deepchem/splits/test_specified_index_splitter.py +++ /dev/null @@ -1,28 +0,0 @@ -from unittest import TestCase - -import deepchem -import numpy as np -from sklearn.model_selection import train_test_split -from deepchem.splits import SpecifiedIndexSplitter - - -class TestSpecifiedIndexSplitter(TestCase): - - def create_dataset(self): - n_samples, n_features = 20, 10 - X = np.random.random(size=(n_samples, n_features)) - y = np.random.random(size=(n_samples, 1)) - return deepchem.data.NumpyDataset(X, y) - - def test_split(self): - ds = self.create_dataset() - indexes = list(range(len(ds))) - train, test = train_test_split(indexes) - train, valid = train_test_split(train) - - splitter = SpecifiedIndexSplitter(train, valid, test) - train_ds, valid_ds, test_ds = splitter.train_valid_test_split(ds) - - self.assertTrue(np.all(train_ds.X == ds.X[train])) - self.assertTrue(np.all(valid_ds.X == ds.X[valid])) - self.assertTrue(np.all(test_ds.X == ds.X[test])) diff --git a/deepchem/splits/test_scaffold_splitter.py b/deepchem/splits/tests/test_scaffold_splitter.py similarity index 91% rename from 
deepchem/splits/test_scaffold_splitter.py rename to deepchem/splits/tests/test_scaffold_splitter.py index fb06c6b591fd8c937bd480e8d704ce647284b9c1..3c8505b885b16f8e780b46cf6c8286fabc1626c7 100644 --- a/deepchem/splits/test_scaffold_splitter.py +++ b/deepchem/splits/tests/test_scaffold_splitter.py @@ -1,12 +1,10 @@ import unittest -from unittest import TestCase -import numpy as np import deepchem as dc from deepchem.splits.splitters import ScaffoldSplitter -class TestScaffoldSplitter(TestCase): +class TestScaffoldSplitter(unittest.TestCase): def test_scaffolds(self): tox21_tasks, tox21_datasets, transformers = \ diff --git a/deepchem/splits/tests/test_splitter.py b/deepchem/splits/tests/test_splitter.py index aae314ac4f1de29e9606c314123baf18756d539c..c6b1db01cfe72b0c2e4cef559344e8638e4096af 100644 --- a/deepchem/splits/tests/test_splitter.py +++ b/deepchem/splits/tests/test_splitter.py @@ -1,11 +1,7 @@ """ Tests for splitter objects. """ -__author__ = "Bharath Ramsundar, Aneesh Pappu" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import tempfile +import os import unittest import numpy as np import deepchem as dc @@ -13,13 +9,70 @@ from deepchem.data import NumpyDataset from deepchem.splits import IndexSplitter +def load_sparse_multitask_dataset(): + """Load sparse tox multitask data, sample dataset.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = [ + "task1", "task2", "task3", "task4", "task5", "task6", "task7", "task8", + "task9" + ] + input_file = os.path.join(current_dir, + "../../models/tests/sparse_multitask_example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + return loader.create_dataset(input_file) + + +def load_multitask_data(): + """Load example multitask data.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = [ + 
"task0", "task1", "task2", "task3", "task4", "task5", "task6", "task7", + "task8", "task9", "task10", "task11", "task12", "task13", "task14", + "task15", "task16" + ] + input_file = os.path.join(current_dir, + "../../models/tests/multitask_example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + return loader.create_dataset(input_file) + + +def load_solubility_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + return loader.create_dataset(input_file) + + +def load_butina_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["task"] + # task_type = "regression" + input_file = os.path.join(current_dir, + "../../models/tests/butina_example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + return loader.create_dataset(input_file) + + class TestSplitter(unittest.TestCase): """ Test some basic splitters. """ def test_random_group_split(self): - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() groups = [0, 4, 1, 2, 3, 7, 0, 3, 1, 0] # 0 1 2 3 4 5 6 7 8 9 @@ -48,7 +101,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask RandomSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() random_splitter = dc.splits.RandomSplitter() train_data, valid_data, test_data = \ random_splitter.train_valid_test_split( @@ -65,7 +118,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask IndexSplitter class. 
""" - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() random_splitter = dc.splits.IndexSplitter() train_data, valid_data, test_data = \ random_splitter.train_valid_test_split( @@ -86,7 +139,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask ScaffoldSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() scaffold_splitter = dc.splits.ScaffoldSplitter() train_data, valid_data, test_data = \ scaffold_splitter.train_valid_test_split( @@ -99,7 +152,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask Fingerprint class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() assert (len(solubility_dataset.X) == 10) scaffold_splitter = dc.splits.FingerprintSplitter() train_data, valid_data, test_data = \ @@ -116,7 +169,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask SingletaskStratifiedSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() stratified_splitter = dc.splits.ScaffoldSplitter() train_data, valid_data, test_data = \ stratified_splitter.train_valid_test_split( @@ -133,7 +186,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask MaxMinSplitter class. """ - solubility_dataset = dc.data.tests.load_butina_data() + solubility_dataset = load_butina_data() maxmin_splitter = dc.splits.MaxMinSplitter() train_data, valid_data, test_data = \ maxmin_splitter.train_valid_test_split( @@ -146,14 +199,14 @@ class TestSplitter(unittest.TestCase): """ Test singletask ButinaSplitter class. 
""" - solubility_dataset = dc.data.tests.load_butina_data() + solubility_dataset = load_butina_data() butina_splitter = dc.splits.ButinaSplitter() train_data, valid_data, test_data = \ butina_splitter.train_valid_test_split( solubility_dataset) - assert len(train_data) == 7 - assert len(valid_data) == 3 - assert len(test_data) == 0 + assert len(train_data) == 8 + assert len(valid_data) == 1 + assert len(test_data) == 1 def test_k_fold_splitter(self): """ @@ -177,7 +230,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask RandomSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() random_splitter = dc.splits.RandomSplitter() ids_set = set(solubility_dataset.ids) @@ -202,7 +255,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask IndexSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() index_splitter = dc.splits.IndexSplitter() ids_set = set(solubility_dataset.ids) @@ -232,7 +285,7 @@ class TestSplitter(unittest.TestCase): """ Test singletask ScaffoldSplitter class. """ - solubility_dataset = dc.data.tests.load_solubility_data() + solubility_dataset = load_solubility_data() scaffold_splitter = dc.splits.ScaffoldSplitter() ids_set = set(solubility_dataset.ids) @@ -265,21 +318,22 @@ class TestSplitter(unittest.TestCase): # Test singletask case. 
n_samples = 100 n_positives = 20 - n_features = 10 n_tasks = 1 - X = np.random.rand(n_samples, n_features) + X = np.ones(n_samples) y = np.zeros((n_samples, n_tasks)) y[:n_positives] = 1 w = np.ones((n_samples, n_tasks)) - ids = np.arange(n_samples) + dataset = dc.data.NumpyDataset(X, y, w) stratified_splitter = dc.splits.RandomStratifiedSplitter() - column_indices = stratified_splitter.get_task_split_indices( - y, w, frac_split=.5) + train, valid, test = stratified_splitter.split(dataset, 0.5, 0, 0.5) - split_index = column_indices[0] # The split index should partition dataset in half. - assert np.count_nonzero(y[:split_index]) == 10 + assert len(train) == 50 + assert len(valid) == 0 + assert len(test) == 50 + assert np.count_nonzero(y[train]) == 10 + assert np.count_nonzero(y[test]) == 10 def test_singletask_stratified_column_indices_mask(self): """ @@ -288,79 +342,74 @@ class TestSplitter(unittest.TestCase): # Test singletask case. n_samples = 100 n_positives = 20 - n_features = 10 n_tasks = 1 # Test case where some weights are zero (i.e. masked) - X = np.random.rand(n_samples, n_features) + X = np.ones(n_samples) y = np.zeros((n_samples, n_tasks)) y[:n_positives] = 1 w = np.ones((n_samples, n_tasks)) # Set half the positives to have zero weight w[:n_positives // 2] = 0 - ids = np.arange(n_samples) + dataset = dc.data.NumpyDataset(X, y, w) stratified_splitter = dc.splits.RandomStratifiedSplitter() - column_indices = stratified_splitter.get_task_split_indices( - y, w, frac_split=.5) + train, valid, test = stratified_splitter.split(dataset, 0.5, 0, 0.5) - split_index = column_indices[0] # There are 10 nonzero actives. # The split index should partition this into half, so expect 5 w_present = (w != 0) y_present = y * w_present - assert np.count_nonzero(y_present[:split_index]) == 5 + assert np.count_nonzero(y_present[train]) == 5 def test_multitask_stratified_column_indices(self): """ Test RandomStratifiedSplitter split on multitask dataset. 
""" n_samples = 100 - n_features = 10 n_tasks = 10 - X = np.random.rand(n_samples, n_features) p = .05 # proportion actives + X = np.ones(n_samples) y = np.random.binomial(1, p, size=(n_samples, n_tasks)) w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w) stratified_splitter = dc.splits.RandomStratifiedSplitter() - split_indices = stratified_splitter.get_task_split_indices( - y, w, frac_split=.5) + train, valid, test = stratified_splitter.split(dataset, 0.5, 0, 0.5) for task in range(n_tasks): - split_index = split_indices[task] task_actives = np.count_nonzero(y[:, task]) - # The split index should partition dataset in half. - assert np.count_nonzero(y[:split_index, task]) == int(task_actives / 2) + # The split index should partition the positives for each task roughly in half. + target = task_actives / 2 + assert target - 2 <= np.count_nonzero(y[train, task]) <= target + 2 def test_multitask_stratified_column_indices_masked(self): """ Test RandomStratifiedSplitter split on multitask dataset. """ n_samples = 200 - n_features = 10 n_tasks = 10 - X = np.random.rand(n_samples, n_features) p = .05 # proportion actives + X = np.ones(n_samples) y = np.random.binomial(1, p, size=(n_samples, n_tasks)) w = np.ones((n_samples, n_tasks)) # Mask half the examples w[:n_samples // 2] = 0 + dataset = dc.data.NumpyDataset(X, y, w) stratified_splitter = dc.splits.RandomStratifiedSplitter() - split_indices = stratified_splitter.get_task_split_indices( - y, w, frac_split=.5) + train, valid, test = stratified_splitter.split(dataset, 0.5, 0, 0.5) w_present = (w != 0) y_present = y * w_present for task in range(n_tasks): - split_index = split_indices[task] task_actives = np.count_nonzero(y_present[:, task]) + target = task_actives / 2 # The split index should partition dataset in half. 
- assert np.count_nonzero(y_present[:split_index, task]) == int( - task_actives / 2) + assert target - 1 <= np.count_nonzero( + y_present[train, task]) <= target + 1 - def test_singletask_stratified_split(self): + def test_random_stratified_split(self): """ Test RandomStratifiedSplitter on a singletask split. """ @@ -379,7 +428,10 @@ class TestSplitter(unittest.TestCase): dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) stratified_splitter = dc.splits.RandomStratifiedSplitter() - dataset_1, dataset_2 = stratified_splitter.split(dataset, frac_split=.5) + dataset_1, dataset_2 = stratified_splitter.train_test_split( + dataset, frac_train=.5) + print(dataset_1.get_shape()) + print(dataset_2.get_shape()) # Should have split cleanly in half (picked random seed to ensure this) assert len(dataset_1) == 10 @@ -426,7 +478,6 @@ class TestSplitter(unittest.TestCase): n_samples = 100 n_positives = 20 n_features = 10 - n_tasks = 1 X = np.random.rand(n_samples, n_features) y = np.zeros(n_samples) @@ -441,6 +492,7 @@ class TestSplitter(unittest.TestCase): K = 5 fold_datasets = stratified_splitter.k_fold_split(dataset, K) + fold_datasets = [f[1] for f in fold_datasets] for fold in range(K): fold_dataset = fold_datasets[fold] @@ -469,7 +521,7 @@ class TestSplitter(unittest.TestCase): """ Test multitask RandomSplitter class. """ - multitask_dataset = dc.data.tests.load_multitask_data() + multitask_dataset = load_multitask_data() random_splitter = dc.splits.RandomSplitter() train_data, valid_data, test_data = \ random_splitter.train_valid_test_split( @@ -482,7 +534,7 @@ class TestSplitter(unittest.TestCase): """ Test multitask IndexSplitter class. """ - multitask_dataset = dc.data.tests.load_multitask_data() + multitask_dataset = load_multitask_data() index_splitter = dc.splits.IndexSplitter() train_data, valid_data, test_data = \ index_splitter.train_valid_test_split( @@ -495,7 +547,7 @@ class TestSplitter(unittest.TestCase): """ Test multitask ScaffoldSplitter class. 
""" - multitask_dataset = dc.data.tests.load_multitask_data() + multitask_dataset = load_multitask_data() scaffold_splitter = dc.splits.ScaffoldSplitter() train_data, valid_data, test_data = \ scaffold_splitter.train_valid_test_split( @@ -504,30 +556,10 @@ class TestSplitter(unittest.TestCase): assert len(valid_data) == 1 assert len(test_data) == 1 - def test_stratified_multitask_split(self): - """ - Test multitask RandomStratifiedSplitter class - """ - # sparsity is determined by number of w weights that are 0 for a given - # task structure of w np array is such that each row corresponds to a - # sample. The loaded sparse dataset has many rows with only zeros - sparse_dataset = dc.data.tests.load_sparse_multitask_dataset() - - stratified_splitter = dc.splits.RandomStratifiedSplitter() - datasets = stratified_splitter.train_valid_test_split( - sparse_dataset, frac_train=0.8, frac_valid=0.1, frac_test=0.1) - train_data, valid_data, test_data = datasets - - for dataset_index, dataset in enumerate(datasets): - w = dataset.w - # verify that there are no rows (samples) in weights matrix w - # that have no hits. 
- assert len(np.where(~w.any(axis=1))[0]) == 0 - - def test_indice_split(self): + def test_specified_split(self): - solubility_dataset = dc.data.tests.load_solubility_data() - random_splitter = dc.splits.IndiceSplitter( + solubility_dataset = load_solubility_data() + random_splitter = dc.splits.SpecifiedSplitter( valid_indices=[7], test_indices=[8]) train_data, valid_data, test_data = \ random_splitter.split( @@ -538,7 +570,7 @@ class TestSplitter(unittest.TestCase): def test_random_seed(self): """Test that splitters use the random seed correctly.""" - dataset = dc.data.tests.load_solubility_data() + dataset = load_solubility_data() splitter = dc.splits.RandomSplitter() train1, valid1, test1 = splitter.train_valid_test_split(dataset, seed=1) train2, valid2, test2 = splitter.train_valid_test_split(dataset, seed=2) diff --git a/deepchem/splits/tests/test_task_splitter.py b/deepchem/splits/tests/test_task_splitter.py index c975a4e8a76e08121f8006fe89562d4278c09daa..0ab038bcf3dd376dcef2bfbb1d99e6edd694f6f2 100644 --- a/deepchem/splits/tests/test_task_splitter.py +++ b/deepchem/splits/tests/test_task_splitter.py @@ -1,12 +1,7 @@ """ Tests for splitter objects. 
""" -__author__ = "Bharath Ramsundar, Aneesh Pappu" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - import unittest -import tempfile import numpy as np import deepchem as dc diff --git a/deepchem/trans/__init__.py b/deepchem/trans/__init__.py index 2210420f6727f151e72244a18122110d54dfff18..772027f6bceb1fb4468b2e412d4bc280c55bd46f 100644 --- a/deepchem/trans/__init__.py +++ b/deepchem/trans/__init__.py @@ -1,8 +1,11 @@ """ Gathers all transformers in one place for convenient imports """ +# flake8: noqa + from deepchem.trans.transformers import undo_transforms from deepchem.trans.transformers import undo_grad_transforms +from deepchem.trans.transformers import Transformer from deepchem.trans.transformers import LogTransformer from deepchem.trans.transformers import ClippingTransformer from deepchem.trans.transformers import NormalizationTransformer @@ -14,3 +17,8 @@ from deepchem.trans.transformers import IRVTransformer from deepchem.trans.transformers import DAGTransformer from deepchem.trans.transformers import ANITransformer from deepchem.trans.transformers import MinMaxTransformer +from deepchem.trans.transformers import FeaturizationTransformer +from deepchem.trans.transformers import ImageTransformer +from deepchem.trans.transformers import DataTransforms +from deepchem.trans.transformers import Transformer +from deepchem.trans.duplicate import DuplicateBalancingTransformer diff --git a/deepchem/trans/duplicate.py b/deepchem/trans/duplicate.py new file mode 100644 index 0000000000000000000000000000000000000000..1afb3582ebaa623ebc244771e6faafbeb304a960 --- /dev/null +++ b/deepchem/trans/duplicate.py @@ -0,0 +1,172 @@ +import logging +import numpy as np +from typing import Tuple +from deepchem.data import Dataset +from deepchem.trans.transformers import Transformer + +logger = logging.getLogger(__name__) + + +class DuplicateBalancingTransformer(Transformer): + """Balance binary or multiclass datasets by duplicating rarer class 
samples.
+
+  This class balances a dataset by duplicating samples of the rarer class so
+  that the sum of all example weights from all classes is the same. (Up to
+  integer rounding of course). This can be useful when you're working on an
+  imbalanced dataset where there are far fewer examples of some classes than
+  others.
+
+  This class differs from `BalancingTransformer` in that it actually
+  duplicates rarer class samples rather than just increasing their sample
+  weights. This may be more friendly for models that are numerically fragile
+  and can't handle imbalanced example weights.
+
+  Examples
+  --------
+  Here's an example for a binary dataset.
+
+  >>> n_samples = 10
+  >>> n_features = 3
+  >>> n_tasks = 1
+  >>> n_classes = 2
+  >>> import deepchem as dc
+  >>> import numpy as np
+  >>> ids = np.arange(n_samples)
+  >>> X = np.random.rand(n_samples, n_features)
+  >>> y = np.random.randint(n_classes, size=(n_samples, n_tasks))
+  >>> w = np.ones((n_samples, n_tasks))
+  >>> dataset = dc.data.NumpyDataset(X, y, w, ids)
+  >>> transformer = dc.trans.DuplicateBalancingTransformer(dataset=dataset)
+  >>> dataset = transformer.transform(dataset)
+
+  And here's a multiclass dataset example.
+
+  >>> n_samples = 50
+  >>> n_features = 3
+  >>> n_tasks = 1
+  >>> n_classes = 5
+  >>> ids = np.arange(n_samples)
+  >>> X = np.random.rand(n_samples, n_features)
+  >>> y = np.random.randint(n_classes, size=(n_samples, n_tasks))
+  >>> w = np.ones((n_samples, n_tasks))
+  >>> dataset = dc.data.NumpyDataset(X, y, w, ids)
+  >>> transformer = dc.trans.DuplicateBalancingTransformer(dataset=dataset)
+  >>> dataset = transformer.transform(dataset)
+
+  See Also
+  --------
+  deepchem.trans.BalancingTransformer: Balance by changing sample weights.
+
+  Note
+  ----
+  This transformer is only well-defined for singletask datasets. (Since
+  examples are actually duplicated, there's no meaningful way to duplicate
+  across multiple tasks in a way that preserves the balance.)
+
+  This transformer is only meaningful for classification datasets where `y`
+  takes on a limited set of values. This class transforms all of `X`, `y`,
+  `w`, `ids`.
+
+  Raises
+  ------
+  `ValueError` if the provided dataset is multitask.
+  """
+
+  def __init__(self, dataset: Dataset):
+    super(DuplicateBalancingTransformer, self).__init__(
+        transform_X=True,
+        transform_y=True,
+        transform_w=True,
+        transform_ids=True,
+        dataset=dataset)
+
+    if len(dataset.get_task_names()) > 1:
+      raise ValueError(
+          "This transformation is only defined for singletask datasets.")
+
+    # Get the labels/weights
+    y = dataset.y
+    w = dataset.w
+    # Normalize shapes
+    if len(y.shape) == 1:
+      y = np.reshape(y, (len(y), 1))
+    if len(w.shape) == 1:
+      w = np.reshape(w, (len(w), 1))
+    if len(y.shape) != 2:
+      raise ValueError("y must be of shape (N,) or (N, n_tasks)")
+    if len(w.shape) != 2:
+      raise ValueError("w must be of shape (N,) or (N, n_tasks)")
+    self.classes = sorted(np.unique(y))
+    # Remove labels with zero weights
+    y = y[w != 0]
+    class_weights = []
+    # Note that we may have 0 elements of a given class since we remove those
+    # labels with zero weight.
+    for c in self.classes:
+      # this works because y is 1D
+      c_weight = np.sum(w[y == c])
+      class_weights.append(c_weight)
+    weight_largest = max(class_weights)
+    # This is the right ratio since int(N/num_c) * num_c \approx N
+    # for all classes
+    duplication_ratio = [
+        int(weight_largest / float(c_weight)) if c_weight > 0 else 0
+        for c_weight in class_weights
+    ]
+    self.duplication_ratio = duplication_ratio
+
+  def transform_array(
+      self, X: np.ndarray, y: np.ndarray, w: np.ndarray,
+      ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+    """Transform the data in a set of (X, y, w, id) arrays.
+
+    Parameters
+    ----------
+    X: np.ndarray
+      Array of features
+    y: np.ndarray
+      Array of labels
+    w: np.ndarray
+      Array of weights.
+ ids: np.ndarray + Array of identifiers + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idtrans: np.ndarray + Transformed array of identifiers + """ + if not (len(y.shape) == 1 or (len(y.shape) == 2 and y.shape[1] == 1)): + raise ValueError("y must be of shape (N,) or (N, 1)") + if not (len(w.shape) == 1 or (len(w.shape) == 2 and w.shape[1] == 1)): + raise ValueError("w must be of shape (N,) or (N, 1)") + # Flattening is safe because of shape check above + y = y.flatten() + w = w.flatten() + X_dups, y_dups, w_dups, ids_dups = [], [], [], [] + for i, c in enumerate(self.classes): + duplication_ratio = self.duplication_ratio[i] + c_inds = (y == c) + X_c = X[c_inds] + y_c = y[c_inds] + w_c = w[c_inds] + ids_c = ids[c_inds] + X_c_dup = np.repeat(X_c, duplication_ratio, axis=0) + y_c_dup = np.repeat(y_c, duplication_ratio, axis=0) + w_c_dup = np.repeat(w_c, duplication_ratio, axis=0) + ids_c_dup = np.repeat(ids_c, duplication_ratio, axis=0) + X_dups.append(X_c_dup) + y_dups.append(y_c_dup) + w_dups.append(w_c_dup) + ids_dups.append(ids_c_dup) + Xtrans = np.concatenate(X_dups, axis=0) + ytrans = np.concatenate(y_dups, axis=0) + wtrans = np.concatenate(w_dups, axis=0) + idstrans = np.concatenate(ids_dups, axis=0) + return (Xtrans, ytrans, wtrans, idstrans) diff --git a/deepchem/trans/tests/test_DAG.py b/deepchem/trans/tests/test_DAG.py new file mode 100644 index 0000000000000000000000000000000000000000..e1c7c545650706873811f5c36a8fb6720eb6f10b --- /dev/null +++ b/deepchem/trans/tests/test_DAG.py @@ -0,0 +1,25 @@ +import os +import numpy as np + +import deepchem as dc + + +def test_DAG_transformer(): + """Tests the DAG transformer.""" + np.random.seed(123) + + # Load mini log-solubility dataset. 
+ current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.ConvMolFeaturizer() + tasks = ["outcome"] + input_file = os.path.join(current_dir, + "../../models/tests/example_regression.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + dataset = loader.create_dataset(input_file) + transformer = dc.trans.DAGTransformer(max_atoms=50) + dataset = transformer.transform(dataset) + # The transformer generates n DAGs for a molecule with n + # atoms. These are denoted the "parents" + for idm, mol in enumerate(dataset.X): + assert dataset.X[idm].get_num_atoms() == len(dataset.X[idm].parents) diff --git a/deepchem/trans/tests/test_IRV.py b/deepchem/trans/tests/test_IRV.py new file mode 100644 index 0000000000000000000000000000000000000000..2f59bf983cc7afa4c73b477006755465d2ac08df --- /dev/null +++ b/deepchem/trans/tests/test_IRV.py @@ -0,0 +1,28 @@ +import deepchem as dc +import numpy as np + + +def test_IRV_transformer(): + n_features = 128 + n_samples = 20 + test_samples = 5 + n_tasks = 2 + X = np.random.randint(2, size=(n_samples, n_features)) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids=None) + X_test = np.random.randint(2, size=(test_samples, n_features)) + y_test = np.zeros((test_samples, n_tasks)) + w_test = np.ones((test_samples, n_tasks)) + test_dataset = dc.data.NumpyDataset(X_test, y_test, w_test, ids=None) + sims = np.sum( + X_test[0, :] * X, axis=1, dtype=float) / np.sum( + np.sign(X_test[0, :] + X), axis=1, dtype=float) + sims = sorted(sims, reverse=True) + IRV_transformer = dc.trans.IRVTransformer(10, n_tasks, dataset) + test_dataset_trans = IRV_transformer.transform(test_dataset) + dataset_trans = IRV_transformer.transform(dataset) + assert test_dataset_trans.X.shape == (test_samples, 20 * n_tasks) + assert np.allclose(test_dataset_trans.X[0, :10], sims[:10]) + assert np.allclose(test_dataset_trans.X[0, 10:20], [0] * 
10) + assert not np.isclose(dataset_trans.X[0, 0], 1.) diff --git a/deepchem/trans/tests/test_balancing.py b/deepchem/trans/tests/test_balancing.py new file mode 100644 index 0000000000000000000000000000000000000000..b8f352e1a77f00b175398c0b3a1d2dd19a6fca63 --- /dev/null +++ b/deepchem/trans/tests/test_balancing.py @@ -0,0 +1,188 @@ +import itertools +import tempfile + +import numpy as np + +import deepchem as dc + + +def test_binary_1d(): + """Test balancing transformer on single-task dataset without explicit task dimension.""" + n_samples = 20 + n_features = 3 + n_classes = 2 + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(n_classes, size=(n_samples,)) + w = np.ones((n_samples,)) + dataset = dc.data.NumpyDataset(X, y, w) + + balancing_transformer = dc.trans.BalancingTransformer(dataset=dataset) + dataset = balancing_transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check ids are unchanged. 
+ for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a w transformer + np.testing.assert_allclose(X, X_t) + # Check y is unchanged since this is a w transformer + np.testing.assert_allclose(y, y_t) + y_task = y_t + w_task = w_t + w_orig_task = w + # Assert that entries with zero weight retain zero weight + np.testing.assert_allclose(w_task[w_orig_task == 0], + np.zeros_like(w_task[w_orig_task == 0])) + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) + + +def test_binary_singletask(): + """Test balancing transformer on single-task dataset.""" + n_samples = 20 + n_features = 3 + n_tasks = 1 + n_classes = 2 + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(n_classes, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w) + + balancing_transformer = dc.trans.BalancingTransformer(dataset=dataset) + dataset = balancing_transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check ids are unchanged. 
+ for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a w transformer + np.testing.assert_allclose(X, X_t) + # Check y is unchanged since this is a w transformer + np.testing.assert_allclose(y, y_t) + for ind, task in enumerate(dataset.get_task_names()): + y_task = y_t[:, ind] + w_task = w_t[:, ind] + w_orig_task = w[:, ind] + # Assert that entries with zero weight retain zero weight + np.testing.assert_allclose(w_task[w_orig_task == 0], + np.zeros_like(w_task[w_orig_task == 0])) + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) + + +def test_binary_multitask(): + """Test balancing transformer on multitask dataset.""" + n_samples = 10 + n_features = 3 + n_tasks = 5 + n_classes = 2 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(n_classes, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + multitask_dataset = dc.data.NumpyDataset(X, y, w) + balancing_transformer = dc.trans.BalancingTransformer( + dataset=multitask_dataset) + multitask_dataset = balancing_transformer.transform(multitask_dataset) + X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y, + multitask_dataset.w, multitask_dataset.ids) + # Check ids are unchanged. 
+ for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a w transformer + np.testing.assert_allclose(X, X_t) + # Check y is unchanged since this is a w transformer + np.testing.assert_allclose(y, y_t) + for ind, task in enumerate(multitask_dataset.get_task_names()): + y_task = y_t[:, ind] + w_task = w_t[:, ind] + w_orig_task = w[:, ind] + # Assert that entries with zero weight retain zero weight + np.testing.assert_allclose(w_task[w_orig_task == 0], + np.zeros_like(w_task[w_orig_task == 0])) + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) + + +def test_multiclass_singletask(): + """Test balancing transformer on single-task dataset.""" + n_samples = 50 + n_features = 3 + n_tasks = 1 + n_classes = 5 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(n_classes, size=(n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w) + + balancing_transformer = dc.trans.BalancingTransformer(dataset=dataset) + dataset = balancing_transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check ids are unchanged. 
+ for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a w transformer + np.testing.assert_allclose(X, X_t) + # Check y is unchanged since this is a w transformer + np.testing.assert_allclose(y, y_t) + for ind, task in enumerate(dataset.get_task_names()): + y_task = y_t[:, ind] + w_task = w_t[:, ind] + # Check that sum of 0s equals sum of 1s in transformed for each task + for i, j in itertools.product(range(n_classes), range(n_classes)): + if i == j: + continue + assert np.isclose( + np.sum(w_task[y_task == i]), np.sum(w_task[y_task == j])) + + +def test_transform_to_directory(): + """Test that output can be written to a directory.""" + n_samples = 20 + n_features = 3 + n_classes = 2 + np.random.seed(123) + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features) + y = np.random.randint(n_classes, size=(n_samples,)) + w = np.ones((n_samples,)) + dataset = dc.data.NumpyDataset(X, y, w) + + balancing_transformer = dc.trans.BalancingTransformer(dataset=dataset) + with tempfile.TemporaryDirectory() as tmpdirname: + dataset = balancing_transformer.transform(dataset, out_dir=tmpdirname) + balanced_dataset = dc.data.DiskDataset(tmpdirname) + X_t, y_t, w_t, ids_t = (balanced_dataset.X, balanced_dataset.y, + balanced_dataset.w, balanced_dataset.ids) + # Check ids are unchanged. 
+ for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a w transformer + np.testing.assert_allclose(X, X_t) + # Check y is unchanged since this is a w transformer + np.testing.assert_allclose(y, y_t) + y_task = y_t + w_task = w_t + w_orig_task = w + # Assert that entries with zero weight retain zero weight + np.testing.assert_allclose(w_task[w_orig_task == 0], + np.zeros_like(w_task[w_orig_task == 0])) + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) + + +def test_array_shapes(): + """Test BalancingTransformer when y and w have different shapes.""" + n_samples = 20 + X = np.random.rand(n_samples, 5) + y = np.random.randint(2, size=n_samples) + w = np.ones((n_samples, 1)) + dataset = dc.data.NumpyDataset(X, y, w) + transformer = dc.trans.BalancingTransformer(dataset) + Xt, yt, wt, ids = transformer.transform_array(X, y, w, dataset.ids) + sum0 = np.sum(wt[np.where(y == 0)]) + sum1 = np.sum(wt[np.where(y == 1)]) + assert np.isclose(sum0, sum1) diff --git a/deepchem/trans/tests/test_cdf_transform.py b/deepchem/trans/tests/test_cdf_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..33dff3abd91be050fd5055970748bc85314c5092 --- /dev/null +++ b/deepchem/trans/tests/test_cdf_transform.py @@ -0,0 +1,88 @@ +import os +import numpy as np + +import deepchem as dc + + +def load_gaussian_cdf_data(): + """Load example with numbers sampled from Gaussian normal distribution. 
+ Each feature and task is a column of values that is sampled + from a normal distribution of mean 0, stdev 1.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + features = ["feat0", "feat1"] + featurizer = dc.feat.UserDefinedFeaturizer(features) + tasks = ["task0", "task1"] + input_file = os.path.join(current_dir, + "../../models/tests/gaussian_cdf_example.csv") + loader = dc.data.UserCSVLoader( + tasks=tasks, featurizer=featurizer, id_field="id") + return loader.create_dataset(input_file) + + +def test_cdf_X_transformer(): + """Test CDF transformer on Gaussian normal dataset.""" + target = np.array(np.transpose(np.linspace(0., 1., 1001))) + target = np.transpose(np.array(np.append([target], [target], axis=0))) + gaussian_dataset = load_gaussian_cdf_data() + bins = 1001 + cdf_transformer = dc.trans.CDFTransformer( + transform_X=True, dataset=gaussian_dataset, bins=bins) + _, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, + gaussian_dataset.ids) + gaussian_dataset = cdf_transformer.transform(gaussian_dataset) + X_t, y_t, w_t, ids_t = (gaussian_dataset.X, gaussian_dataset.y, + gaussian_dataset.w, gaussian_dataset.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check y is unchanged since this is an X transformer + np.testing.assert_allclose(y, y_t) + # Check w is unchanged since this is an X transformer + np.testing.assert_allclose(w, w_t) + # Check X is now holding the proper values when sorted. 
+ sorted = np.sort(X_t, axis=0) + np.testing.assert_allclose(sorted, target) + + +def test_cdf_1d_y_transformer(): + """Test on a synthetic dataset we sample with 1d y.""" + N = 10 + n_feat = 5 + n_bins = 100 + X = np.random.normal(size=(N, n_feat)) + y = np.random.normal(size=(N,)) + dataset = dc.data.NumpyDataset(X, y) + cdftrans = dc.trans.CDFTransformer( + transform_y=True, dataset=dataset, bins=n_bins) + dataset = cdftrans.transform(dataset) + + +def test_cdf_y_transformer(): + """Test CDF transformer on Gaussian normal dataset.""" + target = np.array(np.transpose(np.linspace(0., 1., 1001))) + target = np.transpose(np.array(np.append([target], [target], axis=0))) + gaussian_dataset = load_gaussian_cdf_data() + bins = 1001 + cdf_transformer = dc.trans.CDFTransformer( + transform_y=True, dataset=gaussian_dataset, bins=bins) + X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, + gaussian_dataset.ids) + gaussian_dataset = cdf_transformer.transform(gaussian_dataset, bins=bins) + X_t, y_t, w_t, ids_t = (gaussian_dataset.X, gaussian_dataset.y, + gaussian_dataset.w, gaussian_dataset.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is an y transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is an y transformer + np.testing.assert_allclose(w, w_t) + # Check y is now holding the proper values when sorted. + sorted = np.sort(y_t, axis=0) + np.testing.assert_allclose(sorted, target) + + # Check that untransform does the right thing. 
+ y_restored = cdf_transformer.untransform(y_t) + assert np.max(y_restored - y) < 1e-5 diff --git a/deepchem/trans/tests/test_clipping.py b/deepchem/trans/tests/test_clipping.py new file mode 100644 index 0000000000000000000000000000000000000000..0a9dd5772d79173bee5e84139dc1a9162032e684 --- /dev/null +++ b/deepchem/trans/tests/test_clipping.py @@ -0,0 +1,56 @@ +import deepchem as dc +import numpy as np + + +def test_clipping_X_transformer(): + """Test clipping transformer on X of singletask dataset.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + ids = np.arange(n_samples) + X = np.ones((n_samples, n_features)) + target = 5. * X + X *= 6. + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + transformer = dc.trans.ClippingTransformer(transform_X=True, x_max=5.) + clipped_dataset = transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (clipped_dataset.X, clipped_dataset.y, + clipped_dataset.w, clipped_dataset.ids) + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check y is unchanged since this is an X transformer + np.testing.assert_allclose(y, y_t) + # Check w is unchanged since this is an X transformer + np.testing.assert_allclose(w, w_t) + # Check X is now holding the proper values when sorted. + np.testing.assert_allclose(X_t, target) + + +def test_clipping_y_transformer(): + """Test clipping transformer on y of singletask dataset.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + ids = np.arange(n_samples) + X = np.zeros((n_samples, n_features)) + y = np.ones((n_samples, n_tasks)) + target = 5. * y + y *= 6. + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + transformer = dc.trans.ClippingTransformer(transform_y=True, y_max=5.) 
+ clipped_dataset = transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (clipped_dataset.X, clipped_dataset.y, + clipped_dataset.w, clipped_dataset.ids) + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a y transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is a y transformer + np.testing.assert_allclose(w, w_t) + # Check y is now holding the proper values when sorted. + np.testing.assert_allclose(y_t, target) diff --git a/deepchem/trans/tests/test_coulomb.py b/deepchem/trans/tests/test_coulomb.py new file mode 100644 index 0000000000000000000000000000000000000000..9d7116cd2ec60d16438c77e072aefe90377704e4 --- /dev/null +++ b/deepchem/trans/tests/test_coulomb.py @@ -0,0 +1,18 @@ +import numpy as np + +import deepchem as dc + + +def test_coulomb_fit_transformer(): + """Test coulomb fit transformer on singletask dataset.""" + n_samples = 10 + n_features = 3 + n_tasks = 1 + ids = np.arange(n_samples) + X = np.random.rand(n_samples, n_features, n_features) + y = np.zeros((n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w, ids) + fit_transformer = dc.trans.CoulombFitTransformer(dataset) + X_t = fit_transformer.X_transform(dataset.X) + assert len(X_t.shape) == 2 diff --git a/deepchem/trans/tests/test_data_transforms.py b/deepchem/trans/tests/test_data_transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..4d9526f68fde38b8100aa3bcc150ee24afc98296 --- /dev/null +++ b/deepchem/trans/tests/test_data_transforms.py @@ -0,0 +1,147 @@ +""" +Tests for transformer objects. 
+""" +import os +import unittest +import numpy as np +import scipy.ndimage + +import deepchem as dc +from deepchem.trans.transformers import DataTransforms + + +class TestDataTransforms(unittest.TestCase): + """ + Test DataTransforms for images + """ + + def setUp(self): + """ + init to load the MNIST data for DataTransforms Tests + """ + import tensorflow as tf + super(TestDataTransforms, self).setUp() + self.current_dir = os.path.dirname(os.path.abspath(__file__)) + (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data() + train = dc.data.NumpyDataset(x_train, y_train) + # extract only the images (no need of the labels) + data = (train.X)[0] + # reshaping the vector to image + data = np.reshape(data, (28, 28)) + self.d = data + + def test_blurring(self): + # Check Blurring + dt = DataTransforms(self.d) + blurred = dt.gaussian_blur(sigma=1.5) + check_blur = scipy.ndimage.gaussian_filter(self.d, 1.5) + assert np.allclose(check_blur, blurred) + + def test_center_crop(self): + # Check center crop + dt = DataTransforms(self.d) + x_crop = 50 + y_crop = 50 + crop = dt.center_crop(x_crop, y_crop) + y = self.d.shape[0] + x = self.d.shape[1] + x_start = x // 2 - (x_crop // 2) + y_start = y // 2 - (y_crop // 2) + check_crop = self.d[y_start:y_start + y_crop, x_start:x_start + x_crop] + assert np.allclose(check_crop, crop) + + def test_crop(self): + # Check crop + dt = DataTransforms(self.d) + crop = dt.crop(0, 10, 0, 10) + y = self.d.shape[0] + x = self.d.shape[1] + check_crop = self.d[10:y - 10, 0:x - 0] + assert np.allclose(crop, check_crop) + + def test_convert2gray(self): + # Check convert2gray + dt = DataTransforms(self.d) + gray = dt.convert2gray() + check_gray = np.dot(self.d[..., :3], [0.2989, 0.5870, 0.1140]) + assert np.allclose(check_gray, gray) + + def test_rotation(self): + # Check rotation + dt = DataTransforms(self.d) + angles = [0, 5, 10, 90] + for ang in angles: + rotate = dt.rotate(ang) + check_rotate = scipy.ndimage.rotate(self.d, ang) 
+ assert np.allclose(rotate, check_rotate) + + # Some more test cases for flip + rotate = dt.rotate(-90) + check_rotate = scipy.ndimage.rotate(self.d, 270) + assert np.allclose(rotate, check_rotate) + + def test_flipping(self): + # Check flip + dt = DataTransforms(self.d) + flip_lr = dt.flip(direction="lr") + flip_ud = dt.flip(direction="ud") + check_lr = np.fliplr(self.d) + check_ud = np.flipud(self.d) + assert np.allclose(flip_ud, check_ud) + assert np.allclose(flip_lr, check_lr) + + def test_scaling(self): + from PIL import Image + # Check Scales + dt = DataTransforms(self.d) + h = 150 + w = 150 + scale = Image.fromarray(self.d).resize((h, w)) + check_scale = dt.scale(h, w) + np.allclose(scale, check_scale) + + def test_shift(self): + # Check shift + dt = DataTransforms(self.d) + height = 5 + width = 5 + if len(self.d.shape) == 2: + shift = scipy.ndimage.shift(self.d, [height, width]) + if len(self.d.shape) == 3: + shift = scipy.ndimage.shift(self.d, [height, width, 0]) + check_shift = dt.shift(width, height) + assert np.allclose(shift, check_shift) + + def test_gaussian_noise(self): + # check gaussian noise + dt = DataTransforms(self.d) + np.random.seed(0) + random_noise = self.d + random_noise = random_noise + np.random.normal( + loc=0, scale=25.5, size=self.d.shape) + np.random.seed(0) + check_random_noise = dt.gaussian_noise(mean=0, std=25.5) + assert np.allclose(random_noise, check_random_noise) + + def test_salt_pepper_noise(self): + # check salt and pepper noise + dt = DataTransforms(self.d) + np.random.seed(0) + prob = 0.05 + random_noise = self.d + noise = np.random.random(size=self.d.shape) + random_noise[noise < (prob / 2)] = 0 + random_noise[noise > (1 - prob / 2)] = 255 + np.random.seed(0) + check_random_noise = dt.salt_pepper_noise(prob, salt=255, pepper=0) + assert np.allclose(random_noise, check_random_noise) + + def test_median_filter(self): + # Check median filter + from PIL import Image, ImageFilter + dt = DataTransforms(self.d) + filtered = 
dt.median_filter(size=3) + image = Image.fromarray(self.d) + image = image.filter(ImageFilter.MedianFilter(size=3)) + check_filtered = np.array(image) + assert np.allclose(check_filtered, filtered) diff --git a/deepchem/trans/tests/test_duplicate_balancing.py b/deepchem/trans/tests/test_duplicate_balancing.py new file mode 100644 index 0000000000000000000000000000000000000000..b79e1c0923c6e5f9150a05de088d253cf29d9d7e --- /dev/null +++ b/deepchem/trans/tests/test_duplicate_balancing.py @@ -0,0 +1,151 @@ +import numpy as np +import tempfile +import deepchem as dc + + +def test_binary_1d(): + """Test balancing transformer on single-task dataset without explicit task dimension.""" + n_samples = 6 + n_features = 3 + np.random.seed(123) + X = np.random.rand(n_samples, n_features) + y = np.array([1, 1, 0, 0, 0, 0]) + w = np.ones((n_samples,)) + dataset = dc.data.NumpyDataset(X, y, w) + + duplicator = dc.trans.DuplicateBalancingTransformer(dataset=dataset) + dataset = duplicator.transform(dataset) + # Check that we have length 8 now with duplication + assert len(dataset) == 8 + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check shapes + assert X_t.shape == (8, n_features) + assert y_t.shape == (8,) + assert w_t.shape == (8,) + assert ids_t.shape == (8,) + # Check that we have 4 positives and 4 negatives + assert np.sum(y_t == 0) == 4 + assert np.sum(y_t == 1) == 4 + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1])) + + +def test_binary_weighted_1d(): + """Test balancing transformer on a weighted single-task dataset without explicit task dimension.""" + n_samples = 6 + n_features = 3 + np.random.seed(123) + X = np.random.rand(n_samples, n_features) + # Note that nothing should change in this dataset since weights balance! 
+ y = np.array([1, 1, 0, 0, 0, 0]) + w = np.array([2, 2, 1, 1, 1, 1]) + dataset = dc.data.NumpyDataset(X, y, w) + + duplicator = dc.trans.DuplicateBalancingTransformer(dataset=dataset) + dataset = duplicator.transform(dataset) + # Check that still we have length 6 + assert len(dataset) == 6 + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check shapes + assert X_t.shape == (6, n_features) + assert y_t.shape == (6,) + assert w_t.shape == (6,) + assert ids_t.shape == (6,) + # Check that we have 2 positives and 4 negatives + assert np.sum(y_t == 0) == 4 + assert np.sum(y_t == 1) == 2 + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1])) + + +def test_binary_singletask(): + """Test duplicate balancing transformer on single-task dataset.""" + n_samples = 6 + n_features = 3 + n_tasks = 1 + np.random.seed(123) + X = np.random.rand(n_samples, n_features) + y = np.reshape(np.array([1, 1, 0, 0, 0, 0]), (n_samples, n_tasks)) + w = np.ones((n_samples, n_tasks)) + dataset = dc.data.NumpyDataset(X, y, w) + + duplicator = dc.trans.DuplicateBalancingTransformer(dataset=dataset) + dataset = duplicator.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check that we have length 8 now with duplication + assert len(dataset) == 8 + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check shapes + assert X_t.shape == (8, n_features) + assert y_t.shape == (8,) + assert w_t.shape == (8,) + assert ids_t.shape == (8,) + # Check that we have 4 positives and 4 negatives + assert np.sum(y_t == 0) == 4 + assert np.sum(y_t == 1) == 4 + # Check that sum of 0s equals sum of 1s in transformed for each task + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1])) + + +def test_multiclass_singletask(): + """Test balancing transformer on single-task dataset.""" + n_samples = 10 + n_features = 3 + X = 
np.random.rand(n_samples, n_features) + # 6-1 imbalance in favor of class 0 + y = np.array([0, 0, 0, 0, 0, 0, 1, 2, 3, 4]) + w = np.ones((n_samples,)) + dataset = dc.data.NumpyDataset(X, y, w) + + duplicator = dc.trans.DuplicateBalancingTransformer(dataset=dataset) + dataset = duplicator.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + + # Check that we have length 30 now with duplication + assert len(dataset) == 30 + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check shapes + assert X_t.shape == (30, n_features) + assert y_t.shape == (30,) + assert w_t.shape == (30,) + assert ids_t.shape == (30,) + # Check that we have 6 of each class + assert np.sum(y_t == 0) == 6 + assert np.sum(y_t == 1) == 6 + assert np.sum(y_t == 2) == 6 + assert np.sum(y_t == 3) == 6 + assert np.sum(y_t == 4) == 6 + # Check that sum of all class weights is equal by comparing to 0 weight + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 1])) + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 2])) + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 3])) + assert np.isclose(np.sum(w_t[y_t == 0]), np.sum(w_t[y_t == 4])) + + +def test_transform_to_directory(): + """Test that output can be written to a directory.""" + n_samples = 10 + n_features = 3 + np.random.seed(123) + X = np.random.rand(n_samples, n_features) + # Note class imbalance. 
This will round to 2x duplication for 1 + y = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0]) + w = np.ones((n_samples,)) + dataset = dc.data.NumpyDataset(X, y, w) + + duplicator = dc.trans.DuplicateBalancingTransformer(dataset=dataset) + with tempfile.TemporaryDirectory() as tmpdirname: + dataset = duplicator.transform(dataset, out_dir=tmpdirname) + balanced_dataset = dc.data.DiskDataset(tmpdirname) + X_t, y_t, w_t, ids_t = (balanced_dataset.X, balanced_dataset.y, + balanced_dataset.w, balanced_dataset.ids) + # Check that we have length 13 now with duplication + assert len(balanced_dataset) == 13 + # Check shapes + assert X_t.shape == (13, n_features) + assert y_t.shape == (13,) + assert w_t.shape == (13,) + assert ids_t.shape == (13,) + # Check that we have 6 positives and 7 negatives + assert np.sum(y_t == 0) == 7 + assert np.sum(y_t == 1) == 6 diff --git a/deepchem/trans/tests/test_featurization.py b/deepchem/trans/tests/test_featurization.py new file mode 100644 index 0000000000000000000000000000000000000000..66351d05c7ae332d62436af7631156691f296957 --- /dev/null +++ b/deepchem/trans/tests/test_featurization.py @@ -0,0 +1,15 @@ +import deepchem as dc +from deepchem.molnet import load_delaney +from deepchem.trans.transformers import FeaturizationTransformer + + +def test_featurization_transformer(): + fp_size = 2048 + tasks, all_dataset, transformers = load_delaney('Raw') + train = all_dataset[0] + transformer = FeaturizationTransformer( + dataset=train, featurizer=dc.feat.CircularFingerprint(size=fp_size)) + new_train = transformer.transform(train) + + assert new_train.y.shape == train.y.shape + assert new_train.X.shape[-1] == fp_size diff --git a/deepchem/trans/tests/test_log_transform.py b/deepchem/trans/tests/test_log_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..32ce0738702cef60fade8542a7da0fc1fe4c152f --- /dev/null +++ b/deepchem/trans/tests/test_log_transform.py @@ -0,0 +1,151 @@ +import os +import deepchem as dc +import 
pandas as pd +import numpy as np + + +def load_feat_multitask_data(): + """Load example with numerical features, tasks.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + features = ["feat0", "feat1", "feat2", "feat3", "feat4", "feat5"] + featurizer = dc.feat.UserDefinedFeaturizer(features) + tasks = ["task0", "task1", "task2", "task3", "task4", "task5"] + input_file = os.path.join(current_dir, + "../../models/tests/feat_multitask_example.csv") + loader = dc.data.UserCSVLoader( + tasks=tasks, featurizer=featurizer, id_field="id") + return loader.create_dataset(input_file) + + +def load_solubility_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + return loader.create_dataset(input_file) + + +def test_y_log_transformer(): + """Tests logarithmic data transformer.""" + solubility_dataset = load_solubility_data() + log_transformer = dc.trans.LogTransformer( + transform_y=True, dataset=solubility_dataset) + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + solubility_dataset = log_transformer.transform(solubility_dataset) + X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a y transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is a y transformer + np.testing.assert_allclose(w, w_t) + # Check y is now a logarithmic version of itself + np.testing.assert_allclose(y_t, np.log(y + 1)) + + # Check that untransform does the right thing. 
+  np.testing.assert_allclose(log_transformer.untransform(y_t), y)
+
+
+def test_X_log_transformer():
+  """Tests logarithmic data transformer."""
+  solubility_dataset = load_solubility_data()
+  log_transformer = dc.trans.LogTransformer(
+      transform_X=True, dataset=solubility_dataset)
+  X, y, w, ids = (solubility_dataset.X, solubility_dataset.y,
+                  solubility_dataset.w, solubility_dataset.ids)
+  solubility_dataset = log_transformer.transform(solubility_dataset)
+  X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y,
+                          solubility_dataset.w, solubility_dataset.ids)
+
+  # Check ids are unchanged.
+  for id_elt, id_t_elt in zip(ids, ids_t):
+    assert id_elt == id_t_elt
+  # Check y is unchanged since this is an X transformer
+  np.testing.assert_allclose(y, y_t)
+  # Check w is unchanged since this is an X transformer
+  np.testing.assert_allclose(w, w_t)
+  # Check X is now a logarithmic version of itself
+  np.testing.assert_allclose(X_t, np.log(X + 1))
+
+  # Check that untransform does the right thing.
+ np.testing.assert_allclose(log_transformer.untransform(X_t), X) + + +def test_y_log_transformer_select(): + """Tests logarithmic data transformer with selection.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + multitask_dataset = load_feat_multitask_data() + dfe = pd.read_csv( + os.path.join(current_dir, + "../../models/tests/feat_multitask_example.csv")) + tid = [] + tasklist = ["task0", "task3", "task4", "task5"] + first_task = "task0" + for task in tasklist: + tiid = dfe.columns.get_loc(task) - dfe.columns.get_loc(first_task) + tid = np.concatenate((tid, np.array([tiid]))) + tasks = tid.astype(int) + log_transformer = dc.trans.LogTransformer( + transform_y=True, tasks=tasks, dataset=multitask_dataset) + X, y, w, ids = (multitask_dataset.X, multitask_dataset.y, multitask_dataset.w, + multitask_dataset.ids) + multitask_dataset = log_transformer.transform(multitask_dataset) + X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y, + multitask_dataset.w, multitask_dataset.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a y transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is a y transformer + np.testing.assert_allclose(w, w_t) + # Check y is now a logarithmic version of itself + np.testing.assert_allclose(y_t[:, tasks], np.log(y[:, tasks] + 1)) + + # Check that untransform does the right thing. + np.testing.assert_allclose(log_transformer.untransform(y_t), y) + + +def test_X_log_transformer_select(): + # Tests logarithmic data transformer with selection. 
+  current_dir = os.path.dirname(os.path.abspath(__file__))
+  multitask_dataset = load_feat_multitask_data()
+  dfe = pd.read_csv(
+      os.path.join(current_dir,
+                   "../../models/tests/feat_multitask_example.csv"))
+  fid = []
+  featurelist = ["feat0", "feat1", "feat2", "feat3", "feat5"]
+  first_feature = "feat0"
+  for feature in featurelist:
+    fiid = dfe.columns.get_loc(feature) - dfe.columns.get_loc(first_feature)
+    fid = np.concatenate((fid, np.array([fiid])))
+  features = fid.astype(int)
+  log_transformer = dc.trans.LogTransformer(
+      transform_X=True, features=features, dataset=multitask_dataset)
+  X, y, w, ids = (multitask_dataset.X, multitask_dataset.y, multitask_dataset.w,
+                  multitask_dataset.ids)
+  multitask_dataset = log_transformer.transform(multitask_dataset)
+  X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y,
+                          multitask_dataset.w, multitask_dataset.ids)
+
+  # Check ids are unchanged.
+  for id_elt, id_t_elt in zip(ids, ids_t):
+    assert id_elt == id_t_elt
+  # Check y is unchanged since this is an X transformer
+  np.testing.assert_allclose(y, y_t)
+  # Check w is unchanged since this is an X transformer
+  np.testing.assert_allclose(w, w_t)
+  # Check X is now a logarithmic version of itself
+  np.testing.assert_allclose(X_t[:, features], np.log(X[:, features] + 1))
+
+  # Check that untransform does the right thing.
+ np.testing.assert_allclose(log_transformer.untransform(X_t), X) diff --git a/deepchem/trans/tests/test_minmax.py b/deepchem/trans/tests/test_minmax.py new file mode 100644 index 0000000000000000000000000000000000000000..7f99a6c479e464bce301d29fc164a65c6ba3bc4a --- /dev/null +++ b/deepchem/trans/tests/test_minmax.py @@ -0,0 +1,107 @@ +import os +import numpy as np +import deepchem as dc + + +def load_solubility_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + return loader.create_dataset(input_file) + + +def test_y_minmax_transformer(): + """Tests MinMax transformer.""" + solubility_dataset = load_solubility_data() + minmax_transformer = dc.trans.MinMaxTransformer( + transform_y=True, dataset=solubility_dataset) + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + solubility_dataset = minmax_transformer.transform(solubility_dataset) + X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + + # Check ids are unchanged before and after transformation + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + + # Check X is unchanged since transform_y is true + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since transform_y is true + np.testing.assert_allclose(w, w_t) + + # Check minimum and maximum values of transformed y are 0 and 1 + np.testing.assert_allclose(y_t.min(), 0.) + np.testing.assert_allclose(y_t.max(), 1.) 
+ + # Check untransform works correctly + y_restored = minmax_transformer.untransform(y_t) + assert np.max(y_restored - y) < 1e-5 + + +def test_y_minmax_random(): + """Test on random example""" + n_samples = 100 + n_features = 10 + n_tasks = 10 + + X = np.random.randn(n_samples, n_features) + y = np.random.randn(n_samples, n_tasks) + dataset = dc.data.NumpyDataset(X, y) + + minmax_transformer = dc.trans.MinMaxTransformer( + transform_y=True, dataset=dataset) + w, ids = dataset.w, dataset.ids + + dataset = minmax_transformer.transform(dataset) + X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) + # Check ids are unchanged before and after transformation + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + + # Check X is unchanged since transform_y is true + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since transform_y is true + np.testing.assert_allclose(w, w_t) + + # Check minimum and maximum values of transformed y are 0 and 1 + np.testing.assert_allclose(y_t.min(), 0.) + np.testing.assert_allclose(y_t.max(), 1.) 
+
+  # Test if dimensionality expansion is handled correctly by untransform
+  y_t = np.expand_dims(y_t, axis=-1)
+  y_restored = minmax_transformer.untransform(y_t)
+  assert y_restored.shape == y.shape + (1,)
+  np.testing.assert_allclose(np.squeeze(y_restored, axis=-1), y)
+
+
+def test_X_minmax_transformer():
+  solubility_dataset = load_solubility_data()
+  minmax_transformer = dc.trans.MinMaxTransformer(
+      transform_X=True, dataset=solubility_dataset)
+  X, y, w, ids = (solubility_dataset.X, solubility_dataset.y,
+                  solubility_dataset.w, solubility_dataset.ids)
+  solubility_dataset = minmax_transformer.transform(solubility_dataset)
+  X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y,
+                          solubility_dataset.w, solubility_dataset.ids)
+
+  # Check ids are unchanged before and after transformation
+  for id_elt, id_t_elt in zip(ids, ids_t):
+    assert id_elt == id_t_elt
+
+  # Check y is unchanged since transform_X is true
+  np.testing.assert_allclose(y, y_t)
+  # Check w is unchanged since transform_X is true
+  np.testing.assert_allclose(w, w_t)
+
+  # Check minimum and maximum values of transformed X are 0 and 1
+  np.testing.assert_allclose(X_t.min(), 0.)
+  np.testing.assert_allclose(X_t.max(), 1.)
+ + # Check untransform works correctly + np.testing.assert_allclose(minmax_transformer.untransform(X_t), X) diff --git a/deepchem/trans/tests/test_normalization.py b/deepchem/trans/tests/test_normalization.py new file mode 100644 index 0000000000000000000000000000000000000000..105dfc769a305b32f0a2c67219ddd7aa364f0443 --- /dev/null +++ b/deepchem/trans/tests/test_normalization.py @@ -0,0 +1,99 @@ +import os +import deepchem as dc +import numpy as np +import pytest + + +def load_unlabelled_data(): + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = [] + input_file = os.path.join(current_dir, "../../data/tests/no_labels.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + return loader.create_dataset(input_file) + + +def load_solubility_data(): + """Loads solubility dataset""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + featurizer = dc.feat.CircularFingerprint(size=1024) + tasks = ["log-solubility"] + input_file = os.path.join(current_dir, "../../models/tests/example.csv") + loader = dc.data.CSVLoader( + tasks=tasks, feature_field="smiles", featurizer=featurizer) + + return loader.create_dataset(input_file) + + +def test_transform_unlabelled(): + ul_dataset = load_unlabelled_data() + # transforming y should raise an exception + with pytest.raises(ValueError): + dc.trans.transformers.Transformer(transform_y=True).transform(ul_dataset) + + # transforming w should raise an exception + with pytest.raises(ValueError): + dc.trans.transformers.Transformer(transform_w=True).transform(ul_dataset) + + # transforming X should be okay + dc.trans.NormalizationTransformer( + transform_X=True, dataset=ul_dataset).transform(ul_dataset) + + +def test_y_normalization_transformer(): + """Tests normalization transformer.""" + solubility_dataset = load_solubility_data() + normalization_transformer = dc.trans.NormalizationTransformer( + transform_y=True, 
dataset=solubility_dataset) + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + solubility_dataset = normalization_transformer.transform(solubility_dataset) + X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is a y transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is a y transformer + np.testing.assert_allclose(w, w_t) + # Check that y_t has zero mean, unit std. + assert np.isclose(y_t.mean(), 0.) + assert np.isclose(y_t.std(), 1.) + + # Check that untransform does the right thing. + np.testing.assert_allclose(normalization_transformer.untransform(y_t), y) + + +def test_X_normalization_transformer(): + """Tests normalization transformer.""" + solubility_dataset = load_solubility_data() + normalization_transformer = dc.trans.NormalizationTransformer( + transform_X=True, dataset=solubility_dataset) + X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + solubility_dataset = normalization_transformer.transform(solubility_dataset) + X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, + solubility_dataset.w, solubility_dataset.ids) + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check y is unchanged since this is a X transformer + np.testing.assert_allclose(y, y_t) + # Check w is unchanged since this is a y transformer + np.testing.assert_allclose(w, w_t) + # Check that X_t has zero mean, unit std. 
+ # np.set_printoptions(threshold='nan') + mean = X_t.mean(axis=0) + assert np.amax(np.abs(mean - np.zeros_like(mean))) < 1e-7 + orig_std_array = X.std(axis=0) + std_array = X_t.std(axis=0) + # Entries with zero std are not normalized + for orig_std, std in zip(orig_std_array, std_array): + if not np.isclose(orig_std, 0): + assert np.isclose(std, 1) + + # Check that untransform does the right thing. + np.testing.assert_allclose( + normalization_transformer.untransform(X_t), X, atol=1e-7) diff --git a/deepchem/trans/tests/test_power.py b/deepchem/trans/tests/test_power.py new file mode 100644 index 0000000000000000000000000000000000000000..de1ed684b5d8a40770f97b136286d17ce9d55337 --- /dev/null +++ b/deepchem/trans/tests/test_power.py @@ -0,0 +1,80 @@ +import os +import deepchem as dc +import numpy as np + + +def load_gaussian_cdf_data(): + """Load example with numbers sampled from Gaussian normal distribution. + Each feature and task is a column of values that is sampled + from a normal distribution of mean 0, stdev 1.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + features = ["feat0", "feat1"] + featurizer = dc.feat.UserDefinedFeaturizer(features) + tasks = ["task0", "task1"] + input_file = os.path.join(current_dir, + "../../models/tests/gaussian_cdf_example.csv") + loader = dc.data.UserCSVLoader( + tasks=tasks, featurizer=featurizer, id_field="id") + return loader.create_dataset(input_file) + + +def test_power_X_transformer(): + """Test Power transformer on Gaussian normal dataset.""" + N = 10 + n_feat = 2 + powers = [1, 2, 0.5] + X = np.random.rand(N, n_feat) + y = np.random.normal(size=(N,)) + gaussian_dataset = dc.data.NumpyDataset(X, y) + powers = [1, 2, 0.5] + power_transformer = dc.trans.PowerTransformer(transform_X=True, powers=powers) + X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, + gaussian_dataset.ids) + gaussian_dataset2 = power_transformer.transform(gaussian_dataset) + X_t, y_t, w_t, ids_t = 
(gaussian_dataset2.X, gaussian_dataset2.y, + gaussian_dataset2.w, gaussian_dataset2.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check y is unchanged since this is an X transformer + np.testing.assert_allclose(y.flatten(), y_t.flatten()) + # Check w is unchanged since this is an X transformer + np.testing.assert_allclose(w, w_t) + # Check X is now holding the proper values in each column. + np.testing.assert_allclose(X_t.shape[1], len(powers) * X.shape[1]) + np.testing.assert_allclose(X, X_t[:, :2]) + np.testing.assert_allclose(np.power(X, 2), X_t[:, 2:4]) + np.testing.assert_allclose(np.power(X, 0.5), X_t[:, 4:]) + + +def test_power_y_transformer(): + """Test Power transformer on Gaussian normal dataset.""" + N = 10 + n_feat = 2 + powers = [1, 2, 0.5] + X = np.random.rand(N, n_feat) + y = np.random.rand(N) + gaussian_dataset = dc.data.NumpyDataset(X, y) + power_transformer = dc.trans.PowerTransformer(transform_y=True, powers=powers) + X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, + gaussian_dataset.ids) + gaussian_dataset2 = power_transformer.transform(gaussian_dataset) + X_t, y_t, w_t, ids_t = (gaussian_dataset2.X, gaussian_dataset2.y, + gaussian_dataset2.w, gaussian_dataset2.ids) + + # Check ids are unchanged. + for id_elt, id_t_elt in zip(ids, ids_t): + assert id_elt == id_t_elt + # Check X is unchanged since this is an X transformer + np.testing.assert_allclose(X, X_t) + # Check w is unchanged since this is an X transformer + np.testing.assert_allclose(w, w_t) + # Check y is now holding the proper values in each column. + np.testing.assert_allclose(y_t.shape[1], len(powers)) + np.testing.assert_allclose(y, y_t[:, :1].flatten()) + np.testing.assert_allclose(np.power(y, 2), y_t[:, 1:2].flatten()) + np.testing.assert_allclose(np.power(y, 0.5), y_t[:, 2:].flatten()) + + # Check that untransform does the right thing. 
+ np.testing.assert_allclose(power_transformer.untransform(y_t).flatten(), y) diff --git a/deepchem/trans/tests/test_transformers.py b/deepchem/trans/tests/test_transformers.py deleted file mode 100644 index e4d2a56d5e95cc6dd12435bfbea8f15bb945b3d9..0000000000000000000000000000000000000000 --- a/deepchem/trans/tests/test_transformers.py +++ /dev/null @@ -1,718 +0,0 @@ -""" -Tests for transformer objects. -""" -from deepchem.molnet import load_delaney -from deepchem.trans.transformers import FeaturizationTransformer -from deepchem.trans.transformers import DataTransforms - -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" - -import os -import unittest -import numpy as np -import pandas as pd -import deepchem as dc -import tensorflow as tf -import scipy.ndimage - - -class TestTransformers(unittest.TestCase): - """ - Test top-level API for transformer objects. - """ - - def setUp(self): - super(TestTransformers, self).setUp() - self.current_dir = os.path.dirname(os.path.abspath(__file__)) - ''' - init to load the MNIST data for DataTransforms Tests - ''' - (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data() - train = dc.data.NumpyDataset(x_train, y_train) - # extract only the images (no need of the labels) - data = (train.X)[0] - # reshaping the vector to image - data = np.reshape(data, (28, 28)) - self.d = data - - def test_y_log_transformer(self): - """Tests logarithmic data transformer.""" - solubility_dataset = dc.data.tests.load_solubility_data() - log_transformer = dc.trans.LogTransformer( - transform_y=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = log_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - - # Check ids are unchanged. 
- for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a y transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now a logarithmic version of itself - np.testing.assert_allclose(y_t, np.log(y + 1)) - - # Check that untransform does the right thing. - np.testing.assert_allclose(log_transformer.untransform(y_t), y) - - def test_transform_unlabelled(self): - ul_dataset = dc.data.tests.load_unlabelled_data() - # transforming y should raise an exception - with self.assertRaises(ValueError) as context: - dc.trans.transformers.Transformer(transform_y=True).transform(ul_dataset) - - # transforming w should raise an exception - with self.assertRaises(ValueError) as context: - dc.trans.transformers.Transformer(transform_w=True).transform(ul_dataset) - - # transforming X should be okay - dc.trans.NormalizationTransformer( - transform_X=True, dataset=ul_dataset).transform(ul_dataset) - - def test_X_log_transformer(self): - """Tests logarithmic data transformer.""" - solubility_dataset = dc.data.tests.load_solubility_data() - log_transformer = dc.trans.LogTransformer( - transform_X=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = log_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - - # Check ids are unchanged. 
- for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is a X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now a logarithmic version of itself - np.testing.assert_allclose(X_t, np.log(X + 1)) - - # Check that untransform does the right thing. - np.testing.assert_allclose(log_transformer.untransform(X_t), X) - - def test_y_log_transformer_select(self): - """Tests logarithmic data transformer with selection.""" - multitask_dataset = dc.data.tests.load_feat_multitask_data() - dfe = pd.read_csv( - os.path.join(self.current_dir, - "../../models/tests/feat_multitask_example.csv")) - tid = [] - tasklist = ["task0", "task3", "task4", "task5"] - first_task = "task0" - for task in tasklist: - tiid = dfe.columns.get_loc(task) - dfe.columns.get_loc(first_task) - tid = np.concatenate((tid, np.array([tiid]))) - tasks = tid.astype(int) - log_transformer = dc.trans.LogTransformer( - transform_y=True, tasks=tasks, dataset=multitask_dataset) - X, y, w, ids = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, multitask_dataset.ids) - multitask_dataset = log_transformer.transform(multitask_dataset) - X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, multitask_dataset.ids) - - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a y transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now a logarithmic version of itself - np.testing.assert_allclose(y_t[:, tasks], np.log(y[:, tasks] + 1)) - - # Check that untransform does the right thing. 
- np.testing.assert_allclose(log_transformer.untransform(y_t), y) - - def test_X_log_transformer_select(self): - # Tests logarithmic data transformer with selection. - multitask_dataset = dc.data.tests.load_feat_multitask_data() - dfe = pd.read_csv( - os.path.join(self.current_dir, - "../../models/tests/feat_multitask_example.csv")) - fid = [] - featurelist = ["feat0", "feat1", "feat2", "feat3", "feat5"] - first_feature = "feat0" - for feature in featurelist: - fiid = dfe.columns.get_loc(feature) - dfe.columns.get_loc(first_feature) - fid = np.concatenate((fid, np.array([fiid]))) - features = fid.astype(int) - log_transformer = dc.trans.LogTransformer( - transform_X=True, features=features, dataset=multitask_dataset) - X, y, w, ids = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, multitask_dataset.ids) - multitask_dataset = log_transformer.transform(multitask_dataset) - X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, multitask_dataset.ids) - - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is a X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now a logarithmic version of itself - np.testing.assert_allclose(X_t[:, features], np.log(X[:, features] + 1)) - - # Check that untransform does the right thing. - np.testing.assert_allclose(log_transformer.untransform(X_t), X) - - def test_y_minmax_transformer(self): - """Tests MinMax transformer. 
""" - solubility_dataset = dc.data.tests.load_solubility_data() - minmax_transformer = dc.trans.MinMaxTransformer( - transform_y=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = minmax_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - - # Check ids are unchanged before and after transformation - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - - # Check X is unchanged since transform_y is true - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since transform_y is true - np.testing.assert_allclose(w, w_t) - - # Check minimum and maximum values of transformed y are 0 and 1 - np.testing.assert_allclose(y_t.min(), 0.) - np.testing.assert_allclose(y_t.max(), 1.) - - # Check untransform works correctly - np.testing.assert_allclose(minmax_transformer.untransform(y_t), y) - - # Test on random example - n_samples = 100 - n_features = 10 - n_tasks = 10 - - X = np.random.randn(n_samples, n_features) - y = np.random.randn(n_samples, n_tasks) - dataset = dc.data.NumpyDataset(X, y) - - minmax_transformer = dc.trans.MinMaxTransformer( - transform_y=True, dataset=dataset) - w, ids = dataset.w, dataset.ids - - dataset = minmax_transformer.transform(dataset) - X_t, y_t, w_t, ids_t = (dataset.X, dataset.y, dataset.w, dataset.ids) - # Check ids are unchanged before and after transformation - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - - # Check X is unchanged since transform_y is true - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since transform_y is true - np.testing.assert_allclose(w, w_t) - - # Check minimum and maximum values of transformed y are 0 and 1 - np.testing.assert_allclose(y_t.min(), 0.) - np.testing.assert_allclose(y_t.max(), 1.) 
- - # Test if dimensionality expansion is handled correctly by untransform - y_t = np.expand_dims(y_t, axis=-1) - y_restored = minmax_transformer.untransform(y_t) - assert y_restored.shape == y.shape + (1,) - np.testing.assert_allclose(np.squeeze(y_restored, axis=-1), y) - - def test_X_minmax_transformer(self): - solubility_dataset = dc.data.tests.load_solubility_data() - minmax_transformer = dc.trans.MinMaxTransformer( - transform_X=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = minmax_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - - # Check ids are unchanged before and after transformation - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - - # Check X is unchanged since transform_y is true - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since transform_y is true - np.testing.assert_allclose(w, w_t) - - # Check minimum and maximum values of transformed y are 0 and 1 - np.testing.assert_allclose(X_t.min(), 0.) - np.testing.assert_allclose(X_t.max(), 1.) - - # Check untransform works correctly - np.testing.assert_allclose(minmax_transformer.untransform(X_t), X) - - def test_y_normalization_transformer(self): - """Tests normalization transformer.""" - solubility_dataset = dc.data.tests.load_solubility_data() - normalization_transformer = dc.trans.NormalizationTransformer( - transform_y=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = normalization_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - # Check ids are unchanged. 
- for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a y transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check that y_t has zero mean, unit std. - assert np.isclose(y_t.mean(), 0.) - assert np.isclose(y_t.std(), 1.) - - # Check that untransform does the right thing. - np.testing.assert_allclose(normalization_transformer.untransform(y_t), y) - - def test_X_normalization_transformer(self): - """Tests normalization transformer.""" - solubility_dataset = dc.data.tests.load_solubility_data() - normalization_transformer = dc.trans.NormalizationTransformer( - transform_X=True, dataset=solubility_dataset) - X, y, w, ids = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - solubility_dataset = normalization_transformer.transform(solubility_dataset) - X_t, y_t, w_t, ids_t = (solubility_dataset.X, solubility_dataset.y, - solubility_dataset.w, solubility_dataset.ids) - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is a X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check that X_t has zero mean, unit std. - # np.set_printoptions(threshold='nan') - mean = X_t.mean(axis=0) - assert np.amax(np.abs(mean - np.zeros_like(mean))) < 1e-7 - orig_std_array = X.std(axis=0) - std_array = X_t.std(axis=0) - # Entries with zero std are not normalized - for orig_std, std in zip(orig_std_array, std_array): - if not np.isclose(orig_std, 0): - assert np.isclose(std, 1) - - # TODO(rbharath): Untransform doesn't work properly for binary feature - # vectors. Need to figure out what's wrong here. (low priority) - ## Check that untransform does the right thing. 
- # np.testing.assert_allclose(normalization_transformer.untransform(X_t), X) - - def test_cdf_X_transformer(self): - """Test CDF transformer on Gaussian normal dataset.""" - target = np.array(np.transpose(np.linspace(0., 1., 1001))) - target = np.transpose(np.array(np.append([target], [target], axis=0))) - gaussian_dataset = dc.data.tests.load_gaussian_cdf_data() - bins = 1001 - cdf_transformer = dc.trans.CDFTransformer( - transform_X=True, dataset=gaussian_dataset, bins=bins) - X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, - gaussian_dataset.ids) - gaussian_dataset = cdf_transformer.transform(gaussian_dataset, bins=bins) - X_t, y_t, w_t, ids_t = (gaussian_dataset.X, gaussian_dataset.y, - gaussian_dataset.w, gaussian_dataset.ids) - - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is an X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is an X transformer - np.testing.assert_allclose(w, w_t) - # Check X is now holding the proper values when sorted. - sorted = np.sort(X_t, axis=0) - np.testing.assert_allclose(sorted, target) - - def test_cdf_y_transformer(self): - # Test CDF transformer on Gaussian normal dataset. - target = np.array(np.transpose(np.linspace(0., 1., 1001))) - target = np.transpose(np.array(np.append([target], [target], axis=0))) - gaussian_dataset = dc.data.tests.load_gaussian_cdf_data() - bins = 1001 - cdf_transformer = dc.trans.CDFTransformer( - transform_y=True, dataset=gaussian_dataset, bins=bins) - X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, - gaussian_dataset.ids) - gaussian_dataset = cdf_transformer.transform(gaussian_dataset, bins=bins) - X_t, y_t, w_t, ids_t = (gaussian_dataset.X, gaussian_dataset.y, - gaussian_dataset.w, gaussian_dataset.ids) - - # Check ids are unchanged. 
- for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is an y transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is an y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now holding the proper values when sorted. - sorted = np.sort(y_t, axis=0) - np.testing.assert_allclose(sorted, target) - - # Check that untransform does the right thing. - np.testing.assert_allclose(cdf_transformer.untransform(y_t), y) - - def test_clipping_X_transformer(self): - """Test clipping transformer on X of singletask dataset.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - ids = np.arange(n_samples) - X = np.ones((n_samples, n_features)) - target = 5. * X - X *= 6. - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - transformer = dc.trans.ClippingTransformer(transform_X=True, x_max=5.) - clipped_dataset = transformer.transform(dataset) - X_t, y_t, w_t, ids_t = (clipped_dataset.X, clipped_dataset.y, - clipped_dataset.w, clipped_dataset.ids) - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is an X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is an X transformer - np.testing.assert_allclose(w, w_t) - # Check X is now holding the proper values when sorted. - np.testing.assert_allclose(X_t, target) - - def test_clipping_y_transformer(self): - """Test clipping transformer on y of singletask dataset.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - ids = np.arange(n_samples) - X = np.zeros((n_samples, n_features)) - y = np.ones((n_samples, n_tasks)) - target = 5. * y - y *= 6. - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - transformer = dc.trans.ClippingTransformer(transform_y=True, y_max=5.) 
- clipped_dataset = transformer.transform(dataset) - X_t, y_t, w_t, ids_t = (clipped_dataset.X, clipped_dataset.y, - clipped_dataset.w, clipped_dataset.ids) - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a y transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is a y transformer - np.testing.assert_allclose(w, w_t) - # Check y is now holding the proper values when sorted. - np.testing.assert_allclose(y_t, target) - - def test_power_X_transformer(self): - """Test Power transformer on Gaussian normal dataset.""" - gaussian_dataset = dc.data.tests.load_gaussian_cdf_data() - powers = [1, 2, 0.5] - power_transformer = dc.trans.PowerTransformer( - transform_X=True, powers=powers) - X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, - gaussian_dataset.ids) - gaussian_dataset2 = power_transformer.transform(gaussian_dataset) - X_t, y_t, w_t, ids_t = (gaussian_dataset2.X, gaussian_dataset2.y, - gaussian_dataset2.w, gaussian_dataset2.ids) - - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check y is unchanged since this is an X transformer - np.testing.assert_allclose(y, y_t) - # Check w is unchanged since this is an X transformer - np.testing.assert_allclose(w, w_t) - # Check X is now holding the proper values in each column. 
- np.testing.assert_allclose(X_t.shape[1], len(powers) * X.shape[1]) - np.testing.assert_allclose(X, X_t[:, :2]) - np.testing.assert_allclose(np.power(X, 2), X_t[:, 2:4]) - np.testing.assert_allclose(np.power(X, 0.5), X_t[:, 4:]) - - def test_power_y_transformer(self): - """Test Power transformer on Gaussian normal dataset.""" - gaussian_dataset = dc.data.tests.load_gaussian_cdf_data() - powers = [1, 2, 0.5] - power_transformer = dc.trans.PowerTransformer( - transform_y=True, powers=powers) - X, y, w, ids = (gaussian_dataset.X, gaussian_dataset.y, gaussian_dataset.w, - gaussian_dataset.ids) - gaussian_dataset2 = power_transformer.transform(gaussian_dataset) - X_t, y_t, w_t, ids_t = (gaussian_dataset2.X, gaussian_dataset2.y, - gaussian_dataset2.w, gaussian_dataset2.ids) - - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is an X transformer - np.testing.assert_allclose(X, X_t) - # Check w is unchanged since this is an X transformer - np.testing.assert_allclose(w, w_t) - # Check y is now holding the proper values in each column. - np.testing.assert_allclose(y_t.shape[1], len(powers) * y.shape[1]) - np.testing.assert_allclose(y, y_t[:, :2]) - np.testing.assert_allclose(np.power(y, 2), y_t[:, 2:4]) - np.testing.assert_allclose(np.power(y, 0.5), y_t[:, 4:]) - - # Check that untransform does the right thing. 
- np.testing.assert_allclose(power_transformer.untransform(y_t), y) - - def test_singletask_balancing_transformer(self): - """Test balancing transformer on single-task dataset.""" - - classification_dataset = dc.data.tests.load_classification_data() - balancing_transformer = dc.trans.BalancingTransformer( - transform_w=True, dataset=classification_dataset) - X, y, w, ids = (classification_dataset.X, classification_dataset.y, - classification_dataset.w, classification_dataset.ids) - classification_dataset = balancing_transformer.transform( - classification_dataset) - X_t, y_t, w_t, ids_t = (classification_dataset.X, classification_dataset.y, - classification_dataset.w, - classification_dataset.ids) - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a w transformer - np.testing.assert_allclose(X, X_t) - # Check y is unchanged since this is a w transformer - np.testing.assert_allclose(y, y_t) - for ind, task in enumerate(classification_dataset.get_task_names()): - y_task = y_t[:, ind] - w_task = w_t[:, ind] - w_orig_task = w[:, ind] - # Assert that entries with zero weight retain zero weight - np.testing.assert_allclose(w_task[w_orig_task == 0], - np.zeros_like(w_task[w_orig_task == 0])) - # Check that sum of 0s equals sum of 1s in transformed for each task - assert np.isclose( - np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) - - def test_multitask_balancing_transformer(self): - """Test balancing transformer on multitask dataset.""" - multitask_dataset = dc.data.tests.load_multitask_data() - balancing_transformer = dc.trans.BalancingTransformer( - transform_w=True, dataset=multitask_dataset) - X, y, w, ids = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, multitask_dataset.ids) - multitask_dataset = balancing_transformer.transform(multitask_dataset) - X_t, y_t, w_t, ids_t = (multitask_dataset.X, multitask_dataset.y, - multitask_dataset.w, 
multitask_dataset.ids) - # Check ids are unchanged. - for id_elt, id_t_elt in zip(ids, ids_t): - assert id_elt == id_t_elt - # Check X is unchanged since this is a w transformer - np.testing.assert_allclose(X, X_t) - # Check y is unchanged since this is a w transformer - np.testing.assert_allclose(y, y_t) - for ind, task in enumerate(multitask_dataset.get_task_names()): - y_task = y_t[:, ind] - w_task = w_t[:, ind] - w_orig_task = w[:, ind] - # Assert that entries with zero weight retain zero weight - np.testing.assert_allclose(w_task[w_orig_task == 0], - np.zeros_like(w_task[w_orig_task == 0])) - # Check that sum of 0s equals sum of 1s in transformed for each task - assert np.isclose( - np.sum(w_task[y_task == 0]), np.sum(w_task[y_task == 1])) - - def test_coulomb_fit_transformer(self): - """Test coulomb fit transformer on singletask dataset.""" - n_samples = 10 - n_features = 3 - n_tasks = 1 - ids = np.arange(n_samples) - X = np.random.rand(n_samples, n_features, n_features) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids) - fit_transformer = dc.trans.CoulombFitTransformer(dataset) - X_t = fit_transformer.X_transform(dataset.X) - assert len(X_t.shape) == 2 - - def test_IRV_transformer(self): - n_features = 128 - n_samples = 20 - test_samples = 5 - n_tasks = 2 - X = np.random.randint(2, size=(n_samples, n_features)) - y = np.zeros((n_samples, n_tasks)) - w = np.ones((n_samples, n_tasks)) - dataset = dc.data.NumpyDataset(X, y, w, ids=None) - X_test = np.random.randint(2, size=(test_samples, n_features)) - y_test = np.zeros((test_samples, n_tasks)) - w_test = np.ones((test_samples, n_tasks)) - test_dataset = dc.data.NumpyDataset(X_test, y_test, w_test, ids=None) - sims = np.sum( - X_test[0, :] * X, axis=1, dtype=float) / np.sum( - np.sign(X_test[0, :] + X), axis=1, dtype=float) - sims = sorted(sims, reverse=True) - IRV_transformer = dc.trans.IRVTransformer(10, n_tasks, dataset) - 
test_dataset_trans = IRV_transformer.transform(test_dataset) - dataset_trans = IRV_transformer.transform(dataset) - assert test_dataset_trans.X.shape == (test_samples, 20 * n_tasks) - assert np.allclose(test_dataset_trans.X[0, :10], sims[:10]) - assert np.allclose(test_dataset_trans.X[0, 10:20], [0] * 10) - assert not np.isclose(dataset_trans.X[0, 0], 1.) - - def test_featurization_transformer(self): - fp_size = 2048 - tasks, all_dataset, transformers = load_delaney('Raw') - train = all_dataset[0] - transformer = FeaturizationTransformer( - transform_X=True, - dataset=train, - featurizer=dc.feat.CircularFingerprint(size=fp_size)) - new_train = transformer.transform(train) - - self.assertEqual(new_train.y.shape, train.y.shape) - self.assertEqual(new_train.X.shape[-1], fp_size) - - def test_blurring(self): - # Check Blurring - dt = DataTransforms(self.d) - blurred = dt.gaussian_blur(sigma=1.5) - check_blur = scipy.ndimage.gaussian_filter(self.d, 1.5) - assert np.allclose(check_blur, blurred) - - def test_center_crop(self): - # Check center crop - dt = DataTransforms(self.d) - x_crop = 50 - y_crop = 50 - crop = dt.center_crop(x_crop, y_crop) - y = self.d.shape[0] - x = self.d.shape[1] - x_start = x // 2 - (x_crop // 2) - y_start = y // 2 - (y_crop // 2) - check_crop = self.d[y_start:y_start + y_crop, x_start:x_start + x_crop] - assert np.allclose(check_crop, crop) - - def test_crop(self): - #Check crop - dt = DataTransforms(self.d) - crop = dt.crop(0, 10, 0, 10) - y = self.d.shape[0] - x = self.d.shape[1] - check_crop = self.d[10:y - 10, 0:x - 0] - assert np.allclose(crop, check_crop) - - def test_convert2gray(self): - # Check convert2gray - dt = DataTransforms(self.d) - gray = dt.convert2gray() - check_gray = np.dot(self.d[..., :3], [0.2989, 0.5870, 0.1140]) - assert np.allclose(check_gray, gray) - - def test_rotation(self): - # Check rotation - dt = DataTransforms(self.d) - angles = [0, 5, 10, 90] - for ang in angles: - rotate = dt.rotate(ang) - check_rotate = 
scipy.ndimage.rotate(self.d, ang) - assert np.allclose(rotate, check_rotate) - - # Some more test cases for flip - rotate = dt.rotate(-90) - check_rotate = scipy.ndimage.rotate(self.d, 270) - assert np.allclose(rotate, check_rotate) - - def test_flipping(self): - # Check flip - dt = DataTransforms(self.d) - flip_lr = dt.flip(direction="lr") - flip_ud = dt.flip(direction="ud") - check_lr = np.fliplr(self.d) - check_ud = np.flipud(self.d) - assert np.allclose(flip_ud, check_ud) - assert np.allclose(flip_lr, check_lr) - - def test_scaling(self): - from PIL import Image - # Check Scales - dt = DataTransforms(self.d) - h = 150 - w = 150 - scale = Image.fromarray(self.d).resize((h, w)) - check_scale = dt.scale(h, w) - np.allclose(scale, check_scale) - - def test_shift(self): - # Check shift - dt = DataTransforms(self.d) - height = 5 - width = 5 - if len(self.d.shape) == 2: - shift = scipy.ndimage.shift(self.d, [height, width]) - if len(self.d.shape) == 3: - shift = scipy.ndimage.shift(self.d, [height, width, 0]) - check_shift = dt.shift(width, height) - assert np.allclose(shift, check_shift) - - def test_gaussian_noise(self): - # check gaussian noise - dt = DataTransforms(self.d) - np.random.seed(0) - random_noise = self.d - random_noise = random_noise + np.random.normal( - loc=0, scale=25.5, size=self.d.shape) - np.random.seed(0) - check_random_noise = dt.gaussian_noise(mean=0, std=25.5) - assert np.allclose(random_noise, check_random_noise) - - def test_salt_pepper_noise(self): - # check salt and pepper noise - dt = DataTransforms(self.d) - np.random.seed(0) - prob = 0.05 - random_noise = self.d - noise = np.random.random(size=self.d.shape) - random_noise[noise < (prob / 2)] = 0 - random_noise[noise > (1 - prob / 2)] = 255 - np.random.seed(0) - check_random_noise = dt.salt_pepper_noise(prob, salt=255, pepper=0) - assert np.allclose(random_noise, check_random_noise) - - def test_DAG_transformer(self): - """Tests the DAG transformer.""" - np.random.seed(123) - 
tf.random.set_seed(123) - n_tasks = 1 - - # Load mini log-solubility dataset. - featurizer = dc.feat.ConvMolFeaturizer() - tasks = ["outcome"] - input_file = os.path.join(self.current_dir, - "../../models/tests/example_regression.csv") - loader = dc.data.CSVLoader( - tasks=tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize(input_file) - transformer = dc.trans.DAGTransformer(max_atoms=50) - dataset = transformer.transform(dataset) - # The transformer generates n DAGs for a molecule with n - # atoms. These are denoted the "parents" - for idm, mol in enumerate(dataset.X): - assert dataset.X[idm].get_num_atoms() == len(dataset.X[idm].parents) - - def test_median_filter(self): - #Check median filter - from PIL import Image, ImageFilter - dt = DataTransforms(self.d) - filtered = dt.median_filter(size=3) - image = Image.fromarray(self.d) - image = image.filter(ImageFilter.MedianFilter(size=3)) - check_filtered = np.array(image) - assert np.allclose(check_filtered, filtered) diff --git a/deepchem/trans/transformers.py b/deepchem/trans/transformers.py index d3146c2d5201233b08ec96e02c7990fe17f061a4..bbc81dad67ca8dcc258bc88c1d9da827eb88e873 100644 --- a/deepchem/trans/transformers.py +++ b/deepchem/trans/transformers.py @@ -1,28 +1,30 @@ -# coding=utf-8 """ Contains an abstract base class that supports data transformations. 
""" import os +import logging +import time +import warnings +from typing import Any, List, Optional, Tuple, Union import numpy as np import scipy import scipy.ndimage -import time -import deepchem as dc import tensorflow as tf -from deepchem.data import NumpyDataset +import deepchem as dc +from deepchem.data import Dataset, NumpyDataset, DiskDataset +from deepchem.feat import Featurizer +from deepchem.feat.mol_graphs import ConvMol -def undo_transforms(y, transformers): - """Undoes all transformations applied.""" - # Note that transformers have to be undone in reversed order - for transformer in reversed(transformers): - if transformer.transform_y: - y = transformer.untransform(y) - return y +logger = logging.getLogger(__name__) def undo_grad_transforms(grad, tasks, transformers): + """DEPRECATED. DO NOT USE.""" + logger.warning( + "undo_grad_transforms is DEPRECATED and will be removed in a future version of DeepChem. " + "Manually implement transforms to perform force calculations.") for transformer in reversed(transformers): if transformer.transform_y: grad = transformer.untransform_grad(grad, tasks) @@ -32,10 +34,15 @@ def undo_grad_transforms(grad, tasks, transformers): def get_grad_statistics(dataset): """Computes and returns statistics of a dataset - This function assumes that the first task of a dataset holds the energy for - an input system, and that the remaining tasks holds the gradient for the - system. + DEPRECATED DO NOT USE. + + This function assumes that the first task of a dataset holds the + energy for an input system, and that the remaining tasks holds the + gradient for the system. """ + logger.warning( + "get_grad_statistics is DEPRECATED and will be removed in a future version of DeepChem. Manually compute force/energy statistics." + ) if len(dataset) == 0: return None, None, None, None y = dataset.y @@ -48,88 +55,265 @@ def get_grad_statistics(dataset): class Transformer(object): - """ - Abstract base class for different ML models. 
+ """Abstract base class for different data transformation techniques. + + A transformer is an object that applies a transformation to a given + dataset. Think of a transformation as a mathematical operation which + makes the source dataset more amenable to learning. For example, one + transformer could normalize the features for a dataset (ensuring + they have zero mean and unit standard deviation). Another + transformer could for example threshold values in a dataset so that + values outside a given range are truncated. Yet another transformer + could act as a data augmentation routine, generating multiple + different images from each source datapoint (a transformation need + not necessarily be one to one). + + Transformers are designed to be chained, since data pipelines often + chain multiple different transformations to a dataset. Transformers + are also designed to be scalable and can be applied to + large `dc.data.Dataset` objects. Not that Transformers are not + usually thread-safe so you will have to be careful in processing + very large datasets. + + This class is an abstract superclass that isn't meant to be directly + instantiated. Instead, you will want to instantiate one of the + subclasses of this class inorder to perform concrete + transformations. """ # Hack to allow for easy unpickling: # http://stefaanlippens.net/pickleproblem __module__ = os.path.splitext(os.path.basename(__file__))[0] def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None): - """Initializes transformation based on dataset statistics.""" + transform_X: bool = False, + transform_y: bool = False, + transform_w: bool = False, + transform_ids: bool = False, + dataset: Optional[Dataset] = None): + """Initializes transformation based on dataset statistics. 
+ + Parameters + ---------- + transform_X: bool, optional (default False) + Whether to transform X + transform_y: bool, optional (default False) + Whether to transform y + transform_w: bool, optional (default False) + Whether to transform w + transform_ids: bool, optional (default False) + Whether to transform ids + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed + """ + if self.__class__.__name__ == "Transformer": + raise ValueError( + "Transformer is an abstract superclass and cannot be directly instantiated. You probably want to instantiate a concrete subclass instead." + ) self.transform_X = transform_X self.transform_y = transform_y self.transform_w = transform_w - # One, but not both, transform_X or tranform_y is true - assert transform_X or transform_y or transform_w - # Use fact that bools add as ints in python - assert (transform_X + transform_y + transform_w) == 1 + self.transform_ids = transform_ids + # Some transformation must happen + assert transform_X or transform_y or transform_w or transform_ids - def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w, ids) arrays. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ raise NotImplementedError( "Each Transformer is responsible for its own transform_array method.") - def untransform(self, z): - """Reverses stored transformation on provided data.""" - raise NotImplementedError( - "Each Transformer is responsible for its own untransfomr method.") + def untransform(self, transformed): + """Reverses stored transformation on provided data. + + Depending on whether `transform_X` or `transform_y` or `transform_w` was + set, this will perform different un-transformations. Note that this method + may not always be defined since some transformations aren't 1-1. - def transform(self, dataset, parallel=False): + Parameters + ---------- + transformed: np.ndarray + Array which was previously transformed by this class. """ - Transforms all internally stored data. - Adds X-transform, y-transform columns to metadata. + raise NotImplementedError( + "Each Transformer is responsible for its own untransform method.") + + def transform(self, + dataset: Dataset, + parallel: bool = False, + out_dir: Optional[str] = None, + **kwargs) -> Dataset: + """Transforms all internally stored data in dataset. + + This method transforms all internal data in the provided dataset by using + the `Dataset.transform` method. Note that this method adds X-transform, + y-transform columns to metadata. Specified keyword arguments are passed on + to `Dataset.transform`. + + Parameters + ---------- + dataset: dc.data.Dataset + Dataset object to be transformed. + parallel: bool, optional (default False) + if True, use multiple processes to transform the dataset in parallel. + For large datasets, this might be faster. 
+ out_dir: str, optional + If `out_dir` is specified in `kwargs` and `dataset` is a `DiskDataset`, + the output dataset will be written to the specified directory. + + Returns + ------- + Dataset + A newly transformed Dataset object """ + # Add this case in to handle non-DiskDataset that should be written to disk + if out_dir is not None: + if not isinstance(dataset, dc.data.DiskDataset): + dataset = dc.data.DiskDataset.from_numpy(dataset.X, dataset.y, + dataset.w, dataset.ids) _, y_shape, w_shape, _ = dataset.get_shape() if y_shape == tuple() and self.transform_y: raise ValueError("Cannot transform y when y_values are not present") if w_shape == tuple() and self.transform_w: raise ValueError("Cannot transform w when w_values are not present") - return dataset.transform(lambda X, y, w: self.transform_array(X, y, w)) + return dataset.transform(self, out_dir=out_dir, parallel=parallel) - def transform_on_array(self, X, y, w): - """ - Transforms numpy arrays X, y, and w + def transform_on_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transforms numpy arrays X, y, and w + + DEPRECATED. Use `transform_array` instead. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids """ - X, y, w = self.transform_array(X, y, w) - return X, y, w + warnings.warn( + "transform_on_array() is deprecated and has been renamed to transform_array()." 
+ "transform_on_array() will be removed in DeepChem 3.0", FutureWarning) + X, y, w, ids = self.transform_array(X, y, w, ids) + return X, y, w, ids + + +def undo_transforms(y: np.ndarray, + transformers: List[Transformer]) -> np.ndarray: + """Undoes all transformations applied. + + Transformations are reversed using `transformer.untransform`. + Transformations will be assumed to have been applied in the order specified, + so transformations will be reversed in the opposite order. That is if + `transformers = [t1, t2]`, then this method will do `t2.untransform` + followed by `t1.untransform`. + + Parameters + ---------- + y: np.ndarray + Array of values for which transformations have to be undone. + transformers: list[dc.trans.Transformer] + List of transformations which have already been applied to `y` in the + order specifed. + + Returns + ------- + y_out: np.ndarray + The array with all transformations reversed. + """ + # Note that transformers have to be undone in reversed order + for transformer in reversed(transformers): + if transformer.transform_y: + y = transformer.untransform(y) + return y class MinMaxTransformer(Transformer): - """MinMax transformer transforms the dataset by shifting each axis of X or y - (depending on whether transform_X or transform_y is True), except the first one - by the minimum value along the axis and dividing the result by the range + """Ensure each value rests between 0 and 1 by using the min and max. + + `MinMaxTransformer` transforms the dataset by shifting each axis of X or y + (depending on whether transform_X or transform_y is True), except the first + one by the minimum value along the axis and dividing the result by the range (maximum value - minimum value) along the axis. This ensures each axis is - between 0 and 1. In case of multi-task learning, it ensures each task is given - equal importance. + between 0 and 1. In case of multi-task learning, it ensures each task is + given equal importance. 
Given original array A, the transformed array can be written as: - A_min = np.min(A, axis=0) - A_max = np.max(A, axis=0) - A_t = np.nan_to_num((A - A_min)/(A_max - A_min)) - Example: + >>> import numpy as np + >>> A = np.random.rand(10, 10) + >>> A_min = np.min(A, axis=0) + >>> A_max = np.max(A, axis=0) + >>> A_t = np.nan_to_num((A - A_min)/(A_max - A_min)) + + Examples + -------- + >>> n_samples = 10 >>> n_features = 3 >>> n_tasks = 1 >>> ids = np.arange(n_samples) >>> X = np.random.rand(n_samples, n_features) - >>> y = np.zeros((n_samples, n_tasks)) + >>> y = np.random.rand(n_samples, n_tasks) >>> w = np.ones((n_samples, n_tasks)) >>> dataset = dc.data.NumpyDataset(X, y, w, ids) >>> transformer = dc.trans.MinMaxTransformer(transform_y=True, dataset=dataset) >>> dataset = transformer.transform(dataset) + + Note + ---- + This class can only transform `X` or `y` and not `w`. So only one of + `transform_X` or `transform_y` can be set. + + Raises + ------ + ValueError + if `transform_X` and `transform_y` are both set. """ def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None): + transform_X: bool = False, + transform_y: bool = False, + dataset: Optional[Dataset] = None): """Initialization of MinMax transformer. 
Parameters @@ -138,16 +322,15 @@ class MinMaxTransformer(Transformer): Whether to transform X transform_y: bool, optional (default False) Whether to transform y - transform_w: bool, optional (default False) - Whether to transform w - dataset: dc.data.Dataset object, optional + dataset: dc.data.Dataset object, optional (default None) Dataset to be transformed """ - if transform_X: + if transform_X and transform_y: + raise ValueError("Can only transform only one of X and y") + if dataset is not None and transform_X: self.X_min = np.min(dataset.X, axis=0) self.X_max = np.max(dataset.X, axis=0) - - elif transform_y: + elif dataset is not None and transform_y: self.y_min = np.min(dataset.y, axis=0) self.y_max = np.max(dataset.y, axis=0) @@ -155,31 +338,62 @@ class MinMaxTransformer(Transformer): assert len(self.y_min) == dataset.y.shape[1] super(MinMaxTransformer, self).__init__( - transform_X=transform_X, - transform_y=transform_y, - transform_w=transform_w, - dataset=dataset) + transform_X=transform_X, transform_y=transform_y, dataset=dataset) - def transform(self, dataset, parallel=False): - """Transforms the dataset.""" - return super(MinMaxTransformer, self).transform(dataset, parallel=parallel) + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w, ids) arrays. - def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of ids. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ if self.transform_X: - X = np.nan_to_num((X - self.X_min) / (self.X_max - self.X_min)) + # Handle division by zero + denominator = np.where((self.X_max - self.X_min) > 0, + (self.X_max - self.X_min), + np.ones_like(self.X_max - self.X_min)) + X = np.nan_to_num((X - self.X_min) / denominator) elif self.transform_y: - y = np.nan_to_num((y - self.y_min) / (self.y_max - self.y_min)) - return (X, y, w) - - def untransform(self, z): + # Handle division by zero + denominator = np.where((self.y_max - self.y_min) > 0, + (self.y_max - self.y_min), + np.ones_like(self.y_max - self.y_min)) + y = np.nan_to_num((y - self.y_min) / denominator) + return (X, y, w, ids) + + def untransform(self, z: np.ndarray) -> np.ndarray: """ Undo transformation on provided data. Parameters ---------- - z: np.ndarray, + z: np.ndarray Transformed X or y array + + Returns + ------- + np.ndarray + Array with min-max scaling undone. """ if self.transform_X: X_max = self.X_max @@ -205,26 +419,71 @@ class MinMaxTransformer(Transformer): class NormalizationTransformer(Transformer): + """Normalizes dataset to have zero mean and unit standard deviation + + This transformer transforms datasets to have zero mean and unit standard + deviation. + + Examples + -------- + + >>> n_samples = 10 + >>> n_features = 3 + >>> n_tasks = 1 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.random.rand(n_samples, n_tasks) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> transformer = dc.trans.NormalizationTransformer(transform_y=True, dataset=dataset) + >>> dataset = transformer.transform(dataset) + + Note + ---- + This class can only transform `X` or `y` and not `w`. 
So only one of + `transform_X` or `transform_y` can be set. + + Raises + ------ + ValueError + if `transform_X` and `transform_y` are both set. + """ def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None, - transform_gradients=False, - move_mean=True): - """Initialize normalization transformation.""" - if transform_X: + transform_X: bool = False, + transform_y: bool = False, + transform_w: bool = False, + dataset: Optional[Dataset] = None, + transform_gradients: bool = False, + move_mean: bool = True): + """Initialize normalization transformation. + + Parameters + ---------- + transform_X: bool, optional (default False) + Whether to transform X + transform_y: bool, optional (default False) + Whether to transform y + transform_w: bool, optional (default False) + Whether to transform w + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed + """ + if transform_X and transform_y: + raise ValueError("Can only transform only one of X and y") + if transform_w: + raise ValueError("MinMaxTransformer doesn't support w transformation.") + if dataset is not None and transform_X: X_means, X_stds = dataset.get_statistics(X_stats=True, y_stats=False) self.X_means = X_means self.X_stds = X_stds - elif transform_y: + elif dataset is not None and transform_y: y_means, y_stds = dataset.get_statistics(X_stats=False, y_stats=True) self.y_means = y_means # Control for pathological case with no variance. - y_stds = np.array(y_stds) - y_stds[y_stds == 0] = 1. - self.y_stds = y_stds + y_stds_np = np.array(y_stds) + y_stds_np[y_stds_np == 0] = 1. 
+ self.y_stds = y_stds_np self.transform_gradients = transform_gradients self.move_mean = move_mean if self.transform_gradients: @@ -238,12 +497,33 @@ class NormalizationTransformer(Transformer): transform_w=transform_w, dataset=dataset) - def transform(self, dataset, parallel=False): - return super(NormalizationTransformer, self).transform( - dataset, parallel=parallel) + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w) arrays. - def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of ids. + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ if self.transform_X: if not hasattr(self, 'move_mean') or self.move_mean: X = np.nan_to_num((X - self.X_means) / self.X_stds) @@ -254,11 +534,21 @@ class NormalizationTransformer(Transformer): y = np.nan_to_num((y - self.y_means) / self.y_stds) else: y = np.nan_to_num(y / self.y_stds) - return (X, y, w) + return (X, y, w, ids) - def untransform(self, z): + def untransform(self, z: np.ndarray) -> np.ndarray: """ Undo transformation on provided data. + + Parameters + ---------- + z: np.ndarray + Array to transform back + + Returns + ------- + z_out: np.ndarray + Array with normalization undone. 
""" if self.transform_X: if not hasattr(self, 'move_mean') or self.move_mean: @@ -268,7 +558,11 @@ class NormalizationTransformer(Transformer): elif self.transform_y: y_stds = self.y_stds y_means = self.y_means - n_tasks = self.y_stds.shape[0] + # Handle case with 1 task correctly + if len(self.y_stds.shape) == 0: + n_tasks = 1 + else: + n_tasks = self.y_stds.shape[0] z_shape = list(z.shape) # Get the reversed shape of z: (..., n_tasks, batch_size) z_shape.reverse() @@ -284,9 +578,10 @@ class NormalizationTransformer(Transformer): return z * y_stds def untransform_grad(self, grad, tasks): - """ - Undo transformation on gradient. - """ + """DEPRECATED. DO NOT USE.""" + logger.warning( + "NormalizationTransformer.untransform_grad is DEPRECATED and will be removed in a future version of DeepChem. " + "Manually implement transforms to perform force calculations.") if self.transform_y: grad_means = self.y_means[1:] @@ -310,38 +605,36 @@ class NormalizationTransformer(Transformer): class ClippingTransformer(Transformer): """Clip large values in datasets. 
- Example: - - >>> n_samples = 10 - >>> n_features = 3 - >>> n_tasks = 1 - >>> ids = np.arange(n_samples) - >>> X = np.random.rand(n_samples, n_features) - >>> y = np.zeros((n_samples, n_tasks)) - >>> w = np.ones((n_samples, n_tasks)) - >>> dataset = dc.data.NumpyDataset(X, y, w, ids) - >>> transformer = dc.trans.ClippingTransformer(transform_X=True) - >>> dataset = transformer.transform(dataset) + Examples + -------- + Let's clip values from a synthetic dataset + >>> n_samples = 10 + >>> n_features = 3 + >>> n_tasks = 1 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.zeros((n_samples, n_tasks)) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> transformer = dc.trans.ClippingTransformer(transform_X=True) + >>> dataset = transformer.transform(dataset) """ def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None, - x_max=5., - y_max=500.): + transform_X: bool = False, + transform_y: bool = False, + dataset: Optional[Dataset] = None, + x_max: float = 5., + y_max: float = 500.): """Initialize clipping transformation. - Parameters: + Parameters ---------- transform_X: bool, optional (default False) Whether to transform X transform_y: bool, optional (default False) Whether to transform y - transform_w: bool, optional (default False) - Whether to transform w dataset: dc.data.Dataset object, optional Dataset to be transformed x_max: float, optional @@ -349,29 +642,39 @@ class ClippingTransformer(Transformer): y_max: float, optional Maximum absolute value for y + Note + ---- + This transformer can transform `X` and `y` jointly, but does not transform + `w`. + + Raises + ------ + ValueError + if `transform_w` is set. 
""" super(ClippingTransformer, self).__init__( - transform_X=transform_X, - transform_y=transform_y, - transform_w=transform_w, - dataset=dataset) - assert not transform_w + transform_X=transform_X, transform_y=transform_y, dataset=dataset) + self.x_max = x_max self.y_max = y_max - def transform_array(self, X, y, w): + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: """Transform the data in a set of (X, y, w) arrays. - Parameters: + Parameters ---------- X: np.ndarray - Features + Array of Features y: np.ndarray - Tasks + Array of labels w: np.ndarray - Weights + Array of weights + ids: np.ndarray + Array of ids. - Returns: + Returns ------- X: np.ndarray Transformed features @@ -379,7 +682,8 @@ class ClippingTransformer(Transformer): Transformed tasks w: np.ndarray Transformed weights - + idstrans: np.ndarray + Transformed array of ids """ if self.transform_X: X[X > self.x_max] = self.x_max @@ -387,29 +691,105 @@ class ClippingTransformer(Transformer): if self.transform_y: y[y > self.y_max] = self.y_max y[y < (-1.0 * self.y_max)] = -1.0 * self.y_max - return (X, y, w) + return (X, y, w, ids) def untransform(self, z): + """Not implemented.""" raise NotImplementedError( "Cannot untransform datasets with ClippingTransformer.") class LogTransformer(Transformer): + """Computes a logarithmic transformation + + This transformer computes the transformation given by + + >>> import numpy as np + >>> A = np.random.rand(10, 10) + >>> A = np.log(A + 1) + + Assuming that tasks/features are not specified. If specified, then + transformations are only performed on specified tasks/features. 
+ + Examples + -------- + >>> n_samples = 10 + >>> n_features = 3 + >>> n_tasks = 1 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.zeros((n_samples, n_tasks)) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> transformer = dc.trans.LogTransformer(transform_X=True) + >>> dataset = transformer.transform(dataset) + + Note + ---- + This class can only transform `X` or `y` and not `w`. So only one of + `transform_X` or `transform_y` can be set. + + Raises + ------ + ValueError + if `transform_w` is set or `transform_X` and `transform_y` are both set. + """ def __init__(self, - transform_X=False, - transform_y=False, - features=None, - tasks=None, - dataset=None): + transform_X: bool = False, + transform_y: bool = False, + features: Optional[List[int]] = None, + tasks: Optional[List[str]] = None, + dataset: Optional[Dataset] = None): + """Initialize log transformer. + + Parameters + ---------- + transform_X: bool, optional (default False) + Whether to transform X + transform_y: bool, optional (default False) + Whether to transform y + features: list[Int] + List of features indices to transform + tasks: list[str] + List of task names to transform. + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed + """ + if transform_X and transform_y: + raise ValueError("Can only transform only one of X and y") self.features = features self.tasks = tasks - """Initialize log transformation.""" super(LogTransformer, self).__init__( transform_X=transform_X, transform_y=transform_y, dataset=dataset) - def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w) arrays. 
+ + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of weights. + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ if self.transform_X: num_features = len(X[0]) if self.features is None: @@ -430,11 +810,21 @@ class LogTransformer(Transformer): y[:, j] = np.log(y[:, j] + 1) else: y[:, j] = y[:, j] - return (X, y, w) + return (X, y, w, ids) - def untransform(self, z): + def untransform(self, z: np.ndarray) -> np.ndarray: """ Undo transformation on provided data. + + Parameters + ---------- + z: np.ndarray, + Transformed X or y array + + Returns + ------- + np.ndarray + Array with a logarithmic transformation undone. """ if self.transform_X: num_features = len(z[0]) @@ -461,95 +851,290 @@ class LogTransformer(Transformer): class BalancingTransformer(Transformer): - """Balance positive and negative examples for weights.""" + """Balance positive and negative (or multiclass) example weights. - def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None, - seed=None): - super(BalancingTransformer, self).__init__( - transform_X=transform_X, - transform_y=transform_y, - transform_w=transform_w, - dataset=dataset) + This class balances the sample weights so that the sum of all example + weights from all classes is the same. This can be useful when you're + working on an imbalanced dataset where there are far fewer examples of some + classes than others. + + Examples + -------- + + Here's an example for a binary dataset. 
+ + >>> n_samples = 10 + >>> n_features = 3 + >>> n_tasks = 1 + >>> n_classes = 2 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.random.randint(n_classes, size=(n_samples, n_tasks)) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> transformer = dc.trans.BalancingTransformer(dataset=dataset) + >>> dataset = transformer.transform(dataset) + + And here's a multiclass dataset example. + + >>> n_samples = 50 + >>> n_features = 3 + >>> n_tasks = 1 + >>> n_classes = 5 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features) + >>> y = np.random.randint(n_classes, size=(n_samples, n_tasks)) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> transformer = dc.trans.BalancingTransformer(dataset=dataset) + >>> dataset = transformer.transform(dataset) + + See Also + -------- + deepchem.trans.DuplicateBalancingTransformer: Balance by duplicating samples. + + + Note + ---- + This transformer is only meaningful for classification datasets where `y` + takes on a limited set of values. This class can only transform `w` and does + not transform `X` or `y`. + + Raises + ------ + ValueError + if `transform_X` or `transform_y` are set. Also raises or if `y` or `w` aren't of shape `(N,)` or `(N, n_tasks)`. + """ + + def __init__(self, dataset: Dataset): # BalancingTransformer can only transform weights. - assert not transform_X - assert not transform_y - assert transform_w + super(BalancingTransformer, self).__init__( + transform_w=True, dataset=dataset) # Compute weighting factors from dataset. 
y = dataset.y w = dataset.w - # Ensure dataset is binary - np.testing.assert_allclose(sorted(np.unique(y)), np.array([0., 1.])) + # Handle 1-D case + if len(y.shape) == 1: + y = np.reshape(y, (len(y), 1)) + if len(w.shape) == 1: + w = np.reshape(w, (len(w), 1)) + if len(y.shape) != 2: + raise ValueError("y must be of shape (N,) or (N, n_tasks)") + if len(w.shape) != 2: + raise ValueError("w must be of shape (N,) or (N, n_tasks)") + self.classes = sorted(np.unique(y)) weights = [] for ind, task in enumerate(dataset.get_task_names()): task_w = w[:, ind] task_y = y[:, ind] # Remove labels with zero weights task_y = task_y[task_w != 0] - num_positives = np.count_nonzero(task_y) - num_negatives = len(task_y) - num_positives - if num_positives > 0: - pos_weight = float(num_negatives) / num_positives - else: - pos_weight = 1 - neg_weight = 1 - weights.append((neg_weight, pos_weight)) + N_task = len(task_y) + class_counts = [] + # Note that we may have 0 elements of a given class since we remove those + # labels with zero weight. This typically happens in multitask datasets + # where some datapoints only have labels for some tasks. + for c in self.classes: + # this works because task_y is 1D + num_c = len(np.where(task_y == c)[0]) + class_counts.append(num_c) + # This is the right ratio since N_task/num_c * num_c = N_task + # for all classes + class_weights = [ + N_task / float(num_c) if num_c > 0 else 0 for num_c in class_counts + ] + weights.append(class_weights) self.weights = weights - def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w) arrays. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of weights. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ w_balanced = np.zeros_like(w) - for ind in range(y.shape[1]): - task_y = y[:, ind] - task_w = w[:, ind] - zero_indices = np.logical_and(task_y == 0, task_w != 0) - one_indices = np.logical_and(task_y == 1, task_w != 0) - w_balanced[zero_indices, ind] = self.weights[ind][0] - w_balanced[one_indices, ind] = self.weights[ind][1] - return (X, y, w_balanced) + if len(y.shape) == 1 and len(w.shape) == 2 and w.shape[1] == 1: + y = np.expand_dims(y, 1) + if len(y.shape) == 1: + n_tasks = 1 + elif len(y.shape) == 2: + n_tasks = y.shape[1] + else: + raise ValueError("y must be of shape (N,) or (N, n_tasks)") + for ind in range(n_tasks): + if n_tasks == 1: + task_y = y + task_w = w + else: + task_y = y[:, ind] + task_w = w[:, ind] + for i, c in enumerate(self.classes): + class_indices = np.logical_and(task_y == c, task_w != 0) + # Set to the class weight computed previously + if n_tasks == 1: + w_balanced[class_indices] = self.weights[ind][i] + else: + w_balanced[class_indices, ind] = self.weights[ind][i] + return (X, y, w_balanced, ids) class CDFTransformer(Transformer): - """Histograms the data and assigns values based on sorted list.""" - """Acts like a Cumulative Distribution Function (CDF).""" + """Histograms the data and assigns values based on sorted list. - def __init__(self, transform_X=False, transform_y=False, dataset=None, - bins=2): - self.transform_X = transform_X - self.transform_y = transform_y + Acts like a Cumulative Distribution Function (CDF). If given a dataset of + samples from a continuous distribution computes the CDF of this dataset and + replaces values with their corresponding CDF values. + + Examples + -------- + Let's look at an example where we transform only features. 
+ + >>> N = 10 + >>> n_feat = 5 + >>> n_bins = 100 + + Note that we're using 100 bins for our CDF histogram + + >>> import numpy as np + >>> X = np.random.normal(size=(N, n_feat)) + >>> y = np.random.randint(2, size=(N,)) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> cdftrans = dc.trans.CDFTransformer(transform_X=True, dataset=dataset, bins=n_bins) + >>> dataset = cdftrans.transform(dataset) + + Note that you can apply this transformation to `y` as well + + >>> X = np.random.normal(size=(N, n_feat)) + >>> y = np.random.normal(size=(N,)) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> cdftrans = dc.trans.CDFTransformer(transform_y=True, dataset=dataset, bins=n_bins) + >>> dataset = cdftrans.transform(dataset) + """ + + def __init__(self, + transform_X: bool = False, + transform_y: bool = False, + dataset: Optional[Dataset] = None, + bins: int = 2): + """Initialize this transformer. + + Parameters + ---------- + transform_X: bool, optional (default False) + Whether to transform X + transform_y: bool, optional (default False) + Whether to transform y + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed + bins: int, optional (default 2) + Number of bins to use when computing histogram. + """ + super(CDFTransformer, self).__init__( + transform_X=transform_X, transform_y=transform_y) self.bins = bins - self.y = dataset.y - # self.w = dataset.w + if transform_y: + if dataset is None: + raise ValueError("dataset must be specified when transforming y") + self.y = dataset.y - # TODO (flee2): for transform_y, figure out weights + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Performs CDF transform on data. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. 
+ ids: np.ndarray + Array of identifiers - def transform(self, dataset, bins): - """Performs CDF transform on data.""" - X, y, w, ids = (dataset.X, dataset.y, dataset.w, dataset.ids) + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ w_t = w - ids_t = ids if self.transform_X: X_t = get_cdf_values(X, self.bins) y_t = y - if self.transform_y: + elif self.transform_y: X_t = X y_t = get_cdf_values(y, self.bins) - # print("y will not be transformed by CDFTransformer, for now.") - return NumpyDataset(X_t, y_t, w_t, ids_t) + return X_t, y_t, w_t, ids - def untransform(self, z): - # print("Cannot undo CDF Transformer, for now.") + def untransform(self, z: np.ndarray) -> np.ndarray: + """Undo transformation on provided data. + + Note that this transformation is only undone for y. + + Parameters + ---------- + z: np.ndarray, + Transformed y array + + Returns + ------- + np.ndarray + Array with the transformation undone. + """ # Need this for transform_y if self.transform_y: return self.y + else: + raise NotImplementedError -def get_cdf_values(array, bins): - # array = np.transpose(array) +def get_cdf_values(array: np.ndarray, bins: int) -> np.ndarray: + """Helper function to compute CDF values. + + Parameters + ---------- + array: np.ndarray + Must be of shape `(n_rows, n_cols)` or `(n_rows,)` + bins: int + Number of bins to split data into. 
+ + Returns + ------- + array_t: np.ndarray + Array with sorted histogram values + """ + # Handle 1D case + if len(array.shape) == 1: + array = np.reshape(array, (len(array), 1)) n_rows = array.shape[0] n_cols = array.shape[1] array_t = np.zeros((n_rows, n_cols)) @@ -570,18 +1155,97 @@ def get_cdf_values(array, bins): class PowerTransformer(Transformer): - """Takes power n transforms of the data based on an input vector.""" + """Takes power n transforms of the data based on an input vector. + + Computes the specified powers of the dataset. This can be useful if you're + looking to add higher order features of the form `x_i^2`, `x_i^3` etc. to + your dataset. + + Examples + -------- + Let's look at an example where we transform only `X`. + + >>> N = 10 + >>> n_feat = 5 + >>> powers = [1, 2, 0.5] + + So in this example, we're taking the identity, squares, and square roots. + Now let's construct our matrices + + >>> import numpy as np + >>> X = np.random.rand(N, n_feat) + >>> y = np.random.normal(size=(N,)) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> trans = dc.trans.PowerTransformer(transform_X=True, dataset=dataset, powers=powers) + >>> dataset = trans.transform(dataset) + + Let's now look at an example where we transform `y`. Note that the `y` + transform expands out the feature dimensions of `y` the same way it does for + `X` so this transform is only well defined for singletask datasets. 
+ + >>> import numpy as np + >>> X = np.random.rand(N, n_feat) + >>> y = np.random.rand(N) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> trans = dc.trans.PowerTransformer(transform_y=True, dataset=dataset, powers=powers) + >>> dataset = trans.transform(dataset) + """ - def __init__(self, transform_X=False, transform_y=False, powers=[1]): - self.transform_X = transform_X - self.transform_y = transform_y + def __init__(self, + transform_X: bool = False, + transform_y: bool = False, + dataset: Optional[Dataset] = None, + powers: List[int] = [1]): + """Initialize this transformer + + Parameters + ---------- + transform_X: bool, optional (default False) + Whether to transform X + transform_y: bool, optional (default False) + Whether to transform y + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed. Note that this argument is ignored since + `PowerTransformer` doesn't require it to be specified. + powers: list[int], optional (default `[1]`) + The list of powers of features/labels to compute. + """ + super(PowerTransformer, self).__init__( + transform_X=transform_X, transform_y=transform_y) self.powers = powers - def transform(self, dataset): - """Performs power transform on data.""" - X, y, w, ids = (dataset.X, dataset.y, dataset.w, dataset.ids) + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Performs power transform on data. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ + if not (len(y.shape) == 1 or len(y.shape) == 2 and y.shape[1] == 1): + raise ValueError("This transform is not defined for multitask y") + # THis reshape is safe because of guard above. + y = np.reshape(y, (len(y), 1)) w_t = w - ids_t = ids n_powers = len(self.powers) if self.transform_X: X_t = np.power(X, self.powers[0]) @@ -589,21 +1253,25 @@ class PowerTransformer(Transformer): X_t = np.hstack((X_t, np.power(X, self.powers[i]))) y_t = y if self.transform_y: - # print("y will not be transformed by PowerTransformer, for now.") y_t = np.power(y, self.powers[0]) for i in range(1, n_powers): y_t = np.hstack((y_t, np.power(y, self.powers[i]))) X_t = X - """ - shutil.rmtree(dataset.data_dir) - os.makedirs(dataset.data_dir) - DiskDataset.from_numpy(dataset.data_dir, X_t, y_t, w_t, ids_t) - return dataset - """ - return NumpyDataset(X_t, y_t, w_t, ids_t) + return (X_t, y_t, w_t, ids) - def untransform(self, z): - # print("Cannot undo Power Transformer, for now.") + def untransform(self, z: np.ndarray) -> np.ndarray: + """Undo transformation on provided data. + + Parameters + ---------- + z: np.ndarray, + Transformed y array + + Returns + ------- + np.ndarray + Array with the power transformation undone. + """ n_powers = len(self.powers) orig_len = (z.shape[1]) // n_powers z = z[:, :orig_len] @@ -614,30 +1282,30 @@ class PowerTransformer(Transformer): class CoulombFitTransformer(Transformer): """Performs randomization and binarization operations on batches of Coulomb Matrix features during fit. 
- Example: - - >>> n_samples = 10 - >>> n_features = 3 - >>> n_tasks = 1 - >>> ids = np.arange(n_samples) - >>> X = np.random.rand(n_samples, n_features, n_features) - >>> y = np.zeros((n_samples, n_tasks)) - >>> w = np.ones((n_samples, n_tasks)) - >>> dataset = dc.data.NumpyDataset(X, y, w, ids) - >>> fit_transformers = [dc.trans.CoulombFitTransformer(dataset)] - >>> model = dc.models.MultitaskFitTransformRegressor(n_tasks, - ... [n_features, n_features], batch_size=n_samples, fit_transformers=fit_transformers, n_evals=1) - >>> print(model.n_features) - 12 + Examples + -------- + >>> n_samples = 10 + >>> n_features = 3 + >>> n_tasks = 1 + >>> ids = np.arange(n_samples) + >>> X = np.random.rand(n_samples, n_features, n_features) + >>> y = np.zeros((n_samples, n_tasks)) + >>> w = np.ones((n_samples, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w, ids) + >>> fit_transformers = [dc.trans.CoulombFitTransformer(dataset)] + >>> model = dc.models.MultitaskFitTransformRegressor(n_tasks, + ... [n_features, n_features], batch_size=n_samples, fit_transformers=fit_transformers, n_evals=1) + >>> print(model.n_features) + 12 """ - def __init__(self, dataset): + def __init__(self, dataset: Dataset): """Initializes CoulombFitTransformer. - Parameters: + Parameters ---------- - dataset: dc.data.Dataset object - + dataset: dc.data.Dataset + Dataset object to be transformed. """ X = dataset.X num_atoms = X.shape[1] @@ -652,21 +1320,20 @@ class CoulombFitTransformer(Transformer): self.nbout = X.shape[1] self.mean = X.mean(axis=0) self.std = (X - self.mean).std() + super(CoulombFitTransformer, self).__init__(transform_X=True) - def realize(self, X): + def realize(self, X: np.ndarray) -> np.ndarray: """Randomize features. 
- Parameters: + Parameters ---------- X: np.ndarray Features - Returns: + Returns ------- X: np.ndarray Randomized features - - """ def _realize_(x): @@ -679,97 +1346,153 @@ class CoulombFitTransformer(Transformer): return np.array([_realize_(z) for z in X]) - def normalize(self, X): + def normalize(self, X: np.ndarray) -> np.ndarray: """Normalize features. - Parameters: + Parameters ---------- X: np.ndarray Features - Returns: + Returns ------- X: np.ndarray Normalized features - """ return (X - self.mean) / self.std - def expand(self, X): + def expand(self, X: np.ndarray) -> np.ndarray: """Binarize features. - Parameters: + Parameters ---------- X: np.ndarray Features - Returns: + Returns ------- X: np.ndarray Binarized features - """ Xexp = [] for i in range(X.shape[1]): - for k in np.arange(0, self.max[i] + self.step, self.step): + for k in np.arange(0, self.max[i] + self.step, self.step): # type: ignore Xexp += [np.tanh((X[:, i] - k) / self.step)] return np.array(Xexp).T - def X_transform(self, X): + def X_transform(self, X: np.ndarray) -> np.ndarray: """Perform Coulomb Fit transform on features. - Parameters: + Parameters ---------- X: np.ndarray Features - Returns: + Returns ------- X: np.ndarray Transformed features - """ X = self.normalize(self.expand(self.realize(X))) return X - def transform_array(self, X, y, w): + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Performs randomization and binarization operations on data. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ X = self.X_transform(X) - return (X, y, w) + return (X, y, w, ids) def untransform(self, z): + "Not implemented." raise NotImplementedError( "Cannot untransform datasets with FitTransformer.") -class IRVTransformer(): - """Performs transform from ECFP to IRV features(K nearest neibours).""" +class IRVTransformer(Transformer): + """Performs transform from ECFP to IRV features(K nearest neighbors). + + This transformer is required by `MultitaskIRVClassifier` as a preprocessing + step before training. + + Examples + -------- + Let's start by defining the parameters of the dataset we're about to + transform. - def __init__(self, K, n_tasks, dataset, transform_y=False, transform_x=False): + >>> n_feat = 128 + >>> N = 20 + >>> n_tasks = 2 + + Let's now make our dataset object + + >>> import numpy as np + >>> import deepchem as dc + >>> X = np.random.randint(2, size=(N, n_feat)) + >>> y = np.zeros((N, n_tasks)) + >>> w = np.ones((N, n_tasks)) + >>> dataset = dc.data.NumpyDataset(X, y, w) + + And let's apply our transformer with 10 nearest neighbors. + + >>> K = 10 + >>> trans = dc.trans.IRVTransformer(K, n_tasks, dataset) + >>> dataset = trans.transform(dataset) + + Note + ---- + This class requires TensorFlow to be installed. + """ + + def __init__(self, K: int, n_tasks: int, dataset: Dataset): """Initializes IRVTransformer. 
- Parameters: + + Parameters ---------- - dataset: dc.data.Dataset object - train_dataset K: int number of nearest neighbours being count n_tasks: int number of tasks - + dataset: dc.data.Dataset object + train_dataset """ self.X = dataset.X self.n_tasks = n_tasks self.K = K self.y = dataset.y self.w = dataset.w - self.transform_x = transform_x - self.transform_y = transform_y + super(IRVTransformer, self).__init__(transform_X=True) - def realize(self, similarity, y, w): + def realize(self, similarity: np.ndarray, y: np.ndarray, + w: np.ndarray) -> List: """find samples with top ten similarity values in the reference dataset - Parameters: - ----------- + Parameters + ---------- similarity: np.ndarray similarity value between target dataset and reference dataset should have size of (n_samples_in_target, n_samples_in_reference) @@ -778,12 +1501,11 @@ class IRVTransformer(): w: np.array weights for a single task - Return: - ---------- + Returns + ------- features: list n_samples * np.array of size (2*K,) each array includes K similarity values and corresponding labels - """ features = [] similarity_xs = similarity * np.sign(w) @@ -800,49 +1522,51 @@ class IRVTransformer(): top_label = tf.gather(y, indice) values.append(value) top_labels.append(top_label) - values = np.concatenate(values, axis=0) - top_labels = np.concatenate(top_labels, axis=0) + values_np = np.concatenate(values, axis=0) + top_labels_np = np.concatenate(top_labels, axis=0) # concatenate batches of data together - for count in range(values.shape[0]): - if values[count, 0] == 1: + for count in range(values_np.shape[0]): + if values_np[count, 0] == 1: features.append( np.concatenate([ - values[count, 1:(self.K + 1)], top_labels[count, 1:(self.K + 1)] + values_np[count, 1:(self.K + 1)], + top_labels_np[count, 1:(self.K + 1)] ])) # highest similarity is 1: target is in the reference # use the following K points else: features.append( np.concatenate( - [values[count, 0:self.K], top_labels[count, 
0:self.K]])) + [values_np[count, 0:self.K], top_labels_np[count, 0:self.K]])) # highest less than 1: target not in the reference, use top K points return features - def X_transform(self, X_target): + def X_transform(self, X_target: np.ndarray) -> np.ndarray: """ Calculate similarity between target dataset(X_target) and reference dataset(X): #(1 in intersection)/#(1 in union) - similarity = (X_target intersect X)/(X_target union X) - Parameters: - ----------- + + similarity = (X_target intersect X)/(X_target union X) + + Parameters + ---------- X_target: np.ndarray fingerprints of target dataset should have same length with X in the second axis - Returns: - ---------- + Returns + ------- X_target: np.ndarray features of size(batch_size, 2*K*n_tasks) - """ X_target2 = [] n_features = X_target.shape[1] - print('start similarity calculation') + logger.info('start similarity calculation') time1 = time.time() similarity = IRVTransformer.matrix_mul(X_target, np.transpose( self.X)) / (n_features - IRVTransformer.matrix_mul( 1 - X_target, np.transpose(1 - self.X))) time2 = time.time() - print('similarity calculation takes %i s' % (time2 - time1)) + logger.info('similarity calculation takes %i s' % (time2 - time1)) for i in range(self.n_tasks): X_target2.append(self.realize(similarity, self.y[:, i], self.w[:, i])) return np.concatenate([z for z in np.array(X_target2)], axis=1) @@ -883,7 +1607,27 @@ class IRVTransformer(): del result return all_result - def transform(self, dataset): + def transform(self, + dataset: Dataset, + parallel: bool = False, + out_dir: Optional[str] = None, + **kwargs) -> Union[DiskDataset, NumpyDataset]: + """Transforms a given dataset + + Parameters + ---------- + dataset: Dataset + Dataset to transform + parallel: bool, optional, (default False) + Whether to parallelize this transformation. Currently ignored. + out_dir: str, optional (default None) + Directory to write resulting dataset. 
+ + Returns + ------- + DiskDataset or NumpyDataset + `Dataset` object that is transformed. + """ X_length = dataset.X.shape[0] X_trans = [] for count in range(X_length // 5000 + 1): @@ -891,47 +1635,98 @@ class IRVTransformer(): self.X_transform( dataset.X[count * 5000:min((count + 1) * 5000, X_length), :])) X_trans = np.concatenate(X_trans, axis=0) - return NumpyDataset(X_trans, dataset.y, dataset.w, ids=None) + if out_dir is None: + return NumpyDataset(X_trans, dataset.y, dataset.w, ids=None) + return DiskDataset.from_numpy( + X_trans, dataset.y, dataset.w, data_dir=out_dir) def untransform(self, z): + "Not implemented." raise NotImplementedError( "Cannot untransform datasets with IRVTransformer.") class DAGTransformer(Transformer): - """Performs transform from ConvMol adjacency lists to - DAG calculation orders + """Performs transform from ConvMol adjacency lists to DAG calculation orders + + This transformer is used by `DAGModel` before training to transform its + inputs to the correct shape. This expansion turns a molecule with `n` atoms + into `n` DAGs, each with root at a different atom in the molecule. + + Examples + -------- + Let's transform a small dataset of molecules. + + >>> N = 10 + >>> n_feat = 5 + >>> import numpy as np + >>> feat = dc.feat.ConvMolFeaturizer() + >>> X = feat(["C", "CC"]) + >>> y = np.random.rand(N) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> trans = dc.trans.DAGTransformer(max_atoms=5) + >>> dataset = trans.transform(dataset) """ - def __init__(self, - max_atoms=50, - transform_X=True, - transform_y=False, - transform_w=False): + def __init__(self, max_atoms: int = 50): """Initializes DAGTransformer. 
- Only X can be transformed + + Parameters + ---------- + max_atoms: int, optional (Default 50) + Maximum number of atoms to allow """ self.max_atoms = max_atoms - self.transform_X = transform_X - self.transform_y = transform_y - self.transform_w = transform_w - assert self.transform_X - assert not self.transform_y - assert not self.transform_w + super(DAGTransformer, self).__init__(transform_X=True) - def transform_array(self, X, y, w): - """Add calculation orders to ConvMol objects""" - if self.transform_X: - for idm, mol in enumerate(X): - X[idm].parents = self.UG_to_DAG(mol) - return (X, y, w) + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transform the data in a set of (X, y, w, ids) arrays. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ + for idm, mol in enumerate(X): + X[idm].parents = self.UG_to_DAG(mol) + return (X, y, w, ids) def untransform(self, z): + "Not implemented." 
raise NotImplementedError( "Cannot untransform datasets with DAGTransformer.") - def UG_to_DAG(self, sample): + def UG_to_DAG(self, sample: ConvMol) -> List: """This function generates the DAGs for a molecule + + Parameters + ---------- + sample: `ConvMol` + Molecule to transform + + Returns + ------- + List + List of parent adjacency matrices """ # list of calculation orders for DAGs # stemming from one specific atom in the molecule @@ -949,7 +1744,7 @@ class DAGTransformer(Transformer): DAG = [] # list of lists, elements represent the calculation orders # for atoms in the current graph - parent = [[] for i in range(n_atoms)] + parent: List[Any] = [[] for i in range(n_atoms)] # starting from the target atom with index `count` current_atoms = [count] # flags of whether the atom is already included in the DAG @@ -1024,24 +1819,53 @@ class DAGTransformer(Transformer): class ImageTransformer(Transformer): - """ - Convert an image into width, height, channel + """Convert an image into width, height, channel + + Note + ---- + This class require Pillow to be installed. """ - def __init__(self, - size, - transform_X=True, - transform_y=False, - transform_w=False): - """Initializes transformation based on dataset statistics.""" + def __init__(self, size: Tuple[int, int]): + """Initializes ImageTransformer. + + Parameters + ---------- + size: Tuple[int, int] + The image size, a tuple of (width, height). + """ self.size = size - self.transform_X = True - self.transform_y = False - self.transform_w = False + super(ImageTransformer, self).__init__(transform_X=True) def transform_array(self, X, y, w): - """Transform the data in a set of (X, y, w) arrays.""" - from PIL import Image + """Transform the data in a set of (X, y, w, ids) arrays. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. 
+ + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ + try: + from PIL import Image + except ModuleNotFoundError: + raise ImportError("This function requires Pillow to be installed.") images = [scipy.ndimage.imread(x, mode='RGB') for x in X] images = [Image.fromarray(x).resize(self.size) for x in images] return np.array(images), y, w @@ -1049,6 +1873,10 @@ class ImageTransformer(Transformer): class ANITransformer(Transformer): """Performs transform from 3D coordinates to ANI symmetry functions + + Note + ---- + This class requires TensorFlow to be installed. """ def __init__(self, @@ -1059,10 +1887,7 @@ class ANITransformer(Transformer): angular_length=8, atom_cases=[1, 6, 7, 8, 16], atomic_number_differentiated=True, - coordinates_in_bohr=True, - transform_X=True, - transform_y=False, - transform_w=False): + coordinates_in_bohr=True): """ Only X can be transformed """ @@ -1074,19 +1899,13 @@ class ANITransformer(Transformer): self.atom_cases = atom_cases self.atomic_number_differentiated = atomic_number_differentiated self.coordinates_in_bohr = coordinates_in_bohr - self.transform_X = transform_X - self.transform_y = transform_y - self.transform_w = transform_w self.compute_graph = self.build() self.sess = tf.Session(graph=self.compute_graph) self.transform_batch_size = 32 - assert self.transform_X - assert not self.transform_y - assert not self.transform_w + super(ANITransformer, self).__init__(transform_X=True) def transform_array(self, X, y, w): if self.transform_X: - n_samples = X.shape[0] X_out = [] num_transformed = 0 @@ -1101,7 +1920,7 @@ class ANITransformer(Transformer): [self.outputs], feed_dict={self.inputs: X_batch})[0] X_out.append(output) num_transformed = num_transformed + X_batch.shape[0] - print('%i samples transformed' % num_transformed) + logger.info('%i samples 
transformed' % num_transformed) start += 1 if end >= len(X): break @@ -1118,7 +1937,8 @@ class ANITransformer(Transformer): """ tensorflow computation graph for transform """ graph = tf.Graph() with graph.as_default(): - self.inputs = tf.placeholder(tf.float32, shape=(None, self.max_atoms, 4)) + self.inputs = tf.keras.Input( + dtype=tf.float32, shape=(None, self.max_atoms, 4)) atom_numbers = tf.cast(self.inputs[:, :, 0], tf.int32) flags = tf.sign(atom_numbers) flags = tf.cast( @@ -1157,7 +1977,8 @@ class ANITransformer(Transformer): # Cutoff with threshold Rc d_flag = flags * tf.sign(cutoff - d) d_flag = tf.nn.relu(d_flag) - d_flag = d_flag * tf.expand_dims((1 - tf.eye(self.max_atoms)), 0) + d_flag = d_flag * tf.expand_dims( + tf.expand_dims((1 - tf.eye(self.max_atoms)), 0), -1) d = 0.5 * (tf.cos(np.pi * d / cutoff) + 1) return d * d_flag @@ -1248,52 +2069,115 @@ class ANITransformer(Transformer): class FeaturizationTransformer(Transformer): - """ - A transformer which runs a featurizer over the X values of a dataset. - Datasets used by this transformer must have rdkit.mol objects as the X - values + """A transformer which runs a featurizer over the X values of a dataset. + + Datasets used by this transformer must be compatible with the internal + featurizer. The idea of this transformer is that it allows for the + application of a featurizer to an existing dataset. 
+ + Examples + -------- + >>> smiles = ["C", "CC"] + >>> X = np.array(smiles) + >>> y = np.array([1, 0]) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> trans = dc.trans.FeaturizationTransformer(dataset, dc.feat.CircularFingerprint()) + >>> dataset = trans.transform(dataset) """ def __init__(self, - transform_X=False, - transform_y=False, - transform_w=False, - dataset=None, - featurizer=None): + dataset: Optional[Dataset] = None, + featurizer: Optional[Featurizer] = None): + """Initialization of FeaturizationTransformer + + Parameters + ---------- + dataset: dc.data.Dataset object, optional (default None) + Dataset to be transformed + featurizer: dc.feat.Featurizer object, optional (default None) + Featurizer applied to perform transformations. + """ + if featurizer is None: + raise ValueError("featurizer must be specified.") self.featurizer = featurizer - if not transform_X: - raise ValueError("FeaturizingTransfomer can only be used on X") super(FeaturizationTransformer, self).__init__( - transform_X=transform_X, - transform_y=transform_y, - transform_w=transform_w, - dataset=dataset) + transform_X=True, dataset=dataset) - def transform_array(self, X, y, w): + def transform_array( + self, X: np.ndarray, y: np.ndarray, w: np.ndarray, + ids: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Transforms arrays of rdkit mols using internal featurizer. + + Parameters + ---------- + X: np.ndarray + Array of features + y: np.ndarray + Array of labels + w: np.ndarray + Array of weights. + ids: np.ndarray + Array of identifiers. + + Returns + ------- + Xtrans: np.ndarray + Transformed array of features + ytrans: np.ndarray + Transformed array of labels + wtrans: np.ndarray + Transformed array of weights + idstrans: np.ndarray + Transformed array of ids + """ X = self.featurizer.featurize(X) - return X, y, w + return X, y, w, ids + +class DataTransforms(object): + """Applies different data transforms to images. 
-class DataTransforms(Transformer): - """Applies different data transforms to images.""" + This utility class facilitates various image transformations that may be of + use for handling image datasets. + + Note + ---- + This class requires PIL to be installed. + """ def __init__(self, Image): self.Image = Image def scale(self, h, w): - """ Scales the image - Parameters: - h - height of the images - w - width of the images + """Scales the image + + Parameters + ---------- + h: int + Height of the images + w: int + Width of the images + + Returns + ------- + np.ndarray + The scaled image. """ from PIL import Image return Image.fromarray(self.Image).resize((h, w)) def flip(self, direction="lr"): - """ Flips the image - Parameters: - direction - "lr" denotes left-right fliplr - "ud" denotes up-down flip + """Flips the image + + Parameters + ---------- + direction: str + "lr" denotes left-right flip and "ud" denotes up-down flip. + + Returns + ------- + np.ndarray + The flipped image. """ if direction == "lr": return np.fliplr(self.Image) @@ -1305,40 +2189,49 @@ class DataTransforms(Transformer): ) def rotate(self, angle=0): - """ Rotates the image + """Rotates the image Parameters ---------- angle: float (default = 0 i.e no rotation) - Denotes angle by which the image should be rotated (in Degrees) + Denotes angle by which the image should be rotated (in Degrees) Returns - ---------- - The rotated imput array + ------- + np.ndarray + The rotated image. """ return scipy.ndimage.rotate(self.Image, angle) def gaussian_blur(self, sigma=0.2): - """ Adds gaussian noise to the image - Parameters: - sigma - std dev. of the gaussian distribution + """Adds gaussian noise to the image + + Parameters + ---------- + sigma: float + Std dev. of the gaussian distribution + + Returns + ------- + np.ndarray + The image added gaussian noise. 
""" return scipy.ndimage.gaussian_filter(self.Image, sigma) def center_crop(self, x_crop, y_crop): - """ Crops the image from the center + """Crops the image from the center Parameters ---------- x_crop: int - the total number of pixels to remove in the horizontal direction, evenly split between the left and right sides + the total number of pixels to remove in the horizontal direction, evenly split between the left and right sides y_crop: int - the total number of pixels to remove in the vertical direction, evenly split between the top and bottom sides + the total number of pixels to remove in the vertical direction, evenly split between the top and bottom sides Returns - ---------- - The center cropped input array - + ------- + np.ndarray + The center cropped image. """ y = self.Image.shape[0] x = self.Image.shape[1] @@ -1347,46 +2240,59 @@ class DataTransforms(Transformer): return self.Image[y_start:y_start + y_crop, x_start:x_start + x_crop] def crop(self, left, top, right, bottom): - """ Crops the image and returns the specified rectangular region from an image + """Crops the image and returns the specified rectangular region from an image Parameters ---------- left: int - the number of pixels to exclude from the left of the image + the number of pixels to exclude from the left of the image top: int - the number of pixels to exclude from the top of the image + the number of pixels to exclude from the top of the image right: int - the number of pixels to exclude from the right of the image + the number of pixels to exclude from the right of the image bottom: int - the number of pixels to exclude from the bottom of the image + the number of pixels to exclude from the bottom of the image Returns - ---------- - The cropped input array + ------- + np.ndarray + The cropped image. """ y = self.Image.shape[0] x = self.Image.shape[1] return self.Image[top:y - bottom, left:x - right] def convert2gray(self): - """ Converts the image to grayscale. 
The coefficients correspond to the Y' component of the Y'UV color system. - - Returns - ---------- - The grayscale image. + """Converts the image to grayscale. The coefficients correspond to the Y' component of the Y'UV color system. + Returns + ------- + np.ndarray + The grayscale image. """ return np.dot(self.Image[..., :3], [0.2989, 0.5870, 0.1140]) def shift(self, width, height, mode='constant', order=3): """Shifts the image - Parameters: - width - amount of width shift(positive values shift image right ) - height - amount of height shift(positive values shift image lower) - mode - Points outside the boundaries of the input are filled according to the given mode - (‘constant’, ‘nearest’, ‘reflect’ or ‘wrap’). Default is ‘constant’ - order - The order of the spline interpolation, default is 3. The order has to be in the range 0-5. - """ + + Parameters + ---------- + width: float + Amount of width shift (positive values shift image right ) + height: float + Amount of height shift(positive values shift image lower) + mode: str + Points outside the boundaries of the input are filled according to the + given mode: (‘constant’, ‘nearest’, ‘reflect’ or ‘wrap’). Default is + ‘constant’ + order: int + The order of the spline interpolation, default is 3. The order has to be in the range 0-5. + + Returns + ------- + np.ndarray + The shifted image. + """ if len(self.Image.shape) == 2: return scipy.ndimage.shift( self.Image, [height, width], order=order, mode=mode) @@ -1395,23 +2301,42 @@ class DataTransforms(Transformer): self.Image, [height, width, 0], order=order, mode=mode) def gaussian_noise(self, mean=0, std=25.5): - '''Adds gaussian noise to the image - Parameters: - mean - mean of gaussian. - std - standard deviation of gaussian. - ''' + """Adds gaussian noise to the image + + Parameters + ---------- + mean: float + Mean of gaussian. + std: float + Standard deviation of gaussian. + + Returns + ------- + np.ndarray + The image added gaussian noise. 
+ """ x = self.Image x = x + np.random.normal(loc=mean, scale=std, size=self.Image.shape) return x def salt_pepper_noise(self, prob=0.05, salt=255, pepper=0): - '''Adds salt and pepper noise to the image - Parameters: - prob - probability of the noise. - salt - value of salt noise. - pepper - value of pepper noise. - ''' + """Adds salt and pepper noise to the image + + Parameters + ---------- + prob: float + probability of the noise. + salt: float + value of salt noise. + pepper: float + value of pepper noise. + + Returns + ------- + np.ndarray + The image added salt and pepper noise. + """ noise = np.random.random(size=self.Image.shape) x = self.Image @@ -1425,11 +2350,12 @@ class DataTransforms(Transformer): Parameters ---------- size: int - The kernel size in pixels. + The kernel size in pixels. Returns - ---------- - The median filtered image. + ------- + np.ndarray + The median filtered image. """ from PIL import Image, ImageFilter image = Image.fromarray(self.Image) diff --git a/deepchem/utils/__init__.py b/deepchem/utils/__init__.py index 8069d267da690903a0044b2fa5494012fa0ac2ed..8aecf1db505648fbcf689271c49761b079d38a9a 100644 --- a/deepchem/utils/__init__.py +++ b/deepchem/utils/__init__.py @@ -1,225 +1,91 @@ """ Miscellaneous utility functions. """ - -__author__ = "Steven Kearnes" -__copyright__ = "Copyright 2014, Stanford University" -__license__ = "BSD 3-clause" - -import gzip -import numpy as np -import os -import pandas as pd -import sys -import tempfile -import tarfile -import zipfile - -try: - from urllib.request import urlretrieve # Python 3 -except: - from urllib import urlretrieve # Python 2 - - -def pad_array(x, shape, fill=0, both=False): - """ - Pad an array with a fill value. - - Parameters - ---------- - x : ndarray - Matrix. - shape : tuple or int - Desired shape. If int, all dimensions are padded to that size. - fill : object, optional (default 0) - Fill value. 
- both : bool, optional (default False) - If True, split the padding on both sides of each axis. If False, - padding is applied to the end of each axis. - """ - x = np.asarray(x) - if not isinstance(shape, tuple): - shape = tuple(shape for _ in range(x.ndim)) - pad = [] - for i in range(x.ndim): - diff = shape[i] - x.shape[i] - assert diff >= 0 - if both: - a, b = divmod(diff, 2) - b += a - pad.append((a, b)) - else: - pad.append((0, diff)) - pad = tuple(pad) - x = np.pad(x, pad, mode='constant', constant_values=fill) - return x - - -def get_data_dir(): - """Get the DeepChem data directory.""" - if 'DEEPCHEM_DATA_DIR' in os.environ: - return os.environ['DEEPCHEM_DATA_DIR'] - return tempfile.gettempdir() - - -# The number of elements to print for dataset ids/tasks -_print_threshold = 10 - - -def get_print_threshold(): - """Return the printing threshold for datasets. - - The print threshold is the number of elements from ids/tasks to - print when printing representations of `Dataset` objects. - - Returns - ---------- - threshold: int - Number of elements that will be printed - """ - return _print_threshold - - -def set_print_threshold(threshold): - """Set print threshold - - The print threshold is the number of elements from ids/tasks to - print when printing representations of `Dataset` objects. - - Parameters - ---------- - threshold: int - Number of elements to print. - """ - global _print_threshold - _print_threshold = threshold - - -# If a dataset contains more than this number of elements, it won't -# print any dataset ids -_max_print_size = 1000 - - -def get_max_print_size(): - """Return the max print size for a datset. - - If a dataset is large, printing `self.ids` as part of a string - representation can be very slow. This field controls the maximum - size for a dataset before ids are no longer printed. - - Returns - ------- - max_print_size: int - Maximum length of a dataset for ids to be printed in string - representation. 
- """ - return _max_print_size - - -def set_max_print_size(max_print_size): - """Set max_print_size - - If a dataset is large, printing `self.ids` as part of a string - representation can be very slow. This field controls the maximum - size for a dataset before ids are no longer printed. - - Parameters - ---------- - max_print_size: int - Maximum length of a dataset for ids to be printed in string - representation. - """ - global _max_print_size - _max_print_size = max_print_size - - -def download_url(url, dest_dir=get_data_dir(), name=None): - """Download a file to disk. - - Parameters - ---------- - url: str - the URL to download from - dest_dir: str - the directory to save the file in - name: str - the file name to save it as. If omitted, it will try to extract a file name from the URL - """ - if name is None: - name = url - if '?' in name: - name = name[:name.find('?')] - if '/' in name: - name = name[name.rfind('/') + 1:] - urlretrieve(url, os.path.join(dest_dir, name)) - - -def untargz_file(file, dest_dir=get_data_dir(), name=None): - """Untar and unzip a .tar.gz file to disk. - - Parameters - ---------- - file: str - the filepath to decompress - dest_dir: str - the directory to save the file in - name: str - the file name to save it as. If omitted, it will use the file name - """ - if name is None: - name = file - tar = tarfile.open(name) - tar.extractall(path=dest_dir) - tar.close() - - -def unzip_file(file, dest_dir=None, name=None): - """Unzip a .zip file to disk. - - Parameters - ---------- - file: str - the filepath to decompress - dest_dir: str - the directory to save the file in - name: str - the directory name to unzip it to. If omitted, it will use the file - name - """ - if name is None: - name = file - if dest_dir is None: - dest_dir = os.path.join(get_data_dir, name) - with zipfile.ZipFile(file, "r") as zip_ref: - zip_ref.extractall(dest_dir) - - -class ScaffoldGenerator(object): - """ - Generate molecular scaffolds. 
- - Parameters - ---------- - include_chirality : : bool, optional (default False) - Include chirality in scaffolds. - """ - - def __init__(self, include_chirality=False): - self.include_chirality = include_chirality - - def get_scaffold(self, mol): - """ - Get Murcko scaffolds for molecules. - - Murcko scaffolds are described in DOI: 10.1021/jm9602928. - They are essentially that part of the molecule consisting of - rings and the linker atoms between them. - - Parameters - ---------- - mols : array_like - Molecules. - """ - from rdkit.Chem.Scaffolds import MurckoScaffold - return MurckoScaffold.MurckoScaffoldSmiles( - mol=mol, includeChirality=self.include_chirality) +# flake8: noqa +from deepchem.utils.conformers import ConformerGenerator +from deepchem.utils.evaluate import relative_difference +from deepchem.utils.evaluate import Evaluator +from deepchem.utils.evaluate import GeneratorEvaluator + +from deepchem.utils.coordinate_box_utils import CoordinateBox +from deepchem.utils.coordinate_box_utils import intersect_interval +from deepchem.utils.coordinate_box_utils import intersection +from deepchem.utils.coordinate_box_utils import union +from deepchem.utils.coordinate_box_utils import merge_overlapping_boxes +from deepchem.utils.coordinate_box_utils import get_face_boxes + +from deepchem.utils.data_utils import pad_array +from deepchem.utils.data_utils import get_data_dir +from deepchem.utils.data_utils import download_url +from deepchem.utils.data_utils import untargz_file +from deepchem.utils.data_utils import unzip_file +from deepchem.utils.data_utils import load_image_files +from deepchem.utils.data_utils import load_sdf_files +from deepchem.utils.data_utils import load_csv_files +from deepchem.utils.data_utils import load_json_files +from deepchem.utils.data_utils import load_pickle_files +from deepchem.utils.data_utils import load_data +from deepchem.utils.data_utils import save_to_disk +from deepchem.utils.data_utils import load_from_disk +from 
deepchem.utils.data_utils import save_dataset_to_disk +from deepchem.utils.data_utils import load_dataset_from_disk + +from deepchem.utils.debug_utils import get_print_threshold +from deepchem.utils.debug_utils import set_print_threshold +from deepchem.utils.debug_utils import get_max_print_size +from deepchem.utils.debug_utils import set_max_print_size + +from deepchem.utils.fragment_utils import AtomShim +from deepchem.utils.fragment_utils import MolecularFragment +from deepchem.utils.fragment_utils import get_partial_charge +from deepchem.utils.fragment_utils import merge_molecular_fragments +from deepchem.utils.fragment_utils import get_mol_subset +from deepchem.utils.fragment_utils import strip_hydrogens +from deepchem.utils.fragment_utils import get_contact_atom_indices +from deepchem.utils.fragment_utils import reduce_molecular_complex_to_contacts + +from deepchem.utils.genomics_utils import seq_one_hot_encode +from deepchem.utils.genomics_utils import encode_bio_sequence + +from deepchem.utils.geometry_utils import unit_vector +from deepchem.utils.geometry_utils import angle_between +from deepchem.utils.geometry_utils import generate_random_unit_vector +from deepchem.utils.geometry_utils import generate_random_rotation_matrix +from deepchem.utils.geometry_utils import is_angle_within_cutoff +from deepchem.utils.geometry_utils import compute_centroid +from deepchem.utils.geometry_utils import subtract_centroid +from deepchem.utils.geometry_utils import compute_protein_range +from deepchem.utils.geometry_utils import compute_pairwise_distances + +from deepchem.utils.hash_utils import hash_ecfp +from deepchem.utils.hash_utils import hash_ecfp_pair +from deepchem.utils.hash_utils import vectorize + +from deepchem.utils.molecule_feature_utils import one_hot_encode +from deepchem.utils.molecule_feature_utils import get_atom_type_one_hot +from deepchem.utils.molecule_feature_utils import construct_hydrogen_bonding_info +from deepchem.utils.molecule_feature_utils 
import get_atom_hydrogen_bonding_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_is_in_aromatic_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_hybridization_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_total_num_Hs_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_chirality_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_formal_charge +from deepchem.utils.molecule_feature_utils import get_atom_partial_charge +from deepchem.utils.molecule_feature_utils import get_atom_total_degree_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_type_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_in_same_ring_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_conjugated_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_stereo_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_graph_distance_one_hot + +from deepchem.utils.pdbqt_utils import pdbqt_to_pdb +from deepchem.utils.pdbqt_utils import convert_protein_to_pdbqt +from deepchem.utils.pdbqt_utils import convert_mol_to_pdbqt + +from deepchem.utils.vina_utils import write_vina_conf +from deepchem.utils.vina_utils import load_docked_ligands +from deepchem.utils.vina_utils import prepare_inputs + +from deepchem.utils.voxel_utils import convert_atom_to_voxel +from deepchem.utils.voxel_utils import convert_atom_pair_to_voxel +from deepchem.utils.voxel_utils import voxelize diff --git a/deepchem/utils/conformers.py b/deepchem/utils/conformers.py index 43146a49a6a6573829413b434c2f5bc9e8072f9a..3575ad9bbb9b3a3b0e53a48590f8c1efb9e8a3c2 100644 --- a/deepchem/utils/conformers.py +++ b/deepchem/utils/conformers.py @@ -2,54 +2,57 @@ Conformer generation. 
""" -__author__ = "Steven Kearnes" -__copyright__ = "Copyright 2014, Stanford University" -__license__ = "3-clause BSD" - import numpy as np +from typing import Any, List, Optional +from deepchem.utils.typing import RDKitMol class ConformerGenerator(object): """ Generate molecule conformers. + Notes + ----- Procedure - --------- 1. Generate a pool of conformers. 2. Minimize conformers. 3. Prune conformers using an RMSD threshold. Note that pruning is done _after_ minimization, which differs from the - protocol described in the references. + protocol described in the references [1]_ [2]_. References ---------- - * http://rdkit.org/docs/GettingStartedInPython.html - #working-with-3d-molecules - * http://pubs.acs.org/doi/full/10.1021/ci2004658 + .. [1] http://rdkit.org/docs/GettingStartedInPython.html#working-with-3d-molecules + .. [2] http://pubs.acs.org/doi/full/10.1021/ci2004658 - Parameters - ---------- - max_conformers : int, optional (default 1) + Notes + ----- + This class requires RDKit to be installed. + """ + + def __init__(self, + max_conformers: int = 1, + rmsd_threshold: float = 0.5, + force_field: str = 'uff', + pool_multiplier: int = 10): + """ + Parameters + ---------- + max_conformers: int, optional (default 1) Maximum number of conformers to generate (after pruning). - rmsd_threshold : float, optional (default 0.5) + rmsd_threshold: float, optional (default 0.5) RMSD threshold for pruning conformers. If None or negative, no pruning is performed. - force_field : str, optional (default 'uff') + force_field: str, optional (default 'uff') Force field to use for conformer energy calculation and minimization. Options are 'uff', 'mmff94', and 'mmff94s'. - pool_multiplier : int, optional (default 10) + pool_multiplier: int, optional (default 10) Factor to multiply by max_conformers to generate the initial conformer pool. 
Since conformers are pruned after energy minimization, increasing the size of the pool increases the chance of identifying max_conformers unique conformers. - """ - - def __init__(self, - max_conformers=1, - rmsd_threshold=0.5, - force_field='uff', - pool_multiplier=10): + """ self.max_conformers = max_conformers if rmsd_threshold is None or rmsd_threshold < 0: rmsd_threshold = -1. @@ -57,18 +60,24 @@ class ConformerGenerator(object): self.force_field = force_field self.pool_multiplier = pool_multiplier - def __call__(self, mol): + def __call__(self, mol: RDKitMol) -> RDKitMol: """ Generate conformers for a molecule. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + mol: rdkit.Chem.rdchem.Mol + A new RDKit Mol object containing the chosen conformers, sorted by + increasing energy. """ return self.generate_conformers(mol) - def generate_conformers(self, mol): + def generate_conformers(self, mol: RDKitMol) -> RDKitMol: """ Generate conformers for a molecule. @@ -77,8 +86,14 @@ class ConformerGenerator(object): Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + mol: rdkit.Chem.rdchem.Mol + A new RDKit Mol object containing the chosen conformers, sorted by + increasing energy. """ # initial embedding @@ -98,36 +113,57 @@ class ConformerGenerator(object): return mol - def embed_molecule(self, mol): + def embed_molecule(self, mol: RDKitMol) -> RDKitMol: """ Generate conformers, possibly with pruning. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object with embedded multiple conformers. 
""" - from rdkit import Chem - from rdkit.Chem import AllChem + try: + from rdkit import Chem + from rdkit.Chem import AllChem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + mol = Chem.AddHs(mol) # add hydrogens n_confs = self.max_conformers * self.pool_multiplier AllChem.EmbedMultipleConfs(mol, numConfs=n_confs, pruneRmsThresh=-1.) return mol - def get_molecule_force_field(self, mol, conf_id=None, **kwargs): + def get_molecule_force_field(self, + mol: RDKitMol, + conf_id: Optional[int] = None, + **kwargs) -> Any: """ Get a force field for a molecule. Parameters ---------- - mol : RDKit Mol - Molecule. - conf_id : int, optional - ID of the conformer to associate with the force field. - kwargs : dict, optional - Keyword arguments for force field constructor. - """ - from rdkit.Chem import AllChem + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object with embedded conformers. + conf_id: int, optional + ID of the conformer to associate with the force field. + kwargs: dict, optional + Keyword arguments for force field constructor. + + Returns + ------- + ff: rdkit.ForceField.rdForceField.ForceField + RDKit force field instance for a molecule. + """ + try: + from rdkit.Chem import AllChem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + if self.force_field == 'uff': ff = AllChem.UFFGetMoleculeForceField(mol, confId=conf_id, **kwargs) elif self.force_field.startswith('mmff'): @@ -141,32 +177,32 @@ class ConformerGenerator(object): "'{}'.".format(self.force_field)) return ff - def minimize_conformers(self, mol): + def minimize_conformers(self, mol: RDKitMol) -> None: """ Minimize molecule conformers. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object with embedded conformers. 
""" for conf in mol.GetConformers(): ff = self.get_molecule_force_field(mol, conf_id=conf.GetId()) ff.Minimize() - def get_conformer_energies(self, mol): + def get_conformer_energies(self, mol: RDKitMol) -> np.ndarray: """ Calculate conformer energies. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object with embedded conformers. Returns ------- - energies : array_like - Minimized conformer energies. + energies : np.ndarray + Minimized conformer energies. """ energies = [] for conf in mol.GetConformers(): @@ -176,28 +212,34 @@ class ConformerGenerator(object): energies = np.asarray(energies, dtype=float) return energies - def prune_conformers(self, mol): + def prune_conformers(self, mol: RDKitMol) -> RDKitMol: """ Prune conformers from a molecule using an RMSD threshold, starting with the lowest energy conformer. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object Returns ------- - A new RDKit Mol containing the chosen conformers, sorted by - increasing energy. + new_mol: rdkit.Chem.rdchem.Mol + A new rdkit.Chem.rdchem.Mol containing the chosen conformers, sorted by + increasing energy. 
""" + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + if self.rmsd_threshold < 0 or mol.GetNumConformers() <= 1: return mol energies = self.get_conformer_energies(mol) rmsd = self.get_conformer_rmsd(mol) sort = np.argsort(energies) # sort by increasing energy - keep = [] # always keep lowest-energy conformer + keep: List[float] = [] # always keep lowest-energy conformer discard = [] for i in sort: # always keep lowest-energy conformer @@ -221,26 +263,34 @@ class ConformerGenerator(object): # create a new molecule to hold the chosen conformers # this ensures proper conformer IDs and energy-based ordering - from rdkit import Chem - new = Chem.Mol(mol) - new.RemoveAllConformers() + new_mol = Chem.Mol(mol) + new_mol.RemoveAllConformers() conf_ids = [conf.GetId() for conf in mol.GetConformers()] for i in keep: conf = mol.GetConformer(conf_ids[i]) - new.AddConformer(conf, assignId=True) - return new + new_mol.AddConformer(conf, assignId=True) + return new_mol @staticmethod - def get_conformer_rmsd(mol): + def get_conformer_rmsd(mol: RDKitMol) -> np.ndarray: """ Calculate conformer-conformer RMSD. Parameters ---------- - mol : RDKit Mol - Molecule. + mol: rdkit.Chem.rdchem.Mol + RDKit Mol object + + Returns + ------- + rmsd: np.ndarray + A conformer-conformer RMSD value. 
The shape is `(NumConformers, NumConformers)` """ - from rdkit.Chem import AllChem + try: + from rdkit.Chem import AllChem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + rmsd = np.zeros( (mol.GetNumConformers(), mol.GetNumConformers()), dtype=float) for i, ref_conf in enumerate(mol.GetConformers()): diff --git a/deepchem/utils/coordinate_box_utils.py b/deepchem/utils/coordinate_box_utils.py index a7efe6bcd2484fb714b352aa472fb4bef7aed7af..55b8c9b4fff361950b0767fd883d637756103e52 100644 --- a/deepchem/utils/coordinate_box_utils.py +++ b/deepchem/utils/coordinate_box_utils.py @@ -1,174 +1,9 @@ """This module adds utilities for coordinate boxes""" +from typing import List, Sequence, Tuple import numpy as np from scipy.spatial import ConvexHull -def intersect_interval(interval1, interval2): - """Computes the intersection of two intervals. - - Parameters - ---------- - interval1: tuple[int] - Should be `(x1_min, x1_max)` - interval2: tuple[int] - Should be `(x2_min, x2_max)` - - Returns - ------- - x_intersect: tuple[int] - Should be the intersection. If the intersection is empty returns - `(0, 0)` to represent the empty set. Otherwise is `(max(x1_min, - x2_min), min(x1_max, x2_max))`. - """ - x1_min, x1_max = interval1 - x2_min, x2_max = interval2 - if x1_max < x2_min: - # If interval1 < interval2 entirely - return (0, 0) - elif x2_max < x1_min: - # If interval2 < interval1 entirely - return (0, 0) - x_min = max(x1_min, x2_min) - x_max = min(x1_max, x2_max) - return (x_min, x_max) - - -def intersection(box1, box2): - """Computes the intersection box of provided boxes. - - Parameters - ---------- - box1: `CoordinateBox` - First `CoordinateBox` - box2: `CoordinateBox` - Another `CoordinateBox` to intersect first one with. - - Returns - ------- - A `CoordinateBox` containing the intersection. If the intersection is empty, returns the box with 0 bounds. 
- """ - x_intersection = intersect_interval(box1.x_range, box2.x_range) - y_intersection = intersect_interval(box1.y_range, box2.y_range) - z_intersection = intersect_interval(box1.z_range, box2.z_range) - return CoordinateBox(x_intersection, y_intersection, z_intersection) - - -def union(box1, box2): - """Merges provided boxes to find the smallest union box. - - This method merges the two provided boxes. - - Parameters - ---------- - box1: `CoordinateBox` - First box to merge in - box2: `CoordinateBox` - Second box to merge into this box - - Returns - ------- - Smallest `CoordinateBox` that contains both `box1` and `box2` - """ - x_min = min(box1.x_range[0], box2.x_range[0]) - y_min = min(box1.y_range[0], box2.y_range[0]) - z_min = min(box1.z_range[0], box2.z_range[0]) - x_max = max(box1.x_range[1], box2.x_range[1]) - y_max = max(box1.y_range[1], box2.y_range[1]) - z_max = max(box1.z_range[1], box2.z_range[1]) - return CoordinateBox((x_min, x_max), (y_min, y_max), (z_min, z_max)) - - -def merge_overlapping_boxes(boxes, threshold=.8): - """Merge boxes which have an overlap greater than threshold. - - Parameters - ---------- - boxes: list[CoordinateBox] - A list of `CoordinateBox` objects. - threshold: float, optional (default 0.8) - The volume fraction of the boxes that must overlap for them to be - merged together. - - Returns - ------- - list[CoordinateBox] of merged boxes. This list will have length less - than or equal to the length of `boxes`. 
- """ - outputs = [] - for box in boxes: - for other in boxes: - if box == other: - continue - intersect_box = intersection(box, other) - if (intersect_box.volume() >= threshold * box.volume() or - intersect_box.volume() >= threshold * other.volume()): - box = union(box, other) - unique_box = True - for output in outputs: - if output.contains(box): - unique_box = False - if unique_box: - outputs.append(box) - return outputs - - -def get_face_boxes(coords, pad=5): - """For each face of the convex hull, compute a coordinate box around it. - - The convex hull of a macromolecule will have a series of triangular - faces. For each such triangular face, we construct a bounding box - around this triangle. Think of this box as attempting to capture - some binding interaction region whose exterior is controlled by the - box. Note that this box will likely be a crude approximation, but - the advantage of this technique is that it only uses simple geometry - to provide some basic biological insight into the molecule at hand. - - The `pad` parameter is used to control the amount of padding around - the face to be used for the coordinate box. - - Parameters - ---------- - coords: np.ndarray - Of shape `(N, 3)`. The coordinates of a molecule. - pad: float, optional (default 5) - The number of angstroms to pad. - - Examples - -------- - >>> coords = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]]) - >>> boxes = get_face_boxes(coords, pad=5) - """ - hull = ConvexHull(coords) - boxes = [] - # Each triangle in the simplices is a set of 3 atoms from - # coordinates which forms the vertices of an exterior triangle on - # the convex hull of the macromolecule. 
- for triangle in hull.simplices: - # Points is the set of atom coordinates that make up this - # triangular face on the convex hull - points = np.array( - [coords[triangle[0]], coords[triangle[1]], coords[triangle[2]]]) - # Let's extract x/y/z coords for this face - x_coords = points[:, 0] - y_coords = points[:, 1] - z_coords = points[:, 2] - - # Let's compute min/max points - x_min, x_max = np.amin(x_coords), np.amax(x_coords) - x_min, x_max = int(np.floor(x_min)) - pad, int(np.ceil(x_max)) + pad - x_bounds = (x_min, x_max) - - y_min, y_max = np.amin(points[:, 1]), np.amax(points[:, 1]) - y_min, y_max = int(np.floor(y_min)) - pad, int(np.ceil(y_max)) + pad - y_bounds = (y_min, y_max) - z_min, z_max = np.amin(points[:, 2]), np.amax(points[:, 2]) - z_min, z_max = int(np.floor(z_min)) - pad, int(np.ceil(z_max)) + pad - z_bounds = (z_min, z_max) - box = CoordinateBox(x_bounds, y_bounds, z_bounds) - boxes.append(box) - return boxes - - class CoordinateBox(object): """A coordinate box that represents a block in space. @@ -185,16 +20,17 @@ class CoordinateBox(object): of atoms that live in this box alongside their coordinates. """ - def __init__(self, x_range, y_range, z_range): + def __init__(self, x_range: Tuple[float, float], y_range: Tuple[float, float], + z_range: Tuple[float, float]): """Initialize this box. Parameters ---------- - x_range: tuple + x_range: Tuple[float, float] A tuple of `(x_min, x_max)` with max and min x-coordinates. - y_range: tuple + y_range: Tuple[float, float] A tuple of `(y_min, y_max)` with max and min y-coordinates. - z_range: tuple + z_range: Tuple[float, float] A tuple of `(z_min, z_max)` with max and min z-coordinates. Raises @@ -234,13 +70,19 @@ class CoordinateBox(object): """Create a string representation of this box.""" return self.__repr__() - def __contains__(self, point): + def __contains__(self, point: Sequence[float]) -> bool: """Check whether a point is in this box. 
Parameters ---------- - point: 3-tuple or list of length 3 or np.ndarray of shape `(3,)` + point: Sequence[float] + 3-tuple or list of length 3 or np.ndarray of shape `(3,)`. The `(x, y, z)` coordinates of a point in space. + + Returns + ------- + bool + `True` if `other` is contained in this box. """ (x_min, x_max) = self.x_range (y_min, y_max) = self.y_range @@ -250,17 +92,19 @@ class CoordinateBox(object): z_cont = (z_min <= point[2] and point[2] <= z_max) return x_cont and y_cont and z_cont - def __eq__(self, other): + # FIXME: Argument 1 of "__eq__" is incompatible with supertype "object" + def __eq__(self, other: "CoordinateBox") -> bool: # type: ignore """Compare two boxes to see if they're equal. Parameters ---------- - other: `CoordinateBox` + other: CoordinateBox Compare this coordinate box to the other one. Returns ------- - bool that's `True` if all bounds match. + bool + That's `True` if all bounds match. Raises ------ @@ -272,7 +116,7 @@ class CoordinateBox(object): return (self.x_range == other.x_range and self.y_range == other.y_range and self.z_range == other.z_range) - def __hash__(self): + def __hash__(self) -> int: """Implement hashing function for this box. Uses the default `hash` on `self.x_range, self.y_range, @@ -280,16 +124,18 @@ class CoordinateBox(object): Returns ------- - Unique integeer + int + Unique integer """ return hash((self.x_range, self.y_range, self.z_range)) - def center(self): + def center(self) -> Tuple[float, float, float]: """Computes the center of this box. Returns ------- - `(x, y, z)` the coordinates of the center of the box. + Tuple[float, float, float] + `(x, y, z)` the coordinates of the center of the box. Examples -------- @@ -303,12 +149,13 @@ class CoordinateBox(object): return (x_min + (x_max - x_min) / 2, y_min + (y_max - y_min) / 2, z_min + (z_max - z_min) / 2) - def volume(self): + def volume(self) -> float: """Computes and returns the volume of this box. Returns ------- - float, the volume of this box. 
Can be 0 if box is empty + float + The volume of this box. Can be 0 if box is empty Examples -------- @@ -321,19 +168,20 @@ class CoordinateBox(object): z_min, z_max = self.z_range return (x_max - x_min) * (y_max - y_min) * (z_max - z_min) - def contains(self, other): + def contains(self, other: "CoordinateBox") -> bool: """Test whether this box contains another. This method checks whether `other` is contained in this box. Parameters ---------- - other: `CoordinateBox` + other: CoordinateBox The box to check is contained in this box. Returns ------- - bool, `True` if `other` is contained in this box. + bool + `True` if `other` is contained in this box. Raises ------ @@ -350,3 +198,180 @@ class CoordinateBox(object): return (self_x_min <= other_x_min and other_x_max <= self_x_max and self_y_min <= other_y_min and other_y_max <= self_y_max and self_z_min <= other_z_min and other_z_max <= self_z_max) + + +def intersect_interval(interval1: Tuple[float, float], + interval2: Tuple[float, float]) -> Tuple[float, float]: + """Computes the intersection of two intervals. + + Parameters + ---------- + interval1: Tuple[float, float] + Should be `(x1_min, x1_max)` + interval2: Tuple[float, float] + Should be `(x2_min, x2_max)` + + Returns + ------- + x_intersect: Tuple[float, float] + Should be the intersection. If the intersection is empty returns + `(0, 0)` to represent the empty set. Otherwise is `(max(x1_min, + x2_min), min(x1_max, x2_max))`. + """ + x1_min, x1_max = interval1 + x2_min, x2_max = interval2 + if x1_max < x2_min: + # If interval1 < interval2 entirely + return (0, 0) + elif x2_max < x1_min: + # If interval2 < interval1 entirely + return (0, 0) + x_min = max(x1_min, x2_min) + x_max = min(x1_max, x2_max) + return (x_min, x_max) + + +def intersection(box1: CoordinateBox, box2: CoordinateBox) -> CoordinateBox: + """Computes the intersection box of provided boxes. 
+ + Parameters + ---------- + box1: CoordinateBox + First `CoordinateBox` + box2: CoordinateBox + Another `CoordinateBox` to intersect first one with. + + Returns + ------- + CoordinateBox + A `CoordinateBox` containing the intersection. If the intersection is empty, + returns the box with 0 bounds. + """ + x_intersection = intersect_interval(box1.x_range, box2.x_range) + y_intersection = intersect_interval(box1.y_range, box2.y_range) + z_intersection = intersect_interval(box1.z_range, box2.z_range) + return CoordinateBox(x_intersection, y_intersection, z_intersection) + + +def union(box1: CoordinateBox, box2: CoordinateBox) -> CoordinateBox: + """Merges provided boxes to find the smallest union box. + + This method merges the two provided boxes. + + Parameters + ---------- + box1: CoordinateBox + First box to merge in + box2: CoordinateBox + Second box to merge into this box + + Returns + ------- + CoordinateBox + Smallest `CoordinateBox` that contains both `box1` and `box2` + """ + x_min = min(box1.x_range[0], box2.x_range[0]) + y_min = min(box1.y_range[0], box2.y_range[0]) + z_min = min(box1.z_range[0], box2.z_range[0]) + x_max = max(box1.x_range[1], box2.x_range[1]) + y_max = max(box1.y_range[1], box2.y_range[1]) + z_max = max(box1.z_range[1], box2.z_range[1]) + return CoordinateBox((x_min, x_max), (y_min, y_max), (z_min, z_max)) + + +def merge_overlapping_boxes(boxes: List[CoordinateBox], + threshold: float = 0.8) -> List[CoordinateBox]: + """Merge boxes which have an overlap greater than threshold. + + Parameters + ---------- + boxes: list[CoordinateBox] + A list of `CoordinateBox` objects. + threshold: float, default 0.8 + The volume fraction of the boxes that must overlap for them to be + merged together. + + Returns + ------- + List[CoordinateBox] + List[CoordinateBox] of merged boxes. This list will have length less + than or equal to the length of `boxes`. 
+ """ + outputs: List[CoordinateBox] = [] + for box in boxes: + for other in boxes: + if box == other: + continue + intersect_box = intersection(box, other) + if (intersect_box.volume() >= threshold * box.volume() or + intersect_box.volume() >= threshold * other.volume()): + box = union(box, other) + unique_box = True + for output in outputs: + if output.contains(box): + unique_box = False + if unique_box: + outputs.append(box) + return outputs + + +def get_face_boxes(coords: np.ndarray, pad: float = 5.0) -> List[CoordinateBox]: + """For each face of the convex hull, compute a coordinate box around it. + + The convex hull of a macromolecule will have a series of triangular + faces. For each such triangular face, we construct a bounding box + around this triangle. Think of this box as attempting to capture + some binding interaction region whose exterior is controlled by the + box. Note that this box will likely be a crude approximation, but + the advantage of this technique is that it only uses simple geometry + to provide some basic biological insight into the molecule at hand. + + The `pad` parameter is used to control the amount of padding around + the face to be used for the coordinate box. + + Parameters + ---------- + coords: np.ndarray + A numpy array of shape `(N, 3)`. The coordinates of a molecule. + pad: float, optional (default 5.0) + The number of angstroms to pad. + + Returns + ------- + boxes: List[CoordinateBox] + List of `CoordinateBox` + + Examples + -------- + >>> coords = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]]) + >>> boxes = get_face_boxes(coords, pad=5) + """ + hull = ConvexHull(coords) + boxes = [] + # Each triangle in the simplices is a set of 3 atoms from + # coordinates which forms the vertices of an exterior triangle on + # the convex hull of the macromolecule. 
+ for triangle in hull.simplices: + # Points is the set of atom coordinates that make up this + # triangular face on the convex hull + points = np.array( + [coords[triangle[0]], coords[triangle[1]], coords[triangle[2]]]) + # Let's extract x/y/z coords for this face + x_coords = points[:, 0] + y_coords = points[:, 1] + z_coords = points[:, 2] + + # Let's compute min/max points + x_min, x_max = np.amin(x_coords), np.amax(x_coords) + x_min, x_max = int(np.floor(x_min)) - pad, int(np.ceil(x_max)) + pad + x_bounds = (x_min, x_max) + + y_min, y_max = np.amin(y_coords), np.amax(y_coords) + y_min, y_max = int(np.floor(y_min)) - pad, int(np.ceil(y_max)) + pad + y_bounds = (y_min, y_max) + z_min, z_max = np.amin(z_coords), np.amax(z_coords) + z_min, z_max = int(np.floor(z_min)) - pad, int(np.ceil(z_max)) + pad + z_bounds = (z_min, z_max) + box = CoordinateBox(x_bounds, y_bounds, z_bounds) + boxes.append(box) + return boxes diff --git a/deepchem/utils/data_utils.py b/deepchem/utils/data_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1be63162ed5caee60a34bcb9801912e3fa1aafea --- /dev/null +++ b/deepchem/utils/data_utils.py @@ -0,0 +1,581 @@ +""" +Simple utils to save and load from disk. +""" +import joblib +import gzip +import pickle +import os +import tempfile +import tarfile +import zipfile +import logging +from urllib.request import urlretrieve +from typing import Any, Iterator, List, Optional, Tuple, Union, cast, IO + +import pandas as pd +import numpy as np + +import deepchem as dc + +logger = logging.getLogger(__name__) + + +def pad_array(x: np.ndarray, + shape: Union[Tuple, int], + fill: float = 0.0, + both: bool = False) -> np.ndarray: + """ + Pad an array with a fill value. + + Parameters + ---------- + x: np.ndarray + A numpy array. + shape: Tuple or int + Desired shape. If int, all dimensions are padded to that size. + fill: float, optional (default 0.0) + The padded value. 
+ both: bool, optional (default False) + If True, split the padding on both sides of each axis. If False, + padding is applied to the end of each axis. + + Returns + ------- + np.ndarray + A padded numpy array + """ + x = np.asarray(x) + if not isinstance(shape, tuple): + shape = tuple(shape for _ in range(x.ndim)) + pad = [] + for i in range(x.ndim): + diff = shape[i] - x.shape[i] + assert diff >= 0 + if both: + a, b = divmod(diff, 2) + b += a + pad.append((a, b)) + else: + pad.append((0, diff)) + pad = tuple(pad) # type: ignore + x = np.pad(x, pad, mode='constant', constant_values=fill) + return x + + +def get_data_dir() -> str: + """Get the DeepChem data directory. + + Returns + ------- + str + The default path to store DeepChem data. If you want to + change this path, please set your own path to `DEEPCHEM_DATA_DIR` + as an environment variable. + """ + if 'DEEPCHEM_DATA_DIR' in os.environ: + return os.environ['DEEPCHEM_DATA_DIR'] + return tempfile.gettempdir() + + +def download_url(url: str, + dest_dir: str = get_data_dir(), + name: Optional[str] = None): + """Download a file to disk. + + Parameters + ---------- + url: str + The URL to download from + dest_dir: str + The directory to save the file in + name: str + The file name to save it as. If omitted, it will try to extract a file name from the URL + """ + if name is None: + name = url + if '?' in name: + name = name[:name.find('?')] + if '/' in name: + name = name[name.rfind('/') + 1:] + urlretrieve(url, os.path.join(dest_dir, name)) + + +def untargz_file(file: str, + dest_dir: str = get_data_dir(), + name: Optional[str] = None): + """Untar and unzip a .tar.gz file to disk. + + Parameters + ---------- + file: str + The filepath to decompress + dest_dir: str + The directory to save the file in + name: str + The file name to save it as. 
If omitted, it will use the file name + """ + if name is None: + name = file + tar = tarfile.open(name) + tar.extractall(path=dest_dir) + tar.close() + + +def unzip_file(file: str, + dest_dir: str = get_data_dir(), + name: Optional[str] = None): + """Unzip a .zip file to disk. + + Parameters + ---------- + file: str + The filepath to decompress + dest_dir: str + The directory to save the file in + name: str + The directory name to unzip it to. If omitted, it will use the file name + """ + if name is None: + name = file + if dest_dir is None: + dest_dir = os.path.join(get_data_dir, name) + with zipfile.ZipFile(file, "r") as zip_ref: + zip_ref.extractall(dest_dir) + + +def load_image_files(input_files: List[str]) -> np.ndarray: + """Loads a set of images from disk. + + Parameters + ---------- + input_files: List[str] + List of image filenames. + + Returns + ------- + np.ndarray + A numpy array that contains loaded images. The shape is, `(N,...)`. + + Notes + ----- + This method requires Pillow to be installed. + The supported file types are PNG and TIF. + """ + try: + from PIL import Image + except ModuleNotFoundError: + raise ImportError("This function requires Pillow to be installed.") + + images = [] + for input_file in input_files: + _, extension = os.path.splitext(input_file) + extension = extension.lower() + if extension == ".png": + image = np.array(Image.open(input_file)) + images.append(image) + elif extension == ".tif": + im = Image.open(input_file) + imarray = np.array(im) + images.append(imarray) + else: + raise ValueError("Unsupported image filetype for %s" % input_file) + return np.array(images) + + +def load_sdf_files(input_files: List[str], + clean_mols: bool = True, + tasks: List[str] = [], + shard_size: Optional[int] = None) -> Iterator[pd.DataFrame]: + """Load SDF file into dataframe. + + Parameters + ---------- + input_files: List[str] + List of filenames + clean_mols: bool, default True + Whether to sanitize molecules. 
+ tasks: List[str], default [] + Each entry in `tasks` is treated as a property in the SDF file and is + retrieved with `mol.GetProp(str(task))` where `mol` is the RDKit mol + loaded from a given SDF entry. + shard_size: int, default None + The shard size to yield at one time. + + Returns + ------- + Iterator[pd.DataFrame] + Generator which yields the dataframe which is the same shard size. + + Notes + ----- + This function requires RDKit to be installed. + """ + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + + df_rows = [] + for input_file in input_files: + # Tasks are either in .sdf.csv file or in the .sdf file itself + has_csv = os.path.isfile(input_file + ".csv") + # Structures are stored in .sdf file + logger.info("Reading structures from %s." % input_file) + suppl = Chem.SDMolSupplier(str(input_file), clean_mols, False, False) + for ind, mol in enumerate(suppl): + if mol is None: + continue + smiles = Chem.MolToSmiles(mol) + df_row = [ind, smiles, mol] + if not has_csv: # Get task targets from .sdf file + for task in tasks: + df_row.append(mol.GetProp(str(task))) + df_rows.append(df_row) + if shard_size is not None and len(df_rows) == shard_size: + if has_csv: + mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol')) + raw_df = next(load_csv_files([input_file + ".csv"], shard_size=None)) + yield pd.concat([mol_df, raw_df], axis=1, join='inner') + else: + mol_df = pd.DataFrame( + df_rows, columns=('mol_id', 'smiles', 'mol') + tuple(tasks)) + yield mol_df + # Reset aggregator + df_rows = [] + + # Handle final leftovers for this file + if len(df_rows) > 0: + if has_csv: + mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol')) + raw_df = next(load_csv_files([input_file + ".csv"], shard_size=None)) + yield pd.concat([mol_df, raw_df], axis=1, join='inner') + else: + mol_df = pd.DataFrame( + df_rows, columns=('mol_id', 'smiles', 'mol') + tuple(tasks)) + yield 
mol_df + df_rows = [] + + +def load_csv_files(input_files: List[str], + shard_size: Optional[int] = None) -> Iterator[pd.DataFrame]: + """Load data as pandas dataframe from CSV files. + + Parameters + ---------- + input_files: List[str] + List of filenames + shard_size: int, default None + The shard size to yield at one time. + + Returns + ------- + Iterator[pd.DataFrame] + Generator which yields the dataframe which is the same shard size. + """ + # First line of user-specified CSV *must* be header. + shard_num = 1 + for input_file in input_files: + if shard_size is None: + yield pd.read_csv(input_file) + else: + logger.info("About to start loading CSV from %s" % input_file) + for df in pd.read_csv(input_file, chunksize=shard_size): + logger.info( + "Loading shard %d of size %s." % (shard_num, str(shard_size))) + df = df.replace(np.nan, str(""), regex=True) + shard_num += 1 + yield df + + +def load_json_files(input_files: List[str], + shard_size: Optional[int] = None) -> Iterator[pd.DataFrame]: + """Load data as pandas dataframe. + + Parameters + ---------- + input_files: List[str] + List of json filenames. + shard_size: int, default None + Chunksize for reading json files. + + Returns + ------- + Iterator[pd.DataFrame] + Generator which yields the dataframe which is the same shard size. + + Notes + ----- + To load shards from a json file into a Pandas dataframe, the file + must be originally saved with ``df.to_json('filename.json', orient='records', lines=True)`` + """ + shard_num = 1 + for input_file in input_files: + if shard_size is None: + yield pd.read_json(input_file, orient='records', lines=True) + else: + logger.info("About to start loading json from %s." % input_file) + for df in pd.read_json( + input_file, orient='records', chunksize=shard_size, lines=True): + logger.info( + "Loading shard %d of size %s." 
% (shard_num, str(shard_size))) + df = df.replace(np.nan, str(""), regex=True) + shard_num += 1 + yield df + + +def load_pickle_file(input_file: str) -> Any: + """Load from single, possibly gzipped, pickle file. + + Parameters + ---------- + input_file: str + The filename of pickle file. This function can load from + gzipped pickle file like `XXXX.pkl.gz`. + + Returns + ------- + Any + The object which is loaded from the pickle file. + """ + if ".gz" in input_file: + with gzip.open(input_file, "rb") as unzipped_file: + return pickle.load(cast(IO[bytes], unzipped_file)) + else: + with open(input_file, "rb") as opened_file: + return pickle.load(opened_file) + + +def load_pickle_files(input_files: List[str]) -> Iterator[Any]: + """Load dataset from pickle files. + + Parameters + ---------- + input_files: List[str] + The list of filenames of pickle file. This function can load from + gzipped pickle file like `XXXX.pkl.gz`. + + Returns + ------- + Iterator[Any] + Generator which yields the objects which is loaded from each pickle file. + """ + for input_file in input_files: + yield load_pickle_file(input_file) + + +def load_data(input_files: List[str], + shard_size: Optional[int] = None) -> Iterator[Any]: + """Loads data from files. + + Parameters + ---------- + input_files: List[str] + List of filenames. + shard_size: int, default None + Size of shard to yield + + Returns + ------- + Iterator[Any] + Iterator which iterates over provided files. + + Notes + ----- + The supported file types are SDF, CSV and Pickle. 
+ """ + if len(input_files) == 0: + raise ValueError("The length of `filenames` must be more than 1.") + + file_type = _get_file_type(input_files[0]) + if file_type == "sdf": + if shard_size is not None: + logger.info("Ignoring shard_size for sdf input.") + for value in load_sdf_files(input_files): + yield value + elif file_type == "csv": + for value in load_csv_files(input_files, shard_size): + yield value + elif file_type == "pickle": + if shard_size is not None: + logger.info("Ignoring shard_size for pickle input.") + for value in load_pickle_files(input_files): + yield value + + +def _get_file_type(input_file: str) -> str: + """Get type of input file. Must be csv/pkl/sdf/joblib file.""" + filename, file_extension = os.path.splitext(input_file) + # If gzipped, need to compute extension again + if file_extension == ".gz": + filename, file_extension = os.path.splitext(filename) + if file_extension == ".csv": + return "csv" + elif file_extension == ".pkl": + return "pickle" + elif file_extension == ".joblib": + return "joblib" + elif file_extension == ".sdf": + return "sdf" + else: + raise ValueError("Unrecognized extension %s" % file_extension) + + +def save_to_disk(dataset: Any, filename: str, compress: int = 3): + """Save a dataset to file. + + Parameters + ---------- + dataset: str + A data saved + filename: str + Path to save data. + compress: int, default 3 + The compress option when dumping joblib file. + """ + if filename.endswith('.joblib'): + joblib.dump(dataset, filename, compress=compress) + elif filename.endswith('.npy'): + np.save(filename, dataset) + else: + raise ValueError("Filename with unsupported extension: %s" % filename) + + +def load_from_disk(filename: str) -> Any: + """Load a dataset from file. + + Parameters + ---------- + filename: str + A filename you want to load data. + + Returns + ------- + Any + A loaded object from file. 
+ """ + name = filename + if os.path.splitext(name)[1] == ".gz": + name = os.path.splitext(name)[0] + extension = os.path.splitext(name)[1] + if extension == ".pkl": + return load_pickle_file(filename) + elif extension == ".joblib": + return joblib.load(filename) + elif extension == ".csv": + # First line of user-specified CSV *must* be header. + df = pd.read_csv(filename, header=0) + df = df.replace(np.nan, str(""), regex=True) + return df + elif extension == ".npy": + return np.load(filename, allow_pickle=True) + else: + raise ValueError("Unrecognized filetype for %s" % filename) + + +def load_dataset_from_disk(save_dir: str) -> Tuple[bool, Optional[Tuple[ + "dc.data.DiskDataset", "dc.data.DiskDataset", "dc.data.DiskDataset"]], List[ + "dc.trans.Transformer"]]: + """Loads MoleculeNet train/valid/test/transformers from disk. + + Expects that data was saved using `save_dataset_to_disk` below. Expects the + following directory structure for `save_dir`: + save_dir/ + | + ---> train_dir/ + | + ---> valid_dir/ + | + ---> test_dir/ + | + ---> transformers.pkl + + Parameters + ---------- + save_dir: str + Directory name to load datasets. 
+ + Returns + ------- + loaded: bool + Whether the load succeeded + all_dataset: Tuple[DiskDataset, DiskDataset, DiskDataset] + The train, valid, test datasets + transformers: Transformer + The transformers used for this dataset + + See Also + -------- + save_dataset_to_disk + """ + + train_dir = os.path.join(save_dir, "train_dir") + valid_dir = os.path.join(save_dir, "valid_dir") + test_dir = os.path.join(save_dir, "test_dir") + if not os.path.exists(train_dir) or not os.path.exists( + valid_dir) or not os.path.exists(test_dir): + return False, None, list() + loaded = True + train = dc.data.DiskDataset(train_dir) + valid = dc.data.DiskDataset(valid_dir) + test = dc.data.DiskDataset(test_dir) + train.memory_cache_size = 40 * (1 << 20) # 40 MB + all_dataset = (train, valid, test) + transformers = load_transformers(save_dir) + return loaded, all_dataset, transformers + + +def save_dataset_to_disk( + save_dir: str, train: "dc.data.DiskDataset", valid: "dc.data.DiskDataset", + test: "dc.data.DiskDataset", transformers: List["dc.trans.Transformer"]): + """Utility used by MoleculeNet to save train/valid/test datasets. + + This utility function saves a train/valid/test split of a dataset along + with transformers in the same directory. The saved datasets will take the + following structure: + save_dir/ + | + ---> train_dir/ + | + ---> valid_dir/ + | + ---> test_dir/ + | + ---> transformers.pkl + + Parameters + ---------- + save_dir: str + Directory name to save datasets to. + train: DiskDataset + Training dataset to save. + valid: DiskDataset + Validation dataset to save. + test: DiskDataset + Test dataset to save. + transformers: List[Transformer] + List of transformers to save to disk. 
+ + See Also + -------- + load_dataset_from_disk + """ + train_dir = os.path.join(save_dir, "train_dir") + valid_dir = os.path.join(save_dir, "valid_dir") + test_dir = os.path.join(save_dir, "test_dir") + train.move(train_dir) + valid.move(valid_dir) + test.move(test_dir) + save_transformers(save_dir, transformers) + + +def load_transformers(save_dir: str) -> List["dc.trans.Transformer"]: + """Load the transformers for a MoleculeNet dataset from disk.""" + with open(os.path.join(save_dir, "transformers.pkl"), 'rb') as f: + return pickle.load(f) + + +def save_transformers(save_dir: str, + transformers: List["dc.trans.Transformer"]): + """Save the transformers for a MoleculeNet dataset to disk.""" + with open(os.path.join(save_dir, "transformers.pkl"), 'wb') as f: + pickle.dump(transformers, f) diff --git a/deepchem/utils/debug_utils.py b/deepchem/utils/debug_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..3333c7e0540458e3e110b99acf357c0a5e27cb79 --- /dev/null +++ b/deepchem/utils/debug_utils.py @@ -0,0 +1,69 @@ +# The number of elements to print for dataset ids/tasks +_print_threshold = 10 + + +def get_print_threshold() -> int: + """Return the printing threshold for datasets. + + The print threshold is the number of elements from ids/tasks to + print when printing representations of `Dataset` objects. + + Returns + ---------- + threshold: int + Number of elements that will be printed + """ + return _print_threshold + + +def set_print_threshold(threshold: int): + """Set print threshold + + The print threshold is the number of elements from ids/tasks to + print when printing representations of `Dataset` objects. + + Parameters + ---------- + threshold: int + Number of elements to print. 
+ """ + global _print_threshold + _print_threshold = threshold + + +# If a dataset contains more than this number of elements, it won't +# print any dataset ids +_max_print_size = 1000 + + +def get_max_print_size() -> int: + """Return the max print size for a dataset. + + If a dataset is large, printing `self.ids` as part of a string + representation can be very slow. This field controls the maximum + size for a dataset before ids are no longer printed. + + Returns + ------- + max_print_size: int + Maximum length of a dataset for ids to be printed in string + representation. + """ + return _max_print_size + + +def set_max_print_size(max_print_size: int): + """Set max_print_size + + If a dataset is large, printing `self.ids` as part of a string + representation can be very slow. This field controls the maximum + size for a dataset before ids are no longer printed. + + Parameters + ---------- + max_print_size: int + Maximum length of a dataset for ids to be printed in string + representation. + """ + global _max_print_size + _max_print_size = max_print_size diff --git a/deepchem/utils/evaluate.py b/deepchem/utils/evaluate.py index 70649ddf7fde660ffb756a3f52a9cf637e63c13c..c750dbe2ef94b8f86ba86e0e3ec00628d7b48c67 100644 --- a/deepchem/utils/evaluate.py +++ b/deepchem/utils/evaluate.py @@ -2,122 +2,323 @@ Utility functions to evaluate models on datasets. 
""" import csv +import logging +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union + import numpy as np -import warnings -import pandas as pd -import sklearn -from deepchem.utils.save import log -from deepchem.trans import undo_transforms -from deepchem.metrics import from_one_hot +import deepchem as dc +from deepchem.metrics import Metric + +logger = logging.getLogger(__name__) + +Score = Dict[str, float] +Metric_Func = Callable[..., Any] +Metrics = Union[Metric, Metric_Func, List[Metric], List[Metric_Func]] + + +def output_statistics(scores: Score, stats_out: str) -> None: + """Write computed stats to file. + + Statistics are written to specified `stats_out` file. + + Parameters + ---------- + scores: dict + Dictionary mapping names of metrics to scores. + stats_out: str + Name of file to write scores to. + """ + logger.warning("output_statistics is deprecated.") + with open(stats_out, "w") as statsfile: + statsfile.write(str(scores) + "\n") + + +def output_predictions(dataset: "dc.data.Dataset", y_preds: np.ndarray, + csv_out: str) -> None: + """Writes predictions to file. + + Writes predictions made on `dataset` to a specified file on + disk. `dataset.ids` are used to format predictions. The produce CSV file will have format as follows + + | ID | Task1Name | Task2Name | + | ----------- | ------------ | ------------ | + | identifer1 | prediction11 | prediction12 | + | identifer2 | prediction21 | prediction22 | + + Parameters + ---------- + dataset: dc.data.Dataset + Dataset on which predictions have been made. + y_preds: np.ndarray + Predictions to output + csv_out: str + Name of file to write predictions to. 
+ """ + data_ids = dataset.ids + n_tasks = len(dataset.get_task_names()) + y_preds = np.reshape(y_preds, (len(y_preds), n_tasks)) + assert len(y_preds) == len(data_ids) + with open(csv_out, "w") as csvfile: + csvwriter = csv.writer(csvfile) + csvwriter.writerow(["ID"] + dataset.get_task_names()) + for mol_id, y_pred in zip(data_ids, y_preds): + csvwriter.writerow([mol_id] + list(y_pred)) + + +def _process_metric_input(metrics: Metrics) -> List[Metric]: + """A private helper method which processes metrics correctly. + + Metrics can be input as `dc.metrics.Metric` objects, lists of + `dc.metrics.Metric` objects, or as raw metric functions or lists of + raw metric functions. Metric functions are functions which accept + two arguments `y_true, y_pred` both of which must be `np.ndarray` + objects and return a float value. This functions normalizes these + different types of inputs to type `list[dc.metrics.Metric]` object + for ease of later processing. + + Note that raw metric functions which don't have names attached will + simply be named "metric-#" where # is their position in the provided + metric list. For example, "metric-1" or "metric-7" -__author__ = "Bharath Ramsundar" -__copyright__ = "Copyright 2016, Stanford University" -__license__ = "MIT" + Parameters + ---------- + metrics: dc.metrics.Metric/list[dc.metrics.Metric]/metric function/ list[metric function] + Input metrics to process. + Returns + ------- + final_metrics: list[dc.metrics.Metric] + Converts all input metrics and outputs a list of + `dc.metrics.Metric` objects. + """ + # Make sure input is a list + if not isinstance(metrics, list): + # FIXME: Incompatible types in assignment + metrics = [metrics] # type: ignore + + final_metrics = [] + # FIXME: Argument 1 to "enumerate" has incompatible type + for i, metric in enumerate(metrics): # type: ignore + # Ensure that metric is wrapped in a list. 
+ if isinstance(metric, Metric): + final_metrics.append(metric) + # This case checks if input is a function then wraps a + # dc.metrics.Metric object around it + elif callable(metric): + wrap_metric = Metric(metric, name="metric-%d" % (i + 1)) + final_metrics.append(wrap_metric) + else: + raise ValueError( + "metrics must be one of metric function / dc.metrics.Metric object /" + "list of dc.metrics.Metric or metric functions.") + return final_metrics -def relative_difference(x, y): - """Compute the relative difference between x and y""" - return np.abs(x - y) / np.abs(max(x, y)) +def relative_difference(x: np.ndarray, y: np.ndarray) -> np.ndarray: + """Compute the relative difference between x and y -def threshold_predictions(y, threshold): - y_out = np.zeros_like(y) - for ind, pred in enumerate(y): - y_out[ind] = 1 if pred > threshold else 0 - return y_out + The two argument arrays must have the same shape. + + Parameters + ---------- + x: np.ndarray + First input array + y: np.ndarray + Second input array + + Returns + ------- + z: np.ndarray + We will have `z == np.abs(x-y) / np.abs(max(x, y))`. + """ + z = np.abs(x - y) / np.abs(max(x, y)) + return z -# TODO(rbharath): This is now simple enough that we should probably get rid of -# Evaluator object to avoid clutter. class Evaluator(object): - """Class that evaluates a model on a given dataset.""" + """Class that evaluates a model on a given dataset. + + The evaluator class is used to evaluate a `dc.models.Model` class on + a given `dc.data.Dataset` object. The evaluator is aware of + `dc.trans.Transformer` objects so will automatically undo any + transformations which have been applied. + + Examples + -------- + Evaluators allow for a model to be evaluated directly on a Metric + for `sklearn`. Let's do a bit of setup constructing our dataset and + model. 
+ + >>> import deepchem as dc + >>> import numpy as np + >>> X = np.random.rand(10, 5) + >>> y = np.random.rand(10, 1) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> model = dc.models.MultitaskRegressor(1, 5) + >>> transformers = [] + + Then you can evaluate this model as follows + >>> import sklearn + >>> evaluator = Evaluator(model, dataset, transformers) + >>> multitask_scores = evaluator.compute_model_performance( + ... sklearn.metrics.mean_absolute_error) + + Evaluators can also be used with `dc.metrics.Metric` objects as well + in case you want to customize your metric further. + + >>> evaluator = Evaluator(model, dataset, transformers) + >>> metric = dc.metrics.Metric(dc.metrics.mae_score) + >>> multitask_scores = evaluator.compute_model_performance(metric) + """ + + def __init__(self, model, dataset: "dc.data.Dataset", + transformers: List["dc.trans.Transformer"]): + """Initialize this evaluator + + Parameters + ---------- + model: Model + Model to evaluate. Note that this must be a regression or + classification model and not a generative model. + dataset: Dataset + Dataset object to evaluate `model` on. + transformers: List[Transformer] + List of `dc.trans.Transformer` objects. These transformations + must have been applied to `dataset` previously. The dataset will + be untransformed for metric evaluation. + """ - def __init__(self, model, dataset, transformers, verbose=False): self.model = model self.dataset = dataset self.output_transformers = [ transformer for transformer in transformers if transformer.transform_y ] - self.task_names = dataset.get_task_names() - self.verbose = verbose - def output_statistics(self, scores, stats_out): - """ - Write computed stats to file. + def output_statistics(self, scores: Score, stats_out: str): + """ Write computed stats to file. + + Parameters + ---------- + scores: dict + Dictionary mapping names of metrics to scores. + stats_out: str + Name of file to write scores to. 
""" + logger.warning( + "Evaluator.output_statistics is deprecated." + "Please use dc.utils.evaluate.output_statistics instead." + "This method will be removed in a future version of DeepChem.") with open(stats_out, "w") as statsfile: statsfile.write(str(scores) + "\n") - def output_predictions(self, y_preds, csv_out): - """ - Writes predictions to file. + def output_predictions(self, y_preds: np.ndarray, csv_out: str): + """Writes predictions to file. + + Writes predictions made on `self.dataset` to a specified file on + disk. `self.dataset.ids` are used to format predictions. - Args: - y_preds: np.ndarray - csvfile: Open file object. + Parameters + ---------- + y_preds: np.ndarray + Predictions to output + csv_out: str + Name of file to write predictions to. """ - mol_ids = self.dataset.ids - n_tasks = len(self.task_names) + logger.warning( + "Evaluator.output_predictions is deprecated." + "Please use dc.utils.evaluate.output_predictions instead." + "This method will be removed in a future version of DeepChem.") + data_ids = self.dataset.ids + n_tasks = len(self.dataset.get_task_names()) y_preds = np.reshape(y_preds, (len(y_preds), n_tasks)) - assert len(y_preds) == len(mol_ids) + assert len(y_preds) == len(data_ids) with open(csv_out, "w") as csvfile: csvwriter = csv.writer(csvfile) - csvwriter.writerow(["Compound"] + self.dataset.get_task_names()) - for mol_id, y_pred in zip(mol_ids, y_preds): + csvwriter.writerow(["ID"] + self.dataset.get_task_names()) + for mol_id, y_pred in zip(data_ids, y_preds): csvwriter.writerow([mol_id] + list(y_pred)) - def compute_model_performance(self, - metrics, - csv_out=None, - stats_out=None, - per_task_metrics=False): + def compute_model_performance( + self, + metrics: Metrics, + csv_out: Optional[str] = None, + stats_out: Optional[str] = None, + per_task_metrics: bool = False, + use_sample_weights: bool = False, + n_classes: int = 2) -> Union[Score, Tuple[Score, Score]]: """ Computes statistics of model on test data and saves 
results to csv. Parameters ---------- - metrics: list - List of dc.metrics.Metric objects - csv_out: str, optional + metrics: dc.metrics.Metric/list[dc.metrics.Metric]/function + The set of metrics provided. This class attempts to do some + intelligent handling of input. If a single `dc.metrics.Metric` + object is provided or a list is provided, it will evaluate + `self.model` on these metrics. If a function is provided, it is + assumed to be a metric function that this method will attempt to + wrap in a `dc.metrics.Metric` object. A metric function must + accept two arguments, `y_true, y_pred` both of which are + `np.ndarray` objects and return a floating point score. The + metric function may also accept a keyword argument + `sample_weight` to account for per-sample weights. + csv_out: str, optional (DEPRECATED) Filename to write CSV of model predictions. - stats_out: str, optional + stats_out: str, optional (DEPRECATED) Filename to write computed statistics. per_task_metrics: bool, optional If true, return computed metric for each task on multitask dataset. + use_sample_weights: bool, optional (default False) + If set, use per-sample weights `w`. + n_classes: int, optional (default None) + If specified, will use `n_classes` as the number of unique classes + in `self.dataset`. Note that this argument will be ignored for + regression metrics. + + Returns + ------- + multitask_scores: dict + Dictionary mapping names of metrics to metric scores. + all_task_scores: dict, optional + If `per_task_metrics == True`, then returns a second dictionary + of scores for each task separately. """ + if csv_out is not None: + logger.warning( + "csv_out is deprecated as an argument and will be removed in a future version of DeepChem." + "Output is not written to CSV; manually write output instead.") + if stats_out is not None: + logger.warning( + "stats_out is deprecated as an argument and will be removed in a future version of DeepChem." 
+ "Stats output is not written; please manually write output instead") + # Process input metrics + metrics = _process_metric_input(metrics) + y = self.dataset.y - y = undo_transforms(y, self.output_transformers) + y = dc.trans.undo_transforms(y, self.output_transformers) w = self.dataset.w - if not len(metrics): - return {} - else: - mode = metrics[0].mode y_pred = self.model.predict(self.dataset, self.output_transformers) - if mode == "classification": - y_pred_print = np.argmax(y_pred, -1) - else: - y_pred_print = y_pred + n_tasks = len(self.dataset.get_task_names()) + multitask_scores = {} all_task_scores = {} - if csv_out is not None: - log("Saving predictions to %s" % csv_out, self.verbose) - self.output_predictions(y_pred_print, csv_out) - # Compute multitask metrics for metric in metrics: + results = metric.compute_metric( + y, + y_pred, + w, + per_task_metrics=per_task_metrics, + n_tasks=n_tasks, + n_classes=n_classes, + use_sample_weights=use_sample_weights) if per_task_metrics: - multitask_scores[metric.name], computed_metrics = metric.compute_metric( - y, y_pred, w, per_task_metrics=True) + multitask_scores[metric.name], computed_metrics = results all_task_scores[metric.name] = computed_metrics else: - multitask_scores[metric.name] = metric.compute_metric( - y, y_pred, w, per_task_metrics=False) - - if stats_out is not None: - log("Saving stats to %s" % stats_out, self.verbose) - self.output_statistics(multitask_scores, stats_out) + multitask_scores[metric.name] = results if not per_task_metrics: return multitask_scores @@ -126,23 +327,57 @@ class Evaluator(object): class GeneratorEvaluator(object): - """ - Partner class to Evaluator. - Instead of operating over datasets this class operates over Generator. - Evaluate a Metric over a model and Generator. + """Evaluate models on a stream of data. + + This class is a partner class to `Evaluator`. 
Instead of operating + over datasets this class operates over a generator which yields + batches of data to feed into provided model. + + Examples + -------- + >>> import deepchem as dc + >>> import numpy as np + >>> X = np.random.rand(10, 5) + >>> y = np.random.rand(10, 1) + >>> dataset = dc.data.NumpyDataset(X, y) + >>> model = dc.models.MultitaskRegressor(1, 5) + >>> generator = model.default_generator(dataset, pad_batches=False) + >>> transformers = [] + + Then you can evaluate this model as follows + + >>> import sklearn + >>> evaluator = GeneratorEvaluator(model, generator, transformers) + >>> multitask_scores = evaluator.compute_model_performance( + ... sklearn.metrics.mean_absolute_error) + + Evaluators can also be used with `dc.metrics.Metric` objects as well + in case you want to customize your metric further. (Note that a given + generator can only be used once so we have to redefine the generator here.) + + >>> generator = model.default_generator(dataset, pad_batches=False) + >>> evaluator = GeneratorEvaluator(model, generator, transformers) + >>> metric = dc.metrics.Metric(dc.metrics.mae_score) + >>> multitask_scores = evaluator.compute_model_performance(metric) """ - def __init__(self, model, generator, transformers, labels=None, weights=None): + def __init__(self, + model, + generator: Iterable[Tuple[Any, Any, Any]], + transformers: List["dc.trans.Transformer"], + labels: Optional[List] = None, + weights: Optional[List] = None): """ Parameters ---------- model: Model - Model to evaluate - generator: Generator - Generator which yields batches to feed into the model. For a TensorGraph, - each batch should be a dict mapping Layers to NumPy arrays. For a - KerasModel, it should be a tuple of the form (inputs, labels, weights). - transformers: + Model to evaluate. + generator: generator + Generator which yields batches to feed into the model. For a + KerasModel, it should be a tuple of the form (inputs, labels, + weights). 
The "correct" way to create this generator is to use + `model.default_generator` as shown in the example above. + transformers: List[Transformer] Tranformers to "undo" when applied to the models outputs labels: list of Layer layers which are keys in the generator to compare to outputs @@ -159,65 +394,99 @@ class GeneratorEvaluator(object): if labels is not None and len(labels) != 1: raise ValueError("GeneratorEvaluator currently only supports one label") - def compute_model_performance(self, metrics, per_task_metrics=False): + def compute_model_performance( + self, + metrics: Metrics, + per_task_metrics: bool = False, + use_sample_weights: bool = False, + n_classes: int = 2) -> Union[Score, Tuple[Score, Score]]: """ Computes statistics of model on test data and saves results to csv. Parameters ---------- - metrics: list - List of dc.metrics.Metric objects + metrics: dc.metrics.Metric/list[dc.metrics.Metric]/function + The set of metrics provided. This class attempts to do some + intelligent handling of input. If a single `dc.metrics.Metric` + object is provided or a list is provided, it will evaluate + `self.model` on these metrics. If a function is provided, it is + assumed to be a metric function that this method will attempt to + wrap in a `dc.metrics.Metric` object. A metric function must + accept two arguments, `y_true, y_pred` both of which are + `np.ndarray` objects and return a floating point score. per_task_metrics: bool, optional - If true, return computed metric for each task on multitask dataset. + If true, return computed metric for each task on multitask + dataset. + use_sample_weights: bool, optional (default False) + If set, use per-sample weights `w`. + n_classes: int, optional (default None) + If specified, will assume that all `metrics` are classification + metrics and will use `n_classes` as the number of unique classes + in `self.dataset`. + + Returns + ------- + multitask_scores: dict + Dictionary mapping names of metrics to metric scores. 
+ all_task_scores: dict, optional + If `per_task_metrics == True`, then returns a second dictionary + of scores for each task separately. """ + metrics = _process_metric_input(metrics) + + # We use y/w to aggregate labels/weights across generator. y = [] w = [] def generator_closure(): + """This function is used to pull true labels/weights out as we iterate over the generator.""" if self.label_keys is None: + weights = None # This is a KerasModel. for batch in self.generator: - inputs, labels, weights = batch + # Some datasets have weights + try: + inputs, labels, weights = batch + except ValueError: + try: + inputs, labels, weights, ids = batch + except ValueError: + raise ValueError( + "Generator must yield values of form (input, labels, weights) or (input, labels, weights, ids)" + ) y.append(labels[0]) if len(weights) > 0: w.append(weights[0]) - yield batch - else: - # This is a TensorGraph. - for feed_dict in self.generator: - y.append(feed_dict[self.label_keys[0]]) - if len(self.weights) > 0: - w.append(feed_dict[self.weights[0]]) - yield feed_dict - - if not len(metrics): - return {} - else: - mode = metrics[0].mode + yield (inputs, labels, weights) + + # Process predictions and populate y/w lists y_pred = self.model.predict_on_generator(generator_closure()) + + # Combine labels/weights y = np.concatenate(y, axis=0) + w = np.concatenate(w, axis=0) + multitask_scores = {} all_task_scores = {} - y = undo_transforms(y, self.output_transformers) - y_pred = undo_transforms(y_pred, self.output_transformers) - if len(w) != 0: - w = np.array(w) - if np.prod(w.shape) == y.shape[0]: - w = np.reshape(w, newshape=(y.shape[0], 1)) - else: - w = np.reshape(w, newshape=y.shape) + # Undo data transformations. 
+ y = dc.trans.undo_transforms(y, self.output_transformers) + y_pred = dc.trans.undo_transforms(y_pred, self.output_transformers) # Compute multitask metrics - n_classes = y.shape[-1] for metric in metrics: + results = metric.compute_metric( + y, + y_pred, + w, + per_task_metrics=per_task_metrics, + n_classes=n_classes, + use_sample_weights=use_sample_weights) if per_task_metrics: - multitask_scores[metric.name], computed_metrics = metric.compute_metric( - y, y_pred, w, per_task_metrics=True, n_classes=n_classes) + multitask_scores[metric.name], computed_metrics = results all_task_scores[metric.name] = computed_metrics else: - multitask_scores[metric.name] = metric.compute_metric( - y, y_pred, w, per_task_metrics=False, n_classes=n_classes) + multitask_scores[metric.name] = results if not per_task_metrics: return multitask_scores diff --git a/deepchem/utils/fragment_util.py b/deepchem/utils/fragment_util.py deleted file mode 100644 index 4d9baf0b4f3eca20f51a93b35aa702c2c605c2f2..0000000000000000000000000000000000000000 --- a/deepchem/utils/fragment_util.py +++ /dev/null @@ -1,84 +0,0 @@ -"""A collection of utilities for dealing with Molecular Fragments""" -import itertools -import numpy as np -from deepchem.utils.geometry_utils import compute_pairwise_distances - - -def get_contact_atom_indices(fragments, cutoff=4.5): - """Compute that atoms close to contact region. - - Molecular complexes can get very large. This can make it unwieldy to - compute functions on them. To improve memory usage, it can be very - useful to trim out atoms that aren't close to contact regions. This - function computes pairwise distances between all pairs of molecules - in the molecular complex. If an atom is within cutoff distance of - any atom on another molecule in the complex, it is regarded as a - contact atom. Otherwise it is trimmed. 
- - Parameters - ---------- - fragments: List - As returned by `rdkit_util.load_complex`, a list of tuples of - `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol` - is the rdkit molecule object. - cutoff: float - The cutoff distance in angstroms. - - Returns - ------- - A list of length `len(molecular_complex)`. Each entry in this list - is a list of atom indices from that molecule which should be kept, in - sorted order. - """ - # indices to atoms to keep - keep_inds = [set([]) for _ in fragments] - for (ind1, ind2) in itertools.combinations(range(len(fragments)), 2): - frag1, frag2 = fragments[ind1], fragments[ind2] - pairwise_distances = compute_pairwise_distances(frag1[0], frag2[0]) - # contacts is of form (x_coords, y_coords), a tuple of 2 lists - contacts = np.nonzero((pairwise_distances < cutoff)) - # contacts[0] is the x_coords, that is the frag1 atoms that have - # nonzero contact. - frag1_atoms = set([int(c) for c in contacts[0].tolist()]) - # contacts[1] is the y_coords, the frag2 atoms with nonzero contacts - frag2_atoms = set([int(c) for c in contacts[1].tolist()]) - keep_inds[ind1] = keep_inds[ind1].union(frag1_atoms) - keep_inds[ind2] = keep_inds[ind2].union(frag2_atoms) - keep_inds = [sorted(list(keep)) for keep in keep_inds] - return keep_inds - - -def reduce_molecular_complex_to_contacts(fragments, cutoff=4.5): - """Reduce a molecular complex to only those atoms near a contact. - - Molecular complexes can get very large. This can make it unwieldy to - compute functions on them. To improve memory usage, it can be very - useful to trim out atoms that aren't close to contact regions. This - function takes in a molecular complex and returns a new molecular - complex representation that contains only contact atoms. The contact - atoms are computed by calling `get_contact_atom_indices` under the - hood. 
- - Parameters - ---------- - fragments: List - As returned by `rdkit_util.load_complex`, a list of tuples of - `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol` - is the rdkit molecule object. - cutoff: float - The cutoff distance in angstroms. - - Returns - ------- - A list of length `len(molecular_complex)`. Each entry in this list - is a tuple of `(coords, MolecularFragment)`. The coords is stripped - down to `(N_contact_atoms, 3)` where `N_contact_atoms` is the number - of contact atoms for this complex. `MolecularFragment` is used since - it's tricky to make a RDKit sub-molecule. - """ - atoms_to_keep = get_contact_atom_indices(fragments, cutoff) - reduced_complex = [] - for frag, keep in zip(fragments, atoms_to_keep): - contact_frag = get_mol_subset(frag[0], frag[1], keep) - reduced_complex.append(contact_frag) - return reduced_complex diff --git a/deepchem/utils/fragment_utils.py b/deepchem/utils/fragment_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..37de3b5370e45b4b9fa2ef923c1e3854d81997f9 --- /dev/null +++ b/deepchem/utils/fragment_utils.py @@ -0,0 +1,405 @@ +"""A collection of utilities for dealing with Molecular Fragments""" +import itertools +import numpy as np +from typing import List, Optional, Sequence, Set, Tuple, Union + +import logging +from deepchem.utils.typing import RDKitAtom, RDKitMol +from deepchem.utils.geometry_utils import compute_pairwise_distances + +logger = logging.getLogger(__name__) + + +class MoleculeLoadException(Exception): + + def __init__(self, *args, **kwargs): + Exception.__init__(*args, **kwargs) + + +class AtomShim(object): + """This is a shim object wrapping an atom. + + We use this class instead of raw RDKit atoms since manipulating a + large number of rdkit Atoms seems to result in segfaults. Wrapping + the basic information in an AtomShim seems to avoid issues. 
+ """ + + def __init__(self, atomic_num: int, partial_charge: float, + atom_coords: np.ndarray): + """Initialize this object + + Parameters + ---------- + atomic_num: int + Atomic number for this atom. + partial_charge: float + The partial Gasteiger charge for this atom + atom_coords: np.ndarray + Of shape (3,) with the coordinates of this atom + """ + self.atomic_num = atomic_num + self.partial_charge = partial_charge + self.coords = atom_coords + + def GetAtomicNum(self) -> int: + """Returns atomic number for this atom. + + Returns + ------- + int + Atomic number for this atom. + """ + return self.atomic_num + + def GetPartialCharge(self) -> float: + """Returns partial charge for this atom. + + Returns + ------- + float + A partial Gasteiger charge for this atom. + """ + return self.partial_charge + + def GetCoords(self) -> np.ndarray: + """Returns 3D coordinates for this atom as numpy array. + + Returns + ------- + np.ndarray + Numpy array of shape `(3,)` with coordinates for this atom. + """ + return self.coords + + +class MolecularFragment(object): + """A class that represents a fragment of a molecule. + + It's often convenient to represent a fragment of a molecule. For + example, if two molecules form a molecular complex, it may be useful + to create two fragments which represent the subsets of each molecule + that's close to the other molecule (in the contact region). + + Ideally, we'd be able to do this in RDKit direct, but manipulating + molecular fragments doesn't seem to be supported functionality. + + Examples + -------- + >>> import numpy as np + >>> from rdkit import Chem + >>> mol = Chem.MolFromSmiles("C") + >>> coords = np.array([[0.0, 0.0, 0.0]]) + >>> atom = mol.GetAtoms()[0] + >>> fragment = MolecularFragment([atom], coords) + """ + + def __init__(self, atoms: Sequence[RDKitAtom], coords: np.ndarray): + """Initialize this object. 
+ + Parameters + ---------- + atoms: Iterable[rdkit.Chem.rdchem.Atom] + Each entry in this list should be a RDKit Atom. + coords: np.ndarray + Array of locations for atoms of shape `(N, 3)` where `N == + len(atoms)`. + """ + if not isinstance(coords, np.ndarray): + raise ValueError("Coords must be a numpy array of shape (N, 3)") + if coords.shape != (len(atoms), 3): + raise ValueError( + "Coords must be a numpy array of shape `(N, 3)` where `N == len(atoms)`." + ) + self.atoms = [ + AtomShim(x.GetAtomicNum(), get_partial_charge(x), coords[ind]) + for ind, x in enumerate(atoms) + ] + self.coords = coords + + def GetAtoms(self) -> List[AtomShim]: + """Returns the list of atoms + + Returns + ------- + List[AtomShim] + list of atoms in this fragment. + """ + return self.atoms + + def GetNumAtoms(self) -> int: + """Returns the number of atoms + + Returns + ------- + int + Number of atoms in this fragment. + """ + return len(self.atoms) + + def GetCoords(self) -> np.ndarray: + """Returns 3D coordinates for this fragment as numpy array. + + Returns + ------- + np.ndarray + A numpy array of shape `(N, 3)` with coordinates for this fragment. + Here, N is the number of atoms. + """ + return self.coords + + +def get_partial_charge(atom: Union[RDKitAtom, AtomShim]) -> float: + """Get partial charge of a given atom (rdkit Atom object) + + Parameters + ---------- + atom: rdkit.Chem.rdchem.Atom or AtomShim + Either a rdkit.Atom object or `AtomShim` + + Returns + ------- + float + A partial Gasteiger charge of a given atom. + + Notes + ----- + This function requires RDKit to be installed. 
+ + Examples + -------- + >>> from rdkit import Chem + >>> mol = Chem.MolFromSmiles("CC") + >>> atom = mol.GetAtoms()[0] + >>> get_partial_charge(atom) + 0.0 + """ + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + + if isinstance(atom, Chem.Atom): + try: + value = atom.GetProp(str("_GasteigerCharge")) + if value == '-nan': + return 0.0 + return float(value) + except KeyError: + return 0.0 + else: + return atom.GetPartialCharge() + + +def merge_molecular_fragments( + molecules: List[MolecularFragment]) -> Optional[MolecularFragment]: + """Helper method to merge two molecular fragments. + + Parameters + ---------- + molecules: List[MolecularFragment] + List of `MolecularFragment` objects. + + Returns + ------- + Optional[MolecularFragment] + Returns a merged `MolecularFragment` + """ + if len(molecules) == 0: + return None + if len(molecules) == 1: + return molecules[0] + else: + all_atoms = [] + all_coords = [] + for mol_frag in molecules: + all_atoms += mol_frag.GetAtoms() + all_coords.append(mol_frag.GetCoords()) + all_coords = np.concatenate(all_coords) + return MolecularFragment(all_atoms, all_coords) + + +def get_mol_subset( + coords: np.ndarray, mol: Union[RDKitMol, MolecularFragment], + atom_indices_to_keep: List[int]) -> Tuple[np.ndarray, MolecularFragment]: + """Strip a subset of the atoms in this molecule + + Parameters + ---------- + coords: np.ndarray + Must be of shape (N, 3) and correspond to coordinates of mol. + mol: rdkit.Chem.rdchem.Mol or MolecularFragment + The molecule to strip + atom_indices_to_keep: list + List of the indices of the atoms to keep. Each index is a unique + number between `[0, N)`. + + Returns + ------- + Tuple[np.ndarray, MolecularFragment] + A tuple of `(coords, mol_frag)` where `coords` is a numpy array of + coordinates with hydrogen coordinates. `mol_frag` is a `MolecularFragment`. 
+ + Notes + ----- + This function requires RDKit to be installed. + """ + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + + indexes_to_keep = [] + atoms_to_keep = [] + # Compute partial charges on molecule if RDKit Mol + if isinstance(mol, Chem.Mol): + compute_charges(mol) + atoms = list(mol.GetAtoms()) + for index in atom_indices_to_keep: + indexes_to_keep.append(index) + atoms_to_keep.append(atoms[index]) + coords = coords[indexes_to_keep] + mol_frag = MolecularFragment(atoms_to_keep, coords) + return coords, mol_frag + + +def strip_hydrogens(coords: np.ndarray, mol: Union[RDKitMol, MolecularFragment] + ) -> Tuple[np.ndarray, MolecularFragment]: + """Strip the hydrogens from input molecule + + Parameters + ---------- + coords: np.ndarray + The coords must be of shape (N, 3) and correspond to coordinates of mol. + mol: rdkit.Chem.rdchem.Mol or MolecularFragment + The molecule to strip + + Returns + ------- + Tuple[np.ndarray, MolecularFragment] + A tuple of `(coords, mol_frag)` where `coords` is a numpy array of + coordinates with hydrogen coordinates. `mol_frag` is a `MolecularFragment`. + + Notes + ----- + This function requires RDKit to be installed. + """ + mol_atoms = mol.GetAtoms() + atomic_numbers = [atom.GetAtomicNum() for atom in mol_atoms] + atom_indices_to_keep = [ + ind for (ind, atomic_number) in enumerate(atomic_numbers) + if (atomic_number != 1) + ] + return get_mol_subset(coords, mol, atom_indices_to_keep) + + +def get_contact_atom_indices(fragments: List[Tuple[np.ndarray, RDKitMol]], + cutoff: float = 4.5) -> List[List[int]]: + """Compute that atoms close to contact region. + + Molecular complexes can get very large. This can make it unwieldy to + compute functions on them. To improve memory usage, it can be very + useful to trim out atoms that aren't close to contact regions. 
This + function computes pairwise distances between all pairs of molecules + in the molecular complex. If an atom is within cutoff distance of + any atom on another molecule in the complex, it is regarded as a + contact atom. Otherwise it is trimmed. + + Parameters + ---------- + fragments: List[Tuple[np.ndarray, rdkit.Chem.rdchem.Mol]] + As returned by `rdkit_utils.load_complex`, a list of tuples of + `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol` + is the rdkit molecule object. + cutoff: float, optional (default 4.5) + The cutoff distance in angstroms. + + Returns + ------- + List[List[int]] + A list of length `len(molecular_complex)`. Each entry in this list + is a list of atom indices from that molecule which should be kept, in + sorted order. + """ + # indices to atoms to keep + keep_inds: List[Set[int]] = [set([]) for _ in fragments] + for (ind1, ind2) in itertools.combinations(range(len(fragments)), 2): + frag1, frag2 = fragments[ind1], fragments[ind2] + pairwise_distances = compute_pairwise_distances(frag1[0], frag2[0]) + # contacts is of form (x_coords, y_coords), a tuple of 2 lists + contacts = np.nonzero((pairwise_distances < cutoff)) + # contacts[0] is the x_coords, that is the frag1 atoms that have + # nonzero contact. + frag1_atoms = set([int(c) for c in contacts[0].tolist()]) + # contacts[1] is the y_coords, the frag2 atoms with nonzero contacts + frag2_atoms = set([int(c) for c in contacts[1].tolist()]) + keep_inds[ind1] = keep_inds[ind1].union(frag1_atoms) + keep_inds[ind2] = keep_inds[ind2].union(frag2_atoms) + sorted_keep_inds = [sorted(list(keep)) for keep in keep_inds] + return sorted_keep_inds + + +def reduce_molecular_complex_to_contacts( + fragments: List[Tuple[np.ndarray, RDKitMol]], + cutoff: float = 4.5) -> List[Tuple[np.ndarray, MolecularFragment]]: + """Reduce a molecular complex to only those atoms near a contact. + + Molecular complexes can get very large. This can make it unwieldy to + compute functions on them. 
To improve memory usage, it can be very + useful to trim out atoms that aren't close to contact regions. This + function takes in a molecular complex and returns a new molecular + complex representation that contains only contact atoms. The contact + atoms are computed by calling `get_contact_atom_indices` under the + hood. + + Parameters + ---------- + fragments: List[Tuple[np.ndarray, rdkit.Chem.rdchem.Mol]] + As returned by `rdkit_utils.load_complex`, a list of tuples of + `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol` + is the rdkit molecule object. + cutoff: float + The cutoff distance in angstroms. + + Returns + ------- + List[Tuple[np.ndarray, MolecularFragment]] + A list of length `len(molecular_complex)`. Each entry in this list + is a tuple of `(coords, MolecularFragment)`. The coords is stripped + down to `(N_contact_atoms, 3)` where `N_contact_atoms` is the number + of contact atoms for this complex. `MolecularFragment` is used since + it's tricky to make a RDKit sub-molecule. + """ + atoms_to_keep = get_contact_atom_indices(fragments, cutoff) + reduced_complex = [] + for frag, keep in zip(fragments, atoms_to_keep): + contact_frag = get_mol_subset(frag[0], frag[1], keep) + reduced_complex.append(contact_frag) + return reduced_complex + + +# TODO: This is duplicated! Clean up +def compute_charges(mol): + """Attempt to compute Gasteiger Charges on Mol + + This also has the side effect of calculating charges on mol. The + mol passed into this function has to already have been sanitized + + Parameters + ---------- + mol: rdkit molecule + + Returns + ------- + No return since updates in place. + + Note + ---- + This function requires RDKit to be installed. 
+ """ + from rdkit.Chem import AllChem + try: + # Updates charges in place + AllChem.ComputeGasteigerCharges(mol) + except Exception as e: + logger.exception("Unable to compute charges for mol") + raise MoleculeLoadException(e) diff --git a/deepchem/utils/genomics.py b/deepchem/utils/genomics.py deleted file mode 100644 index f3baba26c446e5203e5398d404109602c7559d26..0000000000000000000000000000000000000000 --- a/deepchem/utils/genomics.py +++ /dev/null @@ -1,108 +0,0 @@ -""" -Genomic data handling utilities. -""" -import numpy as np - - -def seq_one_hot_encode(sequences, letters='ATCGN'): - """One hot encodes list of genomic sequences. - - Sequences encoded have shape (N_sequences, N_letters, sequence_length, 1). - These sequences will be processed as images with one color channel. - - Parameters - ---------- - sequences: np.ndarray - Array of genetic sequences - letters: str - String with the set of possible letters in the sequences. - - Raises - ------ - ValueError: - If sequences are of different lengths. - - Returns - ------- - np.ndarray: Shape (N_sequences, N_letters, sequence_length, 1). - """ - - # The label encoder is given characters for ACGTN - letter_encoder = {l: i for i, l in enumerate(letters)} - alphabet_length = len(letter_encoder) - - # Peak at the first sequence to get the length of the sequence. 
- try: - first_seq = next(sequences) - tail_seq = sequences - except TypeError: - first_seq = sequences[0] - tail_seq = sequences[1:] - - sequence_length = len(first_seq) - - seqs = [] - - seqs.append( - _seq_to_encoded(first_seq, letter_encoder, alphabet_length, - sequence_length)) - - for other_seq in tail_seq: - if len(other_seq) != sequence_length: - raise ValueError - - seqs.append( - _seq_to_encoded(other_seq, letter_encoder, alphabet_length, - sequence_length)) - - return np.expand_dims(np.array(seqs), -1) - - -def _seq_to_encoded(seq, letter_encoder, alphabet_length, sequence_length): - b = np.zeros((alphabet_length, sequence_length)) - seq_ints = [letter_encoder[s] for s in seq] - b[seq_ints, np.arange(sequence_length)] = 1 - - return b - - -def encode_fasta_sequence(fname): - """ - Loads fasta file and returns an array of one-hot sequences. - - Parameters - ---------- - fname: str - Filename of fasta file. - - Returns - ------- - np.ndarray: Shape (N_sequences, 5, sequence_length, 1). - """ - - return encode_bio_sequence(fname) - - -def encode_bio_sequence(fname, file_type="fasta", letters="ATCGN"): - """ - Loads a sequence file and returns an array of one-hot sequences. - - Parameters - ---------- - fname: str - Filename of fasta file. - file_type: str - The type of file encoding to process, e.g. fasta or fastq, this - is passed to Biopython.SeqIO.parse. - letters: str - The set of letters that the sequences consist of, e.g. ATCG. - - Returns - ------- - np.ndarray: Shape (N_sequences, N_letters, sequence_length, 1). - """ - - from Bio import SeqIO - - sequences = SeqIO.parse(fname, file_type) - return seq_one_hot_encode(sequences, letters) diff --git a/deepchem/utils/genomics_utils.py b/deepchem/utils/genomics_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c2496796a5617c6cab10ae0205eaab79621f0389 --- /dev/null +++ b/deepchem/utils/genomics_utils.py @@ -0,0 +1,122 @@ +""" +Genomic data handling Iterable. 
+""" +from typing import Dict, Iterator, Iterable, Union +import numpy as np + + +def seq_one_hot_encode(sequences: Union[np.ndarray, Iterator[Iterable[str]]], + letters: str = 'ATCGN') -> np.ndarray: + """One hot encodes list of genomic sequences. + + Sequences encoded have shape (N_sequences, N_letters, sequence_length, 1). + These sequences will be processed as images with one color channel. + + Parameters + ---------- + sequences: np.ndarray or Iterator[Bio.SeqRecord] + Iterable object of genetic sequences + letters: str, optional (default "ATCGN") + String with the set of possible letters in the sequences. + + Raises + ------ + ValueError: + If sequences are of different lengths. + + Returns + ------- + np.ndarray + A numpy array of shape `(N_sequences, N_letters, sequence_length, 1)`. + """ + + # The label encoder is given characters for ACGTN + letter_encoder = {l: i for i, l in enumerate(letters)} + alphabet_length = len(letter_encoder) + + # Peak at the first sequence to get the length of the sequence. + if isinstance(sequences, np.ndarray): + first_seq = sequences[0] + tail_seq = sequences[1:] + else: + first_seq = next(sequences) + tail_seq = sequences + + sequence_length = len(first_seq) + seqs = [] + seqs.append( + _seq_to_encoded(first_seq, letter_encoder, alphabet_length, + sequence_length)) + + for other_seq in tail_seq: + if len(other_seq) != sequence_length: + raise ValueError("The genetic sequences must have a same length") + seqs.append( + _seq_to_encoded(other_seq, letter_encoder, alphabet_length, + sequence_length)) + + return np.expand_dims(np.array(seqs), -1) + + +def _seq_to_encoded(seq: Union[str, Iterable[str]], + letter_encoder: Dict[str, int], alphabet_length: int, + sequence_length: int) -> np.ndarray: + """One hot encodes a genomic sequence. + + Sequences encoded have shape (N_sequences, N_letters, sequence_length, 1). + These sequences will be processed as images with one color channel. 
+ + Parameters + ---------- + seq: str or Bio.SeqRecord + a genetic sequence + letter_encoder: Dict[str, int] + The keys are letters and the values are unique int values (like 0, 1, 2...). + alphabet_length: int + Length with the set of possible letters in the sequences. + sequence_length: int + Length with a genetic sequence + + Returns + ------- + encoded_seq: np.ndarray + A numpy array of shape `(N_letters, sequence_length)`. + """ + encoded_seq = np.zeros((alphabet_length, sequence_length)) + seq_ints = [letter_encoder[s] for s in seq] + encoded_seq[seq_ints, np.arange(sequence_length)] = 1 + return encoded_seq + + +def encode_bio_sequence(fname: str, + file_type: str = "fasta", + letters: str = "ATCGN") -> np.ndarray: + """ + Loads a sequence file and returns an array of one-hot sequences. + + Parameters + ---------- + fname: str + Filename of fasta file. + file_type: str, optional (default "fasta") + The type of file encoding to process, e.g. fasta or fastq, this + is passed to Biopython.SeqIO.parse. + letters: str, optional (default "ATCGN") + The set of letters that the sequences consist of, e.g. ATCG. + + Returns + ------- + np.ndarray + A numpy array of shape `(N_sequences, N_letters, sequence_length, 1)`. + + Notes + ----- + This function requires BioPython to be installed. + """ + try: + from Bio import SeqIO + except ModuleNotFoundError: + raise ImportError("This function requires BioPython to be installed.") + + sequences = SeqIO.parse(fname, file_type) + return seq_one_hot_encode(sequences, letters) diff --git a/deepchem/utils/geometry_utils.py b/deepchem/utils/geometry_utils.py index b03728f47a9aba4826425fba22d60c99e9d8ad2b..bc24e1f1dd5f5178131a608476d58ae0ab894114 100644 --- a/deepchem/utils/geometry_utils.py +++ b/deepchem/utils/geometry_utils.py @@ -1,30 +1,52 @@ """ Geometric utility functions for 3D geometry. 
""" -import logging import numpy as np from scipy.spatial.distance import cdist -logger = logging.getLogger(__name__) +def unit_vector(vector: np.ndarray) -> np.ndarray: + """ Returns the unit vector of the vector. -def unit_vector(vector): - """ Returns the unit vector of the vector. """ + Parameters + ---------- + vector: np.ndarray + A numpy array of shape `(3,)`, where `3` is (x,y,z). + + Returns + ---------- + np.ndarray + A numpy array of shape `(3,)`. The unit vector of the input vector. + """ return vector / np.linalg.norm(vector) -def angle_between(vector_i, vector_j): - """Returns the angle in radians between vectors "vector_i" and "vector_j":: +def angle_between(vector_i: np.ndarray, vector_j: np.ndarray) -> np.ndarray: + """Returns the angle in radians between vectors "vector_i" and "vector_j" + Note that this function always returns the smaller of the two angles between + the vectors (value between 0 and pi). + + Parameters + ---------- + vector_i: np.ndarray + A numpy array of shape `(3,)`, where `3` is (x,y,z). + vector_j: np.ndarray + A numpy array of shape `(3,)`, where `3` is (x,y,z). + + Returns + ---------- + np.ndarray + The angle in radians between the two vectors. + + Examples + -------- >>> print("%0.06f" % angle_between((1, 0, 0), (0, 1, 0))) 1.570796 >>> print("%0.06f" % angle_between((1, 0, 0), (1, 0, 0))) 0.000000 >>> print("%0.06f" % angle_between((1, 0, 0), (-1, 0, 0))) 3.141593 - - Note that this function always returns the smaller of the two angles between - the vectors (value between 0 and pi). """ vector_i_u = unit_vector(vector_i) vector_j_u = unit_vector(vector_j) @@ -37,8 +59,8 @@ def angle_between(vector_i, vector_j): return angle -def generate_random_unit_vector(): - """Generate a random unit vector on the sphere S^2. +def generate_random_unit_vector() -> np.ndarray: + r"""Generate a random unit vector on the sphere S^2. 
def generate_random_unit_vector() -> np.ndarray:
  r"""Generate a random unit vector on the sphere S^2.

  Citation: http://mathworld.wolfram.com/SpherePointPicking.html

  Pseudocode:
    a. Choose random theta \element [0, 2*pi]
    b. Choose random z \element [-1, 1]
    c. Compute output vector u: (x,y,z) = (sqrt(1-z^2)*cos(theta), sqrt(1-z^2)*sin(theta),z)

  Returns
  -------
  u: np.ndarray
    A numpy array of shape `(3,)`. u is an unit vector
  """
  theta = np.random.uniform(low=0.0, high=2 * np.pi)
  z = np.random.uniform(low=-1.0, high=1.0)
  # Radius of the horizontal circle at height z on the unit sphere.
  r = np.sqrt(1 - z**2)
  return np.array([r * np.cos(theta), r * np.sin(theta), z])


def generate_random_rotation_matrix() -> np.ndarray:
  r"""Generate a random rotation matrix via Gram-Schmidt on random unit vectors.

  1. Sample two random unit vectors u, v from the unit sphere
     (see `generate_random_unit_vector`), redrawing v while it coincides
     with u so the pair spans a plane.
  2. Orthogonalize v against u and normalize to get vp.
  3. Complete the right-handed frame with w = u x vp; the columns
     (u, vp, w) form an orthonormal rotation matrix.

  Returns
  -------
  R: np.ndarray
    A numpy array of shape `(3, 3)`. R is a rotation matrix.
  """
  u = generate_random_unit_vector()
  v = generate_random_unit_vector()
  while np.allclose(u, v):
    # Degenerate draw: u and v must be linearly independent.
    v = generate_random_unit_vector()
  vp = v - (np.dot(u, v) * u)
  vp /= np.linalg.norm(vp)
  w = np.cross(u, vp)
  R = np.column_stack((u, vp, w))
  return R
def is_angle_within_cutoff(vector_i: np.ndarray, vector_j: np.ndarray,
                           angle_cutoff: float) -> bool:
  """A utility function to compute whether two vectors are within a cutoff from 180 degrees apart.

  Parameters
  ----------
  vector_i: np.ndarray
    A numpy array of shape `(3,)`, where `3` is (x,y,z).
  vector_j: np.ndarray
    A numpy array of shape `(3,)`, where `3` is (x,y,z).
  angle_cutoff: float
    The deviation from 180 (in degrees).

  Returns
  -------
  bool
    Whether two vectors are within a cutoff from 180 degrees apart
  """
  angle = angle_between(vector_i, vector_j) * 180. / np.pi
  # angle_between returns values in [0, pi], so the upper bound never
  # triggers in practice; it is kept so the check reads symmetrically.
  return (angle > (180 - angle_cutoff) and angle < (180. + angle_cutoff))


def compute_centroid(coordinates: np.ndarray) -> np.ndarray:
  """Compute the (x,y,z) centroid of provided coordinates

  Parameters
  ----------
  coordinates: np.ndarray
    A numpy array of shape `(N, 3)`, where `N` is the number of atoms.

  Returns
  -------
  centroid: np.ndarray
    A numpy array of shape `(3,)`, where `3` is (x,y,z).
  """
  centroid = np.mean(coordinates, axis=0)
  return centroid


def compute_protein_range(coordinates: np.ndarray) -> np.ndarray:
  """Compute the protein range (per-axis extent) of provided coordinates

  Parameters
  ----------
  coordinates: np.ndarray
    A numpy array of shape `(N, 3)`, where `N` is the number of atoms.

  Returns
  -------
  protein_range: np.ndarray
    A numpy array of shape `(3,)`, where `3` is (x,y,z).
  """
  protein_max = np.max(coordinates, axis=0)
  protein_min = np.min(coordinates, axis=0)
  protein_range = protein_max - protein_min
  return protein_range
""" - xyz -= np.transpose(centroid) - return (xyz) + coordinates -= np.transpose(centroid) + return coordinates -def compute_pairwise_distances(first_xyz, second_xyz): +def compute_pairwise_distances(first_coordinate: np.ndarray, + second_coordinate: np.ndarray) -> np.ndarray: """Computes pairwise distances between two molecules. Takes an input (m, 3) and (n, 3) numpy arrays of 3D coords of two molecules respectively, and outputs an m x n numpy array of pairwise distances in Angstroms between the first and - second molecule. entry (i,j) is dist between the i"th + second molecule. entry (i,j) is dist between the i"th atom of first molecule and the j"th atom of second molecule. Parameters ---------- - first_xyz: np.ndarray - Of shape (m, 3) - seocnd_xyz: np.ndarray - Of shape (n, 3) + first_coordinate: np.ndarray + A numpy array of shape `(m, 3)`, where `m` is the number of atoms. + second_coordinate: np.ndarray + A numpy array of shape `(n, 3)`, where `n` is the number of atoms. Returns ------- - np.ndarray of shape (m, n) + pairwise_distances: np.ndarray + A numpy array of shape `(m, n)` """ - pairwise_distances = cdist(first_xyz, second_xyz, metric='euclidean') + pairwise_distances = cdist( + first_coordinate, second_coordinate, metric='euclidean') return pairwise_distances diff --git a/deepchem/utils/hash_utils.py b/deepchem/utils/hash_utils.py index d8d8f616f2a3225d81e4a681e632c92d1e4188bc..7d372bada8b16cc5075d0b3a3050b27f2b69a06b 100644 --- a/deepchem/utils/hash_utils.py +++ b/deepchem/utils/hash_utils.py @@ -1,14 +1,12 @@ """ Various utilities around hash functions. """ -import logging +from typing import Callable, Dict, Optional, Tuple, Any import numpy as np import hashlib -logger = logging.getLogger(__name__) - -def hash_ecfp(ecfp, size): +def hash_ecfp(ecfp: str, size: int = 1024) -> int: """ Returns an int < size representing given ECFP fragment. @@ -20,17 +18,22 @@ def hash_ecfp(ecfp, size): ecfp: str String to hash. Usually an ECFP fragment. 
def hash_ecfp(ecfp: str, size: int = 1024) -> int:
  """Returns an int < size representing given ECFP fragment.

  Input must be a string. This utility function is used for various
  ECFP based fingerprints.

  Parameters
  ----------
  ecfp: str
    String to hash. Usually an ECFP fragment.
  size: int, optional (default 1024)
    Hash to an int in range [0, size)

  Returns
  -------
  ecfp_hash: int
    An int < size representing given ECFP fragment
  """
  digest = hashlib.md5(ecfp.encode('utf-8')).hexdigest()
  # Interpret the hex digest as an integer and fold it into [0, size).
  return int(digest, 16) % size


def hash_ecfp_pair(ecfp_pair: Tuple[str, str], size: int = 1024) -> int:
  """Returns an int < size representing that ECFP pair.

  Input must be a tuple of strings. This utility is primarily used for
  spatial contact featurizers. For example, if a protein and ligand
  atom are close to one another, the ECFP fragments of both could be
  hashed together to achieve one hash value for this contact.

  Parameters
  ----------
  ecfp_pair: Tuple[str, str]
    Pair of ECFP fragment strings
  size: int, optional (default 1024)
    Hash to an int in range [0, size)

  Returns
  -------
  ecfp_hash: int
    An int < size representing given ECFP pair.
  """
  combined = "%s,%s" % (ecfp_pair[0], ecfp_pair[1])
  digest = hashlib.md5(combined.encode('utf-8')).hexdigest()
  return int(digest, 16) % size
It then loops through `feature_dict`, uses `hash_function` to hash the stored value to an integer in range [0, size) and bumps that index. Parameters ---------- - hash_function: function + hash_function: Function, Callable[[str, int], int] Should accept two arguments, `feature`, and `size` and return a hashed integer. Here `feature` is the item to hash, and `size` is an int. For example, if `size=1024`, then hashed values must fall in range `[0, 1024)`. - feature_dict: dict - Maps unique keys to features computed. + feature_dict: Dict, optional (default None) + Maps unique keys to features computed. size: int, optional (default 1024) Length of generated bit vector + + Returns + ------- + feature_vector: np.ndarray + A numpy array of shape `(size,)` """ feature_vector = np.zeros(size) if feature_dict is not None: diff --git a/deepchem/utils/mol_xyz_util.py b/deepchem/utils/mol_xyz_util.py deleted file mode 100644 index 6734d17ab90d91a94428202ba86a71a6798ab0ab..0000000000000000000000000000000000000000 --- a/deepchem/utils/mol_xyz_util.py +++ /dev/null @@ -1,13 +0,0 @@ -import numpy as np - - -def get_molecule_centroid(molecule_xyz): - """Uses compute centroid and range of 3D coordinents""" - return np.mean(molecule_xyz, axis=0) - - -def get_molecule_range(molecule_xyz): - protein_max = np.max(molecule_xyz, axis=0) - protein_min = np.min(molecule_xyz, axis=0) - protein_range = protein_max - protein_min - return protein_range diff --git a/deepchem/utils/molecule_feature_utils.py b/deepchem/utils/molecule_feature_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2c664e98bec5e117e73cc1db0a1f73e5ad2d3ecc --- /dev/null +++ b/deepchem/utils/molecule_feature_utils.py @@ -0,0 +1,473 @@ +""" +Utilities for constructing node features or bond features. +Some functions are based on chainer-chemistry or dgl-lifesci. 
import os
import logging
from typing import List, Tuple, Union

import numpy as np

# NOTE(review): RDKit-typed parameters below are annotated with string
# literals ("RDKitAtom", ...) so importing this module does not require
# deepchem.utils.typing / RDKit to be importable at annotation-eval time.

logger = logging.getLogger(__name__)

# Default categorical vocabularies used by the one-hot featurizers below.
DEFAULT_ATOM_TYPE_SET = [
    "C",
    "N",
    "O",
    "F",
    "P",
    "S",
    "Cl",
    "Br",
    "I",
]
DEFAULT_HYBRIDIZATION_SET = ["SP", "SP2", "SP3"]
DEFAULT_TOTAL_NUM_Hs_SET = [0, 1, 2, 3, 4]
DEFAULT_TOTAL_DEGREE_SET = [0, 1, 2, 3, 4, 5]
DEFAULT_RING_SIZE_SET = [3, 4, 5, 6, 7, 8]
DEFAULT_BOND_TYPE_SET = ["SINGLE", "DOUBLE", "TRIPLE", "AROMATIC"]
DEFAULT_BOND_STEREO_SET = ["STEREONONE", "STEREOANY", "STEREOZ", "STEREOE"]
DEFAULT_GRAPH_DISTANCE_SET = [1, 2, 3, 4, 5, 6, 7]


class _ChemicalFeaturesFactory:
  """This is a singleton class for RDKit base features."""
  _instance = None

  @classmethod
  def get_instance(cls):
    try:
      from rdkit import RDConfig
      from rdkit.Chem import ChemicalFeatures
    except ModuleNotFoundError:
      raise ImportError("This class requires RDKit to be installed.")

    if not cls._instance:
      fdefName = os.path.join(RDConfig.RDDataDir, 'BaseFeatures.fdef')
      cls._instance = ChemicalFeatures.BuildFeatureFactory(fdefName)
    return cls._instance


def one_hot_encode(val: Union[int, str],
                   allowable_set: Union[List[str], List[int]],
                   include_unknown_set: bool = False) -> List[float]:
  """One hot encoder for elements of a provided set.

  Examples
  --------
  >>> one_hot_encode("a", ["a", "b", "c"])
  [1.0, 0.0, 0.0]
  >>> one_hot_encode(2, [0, 1, 2])
  [0.0, 0.0, 1.0]
  >>> one_hot_encode(3, [0, 1, 2])
  [0.0, 0.0, 0.0]
  >>> one_hot_encode(3, [0, 1, 2], True)
  [0.0, 0.0, 0.0, 1.0]

  Parameters
  ----------
  val: int or str
    The value must be present in `allowable_set`.
  allowable_set: List[int] or List[str]
    List of allowable quantities.
  include_unknown_set: bool, default False
    If true, the index of all values not in `allowable_set` is `len(allowable_set)`.

  Returns
  -------
  List[float]
    An one-hot vector of val.
    If `include_unknown_set` is False, the length is `len(allowable_set)` and
    an unknown `val` yields an all-zero vector (a message is logged; nothing
    is raised — the previous docstring incorrectly documented a ValueError).
    If `include_unknown_set` is True, the length is `len(allowable_set) + 1`
    and an unknown `val` sets the final slot.
  """
  if include_unknown_set is False and val not in allowable_set:
    # Not an error for callers: they simply receive an all-zero vector.
    logger.info("input {0} not in allowable set {1}:".format(
        val, allowable_set))

  # Initialize the one-hot vector (extra trailing slot for "unknown").
  one_hot_length = len(allowable_set) + (1 if include_unknown_set else 0)
  one_hot = [0.0 for _ in range(one_hot_length)]

  try:
    one_hot[allowable_set.index(val)] = 1.0  # type: ignore
  except ValueError:
    # `val` is not in `allowable_set`: mark the "unknown" slot if present.
    if include_unknown_set:
      one_hot[-1] = 1.0
  return one_hot


#################################################################
# atom (node) featurization
#################################################################


def get_atom_type_one_hot(atom: "RDKitAtom",
                          allowable_set: List[str] = DEFAULT_ATOM_TYPE_SET,
                          include_unknown_set: bool = True) -> List[float]:
  """Get an one-hot feature of an atom type.

  Parameters
  ---------
  atom: rdkit.Chem.rdchem.Atom
    RDKit atom object
  allowable_set: List[str]
    The atom types to consider. The default set is
    `["C", "N", "O", "F", "P", "S", "Cl", "Br", "I"]`.
  include_unknown_set: bool, default True
    If true, the index of all atom not in `allowable_set` is `len(allowable_set)`.

  Returns
  -------
  List[float]
    An one-hot vector of atom types.
    If `include_unknown_set` is False, the length is `len(allowable_set)`.
    If `include_unknown_set` is True, the length is `len(allowable_set) + 1`.
  """
  return one_hot_encode(atom.GetSymbol(), allowable_set, include_unknown_set)
+ """ + return one_hot_encode(atom.GetSymbol(), allowable_set, include_unknown_set) + + +def construct_hydrogen_bonding_info(mol: RDKitMol) -> List[Tuple[int, str]]: + """Construct hydrogen bonding infos about a molecule. + + Parameters + --------- + mol: rdkit.Chem.rdchem.Mol + RDKit mol object + + Returns + ------- + List[Tuple[int, str]] + A list of tuple `(atom_index, hydrogen_bonding_type)`. + The `hydrogen_bonding_type` value is "Acceptor" or "Donor". + """ + factory = _ChemicalFeaturesFactory.get_instance() + feats = factory.GetFeaturesForMol(mol) + hydrogen_bonding = [] + for f in feats: + hydrogen_bonding.append((f.GetAtomIds()[0], f.GetFamily())) + return hydrogen_bonding + + +def get_atom_hydrogen_bonding_one_hot( + atom: RDKitAtom, hydrogen_bonding: List[Tuple[int, str]]) -> List[float]: + """Get an one-hot feat about whether an atom accepts electrons or donates electrons. + + Parameters + --------- + atom: rdkit.Chem.rdchem.Atom + RDKit atom object + hydrogen_bonding: List[Tuple[int, str]] + The return value of `construct_hydrogen_bonding_info`. + The value is a list of tuple `(atom_index, hydrogen_bonding)` like (1, "Acceptor"). + + Returns + ------- + List[float] + A one-hot vector of the ring size type. The first element + indicates "Donor", and the second element indicates "Acceptor". + """ + one_hot = [0.0, 0.0] + atom_idx = atom.GetIdx() + for hydrogen_bonding_tuple in hydrogen_bonding: + if hydrogen_bonding_tuple[0] == atom_idx: + if hydrogen_bonding_tuple[1] == "Donor": + one_hot[0] = 1.0 + elif hydrogen_bonding_tuple[1] == "Acceptor": + one_hot[1] = 1.0 + return one_hot + + +def get_atom_is_in_aromatic_one_hot(atom: RDKitAtom) -> List[float]: + """Get ans one-hot feature about whether an atom is in aromatic system or not. + + Parameters + --------- + atom: rdkit.Chem.rdchem.Atom + RDKit atom object + + Returns + ------- + List[float] + A vector of whether an atom is in aromatic system or not. 
+ """ + return [float(atom.GetIsAromatic())] + + +def get_atom_hybridization_one_hot( + atom: RDKitAtom, + allowable_set: List[str] = DEFAULT_HYBRIDIZATION_SET, + include_unknown_set: bool = False) -> List[float]: + """Get an one-hot feature of hybridization type. + + Parameters + --------- + atom: rdkit.Chem.rdchem.Atom + RDKit atom object + allowable_set: List[str] + The hybridization types to consider. The default set is `["SP", "SP2", "SP3"]` + include_unknown_set: bool, default False + If true, the index of all types not in `allowable_set` is `len(allowable_set)`. + + Returns + ------- + List[float] + An one-hot vector of the hybridization type. + If `include_unknown_set` is False, the length is `len(allowable_set)`. + If `include_unknown_set` is True, the length is `len(allowable_set) + 1`. + """ + return one_hot_encode( + str(atom.GetHybridization()), allowable_set, include_unknown_set) + + +def get_atom_total_num_Hs_one_hot( + atom: RDKitAtom, + allowable_set: List[int] = DEFAULT_TOTAL_NUM_Hs_SET, + include_unknown_set: bool = True) -> List[float]: + """Get an one-hot feature of the number of hydrogens which an atom has. + + Parameters + --------- + atom: rdkit.Chem.rdchem.Atom + RDKit atom object + allowable_set: List[int] + The number of hydrogens to consider. The default set is `[0, 1, ..., 4]` + include_unknown_set: bool, default True + If true, the index of all types not in `allowable_set` is `len(allowable_set)`. + + Returns + ------- + List[float] + A one-hot vector of the number of hydrogens which an atom has. + If `include_unknown_set` is False, the length is `len(allowable_set)`. + If `include_unknown_set` is True, the length is `len(allowable_set) + 1`. + """ + return one_hot_encode(atom.GetTotalNumHs(), allowable_set, + include_unknown_set) + + +def get_atom_chirality_one_hot(atom: RDKitAtom) -> List[float]: + """Get an one-hot feature about an atom chirality type. 
def get_atom_chirality_one_hot(atom: "RDKitAtom") -> List[float]:
  """Get an one-hot feature about an atom chirality type.

  Parameters
  ---------
  atom: rdkit.Chem.rdchem.Atom
    RDKit atom object

  Returns
  -------
  List[float]
    A one-hot vector of the chirality type. The first element
    indicates "R", and the second element indicates "S".
  """
  one_hot = [0.0, 0.0]
  # Explicit HasProp check instead of the original bare `except:` around
  # GetProp — atoms without assigned stereochemistry simply lack '_CIPCode'.
  if atom.HasProp('_CIPCode'):
    chiral_type = atom.GetProp('_CIPCode')
    if chiral_type == "R":
      one_hot[0] = 1.0
    elif chiral_type == "S":
      one_hot[1] = 1.0
  return one_hot


def get_atom_formal_charge(atom: "RDKitAtom") -> List[float]:
  """Get a formal charge of an atom.

  Parameters
  ---------
  atom: rdkit.Chem.rdchem.Atom
    RDKit atom object

  Returns
  -------
  List[float]
    A vector of the formal charge.
  """
  return [float(atom.GetFormalCharge())]


def get_atom_partial_charge(atom: "RDKitAtom") -> List[float]:
  """Get a partial charge of an atom.

  Parameters
  ---------
  atom: rdkit.Chem.rdchem.Atom
    RDKit atom object

  Returns
  -------
  List[float]
    A vector of the partial charge.

  Notes
  -----
  Before using this function, you must calculate `GasteigerCharge`
  like `AllChem.ComputeGasteigerCharges(mol)`.
  """
  gasteiger_charge = atom.GetProp('_GasteigerCharge')
  # Gasteiger charges can fail to converge; treat non-finite markers as 0.
  if gasteiger_charge in ['-nan', 'nan', '-inf', 'inf']:
    gasteiger_charge = 0.0
  return [float(gasteiger_charge)]
def get_atom_total_degree_one_hot(
    atom: "RDKitAtom",
    allowable_set: List[int] = DEFAULT_TOTAL_DEGREE_SET,
    include_unknown_set: bool = True) -> List[float]:
  """Get an one-hot feature of the degree which an atom has.

  Parameters
  ---------
  atom: rdkit.Chem.rdchem.Atom
    RDKit atom object
  allowable_set: List[int]
    The degree to consider. The default set is `[0, 1, ..., 5]`
  include_unknown_set: bool, default True
    If true, the index of all types not in `allowable_set` is `len(allowable_set)`.

  Returns
  -------
  List[float]
    A one-hot vector of the degree which an atom has.
    If `include_unknown_set` is False, the length is `len(allowable_set)`.
    If `include_unknown_set` is True, the length is `len(allowable_set) + 1`.
  """
  return one_hot_encode(atom.GetTotalDegree(), allowable_set,
                        include_unknown_set)


#################################################################
# bond (edge) featurization
#################################################################


def get_bond_type_one_hot(bond: "RDKitBond",
                          allowable_set: List[str] = DEFAULT_BOND_TYPE_SET,
                          include_unknown_set: bool = False) -> List[float]:
  """Get an one-hot feature of bond type.

  Parameters
  ---------
  bond: rdkit.Chem.rdchem.Bond
    RDKit bond object
  allowable_set: List[str]
    The bond types to consider. The default set is `["SINGLE", "DOUBLE", "TRIPLE", "AROMATIC"]`.
  include_unknown_set: bool, default False
    If true, the index of all types not in `allowable_set` is `len(allowable_set)`.

  Returns
  -------
  List[float]
    A one-hot vector of the bond type.
    If `include_unknown_set` is False, the length is `len(allowable_set)`.
    If `include_unknown_set` is True, the length is `len(allowable_set) + 1`.
  """
  return one_hot_encode(
      str(bond.GetBondType()), allowable_set, include_unknown_set)


def get_bond_is_in_same_ring_one_hot(bond: "RDKitBond") -> List[float]:
  """Get an one-hot feature about whether atoms of a bond is in the same ring or not.

  Parameters
  ---------
  bond: rdkit.Chem.rdchem.Bond
    RDKit bond object

  Returns
  -------
  List[float]
    A one-hot vector of whether a bond is in the same ring or not.
  """
  # float(...) so the element type matches the declared List[float]
  # (the original returned [int(...)]).
  return [float(bond.IsInRing())]
+ """ + return [int(bond.GetIsConjugated())] + + +def get_bond_stereo_one_hot(bond: RDKitBond, + allowable_set: List[str] = DEFAULT_BOND_STEREO_SET, + include_unknown_set: bool = True) -> List[float]: + """Get an one-hot feature of the stereo configuration of a bond. + + Parameters + --------- + bond: rdkit.Chem.rdchem.Bond + RDKit bond object + allowable_set: List[str] + The stereo configuration types to consider. + The default set is `["STEREONONE", "STEREOANY", "STEREOZ", "STEREOE"]`. + include_unknown_set: bool, default True + If true, the index of all types not in `allowable_set` is `len(allowable_set)`. + + Returns + ------- + List[float] + A one-hot vector of the stereo configuration of a bond. + If `include_unknown_set` is False, the length is `len(allowable_set)`. + If `include_unknown_set` is True, the length is `len(allowable_set) + 1`. + """ + return one_hot_encode( + str(bond.GetStereo()), allowable_set, include_unknown_set) + + +def get_bond_graph_distance_one_hot( + bond: RDKitBond, + graph_dist_matrix: np.ndarray, + allowable_set: List[int] = DEFAULT_GRAPH_DISTANCE_SET, + include_unknown_set: bool = True) -> List[float]: + """Get an one-hot feature of graph distance. + + Parameters + --------- + bond: rdkit.Chem.rdchem.Bond + RDKit bond object + graph_dist_matrix: np.ndarray + The return value of `Chem.GetDistanceMatrix(mol)`. The shape is `(num_atoms, num_atoms)`. + allowable_set: List[int] + The graph distance types to consider. The default set is `[1, 2, ..., 7]`. + include_unknown_set: bool, default False + If true, the index of all types not in `allowable_set` is `len(allowable_set)`. + + Returns + ------- + List[float] + A one-hot vector of the graph distance. + If `include_unknown_set` is False, the length is `len(allowable_set)`. + If `include_unknown_set` is True, the length is `len(allowable_set) + 1`. 
+ """ + graph_dist = graph_dist_matrix[bond.GetBeginAtomIdx(), bond.GetEndAtomIdx()] + return one_hot_encode(graph_dist, allowable_set, include_unknown_set) diff --git a/deepchem/utils/noncovalent_utils.py b/deepchem/utils/noncovalent_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..3fcfd6c208cc43f4f19c967de22b9a6d2f109919 --- /dev/null +++ b/deepchem/utils/noncovalent_utils.py @@ -0,0 +1,466 @@ +"""The functions in these utilities check that noncovalent interactions happen""" +import numpy as np +from collections import Counter +from deepchem.utils.fragment_utils import get_partial_charge +from deepchem.utils.rdkit_utils import compute_ring_center +from deepchem.utils.rdkit_utils import compute_ring_normal +from deepchem.utils.geometry_utils import angle_between +from deepchem.utils.geometry_utils import is_angle_within_cutoff + + +def is_salt_bridge(atom_i, atom_j): + """Check if two atoms have correct charges to form a salt bridge""" + if np.abs(2.0 - np.abs( + get_partial_charge(atom_i) - get_partial_charge(atom_j))) < 0.01: + return True + return False + + +def compute_salt_bridges(first, second, pairwise_distances, cutoff=5.0): + """Find salt bridge contacts between two molecules. + + Parameters: + ----------- + first: rdkit.rdchem.Mol + Interacting molecules + second: rdkit.rdchem.Mol + Interacting molecules + pairwise_distances: np.ndarray + Array of pairwise interatomic distances between molecule atoms (Angstroms) + cutoff: float + Cutoff distance for contact consideration + + Returns: + -------- + salt_bridge_contacts: list of tuples + List of contacts. Tuple (i, j) indicates that atom i from + first molecule interacts with atom j from second. 
+ """ + + salt_bridge_contacts = [] + contacts = np.nonzero(pairwise_distances < cutoff) + contacts = zip(contacts[0], contacts[1]) + for contact in contacts: + first_atom = first.GetAtoms()[int(contact[0])] + second_atom = second.GetAtoms()[int(contact[1])] + if is_salt_bridge(first_atom, second_atom): + salt_bridge_contacts.append(contact) + return salt_bridge_contacts + + +def is_hydrogen_bond(frag1, + frag2, + contact, + hbond_distance_cutoff=4.0, + hbond_angle_cutoff=40.0): + """ + Determine if a pair of atoms (contact = frag1_atom_index, + frag2_atom_index) between two molecules represents a hydrogen + bond. Returns a boolean result. + + Parameters + ---------- + frag1: tuple + Tuple of (coords, rdkit mol / MolecularFragment) + frag2: tuple + Tuple of (coords, rdkit mol / MolecularFragment) + contact: Tuple + Tuple of indices for (atom_i, atom_j) contact. + hbond_distance_cutoff: float, optional + Distance cutoff for hbond. + hbond_angle_cutoff: float, optional + Angle deviance cutoff for hbond + """ + frag1_xyz, frag2_xyz = frag1[0], frag2[0] + frag1_mol, frag2_mol = frag1[1], frag2[1] + frag1_atom_xyz = frag1_xyz[int(contact[0])] + frag2_atom_xyz = frag2_xyz[int(contact[1])] + frag1_atom = frag1_mol.GetAtoms()[int(contact[0])] + frag2_atom = frag2_mol.GetAtoms()[int(contact[1])] + + # Nitrogen has atomic number 7, and oxygen 8. + if ((frag2_atom.GetAtomicNum() == 7 or frag2_atom.GetAtomicNum() == 8) and + (frag1_atom.GetAtomicNum() == 7 or frag1_atom.GetAtomicNum() == 8)): + hydrogens = [] + + for i, atom in enumerate(frag2_mol.GetAtoms()): + # If atom is a hydrogen + if atom.GetAtomicNum() == 1: + atom_xyz = frag2_xyz[i] + dist = np.linalg.norm(atom_xyz - frag2_atom_xyz) + # O-H distance is 0.96 A, N-H is 1.01 A. 
See http://www.science.uwaterloo.ca/~cchieh/cact/c120/bondel.html + if dist < 1.3: + hydrogens.append(atom_xyz) + + for j, atom in enumerate(frag1_mol.GetAtoms()): + # If atom is a hydrogen + if atom.GetAtomicNum() == 1: + atom_xyz = frag1_xyz[i] + dist = np.linalg.norm(atom_xyz - frag1_atom_xyz) + # O-H distance is 0.96 A, N-H is 1.01 A. See http://www.science.uwaterloo.ca/~cchieh/cact/c120/bondel.html + if dist < 1.3: + hydrogens.append(atom_xyz) + + for hydrogen_xyz in hydrogens: + hydrogen_to_frag2 = frag2_atom_xyz - hydrogen_xyz + hydrogen_to_frag1 = frag1_atom_xyz - hydrogen_xyz + return is_angle_within_cutoff(hydrogen_to_frag2, hydrogen_to_frag1, + hbond_angle_cutoff) + return False + + +def compute_hbonds_in_range(frag1, frag2, pairwise_distances, hbond_dist_bin, + hbond_angle_cutoff): + """ + Find all pairs of (frag1_index_i, frag2_index_j) that hydrogen bond + given a distance bin and an angle cutoff. + + Parameters + ---------- + frag1: tuple + Tuple of (coords, rdkit mol / MolecularFragment + frag2: tuple + Tuple of (coords, rdkit mol / MolecularFragment + pairwise_distances: + Matrix of shape `(N, M)` with pairwise distances between frag1/frag2. + hbond_dist_bin: tuple + Tuple of floats `(min_dist, max_dist)` in angstroms. + hbond_angle_cutoffs: list[float] + List of angles of deviances allowed for hbonds + """ + + contacts = np.nonzero((pairwise_distances > hbond_dist_bin[0]) & + (pairwise_distances < hbond_dist_bin[1])) + contacts = zip(contacts[0], contacts[1]) + hydrogen_bond_contacts = [] + for contact in contacts: + if is_hydrogen_bond(frag1, frag2, contact, hbond_angle_cutoff): + hydrogen_bond_contacts.append(contact) + return hydrogen_bond_contacts + + +def compute_hydrogen_bonds(frag1, frag2, pairwise_distances, hbond_dist_bins, + hbond_angle_cutoffs): + """Computes hydrogen bonds between proteins and ligands. + + Returns a list of sublists. 
def compute_hydrogen_bonds(frag1, frag2, pairwise_distances, hbond_dist_bins,
                           hbond_angle_cutoffs):
  """Computes hydrogen bonds between proteins and ligands.

  Returns a list of sublists. Each sublist is a series of tuples
  of (protein_index_i, ligand_index_j) that represent a hydrogen
  bond. Each sublist represents a different type of hydrogen
  bond.

  Parameters
  ----------
  frag1: tuple
    Tuple of (coords, rdkit mol / MolecularFragment)
  frag2: tuple
    Tuple of (coords, rdkit mol / MolecularFragment)
  pairwise_distances:
    Matrix of shape `(N, M)` with pairwise distances between frag1/frag2.
  hbond_dist_bins: list[tuple]
    List of tuples of hbond distance ranges.
  hbond_angle_cutoffs: list[float]
    List of angles of deviances allowed for hbonds (one per distance bin).

  Returns
  -------
  List
    A list of hydrogen bond contacts, one sublist per distance bin.
  """
  hbond_contacts = []
  for i, hbond_dist_bin in enumerate(hbond_dist_bins):
    hbond_angle_cutoff = hbond_angle_cutoffs[i]
    hbond_contacts.append(
        compute_hbonds_in_range(frag1, frag2, pairwise_distances,
                                hbond_dist_bin, hbond_angle_cutoff))
  return hbond_contacts


def compute_cation_pi(mol1, mol2, charge_tolerance=0.01, **kwargs):
  """Finds aromatic rings in mol1 and cations in mol2 that interact with each other.

  Parameters:
  -----------
  mol1: rdkit.rdchem.Mol
    Molecule to look for interacting rings
  mol2: rdkit.rdchem.Mol
    Molecule to look for interacting cations
  charge_tolerance: float
    Atom is considered a cation if its formal charge is greater
    than 1 - charge_tolerance
  **kwargs:
    Arguments that are passed to is_cation_pi function

  Returns:
  --------
  mol1_pi: dict
    Dictionary that maps atom indices (from mol1) to the number of cations
    (in mol2) they interact with
  mol2_cation: dict
    Dictionary that maps atom indices (from mol2) to the number of aromatic
    atoms (in mol1) they interact with
  """
  mol1_pi = Counter()
  mol2_cation = Counter()
  conformer = mol2.GetConformer()

  aromatic_atoms = set(atom.GetIdx() for atom in mol1.GetAromaticAtoms())
  from rdkit import Chem
  rings = [list(r) for r in Chem.GetSymmSSSR(mol1)]

  for ring in rings:
    # if ring from mol1 is aromatic
    if set(ring).issubset(aromatic_atoms):
      ring_center = compute_ring_center(mol1, ring)
      ring_normal = compute_ring_normal(mol1, ring)

      for atom in mol2.GetAtoms():
        # ...and atom from mol2 is a cation
        if atom.GetFormalCharge() > 1.0 - charge_tolerance:
          cation_position = np.array(conformer.GetAtomPosition(atom.GetIdx()))
          # if angle and distance are correct
          if is_cation_pi(cation_position, ring_center, ring_normal, **kwargs):
            # count atoms forming a contact
            mol1_pi.update(ring)
            # BUG FIX: the original called atom.GetIndex(), which does not
            # exist on RDKit atoms (AttributeError); the method is GetIdx().
            mol2_cation.update([atom.GetIdx()])
  return mol1_pi, mol2_cation


def is_cation_pi(cation_position,
                 ring_center,
                 ring_normal,
                 dist_cutoff=6.5,
                 angle_cutoff=30.0):
  """Check if a cation and an aromatic ring form contact.

  Parameters:
  -----------
  ring_center: np.ndarray
    Positions of ring center. Can be computed with the compute_ring_center
    function.
  ring_normal: np.ndarray
    Normal of ring. Can be computed with the compute_ring_normal function.
  dist_cutoff: float
    Distance cutoff. Max allowed distance between ring center
    and cation (in Angstroms).
  angle_cutoff: float
    Angle cutoff. Max allowed deviation from the ideal (0deg)
    angle between ring normal and vector pointing from ring
    center to cation (in degrees).

  Returns
  -------
  bool
    True if the cation sits close enough to, and roughly above/below,
    the ring plane.
  """
  cation_to_ring_vec = cation_position - ring_center
  dist = np.linalg.norm(cation_to_ring_vec)
  angle = angle_between(cation_to_ring_vec, ring_normal) * 180. / np.pi
  # Accept either face of the ring (angle near 0 or near 180 degrees).
  if ((angle < angle_cutoff or angle > 180.0 - angle_cutoff) and
      (dist < dist_cutoff)):
    return True
  return False
Max allowed distance between ring center + and cation (in Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (0deg) + angle between ring normal and vector pointing from ring + center to cation (in degrees). + """ + cation_to_ring_vec = cation_position - ring_center + dist = np.linalg.norm(cation_to_ring_vec) + angle = angle_between(cation_to_ring_vec, ring_normal) * 180. / np.pi + if ((angle < angle_cutoff or angle > 180.0 - angle_cutoff) and + (dist < dist_cutoff)): + return True + return False + + +def compute_pi_stack(mol1, + mol2, + pairwise_distances=None, + dist_cutoff=4.4, + angle_cutoff=30.): + """Find aromatic rings in both molecules that form pi-pi contacts. + For each atom in the contact, count number of atoms in the other molecule + that form this contact. + + Pseudocode: + + for each aromatic ring in mol1: + for each aromatic ring in mol2: + compute distance between centers + compute angle between normals + if it counts as parallel pi-pi: + count interacting atoms + if it counts as pi-T: + count interacting atoms + + Parameters: + ----------- + mol1: rdkit.rdchem.Mol + First molecule. + mol2: rdkit.rdchem.Mol + Second molecule. + pairwise_distances: np.ndarray (optional) + Array of pairwise interatomic distances (Angstroms) + dist_cutoff: float + Distance cutoff. Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal angle between rings. + + Returns: + -------- + mol1_pi_t, mol1_pi_parallel, mol2_pi_t, mol2_pi_parallel: dict + Dictionaries mapping atom indices to number of atoms they interact with. + Separate dictionary is created for each type of pi stacking (parallel and + T-shaped) and each molecule (mol1 and mol2). 
+ """ + + mol1_pi_parallel = Counter() + mol1_pi_t = Counter() + mol2_pi_parallel = Counter() + mol2_pi_t = Counter() + + mol1_aromatic_rings = [] + mol2_aromatic_rings = [] + from rdkit import Chem + for mol, ring_list in ((mol1, mol1_aromatic_rings), (mol2, + mol2_aromatic_rings)): + aromatic_atoms = {atom.GetIdx() for atom in mol.GetAromaticAtoms()} + for ring in Chem.GetSymmSSSR(mol): + # if ring is aromatic + if set(ring).issubset(aromatic_atoms): + # save its indices, center, and normal + ring_center = compute_ring_center(mol, ring) + ring_normal = compute_ring_normal(mol, ring) + ring_list.append((ring, ring_center, ring_normal)) + + # remember mol1-mol2 pairs we already counted + counted_pairs_parallel = set() + counted_pairs_t = set() + for prot_ring, prot_ring_center, prot_ring_normal in mol1_aromatic_rings: + for lig_ring, lig_ring_center, lig_ring_normal in mol2_aromatic_rings: + if is_pi_parallel( + prot_ring_center, + prot_ring_normal, + lig_ring_center, + lig_ring_normal, + angle_cutoff=angle_cutoff, + dist_cutoff=dist_cutoff): + prot_to_update = set() + lig_to_update = set() + for prot_atom_idx in prot_ring: + for lig_atom_idx in lig_ring: + if (prot_atom_idx, lig_atom_idx) not in counted_pairs_parallel: + # if this pair is new, count atoms forming a contact + prot_to_update.add(prot_atom_idx) + lig_to_update.add(lig_atom_idx) + counted_pairs_parallel.add((prot_atom_idx, lig_atom_idx)) + + mol1_pi_parallel.update(prot_to_update) + mol2_pi_parallel.update(lig_to_update) + + if is_pi_t( + prot_ring_center, + prot_ring_normal, + lig_ring_center, + lig_ring_normal, + angle_cutoff=angle_cutoff, + dist_cutoff=dist_cutoff): + prot_to_update = set() + lig_to_update = set() + for prot_atom_idx in prot_ring: + for lig_atom_idx in lig_ring: + if (prot_atom_idx, lig_atom_idx) not in counted_pairs_t: + # if this pair is new, count atoms forming a contact + prot_to_update.add(prot_atom_idx) + lig_to_update.add(lig_atom_idx) + counted_pairs_t.add((prot_atom_idx, 
lig_atom_idx)) + + mol1_pi_t.update(prot_to_update) + mol2_pi_t.update(lig_to_update) + + return (mol1_pi_t, mol1_pi_parallel, mol2_pi_t, mol2_pi_parallel) + + +def is_pi_t(ring1_center, + ring1_normal, + ring2_center, + ring2_normal, + dist_cutoff=5.5, + angle_cutoff=30.0): + """Check if two aromatic rings form a T-shaped pi-pi contact. + + Parameters: + ----------- + ring1_center, ring2_center: np.ndarray + Positions of centers of the two rings. Can be computed with the + compute_ring_center function. + ring1_normal, ring2_normal: np.ndarray + Normals of the two rings. Can be computed with the compute_ring_normal + function. + dist_cutoff: float + Distance cutoff. Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (90deg) angle between + the rings (in degrees). + """ + dist = np.linalg.norm(ring1_center - ring2_center) + angle = angle_between(ring1_normal, ring2_normal) * 180 / np.pi + if ((90.0 - angle_cutoff < angle < 90.0 + angle_cutoff) and + dist < dist_cutoff): + return True + return False + + +def is_pi_parallel(ring1_center: np.ndarray, + ring1_normal: np.ndarray, + ring2_center: np.ndarray, + ring2_normal: np.ndarray, + dist_cutoff: float = 8.0, + angle_cutoff: float = 30.0) -> bool: + """Check if two aromatic rings form a parallel pi-pi contact. + + Parameters + ---------- + ring1_center, ring2_center: np.ndarray + Positions of centers of the two rings. Can be computed with the + compute_ring_center function. + ring1_normal, ring2_normal: np.ndarray + Normals of the two rings. Can be computed with the compute_ring_normal + function. + dist_cutoff: float + Distance cutoff. Max allowed distance between the ring center (Angstroms). + angle_cutoff: float + Angle cutoff. Max allowed deviation from the ideal (0deg) angle between + the rings (in degrees). + + Returns + ------- + bool + True if two aromatic rings form a parallel pi-pi. 
+ """ + + dist = np.linalg.norm(ring1_center - ring2_center) + angle = angle_between(ring1_normal, ring2_normal) * 180 / np.pi + if ((angle < angle_cutoff or angle > 180.0 - angle_cutoff) and + dist < dist_cutoff): + return True + return False + + +def compute_binding_pocket_cation_pi(mol1, mol2, **kwargs): + """Finds cation-pi interactions between mol1 and mol2. + + Parameters: + ----------- + mol1: rdkit.rdchem.Mol + Interacting molecules + mol2: rdkit.rdchem.Mol + Interacting molecules + **kwargs: + Arguments that are passed to compute_cation_pi function + + Returns: + -------- + mol1_cation_pi, mol2_cation_pi: dict + Dictionaries that maps atom indices to the number of cations/aromatic + atoms they interact with + """ + # find interacting rings from mol1 and cations from mol2 + mol1_pi, mol2_cation = compute_cation_pi(mol1, mol2, **kwargs) + # find interacting cations from mol1 and rings from mol2 + mol2_pi, mol1_cation = compute_cation_pi(mol2, mol1, **kwargs) + + # merge counters + mol1_cation_pi = Counter() + mol1_cation_pi.update(mol1_pi) + mol1_cation_pi.update(mol1_cation) + + mol2_cation_pi = Counter() + mol2_cation_pi.update(mol2_pi) + mol2_cation_pi.update(mol2_cation) + + return mol1_cation_pi, mol2_cation_pi diff --git a/deepchem/utils/pdbqt_utils.py b/deepchem/utils/pdbqt_utils.py index 9f910c8c514894ce0874126a26d0573724594227..6af1ff9880c765b4086c1302c1be559321ca1ea3 100644 --- a/deepchem/utils/pdbqt_utils.py +++ b/deepchem/utils/pdbqt_utils.py @@ -1,7 +1,11 @@ """Utilities for handling PDBQT files.""" +from typing import Dict, List, Optional, Set, Tuple +from deepchem.utils.typing import RDKitMol -def pdbqt_to_pdb(filename=None, pdbqt_data=None): + +def pdbqt_to_pdb(filename: Optional[str] = None, + pdbqt_data: Optional[List[str]] = None) -> str: """Extracts the PDB part of a pdbqt file as a string. Either `filename` or `pdbqt_data` must be provided. 
This function @@ -9,14 +13,15 @@ def pdbqt_to_pdb(filename=None, pdbqt_data=None): Parameters ---------- - filename: str, optional + filename: str, optional (default None) Filename of PDBQT file - pdbqt_data: list[str], optional + pdbqt_data: List[str], optional (default None) Raw list of lines containing data from PDBQT file. Returns ------- - pdb_block: String containing the PDB portion of pdbqt file. + pdb_block: str + String containing the PDB portion of pdbqt file. """ if filename is not None and pdbqt_data is not None: raise ValueError("Only one of filename or pdbqt_data can be provided") @@ -24,20 +29,22 @@ def pdbqt_to_pdb(filename=None, pdbqt_data=None): raise ValueError("Either filename or pdbqt_data must be provided") elif filename is not None: pdbqt_data = open(filename).readlines() + pdb_block = "" - for line in pdbqt_data: + # FIXME: Item "None" of "Optional[List[str]]" has no attribute "__iter__" (not iterable) + for line in pdbqt_data: # type: ignore pdb_block += "%s\n" % line[:66] return pdb_block -def convert_protein_to_pdbqt(mol, outfile): +def convert_protein_to_pdbqt(mol: RDKitMol, outfile: str) -> None: """Convert a protein PDB file into a pdbqt file. Writes the extra PDBQT terms directly to `outfile`. Parameters ---------- - mol: rdkit Mol + mol: rdkit.Chem.rdchem.Mol Protein molecule outfile: str filename which already has a valid pdb representation of mol @@ -60,7 +67,7 @@ def convert_protein_to_pdbqt(mol, outfile): fout.write(line) -def mol_to_graph(mol): +def _mol_to_graph(mol: RDKitMol): """Convert RDKit Mol to NetworkX graph Convert mol into a graph representation atoms are nodes, and bonds @@ -68,15 +75,23 @@ def mol_to_graph(mol): Parameters ---------- - mol: rdkit Mol - The molecule to convert into a graph. + mol: rdkit.Chem.rdchem.Mol + The molecule to convert into a graph. Returns ------- graph: networkx.Graph Contains atoms indices as nodes, edges as bonds. + + Notes + ----- + This function requires NetworkX to be installed. 
""" - import networkx as nx + try: + import networkx as nx + except ModuleNotFoundError: + raise ImportError("This function requires NetworkX to be installed.") + G = nx.Graph() num_atoms = mol.GetNumAtoms() G.add_nodes_from(range(num_atoms)) @@ -87,7 +102,7 @@ def mol_to_graph(mol): return G -def get_rotatable_bonds(mol): +def _get_rotatable_bonds(mol: RDKitMol) -> List[Tuple[int, int]]: """ https://github.com/rdkit/rdkit/blob/f4529c910e546af590c56eba01f96e9015c269a6/Code/GraphMol/Descriptors/Lipinski.cpp#L107 @@ -96,16 +111,24 @@ def get_rotatable_bonds(mol): Parameters ---------- - mol: rdkit Mol + mol: rdkit.Chem.rdchem.Mol Ligand molecule Returns ------- - rotatable_bonds: list + rotatable_bonds: List[List[int, int]] List of rotatable bonds in molecule + + Notes + ----- + This function requires RDKit to be installed. """ - from rdkit import Chem - from rdkit.Chem import rdmolops + try: + from rdkit import Chem + from rdkit.Chem import rdmolops + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + pattern = Chem.MolFromSmarts( "[!$(*#*)&!D1&!$(C(F)(F)F)&!$(C(Cl)(Cl)Cl)&!$(C(Br)(Br)Br)&!$(C([CH3])(" "[CH3])[CH3])&!$([CD3](=[N,O,S])-!@[#7,O,S!D1])&!$([#7,O,S!D1]-!@[CD3]=" @@ -117,29 +140,33 @@ def get_rotatable_bonds(mol): return rotatable_bonds -def convert_mol_to_pdbqt(mol, outfile): +def convert_mol_to_pdbqt(mol: RDKitMol, outfile: str) -> None: """Writes the provided ligand molecule to specified file in pdbqt format. Creates a torsion tree and write to pdbqt file. The torsion tree - represents rotatable bonds in the molecule. - - Note - ---- - This function requires RDKit to be installed. + represents rotatable bonds in the molecule. Parameters ---------- - mol: rdkit Mol + mol: rdkit.Chem.rdchem.Mol The molecule whose value is stored in pdb format in outfile outfile: str Filename for a valid pdb file with the extention .pdbqt + + Notes + ----- + This function requires NetworkX to be installed. 
""" - import networkx as nx + try: + import networkx as nx + except ModuleNotFoundError: + raise ImportError("This function requires NetworkX to be installed.") + # Walk through the original file and extract ATOM/HETATM lines and # add PDBQT charge annotations. pdb_map = _create_pdb_map(outfile) - graph = mol_to_graph(mol) - rotatable_bonds = get_rotatable_bonds(mol) + graph = _mol_to_graph(mol) + rotatable_bonds = _get_rotatable_bonds(mol) # Remove rotatable bonds from this molecule for bond in rotatable_bonds: @@ -172,7 +199,7 @@ def convert_mol_to_pdbqt(mol, outfile): fout.write(line) -def _create_pdb_map(outfile): +def _create_pdb_map(outfile: str) -> Dict[int, str]: """Create a mapping from atom numbers to lines to write to pdbqt This is a map from rdkit atom number to its line in the pdb @@ -188,12 +215,12 @@ def _create_pdb_map(outfile): Returns ------- - pdb_map: dict + pdb_map: Dict[int, str] Maps rdkit atom numbers to lines to be written to PDBQT file. """ lines = [x.strip() for x in open(outfile).readlines()] - lines = filter(lambda x: x.startswith("HETATM") or x.startswith("ATOM"), - lines) + lines = list( + filter(lambda x: x.startswith("HETATM") or x.startswith("ATOM"), lines)) lines = [x[:66] for x in lines] pdb_map = {} for line in lines: @@ -207,7 +234,8 @@ def _create_pdb_map(outfile): return pdb_map -def _create_component_map(mol, components): +def _create_component_map(mol: RDKitMol, + components: List[List[int]]) -> Dict[int, int]: """Creates a map from atom ids to disconnected component id For each atom in `mol`, maps it to the id of the component in the @@ -217,14 +245,14 @@ def _create_component_map(mol, components): Parameters ---------- - mol: rdkit Mol - molecule to find disconnected compontents in - components: list + mol: rdkit.Chem.rdchem.Mol + The molecule to find disconnected components in + components: List[List[int]] List of connected components Returns ------- - comp_map: dict + comp_map: Dict[int, int] Maps atom ids to component 
ides """ comp_map = {} @@ -236,33 +264,35 @@ def _create_component_map(mol, components): return comp_map -def _dfs(used_partitions, current_partition, bond, components, rotatable_bonds, - lines, pdb_map, comp_map): +def _dfs(used_partitions: Set[int], current_partition: int, + bond: Tuple[int, int], components: List[List[int]], + rotatable_bonds: List[Tuple[int, int]], lines: List[str], + pdb_map: Dict[int, str], comp_map: Dict[int, int]) -> List[str]: """ This function does a depth first search through the torsion tree Parameters ---------- - used_partions: set + used_partions: Set[int] Partitions which have already been used - current_partition: object + current_partition: int The current partition to expand - bond: object + bond: Tuple[int, int] the bond which goes from the previous partition into this partition - components: list + components: List[List[int]] List of connected components - rotatable_bonds: list - List of rotatable bonds - lines: list + rotatable_bonds: List[Tuple[int, int]] + List of rotatable bonds. This tuple is (from_atom, to_atom). + lines: List[str] List of lines to write - pdb_map: dict + pdb_map: Dict[int, str] Maps atom numbers to PDBQT lines to write - comp_map: dict + comp_map: Dict[int, int] Maps atom numbers to component numbers Returns ------- - lines: list + lines: List[str] List of lines to write. This has more appended lines. 
""" if comp_map[bond[1]] != current_partition: @@ -273,8 +303,8 @@ def _dfs(used_partitions, current_partition, bond, components, rotatable_bonds, for atom in components[current_partition]: lines.append(pdb_map[atom]) for b in rotatable_bonds: - valid, next_partition = _valid_bond(used_partitions, b, current_partition, - comp_map) + valid, next_partition = \ + _valid_bond(used_partitions, b, current_partition, comp_map) if not valid: continue lines = _dfs(used_partitions, next_partition, b, components, @@ -283,7 +313,9 @@ def _dfs(used_partitions, current_partition, bond, components, rotatable_bonds, return lines -def _valid_bond(used_partitions, bond, current_partition, comp_map): +def _valid_bond(used_partitions: Set[int], bond: Tuple[int, int], + current_partition: int, + comp_map: Dict[int, int]) -> Tuple[bool, int]: """Helper method to find next partition to explore. Used to check if a bond goes from the current partition into a @@ -291,18 +323,22 @@ def _valid_bond(used_partitions, bond, current_partition, comp_map): Parameters ---------- - used_partions: set + used_partions: Set[int] Partitions which have already been used - bond: object - the bond to check if it goes to an unexplored partition - current_partition: object - the current partition of the DFS - comp_map: dict + bond: Tuple[int, int] + The bond to check if it goes to an unexplored partition. + This tuple is (from_atom, to_atom). 
+ current_partition: int + The current partition of the DFS + comp_map: Dict[int, int] Maps atom ids to component ids Returns ------- - is_valid, next_partition + is_valid: bool + Whether to exist the next partition or not + next_partition: int + The next partition to explore """ part1 = comp_map[bond[0]] part2 = comp_map[bond[1]] @@ -312,4 +348,4 @@ def _valid_bond(used_partitions, bond, current_partition, comp_map): next_partition = part2 else: next_partition = part1 - return not next_partition in used_partitions, next_partition + return next_partition not in used_partitions, next_partition diff --git a/deepchem/utils/rdkit_util.py b/deepchem/utils/rdkit_utils.py similarity index 51% rename from deepchem/utils/rdkit_util.py rename to deepchem/utils/rdkit_utils.py index da9a6b63d136f7153c1ec657e8b3fbd0e739d8e4..cf90bb2cec4e0ce08dfda33ef4cb4b9a6fbf072e 100644 --- a/deepchem/utils/rdkit_util.py +++ b/deepchem/utils/rdkit_utils.py @@ -12,24 +12,19 @@ import logging import itertools import numpy as np from io import StringIO -from copy import deepcopy -from collections import Counter -from deepchem.utils import pdbqt_utils +from deepchem.utils.pdbqt_utils import pdbqt_to_pdb from deepchem.utils.pdbqt_utils import convert_mol_to_pdbqt from deepchem.utils.pdbqt_utils import convert_protein_to_pdbqt -from deepchem.utils.geometry_utils import angle_between -from deepchem.utils.geometry_utils import is_angle_within_cutoff -from deepchem.utils.geometry_utils import generate_random_rotation_matrix +from deepchem.utils.geometry_utils import compute_pairwise_distances +from deepchem.utils.geometry_utils import compute_centroid +from deepchem.utils.fragment_utils import MolecularFragment +from deepchem.utils.fragment_utils import MoleculeLoadException +from typing import Any, List, Tuple, Set, Optional, Dict +from deepchem.utils.typing import OneOrMany, RDKitMol logger = logging.getLogger(__name__) -class MoleculeLoadException(Exception): - - def __init__(self, *args, 
**kwargs): - Exception.__init__(*args, **kwargs) - - def get_xyz_from_mol(mol): """Extracts a numpy array of coordinates from a molecules. @@ -102,7 +97,7 @@ def apply_pdbfixer(mol, is_protein: bool, optional If false, then don't remove heterogens (since this molecule is itself a heterogen). - + Returns ------- Rdkit Mol @@ -153,14 +148,14 @@ def compute_charges(mol): This also has the side effect of calculating charges on mol. The mol passed into this function has to already have been sanitized - Params - ------ + Parameters + ---------- mol: rdkit molecule Returns ------- No return since updates in place. - + Note ---- This function requires RDKit to be installed. @@ -174,10 +169,10 @@ def compute_charges(mol): raise MoleculeLoadException(e) -def load_complex(molecular_complex, - add_hydrogens=True, - calc_charges=True, - sanitize=True): +def load_complex(molecular_complex: OneOrMany[str], + add_hydrogens: bool = True, + calc_charges: bool = True, + sanitize: bool = True) -> List[Tuple[np.ndarray, RDKitMol]]: """Loads a molecular complex. Given some representation of a molecular complex, returns a list of @@ -208,7 +203,7 @@ def load_complex(molecular_complex, This function requires RDKit to be installed. """ if isinstance(molecular_complex, str): - molecule_complex = [molecular_complex] + molecular_complex = [molecular_complex] fragments = [] for mol in molecular_complex: loaded = load_molecule( @@ -267,14 +262,14 @@ def load_molecule(molecule_file, # TODO: This is wrong. 
def compute_all_ecfp(mol: RDKitMol,
                     indices: Optional[Set[int]] = None,
                     degree: int = 2) -> Dict[int, str]:
  """Obtain molecular fragment for all atoms emanating outward to given degree.

  For each fragment, compute the canonical SMILES of the substructure
  within `degree` bonds of the atom and tag it with the central atom's
  atomic number. (Despite the name, no integer hashing happens here;
  the values are strings of the form "<atomic_num>,<smiles>".)

  Parameters
  ----------
  mol: rdkit Molecule
    Molecule to compute ecfp fragments on
  indices: Optional[Set[int]]
    List of atom indices for molecule. Default is all indices. If
    specified will only compute fragments for specified atoms.
  degree: int
    Graph degree to use when computing ECFP fingerprints

  Returns
  -------
  Dict[int, str]
    Dictionary mapping atom index to a "<atomic_num>,<smiles>" string
    describing that atom's environment.
  """
  from rdkit import Chem
  ecfp_dict = {}
  for atom_idx in range(mol.GetNumAtoms()):
    if indices is not None and atom_idx not in indices:
      continue
    # bonds within `degree` of this atom, and the submolecule they induce
    env = Chem.FindAtomEnvironmentOfRadiusN(mol, degree, atom_idx, useHs=True)
    submol = Chem.PathToSubmol(mol, env)
    smile = Chem.MolToSmiles(submol)
    ecfp_dict[atom_idx] = "%s,%s" % (
        mol.GetAtoms()[atom_idx].GetAtomicNum(), smile)
  return ecfp_dict


def compute_contact_centroid(molecular_complex: Any,
                             cutoff: float = 4.5) -> np.ndarray:
  """Computes the (x,y,z) centroid of the contact regions of this molecular complex.

  For a molecular complex, it's necessary for various featurizations
  that compute voxel grids to find a reasonable center for the
  voxelization. This function computes the centroid of all the contact
  atoms, defined as an atom that's within `cutoff` Angstroms of an
  atom from a different molecule.

  Parameters
  ----------
  molecular_complex: Object
    A representation of a molecular complex, produced by
    `rdkit_util.load_complex`.
  cutoff: float, optional
    The distance in Angstroms considered for computing contacts.

  Returns
  -------
  np.ndarray
    A numpy array of shape `(3,)` with the mean position of all
    contact atoms in the complex.
  """
  fragments = reduce_molecular_complex_to_contacts(molecular_complex, cutoff)
  coords = [frag[0] for frag in fragments]
  contact_coords = merge_molecules_xyz(coords)
  centroid = np.mean(contact_coords, axis=0)
  return centroid


def reduce_molecular_complex_to_contacts(fragments: List,
                                         cutoff: float = 4.5) -> List:
  """Reduce a molecular complex to only those atoms near a contact.

  Molecular complexes can get very large. This can make it unwieldy to
  compute functions on them. To improve memory usage, it can be very
  useful to trim out atoms that aren't close to contact regions. This
  function takes in a molecular complex and returns a new molecular
  complex representation that contains only contact atoms. The contact
  atoms are computed by calling `get_contact_atom_indices` under the
  hood.

  Parameters
  ----------
  fragments: List
    As returned by `rdkit_util.load_complex`, a list of tuples of
    `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol`
    is the rdkit molecule object.
  cutoff: float
    The cutoff distance in angstroms.

  Returns
  -------
  A list of length `len(molecular_complex)`. Each entry in this list
  is a tuple of `(coords, MolecularShim)`. The coords is stripped down
  to `(N_contact_atoms, 3)` where `N_contact_atoms` is the number of
  contact atoms for this complex. `MolecularShim` is used since it's
  tricky to make a RDKit sub-molecule.
  """
  atoms_to_keep = get_contact_atom_indices(fragments, cutoff)
  reduced_complex = []
  for frag, keep in zip(fragments, atoms_to_keep):
    reduced_complex.append(get_mol_subset(frag[0], frag[1], keep))
  return reduced_complex


def compute_ring_center(mol, ring_indices):
  """Computes 3D coordinates of a center of a given ring.

  Parameters:
  -----------
  mol: rdkit.rdchem.Mol
    Molecule containing a ring. Must have a 3D conformer.
  ring_indices: array-like
    Indices of atoms forming a ring

  Returns:
  --------
  ring_centroid: np.ndarray
    Position of a ring center
  """
  conformer = mol.GetConformer()
  ring_xyz = np.zeros((len(ring_indices), 3))
  for i, atom_idx in enumerate(ring_indices):
    ring_xyz[i] = np.array(conformer.GetAtomPosition(atom_idx))
  return compute_centroid(ring_xyz)


def get_contact_atom_indices(fragments: List, cutoff: float = 4.5) -> List:
  """Compute the atoms close to contact region.

  Molecular complexes can get very large. This can make it unwieldy to
  compute functions on them. To improve memory usage, it can be very
  useful to trim out atoms that aren't close to contact regions. This
  function computes pairwise distances between all pairs of molecules
  in the molecular complex. If an atom is within cutoff distance of
  any atom on another molecule in the complex, it is regarded as a
  contact atom. Otherwise it is trimmed.

  Parameters
  ----------
  fragments: List
    As returned by `rdkit_util.load_complex`, a list of tuples of
    `(coords, mol)` where `coords` is a `(N_atoms, 3)` array and `mol`
    is the rdkit molecule object.
  cutoff: float
    The cutoff distance in angstroms.

  Returns
  -------
  A list of length `len(molecular_complex)`. Each entry in this list
  is a list of atom indices from that molecule which should be kept, in
  sorted order.
  """
  # one set of kept indices per fragment
  keep_inds: List[Set] = [set([]) for _ in fragments]
  for (ind1, ind2) in itertools.combinations(range(len(fragments)), 2):
    frag1, frag2 = fragments[ind1], fragments[ind2]
    pairwise_distances = compute_pairwise_distances(frag1[0], frag2[0])
    # contacts is (frag1_rows, frag2_cols) of the below-cutoff entries
    contacts = np.nonzero(pairwise_distances < cutoff)
    frag1_atoms = set(int(c) for c in contacts[0].tolist())
    frag2_atoms = set(int(c) for c in contacts[1].tolist())
    keep_inds[ind1] = keep_inds[ind1].union(frag1_atoms)
    keep_inds[ind2] = keep_inds[ind2].union(frag2_atoms)
  return [sorted(list(keep)) for keep in keep_inds]


def get_mol_subset(coords, mol, atom_indices_to_keep):
  """Strip a subset of the atoms in this molecule

  Parameters
  ----------
  coords: Numpy ndarray
    Must be of shape (N, 3) and correspond to coordinates of mol.
  mol: Rdkit mol or `MolecularFragment`
    The molecule to strip
  atom_indices_to_keep: list
    List of the indices of the atoms to keep. Each index is a unique
    number between `[0, N)`.

  Returns
  -------
  A tuple of (coords, mol_frag) where coords is a Numpy array of
  coordinates with hydrogen coordinates. mol_frag is a
  `MolecularFragment`.
  """
  from rdkit import Chem
  # compute partial charges first if this is a real rdkit molecule,
  # so the fragment's atoms carry charge information
  if isinstance(mol, Chem.Mol):
    compute_charges(mol)
  indexes_to_keep = list(atom_indices_to_keep)
  atoms = list(mol.GetAtoms())
  atoms_to_keep = [atoms[index] for index in indexes_to_keep]
  coords = coords[indexes_to_keep]
  mol_frag = MolecularFragment(atoms_to_keep, coords)
  return coords, mol_frag


def compute_ring_normal(mol, ring_indices):
  """Computes normal to a plane determined by a given ring.

  Only the first three ring atoms are used; three points uniquely
  determine the ring plane (aromatic rings are planar).

  Parameters:
  -----------
  mol: rdkit.rdchem.Mol
    Molecule containing a ring. Must have a 3D conformer.
  ring_indices: array-like
    Indices of atoms forming a ring

  Returns:
  --------
  normal: np.ndarray
    Normal vector (not normalized to unit length)
  """
  conformer = mol.GetConformer()
  points = np.zeros((3, 3))
  for i, atom_idx in enumerate(ring_indices[:3]):
    points[i] = np.array(conformer.GetAtomPosition(atom_idx))

  v1 = points[1] - points[0]
  v2 = points[2] - points[0]
  return np.cross(v1, v2)
-""" -import joblib -import gzip -import json -import pickle -import pandas as pd -import numpy as np -import os -import deepchem -import warnings -from deepchem.utils.genomics import encode_bio_sequence as encode_sequence, encode_fasta_sequence as fasta_sequence, seq_one_hot_encode as seq_one_hotencode - - -def log(string, verbose=True): - """Print string if verbose.""" - if verbose: - print(string) - - -def save_to_disk(dataset, filename, compress=3): - """Save a dataset to file.""" - if filename.endswith('.joblib'): - joblib.dump(dataset, filename, compress=compress) - elif filename.endswith('.npy'): - np.save(filename, dataset) - else: - raise ValueError("Filename with unsupported extension: %s" % filename) - - -def get_input_type(input_file): - """Get type of input file. Must be csv/pkl.gz/sdf file.""" - filename, file_extension = os.path.splitext(input_file) - # If gzipped, need to compute extension again - if file_extension == ".gz": - filename, file_extension = os.path.splitext(filename) - if file_extension == ".csv": - return "csv" - elif file_extension == ".pkl": - return "pandas-pickle" - elif file_extension == ".joblib": - return "pandas-joblib" - elif file_extension == ".sdf": - return "sdf" - else: - raise ValueError("Unrecognized extension %s" % file_extension) - - -def load_data(input_files, shard_size=None, verbose=True): - """Loads data from disk. - - For CSV files, supports sharded loading for large files. 
- """ - if not len(input_files): - return - input_type = get_input_type(input_files[0]) - if input_type == "sdf": - if shard_size is not None: - log("Ignoring shard_size for sdf input.", verbose) - for value in load_sdf_files(input_files): - yield value - elif input_type == "csv": - for value in load_csv_files(input_files, shard_size, verbose=verbose): - yield value - elif input_type == "pandas-pickle": - for input_file in input_files: - yield load_pickle_from_disk(input_file) - - -def load_sdf_files(input_files, clean_mols, tasks=[]): - """Load SDF file into dataframe.""" - from rdkit import Chem - dataframes = [] - for input_file in input_files: - # Tasks are either in .sdf.csv file or in the .sdf file itself - has_csv = os.path.isfile(input_file + ".csv") - # Structures are stored in .sdf file - print("Reading structures from %s." % input_file) - suppl = Chem.SDMolSupplier(str(input_file), clean_mols, False, False) - df_rows = [] - for ind, mol in enumerate(suppl): - if mol is None: - continue - smiles = Chem.MolToSmiles(mol) - df_row = [ind, smiles, mol] - if not has_csv: # Get task targets from .sdf file - for task in tasks: - df_row.append(mol.GetProp(str(task))) - df_rows.append(df_row) - if has_csv: - mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol')) - raw_df = next(load_csv_files([input_file + ".csv"], shard_size=None)) - dataframes.append(pd.concat([mol_df, raw_df], axis=1, join='inner')) - else: - mol_df = pd.DataFrame( - df_rows, columns=('mol_id', 'smiles', 'mol') + tuple(tasks)) - dataframes.append(mol_df) - return dataframes - - -def load_csv_files(filenames, shard_size=None, verbose=True): - """Load data as pandas dataframe.""" - # First line of user-specified CSV *must* be header. 
- shard_num = 1 - for filename in filenames: - if shard_size is None: - yield pd.read_csv(filename) - else: - log("About to start loading CSV from %s" % filename, verbose) - for df in pd.read_csv(filename, chunksize=shard_size): - log("Loading shard %d of size %s." % (shard_num, str(shard_size)), - verbose) - df = df.replace(np.nan, str(""), regex=True) - shard_num += 1 - yield df - - -def seq_one_hot_encode(sequences, letters='ATCGN'): - """One hot encodes list of genomic sequences. - - Sequences encoded have shape (N_sequences, N_letters, sequence_length, 1). - These sequences will be processed as images with one color channel. - - Parameters - ---------- - sequences: np.ndarray - Array of genetic sequences - letters: str - String with the set of possible letters in the sequences. - - Raises - ------ - ValueError: - If sequences are of different lengths. - - Returns - ------- - np.ndarray: Shape (N_sequences, N_letters, sequence_length, 1). - """ - warnings.warn( - "This Function has been deprecated and now resides in deepchem.utils.genomics ", - DeprecationWarning) - return seq_one_hotencode(sequences, letters=letters) - - -def encode_fasta_sequence(fname): - """ - Loads fasta file and returns an array of one-hot sequences. - - Parameters - ---------- - fname: str - Filename of fasta file. - - Returns - ------- - np.ndarray: Shape (N_sequences, 5, sequence_length, 1). - """ - warnings.warn( - "This Function has been deprecated and now resides in deepchem.utils.genomics", - DeprecationWarning) - - return fasta_sequence(fname) - - -def encode_bio_sequence(fname, file_type="fasta", letters="ATCGN"): - """ - Loads a sequence file and returns an array of one-hot sequences. - - Parameters - ---------- - fname: str - Filename of fasta file. - file_type: str - The type of file encoding to process, e.g. fasta or fastq, this - is passed to Biopython.SeqIO.parse. - letters: str - The set of letters that the sequences consist of, e.g. ATCG. 
- - Returns - ------- - np.ndarray: Shape (N_sequences, N_letters, sequence_length, 1). - """ - warnings.warn( - "This Function has been deprecated and now resides in deepchem.utils.genomics ", - DeprecationWarning) - return encode_sequence(fname, file_type=file_type, letters=letters) - - -def save_metadata(tasks, metadata_df, data_dir): - """ - Saves the metadata for a DiskDataset - Parameters - ---------- - tasks: list of str - Tasks of DiskDataset - metadata_df: pd.DataFrame - data_dir: str - Directory to store metadata - Returns - ------- - """ - if isinstance(tasks, np.ndarray): - tasks = tasks.tolist() - metadata_filename = os.path.join(data_dir, "metadata.csv.gzip") - tasks_filename = os.path.join(data_dir, "tasks.json") - with open(tasks_filename, 'w') as fout: - json.dump(tasks, fout) - metadata_df.to_csv(metadata_filename, index=False, compression='gzip') - - -def load_from_disk(filename): - """Load a dataset from file.""" - name = filename - if os.path.splitext(name)[1] == ".gz": - name = os.path.splitext(name)[0] - extension = os.path.splitext(name)[1] - if extension == ".pkl": - return load_pickle_from_disk(filename) - elif extension == ".joblib": - return joblib.load(filename) - elif extension == ".csv": - # First line of user-specified CSV *must* be header. - df = pd.read_csv(filename, header=0) - df = df.replace(np.nan, str(""), regex=True) - return df - elif extension == ".npy": - return np.load(filename, allow_pickle=True) - else: - raise ValueError("Unrecognized filetype for %s" % filename) - - -def load_sharded_csv(filenames): - """Load a dataset from multiple files. Each file MUST have same column headers""" - dataframes = [] - for name in filenames: - placeholder_name = name - if os.path.splitext(name)[1] == ".gz": - name = os.path.splitext(name)[0] - if os.path.splitext(name)[1] == ".csv": - # First line of user-specified CSV *must* be header. 
- df = pd.read_csv(placeholder_name, header=0) - df = df.replace(np.nan, str(""), regex=True) - dataframes.append(df) - else: - raise ValueError("Unrecognized filetype for %s" % filename) - - # combine dataframes - combined_df = dataframes[0] - for i in range(0, len(dataframes) - 1): - combined_df = combined_df.append(dataframes[i + 1]) - combined_df = combined_df.reset_index(drop=True) - return combined_df - - -def load_pickle_from_disk(filename): - """Load dataset from pickle file.""" - if ".gz" in filename: - with gzip.open(filename, "rb") as f: - df = pickle.load(f) - else: - with open(filename, "rb") as f: - df = pickle.load(f) - return df - - -def load_dataset_from_disk(save_dir): - """ - Parameters - ---------- - save_dir: str - - Returns - ------- - loaded: bool - Whether the load succeeded - all_dataset: (dc.data.Dataset, dc.data.Dataset, dc.data.Dataset) - The train, valid, test datasets - transformers: list of dc.trans.Transformer - The transformers used for this dataset - - """ - - train_dir = os.path.join(save_dir, "train_dir") - valid_dir = os.path.join(save_dir, "valid_dir") - test_dir = os.path.join(save_dir, "test_dir") - if not os.path.exists(train_dir) or not os.path.exists( - valid_dir) or not os.path.exists(test_dir): - return False, None, list() - loaded = True - train = deepchem.data.DiskDataset(train_dir) - valid = deepchem.data.DiskDataset(valid_dir) - test = deepchem.data.DiskDataset(test_dir) - train.memory_cache_size = 40 * (1 << 20) # 40 MB - all_dataset = (train, valid, test) - with open(os.path.join(save_dir, "transformers.pkl"), 'rb') as f: - transformers = pickle.load(f) - return loaded, all_dataset, transformers - - -def save_dataset_to_disk(save_dir, train, valid, test, transformers): - train_dir = os.path.join(save_dir, "train_dir") - valid_dir = os.path.join(save_dir, "valid_dir") - test_dir = os.path.join(save_dir, "test_dir") - train.move(train_dir) - valid.move(valid_dir) - test.move(test_dir) - with 
open(os.path.join(save_dir, "transformers.pkl"), 'wb') as f: - pickle.dump(transformers, f) - return None +################################################################# +# save.py is out of date. You should not import any functions from here. +################################################################# + +# flake8: noqa +import logging +logger = logging.getLogger(__name__) +logger.warning("deepchem.utils.save has been deprecated.\n" + "The utilities in save.py are moved to deepchem.utils.data_utils" + " or deepchem.utils.genomics_utils.") +from deepchem.utils.data_utils import * +from deepchem.utils.genomics_utils import * diff --git a/deepchem/utils/test/1jld_ligand_docked.pdbqt b/deepchem/utils/test/data/1jld_ligand_docked.pdbqt similarity index 100% rename from deepchem/utils/test/1jld_ligand_docked.pdbqt rename to deepchem/utils/test/data/1jld_ligand_docked.pdbqt diff --git a/deepchem/utils/test/test_evaluate.py b/deepchem/utils/test/test_evaluate.py new file mode 100644 index 0000000000000000000000000000000000000000..5486485717b54d6bfb63cd04fde0f4866e5e8664 --- /dev/null +++ b/deepchem/utils/test/test_evaluate.py @@ -0,0 +1,317 @@ +"""Unit tests for evaluators.""" +import deepchem as dc +import numpy as np +import sklearn +from deepchem.utils.evaluate import Evaluator +from deepchem.utils.evaluate import GeneratorEvaluator + + +def test_multiclass_threshold_predictions(): + """Check prediction thresholding works correctly.""" + # Construct a random class probability matrix + y = np.random.rand(10, 5) + y_sums = np.sum(y, axis=1) + y = y / y_sums[:, None] + y_out = dc.metrics.threshold_predictions(y) + assert y_out.shape == (10,) + assert np.allclose(y_out, np.argmax(y, axis=1)) + + +def test_binary_threshold_predictions(): + """Check prediction thresholding works correctly.""" + # Construct a random class probability matrix + y = np.random.rand(10, 2) + y_sums = np.sum(y, axis=1) + y = y / y_sums[:, None] + y_out = dc.metrics.threshold_predictions(y, 
threshold=0.3) + assert y_out.shape == (10,) + assert np.allclose(y_out, np.where(y[:, 1] >= 0.3, np.ones(10), np.zeros(10))) + + +def test_evaluator_dc_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + evaluator = Evaluator(model, dataset, []) + metric = dc.metrics.Metric(dc.metrics.mae_score) + multitask_scores = evaluator.compute_model_performance(metric) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['mae_score'] > 0 + + +def test_multiclass_classification_singletask(): + """Test multiclass classification evaluation.""" + X = np.random.rand(100, 5) + y = np.random.randint(5, size=(100,)) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskClassifier(1, 5, n_classes=5) + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.roc_auc_score, n_classes=5) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] >= 0 + + +def test_sklearn_multiclass_classification_singletask(): + """Test multiclass classification evaluation.""" + X = np.random.rand(100, 5) + y = np.random.randint(5, size=(100,)) + dataset = dc.data.NumpyDataset(X, y) + rf = sklearn.ensemble.RandomForestClassifier(50) + model = dc.models.SklearnModel(rf) + model.fit(dataset) + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.roc_auc_score, n_classes=5) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] >= 0 + + +def test_evaluate_multiclass_classification_singletask(): + """Test multiclass classification evaluation.""" + X = np.random.rand(100, 5) + y = np.random.randint(5, size=(100,)) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskClassifier(1, 5, n_classes=5) + multitask_scores = model.evaluate( + 
dataset, dc.metrics.roc_auc_score, n_classes=5) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] >= 0 + + +def test_multitask_evaluator(): + """Test evaluation of a multitask metric.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 2, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(2, 5) + evaluator = Evaluator(model, dataset, []) + metric = dc.metrics.Metric(dc.metrics.mae_score) + multitask_scores, all_task_scores = evaluator.compute_model_performance( + metric, per_task_metrics=True) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['mae_score'] > 0 + assert isinstance(all_task_scores, dict) + assert len(multitask_scores) == 1 + + +def test_model_evaluate_dc_metric(): + """Test a model evaluate on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + metric = dc.metrics.Metric(dc.metrics.mae_score) + multitask_scores = model.evaluate(dataset, metric, []) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['mae_score'] > 0 + + +def test_multitask_model_evaluate_sklearn(): + """Test evaluation of a multitask metric.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 2) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(2, 5) + evaluator = Evaluator(model, dataset, []) + multitask_scores, all_task_scores = evaluator.compute_model_performance( + dc.metrics.mean_absolute_error, per_task_metrics=True) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['metric-1'] > 0 + assert isinstance(all_task_scores, dict) + assert len(multitask_scores) == 1 + + +def test_multitask_model_evaluate(): + """Test evaluation of a multitask metric.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 2) + dataset = 
dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(2, 5) + multitask_scores, all_task_scores = model.evaluate( + dataset, dc.metrics.mean_absolute_error, per_task_metrics=True) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] > 0 + assert isinstance(all_task_scores, dict) + + +def test_evaluator_dc_multi_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + evaluator = Evaluator(model, dataset, []) + metric1 = dc.metrics.Metric(dc.metrics.mae_score, n_tasks=2) + metric2 = dc.metrics.Metric(dc.metrics.r2_score, n_tasks=2) + multitask_scores = evaluator.compute_model_performance([metric1, metric2]) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 2 + assert multitask_scores['mae_score'] > 0 + assert "r2_score" in multitask_scores + + +def test_model_evaluate_dc_multi_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + metric1 = dc.metrics.Metric(dc.metrics.mae_score) + metric2 = dc.metrics.Metric(dc.metrics.r2_score) + multitask_scores = model.evaluate(dataset, [metric1, metric2]) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 2 + assert multitask_scores['mae_score'] > 0 + assert "r2_score" in multitask_scores + + +def test_generator_evaluator_dc_metric_multitask_single_point(): + """Test generator evaluator on a generator.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + generator = model.default_generator(dataset, pad_batches=False) + evaluator = GeneratorEvaluator(model, generator, []) + metric = dc.metrics.Metric(dc.metrics.mae_score) + multitask_scores = 
evaluator.compute_model_performance(metric) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['mae_score'] > 0 + assert len(multitask_scores) == 1 + + +def test_evaluator_sklearn_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.mean_absolute_error) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + # Note that since no name as provided, metrics are index by order + # given. + assert multitask_scores['metric-1'] > 0 + + +def test_generator_evaluator_dc_metric_multitask(): + """Test generator evaluator on a generator.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + generator = model.default_generator(dataset, pad_batches=False) + evaluator = GeneratorEvaluator(model, generator, []) + metric = dc.metrics.Metric(dc.metrics.mae_score) + multitask_scores = evaluator.compute_model_performance(metric) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + assert multitask_scores['mae_score'] > 0 + + +def test_model_evaluate_sklearn_metric(): + """Test a model evaluate on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + multitask_scores = model.evaluate(dataset, dc.metrics.mean_absolute_error) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores) == 1 + # Note that since no name as provided, metrics are index by order + # given. 
+ assert multitask_scores['metric-1'] > 0 + + +def test_evaluator_sklearn_multi_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + [dc.metrics.mean_absolute_error, dc.metrics.r2_score]) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores.keys()) == 2 + # Note that since no name as provided, metrics are index by order + # given. + assert multitask_scores['metric-1'] > 0 + assert "metric-2" in multitask_scores + + +def test_model_evaluate_sklearn_multi_metric(): + """Test an evaluator on a dataset.""" + X = np.random.rand(10, 5) + y = np.random.rand(10, 1) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.MultitaskRegressor(1, 5) + multitask_scores = model.evaluate( + dataset, [dc.metrics.mean_absolute_error, dc.metrics.r2_score]) + assert isinstance(multitask_scores, dict) + assert len(multitask_scores.keys()) == 2 + # Note that since no name as provided, metrics are index by order + # given. 
+ assert multitask_scores['metric-1'] > 0 + assert "metric-2" in multitask_scores + + +def test_gc_binary_classification(): + """Test multiclass classification evaluation.""" + smiles = ["C", "CC"] + featurizer = dc.feat.ConvMolFeaturizer() + X = featurizer.featurize(smiles) + y = np.random.randint(2, size=(len(smiles),)) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.GraphConvModel(1, mode="classification") + # TODO: Fix this case with correct thresholding + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.accuracy_score, n_classes=2) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] >= 0 + + +def test_gc_binary_kappa_classification(): + """Test multiclass classification evaluation.""" + np.random.seed(1234) + smiles = ["C", "CC", "CO", "CCC", "CCCC"] + featurizer = dc.feat.ConvMolFeaturizer() + X = featurizer.featurize(smiles) + y = np.random.randint(2, size=(len(smiles),)) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.GraphConvModel(1, mode="classification") + # TODO: Fix this case with correct thresholding + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.kappa_score, n_classes=2) + assert len(multitask_scores) == 1 + assert multitask_scores["metric-1"] <= 1 + assert multitask_scores["metric-1"] >= -1 + + +def test_gc_multiclass_classification(): + """Test multiclass classification evaluation.""" + np.random.seed(1234) + smiles = ["C", "CC"] + featurizer = dc.feat.ConvMolFeaturizer() + X = featurizer.featurize(smiles) + y = np.random.randint(5, size=(len(smiles),)) + dataset = dc.data.NumpyDataset(X, y) + model = dc.models.GraphConvModel(1, mode="classification", n_classes=5) + evaluator = Evaluator(model, dataset, []) + multitask_scores = evaluator.compute_model_performance( + dc.metrics.accuracy_score, n_classes=5) + assert len(multitask_scores) == 1 + assert 
multitask_scores["metric-1"] >= 0 diff --git a/deepchem/utils/test/test_fragment_util.py b/deepchem/utils/test/test_fragment_util.py deleted file mode 100644 index 78c7b21d8511bb6f9e88c4e2cba6dac30b5d934b..0000000000000000000000000000000000000000 --- a/deepchem/utils/test/test_fragment_util.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import unittest -from deepchem.utils import rdkit_util -from deepchem.utils.fragment_util import get_contact_atom_indices - - -class TestFragmentUtil(unittest.TestCase): - - def setUp(self): - # TODO test more formats for ligand - current_dir = os.path.dirname(os.path.realpath(__file__)) - self.protein_file = os.path.join( - current_dir, '../../feat/tests/3ws9_protein_fixer_rdkit.pdb') - self.ligand_file = os.path.join(current_dir, - '../../feat/tests/3ws9_ligand.sdf') - - def test_get_contact_atom_indices(self): - complexes = rdkit_util.load_complex([self.protein_file, self.ligand_file]) - contact_indices = get_contact_atom_indices(complexes) - assert len(contact_indices) == 2 diff --git a/deepchem/utils/test/test_fragment_utils.py b/deepchem/utils/test/test_fragment_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..714bddfaeb445b3e5c10b6cdeab3b3036ccddfef --- /dev/null +++ b/deepchem/utils/test/test_fragment_utils.py @@ -0,0 +1,62 @@ +import os +import unittest +import numpy as np +from deepchem.utils import rdkit_utils +from deepchem.utils.fragment_utils import get_contact_atom_indices +from deepchem.utils.fragment_utils import merge_molecular_fragments +from deepchem.utils.fragment_utils import get_partial_charge +from deepchem.utils.fragment_utils import strip_hydrogens +from deepchem.utils.fragment_utils import MolecularFragment +from deepchem.utils.fragment_utils import AtomShim + + +class TestFragmentUtil(unittest.TestCase): + + def setUp(self): + # TODO test more formats for ligand + current_dir = os.path.dirname(os.path.realpath(__file__)) + self.protein_file = os.path.join( + current_dir, 
'../../feat/tests/data/3ws9_protein_fixer_rdkit.pdb') + self.ligand_file = os.path.join(current_dir, + '../../feat/tests/data/3ws9_ligand.sdf') + + def test_get_contact_atom_indices(self): + complexes = rdkit_utils.load_complex([self.protein_file, self.ligand_file]) + contact_indices = get_contact_atom_indices(complexes) + assert len(contact_indices) == 2 + + def test_create_molecular_fragment(self): + mol_xyz, mol_rdk = rdkit_utils.load_molecule(self.ligand_file) + fragment = MolecularFragment(mol_rdk.GetAtoms(), mol_xyz) + assert len(mol_rdk.GetAtoms()) == len(fragment.GetAtoms()) + assert (fragment.GetCoords() == mol_xyz).all() + + def test_strip_hydrogens(self): + mol_xyz, mol_rdk = rdkit_utils.load_molecule(self.ligand_file) + _ = MolecularFragment(mol_rdk.GetAtoms(), mol_xyz) + + # Test on RDKit + _ = strip_hydrogens(mol_xyz, mol_rdk) + + def test_merge_molecular_fragments(self): + mol_xyz, mol_rdk = rdkit_utils.load_molecule(self.ligand_file) + fragment1 = MolecularFragment(mol_rdk.GetAtoms(), mol_xyz) + fragment2 = MolecularFragment(mol_rdk.GetAtoms(), mol_xyz) + joint = merge_molecular_fragments([fragment1, fragment2]) + assert len(mol_rdk.GetAtoms()) * 2 == len(joint.GetAtoms()) + + def test_get_partial_charge(self): + from rdkit import Chem + mol = Chem.MolFromSmiles("CC") + atom = mol.GetAtoms()[0] + partial_charge = get_partial_charge(atom) + assert partial_charge == 0 + + def test_atom_shim(self): + atomic_num = 5 + partial_charge = 1 + atom_coords = np.array([0., 1., 2.]) + shim = AtomShim(atomic_num, partial_charge, atom_coords) + assert shim.GetAtomicNum() == atomic_num + assert shim.GetPartialCharge() == partial_charge + assert (shim.GetCoords() == atom_coords).all() diff --git a/deepchem/utils/test/test_generator_evaluator.py b/deepchem/utils/test/test_generator_evaluator.py index ae41d5c5601feab948cf8b8dac44e96ac884ee7e..b25f1dbad7853e24d669d34b279dd887a177aed4 100644 --- a/deepchem/utils/test/test_generator_evaluator.py +++ 
b/deepchem/utils/test/test_generator_evaluator.py @@ -1,5 +1,3 @@ -from unittest import TestCase - import numpy as np import tensorflow as tf from flaky import flaky @@ -9,100 +7,101 @@ import deepchem as dc from deepchem.data import NumpyDataset -class TestGeneratorEvaluator(TestCase): - - @flaky - def test_compute_model_performance_multitask_classifier(self): - n_data_points = 20 - n_features = 1 - n_tasks = 2 - n_classes = 2 - - X = np.ones(shape=(n_data_points // 2, n_features)) * -1 - X1 = np.ones(shape=(n_data_points // 2, n_features)) - X = np.concatenate((X, X1)) - class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))]) - class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))]) - y1 = np.concatenate((class_0, class_1)) - y2 = np.concatenate((class_1, class_0)) - y = np.stack([y1, y2], axis=1) - dataset = NumpyDataset(X, y) - - features = layers.Input(shape=(n_data_points // 2, n_features)) - dense = layers.Dense(n_tasks * n_classes)(features) - logits = layers.Reshape((n_tasks, n_classes))(dense) - output = layers.Softmax()(logits) - keras_model = tf.keras.Model(inputs=features, outputs=[output, logits]) - model = dc.models.KerasModel( - keras_model, - dc.models.losses.SoftmaxCrossEntropy(), - output_types=['prediction', 'loss'], - learning_rate=0.01, - batch_size=n_data_points) - - model.fit(dataset, nb_epoch=1000) - metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, np.mean, mode="classification") - - scores = model.evaluate_generator( - model.default_generator(dataset), [metric], per_task_metrics=True) - scores = list(scores[1].values()) - # Loosening atol to see if tests stop failing sporadically - assert np.all(np.isclose(scores, [1.0, 1.0], atol=0.50)) - - def test_compute_model_performance_singletask_classifier(self): - n_data_points = 20 - n_features = 10 - - X = np.ones(shape=(int(n_data_points / 2), n_features)) * -1 - X1 = np.ones(shape=(int(n_data_points / 2), n_features)) - X = np.concatenate((X, X1)) - 
class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))]) - class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))]) - y = np.concatenate((class_0, class_1)) - dataset = NumpyDataset(X, y) - - features = layers.Input(shape=(n_features,)) - dense = layers.Dense(2)(features) - output = layers.Softmax()(dense) - keras_model = tf.keras.Model(inputs=features, outputs=[output]) - model = dc.models.KerasModel( - keras_model, dc.models.losses.SoftmaxCrossEntropy(), learning_rate=0.1) - - model.fit(dataset, nb_epoch=1000) - metric = dc.metrics.Metric( - dc.metrics.roc_auc_score, np.mean, mode="classification") - - scores = model.evaluate_generator( - model.default_generator(dataset), [metric], per_task_metrics=True) - scores = list(scores[1].values()) - assert np.isclose(scores, [1.0], atol=0.05) - - def test_compute_model_performance_multitask_regressor(self): - random_seed = 42 - n_data_points = 20 - n_features = 2 - n_tasks = 2 - np.random.seed(seed=random_seed) - - X = np.random.rand(n_data_points, n_features) - y1 = np.array([0.5 for x in range(n_data_points)]) - y2 = np.array([-0.5 for x in range(n_data_points)]) - y = np.stack([y1, y2], axis=1) - dataset = NumpyDataset(X, y) - - features = layers.Input(shape=(n_features,)) - dense = layers.Dense(n_tasks)(features) - keras_model = tf.keras.Model(inputs=features, outputs=[dense]) - model = dc.models.KerasModel( - keras_model, dc.models.losses.L2Loss(), learning_rate=0.1) - - model.fit(dataset, nb_epoch=1000) - metric = [ - dc.metrics.Metric( - dc.metrics.mean_absolute_error, np.mean, mode="regression"), - ] - scores = model.evaluate_generator( - model.default_generator(dataset), metric, per_task_metrics=True) - scores = list(scores[1].values()) - assert np.all(np.isclose(scores, [0.0, 0.0], atol=1.0)) +@flaky +def test_compute_model_performance_multitask_classifier(): + n_data_points = 20 + n_features = 1 + n_tasks = 2 + n_classes = 2 + + X = np.ones(shape=(n_data_points // 2, n_features)) 
* -1 + X1 = np.ones(shape=(n_data_points // 2, n_features)) + X = np.concatenate((X, X1)) + class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))]) + class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))]) + y1 = np.concatenate((class_0, class_1)) + y2 = np.concatenate((class_1, class_0)) + y = np.stack([y1, y2], axis=1) + dataset = NumpyDataset(X, y) + + features = layers.Input(shape=(n_features)) + dense = layers.Dense(n_tasks * n_classes)(features) + logits = layers.Reshape((n_tasks, n_classes))(dense) + output = layers.Softmax()(logits) + keras_model = tf.keras.Model(inputs=features, outputs=[output, logits]) + model = dc.models.KerasModel( + keras_model, + dc.models.losses.SoftmaxCrossEntropy(), + output_types=['prediction', 'loss'], + learning_rate=0.01, + batch_size=n_data_points) + + model.fit(dataset, nb_epoch=1000) + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + + scores = model.evaluate_generator( + model.default_generator(dataset), [metric], per_task_metrics=True) + scores = list(scores[1].values()) + # Loosening atol to see if tests stop failing sporadically + assert np.all(np.isclose(scores, [1.0, 1.0], atol=0.50)) + + +def test_compute_model_performance_singletask_classifier(): + """Computes model performance on singletask dataset with one-hot label encoding.""" + n_data_points = 20 + n_features = 10 + + X = np.ones(shape=(int(n_data_points / 2), n_features)) * -1 + X1 = np.ones(shape=(int(n_data_points / 2), n_features)) + X = np.concatenate((X, X1)) + class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))]) + class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))]) + y = np.concatenate((class_0, class_1)) + dataset = NumpyDataset(X, y) + + features = layers.Input(shape=(n_features,)) + dense = layers.Dense(2)(features) + output = layers.Softmax()(dense) + keras_model = tf.keras.Model(inputs=features, outputs=[output]) + model = 
dc.models.KerasModel( + keras_model, dc.models.losses.SoftmaxCrossEntropy(), learning_rate=0.1) + + model.fit(dataset, nb_epoch=1000) + metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification", n_tasks=1) + + scores = model.evaluate_generator( + model.default_generator(dataset), [metric], per_task_metrics=True) + scores = list(scores[1].values()) + assert np.isclose(scores, [1.0], atol=0.05) + + +def test_compute_model_performance_multitask_regressor(): + random_seed = 42 + n_data_points = 20 + n_features = 2 + n_tasks = 2 + np.random.seed(seed=random_seed) + + X = np.random.rand(n_data_points, n_features) + y1 = np.array([0.5 for x in range(n_data_points)]) + y2 = np.array([-0.5 for x in range(n_data_points)]) + y = np.stack([y1, y2], axis=1) + dataset = NumpyDataset(X, y) + + features = layers.Input(shape=(n_features,)) + dense = layers.Dense(n_tasks)(features) + keras_model = tf.keras.Model(inputs=features, outputs=[dense]) + model = dc.models.KerasModel( + keras_model, dc.models.losses.L2Loss(), learning_rate=0.1) + + model.fit(dataset, nb_epoch=1000) + metric = [ + dc.metrics.Metric( + dc.metrics.mean_absolute_error, np.mean, mode="regression"), + ] + scores = model.evaluate_generator( + model.default_generator(dataset), metric, per_task_metrics=True) + scores = list(scores[1].values()) + assert np.all(np.isclose(scores, [0.0, 0.0], atol=1.0)) diff --git a/deepchem/utils/test/test_seq.py b/deepchem/utils/test/test_genomics_utils.py similarity index 83% rename from deepchem/utils/test/test_seq.py rename to deepchem/utils/test/test_genomics_utils.py index 579d2dc58627cea30ea9ab35b68fb19e457bf1a4..39baad299e80125b8d3bd67cd2223cdee4944b2f 100644 --- a/deepchem/utils/test/test_seq.py +++ b/deepchem/utils/test/test_genomics_utils.py @@ -25,7 +25,7 @@ class TestSeq(unittest.TestCase): def test_one_hot_simple(self): sequences = np.array(["ACGT", "GATA", "CGCG"]) - sequences = dc.utils.save.seq_one_hot_encode(sequences) + sequences = 
dc.utils.genomics_utils.seq_one_hot_encode(sequences) self.assertEqual(sequences.shape, (3, 5, 4, 1)) def test_one_hot_mismatch(self): @@ -34,13 +34,14 @@ class TestSeq(unittest.TestCase): with self.assertRaises(ValueError): sequences = np.array(["ACGTA", "GATA", "CGCG"]) - sequences = dc.utils.save.seq_one_hot_encode(sequences) + sequences = dc.utils.genomics_utils.seq_one_hot_encode(sequences) def test_encode_fasta_sequence(self): # Test it's possible to load a sequence with an aribrary alphabet from a fasta file. fname = os.path.join(self.current_dir, "./data/example.fasta") - encoded_seqs = dc.utils.save.encode_bio_sequence(fname, letters=LETTERS) + encoded_seqs = dc.utils.genomics_utils.encode_bio_sequence( + fname, letters=LETTERS) expected = np.expand_dims( np.array([ [[1, 0], [0, 1], [0, 0]], @@ -52,7 +53,7 @@ class TestSeq(unittest.TestCase): def test_encode_fastq_sequence(self): fname = os.path.join(self.current_dir, "./data/example.fastq") - encoded_seqs = dc.utils.save.encode_bio_sequence( + encoded_seqs = dc.utils.genomics_utils.encode_bio_sequence( fname, file_type="fastq", letters=LETTERS) expected = np.expand_dims( diff --git a/deepchem/utils/test/test_molecule_feature_utils.py b/deepchem/utils/test/test_molecule_feature_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..f29b77baacac851efdbd0e904f1efc91ddc4fd91 --- /dev/null +++ b/deepchem/utils/test/test_molecule_feature_utils.py @@ -0,0 +1,179 @@ +import unittest + +from deepchem.utils.molecule_feature_utils import one_hot_encode +from deepchem.utils.molecule_feature_utils import get_atom_type_one_hot +from deepchem.utils.molecule_feature_utils import construct_hydrogen_bonding_info +from deepchem.utils.molecule_feature_utils import get_atom_hydrogen_bonding_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_hybridization_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_total_num_Hs_one_hot +from deepchem.utils.molecule_feature_utils 
import get_atom_is_in_aromatic_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_chirality_one_hot +from deepchem.utils.molecule_feature_utils import get_atom_formal_charge +from deepchem.utils.molecule_feature_utils import get_atom_partial_charge +from deepchem.utils.molecule_feature_utils import get_atom_total_degree_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_type_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_in_same_ring_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_is_conjugated_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_stereo_one_hot +from deepchem.utils.molecule_feature_utils import get_bond_graph_distance_one_hot + + +class TestGraphConvUtils(unittest.TestCase): + + def setUp(self): + from rdkit import Chem + self.mol = Chem.MolFromSmiles("CN=C=O") # methyl isocyanate + self.mol_copper_sulfate = Chem.MolFromSmiles("[Cu+2].[O-]S(=O)(=O)[O-]") + self.mol_benzene = Chem.MolFromSmiles("c1ccccc1") + self.mol_s_alanine = Chem.MolFromSmiles("N[C@@H](C)C(=O)O") + + def test_one_hot_encode(self): + # string set + assert one_hot_encode("a", ["a", "b", "c"]) == [1.0, 0.0, 0.0] + # integer set + assert one_hot_encode(2, [0.0, 1, 2]) == [0.0, 0.0, 1.0] + # include_unknown_set is False + assert one_hot_encode(3, [0.0, 1, 2]) == [0.0, 0.0, 0.0] + # include_unknown_set is True + assert one_hot_encode(3, [0.0, 1, 2], True) == [0.0, 0.0, 0.0, 1.0] + + def test_get_atom_type_one_hot(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_type_one_hot(atoms[0]) + assert one_hot == [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + + # check unknown atoms + atoms = self.mol_copper_sulfate.GetAtoms() + assert atoms[0].GetSymbol() == "Cu" + one_hot = get_atom_type_one_hot(atoms[0]) + assert one_hot == [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] + one_hot = get_atom_type_one_hot(atoms[0], include_unknown_set=False) + 
assert one_hot == [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + + # check original set + atoms = self.mol.GetAtoms() + assert atoms[1].GetSymbol() == "N" + original_set = ["C", "O", "N"] + one_hot = get_atom_type_one_hot(atoms[1], allowable_set=original_set) + assert one_hot == [0.0, 0.0, 1.0, 0.0] + + def test_construct_hydrogen_bonding_info(self): + info = construct_hydrogen_bonding_info(self.mol) + assert isinstance(info, list) + assert isinstance(info[0], tuple) + # Generally, =O behaves as an electron acceptor + assert info[0] == (3, "Acceptor") + + def test_get_atom_hydrogen_bonding_one_hot(self): + info = construct_hydrogen_bonding_info(self.mol) + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_hydrogen_bonding_one_hot(atoms[0], info) + assert one_hot == [0.0, 0.0] + + assert atoms[3].GetSymbol() == "O" + one_hot = get_atom_hydrogen_bonding_one_hot(atoms[3], info) + assert one_hot == [0.0, 1.0] + + def test_get_atom_is_in_aromatic_one_hot(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_is_in_aromatic_one_hot(atoms[0]) + assert one_hot == [0.0] + + atoms = self.mol_benzene.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_is_in_aromatic_one_hot(atoms[0]) + assert one_hot == [1.0] + + def test_get_atom_hybridization_one_hot(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_hybridization_one_hot(atoms[0]) + assert one_hot == [0.0, 0.0, 1.0] + + def test_get_atom_total_num_Hs_one_hot(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_total_num_Hs_one_hot(atoms[0]) + assert one_hot == [0.0, 0.0, 0.0, 1.0, 0.0, 0.0] + assert atoms[3].GetSymbol() == "O" + one_hot = get_atom_total_num_Hs_one_hot(atoms[3]) + assert one_hot == [1.0, 0.0, 0.0, 0.0, 0.0, 0.0] + + def test_get_atom_chirality_one_hot(self): + atoms = self.mol_s_alanine.GetAtoms() + assert atoms[0].GetSymbol() == 
"N" + one_hot = get_atom_chirality_one_hot(atoms[0]) + assert one_hot == [0.0, 0.0] + assert atoms[1].GetSymbol() == "C" + one_hot = get_atom_chirality_one_hot(atoms[1]) + assert one_hot == [0.0, 1.0] + + def test_get_atom_formal_charge(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + formal_charge = get_atom_formal_charge(atoms[0]) + assert formal_charge == [0.0] + + def test_get_atom_partial_charge(self): + from rdkit.Chem import AllChem + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + with self.assertRaises(KeyError): + get_atom_partial_charge(atoms[0]) + + # we must compute partial charges before using `get_atom_partial_charge` + AllChem.ComputeGasteigerCharges(self.mol) + partial_charge = get_atom_partial_charge(atoms[0]) + assert len(partial_charge) == 1.0 + assert isinstance(partial_charge[0], float) + + def test_get_atom_total_degree_one_hot(self): + atoms = self.mol.GetAtoms() + assert atoms[0].GetSymbol() == "C" + one_hot = get_atom_total_degree_one_hot(atoms[0]) + assert one_hot == [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0] + + assert atoms[3].GetSymbol() == "O" + one_hot = get_atom_total_degree_one_hot(atoms[3]) + assert one_hot == [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0] + + def test_get_bond_type_one_hot(self): + bonds = self.mol.GetBonds() + one_hot = get_bond_type_one_hot(bonds[0]) + # The C-N bond is a single bond + assert bonds[0].GetBeginAtomIdx() == 0.0 + assert bonds[0].GetEndAtomIdx() == 1.0 + assert one_hot == [1.0, 0.0, 0.0, 0.0] + + def test_get_bond_is_in_same_ring_one_hot(self): + bonds = self.mol.GetBonds() + one_hot = get_bond_is_in_same_ring_one_hot(bonds[0]) + assert one_hot == [0.0] + + bonds = self.mol_benzene.GetBonds() + one_hot = get_bond_is_in_same_ring_one_hot(bonds[0]) + assert one_hot == [1.0] + + def test_get_bond_is_conjugated_one_hot(self): + bonds = self.mol.GetBonds() + one_hot = get_bond_is_conjugated_one_hot(bonds[0]) + assert one_hot == [0.0] + + bonds = self.mol_benzene.GetBonds() + 
one_hot = get_bond_is_conjugated_one_hot(bonds[0]) + assert one_hot == [1.0] + + def test_get_bond_stereo_one_hot(self): + bonds = self.mol.GetBonds() + one_hot = get_bond_stereo_one_hot(bonds[0]) + assert one_hot == [1.0, 0.0, 0.0, 0.0, 0.0] + + def test_get_bond_graph_distance_one_hot(self): + from rdkit import Chem + bonds = self.mol.GetBonds() + dist_matrix = Chem.GetDistanceMatrix(self.mol) + one_hot = get_bond_graph_distance_one_hot(bonds[0], dist_matrix) + assert one_hot == [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] diff --git a/deepchem/utils/test/test_noncovalent_utils.py b/deepchem/utils/test/test_noncovalent_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..21161718cf76500bb66bf4f1546f7ca1023216ff --- /dev/null +++ b/deepchem/utils/test/test_noncovalent_utils.py @@ -0,0 +1,144 @@ +import os +import unittest +import numpy as np +from deepchem.utils.rdkit_utils import load_molecule +from deepchem.utils.rdkit_utils import compute_ring_center +from deepchem.utils.rdkit_utils import compute_ring_normal +from deepchem.utils.noncovalent_utils import is_pi_parallel +from deepchem.utils.noncovalent_utils import is_pi_t +from deepchem.utils.noncovalent_utils import compute_pi_stack +from deepchem.utils.noncovalent_utils import is_cation_pi +from deepchem.utils.noncovalent_utils import compute_cation_pi +from deepchem.utils.noncovalent_utils import compute_binding_pocket_cation_pi + + +class TestPiInteractions(unittest.TestCase): + + def setUp(self): + current_dir = os.path.dirname(os.path.realpath(__file__)) + + # simple flat ring + from rdkit.Chem import MolFromSmiles + from rdkit.Chem.rdDepictor import Compute2DCoords + self.cycle4 = MolFromSmiles('C1CCC1') + # self.cycle4.Compute2DCoords() + Compute2DCoords(self.cycle4) + + # load and sanitize two real molecules + _, self.prot = load_molecule( + os.path.join(current_dir, + '../../feat/tests/data/3ws9_protein_fixer_rdkit.pdb'), + add_hydrogens=False, + calc_charges=False, + 
sanitize=True) + + _, self.lig = load_molecule( + os.path.join(current_dir, '../../feat//tests/data/3ws9_ligand.sdf'), + add_hydrogens=False, + calc_charges=False, + sanitize=True) + + def test_compute_ring_center(self): + self.assertTrue(np.allclose(compute_ring_center(self.cycle4, range(4)), 0)) + + def test_compute_ring_normal(self): + normal = compute_ring_normal(self.cycle4, range(4)) + self.assertTrue( + np.allclose(np.abs(normal / np.linalg.norm(normal)), [0, 0, 1])) + + def test_is_pi_parallel(self): + ring1_center = np.array([0.0, 0.0, 0.0]) + ring2_center_true = np.array([4.0, 0.0, 0.0]) + ring2_center_false = np.array([10.0, 0.0, 0.0]) + ring1_normal_true = np.array([1.0, 0.0, 0.0]) + ring1_normal_false = np.array([0.0, 1.0, 0.0]) + + for ring2_normal in (np.array([2.0, 0, 0]), np.array([-3.0, 0, 0])): + # parallel normals + self.assertTrue( + is_pi_parallel(ring1_center, ring1_normal_true, ring2_center_true, + ring2_normal)) + # perpendicular normals + self.assertFalse( + is_pi_parallel(ring1_center, ring1_normal_false, ring2_center_true, + ring2_normal)) + # too far away + self.assertFalse( + is_pi_parallel(ring1_center, ring1_normal_true, ring2_center_false, + ring2_normal)) + + def test_is_pi_t(self): + ring1_center = np.array([0.0, 0.0, 0.0]) + ring2_center_true = np.array([4.0, 0.0, 0.0]) + ring2_center_false = np.array([10.0, 0.0, 0.0]) + ring1_normal_true = np.array([0.0, 1.0, 0.0]) + ring1_normal_false = np.array([1.0, 0.0, 0.0]) + + for ring2_normal in (np.array([2.0, 0, 0]), np.array([-3.0, 0, 0])): + # perpendicular normals + self.assertTrue( + is_pi_t(ring1_center, ring1_normal_true, ring2_center_true, + ring2_normal)) + # parallel normals + self.assertFalse( + is_pi_t(ring1_center, ring1_normal_false, ring2_center_true, + ring2_normal)) + # too far away + self.assertFalse( + is_pi_t(ring1_center, ring1_normal_true, ring2_center_false, + ring2_normal)) + + def test_compute_pi_stack(self): + # order of the molecules shouldn't matter + dicts1 
= compute_pi_stack(self.prot, self.lig) + dicts2 = compute_pi_stack(self.lig, self.prot) + for i, j in ((0, 2), (1, 3)): + self.assertEqual(dicts1[i], dicts2[j]) + self.assertEqual(dicts1[j], dicts2[i]) + + # with this criteria we should find both types of stacking + for d in compute_pi_stack( + self.lig, self.prot, dist_cutoff=7, angle_cutoff=40.): + self.assertGreater(len(d), 0) + + def test_is_cation_pi(self): + cation_position = np.array([[2.0, 0.0, 0.0]]) + ring_center_true = np.array([4.0, 0.0, 0.0]) + ring_center_false = np.array([10.0, 0.0, 0.0]) + ring_normal_true = np.array([1.0, 0.0, 0.0]) + ring_normal_false = np.array([0.0, 1.0, 0.0]) + + # parallel normals + self.assertTrue( + is_cation_pi(cation_position, ring_center_true, ring_normal_true)) + # perpendicular normals + self.assertFalse( + is_cation_pi(cation_position, ring_center_true, ring_normal_false)) + # too far away + self.assertFalse( + is_cation_pi(cation_position, ring_center_false, ring_normal_true)) + + # def test_compute_cation_pi(self): + # # TODO(rbharath): find better example, currently dicts are empty + # dicts1 = compute_cation_pi(self.prot, self.lig) + # dicts2 = compute_cation_pi(self.lig, self.prot) + + def test_compute_binding_pocket_cation_pi(self): + # TODO find better example, currently dicts are empty + prot_dict, lig_dict = compute_binding_pocket_cation_pi(self.prot, self.lig) + + exp_prot_dict, exp_lig_dict = compute_cation_pi(self.prot, self.lig) + add_lig, add_prot = compute_cation_pi(self.lig, self.prot) + for exp_dict, to_add in ((exp_prot_dict, add_prot), (exp_lig_dict, + add_lig)): + for atom_idx, count in to_add.items(): + if atom_idx not in exp_dict: + exp_dict[atom_idx] = count + else: + exp_dict[atom_idx] += count + + self.assertEqual(prot_dict, exp_prot_dict) + self.assertEqual(lig_dict, exp_lig_dict) + + def test_compute_hydrogen_bonds(self): + pass diff --git a/deepchem/utils/test/test_pdbqt_utils.py b/deepchem/utils/test/test_pdbqt_utils.py index 
f1378c7d9397b16da54a8366d80cb4b59c6f66b5..2c85291794ef94dac25a50a60dd28d0bd594d7ef 100644 --- a/deepchem/utils/test/test_pdbqt_utils.py +++ b/deepchem/utils/test/test_pdbqt_utils.py @@ -1,7 +1,7 @@ import unittest import os import tempfile -from deepchem.utils import rdkit_util +from deepchem.utils import rdkit_utils from deepchem.utils import pdbqt_utils @@ -16,20 +16,20 @@ class TestPDBQTUtils(unittest.TestCase): def test_pdbqt_to_pdb(self): """Test that a PDBQT molecule can be converted back in to PDB.""" - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( self.protein_file, calc_charges=False, add_hydrogens=False) with tempfile.TemporaryDirectory() as tmp: out_pdb = os.path.join(tmp, "mol.pdb") out_pdbqt = os.path.join(tmp, "mol.pdbqt") - rdkit_util.write_molecule(mol, out_pdb, is_protein=True) - rdkit_util.write_molecule(mol, out_pdbqt, is_protein=True) + rdkit_utils.write_molecule(mol, out_pdb, is_protein=True) + rdkit_utils.write_molecule(mol, out_pdbqt, is_protein=True) pdb_block = pdbqt_utils.pdbqt_to_pdb(out_pdbqt) from rdkit import Chem pdb_mol = Chem.MolFromPDBBlock(pdb_block, sanitize=False, removeHs=False) - xyz, pdbqt_mol = rdkit_util.load_molecule( + xyz, pdbqt_mol = rdkit_utils.load_molecule( out_pdbqt, add_hydrogens=False, calc_charges=False) assert pdb_mol.GetNumAtoms() == pdbqt_mol.GetNumAtoms() @@ -41,7 +41,7 @@ class TestPDBQTUtils(unittest.TestCase): def test_convert_mol_to_pdbqt(self): """Test that a ligand molecule can be coverted to PDBQT.""" from rdkit import Chem - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( self.ligand_file, calc_charges=False, add_hydrogens=False) with tempfile.TemporaryDirectory() as tmp: outfile = os.path.join(tmp, "mol.pdbqt") @@ -49,7 +49,7 @@ class TestPDBQTUtils(unittest.TestCase): writer.write(mol) writer.close() pdbqt_utils.convert_mol_to_pdbqt(mol, outfile) - pdbqt_xyz, pdbqt_mol = rdkit_util.load_molecule( + pdbqt_xyz, pdbqt_mol = 
rdkit_utils.load_molecule( outfile, add_hydrogens=False, calc_charges=False) assert pdbqt_mol.GetNumAtoms() == pdbqt_mol.GetNumAtoms() for atom_idx in range(pdbqt_mol.GetNumAtoms()): @@ -60,7 +60,7 @@ class TestPDBQTUtils(unittest.TestCase): def test_convert_protein_to_pdbqt(self): """Test a protein in a PDB can be converted to PDBQT.""" from rdkit import Chem - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( self.protein_file, calc_charges=False, add_hydrogens=False) with tempfile.TemporaryDirectory() as tmp: outfile = os.path.join(tmp, "mol.pdbqt") @@ -68,7 +68,7 @@ class TestPDBQTUtils(unittest.TestCase): writer.write(mol) writer.close() pdbqt_utils.convert_protein_to_pdbqt(mol, outfile) - pdbqt_xyz, pdbqt_mol = rdkit_util.load_molecule( + pdbqt_xyz, pdbqt_mol = rdkit_utils.load_molecule( outfile, add_hydrogens=False, calc_charges=False) assert pdbqt_mol.GetNumAtoms() == pdbqt_mol.GetNumAtoms() for atom_idx in range(pdbqt_mol.GetNumAtoms()): diff --git a/deepchem/utils/test/test_rdkit_util.py b/deepchem/utils/test/test_rdkit_utils.py similarity index 80% rename from deepchem/utils/test/test_rdkit_util.py rename to deepchem/utils/test/test_rdkit_utils.py index 7f8e66384388491595c5dbb4143707e6a07b03f4..2da3de18865c61bd00a8f4e3f47c6cad168fe711 100644 --- a/deepchem/utils/test/test_rdkit_util.py +++ b/deepchem/utils/test/test_rdkit_utils.py @@ -1,11 +1,10 @@ import tempfile import unittest import os -import shutil import numpy as np -from deepchem.utils import rdkit_util +from deepchem.utils import rdkit_utils class TestRdkitUtil(unittest.TestCase): @@ -14,12 +13,12 @@ class TestRdkitUtil(unittest.TestCase): # TODO test more formats for ligand current_dir = os.path.dirname(os.path.realpath(__file__)) self.protein_file = os.path.join( - current_dir, '../../feat/tests/3ws9_protein_fixer_rdkit.pdb') + current_dir, '../../feat/tests/data/3ws9_protein_fixer_rdkit.pdb') self.ligand_file = os.path.join(current_dir, - 
'../../feat/tests/3ws9_ligand.sdf') + '../../feat/tests/data/3ws9_ligand.sdf') def test_load_complex(self): - complexes = rdkit_util.load_complex( + complexes = rdkit_utils.load_complex( (self.protein_file, self.ligand_file), add_hydrogens=False, calc_charges=False) @@ -30,8 +29,8 @@ class TestRdkitUtil(unittest.TestCase): from rdkit.Chem.AllChem import Mol for add_hydrogens in (True, False): for calc_charges in (True, False): - mol_xyz, mol_rdk = rdkit_util.load_molecule(self.ligand_file, - add_hydrogens, calc_charges) + mol_xyz, mol_rdk = rdkit_utils.load_molecule( + self.ligand_file, add_hydrogens, calc_charges) num_atoms = mol_rdk.GetNumAtoms() self.assertIsInstance(mol_xyz, np.ndarray) self.assertIsInstance(mol_rdk, Mol) @@ -41,9 +40,9 @@ class TestRdkitUtil(unittest.TestCase): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) - xyz2 = rdkit_util.get_xyz_from_mol(mol) + xyz2 = rdkit_utils.get_xyz_from_mol(mol) equal_array = np.all(xyz == xyz2) assert equal_array @@ -51,7 +50,7 @@ class TestRdkitUtil(unittest.TestCase): def test_add_hydrogens_to_mol(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) original_hydrogen_count = 0 for atom_idx in range(mol.GetNumAtoms()): @@ -60,7 +59,7 @@ class TestRdkitUtil(unittest.TestCase): original_hydrogen_count += 1 assert mol is not None - mol = rdkit_util.add_hydrogens_to_mol(mol, is_protein=False) + mol = rdkit_utils.add_hydrogens_to_mol(mol, is_protein=False) assert mol is not None after_hydrogen_count = 0 for atom_idx in range(mol.GetNumAtoms()): @@ -72,7 +71,7 @@ class 
TestRdkitUtil(unittest.TestCase): def test_apply_pdbfixer(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) original_hydrogen_count = 0 for atom_idx in range(mol.GetNumAtoms()): @@ -81,7 +80,7 @@ class TestRdkitUtil(unittest.TestCase): original_hydrogen_count += 1 assert mol is not None - mol = rdkit_util.apply_pdbfixer(mol, hydrogenate=True, is_protein=False) + mol = rdkit_utils.apply_pdbfixer(mol, hydrogenate=True, is_protein=False) assert mol is not None after_hydrogen_count = 0 for atom_idx in range(mol.GetNumAtoms()): @@ -93,9 +92,9 @@ class TestRdkitUtil(unittest.TestCase): def test_compute_charges(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=True) - rdkit_util.compute_charges(mol) + rdkit_utils.compute_charges(mol) has_a_charge = False for atom_idx in range(mol.GetNumAtoms()): @@ -105,10 +104,10 @@ class TestRdkitUtil(unittest.TestCase): has_a_charge = True assert has_a_charge - def test_load_molecule(self): + def test_load_molecule2(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) assert xyz is not None assert mol is not None @@ -116,14 +115,14 @@ class TestRdkitUtil(unittest.TestCase): def test_write_molecule(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = 
rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) with tempfile.TemporaryDirectory() as tmp: outfile = os.path.join(tmp, "mol.sdf") - rdkit_util.write_molecule(mol, outfile) + rdkit_utils.write_molecule(mol, outfile) - xyz, mol2 = rdkit_util.load_molecule( + xyz, mol2 = rdkit_utils.load_molecule( outfile, calc_charges=False, add_hydrogens=False) assert mol.GetNumAtoms() == mol2.GetNumAtoms() @@ -135,9 +134,9 @@ class TestRdkitUtil(unittest.TestCase): def test_merge_molecules_xyz(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) - merged = rdkit_util.merge_molecules_xyz([xyz, xyz]) + merged = rdkit_utils.merge_molecules_xyz([xyz, xyz]) for i in range(len(xyz)): first_atom_equal = np.all(xyz[i] == merged[i]) second_atom_equal = np.all(xyz[i] == merged[i + len(xyz)]) @@ -147,14 +146,14 @@ class TestRdkitUtil(unittest.TestCase): def test_merge_molecules(self): current_dir = os.path.dirname(os.path.realpath(__file__)) ligand_file = os.path.join(current_dir, "../../dock/tests/1jld_ligand.sdf") - xyz, mol = rdkit_util.load_molecule( + xyz, mol = rdkit_utils.load_molecule( ligand_file, calc_charges=False, add_hydrogens=False) num_mol_atoms = mol.GetNumAtoms() # self.ligand_file is for 3ws9_ligand.sdf - oth_xyz, oth_mol = rdkit_util.load_molecule( + oth_xyz, oth_mol = rdkit_utils.load_molecule( self.ligand_file, calc_charges=False, add_hydrogens=False) num_oth_mol_atoms = oth_mol.GetNumAtoms() - merged = rdkit_util.merge_molecules([mol, oth_mol]) + merged = rdkit_utils.merge_molecules([mol, oth_mol]) merged_num_atoms = merged.GetNumAtoms() assert merged_num_atoms == num_mol_atoms + num_oth_mol_atoms diff --git a/deepchem/utils/test/test_vina_utils.py b/deepchem/utils/test/test_vina_utils.py index 
41174859461a5ac046e9f723df4d175739aa8e34..7c0a147ffcfa477678f2a7ca52e2115493b95d81 100644 --- a/deepchem/utils/test/test_vina_utils.py +++ b/deepchem/utils/test/test_vina_utils.py @@ -5,7 +5,7 @@ import os import numpy as np import unittest from deepchem.utils import vina_utils -from deepchem.utils import rdkit_util +from deepchem.utils import rdkit_utils class TestVinaUtils(unittest.TestCase): @@ -13,7 +13,8 @@ class TestVinaUtils(unittest.TestCase): def setUp(self): # TODO test more formats for ligand current_dir = os.path.dirname(os.path.realpath(__file__)) - self.docked_ligands = os.path.join(current_dir, '1jld_ligand_docked.pdbqt') + self.docked_ligands = os.path.join(current_dir, 'data', + '1jld_ligand_docked.pdbqt') def test_load_docked_ligand(self): docked_ligands, scores = vina_utils.load_docked_ligands(self.docked_ligands) @@ -21,6 +22,26 @@ class TestVinaUtils(unittest.TestCase): assert len(scores) == 9 for ligand, score in zip(docked_ligands, scores): - xyz = rdkit_util.get_xyz_from_mol(ligand) + xyz = rdkit_utils.get_xyz_from_mol(ligand) assert score < 0 # This is a binding free energy assert np.count_nonzero(xyz) > 0 + + def test_prepare_inputs(self): + pdbid = '3cyx' + ligand_smiles = 'CC(C)(C)NC(O)C1CC2CCCCC2C[NH+]1CC(O)C(CC1CCCCC1)NC(O)C(CC(N)O)NC(O)C1CCC2CCCCC2N1' + + protein, ligand = vina_utils.prepare_inputs( + pdbid, ligand_smiles, pdb_name=pdbid) + + assert np.isclose(protein.GetNumAtoms(), 1415, atol=3) + assert np.isclose(ligand.GetNumAtoms(), 124, atol=3) + + protein, ligand = vina_utils.prepare_inputs(pdbid + '.pdb', + 'ligand_' + pdbid + '.pdb') + + assert np.isclose(protein.GetNumAtoms(), 1415, atol=3) + assert np.isclose(ligand.GetNumAtoms(), 124, atol=3) + + os.remove(pdbid + '.pdb') + os.remove('ligand_' + pdbid + '.pdb') + os.remove('tmp.pdb') diff --git a/deepchem/utils/test/test_voxel_utils.py b/deepchem/utils/test/test_voxel_utils.py index 1a184467aec38699c517d6943bbb16802b71a218..bb3f1130ad42e1b8b930dd624b6148fb8e414aff 100644 
--- a/deepchem/utils/test/test_voxel_utils.py +++ b/deepchem/utils/test/test_voxel_utils.py @@ -1,6 +1,5 @@ import numpy as np import unittest -import deepchem as dc from deepchem.utils import voxel_utils from deepchem.utils import hash_utils @@ -15,8 +14,7 @@ class TestVoxelUtils(unittest.TestCase): voxel_width = 1 indices = voxel_utils.convert_atom_to_voxel(coordinates, atom_index, box_width, voxel_width) - assert len(indices) == 1 - assert indices[0].shape == (3,) + assert indices.shape == (3,) def test_convert_pair_atom_to_voxel(self): N = 5 @@ -28,14 +26,11 @@ class TestVoxelUtils(unittest.TestCase): voxel_width = 1 indices = voxel_utils.convert_atom_pair_to_voxel( [coordinates1, coordinates2], atom_index_pair, box_width, voxel_width) - assert len(indices) == 2 - assert indices[0].shape == (3,) - assert indices[1].shape == (3,) + assert indices.shape == (2, 3) def test_voxelize_convert_atom(self): N = 5 coordinates = np.random.rand(N, 3) - atom_index = 2 box_width = 16 voxel_width = 1 voxels_per_edge = int(box_width / voxel_width) @@ -45,10 +40,10 @@ class TestVoxelUtils(unittest.TestCase): nb_channel = 16 features = voxel_utils.voxelize( get_voxels, + coordinates, box_width, voxel_width, hash_function, - coordinates, feature_dict, nb_channel=nb_channel) assert features.shape == (voxels_per_edge, voxels_per_edge, voxels_per_edge, @@ -60,7 +55,6 @@ class TestVoxelUtils(unittest.TestCase): coordinates1 = np.random.rand(N, 3) coordinates2 = np.random.rand(M, 3) coordinates = [coordinates1, coordinates2] - atom_index_pair = (2, 3) box_width = 16 voxel_width = 1 voxels_per_edge = int(box_width / voxel_width) @@ -70,10 +64,10 @@ class TestVoxelUtils(unittest.TestCase): nb_channel = 16 features = voxel_utils.voxelize( get_voxels, + coordinates, box_width, voxel_width, hash_function, - coordinates, feature_dict, nb_channel=nb_channel) assert features.shape == (voxels_per_edge, voxels_per_edge, voxels_per_edge, diff --git a/deepchem/utils/typing.py 
b/deepchem/utils/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..ab1423789a1caa0a887829c8a85000745bc67012 --- /dev/null +++ b/deepchem/utils/typing.py @@ -0,0 +1,26 @@ +"""Type annotations that are widely used in DeepChem""" + +from typing import Any, Callable, List, Sequence, Tuple, TypeVar, Union + +T = TypeVar("T") + +# An activation function for a Keras layer: either a TensorFlow function or the name of a standard activation +KerasActivationFn = Union[Callable, str] + +# A loss function for use with KerasModel or TorchModel: f(outputs, labels, weights) +LossFn = Callable[[List, List, List], Any] + +# A single value of some type, or multiple values of that type +OneOrMany = Union[T, Sequence[T]] + +# The shape of a NumPy array +Shape = Tuple[int, ...] + +# type of RDKit object +RDKitMol = Any +RDKitAtom = Any +RDKitBond = Any + +# type of Pymatgen object +PymatgenStructure = Any +PymatgenComposition = Any diff --git a/deepchem/utils/vina_utils.py b/deepchem/utils/vina_utils.py index 5ba87a02685b0465068e54c574c9051f9a125419..dfcd0e456225b9c3e69ad815e1eaf98ad321e999 100644 --- a/deepchem/utils/vina_utils.py +++ b/deepchem/utils/vina_utils.py @@ -1,16 +1,20 @@ """ This file contains utilities to work with autodock vina. """ -from deepchem.utils import pdbqt_utils +from typing import List, Optional, Tuple +import numpy as np +from deepchem.utils.typing import RDKitMol +from deepchem.utils.pdbqt_utils import pdbqt_to_pdb -def write_vina_conf(protein_filename, - ligand_filename, - centroid, - box_dims, - conf_filename, - num_modes=9, - exhaustiveness=None): + +def write_vina_conf(protein_filename: str, + ligand_filename: str, + centroid: np.ndarray, + box_dims: np.ndarray, + conf_filename: str, + num_modes: int = 9, + exhaustiveness: int = None) -> None: """Writes Vina configuration file to disk. 
Autodock Vina accepts a configuration file which provides options @@ -21,13 +25,13 @@ def write_vina_conf(protein_filename, Parameters ---------- protein_filename: str - Filename for protein + Filename for protein ligand_filename: str Filename for the ligand centroid: np.ndarray - Of shape `(3,)` holding centroid of system + A numpy array with shape `(3,)` holding centroid of system box_dims: np.ndarray - Of shape `(3,)` holding the size of the box to dock + A numpy array of shape `(3,)` holding the size of the box to dock conf_filename: str Filename to write Autodock Vina configuration to. num_modes: int, optional (default 9) @@ -52,7 +56,8 @@ def write_vina_conf(protein_filename, f.write("exhaustiveness = %d\n" % exhaustiveness) -def load_docked_ligands(pdbqt_output): +def load_docked_ligands( + pdbqt_output: str) -> Tuple[List[RDKitMol], List[float]]: """This function loads ligands docked by autodock vina. Autodock vina writes outputs to disk in a PDBQT file format. This @@ -69,19 +74,24 @@ def load_docked_ligands(pdbqt_output): Returns ------- - Tuple of `molecules, scores`. `molecules` is a list of rdkit - molecules with 3D information. `scores` is the associated vina - score. + Tuple[List[rdkit.Chem.rdchem.Mol], List[float]] + Tuple of `molecules, scores`. `molecules` is a list of rdkit + molecules with 3D information. `scores` is the associated vina + score. - Note - ---- + Notes + ----- This function requires RDKit to be installed. 
""" - from rdkit import Chem + try: + from rdkit import Chem + except ModuleNotFoundError: + raise ImportError("This function requires RDKit to be installed.") + lines = open(pdbqt_output).readlines() molecule_pdbqts = [] scores = [] - current_pdbqt = None + current_pdbqt: Optional[List[str]] = None for line in lines: if line[:5] == "MODEL": current_pdbqt = [] @@ -95,10 +105,123 @@ def load_docked_ligands(pdbqt_output): molecule_pdbqts.append(current_pdbqt) current_pdbqt = None else: - current_pdbqt.append(line) + # FIXME: Item "None" of "Optional[List[str]]" has no attribute "append" + current_pdbqt.append(line) # type: ignore + molecules = [] for pdbqt_data in molecule_pdbqts: - pdb_block = pdbqt_utils.pdbqt_to_pdb(pdbqt_data=pdbqt_data) + pdb_block = pdbqt_to_pdb(pdbqt_data=pdbqt_data) mol = Chem.MolFromPDBBlock(str(pdb_block), sanitize=False, removeHs=False) molecules.append(mol) return molecules, scores + + +def prepare_inputs(protein: str, + ligand: str, + replace_nonstandard_residues: bool = True, + remove_heterogens: bool = True, + remove_water: bool = True, + add_hydrogens: bool = True, + pH: float = 7.0, + optimize_ligand: bool = True, + pdb_name: Optional[str] = None) -> Tuple[RDKitMol, RDKitMol]: + """This prepares protein-ligand complexes for docking. + + Autodock Vina requires PDB files for proteins and ligands with + sensible inputs. This function uses PDBFixer and RDKit to ensure + that inputs are reasonable and ready for docking. Default values + are given for convenience, but fixing PDB files is complicated and + human judgement is required to produce protein structures suitable + for docking. Always inspect the results carefully before trying to + perform docking. + + Parameters + ---------- + protein: str + Filename for protein PDB file or a PDBID. + ligand: str + Either a filename for a ligand PDB file or a SMILES string. + replace_nonstandard_residues: bool (default True) + Replace nonstandard residues with standard residues. 
+ remove_heterogens: bool (default True) + Removes residues that are not standard amino acids or nucleotides. + remove_water: bool (default True) + Remove water molecules. + add_hydrogens: bool (default True) + Add missing hydrogens at the protonation state given by `pH`. + pH: float (default 7.0) + Most common form of each residue at given `pH` value is used. + optimize_ligand: bool (default True) + If True, optimize ligand with RDKit. Required for SMILES inputs. + pdb_name: Optional[str] + If given, write sanitized protein and ligand to files called + "pdb_name.pdb" and "ligand_pdb_name.pdb" + + Returns + ------- + Tuple[RDKitMol, RDKitMol] + Tuple of `protein_molecule, ligand_molecule` with 3D information. + + Note + ---- + This function requires RDKit and OpenMM to be installed. + Read more about PDBFixer here: https://github.com/openmm/pdbfixer. + + Examples + -------- + >>> p, m = prepare_inputs('3cyx', 'CCC') + >>> p.GetNumAtoms() + 1415 + >>> m.GetNumAtoms() + 11 + + >>> p, m = prepare_inputs('3cyx', 'CCC', remove_heterogens=False) + >>> p.GetNumAtoms() + 1720 + + """ + + try: + from rdkit import Chem + from pdbfixer import PDBFixer + from simtk.openmm.app import PDBFile + except ModuleNotFoundError: + raise ImportError( + "This function requires RDKit and OpenMM to be installed.") + + if protein.endswith('.pdb'): + fixer = PDBFixer(protein) + else: + fixer = PDBFixer(url='https://files.rcsb.org/download/%s.pdb' % (protein)) + + if ligand.endswith('.pdb'): + m = Chem.MolFromPDBFile(ligand) + else: + m = Chem.MolFromSmiles(ligand, sanitize=True) + + # Apply common fixes to PDB files + if replace_nonstandard_residues: + fixer.findMissingResidues() + fixer.findNonstandardResidues() + fixer.replaceNonstandardResidues() + if remove_heterogens and not remove_water: + fixer.removeHeterogens(True) + if remove_heterogens and remove_water: + fixer.removeHeterogens(False) + if add_hydrogens: + fixer.addMissingHydrogens(pH) + + PDBFile.writeFile(fixer.topology, 
fixer.positions, open('tmp.pdb', 'w')) + p = Chem.MolFromPDBFile('tmp.pdb', sanitize=True) + + # Optimize ligand + if optimize_ligand: + m = Chem.AddHs(m) # need hydrogens for optimization + Chem.AllChem.EmbedMolecule(m) + Chem.AllChem.MMFFOptimizeMolecule(m) + + if pdb_name: + Chem.rdmolfiles.MolToPDBFile(p, '%s.pdb' % (pdb_name)) + Chem.rdmolfiles.MolToPDBFile(m, 'ligand_%s.pdb' % (pdb_name)) + + return (p, m) diff --git a/deepchem/utils/voxel_utils.py b/deepchem/utils/voxel_utils.py index 12ff4780bc7c4d29e93aeb91820bd4b63fa44f4f..138e38ed224dd9008b641b3714d2960ebfbf1d54 100644 --- a/deepchem/utils/voxel_utils.py +++ b/deepchem/utils/voxel_utils.py @@ -2,22 +2,24 @@ Various utilities around voxel grids. """ import logging +from typing import Any, Callable, Dict, List, Optional, Tuple, Union import numpy as np + logger = logging.getLogger(__name__) -def convert_atom_to_voxel(coordinates, atom_index, box_width, voxel_width): +def convert_atom_to_voxel(coordinates: np.ndarray, atom_index: int, + box_width: float, voxel_width: float) -> np.ndarray: """Converts atom coordinates to an i,j,k grid index. This function offsets molecular atom coordinates by (box_width/2, box_width/2, box_width/2) and then divides by voxel_width to compute the voxel indices. - Parameters: + Parameters ----------- coordinates: np.ndarray - Array with coordinates of all atoms in the molecule, shape - (N, 3). + Array with coordinates of all atoms in the molecule, shape (N, 3). atom_index: int Index of an atom in the molecule. box_width: float @@ -27,31 +29,33 @@ def convert_atom_to_voxel(coordinates, atom_index, box_width, voxel_width): Returns ------- - A list containing a numpy array of length 3 with `[i, j, k]`, the - voxel coordinates of specified atom. This is returned a list so it - has the same API as convert_atom_pair_to_voxel + indices: np.ndarray + A 1D numpy array of length 3 with `[i, j, k]`, the voxel coordinates + of specified atom. 
""" indices = np.floor( (coordinates[atom_index] + box_width / 2.0) / voxel_width).astype(int) + if ((indices < 0) | (indices >= box_width / voxel_width)).any(): logger.warning('Coordinates are outside of the box (atom id = %s,' ' coords xyz = %s, coords in box = %s' % (atom_index, coordinates[atom_index], indices)) - - return [indices] + return indices -def convert_atom_pair_to_voxel(coordinates_tuple, atom_index_pair, box_width, - voxel_width): - """Converts a pair of atoms to a list of i,j,k tuples. +def convert_atom_pair_to_voxel(coordinates_tuple: Tuple[np.ndarray, np.ndarray], + atom_index_pair: Tuple[int, int], + box_width: float, + voxel_width: float) -> np.ndarray: + """Converts a pair of atoms to i,j,k grid indexes. Parameters ---------- - coordinates_tuple: tuple + coordinates_tuple: Tuple[np.ndarray, np.ndarray] A tuple containing two molecular coordinate arrays of shapes `(N, 3)` and `(M, 3)`. - atom_index_pair: tuple + atom_index_pair: Tuple[int, int] A tuple of indices for the atoms in the two molecules. box_width: float Size of the box in Angstroms. @@ -60,29 +64,27 @@ def convert_atom_pair_to_voxel(coordinates_tuple, atom_index_pair, box_width, Returns ------- - A list containing two numpy array of length 3 with `[i, j, k]`, the - voxel coordinates of specified atom. + indices_list: np.ndarray + A numpy array of shape `(2, 3)`, where `3` is `[i, j, k]` of the + voxel coordinates of specified atom. 
""" indices_list = [] - indices_list.append( - convert_atom_to_voxel(coordinates_tuple[0], atom_index_pair[0], box_width, - voxel_width)[0]) - indices_list.append( - convert_atom_to_voxel(coordinates_tuple[1], atom_index_pair[1], box_width, - voxel_width)[0]) - return (indices_list) - - -def voxelize(get_voxels, - box_width, - voxel_width, - hash_function, - coordinates, - feature_dict=None, - feature_list=None, - nb_channel=16, - dtype="np.int8"): + for coordinates, atom_index in zip(coordinates_tuple, atom_index_pair): + indices_list.append( + convert_atom_to_voxel(coordinates, atom_index, box_width, voxel_width)) + return np.array(indices_list) + + +def voxelize(get_voxels: Callable[..., Any], + coordinates: np.ndarray, + box_width: float = 16.0, + voxel_width: float = 1.0, + hash_function: Optional[Callable[..., Any]] = None, + feature_dict: Optional[Dict[Any, Any]] = None, + feature_list: Optional[List[Union[int, Tuple[int]]]] = None, + nb_channel: int = 16, + dtype: str = 'int') -> np.ndarray: """Helper function to voxelize inputs. This helper function helps convert a hash function which @@ -92,18 +94,18 @@ def voxelize(get_voxels, Parameters ---------- - get_voxels: function + get_voxels: Function Function that voxelizes inputs + coordinates: np.ndarray + Contains the 3D coordinates of a molecular system. box_width: float, optional (default 16.0) Size of a box in which voxel features are calculated. Box is centered on a ligand centroid. voxel_width: float, optional (default 1.0) Size of a 3D voxel in a grid in Angstroms. - hash_function: function - Used to map feature choices to voxel channels. - coordinates: np.ndarray - Contains the 3D coordinates of a molecular system. - feature_dict: dict + hash_function: Function + Used to map feature choices to voxel channels. + feature_dict: Dict, optional (default None) Keys are atom indices or tuples of atom indices, the values are computed features. 
If `hash_function is not None`, then the values are hashed using the hash function into `[0, nb_channels)` and @@ -111,24 +113,25 @@ def voxelize(get_voxels, for each dictionary entry. If `hash_function is None`, then the value must be a vector of size `(n_channels,)` which is added to the existing channel values at that voxel grid. - feature_list: list + feature_list: List, optional (default None) List of atom indices or tuples of atom indices. This can only be used if `nb_channel==1`. Increments the voxels corresponding to these indices by `1` for each entry. - nb_channel: int (Default 16) + nb_channel: int, , optional (default 16) The number of feature channels computed per voxel. Should be a power of 2. - dtype: type - The dtype of the numpy ndarray created to hold features. + dtype: str ('int' or 'float'), optional (default 'int') + The type of the numpy ndarray created to hold features. Returns ------- - Tensor of shape (voxels_per_edge, voxels_per_edge, - voxels_per_edge, nb_channel), + feature_tensor: np.ndarray + The voxel of the input with the shape + `(voxels_per_edge, voxels_per_edge, voxels_per_edge, nb_channel)`. """ # Number of voxels per one edge of box to voxelize. 
voxels_per_edge = int(box_width / voxel_width) - if dtype == "np.int8": + if dtype == "int": feature_tensor = np.zeros( (voxels_per_edge, voxels_per_edge, voxels_per_edge, nb_channel), dtype=np.int8) @@ -139,6 +142,8 @@ def voxelize(get_voxels, if feature_dict is not None: for key, features in feature_dict.items(): voxels = get_voxels(coordinates, key, box_width, voxel_width) + if len(voxels.shape) == 1: + voxels = np.expand_dims(voxels, axis=0) for voxel in voxels: if ((voxel >= 0) & (voxel < voxels_per_edge)).all(): if hash_function is not None: diff --git a/devtools/README.md b/devtools/archive/README.md similarity index 100% rename from devtools/README.md rename to devtools/archive/README.md diff --git a/devtools/conda-recipe/deepchem/build.sh b/devtools/archive/conda-recipe/deepchem/build.sh similarity index 100% rename from devtools/conda-recipe/deepchem/build.sh rename to devtools/archive/conda-recipe/deepchem/build.sh diff --git a/devtools/conda-recipe/deepchem/conda_build_config.yaml b/devtools/archive/conda-recipe/deepchem/conda_build_config.yaml similarity index 100% rename from devtools/conda-recipe/deepchem/conda_build_config.yaml rename to devtools/archive/conda-recipe/deepchem/conda_build_config.yaml diff --git a/devtools/conda-recipe/deepchem/meta.yaml b/devtools/archive/conda-recipe/deepchem/meta.yaml similarity index 100% rename from devtools/conda-recipe/deepchem/meta.yaml rename to devtools/archive/conda-recipe/deepchem/meta.yaml diff --git a/devtools/conda-recipe/deepchem/run_test.py b/devtools/archive/conda-recipe/deepchem/run_test.py similarity index 97% rename from devtools/conda-recipe/deepchem/run_test.py rename to devtools/archive/conda-recipe/deepchem/run_test.py index 2410676533432343def268050f1b11678f886dae..6424d9f1c17bd2a93a7f89556e62d21ca5e9922b 100644 --- a/devtools/conda-recipe/deepchem/run_test.py +++ b/devtools/archive/conda-recipe/deepchem/run_test.py @@ -10,7 +10,7 @@ class TestDeepchemBuild(unittest.TestCase): import deepchem 
import os import shutil - data_dir = deepchem.utils.get_data_dir() + data_dir = deepchem.utils.data_utils.get_data_dir() bace_dir = os.path.join(data_dir, "bace_c") delaney_dir = os.path.join(data_dir, "delaney") try: diff --git a/devtools/conda-recipe/mdtraj/meta.yaml b/devtools/archive/conda-recipe/mdtraj/meta.yaml similarity index 100% rename from devtools/conda-recipe/mdtraj/meta.yaml rename to devtools/archive/conda-recipe/mdtraj/meta.yaml diff --git a/devtools/jenkins/Readme.md b/devtools/archive/jenkins/Readme.md similarity index 100% rename from devtools/jenkins/Readme.md rename to devtools/archive/jenkins/Readme.md diff --git a/devtools/jenkins/build_and_upload_docs.sh b/devtools/archive/jenkins/build_and_upload_docs.sh similarity index 100% rename from devtools/jenkins/build_and_upload_docs.sh rename to devtools/archive/jenkins/build_and_upload_docs.sh diff --git a/devtools/jenkins/compare_results.py b/devtools/archive/jenkins/compare_results.py similarity index 92% rename from devtools/jenkins/compare_results.py rename to devtools/archive/jenkins/compare_results.py index f3dbbec5f55d6b121bc73440b26c38a335b812b8..8c2e860553d23842aacb30fe8b7dd0643353ab98 100644 --- a/devtools/jenkins/compare_results.py +++ b/devtools/archive/jenkins/compare_results.py @@ -75,10 +75,9 @@ def is_good_result(my_result, desired_result): # Higher is Better desired_value = desired_result[key] - CUSHION_PERCENT if my_result[key] < desired_value or LOG_ALL_RESULTS: - message_part = "%s,%s,%s,%s,%s,%s" % (my_result['data_set'], - my_result['model'], - my_result['split'], key, - my_result[key], desired_result[key]) + message_part = "%s,%s,%s,%s,%s,%s" % ( + my_result['data_set'], my_result['model'], my_result['split'], key, + my_result[key], desired_result[key]) message.append(message_part) retval = False return retval, message diff --git a/devtools/jenkins/conda_build.sh b/devtools/archive/jenkins/conda_build.sh similarity index 100% rename from devtools/jenkins/conda_build.sh 
rename to devtools/archive/jenkins/conda_build.sh diff --git a/devtools/jenkins/convert_to_rst.py b/devtools/archive/jenkins/convert_to_rst.py similarity index 100% rename from devtools/jenkins/convert_to_rst.py rename to devtools/archive/jenkins/convert_to_rst.py diff --git a/devtools/jenkins/desired_results.csv b/devtools/archive/jenkins/desired_results.csv similarity index 100% rename from devtools/jenkins/desired_results.csv rename to devtools/archive/jenkins/desired_results.csv diff --git a/devtools/jenkins/generate_graph.py b/devtools/archive/jenkins/generate_graph.py similarity index 98% rename from devtools/jenkins/generate_graph.py rename to devtools/archive/jenkins/generate_graph.py index 39e259c80dfa02c1188144733764e2ceed0d3fc3..18a3280ce8b9451ea34af92d2c3f3438f223d937 100644 --- a/devtools/jenkins/generate_graph.py +++ b/devtools/archive/jenkins/generate_graph.py @@ -164,5 +164,5 @@ if __name__ == '__main__': os.mkdir(save_dir) for pair in TODO.keys(): plot(pair[0], pair[1], FILE, save_dir) - os.system( - 'aws s3 sync ' + save_dir + ' s3://deepchem.io/trained_models/MolNet_pic') + os.system('aws s3 sync ' + save_dir + + ' s3://deepchem.io/trained_models/MolNet_pic') diff --git a/devtools/jenkins/jenkins.sh b/devtools/archive/jenkins/jenkins.sh similarity index 100% rename from devtools/jenkins/jenkins.sh rename to devtools/archive/jenkins/jenkins.sh diff --git a/devtools/jenkins/molnet_update.sh b/devtools/archive/jenkins/molnet_update.sh similarity index 100% rename from devtools/jenkins/molnet_update.sh rename to devtools/archive/jenkins/molnet_update.sh diff --git a/devtools/jenkins/push-docs-to-s3.py b/devtools/archive/jenkins/push-docs-to-s3.py similarity index 93% rename from devtools/jenkins/push-docs-to-s3.py rename to devtools/archive/jenkins/push-docs-to-s3.py index 58c0fac9d2a276de0fa04c7fb48de51c32a5f7d3..36a4b006a1ea26f025b9fea8b4c58ed3d51f364e 100755 --- a/devtools/jenkins/push-docs-to-s3.py +++ 
b/devtools/archive/jenkins/push-docs-to-s3.py @@ -5,8 +5,8 @@ import subprocess BUCKET_NAME = 'deepchem.io' -if not any(d.project_name == 's3cmd' - for d in pip.get_installed_distributions()): +if not any( + d.project_name == 's3cmd' for d in pip.get_installed_distributions()): raise ImportError('The s3cmd package is required. try $ pip install s3cmd') # The secret key is available as a secure environment variable diff --git a/devtools/jenkins/results.table b/devtools/archive/jenkins/results.table similarity index 100% rename from devtools/jenkins/results.table rename to devtools/archive/jenkins/results.table diff --git a/devtools/jenkins/table_to_csv.py b/devtools/archive/jenkins/table_to_csv.py similarity index 100% rename from devtools/jenkins/table_to_csv.py rename to devtools/archive/jenkins/table_to_csv.py diff --git a/devtools/jenkins/test_examples.sh b/devtools/archive/jenkins/test_examples.sh similarity index 100% rename from devtools/jenkins/test_examples.sh rename to devtools/archive/jenkins/test_examples.sh diff --git a/devtools/jenkins/test_notebooks.sh b/devtools/archive/jenkins/test_notebooks.sh similarity index 100% rename from devtools/jenkins/test_notebooks.sh rename to devtools/archive/jenkins/test_notebooks.sh diff --git a/devtools/travis-ci/pre-commit b/devtools/archive/travis-ci/pre-commit similarity index 100% rename from devtools/travis-ci/pre-commit rename to devtools/archive/travis-ci/pre-commit diff --git a/devtools/travis-ci/test_format_code.sh b/devtools/archive/travis-ci/test_format_code.sh similarity index 100% rename from devtools/travis-ci/test_format_code.sh rename to devtools/archive/travis-ci/test_format_code.sh diff --git a/devtools/run_flake8.sh b/devtools/run_flake8.sh new file mode 100755 index 0000000000000000000000000000000000000000..aff6428d0dffcce53219adbfbb0a31c6fcf80d6b --- /dev/null +++ b/devtools/run_flake8.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e + +items=( + "deepchem/data" + "deepchem/dock" + "deepchem/feat" + 
"deepchem/hyper" + "deepchem/metalearning" + "deepchem/metrics" + "deepchem/rl" + "deepchem/splits" + "deepchem/trans" + "deepchem/utils" +) + +for item in "${items[@]}" ; do + flake8 ${item} --count --show-source --statistics +done diff --git a/devtools/run_yapf.sh b/devtools/run_yapf.sh new file mode 100644 index 0000000000000000000000000000000000000000..373061b3edb35fc8174ab9ff1d4323cf616ec504 --- /dev/null +++ b/devtools/run_yapf.sh @@ -0,0 +1,30 @@ +#!/bin/bash -e + +CHANGED_FILES=`git diff --name-only $TRAVIS_COMMIT_RANGE | grep .py$ | grep -v contrib/` + +exit_success () { + echo "Passed Formatting Test" + exit 0 +} + +if [ -z $CHANGED_FILES ] +then + echo "No Python Files Changed" + exit_success +fi + +yapf -d $CHANGED_FILES > diff.txt + +if [ -s diff.txt ] +then + cat diff.txt + echo "" + echo "Failing Formatting Test" + echo "Please run yapf over the files changed" + echo "pip install yapf" + echo "yapf -i $CHANGED_FILES" + exit 1 +else + exit_success +fi +exit 1 diff --git a/docker/conda-forge/Dockerfile b/docker/conda-forge/Dockerfile index b16cbbce34f4db00c274f4031360133a7a3bb50e..c9ae5f555e63fcc7a8dc9d56b541b98846bee2e3 100644 --- a/docker/conda-forge/Dockerfile +++ b/docker/conda-forge/Dockerfile @@ -19,7 +19,7 @@ RUN conda update -n base conda && \ . /miniconda/etc/profile.d/conda.sh && \ conda activate deepchem && \ pip install tensorflow-gpu==1.14 && \ - conda install -c rdkit -c conda-forge rdkit deepchem==2.3.0 && \ + conda install -c conda-forge rdkit deepchem==2.3.0 && \ conda clean -afy && \ rm -rf ~/.cache/pip diff --git a/docker/master/Dockerfile b/docker/master/Dockerfile index bdbfbcc45e546b5990fe39ea9776d487d1418030..6d968649e219f2baac72f8fb87f12854f5654a5d 100644 --- a/docker/master/Dockerfile +++ b/docker/master/Dockerfile @@ -18,9 +18,9 @@ RUN conda update -n base conda && \ git clone --depth 1 https://github.com/deepchem/deepchem.git && \ cd deepchem && \ . 
/miniconda/etc/profile.d/conda.sh && \ - bash scripts/install_deepchem_conda.sh deepchem && \ + bash scripts/install_deepchem_conda.sh gpu && \ conda activate deepchem && \ - python setup.py install && \ + pip install -e . && \ conda clean -afy && \ rm -rf ~/.cache/pip diff --git a/docs/Makefile b/docs/Makefile index d4bb2cbb9eddb1bb1b4f366623044af8e4830919..39b264e04fa3f665fff894abe7f994fface5a468 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,8 +5,8 @@ # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -BUILDDIR = _build +SOURCEDIR = source +BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @@ -14,6 +14,12 @@ help: .PHONY: help Makefile +doctest_examples: + @$(SPHINXBUILD) -M doctest "$(SOURCEDIR)" "$(BUILDDIR)" source/get_started/examples.rst; + +doctest_tutorials: + @$(SPHINXBUILD) -M doctest "$(SOURCEDIR)" "$(BUILDDIR)" source/get_started/tutorials.rst; + # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile diff --git a/docs/README.md b/docs/README.md index 2a36ddf2ae1b7b194673d44707fabf75a32a5742..751756f1cdac215bb2cc9af0b32373202f51a8d9 100644 --- a/docs/README.md +++ b/docs/README.md @@ -7,19 +7,24 @@ and examples. ## Building the Documentation To build the docs, you can use the `Makefile` that's been added to -this directory. (Note that `deepchem` must be installed first.) To -generate docs in html, run +this directory. To generate docs in html, run following commands. ``` -pip install -r requirements.txt -make html -open _build/html/index.html +$ pip install -r requirements.txt +$ make html +// clean build +$ make clean html +$ open build/html/index.html ``` -You can generate docs in other formats as well if you like. 
To clean up past builds run +If you want to confirm logs in more details, ``` -make clean +$ make clean html SPHINXOPTS=-vvv ``` +If you want to confirm the example tests, +``` +$ make doctest_examples +``` \ No newline at end of file diff --git a/docs/_config.yml b/docs/_config.yml deleted file mode 100644 index 2f7efbeab578c8042531ea7908ee8ffd7589fe46..0000000000000000000000000000000000000000 --- a/docs/_config.yml +++ /dev/null @@ -1 +0,0 @@ -theme: jekyll-theme-minimal \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 2b1ea3080f6c7419a87f0d484eeb39f09608ae0a..0000000000000000000000000000000000000000 --- a/docs/conf.py +++ /dev/null @@ -1,156 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys -sys.path.insert(0, os.path.abspath('..')) - -# -- Project information ----------------------------------------------------- - -project = 'deepchem' -copyright = '2020, deepchem-contributors' -author = 'deepchem-contributors' - -# The full version, including alpha/beta/rc tags -release = '2.4.0rc' - -# -- General configuration --------------------------------------------------- - -master_doc = 'index' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', - 'sphinx.ext.napoleon' -] - -autosummary_generate = True -autodoc_default_flags = ['members', 'inherited-members'] -numpydoc_class_members_toctree = False - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -import sphinx_rtd_theme -html_theme = 'sphinx_rtd_theme' -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -html_logo = '_static/logo.png' -# ----------------------------------------------------------------------------- -# Source code links -# ----------------------------------------------------------------------------- - -import inspect -from os.path import relpath, dirname - -for name in ['sphinx.ext.linkcode', 'numpydoc.linkcode']: - try: - __import__(name) - extensions.append(name) - break - except ImportError: - pass - else: - print("NOTE: linkcode extension not found -- no links to source generated") - - -# This code was borrowed from Numpy's doc-to-source linker. 
-def linkcode_resolve(domain, info): - """ - Determine the URL corresponding to Python object - """ - if domain != 'py': - return None - - modname = info['module'] - fullname = info['fullname'] - - submod = sys.modules.get(modname) - if submod is None: - return None - - obj = submod - for part in fullname.split('.'): - try: - obj = getattr(obj, part) - except Exception: - return None - - # strip decorators, which would resolve to the source of the decorator - # possibly an upstream bug in getsourcefile, bpo-1764286 - try: - unwrap = inspect.unwrap - except AttributeError: - pass - else: - obj = unwrap(obj) - - try: - fn = inspect.getsourcefile(obj) - except Exception: - fn = None - if not fn: - return None - - try: - source, lineno = inspect.getsourcelines(obj) - except Exception: - lineno = None - - if lineno: - linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1) - else: - linespec = "" - - fn = relpath( - fn, start=os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) - - return "https://github.com/deepchem/deepchem/blob/master/%s%s" % (fn, - linespec) - # TODO: Should we do similar dev handling? - #if 'dev' in numpy.__version__: - # return "https://github.com/numpy/numpy/blob/master/numpy/%s%s" % ( - # fn, linespec) - #else: - # return "https://github.com/numpy/numpy/blob/v%s/numpy/%s%s" % ( - # numpy.__version__, fn, linespec) - - -# Document __init__ methods -def skip(app, what, name, obj, would_skip, options): - if name == "__init__": - return False - return would_skip - - -def setup(app): - app.connect("autodoc-skip-member", skip) diff --git a/docs/dataloaders.rst b/docs/dataloaders.rst deleted file mode 100644 index e68dbcc2646c452d1bf634347dd19e7400baf834..0000000000000000000000000000000000000000 --- a/docs/dataloaders.rst +++ /dev/null @@ -1,35 +0,0 @@ -Data Loaders -============ - -Processing large amounts of input data to construct a :code:`dc.data.Dataset` object can require some amount of hacking. 
To simplify this process for you, you can use the :code:`dc.data.DataLoader` classes. These classes provide utilities for you to load and process large amounts of data. - - -DataLoader ----------- - -.. autoclass:: deepchem.data.DataLoader - :members: - -CSVLoader -^^^^^^^^^ - -.. autoclass:: deepchem.data.CSVLoader - :members: - -UserCSVLoader -^^^^^^^^^^^^^ - -.. autoclass:: deepchem.data.UserCSVLoader - :members: - -FASTALoader -^^^^^^^^^^^ - -.. autoclass:: deepchem.data.FASTALoader - :members: - -ImageLoader -^^^^^^^^^^^ - -.. autoclass:: deepchem.data.ImageLoader - :members: diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 0d1572cb8d492eeb4e21bd063fa48f8e8e9756f6..0000000000000000000000000000000000000000 --- a/docs/index.rst +++ /dev/null @@ -1,141 +0,0 @@ -.. deepchem documentation master file, created by - sphinx-quickstart on Sat Mar 7 12:21:39 2020. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -The DeepChem Project -==================== - -.. raw:: html - - - Fork me on GitHub - - - -**The DeepChem project aims to democratize deep learning for science.** - -What is DeepChem? ------------------ - -The DeepChem project aims to build high quality tools to democratize -the use of deep learning in the sciences. The origin of DeepChem -focused on applications of deep learning to chemistry, but the project -has slowly evolved past its roots to broader applications of deep -learning to the sciences. - -The core `DeepChem Repo`_ serves as a monorepo that organizes the DeepChem suite of scientific tools. As the project matures, smaller more focused tool will be surfaced in more targeted repos. DeepChem is primarily developed in Python, but we are experimenting with adding support for other languages. - -What are some of the things you can use DeepChem to do? 
Here's a few examples: - -- Predict the solubility of small drug-like molecules -- Predict binding affinity for small molecule to protein targets -- Predict physical properties of simple materials -- Analyze protein structures and extract useful descriptors -- Count the number of cells in a microscopy image -- More coming soon... - -We should clarify one thing up front though. DeepChem is a machine -learning library, so it gives you the tools to solve each of the -applications mentioned above yourself. DeepChem may or may not have -prebaked models which can solve these problems out of the box. - -Over time, we hope to grow the set of scientific applications DeepChem -can address. This means we need lots of help! If you're a scientist -who's interested in open source, please pitch on building DeepChem. - -Quick Start ------------ - -The fastest way to get up and running with DeepChem is to run it on -Google Colab. Check out one of the `DeepChem Tutorials`_ or this -`forum post`_ for Colab quick start guides. - -If you'd like to install DeepChem locally, we recommend using -:code:`conda` and installing RDKit with deepchem. -RDKit is a soft requirement package, but many useful methods like -molnet depend on it. - -.. code-block:: bash - - pip install tensorflow-gpu==1.14 - conda install -y -c rdkit -c conda-forge rdkit deepchem - -For CPU only support instead run - -.. code-block:: bash - - pip install tensorflow==1.14 - conda install -y -c rdkit -c conda-forge rdkit deepchem - -Then open your python and try running. - -.. code-block:: python - - import deepchem - -.. _`DeepChem Tutorials`: https://github.com/deepchem/deepchem/tree/master/examples/tutorials -.. _`forum post`: https://forum.deepchem.io/t/getting-deepchem-running-in-colab/81 - -About Us --------- -DeepChem is managed by a team of open source contributors. Anyone is free to join and contribute! DeepChem has weekly developer calls. You can find `meeting minutes`_ on our `forums`_. 
- -DeepChem developer calls are open to the public! To listen in, please email X.Y@gmail.com, where X=bharath and Y=ramsundar to introduce yourself and ask for an invite. - -.. _`meeting minutes`: https://forum.deepchem.io/search?q=Minutes%20order%3Alatest -.. _`forums`: https://forum.deepchem.io/ - -Licensing and Commercial Uses ------------------------------ -DeepChem is licensed under the MIT License. We actively support -commercial users. Note that any novel molecules, materials, or other -discoveries powered by DeepChem belong entirely to the user and not to -DeepChem developers. - -That said, we would very much appreciate a citation if you find our tools useful. You can cite DeepChem with the following reference. - -.. code-block:: guess - - @book{Ramsundar-et-al-2019, - title={Deep Learning for the Life Sciences}, - author={Bharath Ramsundar and Peter Eastman and Patrick Walters and Vijay Pande and Karl Leswing and Zhenqin Wu}, - publisher={O'Reilly Media}, - note={\url{https://www.amazon.com/Deep-Learning-Life-Sciences-Microscopy/dp/1492039837}}, - year={2019} - } - -Getting Involved ----------------- - -Support the DeepChem project by starring us on `on GitHub`_. -Join our forums at https://forum.deepchem.io to participate in -discussions about research, development or any general questions. If you'd like to talk to real human beings involved in the project, say hi on our `Gitter`_ chatroom. - -.. _`DeepChem repo`: https://github.com/deepchem/deepchem -.. _`on GitHub`: https://github.com/deepchem/deepchem -.. _`Gitter`: https://gitter.im/deepchem/Lobby - -.. important:: Join our `community gitter `_ to discuss DeepChem. Sign up for our `forums `_ to talk about research, development, and general questions. - -.. 
toctree:: - :maxdepth: 2 - :caption: Table of Contents - :name: mastertoc - - Introduction - Installation - Datasets - Data Loaders - Featurizers - Splitters - Transformers - Models - Layers - Metrics - Hyperparameter Turning - MoleculeNet - Metalearning - Reinforcement Learning - Docking - Utilities diff --git a/docs/metrics.rst b/docs/metrics.rst deleted file mode 100644 index 0d415b0b19623ad57eb884a15a4288cceb860f79..0000000000000000000000000000000000000000 --- a/docs/metrics.rst +++ /dev/null @@ -1,54 +0,0 @@ -Metrics -======= -Metrics are one of the most import parts of machine learning. Unlike -traditional software, in which algorithms either work or don't work, -machine learning models work in degrees. That is, there's a continuous -range of "goodness" for a model. "Metrics" are functions which measure -how well a model works. There are many different choices of metrics -depending on the type of model at hand. - -Metric Utilities ----------------- - -.. autofunction:: deepchem.metrics.to_one_hot - -.. autofunction:: deepchem.metrics.from_one_hot - - -Metric Functions ----------------- -.. autofunction:: deepchem.metrics.roc_auc_score - -.. autofunction:: deepchem.metrics.accuracy_score - -.. autofunction:: deepchem.metrics.balanced_accuracy_score - -.. autofunction:: deepchem.metrics.pearson_r2_score - -.. autofunction:: deepchem.metrics.jaccard_index - -.. autofunction:: deepchem.metrics.pixel_error - -.. autofunction:: deepchem.metrics.prc_auc_score - -.. autofunction:: deepchem.metrics.rms_score - -.. autofunction:: deepchem.metrics.mae_score - -.. autofunction:: deepchem.metrics.kappa_score - -.. autofunction:: deepchem.metrics.bedroc_score - -.. autofunction:: deepchem.metrics.genomic_metrics.get_motif_scores - -.. autofunction:: deepchem.metrics.genomic_metrics.get_pssm_scores - -.. 
autofunction:: deepchem.metrics.genomic_metrics.in_silico_mutagenesis - -Metric Class ------------- -The :code:`dc.metrics.Metric` class is a wrapper around metric -functions which interoperates with DeepChem :code:`dc.models.Model`. - -.. autoclass:: deepchem.metrics.Metric - :members: diff --git a/docs/models.rst b/docs/models.rst deleted file mode 100644 index 8e1eefe714d9774e6eaeb6b9d15ae6dd21831b50..0000000000000000000000000000000000000000 --- a/docs/models.rst +++ /dev/null @@ -1,164 +0,0 @@ -Model Classes -============= - -Model ------ - -.. autoclass:: deepchem.models.Model - :members: - -SklearnModel ------------- - -.. autoclass:: deepchem.models.SklearnModel - :members: - -XGBoostModel ------------- - -.. autoclass:: deepchem.models.XGBoostModel - :members: - -KerasModel ----------- -DeepChem extensively uses `Keras`_ to build powerful machine learning models. - -.. _`Keras`: https://keras.io/ - - -.. autoclass:: deepchem.models.KerasModel - :members: - -MultitaskRegressor ------------------- - -.. autoclass:: deepchem.models.MultitaskRegressor - :members: - -MultitaskFitTransformRegressor ------------------------------- - -.. autoclass:: deepchem.models.MultitaskClassifier - :members: - -MultitaskClassifier -------------------- - -.. autoclass:: deepchem.models.MultitaskClassifier - :members: - -TensorflowMultitaskIRVClassifier --------------------------------- - -.. autoclass:: deepchem.models.TensorflowMultitaskIRVClassifier - :members: - -RobustMultitaskClassifier -------------------------- - -.. autoclass:: deepchem.models.RobustMultitaskClassifier - :members: - -RobustMultitaskRegressor ------------------------- - -.. autoclass:: deepchem.models.RobustMultitaskRegressor - :members: - -ProgressiveMultitaskClassifier ------------------------------- - -.. autoclass:: deepchem.models.ProgressiveMultitaskClassifier - :members: - -ProgressiveMultitaskRegressor ------------------------------ - -.. 
autoclass:: deepchem.models.ProgressiveMultitaskRegressor - :members: - -WeaveModel ----------- - -.. autoclass:: deepchem.models.WeaveModel - :members: - -DTNNModel ---------- - -.. autoclass:: deepchem.models.DTNNModel - :members: - -DAGModel --------- - -.. autoclass:: deepchem.models.DAGModel - :members: - -GraphConvModel --------------- - -.. autoclass:: deepchem.models.GraphConvModel - :members: - -MPNNModel ---------- - -.. autoclass:: deepchem.models.MPNNModel - :members: - -ScScoreModel ------------- - -.. autoclass:: deepchem.models.ScScoreModel - :members: - -SeqToSeq --------- - -.. autoclass:: deepchem.models.SeqToSeq - :members: - -GAN ---- - -.. autoclass:: deepchem.models.GAN - :members: - -WGAN -^^^^ - -.. autoclass:: deepchem.models.WGAN - :members: - -CNN ---- - -.. autoclass:: deepchem.models.CNN - :members: - -TextCNNModel ------------- - -.. autoclass:: deepchem.models.CNN - :members: - - -AtomicConvModel ---------------- - -.. autoclass:: deepchem.models.AtomicConvModel - :members: - - -Smiles2Vec ----------- - -.. autoclass:: deepchem.models.Smiles2Vec - :members: - -ChemCeption ------------ - -.. autoclass:: deepchem.models.ChemCeption - :members: diff --git a/docs/moleculenet.rst b/docs/moleculenet.rst deleted file mode 100644 index cf241caf94dcd3efeae9db6a1e68c783af5fb88e..0000000000000000000000000000000000000000 --- a/docs/moleculenet.rst +++ /dev/null @@ -1,178 +0,0 @@ -MoleculeNet -=========== -The DeepChem library is packaged alongside the MoleculeNet suite of datasets. One of the most important parts of machine learning applications is finding a suitable dataset. The MoleculeNet suite has curated a whole range of datasets and loaded them into DeepChem :code:`dc.data.Dataset` objects for convenience. - -BACE Dataset ------------- - -.. autofunction:: deepchem.molnet.load_bace_classification - -.. autofunction:: deepchem.molnet.load_bace_regression - -BBBC Datasets -------------- - -.. autofunction:: deepchem.molnet.load_bbbc001 - -.. 
autofunction:: deepchem.molnet.load_bbbc002 - -BBBP Datasets -------------- -BBBP stands for Blood-Brain-Barrier Penetration - -.. autofunction:: deepchem.molnet.load_bbbp - -Cell Counting Datasets ----------------------- - -.. autofunction:: deepchem.molnet.load_cell_counting - -Chembl Datasets ---------------- - -.. autofunction:: deepchem.molnet.load_chembl - -Chembl25 Datasets ---------------- - -.. autofunction:: deepchem.molnet.load_chembl25 - -Clearance Datasets ------------------- - -.. autofunction:: deepchem.molnet.load_clearance - -Clintox Datasets ----------------- - -.. autofunction:: deepchem.molnet.load_clintox - -Delaney Datasets ----------------- - -.. autofunction:: deepchem.molnet.load_delaney - -Factors Datasets ----------------- - -.. autofunction:: deepchem.molnet.load_factors - -HIV Datasets ------------- - -.. autofunction:: deepchem.molnet.load_hiv - -HOPV Datasets -------------- -HOPV stands for the Harvard Organic Photovoltaic Dataset. - -.. autofunction:: deepchem.molnet.load_hopv - -HPPB Datasets -------------- - -.. autofunction:: deepchem.molnet.load_hppb - - -KAGGLE Datasets ---------------- - -.. autofunction:: deepchem.molnet.load_kaggle - -Kinase Datasets ---------------- - -.. autofunction:: deepchem.molnet.load_kinase - - -Lipo Datasets -------------- - -.. autofunction:: deepchem.molnet.load_lipo - -MUV Datasets ------------- - -.. autofunction:: deepchem.molnet.load_muv - -NCI Datasets ------------- - -.. autofunction:: deepchem.molnet.load_nci - -PCBA Datasets -------------- - -.. autofunction:: deepchem.molnet.load_pcba - -PDBBIND Datasets ----------------- - -.. autofunction:: deepchem.molnet.load_pdbbind - -PPB Datasets ------------- - -.. autofunction:: deepchem.molnet.load_ppb - -QM7 Datasets ------------- - -.. autofunction:: deepchem.molnet.load_qm7 - -.. autofunction:: deepchem.molnet.load_qm7_from_mat - -.. autofunction:: deepchem.molnet.load_qm7b_from_mat - -QM8 Datasets ------------- - -.. 
autofunction:: deepchem.molnet.load_qm8 - -QM9 Datasets ------------- - -.. autofunction:: deepchem.molnet.load_qm9 - - -SAMPL Datasets --------------- - -.. autofunction:: deepchem.molnet.load_sampl - - -SIDER Datasets --------------- - -.. autofunction:: deepchem.molnet.load_sider - -SWEETLEAD Datasets ------------------- - -.. autofunction:: deepchem.molnet.load_sweetlead - -Thermosol Datasets ------------------- - -.. autofunction:: deepchem.molnet.load_thermosol - - -Tox21 Datasets --------------- - -.. autofunction:: deepchem.molnet.load_tox21 - -Toxcast Datasets ----------------- - -.. autofunction:: deepchem.molnet.load_toxcast - -USPTO Datasets --------------- - -.. autofunction:: deepchem.molnet.load_uspto - -UV Datasets ------------ - -.. autofunction:: deepchem.molnet.load_uv diff --git a/docs/requirements.txt b/docs/requirements.txt index 35fa8888624f1d882cfecc656015fb6bcba707ea..63e97a2596b6261a9579523eb19dd0c8070049ba 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,7 +1,7 @@ -sphinx_rtd_theme -numpy pandas -sklearn -tensorflow -pillow -tensorflow_probability +scikit-learn +sphinx>=3.2,<4 +sphinx_rtd_theme>=0.5,<1 +tensorflow==2.3.0 +transformers +torch==1.6.0 diff --git a/docs/_static/logo.png b/docs/source/_static/logo.png similarity index 100% rename from docs/_static/logo.png rename to docs/source/_static/logo.png diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 0000000000000000000000000000000000000000..63ee6cc74ce038abb92976e6821248e73378dce6 --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,13 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + 
} +} diff --git a/docs/source/api_reference/dataclasses.rst b/docs/source/api_reference/dataclasses.rst new file mode 100644 index 0000000000000000000000000000000000000000..5d221f67789b29b065775b30a1193afb8894b042 --- /dev/null +++ b/docs/source/api_reference/dataclasses.rst @@ -0,0 +1,26 @@ +Data Classes +============ +DeepChem featurizers often transform members into "data classes". These are +classes that hold all the information needed to train a model on that data +point. Models then transform these into the tensors for training in their +:code:`default_generator` methods. + +Graph Convolutions +------------------ + +These classes document the data classes for graph convolutions. We plan to simplify these classes into a joint data representation for all graph convolutions in a future version of DeepChem, so these APIs may not remain stable. + +.. autoclass:: deepchem.feat.mol_graphs.ConvMol + :members: + +.. autoclass:: deepchem.feat.mol_graphs.MultiConvMol + :members: + +.. autoclass:: deepchem.feat.mol_graphs.WeaveMol + :members: + +.. autoclass:: deepchem.feat.graph_data.GraphData + :members: + +.. autoclass:: deepchem.feat.graph_data.BatchGraphData + :members: diff --git a/docs/source/api_reference/dataloaders.rst b/docs/source/api_reference/dataloaders.rst new file mode 100644 index 0000000000000000000000000000000000000000..b0ac29135e2fe520542ff39b3fe3423fc8e742f6 --- /dev/null +++ b/docs/source/api_reference/dataloaders.rst @@ -0,0 +1,62 @@ +Data Loaders +============ + +Processing large amounts of input data to construct a :code:`dc.data.Dataset` object can require some amount of hacking. To simplify this process for you, you can use the :code:`dc.data.DataLoader` classes. These classes provide utilities for you to load and process large amounts of data. + + +DataLoader +---------- + +.. autoclass:: deepchem.data.DataLoader + :members: + +CSVLoader +^^^^^^^^^ + +.. autoclass:: deepchem.data.CSVLoader + :members: + +UserCSVLoader +^^^^^^^^^^^^^ + +.. 
autoclass:: deepchem.data.UserCSVLoader + :members: + +JsonLoader +^^^^^^^^^^ +JSON is a flexible file format that is human-readable, lightweight, +and more compact than other open standard formats like XML. JSON files +are similar to python dictionaries of key-value pairs. All keys must +be strings, but values can be any of (string, number, object, array, +boolean, or null), so the format is more flexible than CSV. JSON is +used for describing structured data and to serialize objects. It is +conveniently used to read/write Pandas dataframes with the +`pandas.read_json` and `pandas.write_json` methods. + +.. autoclass:: deepchem.data.JsonLoader + :members: + +FASTALoader +^^^^^^^^^^^ + +.. autoclass:: deepchem.data.FASTALoader + :members: + +ImageLoader +^^^^^^^^^^^ + +.. autoclass:: deepchem.data.ImageLoader + :members: + +SDFLoader +^^^^^^^^^ + +.. autoclass:: deepchem.data.SDFLoader + :members: + +InMemoryLoader +^^^^^^^^^^^^^^ +The :code:`dc.data.InMemoryLoader` is designed to facilitate the processing of large datasets where you already hold the raw data in-memory (say in a pandas dataframe). + +.. autoclass:: deepchem.data.InMemoryLoader + :members: diff --git a/docs/datasets.rst b/docs/source/api_reference/datasets.rst similarity index 100% rename from docs/datasets.rst rename to docs/source/api_reference/datasets.rst diff --git a/docs/docking.rst b/docs/source/api_reference/docking.rst similarity index 100% rename from docs/docking.rst rename to docs/source/api_reference/docking.rst diff --git a/docs/featurizers.rst b/docs/source/api_reference/featurizers.rst similarity index 53% rename from docs/featurizers.rst rename to docs/source/api_reference/featurizers.rst index 384f7cfa4fbd98e77625f133dbdb4d547b188043..97259d881500f209915a7b1f3034f38bf2907ebd 100644 --- a/docs/featurizers.rst +++ b/docs/source/api_reference/featurizers.rst @@ -35,6 +35,51 @@ MolecularFeaturizer Molecular Featurizers are those that work with datasets of molecules. +.. 
autoclass:: deepchem.feat.MolecularFeaturizer + :members: + +Here are some constants that are used by the graph convolutional featurizers for molecules. + +.. autoclass:: deepchem.feat.graph_features.GraphConvConstants + :members: + :undoc-members: + +There are a number of helper methods used by the graph convolutional classes which we document here. + +.. autofunction:: deepchem.feat.graph_features.one_of_k_encoding + +.. autofunction:: deepchem.feat.graph_features.one_of_k_encoding_unk + +.. autofunction:: deepchem.feat.graph_features.get_intervals + +.. autofunction:: deepchem.feat.graph_features.safe_index + +.. autofunction:: deepchem.feat.graph_features.get_feature_list + +.. autofunction:: deepchem.feat.graph_features.features_to_id + +.. autofunction:: deepchem.feat.graph_features.id_to_features + +.. autofunction:: deepchem.feat.graph_features.atom_to_id + +This function helps compute distances between atoms from a given base atom. + +.. autofunction:: deepchem.feat.graph_features.find_distance + +This function is important and computes per-atom feature vectors used by +graph convolutional featurizers. + +.. autofunction:: deepchem.feat.graph_features.atom_features + +This function computes the bond features used by graph convolutional +featurizers. + +.. autofunction:: deepchem.feat.graph_features.bond_features + +This function computes atom-atom features (for atom pairs which may not have bonds between them.) + +.. autofunction:: deepchem.feat.graph_features.pair_features + ConvMolFeaturizer ^^^^^^^^^^^^^^^^^ @@ -47,18 +92,42 @@ WeaveFeaturizer .. autoclass:: deepchem.feat.WeaveFeaturizer :members: +MACCSKeysFingerprint +^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.MACCSKeysFingerprint + :members: + CircularFingerprint ^^^^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.feat.CircularFingerprint :members: +PubChemFingerprint +^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.PubChemFingerprint + :members: + +Mol2VecFingerprint +^^^^^^^^^^^^^^^^^^ + +.. 
autoclass:: deepchem.feat.Mol2VecFingerprint + :members: + RDKitDescriptors ^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.feat.RDKitDescriptors :members: +MordredDescriptors +^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.MordredDescriptors + :members: + CoulombMatrix ^^^^^^^^^^^^^ @@ -77,12 +146,6 @@ AtomCoordinates .. autoclass:: deepchem.feat.AtomicCoordinates :members: -AdjacencyFingerprint -^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: deepchem.feat.AdjacencyFingerprint - :members: - SmilesToSeq ^^^^^^^^^^^ @@ -95,6 +158,12 @@ SmilesToImage .. autoclass:: deepchem.feat.SmilesToImage :members: +OneHotFeaturizer +^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.OneHotFeaturizer + :members: + ComplexFeaturizer ----------------- @@ -116,19 +185,17 @@ AtomConvFeaturizer .. autoclass:: deepchem.feat.NeighborListComplexAtomicCoordinates :members: -MaterialsFeaturizers -------------------- +MaterialStructureFeaturizer +--------------------------- -Materials Featurizers are those that work with datasets of inorganic crystals. -These featurizers operate on chemical compositions (e.g. "MoS2"), or on a -lattice and 3D coordinates that specify a periodic crystal structure. They -should be applied on systems that have periodic boundary conditions. Materials -featurizers are not designed to work with molecules. +Material Structure Featurizers are those that work with datasets of crystals with +periodic boundary conditions. For inorganic crystal structures, these +featurizers operate on pymatgen.Structure objects, which include a +lattice and 3D coordinates that specify a periodic crystal structure. +They should be applied on systems that have periodic boundary conditions. +Structure featurizers are not designed to work with molecules. -ElementPropertyFingerprint -^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: deepchem.feat.ElementPropertyFingerprint +.. autoclass:: deepchem.feat.MaterialStructureFeaturizer :members: SineCoulombMatrix @@ -137,10 +204,35 @@ SineCoulombMatrix .. 
autoclass:: deepchem.feat.SineCoulombMatrix :members: -StructureGraphFeaturizer +CGCNNFeaturizer ^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.feat.StructureGraphFeaturizer +.. autoclass:: deepchem.feat.CGCNNFeaturizer + :members: + +MaterialCompositionFeaturizer +----------------------------- + +Material Composition Featurizers are those that work with datasets of crystal +compositions with periodic boundary conditions. +For inorganic crystal structures, these featurizers operate on chemical +compositions (e.g. "MoS2"). They should be applied on systems that have +periodic boundary conditions. Composition featurizers are not designed +to work with molecules. + +.. autoclass:: deepchem.feat.MaterialCompositionFeaturizer + :members: + +ElementPropertyFingerprint +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.ElementPropertyFingerprint + :members: + +ElemNetFeaturizer +^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.feat.ElemNetFeaturizer :members: BindingPocketFeaturizer @@ -161,12 +253,6 @@ BPSymmetryFunctionInput .. autoclass:: deepchem.feat.BPSymmetryFunctionInput :members: -OneHotFeaturizer ---------------- - -.. autoclass:: deepchem.feat.OneHotFeaturizer - :members: - RawFeaturizer ------------- diff --git a/docs/hyper.rst b/docs/source/api_reference/hyper.rst similarity index 64% rename from docs/hyper.rst rename to docs/source/api_reference/hyper.rst index e21eb120dcb359062377221600ea326d95216397..f5ff3a14128eb9630e85ec220c4bd91e4bccdce5 100644 --- a/docs/hyper.rst +++ b/docs/source/api_reference/hyper.rst @@ -8,6 +8,19 @@ learning algorithm used for the rest of learning and have to be set in an alternate fashion. The :code:`dc.hyper` module contains utilities for hyperparameter tuning. +DeepChem's hyperparameter optimization algorithms are simple and run in +single-threaded fashion. They are not intended to be production grade +hyperparameter utilities, but rather useful first tools as you start +exploring your parameter space.
As the needs of your application grow, +we recommend swapping to a more heavy duty hyperparameter +optimization library. + +Hyperparameter Optimization API +------------------------------- + +.. autoclass:: deepchem.hyper.HyperparamOpt + :members: + Grid Hyperparameter Optimization -------------------------------- @@ -15,7 +28,7 @@ This is the simplest form of hyperparameter optimization that simply involves iterating over a fixed grid of possible values for hyperaparameters. -.. autoclass:: deepchem.hyper.HyperparamOpt +.. autoclass:: deepchem.hyper.GridHyperparamOpt :members: Gaussian Process Hyperparameter Optimization diff --git a/docs/layers.rst b/docs/source/api_reference/layers.rst similarity index 95% rename from docs/layers.rst rename to docs/source/api_reference/layers.rst index a6af626e4b8d4a499879f398e104cf9bca42af03..5b4f351559975eb8c8113e0c2345627ec0cd70c8 100644 --- a/docs/layers.rst +++ b/docs/source/api_reference/layers.rst @@ -10,9 +10,6 @@ another tensor. DeepChem maintains an extensive collection of layers which perfo .. autoclass:: deepchem.models.layers.GraphConv :members: -.. autoclass:: deepchem.models.layers.GraphConv :members: - .. autoclass:: deepchem.models.layers.GraphPool :members: @@ -103,5 +100,4 @@ another tensor. DeepChem maintains an extensive collection of layers which perfo .. autoclass:: deepchem.models.layers.SetGather :members: -.. autoclass:: deepchem.models.layers.SetGather - :members: +.. 
autofunction:: deepchem.models.layers.cosine_dist diff --git a/docs/metalearning.rst b/docs/source/api_reference/metalearning.rst similarity index 100% rename from docs/metalearning.rst rename to docs/source/api_reference/metalearning.rst diff --git a/docs/source/api_reference/metrics.rst b/docs/source/api_reference/metrics.rst new file mode 100644 index 0000000000000000000000000000000000000000..a2ac6380da5926d9c9e5708f94c1df910cd4321d --- /dev/null +++ b/docs/source/api_reference/metrics.rst @@ -0,0 +1,96 @@ +Metrics +======= +Metrics are one of the most important parts of machine learning. Unlike +traditional software, in which algorithms either work or don't work, +machine learning models work in degrees. That is, there's a continuous +range of "goodness" for a model. "Metrics" are functions which measure +how well a model works. There are many different choices of metrics +depending on the type of model at hand. + +Metric Utilities +---------------- +Metric utility functions allow for some common manipulations such as +switching to/from one-hot representations. + +.. autofunction:: deepchem.metrics.to_one_hot + +.. autofunction:: deepchem.metrics.from_one_hot + +Metric Shape Handling +--------------------- +One of the trickiest parts of handling metrics correctly is making sure the +shapes of input weights, predictions and labels are processed correctly. This +is challenging in particular since DeepChem supports multitask, multiclass +models which means that shapes must be handled with care to prevent errors. +DeepChem maintains the following utility functions which attempt to +facilitate shape handling for you. + +.. autofunction:: deepchem.metrics.normalize_weight_shape + +.. autofunction:: deepchem.metrics.normalize_labels_shape + +.. autofunction:: deepchem.metrics.normalize_prediction_shape + +.. 
autofunction:: deepchem.metrics.handle_classification_mode + +Metric Functions +---------------- +DeepChem has a variety of different metrics which are useful for measuring model performance. A number (but not all) of these metrics are directly sourced from :code:`sklearn`. + +.. autofunction:: deepchem.metrics.matthews_corrcoef + +.. autofunction:: deepchem.metrics.recall_score + +.. autofunction:: deepchem.metrics.r2_score + +.. autofunction:: deepchem.metrics.mean_squared_error + +.. autofunction:: deepchem.metrics.mean_absolute_error + +.. autofunction:: deepchem.metrics.precision_score + +.. autofunction:: deepchem.metrics.precision_recall_curve + +.. autofunction:: deepchem.metrics.auc + +.. autofunction:: deepchem.metrics.jaccard_score + +.. autofunction:: deepchem.metrics.f1_score + +.. autofunction:: deepchem.metrics.roc_auc_score + +.. autofunction:: deepchem.metrics.accuracy_score + +.. autofunction:: deepchem.metrics.balanced_accuracy_score + +.. autofunction:: deepchem.metrics.pearson_r2_score + +.. autofunction:: deepchem.metrics.jaccard_index + +.. autofunction:: deepchem.metrics.pixel_error + +.. autofunction:: deepchem.metrics.prc_auc_score + +.. autofunction:: deepchem.metrics.rms_score + +.. autofunction:: deepchem.metrics.mae_score + +.. autofunction:: deepchem.metrics.kappa_score + +.. autofunction:: deepchem.metrics.bedroc_score + +.. autofunction:: deepchem.metrics.concordance_index + +.. autofunction:: deepchem.metrics.genomic_metrics.get_motif_scores + +.. autofunction:: deepchem.metrics.genomic_metrics.get_pssm_scores + +.. autofunction:: deepchem.metrics.genomic_metrics.in_silico_mutagenesis + +Metric Class +------------ +The :code:`dc.metrics.Metric` class is a wrapper around metric +functions which interoperates with DeepChem :code:`dc.models.Model`. + +.. 
autoclass:: deepchem.metrics.Metric + :members: diff --git a/docs/source/api_reference/models.rst b/docs/source/api_reference/models.rst new file mode 100644 index 0000000000000000000000000000000000000000..71e985ee5611fb64652d1641e60e6c980b0be045 --- /dev/null +++ b/docs/source/api_reference/models.rst @@ -0,0 +1,470 @@ +Model Classes +============= + +DeepChem maintains an extensive collection of models for scientific +applications. DeepChem's focus is on facilitating scientific applications, so +we support a broad range of different machine learning frameworks (currently +scikit-learn, xgboost, TensorFlow, and PyTorch) since different frameworks are +more and less suited for different scientific applications. + +Model Cheatsheet +---------------- +If you're just getting started with DeepChem, you're probably interested in the +basics. The place to get started is this "model cheatsheet" that lists various +types of custom DeepChem models. Note that some wrappers like :code:`SklearnModel` +and :code:`GBDTModel` which wrap external machine learning libraries are excluded, +but this table is otherwise complete. + +As a note about how to read this table, each row describes what's needed to +invoke a given model. Some models must be applied with given :code:`Transformer` or +:code:`Featurizer` objects. Some models also have custom training methods. You can +read off what's needed to train the model from the table below. 
+ + ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| Model | Type | Input Type | Transformations | Acceptable Featurizers | Fit Method | ++========================================+============+======================+========================+================================================================+======================+ +| :code:`AtomicConvModel` | Classifier/| Tuple | | :code:`ComplexNeighborListFragmentAtomicCoordinates` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`ChemCeption` | Classifier/| Tensor of shape | | :code:`SmilesToImage` | :code:`fit` | +| | Regressor | :code:`(N, M, c)` | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`CNN` | Classifier/| Tensor of shape | | | :code:`fit` | +| | Regressor | :code:`(N, c)` or | | | | +| | | :code:`(N, M, c)` or | | | | +| | | :code:`(N, M, L, c)` | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`DTNNModel` | Classifier/| Matrix of | | :code:`CoulombMatrix` | :code:`fit` | +| | Regressor | shape :code:`(N, N)` | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`DAGModel` | Classifier/| :code:`ConvMol` | :code:`DAGTransformer` | :code:`ConvMolFeaturizer` | :code:`fit` | +| | Regressor | | | | | 
++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`GraphConvModel` | Classifier/| :code:`ConvMol` | | :code:`ConvMolFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`MPNNModel` | Classifier/| :code:`WeaveMol` | | :code:`WeaveFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`MultitaskClassifier` | Classifier | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`MultitaskRegressor` | Regressor | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`MultitaskFitTransformRegressor` | Regressor | Vector of | Any | 
:code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`MultitaskIRVClassifier` | Classifier | Vector of | :code:`IRVTransformer` | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`ProgressiveMultitaskClassifier` | Classifier | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`ProgressiveMultitaskRegressor` | Regressor | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | 
++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`RobustMultitaskClassifier` | Classifier | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`RobustMultitaskRegressor` | Regressor | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`ScScoreModel` | Classifier | Vector of | | :code:`CircularFingerprint`, | :code:`fit` | +| | | shape :code:`(N,)` | | :code:`RDKitDescriptors`, | | +| | | | | :code:`CoulombMatrixEig`, | | +| | | | | :code:`RdkitGridFeaturizer`, | | +| | | | | :code:`BindingPocketFeaturizer`, | | +| | | | | :code:`ElementPropertyFingerprint`, | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`SeqToSeq` | Sequence | Sequence | | | :code:`fit_sequences`| 
++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`Smiles2Vec` | Classifier/| Sequence | | :code:`SmilesToSeq` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`TextCNNModel` | Classifier/| String | | | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`WGAN` | Adversarial| Pair | | | :code:`fit_gan` | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`CGCNNModel` | Classifier/| :code:`GraphData` | | :code:`CGCNNFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`GATModel` | Classifier/| :code:`GraphData` | | :code:`MolGraphConvFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| :code:`GCNModel` | Classifier/| :code:`GraphData` | | :code:`MolGraphConvFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ +| 
:code:`AttentiveFPModel` | Classifier/| :code:`GraphData` | | :code:`MolGraphConvFeaturizer` | :code:`fit` | +| | Regressor | | | | | ++----------------------------------------+------------+----------------------+------------------------+----------------------------------------------------------------+----------------------+ + +Model +----- + +.. autoclass:: deepchem.models.Model + :members: + +Scikit-Learn Models +=================== + +Scikit-learn's models can be wrapped so that they can interact conveniently +with DeepChem. Oftentimes scikit-learn models are more robust and easier to +train and are a nice first model to train. + +SklearnModel +------------ + +.. autoclass:: deepchem.models.SklearnModel + :members: + +Gradient Boosting Models +======================== + +Gradient Boosting Models (LightGBM and XGBoost) can be wrapped so they can interact with DeepChem. + +GBDTModel +------------ + +.. autoclass:: deepchem.models.GBDTModel + :members: + + +Deep Learning Infrastructure +============================ + +DeepChem maintains a lightweight layer of common deep learning model +infrastructure that can be used for models built with different underlying +frameworks. The losses and optimizers can be used for both TensorFlow and +PyTorch models. + +Losses +------ + +.. autoclass:: deepchem.models.losses.Loss + :members: + +.. autoclass:: deepchem.models.losses.L1Loss + :members: + +.. autoclass:: deepchem.models.losses.L2Loss + :members: + +.. autoclass:: deepchem.models.losses.HingeLoss + :members: + +.. autoclass:: deepchem.models.losses.BinaryCrossEntropy + :members: + +.. autoclass:: deepchem.models.losses.CategoricalCrossEntropy + :members: + +.. autoclass:: deepchem.models.losses.SigmoidCrossEntropy + :members: + +.. autoclass:: deepchem.models.losses.SoftmaxCrossEntropy + :members: + +.. autoclass:: deepchem.models.losses.SparseSoftmaxCrossEntropy + :members: + +.. autoclass:: deepchem.models.losses.VAE_ELBO + :members: + +.. 
autoclass:: deepchem.models.losses.VAE_KLDivergence + :members: + +.. autoclass:: deepchem.models.losses.ShannonEntropy + :members: + +Optimizers +---------- + +.. autoclass:: deepchem.models.optimizers.Optimizer + :members: + +.. autoclass:: deepchem.models.optimizers.LearningRateSchedule + :members: + +.. autoclass:: deepchem.models.optimizers.AdaGrad + :members: + +.. autoclass:: deepchem.models.optimizers.Adam + :members: + +.. autoclass:: deepchem.models.optimizers.RMSProp + :members: + +.. autoclass:: deepchem.models.optimizers.GradientDescent + :members: + +.. autoclass:: deepchem.models.optimizers.ExponentialDecay + :members: + +.. autoclass:: deepchem.models.optimizers.PolynomialDecay + :members: + +.. autoclass:: deepchem.models.optimizers.LinearCosineDecay + :members: + + +Keras Models +============ + +DeepChem extensively uses `Keras`_ to build deep learning models. + + +KerasModel +---------- + +Training loss and validation metrics can be automatically logged to `Weights & Biases`_ with the following commands:: + + # Install wandb in shell + pip install wandb + + # Login in shell (required only once) + wandb login + + # Start a W&B run in your script (refer to docs for optional parameters) + wandb.init(project="my project") + + # Set `wandb` arg when creating `KerasModel` + model = KerasModel(…, wandb=True) + +.. _`Keras`: https://keras.io/ + +.. _`Weights & Biases`: http://docs.wandb.com/ + +.. autoclass:: deepchem.models.KerasModel + :members: + +MultitaskRegressor +------------------ + +.. autoclass:: deepchem.models.MultitaskRegressor + :members: + +MultitaskFitTransformRegressor +------------------------------ + +.. autoclass:: deepchem.models.MultitaskFitTransformRegressor + :members: + +MultitaskClassifier +------------------- + +.. autoclass:: deepchem.models.MultitaskClassifier + :members: + +TensorflowMultitaskIRVClassifier +-------------------------------- + +.. 
autoclass:: deepchem.models.TensorflowMultitaskIRVClassifier + :members: + +RobustMultitaskClassifier +------------------------- + +.. autoclass:: deepchem.models.RobustMultitaskClassifier + :members: + +RobustMultitaskRegressor +------------------------ + +.. autoclass:: deepchem.models.RobustMultitaskRegressor + :members: + +ProgressiveMultitaskClassifier +------------------------------ + +.. autoclass:: deepchem.models.ProgressiveMultitaskClassifier + :members: + +ProgressiveMultitaskRegressor +----------------------------- + +.. autoclass:: deepchem.models.ProgressiveMultitaskRegressor + :members: + +WeaveModel +---------- + +.. autoclass:: deepchem.models.WeaveModel + :members: + +DTNNModel +--------- + +.. autoclass:: deepchem.models.DTNNModel + :members: + +DAGModel +-------- + +.. autoclass:: deepchem.models.DAGModel + :members: + +GraphConvModel +-------------- + +.. autoclass:: deepchem.models.GraphConvModel + :members: + +MPNNModel +--------- + +.. autoclass:: deepchem.models.MPNNModel + :members: + +ScScoreModel +------------ + +.. autoclass:: deepchem.models.ScScoreModel + :members: + +SeqToSeq +-------- + +.. autoclass:: deepchem.models.SeqToSeq + :members: + +GAN +--- + +.. autoclass:: deepchem.models.GAN + :members: + +WGAN +^^^^ + +.. autoclass:: deepchem.models.WGAN + :members: + +CNN +--- + +.. autoclass:: deepchem.models.CNN + :members: + +TextCNNModel +------------ + +.. autoclass:: deepchem.models.TextCNNModel + :members: + + +AtomicConvModel +--------------- + +.. autoclass:: deepchem.models.AtomicConvModel + :members: + + +Smiles2Vec +---------- + +.. autoclass:: deepchem.models.Smiles2Vec + :members: + +ChemCeption +----------- + +.. autoclass:: deepchem.models.ChemCeption + :members: + +NormalizingFlowModel +-------------------- +The purpose of a normalizing flow is to map a simple distribution (that is +easy to sample from and evaluate probability densities for) to a more +complex distribution that is learned from data. 
Normalizing flows combine the +advantages of autoregressive models (which provide likelihood estimation +but do not learn features) and variational autoencoders (which learn feature +representations but do not provide marginal likelihoods). They are effective +for any application requiring a probabilistic model with these capabilities, e.g. generative modeling, unsupervised learning, or probabilistic inference. + +.. autoclass:: deepchem.models.normalizing_flows.NormalizingFlowModel + :members: + + +PyTorch Models +============== + +DeepChem supports the use of `PyTorch`_ to build deep learning models. + +.. _`PyTorch`: https://pytorch.org/ + +TorchModel +---------- + +You can wrap an arbitrary :code:`torch.nn.Module` in a :code:`TorchModel` object. + +.. autoclass:: deepchem.models.TorchModel + :members: + +CGCNNModel +---------- + +.. autoclass:: deepchem.models.CGCNNModel + :members: + + +GATModel +-------- + +.. autoclass:: deepchem.models.GATModel + :members: + +GCNModel +-------- + +.. autoclass:: deepchem.models.GCNModel + :members: + +AttentiveFPModel +---------------- + +.. autoclass:: deepchem.models.AttentiveFPModel + :members: + +MPNNModel +--------- + +Note that this is an alternative implementation for MPNN and currently you can only import it from +``deepchem.models.torch_models``. + +.. autoclass:: deepchem.models.torch_models.MPNNModel + :members: diff --git a/docs/source/api_reference/moleculenet.rst b/docs/source/api_reference/moleculenet.rst new file mode 100644 index 0000000000000000000000000000000000000000..4de8c6bfa9ee5b0e36a398c8bf16485df76d1f41 --- /dev/null +++ b/docs/source/api_reference/moleculenet.rst @@ -0,0 +1,235 @@ +MoleculeNet +=========== +The DeepChem library is packaged alongside the MoleculeNet suite of datasets. +One of the most important parts of machine learning applications is finding a suitable dataset. 
+The MoleculeNet suite has curated a whole range of datasets and loaded them into DeepChem +:code:`dc.data.Dataset` objects for convenience. + +Contributing a new dataset to MoleculeNet +----------------------------------------- + +If you are proposing a new dataset to be included in the +MoleculeNet benchmarking suite, please follow the instructions below. +Please review the `datasets already available in MolNet`_ before contributing. + +0. Read the `Contribution guidelines`_. + +1. Open an `issue`_ to discuss the dataset you want to add to MolNet. + +2. Implement a function in the `deepchem.molnet.load_function`_ + module following the template function `deepchem.molnet.load_function.load_dataset_template`_. + Specify which featurizers, transformers, and splitters (available from + `deepchem.molnet.defaults`_) are supported for your dataset. + +3. Add your load function to `deepchem.molnet.__init__.py`_ for easy importing. + +4. Prepare your dataset as a .tar.gz or .zip file. Accepted filetypes include CSV, JSON, and SDF. + +5. Ask a member of the technical steering committee to add your .tar.gz or .zip file + to the DeepChem AWS bucket. Modify your load function to pull down the dataset from AWS. + +6. Submit a [WIP] PR (Work in progress pull request) following the PR `template`_. + + +BACE Dataset +------------ + +.. autofunction:: deepchem.molnet.load_bace_classification + +.. autofunction:: deepchem.molnet.load_bace_regression + +BBBC Datasets +------------- + +.. autofunction:: deepchem.molnet.load_bbbc001 + +.. autofunction:: deepchem.molnet.load_bbbc002 + +BBBP Datasets +------------- +BBBP stands for Blood-Brain-Barrier Penetration + +.. autofunction:: deepchem.molnet.load_bbbp + +Cell Counting Datasets +---------------------- + +.. autofunction:: deepchem.molnet.load_cell_counting + +Chembl Datasets +--------------- + +.. autofunction:: deepchem.molnet.load_chembl + +Chembl25 Datasets +----------------- + +.. 
autofunction:: deepchem.molnet.load_chembl25 + +Clearance Datasets +------------------ + +.. autofunction:: deepchem.molnet.load_clearance + +Clintox Datasets +---------------- + +.. autofunction:: deepchem.molnet.load_clintox + +Delaney Datasets +---------------- + +.. autofunction:: deepchem.molnet.load_delaney + +Factors Datasets +---------------- + +.. autofunction:: deepchem.molnet.load_factors + +HIV Datasets +------------ + +.. autofunction:: deepchem.molnet.load_hiv + +HOPV Datasets +------------- +HOPV stands for the Harvard Organic Photovoltaic Dataset. + +.. autofunction:: deepchem.molnet.load_hopv + +HPPB Datasets +------------- + +.. autofunction:: deepchem.molnet.load_hppb + + +KAGGLE Datasets +--------------- + +.. autofunction:: deepchem.molnet.load_kaggle + +Kinase Datasets +--------------- + +.. autofunction:: deepchem.molnet.load_kinase + + +Lipo Datasets +------------- + +.. autofunction:: deepchem.molnet.load_lipo + +Materials Datasets +------------------ +Materials datasets include inorganic crystal structures, chemical +compositions, and target properties like formation energies and band +gaps. Machine learning problems in materials science commonly include +predicting the value of a continuous (regression) or categorical +(classification) property of a material based on its chemical composition +or crystal structure. "Inverse design" is also of great interest, in which +ML methods generate crystal structures that have a desired property. +Other areas where ML is applicable in materials include: discovering new +or modified phenomenological models that describe material behavior + +.. autofunction:: deepchem.molnet.load_bandgap +.. autofunction:: deepchem.molnet.load_perovskite +.. autofunction:: deepchem.molnet.load_mp_formation_energy +.. autofunction:: deepchem.molnet.load_mp_metallicity + +MUV Datasets +------------ + +.. autofunction:: deepchem.molnet.load_muv + +NCI Datasets +------------ + +.. 
autofunction:: deepchem.molnet.load_nci + +PCBA Datasets +------------- + +.. autofunction:: deepchem.molnet.load_pcba + +PDBBIND Datasets +---------------- + +.. autofunction:: deepchem.molnet.load_pdbbind + +PPB Datasets +------------ + +.. autofunction:: deepchem.molnet.load_ppb + +QM7 Datasets +------------ + +.. autofunction:: deepchem.molnet.load_qm7 + +.. autofunction:: deepchem.molnet.load_qm7_from_mat + +.. autofunction:: deepchem.molnet.load_qm7b_from_mat + +QM8 Datasets +------------ + +.. autofunction:: deepchem.molnet.load_qm8 + +QM9 Datasets +------------ + +.. autofunction:: deepchem.molnet.load_qm9 + + +SAMPL Datasets +-------------- + +.. autofunction:: deepchem.molnet.load_sampl + + +SIDER Datasets +-------------- + +.. autofunction:: deepchem.molnet.load_sider + + +Thermosol Datasets +------------------ + +.. autofunction:: deepchem.molnet.load_thermosol + + +Tox21 Datasets +-------------- + +.. autofunction:: deepchem.molnet.load_tox21 + +Toxcast Datasets +---------------- + +.. autofunction:: deepchem.molnet.load_toxcast + +USPTO Datasets +-------------- + +.. autofunction:: deepchem.molnet.load_uspto + +UV Datasets +----------- + +.. autofunction:: deepchem.molnet.load_uv + + +.. _`datasets already available in MolNet`: http://moleculenet.ai/datasets-1 +.. _`Contribution guidelines`: https://github.com/deepchem/deepchem/blob/master/CONTRIBUTING.md +.. _`issue`: https://github.com/deepchem/deepchem/issues +.. _`deepchem.molnet.load_function`: https://github.com/deepchem/deepchem/tree/master/deepchem/molnet/load_function +.. _`deepchem.molnet.load_function.load_dataset_template`: https://github.com/deepchem/deepchem/blob/master/deepchem/molnet/load_function/load_dataset_template.py +.. _`deepchem.molnet.defaults`: https://github.com/deepchem/deepchem/tree/master/deepchem/molnet/defaults.py +.. _`deepchem.molnet.__init__.py`: https://github.com/deepchem/deepchem/blob/master/deepchem/molnet/__init__.py +.. 
_`template`: https://github.com/deepchem/deepchem/blob/master/.github/PULL_REQUEST_TEMPLATE/molnet_pr_template.md + +ZINC15 Datasets +--------------- + +.. autofunction:: deepchem.molnet.load_zinc15 diff --git a/docs/rl.rst b/docs/source/api_reference/rl.rst similarity index 100% rename from docs/rl.rst rename to docs/source/api_reference/rl.rst diff --git a/docs/splitters.rst b/docs/source/api_reference/splitters.rst similarity index 66% rename from docs/splitters.rst rename to docs/source/api_reference/splitters.rst index 2bb169cb5c5a44c77e67eb4c85d0cb0f80c55110..2f4c7f41c5f66535137299c7fef1939fe7bc34d0 100644 --- a/docs/splitters.rst +++ b/docs/source/api_reference/splitters.rst @@ -4,7 +4,7 @@ DeepChem :code:`dc.splits.Splitter` objects are a tool to meaningfully split DeepChem datasets for machine learning testing. The core idea is that when evaluating a machine learning model, it's useful to creating training, validation and test splits of your source data. The training -split is used to train models, the validatation is used to benchmark +split is used to train models, the validation is used to benchmark different model architectures. The test is ideally held out till the very end when it's used to gauge a final estimate of the model's performance. @@ -15,95 +15,113 @@ learning models more rigorously than standard deep models since we're looking for the ability to generalize to new domains. Some of the implemented splitters here may help. -Splitter --------- -The :code:`dc.splits.Splitter` class is the abstract parent class for -all splitters. This class should never be directly instantiated. +.. contents:: Contents + :local: -.. autoclass:: deepchem.splits.Splitter - :members: +General Splitters +----------------- RandomSplitter --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.splits.RandomSplitter :members: + :inherited-members: + :exclude-members: __init__ -IndexSplitter -------------- -.. 
autoclass:: deepchem.splits.IndexSplitter +RandomGroupSplitter +^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.splits.RandomGroupSplitter :members: + :inherited-members: -IndiceSplitter --------------- +RandomStratifiedSplitter +^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.splits.IndiceSplitter +.. autoclass:: deepchem.splits.RandomStratifiedSplitter :members: + :inherited-members: + :exclude-members: __init__ -SpecifiedSplitter ------------------ +SingletaskStratifiedSplitter +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.splits.SpecifiedSplitter +.. autoclass:: deepchem.splits.SingletaskStratifiedSplitter :members: + :inherited-members: -SpecifiedIndexSplitter ----------------------- +IndexSplitter +^^^^^^^^^^^^^ -.. autoclass:: deepchem.splits.SpecifiedIndexSplitter +.. autoclass:: deepchem.splits.IndexSplitter :members: + :inherited-members: + :exclude-members: __init__ +SpecifiedSplitter +^^^^^^^^^^^^^^^^^ -RandomGroupSplitter -------------------- - -.. autoclass:: deepchem.splits.RandomGroupSplitter +.. autoclass:: deepchem.splits.SpecifiedSplitter :members: + :inherited-members: -RandomStratifiedSplitter -------------------- +TaskSplitter +^^^^^^^^^^^^ -.. autoclass:: deepchem.splits.RandomStratifiedSplitter +.. autoclass:: deepchem.splits.TaskSplitter :members: + :inherited-members: -SingletaskStratifiedSplitter ----------------------------- -.. autoclass:: deepchem.splits.SingletaskStratifiedSplitter +Molecule Splitters +------------------ + +ScaffoldSplitter +^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.splits.ScaffoldSplitter :members: + :inherited-members: + :exclude-members: __init__ MolecularWeightSplitter ------------------------ +^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.splits.MolecularWeightSplitter :members: + :inherited-members: + :exclude-members: __init__ MaxMinSplitter --------------- +^^^^^^^^^^^^^^ .. 
autoclass:: deepchem.splits.MaxMinSplitter :members: + :inherited-members: + :exclude-members: __init__ ButinaSplitter --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.splits.ButinaSplitter :members: + :inherited-members: -ScaffoldSplitter ----------------- - -.. autoclass:: deepchem.splits.ScaffoldSplitter - :members: - -FingeprintSplitter ----------------- +FingerprintSplitter +^^^^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.splits.FingerprintSplitter :members: + :inherited-members: + :exclude-members: __init__ -TimeSplitterPDBbind -------------------- +Base Splitter (for develop) +---------------------------- -.. autoclass:: deepchem.splits.TimeSplitterPDBbind +The :code:`dc.splits.Splitter` class is the abstract parent class for +all splitters. This class should never be directly instantiated. + +.. autoclass:: deepchem.splits.Splitter :members: diff --git a/docs/source/api_reference/tokenizers.rst b/docs/source/api_reference/tokenizers.rst new file mode 100644 index 0000000000000000000000000000000000000000..47f0e1ed96a4537324b40417937261b80ef5db39 --- /dev/null +++ b/docs/source/api_reference/tokenizers.rst @@ -0,0 +1,52 @@ +Tokenizers +=========== + +A tokenizer is in charge of preparing the inputs for a natural language processing model. For many scientific applications, it is possible to treat inputs as "words"/"sentences" and use NLP methods to make meaningful predictions. For example, SMILES strings or DNA sequences have grammatical structure and can be usefully modeled with NLP techniques. DeepChem provides some scientifically relevant tokenizers for use in different applications. These tokenizers are based on those from the Huggingface transformers library (which DeepChem tokenizers inherit from). 
+ +The base classes PreTrainedTokenizer and PreTrainedTokenizerFast implement the common methods for encoding string inputs in model inputs and instantiating/saving python tokenizers either from a local file or directory or from a pretrained tokenizer provided by the library (downloaded from HuggingFace’s AWS S3 repository). + +PreTrainedTokenizer `(transformers.PreTrainedTokenizer) `_ thus implements the main methods for using all the tokenizers: + +- Tokenizing (splitting strings in sub-word token strings), converting token strings to ids and back, and encoding/decoding (i.e. tokenizing + convert to integers), + +- Adding new tokens to the vocabulary in a way that is independent of the underlying structure (BPE, SentencePiece…), + +- Managing special tokens like mask, beginning-of-sentence, etc. tokens (adding them, assigning them to attributes in the tokenizer for easy access and making sure they are not split during tokenization) + +BatchEncoding holds the output of the tokenizer’s encoding methods (__call__, encode_plus and batch_encode_plus) and is derived from a Python dictionary. When the tokenizer is a pure python tokenizer, this class behaves just like a standard python dictionary and holds the various model inputs computed by these methods (input_ids, attention_mask…). + +For more details on the base tokenizers which the DeepChem tokenizers inherit from, please refer to the following: `HuggingFace tokenizers docs `_ + +Tokenization methods on string-based corpuses in the life sciences are becoming increasingly popular for NLP-based applications to chemistry and biology. One such example is ChemBERTa, a transformer for molecular property prediction. DeepChem offers a tutorial for utilizing ChemBERTa using an alternate tokenizer, a Byte-Piece Encoder, which can be found `here. `_ + +SmilesTokenizer +^^^^^^^^^^^^^^^ + +The :code:`dc.feat.SmilesTokenizer` module inherits from the BertTokenizer class in transformers. 
It runs a WordPiece tokenization algorithm over SMILES strings using the tokenization SMILES regex developed by Schwaller et al. + +The SmilesTokenizer employs an atom-wise tokenization strategy using the following Regex expression: :: + + SMI_REGEX_PATTERN = "(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=|#|-|\+|\\\\\/|:|~|@|\?|>|\*|\$|\%[0-9]{2}|[0-9])" + +To use, please install the transformers package using the following pip command: :: + + pip install transformers + +References: + +- `RXN Mapper: Unsupervised Attention-Guided Atom-Mapping `_ +- `Molecular Transformer: Unsupervised Attention-Guided Atom-Mapping `_ + +.. autoclass:: deepchem.feat.SmilesTokenizer + :members: + +BasicSmilesTokenizer +^^^^^^^^^^^^^^^^^^^^ + +The :code:`dc.feat.BasicSmilesTokenizer` module uses a regex tokenization pattern to tokenize SMILES strings. The regex is developed by Schwaller et al. The tokenizer is to be used on SMILES in cases where the user does not wish to rely on the transformers API. + +References: + +- `Molecular Transformer: Unsupervised Attention-Guided Atom-Mapping `_ + +.. autoclass:: deepchem.feat.BasicSmilesTokenizer + :members: diff --git a/docs/transformers.rst b/docs/source/api_reference/transformers.rst similarity index 68% rename from docs/transformers.rst rename to docs/source/api_reference/transformers.rst index 46414dd798809f86f1709cd7ef1dae7fd096b9b7..d1a49a89c1b1e01f87d37cef94cf478a9f4a0770 100644 --- a/docs/transformers.rst +++ b/docs/source/api_reference/transformers.rst @@ -8,101 +8,119 @@ distribution. Real data of course is wild and hard to control. What do you do if you have a crazy dataset and need to bring its statistics to heel? Fear not for you have :code:`Transformer` objects. -Transformer ------------ -The :code:`dc.trans.Transformer` class is the abstract parent class -for all transformers. This class should never be directly initialized, -but contains a number of useful method implementations. +.. contents:: Contents + :local: -.. 
autoclass:: deepchem.trans.Transformer - :members: +General Transformers +-------------------- -MinMaxTransformer ------------------ +NormalizationTransformer +^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.trans.MinMaxTransformer +.. autoclass:: deepchem.trans.NormalizationTransformer :members: + :inherited-members: -NormalizationTransformer ------------------------- +MinMaxTransformer +^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.trans.NormalizationTransformer +.. autoclass:: deepchem.trans.MinMaxTransformer :members: + :inherited-members: ClippingTransformer -------------------- +^^^^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.ClippingTransformer :members: + :inherited-members: LogTransformer --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.LogTransformer :members: + :inherited-members: + +CDFTransformer +^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.trans.CDFTransformer + :members: + :inherited-members: + +PowerTransformer +^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.trans.PowerTransformer + :members: + :inherited-members: BalancingTransformer --------------------- +^^^^^^^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.BalancingTransformer :members: + :inherited-members: -CDFTransformer --------------- +DuplicateBalancingTransformer +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.trans.CDFTransformer +.. autoclass:: deepchem.trans.DuplicateBalancingTransformer :members: + :inherited-members: -PowerTransformer ----------------- +ImageTransformer +^^^^^^^^^^^^^^^^ -.. autoclass:: deepchem.trans.PowerTransformer +.. autoclass:: deepchem.trans.ImageTransformer + :members: + :inherited-members: + +FeaturizationTransformer +^^^^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: deepchem.trans.FeaturizationTransformer :members: + :inherited-members: + +Specified Usecase Transformers +------------------------------ CoulombFitTransformer ---------------------- +^^^^^^^^^^^^^^^^^^^^^ .. 
autoclass:: deepchem.trans.CoulombFitTransformer :members: + :inherited-members: IRVTransformer --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.IRVTransformer :members: + :inherited-members: DAGTransformer --------------- - -.. autoclass:: deepchem.trans.DAGTransformer - :members: - -DAGTransformer --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.DAGTransformer :members: - -ImageTransformer ----------------- - -.. autoclass:: deepchem.trans.ImageTransformer - :members: + :inherited-members: ANITransformer --------------- +^^^^^^^^^^^^^^ .. autoclass:: deepchem.trans.ANITransformer :members: + :inherited-members: -FeaturizationTransformer ------------------------- +Base Transformer (for develop) +------------------------------- -.. autoclass:: deepchem.trans.FeaturizationTransformer - :members: - -DataTransforms --------------- +The :code:`dc.trans.Transformer` class is the abstract parent class +for all transformers. This class should never be directly initialized, +but contains a number of useful method implementations. -.. autoclass:: deepchem.trans.DataTransforms +.. autoclass:: deepchem.trans.Transformer :members: diff --git a/docs/source/api_reference/utils.rst b/docs/source/api_reference/utils.rst new file mode 100644 index 0000000000000000000000000000000000000000..8909b117ecf1cd665b68c4bf80202bd79f4dbc6d --- /dev/null +++ b/docs/source/api_reference/utils.rst @@ -0,0 +1,220 @@ +Utilities +========= +DeepChem has a broad collection of utility functions. Many of these +maybe be of independent interest to users since they deal with some +tricky aspects of processing scientific datatypes. + +Data Utilities +-------------- + +Array Utilities +^^^^^^^^^^^^^^^ + +.. autofunction:: deepchem.utils.data_utils.pad_array + +Data Directory +^^^^^^^^^^^^^^^ +The DeepChem data directory is where downloaded MoleculeNet datasets are stored. + +.. autofunction:: deepchem.utils.data_utils.get_data_dir + +URL Handling +^^^^^^^^^^^^ + +.. 
autofunction:: deepchem.utils.data_utils.download_url + +File Handling +^^^^^^^^^^^^^ + +.. autofunction:: deepchem.utils.data_utils.untargz_file + +.. autofunction:: deepchem.utils.data_utils.unzip_file + +.. autofunction:: deepchem.utils.data_utils.load_data + +.. autofunction:: deepchem.utils.data_utils.load_sdf_files + +.. autofunction:: deepchem.utils.data_utils.load_csv_files + +.. autofunction:: deepchem.utils.data_utils.load_json_files + +.. autofunction:: deepchem.utils.data_utils.load_pickle_files + +.. autofunction:: deepchem.utils.data_utils.load_from_disk + +.. autofunction:: deepchem.utils.data_utils.save_to_disk + +.. autofunction:: deepchem.utils.data_utils.load_dataset_from_disk + +.. autofunction:: deepchem.utils.data_utils.save_dataset_to_disk + +Molecular Utilities +------------------- + +.. autoclass:: deepchem.utils.conformers.ConformerGenerator + :members: + +.. autoclass:: deepchem.utils.rdkit_utils.MoleculeLoadException + :members: + +.. autofunction:: deepchem.utils.rdkit_utils.get_xyz_from_mol + +.. autofunction:: deepchem.utils.rdkit_utils.add_hydrogens_to_mol + +.. autofunction:: deepchem.utils.rdkit_utils.compute_charges + +.. autofunction:: deepchem.utils.rdkit_utils.load_molecule + +.. autofunction:: deepchem.utils.rdkit_utils.write_molecule + +Molecular Fragment Utilities +---------------------------- + +It's often convenient to manipulate subsets of a molecule. The :code:`MolecularFragment` class aids in such manipulations. + +.. autoclass:: deepchem.utils.fragment_utils.MolecularFragment + :members: + +.. autoclass:: deepchem.utils.fragment_utils.AtomShim + :members: + +.. autofunction:: deepchem.utils.fragment_utils.strip_hydrogens + +.. autofunction:: deepchem.utils.fragment_utils.merge_molecular_fragments + +.. autofunction:: deepchem.utils.fragment_utils.get_contact_atom_indices + +.. autofunction:: deepchem.utils.fragment_utils.reduce_molecular_complex_to_contacts + +Coordinate Box Utilities +------------------------ + +.. 
autoclass:: deepchem.utils.coordinate_box_utils.CoordinateBox + :members: + +.. autofunction:: deepchem.utils.coordinate_box_utils.intersect_interval + +.. autofunction:: deepchem.utils.coordinate_box_utils.union + +.. autofunction:: deepchem.utils.coordinate_box_utils.merge_overlapping_boxes + +.. autofunction:: deepchem.utils.coordinate_box_utils.get_face_boxes + +Evaluation Utils +---------------- + +.. autoclass:: deepchem.utils.evaluate.Evaluator + :members: + +.. autoclass:: deepchem.utils.evaluate.GeneratorEvaluator + :members: + +.. autofunction:: deepchem.utils.evaluate.relative_difference + + +Genomic Utilities +----------------- + +.. autofunction:: deepchem.utils.genomics_utils.seq_one_hot_encode + +.. autofunction:: deepchem.utils.genomics_utils.encode_bio_sequence + + +Geometry Utilities +------------------ + +.. autofunction:: deepchem.utils.geometry_utils.unit_vector + +.. autofunction:: deepchem.utils.geometry_utils.angle_between + +.. autofunction:: deepchem.utils.geometry_utils.generate_random_unit_vector + +.. autofunction:: deepchem.utils.geometry_utils.generate_random_rotation_matrix + +.. autofunction:: deepchem.utils.geometry_utils.is_angle_within_cutoff + +Hash Function Utilities +----------------------- + +.. autofunction:: deepchem.utils.hash_utils.hash_ecfp + +.. autofunction:: deepchem.utils.hash_utils.hash_ecfp_pair + +.. autofunction:: deepchem.utils.hash_utils.vectorize + +Voxel Utils +----------- + +.. autofunction:: deepchem.utils.voxel_utils.convert_atom_to_voxel + +.. autofunction:: deepchem.utils.voxel_utils.convert_atom_pair_to_voxel + +.. autofunction:: deepchem.utils.voxel_utils.voxelize + + +Graph Convolution Utilities +--------------------------- + +.. autofunction:: deepchem.utils.molecule_feature_utils.one_hot_encode + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_type_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.construct_hydrogen_bonding_info + +.. 
autofunction:: deepchem.utils.molecule_feature_utils.get_atom_hydrogen_bonding_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_is_in_aromatic_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_hybridization_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_total_num_Hs_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_chirality_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_formal_charge + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_partial_charge + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_atom_total_degree_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_bond_type_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_bond_is_in_same_ring_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_bond_is_conjugated_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_bond_stereo_one_hot + +.. autofunction:: deepchem.utils.molecule_feature_utils.get_bond_graph_distance_one_hot + + +Debug Utilities +--------------- + +Docking Utilities +----------------- + +These utilities assist in file preparation and processing for molecular +docking. + +.. autofunction:: deepchem.utils.vina_utils.write_vina_conf + +.. autofunction:: deepchem.utils.vina_utils.load_docked_ligands + +.. autofunction:: deepchem.utils.vina_utils.prepare_inputs + + +Print Threshold +^^^^^^^^^^^^^^^ + +The printing threshold controls how many dataset elements are printed +when :code:`dc.data.Dataset` objects are converted to strings or +represented in the IPython repl. + +.. autofunction:: deepchem.utils.debug_utils.get_print_threshold + +.. autofunction:: deepchem.utils.debug_utils.set_print_threshold + +.. autofunction:: deepchem.utils.debug_utils.get_max_print_size + +.. 
autofunction:: deepchem.utils.debug_utils.set_max_print_size diff --git a/docs/source/conf.py b/docs/source/conf.py index 525b54792b7e956d3814086283b91dfa311d30dc..cc67bc24cd3382cce012e97709c9a2466c3623ee 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,300 +1,131 @@ -# -*- coding: utf-8 -*- +# Configuration file for the Sphinx documentation builder. # -# deepchem documentation build configuration file, created by -# sphinx-quickstart on Tue Jan 19 17:37:50 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html -import sys -import os -import sphinx_bootstrap_theme +# -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath("../../deepchem")) -sys.path.insert(0, os.path.abspath("../sphinxext")) +# +import os +import sys +import inspect +sys.path.insert(0, os.path.abspath('../..')) + +import sphinx_rtd_theme # noqa +import deepchem # noqa + +# -- Project information ----------------------------------------------------- -# -- General configuration ------------------------------------------------ +project = 'deepchem' +copyright = '2020, deepchem-contributors' +author = 'deepchem-contributors' -# If your documentation needs a minimal Sphinx version, state it here. 
-# needs_sphinx = '1.0' +# The full version, including alpha/beta/rc tags +version = deepchem.__version__ +release = deepchem.__version__ + +# -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon' + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.doctest', + 'sphinx.ext.linkcode', + 'sphinx.ext.mathjax', + 'sphinx.ext.autosectionlabel', ] -autosummary_generate = True -autodoc_default_flags = ['members', 'inherited-members'] -numpydoc_class_members_toctree = False +# Options for autodoc directives +autodoc_default_options = { + 'member-order': 'bysource', + 'special-members': True, + 'exclude-members': '__repr__, __str__, __weakref__, __hash__, __eq__', +} + +# How to represents typehints. +autodoc_typehints = "signature" + +mathjax_path = 'http://mathjax.connectmv.com/MathJax.js?config=default' # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] +# The suffix of source filenames. source_suffix = '.rst' -# The encoding of source files. -# source_encoding = 'utf-8-sig' - # The master toctree document. master_doc = 'index' -# General information about the project. -project = u'deepchem' -copyright = u'2016, Stanford University and the Authors' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. 
-version = '1.3' -# The full version, including alpha/beta/rc tags. -release = '1.3.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' +# autosectionlabel setting +autosectionlabel_prefix_document = True # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build', '**.ipynb_checkpoints', '*tests*'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +add_module_names = False -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = False - -# -- Options for HTML output ---------------------------------------------- +# -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -import msmb_theme - -html_theme = 'bootstrap' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} -html_theme_options = { - 'source_link_position': "footer", - 'navbar_sidebarrel': False, - 'navbar_fixed_top': "false", - 'bootstrap_version': "3", - 'navbar_class': "navbar navbar-inverse", - 'navbar_links': [("Notebooks", "notebooks/index")], -} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -html_logo = '_static/logo.png' - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +html_theme = 'sphinx_rtd_theme' +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. 
These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'deepchemdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - - # Latex figure (float) alignment - # 'figure_align': 'htbp', +html_context = { + 'css_files': [ + '_static/theme_overrides.css', # override wide tables in RTD theme + ], } -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'deepchem.tex', u'deepchem Documentation', - u'Bharath Ramsundar, Evan Feinberg', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, 'deepchem', u'deepchem Documentation', - ["Stanford University"], 1)] - -# If true, show URL addresses after external links. -# man_show_urls = False +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +html_logo = '_static/logo.png' -# -- Options for Texinfo output ------------------------------------------- +# Customize the sphinx theme +html_theme_options = { + 'collapse_navigation': False, + 'display_version': True, +} -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'deepchem', u'deepchem Documentation', 'Stanford University', - 'deepchem', 'Deep-learning models for drug discovery.', 'Scientific'), -] +# -- Source code links --------------------------------------------------- -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] -# If false, no module index is generated. -# texinfo_domain_indices = True +# Resolve function for the linkcode extension. +def linkcode_resolve(domain, info): -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' + def find_source(): + # try to find the file and line number, based on code from numpy: + # https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L286 + obj = sys.modules[info['module']] + for part in info['fullname'].split('.'): + obj = getattr(obj, part) + fn = inspect.getsourcefile(obj) + fn = os.path.relpath(fn, start=os.path.dirname(deepchem.__file__)) + source, lineno = inspect.getsourcelines(obj) + return fn, lineno, lineno + len(source) - 1 -# If true, do not generate a @detailmenu in the "Top" node's menu. 
-# texinfo_no_detailmenu = False + if domain != 'py' or not info['module']: + return None + try: + filename = 'deepchem/%s#L%d-L%d' % find_source() + except Exception: + filename = info['module'].replace('.', '/') + '.py' -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} + tag = 'master' if 'dev' in release else ('v' + release) + return "https://github.com/deepchem/deepchem/blob/%s/%s" % (tag, filename) diff --git a/docs/source/development_guide/coding.rst b/docs/source/development_guide/coding.rst new file mode 100644 index 0000000000000000000000000000000000000000..85c26b9cdde7477cbeed81c53eb6f3fe6cec3304 --- /dev/null +++ b/docs/source/development_guide/coding.rst @@ -0,0 +1,123 @@ +Coding Conventions +================== + +Code Formatting +--------------- + +.. _`yapf`: https://github.com/google/yapf + +We use `yapf`_ to format all of the code in DeepChem. Although it sometimes +produces slightly awkward formatting, it does have two major benefits. First, +it ensures complete consistency throughout the entire codebase. And second, it +avoids disagreements about how a piece of code should be formatted. + +Whenever you modify a file, run :code:`yapf` on it to reformat it before +checking it in. + +.. code-block:: bash + + yapf -i + +Yapf is run on every pull request to make sure the formatting is correct, so if +you forget to do this the continuous integration system will remind you. +Because different versions of yapf can produce different results, it is +essential to use the same version that is being run on CI. At present, that +is 0.22. We periodically update it to newer versions. + + +Docstrings +---------- + +All classes and functions should include docstrings describing their purpose and +intended usage. When in doubt about how much information to include, always err +on the side of including more rather than less. 
Explain what problem a class is +intended to solve, what algorithms it uses, and how to use it correctly. When +appropriate, cite the relevant publications. + +.. _`numpy`: https://numpydoc.readthedocs.io/en/latest/format.html#docstring-standard + +All docstrings should follow the `numpy`_ docstring formatting conventions. + + +Unit Tests +---------- + +Having an extensive collection of test cases is essential to ensure the code +works correctly. If you haven't written tests for a feature, that means the +feature isn't finished yet. Untested code is code that probably doesn't work. + +Complex numerical code is sometimes challenging to fully test. When an +algorithm produces a result, it sometimes is not obvious how to tell whether the +result is correct or not. As far as possible, try to find simple examples for +which the correct answer is exactly known. Sometimes we rely on stochastic +tests which will *probably* pass if the code is correct and *probably* fail if +the code is broken. This means these tests are expected to fail a small +fraction of the time. Such tests can be marked with the :code:`@flaky` +annotation. If they fail during continuous integration, they will be run a +second time and an error only reported if they fail again. + +If possible, each test should run in no more than a few seconds. Occasionally +this is not possible. In that case, mark the test with the :code:`@pytest.mark.slow` +annotation. Slow tests are skipped during continuous integration, so changes +that break them may sometimes slip through and get merged into the repository. +We still try to run them regularly, so hopefully the problem will be discovered +fairly soon. + +Testing Machine Learning Models +------------------------------- + +Testing the correctness of a machine learning model can be quite +tricky to do in practice. 
When adding a new machine learning model to +DeepChem, you should add at least a few basic types of unit tests: + +- Overfitting test: Create a small synthetic dataset and test that + your model can learn this dataset with high accuracy. For regression + and classification tasks, this should correspond to low training error + on the dataset. For generative tasks, this should correspond to low + training loss on the dataset. + +- Reloading test: Check that a trained model can be saved to disk and + reloaded correctly. This should involve checking that predictions from + the saved and reloaded models match exactly. + +Note that unit tests are not sufficient to gauge the real performance +of a model. You should benchmark your model on larger datasets as well +and report your benchmarking tests in the PR comments. + +Type Annotations +---------------- + +Type annotations are an important tool for avoiding bugs. All new code should +provide type annotations for function arguments and return types. When you make +significant changes to existing code that does not have type annotations, please +consider adding them at the same time. + +.. _`mypy`: http://mypy-lang.org/ + +We use the `mypy`_ static type checker to verify code correctness. It is +automatically run on every pull request. If you want to run it locally to make +sure you are using types correctly before checking in your code, :code:`cd` to +the top level directory of the repository and execute the command + +.. code-block:: bash + +  mypy -p deepchem --ignore-missing-imports + +Because Python is such a dynamic language, it sometimes is not obvious what type +to specify. A good rule of thumb is to be permissive about input types and +strict about output types. For example, many functions are documented as taking +a list as an argument, but actually work just as well with a tuple. In those +cases, it is best to specify the input type as :code:`Sequence` to accept either +one.
But if a function returns a list, specify the type as :code:`List` because +we can guarantee the return value will always have that exact type. + +Another important case is NumPy arrays. Many functions are documented as taking +an array, but actually can accept any array-like object: a list of numbers, a +list of lists of numbers, a list of arrays, etc. In that case, specify the type +as :code:`Sequence` to accept any of these. On the other hand, if the function +truly requires an array and will fail with any other input, specify it as +:code:`np.ndarray`. + +The :code:`deepchem.utils.typing` module contains definitions of some types that +appear frequently in the DeepChem API. You may find them useful when annotating +code. diff --git a/docs/source/development_guide/infra.rst b/docs/source/development_guide/infra.rst new file mode 100644 index 0000000000000000000000000000000000000000..915b4cc165b619e5c2945ead72303b6bdce69661 --- /dev/null +++ b/docs/source/development_guide/infra.rst @@ -0,0 +1,99 @@ +Infrastructures +=============== + +The DeepChem project maintains supporting infrastructure on a number of +different services. This infrastructure is maintained by the DeepChem +development team. + +Github +------ +The core DeepChem repositories are maintained in the `deepchem`_ GitHub organization. + +.. _`deepchem`: https://github.com/deepchem + +DeepChem developers have write access to the repositories on this repo and +technical steering committee members have admin access. + +Travis CI +--------- +DeepChem runs continuous integration tests on `Travis CI`_. + +.. _`Travis CI`: https://travis-ci.org/github/deepchem + +Conda Forge +----------- +The DeepChem `feedstock`_ repo maintains the build recipe for Conda-Forge. + +.. _`feedstock`: https://github.com/conda-forge/deepchem-feedstock + + +Dockerhub +--------- +DeepChem hosts major releases and nightly docker build instances on `dockerhub`_. + +.. 
_`dockerhub`: https://hub.docker.com/r/deepchemio/deepchem + +PyPi +---- +DeepChem hosts major releases and nightly builds on `pypi`_. + +.. _`pypi`: https://pypi.org/project/deepchem/ + +Amazon Web Services +------------------- + +DeepChem's website infrastructure is all managed on AWS through different AWS +services. All DeepChem developers have access to these services through the +deepchem-developers IAM role. (An IAM role controls access permissions.) At +present, @rbharath is the only developer with access to the IAM role, but +longer term we should migrate this so other folks have access to the roles. + +S3 +^^ + +Amazon's S3 allows for storage of data on "buckets" (Think of buckets like folders.) +There are two core deepchem S3 buckets: + + - deepchemdata: This bucket hosts the deepchem.io website, MoleculeNet datasets, pre-featurized datasets, + and pretrained models. This bucket is set up to host a static website (at `static`_). + + - deepchemforum: This bucket hosts backups for the forums. The bucket is private for security reasons. + The forums themselves are hosted on a digital ocean instance that only @rbharath currently has access to. + Longer term, we should migrate the forums onto AWS so all DeepChem developers can access the forums. + The forums themselves are a discord instance. The forums upload their backups to this S3 bucket once a day. + If the forums crash, they can be restored from the backups in this bucket + + +.. _`static`: https://deepchemdata.s3-us-west-1.amazonaws.com/index.html + +Route 53 +^^^^^^^^ +DNS for the deepchem.io website is handled by Route 53. The "hosted zone" +deepchem.io holds all DNS information for the website. + +Certificate Manager +^^^^^^^^^^^^^^^^^^^ +The AWS certificate manager issues the SSL/TLS certificate for the +\*.deepchem.io and deepchem.io domains. + + +Cloudfront +^^^^^^^^^^ +We make use of a cloudfront distribution to serve our static website. 
The +cloudfront distribution connects to the certificate in Certificate Manager and +uses the deepchemdata bucket as the origin domain. We set CNAME for +www.deepchem.io and deepchem.io + +GoDaddy +------- +The deepchem.io domain is registered with GoDaddy. If you change the name +servers in AWS Route 53, you will need to update the GoDaddy record. At +present, only @rbharath has access to the GoDaddy account that owns the +deepchem.io domain name. We should explore how to provide access to the domain +name for other DeepChem developers. + +Digital Ocean +------------- +The forums are hosted on a digital ocean instance. At present, only @rbharath +has access to this instance. We should migrate this instance onto AWS so other +DeepChem developers can help maintain the forums. diff --git a/docs/source/development_guide/licence.rst b/docs/source/development_guide/licence.rst new file mode 100644 index 0000000000000000000000000000000000000000..7718e713be766113ecbbec11d0bce5b6cf2219c0 --- /dev/null +++ b/docs/source/development_guide/licence.rst @@ -0,0 +1,20 @@ +Licensing and Commercial Uses +============================= + +DeepChem is licensed under the MIT License. We actively support +commercial users. Note that any novel molecules, materials, or other +discoveries powered by DeepChem belong entirely to the user and not to +DeepChem developers. + +That said, we would very much appreciate a citation if you find our tools useful. +You can cite DeepChem with the following reference. + +.. 
code-block:: + + @book{Ramsundar-et-al-2019, + title={Deep Learning for the Life Sciences}, + author={Bharath Ramsundar and Peter Eastman and Patrick Walters and Vijay Pande and Karl Leswing and Zhenqin Wu}, + publisher={O'Reilly Media}, + note={\url{https://www.amazon.com/Deep-Learning-Life-Sciences-Microscopy/dp/1492039837}}, + year={2019} + } diff --git a/docs/source/development_guide/scientists.rst b/docs/source/development_guide/scientists.rst new file mode 100644 index 0000000000000000000000000000000000000000..1163e24935a04377f4074d9df0421de8daaac106 --- /dev/null +++ b/docs/source/development_guide/scientists.rst @@ -0,0 +1,161 @@ +Contributing to DeepChem as a Scientist +======================================= + +The scientific community in many ways is quite traditional. +Students typically learn in apprenticeship from advisors who +teach a small number of students directly. This system has endured +for centuries and allows for expert scientists to teach their ways of +thinking to new students. + +For more context, most scientific research today is done in "labs" +run in this mostly traditional fashion. A principal investigator (PI) +will run the lab and work with undergraduate, graduate, and +postdoctoral students who produce research papers. Labs are funded by +"grants," typically from governments and philanthropic agencies. +Papers and citations are the critical currencies of this system, and a +strong publication record is necessary for any scientist to establish +themselves. + +This traditional model can find it difficult to fund the development +of high quality software for a few reasons. First, students are in a +lab for limited periods of time (3-5 years often). This means there's +high turnover, and critical knowledge can be lost when a student moves +on. Second, grants for software are still new and not broadly +available. A lab might very reasonably choose to focus on scientific +discovery rather than on necessary software engineering.
(Although, +it's worth noting there are many exceptions that prove the rule! +DeepChem was born in an academic lab like many other quality +projects.) + +We believe that contributing to and using DeepChem can be highly +valuable for scientific careers. DeepChem can help maintain new +scientific algorithms for the long term, making sure that your +discoveries continue to be used after students graduate. We've seen +too many brilliant projects flounder after students move on, and we'd +like to help you make sure that your algorithms have the most impact. + +Scientist FAQ +------------- + +.. contents:: Contents + :local: + +Wouldn't it be better for my career to make my own package rather than use DeepChem? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The answer to this really depends on what you're looking for out of +your career! Making and maintaining good software is hard. It requires +careful testing and continued maintenance. Your code will bitrot over +time without attention. If your focus is on new inventions and you +find software engineering less compelling, working with DeepChem may +enable you to go further in your career by letting you focus on new +algorithms and leveraging the DeepChem Project's infrastructure to +maintain your inventions. + +In addition, you may find considerable inspiration from participating +in the DeepChem community. Looking at how other scientists solve +problems, and connecting with new collaborators across the world can +help you look at problems in a new way. Longtime DeepChem contributors +find that they often end up writing papers together! + +All that said, there may be very solid reasons for you to build your +own project! Especially if you want to explore designs that we haven't +or can't easily. In that case, we'd still love to collaborate with +you. 
DeepChem depends on a broad constellation of scientific packages +and we'd love to make your package's features accessible to our users. + +Is there a DeepChem PI? +^^^^^^^^^^^^^^^^^^^^^^^ +While DeepChem was born in the Pande lab at Stanford, +the project now lives as a "decentralized research organization." +It would be more accurate to say that there are informally multiple "DeepChem PIs," +who use it in their work. You too can be a DeepChem PI! + +Do I need to add DeepChem team members as co-authors to my paper? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Our suggestion is to use good judgment and usual scientific etiquette. +If a particular DeepChem team member has contributed a lot to your effort, +adding them might make sense. If no one person has contributed sufficiently, +an acknowledgment or citation would be great! + +I want to establish my scientific niche. How can I do that as a DeepChem contributor? Won't my contribution be lost in the noise? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +It's critically important for a new scientist to establish themselves and +their contributions in order to launch a scientific career. We believe that +DeepChem can help you do this! If you add a significant set of new features to DeepChem, +it might be appropriate for you to write a paper (as lead or corresponding author or however makes sense) +that introduces the new feature and your contribution. + +As a decentralized research organization, we want to help you launch +your careers. We're very open to other collaboration structures that +work for your career needs. + +I'm an aspiring scientist, not part of a lab. Can I join DeepChem? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Yes! DeepChem's core mission is to democratize the use of deep learning for the sciences. +This means no barriers, no walls. 
Anyone is welcome to join and contribute. +Join our developer calls, chat one-on-one with our scientists, +many of whom are glad to work with new students. You may form connections that +help you join a more traditional lab, or you may choose to form your own path. +We're glad to support either. + + +Is there DeepChem Grant Money? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Not yet, but we're actively looking into getting grants to support DeepChem researchers. +If you're a PI who wants to collaborate with us, please get in touch! + + +I'm an industry researcher. Can I participate too? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Yes! The most powerful features of DeepChem is its community. +Becoming part of the DeepChem project can let you build a network that lasts across jobs and roles. +Lifelong employment at a corporation is less and less common. Joining our community will +let you build bonds that cross jobs and could help you do your job today better too! + +What about intellectual property? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +One of the core goals for DeepChem is to build a shared set of +scientific resources and techniques that aren't locked up by patents. +Our hope is to enable your company or organization to leverage +techniques with less worry about patent infringement. + +We ask in return that you act as a responsible community member +and put in as much as you get out. If you find DeepChem very +valuable, please consider contributing back some innovations or +improvements so others can benefit. If you're getting a patent on your +invention, try to make sure that you don't infringe on anything in +DeepChem. Lots of things sneak past patent review. As an open source +community, we don't have the resources to actively defend ourselves +and we rely on your good judgment and help! + +If I use DeepChem on my organization's data, do I have to release the data? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Not at all! 
DeepChem is released with a permissive MIT license. Any
+analyses you perform belong entirely to you. You are under no
+obligation to release your proprietary data or inventions.
+
+What if I want to release data? Can DeepChem help?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you are interested in open sourcing data, the DeepChem project
+maintains the
+[MoleculeNet](https://deepchem.readthedocs.io/en/latest/moleculenet.html)
+suite of datasets. Adding your dataset to MoleculeNet can be a
+powerful way to ensure that a broad community of users can access your
+released data in a convenient fashion. It's important to note that
+MoleculeNet provides programmatic access to data, which may not be
+appropriate for all types of data (especially for clinical or patient
+data which may be governed by regulations/laws). Open source
+datasets can be a powerful resource, but need to be handled with care.
+
+Is MoleculeNet just about molecules?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Not anymore! Any scientific datasets are welcome in MoleculeNet. At
+some point in the future, we may rename the effort to avoid confusion,
+but for now, we emphasize that non-molecular datasets are welcome too.
+
+Does MoleculeNet allow for releasing data under different licenses?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+MoleculeNet already supports datasets released under different
+licenses. We can work with you to use your license of choice.
diff --git a/docs/source/get_started/examples.rst b/docs/source/get_started/examples.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5b4866293340c61159eba953ec82dbbda0334057
--- /dev/null
+++ b/docs/source/get_started/examples.rst
@@ -0,0 +1,181 @@
+Examples
+========
+
+We show a bunch of examples for DeepChem in the doctest style. 
+ +- We match against doctest's :code:`...` wildcard on code where output is usually ignored +- We often use threshold assertions (e.g: :code:`score['mean-pearson_r2_score'] > 0.92`), + as this is what matters for model training code. + +.. contents:: Contents + :local: + +Before jumping in to examples, we'll import our libraries and ensure our doctests are reproducible: + +.. doctest:: * + + >>> import numpy as np + >>> import tensorflow as tf + >>> import deepchem as dc + >>> + >>> # Run before every test for reproducibility + >>> def seed_all(): + ... np.random.seed(123) + ... tf.random.set_seed(123) + +.. testsetup:: * + + import numpy as np + import tensorflow as tf + import deepchem as dc + + # Run before every test for reproducibility + def seed_all(): + np.random.seed(123) + tf.random.set_seed(123) + + +Delaney (ESOL) +---------------- + +Examples of training models on the Delaney (ESOL) dataset included in `MoleculeNet <./moleculenet.html>`_. + +We'll be using its :code:`smiles` field to train models to predict its experimentally measured solvation energy (:code:`expt`). + +MultitaskRegressor +^^^^^^^^^^^^^^^^^^ + +First, we'll load the dataset with :func:`load_delaney() ` and fit a :class:`MultitaskRegressor `: + +.. doctest:: delaney + + >>> seed_all() + >>> # Load dataset with default 'scaffold' splitting + >>> tasks, datasets, transformers = dc.molnet.load_delaney() + >>> tasks + ['measured log solubility in mols per litre'] + >>> train_dataset, valid_dataset, test_dataset = datasets + >>> + >>> # We want to know the pearson R squared score, averaged across tasks + >>> avg_pearson_r2 = dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean) + >>> + >>> # We'll train a multitask regressor (fully connected network) + >>> model = dc.models.MultitaskRegressor( + ... len(tasks), + ... n_features=1024, + ... layer_sizes=[500]) + >>> + >>> model.fit(train_dataset) + 0... 
+ >>> + >>> # We now evaluate our fitted model on our training and validation sets + >>> train_scores = model.evaluate(train_dataset, [avg_pearson_r2], transformers) + >>> assert train_scores['mean-pearson_r2_score'] > 0.7, train_scores + >>> + >>> valid_scores = model.evaluate(valid_dataset, [avg_pearson_r2], transformers) + >>> assert valid_scores['mean-pearson_r2_score'] > 0.3, valid_scores + + +GraphConvModel +^^^^^^^^^^^^^^ +The default `featurizer <./featurizers.html>`_ for Delaney is :code:`ECFP`, short for +`"Extended-connectivity fingerprints." <./featurizers.html#circularfingerprint>`_ +For a :class:`GraphConvModel `, we'll reload our datasets with :code:`featurizer='GraphConv'`: + +.. doctest:: delaney + + >>> seed_all() + >>> tasks, datasets, transformers = dc.molnet.load_delaney(featurizer='GraphConv') + >>> train_dataset, valid_dataset, test_dataset = datasets + >>> + >>> model = dc.models.GraphConvModel(len(tasks), mode='regression', dropout=0.5) + >>> + >>> model.fit(train_dataset, nb_epoch=30) + 0... + >>> + >>> # We now evaluate our fitted model on our training and validation sets + >>> train_scores = model.evaluate(train_dataset, [avg_pearson_r2], transformers) + >>> assert train_scores['mean-pearson_r2_score'] > 0.5, train_scores + >>> + >>> valid_scores = model.evaluate(valid_dataset, [avg_pearson_r2], transformers) + >>> assert valid_scores['mean-pearson_r2_score'] > 0.3, valid_scores + + +ChEMBL +------ + +Examples of training models on `ChEMBL`_ dataset included in MoleculeNet. + +ChEMBL is a manually curated database of bioactive molecules with drug-like properties. +It brings together chemical, bioactivity and genomic data to aid the translation +of genomic information into effective new drugs. + +.. _`ChEMBL`: https://www.ebi.ac.uk/chembl + +MultitaskRegressor +^^^^^^^^^^^^^^^^^^ + +.. 
doctest:: chembl + + >>> seed_all() + >>> # Load ChEMBL 5thresh dataset with random splitting + >>> chembl_tasks, datasets, transformers = dc.molnet.load_chembl( + ... shard_size=2000, featurizer="ECFP", set="5thresh", split="random") + >>> train_dataset, valid_dataset, test_dataset = datasets + >>> len(chembl_tasks) + 691 + >>> f'Compound train/valid/test split: {len(train_dataset)}/{len(valid_dataset)}/{len(test_dataset)}' + 'Compound train/valid/test split: 19096/2387/2388' + >>> + >>> # We want to know the RMS, averaged across tasks + >>> avg_rms = dc.metrics.Metric(dc.metrics.rms_score, np.mean) + >>> + >>> # Create our model + >>> n_layers = 3 + >>> model = dc.models.MultitaskRegressor( + ... len(chembl_tasks), + ... n_features=1024, + ... layer_sizes=[1000] * n_layers, + ... dropouts=[.25] * n_layers, + ... weight_init_stddevs=[.02] * n_layers, + ... bias_init_consts=[1.] * n_layers, + ... learning_rate=.0003, + ... weight_decay_penalty=.0001, + ... batch_size=100) + >>> + >>> model.fit(train_dataset, nb_epoch=5) + 0... + >>> + >>> # We now evaluate our fitted model on our training and validation sets + >>> train_scores = model.evaluate(train_dataset, [avg_rms], transformers) + >>> assert train_scores['mean-rms_score'] < 10.00 + >>> + >>> valid_scores = model.evaluate(valid_dataset, [avg_rms], transformers) + >>> assert valid_scores['mean-rms_score'] < 10.00 + +GraphConvModel +^^^^^^^^^^^^^^ + +.. doctest:: chembl + + >>> # Load ChEMBL dataset + >>> chembl_tasks, datasets, transformers = dc.molnet.load_chembl( + ... shard_size=2000, featurizer="GraphConv", set="5thresh", split="random") + >>> train_dataset, valid_dataset, test_dataset = datasets + >>> + >>> # RMS, averaged across tasks + >>> avg_rms = dc.metrics.Metric(dc.metrics.rms_score, np.mean) + >>> + >>> model = dc.models.GraphConvModel( + ... len(chembl_tasks), batch_size=128, mode='regression') + >>> + >>> # Fit trained model + >>> model.fit(train_dataset, nb_epoch=5) + 0... 
+ >>> + >>> # We now evaluate our fitted model on our training and validation sets + >>> train_scores = model.evaluate(train_dataset, [avg_rms], transformers) + >>> assert train_scores['mean-rms_score'] < 10.00 + >>> + >>> valid_scores = model.evaluate(valid_dataset, [avg_rms], transformers) + >>> assert valid_scores['mean-rms_score'] < 10.00 diff --git a/docs/installation.rst b/docs/source/get_started/installation.rst similarity index 56% rename from docs/installation.rst rename to docs/source/get_started/installation.rst index 755c9ea2e41c845478f65b6d276c5dbd25414d58..c5d84059d866ac68d53c76159bc179eff3c995f4 100644 --- a/docs/installation.rst +++ b/docs/source/get_started/installation.rst @@ -1,15 +1,13 @@ -Installing DeepChem -=================== +Installation +============ -Google Colab ------------- +Stable version +-------------- -The fastest way to get up and running with DeepChem is to run it on -Google Colab. Check out one of the `DeepChem Tutorials`_ or this -`forum post`_ for Colab quick start guides. +**Caution!! : The latest stable version was published nearly a year ago. +If you are a pip user or you face some errors, we recommend +the nightly build version.** -Conda Installation ------------------- If you'd like to install DeepChem locally, we recommend using :code:`conda` and installing RDKit with deepchem. RDKit is a soft requirement package, but many useful methods like @@ -18,33 +16,50 @@ molnet depend on it. .. code-block:: bash pip install tensorflow-gpu==1.14 - conda install -y -c rdkit -c conda-forge rdkit deepchem + conda install -y -c conda-forge rdkit deepchem For CPU only support instead run .. code-block:: bash pip install tensorflow==1.14 - conda install -y -c rdkit -c conda-forge rdkit deepchem + conda install -y -c conda-forge rdkit deepchem + + +Nightly build version +--------------------- + +You install the nightly build version via pip. +The nightly version is built by the HEAD of DeepChem. + +.. 
code-block:: bash
+
+   pip install tensorflow==2.3.0
+   pip install --pre deepchem
 
-Then open your python and try running.
-.. code-block:: python
+RDKit is a soft requirement package, but many useful methods
+like molnet depend on it. We recommend installing RDKit
+with deepchem if you use conda.
 
-  import deepchem
+.. code-block:: bash
 
+  conda install -y -c conda-forge rdkit
 
-Pip Installation
----------------------------
-We are working on improving our pip installation
-capabilities. We'll update our docs once we have more information on
-how to do this well.
+Google Colab
+------------
 
-Docker Installation
-------------------
+The fastest way to get up and running with DeepChem is to run it on
+Google Colab. Check out one of the `DeepChem Tutorials`_ or this
+`forum post`_ for Colab quick start guides.
 
-If you want to install using a docker, you can pull two kinds of images from `DockerHub`_.
+
+Docker
+------
+
+If you want to install using a docker,
+you can pull two kinds of images from `DockerHub`_.
 
 - **deepchemio/deepchem:x.x.x**
 
@@ -102,14 +117,57 @@ If you want to check the tox21 benchmark:
 
    (deepchem) root@xxxxxxxxxxxxx:~/mydir# python benchmark.py -d tox21 -m graphconv -s random
 
-Installing from Source
-----------------------
+From Source
+-----------
+
+You can install deepchem in a new conda environment using the conda
+commands in :code:`scripts/install_deepchem_conda.sh`. Installing via this
+script will ensure that you are **installing from the source**.
+The following script requires **conda>=4.4** because it uses the
+:code:`conda activate` command.
+
+First, please clone the deepchem repository from GitHub.
+
+.. code-block:: bash
+
+   git clone https://github.com/deepchem/deepchem.git
+   cd deepchem
+
+
+Then, execute the shell script.
+
+.. code-block:: bash
+
+   bash scripts/install_deepchem_conda.sh cpu
+
+
+If you want GPU support (we support only CUDA 10.1):
+
+.. 
code-block:: bash + + bash scripts/install_deepchem_conda.sh gpu + + +If you are using the Windows and the PowerShell: + +.. code-block:: ps1 + + .\scripts\install_deepchem_conda.ps1 cpu + + +| Before activating deepchem environment, make sure conda has been initialized. +| Check if there is a :code:`(base)` in your command line. +| If not, use :code:`conda init ` to activate it, then: + +.. code-block:: bash + + conda activate deepchem + pip install -e . + pytest -m "not slow" deepchem # optional -Check out our directions on Github for how to `install from source`_. .. _`DeepChem Tutorials`: https://github.com/deepchem/deepchem/tree/master/examples/tutorials -.. _`forum post`: https://forum.deepchem.io/t/getting-deepchem-running-in-colab/81 +.. _`forum post`: https://forum.deepchem.io/t/getting-deepchem-running-in-colab/81/7 .. _`DockerHub`: https://hub.docker.com/repository/docker/deepchemio/deepchem .. _`docker/conda-forge`: https://github.com/deepchem/deepchem/tree/master/docker/conda-forge .. _`docker/master`: https://github.com/deepchem/deepchem/tree/master/docker/master -.. _`install from source`: https://github.com/deepchem/deepchem/blob/master/README.md#install-from-source diff --git a/docs/source/get_started/requirements.rst b/docs/source/get_started/requirements.rst new file mode 100644 index 0000000000000000000000000000000000000000..74e55e874c842f05d14159cd955909f9f6d28414 --- /dev/null +++ b/docs/source/get_started/requirements.rst @@ -0,0 +1,155 @@ +Requirements +------------ + +Hard requirements +^^^^^^^^^^^^^^^^^ + +DeepChem officially supports Python 3.6 through 3.7 and requires these packages on any condition. + +- `joblib`_ +- `NumPy`_ +- `pandas`_ +- `scikit-learn`_ +- `SciPy`_ +- `TensorFlow`_ + + - `deepchem>=2.4.0` requires tensorflow v2 (2.3.0) + - `deepchem<2.4.0` requires tensorflow v1 (>=1.14) + + +Soft requirements +^^^^^^^^^^^^^^^^^ + +DeepChem has a number of "soft" requirements. 
+ ++--------------------------------+---------------+---------------------------------------------------+ +| Package name | Version | Location where this package is imported | +| | | (dc: deepchem) | ++================================+===============+===================================================+ +| `BioPython`_ | latest | :code:`dc.utlis.genomics_utils` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Deep Graph Library`_ | latset | :code:`dc.feat.graph_data`, | +| | | :code:`dc.models.torch_models` | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `DGL-LifeSci`_ | latest | :code:`dc.models.torch_models` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `HuggingFace Transformers`_ | Not Testing | :code:`dc.feat.smiles_tokenizer` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `LightGBM`_ | latest | :code:`dc.models.gbdt_models` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `matminer`_ | latest | :code:`dc.feat.materials_featurizers` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `MDTraj`_ | latest | :code:`dc.utils.pdbqt_utils` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Mol2vec`_ | latest | :code:`dc.utils.molecule_featurizers` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Mordred`_ | latest | :code:`dc.utils.molecule_featurizers` | +| | | | +| | | | 
++--------------------------------+---------------+---------------------------------------------------+ +| `NetworkX`_ | latest | :code:`dc.utils.rdkit_utils` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `OpenAI Gym`_ | Not Testing | :code:`dc.rl` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `OpenMM`_ | latest | :code:`dc.utils.rdkit_utils` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `PDBFixer`_ | latest | :code:`dc.utils.rdkit_utils` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Pillow`_ | latest | :code:`dc.data.data_loader`, | +| | | :code:`dc.trans.transformers` | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `PubChemPy`_ | latest | :code:`dc.feat.molecule_featurizers` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `pyGPGO`_ | latest | :code:`dc.hyper.gaussian_process` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Pymatgen`_ | latest | :code:`dc.feat.materials_featurizers` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `PyTorch`_ | 1.6.0 | :code:`dc.data.datasets` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `PyTorch Geometric`_ | latest (with | :code:`dc.feat.graph_data` | +| | PyTorch 1.6.0)| :code:`dc.models.torch_models` | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `RDKit`_ | 
latest | Many modules | +| | | (we recommend you to instal) | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `simdna`_ | latest | :code:`dc.metrics.genomic_metrics`, | +| | | :code:`dc.molnet.dnasim` | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Tensorflow Probability`_ | 0.10.1 | :code:`dc.rl` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `Weights & Biases`_ | Not Testing | :code:`dc.models.keras_model`, | +| | | :code:`dc.models.callbacks` | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ +| `XGBoost`_ | latest | :code:`dc.models.gbdt_models` | +| | | | +| | | | ++--------------------------------+---------------+---------------------------------------------------+ + +.. _`joblib`: https://pypi.python.org/pypi/joblib +.. _`NumPy`: https://numpy.org/ +.. _`pandas`: http://pandas.pydata.org/ +.. _`scikit-learn`: https://scikit-learn.org/stable/ +.. _`SciPy`: https://www.scipy.org/ +.. _`TensorFlow`: https://www.tensorflow.org/ +.. _`BioPython`: https://biopython.org/wiki/Documentation +.. _`Deep Graph Library`: https://www.dgl.ai/ +.. _`DGL-LifeSci`: https://github.com/awslabs/dgl-lifesci +.. _`HuggingFace Transformers`: https://huggingface.co/transformers/ +.. _`LightGBM`: https://lightgbm.readthedocs.io/en/latest/index.html +.. _`matminer`: https://hackingmaterials.lbl.gov/matminer/ +.. _`MDTraj`: http://mdtraj.org/ +.. _`Mol2vec`: https://github.com/samoturk/mol2vec +.. _`Mordred`: http://mordred-descriptor.github.io/documentation/master/ +.. _`NetworkX`: https://networkx.github.io/documentation/stable/index.html +.. _`OpenAI Gym`: https://gym.openai.com/ +.. _`OpenMM`: http://openmm.org/ +.. _`PDBFixer`: https://github.com/pandegroup/pdbfixer +.. 
_`Pillow`: https://pypi.org/project/Pillow/ +.. _`PubChemPy`: https://pubchempy.readthedocs.io/en/latest/ +.. _`pyGPGO`: https://pygpgo.readthedocs.io/en/latest/ +.. _`Pymatgen`: https://pymatgen.org/ +.. _`PyTorch`: https://pytorch.org/ +.. _`PyTorch Geometric`: https://pytorch-geometric.readthedocs.io/en/latest/ +.. _`RDKit`: http://www.rdkit.org/docs/Install.html +.. _`simdna`: https://github.com/kundajelab/simdna +.. _`Tensorflow Probability`: https://www.tensorflow.org/probability +.. _`Weights & Biases`: https://docs.wandb.com/ +.. _`XGBoost`: https://xgboost.readthedocs.io/en/latest/ diff --git a/docs/source/get_started/tutorials.rst b/docs/source/get_started/tutorials.rst new file mode 100644 index 0000000000000000000000000000000000000000..0d0f1dba1c429553ccd9581102a4e4627a5c147d --- /dev/null +++ b/docs/source/get_started/tutorials.rst @@ -0,0 +1,199 @@ +Tutorials +========= + +If you're new to DeepChem, you probably want to know the basics. What is DeepChem? +Why should you care about using it? The short answer is that DeepChem is a scientific machine learning library. +(The "Chem" indicates the historical fact that DeepChem initially focused on chemical applications, +but we aim to support all types of scientific applications more broadly). + +Why would you want to use DeepChem instead of another machine learning +library? Simply put, DeepChem maintains an extensive collection of utilities +to enable scientific deep learning including classes for loading scientific +datasets, processing them, transforming them, splitting them up, and learning +from them. Behind the scenes DeepChem uses a variety of other machine +learning frameworks such as `scikit-learn`_, `TensorFlow`_, and `XGBoost`_. We are +also experimenting with adding additional models implemented in `PyTorch`_ +and `JAX`_. Our focus is to facilitate scientific experimentation using +whatever tools are available at hand. 
+
+In the rest of this tutorial, we'll provide a rapid fire overview of DeepChem's API.
+DeepChem is a big library so we won't cover everything, but we should give you enough to get started.
+
+.. contents:: Contents
+  :local:
+
+Data Handling
+-------------
+
+The :code:`dc.data` module contains utilities to handle :code:`Dataset`
+objects. These :code:`Dataset` objects are the heart of DeepChem.
+A :code:`Dataset` is an abstraction of a dataset in machine learning. That is,
+a collection of features, labels, weights, alongside associated identifiers.
+Rather than explaining further, we'll just show you.
+
+.. doctest::
+
+   >>> import deepchem as dc
+   >>> import numpy as np
+   >>> N_samples = 50
+   >>> n_features = 10
+   >>> X = np.random.rand(N_samples, n_features)
+   >>> y = np.random.rand(N_samples)
+   >>> dataset = dc.data.NumpyDataset(X, y)
+   >>> dataset.X.shape
+   (50, 10)
+   >>> dataset.y.shape
+   (50,)
+
+Here we've used the :code:`NumpyDataset` class which stores datasets in memory.
+This works fine for smaller datasets and is very convenient for experimentation,
+but is less convenient for larger datasets. For that we have the :code:`DiskDataset` class.
+
+.. doctest::
+
+   >>> dataset = dc.data.DiskDataset.from_numpy(X, y)
+   >>> dataset.X.shape
+   (50, 10)
+   >>> dataset.y.shape
+   (50,)
+
+In this example we haven't specified a data directory, so this :code:`DiskDataset` is written
+to a temporary folder. Note that :code:`dataset.X` and :code:`dataset.y` load data
+from disk under the hood! So this can get very expensive for larger datasets.
+
+
+Feature Engineering
+-------------------
+
+"Featurizer" is a chunk of code which transforms raw input data into a processed
+form suitable for machine learning. The :code:`dc.feat` module contains an extensive collection
+of featurizers for molecules, molecular complexes and inorganic crystals.
+We'll show you an example of how to use featurizers.
+
+.. doctest::
+
+   >>> smiles = [
+   ... 
'O=Cc1ccc(O)c(OC)c1', + ... 'CN1CCC[C@H]1c2cccnc2', + ... 'C1CCCCC1', + ... 'c1ccccc1', + ... 'CC(=O)O', + ... ] + >>> properties = [0.4, -1.5, 3.2, -0.2, 1.7] + >>> featurizer = dc.feat.CircularFingerprint(size=1024) + >>> ecfp = featurizer.featurize(smiles) + >>> ecfp.shape + (5, 1024) + >>> dataset = dc.data.NumpyDataset(X=ecfp, y=np.array(properties)) + >>> len(dataset) + 5 + +Here, we've used the :code:`CircularFingerprint` and converted SMILES to ECFP. +The ECFP is a fingerprint which is a bit vector made by chemical structure information +and we can use it as the input for various models. + +And then, you may have a CSV file which contains SMILES and property like HOMO-LUMO gap. +In such a case, by using :code:`DataLoader`, you can load and featurize your data at once. + +.. doctest:: + + >>> import pandas as pd + >>> # make a dataframe object for creating a CSV file + >>> df = pd.DataFrame(list(zip(smiles, properties)), columns=["SMILES", "property"]) + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(mode='w') as tmpfile: + ... # dump the CSV file + ... df.to_csv(tmpfile.name) + ... # initizalize the featurizer + ... featurizer = dc.feat.CircularFingerprint(size=1024) + ... # initizalize the dataloader + ... loader = dc.data.CSVLoader(["property"], feature_field="SMILES", featurizer=featurizer) + ... # load and featurize the data from the CSV file + ... dataset = loader.create_dataset(tmpfile.name) + ... len(dataset) + 5 + + +Data Splitting +-------------- + +The :code:`dc.splits` module contains a collection of scientifically aware splitters. +Generally, we need to split the original data to training, validation and test data +in order to tune the model and evaluate the model's performance. +We'll show you the example about the usage of splitters. + +.. 
doctest::
+
+   >>> splitter = dc.splits.RandomSplitter()
+   >>> # split 5 datapoints in the ratio of train:valid:test = 3:1:1
+   >>> train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(
+   ...   dataset=dataset, frac_train=0.6, frac_valid=0.2, frac_test=0.2
+   ... )
+   >>> len(train_dataset)
+   3
+   >>> len(valid_dataset)
+   1
+   >>> len(test_dataset)
+   1
+
+Here, we've used the :code:`RandomSplitter` and split the data randomly
+in the ratio of train:valid:test = 3:1:1. But, the random splitting sometimes
+overestimates the model's performance, especially for small or imbalanced data.
+Please be careful about model evaluation. The :code:`dc.splits` provides more methods
+and algorithms to evaluate the model's performance appropriately, like cross validation or
+splitting using molecular scaffolds.
+
+
+Model Training and Evaluating
+-----------------------------
+
+The :code:`dc.models` contains an extensive collection of models for scientific applications.
+Most models inherit :code:`dc.models.Model` and we can train them by just calling the :code:`fit` method.
+You don't need to care about how to use specific framework APIs.
+We'll show you an example of how to use models.
+
+.. doctest::
+
+   >>> from sklearn.ensemble import RandomForestRegressor
+   >>> rf = RandomForestRegressor()
+   >>> model = dc.models.SklearnModel(model=rf)
+   >>> # model training
+   >>> model.fit(train_dataset)
+   >>> valid_preds = model.predict(valid_dataset)
+   >>> valid_preds.shape
+   (1,)
+   >>> test_preds = model.predict(test_dataset)
+   >>> test_preds.shape
+   (1,)
+
+Here, we've used the :code:`SklearnModel` and trained the model.
+Even if you want to train a deep learning model which is implemented
+by TensorFlow or PyTorch, calling the :code:`fit` method is all you need!
+
+And then, if you use :code:`dc.metrics.Metric`, you can evaluate your model
+by just calling the :code:`evaluate` method.
+
+.. 
doctest:: + + >>> # initialze the metric + >>> metric = dc.metrics.Metric(dc.metrics.mae_score) + >>> # evaluate the model + >>> train_score = model.evaluate(train_dataset, [metric]) + >>> valid_score = model.evaluate(valid_dataset, [metric]) + >>> test_score = model.evaluate(test_dataset, [metric]) + + +More Tutorials +-------------- + +DeepChem maintains an extensive collection of addition `tutorials`_ that are meant to +be run on Google `colab`_, an online platform that allows you to execute Jupyter notebooks. +Once you've finished this introductory tutorial, we recommend working through these more involved tutorials. + +.. _`scikit-learn`: https://scikit-learn.org/stable/ +.. _`TensorFlow`: https://www.tensorflow.org/ +.. _`XGBoost`: https://xgboost.readthedocs.io/en/latest/ +.. _`PyTorch`: https://pytorch.org/ +.. _`JAX`: https://github.com/google/jax +.. _`tutorials`: https://github.com/deepchem/deepchem/tree/master/examples/tutorials +.. _`colab`: https://colab.research.google.com/ diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..1c9553430f72b53411d58970da0385d4f84c2099 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,130 @@ +The DeepChem Project +==================== + +.. raw:: html + + + + Fork me on GitHub + + + + +**The DeepChem project aims to democratize deep learning for science.** + +What is DeepChem? +----------------- + +The DeepChem project aims to build high quality tools to democratize +the use of deep learning in the sciences. The origin of DeepChem +focused on applications of deep learning to chemistry, but the project +has slowly evolved past its roots to broader applications of deep +learning to the sciences. + +The core `DeepChem Repo`_ serves as a monorepo that organizes the DeepChem suite of scientific tools. +As the project matures, smaller more focused tool will be surfaced in more targeted repos. 
+DeepChem is primarily developed in Python, but we are experimenting with adding support for other languages.
+
+What are some of the things you can use DeepChem to do? Here are a few examples:
+
+- Predict the solubility of small drug-like molecules
+- Predict binding affinity for small molecule to protein targets
+- Predict physical properties of simple materials
+- Analyze protein structures and extract useful descriptors
+- Count the number of cells in a microscopy image
+- More coming soon...
+
+We should clarify one thing up front though. DeepChem is a machine
+learning library, so it gives you the tools to solve each of the
+applications mentioned above yourself. DeepChem may or may not have
+prebaked models which can solve these problems out of the box.
+
+Over time, we hope to grow the set of scientific applications DeepChem
+can address. This means we need lots of help! If you're a scientist
+who's interested in open source, please pitch in on building DeepChem.
+
+.. _`DeepChem Repo`: https://github.com/deepchem/deepchem
+
+Quick Start
+-----------
+
+The fastest way to get up and running with DeepChem is to run it on
+Google Colab. Check out one of the `DeepChem Tutorials`_ or this
+`forum post`_ for Colab quick start guides.
+
+If you'd like to install DeepChem locally,
+we recommend installing the nightly build of deepchem along with RDKit.
+RDKit is a soft requirement package, but many useful methods depend on it.
+
+.. code-block:: bash
+
+   pip install tensorflow==2.3.0
+   pip install --pre deepchem
+   conda install -y -c conda-forge rdkit
+
+Then open your python and try running.
+
+.. code-block:: python
+
+   import deepchem
+
+.. _`DeepChem Tutorials`: https://github.com/deepchem/deepchem/tree/master/examples/tutorials
+.. _`forum post`: https://forum.deepchem.io/t/getting-deepchem-running-in-colab/81/7
+
+About Us
+--------
+DeepChem is managed by a team of open source contributors. Anyone is free to join and contribute! 
+DeepChem has weekly developer calls. You can find `meeting minutes`_ on our `forums`_. + +DeepChem developer calls are open to the public! +To listen in, please email X.Y@gmail.com, where X=bharath and Y=ramsundar to introduce yourself and ask for an invite. + +.. important:: + + | Join our `community gitter `_ to discuss DeepChem. + | Sign up for our `forums `_ to talk about research, development, and general questions. + +.. _`meeting minutes`: https://forum.deepchem.io/search?q=Minutes%20order%3Alatest +.. _`forums`: https://forum.deepchem.io/ + +.. toctree:: + :glob: + :maxdepth: 1 + :caption: Get Started + + get_started/installation + get_started/requirements + get_started/tutorials + get_started/examples + +.. toctree:: + :glob: + :maxdepth: 1 + :caption: Development Guide + + development_guide/licence + development_guide/scientists + development_guide/coding + development_guide/infra + +.. toctree:: + :glob: + :maxdepth: 1 + :caption: API Reference + + api_reference/datasets + api_reference/dataloaders + api_reference/dataclasses + api_reference/moleculenet + api_reference/featurizers + api_reference/tokenizers + api_reference/splitters + api_reference/transformers + api_reference/models + api_reference/layers + api_reference/metrics + api_reference/hyper + api_reference/metalearning + api_reference/rl + api_reference/docking + api_reference/utils diff --git a/docs/sphinxext/notebook_sphinxext.py b/docs/sphinxext/notebook_sphinxext.py deleted file mode 100644 index ffb955d128473d5877d2d4a2dfa8689137879e73..0000000000000000000000000000000000000000 --- a/docs/sphinxext/notebook_sphinxext.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copied from the yt_project, commit e8fb57e -# yt/doc/extensions/notebook_sphinxext.py -# https://bitbucket.org/yt_analysis/yt/src/e8fb57e66ca42e26052dadf054a5c782740abec9/doc/extensions/notebook_sphinxext.py?at=yt - -# Almost completely re-written by Matthew Harrigan to use nbconvert v4 - -from __future__ import print_function - -import 
os -import shutil - -from sphinx.util.compat import Directive -from docutils import nodes -from docutils.parsers.rst import directives -import nbformat -from nbconvert import HTMLExporter, PythonExporter -from nbconvert.preprocessors import ExecutePreprocessor - - -def export_html(nb, f): - config = { - 'Exporter': { - 'template_file': 'basic', - 'template_path': ['./sphinxext/'] - }, - 'ExtractOutputPreprocessor': { - 'enabled': True - }, - 'CSSHTMLHeaderPreprocessor': { - 'enabled': True - } - } - - exporter = HTMLExporter(config) - body, resources = exporter.from_notebook_node( - nb, resources={'output_files_dir': f['nbname']}) - - for fn, data in resources['outputs'].items(): - bfn = os.path.basename(fn) - with open("{destdir}/{fn}".format(fn=bfn, **f), 'wb') as res_f: - res_f.write(data) - - return body - - -def export_python(nb, destfn): - exporter = PythonExporter() - body, resources = exporter.from_notebook_node(nb) - with open(destfn, 'w') as f: - f.write(body) - - -class NotebookDirective(Directive): - """Insert an evaluated notebook into a document - """ - required_arguments = 1 - optional_arguments = 1 - option_spec = {'skip_exceptions': directives.flag} - final_argument_whitespace = True - - def run(self): - f = { - 'docdir': setup.confdir, - 'builddir': setup.app.builder.outdir, - 'nbname': self.arguments[0], - } - f['nbpath'] = "{docdir}/../../examples/notebooks/{nbname}.ipynb".format(**f) - f['destdir'] = "{builddir}/notebooks/{nbname}".format(**f) - - if not os.path.exists(f['destdir']): - os.makedirs(f['destdir']) - - f['uneval'] = "{destdir}/{nbname}.ipynb".format(**f) - f['eval'] = "{destdir}/{nbname}.eval.ipynb".format(**f) - f['py'] = "{destdir}/{nbname}.py".format(**f) - - # 1. Uneval notebook - shutil.copyfile(f['nbpath'], f['uneval']) - with open(f['nbpath']) as nb_f: - nb = nbformat.read(nb_f, as_version=4) - # 2. Python - export_python(nb, f['py']) - # 3. 
HTML (execute first) - # Set per-cell timeout to 60 seconds - executer = ExecutePreprocessor(timeout=60) - executer.preprocess(nb, {}) - html = export_html(nb, f) - # 4. Eval'd notebook - with open(f['eval'], 'w') as eval_f: - nbformat.write(nb, eval_f) - - # Create link to notebook and script files - link_rst = "({uneval}; {eval}; {py})".format( - uneval=formatted_link(f['uneval']), - eval=formatted_link(f['eval']), - py=formatted_link(f['py']),) - - rst_file = self.state_machine.document.attributes['source'] - self.state_machine.insert_input([link_rst], rst_file) - - # create notebook node - nb_node = notebook_node('', html, format='html', source='nb_path') - nb_node.source, nb_node.line = ( - self.state_machine.get_source_and_line(self.lineno)) - - # add dependency - self.state.document.settings.record_dependencies.add(f['nbpath']) - return [nb_node] - - -class notebook_node(nodes.raw): - pass - - -def formatted_link(path): - return "`%s <%s>`__" % (os.path.basename(path), path) - - -def visit_notebook_node(self, node): - self.visit_raw(node) - - -def depart_notebook_node(self, node): - self.depart_raw(node) - - -def setup(app): - setup.app = app - setup.config = app.config - setup.confdir = app.confdir - - app.add_node(notebook_node, html=(visit_notebook_node, depart_notebook_node)) - - app.add_directive('notebook', NotebookDirective) diff --git a/docs/utils.rst b/docs/utils.rst deleted file mode 100644 index 6a4800d3e72be837042d2ee3eb0aa8a719179a9e..0000000000000000000000000000000000000000 --- a/docs/utils.rst +++ /dev/null @@ -1,155 +0,0 @@ -Utilities -========= -DeepChem has a broad collection of utility functions. Many of these -maybe be of independent interest to users since they deal with some -tricky aspects of processing scientific datatypes. - -Array Utilities ---------------- - -.. autofunction:: deepchem.utils.pad_array - -Data Directory --------------- -The DeepChem data directory is where downloaded MoleculeNet datasets are stored. - -.. 
autofunction:: deepchem.utils.get_data_dir - -Print Threshold ---------------- - -The printing threshold controls how many dataset elements are printed -when :code:`dc.data.Dataset` objects are converted to strings or -represnted in the IPython repl. - -.. autofunction:: deepchem.utils.get_print_threshold - -.. autofunction:: deepchem.utils.set_print_threshold - -.. autofunction:: deepchem.utils.get_max_print_size - -.. autofunction:: deepchem.utils.set_max_print_size - -URL Handling ------------- - -.. autofunction:: deepchem.utils.download_url - -File Handling -------------- - -.. autofunction:: deepchem.utils.untargz_file - -.. autofunction:: deepchem.utils.unzip_file - -.. autofunction:: deepchem.utils.save.save_to_disk - -.. autofunction:: deepchem.utils.save.get_input_type - -.. autofunction:: deepchem.utils.save.load_data - -.. autofunction:: deepchem.utils.save.load_sharded_csv - -.. autofunction:: deepchem.utils.save.load_sdf_files - -.. autofunction:: deepchem.utils.save.load_csv_files - -.. autofunction:: deepchem.utils.save.save_metadata - -.. autofunction:: deepchem.utils.save.load_from_disk - -.. autofunction:: deepchem.utils.save.load_pickle_from_disk - -.. autofunction:: deepchem.utils.save.load_dataset_from_disk - -.. autofunction:: deepchem.utils.save.save_dataset_to_disk - -Molecular Utilities -------------------- - -.. autoclass:: deepchem.utils.ScaffoldGenerator - :members: - -.. autoclass:: deepchem.utils.conformers.ConformerGenerator - :members: - -.. autoclass:: deepchem.utils.rdkit_util.MoleculeLoadException - :members: - -.. autofunction:: deepchem.utils.rdkit_util.get_xyz_from_mol - -.. autofunction:: deepchem.utils.rdkit_util.add_hydrogens_to_mol - -.. autofunction:: deepchem.utils.rdkit_util.compute_charges - -.. autofunction:: deepchem.utils.rdkit_util.load_molecule - -.. autofunction:: deepchem.utils.rdkit_util.write_molecule - -Coordinate Box Utilities ------------------------- - -.. 
autoclass:: deepchem.utils.coordinate_box_utils.CoordinateBox - :members: - -.. autofunction:: deepchem.utils.coordinate_box_utils.intersect_interval - -.. autofunction:: deepchem.utils.coordinate_box_utils.union - -.. autofunction:: deepchem.utils.coordinate_box_utils.merge_overlapping_boxes - -.. autofunction:: deepchem.utils.coordinate_box_utils.get_face_boxes - -Evaluation Utils ----------------- - -.. autoclass:: deepchem.utils.evaluate.Evaluator - :members: - -.. autoclass:: deepchem.utils.evaluate.GeneratorEvaluator - :members: - -.. autofunction:: deepchem.utils.evaluate.relative_difference - -.. autofunction:: deepchem.utils.evaluate.threshold_predictions - -Genomic Utilities ------------------ - -.. autofunction:: deepchem.utils.genomics.seq_one_hot_encode - -.. autofunction:: deepchem.utils.genomics.encode_fasta_sequence - -.. autofunction:: deepchem.utils.genomics.encode_bio_sequence - - -Geometry Utilities ------------------- - -.. autofunction:: deepchem.utils.geometry_utils.unit_vector - -.. autofunction:: deepchem.utils.geometry_utils.angle_between - -.. autofunction:: deepchem.utils.geometry_utils.generate_random_unit_vector - -.. autofunction:: deepchem.utils.geometry_utils.generate_random_rotation_matrix - -.. autofunction:: deepchem.utils.geometry_utils.is_angle_within_cutoff - -Hash Function Utilities ------------------------ - -.. autofunction:: deepchem.utils.hash_utils.hash_ecfp - -.. autofunction:: deepchem.utils.hash_utils.hash_ecfp_pair - -.. autofunction:: deepchem.utils.hash_utils.vectorize - -Voxel Utils ------------ - -.. autofunction:: deepchem.utils.voxel_utils.convert_atom_to_voxel - -.. autofunction:: deepchem.utils.voxel_utils.convert_atom_pair_to_voxel - -.. 
autofunction:: deepchem.utils.voxel_utils.voxelize diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c6eb18785edc333ff14b4b44e13aca671d621f56 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,10 @@ +# DeepChem Example Suite + +This directory contains the DeepChem example suite. There are a large number of +examples which break into a few broad categories: + +- API Examples: These examples show how to do little things with DeepChem's API + that you might not have realized were possible. +- Case Study Examples: These show how to analyze interesting datasets with DeepChem. +- Tutorial Notebooks: These IPython notebooks provide walkthroughs of using + DeepChem on interesting problems in practice. diff --git a/examples/bace/README.md b/examples/bace/README.md new file mode 100644 index 0000000000000000000000000000000000000000..545601281c5a74fc2ebf81eaa4bd2a37a6753f9f --- /dev/null +++ b/examples/bace/README.md @@ -0,0 +1,13 @@ +# BACE Dataset Examples + +The BACE dataset is from the following paper: + +Subramanian, Govindan, et al. "Computational modeling of β-secretase 1 (BACE-1) inhibitors using ligand based approaches." Journal of chemical information and modeling 56.10 (2016): 1936-1949. + +This study considers a small dataset of 205 compounds datasets +which are used to train a model which is evaluated on a larger +external validation set of 1273 compounds. + +The file `bace_datasets.py` loads the data as used in the +original paper. `bace_rf.py` demonstrates training a random +forest against this dataset. 
diff --git a/examples/clintox/README.md b/examples/clintox/README.md new file mode 100644 index 0000000000000000000000000000000000000000..84d8778a6a5e2648c1e4b4ae977b1408a504def3 --- /dev/null +++ b/examples/clintox/README.md @@ -0,0 +1,11 @@ +# Clintox dataset models + +The Clintox dataset is a collection of "clinical toxicity" datasets that compares drugs approved by the FDA and drugs that have failed clinical trials for toxicity reasons. It contains two classification tasks for 1491 compounds: + +1) Clinical trial toxicity/non-toxicity +2) FDA approval status + +In this example, we construct fully connected deep networks and +graph convolutional models for the task of predicting clinical +toxicity/FDA approval status from molecular structure. + diff --git a/examples/clintox/clintox_tf_models.py b/examples/clintox/clintox_fcnet.py similarity index 89% rename from examples/clintox/clintox_tf_models.py rename to examples/clintox/clintox_fcnet.py index 2ed0b99f8cdd4a97378b93bc98fd08a6fd934c94..e853c554d309440cdac289ab54516cefcc3248d0 100644 --- a/examples/clintox/clintox_tf_models.py +++ b/examples/clintox/clintox_fcnet.py @@ -2,10 +2,6 @@ Script that trains multitask models on clintox dataset. @author Caleb Geniesse """ -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals - import numpy as np import deepchem as dc from deepchem.molnet import load_clintox diff --git a/examples/hyperparam_opt/README.md b/examples/hyperparam_opt/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c3a5b6b6542b248489b77a924eb15832122718ae --- /dev/null +++ b/examples/hyperparam_opt/README.md @@ -0,0 +1,4 @@ +# Hyperparameter Optimization + +In this folder we provide examples of performing hyperparameter optimization +with DeepChem. 
diff --git a/examples/hyperparam_opt/gaussian_hyperparam_opt.py b/examples/hyperparam_opt/gaussian_hyperparam_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..47a8208afc8c8ded942ad7062f48b51d9f459364 --- /dev/null +++ b/examples/hyperparam_opt/gaussian_hyperparam_opt.py @@ -0,0 +1,21 @@ +import numpy as np +import deepchem as dc +np.random.seed(123) + +# Load delaney dataset +delaney_tasks, delaney_datasets, transformers = dc.molnet.load_delaney( + featurizer="GraphConv") +train, valid, test = delaney_datasets + +# Fit models +metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) +optimizer = dc.hyper.GaussianProcessHyperparamOpt( + lambda **p: dc.models.GraphConvModel(n_tasks=len(delaney_tasks), mode="regression", **p)) + +params_dict = {"dropout": 0.5} +best_model, best_params, all_results = optimizer.hyperparam_search( + params_dict, train, valid, metric, transformers, max_iter=1, search_range=2) + +valid_score = best_model.evaluate(valid, [metric], transformers) +print("valid_score") +print(valid_score) diff --git a/examples/hyperparam_opt/gaussian_hyperparam_opt_with_logdir.py b/examples/hyperparam_opt/gaussian_hyperparam_opt_with_logdir.py new file mode 100644 index 0000000000000000000000000000000000000000..39e14d9f7765ca48df513cc25232578fcf4dcdc9 --- /dev/null +++ b/examples/hyperparam_opt/gaussian_hyperparam_opt_with_logdir.py @@ -0,0 +1,21 @@ +import numpy as np +import deepchem as dc +np.random.seed(123) + +# Load delaney dataset +delaney_tasks, delaney_datasets, transformers = dc.molnet.load_delaney( + featurizer="GraphConv") +train, valid, test = delaney_datasets + +# Fit models +metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) +optimizer = dc.hyper.GaussianProcessHyperparamOpt( + lambda **p: dc.models.GraphConvModel(n_tasks=len(delaney_tasks), mode="regression", **p)) + +params_dict = {"dropout": 0.5} +best_model, best_params, all_results = optimizer.hyperparam_search( + params_dict, train, valid, metric, 
transformers, max_iter=2, search_range=2) + +valid_score = best_model.evaluate(valid, [metric], transformers) +print("valid_score") +print(valid_score) diff --git a/examples/hyperparam_opt/grid_hyperparam_opt.py b/examples/hyperparam_opt/grid_hyperparam_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..e73068c6268371df17e63b08d13a7c4f6ed98053 --- /dev/null +++ b/examples/hyperparam_opt/grid_hyperparam_opt.py @@ -0,0 +1,24 @@ +import numpy as np +import deepchem as dc +np.random.seed(123) + +# Load delaney dataset +delaney_tasks, delaney_datasets, transformers = dc.molnet.load_delaney( + featurizer="GraphConv") +train, valid, test = delaney_datasets + +# Fit models +metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) + +# Fit models +metric = dc.metrics.Metric(dc.metrics.pearson_r2_score) +optimizer = dc.hyper.GridHyperparamOpt( + lambda **p: dc.models.GraphConvModel(n_tasks=len(delaney_tasks), mode="regression", **p)) + +params_dict = {"dropout": [0.1, 0.5]} +best_model, best_params, all_results = optimizer.hyperparam_search( + params_dict, train, valid, metric, transformers) + +valid_score = best_model.evaluate(valid, [metric], transformers) +print("valid_score") +print(valid_score) diff --git a/examples/low_data/datasets.py b/examples/low_data/datasets.py index c09268b6764e61010944853fb394d2262efff616..b50a311d65bc33e11111ad5305e2fce52407bb12 100644 --- a/examples/low_data/datasets.py +++ b/examples/low_data/datasets.py @@ -12,31 +12,31 @@ import tempfile import numpy as np import deepchem as dc + def to_numpy_dataset(dataset): """Converts dataset to numpy dataset.""" return dc.data.NumpyDataset(dataset.X, dataset.y, dataset.w, dataset.ids) + def load_tox21_ecfp(num_train=7200): """Load Tox21 datasets. 
Does not do train/test split""" # Set some global variables up top current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../../datasets/tox21.csv.gz") + dataset_file = os.path.join(current_dir, "../../datasets/tox21.csv.gz") # Featurize Tox21 dataset print("About to featurize Tox21 dataset.") featurizer = dc.feat.CircularFingerprint(size=1024) - tox21_tasks = ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', - 'NR-ER-LBD', 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', - 'SR-HSE', 'SR-MMP', 'SR-p53'] + tox21_tasks = [ + 'NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', + 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53' + ] loader = dc.data.CSVLoader( tasks=tox21_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize( - dataset_file, shard_size=8192) + dataset = loader.featurize(dataset_file, shard_size=8192) - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + # Initialize transformers + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: @@ -44,28 +44,27 @@ def load_tox21_ecfp(num_train=7200): return tox21_tasks, dataset, transformers + def load_tox21_convmol(base_dir=None, num_train=7200): """Load Tox21 datasets. 
Does not do train/test split""" # Set some global variables up top current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../../datasets/tox21.csv.gz") + dataset_file = os.path.join(current_dir, "../../datasets/tox21.csv.gz") # Featurize Tox21 dataset print("About to featurize Tox21 dataset.") featurizer = dc.feat.ConvMolFeaturizer() - tox21_tasks = ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', - 'NR-ER-LBD', 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', - 'SR-HSE', 'SR-MMP', 'SR-p53'] + tox21_tasks = [ + 'NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', + 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53' + ] loader = dc.data.CSVLoader( tasks=tox21_tasks, smiles_field="smiles", featurizer=featurizer) - dataset = loader.featurize( - dataset_file, shard_size=8192) + dataset = loader.featurize(dataset_file, shard_size=8192) - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + # Initialize transformers + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: @@ -73,69 +72,69 @@ def load_tox21_convmol(base_dir=None, num_train=7200): return tox21_tasks, dataset, transformers + def load_muv_ecfp(): """Load MUV datasets. 
Does not do train/test split""" # Load MUV dataset print("About to load MUV dataset.") current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../../datasets/muv.csv.gz") + dataset_file = os.path.join(current_dir, "../../datasets/muv.csv.gz") # Featurize MUV dataset print("About to featurize MUV dataset.") featurizer = dc.feat.CircularFingerprint(size=1024) - MUV_tasks = sorted(['MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', - 'MUV-548', 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', - 'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', 'MUV-652', - 'MUV-466', 'MUV-832']) + MUV_tasks = sorted([ + 'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548', + 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858', + 'MUV-713', 'MUV-733', 'MUV-652', 'MUV-466', 'MUV-832' + ]) loader = dc.data.CSVLoader( tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file) - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + # Initialize transformers + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: - dataset = transformer.transform(dataset) + dataset = transformer.transform(dataset) return MUV_tasks, dataset, transformers + def load_muv_convmol(): """Load MUV datasets. 
Does not do train/test split""" # Load MUV dataset print("About to load MUV dataset.") current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../../datasets/muv.csv.gz") + dataset_file = os.path.join(current_dir, "../../datasets/muv.csv.gz") # Featurize MUV dataset print("About to featurize MUV dataset.") featurizer = dc.feat.ConvMolFeaturizer() - MUV_tasks = sorted(['MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', - 'MUV-548', 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', - 'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', 'MUV-652', - 'MUV-466', 'MUV-832']) + MUV_tasks = sorted([ + 'MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644', 'MUV-548', + 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712', 'MUV-737', 'MUV-858', + 'MUV-713', 'MUV-733', 'MUV-652', 'MUV-466', 'MUV-832' + ]) loader = dc.data.CSVLoader( tasks=MUV_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file) - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + # Initialize transformers + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: - dataset = transformer.transform(dataset) + dataset = transformer.transform(dataset) return MUV_tasks, dataset, transformers + def load_sider_ecfp(): """Load SIDER datasets. 
Does not do train/test split""" # Featurize SIDER dataset print("About to featurize SIDER dataset.") current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../sider/sider.csv.gz") + dataset_file = os.path.join(current_dir, "../sider/sider.csv.gz") featurizer = dc.feat.CircularFingerprint(size=1024) dataset = dc.utils.save.load_from_disk(dataset_file) @@ -143,28 +142,26 @@ def load_sider_ecfp(): print("SIDER tasks: %s" % str(SIDER_tasks)) print("%d tasks in total" % len(SIDER_tasks)) - loader = dc.data.CSVLoader( tasks=SIDER_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file) print("%d datapoints in SIDER dataset" % len(dataset)) # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: dataset = transformer.transform(dataset) return SIDER_tasks, dataset, transformers + def load_sider_convmol(): """Load SIDER datasets. 
Does not do train/test split""" # Featurize SIDER dataset print("About to featurize SIDER dataset.") current_dir = os.path.dirname(os.path.realpath(__file__)) - dataset_file = os.path.join( - current_dir, "../sider/sider.csv.gz") + dataset_file = os.path.join(current_dir, "../sider/sider.csv.gz") featurizer = dc.feat.ConvMolFeaturizer() dataset = dc.utils.save.load_from_disk(dataset_file) @@ -172,15 +169,13 @@ def load_sider_convmol(): print("SIDER tasks: %s" % str(SIDER_tasks)) print("%d tasks in total" % len(SIDER_tasks)) - loader = dc.data.CSVLoader( tasks=SIDER_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file) print("%d datapoints in SIDER dataset" % len(dataset)) # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: dataset = transformer.transform(dataset) diff --git a/examples/multiclass/README.md b/examples/multiclass/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a6aa78d2c7f772faaa3e3d4665725a0b85be83c1 --- /dev/null +++ b/examples/multiclass/README.md @@ -0,0 +1,4 @@ +Multiclass Examples +------------------- + +This directory contains examples of building multiclass models in DeepChem. 
diff --git a/examples/multiclass/multiclass_sklearn.py b/examples/multiclass/multiclass_sklearn.py new file mode 100644 index 0000000000000000000000000000000000000000..b9b6efdfb1b479e4c0bf100371a1a7be3639f14d --- /dev/null +++ b/examples/multiclass/multiclass_sklearn.py @@ -0,0 +1,25 @@ +import deepchem as dc +import numpy as np +import sklearn +from sklearn.ensemble import RandomForestClassifier + +N = 100 +n_feat = 5 +n_classes = 3 +X = np.random.rand(N, n_feat) +y = np.random.randint(3, size=(N,)) +dataset = dc.data.NumpyDataset(X, y) + +sklearn_model = RandomForestClassifier(class_weight="balanced", n_estimators=50) +model = dc.models.SklearnModel(sklearn_model) + +# Fit trained model +print("About to fit model") +model.fit(dataset) +model.save() + +print("About to evaluate model") +train_scores = model.evaluate(dataset, sklearn.metrics.roc_auc_score, []) + +print("Train scores") +print(train_scores) diff --git a/examples/muv/muv_tf.py b/examples/muv/muv_tf.py index 1816e1632c2f3aa1b2b0ee32d897d2231e7dd630..6ebf935884c2618136a8eb88f0b0f9271b2d5d9d 100644 --- a/examples/muv/muv_tf.py +++ b/examples/muv/muv_tf.py @@ -14,7 +14,7 @@ from deepchem.molnet import load_muv np.random.seed(123) # Load MUV data -muv_tasks, muv_datasets, transformers = load_muv() +muv_tasks, muv_datasets, transformers = load_muv(splitter='stratified') train_dataset, valid_dataset, test_dataset = muv_datasets # Build model diff --git a/examples/pcba/pcba_datasets.py b/examples/pcba/pcba_datasets.py index 62a5fd70a3961cc5dfa74d87bbb56e1f1982e093..51beb397a4f7fd85c3f21e843b091c37702a4c35 100644 --- a/examples/pcba/pcba_datasets.py +++ b/examples/pcba/pcba_datasets.py @@ -57,9 +57,7 @@ def load_pcba(featurizer='ECFP', split='random'): dataset = loader.featurize(dataset_file) # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset) - ] + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for 
transformer in transformers: diff --git a/examples/qm7/qm7_tf_model.py b/examples/qm7/qm7_tf_model.py index 5a7039dabf18fb8b9ee82fb58b4f2002a51554c0..88c486fe659492dad6d4d46632b9bea32458dfa4 100644 --- a/examples/qm7/qm7_tf_model.py +++ b/examples/qm7/qm7_tf_model.py @@ -8,12 +8,10 @@ from __future__ import unicode_literals import os import deepchem as dc import numpy as np -from deepchem.molnet import load_qm7_from_mat from deepchem.models.optimizers import ExponentialDecay np.random.seed(123) -qm7_tasks, datasets, transformers = load_qm7_from_mat( - split='stratified', move_mean=True) +qm7_tasks, datasets, transformers = dc.molnet.load_qm7(splitter='stratified') train_dataset, valid_dataset, test_dataset = datasets fit_transformers = [dc.trans.CoulombFitTransformer(train_dataset)] metric = [ diff --git a/examples/sampl/SAMPL.csv b/examples/sampl/SAMPL.csv deleted file mode 100644 index ee845c6705c39fe4d700ac89cabfd4d83b0ea803..0000000000000000000000000000000000000000 --- a/examples/sampl/SAMPL.csv +++ /dev/null @@ -1,644 +0,0 @@ -iupac,smiles,expt,calc -"4-methoxy-N,N-dimethyl-benzamide",CN(C)C(=O)c1ccc(cc1)OC,-11.01,-9.625 -methanesulfonyl chloride,CS(=O)(=O)Cl,-4.87,-6.219 -3-methylbut-1-ene,CC(C)C=C,1.83,2.452 -2-ethylpyrazine,CCc1cnccn1,-5.45,-5.809 -heptan-1-ol,CCCCCCCO,-4.21,-2.917 -"3,5-dimethylphenol",Cc1cc(cc(c1)O)C,-6.27,-5.444 -"2,3-dimethylbutane",CC(C)C(C)C,2.34,2.468 -2-methylpentan-2-ol,CCCC(C)(C)O,-3.92,-2.779 -"1,2-dimethylcyclohexane",C[C@@H]1CCCC[C@@H]1C,1.58,1.685 -butan-2-ol,CC[C@H](C)O,-4.62,-3.145 -dibromomethane,C(Br)Br,-1.96,-0.405 -2-methylpentan-3-ol,CC[C@H](C(C)C)O,-3.88,-2.416 -2-ethylpyridine,CCc1ccccn1,-4.33,-3.31 -ethyl pentanoate,CCCCC(=O)OCC,-2.49,-3.11 -benzenethiol,c1ccc(cc1)S,-2.55,-1.501 -"(2Z)-3,7-dimethylocta-2,6-dien-1-ol",CC(=CCC/C(=C\CO)/C)C,-4.78,-2.597 -indane,c1ccc2c(c1)CCC2,-1.46,-1.752 -ethoxybenzene,CCOc1ccccc1,-2.22,-2.254 -4-bromophenol,c1cc(ccc1O)Br,-5.85,-5.833 -"2,2-dimethylpentane",CCCC(C)(C)C,2.88,2.686 
-2-acetoxyethyl acetate,CC(=O)OCCOC(=O)C,-6.34,-8.292 -ethion,CCOP(=S)(OCC)SCSP(=S)(OCC)OCC,-6.1,-10.644 -cycloheptanol,C1CCCC(CC1)O,-5.48,-4.345 -methyl cyclopropanecarboxylate,COC(=O)C1CC1,-4.1,-3.604 -benzonitrile,c1ccc(cc1)C#N,-4.1,-3.238 -pentanenitrile,CCCCC#N,-3.52,-2.147 -2-methylpropan-2-ol,CC(C)(C)O,-4.47,-3.288 -"2,4-dimethylpentan-3-one",CC(C)C(=O)C(C)C,-2.74,-2.629 -propanal,CCC=O,-3.43,-3.148 -"N,N-dimethylformamide",CN(C)C=O,-7.81,-6.932 -p-xylene,Cc1ccc(cc1)C,-0.8,-0.658 -"penta-1,4-diene",C=CCC=C,0.93,2.357 -"2-(2,3-dimethylphenyl)aminobenzoic acid",Cc1cccc(c1C)Nc2ccccc2C(=O)O,-6.78,-7.665 -"N,N-dimethylbenzamide",CN(C)C(=O)c1ccccc1,-9.29,-8.113 -N-ethylethanamine,CCNCC,-4.07,-2.986 -4-tert-butylphenol,CC(C)(C)c1ccc(cc1)O,-5.91,-5.543 -isopentyl formate,CC(C)CCOC=O,-2.13,-3.414 -decan-1-ol,CCCCCCCCCCO,-3.64,-2.446 -ethyl propanoate,CCC(=O)OCC,-2.68,-3.221 -nonane,CCCCCCCCC,3.13,3.221 -N-methylacetamide,CC(=O)NC,-10.0,-8.276 -2-acetoxyethyl acetate,CC(=O)OCCOC(=O)C,-6.34,-8.327 -non-1-ene,CCCCCCCC=C,2.06,2.995 -naphthalen-2-ol,c1ccc2cc(ccc2c1)O,-8.11,-7.849 -"1,2,4-trichlorobenzene",c1cc(c(cc1Cl)Cl)Cl,-1.12,-0.117 -"(2R,3R,4R,5R)-Hexan-1,2,3,4,5,6-hexol",C([C@H]([C@H]([C@@H]([C@@H](CO)O)O)O)O)O,-23.62,-18.162 -methyl butanoate,CCCC(=O)OC,-2.83,-3.552 -2-hydroxybenzaldehyde,c1ccc(c(c1)C=O)O,-4.68,-8.809 -azetidine,C1CNC1,-5.56,-3.861 -N-propylpropan-1-amine,CCCNCCC,-3.65,-2.233 -aniline,c1ccc(cc1)N,-5.49,-5.543 -tetrafluoromethane,C(F)(F)(F)F,3.12,2.489 -2-methylbutan-1-ol,CC[C@@H](C)CO,-4.42,-2.995 -2-iodophenol,c1ccc(c(c1)O)I,-6.2,-3.221 -"2,6-dimethoxyphenol",COc1cccc(c1O)OC,-6.96,-7.393 -but-1-yne,CCC#C,-0.16,0.284 -trifluoromethylbenzene,c1ccc(cc1)C(F)(F)F,-0.25,-0.57 -hydrazine,NN,-9.3,-6.508 -2-methylpyridine,Cc1ccccn1,-4.63,-3.501 -simazine,CCNc1nc(nc(n1)Cl)NCC,-10.22,-10.914 -"2,3-dichlorodibenzo-p-dioxin",c1ccc2c(c1)Oc3cc(c(cc3O2)Cl)Cl,-3.56,-3.59 -octan-1-amine,CCCCCCCCN,-3.65,-2.589 -ammonia,N,-4.29,-4.018 
-"1,2-bis(trifluoromethyl)benzene",c1ccc(c(c1)C(F)(F)F)C(F)(F)F,1.07,-1.441 -methyl paraben,COC(=O)c1ccc(cc1)O,-9.51,-9.785 -pentylbenzene,CCCCCc1ccccc1,-0.23,-0.094 -"1,1-difluoroethane",CC(F)F,-0.11,0.226 -5-Amino-4-chloro-2-phenylpyridazin-3(2H)-one,c1ccc(cc1)n2c(=O)c(c(cn2)N)Cl,-16.43,-16.039 -butadiene,C=CC=C,0.56,1.955 -"N,N-dimethylmethanamine",CN(C)C,-3.2,-2.636 -hexanamide,CCCCCC(=O)N,-9.31,-8.103 -isobutyl nitrate,CC(C)CO[N+](=O)[O-],-1.88,-1.835 -"1-(2-hydroxyethylamino)-9,10-anthraquinone",c1ccc2c(c1)C(=O)c3cccc(c3C2=O)NCCO,-14.21,-13.599 -2-(nitrooxy)ethan-1-ol,C(CO[N+](=O)[O-])O,-8.18,-6.676 -octan-2-one,CCCCCCC(=O)C,-2.88,-2.758 -1-methylpiperazine,CN1CCNCC1,-7.77,-8.173 -ethanamine,CCN,-4.5,-3.156 -"cyclohepta-1,3,5-triene",C1C=CC=CC=C1,-0.99,-0.098 -"9,10-dihydroanthracene",c1ccc2c(c1)Cc3ccccc3C2,-3.78,-4.304 -"1,1-dichloroethane",CC(Cl)Cl,-0.84,0.187 -3-methoxyphenol,COc1cccc(c1)O,-7.66,-6.969 -acenaphthene,c1cc2cccc3c2c(c1)CC3,-3.15,-4.198 -1-bromooctane,CCCCCCCCBr,0.52,1.352 -phenylmethanol,c1ccc(cc1)CO,-6.62,-5.133 -5-bromouracil,c1c(c(=O)[nH]c(=O)[nH]1)Br,-18.17,-17.298 -n-butane,CCCC,2.1,2.588 -chloromethane,CCl,-0.55,0.764 -1-bromo-2-methyl-propane,CC(C)CBr,-0.03,0.756 -2-isopropylsulfanylpropane,CC(C)SC(C)C,-1.21,0.14 -heptane,CCCCCCC,2.67,2.925 -imidazole,c1cnc[nH]1,-9.63,-7.972 -"1,2,3,7-tetrachlorodibenzo-p-dioxin",c1cc2c(cc1Cl)Oc3cc(c(c(c3O2)Cl)Cl)Cl,-3.84,-2.66 -bromacil,CC[C@H](C)n1c(=O)c(c([nH]c1=O)C)Br,-9.73,-14.496 -diiodomethane,C(I)I,-2.49,-1.882 -"N,N-dipropyl(propylsulfanyl)formamide",CCCN(CCC)C(=O)SCCC,-4.13,-4.569 -nitromethane,C[N+](=O)[O-],-4.02,-2.075 -methoxyethane,CCOC,-2.1,-0.71 -"2-chloro-1,1,1-trimethoxy-ethane",COC(CCl)(OC)OC,-4.59,-3.638 -isobutane,CC(C)C,2.3,2.535 -3-methylbutanoic acid,CC(C)CC(=O)O,-6.09,-8.844 -"2-chloro-1-(2,4-dichlorophenyl)ethenyl diethyl phosphate",CCOP(=O)(OCC)O/C(=C/Cl)/c1ccc(cc1Cl)Cl,-7.07,-9.029 -1-chloropropane,CCCCl,-0.33,0.973 -1-propylsulfanylpropane,CCCSCCC,-1.28,0.64 
-hexan-3-ol,CCC[C@H](CC)O,-4.06,-2.585 -acetonitrile,CC#N,-3.88,-2.789 -"N-methyl-N-(2,2,2-trifluoroethyl)aniline",CN(CC(F)(F)F)c1ccccc1,-1.92,-3.964 -"2-chloro-2-(difluoromethoxy)-1,1,1-trifluoro-ethane",[C@@H](C(F)(F)F)(OC(F)F)Cl,0.1,-1.156 -"hexa-1,5-diene",C=CCCC=C,1.01,2.487 -m-xylene,Cc1cccc(c1)C,-0.83,-0.697 -methyl acetate,CC(=O)OC,-3.13,-3.83 -trimethoxymethylbenzene,COC(c1ccccc1)(OC)OC,-4.04,-5.559 -ethyl benzoate,CCOC(=O)c1ccccc1,-3.64,-4.597 -propanethiol,CCCS,-1.1,-0.182 -heptan-2-one,CCCCCC(=O)C,-3.04,-2.945 -carbofuran,CC1(Cc2cccc(c2O1)OC(=O)NC)C,-9.61,-11.126 -benzyl bromide,c1ccc(cc1)CBr,-2.38,-1.853 -ethyl hexanoate,CCCCCC(=O)OCC,-2.23,-2.929 -1-methoxypropane,CCCOC,-1.66,-0.598 -4-methylmorpholine,CN1CCOCC1,-6.32,-5.774 -3-hydroxybenzonitrile,c1cc(cc(c1)O)C#N,-9.65,-7.739 -"1,2,4,5-tetrachloro-3-(3,4-dichlorophenyl)benzene",c1cc(c(cc1c2c(c(cc(c2Cl)Cl)Cl)Cl)Cl)Cl,-4.38,-0.705 -propylbenzene,CCCc1ccccc1,-0.53,-0.511 -caffeine,Cn1cnc2c1c(=O)n(c(=O)n2C)C,-12.64,-17.621 -N-methylmethanamine,CNC,-4.29,-2.991 -"1,1,2,3,3,3-hexafluoroprop-1-ene",C(=C(F)F)(C(F)(F)F)F,2.93,2.305 -4-chlorophenol,c1cc(ccc1O)Cl,-7.03,-5.373 -piperidine,C1CCNCC1,-5.11,-3.873 -phenanthrene,c1ccc2c(c1)ccc3c2cccc3,-3.88,-5.264 -iodomethane,CI,-0.89,-0.641 -"3,5-dichloro-2,6-dimethoxyphenol",COc1c(cc(c(c1O)OC)Cl)Cl,-6.44,-5.98 -"(E)-1,2-dichloroethylene",C(=C/Cl)\Cl,-0.78,1.024 -n-pentane,CCCCC,2.3,2.673 -butanenitrile,CCCC#N,-3.64,-2.287 -"2-bromo-1,1,1,2-tetrafluoro-ethane",[C@@H](C(F)(F)F)(F)Br,0.5,0.234 -2-isobutylpyrazine,CC(C)Cc1cnccn1,-5.04,-5.495 -[(2S)-butan-2-yl] nitrate,CC[C@H](C)O[N+](=O)[O-],-1.82,-1.864 -"1,4-dichloro-2-phenyl-benzene",c1ccc(cc1)c2cc(ccc2Cl)Cl,-2.46,-1.903 -"1,2,3,4-tetrachloro-5-phenyl-benzene",c1ccc(cc1)c2cc(c(c(c2Cl)Cl)Cl)Cl,-3.48,-1.31 -"2,3-dimethylpentane",CC[C@@H](C)C(C)C,2.52,2.625 -4-methylpentan-2-ol,C[C@H](CC(C)C)O,-3.73,-2.907 -tetrahydropyran,C1CCOCC1,-3.12,-1.809 -cyclopropane,C1CC1,0.75,2.485 
-"1,2,3,4-tetrachloro-5-(3,4,5-trichlorophenyl)benzene",c1c(cc(c(c1Cl)Cl)Cl)c2cc(c(c(c2Cl)Cl)Cl)Cl,-3.17,-0.822 -"1,1-dichloroethylene",C=C(Cl)Cl,0.25,1.108 -2-methylpropan-1-ol,CC(C)CO,-4.5,-3.13 -propyl propanoate,CCCOC(=O)CC,-2.44,-2.453 -hexachloroethane,C(C(Cl)(Cl)Cl)(Cl)(Cl)Cl,-0.64,0.885 -methylsulfanylbenzene,CSc1ccccc1,-2.73,-1.325 -2-ethylphenol,CCc1ccccc1O,-5.66,-4.768 -2-chloro-2-methyl-propane,CC(C)(C)Cl,1.09,0.826 -isoprene,CC(=C)C=C,0.68,1.824 -1-isopropyl-4-methyl-benzene,Cc1ccc(cc1)C(C)C,-0.68,-0.456 -1-methylimidazole,Cn1ccnc1,-8.41,-6.282 -ethylene glycol,C(CO)O,-9.3,-7.266 -"1,2-dichlorobenzene",c1ccc(c(c1)Cl)Cl,-1.36,-0.553 -6-chlorouracil,c1c(=O)[nH]c(=O)[nH]c1Cl,-15.83,-15.128 -propyl formate,CCCOC=O,-2.48,-3.699 -2-chlorodibenzo-p-dioxin,c1ccc2c(c1)Oc3ccc(cc3O2)Cl,-3.1,-4.054 -hexanoic acid,CCCCCC(=O)O,-6.21,-7.878 -diethyl butanedioate,CCOC(=O)CCC(=O)OCC,-5.71,-8.683 -"2,4-dimethylpyridine",Cc1ccnc(c1)C,-4.86,-3.282 -cyclohexene,C1CCC=CC1,0.14,1.175 -"1,4-dimethylpiperazine",CN1CCN(CC1)C,-7.58,-7.874 -"1,2,3,4-tetrachloro-5-(3,4-dichlorophenyl)benzene",c1cc(c(cc1c2cc(c(c(c2Cl)Cl)Cl)Cl)Cl)Cl,-3.04,-1.083 -quinone,C1=CC(=O)C=CC1=O,-6.5,-6.96 -methyl 2-chloroacetate,COC(=O)CCl,-4.0,-3.816 -butanal,CCCC=O,-3.18,-3.044 -ethylbenzene,CCc1ccccc1,-0.79,-0.606 -"1,1,2-trichloroethylene",C(=C(Cl)Cl)Cl,-0.44,0.818 -"N,N-diethylethanamine",CCN(CC)CC,-3.22,-1.955 -"1,2,3,4,7-pentachlorodibenzo-p-dioxin",c1cc2c(cc1Cl)Oc3c(c(c(c(c3Cl)Cl)Cl)Cl)O2,-4.15,-2.31 -"3,4-dimethylpyridine",Cc1ccncc1C,-5.22,-3.201 -cyanuric acid,c1(=O)[nH]c(=O)[nH]c(=O)[nH]1,-18.06,-21.762 -benzaldehyde,c1ccc(cc1)C=O,-4.02,-5.058 -2-chloropyridine,c1ccnc(c1)Cl,-4.39,-3.873 -3-chloroprop-1-ene,C=CCCl,-0.57,0.944 -1-(p-tolyl)ethanone,Cc1ccc(cc1)C(=O)C,-4.7,-4.91 -formaldehyde,C=O,-2.75,-3.155 -1-chloro-2-methyl-benzene,Cc1ccccc1Cl,-1.14,-0.473 -1-pyrrolidin-1-ylethanone,CC(=O)N1CCCC1,-9.8,-7.831 -"1,1,1-trimethoxyethane",CC(OC)(OC)OC,-4.42,-3.7 -butylbenzene,CCCCc1ccccc1,-0.4,-0.227 
-"N,N-dimethylaniline",CN(C)c1ccccc1,-3.45,-4.426 -2-methoxypropane,CC(C)OC,-2.01,-0.657 -"1,2,3,4,6,7,8,9-octachlorodibenzo-p-dioxin",c12c(c(c(c(c1Cl)Cl)Cl)Cl)Oc3c(c(c(c(c3Cl)Cl)Cl)Cl)O2,-4.53,-1.147 -"1,2,3,4,5-pentachloro-6-(2,3,4,5,6-pentachlorophenyl)benzene",c1(c(c(c(c(c1Cl)Cl)Cl)Cl)Cl)c2c(c(c(c(c2Cl)Cl)Cl)Cl)Cl,-2.98,0.76 -"1,1,2-trichloroethane",C(C(Cl)Cl)Cl,-1.99,-0.384 -N-methylaniline,CNc1ccccc1,-4.69,-5.719 -isopropyl acetate,CC(C)OC(=O)C,-2.64,-3.371 -benzene,c1ccccc1,-0.9,-0.806 -"1,2,3-trichlorobenzene",c1cc(c(c(c1)Cl)Cl)Cl,-1.24,-0.51 -4-chlorophenyl)sulfanylmethylsulfanyl-diethoxy-thioxo-$l^{5}-phosphane,CCOP(=S)(OCC)SCSc1ccc(cc1)Cl,-6.5,-7.024 -"3-(dimethoxyphosphinothioylsulfanylmethyl)-1,2,3-benzotriazin-4-one",COP(=S)(OC)SCn1c(=O)c2ccccc2nn1,-10.03,-14.106 -"1,2,4-trichlorodibenzo-p-dioxin",c1ccc2c(c1)Oc3c(cc(c(c3O2)Cl)Cl)Cl,-4.05,-3.16 -"2,3-dimethylbuta-1,3-diene",CC(=C)C(=C)C,0.4,1.862 -hex-1-ene,CCCCC=C,1.58,2.628 -hydrogen sulfide,S,-0.7,-1.135 -ethoxyethane,CCOCC,-1.59,-0.617 -"2-N-ethyl-6-(methylsulfanyl)-4-N-(propan-2-yl)-1,3,5-triazine-2,4-diamine",CCNc1nc(nc(n1)SC)NC(C)C,-7.65,-10.552 -butyl paraben,CCCCOC(=O)c1ccc(cc1)O,-8.72,-8.771 -hexyl acetate,CCCCCCOC(=O)C,-2.26,-2.219 -cyclopentanone,C1CCC(=O)C1,-4.7,-3.889 -pentanoic acid,CCCCC(=O)O,-6.16,-9.053 -bromoethane,CCBr,-0.74,0.487 -"2,6-dimethylnaphthalene",Cc1ccc2cc(ccc2c1)C,-2.63,-2.848 -hexan-1-ol,CCCCCCO,-4.4,-3.0 -1-chloro-2-phenyl-benzene,c1ccc(cc1)c2ccccc2Cl,-2.69,-2.508 -1-methylcyclohexene,CC1=CCCCC1,0.67,1.338 -hexyl nitrate,CCCCCCO[N+](=O)[O-],-1.66,-1.596 -bromoform,C(Br)(Br)Br,-2.13,-0.531 -4-ethylphenol,CCc1ccc(cc1)O,-6.13,-5.453 -2-propoxyethanol,CCCOCCO,-6.4,-3.94 -phenyl formate,c1ccc(cc1)OC=O,-3.82,-5.442 -5-iodouracil,c1c(c(=O)[nH]c(=O)[nH]1)I,-18.72,-17.742 -butyric acid,CCCC(=O)O,-6.35,-9.434 -"1,1,1-trifluoro-2,2,2-trimethoxyethane",COC(C(F)(F)F)(OC)OC,-0.8,-2.319 -"(2S,3R,4S,5R)-oxane-2,3,4,5-tetrol",C1[C@H]([C@@H]([C@H]([C@H](O1)O)O)O)O,-20.52,-14.148 
-bromo-trifluoro-methane,C(F)(F)(F)Br,1.79,1.564 -butan-1-ol,CCCCO,-4.72,-3.232 -fluorobenzene,c1ccc(cc1)F,-0.8,-0.041 -ethyl acetate,CCOC(=O)C,-2.94,-3.745 -isobutyl 2-methylpropanoate,CC(C)COC(=O)C(C)C,-1.69,-2.58 -2-methoxy-2-methyl-propane,CC(C)(C)OC,-2.21,-0.691 -heptachlor,C1=C[C@@H]([C@@H]2[C@H]1[C@@]3(C(=C([C@]2(C3(Cl)Cl)Cl)Cl)Cl)Cl)Cl,-2.55,-0.974 -pentan-3-one,CCC(=O)CC,-3.41,-3.05 -"methyl 2,2,2-trifluoroacetate",COC(=O)C(F)(F)F,-1.1,-1.353 -naphthalene,c1ccc2ccccc2c1,-2.4,-3.213 -"1,2,3,4-tetrachloro-5-(2,3,4-trichlorophenyl)benzene",c1cc(c(c(c1c2cc(c(c(c2Cl)Cl)Cl)Cl)Cl)Cl)Cl,-4.4,-0.805 -acetylsalicylic acid,CC(=O)Oc1ccccc1C(=O)O,-9.94,-9.399 -"3,3-dimethylbutan-2-one",CC(=O)C(C)(C)C,-3.11,-3.234 -methyl methanesulfonate,COS(=O)(=O)C,-4.87,-8.824 -4-ethylpyridine,CCc1ccncc1,-4.73,-3.19 -N-isopropylpropan-2-amine,CC(C)NC(C)C,-3.22,-1.985 -"2,7-dichlorodibenzo-p-dioxin",c1cc2c(cc1Cl)Oc3ccc(cc3O2)Cl,-3.67,-3.321 -heptan-1-amine,CCCCCCCN,-3.79,-2.554 -methylcyclopentane,CC1CCCC1,1.59,1.785 -propane,CCC,2.0,2.495 -2-methyltetrahydrofuran,C[C@H]1CCCO1,-3.3,-1.984 -naphthalen-1-yl N-methylcarbamate,CNC(=O)Oc1cccc2c1cccc2,-9.45,-10.436 -3-hydroxybenzaldehyde,c1cc(cc(c1)O)C=O,-9.52,-9.369 -anthracene,c1ccc2cc3ccccc3cc2c1,-3.95,-5.187 -dichloromethane,C(Cl)Cl,-1.31,0.038 -"methyl 2,2-dimethylpropanoate",CC(C)(C)C(=O)OC,-2.4,-3.304 -trichloro(nitro)methane,C([N+](=O)[O-])(Cl)(Cl)Cl,-1.45,-0.379 -sulfolane,C1CC[S+2](C1)([O-])[O-],-8.61,-9.624 -"2,6-dimethylphenol",Cc1cccc(c1O)C,-5.26,-4.308 -m-cresol,Cc1cccc(c1)O,-5.49,-5.378 -"1-amino-4-hydroxy-9,10-anthracenedione",c1ccc2c(c1)C(=O)c3c(ccc(c3C2=O)O)N,-9.53,-10.984 -"1,4-diamino-9,10-anthracenedione",c1ccc2c(c1)C(=O)c3c(ccc(c3C2=O)N)N,-11.85,-15.252 -nonan-2-one,CCCCCCCC(=O)C,-2.49,-2.563 -butan-1-amine,CCCCN,-4.24,-2.961 -ethyl butanoate,CCCC(=O)OCC,-2.49,-3.381 -4-methylaniline,Cc1ccc(cc1)N,-5.57,-5.494 -1-iodohexane,CCCCCCI,0.08,0.043 -"1,1,2-trichloro-1,2,2-trifluoro-ethane",C(C(F)(Cl)Cl)(F)(F)Cl,1.77,1.691 
-trimethyl phosphate,COP(=O)(OC)OC,-8.7,-10.642 -"1,3-dichlorobenzene",c1cc(cc(c1)Cl)Cl,-0.98,-0.11 -"1,3-dimethylnaphthalene",Cc1cc(c2ccccc2c1)C,-2.47,-2.995 -isohexane,CCCC(C)C,2.51,2.808 -chlorpyrifos,CCOP(=S)(OCC)Oc1c(cc(c(n1)Cl)Cl)Cl,-5.04,-9.625 -"2-chloro-1,1,1-trifluoro-ethane",C(C(F)(F)F)Cl,0.06,0.233 -ethylene,C=C,1.28,2.328 -1-iodopentane,CCCCCI,-0.14,-0.111 -trimethoxymethane,COC(OC)OC,-4.42,-4.625 -decane,CCCCCCCCCC,3.16,3.335 -"1,2-dinitroxypropane",C[C@@H](CO[N+](=O)[O-])O[N+](=O)[O-],-4.95,-5.646 -prop-1-ene,CC=C,1.32,2.328 -3-methyl-1H-indole,Cc1c[nH]c2c1cccc2,-5.88,-8.161 -"(1R)-2,2,2-trichloro-1-dimethoxyphosphoryl-ethanol",COP(=O)([C@H](C(Cl)(Cl)Cl)O)OC,-12.74,-13.424 -cyclohexane,C1CCCCC1,1.23,1.503 -"(2E)-3,7-dimethylocta-2,6-dien-1-ol",CC(=CCC/C(=C/CO)/C)C,-4.45,-2.518 -cumene,CC(C)c1ccccc1,-0.3,-0.674 -"2,3,4-trimethylpentane",CC(C)C(C)C(C)C,2.56,2.674 -3-methylbutan-2-one,CC(C)C(=O)C,-3.24,-3.078 -N-butylbutan-1-amine,CCCCNCCCC,-3.24,-2.076 -butane-1-thiol,CCCCS,-0.99,-0.174 -"1,2,3,4-tetrachlorodibenzo-p-dioxin",c1ccc2c(c1)Oc3c(c(c(c(c3Cl)Cl)Cl)Cl)O2,-3.81,-2.775 -"2,6-dichlorosyringaldehyde",COc1c(c(c(c(c1Cl)C=O)Cl)OC)O,-8.68,-9.846 -cyclohexanamine,C1CCC(CC1)N,-4.59,-3.953 -chloro-difluoro-methane,C(F)(F)Cl,-0.5,-0.067 -methyl 4-nitrobenzoate,COC(=O)c1ccc(cc1)[N+](=O)[O-],-6.88,-6.588 -1-(3-pyridyl)ethanone,CC(=O)c1cccnc1,-8.26,-7.844 -prop-1-yne,CC#C,-0.48,0.065 -nonanal,CCCCCCCCC=O,-2.07,-2.336 -propionic acid,CCC(=O)O,-6.46,-9.088 -chloroform,C(Cl)(Cl)Cl,-1.08,0.285 -"1,2,3-trimethylbenzene",Cc1cccc(c1C)C,-1.21,-0.883 -methane,C,2.0,2.446 -benzyl chloride,c1ccc(cc1)CCl,-1.93,-1.742 -methylcyclohexane,CC1CCCCC1,1.7,1.679 -2-methylthiophene,Cc1cccs1,-1.38,-0.3 -pyridine,c1ccncc1,-4.69,-3.508 -1-chlorobutane,CCCCCl,-0.16,0.993 -"2,5-dimethyltetrahydrofuran",C[C@H]1CC[C@@H](O1)C,-2.92,-1.787 -4-methyl-2-methoxyphenol,Cc1ccc(c(c1)OC)O,-5.8,-4.547 -chlordane,C1[C@H]([C@@H]2[C@H]([C@H]1Cl)[C@]3(C(=C([C@@]2(C3(Cl)Cl)Cl)Cl)Cl)Cl)Cl,-3.44,-3.23 
-toluene,Cc1ccccc1,-0.9,-0.79 -isobutyl formate,CC(C)COC=O,-2.22,-3.458 -ethyl paraben,CCOC(=O)c1ccc(cc1)O,-9.2,-9.535 -"1,2-diethoxyethane",CCOCCOCC,-3.54,-3.42 -pentyl propanoate,CCCCCOC(=O)CC,-2.11,-2.176 -4-propylphenol,CCCc1ccc(cc1)O,-5.21,-5.211 -2-methylbut-2-ene,CC=C(C)C,1.31,2.272 -"1,2-dichloroethane",C(CCl)Cl,-1.79,-0.363 -"3,3-dimethylpentane",CCC(C)(C)CC,2.56,2.593 -"2,3-dimethylnaphthalene",Cc1cc2ccccc2cc1C,-2.78,-2.953 -"2,6-dimethylpyridine",Cc1cccc(n1)C,-4.59,-3.443 -"2,2-dichloro-1,1-difluoro-1-methoxy-ethane",COC(C(Cl)Cl)(F)F,-1.12,-0.685 -2-ethoxyethyl acetate,CCOCCOC(=O)C,-5.31,-5.751 -3-methoxyaniline,COc1cccc(c1)N,-7.29,-7.201 -pyridine-3-carbaldehyde,c1cc(cnc1)C=O,-7.1,-7.425 -2-methylbutan-2-ol,CCC(C)(C)O,-4.43,-2.933 -alachlor,CCc1cccc(c1N(COC)C(=O)CCl)CC,-8.21,-6.851 -1-methylpyrrole,Cn1cccc1,-2.89,-2.374 -dimethoxymethane,COCOC,-2.93,-3.221 -pentan-3-ol,CCC(CC)O,-4.35,-2.786 -undecan-2-one,CCCCCCCCCC(=O)C,-2.15,-2.201 -1-bromo-2-chloro-ethane,C(CBr)Cl,-1.95,-0.8 -iodobenzene,c1ccc(cc1)I,-1.74,-1.057 -"3,5,5-trimethylcyclohex-2-en-1-one",CC1=CC(=O)CC(C1)(C)C,-5.18,-4.088 -iodoethane,CCI,-0.74,-0.609 -4-propylguaiacol,CCCc1ccc(c(c1)OC)O,-5.26,-4.127 -2-bromopropane,CC(C)Br,-0.48,0.448 -1-bromo-4-methyl-benzene,Cc1ccc(cc1)Br,-1.39,-0.894 -4-hydroxybenzonitrile,c1cc(ccc1C#N)O,-10.17,-8.39 -methylsulfonylmethane,CS(=O)(=O)C,-10.08,-10.559 -3-ethylphenol,CCc1cccc(c1)O,-6.25,-5.272 -"(1S,5R)-2-methyl-5-(1-methylethenyl)-2-cyclohexen-1-ol",CC1=CC[C@H](C[C@@H]1O)C(=C)C,-4.44,-3.257 -"1,4-dibromobenzene",c1cc(ccc1Br)Br,-2.3,-1.091 -dicamba,COc1c(ccc(c1C(=O)O)Cl)Cl,-9.86,-8.658 -pent-2-ene,CC/C=C\C,1.31,2.374 -ethane,CC,1.83,2.465 -"1,2-dimethoxybenzene",COc1ccccc1OC,-5.33,-4.055 -ethylsulfanylethane,CCSCC,-1.46,0.299 -pyridine-3-carbonitrile,c1cc(cnc1)C#N,-6.75,-5.582 -"3,4-dichlorophenol",c1cc(c(cc1O)Cl)Cl,-7.29,-5.139 -anisole,COc1ccccc1,-2.45,-2.318 -"2,5-dimethylphenol",Cc1ccc(c(c1)O)C,-5.91,-5.014 
-"1,4-dichlorobenzene",c1cc(ccc1Cl)Cl,-1.01,-0.19 -chloro-fluoro-methane,C(F)Cl,-0.77,-0.171 -pent-1-ene,CCCC=C,1.68,2.532 -"1,2,3,4-tetrachlorobenzene",c1cc(c(c(c1Cl)Cl)Cl)Cl,-1.34,-0.304 -hept-1-yne,CCCCCC#C,0.6,0.639 -decan-2-one,CCCCCCCCC(=O)C,-2.34,-2.573 -chlorobenzene,c1ccc(cc1)Cl,-1.12,-0.475 -[2-benzhydryloxyethyl]-dimethyl-amine,CN(C)CCOC(c1ccccc1)c2ccccc2,-9.34,-7.873 -pentanal,CCCCC=O,-3.03,-2.927 -diphenyl ether,c1ccc(cc1)Oc2ccccc2,-2.87,-2.81 -cyclohexanone,C1CCC(=O)CC1,-4.91,-4.18 -1-nitrobutane,CCCC[N+](=O)[O-],-3.09,-1.449 -pyridine-4-carbaldehyde,c1cnccc1C=O,-7.0,-7.338 -1-chloro-2-(2-chloroethoxy)ethane,C(CCl)OCCCl,-4.23,-2.248 -1-nitroethane,CC[N+](=O)[O-],-3.71,-1.839 -3-chloropyridine,c1cc(cnc1)Cl,-4.01,-2.767 -bromomethane,CBr,-0.82,0.46 -methanol,CO,-5.1,-3.491 -heptanal,CCCCCCC=O,-2.67,-2.704 -"1,3-dichloro-2-(2,6-dichlorophenyl)benzene",c1cc(c(c(c1)Cl)c2c(cccc2Cl)Cl)Cl,-2.28,-1.226 -2-nitroaniline,c1ccc(c(c1)N)[N+](=O)[O-],-7.37,-7.66 -1-methylpiperidine,CN1CCCCC1,-3.88,-3.467 -octanal,CCCCCCCC=O,-2.29,-2.57 -nitrobenzene,c1ccc(cc1)[N+](=O)[O-],-4.12,-3.46 -"(2S,5R)-2-isopropyl-5-methylcyclohexanone",C[C@@H]1CC[C@H](C(=O)C1)C(C)C,-2.53,-3.523 -"(2R,3R,4S,5S,6R)-6-(hydroxymethyl)tetrahydropyran-2,3,4,5-tetrol",C([C@@H]1[C@H]([C@@H]([C@H]([C@@H](O1)O)O)O)O)O,-25.47,-18.095 -fluoromethane,CF,-0.22,0.881 -methylsulfinylmethane,CS(=O)C,-9.280000000000001,-8.243 -dibenzo-p-dioxin,c1ccc2c(c1)Oc3ccccc3O2,-3.15,-4.9 -2-methylaniline,Cc1ccccc1N,-5.53,-5.325 -1-bromobutane,CCCCBr,-0.4,0.705 -nonan-1-ol,CCCCCCCCCO,-3.88,-2.564 -4-methylpyridine,Cc1ccncc1,-4.93,-3.343 -"1,1,2,2-tetrachloroethylene",C(=C(Cl)Cl)(Cl)Cl,0.1,1.328 -2-bromo-2-methyl-propane,CC(C)(C)Br,0.84,0.438 -"1,1-diphenylethene",C=C(c1ccccc1)c2ccccc2,-2.78,-2.47 -1-ethyl-4-methyl-benzene,CCc1ccc(cc1)C,-0.95,-0.575 -3-methylpyridine,Cc1cccnc1,-4.77,-3.221 -"1,1,1,2-tetramethoxyethane",COCC(OC)(OC)OC,-5.73,-5.436 -9H-fluorene,c1ccc-2c(c1)Cc3c2cccc3,-3.35,-4.269 
-acetamide,CC(=O)N,-9.71,-8.82 -dimethyl sulfate,COS(=O)(=O)OC,-5.1,-8.411 -"1,1,2,2-tetrachloroethane",C(C(Cl)Cl)(Cl)Cl,-2.37,-0.534 -methyl cyclohexanecarboxylate,COC(=O)C1CCCCC1,-3.3,-4.376 -1-bromohexane,CCCCCCBr,0.18,1.076 -1-bromoheptane,CCCCCCCBr,0.34,1.223 -1-chlorodibenzo-p-dioxin,c1ccc2c(c1)Oc3cccc(c3O2)Cl,-3.52,-4.473 -"3,3,3-trimethoxypropanenitrile",COC(CC#N)(OC)OC,-6.4,-5.859 -2-chlorobutane,CC[C@H](C)Cl,0.0,0.927 -hexylbenzene,CCCCCCc1ccccc1,-0.04,-0.1 -2-chlorosyringaldehyde,COc1cc(c(c(c1O)OC)Cl)C=O,-7.78,-8.292 -m-bis(trifluoromethyl)benzene,c1cc(cc(c1)C(F)(F)F)C(F)(F)F,1.07,-0.34 -1-benzylimidazole,c1ccc(cc1)Cn2ccnc2,-7.63,-7.997 -naphthalen-1-amine,c1ccc2c(c1)cccc2N,-7.28,-7.777 -diethyl propanedioate,CCOC(=O)CC(=O)OCC,-6.0,-6.716 -1-cyclopropylethanone,CC(=O)C1CC1,-4.61,-3.043 -pyrrole,c1cc[nH]c1,-4.78,-4.014 -diflunisal,c1cc(c(cc1c2ccc(cc2F)F)C(=O)O)O,-9.4,-6.613 -"1,4-dimethylcyclohexane",CC1CCC(CC1)C,2.11,1.918 -cyclohexanol,C1CCC(CC1)O,-5.46,-4.178 -Amitriptyline,CN(C)CCC=C1c2ccccc2CCc3c1cccc3,-7.43,-7.349 -4-fluorophenol,c1cc(ccc1O)F,-6.19,-4.955 -2-chloroaniline,c1ccc(c(c1)N)Cl,-4.91,-4.847 -"1,2,4-trimethylbenzene",Cc1ccc(c(c1)C)C,-0.86,-0.795 -1-ethyl-2-methylbenzene,CCc1ccccc1C,-0.85,-0.761 -"(2R,5R)-2-methyl-5-(1-methylethenyl)-cyclohexanone",C[C@@H]1CC[C@H](CC1=O)C(=C)C,-3.75,-3.344 -biphenyl,c1ccc(cc1)c2ccccc2,-2.7,-3.143 -"2,3-dimethylphenol",Cc1cccc(c1C)O,-6.16,-5.148 -methylparathion,COP(=S)(OC)Oc1ccc(cc1)[N+](=O)[O-],-7.19,-10.466 -diethoxy-(4-nitrophenoxy)-thioxo-$l^{5}-phosphane,CCOP(=S)(OCC)Oc1ccc(cc1)[N+](=O)[O-],-6.74,-9.211 -"1-N,1-N-diethyl-2,6-dinitro-4-(trifluoromethyl)benzene-1,3-diamine",CCN(CC)c1c(cc(c(c1[N+](=O)[O-])N)C(F)(F)F)[N+](=O)[O-],-5.66,-7.503 -methylsulfanylmethane,CSC,-1.61,0.44 -ketoprofen,C[C@@H](c1cccc(c1)C(=O)c2ccccc2)C(=O)O,-10.78,-17.242 -cyclopentanol,C1CCC(C1)O,-5.49,-4.29 -methyl pentanoate,CCCCC(=O)OC,-2.56,-3.492 -2-methylpent-1-ene,CCCC(=C)C,1.47,2.486 
-flurbiprofen,C[C@@H](c1ccc(c(c1)F)c2ccccc2)C(=O)O,-8.42,-13.953 -nitralin,CCCN(CCC)c1c(cc(cc1[N+](=O)[O-])S(=O)(=O)C)[N+](=O)[O-],-7.98,-11.246 -chloroethylene,C=CCl,-0.59,1.162 -"N,N-4-trimethylbenzamide",Cc1ccc(cc1)C(=O)N(C)C,-9.76,-8.081 -heptan-4-one,CCCC(=O)CCC,-2.92,-2.704 -methyl benzoate,COC(=O)c1ccccc1,-3.92,-4.921 -4-methylbenzaldehyde,Cc1ccc(cc1)C=O,-4.27,-5.014 -propyl butanoate,CCCC(=O)OCCC,-2.28,-2.754 -piperazine,C1CNCCN1,-7.4,-8.481 -dialifor,CCOP(=S)(OCC)S[C@@H](CCl)N1C(=O)c2ccccc2C1=O,-5.74,-16.515 -2-ethoxyethanol,CCOCCO,-6.69,-4.407 -3-methylpentane,CCC(C)CC,2.51,2.613 -2-methylpyrazine,Cc1cnccn1,-5.51,-6.161 -1-nitropropane,CCC[N+](=O)[O-],-3.34,-1.632 -mesitylene,Cc1cc(cc(c1)C)C,-0.9,-0.553 -5-fluorouracil,c1c(c(=O)[nH]c(=O)[nH]1)F,-16.92,-16.371 -ethanol,CCO,-5.0,-3.394 -"1,4-dimethylnaphthalene",Cc1ccc(c2c1cccc2)C,-2.82,-3.081 -"2,3,7,8-tetrachlorodibenzo-p-dioxin",c1c2c(cc(c1Cl)Cl)Oc3cc(c(cc3O2)Cl)Cl,-3.37,-2.54 -dichlobenil,c1cc(c(c(c1)Cl)C#N)Cl,-4.71,-3.32 -ethyl formate,CCOC=O,-2.56,-3.867 -"1,2,4,5-tetrachlorobenzene",c1c(c(cc(c1Cl)Cl)Cl)Cl,-1.34,0.035 -diethoxymethoxybenzene,CCOC(OCC)Oc1ccccc1,-5.23,-5.203 -3-nitrophenol,c1cc(cc(c1)O)[N+](=O)[O-],-9.62,-7.889 -octan-1-ol,CCCCCCCCO,-4.09,-2.69 -but-1-ene,CCC=C,1.38,2.367 -carbon tetrachloride,C(Cl)(Cl)(Cl)Cl,0.08,1.185 -2-phenylethanol,c1ccc(cc1)CCO,-6.79,-5.28 -fenuron,CN(C)C(=O)Nc1ccccc1,-9.13,-11.81 -methyldisulfanylmethane,CSSC,-1.83,-0.093 -captan,C1C=CC[C@@H]2[C@@H]1C(=O)N(C2=O)SC(Cl)(Cl)Cl,-9.01,-8.718 -"2,3-diacetoxypropyl acetate",CC(=O)OCC(COC(=O)C)OC(=O)C,-8.84,-12.333 -methoxymethane,COC,-1.91,-0.853 -hexane,CCCCCC,2.48,2.851 -"1,2-dibromoethane",C(CBr)Br,-2.33,-1.275 -"1,1,1,2,2-pentachloroethane",C(C(Cl)(Cl)Cl)(Cl)Cl,-1.23,0.059 -5-trifluoromethyluracil,c1c(c(=O)[nH]c(=O)[nH]1)C(F)(F)F,-15.46,-17.349 -"2,6-dimethylaniline",Cc1cccc(c1N)C,-5.21,-5.57 -propyl acetate,CCCOC(=O)C,-2.79,-3.486 -quinoline,c1ccc2c(c1)cccn2,-5.72,-4.989 -ethanethiol,CCS,-1.14,-0.395 
-ethyldisulfanylethane,CCSSCC,-1.64,-0.979 -thiophene,c1ccsc1,-1.4,-0.359 -1-ethylnaphthalene,CCc1cccc2c1cccc2,-2.4,-2.961 -pentan-2-one,CCCC(=O)C,-3.52,-3.166 -"1,2,3,4-tetrachloro-5-(2,3,4,6-tetrachlorophenyl)benzene",c1c(c(c(c(c1Cl)Cl)Cl)Cl)c2c(cc(c(c2Cl)Cl)Cl)Cl,-4.61,-0.039 -profluralin,CCC[N@@](CC1CC1)c2c(cc(cc2[N+](=O)[O-])C(F)(F)F)[N+](=O)[O-],-2.45,-1.956 -acetic acid,CC(=O)O,-6.69,-7.281 -acetaldehyde,CC=O,-3.5,-3.372 -3-nitroaniline,c1cc(cc(c1)[N+](=O)[O-])N,-8.84,-8.204 -hex-1-yne,CCCCC#C,0.29,0.553 -2-methoxyaniline,COc1ccccc1N,-6.12,-6.771 -phenol,c1ccc(cc1)O,-6.6,-5.707 -propanenitrile,CCC#N,-3.84,-2.491 -naphthalen-1-ol,c1ccc2c(c1)cccc2O,-7.67,-7.137 -butyl acetate,CCCCOC(=O)C,-2.64,-3.406 -aldicarb,CC(C)(/C=N\OC(=O)NC)SC,-9.84,-9.679 -o-cresol,Cc1ccccc1O,-5.9,-5.076 -2-methylpropanal,CC(C)C=O,-2.86,-2.968 -propionamide,CCC(=O)N,-9.4,-8.31 -1-bromopropane,CCCBr,-0.56,0.579 -2-chloropropane,CC(C)Cl,-0.25,0.833 -"1,3-dichloropropane",C(CCl)CCl,-1.89,-0.416 -4-nitrophenol,c1cc(ccc1[N+](=O)[O-])O,-10.64,-8.472 -"1,2-dichloropropane",C[C@@H](CCl)Cl,-1.27,-0.265 -4-chloroaniline,c1cc(ccc1N)Cl,-5.9,-5.281 -"1-amino-9,10-anthracenedione",c1ccc2c(c1)C(=O)c3cccc(c3C2=O)N,-9.44,-12.214 -"2,3-dimethylpyridine",Cc1cccnc1C,-4.82,-3.367 -pyridine-4-carbonitrile,c1cnccc1C#N,-6.02,-5.765 -diethoxy-(ethylsulfanylmethylsulfanyl)-thioxo-$l^{5}-phosphane,CCOP(=S)(OCC)SCSCC,-4.37,-6.427 -1-cyclohexylethanone,CC(=O)C1CCCCC1,-3.9,-4.003 -2-methylbenzaldehyde,Cc1ccccc1C=O,-3.93,-4.554 -1-(4-pyridyl)ethanone,CC(=O)c1ccncc1,-7.62,-7.566 -"1,2,3,4,7,8-hexachlorodibenzo-p-dioxin",c1c2c(cc(c1Cl)Cl)Oc3c(c(c(c(c3Cl)Cl)Cl)Cl)O2,-3.71,-1.878 -acetone,CC(=O)C,-3.8,-3.506 -2-methylprop-1-ene,CC(=C)C,1.16,2.327 -"1,2,3-trichloro-5-(2,5-dichlorophenyl)benzene",c1cc(c(cc1Cl)c2cc(c(c(c2)Cl)Cl)Cl)Cl,-3.61,-0.922 -1-nitropentane,CCCCC[N+](=O)[O-],-2.82,-1.325 -(2E)-hex-2-enal,CCC/C=C/C=O,-3.68,-3.123 -"N,N-dimethyl-4-nitro-benzamide",CN(C)C(=O)c1ccc(cc1)[N+](=O)[O-],-11.95,-10.036 
-tetrahydrofuran,C1CCOC1,-3.47,-2.201 -octane,CCCCCCCC,2.88,3.088 -trifluralin,CCCN(CCC)c1c(cc(cc1[N+](=O)[O-])C(F)(F)F)[N+](=O)[O-],-3.25,-2.023 -"(3R)-3,7-Dimethylocta-1,6-dien-3-yl acetate",CC(=CCC[C@](C)(C=C)OC(=O)C)C,-2.49,-2.964 -"1,3-bis-(nitrooxy)butane",C[C@@H](CCO[N+](=O)[O-])O[N+](=O)[O-],-4.29,-4.944 -2-isopropoxypropane,CC(C)OC(C)C,-0.53,-0.178 -2-methylhexane,CCCCC(C)C,2.93,2.894 -pentachloronitrobenzene,c1(c(c(c(c(c1Cl)Cl)Cl)Cl)Cl)N(=O)=O,-5.22,-1.284 -"2-bromo-2-chloro-1,1,1-trifluoro-ethane",[C@@H](C(F)(F)F)(Cl)Br,-0.11,0.206 -1-butoxybutane,CCCCOCCCC,-0.83,0.139 -pentylcyclopentane,CCCCCC1CCCC1,2.55,2.381 -"2,4-dimethylpentane",CC(C)CC(C)C,2.83,2.756 -"2,5-dimethylpyridine",Cc1ccc(nc1)C,-4.72,-3.165 -but-2-enal,C/C=C/C=O,-4.22,-3.341 -3-methylhexane,CCC[C@H](C)CC,2.71,2.81 -"1,3,5-trichloro-2-(2,6-dichlorophenyl)benzene",c1cc(c(c(c1)Cl)c2c(cc(cc2Cl)Cl)Cl)Cl,-1.96,-0.477 -"1,2,2-trifluoroethoxybenzene",c1ccc(cc1)O[C@@H](C(F)F)F,-1.29,-3.043 -"1,2-dimethoxyethane",COCCOC,-4.84,-3.103 -sec-butylbenzene,CC[C@H](C)c1ccccc1,-0.45,-0.22 -3-phenylpropan-1-ol,c1ccc(cc1)CCCO,-6.92,-5.771 -"2-[(1R)-1-methylpropyl]-4,6-dinitro-phenolate",CC[C@@H](C)c1cc(cc(c1O)[N+](=O)[O-])[N+](=O)[O-],-6.23,-5.378 -methyl 4-methoxybenzoate,COc1ccc(cc1)C(=O)OC,-5.33,-6.462 -"N-(3,4-dichlorophenyl)propanimidic acid",CCC(=O)Nc1ccc(c(c1)Cl)Cl,-7.78,-9.409 -naproxen,C[C@@H](c1ccc2cc(ccc2c1)OC)C(=O)O,-10.21,-12.199 -octafluorocyclobutane,C1(C(C(C1(F)F)(F)F)(F)F)(F)F,3.43,3.077 -isopentyl acetate,CC(C)CCOC(=O)C,-2.21,-3.067 -1-chlorohexane,CCCCCCCl,0.0,1.261 -4-methylpentan-2-one,CC(C)CC(=O)C,-3.05,-3.116 -hexanal,CCCCCC=O,-2.81,-2.86 -3-chloroaniline,c1cc(cc(c1)Cl)N,-5.82,-5.138 -morpholine,C1COCCN1,-7.17,-6.116 -"1,1-diethoxyethane",CCOC(C)OCC,-3.28,-1.795 -"N-butyl-N-ethyl-2,6-dinitro-4-(trifluoromethyl)aniline",CCCC[N@](CC)c1c(cc(cc1[N+](=O)[O-])C(F)(F)F)[N+](=O)[O-],-3.51,-2.303 -methanethiol,CS,-1.2,-0.273 -endosulfan 
alpha,C1[C@@H]2[C@H](COS(=O)O1)[C@@]3(C(=C([C@]2(C3(Cl)Cl)Cl)Cl)Cl)Cl,-4.23,-9.785 -1-(4-methoxyphenyl)ethanone,CC(=O)c1ccc(cc1)OC,-4.4,-6.575 -prop-2-en-1-ol,C=CCO,-5.03,-3.286 -methylsulfanylethane,CCSC,-1.5,0.386 -pentyl acetate,CCCCCOC(=O)C,-2.51,-2.565 -"1,2,3,5-tetrachlorobenzene",c1c(cc(c(c1Cl)Cl)Cl)Cl,-1.62,0.136 -1-phenylethanone,CC(=O)c1ccccc1,-4.58,-5.078 -chloroethane,CCCl,-0.63,0.775 -propylcyclopentane,CCCC1CCCC1,2.13,2.102 -"1,3,5-trichlorobenzene",c1c(cc(cc1Cl)Cl)Cl,-0.78,0.326 -propyl paraben,CCCOC(=O)c1ccc(cc1)O,-9.37,-8.945 -3-chlorophenol,c1cc(cc(c1)Cl)O,-6.62,-5.018 -3-methylbutan-1-ol,CC(C)CCO,-4.42,-3.237 -pentan-1-amine,CCCCCN,-4.09,-2.835 -terbacil,Cc1c(c(=O)n(c(=O)[nH]1)C(C)(C)C)Cl,-11.14,-13.769 -"2,2,5-trimethylhexane",CC(C)CCC(C)(C)C,2.93,2.97 -2-butoxyethanol,CCCCOCCO,-6.25,-3.85 -endrin,C1[C@@H]2[C@H]3[C@@H]([C@H]1[C@H]4[C@@H]2O4)[C@@]5(C(=C([C@]3(C5(Cl)Cl)Cl)Cl)Cl)Cl,-4.82,-5.179 -benzamide,c1ccc(cc1)C(=O)N,-11.0,-10.412 -2-nitropropane,CC(C)[N+](=O)[O-],-3.13,-1.741 -glycerol,C(C(CO)O)O,-13.43,-10.14 -1-iodopropane,CCCI,-0.53,-0.443 -2-methoxyethanamine,COCCN,-6.55,-5.027 -"1,1,1,2-tetrachloroethane",C(C(Cl)(Cl)Cl)Cl,-1.43,-0.091 -methyl propanoate,CCC(=O)OC,-2.93,-3.652 -cyclopentane,C1CCCC1,1.2,1.648 -3-ethylpyridine,CCc1cccnc1,-4.59,-2.965 -"3,5-dimethylpyridine",Cc1cc(cnc1)C,-4.84,-2.869 -2-methoxyethanol,COCCO,-6.619999999999999,-4.686 -methyl formate,COC=O,-2.78,-4.028 -naphthalen-2-amine,c1ccc2cc(ccc2c1)N,-7.47,-8.003 -4-methyl-1H-imidazole,Cc1c[nH]cn1,-10.27,-8.205 -1-methyl-3-nitro-benzene,Cc1cccc(c1)[N+](=O)[O-],-3.45,-3.278 -"1,4-dichlorobutane",C(CCCl)CCl,-2.32,-0.404 -nitroxyacetone,CC(=O)CO[N+](=O)[O-],-5.99,-5.362 -tert-butylbenzene,CC(C)(C)c1ccccc1,-0.44,-0.803 -methyl hexanoate,CCCCCC(=O)OC,-2.49,-3.299 -"1,1,1-trifluoropropan-2-ol",C[C@@H](C(F)(F)F)O,-4.16,-3.518 -1-bromo-pentane,CCCCCBr,-0.1,0.824 -oct-1-ene,CCCCCCC=C,1.92,2.895 -6-isopropyl-3-methyl-1-cyclohex-2-enone,CC1=CC(=O)[C@@H](CC1)C(C)C,-4.51,-3.825 
-propan-2-ol,CC(C)O,-4.74,-3.427 -hexan-1-amine,CCCCCCN,-3.95,-2.772 -3-nitrooxypropyl nitrate,C(CO[N+](=O)[O-])CO[N+](=O)[O-],-4.8,-5.322 -"2,4-dimethylphenol",Cc1ccc(c(c1)C)O,-6.01,-4.98 -pentan-1-ol,CCCCCO,-4.57,-3.054 -pentan-2-ol,CCC[C@@H](C)O,-4.39,-2.945 -3-methylheptane,CCCC[C@@H](C)CC,2.97,3.03 -ibuprofen,C[C@@H](c1ccc(cc1)CC(C)C)C(=O)O,-7.0,-10.857 -diethyl (2R)-2-dimethoxyphosphinothioylsulfanylbutanedioate,CCOC(=O)C[C@H](C(=O)OCC)SP(=S)(OC)OC,-8.15,-11.194 -"3,4-dimethylphenol",Cc1ccc(cc1C)O,-6.5,-5.471 -4-chloro-3-methyl-phenol,Cc1cc(ccc1Cl)O,-6.79,-5.14 -hept-2-ene,CCCC/C=C/C,1.68,2.78 -1-propoxypropane,CCCOCCC,-1.16,-0.004 -"(1R,2S,5R)-2-isopropyl-5-methylcyclohexanol",C[C@@H]1CC[C@H]([C@@H](C1)O)C(C)C,-3.2,-3.35 -terbutryn,CCNc1nc(nc(n1)SC)NC(C)(C)C,-6.68,-9.271 -"2,2,4-trimethylpentane",CC(C)CC(C)(C)C,2.89,2.542 -nonan-5-one,CCCCC(=O)CCCC,-2.64,-2.364 -pebulate,CCCCN(CC)C(=O)SCCC,-3.64,-4.573 -hept-1-ene,CCCCCC=C,1.66,2.761 -isopropyl formate,CC(C)OC=O,-2.02,-3.591 -1-acetoxyethyl acetate,CC(OC(=O)C)OC(=O)C,-4.97,-8.006 -5-chlorouracil,c1c(c(=O)[nH]c(=O)[nH]1)Cl,-17.74,-16.612 -isopropenylbenzene,CC(=C)c1ccccc1,-1.24,-0.651 -isopentane,CCC(C)C,2.38,2.59 -butyl nitrate,CCCCO[N+](=O)[O-],-2.09,-1.938 -bromobenzene,c1ccc(cc1)Br,-1.46,-0.947 -"1,1,1-trichloroethane",CC(Cl)(Cl)Cl,-0.19,0.505 -4-(1-Methylethenyl)-1-cyclohexene-1-carboxaldehyde,CC(=C)[C@H]1CCC(=CC1)C=O,-4.09,-3.591 -1-methyl-2-nitro-benzene,Cc1ccccc1[N+](=O)[O-],-3.58,-3.133 -1-iodoheptane,CCCCCCCI,0.27,0.228 -pyrene,c1cc2ccc3cccc4c3c2c(c1)cc4,-4.52,-6.79 -1-chloro-pentane,CCCCCCl,-0.1,1.084 -isobutyl acetate,CC(C)COC(=O)C,-2.36,-2.896 -"2,2-dimethylbutane",CCC(C)(C)C,2.51,2.495 -4-nitroaniline,c1cc(ccc1N)N(=O)=O,-9.82,-9.416 -methyl 2-cyanoacetate,COC(=O)CC#N,-6.72,-6.36 -4-methoxyaniline,COc1ccc(cc1)N,-7.48,-7.016 -isobutylbenzene,CC(C)Cc1ccccc1,0.16,-0.257 -"1,3,5-trichloro-2-phenyl-benzene",c1ccc(cc1)c2c(cc(cc2Cl)Cl)Cl,-2.16,-1.151 -methanamine,CN,-4.55,-3.583 
-2-chlorophenol,c1ccc(c(c1)O)Cl,-4.55,-3.317 -"2-amino-9,10-anthraquinone",c1ccc2c(c1)C(=O)c3ccc(cc3C2=O)N,-11.53,-13.895 -"(Z)-1,2-dichloroethylene",C(=C\Cl)\Cl,-1.17,1.156 -hexan-2-one,CCCCC(=O)C,-3.28,-3.006 -"1,2-dinitroxyethane",C(CO[N+](=O)[O-])O[N+](=O)[O-],-5.73,-6.227 -2-fluorophenol,c1ccc(c(c1)O)F,-5.29,-3.346 -pirimor,Cc1c(nc(nc1OC(=O)N(C)C)N(C)C)C,-9.41,-13.87 -styrene,C=Cc1ccccc1,-1.24,-1.078 -triethylphosphate,CCOP(=O)(OCC)OCC,-7.5,-10.251 -"2,2,2-trifluoroethanol",C(C(F)(F)F)O,-4.31,-3.809 -1-butoxy-2-propanol,CCCCOC[C@H](C)O,-5.73,-3.891 -propan-1-ol,CCCO,-4.85,-3.33 -o-xylene,Cc1ccccc1C,-0.9,-0.851 -neopentane,CC(C)(C)C,2.51,2.506 -pent-1-yne,CCCC#C,0.01,0.47 -phthalimide,c1ccc2c(c1)C(=O)NC2=O,-9.61,-11.825 -1-iodobutane,CCCCI,-0.25,-0.223 -p-cresol,Cc1ccc(cc1)O,-6.13,-5.579 -2-iodopropane,CC(C)I,-0.46,-0.492 -2-methoxyphenol,COc1ccccc1O,-5.94,-4.746 -cyclopentene,C1CC=CC1,0.56,1.23 -111-trifluoropropan-2-ol,C[C@H](C(F)(F)F)O,-4.2,-3.486 -propan-1-amine,CCCN,-4.39,-3.053 -2-nitrophenol,c1ccc(c(c1)[N+](=O)[O-])O,-4.58,-5.667 -1-methylnaphthalene,Cc1cccc2c1cccc2,-2.44,-3.212 -hexachlorobenzene,c1(c(c(c(c(c1Cl)Cl)Cl)Cl)Cl)Cl,-2.33,0.379 -oct-2-enal,CCCCC/C=C/C=O,-3.43,-2.706 -oct-1-yne,CCCCCCC#C,0.71,0.832 -diazinon,CCOP(=S)(OCC)Oc1cc(nc(n1)C(C)C)C,-6.48,-10.753 -methyl octanoate,CCCCCCCC(=O)OC,-2.04,-3.035 -pyrrolidine,C1CCNC1,-5.48,-4.278 -4-hydroxybenzaldehyde,c1cc(ccc1C=O)O,-8.83,-10.05 -1-chloroheptane,CCCCCCCCl,0.29,1.467 -"1,4-dioxane",C1COCCO1,-5.06,-4.269 diff --git a/examples/sampl/sampl_graph_conv.py b/examples/sampl/sampl_graph_conv.py deleted file mode 100644 index d2b41c2c55060a1fa6e4a259f0bb636be2d3178e..0000000000000000000000000000000000000000 --- a/examples/sampl/sampl_graph_conv.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -Script that trains graph-conv models on SAMPL(FreeSolv) dataset. 
-""" -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals - -import numpy as np -np.random.seed(123) -import tensorflow as tf -tf.random.set_seed(123) -import deepchem as dc - -# Load SAMPL(FreeSolv) dataset -SAMPL_tasks, SAMPL_datasets, transformers = dc.molnet.load_sampl( - featurizer='GraphConv') -train_dataset, valid_dataset, test_dataset = SAMPL_datasets - -# Define metric -metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean) - -# Batch size of models -batch_size = 50 -model = dc.models.GraphConvModel(len(SAMPL_tasks), mode='regression') - -# Fit trained model -model.fit(train_dataset, nb_epoch=20) - -print("Evaluating model") -train_scores = model.evaluate(train_dataset, [metric], transformers) -valid_scores = model.evaluate(valid_dataset, [metric], transformers) - -print("Train scores") -print(train_scores) - -print("Validation scores") -print(valid_scores) diff --git a/examples/sampl/sampl_tf_models.py b/examples/sampl/sampl_tf_models.py deleted file mode 100644 index dcaf05237ee1955de0ae45a8108845f04f854649..0000000000000000000000000000000000000000 --- a/examples/sampl/sampl_tf_models.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Script that trains multitask models on SAMPL dataset. -""" -from __future__ import print_function -from __future__ import division -from __future__ import unicode_literals - -import os -import shutil -import numpy as np -import deepchem as dc -from deepchem.molnet import load_sampl - -# Only for debug! 
-np.random.seed(123) - -# Load SAMPL dataset -n_features = 1024 -SAMPL_tasks, SAMPL_datasets, transformers = load_sampl() -train_dataset, valid_dataset, test_dataset = SAMPL_datasets - -# Fit models -metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean) - -model = dc.models.MultitaskRegressor( - len(SAMPL_tasks), - n_features, - layer_sizes=[1000], - dropouts=[.25], - learning_rate=0.001, - batch_size=50) - -# Fit trained model -model.fit(train_dataset) - -print("Evaluating model") -train_scores = model.evaluate(train_dataset, [metric], transformers) -valid_scores = model.evaluate(valid_dataset, [metric], transformers) - -print("Train scores") -print(train_scores) - -print("Validation scores") -print(valid_scores) diff --git a/examples/sider/sider_datasets.py b/examples/sider/sider_datasets.py index 00bb5563f477dd09aea9491d4bee24fa98b8274d..cffa78607719a8ff038dc661ade60304c0aff5ef 100644 --- a/examples/sider/sider_datasets.py +++ b/examples/sider/sider_datasets.py @@ -10,13 +10,13 @@ import numpy as np import shutil import deepchem as dc + def load_sider(featurizer='ECFP', split='index'): current_dir = os.path.dirname(os.path.realpath(__file__)) - # Load SIDER dataset + # Load SIDER dataset print("About to load SIDER dataset.") - dataset_file = os.path.join( - current_dir, "./sider.csv.gz") + dataset_file = os.path.join(current_dir, "./sider.csv.gz") dataset = dc.utils.save.load_from_disk(dataset_file) print("Columns of dataset: %s" % str(dataset.columns.values)) print("Number of examples in dataset: %s" % str(dataset.shape[0])) @@ -38,15 +38,16 @@ def load_sider(featurizer='ECFP', split='index'): print("%d datapoints in SIDER dataset" % len(dataset)) # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: dataset = transformer.transform(dataset) - splitters = 
{'index': dc.splits.IndexSplitter(), - 'random': dc.splits.RandomSplitter(), - 'scaffold': dc.splits.ScaffoldSplitter()} + splitters = { + 'index': dc.splits.IndexSplitter(), + 'random': dc.splits.RandomSplitter(), + 'scaffold': dc.splits.ScaffoldSplitter() + } splitter = splitters[split] train, valid, test = splitter.train_valid_test_split(dataset) diff --git a/examples/sider/sider_rf.py b/examples/sider/sider_rf.py index 203057a51be966c11b796abff453bd3b8d8c946b..282715273a7aad5be019d2c3e8f15806ec1165ef 100644 --- a/examples/sider/sider_rf.py +++ b/examples/sider/sider_rf.py @@ -10,19 +10,21 @@ import os import shutil import numpy as np import deepchem as dc -from sider_datasets import load_sider from sklearn.ensemble import RandomForestClassifier -sider_tasks, datasets, transformers = load_sider() +sider_tasks, datasets, transformers = dc.molnet.load_sider() train_dataset, valid_dataset, test_dataset = datasets -metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean, - mode="classification") +metric = dc.metrics.Metric( + dc.metrics.roc_auc_score, np.mean, mode="classification") + def model_builder(model_dir): sklearn_model = RandomForestClassifier( class_weight="balanced", n_estimators=100) return dc.models.SklearnModel(sklearn_model, model_dir) + + model = dc.models.SingletaskToMultitask(sider_tasks, model_builder) # Fit trained model diff --git a/examples/toxcast/toxcast_datasets.py b/examples/toxcast/toxcast_datasets.py index 3974331c674b8e4bcab43abeb2bf749b09d2e09d..da50e6d32ab698bc2483a2da3a92543b04a52530 100644 --- a/examples/toxcast/toxcast_datasets.py +++ b/examples/toxcast/toxcast_datasets.py @@ -10,14 +10,14 @@ import numpy as np import shutil import deepchem as dc + def load_toxcast(featurizer='ECFP', split='index'): current_dir = os.path.dirname(os.path.realpath(__file__)) # Load TOXCAST dataset print("About to load TOXCAST dataset.") - dataset_file = os.path.join( - current_dir, "./processing/toxcast_data.csv.gz") + dataset_file = 
os.path.join(current_dir, "./processing/toxcast_data.csv.gz") dataset = dc.utils.save.load_from_disk(dataset_file) print("Columns of dataset: %s" % str(dataset.columns.values)) print("Number of examples in dataset: %s" % str(dataset.shape[0])) @@ -26,9 +26,9 @@ def load_toxcast(featurizer='ECFP', split='index'): print("About to featurize TOXCAST dataset.") if featurizer == 'ECFP': - featurizer = dc.feat.CircularFingerprint(size=1024) + featurizer = dc.feat.CircularFingerprint(size=1024) elif featurizer == 'GraphConv': - featurizer = dc.feat.ConvMolFeaturizer() + featurizer = dc.feat.ConvMolFeaturizer() TOXCAST_tasks = dataset.columns.values[1:].tolist() @@ -36,18 +36,19 @@ def load_toxcast(featurizer='ECFP', split='index'): tasks=TOXCAST_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file) - # Initialize transformers - transformers = [ - dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)] + # Initialize transformers + transformers = [dc.trans.BalancingTransformer(dataset=dataset)] print("About to transform data") for transformer in transformers: dataset = transformer.transform(dataset) - splitters = {'index': dc.splits.IndexSplitter(), - 'random': dc.splits.RandomSplitter(), - 'scaffold': dc.splits.ScaffoldSplitter()} + splitters = { + 'index': dc.splits.IndexSplitter(), + 'random': dc.splits.RandomSplitter(), + 'scaffold': dc.splits.ScaffoldSplitter() + } splitter = splitters[split] train, valid, test = splitter.train_valid_test_split(dataset) - + return TOXCAST_tasks, (train, valid, test), transformers diff --git a/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb b/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb index 6bd7edbee0de6471ce28894efda70aff2a594e61..d119434d208d905526af8a7e4a5df63dab9e218a 100644 --- a/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb +++ b/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb @@ -1,1623 
+1,379 @@ { - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "socSJe925zFv" + }, + "source": [ + "# Tutorial 1: The Basic Tools of the Deep Life Sciences\n", + "Welcome to DeepChem's introductory tutorial for the deep life sciences. This series of notebooks is a step-by-step guide for you to get to know the new tools and techniques needed to do deep learning for the life sciences. We'll start from the basics, assuming that you're new to machine learning and the life sciences, and build up a repertoire of tools and techniques that you can use to do meaningful work in the life sciences.\n", + "\n", + "**Scope:** This tutorial will encompass both the machine learning and data handling needed to build systems for the deep life sciences.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in the sequences are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb)\n", + "\n", + "\n", + "## Why do the DeepChem Tutorial?\n", + "\n", + "**1) Career Advancement:** Applying AI in the life sciences is a booming\n", + "industry at present. There are a host of newly funded startups and initiatives\n", + "at large pharmaceutical and biotech companies centered around AI. 
Learning and\n", + "mastering DeepChem will bring you to the forefront of this field and will\n", + "prepare you to enter a career in this field.\n", + "\n", + "**2) Humanitarian Considerations:** Disease is the oldest cause of human\n", + "suffering. From the dawn of human civilization, humans have suffered from pathogens,\n", + "cancers, and neurological conditions. One of the greatest achievements of\n", + "the last few centuries has been the development of effective treatments for\n", + "many diseases. By mastering the skills in this tutorial, you will be able to\n", + "stand on the shoulders of the giants of the past to help develop new\n", + "medicine.\n", + "\n", + "**3) Lowering the Cost of Medicine:** The art of developing new medicine is\n", + "currently an elite skill that can only be practiced by a small core of expert\n", + "practitioners. By enabling the growth of open source tools for drug discovery,\n", + "you can help democratize these skills and open up drug discovery to more\n", + "competition. Increased competition can help drive down the cost of medicine.\n", + "\n", + "## Getting Extra Credit\n", + "If you're excited about DeepChem and want to get more involved, there are some things that you can do right now:\n", + "\n", + "* Star DeepChem on GitHub! - https://github.com/deepchem/deepchem\n", + "* Join the DeepChem forums and introduce yourself! - https://forum.deepchem.io\n", + "* Say hi on the DeepChem gitter - https://gitter.im/deepchem/Lobby\n", + "* Make a YouTube video teaching the contents of this notebook.\n", + "\n", + "\n", + "## Prerequisites\n", + "\n", + "This tutorial sequence will assume some basic familiarity with the Python data science ecosystem. We will assume that you have familiarity with libraries such as Numpy, Pandas, and TensorFlow. 
We'll provide some brief refreshers on basics through the tutorial so don't worry if you're not an expert.\n", + "\n", + "## Setup\n", + "\n", + "The first step is to get DeepChem up and running. We recommend using Google Colab to work through this tutorial series. You'll need to run the following commands to get DeepChem installed on your colab notebook. Note that this will take something like 5 minutes to run on your colab instance." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "OyxRVW5X5zF0", + "outputId": "affd23f1-1929-456a-f8a6-e53a874c84b4" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "CMWAv-Z46nCc", + "outputId": "9ae7cfd0-ebbf-40b0-f6f1-2940cf32a839" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Jk47QTZ95zF-" + }, + "source": [ + "You can of course run this tutorial locally if you prefer. In this case, don't run the above cell since it will download and install Anaconda on your local machine. In either case, we can now import the `deepchem` package to play with." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { "colab": { - "name": "01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb", - "provenance": [] + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "colab_type": "code", + "id": "PDiY03h35zF_", + "outputId": "cdd7401d-19a0-4476-9297-b04defc67178" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" } + ], + "source": [ + "import deepchem as dc\n", + "dc.__version__" + ] }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "socSJe925zFv", - "colab_type": "text" - }, - "source": [ - "# Tutorial 1: The Basic Tools of the Deep Life Sciences\n", - "Welcome to DeepChem's introductory tutorial for the deep life sciences. This series of notebooks is step-by-step guide for you to get to know the new tools and techniques needed to do deep learning for the life sciences. We'll start from the basics, assuming that you're new to machine learning and the life sciences, and build up a repertoire of tools and techniques that you can use to do meaningful work in the life sciences.\n", - "\n", - "**Scope:** This tutorial will encompass both the machine learning and data handling needed to build systems for the deep life sciences.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in the sequences are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb)\n", - "\n", - "\n", - "## Why do the DeepChem Tutorial?\n", - "\n", - "**1) Career Advancement:** Applying AI in the life sciences is a booming\n", - "industry at present. 
There are a host of newly funded startups and initiatives\n", - "at large pharmaceutical and biotech companies centered around AI. Learning and\n", - "mastering DeepChem will bring you to the forefront of this field and will\n", - "prepare you to enter a career in this field.\n", - "\n", - "**2) Humanitarian Considerations:** Disease is the oldest cause of human\n", - "suffering. From the dawn of human civilization, humans have suffered from pathogens,\n", - "cancers, and neurological conditions. One of the greatest achievements of\n", - "the last few centuries has been the development of effective treatments for\n", - "many diseases. By mastering the skills in this tutorial, you will be able to\n", - "stand on the shoulders of the giants of the past to help develop new\n", - "medicine.\n", - "\n", - "**3) Lowering the Cost of Medicine:** The art of developing new medicine is\n", - "currently an elite skill that can only be practiced by a small core of expert\n", - "practitioners. By enabling the growth of open source tools for drug discovery,\n", - "you can help democratize these skills and open up drug discovery to more\n", - "competition. Increased competition can help drive down the cost of medicine.\n", - "\n", - "## Getting Extra Credit\n", - "If you're excited about DeepChem and want to get more more involved, there's a couple of things that you can do right now:\n", - "\n", - "* Star DeepChem on GitHub! - https://github.com/deepchem/deepchem\n", - "* Join the DeepChem forums and introduce yourself! - https://forum.deepchem.io\n", - "* Say hi on the DeepChem gitter - https://gitter.im/deepchem/Lobby\n", - "* Make a YouTube video teaching the contents of this notebook.\n", - "\n", - "\n", - "## Prerequisites\n", - "\n", - "This tutorial will assume some basic familiarity with the Python data science ecosystem. We will assume that you have familiarity with libraries such as Numpy, Pandas, and TensorFlow. 
We'll provide some brief refreshers on basics through the tutorial so don't worry if you're not an expert.\n", - "\n", - "## Setup\n", - "\n", - "The first step is to get DeepChem up and running. We recommend using Google Colab to work through this tutorial series. You'll need to run the following commands to get DeepChem installed on your colab notebook. Note that this will take something like 5 minutes to run on your colab instance." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "OyxRVW5X5zF0", - "colab_type": "code", - "outputId": "a28f18b1-f694-4934-9858-7d4d1c72e56d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 19644 0 --:--:-- --:--:-- --:--:-- 19644\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 3.15 s, sys: 662 ms, total: 3.81 s\n", - "Wall time: 2min 22s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Jk47QTZ95zF-", - "colab_type": "text" - }, - "source": [ - "You can of course run this tutorial locally if you prefer. In this case, don't run the above cell since it will download and install Anaconda on your local machine. In either case, we can now import `deepchem` the package to play with." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "PDiY03h35zF_", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# Run this cell to see if things work\n", - "import deepchem as dc" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "B0u7qIZd5zGG", - "colab_type": "text" - }, - "source": [ - "# Basic Data Handling in DeepChem\n", - "What does it take to do deep learning on the life sciences? Well, the first thing we'll need to do is actually handle some data. How can we start handling some basic data? 
For beginners, let's just take a look at some synthetic data.\n", - "\n", - "To generate some basic synthetic data, we will use Numpy to create some basic arrays." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "saTaOpXY5zGI", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import numpy as np\n", - "\n", - "data = np.random.random((4, 4))\n", - "labels = np.random.random((4,)) # labels of size 20x1" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "F922OPtL5zGM", - "colab_type": "text" - }, - "source": [ - "We've given these arrays some evocative names: \"data\" and \"labels.\" For now, don't worry too much about the names, but just note that the arrays have different shapes. Let's take a quick look to get a feeling for these arrays" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "YEDcUsz35zGO", - "colab_type": "code", - "outputId": "fc261c64-6878-4865-ac00-a37660d597e1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 102 - } - }, - "source": [ - "data, labels" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(array([[0.19330935, 0.11190623, 0.44660479, 0.82294167],\n", - " [0.23733282, 0.20134574, 0.42438331, 0.7899084 ],\n", - " [0.47503811, 0.95041665, 0.02520914, 0.8065749 ],\n", - " [0.31487199, 0.12439971, 0.77241039, 0.27010706]]),\n", - " array([0.00428323, 0.82485774, 0.99352178, 0.47443043]))" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 4 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "E8UCFrrN5zGf", - "colab_type": "text" - }, - "source": [ - "In order to be able to work with this data in DeepChem, we need to wrap these arrays so DeepChem knows how to work with them. DeepChem has a `Dataset` API that it uses to facilitate its handling of datasets. For handling of Numpy datasets, we use DeepChem's `NumpyDataset` object." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "e5K3rdGV5zGg", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from deepchem.data.datasets import NumpyDataset\n", - "\n", - "dataset = NumpyDataset(data, labels)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_Zcd7jTd5zGr", - "colab_type": "text" - }, - "source": [ - "Ok, now what? We have these arrays in a `NumpyDataset` object. What can we do with it? Let's try printing out the object." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "LJc90fs_5zGs", - "colab_type": "code", - "outputId": "838fc846-fceb-4955-af30-470bb6521349", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "dataset" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 6 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "aQa88cbj5zGw", - "colab_type": "text" - }, - "source": [ - "Ok, that's not terribly informative. It's telling us that `dataset` is a Python object that lives somewhere in memory. Can we recover the two datasets that we used to construct this object? Luckily, the DeepChem API allows us to recover the two original datasets by calling the `dataset.X` and `dataset.y` attributes of the original object." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "HSVqeYox5zGx", - "colab_type": "code", - "outputId": "09364c0b-426f-4d4d-a4b2-b77d6daf7c9d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 102 - } - }, - "source": [ - "dataset.X, dataset.y" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(array([[0.19330935, 0.11190623, 0.44660479, 0.82294167],\n", - " [0.23733282, 0.20134574, 0.42438331, 0.7899084 ],\n", - " [0.47503811, 0.95041665, 0.02520914, 0.8065749 ],\n", - " [0.31487199, 0.12439971, 0.77241039, 0.27010706]]),\n", - " array([0.00428323, 0.82485774, 0.99352178, 0.47443043]))" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 7 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "WBmRfo7D5zG9", - "colab_type": "text" - }, - "source": [ - "This set of transformations raises a few questions. First, what was the point of it all? Why would we want to wrap objects this way instead of working with the raw Numpy arrays? The simple answer is for have a unified API for working with larger datasets. Suppose that `X` and `y` are so large that they can't fit easily into memory. What would we do then? Being able to work with an abstract `dataset` object proves very convenient then. In fact, you'll have reason to use this feature of `Dataset` later in the tutorial series.\n", - "\n", - "What else can we do with the `dataset` object? It turns out that it can be useful to be able to walk through the datapoints in the `dataset` one at a time. For that, we can use the `dataset.itersamples()` method." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "k_8IONOw5zHC", - "colab_type": "code", - "outputId": "cd5449b2-1224-4b85-e54b-d5d4fac18878", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 85 - } - }, - "source": [ - "for x, y, _, _ in dataset.itersamples():\n", - " print(x, y)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[0.19330935 0.11190623 0.44660479 0.82294167] 0.004283228755889379\n", - "[0.23733282 0.20134574 0.42438331 0.7899084 ] 0.8248577426415518\n", - "[0.47503811 0.95041665 0.02520914 0.8065749 ] 0.9935217763676747\n", - "[0.31487199 0.12439971 0.77241039 0.27010706] 0.4744304292698883\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "0vU34w_e5zHH", - "colab_type": "text" - }, - "source": [ - "There are a couple of other fields that the `dataset` object tracks. The first is `dataset.ids`. This is a listing of unique identifiers for the datapoitns in the dataset." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "1fDXCKv_5zHI", - "colab_type": "code", - "outputId": "f28f717f-5869-4f7d-e9d8-8e8518f838ce", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "dataset.ids" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "array([0, 1, 2, 3], dtype=object)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 9 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "qkbLR05r5zHQ", - "colab_type": "text" - }, - "source": [ - "In addition, the `dataset` object has a field `dataset.w`. This is the \"example weight\" associated with each datapoint. Since we haven't explicitly assigned the weights, this is simply going to be all ones." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "uffH-1EI5zHR", - "colab_type": "code", - "outputId": "62728283-37d7-42d5-d568-94186c23606e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "dataset.w" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "array([1., 1., 1., 1.], dtype=float32)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 10 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "XHVs99Jh5zHU", - "colab_type": "text" - }, - "source": [ - "What if we want to set nontrivial weights for a dataset? One time we might want to do this is if we have a dataset where there are only a few positive examples to play with. It's pretty straightforward to do this with DeepChem." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "JHiBOSJB5zHV", - "colab_type": "code", - "outputId": "1ca16108-8e41-40d7-d28d-6376c19f6246", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "w = np.random.random((4,)) # initializing weights with random vector of size 4x1\n", - "dataset_with_weights = NumpyDataset(data, labels, w) # creates numpy dataset object\n", - "dataset_with_weights.w" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "array([0.20590244, 0.60422512, 0.84054797, 0.60248335])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 11 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "LLjtnas35zHk", - "colab_type": "text" - }, - "source": [ - "## MNIST Example\n", - "\n", - "Just to get a better understanding, we'll use the venerable MNIST dataset and use `NumpyDataset` to store the data. We're going to make use of the `tensorflow-datasets` package to facilitate our data reading. You'll need to install this package in order to make use of it. 
" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "BQSEHyoW5zHn", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# Install tensorflow-datasets\n", - "## TODO(rbharath): Switch to stable version on release\n", - "# TODO(rbharath): This only works on TF2. Uncomment once we've upgraded.\n", - "#!pip install -q --upgrade tfds-nightly tf-nightly" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "s4qRvErO5zHx", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# TODO(rbharath): This cell will only work with TF2 installed. Swap to this as default soon.\n", - "\n", - "#import tensorflow_datasets as tfds\n", - "\n", - "#data_dir = '/tmp/tfds'\n", - "\n", - "## Fetch full datasets for evaluation\n", - "# tfds.load returns tf.Tensors (or tf.data.Datasets if batch_size != -1)\n", - "# You can convert them to NumPy arrays (or iterables of NumPy arrays) with tfds.dataset_as_numpy\n", - "#mnist_data, info = tfds.load(name=\"mnist\", batch_size=-1, data_dir=data_dir, with_info=True)\n", - "#mnist_data = tfds.as_numpy(mnist_data)\n", - "#train_data, test_data = mnist_data['train'], mnist_data['test']\n", - "#num_labels = info.features['label'].num_classes\n", - "#h, w, c = info.features['image'].shape\n", - "#num_pixels = h * w * c\n", - "\n", - "## Full train set\n", - "#train_images, train_labels = train_data['image'], train_data['label']\n", - "#train_images = np.reshape(train_images, (len(train_images), num_pixels))\n", - "#train_labels = one_hot(train_labels, num_labels)\n", - "\n", - "## Full test set\n", - "#test_images, test_labels = test_data['image'], test_data['label']\n", - "#test_images = np.reshape(test_images, (len(test_images), num_pixels))\n", - "#test_labels = one_hot(test_labels, num_labels)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "lPTLNO6n5zH7", - "colab_type": "code", - "outputId": 
"ba6b7a8f-80a4-46cc-b8cb-ef0abe259f90", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "from tensorflow.examples.tutorials.mnist import input_data\n", - "\n", - "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", - "# Load the numpy data of MNIST into NumpyDataset\n", - "train = NumpyDataset(mnist.train.images, mnist.train.labels)\n", - "valid = NumpyDataset(mnist.validation.images, mnist.validation.labels)" - ], - "execution_count": 14, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From :3: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please write your own downloading logic.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/base.py:252: _internal_retry..wrap..wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use urllib or similar directly.\n", - "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting 
MNIST_data/train-images-idx3-ubyte.gz\n", - "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.one_hot on tensors.\n", - "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", - "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", - "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", - "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "vqOZyOsy5zH-", - "colab_type": "text" - }, - "source": [ - "Let's take a look at some of the data we've loaded so we can visualize our samples." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "MgAfsAdn5zH_", - "colab_type": "code", - "outputId": "5166a371-5bc2-421c-ffa6-99b54a1786a4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "# Visualize one sample \n", - "sample = np.reshape(train.X[5], (28, 28))\n", - "plt.imshow(sample)\n", - "plt.show()" - ], - "execution_count": 15, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAPEUlEQVR4nO3df7BcZX3H8c8n4ZJAACXQpjFEIRCKwUJgbgKtVONQKPKjQJ2mMB2lM5SrDjjSwakMrZW/asYRIqIFAkRDQZARIihBSFMqUpFygQAJv0IxDMncJEIKBAr5ce+3f9yDc4F7nr3sOfuDPO/XzJ3dPd89e76z8MnZ3eec8zgiBGDnN67TDQBoD8IOZIKwA5kg7EAmCDuQiV3aubFdPSEmalI7Nwlk5U29rm2x1aPVKoXd9gmSLpM0XtI1EbEg9fyJmqSjfGyVTQJIeCBWlNaa/hhve7yk70n6tKRZks60PavZ1wPQWlW+s8+V9GxEPBcR2yTdJOnUetoCULcqYZ8m6YURj9cVy97Gdp/tftv927W1wuYAVNHyX+MjYlFE9EZEb48mtHpzAEpUCft6SdNHPN6vWAagC1UJ+4OSZto+wPauks6QdHs9bQGoW9NDbxGxw/Z5ku7S8NDb4ohYXVtnAGpVaZw9IpZJWlZTLwBaiMNlgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUxUmsUV2HrSnGR98zmvldYemXND3e28zRfW/Wlp7b47D0+uO+Oq55L1HQMbmuqpkyqF3fZaSVskDUraERG9dTQFoH517Nk/FREv1vA6AFqI7+xAJqqGPSTdbfsh232jPcF2n+1+2/3btbXi5gA0q+rH+GMiYr3t35e03PZTEXHvyCdExCJJiyRpL0+OitsD0KRKe/aIWF/cbpK0VNLcOpoCUL+mw257ku0937ov6XhJq+pqDEC9qnyMnyJpqe23XueHEfHzWrpC27hn12T9mUuPSNbvOGVhsn5Qz4TS2lByzequ3O+X5ds+597SmiTN/qPPJev7fSajcfaIeE5S+sgEAF2DoTcgE4QdyARhBzJB2IFMEHYgE5zimrmnL5+drD9zyr8m6+M0MVkfUusOmux7YV6yfs30XzT92t+ZfVOyfsk+n0zWB1/a3PS2W4U9O5AJwg5kgrADmSDsQCYIO5AJwg5kgrADmWCcfSeQOk210Tj66pO/2+DVxyerA4P/l6x/YulXSmszlm5LrjthTfo00sEXX0rWj/jR35TWHppzfXLdh9/YP1mPbduT9W7Enh3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwwzr4TGDi3fPLcZ065vMHa6XH0a1/5cLJ+6znHJ
esz/+vXDbZfbkfTaw7burWn6XV/uv6wZH23Lb9p+rU7hT07kAnCDmSCsAOZIOxAJgg7kAnCDmSCsAOZYJx9J/DFvttKa+Pk5LrfeGlWsn7/XxycrHvtymS9ivF77ZWsr/u7jyXr/3DYraW1R7alJ4ze7c/ff+PojTTcs9tebHuT7VUjlk22vdz2muJ279a2CaCqsXyM/4GkE96x7EJJKyJipqQVxWMAXaxh2CPiXknvnMvmVElLivtLJJ1Wc18Aatbsd/YpETFQ3N8gaUrZE233SeqTpInavcnNAaiq8q/xERFS+ex9EbEoInojordHE6puDkCTmg37RttTJam43VRfSwBaodmw3y7prOL+WZLKx34AdIWG39lt3yhpnqR9ba+T9HVJCyTdbPtsSc9Lmt/KJpE2mPg3u9H86Mv+ZV6yvufa5s9HlySNKz9ffvCThydXPfm7K5L1L3zwnvSmE8cYnPR0o9+U1zeov/80DHtEnFlSOrbmXgC0EIfLApkg7EAmCDuQCcIOZIKwA5ngFNfM7b4hPW1yVanhtTuvv7ql2z792RNLa+M+k55qerDuZroAe3YgE4QdyARhBzJB2IFMEHYgE4QdyARhBzLBOPtOYM0bpVcFkz6wNrnu4uu+k6wv2Phnyfp/Pn9Qsv7zuanX3y257itDbybrc+74+2T9kAtWl9aGXn89ue7OiD07kAnCDmSCsAOZIOxAJgg7kAnCDmSCsAOZ8PCELu2xlyfHUeaitLU7+rDS0s9u+X5LN91oSuhGl7JOOfKyLyXrH/rmr5p+7Z3VA7FCr8bmUf+jsGcHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATnM/+PrD1pDnJ+gtn7CitNRoHr2q8G+wvYqi0dOzqv0yuyjh6vRru2W0vtr3J9qoRyy62vd72yuKv/Gr8ALrCWD7G/0DSCaMsXxgRs4u/ZfW2BaBuDcMeEfdK2tyGXgC0UJUf6M6z/VjxMX/vsifZ7rPdb7t/u7ZW2ByAKpoN+xWSDpQ0W9KApEvKnhgRiyKiNyJ6ezShyc0BqKqpsEfExogYjIghSVdLmltvWwDq1lTYbU8d8fB0SavKngugOzQcZ7d9o6R5kva1vU7S1yXNsz1bUkhaK+nzLezxfW/cYYck63+waH2yfs30q5L11Dnj5aPcY3PhhvQY/63/3ZusX3HcktLatX94fXLdz83/SrK+x82/Ttbxdg3DHhFnjrL42hb0AqCFOFwWyARhBzJB2IFMEHYgE4QdyASXkq7Bi31/nKzf9bVvJesfGDcxWa9yueYLBo5Ornvnf6SHzg5e+JtkfcfAhmR98FNHlm/7+quT61758oxk/WeHlh6lnS0uJQ2AsAO5IOxAJgg7kAnCDmSCsAOZIOxAJriU9BhtOaN8vLrqOPqT27cn6ws3HJesP/3tQ8u3/ZOVyXVnvHl/sl5+keqxGf+LR0trh9x8bnLdR//q28n60uPPS9Z77u5P1nPDnh3IBGEHMkHYgUwQdiAThB3IBGEHMkHYgUwwzj5GLx5Wfk55o3H0pa9PTta/P/+kZH1o5RPJ+p4qv6Ry1UtJVzVut/L35tAj1ybXneCeZH1ol9ZOR72zYc8OZIKwA5kg7EAmCDuQCcIOZIKwA5kg7EAmGGevQaPrun/1nvnJ+sErH6yznbYav+8+yfruS8vfmx/NWNbg1RlHr1PDPbvt6bbvsf2E7dW2v1wsn2x7ue01xS1X7Ae62Fg+xu+QdEFEzJJ0tKRzbc+SdKGkFRExU9KK4jGALtUw7BExEBEPF/e3SHpS0jRJp0paUjxtiaTTWtUkgOre03d22/tLOkLSA5KmRMRAUdogaUrJOn2S+iRponZvtk8AFY3513jbe0i6RdL5EfHqyFoMzw456uyCEbEoInojordHEyo1C6B5Ywq77R4NB/2GiLi1WLzR9tSiPlXSpta0CKAODT/G27akayU9GRGXjijdLuksSQuK29ta0mGX2Pex8mmR/3foj
eS6D56YviTynKvOT9Y/+s/PJ+uDG5v/d3aXaR9K1l8/fFqyfv5lNybrJ+3+Smmt0em333v5wGR9t18+lax3+vTebjOW7+wfl/RZSY/bfusi5BdpOOQ32z5b0vOS0oPJADqqYdgj4j6VH91wbL3tAGgVDpcFMkHYgUwQdiAThB3IBGEHMuHhg9/aYy9PjqO88/2A/8I//Umy/ugXL6/0+qu3pSdOPn/NXzf92j/+6A3JeqPLZDc6vXdo9AMrJUkXDJRPgy1JT31pVrLu+8ung87VA7FCr8bmUf+jsGcHMkHYgUwQdiAThB3IBGEHMkHYgUwQdiATXEq6BpOfGkzWr3x5RrI+a+K6ZH3exPRY9vJDb0nW09Lj6I1c+cpHkvWFd5xcWpv5tUeS6/pNxtHrxJ4dyARhBzJB2IFMEHYgE4QdyARhBzJB2IFMcD57F9hl/w8n62sWfLDp1/7GkT9J1n+15aBk/ad3HZWsH3DR/e+5J7QO57MDIOxALgg7kAnCDmSCsAOZIOxAJgg7kImG4+y2p0u6TtIUSSFpUURcZvtiSedI+m3x1IsiYlnqtRhnB1orNc4+lotX7JB0QUQ8bHtPSQ/ZXl7UFkbEt+pqFEDrjGV+9gFJA8X9LbaflDSt1Y0BqNd7+s5ue39JR0h6oFh0nu3HbC+2vXfJOn22+233b9fWSs0CaN6Yw257D0m3SDo/Il6VdIWkAyXN1vCe/5LR1ouIRRHRGxG9PZpQQ8sAmjGmsNvu0XDQb4iIWyUpIjZGxGBEDEm6WtLc1rUJoKqGYbdtSddKejIiLh2xfOqIp50uaVX97QGoy1h+jf+4pM9Ketz2ymLZRZLOtD1bw8NxayV9viUdAqjFWH6Nv08adRLu5Jg6gO7CEXRAJgg7kAnCDmSCsAOZIOxAJgg7kAnCDmSCsAOZIOxAJgg7kAnCDmSCsAOZIOxAJgg7kIm2Ttls+7eSnh+xaF9JL7atgfemW3vr1r4kemtWnb19JCJ+b7RCW8P+ro3b/RHR27EGErq1t27tS6K3ZrWrNz7GA5kg7EAmOh32RR3efkq39tatfUn01qy29NbR7+wA2qfTe3YAbULYgUx0JOy2T7D9tO1nbV/YiR7K2F5r+3HbK233d7iXxbY32V41Ytlk28ttryluR51jr0O9XWx7ffHerbR9Yod6m277HttP2F5t+8vF8o6+d4m+2vK+tf07u+3xkp6RdJykdZIelHRmRDzR1kZK2F4rqTciOn4Ahu1PSHpN0nUR8bFi2TclbY6IBcU/lHtHxFe7pLeLJb3W6Wm8i9mKpo6cZlzSaZL+Vh187xJ9zVcb3rdO7NnnSno2Ip6LiG2SbpJ0agf66HoRca+kze9YfKqkJcX9JRr+n6XtSnrrChExEBEPF/e3SHprmvGOvneJvtqiE2GfJumFEY/Xqbvmew9Jd9t+yHZfp5sZxZSIGCjub5A0pZPNjKLhNN7t9I5pxrvmvWtm+vOq+IHu3Y6JiCMlfVrSucXH1a4Uw9/BumnsdEzTeLfLKNOM/04n37tmpz+vqhNhXy9p+ojH+xXLukJErC9uN0laqu6binrjWzPoFrebOtzP73TTNN6jTTOuLnjvOjn9eSfC/qCkmbYPsL2rpDMk3d6BPt7F9qTihxPZniTpeHXfVNS3SzqruH+WpNs62MvbdMs03mXTjKvD713Hpz+PiLb/STpRw7/I/4+kf+xEDyV9zZD0aPG3utO9SbpRwx/rtmv4t42zJe0jaYWkNZL+XdLkLurt3yQ9LukxDQdraod6O0bDH9Efk7Sy+Dux0+9doq+2vG8cLgtkgh/ogEwQdiAThB3IBGEHMkHYgUwQdiAThB3IxP8DY0uEeSQOrDIAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "EDfwAaNh5zIM", - "colab_type": "text" - }, - "source": [ - "## Converting a Numpy Array to tf.data.dataset()\n", - "\n", - "\n", - "Let's say you want to use the `tf.data` module instead of DeepChem's data handling library. Doing this is straightforward and is quite similar to getting a `NumpyDataset` object from numpy arrays." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "lhbV376Z5zIN", - "colab_type": "code", - "outputId": "78497237-dab2-4fda-9906-49ab7934cca9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "import tensorflow as tf\n", - "data_small = np.random.random((4,5))\n", - "label_small = np.random.random((4,))\n", - "dataset = tf.data.Dataset.from_tensor_slices((data_small, label_small))\n", - "print (\"Data\\n\")\n", - "print (data_small)\n", - "print (\"\\n Labels\")\n", - "print (label_small)" - ], - "execution_count": 16, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Data\n", - "\n", - "[[0.13069116 0.11472656 0.2155923 0.9727515 0.21519239]\n", - " [0.66106298 0.35152465 0.73548336 0.24584364 0.15193656]\n", - " [0.96722837 0.97295284 0.87249717 0.67836399 0.95312763]\n", - " [0.8326375 0.87615737 0.06231603 0.79597528 0.9668341 ]]\n", - "\n", - " Labels\n", - "[0.45182705 0.03122323 0.41106018 0.35049048]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "aPGKoCv05zIY", - "colab_type": "text" - }, - "source": [ - "## Extracting the numpy dataset from tf.data\n", - "\n", - "In order to extract the numpy array from the `tf.data`, you first need to define an `iterator` to iterate over the `tf.data.Dataset` object and then in the tensorflow session, run over the iterator to get the data instances. Let's have a look at how it's done." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "e5L_u7YC5zIa", - "colab_type": "code", - "outputId": "ef831135-6245-46c9-f97b-a75ecc67f509", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "iterator = dataset.make_one_shot_iterator() # iterator\n", - "next_element = iterator.get_next()\n", - "numpy_data = np.zeros((4, 5))\n", - "numpy_label = np.zeros((4,))\n", - "sess = tf.Session() # tensorflow session \n", - "for i in range(4):\n", - " data_, label_ = sess.run(next_element) # data_ contains the data and label_ contains the labels that we fed in the previous step\n", - " numpy_data[i, :] = data_\n", - " numpy_label[i] = label_\n", - " \n", - "print (\"Numpy Data\")\n", - "print(numpy_data)\n", - "print (\"\\n Numpy Label\")\n", - "print(numpy_label)" - ], - "execution_count": 17, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From :1: DatasetV1.make_one_shot_iterator (from tensorflow.python.data.ops.dataset_ops) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Use `for ... in dataset:` to iterate over a dataset. If using `tf.estimator`, return the `Dataset` object directly from your input function. As a last resort, you can use `tf.compat.v1.data.make_one_shot_iterator(dataset)`.\n", - "Numpy Data\n", - "[[0.13069116 0.11472656 0.2155923 0.9727515 0.21519239]\n", - " [0.66106298 0.35152465 0.73548336 0.24584364 0.15193656]\n", - " [0.96722837 0.97295284 0.87249717 0.67836399 0.95312763]\n", - " [0.8326375 0.87615737 0.06231603 0.79597528 0.9668341 ]]\n", - "\n", - " Numpy Label\n", - "[0.45182705 0.03122323 0.41106018 0.35049048]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6_IMrMth5zIh", - "colab_type": "text" - }, - "source": [ - "Now that you have the numpy arrays of `data` and `labels`, you can convert it to `NumpyDataset`." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "c5DV_aLj5zIo", - "colab_type": "code", - "outputId": "ebced79c-4ed7-47d3-cd2d-bc72efb39f93", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "dataset_ = NumpyDataset(numpy_data, numpy_label) # convert to NumpyDataset\n", - "dataset_.X # printing just to check if the data is same!!" - ], - "execution_count": 18, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "array([[0.13069116, 0.11472656, 0.2155923 , 0.9727515 , 0.21519239],\n", - " [0.66106298, 0.35152465, 0.73548336, 0.24584364, 0.15193656],\n", - " [0.96722837, 0.97295284, 0.87249717, 0.67836399, 0.95312763],\n", - " [0.8326375 , 0.87615737, 0.06231603, 0.79597528, 0.9668341 ]])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 18 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ltQfj-9n5zI_", - "colab_type": "text" - }, - "source": [ - "## Converting NumpyDataset to `tf.data`\n", - "\n", - "This can be easily done by the `make_iterator()` method of `NumpyDataset`. This converts the `NumpyDataset` to `tf.data`. Let's look how it's done!" 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hVy39LEe5zJA", - "colab_type": "code", - "outputId": "2722f7d4-623b-45a0-e059-ce0abb8a3254", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "iterator_ = dataset_.make_iterator() # Using make_iterator for converting NumpyDataset to tf.data\n", - "next_element_ = iterator_.get_next()\n", - "\n", - "sess = tf.Session() # tensorflow session \n", - "data_and_labels = sess.run(next_element_) # data_ contains the data and label_ contains the labels that we fed in the previous step\n", - "\n", - "\n", - "print (\"Numpy Data\")\n", - "print(data_and_labels[0]) # Data in the first index \n", - "print (\"\\n Numpy Label\")\n", - "print(data_and_labels[1]) # Labels in the second index" - ], - "execution_count": 19, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Numpy Data\n", - "[[0.66106298 0.35152465 0.73548336 0.24584364 0.15193656]\n", - " [0.96722837 0.97295284 0.87249717 0.67836399 0.95312763]\n", - " [0.8326375 0.87615737 0.06231603 0.79597528 0.9668341 ]\n", - " [0.13069116 0.11472656 0.2155923 0.9727515 0.21519239]]\n", - "\n", - " Numpy Label\n", - "[0.03122323 0.41106018 0.35049048 0.45182705]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "PNoeh7sG5zJP", - "colab_type": "text" - }, - "source": [ - "# Using Splitters to split DeepChem Datasets\n", - "\n", - "In this section we will have a look at the various splitters that are present in deepchem library and how each of them can be used.\n", - "\n", - "### Index Splitter\n", - "\n", - "We start with the IndexSplitter. This splitter returns a range object which contains the split according to the fractions provided by the user. 
The three range objects can then be used to iterate over the dataset as test,valid and Train.\n", - "\n", - "Each of the splitters that will be used has two functions inherited from the main class that are `train_test_split` which can be used to split the data into training and tesing data and the other fucnction is `train_valid_test_split` which is used to split the data to train, validation and test split.\n", - "\n", - "Note: All the splitters have a default percentage of 80,10,10 as train, valid and test respectively. But can be changed by specifying the `frac_train`,`frac_test` and `frac_valid` in the ratio we want to split the data." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "I-MBPtBX5zJU", - "colab_type": "code", - "outputId": "2dbe02ca-b88d-4ad2-8ee8-fbd9f540ba85", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "!wget https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/example.csv" - ], - "execution_count": 20, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2020-06-12 03:06:22-- https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/example.csv\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 568 [text/plain]\n", - "Saving to: ‘example.csv’\n", - "\n", - "\rexample.csv 0%[ ] 0 --.-KB/s \rexample.csv 100%[===================>] 568 --.-KB/s in 0s \n", - "\n", - "2020-06-12 03:06:22 (28.8 MB/s) - ‘example.csv’ saved [568/568]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "bs1xIWzo5zJe", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import os\n", - "\n", - "current_dir=os.path.dirname(os.path.realpath('__file__'))\n", - "input_data=os.path.join(current_dir,'example.csv')" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "bXHlTmdK5zJh", - "colab_type": "text" - }, - "source": [ - "We then featurize the data using any one of the featurizers present." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "jN1lRtgC5zJi", - "colab_type": "code", - "outputId": "86b6be7f-d4ee-43e5-d980-b6786e049dbd", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "import deepchem as dc\n", - "\n", - "tasks=['log-solubility']\n", - "featurizer=dc.feat.CircularFingerprint(size=1024)\n", - "loader = dc.data.CSVLoader(tasks=tasks, smiles_field=\"smiles\",featurizer=featurizer)\n", - "dataset=loader.featurize(input_data)" - ], - "execution_count": 22, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /content/example.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 0.050 s\n", - "TIMING: dataset construction took 0.082 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "vh7q0jGx5zJv", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from deepchem.splits.splitters import IndexSplitter" - ], - "execution_count": 0, - "outputs": [] - }, - { - 
"cell_type": "code", - "metadata": { - "id": "IemZbbvp5zJ1", - "colab_type": "code", - "colab": {} - }, - "source": [ - "splitter=IndexSplitter()\n", - "train_data,valid_data,test_data=splitter.split(dataset)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "R6aE7YPn5zJ9", - "colab_type": "code", - "colab": {} - }, - "source": [ - "train_data=[i for i in train_data]\n", - "valid_data=[i for i in valid_data]\n", - "test_data=[i for i in test_data]" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "VkW5MLyL5zKC", - "colab_type": "code", - "outputId": "3aebdd56-0fe0-4022-fe06-4b835233dae1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "len(train_data),len(valid_data),len(test_data)" - ], - "execution_count": 26, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(8, 1, 1)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 26 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "H7BQBpnP5zKG", - "colab_type": "text" - }, - "source": [ - "As we can see that without providing the user specifications on how to split the data, the data was split into a default of 80,10,10.\n", - "\n", - "But when we specify the parameters the dataset can be split according to our specificaitons." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "cYeqhEgA5zKH", - "colab_type": "code", - "outputId": "b8f15f13-108f-4d24-9a24-47d9695121ce", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "train_data,valid_data,test_data=splitter.split(dataset,frac_train=0.7,frac_valid=0.2,frac_test=0.1)\n", - "train_data=[i for i in train_data]\n", - "valid_data=[i for i in valid_data]\n", - "test_data=[i for i in test_data]\n", - "len(train_data),len(valid_data),len(test_data)" - ], - "execution_count": 27, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(7, 2, 1)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 27 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "AI1VsAGH5zKQ", - "colab_type": "text" - }, - "source": [ - "## Specified Splitter\n", - "\n", - "The next splitter that is present in the library is the specified splitter. This splitter needs a list from the dataset where it is specified which data is for training and which is for validation and testing." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "kplzieL35zKb", - "colab_type": "code", - "outputId": "a67d0025-de10-4573-eb9e-675657bf252b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "!wget https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/user_specified_example.csv" - ], - "execution_count": 28, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2020-06-12 03:06:24-- https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/user_specified_example.csv\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 714 [text/plain]\n", - "Saving to: ‘user_specified_example.csv’\n", - "\n", - "\r user_spec 0%[ ] 0 --.-KB/s \ruser_specified_exam 100%[===================>] 714 --.-KB/s in 0s \n", - "\n", - "2020-06-12 03:06:24 (41.9 MB/s) - ‘user_specified_example.csv’ saved [714/714]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "s3t_4cEe5zKg", - "colab_type": "code", - "outputId": "49a0234f-b015-408d-b621-688572d3cd3b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "from deepchem.splits.splitters import SpecifiedSplitter\n", - "current_dir=os.path.dirname(os.path.realpath('__file__'))\n", - "input_file=os.path.join(current_dir, 'user_specified_example.csv')\n", - "\n", - "tasks=['log-solubility']\n", - "featurizer=dc.feat.CircularFingerprint(size=1024)\n", - "loader = dc.data.CSVLoader(tasks=tasks, smiles_field=\"smiles\",featurizer=featurizer)\n", - "dataset=loader.featurize(input_file)\n", - "\n", - "split_field='split'\n", - "\n", - "splitter=SpecifiedSplitter(input_file,split_field)" - ], - "execution_count": 29, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /content/user_specified_example.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 0.041 s\n", - "TIMING: dataset construction took 0.055 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "PxVaPW9I5zKj", - "colab_type": "code", - "colab": {} - }, - "source": [ - "train_data,valid_data,test_data=splitter.split(dataset)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "JOz75Y125zKt", - "colab_type": "text" - }, - "source": [ - "When we split the data using the specified splitter it compares the data in each 
row of the `split_field` which the user has to specify wether the given row should be used as training data, validation data or testing data. The user has to specify as `train`,`test` and `valid` in the `split_field`.\n", - "Note: The input is case insensitive." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "JNBpEHmm5zKx", - "colab_type": "code", - "outputId": "6973590f-5b0d-42cc-b276-79550d72c0ce", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "train_data,valid_data,test_data" - ], - "execution_count": 31, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "([0, 1, 2, 3, 4, 5], [6, 7], [8, 9])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 31 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QVmV8dFe5zK1", - "colab_type": "text" - }, - "source": [ - "## Indice Splitter\n", - "\n", - "Another splitter present in the fraework is `IndiceSplitter`. This splitter takes an input of valid_indices and test_indices which are lists with the indices of validation data and test data in the dataset respectively." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "zCT3KKQz5zK2", - "colab_type": "code", - "outputId": "11e48048-7fe1-4fc3-c8fa-819cc9e78dce", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "from deepchem.splits.splitters import IndiceSplitter\n", - "\n", - "splitter=IndiceSplitter(valid_indices=[7],test_indices=[9])\n", - "splitter.split(dataset)" - ], - "execution_count": 32, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "([0, 1, 2, 3, 4, 5, 6, 8], [7], [9])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 32 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ROktroBH5zK6", - "colab_type": "text" - }, - "source": [ - "## RandomGroupSplitter\n", - "\n", - "The splitter which can be used to split the data on the basis of groupings is the `RandomGroupSplitter`. This splitter that splits on groupings. \n", - "\n", - "An example use case is when there are multiple conformations of the same molecule that share the same topology.This splitter subsequently guarantees that resulting splits preserve groupings.\n", - "\n", - "Note that it doesn't do any dynamic programming or something fancy to try to maximize the choice such that `frac_train`, `frac_valid`, or `frac_test` is maximized.It simply permutes the groups themselves. As such, use with caution if the number of elements per group varies significantly.\n", - "\n", - "The parameter that needs to be provided with the splitter is `groups`. This is an array like list of hashables which is the same as the size of the dataset." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Tu_TRPslerPX", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - }, - "outputId": "b64c8385-b21a-48cc-b47f-c79386331587" - }, - "source": [ - "!wget https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/example.csv" - ], - "execution_count": 33, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2020-06-12 03:06:25-- https://raw.githubusercontent.com/deepchem/deepchem/master/deepchem/models/tests/example.csv\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 568 [text/plain]\n", - "Saving to: ‘example.csv.1’\n", - "\n", - "\rexample.csv.1 0%[ ] 0 --.-KB/s \rexample.csv.1 100%[===================>] 568 --.-KB/s in 0s \n", - "\n", - "2020-06-12 03:06:25 (19.1 MB/s) - ‘example.csv.1’ saved [568/568]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "7jr7bNmneGMe", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# This is workaround...\n", - "def load_solubility_data():\n", - " \"\"\"Loads solubility dataset\"\"\"\n", - " featurizer = dc.feat.CircularFingerprint(size=1024)\n", - " tasks = [\"log-solubility\"]\n", - " task_type = \"regression\"\n", - " loader = dc.data.CSVLoader(\n", - " tasks=tasks, smiles_field=\"smiles\", featurizer=featurizer)\n", - "\n", - " return loader.featurize(\"example.csv\")" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "es-X6PDQ5zK7", - "colab_type": "code", - "outputId": "b96441ce-a822-47a9-b718-148178e86e80", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "from deepchem.splits.splitters 
import RandomGroupSplitter\n", - "\n", - "groups = [0, 4, 1, 2, 3, 7, 0, 3, 1, 0]\n", - "solubility_dataset=load_solubility_data()\n", - "\n", - "splitter=RandomGroupSplitter(groups=groups)\n", - "\n", - "train_idxs, valid_idxs, test_idxs = splitter.split(solubility_dataset)" - ], - "execution_count": 35, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from example.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 0.038 s\n", - "TIMING: dataset construction took 0.051 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "sCYn9An75zLK", - "colab_type": "code", - "outputId": "d3604d91-d139-4259-b560-0cca187f20f2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "train_idxs,valid_idxs,test_idxs" - ], - "execution_count": 36, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "([4, 7, 2, 8, 1, 0, 6, 9], [5], [3])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 36 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "PW-jhqnr5zLk", - "colab_type": "code", - "colab": {} - }, - "source": [ - "train_data=[]\n", - "for i in range(len(train_idxs)):\n", - " train_data.append(groups[train_idxs[i]])\n", - "\n", - "valid_data=[]\n", - "for i in range(len(valid_idxs)):\n", - " valid_data.append(groups[valid_idxs[i]])\n", - "\n", - "test_data=[]\n", - "for i in range(len(test_idxs)):\n", - " test_data.append(groups[test_idxs[i]])" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Wdiwca-U5zLo", - "colab_type": "code", - "outputId": "b6cd177e-b012-43cc-de8b-ea0524dc8e53", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 68 - } - }, - "source": [ - "print(\"Groups present in the 
training data =\",train_data)\n", - "print(\"Groups present in the validation data = \",valid_data)\n", - "print(\"Groups present in the testing data = \", test_data)" - ], - "execution_count": 38, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Groups present in the training data = [3, 3, 1, 1, 4, 0, 0, 0]\n", - "Groups present in the validation data = [7]\n", - "Groups present in the testing data = [2]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "q3i_fjBt5zLs", - "colab_type": "text" - }, - "source": [ - "So the `RandomGroupSplitter` when properly assigned the groups, splits the data accordingly and preserves the groupings." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "He3vY6wu5zLu", - "colab_type": "text" - }, - "source": [ - "## Scaffold Splitter\n", - "\n", - "The `ScaffoldSplitter` splits the data based on the scaffold of small molecules. The splitter takes the data and generates scaffolds using the smiles in the data. Then the splitter sorts the data into scaffold sets." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "C8Kkvi5F5zL_", - "colab_type": "code", - "outputId": "ec06cacb-b645-4d6d-acc6-8ce0d88bea9f", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 170 - } - }, - "source": [ - "from deepchem.splits.splitters import ScaffoldSplitter\n", - "\n", - "splitter=ScaffoldSplitter()\n", - "solubility_dataset=load_solubility_data()\n", - "train_data,valid_data,test_data = splitter.split(solubility_dataset,frac_train=0.7,frac_valid=0.2,frac_test=0.1)\n", - "len(train_data),len(valid_data),len(test_data)" - ], - "execution_count": 39, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from example.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 0.038 s\n", - "TIMING: dataset construction took 0.052 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - }, - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(7, 2, 1)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 39 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "MhZxVoVs5zMa", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "B0u7qIZd5zGG" + }, + "source": [ + "# Training a Model with DeepChem: A First Example\n", + "\n", + "Deep learning can be used to solve many sorts of problems, but the basic workflow is usually the same. Here are the typical steps you follow.\n", + "\n", + "1. Select the data set you will train your model on (or create a new data set if there isn't an existing suitable one).\n", + "2. Create the model.\n", + "3. Train the model on the data.\n", + "4. Evaluate the model on an independent test set to see how well it works.\n", + "5. Use the model to make predictions about new data.\n", + "\n", + "With DeepChem, each of these steps can be as little as one or two lines of Python code. In this tutorial we will walk through a basic example showing the complete workflow to solve a real world scientific problem.\n", + "\n", + "The problem we will solve is predicting the solubility of small molecules given their chemical formulas. This is a very important property in drug development: if a proposed drug isn't soluble enough, you probably won't be able to get enough into the patient's bloodstream to have a therapeutic effect. The first thing we need is a data set of measured solubilities for real molecules. One of the core components of DeepChem is MoleculeNet, a diverse collection of chemical and molecular data sets. For this tutorial, we can use the Delaney solubility data set." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "saTaOpXY5zGI" + }, + "outputs": [], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_delaney(featurizer='GraphConv')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "F922OPtL5zGM" + }, + "source": [ + "I won't say too much about this code right now. We will see many similar examples in later tutorials. There are two details I do want to draw your attention to. First, notice the `featurizer` argument passed to the `load_delaney()` function. Molecules can be represented in many ways. We therefore tell it which representation we want to use, or in more technical language, how to \"featurize\" the data. Second, notice that we actually get three different data sets: a training set, a validation set, and a test set. Each of these serves a different function in the standard deep learning workflow.\n", + "\n", + "Now that we have our data, the next step is to create a model. We will use a particular kind of model called a \"graph convolutional network\", or \"graphconv\" for short." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + }, + "colab_type": "code", + "id": "YEDcUsz35zGO", + "outputId": "5a05747f-8b06-407d-9b11-790a1b4d1c8f" + }, + "outputs": [], + "source": [ + "model = dc.models.GraphConvModel(n_tasks=1, mode='regression', dropout=0.2)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "E8UCFrrN5zGf" + }, + "source": [ + "Here again I will not say much about the code. Later tutorials will give lots more information about `GraphConvModel`, as well as other types of models provided by DeepChem.\n", + "\n", + "We now need to train the model on the data set. 
We simply give it the data set and tell it how many epochs of training to perform (that is, how many complete passes through the data to make)." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "e5K3rdGV5zGg" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/peastman/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/indexed_slices.py:434: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", + " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n" + ] + }, + { + "data": { + "text/plain": [ + "0.1147727108001709" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" } - ] -} \ No newline at end of file + ], + "source": [ + "model.fit(train_dataset, nb_epoch=100)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_Zcd7jTd5zGr" + }, + "source": [ + "If everything has gone well, we should now have a fully trained model! But do we? To find out, we must evaluate the model on the test set. We do that by selecting an evaluation metric and calling `evaluate()` on the model. For this example, let's use the Pearson correlation, also known as r2, as our metric. We can evaluate it on both the training set and test set." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "LJc90fs_5zGs", + "outputId": "8c9fd5ab-e23a-40dc-9292-8b4ff3a86890" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training set score: {'pearson_r2_score': 0.8914309123616354}\n", + "Test set score: {'pearson_r2_score': 0.7744246373275885}\n" + ] + } + ], + "source": [ + "metric = dc.metrics.Metric(dc.metrics.pearson_r2_score)\n", + "print(\"Training set score:\", model.evaluate(train_dataset, [metric], transformers))\n", + "print(\"Test set score:\", model.evaluate(test_dataset, [metric], transformers))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "aQa88cbj5zGw" + }, + "source": [ + "Notice that it has a higher score on the training set than the test set. Models usually perform better on the particular data they were trained on than they do on similar but independent data. This is called \"overfitting\", and it is the reason it is essential to evaluate your model on an independent test set.\n", + "\n", + "Our model still has quite respectable performance on the test set. For comparison, a model that produced totally random outputs would have a correlation of 0, while one that made perfect predictions would have a correlation of 1. Our model does quite well, so now we can use it to make predictions about other molecules we care about.\n", + "\n", + "Since this is just a tutorial and we don't have any other molecules we specifically want to predict, let's just use the first ten molecules from the test set. For each one we print out the chemical structure (represented as a SMILES string) and the predicted solubility." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + }, + "colab_type": "code", + "id": "HSVqeYox5zGx", + "outputId": "270a6a17-6238-4081-b0cf-3f17e23f4bb5" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-1.4806377] C1c2ccccc2c3ccc4ccccc4c13\n", + "[0.37774816] COc1ccccc1Cl\n", + "[-1.3225354] COP(=S)(OC)Oc1cc(Cl)c(Br)cc1Cl\n", + "[-0.590009] ClC(Cl)CC(=O)NC2=C(Cl)C(=O)c1ccccc1C2=O\n", + "[-2.0383604] ClC(Cl)C(c1ccc(Cl)cc1)c2ccc(Cl)cc2 \n", + "[2.0883522] COC(=O)C=C\n", + "[-0.25627953] CN(C)C(=O)Nc2ccc(Oc1ccc(Cl)cc1)cc2\n", + "[0.97384584] N(=Nc1ccccc1)c2ccccc2\n", + "[-0.40858203] CC(C)c1ccc(C)cc1\n", + "[1.1107407] Oc1c(Cl)cccc1Cl\n" + ] + } + ], + "source": [ + "solubilities = model.predict_on_batch(test_dataset.X[:10])\n", + "for molecule, solubility in zip(test_dataset.ids, solubilities):\n", + " print(solubility, molecule)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "MhZxVoVs5zMa" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/02_Learning_MNIST_Digit_Classifiers.ipynb b/examples/tutorials/02_Learning_MNIST_Digit_Classifiers.ipynb deleted file mode 100644 index e7fa11a3e962debcb4171974a8da4024800712ec..0000000000000000000000000000000000000000 --- a/examples/tutorials/02_Learning_MNIST_Digit_Classifiers.ipynb +++ /dev/null @@ -1,363 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "02_Learning_MNIST_Digit_Classifiers.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "DprlHnnr5xE4", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 2: Learning MNIST Digit Classifiers\n", - "\n", - "In the previous tutorial, we learned some basics of how to load data into DeepChem and how to use the basic DeepChem objects to load and manipulate this data. In this tutorial, you'll put the parts together and learn how to train a basic image classification model in DeepChem. You might ask, why are we bothering to learn this material in DeepChem? 
Part of the reason is that image processing is an increasingly important part of AI for the life sciences. So learning how to train image processing models will be very useful for using some of the more advanced DeepChem features.\n", - "\n", - "The MNIST dataset contains handwritten digits along with their human annotated labels. The learning challenge for this dataset is to train a model that maps the digit image to its true label. MNIST has been a standard benchmark for machine learning for decades at this point. \n", - "\n", - "![MNIST](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/mnist_examples.png?raw=1)\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/02_Learning_MNIST_Digit_Classifiers.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "We recommend running this tutorial on Google colab. You'll need to run the following cell of installation commands on Colab to get your environment set up. 
If you'd rather run the tutorial locally, make sure you don't run these commands (since they'll download and install a new Anaconda python setup)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "UXJKRlAv5xFA", - "colab_type": "code", - "outputId": "40b16b6e-9346-403e-daae-86af016d45b4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 9934 0 --:--:-- --:--:-- --:--:-- 9934\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.54 s, sys: 520 ms, total: 3.06 s\n", - "Wall time: 1min 58s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hbTulXIP5xFN", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from tensorflow.examples.tutorials.mnist import input_data" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "4u9vY8iu5xFU", - "colab_type": "code", - "outputId": "cfefccae-e0ad-470a-dbf0-8d0d4c00d198", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 530 - } - }, - "source": [ - "# TODO: This is deprecated. 
Let's replace with a DeepChem native loader for maintainability.\n", - "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From :2: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please write your own downloading logic.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/base.py:252: _internal_retry..wrap..wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use urllib or similar directly.\n", - "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", - "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future 
version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.one_hot on tensors.\n", - "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", - "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", - "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", - "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "MsHJLy-35xFe", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc\n", - "import tensorflow as tf\n", - "from tensorflow.keras.layers import Reshape, Conv2D, Flatten, Dense, Softmax" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "n0nJCPak5xFo", - "colab_type": "code", - "colab": {} - }, - "source": [ - "train = dc.data.NumpyDataset(mnist.train.images, mnist.train.labels)\n", - "valid = dc.data.NumpyDataset(mnist.validation.images, mnist.validation.labels)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Y5AfheB55xF1", - "colab_type": "code", - "colab": {} - }, - "source": [ - "keras_model = tf.keras.Sequential([\n", - " Reshape((28, 28, 
1)),\n", - " Conv2D(filters=32, kernel_size=5, activation=tf.nn.relu),\n", - " Conv2D(filters=64, kernel_size=5, activation=tf.nn.relu),\n", - " Flatten(),\n", - " Dense(1024, activation=tf.nn.relu),\n", - " Dense(10),\n", - " Softmax()\n", - "])\n", - "model = dc.models.KerasModel(keras_model, dc.models.losses.CategoricalCrossEntropy())" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Xq9T4trd5xGD", - "colab_type": "code", - "outputId": "e626df29-14e6-46ad-e5db-a039de833366", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 275 - } - }, - "source": [ - "model.fit(train, nb_epoch=2)" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:200: The name tf.placeholder is deprecated. 
Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n" - ], - "name": "stdout" - }, - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "0.0" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 7 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ZGP9d70u5xGU", - "colab_type": "code", - "outputId": "8dca6c10-a762-4c5e-f86a-f2b36584d599", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "from sklearn.metrics import roc_curve, auc\n", - "import numpy as np\n", - "\n", - "print(\"Validation\")\n", - "prediction = np.squeeze(model.predict_on_batch(valid.X))\n", - "\n", - "fpr = dict()\n", - "tpr = dict()\n", - "roc_auc = dict()\n", - "for i in range(10):\n", - " fpr[i], tpr[i], thresh = roc_curve(valid.y[:, i], prediction[:, i])\n", - " roc_auc[i] = auc(fpr[i], tpr[i])\n", - " print(\"class %s:auc=%s\" % (i, roc_auc[i]))" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Validation\n", - "class 0:auc=0.9999482812520925\n", - "class 1:auc=0.9999327470315621\n", - "class 2:auc=0.9999223382455529\n", - "class 3:auc=0.9999378924197698\n", - "class 4:auc=0.999804920932277\n", - "class 5:auc=0.9997608046652174\n", - "class 6:auc=0.9999347825797615\n", - "class 7:auc=0.9997099080694587\n", - "class 8:auc=0.999882187740275\n", - "class 9:auc=0.9996286953889618\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ccdgh2Ni5xGx", - "colab_type": "text" - }, - "source": [ - "# Congratulations! 
Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/02_Working_With_Datasets.ipynb b/examples/tutorials/02_Working_With_Datasets.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..12f4f71732fcc76ec0411c8ee3240f4e70960dd3 --- /dev/null +++ b/examples/tutorials/02_Working_With_Datasets.ipynb @@ -0,0 +1,997 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "socSJe925zFv" + }, + "source": [ + "# Tutorial 2: Working With Datasets\n", + "\n", + "Data is central to machine learning. This tutorial introduces the `Dataset` class that DeepChem uses to store and manage data. It provides simple but powerful tools for efficiently working with large amounts of data. It also is designed to easily interact with other popular Python frameworks such as NumPy, Pandas, TensorFlow, and PyTorch.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/02_Working_With_Datasets.ipynb)\n", + "\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "OyxRVW5X5zF0", + "outputId": "affd23f1-1929-456a-f8a6-e53a874c84b4" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "CMWAv-Z46nCc", + "outputId": "9ae7cfd0-ebbf-40b0-f6f1-2940cf32a839" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Jk47QTZ95zF-" + }, + "source": [ + "We can now import the `deepchem` package to play with." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "colab_type": "code", + "id": "PDiY03h35zF_", + "outputId": "cdd7401d-19a0-4476-9297-b04defc67178" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import deepchem as dc\n", + "dc.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "B0u7qIZd5zGG" + }, + "source": [ + "# Anatomy of a Dataset\n", + "\n", + "In the last tutorial we loaded the Delaney dataset of molecular solubilities. Let's load it again." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "saTaOpXY5zGI" + }, + "outputs": [], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_delaney(featurizer='GraphConv')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "F922OPtL5zGM" + }, + "source": [ + "We now have three Dataset objects: the training, validation, and test sets. What information does each of them contain? We can start to get an idea by printing out the string representation of one of them." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + }, + "colab_type": "code", + "id": "YEDcUsz35zGO", + "outputId": "5a05747f-8b06-407d-9b11-790a1b4d1c8f" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "print(test_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "E8UCFrrN5zGf" + }, + "source": [ + "There's a lot of information there, so let's start at the beginning. 
It begins with the label \"DiskDataset\". Dataset is an abstract class. It has a few subclasses that correspond to different ways of storing data.\n", + "\n", + "- `DiskDataset` is a dataset that has been saved to disk. The data is stored in a way that can be efficiently accessed, even if the total amount of data is far larger than your computer's memory.\n", + "- `NumpyDataset` is an in-memory dataset that holds all the data in NumPy arrays. It is a useful tool when manipulating small to medium sized datasets that can fit entirely in memory.\n", + "- `ImageDataset` is a more specialized class that stores some or all of the data in image files on disk. It is useful when working with models that have images as their inputs or outputs.\n", + "\n", + "Now let's consider the contents of the Dataset. Every Dataset stores a list of *samples*. Very roughly speaking, a sample is a single data point. In this case, each sample is a molecule. In other datasets a sample might correspond to an experimental assay, a cell line, an image, or many other things. For every sample the dataset stores the following information.\n", + "\n", + "- The *features*, referred to as `X`. This is the input that should be fed into a model to represent the sample.\n", + "- The *labels*, referred to as `y`. This is the desired output from the model. During training, it tries to make the model's output for each sample as close as possible to `y`.\n", + "- The *weights*, referred to as `w`. This can be used to indicate that some data values are more important than others. In later tutorials we will see examples of how this is useful.\n", + "- An *ID*, which is a unique identifier for the sample. This can be anything as long as it is unique. Sometimes it is just an integer index, but in this dataset the ID is a SMILES string describing the molecule.\n", + "\n", + "Notice that `X`, `y`, and `w` all have 113 as the size of their first dimension. 
That means this dataset contains 113 samples.\n", + "\n", + "The final piece of information listed in the output is `task_names`. Some datasets contain multiple pieces of information for each sample. For example, if a sample represents a molecule, the dataset might record the results of several different experiments on that molecule. This dataset has only a single task: \"measured log solubility in mols per litre\". Also notice that `y` and `w` each have shape (113, 1). The second dimension of these arrays usually matches the number of tasks.\n", + "\n", + "# Accessing Data from a Dataset\n", + "\n", + "There are many ways to access the data contained in a dataset. The simplest is just to directly access the `X`, `y`, `w`, and `ids` properties. Each of these returns the corresponding information as a NumPy array." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "e5K3rdGV5zGg" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-1.7065408738415053],\n", + " [0.2911162036252904],\n", + " [-1.4272475857596547],\n", + " [-0.9254664241210759],\n", + " [-1.9526976701170347],\n", + " [1.3514839414275706],\n", + " [-0.8591934405084332],\n", + " [-0.6509069205829855],\n", + " [-0.32900957160729316],\n", + " [0.6082797680572224],\n", + " [1.8295961803473488],\n", + " [1.6213096604219008],\n", + " [1.3751528641463715],\n", + " [0.45632528420252055],\n", + " [1.0532555151706793],\n", + " [-1.1053502367839627],\n", + " [-0.2011973889257683],\n", + " [0.3479216181504126],\n", + " [-0.9870056231899582],\n", + " [-0.8161160011602158],\n", + " [0.8402352107014712],\n", + " [0.22815686919328],\n", + " [0.06247441016167367],\n", + " [1.040947675356903],\n", + " [-0.5197810887208284],\n", + " [0.8023649343513898],\n", + " [-0.41895147793873655],\n", + " [-2.5964923680684198],\n", + " [1.7443880585596654],\n", + " [0.45206487811313645],\n", + " [0.233837410645792],\n", + " [-1.7917489956291888],\n", 
+ " [0.7739622270888287],\n", + " [1.0011838851893173],\n", + " [-0.05445006806920272],\n", + " [1.1043803882432892],\n", + " [0.7597608734575482],\n", + " [-0.7001382798380905],\n", + " [0.8213000725264304],\n", + " [-1.3136367567094103],\n", + " [0.4567986626568967],\n", + " [-0.5732728540653187],\n", + " [0.4094608172192949],\n", + " [-0.3242757870635329],\n", + " [-0.049716283525442634],\n", + " [-0.39054877067617544],\n", + " [-0.08095926151425996],\n", + " [-0.2627365879946506],\n", + " [-0.5467636606202616],\n", + " [1.997172153196459],\n", + " [-0.03551492989416198],\n", + " [1.4508934168465344],\n", + " [-0.8639272250521937],\n", + " [0.23904457364392848],\n", + " [0.5278054308132993],\n", + " [-0.48475108309700315],\n", + " [0.2248432200126478],\n", + " [0.3431878336066523],\n", + " [1.5029650468278963],\n", + " [-0.4946920306388995],\n", + " [0.3479216181504126],\n", + " [0.7928973652638694],\n", + " [0.5609419226196206],\n", + " [-0.13965818985688602],\n", + " [-0.13965818985688602],\n", + " [0.15857023640000523],\n", + " [1.6071083067906202],\n", + " [1.9006029485037514],\n", + " [-0.7171799041956278],\n", + " [-0.8165893796145915],\n", + " [-0.13019062076936566],\n", + " [-0.24380144981960986],\n", + " [-0.14912575894440638],\n", + " [0.9538460397517154],\n", + " [-0.07811899078800374],\n", + " [-0.18226225075072758],\n", + " [0.2532459272752089],\n", + " [0.6887541053011454],\n", + " [0.044012650441008896],\n", + " [-0.5514974451640217],\n", + " [-0.2580028034508905],\n", + " [-0.021313576262881533],\n", + " [-2.4128215277705247],\n", + " [0.07336211461232214],\n", + " [0.9017744097703536],\n", + " [1.9384732248538328],\n", + " [0.8402352107014712],\n", + " [-0.10652169805056463],\n", + " [1.07692443788948],\n", + " [-0.403803367398704],\n", + " [1.2662758196398873],\n", + " [-0.2532690189071302],\n", + " [0.29064282517091444],\n", + " [0.9443784706641951],\n", + " [-0.41563782875810434],\n", + " [-0.7370617992794205],\n", + " 
[-1.0012069768212388],\n", + " [0.46626623174441706],\n", + " [0.3758509469585975],\n", + " [-0.46628932337633816],\n", + " [1.2662758196398873],\n", + " [-1.4968342185529295],\n", + " [-0.17800184466134344],\n", + " [0.8828392715953128],\n", + " [-0.6083028596891439],\n", + " [-2.170451759130003],\n", + " [0.32898647997537184],\n", + " [0.3005837727128107],\n", + " [0.6461500444073038],\n", + " [1.5058053175541524],\n", + " [-0.007585601085977053],\n", + " [-0.049716283525442634],\n", + " [-0.6849901692980588]], dtype=object)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "test_dataset.y" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_Zcd7jTd5zGr" + }, + "source": [ + "This is a very easy way to access data, but you should be very careful about using it. This requires the data for all samples to be loaded into memory at once. That's fine for small datasets like this one, but for large datasets it could easily take more memory than you have.\n", + "\n", + "A better approach is to iterate over the dataset. That lets it load just a little data at a time, process it, then free the memory before loading the next bit. You can use the `itersamples()` method to iterate over samples one at a time." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "LJc90fs_5zGs", + "outputId": "8c9fd5ab-e23a-40dc-9292-8b4ff3a86890" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-1.70654087] C1c2ccccc2c3ccc4ccccc4c13\n", + "[0.2911162] COc1ccccc1Cl\n", + "[-1.42724759] COP(=S)(OC)Oc1cc(Cl)c(Br)cc1Cl\n", + "[-0.92546642] ClC(Cl)CC(=O)NC2=C(Cl)C(=O)c1ccccc1C2=O\n", + "[-1.95269767] ClC(Cl)C(c1ccc(Cl)cc1)c2ccc(Cl)cc2 \n", + "[1.35148394] COC(=O)C=C\n", + "[-0.85919344] CN(C)C(=O)Nc2ccc(Oc1ccc(Cl)cc1)cc2\n", + "[-0.65090692] N(=Nc1ccccc1)c2ccccc2\n", + "[-0.32900957] CC(C)c1ccc(C)cc1\n", + "[0.60827977] Oc1c(Cl)cccc1Cl\n", + "[1.82959618] OCC2OC(OC1(CO)OC(CO)C(O)C1O)C(O)C(O)C2O \n", + "[1.62130966] OC1C(O)C(O)C(O)C(O)C1O\n", + "[1.37515286] Cn2c(=O)n(C)c1ncn(CC(O)CO)c1c2=O\n", + "[0.45632528] OCC(NC(=O)C(Cl)Cl)C(O)c1ccc(cc1)N(=O)=O\n", + "[1.05325552] CCC(O)(CC)CC\n", + "[-1.10535024] CC45CCC2C(CCC3CC1SC1CC23C)C4CCC5O\n", + "[-0.20119739] Brc1ccccc1Br\n", + "[0.34792162] Oc1c(Cl)cc(Cl)cc1Cl\n", + "[-0.98700562] CCCN(CCC)c1c(cc(cc1N(=O)=O)S(N)(=O)=O)N(=O)=O\n", + "[-0.816116] C2c1ccccc1N(CCF)C(=O)c3ccccc23 \n", + "[0.84023521] CC(C)C(=O)C(C)C\n", + "[0.22815687] O=C1NC(=O)NC(=O)C1(C(C)C)CC=C(C)C\n", + "[0.06247441] c1c(O)C2C(=O)C3cc(O)ccC3OC2cc1(OC)\n", + "[1.04094768] Cn1cnc2n(C)c(=O)n(C)c(=O)c12\n", + "[-0.51978109] CC(=O)SC4CC1=CC(=O)CCC1(C)C5CCC2(C)C(CCC23CCC(=O)O3)C45\n", + "[0.80236493] Cc1ccc(O)cc1C\n", + "[-0.41895148] O(c1ccccc1)c2ccccc2\n", + "[-2.59649237] Clc1cc(Cl)c(cc1Cl)c2cc(Cl)c(Cl)cc2Cl \n", + "[1.74438806] NC(=O)c1cccnc1 \n", + "[0.45206488] Sc1ccccc1\n", + "[0.23383741] CNC(=O)Oc1cc(C)cc(C)c1\n", + "[-1.791749] ClC1CC2C(C1Cl)C3(Cl)C(=C(Cl)C2(Cl)C3(Cl)Cl)Cl\n", + "[0.77396223] CSSC\n", + "[1.00118389] NC(=O)c1ccccc1\n", + "[-0.05445007] Clc1ccccc1Br\n", + "[1.10438039] 
COC(=O)c1ccccc1OC2OC(COC3OCC(O)C(O)C3O)C(O)C(O)C2O\n", + "[0.75976087] CCCCC(O)CC\n", + "[-0.70013828] CCN2c1nc(C)cc(C)c1NC(=O)c3cccnc23 \n", + "[0.82130007] Oc1cc(Cl)cc(Cl)c1\n", + "[-1.31363676] Cc1cccc2c1ccc3ccccc32\n", + "[0.45679866] CCCCC(CC)CO\n", + "[-0.57327285] CC(C)N(C(C)C)C(=O)SCC(=CCl)Cl\n", + "[0.40946082] Cc1ccccc1\n", + "[-0.32427579] Clc1cccc(n1)C(Cl)(Cl)Cl\n", + "[-0.04971628] C1CCC=CCC1\n", + "[-0.39054877] CN(C)C(=S)SSC(=S)N(C)C \n", + "[-0.08095926] COC1=CC(=O)CC(C)C13Oc2c(Cl)c(OC)cc(OC)c2C3=O\n", + "[-0.26273659] CCCCCCCCCCO\n", + "[-0.54676366] CCC(C)(C)CC\n", + "[1.99717215] CNC(=O)C(C)SCCSP(=O)(OC)(OC)\n", + "[-0.03551493] Oc1cc(Cl)c(Cl)c(Cl)c1Cl\n", + "[1.45089342] CCCC=O\n", + "[-0.86392723] CC4CC3C2CCC1=CC(=O)C=CC1(C)C2(F)C(O)CC3(C)C4(O)C(=O)COC(C)=O \n", + "[0.23904457] CCCC\n", + "[0.52780543] COc1ccccc1O\n", + "[-0.48475108] CC1CC2C3CCC(O)(C(=O)C)C3(C)CC(O)C2(F)C4(C)C=CC(=O)C=C14\n", + "[0.22484322] ClC(Cl)C(Cl)(Cl)Cl\n", + "[0.34318783] CCOC(=O)c1ccccc1C(=O)OCC\n", + "[1.50296505] CC(C)CO\n", + "[-0.49469203] CC(C)Cc1ccccc1\n", + "[0.34792162] ICI\n", + "[0.79289737] CCCC(O)CCC\n", + "[0.56094192] CCCCCOC(=O)C\n", + "[-0.13965819] Oc1c(Cl)c(Cl)cc(Cl)c1Cl\n", + "[-0.13965819] CCCc1ccccc1\n", + "[0.15857024] FC(F)(Cl)C(F)(F)Cl\n", + "[1.60710831] CC=CC=O\n", + "[1.90060295] CN(C)C(=O)N(C)C \n", + "[-0.7171799] Cc1cc(C)c(C)cc1C\n", + "[-0.81658938] CC(=O)OC3(CCC4C2CCC1=CC(=O)CCC1C2CCC34C)C#C\n", + "[-0.13019062] CCOP(=S)(OCC)N2C(=O)c1ccccc1C2=O\n", + "[-0.24380145] c1ccccc1NC(=O)c2c(O)cccc2\n", + "[-0.14912576] CCN(CC)C(=S)SCC(Cl)=C\n", + "[0.95384604] ClCC\n", + "[-0.07811899] CC(=O)Nc1cc(NS(=O)(=O)C(F)(F)F)c(C)cc1C\n", + "[-0.18226225] O=C(C=CC=Cc2ccc1OCOc1c2)N3CCCCC3\n", + "[0.25324593] CC/C=C\\C\n", + "[0.68875411] CNC(=O)ON=C(CSC)C(C)(C)C \n", + "[0.04401265] O=C2NC(=O)C1(CCCCCCC1)C(=O)N2\n", + "[-0.55149745] c1(C(C)(C)C)cc(C(C)(C)C)cc(OC(=O)NC)c1\n", + "[-0.2580028] Oc2cc(O)c1C(=O)CC(Oc1c2)c3ccc(O)c(O)c3\n", + "[-0.02131358] 
O=C(c1ccccc1)c2ccccc2\n", + "[-2.41282153] CCCCCCCCCCCCCCCCCCCC\n", + "[0.07336211] N(Nc1ccccc1)c2ccccc2 \n", + "[0.90177441] CCC(CC)CO\n", + "[1.93847322] Oc1ccncc1\n", + "[0.84023521] Cl\\C=C/Cl\n", + "[-0.1065217] CC1CCCC1\n", + "[1.07692444] CC(C)CC(C)O\n", + "[-0.40380337] O2c1ccc(N)cc1N(C)C(=O)c3cc(C)ccc23 \n", + "[1.26627582] CC(C)(C)CO\n", + "[-0.25326902] CC(C)(C)C(=O)C(Oc1ccc(Cl)cc1)n2cncn2\n", + "[0.29064283] Cc1cc(no1)C(=O)NNCc2ccccc2\n", + "[0.94437847] CC=C\n", + "[-0.41563783] Oc1ccc(Cl)cc1Cc2cc(Cl)ccc2O\n", + "[-0.7370618] CCOC(=O)Nc2cccc(OC(=O)Nc1ccccc1)c2 \n", + "[-1.00120698] O=C1c2ccccc2C(=O)c3ccccc13\n", + "[0.46626623] CCCCCCC(C)O\n", + "[0.37585095] CC1=C(C(=O)Nc2ccccc2)S(=O)(=O)CCO1\n", + "[-0.46628932] CCCCc1ccccc1\n", + "[1.26627582] O=C1NC(=O)C(=O)N1 \n", + "[-1.49683422] COP(=S)(OC)Oc1ccc(Sc2ccc(OP(=S)(OC)OC)cc2)cc1\n", + "[-0.17800184] NS(=O)(=O)c1cc(ccc1Cl)C2(O)NC(=O)c3ccccc23\n", + "[0.88283927] CC(C)COC(=O)C\n", + "[-0.60830286] CC(C)C(C)(C)C\n", + "[-2.17045176] Clc1ccc(c(Cl)c1Cl)c2c(Cl)cc(Cl)c(Cl)c2Cl \n", + "[0.32898648] N#Cc1ccccc1C#N\n", + "[0.30058377] Cc1cccc(c1)N(=O)=O\n", + "[0.64615004] FC(F)(F)C(Cl)Br \n", + "[1.50580532] CNC(=O)ON=C(SC)C(=O)N(C)C\n", + "[-0.0075856] CCSCCSP(=S)(OC)OC\n", + "[-0.04971628] CCC(C)C\n", + "[-0.68499017] COP(=O)(OC)OC(=CCl)c1cc(Cl)c(Cl)cc1Cl\n" + ] + } + ], + "source": [ + "for X, y, w, id in test_dataset.itersamples():\n", + " print(y, id)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "aQa88cbj5zGw" + }, + "source": [ + "Most deep learning models can process a batch of multiple samples all at once. You can use `iterbatches()` to iterate over batches of samples." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + }, + "colab_type": "code", + "id": "HSVqeYox5zGx", + "outputId": "270a6a17-6238-4081-b0cf-3f17e23f4bb5" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(50, 1)\n", + "(50, 1)\n", + "(13, 1)\n" + ] + } + ], + "source": [ + "for X, y, w, ids in test_dataset.iterbatches(batch_size=50):\n", + " print(y.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`iterbatches()` has other features that are useful when training models. For example, `iterbatches(batch_size=100, epochs=10, deterministic=False)` will iterate over the complete dataset ten times, each time with the samples in a different random order.\n", + "\n", + "Datasets can also expose data using the standard interfaces for TensorFlow and PyTorch. To get a `tensorflow.data.Dataset`, call `make_tf_dataset()`. To get a `torch.utils.data.IterableDataset`, call `make_pytorch_dataset()`. See the API documentation for more details.\n", + "\n", + "The final way of accessing data is `to_dataframe()`. This copies the data into a Pandas `DataFrame`. This requires storing all the data in memory at once, so you should only use it with small datasets." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Xywids
0<deepchem.feat.mol_graphs.ConvMol object at 0x...-1.7065411.0C1c2ccccc2c3ccc4ccccc4c13
1<deepchem.feat.mol_graphs.ConvMol object at 0x...0.2911161.0COc1ccccc1Cl
2<deepchem.feat.mol_graphs.ConvMol object at 0x...-1.4272481.0COP(=S)(OC)Oc1cc(Cl)c(Br)cc1Cl
3<deepchem.feat.mol_graphs.ConvMol object at 0x...-0.9254661.0ClC(Cl)CC(=O)NC2=C(Cl)C(=O)c1ccccc1C2=O
4<deepchem.feat.mol_graphs.ConvMol object at 0x...-1.9526981.0ClC(Cl)C(c1ccc(Cl)cc1)c2ccc(Cl)cc2
...............
108<deepchem.feat.mol_graphs.ConvMol object at 0x...0.6461501.0FC(F)(F)C(Cl)Br
109<deepchem.feat.mol_graphs.ConvMol object at 0x...1.5058051.0CNC(=O)ON=C(SC)C(=O)N(C)C
110<deepchem.feat.mol_graphs.ConvMol object at 0x...-0.0075861.0CCSCCSP(=S)(OC)OC
111<deepchem.feat.mol_graphs.ConvMol object at 0x...-0.0497161.0CCC(C)C
112<deepchem.feat.mol_graphs.ConvMol object at 0x...-0.6849901.0COP(=O)(OC)OC(=CCl)c1cc(Cl)c(Cl)cc1Cl
\n", + "

113 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " X y w \\\n", + "0 \n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "X = np.random.random((10, 5))\n", + "y = np.random.random((10, 2))\n", + "dataset = dc.data.NumpyDataset(X=X, y=y)\n", + "print(dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Notice that we did not specify weights or IDs. These are optional, as is `y` for that matter. Only `X` is required. Since we left them out, it automatically built `w` and `ids` arrays for us, setting all weights to 1 and setting the IDs to integer indices." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
X1X2X3X4X5y1y2wids
00.5473300.9199410.2891380.4318060.7766720.5325790.4432581.00
10.9808670.6424870.4606400.5001530.0148480.6782590.2740291.01
20.9532540.7044460.8574580.3783720.7057890.7047860.9010801.02
30.9049700.7297100.3042470.8615460.9170290.1217470.7588451.03
40.4641440.0591680.6004050.8805290.6880430.5954950.7198611.04
50.8204820.1390020.6274210.1293990.9200240.6340300.4645251.05
60.1137270.5518010.5361890.0660910.3113200.6993310.1715321.06
70.5161310.9189030.4290360.8449730.6393670.4640890.3379891.07
80.8093930.2014500.8214200.8413900.1000260.2304620.3761511.08
90.0767500.3892770.3503710.2918060.1275220.5446060.3065781.09
\n", + "
" + ], + "text/plain": [ + " X1 X2 X3 X4 X5 y1 y2 w \\\n", + "0 0.547330 0.919941 0.289138 0.431806 0.776672 0.532579 0.443258 1.0 \n", + "1 0.980867 0.642487 0.460640 0.500153 0.014848 0.678259 0.274029 1.0 \n", + "2 0.953254 0.704446 0.857458 0.378372 0.705789 0.704786 0.901080 1.0 \n", + "3 0.904970 0.729710 0.304247 0.861546 0.917029 0.121747 0.758845 1.0 \n", + "4 0.464144 0.059168 0.600405 0.880529 0.688043 0.595495 0.719861 1.0 \n", + "5 0.820482 0.139002 0.627421 0.129399 0.920024 0.634030 0.464525 1.0 \n", + "6 0.113727 0.551801 0.536189 0.066091 0.311320 0.699331 0.171532 1.0 \n", + "7 0.516131 0.918903 0.429036 0.844973 0.639367 0.464089 0.337989 1.0 \n", + "8 0.809393 0.201450 0.821420 0.841390 0.100026 0.230462 0.376151 1.0 \n", + "9 0.076750 0.389277 0.350371 0.291806 0.127522 0.544606 0.306578 1.0 \n", + "\n", + " ids \n", + "0 0 \n", + "1 1 \n", + "2 2 \n", + "3 3 \n", + "4 4 \n", + "5 5 \n", + "6 6 \n", + "7 7 \n", + "8 8 \n", + "9 9 " + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dataset.to_dataframe()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "What about creating a DiskDataset? If you have the data in NumPy arrays, you can call `DiskDataset.from_numpy()` to save it to disk. Since this is just a tutorial, we will save it to a temporary directory." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "import tempfile\n", + "\n", + "with tempfile.TemporaryDirectory() as data_dir:\n", + " disk_dataset = dc.data.DiskDataset.from_numpy(X=X, y=y, data_dir=data_dir)\n", + " print(disk_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "What about larger datasets that can't fit in memory? What if you have some huge files on disk containing data on hundreds of millions of molecules? 
The process for creating a DiskDataset from them is slightly more involved. Fortunately, DeepChem's `DataLoader` framework can automate most of the work for you. That is a larger subject, so we will return to it in a later tutorial." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "MhZxVoVs5zMa" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/03_An_Introduction_To_MoleculeNet.ipynb b/examples/tutorials/03_An_Introduction_To_MoleculeNet.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..81b15402ae6a3b3fd42171d08db199578d22e492 --- /dev/null +++ b/examples/tutorials/03_An_Introduction_To_MoleculeNet.ipynb @@ -0,0 +1,621 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tutorial 3: An Introduction To MoleculeNet\n", + "\n", + "One of the most powerful features of DeepChem is that it comes \"batteries included\" with datasets to use. The DeepChem developer community maintains the MoleculeNet [1] suite of datasets which contains a large collection of different scientific datasets for use in machine learning applications. The original MoleculeNet suite had 17 datasets mostly focused on molecular properties. Over the last several years, MoleculeNet has evolved into a broader collection of scientific datasets to facilitate the broad use and development of scientific machine learning tools.\n", + "\n", + "These datasets are integrated with the rest of the DeepChem suite so you can conveniently access these through functions in the `dc.molnet` submodule. You've already seen a few examples of these loaders as you've worked through the tutorial series. The full documentation for the MoleculeNet suite is available in our docs [2].\n", + "\n", + "[1] Wu, Zhenqin, et al. 
\"MoleculeNet: a benchmark for molecular machine learning.\" Chemical science 9.2 (2018): 513-530.\n", + "\n", + "[2] https://deepchem.readthedocs.io/en/latest/moleculenet.html\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/03_An_Introduction_To_MoleculeNet.ipynb)\n", + "\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can now import the `deepchem` package to play with." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import deepchem as dc\n", + "dc.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# MoleculeNet Overview\n", + "\n", + "In the last two tutorials we loaded the Delaney dataset of molecular solubilities. Let's load it one more time." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_delaney(featurizer='GraphConv', splitter='random')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Notice that the loader function we invoke `dc.molnet.load_delaney` lives in the `dc.molnet` submodule of MoleculeNet loaders. Let's take a look at the full collection of loaders available for us" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['load_bace_classification',\n", + " 'load_bace_regression',\n", + " 'load_bandgap',\n", + " 'load_bbbc001',\n", + " 'load_bbbc002',\n", + " 'load_bbbp',\n", + " 'load_cell_counting',\n", + " 'load_chembl',\n", + " 'load_chembl25',\n", + " 'load_clearance',\n", + " 'load_clintox',\n", + " 'load_delaney',\n", + " 'load_factors',\n", + " 'load_function',\n", + " 'load_hiv',\n", + " 'load_hopv',\n", + " 'load_hppb',\n", + " 'load_kaggle',\n", + " 'load_kinase',\n", + " 'load_lipo',\n", + " 'load_mp_formation_energy',\n", + " 'load_mp_metallicity',\n", + " 'load_muv',\n", + " 'load_nci',\n", + " 'load_pcba',\n", + " 'load_pcba_146',\n", + " 'load_pcba_2475',\n", + " 'load_pdbbind',\n", + " 'load_pdbbind_from_dir',\n", + " 'load_pdbbind_grid',\n", + " 'load_perovskite',\n", + " 'load_ppb',\n", + " 'load_qm7',\n", + " 'load_qm7_from_mat',\n", + " 
'load_qm7b_from_mat',\n", + "  'load_qm8',\n", + "  'load_qm9',\n", + "  'load_sampl',\n", + "  'load_sider',\n", + "  'load_sweet',\n", + "  'load_thermosol',\n", + "  'load_tox21',\n", + "  'load_toxcast',\n", + "  'load_uspto',\n", + "  'load_uv',\n", + "  'load_zinc15']" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[method for method in dir(dc.molnet) if \"load_\" in method ]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The set of MoleculeNet loaders is actively maintained by the DeepChem community and we work on adding new datasets to the collection. Let's see how many datasets there are in MoleculeNet today" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "46" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len([method for method in dir(dc.molnet) if \"load_\" in method ])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# MoleculeNet Dataset Categories\n", + "\n", + "There's a lot of different datasets in MoleculeNet. Let's do a quick overview of the different types of datasets available. We'll break datasets into different categories and list loaders which belong to those categories. More details on each of these datasets can be found at https://deepchem.readthedocs.io/en/latest/moleculenet.html. The original MoleculeNet paper [1] provides details about a subset of these datasets. We've marked these datasets as \"V1\" below. All remaining datasets are \"V2\" and not documented in the older paper.\n", + "\n", + "## Quantum Mechanical Datasets\n", + "\n", + "MoleculeNet's quantum mechanical datasets contain various quantum mechanical property prediction tasks. The current set of quantum mechanical datasets includes QM7, QM7b, QM8, QM9. 
The associated loaders are \n", + "\n", + "- [`dc.molnet.load_qm7`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_qm7): V1\n", + "- [`dc.molnet.load_qm7b_from_mat`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_qm7): V1\n", + "- [`dc.molnet.load_qm8`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_qm8): V1\n", + "- [`dc.molnet.load_qm9`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_qm9): V1\n", + "\n", + "## Physical Chemistry Datasets\n", + "\n", + "The physical chemistry dataset collection contain a variety of tasks for predicting various physical properties of molecules.\n", + "\n", + "- [`dc.molnet.load_delaney`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_delaney): V1. This dataset is also referred to as ESOL in the original paper.\n", + "- [`dc.molnet.load_sampl`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_sampl): V1. This dataset is also referred to as FreeSolv in the original paper.\n", + "- [`dc.molnet.load_lipo`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_lipo): V1. This dataset is also referred to as Lipophilicity in the original paper.\n", + "- [`dc.molnet.load_thermosol`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_thermosol): V2.\n", + "- [`dc.molnet.load_hppb`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_hppb): V2.\n", + "- [`dc.molnet.load_hopv`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_hopv): V2. 
This dataset is drawn from a recent publication [3]\n", + "\n", + "## Chemical Reaction Datasets\n", + "\n", + "These datasets hold chemical reaction datasets for use in computational retrosynthesis / forward synthesis.\n", + "\n", + "- [`dc.molnet.load_uspto`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_uspto)\n", + "\n", + "## Biochemical/Biophysical Datasets\n", + "\n", + "These datasets are drawn from various biochemical/biophysical datasets that measure things like the binding affinity of compounds to proteins.\n", + "\n", + "- [`dc.molnet.load_pcba`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_pcba): V1\n", + "- [`dc.molnet.load_nci`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_nci): V2.\n", + "- [`dc.molnet.load_muv`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_muv): V1\n", + "- [`dc.molnet.load_hiv`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_hiv): V1\n", + "- [`dc.molnet.load_ppb`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#ppb-datasets): V2.\n", + "- [`dc.molnet.load_bace_classification`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bace_classification): V1. This loader loads the classification task for the BACE dataset from the original MoleculeNet paper.\n", + "- [`dc.molnet.load_bace_regression`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bace_regression): V1. This loader loads the regression task for the BACE dataset from the original MoleculeNet paper.\n", + "- [`dc.molnet.load_kaggle`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_kaggle): V2. 
This dataset is from Merck's drug discovery kaggle contest and is described in [4].\n", + "- [`dc.molnet.load_factors`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_factors): V2. This dataset is from [4].\n", + "- [`dc.molnet.load_uv`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_uv): V2. This dataset is from [4].\n", + "- [`dc.molnet.load_kinase`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_kinase): V2. This dataset is from [4].\n", + "\n", + "## Molecular Catalog Datasets\n", + "\n", + "These datasets provide molecular datasets which have no associated properties beyond the raw SMILES formula or structure. These types of datasets are useful for generative modeling tasks.\n", + "\n", + "- [`dc.molnet.load_zinc15`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_zinc15): V2\n", + "- [`dc.molnet.load_chembl`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_chembl): V2\n", + "- [`dc.molnet.load_chembl25`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#chembl25-datasets): V2\n", + "\n", + "## Physiology Datasets\n", + "\n", + "These datasets measure physiological properties of how molecules interact with human patients.\n", + "\n", + "- [`dc.molnet.load_bbbp`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bbbp): V1\n", + "- [`dc.molnet.load_tox21`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_tox21): V1\n", + "- [`dc.molnet.load_toxcast`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_toxcast): V1\n", + "- [`dc.molnet.load_sider`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_sider): V1\n", + "- [`dc.molnet.load_clintox`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_clintox): V1\n", + "- 
[`dc.molnet.load_clearance`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_clearance): V2.\n", + "\n", + "## Structural Biology Datasets\n", + "\n", + "These datasets contain 3D structures of macromolecules along with associated properties.\n", + "\n", + "- [`dc.molnet.load_pdbbind`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_pdbbind): V1\n", + "\n", + "\n", + "## Microscopy Datasets\n", + "\n", + "These datasets contain microscopy image datasets, typically of cell lines. These datasets were not in the original MoleculeNet paper.\n", + "\n", + "- [`dc.molnet.load_bbbc001`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bbbc001): V2\n", + "- [`dc.molnet.load_bbbc002`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bbbc002): V2\n", + "- [`dc.molnet.load_cell_counting`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#cell-counting-datasets): V2\n", + "\n", + "## Materials Properties Datasets\n", + "\n", + "These datasets compute properties of various materials.\n", + "\n", + "- [`dc.molnet.load_bandgap`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_bandgap): V2\n", + "- [`dc.molnet.load_perovskite`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_perovskite): V2\n", + "- [`dc.molnet.load_mp_formation_energy`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_mp_formation_energy): V2\n", + "- [`dc.molnet.load_mp_metallicity`](https://deepchem.readthedocs.io/en/latest/moleculenet.html#deepchem.molnet.load_mp_metallicity): V2\n", + "\n", + "\n", + "[3] Lopez, Steven A., et al. \"The Harvard organic photovoltaic dataset.\" Scientific data 3.1 (2016): 1-7.\n", + "\n", + "[4] Ramsundar, Bharath, et al. \"Is multitask deep learning practical for pharma?.\" Journal of chemical information and modeling 57.8 (2017): 2068-2076." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# MoleculeNet Loaders Explained" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All MoleculeNet loader functions take the form `dc.molnet.load_X`. Loader functions return a tuple of arguments `(tasks, datasets, transformers)`. Let's walk through each of these return values and explain what we get:\n", + "\n", + "1. `tasks`: This is a list of task-names. Many datasets in MoleculeNet are \"multitask\". That is, a given datapoint has multiple labels associated with it. These correspond to different measurements or values associated with this datapoint.\n", + "2. `datasets`: This field is a tuple of three `dc.data.Dataset` objects `(train, valid, test)`. These correspond to the training, validation, and test set for this MoleculeNet dataset.\n", + "3. `transformers`: This field is a list of `dc.trans.Transformer` objects which were applied to this dataset during processing.\n", + "\n", + "This is abstract so let's take a look at each of these fields for the `dc.molnet.load_delaney` function we invoked above. Let's start with `tasks`." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['measured log solubility in mols per litre']" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tasks" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have one task in this dataset which corresponds to the measured log solubility in mol/L. 
Let's now take a look at `datasets`:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(,\n", + " ,\n", + " )" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As we mentioned previously, we see that `datasets` is a tuple of 3 datasets. Let's split them out." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "train, valid, test = datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "valid" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "test" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's peek into one of the datapoints in the `train` dataset." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train.X[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that this is a `dc.feat.mol_graphs.ConvMol` object produced by `dc.feat.ConvMolFeaturizer`. 
We'll say more about how to control choice of featurization shortly. Finally let's take a look at the `transformers` field:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "transformers" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So we see that one transformer was applied, the `dc.trans.NormalizationTransformer`.\n", + "\n", + "After reading through this description so far, you may be wondering what choices are made under the hood. As we've briefly mentioned previously, datasets can be processed with different choices of \"featurizers\". Can we control the choice of featurization here? In addition, how was the source dataset split into train/valid/test as three different datasets? \n", + "\n", + "You can use the 'featurizer' and 'splitter' keyword arguments and pass in different strings. Common possible choices for 'featurizer' are 'ECFP', 'GraphConv', 'Weave' and 'smiles2img' corresponding to the `dc.feat.CircularFingerprint`, `dc.feat.ConvMolFeaturizer`, `dc.feat.WeaveFeaturizer` and `dc.feat.SmilesToImage` featurizers. Common possible choices for 'splitter' are `None`, 'index', 'random', 'scaffold' and 'stratified' corresponding to no split, `dc.splits.IndexSplitter`, `dc.splits.RandomSplitter`, `dc.splits.ScaffoldSplitter` and `dc.splits.SingletaskStratifiedSplitter`. We haven't talked much about splitters yet, but intuitively they're a way to partition a dataset based on different criteria. We'll say more in a future tutorial.\n", + "\n", + "Instead of a string, you also can pass in any `Featurizer` or `Splitter` object. This is very useful when, for example, a Featurizer has constructor arguments you can use to customize its behavior."
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_delaney(featurizer=\"ECFP\", splitter=\"scaffold\")" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "(train, valid, test) = datasets" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([0., 0., 0., ..., 0., 0., 0.])" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train.X[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that unlike the earlier invocation we have numpy arrays produced by `dc.feat.CircularFingerprint` instead of `ConvMol` objects produced by `dc.feat.ConvMolFeaturizer`.\n", + "\n", + "Give it a try for yourself. Try invoking MoleculeNet to load some other datasets and experiment with different featurizer/split options and see what happens!" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/tutorials/03_Modeling_Solubility.ipynb b/examples/tutorials/03_Modeling_Solubility.ipynb deleted file mode 100644 index e3270ce837cde5ec862277604cb3ad27a036d57e..0000000000000000000000000000000000000000 --- a/examples/tutorials/03_Modeling_Solubility.ipynb +++ /dev/null @@ -1,1445 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "03_Modeling_Solubility.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "xz586Jg2c_87", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 3: Modeling Solubility\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": 
"GdibQzeVc_8-", - "colab_type": "text" - }, - "source": [ - "Computationally predicting molecular solubility through is useful for drug-discovery. In this tutorial, we will use the `deepchem` library to fit a simple statistical model that predicts the solubility of drug-like compounds. The process of fitting this model involves four steps:\n", - "\n", - "1. Loading a chemical dataset, consisting of a series of compounds along with aqueous solubility measurements.\n", - "2. Transforming each compound into a feature vector $v \\in \\mathbb{R}^n$ comprehensible to statistical learning methods.\n", - "3. Fitting a simple model that maps feature vectors to estimates of aqueous solubility.\n", - "4. Visualizing the results.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/03_Modeling_Solubility.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hagObl_sc_8_", - "colab_type": "code", - "outputId": "7c2e797b-494b-462b-d5ff-be4e0614b90a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 9766 0 --:--:-- --:--:-- --:--:-- 9766\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.61 s, sys: 560 ms, total: 3.17 s\n", - "Wall time: 1min 59s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Jc4oiK4Bc_9C", - "colab_type": "text" - }, - "source": [ - "We need to load a dataset of estimated aqueous solubility measurements [1] into deepchem. The data is in CSV format and contains SMILES strings, predicted aqueaous solubilities, and a number of extraneous (for our purposes) molecular properties. Here is an example line from the dataset:\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "\n", - "
Compound ID ESOL predicted log solubility (mols/liter) Minimum Degree Molecular Weight # H-Bond Donors # Rings # Rotatable Bonds Polar Surface Area Measured log solubility (mols/liter) smiles
benzothiazole-2.7332 135.191 0 2 0 12.89 -1.5 c2ccc1scnc1c2
\n", - "\n", - "\n", - "Most of these fields are not useful for our purposes. The two fields that we will need are the \"smiles\" field and the \"measured log solubility in mols per litre\". The \"smiles\" field holds a SMILES string [2] that specifies the compound in question. Before we load this data into deepchem, we will load the data into python and do some simple preliminary analysis to gain some intuition for the dataset. We'll pull this dataset down from the DeepChem github repo. (If you're running this tutorial on a Mac, you may need to run `brew install wget` to get this command)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "58FAHaJOc_9D", - "colab_type": "code", - "outputId": "f06019c1-9e11-4b4f-e6bd-d49ddb8d0f37", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "!wget https://raw.githubusercontent.com/deepchem/deepchem/master/datasets/delaney-processed.csv" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2020-06-12 02:18:23-- https://raw.githubusercontent.com/deepchem/deepchem/master/datasets/delaney-processed.csv\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 96699 (94K) [text/plain]\n", - "Saving to: ‘delaney-processed.csv’\n", - "\n", - "\rdelaney-processed.c 0%[ ] 0 --.-KB/s \rdelaney-processed.c 100%[===================>] 94.43K --.-KB/s in 0.009s \n", - "\n", - "2020-06-12 02:18:23 (10.1 MB/s) - ‘delaney-processed.csv’ saved [96699/96699]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "XXQteOIQc_9G", - "colab_type": "code", - "outputId": "fe43b795-3cd0-40e9-ccc6-7e97a0b783ee", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 102 - } - }, - "source": [ - "from deepchem.utils.save import load_from_disk\n", - "\n", - "dataset_file= \"delaney-processed.csv\"\n", - "dataset = load_from_disk(dataset_file)\n", - "print(\"Columns of dataset: %s\" % str(dataset.columns.values))\n", - "print(\"Number of examples in dataset: %s\" % str(dataset.shape[0]))" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Columns of dataset: ['Compound ID' 'ESOL predicted log solubility in mols per litre'\n", - " 'Minimum Degree' 'Molecular Weight' 'Number of H-Bond Donors'\n", - " 'Number of Rings' 'Number of Rotatable Bonds' 'Polar Surface Area'\n", - " 'measured log solubility in mols per litre' 'smiles']\n", - "Number of examples in dataset: 1128\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "x_pmi554c_9J", - "colab_type": "text" - }, - "source": [ - "To gain a visual understanding of compounds in our dataset, let's draw them using rdkit. We define a couple of helper functions to get started." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "lpriB1Rfc_9J", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import tempfile\n", - "from rdkit import Chem\n", - "from rdkit.Chem import Draw\n", - "from itertools import islice\n", - "from IPython.display import Image, display\n", - "\n", - "def display_images(filenames):\n", - " \"\"\"Helper to pretty-print images.\"\"\"\n", - " for file in filenames:\n", - " display(Image(file))\n", - "\n", - "def mols_to_pngs(mols, basename=\"test\"):\n", - " \"\"\"Helper to write RDKit mols to png files.\"\"\"\n", - " filenames = []\n", - " for i, mol in enumerate(mols):\n", - " filename = \"%s%d.png\" % (basename, i)\n", - " Draw.MolToFile(mol, filename)\n", - " filenames.append(filename)\n", - " return filenames" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "dv3ZBhyQc_9M", - "colab_type": "text" - }, - "source": [ - "Now, we display some compounds from the dataset:" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "iRNwkDU_c_9N", - "colab_type": "code", - "outputId": "d0a9e168-49e4-4b62-df28-6afb8210884a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "num_to_display = 14\n", - "molecules = []\n", - "for _, data in islice(dataset.iterrows(), num_to_display):\n", - " molecules.append(Chem.MolFromSmiles(data[\"smiles\"]))\n", - "display_images(mols_to_pngs(molecules))" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxU5f4H8O8AyiaLCooi4L6VS0AueRVTUjQ0F0aR0u71l2NqYWU1vtpIW+5U93cvhppD2S8yWzBvuBAkqKllloCmgguICiiiyKrsnO/vj2ccCRBmmHPmOTDf918J4zxfks+c55znnO+jQEQghPBjxbsAQiwdhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISkfUKElSshMBAmTYK0NEhMhOeeAwC4fh0mT+Zcm5EohKR9SkiA0lJIToZNm3Txa7dseBdASJukpkJAAADAiBGQmwuIsGcPZGVBTQ3vyozGLYSpqanR0dFubm43btz48MMPu3btyqsS0i4hAqLuvxUKUChg1izYuBGuX4fQUK6VGc3cISwtLd22bduWLVvS09MBwN7evrKy8sCBAzt27PD19TVzMaQd8/eHr76C5cshIwP69eNdjUnMd0549uzZ1atX9+nT5/nnn09PT/fw8FCr1QcOHBg7dmx2dvYjjzyyYcMGsxVD2r0ZM6C6GiZMgFWrICqKdzWmQYlVVVXFxsYGBgbqR/Tz84uJiampqdG/IDw8nH1r8eLFd+7ckbok0kE88AAC4IkTvOswlYQhzMzMVKvVbm5uLGAuLi4qler06dPNvnjbtm2Ojo4AMHr06KysLOmqIh2EIKCDAwJgaSnvUkwlfgjr6+uTkpKCg4MVCoX+0KfVam/fvt3yX8zIyBg2bBgAODs7f//996IXRjqUq1cRAN3dedchAjFDmJ+fr9FofHx8WPbs7OyUSuUvv/zS9JV1dXUZGRlNv15WVhYSEgIACoUiPDxcP2UlpLEjRxAAx47lXYcIxAnhkSNHlEplp06dWPwGDx6s0WgKCwubvlIfVHd396qqqqYvEAQhMjKSvVVAQEB+fr4oFZKO5osvEADDwnjXIQKTQlhSUqLVah944AGWPWtr6+Dg4KSkJEEQGr1SEIT9+/eHhITogzpkyJDz58/f750PHTrUq1cvAPD09Gz2WEos3VtvIQC+8QbvOkTQxhCmpKSoVCoHBweWqN69e6vV6pycnKavZEF98MEHWw1qIwUFBVOmTAEAGxsbjUbTtjpJh/XkkwiAn3/Ouw4RGB3C+vr6Rx55hCXKysoqKCho165ddXV1TV/JgsqueQJAr1691Gr1lStXDB+rtrZWrVazCzxz5swpKSkxtlrSYY0fjwB46BDvOkRgdAhfeeWVAQMGODs7h4eHX7x4sekLKisrY2NjJ0yYoA9qYGBgbGxsbW1t20qMi4tzdXVlp5r3W+EgFqdHDwTAvDzedYjA6BAOHDgQAE6ePNn0W+fPn1er1d27d2fxc3V1ValU6enppld54cKFESNGAECXLl2++eYb09+QtG/l5QiAdnZYX8+7FBEYF8K6urrOnTsrFIqKigr9F2tra7/77rvJkyfrFwbHjx//5ZdfVlZWilhoRUXF0qVL2furVKrq6moR35y0M3/+iQA4bBjvOsRhXAizs7PZFcuGX6yqqnJ3dwcAJycnlUp1QsrbiLRabefOnQHA39//0qVL0g1E5CwjPj41ICB3+XLehYjDuBu4WQgHDBjQ8Iu2trZvvvnmpk2b8vLytFrt6NGjjXpPo6hUqqNHj/br1y8lJeX
hhx/et2+fdGMR2YrPyPA7dOgjW1vehYjDuBBevHgRAPr379/o688///zKlSudnZ1Fq+v+/Pz8/vjjj2nTphUWFs6cOfP99983w6BEVpo9GLRfIhwJzc/NzS0xMZEtHh4/fryoqIhvPcTM2O9h04NBO9WWEMrhh1coFGq1+uGHH46Lizty5AjvcjqQRh2TGvVTkgc2I+N+MBCLcU/Wy+2HLykpATnV0wHp+ymdPg3Ll8PRo7wLgvr6+pycHIVC0bdvX961iMO4EPI9EtbW1upvPQUAQRAuX76sUCj6tfPuBrLTsGNS035KdxeieMnNza2pqfH09LS3t+dbiViMmI7eunWrpKTEycmJLUiY39NPP929e/e9e/eyP+bl5VVXV/fs2VN/ZxwRx6xZkJgIX38N0KSf0smT8NprUFfHsTq5TcdMZ0QIuV+VuXjxYlFRkb4vG/d6LIK/Pxw6BACQkQF9+0JoKPzznxAYCNev86rofpfo2y8jQsj9h2/0EdjxPhHlaMYMcHWFoCB4/nnYuBG2boXeveHQIRg1Cvbv51JRx/vwNeKckO8PX1ZWduvWLQcHh549ezaspyN9IspCUBAEBQEAeHjAzz8DAGze/JcXnDwJYWGQnAxBQfDuu/Dqq2Y+S+x4/+5GT0d5/fD647D+DlXuR2YL5e4OiYkQEQGCAGvXwpw5UFJitsELCwtTUlIAwNvb22yDSs3o6SivI2HT0TvetKTdsLaGt9+GuDhwdYXdu2HMGDh9WuoxU1NTly9f7uPjc/nyZScnp/Dw8MuXL0s9qHm0myNh09HpSMjZrFlw/DiMHAmZmTBmDHz+uRSDlJeXb9myZdSoUf7+/tHR0VVVVRMnTnR0dDxx4sSYMWP2czovFZmBN3pXVVVZW1vb2Njw6oCmUqkAICoqiv2RLdM7ODi02iaDSOvOHVyyBAEQ4PrrrzfbvKttzp49q1ar9RfDe/bsqVars7OzEbGwsHD69Olw98ap+nb+VKGhITx37hwA9O/fX9JqWsB6eP/444/sj6mpqQAwYsQIXvWQv4iJqfP2fqhnTz8/P5aTNquurmYt2xv1rW30eKogCBqNxsrKCgAef/zxW7dumfYD8GRoCOPj4wEgMDBQ0mpawG6L0Tdoi42NBYA5c+bwqoc0cjolhbWcdXNz++mnn9rwDllZWWq1Wn8riLOzs0ql+vPPP1v4Kz/++GO3bt0AwNvb+/fff29r7W0lCLhiBU6dihMnYmoqJiTgqlWIiPn5GBBg+NsYGsKoqCgAWM7pMcra2lobGxsrKyv9bEej0QDASy+9xKUe0qzCwsIZM2YYO0tkLduVSqW1tTWLn6+vr1arLS8vN+Sv5+TkjB07FgBsbW0jIyNN+wmMFB+va3x66hSOHy95CF988UUA+OCDD4ytUxRZWVns007/FXaKuHHjRi71kPths0QWpylTphQUFLTwYtYJWn8ftq2trVKpTEpKMnZQPnsKVVTg+vWo1er+2KcP/vgjenvj9On46KOShHD27NkAwGuLiJ9++gkAHn30Uf1Xpk6dCgAJCQlc6iEtO3DgALunwsvL67fffmv6gpSUlMWLF+tvxx84cKBGo7l586Ypg5ppT6GyMtRqcdQofO45XLcOt2zRfd3LS/IjIevem5aWZlTBYvnkk08A4H/+53/0X2Efny308CZ85ebmjh8/vtEssbS0tGEnaNYOc/fu3WJd4pZ2T6ETJ3D5cuzShV0KxiFDcO9eXLQIETE9HSdNkjaEgiCwz5hG7XfNtjzw8ssvA8B7773H/lhTU2Ntbd3wFLH96sBd41jvZpa3xx9/fMmSJfqW7Z6enuvWrbt69arog4q/p1BVFcbGYmCgLnsA6OeHMTFYU6O7MDN9Ok6Zgn/+KW0Ir127xq56Nfr6ypUrzdN9cN68eQDw7bffsj9mZmYCgI+Pj9TjSqe8HLVaHD0a33yTdykS2759u4ODA+s/pFAoWCdoSVebRdtTKCsL1Wp0d9dlz9k
ZVSo8dUrUYhENDCHrH+Hn5/fXCrNsbW0BYOzYsc3uQiGiUaNGAcDx48fZHxMTE9l5v6SDSuTUKVy5Ep2ddf+yo0bxLkh6b731FlvUzczMNNugbd5TqLa2NmHXLpw+Ha2sdP9I/v742Wco2fUeg0J44cKF+fPnOzo66tfKmdTUVLZ8171798TERGkqRERkn6P6BdnNmzcDwDPPPCPdiKKrrtZNahSKe5MarRb1XZQTErBHD6yowNpao+Yy7cCrr74KAO+++66Zxy0oKGAX8NieQq2ePTXcYLPooYfQ1haVSjT+aq2xDL0ws2jRInYm/dZbbzVc/2nb0pBRbt68CQCurq76r7BTxPfff1/0saSQnV3dcFLj4oKrVuGZM41flpCAvr64aVMHDOH8+fMBgMv+BYbsKSQIQnJy8vz58xvu2/dzdDQWF5unSEND2HD959FHH71+/Xqz35o5c6YUNxCVl5dfuHBB/8e5c+cCwHfffSf6QCLSr0GPGLGUxW/YMIyMxGaXoAUBExLw7bdxyhSsqupoIWT9oDnc0XLXrl27mt1TqM379onLuDb4P//8s4eHBwD06dPn6NGjDb/VcGno2LFjohbZ2MiRIxueIspNfn7+u+++q3/gzc7OLjy8pLnVMqyvx6QkVCpxzRpMSMB33sGoKNy2raOFkJ1NNLtzs9k02lOo2X37pL60cT9G78qUl5fH9idsundnbm7uuHHjzHADkZOTEwAUFRVJN0TbGL4Gff06vv8+9u2rm6N6eGB8PL7zDt65g1On4mOPmb92qbCzCWdnZ96F4O3bt5988kl26qRfqJw+fXpcXFyzG2yaTVt26m24/hMWFnb79m39txreQPTUU09JcQNRQUFBo1NE7iorKzds2MCWidnH07x58/bt29fsrCYlBVUqtLfXxW/AANRo8MYN3ZEQEd9+GydPNvePIJ1jx44BwEMPPcS7EJ0XX3zRycnJwcFBrH37TNf2Pet/+OEHFxcXABg6dOiZv15n+Oqrr9iBftiwYRkZGSYXeU9KSsrs2bOdnZ27d+/e9vUfsdXU1LCr4R4eHmq1+vLly01fU1qKWi2OHKnLnpUVBgZibCxy/Qg2h6+//hoAQkJCeBei89lnn7FbTHkXck/bQ4iI586dYye1Tk5OjS6TnD17dvjw4WwesmPHDtOKxOLi4v/85z9Dhw7Vn2Wxefzhw4dNfGexfPHFFzt37mx2N+ITJ06rVOjoqItfr1745puYm2v+Gvl45513AECtVvMuROe1114DgLfffpt3IfeYFEJELC8vDw0NZdlQqVQN74QoKytTKpUm3kDU7Al0Wloae8bXwPUfLqqqqtjDqQAwalRxw7udmjp7FtVq7JAPZv3jH/8AAK3+aQPeFi5cCADbtm3jXcg9poaQ0e/d+be//a3hDYGCIPzrX/+ysbEBgGnTphmelkYb3ze93amuri4iIoI9WD179uxicy3pGOLChQtr1qxhD5sCQNeuXdev33vuXDOvrKrCbdvwkUd0B0kHh+YXMNq1gIAAAGjDA0oS8ff3B4Bff/2VdyH3iBNCRDx+/Di71cDd3b3R//HDhw/36tXLwGf/Lly4YPjG97t372Y9SAYNGtTyI9hmwBYGg4ODG/VlaHjhSs9ctyXy16dPHwAwseeFiNiHY8OFbu5ECyEi3rx5c9q0aWzRMyIiouHdM60uJ9TV1e3evbtpZ5FWr69mZmayO0vt7Oy2bt0qwo9hvIa3O7FKlEpls/cr6hcGra118fP1Ra0Wm8tpR1BZWWllZWVjY9Ps2bL5FRcXA4Cjo6OsTmHEDCH+dZY4a9YsQ5byrl69qtFovLy89L/BixcvNurBxcrKymeeeYb99cWLF1fob8eUmCAISUlJq1evZvNtdqF4w4YNzc6NWVDHjJnP4meu2xI5y8jIAIABAwbwLkSHNQ6WW38wkUPI7N27l80SBw4cePLkyWZfo7+rS/8bPGTIEI1G0+a73mJiYtheWb6+vlJ
PftjtTg888AAAdO/e3cHB4X63OwmCsH//fqVSqV/Bf+KJy//+N8rvRgNJsC20pk2bxrsQHXn2B5MkhIh45cqVMWPGsCPbp59+2vBbxcXFWq1Wv7TduXNn1lnE9BlCWloa6wXcvXt3iTpfHDt27O9//7t+Zzxvb+9333232ROMRk+RW1tbi/sUebuwYcMGAFixYgXvQnT++c9/AsCaNWt4F/IXUoUQESsrK1k7Jv0ska03NHy8OiIiouVeQMYqKSlht3eL+1RHZWVlTEyMr68vq5z1ZYiNjW32VKfZZZUrV66IUkn7snr1agD46KOPeBeis2zZMgDYtGkT70L+QsIQMp999hlbW9df8LSysgoODo6Pj5eocbJRDb9adf78ebVarV9vcHV1DQ8Pv3jxYgt/hd0obJ6nyGVu1qxZALBz507ehehMmTIF5NcfTPIQIuKJEyeGDBkyb968hp3MpdZqw6+W3a8PtCFXfWJiYtasWWPOp8hli901db/rAubH+oM1fCxODswRQkSsra0tKioyQzeahppt+NUqdrWWrW4BgJOTk0qlks+vUTsiCAI79SgtLeVdC+Ld/mDW1tZm/j1slZlCyEvDBz6efPLJZtfNmaZXa4cOHarRaGT4wFR7kZeXx27e4F2IzoULF0CW/cE6eAiZ7du3s8skQ4cObXrzTVFRUWRkJGuW0/BqLZdSO5LDhw8DwLhx43gXoiPb/mBGbJfdfoWFhfn6+oaEhKSnp48bN27r1q3szvLU1NTo6Oht27ZVVlYCwIABA5YtW7Z06VL9niTEFHLbQJLvLrctsIgQAsDQoUN/++23pUuXfv/99wsXLty6devVq1fPnDkDANbW1k888cSzzz47bdo0dq8PEYXctlKW7Wb3FvQ75+TktGPHDq1Wa2Njc+bMmTNnzrBncLOysuLi4oKCgiiB4pLbL73cjsx6Fvdrp1KpJk6cePXq1dWrV+fk5DTcFYiIS27TP7kdmfUsZTraELuVPiwsTH8/JxHXzZs3P//88/T0dACorq7mXY7OpUuXQJZHQgUi8q7B3Lp27VpSUlJYWKi/iYeI5ZdfftmyZcv333+vz56Dg8Mnn3yyZMkSvoUVFBR4eHi4urqyj2BZsbjpaGFhYUlJCWsVxbuWjqO8vDw6Onr06NETJ07cvn17bW1tYGDg9u3bly1bVlFR8fTTTy9ZsoRdguaFzUUHDhzIsYb74r1GYm6///47yKkDX3t39uxZtVrNnlwDAHZn4qVLl/QviImJYffN+Pr6tnzPraS2bdsGAAsXLuRVQAss7pxQblcL2qvq6h9/+OG9qKijR4+yL0yePPnZZ5+dO3cu6zakt2TJkpEjR4aEhKSlpT388MNfffUV27zEzOR2qbYhi5uOyvY6dbtx8SKsXQteXs5a7dGjR52dnVUq1alTpw4ePLhw4cJGCWRGjx6dlpY2f/78oqKixx9/fO3atfX19WavWsb/7rwPxeYmtw587UZdHcbFYVCQfte+qsDATz/9tIXbcRthe3eyW3Mb7SkktYKCgkGDBoH8HmJiLC6EcuvA1w7k56NGc2/fDNPa4+zfv79Hjx4A4O3tXSz9lj7sAWvWCcHJyWnQoEEyaX3fkMWFUG4d+GQtJQUXL8ZOnXTxGzQINRpsbn8bo+Tl5U2YMCF80iS0scG/7ikklpKSkqioKNYECO72FmF3ZTg7O8vnIWPGskJYVVUlqw588iIIuGIFTp2KEyfigQM4dKguezY2OH8+JiejeN1xampqql98Uff+YWEitnzMyMgIDw/v0qULi1/D3UHKysoWLFgAprWEl4JlhbDi3LkNAQERc+fyLkSW4uMxLAwR8dQpHD8e/fywVy9Uq1G67jg//IAuLgiAQ4c2s3exMRpuOsD4+fnFxMQ0TZq+W/zEiROvXbtmyqBisawQ4t69CICy6cAnL+vXo/56VZ8+ePkymmG+cO4cPvggAqCTE377bVve4cIFXLNmV1AQy17Xrl1feOG
Fc83uOnDXkSNHevfuDQDu7u7JycltrFw8FhbCDRsQAJ99lncdsrRuHW7ZovtvLy/zjVtejqGhuqmpSoUG9p5gzcyDg1GhQIAaL6/xY8d+/vnnBrZ+vnHjhn5PoYiICL59KC0shKtXIwDKpgOfvMTH46JFiIjp6ThpkrlH12qxc2cEwL/9DRvsKdQMdrXW21uXWzs7VCqxuU0HWiafPYUsLISzZiEAyuzimFwIAi5ditOn45QpyGV3nePH0ccHAdDdHffta+YFR46gUnnvau3gwajRYGGhKWPu2bOH+55CFhbC4cMRAKl12v24u6OTE964wa2AggKcOhUBMCLi3qXa33/H//1fHDRIl71OnXDBAjxwQKyrtQ33FPrss89EeU+jWFIIBQEdHBAA5dGBT3bKyhAA7e1FXIpoi7o63LoV9+79y6XaIUMQAHv3RrUac3JEH5PXnkKMJYUwL0831SHNOnECAfCBB3jXgYhNLtX+8APu2oV1dZKOac49hRqypBu4s7MBAOj5ifu5eBFANv9/EEH/uLlCAXPmwOzZYG0t6ZhLliz59ddf+/fvzx74YC0SzcCSQiirXzIZYh9SMnnOwN8fDh0CAMjIgLstYc3goYceSktLmzt37q1bt2bOnLl27VpBEKQe1JJCKKtfMhmS1f+fGTPA1RWCguD55yEqypwju7i47Ny5U6PRWFlZffDBB4GBgTdu3JB0REsKITsSyuSXTIZkNVNQKGDzZkhMhP37YeRIsw+uUKvVycnJPXv2PHjwoL+//7Fjx6QbzpJCuH49xMXB1Km865Ar+pD6q8mTJ6ekpIwfPz43N3fy5Mlsw1MpWEa3NURYtQouXICaGoiMhLt7fZJ76urAwQHq6+HOHbCz412NjNTU1Lz00kubNm1SKBS//fbb2LFjRR/CMo6ECQlQWgrJybBpEzz3HO9qZCknB2prwdOTEthI586dN27cuG7dOnt7+82bN0sxhGWEMDUVAgIAAEaMgNxcsISDv7FkdUIoPwMHDqyoqJCoa6NlhLDRotPdzXeJ3sX8/OPDh5cOG8a7EJmStEmfZYSw0aLTe+/B2rVg9oZfchadnj4mI2OjpyfvQmRK0o6JltF3dMYM2LsXgoKgthZefx1mzYKaGkhLg+3bgbYiBABqx9oaCqHJ2KKT3r59EBoKSUng6wuxsTB+PL/K5ELOvXHlgKajYgsIgJQUmDAB8vJg0iT44AOzjo4IK1dCYCBMmgRpaWYd+v5ku22YHFRVVeXn53fq1MnLy0uK97fIEAKApyf8/DOo1VBXB2vXQlgY3L5tpqHlt15SWFhYWlpKm+Tcz6VLlwRB8PHxsZbmDnJLDSEA2NiARgM//AAuLvDNN+DvD+np0o5YWAgffQSHD8ttvYQOgy2T+oTZgkPIzJkDf/wBDz4I58/DuHHw3XeSjJKaCsuXg48PvPoqpKb+Zb3k3Dk4fFiSQQ1GV2VaJvUJs8WHEAAGD4bffoPQULh9G0JDj77/fm1trTjvXF4OW7bAqFHg7w/R0VBVBcHBEBh4b73E2xtCQmDqVPjXvzgeEumqTMsohGbRpQt88w18/HHexIkT33hjypQp165dM+kNz52DtWvBxwdWrIBTp6BnT1CrISsL9uyBV1+995DOxo2gVIIgwCuvwJw5UFIi0s9jHAphy+qLi7s4Okr4/8dsz/C3C8ePH2c7Fri7u+9rtuFXy6qrMTYWAwNZM0wEQD8/1GqxsrKlv7VnD3btqtvsgUfDL9okpxXDhyNAnWT9wSiEjd28eXP69OkAYG1tHRERUV9fb8jfysvLi4iIWDB5si57zs6oUhmRqCtX8OGHdV00zdvwq76+vlevXgDAcRtdWRMEtLeXtD8YhbAZgiCwB6sBIDg4uKio6H6vrKur27Vr14wZM9iLFQpF5owZGB3dlh1OKitx2TJdhhcvRukbfhUXF0dGRvbv39/BwSEkJIQ2yWke6w/Wo4d0I1AI7ys+Pr5bt24A4OPj88c
ffzT67vXr1zUaDZu7AoCtra1SqRRhRhcTo+vL6OuLkjX8OnLkSFhYmK2tLSu+f//+Z8+elWisdu/QIQTAceOkG4FC2JKcnBz2EKednV10dDT7YkpKyuLFizt16sR+gwcOHKjRaG6avGvfPamp2L8/AmD37pU//STa2yKWlZVptVrW6BYArKysAgMDY2Nj6yRuJdi+/d//IQA++aR0I1AIW1FZWbls2TL2Wztp0qRhdx/2sbGxmTdvXlJSkiR7iZSW4rx5gqtrgLe3Wq02PSSNdu3r2bOnWq2+dOmSGLV2dG++iQD41lvSjUAhNMiXX35pZ2fHdvnt1auXWq2+It2ufYwgxEdGsvukpk2b1rYjreG79pH7CgtDAPziC+lGoBAaatasWQCwatUqc17AOHjwoIeHBwD06dPn6NGjhv/FrKwstVrtfvdBLWdnZ5VKderUKelK7bDGjkUAPHxYuhEohIaaMGECABw8eNDM4+bl5T3yyCNsAqxpbYf3+vr6pKQkpVKpv9XYz89Pq9XeFm8/aovj7o4ArezWZhoKoaHYESlHgt1IWlVbW6tWq1mowsLCmk1Ufn6+RqPx8fER+WqthTPLJjkUQoPcuXNHoVDY2toauHYvhf/+978uLi4AMHTo0DMNdng/cuSIUqnUX60dNGiQRqMpNG3XPqJjlk1yLOPJepOxu0n69u3LFuW5mDt37vDhw0NCQs6cOTN+/PiPP/64pqYmKirqzJkzAGBtbR0cHLx69eqpU6cqqJOVWKytYfZsuDu/kIqkEe8w4uLiAGDmzJm8C8GysrIFCxYAgIODA/sX9PLyWr9+/bVr13iXRtqIjoQGkc8Td05OTt99993t27cTExMHDx784YcfBgcHS/TEt6Vr1Lj9xg3Yuxc2boTr1yE0FH7+Waxx6FEmg8jtYZ8ePXoIgvDyyy8/8cQTlECpmKsRCR0JDcKOhPIJodw+FDqmpo3b9+yBrCyoqRF3HDoSGkRuXVjkMz3uyI2FrW8AAAipSURBVJo2bp81CxIT4euvxR2HQtg6QRCuXLmiUCj6mXHL2BZI3YGP6Jhrt2CajrYuNze3urq6d+/e+guSfGVnZwuC0L9/fzoblFbDxu1RUWBix5P7oxC2Tm4nYHKbG3dYjRq3jxwJQUEAAB4eIl4aBZqOGkJuJ2Byu0pETEQhbB0dCYmkKIStk9uRUG4fCsREFMLWye2XXm4fCsRECpTBXggy161bt+Li4oKCgh49evCuBRDR0dGxsrKyrKzMycmJdzlEBHQkbEVJSUlxcXGXLl3kkEAAuHbtWmVlZY8ePSiBHQaFsBVZWVlAc1EiJQphK+R2KZLWJzoeCmEr5HbkkduHAjEdhbAVcrs0Krd6iOkohK2Q2/RPbkdmYjoKYSvkNv2jI2HHQ+uELampqWFPTlRUVHTu3Jl3OVBeXu7s7Gxvb8+6v/Euh4iDjoTNEwQhOTk5JCTEycnJ0dExMzOTd0UADebGlMCOhELY2M2bNz/44INBgwY99thje/bsuXPnTllZ2dixY7/55hvepcnuBJWIgkJ4T2pq6vLly318fNauXZudne3p6RkREZGdnb106dI7d+6EhYUtX768Ruz+IkaR2wkqEQfnlosyYMiufTExMfb29gDg5+fHcUexZ599FgA+/vhjXgUQKVh2CE+eXPPcc/qbMD08PF5//fX77XmWmprKesy4ubklJiaauVLmscceA4D4+HguoxOJWGQIq6owNhYDAxFg84QJYPCufYWFhUFBQQCgUCjUarX596VgE1Ha2rqDsbAQZmXhK6+gmxsCIAC6ut54662MjAzD30AQBI1Gw3akmDlz5q1bt6QrtqH6+vrdu3dbW1srFIri4mLzDErMwzJCWF+PSUmoVKK1tS5+fn6o1WJbd+07cOAAe7LJy8vr2LFj4hbbSEFBgUajYTPhzp07KxSK0aNHZ2VlSTooMaeOHsL8fNRo0MdHlz1bW1QqUYxd+3JycsaNGwcAtra
2kZGRpr9hU4cOHVq0aJH+JoEBAwa8+OKLbH3C2dl5586dUgxKzK/9hzAhAVetQkTMz8eAABQEXLECp07FiRMxOho7ddLFb8gQ/Pe/sahIxJGrqqrCw8NZQp566qk7d+6I8ralpaVarXbkyJHNXq0tLS2dP3++/ry04SVc0k51uBDGx2NYGCLiqVM4diz27ImBgbh7t3Q7rX711VeOjo4AMGrUqMzMTFPeKiMjIzw8vEuXLvqrtWq1uumKiCAIkZGRbFfQgICA/Px8UwYl3HWIEHp74/Tp+OijGBCA69ejVqv7Vp8+KNLRqWVnz54dPnw4myXu2LHD2L9eVVUVGxsbGBioX7w15GrtoUOHevXqBQCenp6//vqrCeUTzjpECBseCdetwy1bdN/y8jJbFWVlZUqlks0Sw8PDW13tYDIzM9VqtZubG8uei4uLSqU6ffq0gYNevXp1woQJAGBjY6PRaEwon/DU4UIYH4+LFiEipqfjpEnmLKThLHHSpEktbJ1bX1+flJQUHBysvw/bz89Pq9XeNv5qbU1NzQsvvMDeJDQ0tKa83LQfgnDQ4ULILsxMn45TpuCff5q/nMOHD7NZYo8ePfbv39/ou/n5+RqNxufuHuh2dnZKpfKXX34xcdC4uDgXF5fNEybgkCFo8IGUyET7D6H83LhxY+rUqfpZoiAIiHjkyBGlUsmOkwAwePBgjUZTWFgo1qDnz56t9/VFAOzSBb/9Vqy3JWZAIZREbW3tyy+/zGab/v7+gwYNYtnr1KnTggULDhw4IEhxtbaiAv/xD92SjEqF1dXiD0EkQCGU0K5du7p06eLp6QkAvXv3VqvVOTk5ko+q1WLnzgiA/v54+bLkwxGTUQilFRoaCgDLly8366p6Sgr27YsA6OaG+/aZb1zSJvRQr7QKCwsB4IknnjDrrrp+fnD8OEyfDoWFMGMGvP02CIL5RidGohBKi1uHQjc3SEgAjQYQYd06mD0biovNXQMxDHVbk1BdXZ29vb0gCBUVFba2tnyKiI+HJUugqAjeeQfeeINPDaRFdCSU0OXLl+vq6vr06cMtgQDw+OOQkgIrVsCrr8LKlRAYCJMmQVoaJCbCc88BAFy/DpMncyuPUAglJZe+TP36webNkJwMpaWQnAybNuniR+TBhncBHZm8WtanpkJAAADAiBGQmwuIsGcPZGUB1/5xBOhIKCl5taxHBP35v0IBCgXMmgWJifD111zLIhRCKckrhP7+cOgQAEBGBvTrx7sacg9NRyUkr+nojBmwdy8EBUFtLURFwbVrvAsiOrREISEXF5eysrJbt25169aNdy1EvuhIKBXh5s2TI0ZcdnamBJKW0TmhVKwuXuz366+P3rjBuxAidxRCyVy8CAAgk6syRMYohJLJzgYAkMlVGSJjFELJsBDSkZC0hkIoGTYdpSMhaQ2FUDJ0JCSGoXVCaVRVgaMjWFtDZSWY83Fe0g7RkVAa2dkgCNC3LyWQtIpCKA2aixKDUQilQVdliMEohNKgIyExGF2YkUZhIZw7B15ecLfjPSH3Q0dCsSHCypUQGgqvvQa3blErF9IqCqHYEhKolQsxCj3KJDZq5UKMREdCsVErF2IkCqHYqJULMRJNR8VGrVyIkWiJghDOaDpKCGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZ/8PyXhnIlEBESwAAAAASUVORK5CYII=\n"
, - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVzUdf4H8PfMyDAMl4rgHQqaV+kSVkqSokPTJmgXWZnHZou1ltVj19itTNPdws3KLgtjS80urFSwzREQ8SS1zLwibzxRQUSOGef4/P74uCM/hxnm+M58ZvD1fPSHzHfmO29iXvM9PpeMMUYAII5cdAEA1zuEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAsDaiC3BKbm5udXX1lClToqKiRNcCIDEZY0x0DS3r27dveXn5/v37+/btK7oWAIkFxulofX09EYWGhoouBEB6CCGAYAghgGABcE1oMpmCgoIUCoXJZBJdC4D0AuBIiMMgtG4IIYBgCCGAYAghgGAIIYBgARPCsLAw0YUAeEXAhBBHQmitAiCEdXV1dH2GkDGaM4e6dyeVim65hYqKRBcEXhEAIbx+j4Svv06vvUazZ9PWrZSQQKNH0549omsC6SGE/spkorfeohkzaMoUSkig3Fzq2ZMWLBBdFkgPIfRX5eVUVUWjRl35USajUaNoyxahNYFXIIT+qrKSiKhTp6uPdO585UFoXRBCfxUcTER0+fLVRwwGkslElQPegxD6q65diYhOn776yJkz1KWLqHLAexBCf9W9O0VH05o1V35kjAoL6c47hdYEXhEAEz1ddyE0GikoiBQK+tvfaOZM6t+fBg+mhQvp7Fl69lnRxYH0EEI/YzRS5840YADpdDRjBhkM9OqrdO4cDRxIOh317i26PpAeQuhntm+nqio6d45UKiKimTNp5kzRNbVCJ06c0Ol0w4cPj4yMjI6OFlsMQuhn1q0jIho5splNr79OZWU0YwYNG+bjoloHvV6/adOmoqKioqKin3/+mTGWkJBQX1+/bt26rvw2mCAIoZ8pKSEiSklpZtOqVfTjj/TUUz6uKNDt27dPp9PpdLoNGzY0NjbyB8PCwpKTk8vLyw8fPpySkrJu3bpu3bqJqjAAJnrq0KFDVVXVuXPnOnToILoWL9PrqX17MhiospKu+WUvXaL27Ukmo+pqwqiultTV1ZWUlKxevVqn0x07dsz6eP/+/dPT0zUaTXJycnBwcE1NjVar3bZtW2xs7Lp16+Li4sSUy/yeSqUiooaGBtGFeF9xMSNiCQnNbMrPZ0Rs2DCf1xQwzGbzjh07srOzNRpNUFCQ9RMeHR2dkZGRk5Nz4sQJ21fV1NQMGTKEiG644YaDBw/6vmzGmL+fjprNZr1eL5fLeRRbOX4u2uwFoYNN17fKysoNGzYUFBR8//331dXV/EGFQpGYmJiWlpaenp6QkCCX220Pj4yMLCwsTE9PX79+PT8v7dWrl69q/x8h0XdebW0tEYWFhYk
uxCeGDmVEbPXqZjYNGsSIWEmJr0vyS42NjYWFhVlZWYmJibImXfni4uIyMzPz8vJqampc2mFdXV1KSgoRderUae/evV4q2x5/vyY8c+ZM586dO3bseObMGdG1eNmlSxQVRYxRVRVFRPy/TVVVFBNDSiVduEDXwxmBHYcPH+b3NtesWXPp0iX+oFqtTkpK0mg06enp/fv3d3vnDQ0NY8aMKS4u7tixY1FR0U033SRR1S3z99PR6+jW6IYNZDRSUtK1CSSidevIYqFhw67DBDp5i8XzN1Kr1fn5+ffee29hYeGoUaOKiopuvvlmz3frDITQb7R4Qdhsu0VrZLFYdu7cyQ96paWlRqORPx4dHT1ixAiNRjN69GhvtOyp1eqCgoKMjIyCggKew4EDB0r+Ls3w8emvq7Zs2UJEQ4YMEV2I9yUkMCJWXNzMphtvZESsrMznNfnUmTNn8vLyJkyY0L59e+vnk99imTVr1o4dO8xmsyf7b2xs1Ol0R48edfw0g8EwZswYImrXrt22bds8eUcn+XsICwsLiWjUqFGiC/GyqiomlzOVitm2xJw8yYhYeDgzGkVU5l2S32KxdejQoZycnIyMjPDwcCJ67bXXWnyJwWC49957iaht27Zl3v/uw+mofygpIYuFkpIoJOTaTXyStREjqI2//7FcVVVVNX/+/OzsbP5jWFjYyJEjtVrtXXfd5WE7QU1NTVFR0dq1a3U6XUVFBX9QJpMlJCTExMS0+HKlUpmXl/fwww9/9913Wq12zZo1vC3RS/z973odhZDsXPW13gvCN998c/78+d26dXvssce0Wm1SUpJSqXR7b/auJDt06JCSkuLqlWRQUFBeXt7kyZOXLVum0WhWr149YsQIt2tzDCH0Dw76bbfeZvri4mKTyZSbm6vVat3eydmzZ0tLS4uKivLz863tWPxKUqPRpKWlJSUlOWisd0ChUCxevFguly9dujQtLa2goCDFS1+F3j7f9dDbb79NRM8++6zoQrzp1Ckmk7GwMHb58rWbDhxgRCwqinl2T8IP1dTUKBQKpVJZV1fn6muNRuPGjRu9eiVpZTKZJk2aRERqtbqoqEiq3TaFI6F45tJSBWOUnExNejxeYT1CuvVd7s9KS0vNZnNSUpLzf1zHjfUajSYxMVHyOhUKxaeffqpWqz/88MMxY8asXLkyNTVV2rdACMWbWlRU0aPHnLFjm7n2b70XhCUlJUQ0sqXT7Pr6+q1btxYUFKxatappY31cXBzvGipVY70DMpnsgw8+kMvlH3zwQXp6+vLly9PT06V8A28cXiU0ffp0IlqwYIHoQryoZ8+eRPTTTz9du8FiYZ06MSL2228i6vIu3g6+fv36ZrdaLJa5c+fecccdbZrcE46JiRk/fvzSpUvPnDnj42p5SfzTqFQqV65cKeGe/T2EU6ZMIaLc3FzRhXjL0aNHiah9+/a2LdF7du/+04ABxWPGCCnMq86fPy+Xy9VqtV6vt/ecQYMG0f9usWRlZW3cuNHDxnrPWSyW5557jufwu+++k2q3fh3CgwcPxsXFtW/f/tFHH7VYLKLL8Yrc3Fwiuv/++203vfPOO0Q0YcIE31flbV9//TURpaamOnjOihUrVq5cWVtb67OqnPTiiy8SUVBQ0DfffCPJDv00hCaTad68eXwMoUKhIKKxY8devHhRdF3Se/TRR4no/ffft93EO2188sknvq/K25588klyrvOKf5o5cyb/ZH722Wee780fQ7h3797bbruNXwZkZGTk5+fzXg69e/fes2eP6Ook1qVLFyLat2/fNY+bTKZ27doR0ZEjR0TU5V033ngjEfmgR5j3zJo1i+dwyZIlHu7Kv0JoNBqzs7P5za4uXbrk5+fzxysqKgYPHkxE4eHhUp0D+IO9e/cSUceOHW1Ptrdv305E8fHxQgrzqpMnT/I/pTHAe8PyDne8DcOT/fhRCPfs2XPrrbfyO8KZmZnXXAw0NjZOnjyZb83KyjKZTKLqlNB
7771HROPHj7fdNG/ePCL685//7PuqvG3JkiVElJ6eLroQCfA/E2/DcHsnfhHCpgfAHj16FBYW2ntmTk4On8Pn7rvvrq6u9mWR3nD//ffbu/fLe3J9+eWXvq/K2/iX6VtvvSW6EGnMnz+f5/C9995zbw/iQ7h7925+qtnsAdBWaWlpx44d+anar7/+6psivcFsNvNJHA8fPnzNJoPBEBoaKpPJTp8+LaQ2r4qNjSWiX375RXQhklm4cKFMJpPJZO+8844bLxcZQn4A5B3ne/bsWdzseNbmHD9+/PbbbyeikJAQzy+LRfnpp5+IKDY21nbThg0biOimm27yeVFed+DAASKKiooS3ugnrY8++kgul8tksrffftvV1woL4a5du2655RbrAfDSpUsuvVyv1/N2fCLKzMy8bNv12e+98cYbRDRlyhTbTbNnzyai6dOn+74qb8vJyeE3vUUXIr1Fixbx4Rr//Oc/XXqhgBBevnzZegCMi4tbt26d27vKycnh+xk+fHhlZaWERfrAPffcQ0TLli2z3TR8+HAiWrFihe+r8rZx48YR0cKFC0UX4hW5ubk8h3PmzHH+Vb4OoWX7du2QIUQkl8uff/75+vr6Fl/i+Lxl06ZNnTt3JqLu3bv7ZkYQSRiNxoiICCKynRa6vr4+ODhYoVC0gjtP17BYLPx6vry8XHQt3vLFF1/w/q5ZWVlOvsSHIbx8mWVns6Cg8uTk+Ph4ez13r6HX65OSkrKzsx085+TJk0OHDiUilUr1n//8R6JyvWvz5s1E1K9fP9tNa9euJaLBgwf7vipv+/XXX3kLsOhCvOuzzz6Ty+Xp6elONqT5KoQ//sgGDGBETKFgf/tbo9MLS+Tl5fELv8cff7yxsdHe04xGY1ZWVgBdIs6dO5eIpk2bZrvpH//4BxG98MILvq/K2xYsWEBEEydOFF2I1/E1npw8NfN+CBsbWVYWUygYEevVi5WWurqDr776io8nTEhIcNyHa+nSpSEhIUQ0bNgwP7+5z8fRNdv7h9/4/eGHH3xflbeNHTuWiDzsX+L/+NRSkZGR/nEk3LqV9evHiFibNmz6dObEFWCzdu3aFR8fT0QdOnRwPMXATz/9xJuhunbtunXrVvfeztt4M6BcLj9//vw1m2pra9u0aRMUFOTq7WL/Z+0N2+LMn4Hu008/5UMOnHy+10LY9AA4YAD78UcP93fx4kX+PdqmTRvHl4jnzp3jx5ng4OBFixZ5+L5ecuHCheLi4lOnTn366afPPPPMjh07+Ml2fn4+P5KLLlB627ZtI6JevXqJLkQau3btsjeTzcSJE10aie6dEG7Zwvr2vXIAzMpi9gduusRisWRnZ/NbwI888oiDO6vXXCIaDAZJCpBEs5MUhYSEpKam1tfXP//880T0yiuviC5Tery7c2ZmpuhCpNGnTx+FQrFr1y7bTfyC0PnuXFKHsKHh6gHwppuYF9oM8vPzIyMjiWjQoEGHDh1y8MzPP/9crVYTUWJi4rFjxySvxCXl5eXvvvvu6NGjm86XExoaOnr06JdffrlTp05ElJyc/Ntvvy1evNj3q3P5QGvqDWsdCGJ7C7C8vJyIYmJinB+GLmkIN29mffpIfgC0VV5ezhfBat++vU6nc/DMnTt38hlcoqOjPekV4J66urrCwsLp06f36NGDmoiLi5s+fXphYaF1cofy8nI+L+2wYcP8cCy551pZb9ilS5cSUVpamu2mDz/8kIjGjRvn/N4kCiE/AMrljIjdfDPbvl2a3dpXW1vLhyAoFIpZs2Y5+NY5f/48n6OuxYtJSTRdtLnpfNJRUVF80ebjx483+8IjR47w74ukpKTWN4dAK+sN+6c//YmI3nzzTdtNDz30EBF99NFHzu9NihBu3Hhl2SB+APTVBVjTS8QxY8Y4mO/VZDJlZWXxC7Dx48c7003HVZWVlXl5eZmZmXykPGedpKiwsNCZAaxHjx6Ni4vj589VVVWSFylQK+sNy09tdu7cec3j1i5
Bv//+u/N7czGEFgt79VXWrRsLDmYJCYwP/JsxgxGxgQOZ7aR93vf999/zG999+vSxnSSiKefbG51kbx7ozp07T5gwIS8v78KFC67u89ixY7wx5pZbbrFtwAhcrak37MGDB8nOQJBdu3bxHpQu7dDFEP7rXyw4mOXmsp9/Zo8/zpRKtns3a2xk777bzBTuvnLgwAG+uHFERITjP/P+/fv79u3L2xsdDB12zLrUVkSTJXVDQkI0Gk12dvaOHTvc261VRUUFX5PoD3/4w7lz5zzcmz9oZb1hFy1aREQPPvig7Sa+asOkSZNc2qErITQaWVQUe/nlKz9aLKxPH9bcSBzfu3TpUkZGBv1v8gsHfb6t7Y0KhcL5S0TrLRZ+2db0FktmZmZ+fr6DLnVuOH36NL/z1K9fv1ZwJ6OV9YZ95JFHiKjZ+Sz46qKLFy92aYeuhHDPHkbESkquPvKXv7DmuiALYbFYFixYwDuwjx492sGpYNOLyYcfftjBgiR79uxxcIuloqLCO78KY4ydOXOGH9779u176tQp772RD7Sm3rAWi4W3J/1mMy26yWRq27Ytud4lyJUQFhczIrZ//9VH5s5l7du79H7eVlJS4uT8iAUFBfx/2cCBA5u2N549e9bzWyySqKysvPnmm/nl7smTJ33zpt7QmnrD7t69m+wMBCkrK+MfPFf36UoIN21iRKxpF4GXX2ZRUa6+pbdZ50cMCwtbvny5g2c2bW+cN2/eCy+8MGjQoKa3WHr06DF16tRvv/1WwqW2XFJdXc1/lxtvvNFe24af471hlUpl6+gN62Ba9Ndee42InnzySVf36UoIjxxhRGzNmquPPPEEu/lmV9/SB5yfH9F6iRgVFSX5LRZJVFdX85kge/ToYTsllP9btWoVESUnJ4suRBoOpkXnzdFff/21q/t0JYQmE4uOZs89d+VHi4XFxrLmRsT5CSfnR6yurg4KClIoFE8//XRRUZGDJUpEsR4Phw59NOBGIPAVVGbNmiW6EAk4mBbd2iXIjRWjXGyimDePKZVs0SL288/siSdYSAhzpVHS90pKSqKjo4koPj7+4MGDzT6H9zPSaDQ+rs0lFy5cuOeeqd26GWNjmcMOs37H8RJogcXBtOjr16/n9xfc2K3rjfVz5rCuXZlSyQYNYoGwoMeJEyduv/32gQMH2rsLOmTIECKSZGUPr6qrYyNGMCLWvbuff/Vd5cwSaAHEwbTofGmK56znia5wt9uaXs/UaiaXs0CY46yxsdHe3cXy8nKZTBYREeGNvmySq6tjI0cyItapEwuIpXGcWQItgNx9991E9MUXX9huSk5OJqJVq1a5sVt3V0IPDqZhw8hioaIiN/fgQyqVqml7Q1O8XTUjI4MPevJzoaFUUEAaDZ05Q6NG0Z49ogtqCV8TO8X+ct8Wi+X48eM+rMh9RqNx06ZNMpnM9tdpaGjYtm2bQqG488473dizuyEkIq2WiEinc38PolkslmXLlhHRpEmTRNfiLLWa8vMpNZUqK2nUKNq9W3RBDq1bt47sL0zPGJs2bdrgwYN3+/mvQUREP/74Y11dXf/+/XljfVMbN240GAyJiYm85dlVHodw7VpizP2dCFVcXHz8+PGePXsOGzZMdC0uCAmhggJKT6ezZ2n4cNqxQ3RBNo4ePbpo0aIxY8ZUVFSoVCre5cDW5cuXjxw5cvbs2VGjRvHZEP2Zgy8UfsC3913TMo/Okbt3Z0TMZkBHoBg/fjwRvfrqq6ILcYfBwMaOZUSsbVvPZ/CRQH19/ffffz99+vQ+ffpYP118pa3Ro0fbuzFjMBh4f8t27dr5+dzNI0aMIKJml6rnDbmOx5c74FkIp0xhRMz7I2W94eLFi2q1WiaTOZ4jw58ZDOy++67kUNSit3xMSVpaGl/bnAsNDdVoNAsWLFi7di0/edNqtQ12Jps1GAy8Bbxt27Z+u3ZvY2OjSqWSy+W24zxramoUCoVSqXTQCdkxz0KYl8eIWEqKRzsR5OO
PPyailMAs3spkYo8+yohYZCTbssVHb3r+/HnevbZ79+7W4Mnlcmv32qYza+3fv5+vUzB8+HB7PdcuX77M50mIjIz0z4kqCwsLiSgxMdF208qVK4nozjvvdHvnnoWwupopFEypZAHYLZBfB7o66sQPmUzssccYEQsN/X9DXKR+F5N12g4+VIWLiYnhY0ocDPX47bff+N3p5ORkezPomEwmfnUQFhZW4r1fw10vvvgiEc2YMcN207PPPkuedQnyeHqLIUMYESso8HQ/vnXgwAGZTBYaGto6ehWbTGzixCs5dHqVR6ccPcpycti4cQa+nqn1Sk+j0fz73/9udsK/Zllnsrrjjjsc5HDChAn8bNb3s3I55mAgCL/tVOr61PJWHodw1ixGxJ5+2tP9+NbMmTOJaPLkyaILkYzJxCZPZkRMrWbuzhlwRUMDKyxkWVksMZERXflv0KA7+QjmvLw89+ahcmYmK5PJxJuL1Gq146nWfYkPBGnTpo3t10dlZaVMJvOwS5DHIdyy5coiE4HDYrHwT4MfnvZ4wmy+cqcsOpq5cY/g11/ZG28wjYapVFezFxnJ7r+f5eSwY8ck6FHkzExWFovlqaee4jlcu3at52/qOQfTon/11VdEdNddd3myf49DaDKx9u0ZEbPTPdoPFRcXE1FsbGwrW7GZMWaxsGefdeHK8Px5lpfHMjOvNDbx/+RylpjIsrJYYaH0U+dZZ7JKSEiwN5OVxWKZNm0aP+/Nz8+XuALXOZgWPTMzk4g8nEpTiikPMzIYEQuctVf5UgGtY3CNA/HxrHPnqykaO5a99BJjjJlMbMcOlp3NNBrWps3V7MXEsIwMlpPDvD2ZRkVFRe/evcnhTFYWi+WZZ54hIqVSuXLlSu8W1JJBgwbZO2/iU3J52MIpRQhzcxkRc3oNGrHq6urCwsJkMpm9kU2tRnw8UyrZ0qVXfuQhnDuXRUZeDV5wMBs1iv3738zpOyzScGYmK4vFwsciKpXKZpvIfYMPBFGpVLZzebm6BJo9UoTw+HFGxMLDBc566LxPPvnEw1adQBEfz+67jyUkXPmRh3D+fEbE4uJYZibLy2MCZ/puOpOVgxl0eNtAUFDQt99+68vyrIxG44YNG5ptynJ1CTR7JJoGv39/RuTGAqC+x2ehDZRVtT0RH8+WLGHR0VcuEXkIz51jUsx7LI2mM1mdOHHC3tNefvllIlIoFP425tPVJdDskSiEzz/PiNiLL0qzN685cuSIXC4PCQkRNXGTL8XHsy+/ZDNnsjFjGGtyTehXrDN39O7d28FMVnzIrEKhWLJkiS/Lc+yGG24gV5ZAs8eDURRNWP74x+39+y86dkySvXnP4sWLLRbLAw88wBdXux785S+0di0dPCi6DjvatWtXWFh46623HjhwIDk5+ciRI80+bfbs2dnZ2Waz+fHHH1+8eLFva2ze77//XlFRERMTw0+qPSLJV0JjY6NarZbL5ZV+PNDeYrHwm+NuT4AfWPiRkDE2cSKbNo3de68/Hgm5Cxcu8C4psbGxDvrT89klZDLZQj+4Fe/GEmj2SHMkVKlUycnJFouF93P1Txs2bDh06FC3bt0cDPRulZ57jhYvJoNBdB32tW3bVqfTDR069NixYykpKQftHLhfeOGF+fPnM8amTZv2wQcf+LjIa7Q4aYDzpAkhEfF1WHV+PNB+yZIlRDRp0iSFQiG6Fp9KSKDERCopEV2HQ5GRkYWFhSNGjKioqEhJSTlw4ECzT/vrX/+6cOFCInrmmWfeffdd39Z4FWOstLSUPBnIe83uJLF3714i6tixo/OrBPtSXV1deHg4Ee1vOo1/q2Y9HWWMrVjBiPz3dNSqrq6Of6w7duzoYBWDjz76SCaTyWSyt99+25flWfG5D7t27SrJ3qRcLpvfLLJdOdEf8MNgUlKS6EKgBfX19RqNhudw9+7d9p62cOFCvmDBW2+95bParKvihYaG9uzZ84knnpBkt1KGcMqUKeRxPzov4d+vOTk
5oguBljU0NPAp5WNiYhyMlsrNzQ0KCvrSerj3josXL3733XdTp07lq/NyMpls6tSpUr2FlCHMy8sjvxyrfvz4cYVCoVKp3Fg6N3DFx7O5c6/+OGBAAJyOWun1euvcM9u3b7f3NFcXIXOS2Wy2jmBudlU8aRfnuTpE2nMajUahUGzevJn3z5Rwzx5avHix2Wx+6KGH3JuRDnwvODh4+fLl48aNW7lyZWpqqk6nu+2222yfFhsbK+Gbnj17trS0tKioaPXq1adOneIP8lXxNBpNWlpaUlISX9ZSWlKGsF27drfeemtZWdn69evT0tJsn1BaWjp79myVShUeHh4WFhYcHBwZGalWq1UqVdu2bUNCQvg/VCqVWq2OjIxUqVShoaHh4eFN51NwQ8BNLgpEpFQqv/7664cffnjFihVarXbNmjW8LVFaJpOprKxs9erVRUVFP//8M/vf/J2dO3fWaDTp6empqane/u6WMoREpNVqy8rKdDpdsyE8deoUXzfDVW3atGkxt6+88kqzbQ+bN28uLy/v0qULv9yHAKJUKpcvXz5x4sQvvvhCq9X+8MMPQ4cOlWTPhw8fLioqKioq0ul0tbW1/MGQkJA77rhDo9FoNJrExERJ3sgZMibp1L1lZWVDhw7t3bv377//bru1qqpq165dBoOhrq7u0qVLer3+0qVLdXV1BoPh4sWLDQ0Ner2+pqamsbGR/0Ov1zc0NFy8eNFisTh+X7lcbjKZmq7vaZWZmfnxxx///e9/f/3116X5JQNEr1505AhZ/5eYzfTSS/TPfwqtyS1ms3ny5MnLli0LDQ0tKChwu328vr5+69atBQUFBQUFTfvHxcXF8bPN1NTUphM3+ozEITSbzTExMdXV1YcOHeITGUjCaDQ6zq3ZbOZjXq7R2NjYpUuXmpqaffv29evXT6p6AkKvXnTPPZSZeeXH++6jceMCMoREZDabp0yZsmTJErVaXVBQ4HwTucVi2blzJz/obdiw4fLly/zxqKiokSNHajSaP/7xj00nbhRDwps8XEZGBhH5Q+8+xtjnn39ORLfffrvoQgQI6LujtvjxkIjUanWLvX8rKyv5zKhNFwLit1j4zKhGo9E3ZTtD4mtCItJqtcuXL9fpdHy6HrGsXfjP2oIAAANTSURBVNVEFwKeksvln3zyiVqtXrhwYVpa2jfffHPNfQd7t1g6deqUmprqm1ssbpI81nylq/Dw8MuiB9qfOHGCz09ub0Kh1q2VHQk5i8Xy9NNPE5FSqeSLAVp7sURERFg/1SEhIRqNJjs7e8eOHaJLbpn0R8Ju3br1799/3759W7dudW+5NqksXbrUbDY/8MADUVFRAssACclkMt5v+/3333/wwQdjYmJOnjxp3TRw4ECtVqvVaocNG8bXogkI0oeQiLRa7b59+954443Kysrw8HBrw6BKpYqIiFCr1b75H7R06VK6js9FrxkP5P8rijqJ5zAoKKi8vPy///2vf91icYvEd0e59evXz5kzp8Th4BmVSsVb+dq1a2fvH46fEBER4WBQEm8s6dix44kTJzxs6wf/ZDQaf/nll8TERG/0YvElr3w6R4wY0bVr13/961+8lU+v19fX1zdtYDAajXq9Xq/XE9Hp06fdfqOIiAiVShUWFmY93oaGhqpUqsjIyLKyMiKaMGECEthaBQUF8YUBA51XjoTO4C3yjY2NFy5csPcPx09osRH/kUceeemllwYMGOCzXwrADcJCKAlrr5ra2lq9Xs8b9A0GQ21tbX19/dSpU9VqtegaAVoQ2CEEaAUC+4oWoBVACAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEM
IAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEEQwgBBEMIAQRDCAEE+z++dXgbGiYhNQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAZk0lEQVR4nO3daVhU59kH8HtmWMIqiqAGV4zUiCkhGkkAFXQQDFp7pdAszWLV2EtjNK1tTZoYY9IkpvFKNa1GBVNtE62ktYa4ADOACNhqIWoMaTERERQQFBBZHJaZ98PTdzqFYQRmuecc/r9PynM8c6vzn3Oe55y5j8JgMBAA8FFyFwAw2CGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQR5unLlSldXF3cVfYIQggxdvnw5Ojr6ySef7Ozs5K7lzhBCkJvy8vJZs2Zdvny5tra2o6ODu5w7UxgMBu4aAGymsrIyJiamrKwsMjIyIyPDx8eHu6I7QwhBPq5duxYTE/Pvf//7oYceysrKkkQCCSEE2aitrY2Njf3666/Dw8Ozs7OHDh3KXVFfYU4IclBXVzdnzpyvv/46LCxMo9FIKIGEEIIMNDQ0JCQklJSU3HfffVqt1t/fn7ui/sHpKEhbY2NjXFxcUVHRd77znePHj48cOZK7on7DkRAk7ObNm/Hx8UVFRZMmTcrJyZFiAgkhBOlqaWlZuHDh6dOn77nnntzc3Lvvvpu7ogFCCEGSWltbExMT8/Pzx44dq9FogoKCuCsaOIQQpKe1tXXBggV5eXljxozJzc0dP348d0VWQQhBYnQ6XVJSUm5ublBQUG5ubnBwMHdF1kIIQUra29uTkpKOHTsWGBio0WgmTpzIXZENIIQgGR0dHcnJyYcPHw4ICMjJybn33nu5K7INhBCkoaur6+mnn05PTx8+fHh2dnZoaCh3RTaDEIIEiAQeOHDAz88vIyPjvvvu467IlhBCcHZdXV2LFy/ev3//kCFDsrKypk2bxl2RjSGE4NQMBsOKFSs+/vhjX1/fzMzMBx98kLsi20MIwXkZDIaVK1empKR4enqmp6dHRERwV2QXCOFgJIkOSAaDYdWqVTt27PD09Dx8+PDs2bO5K7IXF+4CwNG2bt3a1NS0bdu2WbNmqdXqqKgo51xpXLdu3fbt2z08PNLT02NjY7nLsScDDCa//e1vicjd3d30PTB+/Pgf//jHf/zjH69cucJd4H+89NJLROTm5nb48GHuWuwOIRxEdu3apVAoFArFjh07Ll68uHPnzuT
k5G5fgQ0ODl6+fPnevXsrKyu56nzllVdEAtPT07lqcCSEcLDYvXu3UqlUKBTbtm3rNmQMZLeuECKQaWlpN27ccFidr732GhGpVKoDBw447EV5IYSDwp49e0QCf/e731nYrLOzs6ioaMuWLcnJyb6+vsY0KpXKKVOmiEA2NDTYr8733ntPJHDfvn32exVngxDK34EDB1QqFRFt2rSp73+qo6OjqKho06ZNarX6rrvuMgZSpVJNmzZt3bp16enpN2/etGGd77//vtj/xx9/bMPdOj+EUOY+/fRTFxcXInrrrbcGvJPW1tb8/HwRSDc3N2MgXVxcRCA1Gs3t27etqXPLli1EpFAodu3aZc1+pAghlLODBw+6uroS0RtvvGGrfTY3N2s0mnXr1kVFRYmdC56enmq1esOGDRqNRqfT9WufKSkpxhUjW9UpIQihbB09elRcivjFL35hp5e4deuWCOS0adOUyv/e+OHl5aVWqzdt2lRUVNTV1WV5JxZWjAYJhFCeMjMzxUTuZz/7mWNesba2Nj09XQRSoVAYA+nj42MMpF6v7/anxIoREf3mN79xTJ1OCCGUoaysLJHANWvWsBRQU1OTlpa2fPnyCRMmmF7zCAwMTE5O3rJlS1FRkWGgK0bygxDKTX5+vpeXFxE999xzPY88jldWVpaamvqjH/2oW0vCESNGiGPgO++8w10jM3TglpWTJ0/Gx8c3NzcvWbIkNTXV9LTQGZSVlWm12oKCgpycnKtXr44cOTI2Nnbfvn3cdXHj/hQAmzl58qR4GNjixYvvuBzCS6/Xr1mzhoiWLl3KXQs/HAll4syZM3Pnzm1oaPjhD3+4b98+MddyZmfPng0PDw8KCqqsrHS2I7aDIYRycPbs2blz59bX1yclJe3fv19cnXdyBoMhKCiourr6/PnzU6dO5S6HE77UK3lffvmlWq2ur69/9NFHpZJAIlIoFPPmzSOijIwM7lqYIYTSVlpaGh8ff+PGjfnz5+/bt08qCRTi4+OJKDMzk7sQZjgdlbALFy7ExMRUV1fHx8d/9tln3b6q6/xu3LgRGBjo6up6/fp1b29v7nLY4EgoVd9++21sbGx1dXVcXNyhQ4ckl0Ai8vf3nz59uk6ny8vL466F02AJ4a1bt1555RWdTsddiG1UVFTExcVVVVXNnDnzb3/7m+lXjaQlISGBcEbKe4XEMZqbm2fNmkVEy5Yts7BZQ0PDxYsX9+7dW1FR4bDaBqCiokLcDhYVFXXr1i3ucu4sLy9v5cqVBQUFPYcKCwuJKCQkxPFVOQ/5h7C1tXXOnDlENHr06G+//ba3zRobG6dPnx4eHi4+m4KDg59++umdO3c6WyArKyvFo4gefvjhpqYm7nL6RHRtWrt2bc+hzs7OYcOGEZGF/xrZk3kIdTpdYmIiEY0YMeJf//pXb5s1NjaK1s5BQUEJCQlDhgwxPVmYMmXKqlWr/vrXv16/ft2RxfdUU1MjHkX0wAMP1NfX8xbTd7m5uUQUGhpqdjQ5OZmItm/f7uCqnIecQ6jT6RYuXEhEgYGBJSUlvW3W3Nw8c+ZMIho3blx5ebnhf1utdAuksfeR4zNw7dq1KVOmENH999/vyM5L1mtvbxcday5fvtxzNDU1lYgWLVrk+MKchGxD2N7evmjRIiIKCAg4f/58b5u1tLTExMQQ0dixYy9dutRzAxHI3lqtrF69Oi0tzbatVsyqra0Vt5V897vfZT8gD4D4v0hJSek5JG5b8/LysrJBhnTJM4SdnZ2PP/44Efn5+RUXF/e2mXG6OGbMmIsXL95xt6a9j0wvCZi2Wmlra7PpX8VgMBgaGhrEo4gmT55cU1Nj8/07wPbt24koKSnJ7Kj4fMnNzXVsUc5ChiHs7Ox88skniWjIkCH//Oc/e9tMp9M98sgjd5wu9qalpUWj0WzYsEGtVpu2WvHw8IiKihKB7G+rFbOM89WQkJCqqirrd8ji0qVL4jOxo6Oj5+jatWuJ6KWXXnJ8Yc5
AbiEUT5MkIl9f31OnTvW2mU6nW7BgwR2ni310x1Yr+fn57e3tA9jzzZs3Z8yYQUSTJk26evWqlXXyCgkJISKzFyqysrLEXNfxVTkDWYVQr9f/5Cc/Ee/+vLy83jZrb2//3ve+J6aLX331lW1rqKurM9tqxdvbu++9jwTj5U3jipGkrV69mojWr1/fc+j27dve3t4KhUK6h3pryCeEer1+xYoVROTp6WlhdtHZ2fnYY48R0dChQy1MF23C2GpFrGoaBQQELFiwoLfeR4LpilFZWZld63SMI0eOENGMGTPMjoqpwZ49exxclTOQSQj1ev2qVavElCw7O7u3zYzTRT8/PwvTRXuoqqoSgRw3bpxpIEeMGJGcnLxz507TlSHTGwz6smIkCa2trR4eHkql8tq1az1Ht27dSkRPPPGE4wtjJ5MQ/vKXvyQiNze3I0eO9LZNV1fXU089JRZsLEwXHaC0tHTHjh2PPfZYYGCgaSAnTJiwdOnSPXv2xMXFDXjFyJmJv5fZ50yUlpYSkb+/v5M35rAHOYTw5ZdfFgn8/PPPe9tGr9cvX75cTBdPnDjhyPIsM/uIMh8fn5EjR8osgQaDYfPmzUT07LPPmh0NDg4mIt7PRxaSD+Grr75KRK6urp999llv25hOF48fP+7I8vquq6uruLh48+bNiYmJmZmZNl8xcgbnz58nopEjR5qdCYv/o40bNzq+MF7SDuHrr78ubl7585//3Ns2er3++eefFwnMyclxZHnQ05gxY4jozJkzPYcOHTpERJGRkY6vipeEQyjObVQq1SeffGJhMzFddHd3P3r0qMNqg94sXbqUemn429zc7O7urlKppHVnrPWkGkLx7HWlUvmnP/3JwmZ9mS6CI6WlpRFRTEyM2VFxVSYtLc3BVfGSZAjFcrZCodi5c6eFzcSjzy1PF8HBGhoaXFxcXF1dzd71/s4779Dg6wgsvRCK7u4KhcLyN9A2bNhwx+kisIiMjCSiQ4cO9Rw6c+aMuDrqDE/RcBiJhfCjjz4Sz7L7/e9/b2Ez43RxUD36XCo2btxIRCtWrOg5pNfrR40aRUQWvn0mP1IKofFZdu+++66FzcR0UaVSWZ4uApdTp06JOxPMjj777LNEtHnzZgdXxUgyIUxLSxOdbS0/Scs4XRyEjz6Xiq6uroCAACIqLS3tOSoe0hQXF+f4wrj0NYR5eXnGzg51dXV2ramn9vZ2cQ/0W2+9ZWEz46PPP/zwQ4fVBgPwxBNPENEHH3zQc6iurk6pVLq7u0uikZxN9DWE4jxeUCqVYWFhL774Ynp6emNjo13rM6qurrb8THPjdHHQPvpcQvbs2UNEiYmJZkfFVygt3AYsM30NoWmrFQ8PD2MgTVutOCyQPeHR59JSXV2tUCg8PT3N9pVZv349Ea1evdrxhbEYyJyQq9VKb/o4XQSnEhYWRkQajabnUEFBAQ2mjsDWLsyYtlpxc3MzG0i7dtH6y1/+IhL461//2n6vAja3bt06Ivr5z3/ec8jYEVg236W0zJaro83NzWZbrXh6eqrV6g0bNmg0moG1WunNwYMHRZOlQXjrvdTl5OQQ0dSpU82OJiUl0aDpCGyvSxRNTU3GQFrZaqU3x44dEyfDZj9NwcnpdDofHx8iMvuggZSUFBo0HYEdcZ3w2rVraWlpq1evFs0zjYYPH37HViu9yczMFK14f/rTn9qpbLA30W4rNTW159Cg6gjs6Iv11dXVotXK+PHjTQNpbLXSly+zajQasUK7Zs0aB9QMdrJt2zYiSk5ONjsaGhpKg6MjMOcdM+I5ZMuXLxdf9DQaNWqUCKTZPn/5+fniqa7Lli0bVLf5yk9ZWVlCQoLZI6FhMHUEdpbb1sy2WiGTR5RVVlYaDIbCwkIxkViyZMkg7Ag0qAyejsBO98x6vV7/5Zdf5ubm5uTknDhxoqmpSfxcoVBMmjSpsrKyra1t8eLFu3fvNl2ABfnR6XT+/v6tra1Xr14VX62QK6cLoamurq6
zZ88WFBQUFhZmZWXdvHlz9OjRo0ePLigoUKlU3NWB3SUmJh49enTbtm0rV67krsWeuA/FfaXT6cQk4dFHH+WuBRzkgw8+mDhxokKhmDJlivjyQENDA3dRtufUR8JuysvLJ0yY4Ovre/36ddMHIYGMffTRRytXrtTpdOK3Li4uM2bMiI2NjY2NjYyMNL2NWbqkFEIimjx5cmlpaX5+fnR0NHct4CBtbW3FxcWFhYVarfbEiRPt7e3i5y4uLmFhYWq1Wq1Wz5w50/Q2ZmmRWAhffPHFrVu3vvrqq2+++SZ3LcCgpaXl73//u1arLSgoOH36dEdHh/i5p6dnZGRkVFRUdHT0rFmzTG9jdn4SC+GxY8ceeeSRBx988PTp09y1ALPm5uZ//OMfWq1Wq9V+8cUXxneyl5fXww8/LI6Q4eHhzr+KLrEQtrW1+fv763S66urqbk9TgcGsrq7u+PHjYiHdNJA+Pj4REREikA888IDpbczOQ2IhJKL4+PisrKxPPvlEPOQMoJuampr8/HytVqvRaMRjuoXAwMDZs2eLU9ZutzHzkl4I33///bVr1z7zzDN79+7lrgWcXVVVlVjRycjIqKioMP581KhR0dHRarV63rx53W5jdjzphbCkpGTq1KkjRowQLRK4ywHJKCsrEys6OTk5V69eNf48ODhYHB7nz5/f7TZmx5BeCIlo3LhxFRUVX3zxRXh4OHctIEkikFqtNjs7u76+3vjz4OBgMYGcM2dOt9uY7UeSIXzuuedSU1Pffvtt8bwXgAHreWukccgYSLVaPXToUNM/8s0330yePNlmRfDcqGOdTz/9lIhmz57NXQjISnt7e0FBwRtvvBEbGyu+Mi64uLgYO4K3trYGBga6u7s3Nzfb6nUleSRsamoaPnw4EdXV1Q0ZMoS7HJChzs7Oc+fOiVPW/Pz8AwcOLFq0SAxFREScPn368OHDiYmJNnktZ7+OaZavr++MGTM6Ojpyc3O5awF5SklJefvtt9esWaPRaG7cuJGQkGAcEr/OzMy01WtJMoREFB8fTzb9hwAwtWvXroMHD+bn5xORl5eX6Y2p4r2XkZFhq9eSagjFp5EN/yEATFk43EVERAwbNuybb765ePGiTV5LqiGcNm1aYGBgeXl5aWkpdy0gQxYOdyqVau7cuWS7EzGphlCpVKrVasIZKdhHVFSUr69vSUmJ6X02RradDUk1hIRpIdiTq6trbGwsEYl+U90kJCQoFIrs7Gzjt42tIe0QKhSK3NzctrY27lpAhix8ygcFBYWGhra0tJw8edL6F5JwCEeMGHH//fe3tbWJh/gA2Nb8+fOJSKvVdnZ29hy14YUKCYeQcEYK9jR+/PiQkJDGxsZTp071HLXhe08OIcSFCrATC4e7mTNnent7nzt3rqqqyspXkXYIo6KihgwZ0tsSFoCVLBzu3N3dxd3LGo3GyleRdghdXV1jYmKolyUsACvFxsZ6eHgUFRXV1tb2HLXVGam0Q0iYFoI9eXh4REdH6/X67OzsnqPG915XV5c1ryL5EIolrKysLGP3OwAbsvApHxISMnHixPr6+uLiYmteQvIhFEtYTU1NaIII9mAModkv/c2bN4+sXhqUfAjJDl8tATCaOnXq2LFja2pqzp0713PUJrMhOYQQFyrAruLi4qiXN9jcuXPd3NxOnTrV1NAw4P3LIYRiCau4uNjsEhaAlSwc7ry9vc889ZQuIMBXqx3w/uUQQuMSltaKfwiA3sTFxbm4uBQWFhofWWtqyqRJqpoasuKMVA4hJFyoAHvy8/N7Jj7+w4gIndm7lEXni4wMGmi7JpmEUKzNZGVlSbFvFTi/3TNmLC0oCDh82MxYWBiNGkVXr1JJycB2LpMQhoaGiiWss2fPctcCciQOd8eOmRlSKGjePCIa8BmpTEJI/3/FBmekYBfTp1NAAJWX04ULZkbj44kQQkwLwa6USlKriXpJWlwcKZV04gQ1Nw9k39aV5kTUarVYwjLtZA5gMxY
Od8OH0/TppNPRiRMD2LF8Qujn5xcREdHR0XH8+HHuWkCO4uNJoaDcXLp92/woDfCMVD4hJJyRgl2NHElhYdTaSmYvVIgQDui2LRmG8JjZJSwA61k43D30EA0bRhcuUFlZf/cqqxBOnz5ddAS+YHYJC8BKFg53KhXNmUM0kDNSWYVQqVSK1si4mRvsIjqafH3pq6+ostLM6ECnhbIKIRElJCQEBAS0t7dzFwJy5OpKMTFERGbbqYgL+jk51M+3nySfT2hBR0eHSqVSKuX24QLOYvt2ev55Sk6mtDQzo1OnUkkJHT9Os2f3fZdye7O6uroigWBH8+cTEWm1ZK4j8MDOSPF+BeiPCRNo0iRqaCCz7VQGdKECIQToJzH3M3u4mzWLPD3p7Fmqru77/hBCgH6ycM551100ezYZDNSfjsByW5gBsLuWFvL3p44OqqmhgIDuo59/TuXl9P3v05gxfdwfQgjQf2o1ZWfT/v30+OPW7wynowD9l5BAQ4fSjRs22RmOhAD919ZGbm6kUtlkZzgSAvSfh8f/JHD3brr3XnJ3p9Gj6eWXqZ9PZEAIAayzZw8tW0Y/+AGdPk0bN9KWLfTCC/3aAU5HAawTHEz33ktHjvznt6+/Tm++SVeu0KhRfdwBjoQAVqispEuXKDHxvz9ZuJD0eios7Ps+EEIAK9TUENH/HPTEr3HHDIBD6fXdf92fbxEghABWCAoiIqqq+u9PxDFw9Oi+7wMhBLDC3XfTPfeQaXv8I0fI1ZWiovq+D4QQwDrr11NWFv3qV3TuHP3hD/Tuu/TCCzR8eN93gEsUAFbbvZvee48uXaLAQFqyhF57rV830yCEAMxwOgrADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQCz/wOAAroCsHt1UAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAaz0lEQVR4nO3de1xUZf4H8O8AArJK5Fq0S7pq5iXbVTR7Ybprm9puWWFgiqaCmnlJUcrwQt7RLLuYkZqpXPKakqVtudrarrYrqIuYmqhlXjDADREnEISZ+f3xvF6n85uZQzBzzvM9Q5/3n486z4P6mXnmnPN8vxaHw0EAwMePewEAv3QIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmAVwL4CB1Wr98ssvjZ4lKCjooYceMnoWafbt21ddXW30LH369GnevLnRs5iO4xcmJydn6tSpEv5iW7ZsmZiYaLVauX9ib1mt1sTExJYtW0r4S5s6dWpOTg73TyybxeFwSPjLNQmHwxEVFXXo0KEOHTq0a9fO0LmOHj1aUlKSkpKSmppq6ERGe+mllxYvXhweHh4ZGWnoROfOnTtz5sz999+fk5NjsVgMnctcuN8FpEpPTyeiiIgICR9QBw8etFgsQUFBZ8+eNXou45w7dy44ONhisRw4cMDouSoqKlq3bk1EGRkZRs9lKr+gEFqt1t/+9rdE9P7778uZceTIkUQUGxsrZzojxMTEENGoUaPkTJeVlUVE4eHh5eXlcmY0g19QCGfNmkVEUVFRdrtdzoyFhYXNmjUjoj179siZUV/79u0jopCQkIsXL8qZ0W639+7dm4hmz54tZ0Yz+KWE8NtvvxXbKsnf+xctWkREXbp0qampkTmv92pra7t27UpEqampMuc9cuSIn59fYGDgmTNnZM7L6JdyYSYmJmbHjh3x8fEZGRky562qqrrnnnu+++67VatWTZgwQebUXlq1atWkSZNat2596tSpkJAQp1/NzMwsKiryfpakpKSgoCCnwfj4+KysrJiYmOzsbO+n8AHc7wIyiG1Vs2bNLl++LH/2bdu2EVGLFi1KS0vlz+6ZsrIycU9i+/btbn9Djx49dPnvd/XqVdcXLy4uDg0NJZ/dxjdU479Zb7PZpk2bRkSzZ88WF2YEq9UaGxs7c+ZMHW+p22y2NWvWHDlyZN26dcrg4MGD+/fv//nnn6empr7xxht6zWWoBQsW/PDDDw8++GBsbKzb35CQkNC/f3/vJwoODnYdDA8PnzFjRkpKSlJSUn5+fkBAY/9fyv0uYLh33nmHiNq2bXvjxg31eHJyMhE98MADOs5VVFQkHvjYu3evevzEiRMBAQEBAQEnTpzQcTqDnDp1qkmTJv7+/vn5+VxrqKqqat++PRGtXLmSaw3SNPIQXr16VWyrsrOz1ePffPNNUFCQn59fbm6uvjO+/PLLRHTPPffcvHlTPT5+/Hgi6t+/v77TGeGRRx4hogkTJvAuY/v27UTUokWLH374gXclRmvkIUxMTCSiP//5z07jTzzxBBGNGTNG9xmrq6vvvvtuInr77bfV46WlpS1atCCiTz75RPdJdbRr1y4iCgsLu3LlCvdaHAMGDCC
iqVOnci/EWI05hF9//bXYVh07dkw9/vnnnxNR8+bNv//+eyPm3bFjBxHdeuutTm/h4gth+/btq6qqjJjXezdv3uzYsSMRvfnmm9xrcThU2/jjx49zr8VAjTmEf/3rX4lo4sSJ6sHa2trf//73RPTKK68YN/XDDz9MRFOmTFEP1tTUdOnShYhef/1146b2xmuvvUZEnTp1ctpLMxL3dXxiG++xRhvCnTt3io+j//3vf+rxFStWEFG7du0M/Tg6efJkQECAv7//V199pR7fs2cPEYWGhhYVFRk3u2dKSkrCwsKI6NNPP+Vey09KS0t//etfE9GuXbu412KUxhnC6urqDh06ENHy5cvV41evXhX/oh999JHRa3juueeI6KGHHnIaf/TRR4no2WefNXoBDTVu3DgiGjhwIPdCnL355ptEdNddd5l2G++lxhnCV199lYg6d+7stK3SCoYRlMB//PHH6vGzZ8+KC7OHDx+WsIx6Onr0qL+/f5MmTQoKCrjX4qympubee+8lomXLlnGvxRCNMIQlJSW33HILEX322Wfqca0tonHE1tf1Lfz5558not69e0t7lPxn9e3bl4heeOEF7oW4t3fvXnEtzYTbeO81whA+88wzRPTYY485jbu9WGKompoatxeBysvL77jjDiL64IMPpC2mDlu3biWi2267raysjHstmgYOHEhE48aN416I/hpbCPPy8vz9/QMDA522VVq3DYymdTtk9erVRNSqVauKigqZ63FVWVnZpk0bInr33Xd5V1I3c27jddHYQvinP/2JiKZPn64e1LqBLofbBwNsNpt4BnrhwoXyl6S2YMECIurWrVttbS3vSn7WCy+8IJ40NM82XheNKoRbtmwhottvv/3atWvqca1HyeRQHpE7dOiQevzLL7+0WCxNmza9cOGC/FUJhYWFv/rVr4jon//8J9ca6u/69etiG79161buteip8YRQ2Va999576nHlXMzu3bu51vbiiy8SUa9evZzewgcPHkxEI0aM4FrY008/TURPPfUU1wIaas2aNSbZxuuo8YRw/vz5brdVCQkJRDRo0CCuhTkcjuvXr//mN78hok2bNqnHL168GBISIqeMkqv//Oc/FoslODj4u+++kz+7Z2w223333UdECxYs4F6LbhpJCC9duuR2W/Xf//7XJLUS1q5dS0R33nnnjz/+qB5/6aWXiKh79+42m03memw22/33309Ec+bMkTmv95Rt/Pnz57nXoo9GEsJhw4YR0dChQ9WDdrv9j3/8IxHNmDGDa2EKm83Ws2dPIpo3b556XKnzl56eLnM969evJ6KIiAinNwWfMGTIECIaPnw490L00RhCKLZVTZs2ddpWbdiwgYjCw8OdrtNw+fe//+32Lfz9998nuXX+rFar2B5v2LBBzoz6Urbx+/fv516LDnw+hMq2au7cuerxyspK8Qmzfv16rrW5iouLI6K4uDj1oN1u79OnDxHNmjVLzjJmzpxJcqs/6m7OnDks23gj+HwIRTUX122VqGpx3333meof6cKFCyEhIX5+fqdPn1aPy6zzp1R/1L2qgEzmfJP1jFElD8vLy/Py8ubOnWvEi6t99dVX169f37Bhg7jarnA4HFlZWR07doyKijJ6DQ2Snp7euXNn11UlJCRkZmbecccdoraKcb755pvi4mL51R91t3HjxhEjRoSGhv7hD38weq6FCxd2795dPJOsP4PCnZCQEBERYciK/7+WLVsGBgY2ggtlaWlpbkuPGSE4ODgtLc3tMtzWIDSn8+fPBwYGymkXFRERkZCQYNAPYsgnYV5eXs+ePQMCAjIyMu68807dX19tyZIlu3fvjouL27x5s6ETGerGjRudOnW6ePHirFmzRJ0l43z66adLly6NiIg4ffq0uK8jXL58OT4+vry8PDc318/PB7rHxsXFbd269ZFHHhENDozz/fffx8fH37x5MycnR1yA0JnusVYuM8ycOVP3F3el3CH817/+JWE6g8i
8zFDHpazf/e535CPfsrQuNRtkxowZ5O6ZJ13oH0L5F9znzZtHRJGRkeZ/BNkt+RfctW7qbNy4kcx0U0eL8tzM/Pnz5cyoPPNkxE0dnUPIcutZeQt3emrUVzz11FNE9PTTT8ucdPjw4UQ0ZMgQ9aCpHm+og3iC1PXxI0MZ93iDziHkeghLfCF0PT9hflxnKbTOTygP+jndRDEP5SzFli1bZM5r3IN+eoaQ93FkcZLwxRdflD+1x5RtFcupQq2ThKNHjyai6Oho+Uuqj+nTp5P2qcKEhITmesjLy3N9cYMeedczhG63VXa7Xc5R6Ly8PJO/hbt69913ie9gjnL4a82aNepxMxz+0qJ1OFMhHiv1ntZ/WrfbeC/pFkKtbVVmZqbFYpFTyXzs2LFE9Pjjj0uYy3tKpRnGI6rKMWin6jK8x6Dr8NhjjxHRM888o/UbKisry/Wg9X3KiGPQ+oRQq1iD5DbxWnXWzMkkNdfc1lnjLQiixej+BfWke0EQfUKoVbZI/oPCWhVHzcY8ZYtExVHzlMbSYp7qo7qXxtIhhFoF/FgeFFZqb7/11lvSJvWAqQr4madIZB2WL19OpqnDrW+RSB1CqLWtevLJJ4koPj7e+yka5OOPPyZ3XSjMw2ylbJVtvFMXCvnlkrUoHSl27tzJuxKFjuWSvQ2h1rbqH//4B/G1iRf9mCZNmiR/6p+l9GZ67bXXuNfyk2XLlpG7fkwyGwfUYeLEiUTUr18/3mWo6dg4wNsQum1vUltbK06XLFmyxMvX94xWZ0IzEO1NzNalsLq6WnQm1Gqh49RRQybxgWzCLoV6tdDxKoRajb7Egdp27do5tYmXacqUKWZ4C3eibKtM2K9X9OjVaibH+GVMfDVNTExkmb0OejWT8zyEWi0vlTbxH374oTcr85JJluHE5C0v3bZV1eqoIUd2djaZuHO9Lm1VPQ+hVvNnrTbx8qWlpbF/IKuZv/mzso3Pz89Xj3PdoFNuV77zzjsy560/XRqMexjC0tLSFi1aENHf/vY39bipvoyxfzV1MmDAACKaNm0a90LqMnXqVLfvoW47ahht8eLF4sGdmpoamfM2yCeffEJEYWFhV65c8ewVPAzh+PHj3W6rzHZZkvcirdr27dvNvK1SKNv47Oxs9fjPPrSpO+UR1r///e9yZvSYKIYwYcIEz/64JyHMz8/39/cPCAg4ceKEetycN+gGDRpERMYVCKmPqqoqUb5p1apVjMuop5UrVxJR27ZtnbbxWh01DBIfH09ETz75pIS5vHTq1KkmTZr4+fkdOXLEgz/uSQgffPBBIkpKSlIPmvZRlfo8uKPLI791nPVMTU0loi5duph5W6VQtvGLFy9Wj5eXl4eHh991112FhYVGr0FmDUhdTJs2jYj69OnjwTtUg0O4bds2sa0qLS1Vj5v5oc26H2G9evXqzxxrqZ8ePXq4nb2oqEhsq/bs2WPwD6qbffv2ud3GHzt2TMKNCvnVkL1XVlZ22223EdH27dsb+mcbFsIbN260bduWiFavXq0eN/nxBeUwh9sCIWVlZbocA+3bt6/b2UeOHElEsbGxxv6QeouJiSGiUaNGyZ9afpkiXaxatYqIWrdu3dDToQ0L4aJFi9xuq8x/kC89PZ04+p8o26qzZ8/KnNd7586dE9v4nJwcmfMqZYoyMjJkzuu92trarl27ElFqamqD/mADQlhYWNisWTPXbZVPHGln6QRmt9t79+5NRLNnz5Y2qY5EPU/JLSu4yhTpQmzjQ0JCGlQxqAEhHDFiBBENHjzYadxXirvI74mZmZnpi9sqhbKNz8rKkjMjb5kiXcTGxhLRyJEj6/9H6hvCgwcPWiyWoKAgp22Vb5U5k9kduqKiolWrVkSUmZkpYTqDiH4VERERVqtVwnTs/cO9p2zj6/8+Uq8y+A6Ho1evXrm5uSkpKeJqu2Lr1q2JiYmLFy8WB0NN7vLlyx07dqyoqFixYkW3bt0MnWv9+vUZGRk9evQ4dOiQT1S
Vd0v5p09ISBgzZoyhcx0/fnzy5MlNmzY9deqU+Froo1JSUpYsWdKAf/r6JLW8vLxfv36hoaFu3w6vXbvmQ9v3iRMnim+2RgsJCQkICPjiiy+4f2JvffHFFwEBASEhIRL+0po1a+b0+LgvslqtoaGh/fr1q+fXkID6/NXY7fbc3NyKiorjx4/36tXL6VeN6hdlAIfDcezYsR9//LFNmzZGd6opKCiorKzcv3+/eLbBd+3fv7+2tjYsLKx79+6GTlRYWHj+/Hnx1LHFYlH/UkFBwWeffZaUlGToAhrq/Pnz2dnZU6ZMCQwMVI8fP37carXm5uba7fZ6vVA9w+3T16wUmzZtIlnfYJUakD7dtk3pt6NjhT8tSr+HzZs3q8evXbvWvHlzi8Vy8OBBo9fQIKLQ7vPPP68e9OA6fH1DyNJkQl9Ky4q1a9fKmVEUoh0+fLic6YwwbNgwIho6dKic6d577z1y12QiJSWFiHr06GGez4ADBw5YLBbXuxEetKxowC0KH32OQaE0b5L2Dym/3ZK+tJo3GUer3ZLZ7uArhXYXLVqkHrdarR40b2pACH3xiT4FVxtDmY0H9aXVxtBoWo0Hs7KyzPMZoG+h3YY9tuZzz7Yr4uLiiCguLk7yvJWVleIt3Cc6b6qtW7euodsqvQwdOpSIhg0bph40z+NHSqHdbdu2qcc9LrTb4FMUPnTKSyG5q6uTDRs2kC903lRTrpFs3LhR/uxa2xaTfAaIi7SuhXY9Prna4BD60HlnwWaz9ezZk4jmzZvHsgBf6byplpycTBLP77rS+gI/atQoIoqJiWFZlUP7/K43NRw8OdS7ZMkSMn3lD8XatWvdXnCTSem86RPb+G+//VZUspDZv8CJcil73bp16nHlM4DrcKaoZDF+/Hj1YG1trShI9/LLL3vwmp6E0Pw1sBTKtmrTpk28K0lISCCiQYMG8S6jPqKjo4lo9OjRvMvYuHGj25u6ovoTS5kCUdPJtdCul3X9PCz0ZPJqkArJZVHqYObOm2piW8Xefsyh2sYnJyerx5WCPStXrpS5HqW64RtvvKEe977Cred1R01bF1khv0BY3ZTOm6bdxivbqqVLl3KvxeHQPqrKUrru9ddfJ6L27dtXV1erx72v9e55CJUOAewte7SwlMqsg7KNT0tL416Le2+//bbYVpmnT8bo0aOJKDo62mlcFHGV0wHa4XBcuXJFVLw3otCuV70oJk2aRCbrlaMwSVdXJx9++CGZqfOmmrKt2rFjB/daflJcXCxOCDht4yWXM3/22WeJaMCAAU7jf/nLX4joueee8+bFvQqhCbvGCbztE+pmqs6bapMnT/ZyW2WQpUuXkrtCftIae4guaMYV2vW2Ndpbb71FpumfqjDnqgTzdN5UO3nypNhWmWpVglLSdsWKFepx5TNg165dhi5A9AN1Oi2htSoPeBtCpZP4q6++6uVL6cUMLfXqZpLOm2piWzV58mTuhbj30Ucfuf3MEc0eDX23/eCDD8hdZ+xXXnnF7eezB3Rol222b18m/C/uxGxvE1r/xU3F7duE0W2Pb9y40aZNGzK40K4OIXQ4HI8//jgRjR07VpdX84Y5N3uuzLNh1nFbZSitDfPevXvFZ4DTDXRdLFy4kIi6du1aW1urHhfldp544gldZtEnhOa5I2fayx5OzHPpSOuyhwlpXToaOHAgEY0bN07f6QoLC8Vz5E6FgnQvtKtPCB0Ox/Tp04nogQceYHw2xcw3AFyZYRuvdQPAnLRuohQUFAQGBvr5+R09elTH6bT6F+heaLdeJQ/rw2q1duzYsaioaPPmzeLwnpPLly+Le8Fe6tq1q6i54OTmzZv33nvv2bNn09LSxNdC84uOjt65c+eYMWPE4T1XCxcurKys9HKWkJCQuXPnuv2lMWPGpKenR0dHi6+F5peWljZlypR27dp9/fXXQUFByvicOXMCAwOnT5/etGlTvebKz8+fMWPG6tWrRf8VYfPmzcOHD7/99tvPnDm
jW4kzvdLs0C4QIhw+fFiXBQ8ZMsTt7OZ/KMzVz27jxVMaXgoLC3P74j7Rv8AJ74N1BpUp0u2TkIjsdntUVNThw4fnzZs3f/58p18tLi4W5Zy91LlzZ/GYv1pJSUmHDh2uX7++e/ducRnNVyQnJy9btqxXr17i5LHTry5fvryqqsrLKYKDg0X3PDWHw9G3b98DBw4kJyeLq+2+Yt++ff369WvevPnp06fFERlp5s+fv2DBgsjISHG8WLfX1THQDr4z7D50UMgJ12ErrYNCPoHlsJVxZYp0DqFDo0CIoXzryKwr+ceOtY7M+gqWY8fGlSnSP4SS65r5YvEIJ/ILcMiv/qg7yQU4DN3i6R9Ch9x/Y18so+RK5jaeq/qjvmSWojL6XVLPCzOKysrKTp06Xbp0qVu3bqI4nEEcDkdOTk55efm6deuM7hlktGHDhm3ZsqVVq1biOSzjnDx58tKlS3FxcaKtne9av3792LFjb7nllqioKNdrWjoqLi7Oz89v1apVQUGBIY1xjEi2w+HYsWOHOG1otMjIyKSkJN/dVikuXLjgeg3TINOmTWtQK1lzstlsSUlJkZGREv7GJk2aZNwxS0M+CYWSkpK8vDyDXlxx6623RkVFGT2LHDabbc+ePRImevjhh/39/SVMJEFOTk5ZWZnRs3Tv3j08PNygFzcwhABQH77aQRag0UAIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjA7P8AbMnzBzLh+CYAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO2deVgT5/bHJ0MwYQ+yqVgVC21/eC2b4IJeK2AVDFVsg8imuCHWi1rbq5bLRWvdalW0POBWFS+gEBDZ3ICKgqCoLCq4I0pRUTAgW4Asvz/GO52LJLIkM1nO5/GPOfhmzsnyzUxmvu95aWKxGAEAgDpQqgsAAHUHRAgAFAMiBACKARECAMWACAGAYkCEAEAxIEIAoBgQIQBQDIgQACgGRAgAFAMiBHpFV1fXixcvqK5CNQERAtLg8/kZGRnBwcHDhw9ftmwZgiAZGRngN5YtNHhBgfdpb2/PycnhcrlpaWlv377F/shgMNhsdkpKyu7du9esWUNthaoEiBD4i7a2ttzcXC6Xm5qa2tLSImkYg8EoLCy0t7cnszYVBkQIIA0NDenp6SkpKTk5OR0dHZKGoSgqEomwbUtLy5KSEj09PbJqVGVAhOpLQ0NDVlYWl8u9cOFCZ2enpGEmJiYzZ87kcDh8Pt/b2xv/e1BQ0JEjR0ipVNURA2pGTU3NgQMH2Gw2nU6X8sEYMWJEaGhofn6+UCjEHxscHEwcExcXR+ETURngSKguPH369PTp01wut7CwUMqbbmFh4enpyeFwnJ2daTRat//l8/njx4+/desWFurq6paUlFhZWcmxbjUARKjiVFVVZWRkfFB71tbWnp6ebDZ78uTJ0ndYUVHh6OjY3t6OhY6OjgUFBYMGDZJl0eoGtQdiQE7cuXMnIiLCwcFB+rtvbW0dERFRWVnZp51HRUURd7JhwwY5PQs1AY6EKkVFRQWXy01MTLx3756UYdbW1hwOx8/Pr99nkl5eXqdPn8a2URQ9f/68m5tb/3YFgAiVHpFIVFpampGRER8f/+jRI0nDUBSdOHEih8P55ptvzM3NB5iUx+PZ2to+e/YMC83MzMrLy83MzAa4W/UERKisCIXCoqIiLpebnJz8/PlzScM0NDQmTJjA4XC8vb2HDh0qwwLy8/OnTZsmFAqx0N3dPSsr6/1rOcCHofh0GOgjAoEgPz8/NDR0yJAhUt5WBoPh5uYWGRlZV1cnv2LCw8OJSSMjI+WXS4WBI6FywOfzs7OzMzMzU1NTX79+LWmYlpaWq6srh8OZM2eOvr6+vKsSCoVubm55eXlYyGAwioqK7Ozs5J1XxQARKjQ9GqnfR1tb28XFhcPhzJ07V1dXl8wKa2pqbG1t37x5g4VgZ+sPVB+KgR5obW1NT08PCAiQrihDQ8OAgICkpKSWlhYKq01OTiZWtXjxYgqLUUZAhApEQ0NDbGwsm81mMBhStGdkZBQQEJCent7R0UF1ye9YunQpscL4+HiqK1Im4HSUeurr68+cOcPlcs+fP9/V1SVpGG6kdnd3l277JB8+n+/k5HT79m0sNDAwKC0ttbCwoLYqZQFESBk1NTVnz57NyMg4d+6cQCCQNGzkyJGzZ8/mcDiTJk1CUcXthHDnzh0nJyeine3KlSuamprUVqUcUH0oVjuqq6sjIyN7tEcTsbCwwCYxiEQiqkvuLfv27SM+hbCwMKorUg7gSEgSvTdSczgcT0/PD9o+FRCxWOzl5ZWWloaFKIpeuHDB1dWV2qoUHxChfMHMnJmZmTdv3pQyDNOej4/PZ599Rlpt8qCbnc3c3LysrMzY2JjaqhQcEKFcwLR38uTJ+/fvSxqDoqidnR2bzR6IkVoBuXz5souLC9jZ+gClJ8MqhVAovHHjRkREhKWlpZQXHEVRZ2fnyMjIP//8k+qS5UVYWBjxKe/bt4/qihQaOBIOlL4aqefNmyfd9qkCCASCqVOnFhYWYiGDwbh69aqtrS21VSksIMJ+gmsvKSnp5cuXkoYxmUw3NzdPT885c+aYmpqSWSG1dLOzWVlZlZSUkGypUxqoPhQrGe3t7ZihjMViSXlVtbS02Gx2bGxsU1MT1SVTBpfLJb4mS5cupbo
iBQVE2Cva2tow7UmfmqCtrY1pr7m5meqSFYLFixcTX5+EhASqK1JE4HRUGo2NjdnZ2RkZGdI7UhsaGrLZbA6H8+WXX0q3faobra2tjo6Od+/exUIWi1VaWjpq1ChKi1I4QIQ98ObNm8zMzA92xTUyMvLw8OBwODNmzIB2Y5LoZmdzcnIqKCgAO9v/QPWhWIF4/fo1NolB+kcEW58oPT29q6uL6pKVg8jISOILGB4eTnVFigUcCZGamppTp05lZmbm5eX1xkj9Qdsn0A2xWDxnzpz09HQsRFE0OzvbxcWF2qoUB/UVYXV1dVpa2gA7UgO9pL6+3sbGBr+PCnY2ImonQnUwUismly5dcnV1xe1sHh4emZmZ8L2GIGrzmxDrSG1tbS391cA6Ut+9e5fqelWTDRs2EF/tqKgoqitSCFT8SKjORmoFRCAQ/P3vfy8qKsJCBoNx7do1GxsbaquiHBUUoUgkKiwszMzMTElJkdKRGjdzyqQjNdBLnj17Zmtry+PxsNDa2vr69eva2trUVkUxVB+KZQbeFXfYsGFSnq+GhgY2ieHFixdUl6ymJCUlEd+R4OBgqiuiGKUXIa496QshMJlMNpt94MCBV69eUV0yIA4KCiK+OydOnKC6IipR1tNRrCM1l8tNT09vamqSNIzkjtRKR2dnZ3Fx8QfXJJQ5ra2t48aNw5eOUnc7G9XfAn0DN1JL7/EMRmrpYHNBli1bZmJiQqPRnj59Sn4Nt27dYjKZ+Fs2efJktXUgKYcIeTxeUlJSQECAjo6OFO1hHanT09P5fD7VJSsikuaC7Nmzh5J6du/eTSwjIiKCkjIoR6FFiHeklm6PxjtSd3Z2Ul2yIvLBpvqTJ0+mpDCRSOTp6YmXgaJobm4uJZVQiyKKsJdG6o8++giM1FKor68/fPiwh4eH9K8wMzOzkJAQqrqbvnr1irhq4vDhw+vr6ymphEIUSITPnj2LjIx0c3OT3uN95MiRStcVl0yUbi7IxYsXNTQ08MK8vLyorYd8qBfhkydPetORevTo0aA9KTx79uzAgQNsNlsZv8LWrVtHLDI6OprqikiFMhE+fvwY056UTwzyXzPnjRs3qKpTwVGNpvpdXV0TJ07Eq2UymWVlZVQXRR5ki7BPRup79+6RXJ6ygH+FSdeeEn2FPX78mHjN1traurW1leqiSIIkEWLa++STT6R8YlAUdXBwiIiIePjwITlVKR3Yy/jB2VVKOhckMTGR+CxCQkKorogk5ChCoVCYn5+/bt26jz/+WMonBjdz1tbWyq8YpQbT3qefftqbr7AHDx5QXW//WbBgAfFJnTx5kuqKyED2IsTNnMRLz1K0B0bqHsG/wtSqqX5LSwvxu4bFYlVXV1NdlNyRpQizs7ODgoKMjIykfGi0tLTmzJlz/PhxHo8nw9QqA8wFuXnzJvHG5pQpUwQCAdVFyRdZijAwMFCK9qAjtRT6Ohekrq6O6pLlyK+//kp81hs3bqS6IvkiGxHyeLxz585t3Lix24eGxWJxOBwwUksCmur3iEgkYrPZ+NNHUfSPP/6guig5IhsR5ufnEz80gwcPBiO1FKCp/gdRKzub7EU4ZswYlT+J7x/4XBDpixPBXBCM8+fPoyiKvyxz586luiJ5IXsROjs7y2SfKgPMBek3P/zwA/El2r9/P9UVyQUQobxQOiO1AtLZ2TlhwgT8tWIymeXl5VQXJXukmX2BfgBN9WWIpqZmXFycvb3927dvEQTh8/m+vr7Xr1/X0tKiujSZIhMpw5EQ5oLIj252tm+//ZbqimQMiHBAqJ6RWjEJCAggvp6pqalUVyRL4HS0P2CNvblcbmVlpZRh2IIWPj4+n332GWm1qSTR0dHFxcV4G/VFixbZ29uPGDGC2qpkBYiwD/Spqb6/v7902yfQe3R1dePj4ydNmoSt2crj8fz9/btNyVdiZHI8VeHTUZgLojj88ssvxNd88+bNVFckG0CEPQNGagVEJBLNmjULf/HpdHpBQQH
VRckAOB39H4RCYVFREZfLTUxMrKurkzSMyWS6ubl5enp6eXmZmJiQWaE6Q6PRjhw5YmNj8/LlSwRBBALB/Pnzy8rKBg8eTHVpAwJEiCDQVF95MDU1PXbsmLu7u1gsRhCkpqZm2bJlycnJVNc1MGRyPFXS01Foqq+krF27lvgGHTp0iOqKBoQ6irCXTfVhLojC0tnZOX78eOK3ZGVlJdVF9R81Oh198+ZNZmYml8u9cOECdqW7R4yNjd3d3TkczsyZM6XbPgGq0NTUjI+Px+1sbW1t3t7excXFympnk4mUFflICE31VZVjx44R38HQ0FCqK+onKitCaKqvDvj7++NvJY1GO336NNUV9QdVOx2trq5OS0vjcrmFhYViyeufjh49ms1mwyQGZScmJqa4uPjBgwcIgojF4qCgoLKyMqWzs6mICKuqqjIyMrhc7pUrV6QMw8ycnp6eH+yfCygFmJ3N2dkZt7MFBgbm5uYqmZ1NJsdTqk5Hoak+IBaLt2/fTny7t2zZQnVFfUMpRQhN9QEiQqFw+vTp+FtPp9OvXLlCdVF9QPlORy9fvjx16lRJ/0un06dOnfr11197eXkNGTKEzMIAqkBRNC4ujmhn8/HxKS8vNzQ0pLq0XoF+eIiC4ezsbGpq2u2PuJG6pqYmJycnJCQEFKhWmJqaHj16FL/GhtnZqC2p9yifCDU0NGbPno1tYx2pY2Nj6+vrCwoKVq1aBdpTW2bOnLl69Wo8TE5OPnLkCIX19B7lOx1FEMTHx6epqenrr7/28PCQ3sMTUCt27Nhx5cqV4uJiLAwNDZ00aZLitzWgiSXfTOs9BQUFU6ZMwbadnZ0LCgoGvk8A6AePHj2yt7dvbm7GwrFjxxYXFzOZTGqrko7ynY4CgBQsLS337t2Lh7dv396wYQOF9fQGECGgagQFBfn5+eHh3r1709PTKazng4AIARVk//79VlZW2LZYLF6yZMnz58+pLUkKIEJABcHsbPjiH69fv/b19RUKhdRWJQkQIaCaODo6EhfMvHTp0s6dO6krRxogQkBlWbdunZubGx6Gh4cXFRVRWI8kQISAyoLZ2fAVyAUCgb+/v5RGXlQBIgRUGTMzM6KdraqqaunSpdSW9D4gQkDFcXd3Dw0NxUMul9utLwblgAgB1WfHjh12dnZ4uHLlynv37lFYTzdAhIDqw2AwkpKS8O6yra2t3t7efD6f2qpwQISAWmBpablnzx48vH37dlhYGIX1EAERAurC4sWLfX198XDPnj0ZGRkU1oMDIgTUiOjoaAsLC2xbLBYvXrz4xYsX1JaEgAgBtcLAwCAxMRFvA43Z2UQiEbVVgQgB9cLR0TEiIgIP8/Lyfv31VwrrQUCEgBqyYcMGV1dXPAwLC7t69SqF9YAIAbUDRdHY2FhjY2MsFAgEfn5+2Noy1NRDVWIAoBBzc/Pjx48riJ0NRAioKe7u7itXrsTDpKSk48ePU1IJiBBQX3bu3Glra4uHK1asuH//PvllgAgB9QWzs+FdM1tbW/38/KQsICsnQISAWmNlZbV79248vHnzJvl2NhAhoO4sXbp0/vz5eLhr167MzEwyCwARAgASHR09atQobJt8OxuIEAAQFotFtLO9evVq4cKFpNnZQIQAgCAI4uTkFB4ejocXLlwg/laUKyBCAHhHWFiYi4sLHv7444/Xrl0jIS+IEADegaLo8ePHcTtbV1cXOXY2ECEA/IW5uXlsbCxuZ3v8+PE//vEPeScFEQLA/+Dh4bFixQo8PH78eFxcnFwzgggBoDu7du2ysbHBw5CQkAcPHsgvHYgQALrDYDASEhK0tbWxsKWlxdfXV352NhAhAPSAtbV1Nzsb8QaGbAERAkDPBAcH+/j44OHOnTuzsrLkkQhECAASiYmJIdrZFi1a9PLlS5lnARECgERYLNZ//vMfOp2OhZidTSwWyzYLiBAApDF58uR//etfeHj+/HliJ2+ZACIEgA8QHh5OtLOtX79etnY2ECEAfAC
sO5uRkREWdnV1+fv7Nzc3y2z/stoRAKgww4cPP3ToEB4+evSIuObhAAERAkCv8PLyCgkJwcNjx47Fx8fLZM8gQgDoLbt37yba2ZYvXy4TOxuIEAB6C5PJjI+PJ9rZZNKdDUQIAH1gzJgxxAVkbty4QVxepn+ACAGgb4SEhMybNw8Pf/nll+zs7IHsEEQIAH1m//79I0eOxLZFIlFAQMBA7GwgQgDoM5idTUNDAwvr6uqCgoL6bWeTjQgZDHTMGFPs30cf6ctknwCgyEyZMoVoZzt37tzevXv7tyuaTNyoLS0F9+9PwbZ1dZ0//bRg4PsEAAVHJBK5ubldvHgRCxkMRlFRkZ2dXV/3A6ejANBPsO5suJ2to6PD29u7H3Y2ECEA9J/hw4cfPHgQDx89erR69eq+7gRECAADYu7cucHBwXh45MiRhISEPu0BRAgAAyUyMvLzzz/Hw+Dg4IcPH/b+4SBCABgoTCYzISFBS0sLCzE7W1dXVy8fDiIEABnQzc52/fr1bdu29fKxIEJA1cjKypJ5G5jesGLFilmzZmHb06ZNW7JkSS8fCCIEVIqkpCQ2mz19+vTnz59TWEafvgVAhIDq8Pjx46VLlyIIkpuba29vf+nSJTKzR0dH441J8/LyDh8+3MsHgggBFUEgEPj7++MrmTU1NRkaGpKWvaKi4vvvv8dDR0fHDRs29PKxIEJARQgLC7t69SoedrttIFf4fL6vr297ezsW6urqxsfH44tvfxAQIaAK5OXlES9OdruBLm9Wr15969YtPDxw4ICVlVXvHw4iBJSe169f+/r6ikQiLOxmJZM3p06dOnDgAB4uWrTI19e3T3sAEQLKjVgsXrx48YsXL7Cwm6la3vz555/Lli3DQ0tLy8jIyL7uBEQIKDd79uzJyMjAw3//+9/Tpk0jJ7VIJAoMDGxoaMBCBoORlJSkp6fX1/2ACAElpqSk5Mcff8TDbhNt5c1PP/2ETyZEEGT79u39mEyIgAgB5aW1tdXPz6+jowMLu7WckDf5+fk///wzHs6cOXPVqlX92xWIEFBWVq5cee/ePTwkNl+SN42NjQEBAUKhEAvNzMyOHj1Ko9H6tzcQIaCUcLncY8eO4WG3NoTyZvny5U+fPsW2URT9z3/+M2TIkH7vDUQIKB9VVVWYPQ3D2tqaeJNQ3sTExCQmJuLhP//5z+nTpw9khyBCQMnA7GlNTU1YiM3lw1vTy5tu9rRx48Zt2rRpgPsEEQJKRnh4eFFRER52W6RFrvD5fD8/v7a2NizE7GmDBg0a4G5BhIAycenSpZ07d+LhrFmzli9fTlr27777rry8HA/379//ySefDHy3IEJAacDsafg1yeHDh8fGxvb7mmRfSU1NjYmJwcOFCxf6+fnJZM8gQkA5EIvFS5YswafqdlvCWt78+eefxEtBlpaW+/btk9XOQYSAcrB379709HQ8DA8Pd3FxISe1SCRasGABbk/T1NSMi4vrhz1NEiBCQAm4ffs2cY7s5MmTybSnbd68+Y8//sDD7du3jx8/Xob7BxECik5ra6u3tzefz8dCzJ5Gp9PJyV5QUEC0p82YMWPNmjWyTQEiBBSd0NBQoj0tJiZm1KhR5KTG7GkCgQALTU1Njx07JvNLQSBCQKFJTk4+cuQIHgYHB/v4+JCWPSQkpLq6Gtum0WhHjhwZiD1NEiBCQHGpqakhTpm1trbevXs3adkPHDhw8uRJPPzhhx/wtqKyBUQIKCgCgcDHx4fH42Ehg8Eg055WWVn53Xff4aGDg8PmzZvllAtECCgoERERhYWFeLhr1y7S7GkdHR2+vr5Ee1pCQsLA7WmSABECisilS5d27NiBhx4eHitWrCAt+9q1a4n2tJiYGJnY0yQBIgQUDh6PFxgYiNvTzM3NybSnnTlzJjo6Gg8DAwP9/f3lmhFECCgWYrE4KCjo2bNnWIh1TzM2NiYne21t7YIFC/CVJD7++OPffvtN3klBhIBi8dtvv6WlpeFhWFg
Ymfa0wMDA+vp6LNTU1IyPj9fX15d3XhAhoEDcuXNn/fr1eOjk5BQeHk5a9i1bthDtaVu3bpWtPU0SIEJAUWhra/P29sZXdGCxWImJib1f0WGAFBcXE29CfPnll8RbFHIFRAgoCqtWrbp79y4eRkdHk2lPmzdvHr7ANWZPQ1GS1AEiBBSClJQU4oJ+S5cunT9/PmnZV6xYQbSn/f7770OHDiUtO4gQoJ5u9jQrKysy7WmHDh06ceIEHq5du5bNZpOWHQERApQjEAjmz5//5s0bLMRWdNDV1SUn+8OHD7vZ07Zs2UJOahwQIUAxmzZtunLlCh7u3LnT1taWnNQdHR3e3t4tLS1YqKOjI5PuaX0FRAhQyeXLl7dt24aH7u7uK1euJC37Dz/8UFZWhofR0dGffvopadlxQIQAZfB4PBmu6NBXzp49GxUVhYfe3t6BgYHkpO4GiBCgjG72tPj4eDMzM3JS19bWBgYG4va00aNHHzp0iJzU7wMiBKghKiqKaE/bsGGDq6srOamx7mm4PY1Op5NjT5MEiBCggIqKin/+85946OjoGBERQVr2bdu25ebm4uGWLVsmTJhAWvb3ARECZMPn8319fXF7moGBAZn2tOvXrxOXcPniiy+IC7xQAogQIJtVq1bdunULD6Ojoy0sLMhJ3dTURLSnmZiYJCQkkGZPkwSIECCVU6dOHTx4EA8XL17s6+tLWvYVK1Y8efIE2ybfniYJECFAHu+v6LBnzx7Ssv/+++8JCQl4uGbNGk9PT9KySwFECJCESCQKCAjoZk+T4YoO0nn06BGxc/bYsWPJt6dJAkQIkMSmTZvy8vLwcMeOHXZ2duSkxuxpzc3NWKijo5OUlMRkMsnJ/kFAhAAZ5OfnE4887u7uoaGhpGVft25daWkpHkZFRX322WekZf8gIEJA7lBuTyOuJcjhcBYuXEhO6l4CIgTkzvLly58+fYptoygaFxdHmj2trq4uKChIQexpkgARAvIlOjo6KSkJD9etW+fm5kZOapFI5O/vX1dXh4V0Oj0uLs7AwICc7L0HRAjIkYqKCqIfZdy4cRs3biQt+44dO3JycvBw8+bNEydOJC177wERAvKimz1NV1eXzCmz169fJwp+6tSpP/zwAzmp+wqIEJAXa9asIdrT9u/fL9cVHYi0tLT4+fl1dnZiIWZP09DQICd7XwERAnIhNTV1//79eLhw4UI/Pz/Ssi9fvvzhw4fYNo1GO3z48LBhw0jL3ldAhIDsed+eRrxJIG+OHj0aHx+Ph6tWrfrqq69Iy94PQISAjMFWdGhoaMBCTU3NuLg4Mu1pq1atwsOxY8cSe9goJiBCQMb89NNPFy9exMPt27eTs6IDgiBdXV1+fn4Ka0+TBIgQkCUFBQU///wzHs6YMYNom5Y369atKy4uxsN9+/YplD1NEiBCQGY0Njb6+/vj9jRsRQfS7Gnnzp2LjIzEw2+++WbRokXkpB4gIEJAZrxvTxsyZAg5qV+9ekW0p3300UfEqcMKDogQkA379+9PTEzEw++//3769OnkpMbsaS9fvsRCOp1+8uRJQ0NDcrIPHBAhIAMqKyvXrl2Lhw4ODsS1/uTNzp07s7Oz8XDTpk2TJk0iLfvAARECAwWzp7W1tWGhrq5uQkICafa0Gzdu/Pvf/8bDqVOnrlu3jpzUsgJECAyUtWvXlpeX42FMTAxV9jRDQ8Pjx48rrD1NEiBCYEBkZWXFxMTgYWBgoL+/P2nZQ0JCHjx4gG3TaLSjR4+OGDGCtOyyAkQI9J/a2toFCxbg1yQ//vjj3377jbTssbGxcXFxePiPf/xj9uzZpGWXITT8FRwIYrFAKGx8t0eapoaGws2bBGSOSCSaPn36H3/8gYWampr5+fmkmWMeP35sb2//9u1bLPzb3/5WXFyspaVFTnbZQpfJXmg0Op1uLJNdAcrCzz//jCsQQZCtW7eSbE/DFaitrZ2UlKSkCkQQBBEDQN+5du0acfWIL7/8UigUkpa
deDsEQZBDhw6RlloeyOZ0FFArGhsb7ezsqqursdDU1LSsrIy0fvLnz593d3fHP7dff/11cnIyOanlBFyYAfpMSEgIrkCSV3R49erVwoULxcppT5PEgH4TikRtTU1ZTU1n29tvd3ZWC4XNCCLW0NDV1ByqpTVWX/9LFutrDQ3K1l4E5MHBgwdPnjyJh2vXrmWz2eSkFovFixYtItrTTpw4MXjwYHKyy4/+n442NByvrV3X1fVSyhg6fbC5+Q5j4yX9SwEoGpWVlY6Ojrg5xsHBobCwkDRzzM6dO4lLi27evPlf//oXOanlSj9F+Px5xIsXP/3PjmgMOp2FIDSBoEEs7iL+19ChEcOGbRxIlYAi0NHRMWHChLKyMizU0dG5efPmp59+Sk72mzdvTpo0CTfHTJky5eLFi0pnjumR/pyOvn2bjSuQRtMwMVlhZLRIS2ssjaaBIIhY3NnaWvzq1W883ruWry9e/KSnN01Pb6qsigYo4fvvv8cViCBIdHQ0aQp8354WFxenGgpEkH7donjwwO3GDQT7x+OdljTsxYut+LD796f2IxGgOGRlZRGn53p7e5OZPSAggPihTU1NJTO7vOnz6ahY3FlSooUgIgRB9PRcPvkkV8rYysqx7e0VWDB27JNBg0b1KRegINTW1tra2tbX12Ph6NGjS0tL9fVJuuSWlJQ0b948PPz222+joqLISU0OfT4d7ep6gSkQQRBdXemztmhGRgtfv47R0vpcS+vzvqodUBBEItGCBQtwBdLp9Pj4eNIU+PjxY2L3xDFjxuzcuZOc1KTRZxGKxYIet3vEzOx7M7PvpY8BFJytW7fm5v51vrNly5YJEyaQk7qrq8vf3x+3pzGZzISEBCW2p0mgzzfrNTWH0Wjv/Epv316QdT2AYlFcXPzTT39dBv/iiy+IC7zIm7CwsKtXr+JhZGTk559/Tlp20ujPLYqHD798+/ZdNwFT05XDh+/GZQmoEk1NTXZ2dk+ePMFCExOT8gV52FMAAAz3SURBVPJy0swxFy5ccHd3F4ne/faZO3duSkoKOalJpj8ibG29ev/+ZLH4XWc7BsPK1PRbFstr0CDlm08JSMHX1/fEiRPYNo1GS0tL8/T0JCf169evbWxsXrx4gYXDhw8vKyszMjIiJzvJ9PNmPY+X/OSJv1jcQfwjg2Glr++qp+eqp+dCpyu9mUjNOXz4MPGKyHfffbdr1y5yUovF4q+++iozMxMLURTNycmZNm0aOdnJp/+2tba20tra9RJ+FqK6uhMMDTmGhvM1NUlaGBmQIQ8fPnRwcMD7ydvb2xcWFjIYDHKy79q1i/jLc+PGjREREeSkpoSBTmVqby9/8+ZEY+NpPv/++/+LolomJiFDh4ZraLDwP758uV1T05zF8iT+EVAcOjo6Jk6cWFpaioU6Ojo3btwgrZ98SUnJxIkTVdKeJgmZzSfs7PyzuTm3ufmP5uaLnZ01xP9iMq2trM5ivxhFopbyclORqJ1G09DRwY6W8zQ1SerTDPSGVatWEVcyO3r06MKFC8lJ3dra6uDgcP/+uy90FotVVlY2cuRIcrJThVwm9ba3l9fXH66vPywS8bG/MBiW1tblKKrN4yVWVfn8TwV/qZGjqam4KzmqCWfPnp01axb+qeBwOElJSaRlX7hwYWxsLB6ePHmS6JVRVeQ4s57Pf/DokUdHx2MsHD78VzOztVVV3jweV8IjUG1tOwMDtpGRP4NhKaeqACnU1dXZ2NjU1dVh4ejRo0tKSgwMSGrb1c2eFhISEh0dTU5qapFve4v29orKys8xm5uOzvjPPrva3l7O43F5vBQ+/56UqnR0HFmsuYaG3zAYH8uvPICISCSaMWNGTk4OFtLp9MuXL0+cOJGc7FVVVXZ2drg5xtra+vr169ra2uRkpxa595i5d29Ca+s1BEFQVMvOrg3/e0dHVVNTBo/HbWkpRBCJNTCZ1oaGHBbLU1vbQa51Alu3bg0LC8PDbdu2rV+/npzUAoH
g73//e1FRERYymcyrV6/a2NiQk51y+iZCsbijre1WW9uNtrYbxsbLdHQ+3OLu8WOvxsbT2La9vQCbc0iks7O6sTHtg2pkMEYbGLANDTm6us4IQtKSd+rD9evXnZ2du7rezcaeOnVqbm4uadck169fv2PHDjyMjo4OCQkhJ7Ui0DcRNjamP378rsmxsfGSkSMPffAhd+86trXdQBCETh9sY9MgZWRnZ01j46mmpszm5jwp1vBBg0ayWLNBjTKkpaXF3t7+4cOHWGhiYlJWVjZsGEkXyfLy8tzc3PClRb28vE6dOkVOagWhbyIUiVrLy81EolYEQVBUd8yYCulWNT6/sqLib9jxTV9/upVVrwzfAkF9U9MZHo/79u35bp0yiAwa9JG+vjuLxdbXd6fRZNPFWD3x8/NLSEjAtmk0WkZGxqxZs8hJrVb2NEn0bRYFiuqYmKzAtkWilkePPDo7n0oa3NVVW1XljZ9hDh7s28ssdLqxkVGgpWXG55+/HDUq1sCATaP10Eqos7Omvv7go0df3bo1pLo6sKkpQ4piASksWbLE3Nwc2161ahVpChSLxYsXL8YViKJobGysuikQ6ceFGaHw7d274zo63p26oKi2sfEiFmuOltbnGhqDxeIugeBlZ+ezpqYzr1/HCIV4o3KH//u/4n63ORUKG9++zW5qyuDxTmHH4R7R0DBksdiGhhx9/S9pNJI8VqpBfX39okWLqquri4uLmUwmOUn37Nnz3Xff4WFERMTGjRvJSa1Q9OfqaEfHw4cPZ3Z0VPVyPJP5f5988odMbDEiUXtzcw6Px21sPC0UNksahqLaenouhoYcQ8O5KKo78LzqgFgsfvPmDWkHotu3bzs5OfH57+wckydPvnjxIp2ujj8r+nmLQiBoqK3d0NBwBJ/Q1CMoqm1ismLYsE0oKuMbPiIRv7k5m8fjNjamC4VNkgvQ0tNzNTTksFhzoA2x4tDa2jpu3Lh7997dK2axWKWlpaNGjaK0KMoY0H3Crq7nPF5yS0t+e/udrq46kagFQVANDQNNTTNtbTtd3cmGht7yXiZNLBa2thbxeFweL7Grq07SMBRl6um5sVieLJYXnW4i15KAD7Jo0aKjR4/i4YkTJ3x8fKSMV21UZ0EYghqTu7qeSxoGxnHK4XK53t7eeBgcHLx//34K66Ec1REhAVFLS2FTUyaPl9LR8UjSIIIav9HUNCezPnXm2bNntra2PB4PC9XKniYJlRThX7S3V/B4XB7vZI/THf8Lbhz3YzCsyCtO/ehmT2MwGNeuXVMfe5okVFyEOP9VI5fPr5QyDLOqDh7sw2SSNIdVrfjxxx+3bduGh1FRUd9++y2F9SgI6iJCHDCOU8WlS5dcXV1xe5qHh0dmZiaxtb7aonYixOm1cdzCwMATrKoDpL6+3sbG5vnzdxfMzM3Ny8rKjI2Nqa1KQVBfEeKAcVzeiMXiOXPmpKenYyGKotnZ2S4uLtRWpTiACP+i18bx4fr6HmAc7z179+5dvXo1HoaHhxO7egMgwh4QCN40NWXyeNy3b7O79VYlQqcbGRh4GBpy9PVn9GgxBxAEuXPnjpOTU3t7OxY6OTkVFBRoakLL9r8AEUqDYBxPFYlaJA0D47gkWltbHR0d7969i4Vqbk+TBIiwV4hELU1NZ3i8lKamM1LVaHD79jIdnQnu7u6qt3hQP1iyZMnvv/+OhwkJCfPnz6ewHsUERNg3MON4Y2NmY2OqQPD6/QH+/qPv3avS0tJydXXlcDhz5swhbSk/RSM5OZnD4eDh0qVLDx48SGE9CguIsJ8QrKpJXV0vsT++fGnBZj8hDmMymW5ubp6ennPmzDE1NaWiUmqoqamxtbV98+YNFlpZWZWUlOjqwrSyHgARDhSicTw+/pNffsnrcZiGhsaECRM4HM68efOGDFFx47hAIJg6dWphYSEWMhiMq1ev2traUluV4iKLhe8BDOH161dWr149YoS0vjsaGhrTpk2Lioqqra2lumB
5QWydiCDIvn37qK5IoYEjoVyoqKjgcrknT57El1V4HxRF7ezs2Gy2n5+flZXqGMcvX77s4uKC29Pc3d2zsrLAniYFEKF8wdSYmZl58+ZNKcOsra05HI6Pjw9pix/JCR6PZ2tr++zZMywEe1pvABGSRFVVVUZGBpfLLSwslPKaW1tbe3p6stnsyZMnk1meTBCLxV5eXmlpaViIouiFCxdcXV2prUoJoPRkWB2prq6OjIx0dnaWfoZmYWERGhqan58vEomoLrm3EBdUQxAkLCyM6oqUAzgSUkZNTc3Zs2czMjLOnTsnEEg0jo8cOXL27NkcDmfSpEko2s+ekSTQzZ7m6Oh45coVsKf1BhAh9dTX1585c4bL5Z4/fx5fDeJ9TExMZs6cyeFw3N3dFa01IJ/Pd3Jyun37NhYaGBiUlpZaWFhQW5XSQPWhGPiLhoaG2NhYNpstfXV4IyOjgICA9PT0jo4Oqkt+x9KlS4kVxsfHU12RMgEiVERaW1vT09MDAgKkW0wMDQ0DAgKSkpJaWloorDY5OZlY1eLFiyksRhmB01GFpr29PScnh8vlpqWl4Qtovo+2traLiwuHw/Hy8tLT0yOzwm72NEtLy5KSEpJrUHZAhMoBn8/Pzs7OzMxMTU19/boH4zgGycZxoVDo5uaWl5eHhQwGo6ioyM7OTt55VQ2qD8VA3xAIBPn5+aGhodINqAwGw83NLTIysq6uTn7FhIeHE5NGRkbKL5cKA0dCZUUoFBYVFXG53JSUlNraWknDcOO4t7f30KFDZVhAfn7+tGnTwJ42cECESo9IJCotLc3IyIiPj3/0SGLHcRRFJ06cyOFwvvnmG3w1wn7TzZ5mZmZWXl5uZmY2wN2qJyBClQKzqiYmJuILHvUIZlUdiHHcy8vr9OnT2DaKoufPn3dzc+vfrgD4Taia3LlzJyIiwsHhA52Lra2tIyIiKisr+7TzqKgo4k42bNggp2ehJsCRUMWRuXG8oqLC0dGRaE8rKCgYNAiazfUfEKG68PTp09OnT39QjRYWFp6enhwOp0eLOZ/PHz9+/K1bt7BQV1e3pKRElSZDUgO1B2KAfGpqag4cOMBms6UbUEeMGIFN4xAKhfhjg4ODiWPi4uIofCIqAxwJ1ZeGhoasrCwul3vhwoXOzk5Jw3DjOJ/PJy7uGRQUdOTIEVIqVXFAhADS0NCQnp6ekpKSk5PT0SGx4ziKoiKRCNsGe5oMARECf9HW1pabm8vlclNTU1taJPY4ZjAYhYWF9vb2ZNamwoAIgR7o0TjOYDDYbHZKSsru3bvXrFlDbYWqBIgQkAZuHD99+vT48ePT09MzMjLYbDbY02QIiBDoFV1dXfX19bJ1nwIYIEIAoBjFbRwEAGoCiBAAKAZECAAUAyIEAIoBEQIAxYAIAYBiQIQAQDEgQgCgGBAhAFAMiBAAKOb/AY5akbglJ4eaAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deXhTVd4H8JPuC1sLQoHKDkIRWQoFqaCyL/F1Zp5B1pTBZ+wASkAFqq9oQEEqigSZAaoihk3hnedxDAo4tKDgUkra0r10oTt0owtt0iXLef84JdZSoElvcrJ8P49/jNPee361+TY35/7OuSJKKQEAflx4FwDg7BBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzpw9hNXV1VVVVbyrAKfmvCFMTEx8+umnZ8+ePXDgwLCwsIqKCt4VgZMSUUp512Bt5eXlW7ZsOXTokMFg6Natm1qt1uv1/v7+27ZtW716tZubG+8Cwbk41zuhVqvdu3fviBEjPvvsM1dXV6lUWlhYmJaWtmDBgqqqqnXr1j3++ONnzpzhXSY4Geo0zp8/P3r0aPZTz5o1KzU1tc1XR40aZfxqWloarzrB2ThFCK9fv75w4UIWsBEjRpw+fbrdb2tubpbL5d26dSOEuLu7S6XSmpoaK5cKTsjBQ1hdXR0REeHh4UEI6dGjR2RkZGNj44MPqaiokEqlrq6uhJCePXvK5XKdTmedasE5OWwI9Xq9QqHo3bs3IcTFxUUikZSVlXX88Pj4+GnTprE3zwkTJly6dMlypYKTc8wQXrx4cezYsSxCzzzzzLVr18w7j1KpHDRoEDuPWCzOy8sTtEwASh0vhIWFhRKJhMXm0UcfVSgUBoOhMydUq9WRkZFdunQhhHh7e0dERNTV1QlVLQB1pBCq1WqZTObl5UUI8fHxkclkDQ0NQp28uLhYIpGIRCJCSP/+/Tufbei82tof8vIkqamPJST4xse7JiZ2T08fV1i4vqEhg3dppnGEm/WU0n//+98bN24sLCwUiUR//etfP/roowEDBgg+UFxc3Pr162NjYwkhISEhe/funTJliuCjwENRqs3LW15d/X+t/j8XQgzsf4lEbgMGRPXq9SKX2szB+69AZ129enXq1KnsZ5k4ceLPP/9s0eHYfE+fPn0IISKRSCKR3Lp1y6Ijwr1u3tymUhGViuTm/lWtVhkMzZTqm5tvVVR8eu1ab5WKxMe7azSpDz+RbbDjEJaUlISHh7u4uBBC+vXrFxUVpdfrrTN0fX29TCbz9PQkhPj6+spksofe+QABJScPUKlIevoEg6Ht3aP6+liWz/z8v3OpzQx2GcKmpia5XN61a1dCiIeHh1Qqra2ttX4Z2dnZixYtYm/Cw4YNO3XqlPVrcEp6lUqkUpGiotfb/XJx8eaSEllt7Tkrl2U2+wuhUqkcMmQIe+mLxeLc3Fy+9URHR48ZM4bVM3PmzOTkZL71OIOkpD4qFcnNfYF3IcKwpxCmp6fPnTuXvdxHjRp19uxZ3hW10Gq1UVFRvXr1IoS4ubmFh4eXl5fzLsqR5ee/qFIRlUpUUvK2Xq/hXU5n2UcIb9++bWwl8/f3t81WMlYkWwnFitRqtbyLckxabXl6+jj22S8xseuNG0vLyw80NKTzrstMth7C5ubmNm8yFRUVvIt6kIyMjHnz5rG365EjR545c4Z3RY5Jr1ffuvVBcvIgFkX2T1JS34KClzWaFN7VmcamQ3j+/PnHH3/c+HErJcVu/uO2+eCak5PDuyL7ptc3VFd/W1mpuPdLGs21sjJ5Ts7ziYl
+d9MoKihYbTDYzWWIjYYwKyvLOPE4fPhwe5x4ZFO4rRdGcZnCtWt6vaamRpmXJ0lM7KZSkaSkPvfekzAyGLQ1NaczM6exKBYWbrBmqZ1hcyGsq6sz3oLr0qWL1W7BpaamJiUlCX7amzdvGm9m9u3b15o3M+2XTld7+/bxnJy/JCT4GN/c0tMn3rq186HTMAaDLitrHrtfr9WasG6GIxsKYetmFLb4qLS01GqjT58+3YwVTx2kUqlCQ0PZG3twcPDly5cFH8IB6HRVlZWK7GxxfLzn3ey5pKcHl5TIGhuzWn+nwaDTau87NVBd/R92eG3tD5avWgC2EsLY2NjJkyezl+nkyZNjY2OtOXpjY+P69evd3d0JIX5+fnv27GlubhZ2CIPBcOrUqYEDB7J+t0WLFuXn5ws7hJ3SaivvZs/DmL3MzNCyMnlzc/G933/jxtKEBJ+0tNH3O2FV1f+x89y5E2PJwgXDP4RFRUXGBQqBgYEcFyh0cBeMzmBLPby9vcndpR4ajd3f5jJPU1NRRUVUdrY4Pt6NZSY+3vVu9h7Ujltc/Cb7/vLyg/d+1WBozMycrlKR+Hg3rfa2xcoXEs8QtnlF2shSvQfvByUI2/m7Y31NTfllZfLMzFDWeqZSkYQEr+xscUVFVAc/wmm1t5OTA9kb5o0bK+rqLul01Xq9pqkpr6rqZEbGlLsTM69Y+mcRCp8Qtr42Y5P4NnVtxnZ86t69O7Hkjk8//vjjuHHjeF2BW1lj4417suednS2urFTodCZPGjc0pKemjmh9h7DNP3l5EoNB4A8UlsMhhCqV6qmnnmIvvgkTJtjsLEVlZaWld3ziOxdlBRpNakmJLD092BiPhAQflj29vlNXPQZDU2Xl4Zyc55OTByYkeMfHuyYm9khLG1tQ8I/6+l+N36bXqw2Gpk7/HJZl1RCy+Xr2smbz9TbYfdZGQkLC9OnT2Z+McePG/fTTT4IPwbaEs/5dGcth2UtNHWnMXmKiX16epKZGaTBY70fTaJJTU4OKijZabUTzWCmE927paV93rpVK5eDBg40Xzzdu3BB8CPvvT9Cr1aqSEllKynBj9q5d63k3exzejtTqq/HxHiqVqKZG+Dk2AVkjhEqlcujQofbew6XRaCIjI9kiRrbj0507dwQfpc20kO136ul0uurqiwUFLycl9TNmLzk5sLBQWlf3E6WcOxNu3dqpUpFr13o3N9/kW8kDWDaEGRkZ8+fPd6Ru5pKSEuPEpoWW87fuWXd3d7fNnnWdTnf58mWpVBoQEHDhwtS72RtYWCitq7tMqe1M9uqzsmarVCQraw73vwj3Y6kQtl7X4+fn52DreuLi4p588kn2x2XSpEm//PKL4EPY5uqthoaGb7/9NiwszM/Pz7hN0Y4dz5eUvKVWJ/Curn3NzaVJSQEqFSkt/ZB3Le0TPoRshesjjzzi2CtcDQaDQqEICAgwdsAUFhYKPkqbdcznzvHZskGj0SiVSolEwj7VM0FBQTKZTKVScSnJJLW1Z1UqUXy8e329Ld4HEjiEMTExxr0eZsyY4fB7PbAdn9hmp2zHJwE3OzXitaOHWq1m2WN7H7fOXkaGne3tWVi4QaUiKSlDzbgtaWmChdCZdz0qKChos+234ENYc2+rqqoqhUIhFovZXRN2DzM4OFgmk2VlZT38eJtkMDSmp49XqUheXhjvWtoSIITY/4+5cOHCE088wV61zz77rCUWRll0l8fKykqWPdbITghxdXUNDQ2Vy+XFxe00UtudxsbsxMSuKhW5ffso71r+oFMhbPO5CDvhsg4Y9nnYcguj2ux33MlpoaKioqioKLFYbHxOuDF7jvfbrKg4pFKRhIQuDQ3XedfyO/NDeOXKFeMm8CEhIb/99puAZdm1qqoq40MR/fz8IiMjm5oEvlXNmm/ZVv9sWqigoMCkM+Tn58vl8tDQUHa7hRDi5eUlFoujoqIcciLN6MaNZSoVSU8Ptp12NnNCiKejdERmZuaCBQvY6/u
xxx77/vvvBR/CjGfg5Obmtsmet7e3WCxWKBT21cNkNp2uJiVlsEpFios3866lhWkhZF0jeE5Yx50/fz4oKMjYAZOWlib4EPc+De7e70lNTZXJZMHBwcZJTh8fH5Y9J/wN1tfHxce7q1QutbX/5V0LpSaFEE/MNI91Fka1+1xUlr2RI0cas+fv7y+RSJRKpXNOnhndurXjbjsb/8+9HQ3hSy+9xH6Ltrz4yJZZYWGUTqfbv38/63dzdXVl80NMQEDAmjVroqOjHalvqXP0WVmzVCqSlTWXe5Ndh0J48eLFqVOn+vr62kjzlP2Kj4+fNm2a8c/ZpUuXBB+CLYzy8fHx9vYODAwMDw9XKpXI3r2am0vZMy1KS3fzraRDITxy5AghZOnSpZauxklY4cKe3da3xHWvI6mpOWML7WwupMPYpRR03nPPPZeWlsamuL777rugoKA33nijvr5e8IGMU6DQru7d5/fuvY5SbV7eCr2+jlcZJoQQBMQ2tsrMzJRIJI2NjR988MHIkSOPHDlC7f/p5fYlMHCXj8/4pqacoiIprxoQQp769+9/5MiR2NjYKVOmlJSUrFy5csqUKbGxsbzrciIikeeQISddXbvevv1lVdVxLjUghPyFhIT8+uuvrAEwLi5u6tSpYWFhpaWlvOtyFp6ewwMDPyaEFBSsbmzMsn4BCKFNEIlEYWFhOTk5MpnMw8Pj6NGjw4YN27p1a1NTE+/SnEKvXn/3919qMNTn5S2ntNnKoyOENsTX13fr1q0pKSlisVitVm/btm3MmDFZWRz+NjuhAQMOeHoO1mhUN2/KrDw0Qmhzhg8ffvr06ejo6DFjxhgMBuMWyWBRrq7dBw/+WiRyLy3ddefOeWsOjRDaqJkzZ8bHx//www/GlbVgab6+IX37vkOIIT9fotVa7zM5Qmi73N3djVtFgnX07fu/XbvO1GrL8vNXEWKl20UIIUBrLoMHK9zcet25c668fK+VhrTQefPz85OSku7cuWOh8wNYiLt7/0GDFISIiovf0GgSrTCipUK4YcOGcePGXbhwwULnB7Cc7t0X9O79cnFx4IoVb6jVaksPh8tRgHb07//hzp09v/nmv1KpxdvZEEKAdri4eB06dNjHx+eLL744ceKEZcey6NkB7FdQUNCePXsIIWvXrs3Ly7PcQAghwH2Fh4cvWbKktrZ28eLFWq3WQqMghAAPcuDAgUGDBl29enXr1q0WGgIhBHiQHj16HDt2zM3NLTIyMjo62hJDIIQADxEaGvr2228bDIYVK1aUlZUJfn6EEODhtmzZMmPGjLKyslWrVgm++wFCCPBwLi4uR44c6dmz59mzZ/ft2yfwyYU9HYCjYk98EIlEmzdvTkwUsp0NIQToqIULF65Zs6apqWnx4sV1dYLtzoYQAphg9+7dY8eOzc7Ofu2114Q6J0IIYAIvL68TJ074+Ph8/vnnX331lSDnRAgBTBMUFLR7925CyJo1a/Lz8zt/QoQQwGSrV69evHixUO1sCCGAOQ4ePDhw4MC4uLj33nuvk6dCCAHMwdrZXF1dd+zY0cnF6wghgJmeeuqpLVu2GAyGsLCwyspKs8+DEAKY75133nn22WfZc0TMbmdDCAHMZ2xnO3PmzP79+808ibA1ATibwMDAzz77jBDy+uuvJyUlmXEGhBCgs/785z+vXr26qalp2bJlGo3G1MMRQgAB7Nmz54knnkhPTzejnQ0hBBAAa2fz9vaOior6+uuvTToWIQQQxujRoz/66CNiejsbQgggmLVr1/7pT3+qqamRSCQ6na6DRyGEAO07fPjw7t27Tb3798UXXwwcOPDnn3/evn17Bw9BCAHakZGR8corr2zcuDEmJsakA/38/BQKhYuLy1dffdXY2NiRQxBCgLaMNxtWrVo1a9YsSw+HEAK0tWnTpmvXrg0bNmzvXpMfUVhdXb1y5UqDwbB06VIvL6+OHIIQAvzB2bNn//nPf3p6ep48ebJr166mHv7iiy8WFBS
w3u4OHoIQAvyupKQkLCyMUrpz584JEyaYevj+/fv/85//9OjR4+jRo25ubh08CiEEaGEwGFauXFlZWTlv3rwNGzaYenhaWtrGjRvJ3cdXdPxAhBCgxc6dO2NiYnr37n348GGRSGTSsY2NjcuWLWtoaPjHP/6xZMkSk45FCAEIIeTq1avbtm1zcXE5duxYQECAqYe/+uqrycnJQUFBH3/8sanHIoQAxLhl06ZNm2bPnm3q4d98883Bgwc9PT3ZboimHo4QArQ8i3fixInvvvuuqccWFxe/9NJL5O6+wGaMjhCCszt06NCJEye6dOly/PhxDw8Pk45lG8zcvn17wYIFa9euNa8AhBCcWk5OzquvvkoIOXDgwIgRI0w9/N1337148aLxWTHm1YAQgvNqamp64YUX6urqVq5cuWLFClMPZ13abJuZXr16mV0GQgjOKyIiIjExcejQoZ988ompx9bU1KxYsUKv17/11lszZszoTBkIITips2fPfvLJJ+7u7sePH+/WrZuph69evbqgoCAkJOTtt9/uZCUIITgj44Ovd+7cOXnyZFMPP3jw4MmTJ7t3737y5El3d/dOFoMQgtMxGAwrVqwoKyubO3euGfsypaenv/7668T09rT7QQjB6XzwwQfR0dG9e/f+8ssvzWtP02g0f//735cuXSpIPQghOJcrV+JkMpmLi8vRo0fNaE9jO/wOHz7cjPa0++noagsAB1BfT9asCZ40aXdoaNGcOXNMPfz7778/cOCA2UsN7wchBCcSHk4SE10nTVq3fbve1GONT33ZtWvX+PHjBawKl6PgLA4fJl99Rbp0IUePEg8PV5OONbanzZ8/f926dcIWhhCCU8jJIevXE0LI/v3kscdMPnz79u0XLlzo06ePGUsNHwohBMfX1EQWLyZ1deSFF4hEYvLhv/zyy3vvvceWGvbp00fw8hBCcHxvvkkSEsjQoeSzz0w+lrWn6XS6N954w0LbHyKE4ODOnSNyOXF3J8eOEdO701oeLDFp0qStW7cKXxwhBCEEx1ZeTlatIpSSHTvIlCkmH/7pp59+/fXXQrWn3Q9CCA7LYCArVpDSUjJnDnn9dZMPT09PZ0sN9+/fP3jwYOHruwshBIf14Yfk/HnyyCPkyy+Ji4mv9MZGsmrVyxqN5sUXX1y2bJllCmyBEIJjUqnIO+8QkYh88QXp29fkwzdtIrdvn5sz5zUzlhqaylIhlMvl165d6+RiRwDz1NeT5ctJczN57TUiFpt8+Jkz5F//IsXFnpGRu319fS1Q4B9Yqm1NkCUeAOZZs4ZkZZEJE8j775t8bEkJWbmSUEoiI4mg3Wn3hctRcDQKBTl2jPj6khMniImbpxGDgaxcSSorybx5LR02VoAQ2i6tVpubm8u7CjuTm0ukUkII+de/zGlPe/99EhND+vQhhw8TobvT7gshtFExMTETJkyYM2dOU1MT71rshlZLli8nd+6QF14gK1eafHhcHHn3XeLiQo4eJaavNDQfQmhzsrOzn3vuuVmzZqWmprq6uhYUFPCuyG68+Sa5coUMGWJOe1ptLVmyhGi1ZPNmYvpG+J2CENoQtVq9devWMWPGfPfdd76+vjKZLCUlxYwdaZ3TDz+Qjz8mbm5mt6eRvDwycSLZts0CxT0QFvXaBIPBcOzYsc2bN5eVlYlEIolEsmvXLjM2X3Ba5eXkb38jlJLt28mTT5p8+Oeftyw1PH7c5LmczkMI+YuLi1u/fn1sbCwhJCQkZO/evVPMaHN0bmvXktJSMns22bTJ5GOzswnbcu3gQcLlsgMh5KmkpOTNN988duwYpbR///7vv/++RCIRfM2oM9i1izQ1kU8/Nbk9zbjU8G9/I8uXW6a4h0EI+dBoNPv27du+fXt9fb2Pj8+6deu2bNnSpUsX3nXZqyFDyOnT5hy4eTNJTCTDhhHLd6fdlwkh1OtN3hsH2nX69GmpVJqfn08IEYvF+/bts1CDEaXUEqd1GGfPkn37WpYaCrd
5muloB1y8eHHq1Km+vr5yuVyn03XkEGhXfHz8tGnT2H/5CRMmXLp0SfAhqqurIyIifHx8vL29AwMDw8PDlUqlVqsVfCB7V1pK+/ShhNDduzlX0qEQUkrZs0jZS+fy5csWrckhVVRUSKVSV1dXQkjPnj0t8edMp9Pt37+fPaPL1dX1kUceMf6pDQgIWLNmTXR0NNLI6PV01ixKCJ07lxoMnIvpaAgppUql0njVJBaL8/LyLFaVQ2lubpbL5ey5P+7u7lKptKamRvBRLly48MQTT7DfzjPPPHPt2jVKaWpqqkwmGzlypDGN/v7+EolEqVQ2NjYKXoMd2bGDEkJ796a3bvEuxaQQUko1Gk1kZCSbP/D29o6IiLhz546FKnMM58+fHzVqFAvArFmz0tLSBB+isLBQcncLsUcffVShUNz7PSyNwcHBxjT6+PiIxWKFQlFXVyd4STYuLo66u1MXF/rf//IuhVJqagiZ4uJi40w6e1Cwgfs7uu3JzMxcsGABe8U/9thj33//veBD1NfXy2QyLy8vFiqZTNbQ0PDgQ3Jzc+VyeWhoqPFGiLe3N0tjbW2t4BXaoJoaOngwJYRu3sy7lLvMCSFz5coV4z3lSZMm/frrrwKWZdeqqqoiIiI8PDwIIX5+fpGRkU1NTcIOYTAYTp06NWDAAEKISCRatGhRQUGBSWfIz89vk0YvLy+xWBwVFVVeXi5stTZl2TJKCA0OpkL/TsxnfggppQaDQaFQsO4q1mx1yxYusfnRarVRUVFsRsTFxUUikVjiBX316tWpU6ey5EycOPGXX37pzNmKioqioqLEYrGbW8v9KldX19DQULlc7ni/zUOHKCG0Sxd6/TrvUlrpVAgZdlHk6elJCGFtxw+9KHJIMTExxqmRZ599NikpSfAhSkpKwsPDXVxcCCH9+vWLiorS6/VCnbyyslKhUIjFYuPefsY0FhcXCzUKR9nZtGtXSgg9epR3KX8kQAiZ7OzsRYsWsV/esGHDTp06JdSZbV/rn33AgAHtTo10UlNTk1wuZ4/j8vDwkEqllpsSq6qqYmlkf1jZu3pwcLBMJsvKyrLQoJbW2EjHj6eE0LAw3qXcQ7AQMjExMWPGjGG/uRkzZiQnJwt7flvTemrEclcBSqXSuO+lWCzOzc0VfIh2qdVqpVIpkUha99MFBQXJZLKMjAzr1CCUDRsoIXToUGqD008Ch5D+8XORm5tbeHi4Q37Qt87n4fT09Llz57JX/6hRo86dOyf4EB2h0WhYGru1WqjH0qhSqbiUZJKzZ6lIRN3daWws71LaI3wImaqqKqlUyj7r+/n5yeVyR+rViIuLe/LuqjULzQzfvn3b2GHj7+9vIw2DDQ0N3377bVhYmJ+fnzGNzz//8ltv0YQE3sXdR2kpDQighNAPP+Rdyn1YKoRMRkbG/PnzjffKzpw5Y9HhrKD1PdJ+/fpZ4h4p67Dp0aMH67AJDw+vqKgQdojO0+l0ly9flkqlAQEBU6eeIIQSQgcOpFIpvXyZfyOYkV5PZ8+mhNA5c6hwc1gCs2wIGaVSOXToUONHmpycHCsMKjjWLcSmRizXLXT+/PnRo0cbO2xSU1MFH0JYOp3u4sXml1+m/fpRFkVCaGAglUrpTz/xf93v3NnSnnbzJudKHsAaIaTt9U/aV39Gm6mRGzduCD7E9evXxXc3ix4+fLjdTS/r9VSlojIZHT789zT27EklEqpU8rkzfvUq9fCgIhE9fZrD6B1npRAyN2/eDA8PZ59zevXqZSOfcx4sISFh+vTpLBvjx4//6aefBB+CLT5i9wN69OgRGRlp793VqalUJqMjR/6eRj+/ljRa8ydLTqZBQXTjRuuNaB6rhpCJj49/6qmn2MvalhdGVVZWWnrxkV6vVygU7AnMrMOmtLRU2CH4YmkMDv49jT4+VCymCgW1Tt+4Wm1D7Wn3wyGEjFKpHDhwoPECLz8/n1cl92IXz927dyeWXHz0448/jhs3jv0XePrppxMTEwU
fwnbcuEHlchoaSkWiljR6e7ek0a4+l1gEtxBSStVqtXFhlI+PT0REhC0sq7HC1EhRUZFxijUwMNCplqHk57dNo5cXFYtpVBQtK+NdHCc8Q8jYzivy+vXrCxcuZPEbMWLEd999J/gQarVaJpN5e3uTu4uPNBqN4KPYhaIiGhVFxWLq5taSRldXGhpK5fKHL7Q1Xt9+9FE7Xx06lBJCd+60RNUWwT+ETGxs7OTJk1kAJk+e/Ntvv1lz9MbGxvXr17PGZT8/vz179jQ3Nws7BFt8xK7A2eIjm7oC56iykioUVCymHh4t0XJxaUnj/frGjSHs16+dj3wIofn4zlJMnz6dDVpmgasilUoVGhrK/sQEBwfb7FwUX1VVLWn09Pw9jcHBVCajbfrGW8/0fPpp2/MghJ1VV1dnXBjVpUsXmUxmnfn61NRUSyw+Yndl2OKjvn37Crv4yFHV1tLjx+lf/kJ9fFqSJhLRiRPpzp2UXbyzEE6eTAmhw4e3bQlACIWRlZVlXBxkj3eu6d3FR/bbn2ALNBqqVFKJhHbrRgmhffpQdpOIhTAykvr5UULoyZN/OAohFFJ0dPTjjz/Oojhz5syUlBTeFXWUUqkcMmSIvXfq2Y6GBvrtt9S4TpOF8P336ZYtlBA6fvwfvhkhFBhbGMX20mQLo2ywm7m1jIyMefPmsfiNHDny7NmzvCtyQCyEW7fS8nLq7U0Joa3XeCGEFnHvuh4bXBjFimSrt2y2SMfAQiiTUUrpK69QQujTT//+VYTQgtLT023zTcbu3q7tXesQ5ue33Gk03tVCCC2uzcctq+31cD9tPrg6/I4etqB1CCmly5dTQuj//E/LvyKE1tB64pHtesRl4rH1FK6z7W3FV5sQpqRQkYiKRJS1GCKE1tN6/z8r34JrfTOT7e9k74uP7EubEFJKFy6khFCJhFJKhw1DCK3r6tWrrZtRfv75Z4sO5/CLj+zCvSG8fJkSQt3caF4eHTUKIbS6e/eEt1BbZrMnXRcAAAJCSURBVGxsrHHn/5CQECs3uILRvSGklIaGUkLoyy/TkBCEkBO2QMGkp6N0XOulHngGDnfthvD06ZZligghZ+w5YQIujLLNRY9Ort0QGgx0zJiWRlOEkL+LFy+OHTvWuGidPTHTDHguqm1qN4SU0qNHf19dgRDyx2ZQevfubZxBMWmNUpuNcCzxcHkw2/1CqNXSQYMQQhtjxkZmrR8uby9bwoFdc/AQMq239BwxYsTp+2xDae+bo4KdcooQMg/ewan1NuGzZs1KT0/nVSc4GycKIW1vL8Pq6urMzMzWD8ywxMPlAR5ARCklTqa8vHzLli2HDh0yGAzdunVTq9V6vd7f33/btm2rV682PjUawDqcMYRMYmLihg0b1Gp1dnb2kiVLtm/fzp6pCGBlzhtCprq6mlLq7+/PuxBwXs4eQgDuXHgXAODsEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDO/h997uu2e2eTwgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVwU9f8H8PeysByCggqYdBgSIHwFD7SvXzVK+qZ534ZZ3rdyCKGipqjkfWeZmZaZeZSoKVmWmX39Zh6wX5VrDdMEERS5hAV2d+b3x/jbDHaHa3bes7vv58M/itmYF36/Lz47OzPvkbEsC4QQPDbYAQixdlRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWwMe5p7slSZNyfH8p+sMIAREBUQj57CvcEpgc6pDr4pPlsKtik/3obuzbqTuri4GKLD0BEYIsdQLr2Ptw76fak+Dbxw12HK9XKqDtROlYX6xnLbXWwcQDGwgMQcVAJjUrMSxzhOiKxbSIAdHXqyrJsZlWmVQUg4qASGlaoLVRVqeLbxOu/MqX1FKsKQERDx4SG5WvzAcDTztNqAxDRUAkNc7RxBAAdq7PaAEQ0VELD2tq1BYBcTe6TX6xiq6wnABENldAwe5l9F6cux0uO67+SXJLsc91HtBqgByCioRIatfyp5cklyQvvLlSqlQeLDk64PWFK6yn2MnsAqGQqK5lKrg8aVqP/ZwsLQMQhY1kWO4N0JRUnJeQlZFZmeim8ZrSeEesZKwPZI+aRi9KlxivlMrm2s9byAhARUAnrsCF/w28Vv633Wv+s4lnsLDXd1dxNyEvo6Nhxjvsc7Cyk8aiEfAq0Bb5pviW6km99vu3XvB92nJqOlxwfkj3ETe6mClS1tm2NHYc0Eh0T8onPjS/RlQxqMUiCDQSAwS0G92ver0hXtDRvKXYW0ni0EhqVWpHaLaubHOTXAq752vtixzEsozIjOCOYASbFPyXIMQg7DmkMWgmNisqJ0rG6CI8IyTYQADo4dJjpPlPH6qJyorCzkEaildCwA0UHwv8I97D1UAWqWshbYMfhU6Qr8k3zfaB9cMT7yDDXYdhxSIPRSmiAmlEvyF0AAIltEyXeQABwk7ste2oZAMTmxlYyldhxSINRCQ1Ym7/2dvXtzk6dJ7aaiJ2lXma0nhHkGHSz6uaTN/4Sc0FvR2vK0eT4p/mXM+U/+/78kvNL2HHq60zZmbAbYc42zlmBWdx1p8Rc0EpYU1xOXDlTPsZtjBk1EAD6uPQZ5jrsEfMo/m583a8mUkIr4d/8Wv5rz6yeDjYO6QHp7RTtsOM0zM2qm4EZgVVM1QW/C92bdceOQ+qLVsK/MMBE3olkgY3zjDO7BgKAt713lEcUC2xkTiQL9LvVbNBK+Jfdhbsn357sZeeVFZjVzKYZdpzGeMQ88kvzu6u5+3m7z8e1HIcdh9QLrYSPlenKFt9dDABrvdaaaQMBwNnGmRsMtSB3wSPmEXYcUi9UwsdW3luZp8nr0axHeMtw7CxNMr7V+O7NuudqctfcW4OdhdQLvR0FAMi+nx2cG6xm1Bf8L3Rz6oYdp6m4j5dc5C5Xva4+1/o57DikDrQSAgDETI3xjPWMkcVYQAMBoEezHtEQ3SqqVezMWOwspG60EsKZM2fCwsJcXFyysrKeeuop7DjCyM3N9fPzKy8v/+mnn15++WXsOISPta+EOp0uKioKABYtWmQxDQQALy+v+fPnA0BUVJROR3MTpY21blu
3bgUAb2/vyspK7CwCU6vV7dq1A4AdO3ZgZyF8rPrtaFFR0QsvvFBYWHj06NEhQ4ZgxxHe4cOHR48e7e7urlKpXF1dseMQw6z67eiSJUsKCwv79OljkQ0EgFGjRoWGht6/f3/FihXYWYhR1rsSpqenBwcHsyybmprasWNH7DimolQqQ0JCZDKZUqkMDAzEjkMMsN6VcN68eVqtdubMmRbcQADo1KnT5MmTtVptdHQ0dhZimJWuhEePHh02bJibm5tKpWrd2sKHBd6/f9/X17e4uPjkyZP9+/fHjkNqssaVsLq6Oi4uDgCWL19u8Q0EAHd39yVLlgBAZGRkdXU1dhxSkzWWcOPGjTdu3AgICJg+fTp2FpHMnTvXz8/v999/3759O3YWUpPVvR3Nz8/39fUtLS09depU3759seOI5+TJkwMHDmzevHlWVlabNm2w45C/WN1KuGDBgtLS0qFDh1pVAwFgwIABr7/+emlp6bJly7CzkL+xrpUwJSWlW7dutra2169ff+GFF7DjiC0zMzMoKEin0128eLFr167YcchjtqLtSaPRqNVq0XZXG8uyERERDMNER0dbYQMBwN/ff/bs2Zs3b46IiEhOTpbJZIhhHB0d7ezsEANIh0gr4cWLF0+fPr148WIR9sXD2dlZoVD88ccfzZs3x02Cpbi4uF27djqd7tEj5PvuV69eHRYWFhISghtDCsQoYXV1dVBQUHZ2tkKhkMvlpt6dMQzDaLVahUKRnZ3t7u6OFQNXXl6ej4+PTqezs7NDXAl1Ol1VVZW/v79SqbS1Fe/tmESJcJH4unXrAMDf37+6ulqE3fHgTlVPmzYNNwai8ePHA8CwYcNwY1RVVfn6+gLA1q1bcZNIgclLmJ+f36JFCwBITk429b7qdOPGDXt7exsbm8uXL2NnQXDlyhUbGxuFQqFSqbCzsEePHgUANze3+/fvY2dBZvISTp06FQAGDBhg6h3VE3cJZc+ePRmGwc4iKoZhevXqBQALFizAzvIYd5Zozpw52EGQmbaEqampcrnczs4uMzPTpDuqv5KSEu5U9aFDh7CziOrzzz8HAE9Pz5KSEuwsj6WlpdnZ2cnl8qtXr2JnwWTaEoaGhgJATEyMSffSUDt27ACAZ555pry8HDuLSMrLy5999lkA2LNnD3aWv5kzZw4A9OnTBzsIJhOW8ODBgwDg4eFRVFRkur00gk6n405VL1++HDuLSLiTQ126dNHpdNhZ/ubhw4fcNfRHjx7FzoLGVKco1Gp1QEDArVu3du7cyR0WSsr58+d79+7t4OCQmZnJLREW7M6dO/7+/mq1+ty5c9xhoUFnzpxhGMYUAVq0aNGtm9FZku+///7cuXO9vb3T09Pt7e1NEUDqTFTuhIQEAOjUqZNWqzXRLppo5MiRADBu3DjsICY3atQoAHjzzTf5X2a683Uvvvgiz361Wi13X/Xq1asF/bnNhklWQv3Qy7Nnz3KHhU9Sq9Xp6emiXbt48+ZNBweHtm1rPjeznuuDuav/mt+vXz+tVmuKDAEBAdxUO2MscvRrA5ii2WPHjgWA0aNHG9yakJBgY2OTmJhoil3XcODAAXt7+/DwcINbJXukJBQzOvrlZm1NnDgROwgC4Uv43//+VyaTOTo6/vHHH7W33rlzp1mzZgDw888/C77rhu5Osp8ZCuWjjz4CM/kcODs7m7uO4rfffsPOIjaBS6jT6bp37w4AS5YsMfiCN954AwDeeOMNYffLY+nSpQDQuXNng8udBM+eCcXszohyM0d69OhhbddRCFzC3bt3A4CXl9ejR49qb9Uvkrdu3RJ2vzwqKiqee+45APjkk09qb5XgdSRCmTdvHpjVtUGlpaXcAeEXX3yBnUVUQpZQ/5e4b9++2lt1Oh33OfXSpUsF3Gl9fPHFFwDg4eFRXFxce6ukrqgUiv4q2UuXLmFnaYBPPvmE55e4pRKyhNwTSIy9ndi1axcAPP300+L//TIM07t3bwCIi4sz+AKJ3Fs
gIDO9X0R/OPPuu+9iZxGPYCXkP7DWL5L79+8Xao8NkpKSwi13WVlZtbfeu3ePu833u+++Ez+b4E6fPg0AzZs3z8vLw87SYPwf7FkkwUo4dOhQAJgwYYLBrVI45p44cSIADBkyxODW9957DwACAgI0Go3IwYSl0Wi4cfcbNmzAztJI4eHhADBmzBjsICIRpoQ//vgjADg7O+fm5tbe+vvvv0vh0+d79+5xdzaeOnWq9taqqipu8Mz27dvFzyagjRs3AoCPj4/5PuxNf2Lp7Nmz2FnEIEAJ9ZcdrVq1yuALBg8eDACTJk1q+r6aaPXq1QDQoUMHg/f4HzlyBABatmz54MED8bMJorCwsGXLlgBw4sQJ7CxNws1llPJljwISoITbtm0DAG9vb7VaXXvrDz/8AAAuLi53795t+r6aqM6pCq+99hoAREREiBxMKNxM8VdffRU7SFNVVFRwTzjduXMndhaTa2oJ9beiJCUl1d6qXyTXrFnTxB0JhX+qQlpamq2tra2trTneZnr9+nUu/PXr17GzCODAgQM8J5YsSVNLOHfuXDB+U6Y0H0bNP1Vh1qxZABAWFiZyqqb797//DQBRUVHYQQTz0ksvAUBsbCx2ENNqUgnT09O58QT/+9//am99+PBhq1atQHr3a/JPVSgsLORiHzt2TPxsjXb48GFzP6CtLSUlRS6XKxQK6YxHMYUmlZBbUmbPnm1w6+zZs3kWSVz8UxW2bNkCAO3bt5fUAs6jsrLSx8cHAD788EPsLAKbMmUKAAwcOBA7iAk1voTHjh2r8+BKsjN8+KcqaDSaf/zjHwCwdu1a8bM1wsqVKwEgMDDQ3E9y1qYfmfntt99iZzGVRpawzo8ZpT/NTv+hrsHlTlIf6vLLyclxdnYGgO+//x47i0lww6ONnViyAI0s4Zo1a3j+XpKSkngWSYmoc6rCoEGDAGDy5MkiB2uot956CwBGjBiBHcRU9L/xN2/ejJ3FJBpTQv53CPpLT7Zt29bkeKbFXehjbLnTX+hz8eJF8bPV06+//iqTyezt7W/cuIGdxYSOHz8u/V/rjdaYEk6aNAkABg8ebHDrqlWruIswzeLNA/9UhXfeeQf9klceDMO8+OKLALBo0SLsLCbXr18/AJg5cyZ2EOE1uIT1vB3B4PWZElTPmz++/PJL8bPV6bPPPgOANm3aWN5YgNr4z4eZtQaXkDt/+s477xjcyn+ngjTx3+Hx8ccfY90Gya+srIwbIbd3717sLCKJiIgAgFdeeQU7iMAaVsL9+/fX5xZ1g4ukZPFPVUAcCMAvPj4eALp27Wqpo+Jq059Y+vrrr7GzCKkBJdQPa9m1a1ftrfq71+fPny9cPJHwT1U4f/68+KNx+HHDVGUy2S+//IKdRVTbt28HgOeff97g3QJmqgEl5B9btm/fPm5smTleblvnVAXxh8TxGz58OAC8/fbb2EHEptVqg4KCAECcubX1kVedB1eA+3O69HQjvkN9S8g/wLOiooIb4Ll79+5GhJACSY1L5XfmzBkAcHJy+vPPP7GzIOB+fGN3kJvO7ge7A9IC7FPs219vvzF/45Ob1Dp1sbbY5CXkXwoKCgpGjx4dEhJi1scn4eHhoaGhxmau8b8REI1Wqw0ODpbUUiC+YcOGAcD48eNF2+NnhZ/BFYjPjb9cfnnXg13Oqc7r7q178gVqnbrRJazXsyiKi4uDgoIKCgpUKhXP8wwqKiqcnJzq/G6SxZ+/oqLC29u7pKSksrJSzFS1OTg4uLm5cYeFuEmw3Lx509/fX6FQlJeXN+g/9O3oq/pU1dDdfevzbeSdyI6OHb/y/or7yq4HuzKrMtd7rde/ppKpdFQ6nn7h9Ksurzb0+wv5IB6zbiCYf349lmXVarVZ/ziS+oVeqitVVani28TrvzKl9RQhd1DPFdPaBmDV9u6774KU3o6uXLnS4AsyMjJ69+5t1p/ZcD9jeHh4YWGhwRfwj/YTXJo6Da7AtyV8t3E05e2oMB/MWLw
7d+44OTnJZLJz585hZ2F/+eUXmUzm5OR0+/bt2lv1Zy8uXLggfjZBfPDBB2D8PAT/aD9TuFl1E67AiWK+2VlilJB94pMJaxiAVcOYMWMAwNgj1sQ3YsQIAHjrrbcMbl24cCEA/POf/5TmJa/89Gfkv/rqq9pb9aco3nvvPdEiVTKVcAU+uv9RjS8++a8ilVB/sv7jjz9uxJ7MlwRP1t++fZtbmQ2erDfrK9oiIyPB+LVp77//Phgf7Wc6XTK6DPh9gP5fTxaffPrq00/2UKQSsiz75ZdfgnUMwNLT6XQhISEAsGzZMuwsf7No0SIwftnap59+CmZ4bbf+Km2lUll7q36RPHLkiMjBThSfkF2RLchdkFqReuDhAff/uS+7+/j/D2qdWn+eMLkkWa1T11gk69TIC7gtfgCW3s6dO0GSF3Drn3D62Wef1d5qpnc58d+vxD/az9SOFB0JTg+2T7H3vu699t5ahmVYli3Tlekvl9H/kafIG/SdBb6VycKUlpZyz9k8cOAAdhYD9u7dC8afcGp29/t+8803AODq6mrwzl26lelvJk+eDACDBg0SPI3UxMbGAsC//vUvaX7CwTBMz549ASA+Pt7gC8aNGwcAI0eOFDlYI1RVVfn5+QHApk2bDL6AWyRnzZolcjARCD/ewmKYxXiLy5cv8zzh1IxmQHHTnPz9/Q0OZOAf7WfuGjnoae3atWDRA7BYlh04cCAATJkyBTtIHd5++20AGD58uMGtK1asAMlPQ9T/Wk9OTq69VT/oacuWLeJnE0FTRx5a6t8L95xNsxh5qB8pYnC5U6vVzz//PADs2LFD/Gz1xD/h1+J/4zd++K8FD8DSD/9dt25d3a+WgMTERJ7lTj8h39hVYLhSU1PlcrmdnZ3BWffWcOzTpDH4lnqsvHnzZjDPMfgffPCBwRe8/PLLABAdHS1ysPoIDQ0FgJiYGINbreFTQBM+EMZM6R8Ic/z4cewsDfDVV1+B8QfCKJVKuVwuwaem6Z9/VlRUVHurlZwPa+qj0SxvANbMmTPBnB+NFhkZaXCrBJ8fWueTQPlH+1kMwR4SKv6VRKagf87mtWvXsLM0GH/4goICV1dXADh58qT42QxKSEgA48/Etp5rJAV4XDbWNbWmwL+YSN+MGTN4lrsNGzYAgI+PjxSOdXNycrib486ePVt7q1XdLSBACVHuLjGFr7/+muewyizoD2i/+eab2ls1Gk1gYCAAbNiwQfxsNYwdOxYARo8ebXCrVd03J0AJWYz7LAWnf46NsQ8YzcWmTZt4Ptr9/vvvAaB58+Z5eXniZ9Mzo9l2IhCmhCzGACxhcafaAgICpHxlSX3oT3KuX7/e4Av69+8PANOmTRM5mJ7ZTXk1NcFKmJ2dzU1VMPhkFYnTX3Ty3XffYWcRgP5yH4PL3Y0bNxQKhY2NzeXLl8XPxrLs7t27wfi8c/0iKZ1bqE1NsBKyLLtgwQIznarAf/mlORowYAAATJ061eDW6OhoAOjZs6f4/0vpn/yxb9++2lsl++QPkxKyhPqpCp9//rmA39bU+G9EMFM3btzgbgG5dOlS7a0lJSXcfZKHDh0SOdj8+fPB+DOwdu3aJc1bqE1KyBKyLLtnzx7unUZZWZmw39lEGIbp1asXACxcuBA7i8BiYmJ4bobcsWMHADzzzDPl5eWiRarn0yD3798vWiQpELiEDMNwx9yLFy8W9jubCP/N6WZNPxbg4MGDtbfqdLquXbsCwIoVK0SLxD8vVOLPRTYdgUvI/v9UBQcHB4OfPkuKfkzLp59+ip3FJLgBOcaWO/75pYLjP49lFrdQm4jwJWRZ9s033wSAUaNGmeKbC4h/YJkF0I+KS0hIMPiCkSNHOjg4GJzwKbhly5bJZLJVq1YZ3Dp48GAAmDRpkghJpKZeD4RpqNzcXD8/v/Ly8p9++om7icaYWbNmqVQNfkBHfTg6OnKDg4z5888/O3TooFarz507xx0WWqTz58/37t3bwcEhIyODuxD
sSTk5OVqtlruKWgSXLl0KCgqyt7ev8fUff/zx1VdfdXFxycrK4g4LrYuJyr18+XIACA4O5r/sqEuXLib6uZycnPgT8g+xtiSjR48GgLFjx2IHMUyj0XTs2BEA1qxZg50Fh0lWQgCorKzs0KHDrVu3Pvroo2nTphl72cWLF0tLS00RQC6Xv/LKK8a2/uc//3nppZccHR0zMjJ4HvZmGe7cuePv769Wq3/++WfukeaSsm3btoiIiPbt26elpdVeJK2C6fp96NAhAHB3dzd4vyYilA8GcS1ZsgQAunTpIrWj34cPH3JXnB87dgw7CxoTlpD9/8kF8+bNM+leGurDDz8E0U+R4ZLs88xnz54NeEO1JcK0JeSf4YOiqKjI3d0dAA4fPoydRVT79u0DAE9PT+ncI5uWlmZrayuXy69evYqdBZNpS8iyLHdA2L9/f1PvqJ6ioqIAoFevXtZ2RphhGO6AcP78+dhZHnvttdcAYO7cudhBkJm8hPqpCgbnuoosIyPDzs4O8QYCXFeuXJHOVbJJSUkA4ObmZr63UAvF5CVkWXb9+vUA4OPjU1VVJcLueLz++usAMH36dNwYiCZMmAAAQ4cOxY2hv4V627ZtuEmkwFSnKJ5UXV0dFBSUnZ2tUCjkcrmpd2cMwzBarVahUGRnZ3OHhVYoLy/Px8eHYRhbW1uZTIYVQ6fTVVVV+fv7K5VKW1tbrBgSIcbPr1Ao9u7de/r06cWLF4uwOx7Ozs52dnZWejIKAAAcHR3t7OwYhikrK8NNsnr16rCwMGogAIixEnI0Go1arRZnXwaxLDtgwIDz58/HxcWtWbMGMQmiqKioLVu2hIaGck8xQMT9OsDNIBHilVAKUlNTQ0JCuMmc3ANtrEpmZmZQUBDDMFeuXAkODsaOQx6zwQ4gqs6dO48fP766ujouLg47C4J58+ZpNJqpU6dSAyXFulZCAMjPz/fz8yspKTl16lTfvn2x44jnxIkTgwYNcnV1ValUVvu5lDRZ10oIAJ6engsXLgSA6OhojUaDHUckGo2Ge/T30qVLqYFSY3UlBIDo6GhfX9+MjAxuzoo12Lp1a1ZWlr+/P3etJpEUq3s7yjl27NjQoUPd3NxUKhX3QBsLVlBQ4OfnV1xcnJyczF2uQCTFGldCABgyZEjfvn2LioqWLVuGncXkFi9eXFxcPGDAAGqgNFnpSggAGRkZwcHBDMOkpqZyd3ZbJKVSGRISYmNjc+3aNT8/P+w4xAArXQkBoEOHDtOnT9fpdNx9FZYqKipKp9NFRERQAyXLeldCACgqKvL19X3w4EFSUhI3EtPCHDp0aMyYMe7u7iqViruXhUiQ9a6EAODm5sY9By8mJqayshI7jsDUajU3c37lypXUQCmz6hICwMyZMzt27Hjz5s3NmzdjZxHYunXrbt261alTp8mTJ2NnIXys+u0o58yZM2FhYc7OziqVymKGXupHv549e5ab9EMky9pXQgDo06fPkCFDPF702HRvE3YWwWzM3+jRzWPkyJHUQOmjlRAAIPt+dnBusJpRX/C/0M2pG3acpvq1/NeeWT1d5C5Xva4+17rm1G0iNbQSAgC0d28/x30OA0zknUgWzPu3EgNMVE4UC2ykRyQ10CzQSvhYma7ML90vT5O3r92+N1u+iR2n8fYU7pl0e5KXnVdWYFYzm2bYcUjdaCV8zEXuktg2EQDm584vZ8qx4zRSma5s0d1FALDGaw010FxQCf8yvtX47s2652py1+avxc7SSIn3EvM0eT2a9Rjbcix2FlJf9Hb0b7iPNBxsHNID0tsp2mHHaZibVTcDMwKrmKoLfhe6N+uOHYfUF62Ef9OjWY/wluFqRj0/dz52lgaLyY2pZCq59Rw7C2kAWglrytXk+qX5lTPlZ33PhjqbzUm2M2Vnwm6EOds4ZwVmtbVrix2HNACthDV52XnFecYBQNSdKB2rw45TLzpWF5UTBQCL2iyiBpodKqEB73i+007RTqlW7i7cjZ2lXj5
88OE19TVve+8oD0u+LctS0dtRww4WHXzjjzc8bD2yArNc5ZK+BaFIV+Sb5vtA+yDJO2moqwXekGXxaCU0bIzbmFDn0AJtwcp7K7Gz1GHp3aUPtA/6uPShBpopWgmNUqqVIZkhNmBzrcM1PweJ3paeUZkRnBHMAJPinxLkGIQdhzQGrYRGdXLsNLHVRA2ric2Nxc5iVHROtIbVzGg9gxpovmgl5FOgLfBN8y3RlST7JL/eXHKjyo6VHBuaPdRN7qYKVLW2tfDBjRaMHkzFx8PW492n3r2qvtrJsRN2FgNCnELGtRzXvVl3aqBZo5WQEGR0TEgIMiphY9zT3JOlyLg/P5T9YIUBiICohHz2FO4JTA90SHXwSfPZVPDXBJo2dm3UndTFwcUWH4CIgD6YMWrvw72Tbk+KbxM/3HW4Uq3kLiWN9Xx8usLBxgEYCw9AxEElNCoxL3GE6wjudvuuTl1Zls2syrSqAEQcVELDCrWFqipVfJt4/VemtJ5iVQGIaOiY0LB8bT4AeNp5Wm0AIhoqoWGONo4AgHg/IXoAIhoqoWHcrbG5mtwnv1jFVllPACIaKqFh9jL7Lk5djpcc138luSTZ57qPaDVAD0BEQyU0avlTy5NLkhfeXahUKw8WHZxwe8KU1lPsZfYAUMlUVjKVXB80rEb/zxYWgIiDrh3lk1SclJCXkFmZ6aXwmtF6RqxnrAxkj5hHLkqXGq+Uy+TazlrLC0BEQCUkBBm9HSUEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVPG5i4UAAABaSURBVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZP8HLSkn/KpvXDUAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAbS0lEQVR4nO3deVRU5/kH8GcWdlDRIHUXiQsQNfancYFatZqoJabGikul0RitrUoiGpeDkaRxq0aDW90ae7QeTTRRkcTYHBNPQHGriAZjVEhESJQEZRUYmJn398drJ1NGCMvc+9wh38/JH/eMeJ+X4Je587z3vq9OCEEAwEfPPQCAnzuEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGZG7gFoXWFhYVZWFhF5enp6eXnJF+2Pvby8PD092cYHrg8hrM3x48dPnjy5Zs2aOn69fSC9vb09PDzksY+Pj7u7u+Oxr6+vm5vbtGnThg0bZvti+LnRCSG4x9AQJ0+eHDBggO3tSAllZWWhoaHZ2dnBwcEtWrSoqKgoLy+Xf1ReXl5RUeF43DBhYWE9e/bcv39/Y0fMp6ioaNWqVfJYr9c3b95cHhsMhmbNmsljo9Ho5+fneOzm5ubr6yuP3d3dfXx8HI89PDy8vb2V/z54uOQ74dq1axctWjR79uxNmzYpV2XlypXZ2dl9+vS5cOGCwWCoy18pKyszmUzy+MGDB5WVlY7HpaWlVVVVtuO8vLx58+Z9+eWXc+fOHTRokLO/CTUcOXIkPj7+ypUrilaZNGnSzJkzhwwZomgVFi75Tnjp0qUBAwZUVVUlJSX99re/VaJEZmZmz549TSbT6dOnBw4cqEQJm7i4uJUrVw4cOPD06dM6nU7RWk5XXl4eFhb2zTffjBkzRv4SsVgsxcXF8k/NZnNJSYnjcVVVVWlpqTyurKx88OCB47HJZCorK5PHFRUVOp1Op9Ndv369Xbt2qnxnKhKuafXq1UTUunXrvLw8Jc4/evRoInrxxReVOHk1xcXFv/jFL4jogw8+UKGccy1dupSIevbsWVVVpWih3//+96r9RFTmqiG0WCxDhw4lot/97ndOP/kHH3xARP7+/gol3NGWLVuIqEuXLiaTSZ2KTpGZmenp6anT6T7//HOla3399dceHh56vf7ixYtK11KZq4ZQCJGTk+Pv709Eu3btcuJpy8rKOnfuTESbN2924mlrZzabw8LCiGjDhg2qFW08+Vlg2rRp6pSbN28eEQ0bNkydcqpx4RAKIfbs2UNEPj4+N27ccNY54+LiiKhPnz5ms9lZ56yLxMRE+fZ77949Nes22KFDh4ioWbNm3333nToV79+/36pVKyL6+OOP1amoDtcOoRBi4sSJRDRo0CCnZObmzZvy+io1NbXxZ6uv4cOHE9HChQvVL11fZWVlQUFBRLRp0yY167711ltEFBISovRHUDW5fAgLCgo6duxIRG+++Wbjz6ZmP8bRpUuX9Hq9u7t7ZmYmywDq7pH9GJPJ9Nlnnyla12QyPf7440S0Y8cORQupqc4hvHlTdOr08PjgQfHccw+PjxwRISEiKEg8/7zIz3f6+OrixIkTer3eaDSePXu
2MedRvx/jKDo6mogmTZrENYC6qKkfs3LlSiJ69dVXnVXo66+/njBhwtq1a+1ffPfdd2VjvLi42FmFeDUuhHfvCn9/ce2aEELMni1mznT+AOtGfmR//PHHS0pKGnYGln6Mo9zcXG9vb51Od/r0acZh1E72Y6ZOnWr/4u3bt+UNLsePH3dWoU8//ZSIWrRo8cMPP9hetFqt4eHhRBQfH++sQrwaF8L9+8WoUQ9fzMgQHTo4eXR1VlFR0atXLyL685//3LAzcPVjHC1ZsoSIBg4caLVaeUfySIcPH35kP+b5558nogkTJji33DPPPENEL7/8sv2LZ86c0el0Xl5et2/fdm45FvUJocEgOnUSnTqJgICHIdy0Sfzxjw+/IC9PeHoqMsa6ycjIkNdISUlJ9f27vP2YaoqLiwMDA4no0KFD3GOprqZ+zCeffEJE3t7et27dcm7FL7/80mg0urm5VWuAjxs3joimT5/u3HIsGvdO+N574plnHr74xReic2cnj66e1q5dS0QBAQF3796t11/k7cc42rx5MxEFBwdrbe6+pn5M9+7diehvf/ubEkVfeuklIho3bpz9i1lZWXLuPi0tTYmiampcCH/4Qfj7i6++EkKImBghrxlOnhSXLzt7nHVisViGDRtGRM/Z+kZ1oIV+TDVVVVVy7n7jxo3cY/lRTf2YFStWEFG3bt0qKiqUqHv37l35yEVycrL966+88krTmLtvdHc0MVGEhYmuXUVUlCgsFBkZws9P+PsLpr5Cbm5uy5YtiWjnzp11+XqN9GMcybn7li1bamfuXrV+jKP4+Hgieuqpp+w/J9+/f1/+rBUtrQJnzxOaTCIqShAJb2/BdFvDwYMH5W00169f/8kv1k4/xtFvfvMb7czdq9yPqaasrKx9+/ZEtH//fvvX5QeQ0NBQl567V2Cy3mwWM2YIIuHuLt57z/nnr4NJkyYRUd++fSsrK2v5Mls/RoX7jxtAO3P36vdjHO3cuZOIOnfubH/RazKZgoODiegf//iH0gNQjjJ3zFitYuFCQSQMBsFxZ0NhYWGnTp2I6PXXX6/ly7TWj3E0ZcoUIpo8eTLvMFj6MdVYLJY+ffoQUbW5e7kiQWBgoOvO3St529rq1UKnEzqdUOWHVE1ycrLBYDAajWfOnHnkF8h+TLNmze7cuaPy2OouJydHzt0zzp1w9WMc2ebu8+3uzbJarfJhYtedu1f43tGtW4VeL4jEokVC9annBQsWyEa/4+9IzfZjHC1evJh37p6xH+NIzt2/8sor9i+mpqbKufucnBw1B+Msyt/AvX+/cHMTRGLWLGGxKF7OTkVFRe/evYlopsP9dLIfo8Lz4I1nm7s/fPiw+tV5+zGOrly5YjAYHOfu5XheeukllcfjFKo8RfHRR8LbWxCJiRNFrZ0Sp7t69apckS0xMdH2osb7MY645u610I9xNH369EfO3bu7uxsMhitXrqg/pEZS61Gm5GTRvLkgEqNHVz14oFJRIYQQ69evl7fR2D77ab8fU01VVVVoaKj6c/e192PWrFmj5mBsvv32W3klnJKSYv96TEwMEY0cOZJlVI2h4vOEX3wh2ra9PHhw//791ZyAtlqto0aNkj8eq9XqEv0YR0eOHCGixx57rLCwUJ2K2unHOKp97v7f//4318AaRtWHeh/cuBHUubOcHFfzHjHbbTSbNm1ylX6MIzl3v2jRInXKaaofU01JSUmbNm2I6N1337V/Xa6V3qtXLw3eelELtZ+s/+6775544gm5spiac9DyDdDNzc1V+jGO0tLS9Hq9p6enCp/EtNaPcbRjxw4iCgoKeuTc/TvvvMM4tvpiWN7i3r17/fv3J6I2bdoo/TH622+/PXr0aHx8fGRkpK+vr1xA9tSpU4oWVc4f/vAHFZ6712Y/phqz2dyzZ08ieuutt+xf37dvHxG1bdu2tLSUa2z1xbMCd2lp6dixY0+cONGyZctjx47JTDaeECIrKyvtvy5dupSfn2//BUa
j0Ww2HzhwYPz48U6pqLLs7Oxu3bp5eHjYlrKuacOGuuwhVdP2NWfPnv3888979uyZlpZmNP64UUJqauqMGTNeeOGFhQsXKvhN1tnx48dHjRrVokWLzMxMuQobEQkhIiIiUlNT33jjjWXLlilVOzGRliyhigrq04d27KBWrSgzk4YPp1u3iIjef5/27qUjR+p4MrZl8E0m0+TJkw8dOuTr63v48GG50Fh9WSyW7Ozsq1evXrx48eLFi2fPnq2WuhYtWoSFhf3ff506dWrWrFlBQUHXrl1zxV2QLl261LdvXy8vL9ta8Qpp0aLFunXrXnzxxWqvy1005FW9FkycOLFfv35z5syx/2meOXMmPDzcy8trxYoVtjXza9p2Rv7m6qjTGYUgDw+ybTvj6Uk1bTeUl0chIZSaSj160Jw5VFVF27e7ZAiJyGKxzJgx45///KeHh8e+ffvk543amc3m69evX/yv9PT0av8c27RpExYWFhoaKlMXGhpqv7uDxWJ58sknMzIy1q1bFxsb6/xvSUlCiCFDhiQnJ8fGxq5bt06+aL9hQ7XNG35yD6matq9JTEw8c+bMkCFDTp48qfy3pYiQkJCysrLbt2/X8etNnTu7y/w8kpcX2bagjIigyZNpzx46doyI6OpVGjWKbt921RASkRAiNjY2ISHBYDDs3Llz2rRp1b6gqqrqxo0bttSlpaXZ/m1Jbdq0sb3R9evXT27qUIuPP/549OjR/v7+N2/etF3DuIRdu3ZNnz49MDDw+vXrtr3HlFBYWNi1a9f8/PykpKTIyEjlCikkPT29b9++RPTcc8/ZttOqfduZq61be37/PVVUkO1fV3k5PXLHu8GDafx4unCBdu8mIvr+e+rUicrLKTOTevSg9u2JiMrKaNCguodQE+uOyt1ddDrd+vXrS0pKUlJStm/fHhMTEx4e7njR2KZNm8jIyPj4+KNHj37//fcNKPf0008TUWxsrNO/EeUUFRXJ3y979+5VoVxCQgIR9ejRw+XayFardfDgwUQ0b948p520rEzcv//wv3v3Hr2qS01PvdeBJkIohFi/fr1OpzMYDHq93j5yRqOxV69eU6dO3bBhQ0pKSoNXNLR3+fJlg8Hg7u5+8+bNxp9NHbNnzyaiiIgIdW7jrqys7Nq1KxFt3bpVhXJOtGvXLiIKDAxU8K6GR67q0gRCKIT405/+5Ofnp9frQ0NDo6OjExISUlJSHihzj5u87o2KilLi5E535coVo9FoNBrT09NVK/r+++8TUUBAQFFRkWpFG6moqEhO4v/rX/9StlK1VV1EUwmhvB9qwYIFKtSq6f5DDbJdX6l//RwREUFEcXFxKtdtsDlz5qh5veAsGgrhzJkziWjLli3qlHvttdeIqH///hr/gb3zzjuKX1/V4Ny5c/I5vezsbJVLNwDL9YJTaCiE8nnNDz/8UJ1ytvsPDxw4oE7FBlC5H+NowoQJRPTCCy+wVK87RfoxatFQCHv06EFEGRkZqlXctm0bOdx/qCkq92Mc2fbH/c9//sMygDpSox+jGK2E0Gq1yluu1Fyux2w2y7vJ169fr1rRutPI9dX8+fOJaMiQIYxjqJ16/RhlaCWEeXl5RNSqVSuV63744YdE5O/vn8+0r1tNrFbrwIEDtTCfWVBQIO9qUO2TQn25aD/GRishPH/+PBH98pe/VL/0iBEjiGj+/Pnql64FYz/G0dtvv63ZuXuNXC80hlZCeODAASIaO3as+qXT09PlGrvambu/f/9+69atGfsx1djm7rdt28Y9lv/h0v0YG62EUK5nXm0pO9VMnTqVtPGsqsTej3EkNxdo3bq1pubuXbofY6OVEMrL+rfffpulem5uro+Pj0ae901LS5PLFmvt+krO3S9dupR7IA+5ej/GRishfPbZZ4lpaU1Jriw2YMAA3jcf7fRjHJ09e1ZTc/eu3o+x0UoI5WbXjBs+lpSUyGnxgwcPco1BaKwf4ygqKoocVn9i0QT6MTZaCaF8QI5
3L76tW7cSUZcuXbjm7rXWj3Gkkbn7ptGPsdFECAsKCojIz8+Pdxhms1nuj8v10VSD/RhHckWCoUOHMo6hafRjbDQRwvT0dCJ64oknuAcikpKS5Ny9+u/Jmu3HVHP//n05d//RRx+xDKDJ9GNsflxLi9GtW7eISO4oyCsyMnL48OEnTpxYtWqVnDVRhxBi9uzZFoslNjZWbmKjWf7+/nFxcbGxsQsWLHj66aftl2OT5DLn9su31fe4dnFxcXfu3ImIiJALQDYF3L8FhBBiw4YNRPSXv/yFeyBC2M3dq7k2scb7MdWYTCY5d799+3bHP7UtNtVg3t7eNV0WNaV+jI0m3gmzs7NJG++ERNS7d+8pU6bs2bNn6dKlchdYpRUUFCxZsoSI1q1bp+gKTs7i7u6+YsWKqKio1157beLEic2aNbP/U51ON378ePul3Gpa1q2m47Kyskeu6SiEmDNnjtlsnjdvnsavF+qH+7eAEEKMGzeOiN5j2uDeUW5urtwf9/Tp0yqUc4l+jKPw8HAiWrZsmdPPXFpa+sgrgibWj7HRRAjlAnVnz57lHsiP5C6iKuyP6yr9GEe2ufvbt2+rUK7p9WNsmNcdlQICAvLz8+/cufOTq4aqprS0tGvXrnfv3p04ceKTTz4pX7RvHvj4+Li7uzse+/r62hao9vPzs/Ut7I+bNWsm18MUQoSHh585c8Z+PV8XEhUVdfDgwWnTpsn3KEXNnTt38+bNERERycnJ9gs6NwXcvwWEvPr39PTU1MWY1WodNGhQQECA0v//3d3dXWtFM3u2ufuLFy8qWqhJ9mNs+Bszcn6iY8eOmvr1FhMTk5qa6uvrO2vWLFuzpKZGQmlpqdykgYhKSkrMZrPjcXFxscVikcdFRUVWq1UeG41GLy8vr5q2PdC2oKCgWbNmbdiwYejQoe3bt6/XFjR1PxZCrFq1qgn2Y2y4fwuIY8eOEdGIESO4B/IjeTO3l5fXyZMnFS1UVVUVEhJCrrlpqbR8+XLHqUKn69KlS7t27Vz0euEn8b8Tamp+gog2b968fPlyg8Gwd+/eIUOGKFrLaDSuXLly7Nixr7/++pQpU1xifsJeXl7e2rVrzWbzzp07Bw4c+MhtZ+p7/Mi5jQEDBsycOdO2rVJTw/1bQCxevJiI3nzzTe6BCCHEnj179Hq9TqfbtWuXakWHDRtGRIsXL1atorPIe1bGjBnDPRDXxh/CSZMmkTb6zkeOHJFXViovvqbmPthOlJKSIqcosrKyuMfi2vhDKJ9hTU5O5h3Gp59+KvsBf/3rX9WvPnnyZCKaMmWK+qUbpqqqSvZI3njjDe6xuDz+ELZt25aIeB/WPnfunJ+fHxHNmTOHZQA5OTnyHp0LFy6wDKC+5KxmcHBweXk591hcHnMITSaTXq83Go2Ma+llZGTIZ3Oio6MtFgvXMBYtWkREv/71r7kGUHd3796VPaSkpCTusTQFzCG8efMmEXXo0IFrANnZ2R06dJDdBd5FNYuLiwMDA4koMTGRcRh1gX6MczGH8Jtvvhk7dqxOp4uMjDx//rzK1fPy8rp160ZEw4YN08Jl1caNG4moe/fulZWV3GOpEfoxTsf/mXDLli3yxkudTjdy5MjPPvtMnbqFhYXyptCnnnpKzQ0waqH9uXv0Y5TAH0IhxJ07d+Lj421T1X369Nm9e7fZbFau4oMHD+QqmmFhYZraheLQoUNEFBAQoM2nddCPUYImQigVFRUlJCTIZikRdenSJSEhoayszOmFTCbTyJEjiahjx47qPIZTL3LufsmSJdwDqQ79GIVoKIRSRUXF7t275Uc1IgoMDIyPjy8oKHDW+c1ms1w8s3Xr1l999ZWzTutEmp27Rz9GIZoLoWSxWI4ePdqvXz8ZRT8/v5iYmNzc3Eae1mq1zpgxg4iaN2/OuNDwT9Lg3D36McrRaAhtUlJSIiMjZRTd3d2jo6OvXbvW4LO9+uqrROTt7Z2SkuLEQTpdTk6Ol5eXdubu0Y9RlNZ
DKKWlpUVHR8un0fV6fWRkZAPWwli+fLlM8rFjx5QYpHMtXLhQO3P36McoyjVCKGVlZcXExNieFg0PDz969Ggdn8f/+9//TkQGg0E7y0nVrqCg4LHHHtPC3D36MUpzpRBKeXl58fHx/v7+Moq9e/fevXt37Te77Nu3Tz6gtGPHDtXG2XhyOVb2uXv0Y5TmeiGUiouLExIS2rVrJ6MYFBRU03zGJ598IldnWrNmjfrjbIzKykrZJd6yZQvXGNCPUYGrhlAymUy7d+/u0aOHjGJAQEB8fLz9NhKpqak+Pj5EFBcXxzjOBuOdu0c/Rh2uHUJJzmf0799fRtHX1zcmJiYnJ+fy5cvyqnXWrFncY2y4X/3qV1xz9+jHqKMphNDmxIkTI0aMkFF0c3OT74GTJk1ifECp8c6dO6fT6dSfu0c/RjVNKoRSenp6dHS00WgcN27ciBEjuHb8dCK5Akh0dLSaRdGPUY0mVuBWQlZWVvv27eUK89xjaaxbt26FhISYTKbz58/LLQOUdurUqcGDB3t6emZkZHTp0kWFij9neu4BKCU4ONjDw6MJJJCIOnfuPHfuXCHEggULVChnNpvlMh+LFy9GAlXQZN8Jm5jCwsKuXbvm5+cfPXr02WefVbTW+vXr58+fHxwcnJGRYVsMG5SDELqMjRs3vvzyy506dVq9erWPj88j15mvae9b+y1rapeXl9e9e/eioqKkpCTbXbugKITQZchZO4vFcuPGjcacp/Y9pHJzc/Py8saMGZOYmNjYEUPdIISuxGQybdu27fTp03VZQ76m7Wt+0rJly6ZOnRoUFOTUsUONEMKfnZ/cQ6p3795No6HlKhBCAGZNdooCwFUghADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjD7f8l4ls08HQK1AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3de1RU5f4/8M+eCxdBYJQBFVAUFMXULFELE1NMzKFOh6ATOkfTdWx95TTaxdBWhnr65ZjlGVwrz0pXGoVWqOUBFBXzqFhKEpoJyMhFEZD7gNzmvn9/bM/IIUSG2XueuXxeqz+S2ZfPwLxnP3vvZz8PRdM0IITI4ZEuACFnhyFEiDAMIUKEYQgRIgxDiBBhGEKECMMQIkQYhhAhwjCECBGGIUSIMAwhQoRhCBEiDEOIEGEYQoQIwxAiRBiGECHCMIQIEYYhRIgwDCFChGEIESIMQ4gQYRhChAjDECJEGIYQIcIwhAgRhiFEiDAMIUKEYQgRIgxDiBBhGEKECMMQIkQYhhAhwjCECBGGIUSIMAwhQoRhCBEiDEOIEGEYQoQIwxAiRBiGECHCMIQIEYYhRIgwDCFChGEIESIMQ4gQYRhChAjDECJEGIYQIcIwhAgRhiFEiDAMIUKEYQgRIgxDiBBhGEKECMMQIkQYhhAhwjCECBGGIUSIMAwhQoRhCBEiDEPYm+81X6qQogqpmLIY0rUgp+CMISzsKowpi/H6zUt8TRxXEVemKev56qWwSyXhJQuGLiBVHnI2ThfCYnXxXOXcJn3T7qDdOwJ2lKpLo5RRdbo60wKhrqET3SZ68jwJFomcioB0Ada25e4WCqjT40/78H0AQOItCS8OP9J6JEmcRLo05KScLoSn7p2K9Y5lEggAvgLf8snlQ/lDyVaFnJlzNUeb9c2thtYQ15CeP8QEIrKcK4QaWgMAXnwv0oUg9IBzhVAsEPOApzKoSBeC0APOFUIhJRwpHHlDfaPnDwu7Cm9pbxGqCCEnCyEAPO/9/PG24/W6euafXcauxFuJm2o3ka0KOTOKpmnSNVhVlbbqiRtPjBKOetf/XQElSG1IrdBU/Drp10BhIAAoNUojbQSAv9/5u4bW7B29FwC8+F6jhKMI140cl9OFEABuqG+8W/Pu+Y7zPODN8ZzzaeCn413HMy95XvXsNHb2Wj7OJ+7wuMNWLxM5C2cMYT8ud12W3ZGFuoZ+Hfw16VqQs3C6m/X9EwvE+Z35ReoitVHtxnMjXQ5yCk53YaZ/wS7B04dMbze0/9j+I+lakLPAEPYW5xMHAEdaj5AuBDkLPCfsTalRhhWFifii+qn1QkpIuhzk+PBI2NsE1wmT3SarDKqz7WdJ14KcAoawD3EibJEi68EQ9oE5LTzadtRAG0jXghwfhrAPU92nhrmF1evqf+r8iXQtyPFhCPv2ks9LgC1SZBUYwr7dv1GhOkIDXj1G3MIQ9m3GkBljXcbW6GryO/NJ14IcHIbwobBFiqwDQ/hQzI2Kw6rD2CJFnMIQPtRsj9mjhKNuaW9d7bpKuhbkyDCED8UDHrZIkRWwHEIHm8iBuUZ6SHWIdCHIkZkdQqeayGGu51w/gZ9SoyxSF5GuBTks80LobBM58Cn+iz4vAsARFbZIEVfMC6FpIodlw5atGL7i7ISzOlrn2KdMrwhemXN6zqlVp0gXghyWecNbOOFEDnPFc4u3F7e0tJSWloaFhZEuBzkgM46EzjmRg1AolEgkAHD06FHStSDHZEYInXYih7i4OAA4csSRW92IIDNC6LQTOSxatMjLy+vy5cuVlZWka0EOyIwQOu1EDq6urosXLwZskSJumHd11GkncsAWKeKOeaOtOe1EDl1dXX5+ft3d3Xfu3Bk1yu7fDrIpZg95aO5EDuFu4UXhjtDd5M9//vMPP/zw2WefrVmzhnQtyKFwOO5oja4m5HqIhtbI/GSKQAUFFEc7so4DBw4sW7Zs/vz5P/6Ig3MjNnE7+G9WW1ZCZYLaqF46bOn+Mfvteizd9vZ2Pz8/nU539+5dsVhMuhzkOLh9lCnWOzYnJGcof+iBlgNxFXHdxm5Od8epoUOHzp8/32Aw/Pvf/yZdC3IonD9
POG/ovDPjz/gKfLPasp4vf/6e4R7Xe+QOXiNFXLDSXBQl6pKFNxfW6GpmDJmRE5rjK/C1wk5Z19zcPGLECIqi6uvrRSIR6XKQg7DSk/WT3CZdCLsQ6hpa0FUwVzm3Wldtnf2ya/jw4fPmzdPpdNnZ2aRrQY7DesNbBLsE503Im+o+tURd8kzpMzc1N622axZhixSxztpTo6kMqiVlSy52XhwhHHEi9MQ092nW3Lvl6uvrAwIChEJhQ0PD0KEO/gQJsg5rD/Qk4otyx+c+5/Vcna7uWeWzP3f+bOUCLOTv7//000+r1eqcnBzStSAHQWC0NQ+eR1ZI1ss+L6sMquduPnfqnp09tD5r1iwA+OSTTyoqKkjXQp5Opzt27NjJkyf379/f0tJCuhz7RBOiN+pX3VoFv4JLocsh1SFSZQzc9evXU1JSJkyYAAAURVEUBQDh4eEpKSlFRUWkq7M2tVqdm5srk8n8/PwAgGmZ8/n8yMhIhUJRW1tLukB7QiyENE0baePaO2vhV+AX8tNvpROs5GH0ev25c+dkMllgYKDpa4uJ3+zZs729vU0/DA8P37Rp09WrV0mXzK3Ozs7Dhw8nJiZ6ef3Ps90CgWDOnDlC4f0eUXw+PyoqKjU19c6dO6RLtgMkQ8iQ18mDs4PF/uIdO3aQruU+vV6fl5cnk8lGjhxp+pyNHj06KSlp3LhxALBq1aqei/n7+5sWCw4OlslkeXl5RqOR9PtgTWdnZ2ZmplQq7XktKjw8XCaTeXp6AsCXX35J07RKpcrIyJBKpR4eHj0XS0lJKS0tJf0mbBf5ENI0/dnez5jDy6ZNmwiW8chQJSUlAUBoaOi9e/f6XLHnU05BQUEymSw3N1en05F6RxZqaWlJS0uLj4/vM1RarZY5PU5ISOi14sNC65xN90eyiRDSNJ2ens40ZtasWWMwGKy56+7ubuYT07N5OW7cOCZ7psWOHTtGUZSrq2thYeHDNmUwGPLy8pKTk0NDQ02b8vX1lUqlmZmZWq3WKm/IUk1NTWlpaRKJxMXFhXkLPB6POdmrrq42Lfb2228zv6i2traHbWqAv1snZyshpGk6KyvL3d0dABITE63wee3n27q4uLjXwtXV1b6+vgCgUCgGuH3mQk7PURKHDRvGpFGj0bD9bljQ0NDAZK/nqd3DLrScOHGCoiiBQPDzzz8PZOPO03QfBBsKIU3TZ8+eZc74JRJJV1cXF7sYxHmLwWCYP38+ACxevHgQHxcmjeHh4abd+fj4xMfHp6WldXR0WPyGLHX79m2FQhEZGcnj3b9f5erqGh0drVAo6uvr+1ylvr5+xIgRACCXy83d3cPOt+296W4J2wohTdMFBQXMMScqKqqfdo65mpub+2xiyeXysrKy/tfdunUrAPj7+9fV1VlSQ3l5OfNxZ06AAcDd3V0ikaSlpfU6ybSCioqKXsW4ubkxxbS2tvazotFoXLJkCQDMmzdPr9cPugBT0z0k5MFItnbXdGeFzYWQpuni4mLmlsCTTz7Z0NBgyaYaGxsH3sTqU35+vlAo5PF4ubm5llTSU2Vl5eACYLnr16/L5fLIyEjT537IkCFmfRF8/PHHACAWi2tqalisynQP1i6a7uyyxRDSNH3r1q3x48cDwMSJE6uqqsxdnWliRUdHCwSCXtkz62imUqmCg4MBYOPGjebWMBBVVVWs1PlIzKd80qRJpk+5SCSSSqUZGRlmNYkLCgpcXFwoisrMzGSxvF512mzTnSM2GkKapuvq6qZNmwYAY8aMUSqVA1mF9SPMq6++CgARERFcfx/3c8S25IDzxyPM8OHDB32EaW9vZ64zvfnmm4MuaYBMTfdBH7HtiO2GkKZplUrF/Bn8/f2vXLnysMU4+oPt2bMHADw9PQf4FcCKfs5db968OZAtMOdaMpksICDA9AsRi8WWn2tJpVIAmDJlSnd396A3Yi6CTXersekQ0jTd2dm5aNEiplly4cKFni9x2nRRKpVMX5D0dDL96Ux
XcZkyGMxV3Bs3bvxx+X6uOubl5Vl+6/W7774DAA8Pj5KSEgs3NThWa7pbn62HkKZpjUYTHx/PHN9ycnKscBKvVqsff/xxAHjttddY2aAlurq6mPuZPbtrmnqf9OpIzRg7diy799/KysqYve/bt4+VDVqCo6Y7QXYQQpqm9Xr9ypUrmaaI6aM2atSopKSkM2fOWHKhvE9vvPEG9NU9jayurq4ffvhh2bJlPXufMLdzGFOmTNm8efO1a9fY3a9Wq509ezYAxMfHs7tlCzU2Nu7duzcmJsbUdOfz+cnJyb/99hvp0sxjHyGkadpoNMbExIhEIh8fH+bGLuvZYxw/fpyiKKFQmJ+fz8X2LWdqebq7u3t6egYHB/fZy4ct69evZ1q2LS0tHO3CQqamu1AopCiKx+NJpdKH9TSwQXYTQpqmt23bBgDJycnc7cLUPW3nzp3c7YUtzNny8ePHudvFyZMneTzewLunkcV0e2QOjD4+PnK5XK1Wky7q0azxZL3vNV+qkKIKqZiyGCvsbtCMRuPy5cubmppiYmLWrVtHupxHM10w5EhjY+OKFSuMRuPWrVufeuopTvfFCuYX8ssvv0gkktbW1g0bNkydOtX2h8ZjJ4SFXYUxZTFev3mJr4njKuLKNGU9X70UdqkkvGTB0AWs7Is727Zt+/HHH/38/Pbv38/159v20TS9cuXKu3fvRkVFvfvuu6TLMUNoaGhWVlZubu7kyZOVSmVsbOzChQuLimx3ViIWQlisLp6rnNukb9odtHtHwI5SdWmUMqpOV2daINQ1dKLbRE+eZz8bIe7y5ctbtmzh8Xjp6elM72Qnt3PnzuzsbF9f34MHD/L5fNLlmC06OvrKlSsKhcLb2/v06dPTp09fu3Zta2sr6br6wEIIt9zdQgF1evzpZcOWrRi+4uyEszpad6TVnkbmbGtre+WVV3Q63fr16xcuXEi6HPIKCwvfe+89iqK++OIL+52PUSgUrl27try8XCaTGY3GXbt2hYSEpKamGgwG0qX9DxZCeOreqVjvWB++D/NPX4Fv+eTyJHGS5Vu2mjVr1lRWVs6YMYN5YMLJdXZ2Mo90ymSyF154gXQ5lho+fHhqaurly5fnzp3b0tKybt26GTNmnD9/nnRdD1gawmZ9c6uhNcQ1pOcPh/IJjIqbmJgYGxvb0dFh7opffPHFwYMHPT09Dxw4YLrj5MySkpJKS0sfe+wx5nK0Y5g+ffq5c+cyMzPHjh179erVqKio2NjYyspK0nUBWB5CDa0BAC++1yOX5NqJEyeys7O1Wq1Za5WVlb355psA8K9//atnLxynlZGRkZaW5uHhkZGRwVzxdySxsbFFRUVyuXzo0KHZ2dmTJ0/esGFDe3s72aosDaFYIOYBT2VQDWLdDmMHDVYdhL8XjUaTkJDQ3t6+fPnyZcuWEazERlRUVPztb38DgNTU1J7PPTkSd3f35OTk4uLixMREtVq9ffv28PDwkxkZYN35IHqyNIRCSjhSOPKG+kbPHxZ2Fd7S3nrkuuur14/+ffSGmg2VWjKtguTk5CtXroSEhOzatYtIATZFr9cvW7bs3r17L7/88qpVq0iXw63AwMADBw7k5+c/9dRT1dXVT+7YAbNmwc9kJmVg4cLM897PH287Xq+rZ/7ZZexKvJW4qXbTI1e82HmxWle9vX576PXQmLKY71TfMY1b68jJydm1a5dQKDxw4ECvoWyd0/vvv3/x4sWgoKDPP/+cdC1WEhERceHChf8cOOBbXQ2XL8Mzz8Brr8Hdu1Yug4UQvj/ifU++58Kyhekt6d+qvl1wc4FKr9oWcP+cXqlR3lDfuKG+0WHs6DR2Mv9fq6sFgMJJhXkT8lb7rnbjuZ28d/IvlX8ZcW3E61WvF3YVWl5V/+rr65knJLZt28YMnunkzp49+8knnwgEgm+//XbYsGGky7EeHo83LzERysogJQVcXODLL2H8eNi8GdRq6xXBSue3ku6S2LJY76veoqui2LJYpfr
BU7AeVzzgV+j1X1x5XM/VW/Wtnzd+HlkaaVogvChcXidv1DX2XKz/vqPM1LnNzc2PrNZgMERHRwPAokWL7HqwvZiYGGCj72hDQwPzFOKHH37ISmGkDBkyBAAG/zRpVRUtldIANAAdFESnpbFa3UPZVgfuou6i5Opk8W9iJoquha7xFfGZrZl6o55mL4QfffQRAPj5+d29e5flN2BdrITQaDTGxsYCwNy5czl6MMVqLA0h48wZeurU+1F89lma+wejCEyN1o9wt3B5gPzOlDuZIZnxongDGA6pDr1Q/sKY62M21GxocWdh5q3Lly9v3ryZoqh9+/Zh9zQAUCgUWVlZIpHo66+/tsfuaex79lm4cgXS0kAshv/8B6ZPh7/+FRoaONwj1ym3RJWmamvt1nHXxzEHRqqAgv8HsbGxfY5xMpAjYXt7OzOI2/r16zmr2nosPxJeu3bNzc2NoqijR4+yWBgp7BwJTVpa6ORk2sWFBqBFIloup7kZ78umQ2hS0Fmw+vZql19cQAYA4O3tvXr16l6TGQwkhEuXLgWAJ5980jFGs7QwhB0dHRMnTgSAN954g93CSGE5hIwbN+jnn7/fOg0Lo48dY3PjNE3bSwgZKTtSwAd6DmQ0bdq01NTUpqYmegAh3LdvHwB4enr2OVCSPbIwhK+99hoAPPbYYxzNOGB9nISQkZtLh4ffj2J0NM3q3FL2FELThZni4uLk5GTT0EYuLi4SiYSZW+JhIbx58yYz8ctXX31l5bK5Y0kIMzIyAMDNzY31MWkI4jCENE1rtbRCQXt70wC0UEjLZDRLYy7aZQiZf2o0mszMzPj4eNMYeACwdu3aP47PqdVqZ86cCX3NpGfXBh3C27dvMw2HPXv2cFEYKdyGkHH3Lr1yJc3j0QB0YiJN03RODj1jBu3mRvv60n/7G61SmbtJOw6hSU1NjVwuN00qxJz1ff7556Y/BtNFOyQkhMUZZmzB4EKo0+mYsSri4uIevbRdsUYIGQUF9Lx5dHExnZdHCwT0okX04cO0QkF7edHPPEObefPZEULIYL7aly9fbhot18vLSyqVbtu2jRk97eLFi1YumGuDC+HGjRsBICgoaCD3VO2L9UJosmABHRLy4KrpV1/RALSZfxHbuk9ouZ07d9bU1OzZs2f27Nn37t37+uuv33vvPZqm//GPfzCDZzq57u7uQ4cO8fn8b775xqm6p3FCo4Fz5+Cll8D0GGp8PLi4wMmTZm3G0UKIkPXcvg16PfSYGh3c3CAoCCoqzNqMo4XwrbfeCggIWL169aVLl5jm6EcffURR1KZNmy5dukS6OvLc3d3j4+MNBsOrr77a0sJCDySn1tUFANDr0Wc3t/s/HzBHCGFtbe327dvb2toAgJkNhrkwU1tb+9VXX23YsGHdunU6nY55WI50seQxg4jeuXNn9erVpGuxW4WFMH8+1NQAAHR2/s9LKhV4mjewoB2HUKvVZmVlJSQkjBkzZsOGDUajEf57i6KgoGD16tWmWem3b98+c+bM8vJy5rFxJ8c8ryQSiY4cObJ3717S5dibujpYtQoiIuA//4H0dBAKobT0wavt7dDQABMnmrdN1i8XcQdv1veCN+t7IXCz/rnn6NGjaVNn5rQ0GoA+f96sDdtTCLHbWi/Yba0XbrutTZrUR7e1n36ihUL6uefoo0fp3btpkYhevNjcbdtHCLEDd5+wA3cvZDpw5+bSs2bRrq60WEwnJdHt7ebuwaZDiI8y9Q8fZeoFH2VijdqozmzNjK+IFxQKmPgFXAtIrk5er1gPFj9Z/8svv7i4uFAUlZ2dzUHtVsXKk/U7d+4EAJFIdPv2bbYKI4W1EBoMdFoaLRbTADSPR0uldEMDGwX2zbaujharizfUbAj6PeiF8hcOqQ7xgR8vis8Mybz92G15gHxYNws9PCIiIjZv3kzT9MqVK+vq6h69gqNbt25dbGysSqWSSqW2NkkDGWfOwPTpsHw5NDbef8r+q69ALOZuhzY
RwjZD256mPXOUcyYXT95ev71R38iMc1E9pTpjbEasdyyfYnPYheTk5Ojo6IaGhhUrVtDkhny1EcysLyNHjjx//rxcLiddDlHl5ZCQAAsWwLVrEBQEaWlw5gxMncr1bkmG0AjGCx0XXq96fdTvo16vev2njp98+D6rfVf/OvHXovCiZP9kX4Hvo7diPmb+M39//5MnTzKNMScnFouZ+c82b978M6EBcAnr7ITNm+Gxx+DQIfDwgJQUUCrhr3+1zs5JztT7RMkTzyif2dO0R21UL/Ja9O3Yb+um1n0++vMnhjzBdUn+/v7MTKAbN27Mz8/nene2b968ee+8845er//LX/7iVN3ZjEbjyYMHITQUtmwBrRZWrICbN2HzZnBzs1oNJGfqfcrjqUBhYLJ/ctljZSdCT7wiesWVcmWlnoFYvHixTCbT6XRLly7F7mwA8OGHHzLd2V5//XXStVjJ5cuX58yZE7N06YnAQIiIgAsXYP9+6HEj2jpIztS7I3BH1ZQqeYB8rMtYy8sYhO3bt0+fPp2ZRJJIATZFIBCkp6d7eXkdPnz4iy++IF0Ot6qrq5cuXTpr1qyLFy8GBgYa1q+H/Hx46ikixZCcqdeT50kByanhXV1dMzIyhg4dmpaWlp6eTrASGzFu3DimN+natWtLSkpIl8OJ7u5uZiamgwcPurm5Mb0glyQkAEXso+g4M/XGxMRIJBJzZ/kMDQ395z//CQD/93//p1QquSnNniQkJCxfvryzszMhIaG7u5t0OSzLysoyzUkokUhMcxWSrcpxZuo9ePBgVlaWp5lPkQDAqlWrEhMTOzo6li5dau4cow7ps88+CwsLu379OjMQhmO4cuVKVFTUCy+8UFlZyczam5WVNXYsmfOgXhxnpl5L7N69e+zYsQUFBR988AHpWsjz8PA4ePCgi4vLrl27MjMzSZdjqebm5rVr10ZERJw/f3748OEKhYKZv550XQ+QnKnXdnh7e3/33XdCoXDHjh25ubmkyyHviSee+Oijj2iaXrVqVW1tLelyBkmn06WmpjKTwPJ4PJlMVl5evnbtWlubcoPkTL02JSIiIiUlxWg0Llu2DLuzAcBbb70lkUiampoSExPtsTvb6dOnp0+fvm7dura2tujo6CtXrqSmpnp7e5Ouqw8kZ+q1NRs3blywYEFDQwMzfyjpcghjJq4aOXLkuXPnPv74Y9LlmKGsrEwikSxcuLCoqGjChAlZWVm5ubmTJ08mXddDkZyp19bweLy0tDRfX98TJ04oFArS5Twa198UYrH4yy+/5PF4H3zwwcWLFzndFyuYX8jMmTOPHTvm4+Mjl8t///13iURCuq5HYeVZDAtn6h0Io9EYExMjEol8fHxkMllubi5HM1oeP36cGSw4Pz+fi+1bTq/X5+XlyWQyd3d3T0/P4ODglJSU4uJijna3fv16ABg9enRLSwtHu7CQSqXKyMiQSqVCoZCiKB6PJ5VK6+vrSdc1ULb4POEf6fX6lStXAoBbjx59o0aNSkpKOnPmDOtpfOONNwAgNDT03r177G7ZEl1dXT/88MOyZct6ntj4+j7o4z5lypTNmzezPmaMVqtlxk2Oj49nd8sWamxs3Lt3b0xMjOnmMJ/PT05O/o37uXXZZQch1Gg08fHxADBkyJCcnJzr16+npKRMmDDB9OEbNmyYVCrNzMxka9AKtVr9+OOPAwBzckhWV1dXZmamVCr18npwHyg8PDwlJaWoqEitVufm5spkMtOwVwAwduxYmUyWl5dnNHNShIcpKytj9r5v3z5WNmiJxsbGtLQ0iUQiFApN2YuMjFQoFDU1NaSrGwxbD2FnZ+eiRYsAwMfH58KFCz1fYtIYHh5u+vD5+PjEx8czQ49auF+lUsnc909PT7dwU4NjamL17H7AZK/PgapMbdSeA2GNHj2aSaPBYLCwnu+++w4APDw8SkpKLNzU4FRVVSkUiujoaNMkXKbs1dXVESmJLTYdQpVKFRkZCQD+/v5Xrlx
52GLl5eUKhYJZkjFkyBCJRJKWlmZJe3LPnj0A4OnpqVQqH700S5qbm5mveVMTi8fjRUZGyuXyP0751ieDwcCkMSAgwPQLEYvFTGNBq9UOujapVMo0evsc44cjlZWVzB+X+m/fTjc3N+aP28rS9IDE2W4I6+rqpk2bBgBjxowZYAxY/4O9+uqrABAREcH16GwcNbH+2HQfPnz4oJvu7e3tYWFhAPDmm28OuqQB4uiL1TbZaAhv3brFDIs2ceLEqqoqc1e/ffs2K00XlUoVHBwMABs3bjS3hoGwWhOLSeOkSZNMn2mRSCSVSjMyMsxquhcUFDDDZGVmZrJYXq86OTrFsFm2GMLi4uLAwEAAePLJJxssG+WqnyNMbW3tQLaQn58vFAp5PF5ubq4llfREsIl1/fp1uVxuyRGGuXEvFotZvApihYtttszmQlhQUMBcdo+KimJxYt1+zrXKysr6X3fr1q3MeamFRydTE8uUPXd3d1JNrIqKisF9ERiNxiVLlgDAvHnzLLk5xJy7Jicnh4Q8eATH19fX8nNXu2NbITx79ixzKVwikXA0NrvpqqNpuhj471XH0tLSPlcxGAzz588HgMWLFw/ior+NN7GYpntkZKRpvnFXV9fo6GiFQvGw+9319fUjRowAALlcbu7u+rmKm5ubq9PpLH5D9seGQpiVleXu7g4AiYmJVvgi7OzsZO6/9Xymk0njH3ufVFdXM8dnhUIxwO0z2WOuZNhFE6uhoWHgTfcTJ05QFCUQCH7++eeBbNyUPX9/f9MvJDg4mN37mXbKVkKYnp7O/O3XrFlj+U0ts3R3dzNp7NkTZdy4ccznw7TYsWPHKIpydXUtLCx82KZMTazQHrO32l0Tq6mpqc+mu0KhqK6uNi329ttvM7+ofs4aBvi7dXI2EcLP9n3GnJls2rSJYBmP/LZOSkqCvrqzmVYcNWqUacWgoCB7b2K1tLSkpaXFx8f32XTXarWzZs0CgISEhF4r9tPKKDLNZ4wEyHYAAAc5SURBVIT+i3wI5XXy4MJgv4l+O3bsIF3LfQ87b0lKSho3bhwArFq1inamJtbDQiWTyZgOPV9++SU9qPNtRJMNoZE2rr2zFn4FfiE/vZpM77D+6fX6c+fOyWQy5pYJgzloz549u2cTKzw8fNOmTVevXiVdMrc6OzsPHz6cmJjYsyMrAAgEgjlz5vQ8mYyKikpNTb1z5w7pku0AsRDqjfpVt1bBr+BS6HJIdYhUGQPX814WRVFMFJ22idWr4zhzhDT3HixiUDSJR8i1tHZp5dLDrYc9eB7fj/v+Oa/nrF/DoL3zzjuffvrpzJkzv/nmG6Z16sx0Ot2pU6cEAkFtbe2LL744bBgLM2c5G4H1d9lp7PxzxZ9P3Tsl4ouyQ7Of9nja+jVYgpm74p133sEEAoBQKGTu3aNBs/aRUGVQLSlbcrHz4gjhiBOhJ6a5T7Pm3i1XX18fEBAgFAobGhqIDxqLHINVj4R1urpFZYuudV8Ldgk+Nf7UeNfx1tw7K77//nuDwRAbG4sJRGyxXghvaW8tvLmwTFM2yW3SqfGnAoWBj17H9hw5cgQA4uLiSBeCHIeVmqMl6pKFNxfW6GpmDJmRE5rD0eyfXGtubh4xYgRFUfX19SKRiHQ5yEFYY5LQgq6Cucq5NbqaeUPn/Tj+RztNIAAcPXpUr9dHR0djAhGLOA/h2faz82/Ob9I3xXrHHg85btezVmBbFHGB2+ZoVltWQmWC2qheOmzp/jH7hZSQu31xra2tzc/Pz2Aw3L17VywWky4HOQ4Oj4Q1upr4ini1US3zk30d/LVdJxAAsrKytFrtvHnzMIGIXRxeHQ0QBuwZs6dSU5kyMoW7vVgNtkURR8h0W7M7XV1dYrFYrVbfuXOn5/NKCFnOGldHHUB2dnZXV9fTTz+NCUSsYzmEvtd8qUKKKqRiymLY3TJZ2BZF3DE7hIVdhTFlMV6/eYmvieMq4so0ZT1fvRR2qSS8ZMHQBexVSJ5
arc7JyaEo6qWXXiJdC3JA5oWwWF08Vzm3Sd+0O2j3joAdperSKGVUne7BvLahrqET3SZ68jz72YjdOXnyZHt7e0RExJgxY0jXghyQeVdHt9zdQgF1evxpH74PAEi8JeHF4UdajySJk7gpzyZgWxRxyrwQnrp3KtY7lkkgAPgKfMsnlw/lO/LzBDqdLjs7GwD+9Kc/ka4FOSYzQtisb241tIa4hvT8oWMnEADON56fvGEyfZbuOUg7Qiwy45xQQ2sAwK47fw7Ct/pvL0RfWPTFItKFIIdlRgjFAjEPeCqDirtqbI2BNvy79d8AECfCE0LEFTNCKKSEI4Ujb6hv9PxhYVfhLe0tlouyGec6zjXqG8PcwsLdwh+9NEKDYt4tiue9nz/edrxeV8/8s8vYlXgrcVPtJg4KswlHWo8AQLxPPOlCkCMz7+ro+yPe/771+4VlC9/1f1dACVIbUlV61baAbcyrSo3SSBsBoMPYoaE1zDHTi+81SmiXXb2MYDzaehSwLYo4ZnYH7hvqG+/WvHu+4zwPeHM853wa+KlpvCbPq56dxs5ey8f5xB0ed5idYq3rp46f5ijnjHUZW/FYBelakCMz+1GmiW4TM0My+3yp4/EOi+uxIUxb9GXRy6QLQQ4On6LoGw30963fA0CcD7ZFEbcwhH0r6Cq4rb0dKAyc6TGTdC3IwWEI+3ZEdQQA4kRxFFCka0EODkPYt6NtRwHbosgqMIR9+K37t1J1qb/Q3+4mq0H2CEPYB+a66EveL/EpPulakOPDEPbBdEJIuhDkFHC0td6UGmVYUdhwwfC6KXUCisD8jcjZ4JGwt0OqQwDwoveLmEBkHRjC3pgTQrwuiqwGm6P/o1JbGXI9xJPv2TClwY3nRroc5BSwxfU/mvRNszxmhbqGYgKR1eCRsA8G2oA3J5DVYAgRIgwvzPTmqCP5I5vljCF0wpH8kS1zuhA650j+yJY53dVR5xzJH9kypwuhE47kj2ycczVHnXMkf2TjnCuEzjmSP7JxzhVCJxzJH9k+5wqhE47kj2yfc4UQnG8kf2T7nK7bWpW26okbT4wSjjKN5F+hqfh10q+BwkDoMZL/3+/8XUNr9o7eC/Y8kj+yC04XQnCmkfyRXXDGECJkU5zunBAhW4MhRIgwDCFChGEIESIMQ4gQYRhChAjDECJEGIYQIcIwhAgRhiFEiDAMIUKEYQgRIgxDiBBhGEKECMMQIkQYhhAhwjCECBGGIUSIMAwhQoRhCBEiDEOIEGEYQoQIwxAiRBiGECHCMIQIEYYhRIgwDCFChGEIESIMQ4gQYRhChAjDECJEGIYQIcIwhAgRhiFEiDAMIUKEYQgRIgxDiBBhGEKECMMQIkQYhhAhwjCECBGGIUSIMAwhQoRhCBEiDEOIEGEYQoQIwxAiRBiGECHCMIQIEYYhRIgwDCFChGEIESIMQ4gQYRhChAjDECJEGIYQIcIwhAgRhiFEiLD/DwyUnhvJH+ZGAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3dfVzN5/8H8Pc51anT3SlichfNQguzGKu5b6i5t9y3RpOwmES5KzdDxMTXRmtuYsPC3I3vl9xkfrmZQiS5KRm5q3Qnnc7pnOv3x2WfpZLqfD6fqzPv52MPD+ez03V9Dl7n+txc1/sjIYQAQogdKesdQOhthyFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMGbLeAX2Smpp648YNS0tLKysrKysrS0tLS0tLY2Nj1vuF9BuGsLoCAgLu3r27f//+iv/LxMTE+lVyubziRm57o0aNpFI8BkEvSQghrPdBDxw9etTd3d3Q0LBXr15qtTo/Pz8vLy8/P7+goECtVte0NQMDg48//jgqKqpNmzZC7C3SLxjCNysqKmrfvn16evqKFStmz55d7v8WFxcrlcri4uLcV1W6kW7Py8tTKBRFRUU3b95s2bIlkw+F6hCC3sTPzw8AOnbsqFKpeGlQpVKNGTMGAHx8fHhpkB8qFRk7lri7E09PUlTEem/eIjgSvkFcXFzv3r2NjIwSExOdnJz4ajYtLa1NmzYSiSQ1NdXe3p6vZnWyaxfcuAGLFsGqVWBuDn5+rHfobYGXB6ry4sWLiRMnEkLmz5/PYwIB4N133x03bpxarV66dCmPzeokKQm6dgUAcHGBq1dZ781bBENYlblz5965c6d9+/bBwcF0y7Jly0JCQlQqle6Nz58/39DQMDo6+tatW7q3VktqNTx58s9L7rAIL96KifXxcN117tw5AwMDQ0PDhIQEuuXq1asymUwikcTHx9euzdzc3KVLlxb9fcY1fvx4ABg/fjw/e1wLixeT+vXJ/v2EELJrFwkOJoSQlSvJ5s3MdklQdfK8F0NYOaVS6ejoCADz58+nW9RqdadOnQBg6tSptW72008/BYDw8HD6MiMjQyaTGRgY3Lx5k4edrqmUFGJiQiQSEhtLCCEqFRk3jri7k1GjSHExg/0Rwc6dJCSEEELCw8mKFeTHH0lMDDl6lJw/T1JSSGYmKSwUf6fwwkzlgoKCVq5c2aZNm8uXL5uYmADA8uXL586da2dnd+3aNQsLi9o1+9///tfDw8PGxubu3bvm5uYA4OPjs3nzZm9v761bt/K4/2+m0cDHH8PFizB5MvzwAwAAIXDoEAwcCBKJqHsipjlzoHt3cHeHs2chLAwOHarkPVIpKBRgbQ2WlhNatcouKVEoFJaWlgqFQqFQ0JlS3BZbW1sbGxtd90r83Nd9ly5dMjIykkql//d//0e3pKammpiYSCSS//3vfzo2/sknnwDAypUr6UtuMExNTdWx5ZoJCyMApHlzUlDwcktkJAEgo0eLuhsiCw4mhw8TQkh8PBk5kvj4EE9P4uZGOncmrVuTRo2IqSkB4P5ramtbdXw
CAwN13ymctlaeSqXy9vZWq9WBgYGurq4AoNVqv/rqK6VS+dVXX/Xr10/H9ufPn9+/f/+VK1f6+flZWFjY2dl98cUXP/3007Jly6Kjo/n4BNVw6xYsWgQAEBkJdFTPzISgIACAIUNE2gcmPvgAzpwBDw+Ij4d+/WD8+EreU1oK+fmQnw95eTsKCp7l59OpUfRXOlOK29KsWTMe9kr3HP/LhISEAICDg8OLFy/oltWrVwNA48aNnz17xksXdDBcsWIFfSn2YKjRkG7dCACZMOGfje7uBIAMHCjGDjCRnU3i4+vmeS+G8BVJSUkymUwqlZ4+fZpuSU9Ppydv++klRD4cPXoUAOrXr1/w96HgxIkTAcDLy4uvLqqyZg0BILa2hPtO2bKFABArK/LggRg7wMS4cUQqJRs2sN6PSmAI/6FWq52dnQHA39+fbtFqtW5ubkLEo1u3bgCwfPly+vLevXt0MLxx4wa/HZV39y4xNycAZN++l1sePSL16hEAsm2bsF0z9PvvBICYmpLbt1nvSiUwhP/49ttvAaBFixaFf1+nXr9+PQA0aNDg6dOn/PZ17NixcoPhpEmTAGDs2LH8dlSWVqtN9vEhAGTMmH+2DhtGAIiHh3D9MpaXR5o2JQBkzRrWu1I5DOFLN27coNc/jx49SrdkZGTQWxF79uwRosfu3bsDwLJly+jLe/fuGRsbGxgYpKSkCNEdIWTjxo0AsKJPH5KV9XLTzp0EgFhakr/+EqhT9iZMIACka1dSWsp6VyqHISSEEI1GQy+E+vr6chvphdARI0YI1Onx48fLDYZ0ucaYssMUfx48eGBlZQUAu3fvfrkpK4s0bEgASFSUED3WCSdOEImEGBuT69dZ78prYQgJISQ8PJxe/8zNzaVbfvrpJ5qQx48fC9cvHQyXLl1KX/7111/GxsZSqfTatWu899W/f38AGDRoELflJ39/VfPmpE8fotXy3l2d8Pw5sbcnACQsjPWuVAVDSNLT083MzADgwIEDdEtmZqa1tTUA7NixQ9CuT5w4AQBWVlZc+CdPngwAo0aN4rejzZs3l/tO2bdvHwDYNWhQfPcuv33VIZMnEwDSsSPhaSGoQN72EObl5bm4uACAt7c3t3HYsGEAMFCUm2Y9evQAgCVLltCXmZmZcrlcKpVevXqVry4ePnxIv1O2b99Ot+Tm5jZp0gQA/vOf//DVS50TF0ckEiKTEf7+JAXydoUwMzMzNjY2MjJy2rRpbm5u9vb2EomkcePGCoUiJyeHe1tycnLfvn0zMzNF2KUzZ86UGwynTp0KACNHjuSri6FDhwKAR5nrn1988QUAuLi4aDQavnqpU4qKin4dOlRrbk4WLmS9L2/27w1hbi65cIFER6/99tvhw4e3a9eu0tqExsbGEolELpc/evSI1Z727NkTABYvXkxfPnz4kA6GSUlJujf+yy+/AIBCofjr7+ufR44cAQC5XH7r1i3d26+bZsyYAQBf9ulTxw9EKT0MYcUlYWo1SUsjsbEkMpJMm0bc3Ii9PZFI6BzcQR99xEXO2tra1dXV19c3LCwsJiYmOTm5tLR0yJAhADBjxgxWHyguLq59+/aHDh3itnz99dcA8OmnnyYkJKSlpWVlZdWuvE1WVlbDhg0B4KeffqJbuOmOq1at4mfv656KC0HrOD1cylS2FIpMBhs2QHo6VFzqbmYGDg7QunXsRx9lN2rUunVrBwcHOgGtnOTk5A4dOshksrS0tMaNG4vxESoghEjKLCBKT093dHQ0NTXNzc0t+7YqaplWuj0jI2P48OGtWrU6evQobd/X1zcqKqpLly7x8fEGBgZif07hlZSUfPjhhykpKfPmzaOzL/QA62+BmgsOJkeOEEJIfDyZPJnUr08AiLU1cXUlvr4kLIwcPEjS0khNznbolZhvvvlGqH2uoVmzZgGAjY1Nx44d7e3t69WrV7vAGBkZ1atXz87OLioqKjs7++TJkxKJxNjYODk5mfVHFEpQUBAAtGn
TprjOzM9+Iz0cCefMgW7dwMMDzp6FHTvgm2+gSROQy3VpkhsM79y5Qy8bMvTnn3/SC7bnzp3r3Lkzt/11BU5fV+NUqVRyA2njxo3btm2bkZGRlpa2dOnSuXPnsvlsArt8+XKXLl00Gs0ff/xBZ1/oB9bfAjUnTCmU4cOHA8D06dP5arB2SkpKaFm32bNn696aUql8+vRpYmKiQqEAgNGjR3fq1EmtVuvech1UUlLSrl074GmhrZj0cCRUq2HCBMjJAYUCtmwBExNeWr1+/Xr79u2ZD4YLFiz49ttvHRwcrly5ItdteC8rJCRkyZIln3766eHDh42MjPhqVheEkCtXrkycOFEikVhZWdHiEfQZO1zxCGtra+6lpaUlnVPxOqGhoYsXL7a3t7969WrV76xr9DCEDx5A797QrRts2sRvw56ennv27PH391+3bh2/LVdTUlJS586dNRpNXFwcXevEl/z8/BYtWuTl5Z0+fZrOlWNuw4YN9CKwVqut5o8YGhqWfSQWl1VLS8uioqIffvhBo9GcOnWqjnzA6tPDEB49Cv37Q48eEBfHb8N0MDQyMrpz507Tpk35bfyNSktLu3btmpiYOH369IiICN7bpwOFm5tbbGws743X1L1799q1a1dYWLhu3bouXboUFBTk5ubSmhFcIQlaRYJ78E5BQUFxcXEVbdra2jo5OdE1YnqG7dFwbXz3HQEgkycL0faIESMA4Ouvvxai8aotXrwYXl3NyK+8vDw6eY0rGsBQ7VaolJSUZGVl3blzJyEh4cSJE7/99tvWrVvXrl27ZMmSgQMHAoCdnV1JSYlA+ywcPQzhxIkEgKxbJ0Tb169fl0qlxsbG9+/fF6L91+FWMx47dky4XkJDQwGgd+/ewnVRHUKsUNFoNPSqTGRkJF9tikYPQ+jqSgDI8eMCNT9y5EjQrcJvTWk0GnpPws/PT9COuMEwLi5O0I6q8LoVKidPnpw1a9aSJUvWrVu3devWffv2nThxokYThn799VcAaN68ud4NhnoYQloQ5eFDgZpPSUmRSqUymSwjI0OgLspZuXIlADRp0oSbwy2cRYsWAUCvXr2E7uh16CTBAQMGlNu+bNmyqs+b5HJ5o0aNHBwcOnfu7Obm9vnnn0+YMOF6maW6Wq22ffv2ALBx40ZxP5Ou9C2EDx++rAsmpFGjRgHAlClTBO2FunnzJr0VcYROAxJYfn5+vXr1AODUqVMidFfO9u3bAUChUFQ82r948WJYWNjcuXOnTp3q5eU1aNCgnj17vnHCEFedmYqJiQGAZs2aKZVKET+WrvQthMePEwDi6ipoJzdv3jQwMJDJZHcFXvCq0WjorQgxnwlDrwC5CvxnWNHTp08bNGgAAJtrNcXi+fPnmZmZN27cOH/+/LFjx2JiYuhcvLLv0Wq1HTp0AIAffviBp70Wg76FcN06AkAmThS6n9GjR4twkrZ27VoAsLW15auscHWwGgzpnKQ+ffpohaymsXv3br0bDPUthLRgwXffCd3PrVu3DA0NjYyMKg6GO3bs2L17d2xs7MWLF2/evPno0SOuVneNcM+E+e2333jY45pYsmSJyIMhPVC0tLS8d++eoB1xg+H3338vaEc80rMQ5o8YQeRyovNTWapj7NixADBp0qRy2yt9JJORkZGNjY29vb2zs3Pv3r2HDBni7e3t7+8/f/78lStXVoyZVqulj0kbzeIBLIWFhfTI8MSJEyJ0l52d/c477wDABlEKYO/du5ceX9Tuy1F8ehbChg0bGkqlmaIUyeQGw/T09LLbx48fP2zYsD59+jg7O7dq1aphw4aVrtkvq0ePHuUapyVAbWxsnjx5IsJnqYg+plucwXDMmDEA0LNnT0EPRDncYLh+/XoRutOdPk1by8nJsbGxMTc3LygokIjyDD0vL6+ff/7Z19c3MjKy6neWlJRwk61yc3PLzr0qKCho0aIFnSdJZWZmOjk55eXl/frrr3SOjvieP39ub2+flZV1/PjxPn3
6CNfR4cOHBwwYYGpqmpSU1KpVK+E6Kmvfvn3Dhg2ztbVNS0vjcR68UFh/C9TA6dOnAaBLly6i9Xj79u1KB0Mdubu7w6slQJmgt+ZcXFyE6yIvL4/Owo2IiBCul4q0Wi19rPI6YWZW8UufQrhhwwYQ/QnvXl5eANChQ4elS5euX79+27ZtBw4cOHXqVGJiYlpaWk5OTk2X523ZsgUArKysHrB+BNLz589pBZpY+rhsAYwfPx4AunbtWip6CXpaWFUvzgz16SGhN27cAIC2bduK2amFhYVcLk9KSkpKSnrde8zMzMoue7O2ti67yob71crKSqVSBQQEAMC6deuYL+E3MzObMWPGnDlzQkJC6MOn+HXixImtW7caGxtv2rRJ/Ho2gwcP7ty588WLF6OioqZNmyZy7zXD+lugBug/lN9//120HmmtFyMjI29v7+Dg4MmTJ48dO3bAgAHdu3f/4IMPWrZsaW1tLZVKa/QHbmJiUvE6DSvcYMg9BofHlu3t7aHMs1DFd+DAAQBo1KhRHR8M9WkkTElJAQBHR0dxups7d254eLhMJtu1axetn/s6hYWF3DWY112boU9avnfvXnZ2dqVF35gwMzMLCAgIDg5euHBh3759eWw5MDAwPT29Y8eOtAQoE4MGDaKD4Y8//jh9+nRWu/Fm1Qxrdnb2ggULGH6j0JpFpqamIhSN1mq133zzDQDIZLJ93MM0+UAL+0okkitXrvDYrC64wXDLli3Z2dm1K3BazqlTpyQSiUwm47GYf+0cPHgQABo1alRES9TWSdUNIS2c/t577/3xxx+C7tDrxMfHA4Czs7PQHWm1WnoKIZPJuEfE8Ih+JQ8fPpz3lmstICDA5NVSPSYmJra2to6Ojq6urgMGDPDy8po2bVpQUFBoaGhERER0dPTBgwfPnDmTnJycmZn57NmzsjcAi4qK6K0IrqY4Wx999BEAfCf8LKtaq24Ir1y58uGHHwKARCLx9fXlHqknmjVr1oDw80u0Wq2/vz9N4MGDB4Xo4tGjR6ampnVnMFSr1fRvtn79+tbW1rUucMpNGKKngh06dOBlUNXdoUOH6vhgWIMLMyqVKiwsTCaTAUCLFi0EXQNeVnZ2dmhoqJmZWbNmzSwsLMLCwgQ6KtZqtfRhLMbGxmWL0vOOHusOHTpUuC6qj94ttLOz475YX7x48ezZs8zMzOTk5DNnzhw8eDA6OjoiIiI0NDQoKGjatGleXl4DBgxwdXV1dHS0tbWli3TLsrCwWL58OdvPVRYdDFevXs16RypX46uj165d4yrSenp6ln2YEe9ycnLmzZvHzdVs3rw5/U3Tpk03btzI7xetVqudMmUKAMjlcqG/X7jB8OLFi4J29Eapqam0rMb/dJuOq1Qqnzx5cvv27YSEBPoV4+joyJ2937p1a+TIkSEhIXzscm0cPnwYAGxsbASq36Oj2tyiUKvVYWFh9CzC1taW30sXVEFBQVhYGH28MwC4ubmdP3+eEBIbG0tnQtBMRkZG8lLKVqvV0qdzmpqaCnfnuix6t3DIkCEi9PU6Go3mk08+AYCvvvqKx2ZVKlXLli0BYOfOnXTL2bNnAcDc3DwrK4vHjmrE2dkZANq2bevp6enj4zNjxoyFCxd+9913mzZtKrsmRvX4Man6OKvi84h0Vvv7hLdv3+YKPHp6evL151sufq6urhULosTGxn7wwQf0DW3atImOjtZlQoZGo/nyyy9pAo8LVrqmnMePHzMfDFevXg0AjRs35n01Y1RUFP0Xzw2GtLzavHnz+O2oUlqtduzYseVO6bt37879o6rC/SZNCAAxMiI2NsTenjg7k969yZAhxNub+PuTefPIggWEDunh4YSnRSE63azXaDSRkZH0rtc777yze/duXVqrGL8qVp1qNJqYmBgHBwf65vfffz8mJqYWk/RLS0u9vb1pAsVZ18OZOXMmAAwePFjMTjnp6en0L27//v28N65SqejlGa6a07lz5+hg+PTpU967K4e
uUGnYsCF38Llr1y56prp9+/aYmJgff/xx1apVCxYsmD59+pdffll2TUx+y5bE2Jg+VK/y/7p1e+V5RHzgYcZMWlpa7969uSGxFn/KNH7c+b2rq+vJkyer84M0iu+++y79wY8++qhGlzRLS0vprRczM7Nq9sijJ0+ejOzW7YqDA0lMFLlrrVZLpx95eXkJ1AWta/jee+9xByn9+/cHgLlz5wrUI/XgwQP6PR4TE0O3cKsZa1ANUakkT56Q27dJQgKJjSV795LNm0lEBFm8mIwdSw4fJoSQ+HjCU0k+fqatabXayMhIegXF2tq6+p+2sLCwdvErS6VSRUZGco8WdHFxqc6YVlpaOm7cOJpAJlWPCCEkMJAAENGXU6xfvx4AGjRoINy4xA2Gv/zyC92SkJAgkUiEHgwrrlChZbt69erFz2pGAZ5HxOfc0YyMDLpaHAA8PDyqrp9L40eLndD46Xg0qFQqIyMj6XcevZbz559/vu7NpaWldOG8QqE4e/asLv3qJCuLmJsTAPL6XeVdRkYG/brcs2ePoB1t2rSJDobcxTOakDlz5gjU4+bNmwGgfv363MPP6U1CU1PTO3fu8NOHSkXGjSPu7mTUKFJcTPh4CiL/E7hjYmJotBQKRWRkZMWvn4rx4/FyyPPnz8sOrW5ubokVDvZKS0vpWm+FQnHu3Dm+uq6lWbMIAKlQh1M4tStBXwulpaX0pH379u10Cx0MzczMhKgn8PDhQ/r3znWXl5dHl6oIsqqwtJR88w1p2JC8WvGtFgRZRfHo0SNa4xUA+vXrx9X2KSwsjIiI4AYrfuNXFj3JtLS0BACJROLp6Zmamkr/l0qlos/ltbKyorc9GMvKIhYWBIBcuCBCb0KUoK8CHZrKDoafffYZAAQFBfHeF51k7+HhwW2hF70//vhjoeYb9+9PAIjOZ7kCLmWKiYmxsbEBAEtLy5UrV65Zs6Zs/ASaFFZWVlZWUFAQrW4glUo9PT1TUlLot4OVldUFUf7RV0tQEAEgn30mdD+vK0EvHG4w3LZtG90i0GC4Y8cO+i/tr7/rD8XGxtJng6ekpPDY0SsSEohEQszNiW5nucKuJ4yPj6cHP9yDKT/55BOR7wQ8ePBg8uTJdLYdnRhZv379iseoLGVnvxwMBR6ZX1eCXlBbt24FgFatWnGD4YABA4CnRxFTWVlZdCFIVFQU3ZKfn0/nV4WHh/PVS+U8PAjAy0s1tSVsCOmEjEWLFkVERAwZMqR///6tW7dmciny3r17vr6+HTp0GDJkyOXLl8XfgTcIDiYApMyhFO+qKEEvqNLS0tatWwNAdHQ03ZKYmMjvYEiLZfXu3Zu7ADFp0iR6y0rwshp0MDQzIzp8FmFDSJ9WxS0X6NmzJ4hV67JSdWRefyUEHgx1LEGvo+joaAB49913ucGQPk5w1qxZujdOVwyamZmlpaXRLSdPnqSrGa9du6Z7+2/22WcEgOhwlitsCOkhAVfEumPHjgCQkJAgaKf6au5cIpMRYR6iIE4J+tfhBsMtW7bQLZcuXaKDoY7Xh3Jzc+n1T67EaFFREZ28sWTJEh13u7oSE3UcDIUNoUKhAABuaiK9e3v79m1BO9VXz54RYUrEi1aCvgrbtm0rNxgOGjQIAAIDA3VpltbCc3Fx4a5/0gXZYq9mHDCAAJDaDuwChlCr1UqlUolEwv25169fHwBEmD2orwSYoS9yCfrXKS0tbdOmTdnjYToY0lLOtWuTLlAyNTW9desW3XL27FkDAwNDQ0Oxj7Z0GwwFDGF+fj4AmJubc1voJUo9elyO2HbufGWGflwcOXWKXLpE0tJITg6p1TUGkUvQV+Hnn38GgBYtWnBP0l21alVycnLtWnv+/DktK7xq1Sq6RalU0iJgCxYs4GePa2TQIKW9/X+XLq3FjwpYBv/+/fvNmzdv0qTJgwcPAKC4uNjU1NTExKS4uFigHvXenDnQvTu4u8PZs/Dzz3DsGKSlvfIGMzOwtASFgv6a5OS0vrC
Qq3fKFTi1tramv4mPjx82bJjIJehfR6PRODk5paambtq0acKECbo3uHPnzh07duzfv5/eeZo9e3Z4eHjbtm0vXbpUrmSOCO5fufJup05Gxsbp6enc/fDq4vsL4R/JyckA4OjoSF8+evQIAN555x3hetR7wcGvzNAfO5Z07046dCAtWxJrayKRlFtWc7pHj6r/cumhx5o1a1h/sJd++eUXALCzs+P9sfIXLlwwMDCQSqXx8fH8tlx9gwcPBoCZM2fW9AcFrDtKD0fp3LGKL1ElPvgAzpwBDw+IjwdnZxg/vvwbCguhoADy8+mvjUpKIseMKVvvlP6an5+fl5eXn59fXFysUqm4VZfMjRo1avny5cnJydu3b/fx8eGrWZVK5ePjo9FoZs+e7eLiwlezNbVw4cKDBw9+//33AQEB3JqeahHiK4E6cuQIAPTr14++vHDhAgB06tRJuB71XrkZ+joLDw8HAGdnZ+YnhBw6v6x58+Y8VuuaP38+ADg4ODCvtE3nJAUEBNTopwQ8J9y1a9fo0aNHjBjx66+/AkBsbGzfvn379Olz/PhxgXpE5RQXF7dq1erhw4eHDx/28PBgvTsAAFqttkWLFrm5uc+fPzcxMZHL5SYmJtYVVLpdLpdXrOyWlJTUuXNnjUYTFxfXrVs3Jh+Kk5yc3KFDB5lMlpaWVv3BUPDDUXqrsOJLJAK5XD5z5syZM2eGhIS4u7uL81DHqv311185OTkajUYikSiVSqVSCQD0ekF1GBsb02tO9AE7lpaW58+fV6vV/v7+zBMIAE5OToMHD963b194eDitlFsdGMJ/uSlTpqxevToxMfHIkSN0GRFDhJBJkya9ePFi1KhRO3fuLC4uzs3NVSqV9DdlVbqRbs/KysrKyuLadHJy6tix4/Llyxl+rrIWL1584MCBjRs3BgYGVvPBWwKGsKCgACpcmMEQiszExCQwMDAgICAkJMTDw4PtYBgVFXXs2DEbG5u1a9cCgFwur+ljdOJx2xoAAAVsSURBVIuLi7lLUPTik7m5OV2pU0c4OTkNHTp079694eHhERER1foZQc5PCSGE0AdEr127lr4MDQ0FgNDQUOF6RJUqLi6mX8kirOGsQmZmJi3BtGvXLoa7IYKrV69KpVITE5NqTg6r2bP1aqTSw1G8RSE+OhgCwMKFC4lg1+HeaMqUKXl5eQMHDhw5ciSrfRBHu3btFi5ceOTIEbpy5Y3EDiEejjLh5+fXpEmTS5cu0cJH4ouOjj5w4ICVlRV95vm/3oIFC3r16lXNNwsYQnpOiCGsC0xMTGbPng2MBsPHjx/Tsv9r165l/pDwOghHwreFr69vkyZNLl++TFfBimnq1KnPnj1zd3enpZZROYKHEK+O1hEmJiZBQUEg+mCo2b17bE5OPSuryMhI0TrVL+KNhOWOTpH4fH19mzZteuXKlQMHDojUZXa2wdSpw06fzlizplmzZiJ1qm8EPyfEkbDuMDY2poPhggULtFqtGF36+0NWFvTqZeHtLUZ3+kmouaMvXrwwMzMru3pQLpcrlcqioiJTU1MhekTVUVJS0qpVqwcPHuzdu5cWQRbQoUMwaBCYmsLVq/D3Q3tQRUKNhOXGPZVKpVQqDQ0NMYFsGRsbBwcHA0BoaKiwg2F+PkyeDACwYgUmsGpChRDvT9RZEydOtLOzS05O3rdvn4DdTJ8OmZnw8ccwZYqAvfwrCHU4qtFo7t+/r1ar33vvPQAoLCykj8iiT6VGbG3YsGHKlCnvv/8+nWDFfwfHj0PfviCTweXL0LYt/+3/uwi4nhDVWWq12sHBISMjY/fu3Z9//jnPrRcUgJMT3L8P4eEQGMhz4/9GAl4dRXWWkZERd8+Q/zPD2bPh/n3o0gVmzOC55X8pDOFbysfHp2XLltevX9+zZw/PTfv5QZcusGkTGBjw3PK/FB6Ovr0iIyP9/PxatmwZERFBV6nT5eoKhUKQE0X0GhjCt5dKpXJ2dlYqlXfu3Cn3vywsLLh
apj3btFleVATW1mVLnoKlJVhagpUVWFm9/L2PDzx7BubmsHUr4I2omsAQvtW0Wu3ixYsTEhK4cokFBQW5ubll3/N51667z59/Q0MbNsCjR7BoEaxaBebm4Ocn4E7/6whY3gLVfVKpdOHChRW3l61lqiguhuxsyM+HvDyu5CkUFLz8Dd14+za4uQEAuLjAzz+L/Cn0HYYQVYKWM6vBD8yZA9whFZ5P1hD+eSE+0NrhAC9rh6OawHNCxAe1GiZMgJwcUChgyxYQ/Xkseg1DiBBjeDiKEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHG/h8pipBjgXiW5gAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAaL0lEQVR4nO3deVxU9f7H8fewK3gRFUsTrlpupKWZpai54QVxRlQ8KEuuaWal5nJd+tl1Sb1lG2iR4XZNFjmCCqgpmhYCLqVyC1tsUXNHE0SQdeb3x+BoZlzAge85M+/n4/4xj8NhzqdH99Xg8PGMxmAwgIjEsRE9AJG1Y4REgjFCIsEYIZFgjJBIMEZIJBgjJBKMERIJxgiJBGOERIIxQiLBGCGRYIyQSDBGSCQYIyQSjBESCcYIiQRjhESCMUIiwRghkWCMkEgwRkgkGCMkEowREgnGCIkEY4REgjFCIsEYIZFgjJBIMEZIJBgjJBKMERIJxgiJBGOERIIxQiLBGCGRYIyQSDBGSCQYIyQSjBESCcYIiQRjhESCMUIiwexED2Ahzp49m5iYmJ+fr9PpOnfuLHocUhONwWAQPYOKnT59evv27bIsZ2RkGAwGBweH0tLS6Ojo4OBg0aORajDCmjh58mRCQkJCQkJWVpbxiIuLy+DBgxs2bLh69WoXF5djx461adNG7JCkFvxxtBqys7NlWZZl+eTJk8YjDRs2HDhwoFarDQwMdHZ2BnDjxo3Y2NjQ0NCDBw86ODgInZfUga+E/5uxvbi4uB9++MF4pFGjRoMHD5YkydfX957ScnNzu3Tpcvr06Xnz5i1btkzEvKQyjPD+9Hp9RkZGSkrKli1bfv75Z+PBJk2aDBo0SJIkPz8/e3v7v/reI0eO9OrVq7y8fPfu3T4+PnU1MqkVI/yD8vLyzMxMWZa3bNly4cIF40EPD49hw4bpdLq+ffva2VXpB/glS5a88cYbDz30UFZW1kMPPVSbI5PqMULgrvY2b958+fJl48GWLVsOGTJEkqSePXtqNJpqPaFerx84cODnn38+aNCgHTt2VPfbyapYdYRFRUWpqamyLCclJeXl5RkPtm7dWqvVSpLUq1evB3ny8+fPd+7c+erVqx988MG0adPMMS9ZJmuM8NatW3v37pVledu2bfn5+caDXl5ekiQFBQV5eXmZ60I7d+7UarUODg6ZmZldunQx19OShbGiCHNzc1NTU5OTkxMTEwsKCowHje0FBwe3a9euNi76yiuvfPjhh4899tixY8caNGhQG5cgtbP8CH///feUlBRZlvfs2VNSUgLAxsamR48eOp1uxIgRjz76aK1evbi4+Nlnn83KypowYcKaNWtq9VqkUhYb4dWrV3fu3CnL8u7du0tLSwHY2tp2795dkiRJkpo3b15nk5w8ebJbt26FhYXR0dEhISF1dl1SC0uL8LfffktMTExJSTlw4EBZWRnuam/UqFGifluwevXqyZMnu7q6Hj9+vFWrVkJmIMWykAjvWaQG4OTk5OPjI0lSQECAq6ur6AERHBwcFxfXrVu39PT0Sn7RT1ZI3RH+8ssvycnJsiynp6cbj9SvX79///6SJA0bNkxRb4SY1tlef/31N998U/Q4pCCqjLAqi9QKdPDgwX79+un1+j179gwYMED0OKQUaorQ2F5sbOyPP/5oPFLJIrUyLVq0aOHChY888siJEyeaNGkiehxSBKVH+CCL1ArEdTb6M4VGaFrmlGX54sWLxoOenp5Dhw6t1iK1Ap07d65z587Xrl2LiIh49dVXRY9D4ikrQrMvUivT1q1bhw8f7ujoeOjQId6QhhQR4X0Xqb28vHQ6nVarfcBFamWaMmVKZGRkmzZtjh075uLiInocEklkhIWFhfv27bvvIvXIkSM7dOggarA6UFRU1L1796ysrIkTJ37yySeixyGRBERYySJ1SEhI27Zt63geUbKzs5955pnCwsKYmBjenc2a1V2Ef7VILUlSYGBgixYt6mYMRYmMjJwyZUrDhg2PHz/esmVL0eOQGLUeYSWL1EFBQc2aNavVqyvfqFGjNm/e/Mwzzxw8eFBdv24hc6mtCM+ePbt161ZZljM
zM/V6PQBHR8fevXtrtdrg4OCmTZvWxkXVKDc3t3PnzmfOnFmwYMHixYtFj0MCmDlC5S9SK1BaWlq/fv0MBkNqamr//v1Fj0N1zTwRZmdnp6SkJCcnK3+RWpkWLly4aNEirrNZpweK0LjMGR8f/9133xmPuLm5abVanU7n7++v2EVqBdLr9T4+Pvv37/f3909JSbGMnQSqoppE+OdF6saNG/v7+6tokVqBTOtsq1atevnll0WPQ3WnqhEaF6llWU5MTDx37pzxoLu7u5+fnxoXqZUpMTExMDDQ0dHx8OHDTz75pOhxqK4Yqmb+/Pmmb2nZsuXMmTPT09PLy8ur+O1URS+++CIALy+vgoIC0bNQHalShPHx8a6urg0bNpw3b95XX31V2zNZs1u3bj3xxBMAXnzxRdGzUB2p0sdlFxUV5eXlabXaZcuWde3atZZekwmAk5NTTExMvXr1Vq9eHRcXJ3ocqgv8zHrFefzxx1esWAHgpZdeOn36tOhxqNYxQiV6+eWXhw4dmpub+/zzzxtv3EgWjBEq1Lp16zw9PQ8ePMhbs1k8RqhQbm5umzZtsrW1XbJkyeeffy56HKpFjFC5evfuPX/+fL1eP2bMmGvXrokeh2oLI1S0N954o2fPnufOnZs4caLoWai2MEJFs7Ozi42NbdSo0datWyMjI0WPQ7WCESqdh4eH8SY0M2bMyMrKEj0OmR8jVIHAwMCJEycWFRWFhIQUFhaKHofMjBGqQ0RERKdOnU6ePDlr1izRs5CZMUJ1MK2zRUZGbt68WfQ4ZE6MUDU6duz41ltvAZg8efKZM2dEj0NmwwjV5JVXXgkICDCus5WXl4seh8yDEaqJRqNZv369p6dnWloa19ksBiNUGTc3t08//dTW1nbx4sX79+8XPQ6ZASNUn+eee27u3Ll6vX706NFcZ7MAjFCVFi5c6O3tfe7cuUmTJomehR4UI1QlOzu7uLi4Ro0aJSYmrl69WvQ49EAYoVp5eHgY85s+ffp///tf0eNQzTFCFRsxYsSECROM62y3bt0SPQ7VECNUt/Dw8A4dOmRnZ8+ePVv0LFRDjFDdnJ2d4+Pj69Wr9+GHH27btk30OFQTjFD1OnbsuHz5cgDjx48/e/as6HGo2hihJZg6deqQIUOuX78eFhbGdTbVYYSWQKPRrF27tnnz5mlpacuWLRM9DlUPI7QQTZo0iYmJMa6zmT4lklSBEVqOPn36/POf/ywrKwsODv79999Fj0NVxQgtyuLFi3v06PHbb79xnU1FGKFFsbOzi46OdnV1TUhIWLNmjehxqEoYoaVp1apVVFQUgGnTppk+xpyUjBFaIEmSxo0bV1hYGBQUxHU25WOElmnlypXt27f/9ttv586dK3oW+h8YoWUyrrM5OTmtXLly+/btosehyjBCi9WpU6dly5YZDIZx48ZxnU3JGKElmz59uk6nu379+ujRo7nOpliM0JIZ19maNWv2xRdfGO9ZSgrECC2cu7t7TEyMjY3Nv/71r4yMDNHj0H0wQsvXt2/fWbNmlZWVjRo16vr166LHoXvZiR6A6sLSpUu//PLLQ4cOTZo0SZble7564MCBmJgYIYPdw87W9iPl/Nl13Dj06FEH12GEVsG4ztalS5ctW7asW7du/Pjxd381OzvbuGQjnJ2d3UdlZaKnuM3bmxGSObVu3ToqKmrkyJFTp0719vZu37696Uv9+vVTyH0TbTQaGAyip7jN27tursMIrUhQUNCOHTs2btwYFBR05MgRJycn43EvLy8vLy+xs6lDVhY+/RR79+L8edy4AXd3tGmDIUMwdizc3Gr8rHxjxrp89NFH7dq1++abb+bNmyd6FlUpKcGUKXjqKbz7LrKycPUqSkpw/jwOHMCMGWjXDlu21Pi5GaF1cXZ2jo6OdnBwCA8PT0pKEj2OSpSVISgIkZHQ6wHAwQHPPgtfX7RtW3FCTg5GjkR0dM2enhFana5duy5dutRgMLzwwgsXLlwQPY4aRET
AtH8bFobz53HoED77DD/8gPR0tG4NAHo9Jk7E6dM1eHpGaI1mzpyp1WpzcnJCQkK4zvY/5OZi4cKKx8OH49NP0aTJna96e2Pfvoo/EN66hTlzanAFRmiN7l5nW7FihehxlE2WkZ8PAPb2eP/9+5zQsiVMtz/ftg3VX4dghFaqadOmGzZssLGxWbBgQWZmpuhxFCwhoeKBjw88Pe9/zoQJ0GgAoKQE1f+TNiO0Xv/4xz9mzJhRVlYWFhaWl5cnehylOnas4sGAAX95TtOm6NSp4vHXX1f3CozQqi1dutTDw8Pe3r6goED0LIp06RJycioed+hQ2Zmmr37zTXUvwl/WW7WjR49evHhRr9fn5eU1b95c9DjKc/nyncd/9bPoPV+9cqW6F+ErofXKzc0NCwsrKyubM2dOh8r/M2+1jG/JGDk7V3ami0vFgxs3qnsRRmi9pkyZcvr06W7dui00vQVP9ygquvPY0bGyM2/vAKL6t7djhFYqKioqNjbWxcXFuEAjehylql//zuO7g/wzU3uml8QqY4TW6NSpUzNmzADw8ccft2nTRvQ4CtagwZ3Hlf+cafrB9W9/q+5FGKHVKS4uDgoKunnz5rhx40JDQ0WPo2wtWtx5XPlK2pkzFQ88PKp7EUZodWbPnn3ixInHHnssPDxc9CyK5+YG05vG335b2ZnZ2RUPOneu7kUYoXXZtWvXqlWrHB0dN2/e3ODun7Xorzz7bMWD1NS/POfSJZg+9qN79+pegRFakfPnz48ePdpgMCxfvvypp54SPY5KjBhR8SAtDd9/f/9zNmyoeODmBl/f6l6Bv6y3Fnq9fsyYMVevXvXz85s+ffrdX4qKipo5c6aowe5mb29/rbRU9BS3ffwxQkIQEAB3d+TkwGDAq69i927Y/PGl68IFvPNOxeMxY1D9t5oZobVYvnz5vn37mjZtun79eo1x2/i2kpKS/Lt/Ky2OnZ0dlHOjp5ISAHB2xuLFeOklANi7t+Jv97q7V5xz4gRCQ3HtGgC4u+ONN2pwHUZoFY4ePbpo0SIbG5tNmzY9/PDD93x10qRJzz//vJDB7qFR1I2e6tWrePDii0hPx6ZNAJCQgORkdOsGNzecOXNnU9TJCbGxNbvTDCO0fHl5eSNHjiwtLZ0zZ87AgQP/fIK9vb29vX3dD6YaGg3Wr0ezZnjvPZSXo6QE6el/OKFlS6xbh379avb0fGPG8k2ZMuXXX399+umnFy9eLHoW1bKzw9tvIysLs2fjySfRuDEcHNCiBXx98fHHOHmyxgWCr4QWb+3atTExMVxPM4/HH8fbb+Ptt837rHwltGQ//fTTa6+9BiAyMrKt6dZgpDCM0GIZ19Py8/PHjBkTFhYmehz6S4zQYs2ZM+f48eOPPvpoRESE6FmoMozQMu3atSsiIsLe3j46Ovpv1d/rp7rECC3Q5cuXx40bZ1xPe9a0+khKxQgtjV6vDwsLu3z5sq+vr/EvDZLCMUJL89Zbb+3du9d4W9F71tNImRihRTl69OjChQs1Gs26dev+vJ5GysQILcfNmzdDQ0NLSkpmzZo1ePBg0eNQVTFCyzF58uRTp0517dr1zTffFD0LVQMjtBAbNmyIjo42ffyg6HGoGhihJfjpp5+mTp2K2x/EK3ocqh5GqHqlpaVhYWH5+flBQUGjR48WPQ5VGyNUvblz5x4+fLh169ZRUVGiZ6GaYITqtnv37vfff9/Ozo7raerFCFXsypUrY8eONRgMS5cu7V79O+2RQjBCtTIYDOPHj7906VLfvn1nzZolehyqOUaoVitWrNixY4e7u3tMTIyNDf89qhj/5anS119/vWDBAo1Gs3bt2mbNmokehx4I7zGjPjdv4rXXHm/f/gUfHyedTid6HHpQjFB9XnoJaWlO3bqtWrZMMTerpgfAH0dVZuNGbNoEZ2ds3KhxdOR6miVghGry88949VUAWLUK7duLnobMhBGqRmkpQkNx4wYkCWPHip6GzIcRqsb8+Th8GK1
bg9tpFoYRqsOePXjvPdjZYdMmuLqKnobMiu+OqsCVKxg7Fno9li5Fjx6ipyFz4yuh0hkMmDABFy+iTx/Mni16GqoFjFDp3n0XKSlwd0dMDGxtRU9DtYARKtrXX+P116HRYM0aNG8uehqqHYxQuQoKEBqKkhJMm4YhQ0RPQ7WGESrXlCn44Qd06oTly0WPQrWJESpUfDw2boSzM+Lj4eQkehqqTYxQiX75BRMnAkBEBNfTLB8jVJyysor1tBEjMH686Gmo9lUjwlOnTuXk5NTeKGT0+us4dAgeHvjkE9GjUJ2oUoQ9e/YcM2bM4cOHH3744V69eoWHh58/f762J7NOBw7gnXdgZ4e4OLi5iZ6G6oTGYDBU5bzU1NSIiIjU1NTi4mIANjY23bt3DwwMHD58eMuWLWt3RquRk4Mnn8TFi1i6FPPni56G6kpVIzQqLCzct2+fLMvbtm3Lz883HvTy8pIkaeTIkR06dKidIa2CwYCAACQno08f7NvH5RgrUr0ITW7durV3715ZlpOSkvLy8owHvby8dDqdVqvt1auXWYe0Cu+9h5kz4eaGEyfg6Sl6GqpDNYzQpLi4OC0tLTk5OS4u7sqVK8aDrVq10ul0kiT17NmTHxZbFceOwdsbJSXYuhUBAaKnobr1oBGalJeXZ2ZmyrIsy/LFixeNBz09PYcOHSpJkre3N++N+VcKCvD00/j+e0ydivBw0dNQnTNbhCZ6vT4jI0OW5YSEBNObqO7u7n5+fpIk+fn52dvbm/eKajduHDZsQMeOOHIE9eqJnobqnPkjvFt2drYsy7GxsT/++KPxSOPGjf39/SVJ8vX15WdZApBlBAWhfn189RX4xpZ1qt0ITYw1xsfHf/fdd8Yjbm5uWq1Wp9P5+/s7OzvXwQwK9MsveOop5OUhKgovvCB6GhKkjiI0yc7OTklJSU5OTk9PNx6pX79+//79JUkaNmxYgwYN6nIYscrK8NxzyMxEYCC2bBE9DYlT1xGa/Prrr0lJSbIsZ2RkGGdwcnLy8fGRJCkgIMDVCm5mNG8e/v1veHjgxAk0aiR6GhJHWIQmZ8+e3bp1qyzLmZmZer0egKOjY+/evbVabXBwcNOmTcWOV0u++AIDBkCjwYED6NlT9DQklPgITXJycnbt2iXL8u7du0tLSwHY2tp2795dkqSgoCBL+uyhnBx07owLF7BkCf7v/0RPQ6IpKEKTK1eubNu2LSEhYf/+/aYae/fuHRz88uDBIx55RPR8D8ZgwNChSErCc8/h88+5nkaKjNDk+vXrycnJKSkpO3fuLCgo6NMn5YsvBnt5QZIQEoK2bUXPVyMffIDXXuN6Gt2h6AhN8vPzU1JSvvxy4MaNTQoLKw527YrAQAQGqqnGb77BM8+gqAjbtnE9jSqoI0KToiKkpkKWkZSE23vj8PKCTgetFgrfGzetp73yClauFD0NKYbKIjQpLkZaGpKTEReH23vjaNUKOh0kCT17QoF74xMnYs0adOqEI0d47ya6Q60RmpSXIzMTsgxZxu29cXh6YuhQSBK8vaGcvfGjRzF+PKKj8cQTokchJVF9hCZ6PTIyIMtITMS5cxUH3d3h5wdJgp8flLA3rtcr6D8KpBCWE+HdsrMhy4iNxe29cTRuDH9/SBJ8fcG9cVIUy4zQxFhjfDxu743DzQ1aLXQ6+PvDWvfGSVksPEKT7GykpCA5Gbf3xlG/Pvr3hyRh2DBY0944KY61RGhy+jS2b4csIyMDxn90Jyf4+ECSEBBQ1Q/B/ewzDBpU8XjYMCQm3v80JycUF6NxY1y9ao7RyUJZXYQmZ89i61bIMjIzodcDgKMjeveGVovgYFS+N353hDY2+P57tGlzn9MYIVWF9b5V5+mJadNw8CAuX8Z//gOtFno99u7F9Olo3hy9eiE8/M7vPCqh1+Pdd2t/XLJc1vtK+Ge//46UFMgy9uxBSQkA2NigRw9IEgID0aLFnTNNr4S2tigvh5MTzpy5z4snXwmpKqz
3lfDPGjXC6NFITsalS9iwAUOGwMEB6emYPh2enujRA++8gzNn/vAtAwYAQFERVq0SMjJZAkZ4H25uGDMG27fj2jUkJeH55+HigkOHMHs2Nm78w5l9+sD4IQAffQTTZjlRtTDCytSvD50OGzfi0iUkJCAkBCNG/OGEggJMnw4A165h3TohM5LqMcIqqV8fw4cjOvreuxIWFmLChIqPT3rvPZSXC5mO1I0RPpCyMri4YPJkAPj1VyQkiB6IVIgRPhDjW8tTp1bso65YIXYcUiVGaAYPP4zQUAD46iscOCB4GFIdRmges2ZV/DVivhhSdTFC8/Dyqvj1/a5dyM4WPQ2pCiM0m1mzAMBg4IshVQ8jNJt+/dC1KwDExMD4kXAKvM8NKRAjNCfji2FpacVnffJOFlQV/L+JOUkS/v53AFi9Gjdu8JZqVCWM0JxsbSu22G7cwCefoEkT0QORGjBCM3vhBTRsCADh4RXrbESVY4RmZtpiO3cOJ06InobUgBGan2mLrbhY9CikBozQ/Jo1Q0iI6CFIPRhhrTBtsRH9T7zHDJFgfCUkEowREgnGCIkEY4REgjHCmvjsM2g00GgqlmMqP4fLa1Q5RkgkGCMkEowREgnGCIkEY4REgjFCIsEYIZFgjJBIMDvRA6jbzZvo1ev+X8rNrdtRSLUY4QMpL0d6uughSOX44yiRYIzwgbi6wmC4//927RI9HKkEIyQSjBESCcYIiQRjhESCMUIiwRghkWCMkEgwRkgkGCMkEowREgnGz6IgEoyvhESCMUIiwRghkWCMkEgwRkgkGCMkEowREgnGCIkEY4REgjFCIsEYIZFgjJBIMEZIJBgjJBKMERIJxgiJBGOERIIxQiLBGCGRYIyQSDBGSCQYIyQSjBESCcYIiQRjhESCMUIiwRghkWCMkEgwRkgkGCMkEowREgnGCIkEY4REgjFCIsEYIZFgjJBIMEZIJBgjJBKMERIJxgiJBGOERIIxQiLBGCGRYIyQSDBGSCTY/wOFEBQ3JSHLAQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAdjUlEQVR4nO3de1BU5/kH8GdZrnIRQVREFBBBbSLBS8zIeA1Gq1iNiplovTTYTZzpkJm06Zp2EmqSSdc202o7zcwaxaIZU5lcDI2NFu94F0QMilxEBIEocnG5LJdl9/fH29/OBrmcPXvOeRf2+5n80ZjznvO43e/Zc55zeVUWi4UAgB833gUAuDqEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzd94FAPRu7969Pj4+V65cSUlJiYuL412OjFQWi4V3DQA/0t7evm3btn/+859BQUENDQ1ENHXq1E2bNm3dujU4OJh3ddJDCMG51NTUrFmz5vLly97e3mlpaU1NTenp6XV1dUTk5eX1s5/9TKPRvPjiiyqVinel0rEAOI0LFy6EhoYSUXh4eG5uLvvD9vb2zMzMpKQktVrNvrTh4eFarfbevXtci5UMQgjOYs+ePZ6enkQ0b968hw8fPr3AgwcPdDpdZGQki6Kbm1tiYmJmZmZHR4fy1UoIIQT+urq6tFoti5ZGo+ns7Oxn4e7u7uzs7I0bNw4bNowNGTFihEajuXHjhmIFSwshBM7q6uoWLlxIRF5eXunp6cIHNjY26vX6+Ph467nVjBkzdu3aVV9fL1+1ckAIgaf8/PwJEyYQUVhY2JUrV8StpLCwUKvVWhun3t7eycnJ2dnZZrNZ2mplghACN4cOHWKHlAkJCbW1tQ6uzWg0ZmZmJiYmWhunMTExaWlp9+/fl6Ra+SCEwIHJZLI9CZS2s1JZWanT6dgPLBGp1WrWv+n/VJMjhBCUVl9fv3jxYiJyd3ffvXu3TFth/Zvk5GTWcSWiMWPGpKam3rx5U6YtioYQgqIKCgqioqKIKCQk5PTp0wpssaGhQa/X2974NmPGDL1e39zcrMDWhUAIQTmZmZm+vr5EFB8fr/ypWm5urkaj8ff3Z1H09/ffuHGjM/RvEEJQgtls1ul0bm5uRLR+/fq2tjZelTzdv5k8ebJOp+v19gBlIIQgO4PBsHLlSnYSqNPpxK3k008/3bJlS05OjlRVFRcXp6WlhYeH9+jfdHV1SbUJgRBCpbW2tj58+LCmpoZ3IQopKSmZMmUKEQUHB584cUL0eqwX5WNjY3fu3On4JQ3GZDKx/o2Hhwdb/9ixY7VabWlpqSTrFwIhVBRrS/z0pz+13vfotH1zSRw9ejQwMJCIpk2bVl5e7siq5L7wUFtbu2vXrmeeeaZH/6alpUWS9fcDIVSOtS0xduxY2775b3/72zt37vCuTmK2J4GvvPKKVF9lBS48sP6Nn58fW//w4cM1Go2Eh8FPQwiVwL6RrBOwYcOGtrY25++bO6K5uXnNmjVEpFKp0tLS5Gg/yv0BPnnyJCMjIzEx0br+qVOn6nS6R48eSbJ+Wwih7PpvS/Tom/v4+Ayu+x6fVlZWxg7qAgICvvnmG7k3J/eFh6KiIq1WO2rUKLZ+Ly+v5OTkrKwsk8kkyfotCKHciouLhbQlnLBvLs7x48dHjBjB7tu8ffu2YtuV+wPs6OjIyspKTk5
2d//fa5nCwsK0Wu3du3cdXzlCKCNrWyIuLk5gW4L1zcePH8+9by6CXq9n39Hly5c3NTVxqUHuCw/V1dU6nW7ixIls/a+99prjB6gIoSwcbEs83TcPDQ1VuG9uF6PRuHnzZnYSqNVqu7u7+dYj94UHs9l88uRJdkb6u9/9zsG1IYTSk7Atwfrmzz77rPJ9c+GqqqpmzZpFRH5+fl9++SXvcn5E1gsPhw4dIqLVq1c7uB6EUGIytSV69M0DAgLk7psLlJOTM3r0aCKKjo4uLCzkXU6f5LjwcP36ddY1dbA2hFBKx44ds7YlioqKJF+/kn1zIfR6PTveW7p0aUNDA5ca7CLtB9ja2urm5ubp6engCaf0Ifyg9gPKI/ZPcEGw5Ot3Wkq2JRTom/evvb09JSWFbT01NVWx7UpFqg+Q3cFTXFzsSDHSh/Ba67W/P/r73x/9PaowykVCyKstIWvfvB/V1dUvvPACEXl7e2dkZMi6LVk5/gEuWbKEiBw877A7hJ/Vfzbnzhz/G/7e+d4zi2Z+3fh1X0smFCe4QgidoS3Ro2/u5uaWkJCg1+tbW1sl39bFixet7+e9du2a5Ovnoqqq6oMPPmBPG7MPcPHixUL+dm+++SYR7dy505Gt2xdC7QMtO8jcXLF5S8WWUQWjKI90P/T+cIorhNDxtkRZWVlZWZlU9bD2A7tDlYgCAwM1Gk1eXp5U6z948KCPjw8RzZ07d9DdSCCE7Qd4/fr1AZf/5JNP2NVCRzZqRwivt15X5akivo/4ofMH9icNpoa423FueW63jb3cGzHkQyhJW2LTpk2SX3hoamrS6/UJCQk92g+PHz8WvU673s872DU2Nn722WdCljx16hQRzZkzx5HN2RHC1KpUyqP0xz96PevV1qupVanlHb3cDjKEQyhhW2Lbtm22P1zbtm2zzsHguFu3bmm12pEjR9q2H0TcV1lXV7do0SK2hn379klV3hBQW1tLREFBQY6sxI4Qzr4zm/KoqrNK4PJDNYSStyXkvvDg4IQq+fn5ERERRDR27NjLly9LUtJQwi5KOXJwbkcIw26Gqa+rzRahO9EhGUJZ2xJyX3gQMaHK559/zt7PO2fOHNd5G4Bd2B757NmzotdgRwiDC4J98n2ELz/0QqhMW0LuCw/WCVXY34X6mFBF1vfzDiVbtmwhIr1eL3oNdoRw/Pfj3fLcTGahe+VeQ9jc3dxubhe+USfBpS0h94WHfiZUsX0/r+hXM7mIP/7xj0T01ltviV6DHSGcXzyf8qi0ved96H2FqtcQvl/zfuCNQM19zfXWgfu/ToJvW8JsNufk5Mh64eHq1atvvPHG8OHD2fqHDRvGJlcZPXq0M9ye6uS+/vprIlq2bJnoNdgRwvdq3qM8+tujv9n+YXVndeCNwL88/MvTy/cawpfvvmy9qW1m0cxPHn3SZOLz4JlAztOWkOPCgy3b52InTJgQGRlZUVEhyZqHtqKiIiKKiooSvQY7QljRUeGd7x16M7Si43//3zR3Ny8sWajKU11qufT08n2dE94y3tI+0I4sGMmi6J3vnVyenG3IFt7yUYxztiWkuvDQl3fffZeIfvnLX0qytiGvs7PTw8PDzc1N9BuN7btj5tO6TymPAm4EbLi3IaUiJfz7cMqjD2o/sC5wpOkIu3GU3Tvqm+9r/dcrLT+afa7d3J7ZkJlUlqS+rmZpDP8+XPtAe6/jnri/ibScvy0h30zux48fJ6IFCxZIUaZLmDx5MhEVFBSIG273vaMnDCcWly4OvBHoed3zudvPfdX4le1/fbHkRevRZo9/0mrSel1hVWeV7gdd5PeRbDG3PLfEksTMhswOM7fv/eBqS1RVVUk7k/v9+/eJaMyYMdLWOYStWrWKiA4fPixuuLM8T9ht6c42ZG+8t9En34elccSNEZr7mhttSk9EfvPmTXYj78iRI5WZNkgSAi88CGE2m9nDr4PiEUFnsH37diLasWOHuOHOEkKrRlOjvk4fXxRv/Qm
dUTRDX6c3mAwKbD0rKysgIICI4uPjB2lbotcLD3q93mCw4wOcPn06EV261MupPjxt//79RPTqq6+KG+50IbS62nr19fuvD78xnEXRL9/vFxW/ON9wXqbN9Zg2SI6HgBSWm5ubmpoqbib39evXE9H+/fvlL3MouHTpEttxixvuvCFkjN3GzIbMxJJEVZ6K8mj+7+azicgrKysl3IrBYGCH9Wq12vlPAu3S2tp64MCB+fPnW1/I+fnnnw84aseOHUS0fft2BSocAhobG9n1VXHPczt7CK1K20vfqX5n9vzZ7Jvk4eGxcuXKrKwsx98nWVJSMnXqVCIKDg7Ozs6WpFonVFpa+s4778TGxgp5Ufzhw4eJaNWqVQoUNjSMGTOGiMSdwgyaEDLW+UCs75N0cD6Q//znP1JNGzSUFBQUENHkyZN5FzJoLFiwgIiOHTsmYuwgC6EVmw9k2rRpPdoPwucDsT0JXLdunVO9yZM7o9GoVqs9PDyG9sO7EnrjjTeIaNeuXSLGDtYQWvWYDyQgIIDNB9L/qObm5rVr19L/v5pp8M6+Ih92nUaOFzcOSX/961+JaNu2bSLGDvoQMm1tbcLnAykrK2PvtA4ICDhy5Ijy1Q4Ky5YtI6Kvvvpq4EXBYvnuu++IaOHChSLGDpEQWt25c0er1bKXLxGRp6dnUlKS7XwgZ86cCQkJIcWnDRp03nrrLSL66KOPeBcyONy7d4+IQkNDRYwdaiFk+poP5MMPP2R/smzZMl7TBg0Wer2eiDZv3sy7kMGhu7ub3evf2Nho79ihGUKrmpoanU43adIk6xmjSqV67733uE8b5PzOnj1LRLNnz+ZdyKDx3HPPEZGI593caEhjM4oVFxefOXMmLi7OYDCsXbt2x44drCkK/WBPBty5c4d3IYOG6E/MJb6LKpVq/vz5v/nNb9j/5l3O4DBq1Kjg4OAnT56wt/rBgFgIi4uL7R3oEiFk2GfEnoMGIWJjYwk/hoLhl3BgkydPVqlUJSUl3d3dvGsZHHBEaheEcGB+fn5hYWEdHR0VFRW8axkc2C+hiOMr1xQTE+Pm5lZWVtbV1WXXQBcKIWHXbid8XHbx8fGZMGFCV1dXeXm5XQMRwp5u3bp16NAhdCMIIbSfuE/MtUIo5PhKq9Vu2LCBPabp4qKiory8vCorK1taWnjXMjgghAMT8hlh92/l7u4eHR1tsVhKS0t51zI4iDuLRgh7XwbdCAa7JLvgl3BgYWFhAQEBdXV19fX1fS2Dr50t7JLsIu5atGuFUKVSDXgBesqUKWwBi8WiXGXOCtfr7TJ69OigoKCmpqaHDx8KH+VaISQBP3TBwcEjR440GAxokBKOC+zHdlt2/Ri6XAiFnDrjm2fFbjMqLi7GbUYCifjyuFwI0SC1i7+//9ixY9vb2ysrK3nXMjiIaJAihL3A7Vq2sEuyC34JBzZp0iR3d/fy8vKOjo6+lsHXzhY+DbsghAPz9PSMjIzs7u4uKyvraxl87WzhuMAuEydO9PT0rKysbGtrEzjE5UJIAjIWGRnp7e1dVVWF27UIz2Hayd3dfeLEiWazuaSkROAQ1w1hP98qtVrNbtcS/jkOYTgusJe9n5grhhBXKewybtw4Pz+/R48e9XObEdhCCAeGqxR2sd5mhNPCJ0+eCFnM3o/LFUMo5MY03K5lC7skIvrvf/8bFRWVmZk54JLWL5jANbtiCIOCgkJCQlpaWqqrq/taBl87W/gl/Pjjj5ctW9bQ0PDvf/97wIWttxmZzWYhK3fFEJKAjOGtULZceZfU3t6+ZcuWt99+22w2a7XajIyMAYcEBASEhoYajUaBtxkhhL3DW6FsuWwIHzx4MG/evIyMDD8/vy+++MI6l17/LBaLn58fEQl8GNqlQ4gGqUAxMTFqtbr/24yGnvPnz8+cOfPatWvR0dGXL19evXq1kFHNzc2rV68uKSlZsWLF4sWLhQxx6RCiQSqQl5dXRESEyWS6e/cu71oUsmfPnkWLFj1
8+HDp0qVXr179yU9+ImRUWVnZnDlzjhw5EhQU9Ktf/UrgthDCPqEbYct1dkkdHR1bt259/fXXu7q6UlNTv/322xEjRggZePz48eeff76wsDA2NvbChQsvvfSSwC26aAgjIiJ8fHyqq6sNBkNfy7jO104IF/k0ampqFixYsG/fPm9v74yMjN27d6vVaiEDd+/evXz58sbGxqSkpCtXrrCPSyiJp4caPNh899euXetrgaqqKiIKCQlRsiqn9emnnxLRpk2beBcio4sXL4aGhhJReHh4P1+MHoxG46ZNm+j/p14XMeue64Zw3bp1RHTw4MG+FjCbzQEBAUT0+PFjJQtzTjk5OUT0/PPP8y5ELgcPHvTx8SGiuXPn9jrLeq+qqqpmzpxJRP7+/qKnFnfRw1ES0CBVqVQxMTHkAsdgQgzh91+ZTKbt27dv3LjRaDRqNJqTJ0+OGjVKyMCcnJyZM2fm5uZGR0dfunTp5ZdfFleA64ZQyI1pLnIiJMRQff/V48ePlyxZsnPnTi8vr3379un1eusU6/3bs2fPiy++yNqn165dE9g+7ZXrhlDIY3J466atobdLunHjxqxZs06dOhUWFnb27NnXXntNyKiOjo6UlJTXX3/dZDJptdpvv/02MDDQkTJcN4SxsbEqlaq0tNRkMvW1zND72jliiH0a//rXvxISEioqKubMmZObmzt79mwho2pqaubPn5+enu7t7X3gwAGdTiewfdoP1w2hr69veHh4Z2fnvXv3+lpmiH3tHDRkLpx2d3dv37791VdfbWtr02g0p0+fHjNmjJCBFy9enDlz5pUrV8LDw8+fP//zn/9cmoLE9XOGBnY5NSsrq68FOjo63N3d1Wp1e3u7koUpqbOz8w9/+ENjY+OAS7IHCF566SUFqpJPfX09u5vM3d1dp9MJH6jX6z09PYlo3rx5wtunQrh0CFNTU4noT3/6Uz/LTJo0iYgKCwsVq0pJDx8+nDt3LhG9/PLLAy7MbkceP368AoXJ5ObNm1FRUUQ0cuTI06dPCxzV1dWl1WrZj5ZGo+ns7JS2KpcO4T/+8Q8iSklJ6WeZFStWENEXX3yhWFWKyc/PnzBhAhGFhYVdvnx5wOVNJpO3t7dKpWpublagPMllZWWxC7/x8fEVFRUCR9XV1S1cuJCIvLy80tPT5SjMpUN48uRJIkpISOhnmbfffpuIPvzwQ8WqUsahQ4eGDRvG/vq1tbUCR02dOpWITp06JWttkjObzdankNavX9/W1iZw4PXr1637qStXrshUnkuHkD1ZHxQU1M8ye/fuJaKNGzcqVpXcWGPdenDV0dEhcOCFCxf8/f3Hjx/v4eGRlJSUmZnZ1dUla6mSMBgMq1atIiK1Wm3XSaC4/ZQILh1Ci8XCrvA8evSorwXOnz9PRLNmzVKyKvnYtiV2794tfKC1LTFu3DhrU378+PFpaWn37t2TrV5HlZSUsF/v4ODg7OxsgaNE76fEcfUQsqtD586d62sB9p4/f39/s9msZGFysLYlQkJCHGlLVFdX63S66Oho9odubm4JCQl6vb61tVXO8u129OhRtpOdNm1aeXm5wFH19fWJiYki9lOiuXoIN2/eTER79uzpZ5mQkBAievDggWJVycHxtsT+/ft7/Nfc3FyNRuPr68vSOHz4cI1Gk5eXJ3Hp9rM9CVy3bl1LS4vAgQUFBSL2Uw5y9RB+9NFHRPTrX/+6n2VYE//EiROKVSUtudsSTU1Ner0+ISHBevF56tSpOp2urq5Oor+BfZqbm9euXUtEKpUqLS1N+CFMZmYm26HEx8ffv39f1iJtuXoIv/rqKyJavnx5P8u8++67K1asuHDhgmJVSUjJtsStW7e0Wi07cGA/nsnJydnZ2UoeyZeVlT377LNEFBAQcOTIEYGj2H5KpVIR0YYNG4TvpyTh6iG8ffs2EU2cOJF3IbLg0pZob2/PzMxMSkqy9m/GjRun1WqV6d/8/ve/J6IpU6YUFxcLHGIwGFauXCn
iHhqpuHoIOzs7PTw83NzcjEYj71okJq4t8fjxY9aW8PT07P9UeUAPHjzQ6XTsFIv1bxITEzMyMmT9nTGZTO+///6TJ08ELl9cXMwelQwODuZ1xuHqIbRYLOy+5Js3b/IuRDKOtCUiIyOlbUt0d3fn5ORoNBp2cEtEI0aM0Gg0+fn5kqzfEdb9VFxcnPD9lOQQQgs7FDl8+DDvQqTR3Ny8Zs0aEW2Jw4cPs7bE9OnT5WhLNDY26vX66dOnW/s3M2bM2LVrV319veTbGpDtfuqVV14Rvp+SA0JoYec/O3bs4F2IBMrKyp555hknb0sUFhZqtdrg4GAWRW9vb4X7N6L3UzJBCC3p6emsd8+7EEedOXOGdSZjYmJu374tcBSvtgTr3yQmJrLwE9GkSZPS0tLkvjZgu5/65ptvZN2WQAih5eLFi+wYjHchDtHr9e7u7uxyS1NTk8BRztCWqKys1Ol0ERERtv2bzMxMyZ8Yslgsx48fZ2/yjYmJKSoqknz94iCElsbGRiLy9fXlflgijtFoZPf92Pvey6NHjw4fPpx7W4Lp7u7Ozs5OTk5md6gS0ZgxY1JTUyVsmInbTykAIbRYLJbRo0cTkZI3SUilqqpq1qxZROTn5/fll18KHNWjLeFU93w2NDTo9fq4uDjb/o1er3fkIUbR+yllIIQWi8WyYMECIlq7du3333/PuxY75OTksN1HdHS08MqtbQl2D43T/v7n5uampqYGBQWxKPr4+Ijr34jbTykJIbRYLJbKykr2Vny+fXO7WN+QuWTJkoaGBoGjnLAt0T+j0dijfxMbG6vT6X744Qchw233U077jhKE8H+4982Fa29v37p1K6szNTXVZDIJHHjs2DHWloiNjXWetoRAxcXFaWlp48ePZ39xtVrN+jf9PFhs3U8tXbpU+H5KeQjhj/DqmwtXXV39wgsvsN1ERkaG8IFO25awi8lkYv0b63uyQ0NDtVptaWmp7WLt7e0pKSki9lNcIIS9U7JvLlxeXh77KRg3bpyS0wY5odra2l27drEHJmz7Ny0tLaL3U7wghP3pq29eUFCgfDG20wYJPCOy/LgtIXraIKdlNpvPnTu3efNm2weL2XWXiIgIZ7g9VQiEUBDWN3/uueck7Jvb5f3337ceXAl/vdK5c+ecvy0hiSdPnmRkZLDnP5YuXWrX9GbcIYT2kapvbq+CgoKgoKB9+/YJHzJY2hLSKiwsbG1tHRSvgbNCCMVwsG8ujsFgELiktS3BTgKdvC0BCKFDRPTN5Wbbljhw4ACvMkA4hFACAvvmCrCddT03N1fhrYM4CKGU+umbK7D1AwcOeHt7kwzTBoGsEEJZsBdy+vn5sSgGBARoNJqcnByZNif3tEEgK4RQRgaDgfXNrf2bKVOm6HS6ft66L4IC0waBrBBCJRQVFWm12lGjRrEoenp6SjWhir3Tm4ETQgiVY+3fsHs4WXK0Wu3du3fFrVCxaYNAVgghB45PqKLwtEEgK4SQpx4TqgQGBgqZUEX09GbgnBBC/vqaUOXx48dPLyxuejNwZgihExlwQhVx05uBk1NZLBYCZ9LR0ZGVlXXgwIHvvvuuu7ubiMLDw9evX09Ef/7zn81m8/r16/fu3csea4IhACF0XtXV1Z999tmePXvKy8uJyN/f32g0fvzxx2+++Sbv0kBKCKGzM5vNJ06cSE9P37Bhg6+v76JFi3hXBBJDCAE4c+NdAICrQwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEM
IAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOPs/oRHd0bgFC0kAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAANGElEQVR4nO3dTWwU9R/H8V8rBUHxCRQRAR9ABapSrPWhKgokCm4TLz32pOFkWg4mxQM28aA9rhoPJF5qTEwIXrYFTQpSHqpYBHxqC5UKVusTQi0taFvb/R9+yWTS9b/dh5n5zOy+XycTt9Oh7Xt39jszvy1JJpMGgE6pegeAYkeEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaECKn+/v6JiQn1XgSBCBFG4+PjW7ZsKS8v37Nnj3pffEeECKMff/wxmUz29fXFYrGampq+vj71HvmICBFGK1eu7O7ujsfj119/fVtbW3l5eUNDw/DwsHq/fFGSTCbV+wD8XxcuXHj99dfffffdycnJBQsW7Nix4+WXX77qqqvU++UlIkQEnDx5ctu2bYcOHTLGVFRUxOPxp556Sr1TniFCREZra2tDQ8PZs2eNMbFY7O23377zzjvVO+WBontPODAwcOTIkd27dxfJ+DsqJiYmdu/enf4xNTU13d3dzc3N8+fPb2trW7Nmzfbt20dGRoLZQx8li8bly5ebm5uvvfbahQsXGmNWrly5a9cu9U4hmUwm29vb16xZY4zZs2dPJo//+eef6+rqSkpKjDG33XZbS0vL1NSU3zvpn6KIcGpq6sMPP1y6dKkxpqSkZMOGDStWrLDPQZs3b+7t7VXvYPHq7e3dvHmz/V3ce++9Bw4cyPxru7q6HnvsMfu1Dz/88Geffebbbvqr8CM8fvz4k08+aX9V69atO3ToUDKZHB8f37lzp31JLCsr27p16/nz59V7WlyGhoYaGxvnzJljjLnhhhuam5v/+eefbDcyNTXV0tJy66232qfXurq6X3/91Y+99VUhR/jnn3/W19fbcfaCBQvi8fi///7rfsCFCxecB9x0002pD4AfJicnW1paFi1aZIwpLS2tq6v77bff8tng6OhoU1PT1VdfbYy55pprmpqa/v77b6/2NgCFGeH4+Lg9z2tf6Orr6//666//9+Cenp5nn33WvlSuWrXqk08+CXJXi01HR8eDDz5of9rr168/efKkV1s+c+ZMbW2t3fLSpUtbWlq82rLfCjDC9vb21atX21/Gpk2buru7M/mqRCJx11132a+KxWL
9/f1+72ex+emnn5xpyu233+7TNGX//v0PPPCA/T0+88wzX3/9teffwnMFFeGpU6e2bNnivMvPcNTmGBsbi8fj8+fPN8bMnj27vr7+0qVLPu1qUbl8+bJzuDhv3rympqYrV644//ejjz5qa2vz8NvZw92bb77ZOdz9448/PNy+5wokwosXLzY2Ns6ePdsYc+ONNzY3N4+NjeW2qcHBwa1bt5aWltrx986dOycnJ73d2+IxNTW1a9eu5cuX28FJbW3tuXPn3A+4dOnS4sWLjTEbN2789ttvPfzWHv5J+C3yEaY+7f3+++/5b/bYsWOPP/64fVGtrKzs7OzMf5vF5ssvv6yurrY/w4ceeujw4cOpj5mYmHDG1LNmzfJ8TJ3nwVEwoh3hp59+6t8bAPssvmzZMudZ3N5fgxn98ssvztHE4sWLZzyaSB1TT0xMeLg/7e3tq1atynZMEJioRjgwMFBXVxfAKMw9/rbvZ6I1/g6YfV993XXXOXPp4eHhDL/WPaa+7777Pv74Yw93LKuBecCiF6HkpFBgzUfatAnzmTNn8t+It2PqGU8dS0QpQnt86Fx9VltbOzAwEOQOuI9+n3766a+++irI7x5mvb29zz33nFcvYu6XUzumzvzlNBP/eRGVUGQinHahoGpSYudAt9xyi7dzoOiyb+dmzZrl+ds595g6kzeW2UokEnfccYfzknv27FkPN56VCEQ4ODjovmQ+DOcM7HWPdvxtr3sM7fjbP34PNq1jx465R6xHjhzxcOPOjTXGmLlz5zY2No6MjHi4/QyFOsIrV67Ym8fCefb89OnTzz//vP37uOeee7w94xxy+/btKy8vt//2jRs3fvPNN/59r9Qx9bSTjXly3xi1ZMmS4G+MCm+EiUTCuW86Fov98MMP6j36b9Oukvvuu+/Ue+Svvr4+5xLNFStWBHZPZuplN94O5L744otHH33U/ruqqqo+//xzDzeeXhgjPHHihLOCyNq1azs6OtR7NIMwj789ZOfS9uYjO5fO4eajPLnH1J5fgGrf8Ad/Y1S4InRPkCN3b1E4x9+eSL35SHvb3oEDB5xbMTwfUwf/XBOWCFNfTIaGhtQ7lQv3y3hFRcXBgwfVe5Qv4XFaGn6Pqb///vvAjrpDEaGzxEjBvK2aNv4O7Rva9OQTixl5cnt+Gvv27bv//vvt73HDhg0+zZ/EEZ4+fToWizkDxtbWVu3+eMg92rXj71CNdtNzz+7nzZunmt1nyNe/otQzMZ7fGCWL0O/nsJAI/4tJqvCcxc6Kr8dT/l2TkJREWIQXnURlXbCwXc+VLb8nC9Ouztu7d68nmw06Qvdca/369cVz+WXI1wU7f/58wYx2/Z6xJxKJu+++2zlSyO06dbfgIrRnePxeYiTkQrgumH31cN98VBgnOX0925z6Q8vnEvMgIvT7WofICXL8nV7I73bNn6/XXaXeu5zbS66/Efp91V+k7d+/3z3+DnhdsEis++AJv69AzmQVj/R8jNDX698Lgx1/2wVyfBp/p4rQCkge8vVenBnXs0rPlwizXWKkyF28eNEZf/tahbv5SKwF6Dlf70q1b7vmzp1r/mtlxzQ8jtDve6IL2KlTp9wfjeL58WEUV8X1Q+r6DN6u35XDGsdeRsgi1vlzj783bdrU09OT/zbdc6Bly5axQE7S/zF1R0fH2rVr7c/8kUceOXr0aJoHexOht0uMFDkPx98hPCMSKr6u3+W+9eS1115L88h8I/T1cp5i5j57vnDhwmzPOIf82oBQ8XX12qGhoR07doyOjqZ5TO4RBrPESJE7fvz4E088ke11ZFG5Si48fFrHPUM5RjhtiRFvP0UA0yQSCTv+NjNdUV1gnyMdMNXJm6wjdC8xwse+B2bGe4sifedUqAR/GUMWEY6MjMiXGCly7vG3+8aoqCyKFSG5fcplbjKN8P3333duPnrppZfy/Hxj5OPgwYMVFRX276OysrKystL+d2GsphEega3flWmEb775pgnTEiN
Fzhl/l5WVzZkzJ+o3H4VZAOt3lSSTSZOBsbGxvXv3vvDCC/ZYCGEwPDzc2dlpjKmurrZP2PDJiRMntm3bdvjwYWPMunXr4vG4c/dz/jKNEEBra2t9ff25c+eMMbFY7J133nHWAclHaf6bAIpETU1NT0+PHVO3tbWtXr16+/bto6OjeW6WV0Iga4ODg6+++uoHH3yQTCaXLFnyxhtvOFPrHBAhkKOurq6GhoajR48aY6qqqt566y1nleSscDgK5Kiqqqqzs/O9995btGhRV1dXdXX1K6+8ksN2iBDIXWlp6Ysvvtjf39/U1FRWVpbbnIbDUcAb/f39y5cvt3cUZYUIATEORwExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAGx/wFmd1nIWWiPaAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxUVf8H8O8MM+wkCIgoICC4oaKBaCpqSqboS1Kj0sQWdcrlQfv50rHHpcy0sawH63mkoVKpXBrLLXtMEZeA1MddEVdQAUFljZ0ZZs7vj0OIwAwwzL3nDn7ff+l4uffLyIdz7plzzxERQgAhxI6YdQEIPe0whAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhCxt2gRiERw9+7jV44fB5EINmxgVhKfMIQIMYYhRIgxDCFCjElYF4BQrezsx39+8IBdHbzDECKhCA1lXQEjGEIkFEoldOpU++fUVFi5kmk1PMIQIqEYOxa8vWv/7OjIshKe4cAMQoxhCBFiDEOIEGMYQoQYExFCWNeA0FMNW0KEGMMQIvboUxQzZz5+JT8fn6JAiHc7dkBWFusiWMAQIkHw9AQnJ/jiC9Z1sIAhRIJQVQXz58O330JREetSeIch5Fd+PsybBx4eYGUFPj6wbBmUl7OuSRA0GliwAHQ62LSJdSm8wxDyqKICRo4EpRLGjoV16yAoCNavhwkTQKdjXRl7hICzM7z1Fnz1FVRVsa6GXxhCHsXEQFoafPEFbN4MixfDzz/DwoVw4gRs2/b4GJkM3nkHcnPZVcnS4sWQnw/x8azr4BeGkEc//QR2dvDuu49fWbq09nUqNxe2boW4OOjRA1auhL/+YlAkjwiBhw+feMXHB6ZOhc8/f9w50GggIgL27eO/Ov5gCPlSUwNpadCvH1hZPX6xSxfo0gUuX679q7s7XLwIkZFQXg4ffwy+vvDhh1BayqRert2+DS+8AGFhoFY/8frSpXDrFhw4UPvX7dth/3546SUYMgSOHuW/TD5gCPlSUgI1NeDs3PB1FxcoKHj81z59QKWCkydhzBgoLITVq6F7d1i/Hqqr+SyWU9XV8OGH0LcvJCZCfj7cuvXEvwYFwfPPQ2wsSKUAANOng1IJ7u5w+jSMGQPDh0NSEpOquUQQPwoLCQAJD2/4er9+xN6+6S9JSCDBwQSAAJBu3YhSSWpquC6Ta0lJpE8fAkBEIhIVRfLzCSFk4ULSocPjYw4erP2mP/us9pXycqJQECen2tfDwsjFiwyK5wiGkC9aLbGyIkFBDV/v3Jn4+Bj6woQE0r9/7U9fnz5EpSI6HXdlcqeoiERHE7GYABB/f5KY+PifGoSQENKv3xMhpEpKiEJBHBwIABGLSWQkuX2bj8q5hiHkUVAQsbIiZWWPX8nKIgDk5Zeb+UKtlqhUpHv32igOHkyOHOG0UpPbv5907UoAiFRK5HJSVWX8qR49InI5sbauPZtMRnJyTFcoCxhCHn36KQEg69Y9fmXxYgJA9u5t0Zer1USpJO7utVEcNowkJXFUqQllZ5PJk2tLDg0laWmmOe29e0QmIxYWBIDY2hK5nBQWmubM/MMQ8qiigvTrR0Qi8vbbZONGMm0aASCTJrXuJGVlRKEgjo7Cvz3SaEhMDLG3JwDE0ZHExBCt1sSXSEsjkZFEJCIAxMGByOXkr79MfAkeYAj5VVhI5s8nXboQqZT4+JCVK8mVK6S6utXnKSggH3wg5NujCxfIoEG1vygiI8nDhxxe63//IxMn1l7LxYUoFKSyksPLmRyGkCm1mvj5ES8vI0c+6e2RlZWgbo/Ky4l
cXttR9PEhBw/ydN3kZDJiRG0U6Tuq0fB06TbCEDJ15w4JCHg88rl7tzEjn+npJCqKDjtW9Ov3z3/+s6ioiINaW+S330i3bgSASCQkOpqUlvJdQEICGTCg9h3t1cs8xpIxhKzRkU8/v9ofnEGDyP79xpznyhUSEfHJ4MEA4ODgIJfLS0pKTF2rIbm5JCqq9psYMID87398XvwJWi35/nvi61tbzOuvXz18+DCzaloAQygMdOSzS5fHI58nThhxmtOnT48ZM4ZOw3B1dVUoFJXc3x7pdCQ+njg71w5UKhSCmFNA31EfH527+yAAGDZs2Amj3lIeYAiFpLycxMQQV9fHI58XLhhxmqSkpNC/d1fx8vJSKpU1nMUiLS0tIiKT1jtxIrl3j6PrGKm8vFKhUHTs2JG+G5MmTbpy5QrrohrCEAoPnRjyzDOPRz5v3TLiNAkJCYGBgfSHr3fv3iqVSmfS2yO1Wq1QKKysrIKDx3fuTOLjTXhuEystLVUoFM888wwAiMXiyMjIW0a9pRwRUAg1Gs2mTZv27t175cqV8vJy1uWwlpfXcGLI/futPYdWq1WpVH5+fjSKgwYN2m/cDWcjx48f79mzJ/2Zfvfdd4uKWv8pC+/y8vLkcrm1tTUASKVSmUx2v/VvKReEEsL9+/f37NlTJBLVzSx3cnIKCgqKjIyUy+VKpTIhISE9Pd20v8vNQGYmkcmIRFJ7vxUdbcQnbmq1WqlUdunShb6xbbw7Kioqio6OFovFAODv73/06FGjT8VEZmamTCaTSCQAYGtrGx0d/ZDTDzFbQBAhvHXrVocOHQAgNDR0/PjxPXv2tLS0bPKZDwcHhwEDBkyZMmXJkiWxsbGHDx9OT0/XmMvnQUa7du3xxBB7e+MmhpSXlysUCicnJ/pOhoWFXWj9Def+/fu7du1KWxK5XF7VljmgTF27di0yMpL+0re3t5fL5X+xm2vDPoSVlZUDBw4EgIiIiPoNXWFh4dmzZ1UqlUKhkMlkYWFh7u7uTSZTIpH4+vqGhYXJZDKFQqFSqc6ePcvwPeWKKSaGlJSUGHd3lJGRMW7cOPqGh4aGpplqDihTly9fjoyMpN+Ui4sLP4PJjbEP4ZtvvgkAPXr0aElsGifT19fXwsKiyXA22aHVmnz+Is+OHiVDhtQ9ZFizdasRI5+tujvSaDQxMTH29vYA4OjoqFQq29lNQUpKyogRI+jPjKenp1Kp5LlvxTiEsbGxAGBnZ5eammr0Saqrq9PT0xMSEpRKpVwuj4yMDAoKoj80jVlZWTVuNs1vHOjviSGXR4zo1auXcSOfLbk7unDhwqBBg+hbFxkZyfz2iTsJCQm0RwYARr+lxmEZwgsXLtjY2ADA999/z8X5m2w2m0xmXbMZFRVVl0yGk79aRKslP/ww6e+EBAcHHzp0yIjTXL16dcqUKfTuyNHRsa53Wl5eLpfLaS/Dx8fn999/N2n1QkQHk/39/elb2q9fP5VKxcN1mYWwoKDAx8cHAKKjo/m8bnFx8blz52gy58yZM3r0aG9vb30dWhcXl8GDB0+bNm3FihVbtmw5ceKE0Hqzphr5pHdHISEh9Nf/gQMHunXrRu+3o6OjS/mfA8qOWq2OjY2te0vDG69IYmpsQqjVasePHw8AgwcPrjbiQR5TU6vVjTu0Dg4ODTJpZ2e3efNmAd4RlZeXx8TEdOrUidYZFhZ2/vx5I85TUlKSm5sbFRVFzzNw4MAzZ86YvFqzUF1drVQqnZycnn322e3bt3N6LTYh/OCDDwCgU6dOWVlZTApooZycnKSkpK1bt65ater111+nn6MkJCSwrqtpjeeF3Lx5s+VfrtPp4uPjnZ2d6S2iQqHgbrKbufjyyy8BYNasWZxehUEIExISLCwsxGKxcfcwDK1ZswYApk6dyroQQxqPfGZnZzf7VTdv3nz++edpAzhhwoR7QpsDysi2bdsAYNq0aZxehe8Q3rt3z8XFBQDWrl3L86XbLjc3VyqVSiSSlvx
Ys1V/5NPS0lImkxkY2CwrK6MNYJcuXX7++Wc+6xS4PXv2AEBERASnV+E1hFVVVXS8e+LEiUIb4Wgh+tnu6tWrWRfSIteuXYuKiqJTzOi8kOLi4saHLV261NnZedy4cU3+69Ps0KFDAPDCCy9wehVeQyiTyQCgW7du+XTNVzOUmJhIWwy1Ws26lpaqPy/E2dm58byQ2bNnA0BcXByrCgUrKSmJjjlzehX+lsHftm1bXFyctbX17t27nRuvBl/PvHnz1q9fTwjhrbaWGz16dJ8+fXJycn777TfWtbQU/bwrJSVl5MiRBQUFy5Yt8/f3j4uLq6mpoQfY2toCQEVFBdMyhYind4bTiNe5fPky/X6+++47w0du3boVAGxsbNLT0/mprbX+9a9/AcCLL77IuhBjJCQkPPvss/S/vmfPnvHx8VqtdtmyZQCwrv6CqIgQQsj169fpG8XpVfgIYUlJSa9evQBgzpw5ho+8dOlSC7PKUFFRka2trUgkatUHAMKh1Wq3bdtGHzK0s7N79OjR6tWrAWDlypWsSxOczMxMAPD09OT0Kpx3Rwkhb7755vXr1wMDAzdu3GjgyOLi4ilTplRUVMhksrfffpvrwozm6Oj46quvEkK++eYb1rUYQywWT58+PS0t7euvv16zZo2rqyt2R/Wh70w511uacxpxQsgnn3wCAE5OToa7lzqdbsqUKQAQGBhYUVHBdVVtdPr0aQBwdnZm8uSLyf3nP/8BgLlz57IuRHDoLyZra2tOr8JtS3j8+PGVK1eKRKLNmzcbmDwNAAqFYvfu3U5OTrt376azuoUsJCQkKCiooKDg559/Zl2LCdjZ2QG2hE2xtrYWi8VVVVVarZa7q3AYwgcPHkyfPr2mpmbFihUvvfSSgSOPHTu2atUqsVj8448/Gs4qb/bs2TNp0qSSkhJ9B7z77rsA8PXXX/NYFFewO6qPSCSiTUJlZSWHl+GohVWr1XTVvTFjxhiegpibm0sfmV+1ahVHxRhh5MiRALBp0yZ9B5SXl9OlIoybKi0oBw4cAF4eFzBHrq6uAMDpg5RctYRLlixJSkry9PTcsWOHvgeFAECj0bzyyiu5ubljxoxZtWoVR8UYYe7cuWCwobO1tZ0xYwYAmOnwTH3YEhrAx5vDRbJ/+uknAJBKpcnJyYaPjI6OBgBPT89Hjx5xUYnR1Go1faIsJSVF3zFpaWkikcje3t7c17M5deoUAISEhLAuRIh69+4NAFevXuXuEqZvCW/evDlnzhwAiImJGTZsmIEjf/rppy+//FIqle7cuZM2+sIhlUrp4jd0AY4m9e7dOzQ0tKysbPv27fxVxgFsCQ0wv5awtLS0T58+0IKnP65fv06ffDNw38XWvXv3LCwsrKysDNwP7NixAwD69+/PZ2Emd/v2bQDw9fVlXYgQ0aENTvexMHEI6UPZffv2Lau/M3sjLc8qWxMmTACATz/9VN8B1dXVbm5uAPDnn3/yWZhp5eTkAEDnzp1ZFyJEL774IgAc5HKbRVN2Rzdu3PjDDz/Y29urVCr60ZM+c+fOTUtL69u3r8BHNeqGZ3Q6XZMHWFpa0sk9BnqtwofdUQPMqTt68uRJS0tLkUi0a9cuw0fSCdAODg7CX0BWq9XS1agMLAJQ12sV2thSy2k0GgCQSCSsCxEiOgb+ww8/cHcJ07SEjx49evnll9Vq9eLFi19++WUDR546dUoul9M5NHTcScjEYjEdZDLQ0Hl5eb344ovV1dXx8fE8lmZKEonE0tKypqZGrVazrkVwzKMl1Gq1L7zwAgAMHTrU8KOuDx8+pDsZLFmypO3X5cejR4+srKwsLCzu3r2r75hff/0VALp3726mywUQQhwdHQFA6EutsrBo0SIA+OKLL7i7hAlawuXLlyckJLi5ue3atUsqleo7TKvVzpgx4/79+0OHDl27dm3br8sPV1fXl156SavVbt68Wd8x4eHh3t7e6enp9Ll7c8TT4wJmyAxawl9//VUkEkkkkmb
HcOVyOQC4ubkJZFO4ljt+/DgAuLu7G2jn6a+VyZMn81mYCdHHC830CUlOffJJhYVFzT//yeEl2hTCO3fu0I2IN2zYYPjI/fv3tzCrwtS3b18AMLASGe21SiQSga+kqk///v0B4OLFi6wLEZzPPycA5L33OLyE8d3RqqqqqVOnFhYWRkRE/N///Z+BI2/fvj1z5kxCyPr16+u2vzEvdIkqA1NJXV1dIyIiampqvv32Wx7rMhn8lEIfW1sAAG7fGKPj+9ZbbwGAv7+/4XXyKisr6aImDbYfNC/FxcV2dnYikejGjRv6jjl27Bg012sVrNGjRwPAkSNHWBciOPHxBIBERXF4CSNbwri4uC1bttjY2KhUKro4vD7z5s07f/68v79/fHx8/d2wzUuHDh3o5B6lUqnvmFGjRgUEBOTm5tIng8wLtoT60FknnL4xxoTw4sWLdNw2NjZ2wIABBo5UKpVbtmyxs7Pbs2eP4awK3/z58wFg8+bNBn5Saa/VHGfPYAj14aE72uoQFhUVTZkypbKycsGCBW+88YaBIy9evPjee+8BwKZNmwICAoyvURgGDBgwaNCg4uLiXbt26TvmjTfesLOzO3LkyM2bN/msre1whQt9aAg5/eymdSHU6XSvv/76nTt3QkJCNmzYYODIuqz+4x//mDlzZtuKFIpmn/Tt0KEDXYgtLi6Ox7pMAFtCfQQ3MPPhhx8CQMeOHe/cuWPgMKFtP2gqFRUV9COZc+fO6Tvm/PnzAODk5GReW3AvWbIEAOjC56i+1FQCQPr04fASrWgJExMT16xZIxaLt2/f7u3tbeDIjz766ODBgx07dty5c6elpaXxvyEExsbGhrbqBoZnBg4cGBwcXFRUZF4LsWFLqI+A7gmzsrJee+01rVa7evVq+oSVPomJiR9//HFLsmqO3nnnHZFItH379r/++kvfMc32WgWIjzXFzJNQuqPV1dUhISEAMGHCBMNzlDMzM+n2gx9//LGJ2mrBoZtp/vvf/9Z3QEt6rUJDt6RdsGAB60IEp6SEABB7ew4v0aKWUKvV9uzZ08nJKT4+nm52p09hYaGdnd3EiRPff//9tv+CECba0NFVOZo8wMbGhq4wYEbDM9gd1aeuJeRwl7CWJDUvL8/a2trw4zx18vPzCwsL2/rLQcA0Gg1diC0pKUnfMdeuXTOvhdjoWlWvvfYa60KEyNqaABDudjxoUUvo4uIyZcoUrVbbkomRzs7OdFXc9koikTS7pEWvXr1GjhxZVlb2448/8lia8fBzQgO4vi1s6cAMXfX922+/pUshPOVkMpmFhcXPP//86NEjfcfQXivda0X4sDtqANef17c0hKGhof369Xvw4MHevXu5qsV8eHp6hoeHq9XqLVu26Dtm8uTJXbp0SUtLS0lJ4bM24+BDvQYIpSUEgHfeeQfMbeSdO7Shi4uL07cQm1QqpQ+amMVUUmwJDRBQCGfOnOng4HD06NG0tDSuyjEf48aN8/Pzy8jIOHz4sL5jaK91165dBnqtAoEhNEBAIXRwcJg2bRq0iy1Q2k4kEs2aNQuaW4ht/PjxarV669at/FVmFAyhAQIKIQDMmzcPALZu3Yr/WwAwe/ZsKyur33777d69e/qOob1WpVKpr9cqEBhCA4QVwsDAwMGDBxcXF9N9l55yLfnkZty4cT4+PhkZGQkJCXzW1loYQgO4fq631c8TmuPESO7Qd8PAJzctWT5YCOhkjOrqak73hTZTXLeEItLK2TiVlZWenp4FBQVnz54NCgriqCwzEhgY2Llz5y1bttBpNI3l5eV5enrW1NSkp6d369aN5/JazsHBoaysrKSkxMHBgXUtwrJ5M5w8CdOnw/PPc3L+VreENjY29IF6bAypU6dOHTp0SF8CAcDV1XXy5MmGlw8WApw0o8/bb8M333CVQDCiJQSA27dv9+jRw8bGJjs7u33PUDOVP/74Y+TIke7u7vfu3TOwSDlbvr6+d+7cycjIoHvgIN4Ys9CTn5/
f6NGjKyoqzGViJHMjRozw9/fPzc0dPXr0smXLvvnmm8TExLt37wrqBgwnzTSwaBGIRFB/bZb8fBCJoG5dF3rA3buPDzh+/IkDWkhiXH1z585NTEyMjY1dsGCB+S5kyJvCwsKSkhIXF5fk5OTk5OS616VSqaenp++TevToweSuDAdIm7RjB6xdC56eHF7CyBBGRER07dr12rVrSUlJZrqoNm90Ot2MGTMePnwYGBj4+eefZ2Zmpqenp6enZ2Rk3L9/PyMjIyMjo8GXuLu7d+/e3dfXt3v37nV/6NSpE6d1Yggb8/SEqir44gv41784vIqRIZRIJLNmzfroo49iY2MxhIbVrbizd+/eBut9qNXq7OzsjCddv349Nzc3Nze3fpsJAFZWVl27dm3QbAYEBFhbW5ukTgxhY1VVMH8+bNgAq1YBd6MfRoYQAObMmbNu3brdu3c/fPiQ7tuOGjO84o6lpSXNUoPXi4qKGiTz6tWrubm5jZtNiUTi5eXVIJn+/v7PPPNMa0vFEDam0cCCBfDpp7BpEyxf3vQx2dmP//zggTFXMT6EHh4eEyZM2Ldv35YtW5YtW2b0edqxutWxPv74Y8OrYzXg5OQUFBTU4GPYxsnMyMi4d+9ekx1aJycn30a8vb0NrE6CIWyMEHB2hrfegq++gsWLmz4mNLStVzE+hAAwd+7cffv2xcbGLlmyxMLCoq21tC/V1dVTp07Nz8+fMGGCSVbcaTKZTXZob9y4UVRUdO7cuXPnztU/uMkObe/evWn8MIT6LF4MX38N8fEwdWoT/6pUQt3demoqrFzZ6vO3KYRjx4719/e/devWoUOHwsPD23Kq9mfhwoVnzpzp1q1bs6tjtUWTHVpCCB3yqRsBon8oKCho3GxaWFh4enp27949Ly8PAB4Y16NqR7Ra2Lv3ibz5+MDUqfD55zB5chPHjx0LdfcZjo7GXLFNIRSJRLNnz5bL5bGxsRjC+rZv365UKq2trX/55RdnZ2eery4SiTw8PDw8PBqMmVVVVeXk5NA7zLS0NBrIzMzMu3fv3r17FwAcHR0VCkVRUdHy5cu5HowVpsuXQSaD06dh+/YnXl+6FIKDgavtttq4UBRdiE0sFhteGP+pcvnyZdq1++6771jX0rzq6uqbN28ePHiQLhhLP/V1dHRcu3ZtWVkZ6+r4U15OliwhEgkBIF5e5PffycKFpEOHxwc8/zwJDiZSKfnss9pXFi4kAKT+D/6xYwTg8QEt1NZukouLy9SpU3U6nZnuUGtypaWlr7zySkVFxezZs+mibAJnaWnp7+/v5uaWn5/v5uZ28uTJiRMnFhcXL1++3Nvbe/369VVVVaxr5Nzx4zBwIHz2Geh0IJNBaio0HkdbuhTOngVO1jlr6y8QQujHWZ07d25Pe78YR6fTTZ06FQACAwMrKipYl9MKK1euBIC5c+fSvyYnJ9d1ZT09PZVKpUajYVshRwoKiExGAAgACQwkp08//qcGLSEhpF+/Jxo6U7WEJgghIYRuFfrTTz+Z5GzmS6FQAICTk1N6ejrrWlqHbiB5+PDh+i8mJCTUbQLbq1cvlUplvhueN6bTkfh44uJCAIiNDfngA8Jqm3PThJCurjlq1CiTnM1MHTt2TCKRiESiPXv2sK6ldeiWpo6Ojo37MlqtVqVS+fn50Sj269dPpVIxKdK0bt8mYWG1DeD48YTtgIZpQlhSUkKnaKSmpprkhGYnNzfX3d0dAFasWMG6llZbt24dALzxxhv6DlCr1Uqlsu6ZyWHDhp04cYLHAk1JrSYKRe3K9m5uJD6edUGmCiEhhK5KunDhQlOd0IxoNJrQ0FAAGD16dE1NDetyWm3QoEEAsHfvXsOHlZeXx8TE1H10ERYWdv78eX4qNJXkZBIQQACISESiokheHuuCCCEmDOGlS5dol+apGtemFi1aRAcwHj16xLqWVsvKyhKJRLa2ti3cWri0tFShUNCOj1gsjoyMvHnzJtdFtl1xMYmOJmIxASB
+fuTIEdYF1WOyEBJCnnvuOTCTD8dMaM+ePSKRSCqVJicns67FGBs3bgSAyMjIVn1VXl6eXC6nD3BIpVKZTHb//n2OKmy7/fuJhwcBIFIpkcs53F/JOKYMYXx8PAAMHDjQhOcUuBs3btA2wcCeoQI3atQoANi+fbsRX5uZmSmTySQSCQBYWlrKZLKHDx+avMK2uH+fTJlSOwAzbBi5epV1QU0xZQirqqpcXV0B4MyZMyY8rWCVlZXRkX3z3dYvLy9PIpFYWVkVFxcbfZJr165FRkbSqTb29vZyuVwIuzJqtUSpJA4OBIA4OpKYGGJwj2mWTBlCQsjixYsBYNasWaY9rTDR7Xh79uxZUlLCuhYj0R0NwsPD236qy5cvR0ZG0jEbFxcXhUJRya7bd/78+RkzDtIG8NVXSW4uq0JaxMQhvH37tlgstrGxad+b9ZK/b6Xs7e3T0tJY12I8Ou3+m2++MdUJU1JSRo4cyXCqTVlZ2eLFiyUSiVgsGTOm4MABPi9uJBOHkBDywgsvAMDGjRtNfmbhOHXqlKWlpUgkMutPrktKSujC2ya/kUtISBg4cCCNYs+ePePj47W89AX/+9//0uULxGKxTCYzlx6K6UP4yy+/0FlO7WmKU30PHz708PAAgMWLF7OupU22bdvG3TwnnU6nUqn8/f1pFPv27cvpL6wHDx7QuwM6cfd0/Tmggmf6EGo0mq5duwLA8ePHTX5y5rRa7dixYwHgueeeU7Oaa2giL7/8Mtd9FjrVhv48AMDQoUNN/lOh0+ni4+Ppc1i2trYKhcLs5pqbPoSEkA8++AAAXn31VS5OzhZdqMLNzU3IH4u1RGVlpYODg0gkunfvHtfXqq6uViqVdauBhYWFnTt3ziRnvnXr1pgxY+hpw8PDzfShVk5CmJOTI5VKpVJpS35SCwoKIiMj5XJ5XFxcYmLinTt3BDvz69dffxWJRBKJpB008nv37gWAkJAQ3q5Ip9p06NABAEQiURun2qjVasz4Xi0AAArwSURBVIVCYWVlRX8nxgthDqixOAkhIeSll14CgLVr1zZ75KlTpxo84iiVSn19fcPCwmQymUKhUKlUZ8+eZX6TfffuXbpQxWetfVxMkOiuPp988gnP183Pz5fL5TY2NvQ/OioqyojmKykpqU+fPjTMUVFR+fn5HFTKH65CeOjQIQDw8vJqtlnLy8vbtm3bmjVr3nzzzdDQ0Lr7h8bc3d2HDRs2c+bM1atX//DDD3/++Sdv8zMqKyufffZZAJg0aVI7GHBSq9UdO3YEgGvXrjEpICsrq8FUmwcPHrTkC4uKiqKjo+nCWf7+/omJiVyXygOuQqjT6Xr06AEAB1r/SU11dXV6enpCQoJSqZTL5ZGRkUFBQXTjrsasrKwaN5smf6qdLlTh7+/flpklwnH48GEACAgIYFvG9evXo6KiaKLoVBvDb+/+/fvp72ipVCqXy6uqqngrlVNchZAQ8tlnnwHAhAkTTHXCwsLCs2fPqlQqhUIhk8nCwsLoI3z6ms0GyTQ6P3FxcQBgY2Njdk/u6EM3GF65ciXrQggh5MqVK3VTbZydnRUKRePfodnZ2ZP/Xm9w+PDhV4U5B9RYHIYwPz+fLsSWkZHB3VUKCgrOnDmzc+fOdevWzZo1a9SoUV5eXvrW+ezUqdOQIUM2bNjQ8vNfvHiR3sBs3bqVu++CT1qtlv7yEtTvlD///JNOJQcADw+Puqk2Go0mJibG3t4eABwdHWNiYvj53J9PHIaQEDJjxgwAeP/99zm9SmNqtbpxh5b+R0JrnjwuLCyk6+rOnz+f04L5RBfm8vb2FuDNbUJCAr33phWuWLGCPnAMABMnTszOzmZdICe4DWFKSgoAuLq6CqH7rtPpsrOzT5w40cLZnjqdLiIiAgBCQkKEUL+p0En2gp3uQ6fa0AEFR0dHAPDx8Tl48CAh5ODBgyqVivk4uclxG0JCCJ1DuHPnTq4vZHKrV68GgI4dO5rpR8D60LZdgI8gT58
+3cXF5Y8//iCEaDSauLi4nTt3rl69uu6RfzoJ7saNG0zLND3OQxgbGwsAI0aM4PpCpnXkyBELCwuxWEx/B7cb58+fBwA3NzcB3lnRRzoMDKfT9RcvXLjAZ1U8aNNeFC3x+uuvL1269MKFC05OTnTT2fp8fHwEuNt2VlbWtGnTtFrtRx99NG7cONblmNKePXsAYMqUKdztUWO0ZreFogeUl5fzVxMvOA+hg4PD+fPnp02bdvbs2cabddnb2zfeF9rLy0sqlXJdmD4ajWbatGl5eXnh4eHL9W0MabZoCCc3ub0Qay0MYfvbvI3zEAKAn5/fmTNnmtzjMiMj49KlS3Sltvrc3d0DAgLqt5l+fn502iHXFi1alJKS4uXlxemWZkzcunUrNTXV0dGx7rlbQcEQcq5Vu8/STdsbn6G1u8+21o4dOzZt2kS3NKNPx7Qn9FHPSZMmWVpasq6lCXRSFIaQb00mU6PRZGVlmWT32VZJTU2dM2cOAHz11VfBwcFt/NYESMh9UcCWUFDoUxQNdp8FPc3mnTt3mt20vU+fPgEBAd27d3fUv5Mq3dKsvLx8xowZs2fPNv13xdr9+/fPnDlja2tLH0oWIDozCUMoaE02m3W7z9bfgDYzM7PJZrPJDm23bt3EYvHbb7997dq1/v37K5VKfr8tnvzyyy+EkPDwcCP6CPzAltBcWVtbN242NRrN3bt3G2/a3mQyra2tnZ2d79+/7+jouHv3bsH+jLaRwPuigCFsZ6RSqb+/f90qQ3X0dWjv378/f/78sWPHdu/enUnBXMvPz09OTpZKpfQDcWHCED4VmuzQlpWVpaen9+rVi66V0C7t27evpqYmPDzcwF0xcxjCp5e9vX1gYCDrKrgl/L4oPMUhbFcfRqMmlZaWJiYmWlhYTJo0iXUthmAIUbt14MCBqqqq4cOH1+3vKUwYQtRumUVfFDCEqL2qqqr6/fffRSKR8EPY7LS1Zg8wUxjCdu7QoUOlpaXBwcFeXl6sa2nGU/soE4awnTOXvihgdxS1SzU1Nb/99huYSQhtbGxEIlFlZaVOp2vyAAwhMj/Hjh3Lz88PCAjo1asX61qaJxKJbGxsCCGVlZVNHoAhROanbjEL1oW0lOGYYQiRmdHpdPv27QMz6YtShmNmaWkpkUg0Go1Go+G3Lm7htLV269SpUzk5Od7e3nSRMrMg699f4u5uVVWl74Ds556TlpdbVFQAL2ud8AND2G7V9UUFuJ6dPstzcuD8eSgr03eA2+3bkJsLGEIkcLm5uadPn/7+++/BrPqiAAD0YU4Dd33NHmCGMITtgUajuXz5cnJyMn1kOS0tDQCeeeaZ9957b+jQoayra40WhrB9fV6PITRXt27dOv23ixcv1h+r6NChQ0hIyJAhQ5YvX25mqzZiS4iErLS09NKlS+fOnUtJSTl+/HheXl7dP1lYWPTp04c+rzx8+PCBAweaWfbq0K1gMYRIILRa7fXr12kPMyUl5cKFC/WnknTu3Dk4OLgueE5OTgxLNZlme5sYQsS13Nzcuv0CkpOTi4uL6/5JKpUOHDhw2LBhNHgBAQEM6+QKdkcR/5ocU6nj7u4+fPhwGrzg4GBra2tWdfIEQ4j4kZOTk5KSQoN39uzZ6urqun9ycHDo378/7WGOHDlS4M/Cm56NDQCAnrmjABhCZCw6pkKDd/r06fY5pmISzWas2ZEbM4Qh5IThMRV3d/egv4WGhgp5GUK+tXB0FD8nRE162sdUTKLZlrDZ/qoZwhAar+VjKoMGDWrHKwubEn5EgVpl8+bN7777bt1f6ZgKDd6QIUNcXV0Z1maucHQUtcqQIUMCAwOfe+65wYMHDx48uFevXmb0vIJAYQhRqwQGBl68eJF1Fe3LUxnCp3g0HAnQU/kRBYYQCclT+SgThhAJyVP5FAWGEAkJ3hMixBiGECHGLCzAygq0WtC34BqGECHOGY6ZnR14eICbG58VcQ0/J0QCEx8PUmntCE1j1taQlcVvQZw
TEUJY14DQUw27o0io8vNh3jzw8AArK/DxgWXL2tnHg3WwJUSCVFEBgwbB9evwxhsQEAAnT8Ivv8DIkXD0KLS7h57xnhAJUkwMpKVBTAwsXFj7yqJFsHEjbNsGUVFMKzM9bAmRIAUGQno6FBRA3XOYOTnQtStMmAAHDjCtzPQwhEh4amrAxgaCg+HkySde79oVLCwgM5NRWVxpb91r1B6UlEBNDTg7N3zdxQUKClgUxC0MIRIe+mx04z4aIe1vVAYwhEiIOnQAKyt4+LDh63l50B4XDcEQIuERi6FvX0hNfeKDwexsePAAgoLYlcUVDCESpFdfhepq+PLLx6/ExAAAzJjBqiLu4OgoEqTKShg8GFJT4a23IDAQTp2CHTtg0iTYt491ZaaHIURCVVQEK1fCnj2QlwceHjBjBixfDu1x+VYMIUKM4T0hQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixNj/A2t2CTg/fNxjAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "WtMyOYmYc_9R", - "colab_type": "text" - }, - "source": [ - "Analyzing the distribution of solubilities shows us a nice spread of data." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "t7V7o6x8c_9S", - "colab_type": "code", - "outputId": "b34fa76e-870e-46bb-ee74-acd74479c104", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "%matplotlib inline\n", - "import matplotlib\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "solubilities = np.array(dataset[\"measured log solubility in mols per litre\"])\n", - "n, bins, patches = plt.hist(solubilities, 50, facecolor='green', alpha=0.75)\n", - "plt.xlabel('Measured log-solubility in mols/liter')\n", - "plt.ylabel('Number of compounds')\n", - "plt.title(r'Histogram of solubilities')\n", - "plt.grid(True)\n", - "plt.show()\n" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAEWCAYAAABhffzLAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZhcZZn+8e9NCIs0EBKgJywStuAgDEgaRHE0LS6oCIwsQ0QEZCaj/oZxGUdxRURGkAGEcUEGGeKIaRAFIgqomAZ0BExYjBiIrCJLYLKYNCqQ5Pn9cd6WSndV16lOnaqunPtzXX11nVNnuWvpp99665z3KCIwM7Py2KDdAczMrLVc+M3MSsaF38ysZFz4zcxKxoXfzKxkXPjNzErGhd/WmaR7JU1vd452kvR3kh6TNCDpFU3a5omSftaMZSVdL+mEasumzLuMsG7pX9/1jQu/jUjSI5LeMGTeWoUjIl4eEf11tjNFUkjasKCo7fYfwD9HRFdE3NXuMENFxFsiYlaN+7oi4iEASZdJ+vyQ++u+vtZZXPhtvTAG/qHsBNzb5gxmubjw2zqr/FQg6QBJ8yStkLRY0nlpsVvS7+Wpa+FVkjaQ9ClJj0p6WtI3JW1Zsd13p/uWSPr0kP18VtJVkr4laQVwYtr3LyQtl/SkpC9L2qhieyHp/ZJ+K2mlpDMk7Srpf1PeKyuXH/IYq2aVtLGkAWAccI+kB6usK0nnp/VWSFogaa9035ZpW8+kbX9K0rC/y2qfmCT1S/qHIbv6sqQ/SLpP0sEjLFu5UkjaTdJM4Djgo+k1+n6V13cDSadKejC9LldKmpju2yS9HkvSa/BLSd3V9mnt5cJvzXYBcEFEbAHsClyZ5r82/Z6QuhZ+AZyYfnqBXYAu4MsAkvYEvkpWiCYDWwLbD9nX4cBVwATgcmA18CFga+BVwMHA+4es82ZgGnAg8FHgYuBdwI7AXsCMGo+rataIeC4iutIy+0TErlXWfVN6/FPT4zgGWJLu+880bxfgdcC7gZNqZKjnlcCDZI//NOB7g0U5j4i4mOx5/GJ6jd5eZbFTgCNS1u2AZcBX0n0nkD2WHYFJwHuBP43uoViRXPgtj2tSC265pOVkBbmWF4DdJG0dEQMRcdsIyx4HnBc
RD0XEAPBx4NjUqj0K+H5E/Cwingc+AwwdWOoXEXFNRKyJiD9FxPyIuC0iVkXEI8DXyQpUpS9GxIqIuBf4NfCjtP8/ANcDtb6YHSlrPS8AmwMvAxQRCyPiSUnjgGOBj0fEypT5XOD4HNus5mngSxHxQkRcAdwPvG2U26rlvcAnI+L3EfEc8FngqPQ8vEBW8HeLiNXp9VjR5P1bE7jwWx5HRMSEwR+Gt6IrnUzWsr0vfdQ/dIRltwMerZh+FNgQ6E73PTZ4R0T8kRdbyYMeq5yQNFXSdZKeSt0//07W+q20uOL2n6pMd1HdSFlHFBE/Jfsk8xXgaUkXS9oiZRtfZbtDP9nk9XisPerioyl3M+0EXF3RCFhI9kmrG/gf4EagT9ITkr4oaXyT929N4MJvTRURv42IGcC2wNnAVZI2Y3hrHeAJskIy6KXAKrJi/CSww+AdkjYla02utbsh018D7gN2T11NnwA0+keTO2tdEXFhREwD9iT7x/hvwP+RtZKHbvfxKpt4Nv1+ScW8vxqyzPaSKh/vS1PuRtQbrvcx4C2VDYGI2CQiHk+fNE6PiD2BVwOHknVd2Rjjwm9NJeldkraJiDXA8jR7DfBM+l15vPhs4EOSdpbURdZCvyIiVpH13b9d0qvTF66fpX4R3xxYAQxIehnwvmY9rjpZRyRpf0mvTK3fZ4E/A2siYjXZdyBnStpc0k7Ah4FvDd1GRDxD9g/hXZLGSXoP2XcolbYF/kXSeElHA38N/LDBx7mYtV+joS5KeXdKj20bSYen272S9k5dWCvI/qmtaXD/1gIu/NZshwD3piNdLgCOTf3vfwTOBH6eugkOBC4l6x64BXiYrCCeApD64E8B+sha/wNkfdjPjbDvjwDvBFYC/wVc0cTHVTNrDlukPMvIul+WAOek+04h+2fwEPAz4NtpX9X8I9knhSXAy4H/HXL/7cDuZJ8kzgSOioih3WP1fAPYM71G11S5/wJgDvAjSSuB28i+VIbsE8hVZEV/IXAz2XNmY4x8IRbrBKmVvZysG+fhducx62Ru8duYJentkl6SviP4D2AB8Eh7U5l1Phd+G8sOJ/ty8gmyLoxjwx9RzdaZu3rMzErGLX4zs5Jp98BWuWy99dYxZcqUYfOfffZZNttss9YHWkfO3Vqdmhs6N7tzt1at3PPnz/+/iNhm2B0RMeZ/pk2bFtXMnTu36vyxzrlbq1NzR3RududurVq5gXlRpaa6q8fMrGRc+M3MSsaF38ysZFz4zcxKxoXfzKxkXPjNzErGhd/MrGRc+M3MSsaF38ysZDpiyAYzGzt6Z/VWnT/3hLktTmKj5Ra/mVnJuPCbmZWMC7+ZWcm48JuZlYwLv5lZyfioHjNrisqjfWZ0zeD0WacDPtpnLHKL38ysZFz4zcxKxoXfzKxkXPjNzErGhd/MrGRc+M3MSsaF38ysZFz4zcxKptATuCRNAC4B9gICeA9wP3AFMAV4BDgmIpYVmcPMGldr+GXrfEW3+C8AboiIlwH7AAuBU4GbImJ34KY0bWZmLVJY4Ze0JfBa4BsAEfF8RCwHDgdmpcVmAUcUlcHMzIYrssW/M/AM8N+S7pJ0iaTNgO6IeDIt8xTQXWAGMzMbQhFRzIalHuA24KCIuF3SBcAK4JSImFCx3LKI2KrK+jOBmQDd3d3T+vr6hu1jYGCArq6uQvIXyblbq1NzQ3uzL1qyaNTrThw3kaWrlwIwddLUZkUqXKe+V2rl7u3tnR8RPUPnF1n4/wq4LSKmpOm/JevP3w2YHhFPSpoM9EfEHiNtq6enJ+bNmzdsfn9/P9OnT2929MI5d2t1am5ob/Z1+XJ3RtcMZg/MBjprdM5Ofa/Uyi2pauEvrKsnIp4CHpM0WNQPBn4DzAFOSPNOAK4tKoOZmQ1X9Hj8pwCXS9oIeAg4ieyfzZWSTgYeBY4pOIOZmVUotPBHxN3AsI8ZZK1/MzNrA5+5a2ZWMi78ZmYl48JvZlYyLvxmZiVT9FE9Zla
QRo+z76Tj6a1YbvGbmZWMC7+ZWcm48JuZlYwLv5lZybjwm5mVjAu/mVnJuPCbmZWMC7+ZWcn4BC6zkqh1wpdP7Coft/jNzErGhd/MrGRc+M3MSsaF38ysZFz4zcxKpqHCL2kDSVsUFcbMzIpX93BOSd8G3gusBn4JbCHpgog4p+hwZla8Rsf1t86Xp8W/Z0SsAI4Argd2Bo4vNJWZmRUmT+EfL2k8WeGfExEvAFFsLDMzK0qewv914BFgM+AWSTsBK/JsXNIjkhZIulvSvDRvoqQfS/pt+r3VaMObmVnj6hb+iLgwIraPiLdG5lGgkU7B3ojYNyJ60vSpwE0RsTtwU5o2M7MWqfnlrqQP11n3vFHu83Bgero9C+gHPjbKbZmZWYMUUb27XtJp6eYewP7AnDT9duCOiHhX3Y1LDwPLyL4T+HpEXCxpeURMSPcLWDY4PWTdmcBMgO7u7ml9fX3Dtj8wMEBXV1e9GGOOc7dWp+aGkbMvWrKoxWnymzhuIktXLwVg6qSpbU6TX6e+V2rl7u3tnV/R2/IXNQv/XxaQbgHeFhEr0/TmwA8i4rX1wkjaPiIel7Qt8GPgFLIviCdULLMsIkbs5+/p6Yl58+YNm9/f38/06dPrxRhznLu1OjU3jJx9LB+GOaNrBrMHZgOdNfpnp75XauWWVLXw5/lytxt4vmL6+TSvroh4PP1+GrgaOABYLGlyCjUZeDrPtszMrDnyjMf/TeAOSVen6SPI+uZHJGkzYIOIWJluvwn4HFmX0QnAWen3taMJbmadwdcBGHvqFv6IOFPSDcBr0qyTIuKuHNvuBq7OuvHZEPh2RNwg6ZfAlZJOBh4FjhlddDMzG428V+C6G3hycHlJL42I3420QkQ8BOxTZf4S4OAGc5qZWZPkGavnFOA0YDHZeD0iO0rnb4qNZrb+afQL2RldM5j+l6Ofy2Gk58jdQ82Rp8X/AWCP1FI3M7MOl+eonseAPxQdxMzMWiNPi/8hoF/SD4DnBmdGxGjP3DUzszbKU/h/l342Sj9mZtbB8hzOeXorgpiZWWvkOapnLlXG34+I1xeSyMzMCpWnq+cjFbc3AY4EVhUTx8zMipanq2f+kFk/l3RHQXnMzKxgebp6JlZMbgBMA7YsLJGZmRUqT1fPfLI+fpF18TwMnFxkKDMzK06erp6dWxHEzMxaI09Xz3jgfcDghVf6ya6m9UKBuczMrCB5unq+BowHvpqmj0/z/qGoUGZmVpw8hX//iKgcXvmnku4pKpCZmRUrzyBtqyXtOjghaRey4ZnNzKwD5Wnx/xswV9JDZEf27AScVGgqsw43li+EbpbnqJ6bJO0O7JFm3R8Rz420jpmZjV15jurZBHg/2TV3A7hV0kUR8eeiw5mZWfPl6er5JrAS+M80/U7gf4CjiwplZi9yt5E1W57Cv1dE7FkxPVfSb4oKZGZmxcpzVM+dkg4cnJD0SmBecZHMzKxIeQr/NOB/JT0i6RHgF8D+khZI+lW9lSWNk3SXpOvS9M6Sbpf0gKQrJPmqXmZmLZSnq+eQddzHB4CFwBZp+mzg/Ijok3QR2YBvX1vHfZiZWU51W/wR8Siwgmwo5kmDPxHxaLqvJkk7AG8DLknTAl4PXJUWmQUcMer0ZmbWMEUMu6ri2gtIZwAnAg/y4iUYI8+lFyVdBXwB2JzsSl4nArdFxG7p/h2B6yNiryrrzgRmAnR3d0/r6+sbtv2BgQG6urrqxRhznLu12pF70ZJFTdnOxHETWbp6aVO21Uqtzj110tSmbGd9e4/39vbOj4ieofPzdPUcA+waEc83EkTSocDTETFf0vRG1gWIiIuBiwF6enpi+vThm+jv76fa/LHOuVurHblPn3V6U7Yzo2sGswdmN2VbrdTq3HOPnNuU7ZTlPZ6n8P8amAA83WCWg4DDJL2V7Fq9WwAXABMkbRgRq4AdgMcb3K6Zma2DPEf1fAG4S9KNkuYM/tRbKSI+HhE7RMQU4FjgpxFxHDAXOCo
tdgJw7Sizm5nZKORp8c8iOxJnAbCmCfv8GNAn6fPAXcA3mrBNMzPLKU/h/2NEXLguO4mIfrIrdxERDwEHrMv2zMxs9PIU/lslfQGYA/xlVM6IuLOwVGZmVpg8hf8V6feBFfOC7Hh8MzPrMHnG4/fQgGZm65G6R/VI2lLSeZLmpZ9zJW3ZinBmZtZ8eQ7nvJRsPP5j0s8K4L+LDGVmZsXJ08e/a0QcWTF9uqS7iwpkZmbFytPi/5Ok1wxOSDoI+FNxkczMrEh5WvzvA2ZV9OsvIxtszczMOlCeo3ruBvaRtEWaXlF4KjMzK0yeo3r+XdKEiFgRESskbZWGWzAzsw6Up4//LRGxfHAiIpYBby0ukpmZFSlP4R8naePBCUmbAhuPsLyZmY1heb7cvRy4SdLgsfsnkY3YaWZmHSjPl7tnS7oHeEOadUZE3FhsLDOz/HpnVR9ZZu4Jzbky1/omT4ufiLgBuKHgLGZm1gJ5+vjNzGw94sJvZlYyNQu/pJvS77NbF8fMzIo2Uh//ZEmvBg6T1Aeo8k5fgcvMrDONVPg/A3wa2AE4b8h9vgKXmVmHqln4I+Iq4CpJn46IM1qYyczMCpTnOP4zJB0GvDbN6o+I64qNZWZmRckzSNsXgA8Av0k/H5D070UHMzOzYuQ5gettwL4RsQZA0izgLuATI60kaRPgFrJxfTYEroqI0yTtDPQBk4D5wPER8fzoH4KZmTUi73H8Eypu573Q+nPA6yNiH2Bf4BBJBwJnA+dHxG5kF3U5OW9YMzNbd3kK/xeAuyRdllr784Ez660UmYE0OT79DB4NdFWaPws4ouHUZmY2aoqI+gtJk4H90+QdEfFUro1L48j+UewGfAU4B7gttfaRtCNwfUTsVWXdmcBMgO7u7ml9fX3Dtj8wMEBXV1eeKGOKc7dWM3IvWrKo6vypk6Y2tHyjJo6byNLVS5uyrVYaK7lrvT61rG/v8d7e3vkR0TN0fq7Cv64kTQCuJjsv4LI8hb9ST09PzJs3b9j8/v5+pk+f3vzABXPu1mpG7kZHf6y1fKNmdM1g9sDspmyrlcZK7kZH51zf3uOSqhb+lozVk67gNRd4FTBB0uCXyjsAj7cig5mZZXINyzwakrYBXoiI5emqXW8k+2J3LnAU2ZE9JwDXFpXBrGjNatmbtdKILX5J4yTdN8ptTwbmSvoV8Evgx+nEr48BH5b0ANkhnd8Y5fbNzGwURmzxR8RqSfdLemlE/K6RDUfEr4BXVJn/EHBAYzHNzKxZ8nT1bAXcK+kO4NnBmRFxWGGpzMysMHkK/6cLT2FmZi2TZ5C2myXtBOweET+R9BJgXPHRzMysCHULv6R/JDuRaiKwK7A9cBFwcLHRzMzWTaPnX5RFnuP4/x9wELACICJ+C2xbZCgzMytOnsL/XOXomenkq+JP9zUzs0LkKfw3S/oEsKmkNwLfAb5fbCwzMytKnsJ/KvAMsAD4J+CHwKeKDGVmZsXJc1TPmjQc8+1kXTz3RytGdjMzs0LkOarnbWRH8TwICNhZ0j9FxPVFhzMzs+bLcwLXuUBvRDwAIGlX4AeAC7+ZWQfK08e/crDoJw8BKwvKY2ZmBavZ4pf0jnRznqQfAleS9fEfTTbappmZdaCRunreXnF7MfC6dPsZYNPCEpmZWaFqFv6IOKmVQczMrDXyHNWzM3AKMKVyeQ/LbGbWmfIc1XMN2VWyvg+sKTaOmdnYs74N9pan8P85Ii4sPImZmbVEnsJ/gaTTgB8Bzw3OjIg7C0tlZmaFyVP49waOB17Pi109kabNzKzD5Cn8RwO7VA7NbGZmnSvPmbu/BiYUHcTMzFojT4t/AnCfpF+ydh//iIdzStoR+CbQTdY1dHFEXCBpInAF2eGhjwDHRMSyUaU3MxuFWkfpnLbTaS1O0h55Cv9on4lVwL9GxJ2SNgfmS/oxcCJwU0ScJelUsvH+PzbKfZiZWYPyjMd/82g2HBFPAk+m2yslLSS7UPvhwPS02CygHxd
+M7OWUb1rqkhayYvX2N0IGA88GxFb5N6JNAW4BdgL+F1ETEjzBSwbnB6yzkxgJkB3d/e0vr6+YdsdGBigq6srb4wxw7lbqxm5Fy1Z1KQ0jZk4biJLVy9ty77XRafm3m7j7aq+V2q9/lMnTS06Ui613uO9vb3zI6Jn6Py6hX+thbNCfThwYEScmnOdLuBm4MyI+J6k5ZWFXtKyiNhqpG309PTEvHnzhs3v7+9n+vTpufOPFc7dWs3IXatPuGgzumYwe2B2W/a9Ljo192k7nVb1vTLWz9yt9R6XVLXw5zmq5y8icw3w5jzLSxoPfBe4PCK+l2YvljQ53T8ZeLqRDGZmtm7yDNL2jorJDYAe4M851hPZGD8LI+K8irvmACcAZ6Xf1zYS2KxI7WrZm7VSnqN6KsflX0V2CObhOdY7iOyM3wWS7k7zPkFW8K+UdDLwKHBM7rRmZrbO8hzVM6px+SPiZ2QXZ6/m4NFs08zM1t1Il178zAjrRUScUUAeMzMr2Egt/merzNsMOBmYBLjwm5l1oJEuvXju4O105u0HgJOAPuDcWuuZmdnYNmIffxpX58PAcWRn2e7ncXXMzDrbSH385wDvAC4G9o6IgZalMjOzwox0Ate/AtsBnwKekLQi/ayUtKI18czMrNlG6uNv6KxeMzPrDHlO4DLrWL2zepnRNYPTZ52+1vyxMsaKWTu4VW9mVjIu/GZmJeOuHlsvNDq4mgdjszJzi9/MrGRc+M3MSsZdPdZR3EVjRVq0ZNGwI8DWR27xm5mVjAu/mVnJuKvHzGyUxvpF2Gtxi9/MrGRc+M3MSsZdPVa4kY7EGesfic3WR27xm5mVjAu/mVnJFNbVI+lS4FDg6YjYK82bCFwBTAEeAY7xpRzHrqFdNIPDGzeze6ZTj4ow62RFtvgvAw4ZMu9U4KaI2B24KU2bmVkLFVb4I+IWYOmQ2YeTXbSd9PuIovZvZmbVKSKK27g0BbiuoqtneURMSLcFLBucrrLuTGAmQHd397S+vr5hywwMDNDV1VVM+AJ1Su5FSxatNT1x3ESWrl7K1ElT12k7edTax2i2NZi7E3Vq9rLnbvRvZF3Vqim9vb3zI6Jn6Py2Ff40vSwitqq3nZ6enpg3b96w+f39/UyfPr1peVulU3JX6+OfPTC74f730QysVmsfo9nWYO5O1KnZy5671d9R1aopkqoW/lYf1bNY0uQUaDLwdIv3b2ZWeq0u/HOAE9LtE4BrW7x/M7PSK6zwS5oN/ALYQ9LvJZ0MnAW8UdJvgTekaTMza6HCjuOPiBk17jq4qH2amVl9PnPXzKxkPEibmVmLjJUz1d3iNzMrGRd+M7OScVePNWysfFw1G6tGc6JhK7nFb2ZWMi78ZmYl466eEhnrHz8rdVJWs07jFr+ZWcm48JuZlYwLv5lZybjwm5mVjAu/mVnJ+KgeaxofiWPWGdziNzMrGRd+M7OScVdPB3PXitn6odXjX7nFb2ZWMi78ZmYl48JvZlYyLvxmZiXjwm9mVjIu/GZmJdOWwzklHQJcAIwDLomIs4ra1/pwmUAftmlmzdTyFr+kccBXgLcAewIzJO3Z6hxmZmXVjq6eA4AHIuKhiHge6AMOb0MOM7NSUkS0dofSUcAhEfEPafp44JUR8c9DlpsJzEyTewD3V9nc1sD/FRi3KM7dWp2aGzo3u3O3Vq3cO0XENkNnjtkhGyLiYuDikZaRNC8ieloUqWmcu7U6NTd0bnbnbq1Gc7ejq+dxYMeK6R3SPDMza4F2FP5fArtL2lnSRsCxwJw25DAzK6WWd/VExCpJ/wzcSHY456URce8oNzdiV9AY5tyt1am5oXOzO3drNZS75V/umplZe/nMXTOzknHhNzMrmY4r/JKOlnSvpDWSeirmv1HSfEkL0u/XtzNnNbWyp/s+LukBSfdLenO7MtYjaV9Jt0m6W9I8SQe0O1Nekk6RdF96Db7Y7jyNkPSvkkLS1u3Okoekc9Jz/StJV0ua0O5MI5F
0SPrbe0DSqe3Ok5ekHSXNlfSb9L7+QK4VI6KjfoC/Jjuhqx/oqZj/CmC7dHsv4PF2Z20g+57APcDGwM7Ag8C4duet8Rh+BLwl3X4r0N/uTDlz9wI/ATZO09u2O1MD2XckOxjiUWDrdufJmflNwIbp9tnA2e3ONELWcelvbhdgo/S3uGe7c+XMPhnYL93eHFiUJ3vHtfgjYmFEDDuLNyLuiogn0uS9wKaSNm5tupHVyk42ZEVfRDwXEQ8DD5ANbTEWBbBFur0l8MQIy44l7wPOiojnACLi6TbnacT5wEfJnvuOEBE/iohVafI2svN1xqqOHUYmIp6MiDvT7ZXAQmD7eut1XOHP6UjgzsE/8g6wPfBYxfTvyfHitckHgXMkPQb8B/DxNufJayrwt5Jul3SzpP3bHSgPSYeTfXq9p91Z1sF7gOvbHWIEnfT3V5OkKWQ9H7fXW3ZMDtkg6SfAX1W565MRcW2ddV9O9tHyTUVkq2ddso8VIz0G4GDgQxHxXUnHAN8A3tDKfLXUyb0hMBE4ENgfuFLSLpE+I7dTndyfoE3v5XryvNclfRJYBVzeymxlI6kL+C7wwYhYUW/5MVn4I2JUhUTSDsDVwLsj4sHmpspnlNnH1DAWIz0GSd8EBr9A+g5wSUtC5VAn9/uA76VCf4ekNWQDWz3Tqny11MotaW+y73zukQTZ++JOSQdExFMtjFhVvfe6pBOBQ4GDx8I/2BGMqb+/RkkaT1b0L4+I7+VZZ73p6klHDfwAODUift7uPA2aAxwraWNJOwO7A3e0OVMtTwCvS7dfD/y2jVkacQ3ZF7xImkr2Jd6YHoUxIhZExLYRMSUippB1Qew3Fop+PeliSx8FDouIP7Y7Tx0dO4yMshbBN4CFEXFe7vXG9j/i4ST9HfCfwDbAcuDuiHizpE+R9TdXFqI3jaUv8WplT/d9kqwvdBXZx7Ux2Scq6TVkV0/bEPgz8P6ImN/eVPWlP+hLgX2B54GPRMRP25uqMZIeITsabEz/wwKQ9ADZUWpL0qzbIuK9bYw0IklvBb7Ei8PInNnmSLmkv8dbgQXAmjT7ExHxwxHX67TCb2Zm62a96eoxM7N8XPjNzErGhd/MrGRc+M3MSsaF38ysZFz413NpRMdvVUxvKOkZSde1M1c9kgYamV80SY/UGxlTUv/QUVfT/MMGR3yU9FlJH0m3PyfpDen2ByW9pMFMP2zlqJc5n4NjJX1S0omSvpzmvVfSu9PtEyVt14q8VpsL//rvWWAvSZum6TfSprMSJY3JM8WLFhFzIuKsKvM/ExE/SZMfBBoq/BHx1ohY3oyMTfQW4IbKGRFxUUR8M02eCDRU+Mv6vimSC385/BB4W7o9A5g9eIekzSRdKukOSXelQcGQNEXSrZLuTD+vTvMnS7oljcf/a0l/m+YPVGzzKEmXpduXSbpI0u3AFyXtKukGZddMuFXSy9JyO0v6hbLrKXy+3gNS5pyUYYGkv0/zN5D0VWVjwf84tYqPqrJ+rccxI23v15LOrrLeFEm/rpj+iKTPVixyfMU2D0jL/KX1O2Rbl6Xn6l/IiuFcZWOrv0fSlyqW+0dJ51dZ/xFJW6dMCyX9l7Ix2X9U8Y9+6P6+pux6Cg9Jmp5e+4WDr1fO52AzST+QdE9aZvC5F9kJcncOWf6z6Xk6CugBLk/P0aaSpikbNG++pBslTU7r9Ev6kqR5vDhEiDWJC3859JENCbEJ8DesPXrfJ4GfRsQBZEManCNpM+Bp4I0RsR/w98CFafl3AjdGxL7APsDdOfa/A/DqiPgw2UWhT4mIacBHgK+mZS4AvhYRewNP5tjmO8iKzD5kg8Sdk4rGO4ApZNc4OB54VY31hz2O1AVxNtlQFPsC+0s6IkeWSi9J23w/2ZnCdUXEhWRDYfRGRC9wJfB2ZWOwAJyUY1u7A1+JiJeTnRV+ZI3ltiJ7Tj5ENizB+cDLgR90MwMAAAPLSURBVL2VXWQnz3NwCPBEROwTEXvxYgv/FcA
9tcbliYirgHnAcek5WkV2JvtR6f1wKVB5xuxGEdETEefWeezWIH+EKoGI+JWyIVtnkLX+K70JOEyp3xnYBHgpWSH6sqR9gdVkwxpDNq7JpakoXRMReQr/dyJitbIRBF8NfCdrHALZaf0AB/FisfofsuIzktcAsyNiNbBY0s1ko26+Ju1vDfCUpLk11h/2OJRdta0/Ip4BkHQ58FqycX7ymg0QEbdI2kKj6IOPiAFJPwUOlbQQGB8RC+qs9nDFazGf7J9fNd+PiJC0AFg8uF1J96Z1dqL+c7AAODd9GrguIm5N8w+hseGX9yC7aNKP0/thHGv/07+igW1ZA1z4y2MO2fj504FJFfMFHDn0AjGp+2IxWWt4A7JxeQYL2mvJuo4uk3Re6r+tbOVtMmTfz6bfGwDLU2uvmsLGD5H0SuDrafIzETFn6OMA/pBjU6tY+5Py0Mc69DGM9jFdQjYk833Af+dYvvLaE6uBYV09Q5ZbM2SdNWT14IV6O4qIRZL2I7sC2+cl3RQRnyNrRNT6pFGNgHsjotansmdrzLd15K6e8rgUOL1Ky/FG4JTUP4ukV6T5WwJPppbz8WStMSTtRNZS/C+y4rRfWn6xpL+WtAHwd9UCpHHCH5Z0dNqWJO2T7v452aiIAMfleDy3An8vaZykbchapXek7RyZ+vq7yf7RERG3R8S+6WdOjcdxB/C61G8+juwT0s1D9rsY2FbSJGVXeDt0yP2D/d2vAf4QEXn+mQCsJLt0HoN5yYYKficV38m0QN3nIHUH/TEivgWcA+wnaUuySy0uGbbFtVU+zvuBbSS9Km13vLLraVjB3OIviYj4PS/201c6g2xUwl+lov0wWTH7KvBdZYfh3cCLra/pwL9JegEYAN6d5p8KXEc2vv08oKtGlOOArykbTXU82fcP95B9gfdtSR8D8lyw5mqyvup7yFrVH42IpyR9l+xiMb8hu6rSnVRvyQ97HBHxpLLDLueStUZ/MPTiORHxgqTPkRXIx8la5JX+LOmu9Njek+NxDLoYuEHSE6mfH7K+/n0jYlkD21kneZ4DYG+y71TWkH1CeB/Z0WI/ob7LgIsk/Yns9TsKuHDwHwfZe/HeZjwWq82jc9p6R1JX6iefRFagD+qEMeyHUnauxfkRcVO7s9Qj6RLgkoi4rd1ZrD4XflvvSOoHJpBdbOWLEXFZWwM1KH0hfAfZETJHtzuPrX9c+M3MSsZf7pqZlYwLv5lZybjwm5mVjAu/mVnJuPCbmZXM/wdzvtKk9S6t2gAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "2-kQjl9Nc_9U", - "colab_type": "text" - }, - "source": [ - "With our preliminary analysis completed, we return to the original goal of constructing a predictive statistical model of molecular solubility using `deepchem`. The first step in creating such a molecule is translating each compound into a vectorial format that can be understood by statistical learning techniques. This process is commonly called featurization. `deepchem` packages a number of commonly used featurization for user convenience. In this tutorial, we will use ECPF4 fingeprints [3].\n", - "\n", - "`deepchem` offers an object-oriented API for featurization. To get started with featurization, we first construct a ```Featurizer``` object. `deepchem` provides the ```CircularFingeprint``` class (a subclass of ```Featurizer``` that performs ECFP4 featurization).\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "rJ1fDb9tc_9V", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc\n", - "\n", - "featurizer = dc.feat.CircularFingerprint(size=1024)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ZzMWdf5Yc_9Y", - "colab_type": "text" - }, - "source": [ - "Now, let's perform the actual featurization. `deepchem` provides the ```CSVLoader``` class for this purpose. The ```featurize()``` method for this class loads data from disk and uses provided ```Featurizer```instances to transform the provided data into feature vectors. \n", - "\n", - "To perform machine learning upon these datasets, we need to convert the samples into datasets suitable for machine-learning (that is, into data matrix $X \\in \\mathbb{R}^{n\\times d}$ where $n$ is the number of samples and $d$ the dimensionality of the feature vector, and into label vector $y \\in \\mathbb{R}^n$). 
`deepchem` provides the `Dataset` class to facilitate this transformation. This style lends itself easily to validation-set hyperparameter searches, which we illustate below. " - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "UUiC9Z52c_9Z", - "colab_type": "code", - "outputId": "fbb283a1-d991-479e-adab-d07ef4e590bc", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 170 - } - }, - "source": [ - "loader = dc.data.CSVLoader(\n", - " tasks=[\"measured log solubility in mols per litre\"], smiles_field=\"smiles\",\n", - " featurizer=featurizer)\n", - "dataset = loader.featurize(dataset_file)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from delaney-processed.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "TIMING: featurizing shard 0 took 3.569 s\n", - "TIMING: dataset construction took 3.629 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "YWnh-dYOc_9b", - "colab_type": "text" - }, - "source": [ - "When constructing statistical models, it's necessary to separate the provided data into train/test subsets. The train subset is used to learn the statistical model, while the test subset is used to evaluate the learned model. In practice, it's often useful to elaborate this split further and perform a train/validation/test split. The validation set is used to perform model selection. Proposed models are evaluated on the validation-set, and the best performed model is at the end tested on the test-set.\n", - "\n", - "Choosing the proper method of performing a train/validation/test split can be challenging. 
Standard machine learning practice is to perform a random split of the data into train/validation/test, but random splits are not well suited for the purposes of chemical informatics. For our predictive models to be useful, we require them to have predictive power in portions of chemical space beyond the set of molecules in the training data. Consequently, our models should use splits of the data that separate compounds in the training set from those in the validation and test-sets. We use Bemis-Murcko scaffolds [5] to perform this separation (all compounds that share an underlying molecular scaffold will be placed into the same split in the train/test/validation split).\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "_wEJ8mn_c_9c", - "colab_type": "code", - "outputId": "9c36d811-76aa-4eb3-c31d-9a0ec5dfbaf4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "splitter = dc.splits.ScaffoldSplitter(dataset_file)\n", - "train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(\n", - " dataset)" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Computing train/valid/test indices\n", - "About to generate scaffolds\n", - "Generating scaffold 0/1128\n", - "Generating scaffold 1000/1128\n", - "About to sort in scaffold sets\n", - "TIMING: dataset construction took 0.054 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.029 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.032 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "YsdH2vtqc_9f", - "colab_type": "text" - }, - "source": [ - "Let's visually inspect some of the molecules in the separate splits to verify that they appear structurally dissimilar. 
The `FeaturizedSamples` class provides an `itersamples` method that lets us obtain the underlying compounds in each split." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "koTNAeQ8c_9g", - "colab_type": "code", - "outputId": "32e02276-22a4-48ea-b958-e43e0505233d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "train_mols = [Chem.MolFromSmiles(compound)\n", - " for compound in train_dataset.ids]\n", - "display_images(mols_to_pngs(train_mols[:10], basename=\"train\"))" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAZk0lEQVR4nO3daVhU59kH8HtmWMIqiqAGV4zUiCkhGkkAFXQQDFp7pdAszWLV2EtjNK1tTZoYY9IkpvFKNa1GBVNtE62ktYa4ADOACNhqIWoMaTERERQQFBBZHJaZ98PTdzqFYQRmuecc/r9PynM8c6vzn3Oe55y5j8JgMBAA8FFyFwAw2CGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQR5unLlSldXF3cVfYIQggxdvnw5Ojr6ySef7Ozs5K7lzhBCkJvy8vJZs2Zdvny5tra2o6ODu5w7UxgMBu4aAGymsrIyJiamrKwsMjIyIyPDx8eHu6I7QwhBPq5duxYTE/Pvf//7oYceysrKkkQCCSEE2aitrY2Njf3666/Dw8Ozs7OHDh3KXVFfYU4IclBXVzdnzpyvv/46LCxMo9FIKIGEEIIMNDQ0JCQklJSU3HfffVqt1t/fn7ui/sHpKEhbY2NjXFxcUVHRd77znePHj48cOZK7on7DkRAk7ObNm/Hx8UVFRZMmTcrJyZFiAgkhBOlqaWlZuHDh6dOn77nnntzc3Lvvvpu7ogFCCEGSWltbExMT8/Pzx44dq9FogoKCuCsaOIQQpKe1tXXBggV5eXljxozJzc0dP348d0VWQQhBYnQ6XVJSUm5ublBQUG5ubnBwMHdF1kIIQUra29uTkpKOHTsWGBio0WgmTpzIXZENIIQgGR0dHcnJyYcPHw4ICMjJybn33nu5K7INhBCkoaur6+mnn05PTx8+fHh2dnZoaCh3RTaDEIIEiAQeOHDAz88vIyPjvvvu467IlhBCcHZdXV2LFy/ev3//kCFDsrKypk2bxl2RjSGE4NQMBsOKFSs+/vhjX1/fzMzMBx98kLsi20MIwXkZDIaVK1empKR4enq
mp6dHRERwV2QXCOFgJIkOSAaDYdWqVTt27PD09Dx8+PDs2bO5K7IXF+4CwNG2bt3a1NS0bdu2WbNmqdXqqKgo51xpXLdu3fbt2z08PNLT02NjY7nLsScDDCa//e1vicjd3d30PTB+/Pgf//jHf/zjH69cucJd4H+89NJLROTm5nb48GHuWuwOIRxEdu3apVAoFArFjh07Ll68uHPnzuTk5G5fgQ0ODl6+fPnevXsrKyu56nzllVdEAtPT07lqcCSEcLDYvXu3UqlUKBTbtm3rNmQMZLeuECKQaWlpN27ccFidr732GhGpVKoDBw447EV5IYSDwp49e0QCf/e731nYrLOzs6ioaMuWLcnJyb6+vsY0KpXKKVOmiEA2NDTYr8733ntPJHDfvn32exVngxDK34EDB1QqFRFt2rSp73+qo6OjqKho06ZNarX6rrvuMgZSpVJNmzZt3bp16enpN2/etGGd77//vtj/xx9/bMPdOj+EUOY+/fRTFxcXInrrrbcGvJPW1tb8/HwRSDc3N2MgXVxcRCA1Gs3t27etqXPLli1EpFAodu3aZc1+pAghlLODBw+6uroS0RtvvGGrfTY3N2s0mnXr1kVFRYmdC56enmq1esOGDRqNRqfT9WufKSkpxhUjW9UpIQihbB09elRcivjFL35hp5e4deuWCOS0adOUyv/e+OHl5aVWqzdt2lRUVNTV1WV5JxZWjAYJhFCeMjMzxUTuZz/7mWNesba2Nj09XQRSoVAYA+nj42MMpF6v7/anxIoREf3mN79xTJ1OCCGUoaysLJHANWvWsBRQU1OTlpa2fPnyCRMmmF7zCAwMTE5O3rJlS1FRkWGgK0bygxDKTX5+vpeXFxE999xzPY88jldWVpaamvqjH/2oW0vCESNGiGPgO++8w10jM3TglpWTJ0/Gx8c3NzcvWbIkNTXV9LTQGZSVlWm12oKCgpycnKtXr44cOTI2Nnbfvn3cdXHj/hQAmzl58qR4GNjixYvvuBzCS6/Xr1mzhoiWLl3KXQs/HAll4syZM3Pnzm1oaPjhD3+4b98+MddyZmfPng0PDw8KCqqsrHS2I7aDIYRycPbs2blz59bX1yclJe3fv19cnXdyBoMhKCiourr6/PnzU6dO5S6HE77UK3lffvmlWq2ur69/9NFHpZJAIlIoFPPmzSOijIwM7lqYIYTSVlpaGh8ff+PGjfnz5+/bt08qCRTi4+OJKDMzk7sQZjgdlbALFy7ExMRUV1fHx8d/9tln3b6q6/xu3LgRGBjo6up6/fp1b29v7nLY4EgoVd9++21sbGx1dXVcXNyhQ4ckl0Ai8vf3nz59uk6ny8vL466F02AJ4a1bt1555RWdTsddiG1UVFTExcVVVVXNnDnzb3/7m+lXjaQlISGBcEbKe4XEMZqbm2fNmkVEy5Yts7BZQ0PDxYsX9+7dW1FR4bDaBqCiokLcDhYVFXXr1i3ucu4sLy9v5cqVBQUFPYcKCwuJKCQkxPFVOQ/5h7C1tXXOnDlENHr06G+//ba3zRobG6dPnx4eHi4+m4KDg59++umdO3c6WyArKyvFo4gefvjhpqYm7nL6RHRtWrt2bc+hzs7OYcOGEZGF/xrZk3kIdTpdYmIiEY0YMeJf//pXb5s1NjaK1s5BQUEJCQlDhgwxPVmYMmXKqlWr/vrXv16/ft2RxfdUU1MjHkX0wAMP1NfX8xbTd7m5uUQUGhpqdjQ5OZmItm/f7uCqnIecQ6jT6RYuXEhEgYGBJSUlvW3W3Nw8c+ZMIho3blx5ebnhf1utdAuksfeR4zNw7dq1KVOmENH999/vyM5L1mtvbxcday5fvtxzNDU1lYgWLVrk+MKchGxD2N7evmjRIiIKCAg4f/58b5u1tLTExMQQ0dixYy9dutRzAxHI3lqtrF69Oi0tzbatVsyqra0Vt5V897vfZT8gD4D4v0hJSek5JG5b8/LysrJBhnTJM4SdnZ2PP/44Efn5+RUXF/e2mXG6OGb
MmIsXL95xt6a9j0wvCZi2Wmlra7PpX8VgMBgaGhrEo4gmT55cU1Nj8/07wPbt24koKSnJ7Kj4fMnNzXVsUc5ChiHs7Ox88skniWjIkCH//Oc/e9tMp9M98sgjd5wu9qalpUWj0WzYsEGtVpu2WvHw8IiKihKB7G+rFbOM89WQkJCqqirrd8ji0qVL4jOxo6Oj5+jatWuJ6KWXXnJ8Yc5AbiEUT5MkIl9f31OnTvW2mU6nW7BgwR2ni310x1Yr+fn57e3tA9jzzZs3Z8yYQUSTJk26evWqlXXyCgkJISKzFyqysrLEXNfxVTkDWYVQr9f/5Cc/Ee/+vLy83jZrb2//3ve+J6aLX331lW1rqKurM9tqxdvbu++9jwTj5U3jipGkrV69mojWr1/fc+j27dve3t4KhUK6h3pryCeEer1+xYoVROTp6WlhdtHZ2fnYY48R0dChQy1MF23C2GpFrGoaBQQELFiwoLfeR4LpilFZWZld63SMI0eOENGMGTPMjoqpwZ49exxclTOQSQj1ev2qVavElCw7O7u3zYzTRT8/PwvTRXuoqqoSgRw3bpxpIEeMGJGcnLxz507TlSHTGwz6smIkCa2trR4eHkql8tq1az1Ht27dSkRPPPGE4wtjJ5MQ/vKXvyQiNze3I0eO9LZNV1fXU089JRZsLEwXHaC0tHTHjh2PPfZYYGCgaSAnTJiwdOnSPXv2xMXFDXjFyJmJv5fZ50yUlpYSkb+/v5M35rAHOYTw5ZdfFgn8/PPPe9tGr9cvX75cTBdPnDjhyPIsM/uIMh8fn5EjR8osgQaDYfPmzUT07LPPmh0NDg4mIt7PRxaSD+Grr75KRK6urp999llv25hOF48fP+7I8vquq6uruLh48+bNiYmJmZmZNl8xcgbnz58nopEjR5qdCYv/o40bNzq+MF7SDuHrr78ubl7585//3Ns2er3++eefFwnMyclxZHnQ05gxY4jozJkzPYcOHTpERJGRkY6vipeEQyjObVQq1SeffGJhMzFddHd3P3r0qMNqg94sXbqUemn429zc7O7urlKppHVnrPWkGkLx7HWlUvmnP/3JwmZ9mS6CI6WlpRFRTEyM2VFxVSYtLc3BVfGSZAjFcrZCodi5c6eFzcSjzy1PF8HBGhoaXFxcXF1dzd71/s4779Dg6wgsvRCK7u4KhcLyN9A2bNhwx+kisIiMjCSiQ4cO9Rw6c+aMuDrqDE/RcBiJhfCjjz4Sz7L7/e9/b2Ez43RxUD36XCo2btxIRCtWrOg5pNfrR40aRUQWvn0mP1IKofFZdu+++66FzcR0UaVSWZ4uApdTp06JOxPMjj777LNEtHnzZgdXxUgyIUxLSxOdbS0/Scs4XRyEjz6Xiq6uroCAACIqLS3tOSoe0hQXF+f4wrj0NYR5eXnGzg51dXV2ramn9vZ2cQ/0W2+9ZWEz46PPP/zwQ4fVBgPwxBNPENEHH3zQc6iurk6pVLq7u0uikZxN9DWE4jxeUCqVYWFhL774Ynp6emNjo13rM6qurrb8THPjdHHQPvpcQvbs2UNEiYmJZkfFVygt3AYsM30NoWmrFQ8PD2MgTVutOCyQPeHR59JSXV2tUCg8PT3N9pVZv349Ea1evdrxhbEYyJyQq9VKb/o4XQSnEhYWRkQajabnUEFBAQ2mjsDWLsyYtlpxc3MzG0i7dtH6y1/+IhL461//2n6vAja3bt06Ivr5z3/ec8jYEVg236W0zJaro83NzWZbrXh6eqrV6g0bNmg0moG1WunNwYMHRZOlQXjrvdTl5OQQ0dSpU82OJiUl0aDpCGyvSxRNTU3GQFrZaqU3x44dEyfDZj9NwcnpdDofHx8iMvuggZSUFBo0HYEdcZ3w2rVraWlpq1evFs0zjYYPH37HViu9yczMFK14f/rTn9qpbLA30W4rNTW159Cg6gjs6Iv11dXVotXK+PHjTQNpbLXSly+zajQasUK7Zs0aB9QMdrJt2zYiSk5ONjsaGhpKg6M
jMOcdM+I5ZMuXLxdf9DQaNWqUCKTZPn/5+fniqa7Lli0bVLf5yk9ZWVlCQoLZI6FhMHUEdpbb1sy2WiGTR5RVVlYaDIbCwkIxkViyZMkg7Ag0qAyejsBO98x6vV7/5Zdf5ubm5uTknDhxoqmpSfxcoVBMmjSpsrKyra1t8eLFu3fvNl2ABfnR6XT+/v6tra1Xr14VX62QK6cLoamurq6zZ88WFBQUFhZmZWXdvHlz9OjRo0ePLigoUKlU3NWB3SUmJh49enTbtm0rV67krsWeuA/FfaXT6cQk4dFHH+WuBRzkgw8+mDhxokKhmDJlivjyQENDA3dRtufUR8JuysvLJ0yY4Ovre/36ddMHIYGMffTRRytXrtTpdOK3Li4uM2bMiI2NjY2NjYyMNL2NWbqkFEIimjx5cmlpaX5+fnR0NHct4CBtbW3FxcWFhYVarfbEiRPt7e3i5y4uLmFhYWq1Wq1Wz5w50/Q2ZmmRWAhffPHFrVu3vvrqq2+++SZ3LcCgpaXl73//u1arLSgoOH36dEdHh/i5p6dnZGRkVFRUdHT0rFmzTG9jdn4SC+GxY8ceeeSRBx988PTp09y1ALPm5uZ//OMfWq1Wq9V+8cUXxneyl5fXww8/LI6Q4eHhzr+KLrEQtrW1+fv763S66urqbk9TgcGsrq7u+PHjYiHdNJA+Pj4REREikA888IDpbczOQ2IhJKL4+PisrKxPPvlEPOQMoJuampr8/HytVqvRaMRjuoXAwMDZs2eLU9ZutzHzkl4I33///bVr1z7zzDN79+7lrgWcXVVVlVjRycjIqKioMP581KhR0dHRarV63rx53W5jdjzphbCkpGTq1KkjRowQLRK4ywHJKCsrEys6OTk5V69eNf48ODhYHB7nz5/f7TZmx5BeCIlo3LhxFRUVX3zxRXh4OHctIEkikFqtNjs7u76+3vjz4OBgMYGcM2dOt9uY7UeSIXzuuedSU1Pffvtt8bwXgAHreWukccgYSLVaPXToUNM/8s0330yePNlmRfDcqGOdTz/9lIhmz57NXQjISnt7e0FBwRtvvBEbGyu+Mi64uLgYO4K3trYGBga6u7s3Nzfb6nUleSRsamoaPnw4EdXV1Q0ZMoS7HJChzs7Oc+fOiVPW/Pz8AwcOLFq0SAxFREScPn368OHDiYmJNnktZ7+OaZavr++MGTM6Ojpyc3O5awF5SklJefvtt9esWaPRaG7cuJGQkGAcEr/OzMy01WtJMoREFB8fTzb9hwAwtWvXroMHD+bn5xORl5eX6Y2p4r2XkZFhq9eSagjFp5EN/yEATFk43EVERAwbNuybb765ePGiTV5LqiGcNm1aYGBgeXl5aWkpdy0gQxYOdyqVau7cuWS7EzGphlCpVKrVasIZKdhHVFSUr69vSUmJ6X02RradDUk1hIRpIdiTq6trbGwsEYl+U90kJCQoFIrs7Gzjt42tIe0QKhSK3NzctrY27lpAhix8ygcFBYWGhra0tJw8edL6F5JwCEeMGHH//fe3tbWJh/gA2Nb8+fOJSKvVdnZ29hy14YUKCYeQcEYK9jR+/PiQkJDGxsZTp071HLXhe08OIcSFCrATC4e7mTNnent7nzt3rqqqyspXkXYIo6KihgwZ0tsSFoCVLBzu3N3dxd3LGo3GyleRdghdXV1jYmKolyUsACvFxsZ6eHgUFRXV1tb2HLXVGam0Q0iYFoI9eXh4REdH6/X67OzsnqPG915XV5c1ryL5EIolrKysLGP3OwAbsvApHxISMnHixPr6+uLiYmteQvIhFEtYTU1NaIII9mAModkv/c2bN4+sXhqUfAjJDl8tATCaOnXq2LFja2pqzp0713PUJrMhOYQQFyrAruLi4qiXN9jcuXPd3NxOnTrV1NAw4P3LIYRiCau4uNjsEhaAlSwc7ry9vc889ZQuIMBXqx3w/uUQQuMSltaKfwiA3sTFxbm4uBQWFhofWWtqyqRJqpoasuKMVA4hJFyoAHvy8/N
7Jj7+w4gIndm7lEXni4wMGmi7JpmEUKzNZGVlSbFvFTi/3TNmLC0oCDh82MxYWBiNGkVXr1JJycB2LpMQhoaGiiWss2fPctcCciQOd8eOmRlSKGjePCIa8BmpTEJI/3/FBmekYBfTp1NAAJWX04ULZkbj44kQQkwLwa6USlKriXpJWlwcKZV04gQ1Nw9k39aV5kTUarVYwjLtZA5gMxYOd8OH0/TppNPRiRMD2LF8Qujn5xcREdHR0XH8+HHuWkCO4uNJoaDcXLp92/woDfCMVD4hJJyRgl2NHElhYdTaSmYvVIgQDui2LRmG8JjZJSwA61k43D30EA0bRhcuUFlZf/cqqxBOnz5ddAS+YHYJC8BKFg53KhXNmUM0kDNSWYVQqVSK1si4mRvsIjqafH3pq6+ostLM6ECnhbIKIRElJCQEBAS0t7dzFwJy5OpKMTFERGbbqYgL+jk51M+3nySfT2hBR0eHSqVSKuX24QLOYvt2ev55Sk6mtDQzo1OnUkkJHT9Os2f3fZdye7O6uroigWBH8+cTEWm1ZK4j8MDOSPF+BeiPCRNo0iRqaCCz7VQGdKECIQToJzH3M3u4mzWLPD3p7Fmqru77/hBCgH6ycM551100ezYZDNSfjsByW5gBsLuWFvL3p44OqqmhgIDuo59/TuXl9P3v05gxfdwfQgjQf2o1ZWfT/v30+OPW7wynowD9l5BAQ4fSjRs22RmOhAD919ZGbm6kUtlkZzgSAvSfh8f/JHD3brr3XnJ3p9Gj6eWXqZ9PZEAIAayzZw8tW0Y/+AGdPk0bN9KWLfTCC/3aAU5HAawTHEz33ktHjvznt6+/Tm++SVeu0KhRfdwBjoQAVqispEuXKDHxvz9ZuJD0eios7Ps+EEIAK9TUENH/HPTEr3HHDIBD6fXdf92fbxEghABWCAoiIqqq+u9PxDFw9Oi+7wMhBLDC3XfTPfeQaXv8I0fI1ZWiovq+D4QQwDrr11NWFv3qV3TuHP3hD/Tuu/TCCzR8eN93gEsUAFbbvZvee48uXaLAQFqyhF57rV830yCEAMxwOgrADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQCz/wOAAroCsHt1UAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deXhTVd4H8JPuC1sLQoHKDkIRWQoFqaCyL/F1Zp5B1pTBZ+wASkAFqq9oQEEqigSZAaoihk3hnedxDAo4tKDgUkra0r10oTt0owtt0iXLef84JdZSoElvcrJ8P49/jNPee361+TY35/7OuSJKKQEAflx4FwDg7BBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzpw9hNXV1VVVVbyrAKfmvCFMTEx8+umnZ8+ePXDgwLCwsIqKCt4VgZMSUUp512Bt5eXlW7ZsOXTokMFg6Natm1qt1uv1/v7+27ZtW716tZubG+8Cwbk41zuhVqvdu3fviBEjPvvsM1dXV6lUWlhYmJaWtmDBgqqqqnXr1j3++ONnzpzhXSY4Geo0zp8/P3r0aPZTz5o1KzU1tc1XR40aZfxqWloarzrB2ThFCK9fv75w4UIWsBEjRpw+fbrdb2tubpbL5d26dSOEuLu7S6XSmpoaK5cKTsjBQ1hdXR0REeHh4UEI6dGjR2RkZGNj44MPqaiokEqlrq6uhJCePXvK5XKdTmedasE5OWwI9Xq9QqHo3bs3IcTFxUUikZSVlXX88Pj4+GnTprE3zwkTJly6dMlypYKTc8wQXrx4cezYsSxCzzzzzLVr18w7j1KpHDRoEDuPWCzOy8sTtEwASh0vhIWFhRKJhMXm0UcfVSgUBoOhMydUq9WRkZFdunQhhHh7e0dERNTV1QlVLQB1pBCq1WqZTObl5UUI8fHxkclkDQ0NQp28uLhYIpGIRCJCSP/+/Tufbei82tof8vIkqamPJST4xse7JiZ2T08fV1i4vqEhg3dppnGEm/WU0n//+98bN24sLCwUiUR//etfP/roowEDBgg+UFxc3Pr162NjYwkhISEhe/funTJliuCjwENRqs3LW15d/X+t/j8XQgzsf4lEbgMGRPXq9SKX2szB+69AZ129enXq1KnsZ5k4ceLPP/9s0eHYfE+fPn0IISKRSCKR3Lp1y6Ijwr1u3tymUhGViuTm/lWtVhkMzZTqm5tvVVR8eu1ab5WKxMe7azSpDz+RbbDjEJaUlISHh7u4uBBC+vXrFxUVpdfrrTN0fX29TCbz9PQkhPj6+spksofe+QABJScPUKlIevoEg6Ht3aP6+liWz/z8v3OpzQx2GcKmpia5XN61a1dCiIeHh1Qqra2ttX4Z2dnZixYtYm/Cw4YNO3XqlPVrcEp6lUqkUpGiotfb/XJx8eaSEllt7Tkrl2U2+wuhUqkcMmQIe+mLxeLc3Fy+9URHR48ZM4bVM3PmzOTkZL71OIOkpD4qFcnNfYF3IcKwpxCmp6fPnTuXvdxHjRp19uxZ3hW10Gq1UVFRvXr1IoS4ubmFh4eXl5fzLsqR5ee/qFIRlUpUUvK2Xq/hXU5n2UcIb9++bWwl8/f3t81WMlYkWwnFitRqtbyLckxabXl6+jj22S8xseuNG0vLyw80NKTzrstMth7C5ubmNm8yFRUVvIt6kIyMjHnz5rG365EjR545c4Z3RY5Jr1ffuvVBcvIgFkX2T1JS34KClzWaFN7VmcamQ3j+/PnHH3/c+HErJcVu/uO2+eCak5PDuyL7ptc3VFd/W1mpuPdLGs21sjJ5Ts7ziYl
+d9MoKihYbTDYzWWIjYYwKyvLOPE4fPhwe5x4ZFO4rRdGcZnCtWt6vaamRpmXJ0lM7KZSkaSkPvfekzAyGLQ1NaczM6exKBYWbrBmqZ1hcyGsq6sz3oLr0qWL1W7BpaamJiUlCX7amzdvGm9m9u3b15o3M+2XTld7+/bxnJy/JCT4GN/c0tMn3rq186HTMAaDLitrHrtfr9WasG6GIxsKYetmFLb4qLS01GqjT58+3YwVTx2kUqlCQ0PZG3twcPDly5cFH8IB6HRVlZWK7GxxfLzn3ey5pKcHl5TIGhuzWn+nwaDTau87NVBd/R92eG3tD5avWgC2EsLY2NjJkyezl+nkyZNjY2OtOXpjY+P69evd3d0JIX5+fnv27GlubhZ2CIPBcOrUqYEDB7J+t0WLFuXn5ws7hJ3SaivvZs/DmL3MzNCyMnlzc/G933/jxtKEBJ+0tNH3O2FV1f+x89y5E2PJwgXDP4RFRUXGBQqBgYEcFyh0cBeMzmBLPby9vcndpR4ajd3f5jJPU1NRRUVUdrY4Pt6NZSY+3vVu9h7Ujltc/Cb7/vLyg/d+1WBozMycrlKR+Hg3rfa2xcoXEs8QtnlF2shSvQfvByUI2/m7Y31NTfllZfLMzFDWeqZSkYQEr+xscUVFVAc/wmm1t5OTA9kb5o0bK+rqLul01Xq9pqkpr6rqZEbGlLsTM69Y+mcRCp8Qtr42Y5P4NnVtxnZ86t69O7Hkjk8//vjjuHHjeF2BW1lj4417suednS2urFTodCZPGjc0pKemjmh9h7DNP3l5EoNB4A8UlsMhhCqV6qmnnmIvvgkTJtjsLEVlZaWld3ziOxdlBRpNakmJLD092BiPhAQflj29vlNXPQZDU2Xl4Zyc55OTByYkeMfHuyYm9khLG1tQ8I/6+l+N36bXqw2Gpk7/HJZl1RCy+Xr2smbz9TbYfdZGQkLC9OnT2Z+McePG/fTTT4IPwbaEs/5dGcth2UtNHWnMXmKiX16epKZGaTBY70fTaJJTU4OKijZabUTzWCmE927paV93rpVK5eDBg40Xzzdu3BB8CPvvT9Cr1aqSEllKynBj9q5d63k3exzejtTqq/HxHiqVqKZG+Dk2AVkjhEqlcujQofbew6XRaCIjI9kiRrbj0507dwQfpc20kO136ul0uurqiwUFLycl9TNmLzk5sLBQWlf3E6WcOxNu3dqpUpFr13o3N9/kW8kDWDaEGRkZ8+fPd6Ru5pKSEuPEpoWW87fuWXd3d7fNnnWdTnf58mWpVBoQEHDhwtS72RtYWCitq7tMqe1M9uqzsmarVCQraw73vwj3Y6kQtl7X4+fn52DreuLi4p588kn2x2XSpEm//PKL4EPY5uqthoaGb7/9NiwszM/Pz7hN0Y4dz5eUvKVWJ/Curn3NzaVJSQEqFSkt/ZB3Le0TPoRshesjjzzi2CtcDQaDQqEICAgwdsAUFhYKPkqbdcznzvHZskGj0SiVSolEwj7VM0FBQTKZTKVScSnJJLW1Z1UqUXy8e329Ld4HEjiEMTExxr0eZsyY4fB7PbAdn9hmp2zHJwE3OzXitaOHWq1m2WN7H7fOXkaGne3tWVi4QaUiKSlDzbgtaWmChdCZdz0qKChos+234ENYc2+rqqoqhUIhFovZXRN2DzM4OFgmk2VlZT38eJtkMDSmp49XqUheXhjvWtoSIITY/4+5cOHCE088wV61zz77rCUWRll0l8fKykqWPdbITghxdXUNDQ2Vy+XFxe00UtudxsbsxMSuKhW5ffso71r+oFMhbPO5CDvhsg4Y9nnYcguj2ux33MlpoaKioqioKLFYbHxOuDF7jvfbrKg4pFKRhIQuDQ3XedfyO/NDeOXKFeMm8CEhIb/99puAZdm1qqoq40MR/fz8IiMjm5oEvlXNmm/ZVv9sWqigoMCkM+Tn58vl8tDQUHa7hRDi5eUlFoujoqIcciLN6MaNZSoVSU8Ptp12NnNCiKejdERmZuaCBQvY6/u
xxx77/vvvBR/CjGfg5Obmtsmet7e3WCxWKBT21cNkNp2uJiVlsEpFios3866lhWkhZF0jeE5Yx50/fz4oKMjYAZOWlib4EPc+De7e70lNTZXJZMHBwcZJTh8fH5Y9J/wN1tfHxce7q1QutbX/5V0LpSaFEE/MNI91Fka1+1xUlr2RI0cas+fv7y+RSJRKpXNOnhndurXjbjsb/8+9HQ3hSy+9xH6Ltrz4yJZZYWGUTqfbv38/63dzdXVl80NMQEDAmjVroqOjHalvqXP0WVmzVCqSlTWXe5Ndh0J48eLFqVOn+vr62kjzlP2Kj4+fNm2a8c/ZpUuXBB+CLYzy8fHx9vYODAwMDw9XKpXI3r2am0vZMy1KS3fzraRDITxy5AghZOnSpZauxklY4cKe3da3xHWvI6mpOWML7WwupMPYpRR03nPPPZeWlsamuL777rugoKA33nijvr5e8IGMU6DQru7d5/fuvY5SbV7eCr2+jlcZJoQQBMQ2tsrMzJRIJI2NjR988MHIkSOPHDlC7f/p5fYlMHCXj8/4pqacoiIprxoQQp769+9/5MiR2NjYKVOmlJSUrFy5csqUKbGxsbzrciIikeeQISddXbvevv1lVdVxLjUghPyFhIT8+uuvrAEwLi5u6tSpYWFhpaWlvOtyFp6ewwMDPyaEFBSsbmzMsn4BCKFNEIlEYWFhOTk5MpnMw8Pj6NGjw4YN27p1a1NTE+/SnEKvXn/3919qMNTn5S2ntNnKoyOENsTX13fr1q0pKSlisVitVm/btm3MmDFZWRz+NjuhAQMOeHoO1mhUN2/KrDw0Qmhzhg8ffvr06ejo6DFjxhgMBuMWyWBRrq7dBw/+WiRyLy3ddefOeWsOjRDaqJkzZ8bHx//www/GlbVgab6+IX37vkOIIT9fotVa7zM5Qmi73N3djVtFgnX07fu/XbvO1GrL8vNXEWKl20UIIUBrLoMHK9zcet25c668fK+VhrTQefPz85OSku7cuWOh8wNYiLt7/0GDFISIiovf0GgSrTCipUK4YcOGcePGXbhwwULnB7Cc7t0X9O79cnFx4IoVb6jVaksPh8tRgHb07//hzp09v/nmv1KpxdvZEEKAdri4eB06dNjHx+eLL744ceKEZcey6NkB7FdQUNCePXsIIWvXrs3Ly7PcQAghwH2Fh4cvWbKktrZ28eLFWq3WQqMghAAPcuDAgUGDBl29enXr1q0WGgIhBHiQHj16HDt2zM3NLTIyMjo62hJDIIQADxEaGvr2228bDIYVK1aUlZUJfn6EEODhtmzZMmPGjLKyslWrVgm++wFCCPBwLi4uR44c6dmz59mzZ/ft2yfwyYU9HYCjYk98EIlEmzdvTkwUsp0NIQToqIULF65Zs6apqWnx4sV1dYLtzoYQAphg9+7dY8eOzc7Ofu2114Q6J0IIYAIvL68TJ074+Ph8/vnnX331lSDnRAgBTBMUFLR7925CyJo1a/Lz8zt/QoQQwGSrV69evHixUO1sCCGAOQ4ePDhw4MC4uLj33nuvk6dCCAHMwdrZXF1dd+zY0cnF6wghgJmeeuqpLVu2GAyGsLCwyspKs8+DEAKY75133nn22WfZc0TMbmdDCAHMZ2xnO3PmzP79+808ibA1ATibwMDAzz77jBDy+uuvJyUlmXEGhBCgs/785z+vXr26qalp2bJlGo3G1MMRQgAB7Nmz54knnkhPTzejnQ0hBBAAa2fz9vaOior6+uuvTToWIQQQxujRoz/66CNiejsbQgggmLVr1/7pT3+qqamRSCQ6na6DRyGEAO07fPjw7t27Tb3798UXXwwcOPDnn3/evn17Bw9BCAHakZGR8corr2zcuDEmJsakA/38/BQKhYuLy1dffdXY2NiRQxBCgLaMNxtWrVo1a9YsSw+HEAK0tWnTpmvXrg0bNmzvXpMfUVhdXb1y5UqDwbB06VIvL6+OHIIQAvzB2bNn//nPf3p6ep48ebJr166mHv7iiy8WFBS
w3u4OHoIQAvyupKQkLCyMUrpz584JEyaYevj+/fv/85//9OjR4+jRo25ubh08CiEEaGEwGFauXFlZWTlv3rwNGzaYenhaWtrGjRvJ3cdXdPxAhBCgxc6dO2NiYnr37n348GGRSGTSsY2NjcuWLWtoaPjHP/6xZMkSk45FCAEIIeTq1avbtm1zcXE5duxYQECAqYe/+uqrycnJQUFBH3/8sanHIoQAxLhl06ZNm2bPnm3q4d98883Bgwc9PT3ZboimHo4QArQ8i3fixInvvvuuqccWFxe/9NJL5O6+wGaMjhCCszt06NCJEye6dOly/PhxDw8Pk45lG8zcvn17wYIFa9euNa8AhBCcWk5OzquvvkoIOXDgwIgRI0w9/N1337148aLxWTHm1YAQgvNqamp64YUX6urqVq5cuWLFClMPZ13abJuZXr16mV0GQgjOKyIiIjExcejQoZ988ompx9bU1KxYsUKv17/11lszZszoTBkIITips2fPfvLJJ+7u7sePH+/WrZuph69evbqgoCAkJOTtt9/uZCUIITgj44Ovd+7cOXnyZFMPP3jw4MmTJ7t3737y5El3d/dOFoMQgtMxGAwrVqwoKyubO3euGfsypaenv/7668T09rT7QQjB6XzwwQfR0dG9e/f+8ssvzWtP02g0f//735cuXSpIPQghOJcrV+JkMpmLi8vRo0fNaE9jO/wOHz7cjPa0++noagsAB1BfT9asCZ40aXdoaNGcOXNMPfz7778/cOCA2UsN7wchBCcSHk4SE10nTVq3fbve1GONT33ZtWvX+PHjBawKl6PgLA4fJl99Rbp0IUePEg8PV5OONbanzZ8/f926dcIWhhCCU8jJIevXE0LI/v3kscdMPnz79u0XLlzo06ePGUsNHwohBMfX1EQWLyZ1deSFF4hEYvLhv/zyy3vvvceWGvbp00fw8hBCcHxvvkkSEsjQoeSzz0w+lrWn6XS6N954w0LbHyKE4ODOnSNyOXF3J8eOEdO701oeLDFp0qStW7cKXxwhBCEEx1ZeTlatIpSSHTvIlCkmH/7pp59+/fXXQrWn3Q9CCA7LYCArVpDSUjJnDnn9dZMPT09PZ0sN9+/fP3jwYOHruwshBIf14Yfk/HnyyCPkyy+Ji4mv9MZGsmrVyxqN5sUXX1y2bJllCmyBEIJjUqnIO+8QkYh88QXp29fkwzdtIrdvn5sz5zUzlhqaylIhlMvl165d6+RiRwDz1NeT5ctJczN57TUiFpt8+Jkz5F//IsXFnpGRu319fS1Q4B9Yqm1NkCUeAOZZs4ZkZZEJE8j775t8bEkJWbmSUEoiI4mg3Wn3hctRcDQKBTl2jPj6khMniImbpxGDgaxcSSorybx5LR02VoAQ2i6tVpubm8u7CjuTm0ukUkII+de/zGlPe/99EhND+vQhhw8TobvT7gshtFExMTETJkyYM2dOU1MT71rshlZLli8nd+6QF14gK1eafHhcHHn3XeLiQo4eJaavNDQfQmhzsrOzn3vuuVmzZqWmprq6uhYUFPCuyG68+Sa5coUMGWJOe1ptLVmyhGi1ZPNmYvpG+J2CENoQtVq9devWMWPGfPfdd76+vjKZLCUlxYwdaZ3TDz+Qjz8mbm5mt6eRvDwycSLZts0CxT0QFvXaBIPBcOzYsc2bN5eVlYlEIolEsmvXLjM2X3Ba5eXkb38jlJLt28mTT5p8+Oeftyw1PH7c5LmczkMI+YuLi1u/fn1sbCwhJCQkZO/evVPMaHN0bmvXktJSMns22bTJ5GOzswnbcu3gQcLlsgMh5KmkpOTNN988duwYpbR///7vv/++RCIRfM2oM9i1izQ1kU8/Nbk9zbjU8G9/I8uXW6a4h0EI+dBoNPv27du+fXt9fb2Pj8+6deu2bNnSpUsX3nXZqyFDyOnT5hy4eTNJTCTDhhHLd6fdlwkh1OtN3hsH2nX69GmpVJqfn08IEYvF+/bts1CDEaXUEqd1GGfPkn37WpYaCrd
5muloB1y8eHHq1Km+vr5yuVyn03XkEGhXfHz8tGnT2H/5CRMmXLp0SfAhqqurIyIifHx8vL29AwMDw8PDlUqlVqsVfCB7V1pK+/ShhNDduzlX0qEQUkrZs0jZS+fy5csWrckhVVRUSKVSV1dXQkjPnj0t8edMp9Pt37+fPaPL1dX1kUceMf6pDQgIWLNmTXR0NNLI6PV01ixKCJ07lxoMnIvpaAgppUql0njVJBaL8/LyLFaVQ2lubpbL5ey5P+7u7lKptKamRvBRLly48MQTT7DfzjPPPHPt2jVKaWpqqkwmGzlypDGN/v7+EolEqVQ2NjYKXoMd2bGDEkJ796a3bvEuxaQQUko1Gk1kZCSbP/D29o6IiLhz546FKnMM58+fHzVqFAvArFmz0tLSBB+isLBQcncLsUcffVShUNz7PSyNwcHBxjT6+PiIxWKFQlFXVyd4STYuLo66u1MXF/rf//IuhVJqagiZ4uJi40w6e1Cwgfs7uu3JzMxcsGABe8U/9thj33//veBD1NfXy2QyLy8vFiqZTNbQ0PDgQ3Jzc+VyeWhoqPFGiLe3N0tjbW2t4BXaoJoaOngwJYRu3sy7lLvMCSFz5coV4z3lSZMm/frrrwKWZdeqqqoiIiI8PDwIIX5+fpGRkU1NTcIOYTAYTp06NWDAAEKISCRatGhRQUGBSWfIz89vk0YvLy+xWBwVFVVeXi5stTZl2TJKCA0OpkL/TsxnfggppQaDQaFQsO4q1mx1yxYusfnRarVRUVFsRsTFxUUikVjiBX316tWpU6ey5EycOPGXX37pzNmKioqioqLEYrGbW8v9KldX19DQULlc7ni/zUOHKCG0Sxd6/TrvUlrpVAgZdlHk6elJCGFtxw+9KHJIMTExxqmRZ599NikpSfAhSkpKwsPDXVxcCCH9+vWLiorS6/VCnbyyslKhUIjFYuPefsY0FhcXCzUKR9nZtGtXSgg9epR3KX8kQAiZ7OzsRYsWsV/esGHDTp06JdSZbV/rn33AgAHtTo10UlNTk1wuZ4/j8vDwkEqllpsSq6qqYmlkf1jZu3pwcLBMJsvKyrLQoJbW2EjHj6eE0LAw3qXcQ7AQMjExMWPGjGG/uRkzZiQnJwt7flvTemrEclcBSqXSuO+lWCzOzc0VfIh2qdVqpVIpkUha99MFBQXJZLKMjAzr1CCUDRsoIXToUGqD008Ch5D+8XORm5tbeHi4Q37Qt87n4fT09Llz57JX/6hRo86dOyf4EB2h0WhYGru1WqjH0qhSqbiUZJKzZ6lIRN3daWws71LaI3wImaqqKqlUyj7r+/n5yeVyR+rViIuLe/LuqjULzQzfvn3b2GHj7+9vIw2DDQ0N3377bVhYmJ+fnzGNzz//8ltv0YQE3sXdR2kpDQighNAPP+Rdyn1YKoRMRkbG/PnzjffKzpw5Y9HhrKD1PdJ+/fpZ4h4p67Dp0aMH67AJDw+vqKgQdojO0+l0ly9flkqlAQEBU6eeIIQSQgcOpFIpvXyZfyOYkV5PZ8+mhNA5c6hwc1gCs2wIGaVSOXToUONHmpycHCsMKjjWLcSmRizXLXT+/PnRo0cbO2xSU1MFH0JYOp3u4sXml1+m/fpRFkVCaGAglUrpTz/xf93v3NnSnnbzJudKHsAaIaTt9U/aV39Gm6mRGzduCD7E9evXxXc3ix4+fLjdTS/r9VSlojIZHT789zT27EklEqpU8rkzfvUq9fCgIhE9fZrD6B1npRAyN2/eDA8PZ59zevXqZSOfcx4sISFh+vTpLBvjx4//6aefBB+CLT5i9wN69OgRGRlp793VqalUJqMjR/6eRj+/ljRa8ydLTqZBQXTjRuuNaB6rhpCJj49/6qmn2MvalhdGVVZWWnrxkV6vVygU7AnMrMOmtLRU2CH4YmkMDv49jT4+VCymCgW1Tt+4Wm1D7Wn3wyGEjFKpHDhwoPECLz8/n1cl92IXz927dyeWXHz0448/jhs3jv0XePrppxMTEwU
fwnbcuEHlchoaSkWiljR6e7ek0a4+l1gEtxBSStVqtXFhlI+PT0REhC0sq7HC1EhRUZFxijUwMNCplqHk57dNo5cXFYtpVBQtK+NdHCc8Q8jYzivy+vXrCxcuZPEbMWLEd999J/gQarVaJpN5e3uTu4uPNBqN4KPYhaIiGhVFxWLq5taSRldXGhpK5fKHL7Q1Xt9+9FE7Xx06lBJCd+60RNUWwT+ETGxs7OTJk1kAJk+e/Ntvv1lz9MbGxvXr17PGZT8/vz179jQ3Nws7BFt8xK7A2eIjm7oC56iykioUVCymHh4t0XJxaUnj/frGjSHs16+dj3wIofn4zlJMnz6dDVpmgasilUoVGhrK/sQEBwfb7FwUX1VVLWn09Pw9jcHBVCajbfrGW8/0fPpp2/MghJ1VV1dnXBjVpUsXmUxmnfn61NRUSyw+Yndl2OKjvn37Crv4yFHV1tLjx+lf/kJ9fFqSJhLRiRPpzp2UXbyzEE6eTAmhw4e3bQlACIWRlZVlXBxkj3eu6d3FR/bbn2ALNBqqVFKJhHbrRgmhffpQdpOIhTAykvr5UULoyZN/OAohFFJ0dPTjjz/Oojhz5syUlBTeFXWUUqkcMmSIvXfq2Y6GBvrtt9S4TpOF8P336ZYtlBA6fvwfvhkhFBhbGMX20mQLo2ywm7m1jIyMefPmsfiNHDny7NmzvCtyQCyEW7fS8nLq7U0Joa3XeCGEFnHvuh4bXBjFimSrt2y2SMfAQiiTUUrpK69QQujTT//+VYTQgtLT023zTcbu3q7tXesQ5ue33Gk03tVCCC2uzcctq+31cD9tPrg6/I4etqB1CCmly5dTQuj//E/LvyKE1tB64pHtesRl4rH1FK6z7W3FV5sQpqRQkYiKRJS1GCKE1tN6/z8r34JrfTOT7e9k74uP7EubEFJKFy6khFCJhFJKhw1DCK3r6tWrrZtRfv75Z4sO5/CLj+zCvSG8fJkSQt3caF4eHTUKIbS6e/eEt1BbZrMnXRcAAAJCSURBVGxsrHHn/5CQECs3uILRvSGklIaGUkLoyy/TkBCEkBO2QMGkp6N0XOulHngGDnfthvD06ZZligghZ+w5YQIujLLNRY9Ort0QGgx0zJiWRlOEkL+LFy+OHTvWuGidPTHTDHguqm1qN4SU0qNHf19dgRDyx2ZQevfubZxBMWmNUpuNcCzxcHkw2/1CqNXSQYMQQhtjxkZmrR8uby9bwoFdc/AQMq239BwxYsTp+2xDae+bo4KdcooQMg/ewan1NuGzZs1KT0/nVSc4GycKIW1vL8Pq6urMzMzWD8ywxMPlAR5ARCklTqa8vHzLli2HDh0yGAzdunVTq9V6vd7f33/btm2rV682PjUawDqcMYRMYmLihg0b1Gp1dnb2kiVLtm/fzp6pCGBlzhtCprq6mlLq7+/PuxBwXs4eQgDuXHgXAODsEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDO/h997uu2e2eTwgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVwU9f8H8PeysByCggqYdBgSIHwFD7SvXzVK+qZ534ZZ3rdyCKGipqjkfWeZmZaZeZSoKVmWmX39Zh6wX5VrDdMEERS5hAV2d+b3x/jbDHaHa3bes7vv58M/itmYF36/Lz47OzPvkbEsC4QQPDbYAQixdlRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWwMe5p7slSZNyfH8p+sMIAREBUQj57CvcEpgc6pDr4pPlsKtik/3obuzbqTuri4GKLD0BEYIsdQLr2Ptw76fak+Dbxw12HK9XKqDtROlYX6xnLbXWwcQDGwgMQcVAJjUrMSxzhOiKxbSIAdHXqyrJsZlWmVQUg4qASGlaoLVRVqeLbxOu/MqX1FKsKQERDx4SG5WvzAcDTztNqAxDRUAkNc7RxBAAdq7PaAEQ0VELD2tq1BYBcTe6TX6xiq6wnABENldAwe5l9F6cux0uO67+SXJLsc91HtBqgByCioRIatfyp5cklyQvvLlSqlQeLDk64PWFK6yn2MnsAqGQqK5lKrg8aVqP/ZwsLQMQhY1kWO4N0JRUnJeQlZFZmeim8ZrSeEesZKwPZI+aRi9KlxivlMrm2s9byAhARUAnrsCF/w28Vv633Wv+s4lnsLDXd1dxNyEvo6Nhxjvsc7Cyk8aiEfAq0Bb5pviW6km99vu3XvB92nJqOlxwfkj3ETe6mClS1tm2NHYc0Eh0T8onPjS/RlQxqMUiCDQSAwS0G92ver0hXtDRvKXYW0ni0EhqVWpHaLaubHOTXAq752vtixzEsozIjOCOYASbFPyXIMQg7DmkMWgmNisqJ0rG6CI8IyTYQADo4dJjpPlPH6qJyorCzkEaildCwA0UHwv8I97D1UAWqWshbYMfhU6Qr8k3zfaB9cMT7yDDXYdhxSIPRSmiAmlEvyF0AAIltEyXeQABwk7ste2oZAMTmxlYyldhxSINRCQ1Ym7/2dvXtzk6dJ7aaiJ2lXma0nhHkGHSz6uaTN/4Sc0FvR2vK0eT4p/mXM+U/+/78kvNL2HHq60zZmbAbYc42zlmBWdx1p8Rc0EpYU1xOXDlTPsZtjBk1EAD6uPQZ5jrsEfMo/m583a8mUkIr4d/8Wv5rz6yeDjYO6QHp7RTtsOM0zM2qm4EZgVVM1QW/C92bdceOQ+qLVsK/MMBE3olkgY3zjDO7BgKAt713lEcUC2xkTiQL9LvVbNBK+Jfdhbsn357sZeeVFZjVzKYZdpzGeMQ88kvzu6u5+3m7z8e1HIcdh9QLrYSPlenKFt9dDABrvdaaaQMBwNnGmRsMtSB3wSPmEXYcUi9UwsdW3luZp8nr0axHeMtw7CxNMr7V+O7NuudqctfcW4OdhdQLvR0FAMi+nx2cG6xm1Bf8L3Rz6oYdp6m4j5dc5C5Xva4+1/o57DikDrQSAgDETI3xjPWMkcVYQAMBoEezHtEQ3SqqVezMWOwspG60EsKZM2fCwsJcXFyysrKeeuop7DjCyM3N9fPzKy8v/+mnn15++WXsOISPta+EOp0uKioKABYtWmQxDQQALy+v+fPnA0BUVJROR3MTpY21blu
3bgUAb2/vyspK7CwCU6vV7dq1A4AdO3ZgZyF8rPrtaFFR0QsvvFBYWHj06NEhQ4ZgxxHe4cOHR48e7e7urlKpXF1dseMQw6z67eiSJUsKCwv79OljkQ0EgFGjRoWGht6/f3/FihXYWYhR1rsSpqenBwcHsyybmprasWNH7DimolQqQ0JCZDKZUqkMDAzEjkMMsN6VcN68eVqtdubMmRbcQADo1KnT5MmTtVptdHQ0dhZimJWuhEePHh02bJibm5tKpWrd2sKHBd6/f9/X17e4uPjkyZP9+/fHjkNqssaVsLq6Oi4uDgCWL19u8Q0EAHd39yVLlgBAZGRkdXU1dhxSkzWWcOPGjTdu3AgICJg+fTp2FpHMnTvXz8/v999/3759O3YWUpPVvR3Nz8/39fUtLS09depU3759seOI5+TJkwMHDmzevHlWVlabNm2w45C/WN1KuGDBgtLS0qFDh1pVAwFgwIABr7/+emlp6bJly7CzkL+xrpUwJSWlW7dutra2169ff+GFF7DjiC0zMzMoKEin0128eLFr167YcchjtqLtSaPRqNVq0XZXG8uyERERDMNER0dbYQMBwN/ff/bs2Zs3b46IiEhOTpbJZIhhHB0d7ezsEANIh0gr4cWLF0+fPr148WIR9sXD2dlZoVD88ccfzZs3x02Cpbi4uF27djqd7tEj5PvuV69eHRYWFhISghtDCsQoYXV1dVBQUHZ2tkKhkMvlpt6dMQzDaLVahUKRnZ3t7u6OFQNXXl6ej4+PTqezs7NDXAl1Ol1VVZW/v79SqbS1Fe/tmESJcJH4unXrAMDf37+6ulqE3fHgTlVPmzYNNwai8ePHA8CwYcNwY1RVVfn6+gLA1q1bcZNIgclLmJ+f36JFCwBITk429b7qdOPGDXt7exsbm8uXL2NnQXDlyhUbGxuFQqFSqbCzsEePHgUANze3+/fvY2dBZvISTp06FQAGDBhg6h3VE3cJZc+ePRmGwc4iKoZhevXqBQALFizAzvIYd5Zozpw52EGQmbaEqampcrnczs4uMzPTpDuqv5KSEu5U9aFDh7CziOrzzz8HAE9Pz5KSEuwsj6WlpdnZ2cnl8qtXr2JnwWTaEoaGhgJATEyMSffSUDt27ACAZ555pry8HDuLSMrLy5999lkA2LNnD3aWv5kzZw4A9OnTBzsIJhOW8ODBgwDg4eFRVFRkur00gk6n405VL1++HDuLSLiTQ126dNHpdNhZ/ubhw4fcNfRHjx7FzoLGVKco1Gp1QEDArVu3du7cyR0WSsr58+d79+7t4OCQmZnJLREW7M6dO/7+/mq1+ty5c9xhoUFnzpxhGMYUAVq0aNGtm9FZku+///7cuXO9vb3T09Pt7e1NEUDqTFTuhIQEAOjUqZNWqzXRLppo5MiRADBu3DjsICY3atQoAHjzzTf5X2a683Uvvvgiz361Wi13X/Xq1asF/bnNhklWQv3Qy7Nnz3KHhU9Sq9Xp6emiXbt48+ZNBweHtm1rPjeznuuDuav/mt+vXz+tVmuKDAEBAdxUO2MscvRrA5ii2WPHjgWA0aNHG9yakJBgY2OTmJhoil3XcODAAXt7+/DwcINbJXukJBQzOvrlZm1NnDgROwgC4Uv43//+VyaTOTo6/vHHH7W33rlzp1mzZgDw888/C77rhu5Osp8ZCuWjjz4CM/kcODs7m7uO4rfffsPOIjaBS6jT6bp37w4AS5YsMfiCN954AwDeeOMNYffLY+nSpQDQuXNng8udBM+eCcXszohyM0d69OhhbddRCFzC3bt3A4CXl9ejR49qb9Uvkrdu3RJ2vzwqKiqee+45APjkk09qb5XgdSRCmTdvHpjVtUGlpaXcAeEXX3yBnUVUQpZQ/5e4b9++2lt1Oh33OfXSpUsF3Gl9fPHFFwDg4eFRXFxce6ukrqgUiv4q2UuXLmFnaYBPPvmE55e4pRKyhNwTSIy9ndi1axcAPP300+L//TIM07t3bwCIi4sz+AKJ3Fs
gIDO9X0R/OPPuu+9iZxGPYCXkP7DWL5L79+8Xao8NkpKSwi13WVlZtbfeu3ePu833u+++Ez+b4E6fPg0AzZs3z8vLw87SYPwf7FkkwUo4dOhQAJgwYYLBrVI45p44cSIADBkyxODW9957DwACAgI0Go3IwYSl0Wi4cfcbNmzAztJI4eHhADBmzBjsICIRpoQ//vgjADg7O+fm5tbe+vvvv0vh0+d79+5xdzaeOnWq9taqqipu8Mz27dvFzyagjRs3AoCPj4/5PuxNf2Lp7Nmz2FnEIEAJ9ZcdrVq1yuALBg8eDACTJk1q+r6aaPXq1QDQoUMHg/f4HzlyBABatmz54MED8bMJorCwsGXLlgBw4sQJ7CxNws1llPJljwISoITbtm0DAG9vb7VaXXvrDz/8AAAuLi53795t+r6aqM6pCq+99hoAREREiBxMKNxM8VdffRU7SFNVVFRwTzjduXMndhaTa2oJ9beiJCUl1d6qXyTXrFnTxB0JhX+qQlpamq2tra2trTneZnr9+nUu/PXr17GzCODAgQM8J5YsSVNLOHfuXDB+U6Y0H0bNP1Vh1qxZABAWFiZyqqb797//DQBRUVHYQQTz0ksvAUBsbCx2ENNqUgnT09O58QT/+9//am99+PBhq1atQHr3a/JPVSgsLORiHzt2TPxsjXb48GFzP6CtLSUlRS6XKxQK6YxHMYUmlZBbUmbPnm1w6+zZs3kWSVz8UxW2bNkCAO3bt5fUAs6jsrLSx8cHAD788EPsLAKbMmUKAAwcOBA7iAk1voTHjh2r8+BKsjN8+KcqaDSaf/zjHwCwdu1a8bM1wsqVKwEgMDDQ3E9y1qYfmfntt99iZzGVRpawzo8ZpT/NTv+hrsHlTlIf6vLLyclxdnYGgO+//x47i0lww6ONnViyAI0s4Zo1a3j+XpKSkngWSYmoc6rCoEGDAGDy5MkiB2uot956CwBGjBiBHcRU9L/xN2/ejJ3FJBpTQv53CPpLT7Zt29bkeKbFXehjbLnTX+hz8eJF8bPV06+//iqTyezt7W/cuIGdxYSOHz8u/V/rjdaYEk6aNAkABg8ebHDrqlWruIswzeLNA/9UhXfeeQf9klceDMO8+OKLALBo0SLsLCbXr18/AJg5cyZ2EOE1uIT1vB3B4PWZElTPmz++/PJL8bPV6bPPPgOANm3aWN5YgNr4z4eZtQaXkDt/+s477xjcyn+ngjTx3+Hx8ccfY90Gya+srIwbIbd3717sLCKJiIgAgFdeeQU7iMAaVsL9+/fX5xZ1g4ukZPFPVUAcCMAvPj4eALp27Wqpo+Jq059Y+vrrr7GzCKkBJdQPa9m1a1ftrfq71+fPny9cPJHwT1U4f/68+KNx+HHDVGUy2S+//IKdRVTbt28HgOeff97g3QJmqgEl5B9btm/fPm5smTleblvnVAXxh8TxGz58OAC8/fbb2EHEptVqg4KCAECcubX1kVedB1eA+3O69HQjvkN9S8g/wLOiooIb4Ll79+5GhJACSY1L5XfmzBkAcHJy+vPPP7GzIOB+fGN3kJvO7ge7A9IC7FPs219vvzF/45Ob1Dp1sbbY5CXkXwoKCgpGjx4dEhJi1scn4eHhoaGhxmau8b8REI1Wqw0ODpbUUiC+YcOGAcD48eNF2+NnhZ/BFYjPjb9cfnnXg13Oqc7r7q178gVqnbrRJazXsyiKi4uDgoIKCgpUKhXP8wwqKiqcnJzq/G6SxZ+/oqLC29u7pKSksrJSzFS1OTg4uLm5cYeFuEmw3Lx509/fX6FQlJeXN+g/9O3oq/pU1dDdfevzbeSdyI6OHb/y/or7yq4HuzKrMtd7rde/ppKpdFQ6nn7h9Ksurzb0+wv5IB6zbiCYf349lmXVarVZ/ziS+oVeqitVVani28TrvzKl9RQhd1DPFdPaBmDV9u6774KU3o6uXLnS4AsyMjJ69+5t1p/ZcD9jeHh4YWGhwRfwj/YTXJo6Da7AtyV8t3E05e2oMB/MWLw
7d+44OTnJZLJz585hZ2F/+eUXmUzm5OR0+/bt2lv1Zy8uXLggfjZBfPDBB2D8PAT/aD9TuFl1E67AiWK+2VlilJB94pMJaxiAVcOYMWMAwNgj1sQ3YsQIAHjrrbcMbl24cCEA/POf/5TmJa/89Gfkv/rqq9pb9aco3nvvPdEiVTKVcAU+uv9RjS8++a8ilVB/sv7jjz9uxJ7MlwRP1t++fZtbmQ2erDfrK9oiIyPB+LVp77//Phgf7Wc6XTK6DPh9gP5fTxaffPrq00/2UKQSsiz75ZdfgnUMwNLT6XQhISEAsGzZMuwsf7No0SIwftnap59+CmZ4bbf+Km2lUll7q36RPHLkiMjBThSfkF2RLchdkFqReuDhAff/uS+7+/j/D2qdWn+eMLkkWa1T11gk69TIC7gtfgCW3s6dO0GSF3Drn3D62Wef1d5qpnc58d+vxD/az9SOFB0JTg+2T7H3vu699t5ahmVYli3Tlekvl9H/kafIG/SdBb6VycKUlpZyz9k8cOAAdhYD9u7dC8afcGp29/t+8803AODq6mrwzl26lelvJk+eDACDBg0SPI3UxMbGAsC//vUvaX7CwTBMz549ASA+Pt7gC8aNGwcAI0eOFDlYI1RVVfn5+QHApk2bDL6AWyRnzZolcjARCD/ewmKYxXiLy5cv8zzh1IxmQHHTnPz9/Q0OZOAf7WfuGjnoae3atWDRA7BYlh04cCAATJkyBTtIHd5++20AGD58uMGtK1asAMlPQ9T/Wk9OTq69VT/oacuWLeJnE0FTRx5a6t8L95xNsxh5qB8pYnC5U6vVzz//PADs2LFD/Gz1xD/h1+J/4zd++K8FD8DSD/9dt25d3a+WgMTERJ7lTj8h39hVYLhSU1PlcrmdnZ3BWffWcOzTpDH4lnqsvHnzZjDPMfgffPCBwRe8/PLLABAdHS1ysPoIDQ0FgJiYGINbreFTQBM+EMZM6R8Ic/z4cewsDfDVV1+B8QfCKJVKuVwuwaem6Z9/VlRUVHurlZwPa+qj0SxvANbMmTPBnB+NFhkZaXCrBJ8fWueTQPlH+1kMwR4SKv6VRKagf87mtWvXsLM0GH/4goICV1dXADh58qT42QxKSEgA48/Etp5rJAV4XDbWNbWmwL+YSN+MGTN4lrsNGzYAgI+PjxSOdXNycrib486ePVt7q1XdLSBACVHuLjGFr7/+muewyizoD2i/+eab2ls1Gk1gYCAAbNiwQfxsNYwdOxYARo8ebXCrVd03J0AJWYz7LAWnf46NsQ8YzcWmTZt4Ptr9/vvvAaB58+Z5eXniZ9Mzo9l2IhCmhCzGACxhcafaAgICpHxlSX3oT3KuX7/e4Av69+8PANOmTRM5mJ7ZTXk1NcFKmJ2dzU1VMPhkFYnTX3Ty3XffYWcRgP5yH4PL3Y0bNxQKhY2NzeXLl8XPxrLs7t27wfi8c/0iKZ1bqE1NsBKyLLtgwQIznarAf/mlORowYAAATJ061eDW6OhoAOjZs6f4/0vpn/yxb9++2lsl++QPkxKyhPqpCp9//rmA39bU+G9EMFM3btzgbgG5dOlS7a0lJSXcfZKHDh0SOdj8+fPB+DOwdu3aJc1bqE1KyBKyLLtnzx7unUZZWZmw39lEGIbp1asXACxcuBA7i8BiYmJ4bobcsWMHADzzzDPl5eWiRarn0yD3798vWiQpELiEDMNwx9yLFy8W9jubCP/N6WZNPxbg4MGDtbfqdLquXbsCwIoVK0SLxD8vVOLPRTYdgUvI/v9UBQcHB4OfPkuKfkzLp59+ip3FJLgBOcaWO/75pYLjP49lFrdQm4jwJWRZ9s033wSAUaNGmeKbC4h/YJkF0I+KS0hIMPiCkSNHOjg4GJzwKbhly5bJZLJVq1YZ3Dp48GAAmDRpkghJpKZeD4RpqNzcXD8/v/Ly8p9++om7icaYWbNmqVQNfkBHfTg6OnKDg4z5888/O3TooFarz507xx0WWqTz58/37t3bwcEhIyODuxD
sSTk5OVqtlruKWgSXLl0KCgqyt7ev8fUff/zx1VdfdXFxycrK4g4LrYuJyr18+XIACA4O5r/sqEuXLib6uZycnPgT8g+xtiSjR48GgLFjx2IHMUyj0XTs2BEA1qxZg50Fh0lWQgCorKzs0KHDrVu3Pvroo2nTphl72cWLF0tLS00RQC6Xv/LKK8a2/uc//3nppZccHR0zMjJ4HvZmGe7cuePv769Wq3/++WfukeaSsm3btoiIiPbt26elpdVeJK2C6fp96NAhAHB3dzd4vyYilA8GcS1ZsgQAunTpIrWj34cPH3JXnB87dgw7CxoTlpD9/8kF8+bNM+leGurDDz8E0U+R4ZLs88xnz54NeEO1JcK0JeSf4YOiqKjI3d0dAA4fPoydRVT79u0DAE9PT+ncI5uWlmZrayuXy69evYqdBZNpS8iyLHdA2L9/f1PvqJ6ioqIAoFevXtZ2RphhGO6AcP78+dhZHnvttdcAYO7cudhBkJm8hPqpCgbnuoosIyPDzs4O8QYCXFeuXJHOVbJJSUkA4ObmZr63UAvF5CVkWXb9+vUA4OPjU1VVJcLueLz++usAMH36dNwYiCZMmAAAQ4cOxY2hv4V627ZtuEmkwFSnKJ5UXV0dFBSUnZ2tUCjkcrmpd2cMwzBarVahUGRnZ3OHhVYoLy/Px8eHYRhbW1uZTIYVQ6fTVVVV+fv7K5VKW1tbrBgSIcbPr1Ao9u7de/r06cWLF4uwOx7Ozs52dnZWejIKAAAcHR3t7OwYhikrK8NNsnr16rCwMGogAIixEnI0Go1arRZnXwaxLDtgwIDz58/HxcWtWbMGMQmiqKioLVu2hIaGck8xQMT9OsDNIBHilVAKUlNTQ0JCuMmc3ANtrEpmZmZQUBDDMFeuXAkODsaOQx6zwQ4gqs6dO48fP766ujouLg47C4J58+ZpNJqpU6dSAyXFulZCAMjPz/fz8yspKTl16lTfvn2x44jnxIkTgwYNcnV1ValUVvu5lDRZ10oIAJ6engsXLgSA6OhojUaDHUckGo2Ge/T30qVLqYFSY3UlBIDo6GhfX9+MjAxuzoo12Lp1a1ZWlr+/P3etJpEUq3s7yjl27NjQoUPd3NxUKhX3QBsLVlBQ4OfnV1xcnJyczF2uQCTFGldCABgyZEjfvn2LioqWLVuGncXkFi9eXFxcPGDAAGqgNFnpSggAGRkZwcHBDMOkpqZyd3ZbJKVSGRISYmNjc+3aNT8/P+w4xAArXQkBoEOHDtOnT9fpdNx9FZYqKipKp9NFRERQAyXLeldCACgqKvL19X3w4EFSUhI3EtPCHDp0aMyYMe7u7iqViruXhUiQ9a6EAODm5sY9By8mJqayshI7jsDUajU3c37lypXUQCmz6hICwMyZMzt27Hjz5s3NmzdjZxHYunXrbt261alTp8mTJ2NnIXys+u0o58yZM2FhYc7OziqVymKGXupHv549e5ab9EMky9pXQgDo06fPkCFDPF702HRvE3YWwWzM3+jRzWPkyJHUQOmjlRAAIPt+dnBusJpRX/C/0M2pG3acpvq1/NeeWT1d5C5Xva4+17rm1G0iNbQSAgC0d28/x30OA0zknUgWzPu3EgNMVE4UC2ykRyQ10CzQSvhYma7ML90vT5O3r92+N1u+iR2n8fYU7pl0e5KXnVdWYFYzm2bYcUjdaCV8zEXuktg2EQDm584vZ8qx4zRSma5s0d1FALDGaw010FxQCf8yvtX47s2652py1+avxc7SSIn3EvM0eT2a9Rjbcix2FlJf9Hb0b7iPNBxsHNID0tsp2mHHaZibVTcDMwKrmKoLfhe6N+uOHYfUF62Ef9OjWY/wluFqRj0/dz52lgaLyY2pZCq59Rw7C2kAWglrytXk+qX5lTPlZ33PhjqbzUm2M2Vnwm6EOds4ZwVmtbVrix2HNACthDV52XnFecYBQNSdKB2rw45TLzpWF5UTBQCL2iyiBpodKqEB73i+007RTqlW7i7cjZ2lXj5
88OE19TVve+8oD0u+LctS0dtRww4WHXzjjzc8bD2yArNc5ZK+BaFIV+Sb5vtA+yDJO2moqwXekGXxaCU0bIzbmFDn0AJtwcp7K7Gz1GHp3aUPtA/6uPShBpopWgmNUqqVIZkhNmBzrcM1PweJ3paeUZkRnBHMAJPinxLkGIQdhzQGrYRGdXLsNLHVRA2ric2Nxc5iVHROtIbVzGg9gxpovmgl5FOgLfBN8y3RlST7JL/eXHKjyo6VHBuaPdRN7qYKVLW2tfDBjRaMHkzFx8PW492n3r2qvtrJsRN2FgNCnELGtRzXvVl3aqBZo5WQEGR0TEgIMiphY9zT3JOlyLg/P5T9YIUBiICohHz2FO4JTA90SHXwSfPZVPDXBJo2dm3UndTFwcUWH4CIgD6YMWrvw72Tbk+KbxM/3HW4Uq3kLiWN9Xx8usLBxgEYCw9AxEElNCoxL3GE6wjudvuuTl1Zls2syrSqAEQcVELDCrWFqipVfJt4/VemtJ5iVQGIaOiY0LB8bT4AeNp5Wm0AIhoqoWGONo4AgHg/IXoAIhoqoWHcrbG5mtwnv1jFVllPACIaKqFh9jL7Lk5djpcc138luSTZ57qPaDVAD0BEQyU0avlTy5NLkhfeXahUKw8WHZxwe8KU1lPsZfYAUMlUVjKVXB80rEb/zxYWgIiDrh3lk1SclJCXkFmZ6aXwmtF6RqxnrAxkj5hHLkqXGq+Uy+TazlrLC0BEQCUkBBm9HSUEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVPG5i4UAAABaSURBVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZFRCQpBRCQlBRiUkBBmVkBBkVEJCkFEJCUFGJSQEGZWQEGRUQkKQUQkJQUYlJAQZlZAQZP8HLSkn/KpvXDUAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAbS0lEQVR4nO3deVRU5/kH8GcWdlDRIHUXiQsQNfancYFatZqoJabGikul0RitrUoiGpeDkaRxq0aDW90ae7QeTTRRkcTYHBNPQHGriAZjVEhESJQEZRUYmJn398drJ1NGCMvc+9wh38/JH/eMeJ+X4Je587z3vq9OCEEAwEfPPQCAnzuEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGZG7gFoXWFhYVZWFhF5enp6eXnJF+2Pvby8PD092cYHrg8hrM3x48dPnjy5Zs2aOn69fSC9vb09PDzksY+Pj7u7u+Oxr6+vm5vbtGnThg0bZvti+LnRCSG4x9AQJ0+eHDBggO3tSAllZWWhoaHZ2dnBwcEtWrSoqKgoLy+Xf1ReXl5RUeF43DBhYWE9e/bcv39/Y0fMp6ioaNWqVfJYr9c3b95cHhsMhmbNmsljo9Ho5+fneOzm5ubr6yuP3d3dfXx8HI89PDy8vb2V/z54uOQ74dq1axctWjR79uxNmzYpV2XlypXZ2dl9+vS5cOGCwWCoy18pKyszmUzy+MGDB5WVlY7HpaWlVVVVtuO8vLx58+Z9+eWXc+fOHTRokLO/CTUcOXIkPj7+ypUrilaZNGnSzJkzhwwZomgVFi75Tnjp0qUBAwZUVVUlJSX99re/VaJEZmZmz549TSbT6dOnBw4cqEQJm7i4uJUrVw4cOPD06dM6nU7RWk5XXl4eFhb2zTffjBkzRv4SsVgsxcXF8k/NZnNJSYnjcVVVVWlpqTyurKx88OCB47HJZCorK5PHFRUVOp1Op9Ndv369Xbt2qnxnKhKuafXq1UTUunXrvLw8Jc4/evRoInrxxReVOHk1xcXFv/jFL4jogw8+UKGccy1dupSIevbsWVVVpWih3//+96r9RFTmqiG0WCxDhw4lot/97ndOP/kHH3xARP7+/gol3NGWLVuIqEuXLiaTSZ2KTpGZmenp6anT6T7//HOla3399dceHh56vf7ixYtK11KZq4ZQCJGTk+Pv709Eu3btcuJpy8rKOnfuTESbN2924mlrZzabw8LCiGjDhg2qFW08+Vlg2rRp6pSbN28eEQ0bNkydcqpx4RAKIfbs2UNEPj4+N27ccNY54+LiiKhPnz5ms9lZ56yLxMRE+fZ77949Nes22KFDh4ioWbNm3333nToV79+/36pVKyL6+OOP1amoDtcOoRBi4sSJRDRo0CCnZObmzZvy+io1NbXxZ6uv4cOHE9HChQvVL11fZWVlQUFBRLRp0yY167711ltEFBISovRHUDW5fAgLCgo6duxIRG+++Wbjz6ZmP8bRpUuX9Hq9u7t7ZmYmywDq7pH9GJPJ9Nlnnyla12QyPf7440S0Y8cORQupqc4hvHlTdOr08PjgQfHccw+PjxwRISEiKEg8/7zIz3f6+OrixIkTer3eaDSePXu
2MedRvx/jKDo6mogmTZrENYC6qKkfs3LlSiJ69dVXnVXo66+/njBhwtq1a+1ffPfdd2VjvLi42FmFeDUuhHfvCn9/ce2aEELMni1mznT+AOtGfmR//PHHS0pKGnYGln6Mo9zcXG9vb51Od/r0acZh1E72Y6ZOnWr/4u3bt+UNLsePH3dWoU8//ZSIWrRo8cMPP9hetFqt4eHhRBQfH++sQrwaF8L9+8WoUQ9fzMgQHTo4eXR1VlFR0atXLyL685//3LAzcPVjHC1ZsoSIBg4caLVaeUfySIcPH35kP+b5558nogkTJji33DPPPENEL7/8sv2LZ86c0el0Xl5et2/fdm45FvUJocEgOnUSnTqJgICHIdy0Sfzxjw+/IC9PeHoqMsa6ycjIkNdISUlJ9f27vP2YaoqLiwMDA4no0KFD3GOprqZ+zCeffEJE3t7et27dcm7FL7/80mg0urm5VWuAjxs3joimT5/u3HIsGvdO+N574plnHr74xReic2cnj66e1q5dS0QBAQF3796t11/k7cc42rx5MxEFBwdrbe6+pn5M9+7diehvf/ubEkVfeuklIho3bpz9i1lZWXLuPi0tTYmiampcCH/4Qfj7i6++EkKImBghrxlOnhSXLzt7nHVisViGDRtGRM/Z+kZ1oIV+TDVVVVVy7n7jxo3cY/lRTf2YFStWEFG3bt0qKiqUqHv37l35yEVycrL966+88krTmLtvdHc0MVGEhYmuXUVUlCgsFBkZws9P+PsLpr5Cbm5uy5YtiWjnzp11+XqN9GMcybn7li1bamfuXrV+jKP4+Hgieuqpp+w/J9+/f1/+rBUtrQJnzxOaTCIqShAJb2/BdFvDwYMH5W00169f/8kv1k4/xtFvfvMb7czdq9yPqaasrKx9+/ZEtH//fvvX5QeQ0NBQl567V2Cy3mwWM2YIIuHuLt57z/nnr4NJkyYRUd++fSsrK2v5Mls/RoX7jxtAO3P36vdjHO3cuZOIOnfubH/RazKZgoODiegf//iH0gNQjjJ3zFitYuFCQSQMBsFxZ0NhYWGnTp2I6PXXX6/ly7TWj3E0ZcoUIpo8eTLvMFj6MdVYLJY+ffoQUbW5e7kiQWBgoOvO3St529rq1UKnEzqdUOWHVE1ycrLBYDAajWfOnHnkF8h+TLNmze7cuaPy2OouJydHzt0zzp1w9WMc2ebu8+3uzbJarfJhYtedu1f43tGtW4VeL4jEokVC9annBQsWyEa/4+9IzfZjHC1evJh37p6xH+NIzt2/8sor9i+mpqbKufucnBw1B+Msyt/AvX+/cHMTRGLWLGGxKF7OTkVFRe/evYlopsP9dLIfo8Lz4I1nm7s/fPiw+tV5+zGOrly5YjAYHOfu5XheeukllcfjFKo8RfHRR8LbWxCJiRNFrZ0Sp7t69apckS0xMdH2osb7MY645u610I9xNH369EfO3bu7uxsMhitXrqg/pEZS61Gm5GTRvLkgEqNHVz14oFJRIYQQ69evl7fR2D77ab8fU01VVVVoaKj6c/e192PWrFmj5mBsvv32W3klnJKSYv96TEwMEY0cOZJlVI2h4vOEX3wh2ra9PHhw//791ZyAtlqto0aNkj8eq9XqEv0YR0eOHCGixx57rLCwUJ2K2unHOKp97v7f//4318AaRtWHeh/cuBHUubOcHFfzHjHbbTSbNm1ylX6MIzl3v2jRInXKaaofU01JSUmbNm2I6N1337V/Xa6V3qtXLw3eelELtZ+s/+6775544gm5spiac9DyDdDNzc1V+jGO0tLS9Hq9p6enCp/EtNaPcbRjxw4iCgoKeuTc/TvvvMM4tvpiWN7i3r17/fv3J6I2bdoo/TH622+/PXr0aHx8fGRkpK+vr1xA9tSpU4oWVc4f/vAHFZ6712Y/phqz2dyzZ08ieuutt+xf37dvHxG1bdu2tLSUa2z1xbMCd2lp6dixY0+cONGyZctjx47JTDaeECIrKyvtvy5dupSfn2//BUa
j0Ww2HzhwYPz48U6pqLLs7Oxu3bp5eHjYlrKuacOGuuwhVdP2NWfPnv3888979uyZlpZmNP64UUJqauqMGTNeeOGFhQsXKvhN1tnx48dHjRrVokWLzMxMuQobEQkhIiIiUlNT33jjjWXLlilVOzGRliyhigrq04d27KBWrSgzk4YPp1u3iIjef5/27qUjR+p4MrZl8E0m0+TJkw8dOuTr63v48GG50Fh9WSyW7Ozsq1evXrx48eLFi2fPnq2WuhYtWoSFhf3ff506dWrWrFlBQUHXrl1zxV2QLl261LdvXy8vL9ta8Qpp0aLFunXrXnzxxWqvy1005FW9FkycOLFfv35z5syx/2meOXMmPDzcy8trxYoVtjXza9p2Rv7m6qjTGYUgDw+ybTvj6Uk1bTeUl0chIZSaSj160Jw5VFVF27e7ZAiJyGKxzJgx45///KeHh8e+ffvk543amc3m69evX/yv9PT0av8c27RpExYWFhoaKlMXGhpqv7uDxWJ58sknMzIy1q1bFxsb6/xvSUlCiCFDhiQnJ8fGxq5bt06+aL9hQ7XNG35yD6matq9JTEw8c+bMkCFDTp48qfy3pYiQkJCysrLbt2/X8etNnTu7y/w8kpcX2bagjIigyZNpzx46doyI6OpVGjWKbt921RASkRAiNjY2ISHBYDDs3Llz2rRp1b6gqqrqxo0bttSlpaXZ/m1Jbdq0sb3R9evXT27qUIuPP/549OjR/v7+N2/etF3DuIRdu3ZNnz49MDDw+vXrtr3HlFBYWNi1a9f8/PykpKTIyEjlCikkPT29b9++RPTcc8/ZttOqfduZq61be37/PVVUkO1fV3k5PXLHu8GDafx4unCBdu8mIvr+e+rUicrLKTOTevSg9u2JiMrKaNCguodQE+uOyt1ddDrd+vXrS0pKUlJStm/fHhMTEx4e7njR2KZNm8jIyPj4+KNHj37//fcNKPf0008TUWxsrNO/EeUUFRXJ3y979+5VoVxCQgIR9ejRw+XayFardfDgwUQ0b948p520rEzcv//wv3v3Hr2qS01PvdeBJkIohFi/fr1OpzMYDHq93j5yRqOxV69eU6dO3bBhQ0pKSoNXNLR3+fJlg8Hg7u5+8+bNxp9NHbNnzyaiiIgIdW7jrqys7Nq1KxFt3bpVhXJOtGvXLiIKDAxU8K6GR67q0gRCKIT405/+5Ofnp9frQ0NDo6OjExISUlJSHihzj5u87o2KilLi5E535coVo9FoNBrT09NVK/r+++8TUUBAQFFRkWpFG6moqEhO4v/rX/9StlK1VV1EUwmhvB9qwYIFKtSq6f5DDbJdX6l//RwREUFEcXFxKtdtsDlz5qh5veAsGgrhzJkziWjLli3qlHvttdeIqH///hr/gb3zzjuKX1/V4Ny5c/I5vezsbJVLNwDL9YJTaCiE8nnNDz/8UJ1ytvsPDxw4oE7FBlC5H+NowoQJRPTCCy+wVK87RfoxatFQCHv06EFEGRkZqlXctm0bOdx/qCkq92Mc2fbH/c9//sMygDpSox+jGK2E0Gq1yluu1Fyux2w2y7vJ169fr1rRutPI9dX8+fOJaMiQIYxjqJ16/RhlaCWEeXl5RNSqVSuV63744YdE5O/vn8+0r1tNrFbrwIEDtTCfWVBQIO9qUO2TQn25aD/GRishPH/+PBH98pe/VL/0iBEjiGj+/Pnql64FYz/G0dtvv63ZuXuNXC80hlZCeODAASIaO3as+qXT09PlGrvambu/f/9+69atGfsx1djm7rdt28Y9lv/h0v0YG62EUK5nXm0pO9VMnTqVtPGsqsTej3EkNxdo3bq1pubuXbofY6OVEMrL+rfffpulem5uro+Pj0ae901LS5PLFmvt+krO3S9dupR7IA+5ej/GRishfPbZZ4lpaU1Jriw2YMAA3jcf7fRjHJ09e1ZTc/eu3o+x0UoI5WbXjBs+lpSUyGnxgwcPco1BaKwf4ygqKoocVn9i0QT6MTZaCaF8QI5
3L76tW7cSUZcuXbjm7rXWj3Gkkbn7ptGPsdFECAsKCojIz8+Pdxhms1nuj8v10VSD/RhHckWCoUOHMo6hafRjbDQRwvT0dCJ64oknuAcikpKS5Ny9+u/Jmu3HVHP//n05d//RRx+xDKDJ9GNsflxLi9GtW7eISO4oyCsyMnL48OEnTpxYtWqVnDVRhxBi9uzZFoslNjZWbmKjWf7+/nFxcbGxsQsWLHj66aftl2OT5DLn9su31fe4dnFxcXfu3ImIiJALQDYF3L8FhBBiw4YNRPSXv/yFeyBC2M3dq7k2scb7MdWYTCY5d799+3bHP7UtNtVg3t7eNV0WNaV+jI0m3gmzs7NJG++ERNS7d+8pU6bs2bNn6dKlchdYpRUUFCxZsoSI1q1bp+gKTs7i7u6+YsWKqKio1157beLEic2aNbP/U51ON378ePul3Gpa1q2m47Kyskeu6SiEmDNnjtlsnjdvnsavF+qH+7eAEEKMGzeOiN5j2uDeUW5urtwf9/Tp0yqUc4l+jKPw8HAiWrZsmdPPXFpa+sgrgibWj7HRRAjlAnVnz57lHsiP5C6iKuyP6yr9GEe2ufvbt2+rUK7p9WNsmNcdlQICAvLz8+/cufOTq4aqprS0tGvXrnfv3p04ceKTTz4pX7RvHvj4+Li7uzse+/r62hao9vPzs/Ut7I+bNWsm18MUQoSHh585c8Z+PV8XEhUVdfDgwWnTpsn3KEXNnTt38+bNERERycnJ9gs6NwXcvwWEvPr39PTU1MWY1WodNGhQQECA0v//3d3dXWtFM3u2ufuLFy8qWqhJ9mNs+Bszcn6iY8eOmvr1FhMTk5qa6uvrO2vWLFuzpKZGQmlpqdykgYhKSkrMZrPjcXFxscVikcdFRUVWq1UeG41GLy8vr5q2PdC2oKCgWbNmbdiwYejQoe3bt6/XFjR1PxZCrFq1qgn2Y2y4fwuIY8eOEdGIESO4B/IjeTO3l5fXyZMnFS1UVVUVEhJCrrlpqbR8+XLHqUKn69KlS7t27Vz0euEn8b8Tamp+gog2b968fPlyg8Gwd+/eIUOGKFrLaDSuXLly7Nixr7/++pQpU1xifsJeXl7e2rVrzWbzzp07Bw4c+MhtZ+p7/Mi5jQEDBsycOdO2rVJTw/1bQCxevJiI3nzzTe6BCCHEnj179Hq9TqfbtWuXakWHDRtGRIsXL1atorPIe1bGjBnDPRDXxh/CSZMmkTb6zkeOHJFXViovvqbmPthOlJKSIqcosrKyuMfi2vhDKJ9hTU5O5h3Gp59+KvsBf/3rX9WvPnnyZCKaMmWK+qUbpqqqSvZI3njjDe6xuDz+ELZt25aIeB/WPnfunJ+fHxHNmTOHZQA5OTnyHp0LFy6wDKC+5KxmcHBweXk591hcHnMITSaTXq83Go2Ma+llZGTIZ3Oio6MtFgvXMBYtWkREv/71r7kGUHd3796VPaSkpCTusTQFzCG8efMmEXXo0IFrANnZ2R06dJDdBd5FNYuLiwMDA4koMTGRcRh1gX6MczGH8Jtvvhk7dqxOp4uMjDx//rzK1fPy8rp160ZEw4YN08Jl1caNG4moe/fulZWV3GOpEfoxTsf/mXDLli3yxkudTjdy5MjPPvtMnbqFhYXyptCnnnpKzQ0waqH9uXv0Y5TAH0IhxJ07d+Lj421T1X369Nm9e7fZbFau4oMHD+QqmmFhYZraheLQoUNEFBAQoM2nddCPUYImQigVFRUlJCTIZikRdenSJSEhoayszOmFTCbTyJEjiahjx47qPIZTL3LufsmSJdwDqQ79GIVoKIRSRUXF7t275Uc1IgoMDIyPjy8oKHDW+c1ms1w8s3Xr1l999ZWzTutEmp27Rz9GIZoLoWSxWI4ePdqvXz8ZRT8/v5iYmNzc3Eae1mq1zpgxg4iaN2/OuNDwT9Lg3D36McrRaAhtUlJSIiMjZRTd3d2jo6OvXbvW4LO9+uqrROTt7Z2SkuLEQTpdTk6Ol5eXdubu0Y9RlNZ
DKKWlpUVHR8un0fV6fWRkZAPWwli+fLlM8rFjx5QYpHMtXLhQO3P36McoyjVCKGVlZcXExNieFg0PDz969Ggdn8f/+9//TkQGg0E7y0nVrqCg4LHHHtPC3D36MUpzpRBKeXl58fHx/v7+Moq9e/fevXt37Te77Nu3Tz6gtGPHDtXG2XhyOVb2uXv0Y5TmeiGUiouLExIS2rVrJ6MYFBRU03zGJ598IldnWrNmjfrjbIzKykrZJd6yZQvXGNCPUYGrhlAymUy7d+/u0aOHjGJAQEB8fLz9NhKpqak+Pj5EFBcXxzjOBuOdu0c/Rh2uHUJJzmf0799fRtHX1zcmJiYnJ+fy5cvyqnXWrFncY2y4X/3qV1xz9+jHqKMphNDmxIkTI0aMkFF0c3OT74GTJk1ifECp8c6dO6fT6dSfu0c/RjVNKoRSenp6dHS00WgcN27ciBEjuHb8dCK5Akh0dLSaRdGPUY0mVuBWQlZWVvv27eUK89xjaaxbt26FhISYTKbz58/LLQOUdurUqcGDB3t6emZkZHTp0kWFij9neu4BKCU4ONjDw6MJJJCIOnfuPHfuXCHEggULVChnNpvlMh+LFy9GAlXQZN8Jm5jCwsKuXbvm5+cfPXr02WefVbTW+vXr58+fHxwcnJGRYVsMG5SDELqMjRs3vvzyy506dVq9erWPj88j15mvae9b+y1rapeXl9e9e/eioqKkpCTbXbugKITQZchZO4vFcuPGjcacp/Y9pHJzc/Py8saMGZOYmNjYEUPdIISuxGQybdu27fTp03VZQ76m7Wt+0rJly6ZOnRoUFOTUsUONEMKfnZ/cQ6p3795No6HlKhBCAGZNdooCwFUghADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjD7f8l4ls08HQK1AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAdjUlEQVR4nO3de1BU5/kH8GdZrnIRQVREFBBBbSLBS8zIeA1Gq1iNiplovTTYTZzpkJm06Zp2EmqSSdc202o7zcwaxaIZU5lcDI2NFu94F0QMilxEBIEocnG5LJdl9/fH29/OBrmcPXvOeRf2+5n80ZjznvO43e/Zc55zeVUWi4UAgB833gUAuDqEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzhBCAM4QQgDOEEIAzd94FAPRu7969Pj4+V65cSUlJiYuL412OjFQWi4V3DQA/0t7evm3btn/+859BQUENDQ1ENHXq1E2bNm3dujU4OJh3ddJDCMG51NTUrFmz5vLly97e3mlpaU1NTenp6XV1dUTk5eX1s5/9TKPRvPjiiyqVinel0rEAOI0LFy6EhoYSUXh4eG5uLvvD9vb2zMzMpKQktVrNvrTh4eFarfbevXtci5UMQgjOYs+ePZ6enkQ0b968hw8fPr3AgwcPdDpdZGQki6Kbm1tiYmJmZmZHR4fy1UoIIQT+urq6tFoti5ZGo+ns7Oxn4e7u7uzs7I0bNw4bNowNGTFihEajuXHjhmIFSwshBM7q6uoWLlxIRF5eXunp6cIHNjY26vX6+Ph467nVjBkzdu3aVV9fL1+1ckAIgaf8/PwJEyYQUVhY2JUrV8StpLCwUKvVWhun3t7eycnJ2dnZZrNZ2mplghACN4cOHWKHlAkJCbW1tQ6uzWg0ZmZmJiYmWhunMTExaWlp9+/fl6Ra+SCEwIHJZLI9CZS2s1JZWanT6dgPLBGp1WrWv+n/VJMjhBCUVl9fv3jxYiJyd3ffvXu3TFth/Zvk5GTWcSWiMWPGpKam3rx5U6YtioYQgqIKCgqioqKIKCQk5PTp0wpssaGhQa/X2974NmPGDL1e39zcrMDWhUAIQTmZmZm+vr5EFB8fr/ypWm5urkaj8ff3Z1H09/ffuHGjM/RvEEJQgtls1ul0bm5uRLR+/fq2tjZelTzdv5k8ebJOp+v19gBlIIQgO4PBsHLlSnYSqNPpxK3k008/3bJlS05OjlRVFRcXp6WlhYeH9+jfdHV1SbUJgRBCpbW2tj58+LCmpoZ3IQopKSmZMmUKEQUHB584cUL0eqwX5WNjY3fu3On4JQ3GZDKx/o2Hhwdb/9ixY7VabWlpqSTrFwIhVBRrS/z0pz+13vfotH1zSRw9ejQwMJCIpk2bVl5e7siq5L7wUFtbu2vXrmeeeaZH/6alpUWS9fcDIVSOtS0xduxY2775b3/72zt37vCuTmK2J4GvvPKKVF9lBS48sP6Nn58fW//w4cM1Go2Eh8FPQwiVwL6RrBOwYcOGtrY25++bO6K5uXnNmjVEpFKp0tLS5Gg/yv0BPnnyJCMjIzEx0br+qVOn6nS6R48eSbJ+Wwih7PpvS/Tom/v4+Ayu+x6fVlZWxg7qAgICvvnmG7k3J/eFh6KiIq1WO2rUKLZ+Ly+v5OTkrKwsk8kkyfotCKHciouLhbQlnLBvLs7x48dHjBjB7tu8ffu2YtuV+wPs6OjIyspKTk5
2d//fa5nCwsK0Wu3du3cdXzlCKCNrWyIuLk5gW4L1zcePH8+9by6CXq9n39Hly5c3NTVxqUHuCw/V1dU6nW7ixIls/a+99prjB6gIoSwcbEs83TcPDQ1VuG9uF6PRuHnzZnYSqNVqu7u7+dYj94UHs9l88uRJdkb6u9/9zsG1IYTSk7Atwfrmzz77rPJ9c+GqqqpmzZpFRH5+fl9++SXvcn5E1gsPhw4dIqLVq1c7uB6EUGIytSV69M0DAgLk7psLlJOTM3r0aCKKjo4uLCzkXU6f5LjwcP36ddY1dbA2hFBKx44ds7YlioqKJF+/kn1zIfR6PTveW7p0aUNDA5ca7CLtB9ja2urm5ubp6engCaf0Ifyg9gPKI/ZPcEGw5Ot3Wkq2JRTom/evvb09JSWFbT01NVWx7UpFqg+Q3cFTXFzsSDHSh/Ba67W/P/r73x/9PaowykVCyKstIWvfvB/V1dUvvPACEXl7e2dkZMi6LVk5/gEuWbKEiBw877A7hJ/Vfzbnzhz/G/7e+d4zi2Z+3fh1X0smFCe4QgidoS3Ro2/u5uaWkJCg1+tbW1sl39bFixet7+e9du2a5Ovnoqqq6oMPPmBPG7MPcPHixUL+dm+++SYR7dy505Gt2xdC7QMtO8jcXLF5S8WWUQWjKI90P/T+cIorhNDxtkRZWVlZWZlU9bD2A7tDlYgCAwM1Gk1eXp5U6z948KCPjw8RzZ07d9DdSCCE7Qd4/fr1AZf/5JNP2NVCRzZqRwivt15X5akivo/4ofMH9icNpoa423FueW63jb3cGzHkQyhJW2LTpk2SX3hoamrS6/UJCQk92g+PHz8WvU673s872DU2Nn722WdCljx16hQRzZkzx5HN2RHC1KpUyqP0xz96PevV1qupVanlHb3cDjKEQyhhW2Lbtm22P1zbtm2zzsHguFu3bmm12pEjR9q2H0TcV1lXV7do0SK2hn379klV3hBQW1tLREFBQY6sxI4Qzr4zm/KoqrNK4PJDNYSStyXkvvDg4IQq+fn5ERERRDR27NjLly9LUtJQwi5KOXJwbkcIw26Gqa+rzRahO9EhGUJZ2xJyX3gQMaHK559/zt7PO2fOHNd5G4Bd2B757NmzotdgRwiDC4J98n2ELz/0QqhMW0LuCw/WCVXY34X6mFBF1vfzDiVbtmwhIr1eL3oNdoRw/Pfj3fLcTGahe+VeQ9jc3dxubhe+USfBpS0h94WHfiZUsX0/r+hXM7mIP/7xj0T01ltviV6DHSGcXzyf8qi0ved96H2FqtcQvl/zfuCNQM19zfXWgfu/ToJvW8JsNufk5Mh64eHq1atvvPHG8OHD2fqHDRvGJlcZPXq0M9ye6uS+/vprIlq2bJnoNdgRwvdq3qM8+tujv9n+YXVndeCNwL88/MvTy/cawpfvvmy9qW1m0cxPHn3SZOLz4JlAztOWkOPCgy3b52InTJgQGRlZUVEhyZqHtqKiIiKKiooSvQY7QljRUeGd7x16M7Si43//3zR3Ny8sWajKU11qufT08n2dE94y3tI+0I4sGMmi6J3vnVyenG3IFt7yUYxztiWkuvDQl3fffZeIfvnLX0qytiGvs7PTw8PDzc1N9BuN7btj5tO6TymPAm4EbLi3IaUiJfz7cMqjD2o/sC5wpOkIu3GU3Tvqm+9r/dcrLT+afa7d3J7ZkJlUlqS+rmZpDP8+XPtAe6/jnri/ibScvy0h30zux48fJ6IFCxZIUaZLmDx5MhEVFBSIG273vaMnDCcWly4OvBHoed3zudvPfdX4le1/fbHkRevRZo9/0mrSel1hVWeV7gdd5PeRbDG3PLfEksTMhswOM7fv/eBqS1RVVUk7k/v9+/eJaMyYMdLWOYStWrWKiA4fPixuuLM8T9ht6c42ZG+8t9En34elccSNEZr7mhttSk9EfvPmTXYj78iRI5WZNkgSAi88CGE2m9nDr4PiEUFnsH37diLasWOHuOHOEkKrRlOjvk4fXxRv/Qm
dUTRDX6c3mAwKbD0rKysgIICI4uPjB2lbotcLD3q93mCw4wOcPn06EV261MupPjxt//79RPTqq6+KG+50IbS62nr19fuvD78xnEXRL9/vFxW/ON9wXqbN9Zg2SI6HgBSWm5ubmpoqbib39evXE9H+/fvlL3MouHTpEttxixvuvCFkjN3GzIbMxJJEVZ6K8mj+7+azicgrKysl3IrBYGCH9Wq12vlPAu3S2tp64MCB+fPnW1/I+fnnnw84aseOHUS0fft2BSocAhobG9n1VXHPczt7CK1K20vfqX5n9vzZ7Jvk4eGxcuXKrKwsx98nWVJSMnXqVCIKDg7Ozs6WpFonVFpa+s4778TGxgp5Ufzhw4eJaNWqVQoUNjSMGTOGiMSdwgyaEDLW+UCs75N0cD6Q//znP1JNGzSUFBQUENHkyZN5FzJoLFiwgIiOHTsmYuwgC6EVmw9k2rRpPdoPwucDsT0JXLdunVO9yZM7o9GoVqs9PDyG9sO7EnrjjTeIaNeuXSLGDtYQWvWYDyQgIIDNB9L/qObm5rVr19L/v5pp8M6+Ih92nUaOFzcOSX/961+JaNu2bSLGDvoQMm1tbcLnAykrK2PvtA4ICDhy5Ijy1Q4Ky5YtI6Kvvvpq4EXBYvnuu++IaOHChSLGDpEQWt25c0er1bKXLxGRp6dnUlKS7XwgZ86cCQkJIcWnDRp03nrrLSL66KOPeBcyONy7d4+IQkNDRYwdaiFk+poP5MMPP2R/smzZMl7TBg0Wer2eiDZv3sy7kMGhu7ub3evf2Nho79ihGUKrmpoanU43adIk6xmjSqV67733uE8b5PzOnj1LRLNnz+ZdyKDx3HPPEZGI593caEhjM4oVFxefOXMmLi7OYDCsXbt2x44drCkK/WBPBty5c4d3IYOG6E/MJb6LKpVq/vz5v/nNb9j/5l3O4DBq1Kjg4OAnT56wt/rBgFgIi4uL7R3oEiFk2GfEnoMGIWJjYwk/hoLhl3BgkydPVqlUJSUl3d3dvGsZHHBEaheEcGB+fn5hYWEdHR0VFRW8axkc2C+hiOMr1xQTE+Pm5lZWVtbV1WXXQBcKIWHXbid8XHbx8fGZMGFCV1dXeXm5XQMRwp5u3bp16NAhdCMIIbSfuE/MtUIo5PhKq9Vu2LCBPabp4qKiory8vCorK1taWnjXMjgghAMT8hlh92/l7u4eHR1tsVhKS0t51zI4iDuLRgh7XwbdCAa7JLvgl3BgYWFhAQEBdXV19fX1fS2Dr50t7JLsIu5atGuFUKVSDXgBesqUKWwBi8WiXGXOCtfr7TJ69OigoKCmpqaHDx8KH+VaISQBP3TBwcEjR440GAxokBKOC+zHdlt2/Ri6XAiFnDrjm2fFbjMqLi7GbUYCifjyuFwI0SC1i7+//9ixY9vb2ysrK3nXMjiIaJAihL3A7Vq2sEuyC34JBzZp0iR3d/fy8vKOjo6+lsHXzhY+DbsghAPz9PSMjIzs7u4uKyvraxl87WzhuMAuEydO9PT0rKysbGtrEzjE5UJIAjIWGRnp7e1dVVWF27UIz2Hayd3dfeLEiWazuaSkROAQ1w1hP98qtVrNbtcS/jkOYTgusJe9n5grhhBXKewybtw4Pz+/R48e9XObEdhCCAeGqxR2sd5mhNPCJ0+eCFnM3o/LFUMo5MY03K5lC7skIvrvf/8bFRWVmZk54JLWL5jANbtiCIOCgkJCQlpaWqqrq/taBl87W/gl/Pjjj5ctW9bQ0PDvf/97wIWttxmZzWYhK3fFEJKAjOGtULZceZfU3t6+ZcuWt99+22w2a7XajIyMAYcEBASEhoYajUaBtxkhhL3DW6FsuWwIHzx4MG/evIyMDD8/vy+++MI6l17/LBaLn58fEQl8GNqlQ4gGqUAxMTFqtbr/24yGnvPnz8+cOfPatWvR0dGXL19evXq1kFHNzc2rV68uKSlZsWLF4sWLhQxx6RCiQSqQl5dXRESEyWS6e/cu71oUsmfPnkWLFj1
8+HDp0qVXr179yU9+ImRUWVnZnDlzjhw5EhQU9Ktf/UrgthDCPqEbYct1dkkdHR1bt259/fXXu7q6UlNTv/322xEjRggZePz48eeff76wsDA2NvbChQsvvfSSwC26aAgjIiJ8fHyqq6sNBkNfy7jO104IF/k0ampqFixYsG/fPm9v74yMjN27d6vVaiEDd+/evXz58sbGxqSkpCtXrrCPSyiJp4caPNh899euXetrgaqqKiIKCQlRsiqn9emnnxLRpk2beBcio4sXL4aGhhJReHh4P1+MHoxG46ZNm+j/p14XMeue64Zw3bp1RHTw4MG+FjCbzQEBAUT0+PFjJQtzTjk5OUT0/PPP8y5ELgcPHvTx8SGiuXPn9jrLeq+qqqpmzpxJRP7+/qKnFnfRw1ES0CBVqVQxMTHkAsdgQgzh91+ZTKbt27dv3LjRaDRqNJqTJ0+OGjVKyMCcnJyZM2fm5uZGR0dfunTp5ZdfFleA64ZQyI1pLnIiJMRQff/V48ePlyxZsnPnTi8vr3379un1eusU6/3bs2fPiy++yNqn165dE9g+7ZXrhlDIY3J466atobdLunHjxqxZs06dOhUWFnb27NnXXntNyKiOjo6UlJTXX3/dZDJptdpvv/02MDDQkTJcN4SxsbEqlaq0tNRkMvW1zND72jliiH0a//rXvxISEioqKubMmZObmzt79mwho2pqaubPn5+enu7t7X3gwAGdTiewfdoP1w2hr69veHh4Z2fnvXv3+lpmiH3tHDRkLpx2d3dv37791VdfbWtr02g0p0+fHjNmjJCBFy9enDlz5pUrV8LDw8+fP//zn/9cmoLE9XOGBnY5NSsrq68FOjo63N3d1Wp1e3u7koUpqbOz8w9/+ENjY+OAS7IHCF566SUFqpJPfX09u5vM3d1dp9MJH6jX6z09PYlo3rx5wtunQrh0CFNTU4noT3/6Uz/LTJo0iYgKCwsVq0pJDx8+nDt3LhG9/PLLAy7MbkceP368AoXJ5ObNm1FRUUQ0cuTI06dPCxzV1dWl1WrZj5ZGo+ns7JS2KpcO4T/+8Q8iSklJ6WeZFStWENEXX3yhWFWKyc/PnzBhAhGFhYVdvnx5wOVNJpO3t7dKpWpublagPMllZWWxC7/x8fEVFRUCR9XV1S1cuJCIvLy80tPT5SjMpUN48uRJIkpISOhnmbfffpuIPvzwQ8WqUsahQ4eGDRvG/vq1tbUCR02dOpWITp06JWttkjObzdankNavX9/W1iZw4PXr1637qStXrshUnkuHkD1ZHxQU1M8ye/fuJaKNGzcqVpXcWGPdenDV0dEhcOCFCxf8/f3Hjx/v4eGRlJSUmZnZ1dUla6mSMBgMq1atIiK1Wm3XSaC4/ZQILh1Ci8XCrvA8evSorwXOnz9PRLNmzVKyKvnYtiV2794tfKC1LTFu3DhrU378+PFpaWn37t2TrV5HlZSUsF/v4ODg7OxsgaNE76fEcfUQsqtD586d62sB9p4/f39/s9msZGFysLYlQkJCHGlLVFdX63S66Oho9odubm4JCQl6vb61tVXO8u129OhRtpOdNm1aeXm5wFH19fWJiYki9lOiuXoIN2/eTER79uzpZ5mQkBAievDggWJVycHxtsT+/ft7/Nfc3FyNRuPr68vSOHz4cI1Gk5eXJ3Hp9rM9CVy3bl1LS4vAgQUFBSL2Uw5y9RB+9NFHRPTrX/+6n2VYE//EiROKVSUtudsSTU1Ner0+ISHBevF56tSpOp2urq5Oor+BfZqbm9euXUtEKpUqLS1N+CFMZmYm26HEx8ffv39f1iJtuXoIv/rqKyJavnx5P8u8++67K1asuHDhgmJVSUjJtsStW7e0Wi07cGA/nsnJydnZ2UoeyZeVlT377LNEFBAQcOTIEYGj2H5KpVIR0YYNG4TvpyTh6iG8ffs2EU2cOJF3IbLg0pZob2/PzMxMSkqy9m/GjRun1WqV6d/8/ve/J6IpU6YUFxcLHGIwGFauXCn
iHhqpuHoIOzs7PTw83NzcjEYj71okJq4t8fjxY9aW8PT07P9UeUAPHjzQ6XTsFIv1bxITEzMyMmT9nTGZTO+///6TJ08ELl9cXMwelQwODuZ1xuHqIbRYLOy+5Js3b/IuRDKOtCUiIyOlbUt0d3fn5ORoNBp2cEtEI0aM0Gg0+fn5kqzfEdb9VFxcnPD9lOQQQgs7FDl8+DDvQqTR3Ny8Zs0aEW2Jw4cPs7bE9OnT5WhLNDY26vX66dOnW/s3M2bM2LVrV319veTbGpDtfuqVV14Rvp+SA0JoYec/O3bs4F2IBMrKyp555hknb0sUFhZqtdrg4GAWRW9vb4X7N6L3UzJBCC3p6emsd8+7EEedOXOGdSZjYmJu374tcBSvtgTr3yQmJrLwE9GkSZPS0tLkvjZgu5/65ptvZN2WQAih5eLFi+wYjHchDtHr9e7u7uxyS1NTk8BRztCWqKys1Ol0ERERtv2bzMxMyZ8Yslgsx48fZ2/yjYmJKSoqknz94iCElsbGRiLy9fXlflgijtFoZPf92Pvey6NHjw4fPpx7W4Lp7u7Ozs5OTk5md6gS0ZgxY1JTUyVsmInbTykAIbRYLJbRo0cTkZI3SUilqqpq1qxZROTn5/fll18KHNWjLeFU93w2NDTo9fq4uDjb/o1er3fkIUbR+yllIIQWi8WyYMECIlq7du3333/PuxY75OTksN1HdHS08MqtbQl2D43T/v7n5uampqYGBQWxKPr4+Ijr34jbTykJIbRYLJbKykr2Vny+fXO7WN+QuWTJkoaGBoGjnLAt0T+j0dijfxMbG6vT6X744Qchw233U077jhKE8H+4982Fa29v37p1K6szNTXVZDIJHHjs2DHWloiNjXWetoRAxcXFaWlp48ePZ39xtVrN+jf9PFhs3U8tXbpU+H5KeQjhj/DqmwtXXV39wgsvsN1ERkaG8IFO25awi8lkYv0b63uyQ0NDtVptaWmp7WLt7e0pKSki9lNcIIS9U7JvLlxeXh77KRg3bpyS0wY5odra2l27drEHJmz7Ny0tLaL3U7wghP3pq29eUFCgfDG20wYJPCOy/LgtIXraIKdlNpvPnTu3efNm2weL2XWXiIgIZ7g9VQiEUBDWN3/uueck7Jvb5f3337ceXAl/vdK5c+ecvy0hiSdPnmRkZLDnP5YuXWrX9GbcIYT2kapvbq+CgoKgoKB9+/YJHzJY2hLSKiwsbG1tHRSvgbNCCMVwsG8ujsFgELiktS3BTgKdvC0BCKFDRPTN5Wbbljhw4ACvMkA4hFACAvvmCrCddT03N1fhrYM4CKGU+umbK7D1AwcOeHt7kwzTBoGsEEJZsBdy+vn5sSgGBARoNJqcnByZNif3tEEgK4RQRgaDgfXNrf2bKVOm6HS6ft66L4IC0waBrBBCJRQVFWm12lGjRrEoenp6SjWhir3Tm4ETQgiVY+3fsHs4WXK0Wu3du3fFrVCxaYNAVgghB45PqKLwtEEgK4SQpx4TqgQGBgqZUEX09GbgnBBC/vqaUOXx48dPLyxuejNwZgihExlwQhVx05uBk1NZLBYCZ9LR0ZGVlXXgwIHvvvuuu7ubiMLDw9evX09Ef/7zn81m8/r16/fu3csea4IhACF0XtXV1Z999tmePXvKy8uJyN/f32g0fvzxx2+++Sbv0kBKCKGzM5vNJ06cSE9P37Bhg6+v76JFi3hXBBJDCAE4c+NdAICrQwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEM
IAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOEMIAThDCAE4QwgBOPs/oRHd0bgFC0kAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAANGElEQVR4nO3dTWwU9R/H8V8rBUHxCRQRAR9ABapSrPWhKgokCm4TLz32pOFkWg4mxQM28aA9rhoPJF5qTEwIXrYFTQpSHqpYBHxqC5UKVusTQi0taFvb/R9+yWTS9b/dh5n5zOy+XycTt9Oh7Xt39jszvy1JJpMGgE6pegeAYkeEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaECKn+/v6JiQn1XgSBCBFG4+PjW7ZsKS8v37Nnj3pffEeECKMff/wxmUz29fXFYrGampq+vj71HvmICBFGK1eu7O7ujsfj119/fVtbW3l5eUNDw/DwsHq/fFGSTCbV+wD8XxcuXHj99dfffffdycnJBQsW7Nix4+WXX77qqqvU++UlIkQEnDx5ctu2bYcOHTLGVFRUxOPxp556Sr1TniFCREZra2tDQ8PZs2eNMbFY7O23377zzjvVO+WBontPODAwcOTIkd27dxfJ+DsqJiYmdu/enf4xNTU13d3dzc3N8+fPb2trW7Nmzfbt20dGRoLZQx8li8bly5ebm5uvvfbahQsXGmNWrly5a9cu9U4hmUwm29vb16xZY4zZs2dPJo//+eef6+rqSkpKjDG33XZbS0vL1NSU3zvpn6KIcGpq6sMPP1y6dKkxpqSkZMOGDStWrLDPQZs3b+7t7VXvYPHq7e3dvHmz/V3ce++9Bw4cyPxru7q6HnvsMfu1Dz/88Geffebbbvqr8CM8fvz4k08+aX9V69atO3ToUDKZHB8f37lzp31JLCsr27p16/nz59V7WlyGhoYaGxvnzJljjLnhhhuam5v/+eefbDcyNTXV0tJy66232qfXurq6X3/91Y+99VUhR/jnn3/W19fbcfaCBQvi8fi///7rfsCFCxecB9x0002pD4AfJicnW1paFi1aZIwpLS2tq6v77bff8tng6OhoU1PT1VdfbYy55pprmpqa/v77b6/2NgCFGeH4+Lg9z2tf6Orr6//666//9+Cenp5nn33WvlSuWrXqk08+CXJXi01HR8eDDz5of9rr168/efKkV1s+c+ZMbW2t3fLSpUtbWlq82rLfCjDC9vb21atX21/Gpk2buru7M/mqRCJx11132a+KxWL
9/f1+72ex+emnn5xpyu233+7TNGX//v0PPPCA/T0+88wzX3/9teffwnMFFeGpU6e2bNnivMvPcNTmGBsbi8fj8+fPN8bMnj27vr7+0qVLPu1qUbl8+bJzuDhv3rympqYrV644//ejjz5qa2vz8NvZw92bb77ZOdz9448/PNy+5wokwosXLzY2Ns6ePdsYc+ONNzY3N4+NjeW2qcHBwa1bt5aWltrx986dOycnJ73d2+IxNTW1a9eu5cuX28FJbW3tuXPn3A+4dOnS4sWLjTEbN2789ttvPfzWHv5J+C3yEaY+7f3+++/5b/bYsWOPP/64fVGtrKzs7OzMf5vF5ssvv6yurrY/w4ceeujw4cOpj5mYmHDG1LNmzfJ8TJ3nwVEwoh3hp59+6t8bAPssvmzZMudZ3N5fgxn98ssvztHE4sWLZzyaSB1TT0xMeLg/7e3tq1atynZMEJioRjgwMFBXVxfAKMw9/rbvZ6I1/g6YfV993XXXOXPp4eHhDL/WPaa+7777Pv74Yw93LKuBecCiF6HkpFBgzUfatAnzmTNn8t+It2PqGU8dS0QpQnt86Fx9VltbOzAwEOQOuI9+n3766a+++irI7x5mvb29zz33nFcvYu6XUzumzvzlNBP/eRGVUGQinHahoGpSYudAt9xyi7dzoOiyb+dmzZrl+ds595g6kzeW2UokEnfccYfzknv27FkPN56VCEQ4ODjovmQ+DOcM7HWPdvxtr3sM7fjbP34PNq1jx465R6xHjhzxcOPOjTXGmLlz5zY2No6MjHi4/QyFOsIrV67Ym8fCefb89OnTzz//vP37uOeee7w94xxy+/btKy8vt//2jRs3fvPNN/59r9Qx9bSTjXly3xi1ZMmS4G+MCm+EiUTCuW86Fov98MMP6j36b9Oukvvuu+/Ue+Svvr4+5xLNFStWBHZPZuplN94O5L744otHH33U/ruqqqo+//xzDzeeXhgjPHHihLOCyNq1azs6OtR7NIMwj789ZOfS9uYjO5fO4eajPLnH1J5fgGrf8Ad/Y1S4InRPkCN3b1E4x9+eSL35SHvb3oEDB5xbMTwfUwf/XBOWCFNfTIaGhtQ7lQv3y3hFRcXBgwfVe5Qv4XFaGn6Pqb///vvAjrpDEaGzxEjBvK2aNv4O7Rva9OQTixl5cnt+Gvv27bv//vvt73HDhg0+zZ/EEZ4+fToWizkDxtbWVu3+eMg92rXj71CNdtNzz+7nzZunmt1nyNe/otQzMZ7fGCWL0O/nsJAI/4tJqvCcxc6Kr8dT/l2TkJREWIQXnURlXbCwXc+VLb8nC9Ouztu7d68nmw06Qvdca/369cVz+WXI1wU7f/58wYx2/Z6xJxKJu+++2zlSyO06dbfgIrRnePxeYiTkQrgumH31cN98VBgnOX0925z6Q8vnEvMgIvT7WofICXL8nV7I73bNn6/XXaXeu5zbS66/Efp91V+k7d+/3z3+DnhdsEis++AJv69AzmQVj/R8jNDX698Lgx1/2wVyfBp/p4rQCkge8vVenBnXs0rPlwizXWKkyF28eNEZf/tahbv5SKwF6Dlf70q1b7vmzp1r/mtlxzQ8jtDve6IL2KlTp9wfjeL58WEUV8X1Q+r6DN6u35XDGsdeRsgi1vlzj783bdrU09OT/zbdc6Bly5axQE7S/zF1R0fH2rVr7c/8kUceOXr0aJoHexOht0uMFDkPx98hPCMSKr6u3+W+9eS1115L88h8I/T1cp5i5j57vnDhwmzPOIf82oBQ8XX12qGhoR07doyOjqZ5TO4RBrPESJE7fvz4E088ke11ZFG5Si48fFrHPUM5RjhtiRFvP0UA0yQSCTv+NjNdUV1gnyMdMNXJm6wjdC8xwse+B2bGe4sifedUqAR/GUMWEY6MjMiXGCly7vG3+8aoqCyKFSG5fcplbjKN8P3333duPnrppZfy/Hxj5OPgwYMVFRX276OysrKystL+d2GsphEega3flWmEb775pgnTEiN
Fzhl/l5WVzZkzJ+o3H4VZAOt3lSSTSZOBsbGxvXv3vvDCC/ZYCGEwPDzc2dlpjKmurrZP2PDJiRMntm3bdvjwYWPMunXr4vG4c/dz/jKNEEBra2t9ff25c+eMMbFY7J133nHWAclHaf6bAIpETU1NT0+PHVO3tbWtXr16+/bto6OjeW6WV0Iga4ODg6+++uoHH3yQTCaXLFnyxhtvOFPrHBAhkKOurq6GhoajR48aY6qqqt566y1nleSscDgK5Kiqqqqzs/O9995btGhRV1dXdXX1K6+8ksN2iBDIXWlp6Ysvvtjf39/U1FRWVpbbnIbDUcAb/f39y5cvt3cUZYUIATEORwExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAGx/wFmd1nIWWiPaAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAALz0lEQVR4nO3bS0hU7x/H8aOOlXa/ancrKYuKyrGsiKJcRAWtqlXL3CXlRpBoNhYuZxVIkAmtXNqFYrphkWWYFV3VLlZ2z6zUMp05v8VDh0FLxzOXD3/+79fuRzPzzOV5fz3z6C/Jtm0LgE6y+gkA/++IEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExDzxXuDbt29lZWXp6ek3b948evRobm5uvFfs5/bt28nJyS0tLbt3705KSkrw6r29vcFgsK+vb8yYMQleGv8z7Ljp7e09duzY1KlTLcsaMWKEZVnJycn79u37+PFj/BYN9+bNm8LCwuTk5Dlz5liW5fV66+rqErO0EQgEli5dumfPnsmTJ/v9/mAwmMjVv3//XlpaeurUqeLi4o6OjkQubTQ2Nra2tj58+DDxS9u2HQwGE/yGuxavCAOBwPLly03n+fn5ly5d8vl8I0eOtCxrwoQJfr+/t7c3Tkvbtt3Z2Xn48OH09HTLskaNGrVz587MzMxEToF79+4VFBSYlz9p0iTnfaivr4/30rZt9/X1HT9+3Lzk8ePHW5aVkZFRWVkZCoUSsLpt258+fSoqKkpJScnNzfV4PEVFRQmeAhcvXly+fPn+/fs3bdp0//79RC5t23ZXV9eRI0fa29sjvH3sI3zy5MmOHTvMtpszZ05VVZXz2Tc1NW3bts38U05OzoULF2K+eigUqq6unjt3rlllx44dz549s227s7MzMVPg8+fPZv9ZljVx4sTy8vJfv37V1NSYn8ZJSUl79+59//59PJY2Ll++vGLFCvPy8/LyKisrN2zYYP4zNzf3xo0b8Vvatu3u7u6ysjJz7T1ixAiv12veioyMjBMnTiTgR9OjR4+cPWaeRmpq6oEDBxIzBcK3X3FxcYT3imWEX758KSoq8ng8ZqOb/TfwZjU1NfPnz3ciefnyZayeQF1d3dq1a80je73e2trafjeI6xTo6enx+/3mJ09qamphYWH4j9x+U6C8vLynpyeGq9u23dTUtGvXLvPqZs2aFT7+EjAFzP6bN2+eeQIFBQXmQrSxsTExU+DLly8lJSXmi8+YMWN8Pt+7d++cgZiAbwTXrl3zer2DbL9/iU2Ev3//9vv9EyZMsCzL4/EUFhZ++PBhkNub/WoGVXp6us/n+/nzZzRPoLW1de/evebcZebMmRUVFYO83fGYAuGPWVBQ8ODBg7/erKmpafv27eZmixYtOn/+fPRL27bd3t5eUlJiCh89erTP5+vu7u53m66urvhNgfr6+vXr15vXtXLlyitXrvS7QVynwO/fvysqKqZMmWK+cfR7/ARMgVevXjnbb8aMGRUVFX19fZHfPQYR1tTULFiwwNl
/kV+Cv3nzxnnqCxYsqK6udrH6jx8/fD7fqFGjTM8lJSU/fvwY8l4xnAK3b992PuPFixefPXt2yLv0mwIvXrxwt7T9Z/+Z0y+z/969ezfI7Zubm2M7BV6/fu18iNOnTx9k/8VpCgQCgSVLljjb7969e3+9WZymwMDt9/379+E+SFQRNjQ0bNy40bm6O336tIsHuXLlyrJly8yDbNmyJfLDtN7e3oqKimnTppl3dteuXa2trcNaOsop4Jy+WpY1ZcoUv98f+fwzU2Ds2LHRTAFz+mreus2bN9+9ezfyO+bk5EQ5BcwFdlpammVZaWlpJSUl3759G/JeMZwCd+7c2bRpk/NQQ358sZ0CwWCwqqoqIyPD2X6uL6lcRtjW1lZYWBh+tR3NOYfJafLkyebbVFFR0ZAfZyAQcNLNz8+P5ncPLqZAV1dXeXm5c/wQyRP+K9dTIPz4YeHChS4uIsw3CHdTwOw/c/pq9t9wMw4EAosXL3Y9Bd6+fetsv0mTJg0rp+bmZufg0PUUMKev5kHWrFkT5SXusCM0+898eCaYWJ07mXMd885Onz49/Fwh3OPHj503MTs7291FbD+RTwFz/GAubMwGev78eZSrX7161flEh5wCfz19db10+BSYPXt2VVXVkHe5dOmSc/q6evXq69evu1s6fAqkpaVFOAW6u7vLy8vHjRvnfFJfv351sbrrKfDkyRPn9Kvf4b9rw4hw4Ol/S0tLlMsP1NDQsG7dOuczvnXrlvNPZv8Nefrq2pBToK6uLj8/3zy3YR1/DclMAXO08K8pMPjpazT6TYFBTpX+dfrqWltbW4RTwGy/rKwsZ/s1NzdHs/Rwp8DA09coTxMdkUZYW1sbfvx67dq1mCz/V6FQyLnaNocNbW1tzv6L5PQ1GuFTIC8vz0yB5uZmZ/8Nefrq2iBTIMLTV9cGnwIDT19jtf+M8CmwefPmga/u1q1bzunrqlWrrl69GqulI5kCg5++Ri/SCIuLi60/x6+J+Wugjo6OAwcOpKamWn9+62pZ1rZt2x49ehTvpYPB4IkTJ8wUSElJ8Xq9zvwrKysbePofW/2mwMmTJ8NPX8+dOxe/pQdOgZ6enmGdvrpmvmcOnAJRnv5HaJApUFNTk52d7Yy/f52+RiPSCNvb248cOdLV1RXzZzC4p0+fbt269eDBgzk5OWfOnEnk0ub0z/zZh9l/b9++TczSoVCosrLSTAHz8z8zM/P48ePx2H8D1dfXO1fdzt/cxWn/9fPx48d9+/aZA+fMzMydO3c6p/+HDx/u7OyM39IDp0Btba1z+B/J6atrcfwD7hgS/jHuw4cPW1tbGxsbE790R0dHcXHxqVOnSktLXfz2KRrmG0FmZuaePXvcnb5Go6GhwVx8ZmVluTt9dS18CpjLn6lTpx47diyuf+qcZNu2BfxNZ2enx+NJSUkxXwoSybbt6urq7OzsUCiUl5eX4NUbGhpKS0vz8/O7u7sPHTpkLkbihwgBMf7PekCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIw
IATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQOw/GzEN2YaRj0wAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAQEUlEQVR4nO3df0yT+R3A8bb8kqIiisqvU/yFCgqKovj7VC7nD87kkrlk8dgZs5nFPzBZspFtf+DlloVbbglZFhOTRQ43l51Zsl0V73Ic6vlb/AmegsjJqYAcKqhYECjt/ujWI33aWkqffp6279dfD1jgQ+0bHtrn+zx6m82mAyDHID0AEO6IEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhICwSOkB4Gf5d/IvmS+5/KeK6RU7J+0M7Dh4PX4TAsL4TRhqZsXMejn00vHmgG3gbv9dwXnwWkSoLVad9WTPyc+efXbOfK5jsOOJ5YnRYJwaNXVV3KofJfxo0/hNep3e82c4nH54+JvN/c1zbs1Rc2SMFhFqSK25ds/DPVd7rw5/58DQwLOhZ3de3Tn49OBi4+JPpn+SHZstNSHUwN+EWvGvZ/9a2bTSqUAn13uv59/Jr+mpCdhUCAAi1IQTPSd+0vKTIduQ/c1p0dM+TPnwbMbZ5qzmG/Nv/PmNP6dEpdj/qc/a9+69d5v7m+WGhZ+xOypv0Da45+Eei81if7MwvvBw+uHxEeMdN8iJzfnpxJ/ufrD7SPcRnU7XM9Sz6/6u0xmnZcaFv/GbUN5fn/z1zqs79u2sMVmfzvh0eIF28RHxf0//++qxq+1vnnl5xvTcFNApoRoilPe3rr85tv+Q+gejwejyZlH6qEPphyL0ETqdLlofXd9XH6D5oDJ2R4W1DbZdNF+0b6dGpb4T/46HG8+InvGbqb9JjEx8b+J7kyInBWRAqI4IhdWaa206m337rfFvvfZlwA9TPlR/KAQUu6PCbvbddGznGnMFJ4EUIhTW1N/k2J4ZPVNwEkghQmHPh547tidHThacBFKIUNiLoReObXfPiyK0EaGwflu/YzvaEC04CaQQobA4Q5xju8/aJzgJpBChsOEHx5itZsFJIIUIhU2JnOLYfjjwUHASSCFCYQtjFzq2WRsRnohQWE5sjmP7gvmCNx9yuOtwz1CPahMh0IhQ2LK4ZY4/C0/1nHrtczPHnh9777v3km8m//zBzzstneoPCNURobAYfcy7E961b5ut5oNPD3q4scVm2fdon/2W/37273GGcQGYEGojQnm/SPyFY/v3Hb/vGOxwd8tftf3Kcf6LX0/9dawhVvXhoD4ilJcfl79j4g77dsdgx7Zvt7UNtilv9sfv/1jeWW7fnjdm3t4pewM3ItTEUiZN+FPqn86/PN8y0KLT6S73Xs5pyPnZpJ9tid+SFpVm0VmumK/85fFfHE/bGA3Gf6T/I0YfIzoy/EZvs9mkZ4BOp9Pd67+3rmld62Cr55sZDcb/zPzPW+Pfcnr/Z88/+13b75S3H37y39So1AkRE5xusCB2wT9n/NPXqeEH/CbUipkxM+sz63/Z+stPnn7i7jZLjUsr0yszx2Qq/6nb0n3r1S3PX6JtsE25ozvGMGbkw8KfiFBDEiISKqZX/Dbpt592f/rliy9bB1o7LB0RuoikqKSVcSt/nPDjrfFbpWeE/7E7GsSm1P9wyFtnNq8ZBit+Ewaxx5bH0iPAD3iJAhBGhIAwIgSEESEgjAgBYUQ
ICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSERUoPAN9l6DOkR4Af6G02m/QM8JFer3ds8/8YvNgdBYQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRAgII0JAGBECwogQEEaEgDAiBIQRISCMCAFhRBisjh49OvzNQ4cOSU2CUSLC4NPY2Lh58+Zt27bpdLrY2Fij0ajT6d5///0NGzbU19dLT4cR09tsNukZ4K2urq4PPvhg//79FovF5Q0MBsOOHTs+/vjjKVOmBHg2+IwIg4PFYtm/f/++ffu6u7uHv99gMOh0OqvVOvydCQkJ+/bt27NnT2RkZECnhE/YHQ0CNTU1ubm5e/fudSpw48aNdXV1DQ0NW7duHf7+7u7uvXv3ZmVlVVVVBXZS+MQGDbt79+727duV/2tz5sw5cuTI8FtWV1dnZWUpb1lQUHDr1i2p+eENItSoly9flpaWxsTEOEU1duzY0tLSV69eKT9kYGCgvLw8Pj7e6UOioqKKi4ufPXsW+O8C3iBCzRkaGqqsrExKSnJqyWAwFBUVdXR0eP7wJ0+eFBcXR0REOH34pEmTysvLLRZLYL4LeI8IteXSpUv5+fnKvcply5ZduHDB+89z7dq1tWvXKj/P4sWLv/76a/Xmhw+IUCtaW1uLior0er1TNmlpaZWVlVar1YfPaTKZZsyYoUyxsLCwpaXF398BfESE8sxmc1lZ2dixY51SMRqNJSUlPT09o/nkvb29ZWVl48aNc/rksbGxJSUlL1688Nd3AZ8RoTCTyZSenq72Lyt3v2ZTU1N9/jULfyFCMVevXl2zZo0yv9zc3DNnzqjxFWtra1esWKH8inl5eefPn1fjK8IbRCjA3ROYiYmJaj+BabVaXT71qtfri4qKHj16pN6XhjtEGFCeX8p7/vx5YMawvwg5ZswYpzHi4uJKS0v7+voCMwbsiDBwqqurMzMzlXuDBQUFt2/fDvw8zc3NLg/HmT17ttPhOFAVEQZCY2Pjli1blA/3uXPnVlVVyc5WU1OTnZ2tnM2+MEp2tjBBhOrq6uoqKSmJjo52eognJCSUl5cPDg5KD2iz/f8YncmTJzsNGRkZuXv37s7OTukBQxwRqiXoHtlB8fMiJBGhKk6cOOFuH6+urk56Ok887DkfP35cerrQRIR+dv/+/aKiIuWDOLie7aiurp4/f77yuygsLGxubpaeLtQQod+E2PP+Gnk1JRwQoR+E8CvggscVhA8iHK1wOBYs8EfYhRUi9F1bW1tYHRXt4Vjz7777Tnq6IEaEvgjb9UGqrroKW0Q4Yh5Wyt67d096ukBQY/1xOCPCEeCcEcO5OxPH8uXLR3QmDhChVzh7kkv2o4KmTp3qdLd4eU4q2BHha3Aewdfy4eyMGI4IPeGMut7z/jzFcEKErt25c8fp3PJ2GRkZx44dk55Ou7766quFCxcq77eNGzfevHlTejqNIkJn3d3dLhcTTJgwoaysrL+/X3pArRscHDxw4EBiYqLTHWhfPvL48WPpATWHCH9gf5pBeVE
x+9MM33//vfSAweTp06fFxcXKy0JNnDiRhVFOiPB/Tp48mZOTo9yPWr9+/Y0bN6SnC1YNDQ2bNm1S3qvz5s37/PPPpafTCiK0PXjwwOXiozfeeKOyslJ6ulBgMplmzpypvIcLCwu//fZb6enkhXWEZrM5lBYfaVl/f395efn48eOd7uro6GgWRoVphFar9ciRI9OmTXN6TOj1+u3bt9+/f196wNDU3t6+e/du+9WFh0tOTj5w4MDQ0JD0gDLCMcLLly+vXLlSuXe0dOnSc+fOSU8X+q5cubJq1Srl/b9kyZKzZ89KTycgvCJsa2tz+ZM4JSUlnH8SB559T2T69OnsidjCJ0LPf5OE8OIjLbP/TR4bG+v0n2I0GsPqb/KwiJBn57Ts4cOHYb4wKsQjvH379ttvv63Mb/78+V988YX0dPjBqVOnFi1apPyfevPNN0P+ddqQjdB+xIZy8ZH9iI2wXXykZZ4XRoXwEUshGKG7YxejoqI4dlH77MfuKhdG2Y/dDcmFUaEWYXV19YIFC5R7NQUFBd988430dPBWU1NTYWGh8v8xIyPj6NGj0tP5WehE2NTUxHq2EONhPWco/UgNhQh7enpcruwO4R2Y8DEwMODuj4vi4uLu7m7pAf0guCPkHCdhIrSfZgviCC9evLh8+XLlvsq6deuuX78uPR38z90LTosWLQrqs90FZYS8vBvOQu+8r0EWoYczQJeWlvb29koPiECwH4QYMmdAD5oIPR/yy7UQwlDIHI4fHBFeuXJl9erVyj2QJUuWcFWgMOduYVpeXl6wLEzTeoT2ZaDKp8XCfBkohvO8RPvBgwfSA76GdiO0n/paufiIK8XCpeC9UrJGIzSZTLNmzVLuY3DNdHgWjKft0lyEDQ0NmzdvVt6JnCQP3jtx4kR2drbyUbR+/fq6ujrp6ZxpKMKuri5OFwt/CaJTOWsiQvvio8mTJzvdX/YTp3d2dkoPiGDV1dXl8qIGCQkJ2rmogXyENTU17i4hUl9fLz0dQoG7y/vMnTu3qqpKejrRCN1dTGv27NksPoLfVVdXZ2ZmKh9v4he6k4nQ3WUl7c8ms/gIKtHmJV8DHaHVaq2srExKSnK6F/R6fVFR0aNHjwI8D8KQ1i5+HtAIL126lJ+fr9wfWLZs2YULFwI5CXDt2rW1a9cqH425ubmnT58O5CQBirC1tdXl4qPU1FQWH0GQyWRKT09XplhYWNjS0hKYGVSPsLe3193io5KSkp6eHrUHADyzP0TdLYwKwENU3Qi18GMG8IbgzppaEV69enXNmjVa2OEGvFdbW7tixQrl41bVpy38H6HWnnoCRiTwT+D7M0JtvggD+MDzwij/vpTttwg1ezgC4LPAHNTlhwgbGxu3bNmiHFQjB+YBo+Tu8OYNGzb45fDmUUUYFIeoA6On6kIfHyP0vFiLxUcISe6WvCYkJIxmyasvEQbXsmXAvxobG92d/OH48eM+fMKRRejuBB7Tpk3T7Ak8ADX48TRI3kYYvKeyAlTirxMCehVhU1NTcnKy8s+/nTt3tre3j+K7AIJee3v7zp07lScCT05Obmpq8uYzOH+kS7NmzUpLSxv+nry8vLNnz1ZUVCjjBMJKcnJyRUXF5cuXnU4Sn5SU5HJ/1QUvcz937pz92NaUlBQWHwEumUwmx+VSvL9am95ms3lZ/K5du9LS0kpKSuLi4kbwgwIIJ2az+aOPPmptbT148KCXHzKCCAGowau/CQGohwgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsL+CzS9g4m0MVi5AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAUb0lEQVR4nO3deVRUdRsH8GcEAdkEcxe0UhkBBQlQcyuXPJSpuSUhLriCpcwgi5CkiIEYKumrkR6XNKPXTI9keswM8i2jPKIJgkCKKygqsgrCMPP+MYbpocKBmefey/dz+IPjzNz7rePX586dO78r02g0BAB8WnEHAGjpUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzFBCAGYoIQAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzFBCAGYoIQAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzFBCAGYoIQAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIDZM5awb1+SyejVVxt46KefSCYjmYy+/LKBR8vKKCaGBg6k554jMzPq3p18fCglRYfEABJjbIid/P47eXnRrVuP/+T6dUpKoqQkWrSI/vMfkskMEQNAkPR/OHr3Lr32Gt26RUZGpFRSWhplZ9OXX5KzMxHRli0UFaX3DAACpv8SRkbSnTtERNu20fr1NHAg9elD06ZRWho5ORERxcbS1at6jwEgVHouYVUV7d1LROThQX5+TzxkaUlr1xIR1dTQZ5/pNwaAgOn5PeGpU1ReTkT09tsNPOrlRTY2VFJCR4/SBx+QWk03bug3z5Ouy2QajcZgu7Ozs2vVCqej4Wl6LuG5c49+8fBo4FEjI+rfn1JTKSODNBoqKaEePfSb50k9iAxXQaLExMTJkye3b9/egPsEHklJSSNGjOjcuXNjnqxTCX/8sbHnMwsKHv3SvXvDT9C2rrKSSkvJyIjs7XXJoyt7A07CkpISf3//33//fcuWLYbZI3DJysqaOXOmlZXV5cuXbWxs/vX5ep6E2mNRIrKwaPgJlpaPn2lvT9eu6TfPkwx5Oig7O9vV1XXr1q3+/v4uLi4G3DMYWlBQkEql8vHxaUwDSccTMx4elJHx9M/u3Q1t/s/t/93AUauffqZEOTo6BgQE1NXVKRQK7iygR4cOHTp27Jitre3KlSsb+RKd/upbWFDfvk//vPBCA8+0snr0S1lZw5uqqHj0i7W1LklEZeXKle3bt09JSTl48CB3FtCLmpqa0NBQIoqKimr8m389z5/6t4JXrjT8BO0nhLa2j+sqXba2tlFRUUQUHBxcXV3NHQeaX0JCQm5urqOjo7+/f+NfpecS1r/5+fXXBh5VqejsWSIiNzf9xhCMhQsXuri4XL58ecOGDdxZoJkVFRXFxMQQ0fr161u3bt34F+q5hC+/TLa2RET//W8DbwuPHXt05mbcOP3GEAwjIyNt/WJiYgrqTx2DJISHh5eWlo4fP97Ly+uZXqjnEpqY0OzZRERZWbR58xMPVVdTRAQRkaUlTZ+u3xhCMnLkyIkTJ1ZUVERo//NBEs6ePbtr1y4TE5OPPvromV+seSbOzhoizSuvNPDQ//6nIdIQaZKSnvjz4mJN164aIk2rVprAQM3p05q8PM3Bgxp390fP//jjZ8sgfpcuXTIzM5PJZGlpadxZoHkMGzaMiEJDQ3V4rf5LqNFoMjM1dnaPHv3rj0ymiYjQIbQEhIeHE9GgQYPUajV3FmiqL774gog6duxYUlKiw8sN8umcszNlZdHq1eThQTY2ZGJC3buTry+dOkUffmiIAMITERHRtWvXtLS0vdoL3EG0qqqqtP+kxsTEtG3bVpdNNPu/CtBIO3fuJKJu3bqVl5dzZwHdrVi
xgojc3Nzq6up024LEr1MRslmzZg0YMODmzZtxcXHcWUBHN27ciI+PJ6KEhASdvyKDErKRyWQff/yxTCaLj4/Pz8/njgO6CA4OrqysfOedd4YPH67zRlBCToMGDZo+fXp1dbX2WicQl1OnTu3bt69NmzaxsbFN2Q5KyGzNmjUWFhb79+9PTU3lzgLPQK1WKxQKjUYTGhrao2nfg0UJmXXr1i0sLIyIFApFXV0ddxxorB07dpw+fdrOzi4kJKSJmzLo+g7QoOrqakdHxytXriQmJi5cuJA7Dvy78vJyuVxeWFiYlJTk7e3dxK1hEvIzMzNbu3YtEUVERBQXF3PHgX+3atWqwsLCwYMHT5s2relbwyQUihEjRqSmpiqVyvXr13NngX9y6dIlZ2fn2tratLQ0T0/Ppm8Qk1AoEhISjIyMNm3adOHCBe4s8E+USuXDhw/9/PyapYGEEgqHq6vr3LlzVSqVUqnkzgJ/68SJE998842VlVV0dHRzbRMlFJDVq1fb2NgcP378yJEj3FmgASqVSrtEUGRkZJcuXZprsyihgHTo0CEyMpKIAgMDHz58yB0HnrZly5bMzMyePXsuWbKkGTeLEzPCUltb269fv5ycnHXr1gUFBXHHgceKi4sdHBzu3buXnJw8rlnXgsAkFJbWrVtv2rSJiKKiom799WZywC0yMvLevXujRo1q3gYSJqEwvfHGG0ePHl24cGFiYiJ3FiAiysrKcnV1JaL09PR+/fo178YxCYVo48aNJiYm27ZtO3PmDHcWICJSKpUqlWrRokXN3kBCCYWpV69e7777rlqtDgwMxKEKu4MHD3733Xft2rX74IMP9LF9HI4KVFlZmVwuv3Xr1r59+6ZOncodp+Wqqanp27dvXl7e5s2bFy1apI9dYBIKlLW1tfZmBkuXLn3w4AF3nJZr3bp1eXl5Tk5OCxYs0NMuUELhmj9/vru7+/Xr13E1KZfbt2+vWbOGiDZs2GBsrK9bmKGEwtWqVauEhASZTBYbG3vNsDeNA62wsLCysrKJEyeOGTNGf3vBe0Khmzp16v79+319fffs2cOdpWVJT0/39PQ0NjbOzMzs3bu3/naESSh069evNzc337t3708//cSdpQXRaDSBgYFqtTooKEivDSSUUPjs7e2DgoLq/05wx2kptP/qderUSbuwr17hcFQEHjx44OjoeO3atV27ds2aNYs7jvTV/w/fuXPnbO0djfQJk1AEzM3NV69eTX+eJ+COI33aM2EvvfTSzJkzDbA7lFAcfH19hw4dWn/GHPRH+5mQdmlmnRfVfiYooTjIZDLtQuvaz46540iZ9uoIHx+foUOHGmaPKKFouLu7z5gxo6amRrtOKejDzz//vH///jZt2mhvfG0YKKGYxMXFWVtba68n5s4iQfVXzIeHh3fv3t1g+0UJxaRTp07Lli2jP79Zwx1HarTfHbO3t1+6dKkh94uPKETGABf1t0yMX1vBJBQZExMT7QlS7WoL3HGkQ7ueyJAhQ6ZMmWLgXaOE4jNp0qQxY8YUFxevWrWKO4tE/PHHH5s3b66/Yt7Ae8fhqCjVL3ly9uzZvn37cscRvbFjxx45cmTBggWffvqp4feOSShKTk5O8+fPr1+LFppCu9qytbV1VFQUSwBMQrHS3zKYLYpKperfv/+FCxcYF3rFJBSrdu3aaZfr1t6fhDuOWGnvwKNdWYsrAyahiKlUKjc3t8zMzLVr1zb9frEtUHFxce/evYuLiw8fPjx27FiuGJiEImZsbJyQkEBE0dHRhYWF3HHER3tX1tGjRzM2kDAJJWDcuHGHDx+eN2/etm3buLOIyYULF/r3709E586dc3Z2ZkyCSSh6CQkJpqamO3bsOH36NHcWMVEoFCqVavHixbwNJJRQAnr27Ll48WK1Wq1QKHBc00j79+///vvv27Vrt3z5cu4sOByVhPLycrlcXlhYmJSU5O3tzR1H6Kqrq52cnPLz8z/55BN/f3/uOJiEkmBlZaW
9hC0kJKSyspI7jtDFx8fn5+c7OzvPmzePOwsRSigZc+bM8fT0vHHjRnx8PHcWQbt582ZcXBzpeVHtZ4ISSkT9xcdxcXFXr17ljiNcy5Ytq6iomDJlymuvvcad5RGUUDoGDx789ttvV1VVGWCpTJFKS0vbu3evqalpbGwsd5bHUEJJiY+Pt7CwSEpKOnnyJHcWwdFoNNoTyMHBwb169eKO8xhKKCl2dnbBwcFEpFAosFz3U3bv3v3rr7927tw5NDSUO8sT8BGF1FRVVTk6Ol69enX79u1z5szhjiMUFRUVcrm8oKBg9+7dM2bM4I7zBExCqWnTpo32DU94eHhpaSl3HKGIiYkpKChwd3efPn06d5anoYQS5O3tPWzYsKKiIkMunilk+fn5GzZskMlk2jUsuOM8TXCBoOnql3BPSEjIzc3ljsMvODi4urp6xowZAwcO5M7SAJRQmtzc3GbPnl1TU4PvGaakpBw4cMDS0lJQH0v8FUooWbGxsW3btk1OTj527Bh3FjZ1dXXaZXjCw8O7du3KHadhKKFkdezYMSIigoiUSmVtbS13HB5bt249f/78Cy+8wLV+TGPgIwopq6mp6devX25u7saNGxcvXswdx9Du37/v4OBw9+7dr7/+etKkSdxx/hZKKHHJyckTJkywtLScNWuWmZkZdxyDSk1NPXPmzKuvvpqSksKd5Z+ghBKn0Wi6du1qamraMq/q9vLyWrNmjXahZMESxFc5QH/27Nlz69YtW1vb6OjoljYJiUihUAjk+0r/AJNQyh48eNCnT5/r169/9tlnhrn9OugAZ0elLCYm5vr16+7u7r6+vtxZ4G9hEkrWtWvXHB0dq6qqTp48abDbr4MOMAklKygo6MGDB76+vmigwGESSlNKSsrIkSPNzc2zs7MNeft10AEmoQTV1dUplUoiioiIQAOFD5NQghITEwMCAuzt7S9evGhubs4dB/4FSig1JSUlDg4Od+7c+eqrrwx/+3XQAQ5HpSYqKurOnTtDhw6dPHkydxZoFExCSbl48aKLi0tdXd1vv/3m7u7OHQcaBZNQUoKCgmpraxcsWIAGiggmoXR8++23b775prW1dW5ubqdOnbjjQGNhEkpEbW3t0qVLiWjlypVooLighBKxcePGnJycPn36vPfee9xZ4NngcFQKioqK5HJ5SUnJkSNHXn/9de448GwwCaVg+fLlJSUlY8eORQPFCJNQ9M6dO+fh4dGqVauMjAy5XM4dB54ZJqHoKRSKurq6xYsXo4EihUkobvv27Zs2bVqHDh1yc3NtbGy444AuMAlFrKqqKiwsjIhWr16NBooXSihi8fHxV65c6d+//9y5c7mzgO5wOCpWN2/elMvllZWVqampr7zyCncc0B0moViFhYVVVlZOnToVDRQ7TEJR+uWXX4YMGWJqapqdnf38889zx4EmwSQUH7VarVAoNBpNSEgIGigBmITis2vXLj8/v27duuXk5FhYWHDHgaZCCUWmoqJCLpcXFBR8/vnnArz9OugAh6Mi8+GHHxYUFAwaNMjHx4c7CzQPTEIxuXz5srOz88OHD9PS0gYMGMAdB5oHJqGYLF26tLq6etasWWiglGASisYPP/wwatQoS0vLnJwcwd5+HXSASSgO9Ytqv//++2igxKCE4pCYmHj+/PkXX3xRoVBwZ4FmhsNREbh//76Dg8Pdu3cPHDgwceJE7jjQzDAJRWDFihV3794dOXIkGihJmIRCl52d7erqqlar09PTXVxcuONA88MkFDqlUllbW+vv748GShUmoaAlJydPmDDB1tY2Nze3ffv23HFALzAJhaumpiYkJISIVq5ciQZKGEooXAkJCbm5uY6OjgEBAdxZQI9wOCpQRUVFDg4OpaWlR48e9fLy4o4DeoRJKFDh4eGlpaXjx49HAyUPk1CIzp496+HhYWxsnJGR4eDgwB0H9AuTUIgUCoVarQ4MDEQDWwJMQsFJSkry8fHp2LFjbm5u27ZtueOA3mE
SCktVVVV4eDgRxcTEoIEtBEooLHFxcVevXnVzc/Pz8+POAgaCw1EBuXHjRp8+fSorK3/88cfhw4dzxwEDwSQUkJCQkMrKSm9vbzSwRcEkFIpTp04NHTrUzMwsOzu7R48e3HHAcDAJBaF+Ue3Q0FA0sKXBJBSE7du3z5s3z87O7uLFi1hUu6VBCfmVl5fL5fLCwsKkpCRvb2/uOGBoOBzlFx0dXVhY+PLLL0+bNo07CzDAJGR26dIlZ2fn2tratLQ0T09P7jjAAJOQmVKpfPjwoZ+fHxrYYmEScjpx4sTo0aOtrKxycnK6dOnCHQd4YBKyUalU2kW1IyMj0cCWDCVks2XLloyMjJ49ey5ZsoQ7C3DC4SiP4uJiBweHe/fuHTp0aPz48dxxgBMmIY/IyMh79+6NGjUKDQRMQgZZWVmurq5ElJ6e3q9fP+44wAyTkIFSqVSpVAEBAWggECah4R08eHDSpEm2trZ5eXnPPfccdxzgh0loUDU1NWFhYUQUHR2NBoIWSmhQ69aty8vLc3JyWrhwIXcWEAocjhrO7du3HRwcysrKjh07NmbMGO44IBSYhIazbNmysrKyt956Cw2Ev8IkNJD09HRPT09jY+PMzMzevXtzxwEBwSQ0kAMHDqjVaqVSiQbCUzAJDef48eODBg2ysrLiDgLCghICMMPhKAAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzFBCAGYoIQAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzFBCAGYoIQAzlBCAGUoIwAwlBGCGEgIwQwkBmKGEAMxQQgBmKCEAM5QQgBlKCMAMJQRghhICMEMJAZihhADMUEIAZighADOUEIAZSgjADCUEYIYSAjBDCQGYoYQAzP4PiuPXMjG5yWMAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAVa0lEQVR4nO3deVRU9f/H8ffACIaEiiUeFROPDsiiJoYmZLlRrv0U7XiSpI4dTC1lEZHAOoi5FCr5zdQ85Xrym9sfuNM3zaWjcUTFJQXUFFwAQRZZZGDm/v64xdevlcydufe+Z3k9Tn91rnPfok/vLJ/7GY0gCAQAfJy4BwBwdIgQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgJmWe4D/Ki8vT01N5Z5CBlFRUf3799doNNyDgG2wlgj37NmTlpZ2/vx57kFkcObMmcGDB6enp3MPArZBIwgC9wzU0NAQGBh47dq1yZMnh4WFcY9jkTt37qxYscLJyenixYu+vr7c44ANsIorYXp6+rVr1wICAr7//nut1ipGskRFRcWGDRvi4+P37dvHPQvYAP4rYXFxsa+vb3V1dVZW1siRI3mHkUVpaamvr29lZeWBAwdGjRrFPQ5YO/53R+fPn19dXR0REWEfBRJRx44dU1JSiCguLq6xsZF7HLB2zFfCnJyckJAQrVZ7+fLlnj17Mk4ir8bGxqCgoLy8vFWrVsXExHCPA1aN80ooCMLcuXONRuO8efPsqUAiatWqlfjuaGpq6v3797nHAavGGeHWrVt/+eUXLy+vxMRExjEUMnbs2FGjRlVWVn7yySfcs4BVY3s6WldX5+fnV1RUtHnz5mnTprHMoLSrV6/26dPHaDTm5OT07duXexywUmxXws8++6yoqCg4ODgyMpJrBqX5+fnNnj3bYDB8+OGH7O9Cg9XiuRL+/vvv/v7+DQ0Nx48ft/VP55+usrKyV69eZWVlu3btioiI4B4HrBHPlTAhIeHRo0eRkZH2XSARtWvXbtGiRUQUFxdXV1fHPQ5YI4Yr4dGjR4cNG+bm5nblypVu3bqZ+Kvq6upOnDih6GCmCwwM7NKli4kHGwyG4ODg3NzcxYsXJycnKzoY2CRBXU1NTeJbFIsXL5b0C69evcr9o/qvb7/9VtLwR44cISI3N7dbt25J+oXgCNReqLlhw4bc3Nxu3brFxsZK+oVt2rQJDw9XaCqpunbtKun4oUOHRkRE7N69OyUlZcuWLQpNBTZK1aejlZWVOp3u/v37DvguheO8FwVSqfrGjLh8JCwsbOLEiWqe1xr4+PjExcUJghATE2M0GrnHASui3pVQ/OTaYDBkZ2cHBwerc1KrUlNT4+vre/fuXTtenwBmUO9KKN5SEB0d7ZgFEpG7u/vSpUvpzxtHuMcBa6FShPv37z948GDzh2YO65133gkNDS0pKVm+fDn3LGAt1Hg6ivt6HifevdWqVatLly7Z2b0jYB41roSrV6/Oy8sTF1KqcDorFxwcPHXq1IaGhgULFnDPAlZB8Ssh9nr4K/vb0QMsofiVMCUlpbKycsyYMSiwWadOnebPn09EsbGxTU1N3OMAM2WvhOfPnx8wYAD2//ur5l0e165d+8EHH3CPA5yUvRLGxMQYDIaPPvoIBT7B1dV12bJlRJScnFxeXs49DnBSMMIdO3YcO3bs+eefX7hwoXJ
nsV3iBnMPHjxYvHgx9yzASamno/X19f7+/jdv3ly/fn10dLQSp7ADly9f7tevHxGdO3cuMDCQexzgodSV8Isvvrh582a/fv2mT5+u0CnsQEBAwPvvv9/U1CT1nhKwJ4pcCe/cuePr61tbW/vzzz+/+uqrsj++PXnw4IFOpysvL9+3b9+YMWO4xwEGilwJExMTa2trJ0+ejAJb5OnpKW7XHRMT09DQwD0OMJD/Snjq1KnQ0FBXV9crV650795d3ge3S01NTf369bt8+XJ6enp8fDz3OKA2ma+ERqMxJiZGEISEhAQUaCKtVpuRkUFEqampxcXF3OOA2mSOcPPmzdnZ2V26dLHLTbWVM2LEiDFjxjx8+BDbdTsgOZ+O1tTU6HS6e/fubdu2berUqXI9rIO4du1aYGBgY2Pjr7/+OmDAAO5xQD1yXgk/++yze/fuDRo06O2335bxYR1Ez549Z8+e3fx8nnscUI9sV8IbN24EBAQ0NDScPn06JCRElsd0NNXV1b6+vsXFxT/88MNbb73FPQ6oRLYrYXx8/KNHj6KiolCg2Tw8PFJTU4lo3rx52K7bcchzJTxy5Mjw4cPd3d3z8vI6d+5s+QM6LKPROHDgwDNnzqSmpuJNGgchw5XQYDCIm1YkJyejQAs5OTllZGRoNJply5YVFhZyjwNqkCHCdevWXbx4sUePHtg/RhahoaGTJk2qr6//+OOPuWcBNVj6dLSiokKn05WVle3Zs2fChAlyjeXgioqK/Pz86uvrjx079sorr3CPA8qy9Er46aeflpWVDRs2DAXKyNvbOz4+Htt1OwiLroRXrlzp27ev0Wg8e/Zsnz59ZBwL6uvr/fz8CgsLv/vuu/fee497HAZZWVl6vV6Wh3Jx6aHX+8vyUEQ0ahQ5O8v1YERk2Vejvf7660Q0e/ZsSx4E/sm2bduIyMvLq6qqinsWBu3atZPrL/mQITFEglz/VVfL/Ds1/0qYmZn55ptvtm/fPj8//7nnnpPr5wXNBEEYMmTIyZMnFyxYIO6f71CmTJlSU1Mjy0N17/5/N2++L8tDEdHOnfTMM3I9GJHZT0f1en1QUFB+fv6XX345Z84cOSeCx5w9e/all17SarWXLl3q1asX9zigCDPfmMnIyMjPz+/du/fMmTPlHQge179//2nTpun1enGfUrBL5lwJS0tLdTpdVVXVwYMH33jjDSXGgmYlJSU6na66uvrQoUPii3CwM+ZcCZOSkqqqqsaPH48CVeDl5ZWUlEREcXFx2K7bLkm+Ep47d27AgAFarfbixYs6nU6hseBxer0+MDCwoKDgq6++wpfq2B/JV0Lx4+O5c+eiQNW4uLiI32e4cOFCbNdtf6RFuH379uPHj3fs2DE5OVmhgeBvTZgwITw8vKKiQrzXCXj9/DNpNKTR0MaN//P/d+0ijYakfuedhAjr6+vFFydLlixp27attPOAxVatWqXVar/++uuLFy9yzwJ/WLlShgeREOHy5ctv3br14osvOuYqKnb+/v4zZsxovnEM2Hl40KVLdOiQpY9jaoS3b99OT08nooyMDCcnlb7pHp6QlpbWoUOHI0eOZGZmcs8CNHEiabW0YoWlj2NqTgkJCbW1tVOmTBkyZIil5wRztW/fXrzdPi4uDtt1s2vbliIi6D//odxcyx7IlAWmFRUV3t7erq6ut27dknntKkjU2NjYo0cPFxcXef4ecZNxlfYTnn22UsZF208s4D56VCASPvhAyM4WiIR33vnjT2fnToFISEyU9meKJ5YAZhIEeuklCgujf/+b7twx/3FMirBdu3ZhYWENDQ2SVjCeOnWqrKzM3MEchSAI+/fvF0xeMrFmzZobN254e3s/evRI5ossh4qKCoUeubq6rSCQEv89++z//InEx1NjI/3rX5b9JTBFUVFRmzZtiOjYsWOmHJ+enq7RaGbOnGnBj9EhiDcNTpw40ZSDy8vLO3ToQESZmZlKDwZPIT4dnTFDEATBYBB69hTatRM
ePlT46WjXrl0TEhLoz6+hb/H40aNHa7Xab7755sKFC+b/C2Hv6urqxN2cxo8fb8rx4oqZ4cOHjxs3TuHRwFROThQTQ5WV9O23pNGY9RCm91pXV/fCCy8Q0YYNG0w5XrzPcOjQodL+WXAk4jcT9u/f32AwtHjw5cuXtVqtVqu9cOGCCrPBUzx+JRQEobZW8PQUuncXdu0y50oobXuL7du3E1HHjh0rKytbPPjBgwfiHfe7d++WNpRjKCwsdHNz02g0J06cMOX48PBwIpozZ47Sg0GLnohQEISPPxaIhJkzlY9QEATxc8J58+aZcvCaNWuIyMfHp76+XuqJ7N6kSZOIKDIy0pSDd+/eTUSenp5lZWVKDwYt+muEd+8KLi5Chw6qRHj27FlnZ2cXF5e8vLwWD25qahJ3YVuyZInUE9m3EydOaDQaNzc3Uz56bWhoEPe2WLNmjQqzQYv+GqEgCFFRf3yQqHiEgiBMnz6diMaNG2fKwUeOHCEid3f3O3fumHEuu2QwGIKDg4koLS3NlOOXLFlCRP7+/o2NjUrPBqb42whzc1WMsKSkRLyL4uDBg6YcL+4LHBUVZca57NK6deuIyNvbu7a2tsWDi4uLPTw8iOjw4cMqzAbqM3Pf0c8//5yIevfurdfrWzz4+vXrrVu31mg0p0+fNu909qSqqqpTp05EtGPHDlOOj4qKIqIJEyYoPRhwMTPChoYG8c76jIwMU44Xb0QcNGiQ0Wg074x2IzY2lohCQ0NN+VGcOXPGycnJxcUlPz9fhdmAhfmb/+7du3f8+PEmbv5bU1Pj6+t79+7drVu3RkZGmndGO3Dt2rWAgICmpqbs7GzxZeFTCH9u/puUlCS+LHQojrP5r0Xb4Iu7rc2aNcuUgzdt2kREXbp0efjwoSUntWmjRo0iohlPvKL/B1u3biVsgy8Hu90GnyR+IYwgCIMGDcrOzk5JSUlLSzP7pLbrxx9/DA8P9/DwyMvLE18WPkVdXV3v3r0LCws3btz47rvvqjKgdcEXwphK0tq0U6dOaTSa1q1b37hxw8Lz2hy9Xu/r60tEK1asMOV4SSvawKZZGqHUtWniC8JJkyZZeF6bs2LFCiLq2bOnKbcgSV3RBjbN0ggFiWvTbt++Ld4SdfToUctPbStKS0vFVzjirYMtkrSiDWydDBFKXZu2aNEiIurbt29TU5PlZ7cJ0dHRRDRy5EhTDj558qRGo3nmmWewmYiDkCFCQRB++uknMnltWn19fffu3Ylo3bp1spzdyp0/f97Z2Vn8erMWD25e0bZo0SIVZgNrIE+EgsS1aTt27CAiT0/P8vJyuQawWq+99hoRxcbGmnKwpBVtYB9ki1Dq2jRJfzVtl6R/bqSuaAP7IFuEgsS1aZKepNkoqU+8Ja1oA7shZ4QPHz7s3LkzEW3dutWU4yW9XWGLxLegAgICTLkFqaCgwNXV1cnJ6cyZMyrMBtZDzggFQdi4cSOZvDZN6hv3tkXqhzGjR48moujoaIXnAqsjc4RGozEkJISIUlJSTDle0kfYtkXSsoSsrCwi8vDwuHfvntKDgbWROUJB4tq05sVcK1eulH0SRpJ+CI2NjQEBAaavaAM7I3+EgsSLwP79++3sIiD16cDKlSvt9ekAmEKRCKW+HJJ0g4/1k/TCuLy83NPT015fGIMpFIlQkLg2raCgwMXFxT7eGJT6FvGMGTOIaMSIEUoPBlZLqQgd9iMyfFgKUikVoSB9sYiXlxcR7dy5U7mRlIZlQ2AGBSMUJP4lW7t2ra0vm5S0gHbnzp2Os4AWnkLZCM27gcDELXGtjdRbSXx8fBznVhJ4CmUjFCSuTZO0ObxVkXpTpbjLjokr2sC+KR6hg9xULnV7AXd3dyLKyspSYTawcopHKDjA9irYaAcsoUaEUtemJScnE1FwcLCtbDRmxpZzrq6uBQUFSg8GNkGNCAWJa9Nqa2u7detGRJs2bVJhNgv99ttvrVq1cnZ2zs3NbfFgo9E4cOBAIkp
OTlZhNrAJKkUoSFybtmXLFrKRzafN2Ia8U6dO1v/7AtWoF6GktWlGozEsLIyIkpKSVJjNbJmZmUTUvn37+/fvt3hw84q2LVu2qDAb2Ar1IhQkrk07ffq0eDdQaWmpCrOZwWg0BgUFEb6aCiyjaoTNGxmZuDYtLS3Nyt8jvX79+pw5c0z5ksYbN27gSxrhb6kaoeDAW/pNnDiRiKZNm8Y9CFgdi76VyQziDa85OTlpaWnid544gqNHjw4bNszd3T0vL098WQjQzEnt8zk5ZWRkaDSapUuXFhYWqnx2FgaDISYmhoiSkpJQIPyV2hESUVhYWERERF1dnfihvN1bv379hQsXfHx84uLiuGcBa6T201FRUVGRn59ffX398ePHxY8i7FVFRYVOpysrK9u9e7f4shDgCQxXQiLy9vYWbzKMiYkxGo0sM6gjNTW1rKxs6NChKBD+Cc+VkB77OuhNmzZFRUWxzKC05q8Tz8nJ6du3L/c4YKV4roRE5ObmtnjxYiJKTEysrq7mGkNRcXFxjY2N0dHRKBCegi1CIoqMjAwLCyspKVm2bBnjGArZu3fvoUOH2rdvL248B/BP2J6OinJyckJCQsT9L3r16sU4ibz0en1QUFB+fn5GRsbcuXO5xwGrxnklJKLg4ODIyEi9Xr9gwQLeSeS1evXq/Px8Pz+/WbNmcc8C1o75SkhEJSUlOp2uurr68OHD4eHhvMPIorS0VKfTVVVVHThwQLyBC+ApmK+EROTl5ZWYmEhEsbGxTU1N3OPIIDk5uaqqauzYsSgQTMF/JSQivV4fGBhYUFAwefJkW//s/vbt2ytXrnR2dr5w4YK4qQfA01lFhES0Z8+etLS08+fPcw8ig5dffnnw4MHp6encg4BtsJYIiai8vDw1NZV7ChlERUX1799fo9FwDwK2wYoiBHBM/G/MADg4RAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjADBECMEOEAMwQIQAzRAjA7P8BMRYD2FwfZq8AAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "wizZIO-Ec_9i", - "colab_type": "code", - "outputId": "6995ed71-55cd-4ea9-8ce8-a76acff42fa9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "valid_mols = [Chem.MolFromSmiles(compound)\n", - " for compound in valid_dataset.ids]\n", - "display_images(mols_to_pngs(valid_mols[:10], basename=\"valid\"))" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "display_data", - "data": { - 
"image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3dfVzN5/8H8PepTqopq4UayUmY0y3ZYuWuNUImVsj9vobNtvab7fsz+yLMzzazudnmZpv7RTcMbRGFkYg5Ip1D7poUhUk31KnT5/374/L9OGLW6pxzHee8nw//uJw+13Xw+lyfm+tGgohACOHHgncDCDF3FEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEDaSRqO5e/cu71YQU2DFuwFPjTt37uTm5qpUKqVSqVAosrOzJ02a1KpVq1mzZvFuGnm6SRCRdxuMkUajycvLy8nJOX36dE5OTk5OTlFRkfYHJBKJjY1NVVXVd999N23aNF7tJCaAQnjfox3dvXv3tD9gb2/fqVMnuVweEBDg5eXVtWvX9PT06OhoCwuLHTt2hIeH82o5edqZaQg1Gk1BQQHLm0KhUKlU+fn59f4qXF1dWd5Y8Lp06WJhUf8Wevbs2QsWLLC3t8/MzPTx8THgNyCmw4xCmJGRoVAo2BWmUqlUq9Xaf2pvb+/t7e3r6+vv7+/r6+vj42Nvb/+3x0TEsWPHbt68uX379llZWa1bt9Zb84nJMpcQJiUlLVy48NSpU2JJQzq6hqiurg4JCTl69Gj37t0PHjxoZ2enu1YTs2AWISwtLXV1ddVoNOPGjXvppZdYR+fg4NDAH79165b4eCYnJ2fmzJmRkZH1PtCjR49Lly5FRUUlJCRIJBI9fAlissziFUViYqJarR4wYMC6dev+9sN/e7uoUCjqhdDZ2Tk5OTkoKCgpKUkul8+dO1fnX4GYMLMIYVxcHACMGTPmsX/aiOeijx5ELpfHx8eHh4fPnz/f09Nz7Nix+vgixCSZ/uVoQUGBTCazsbEpKSlp3ry5WL58+fLU1NTHvgCUyWR+fn6+vr6+vr5+fn4eHh4NvML84YcfpkyZYm1tvWfPnr59++r2ixBTZfo94U8//SQIQkREhHYCASArK2v37t3wuI7uueeea1xdkydPzsnJ+fbbbyMjI7Oysjw9PXXwBYipM/2e0MfHJzc3NyUlZdCgQdrlmZmZJSUl/6ija4i6urphw4b98ssvL7zwwtGjR5999lldHZmYKhMPYXZ2drdu3Vq2bFlUVCSVSg1TaUVFRXBwcE5OTt++fffs2WNtbW2YeslTysRnUbBHMqNGjTJYAgHA3t4+OTm5devWv/32Gw0rJX/LlEMoCEJCQgL89XNR/XF3d09JSbGzs1uzZs3SpUsNXDt5uphyCPfv319YWNihQ4eXXnrJ8LUHBARs2LDBwsLiww8/3Llzp76qee89CAuDPn3g6FF9VUH0zJRDyK5Fx44dy2sIS2Rk5Lx58wRBGDt2rPaIuaaqqIAjR+DIEdi9G0pLITUV1q2Dt9/W2fGJYZnsg5nq6moXF5eysrK8vLxOnTrxagYiTpgwYdOmTc8///yxY8fatm
3bmKNcuwYKBahUoFSCQgHnzoEgQGgo9O4NrVvDlCkAAB06gEoFzZrptv3EAEz2PWFycnJZWVlgYCDHBAKARCJZs2ZNYWHhgQMHhg4deujQoWeeeebJP1JRUaFRqRyzs+H0acjJgTNnoKLioU80awZeXuDjA4igfQ6lMatPJ5MN4ZOHqhmSVCpNSkrq2bPnyZMnx48fn5SUVG+6xrVr19ggVTZu7ty5c9t69Yo4ePDBJxwdQS6HgAAICAAvL/D2vt/j7d4N69fD1Klw+TI4OoK1NQgCNGouCOHINC9Hb9++7erqKghCYWGhkczxy8vL69mzZ2lp6fTp0yMjI8WFM86cOVNeXq79yWbNms0dPPhjBwfw9QVfX/DzA2fnxx8UEWJi4Px5qKmBRYugpgbefhtSUsDNzRBfieiIaYZw5cqV06ZNGzRoUEpKCu+2PLBv376wsDAbG5vKykrtckdHRzZojo2b8/b2bta4W7uwMNizB/z9ISMDHh6jR4yZaYYwODg4MzMzLi5u9OjRvNvykC5duly6dEkmk/Xs2VMcIN6yZUvdHL28HF5+GZRKGDgQfvkFLC11c1iiZyYYwitXrshkMjs7u+Li4ub8OoS7d++WlpZqPw41RMPy8yEwEG7ehA8+gK+/1ksVRNdM8CZ+06ZNiDhs2DCOCQSApKQkd3f3jz76SCzZuHEjIg4fPlyPDZPJ4OefoVkzWLIEVqzQVy0PS0+Ht94CAKisBH9/w9RpUkwwhPHx8WAEz0Xj4uIEQejSpYtYsnnzZjBAw4KDYfVqAICYGDCmW2Lyl9C0nDhxAgBatWpVW1vLsRlFRUWWlpY2NjalpaWs5Pfffzdowz75BAHQwQFzcvRdVVoaurnhgAEYGop+fvquzQSZWk/IXg9GR0dbWfF8Bbply5a6urrw8HBxPiFr2OjRow3UsAULIDoaysvhtdfgxg191zZoEKSmwvbt+q7HNJlUCAVBSExMBOO4FtVuRl1dnaEvkiUSWLMGXnoJ/vijfOzY6upqA9VL/jmTCmF6enpRUZGnp+eLL77IsRlnz57Nzs52dHQcOHCg2LDi4uKOHTt2797dcO2wtYWdOwsDA/tcvTphwgQ0ucfgJsOkQsj6n3HjxtUrr7fYtr5t2rQJAEaMGCG+c2cNGz9+vCGbAQDg4lK+dm3+9euJiYmffvqpnioJDYVVqwAAmjcHHc4VMSO8b0p1KTw83MLC4uTJk9qFmZmZ7dq1O3r0qGHaIAiCTCYDgEOHDrGSu3fvNm/eXCKRXLp0yTBtqGf37t1WVlYSiYS9vNG5tDR87jmsrEREDAzURw0mzqR6Qg8PD0EQZsyYUVtbKxbGxcUVFBQMHz68oKDAAG04fPhwfn5+u3btgoKCWMmOHTsqKytffvllDw8PAzTgUWFhYV999RUiTp48+ciRIzo8sloNFy8CALRvDz/8oMMDmxneZwFdKiwsbNOmDQBMmjRJLKypqQkNDQUALy+vO3fu6LsNU6dOBYCZM2eKJWyVtxUrVui76id75513AMDZ2fnChQuNPsjt25iRgUuX4rhxGBCAzZphq1aYloazZ2NICNbUUE/YGCYVQkQ8ceIE25Ll66+/FgvLysq8vb0BICwsTK+v6dRqNVuz9MyZM6zkxo0bUqlUKpXevHlTf/U2hEajYZsodunSRXx7+WRqNSoUuH49fvABvvIKOjsjwEO/LC3xhRdw506MjcXVq3HdOgphY5haCBGRTdhje3eKhZcvX27VqhUAvP/++/qrevv27QDQtWtXsWT58uUA8Nprr+mv0oYrLy9nmyj279//sSej27dvZ2RkLF26dNy4cQEBAYGB/6mXuhYtMCgIp0zBpUsxI+P+fWBaGsbGYnU19uuHvXoZ+kuZABMMISIuWLAAAOzs7I4fPy4WZmRksMeV3377rZ7qZRvFLF68WCwJDAwEgISEBD3V+E9duHCBTbB888031Wr1yZMn169fP3369FdeecX5kVmLrVp5vf
ACjhiBCxfiL7/glSuPPyYLISJ+9hn6+2NZGUZH4/nzBvxWTznTDCEisvcBzz///NWrV8XC+Ph4iURiaWn5yy+/6LzGsrIyW1tbCwsLscYLFy5IJBIHB4e7d+/qvLpGKCoqcnR0jIiIsLGxYX8P9VLn6OjYt2/fmJiYH3/88ffff793714javmf/0EA7NwZ//xT59/ANJlsCGtqavr16wcA3bp1q2SXTYiIOGvWLACwt7fP0fWgyh9//BEAQkNDxZLY2FgAeOONN3RbUaMtXrwYACIjIxMSEubNm2dpaenu7h4eHh4bG5uYmJibmysIQtNruXsXX3wRAbB3b1Srm34802eyIUTEW7dudezYEQCGDx9eV1fHCgVBYDN927dvX1xcrMPqWObXrVsnlrA1ptLT03VYS1OwTd22b9+OiDU1NRUVFS4uLnK5/MaNG7qtqKgI27ZFADSa849RM+UQIuK5c+ccHR0BYMaMGWJhVVVVz549AeDFF1/U1YWiOG1CfAty9OhRdj2s0Wh0UkUTqVQqdsFZXV3NSlJTUwGgY8eO+qhOocBnnkEAXLRIH4c3KSYeQkQ8ePAg25Jl1apVYuHNmzc7dOgAAFFRUTq5Blu0aBEAjBgxQix59913AeCjjz5q+sF14pNPPgGAqVOniiVsfN+nn36qpxq3bUMLC7SwwJ9/1lMNJsL0Q4iIa9euBQCpVKp9ZahUKlu0aAEAsezRXtNUVFRs3LhRHBxXW1vLHkJmZ2c3/eBNx2sw3eefIwDa2uKxY/qr5KlnFiFERLbMhJOT07lz58TC1NRUPQ2q/PXXX9lrcd0ettEOHToEAO3atRPvjdmY8pdfflnfVU+digDo6ooFBfqu6mllLiFke3cCgIeHh/ZzCPYy3dra+rffftNhddHR0QCwcOFCHR6zKf5qMN13332n76rVauzXDwGwa1esrDSK22NjYy4hRMR79+6x7Zl69eolPpzA/968ubm5aRc2RWVlJbvSy8/P18kBm4j7YLo//8TOnYU+fWIHDRpkJI+pjIoZhRARr1275ubmBgBskiuj0WjGjRuXmZmpq1o2btwIAL1799bVAZvo0cF033zzDQAMGTLEYG24cKGAnQimT59usEqfFuYVQkQ8efIkW3FQt9eK169f37NnzxdffDFmzBgnJycAWL16tQ6P3xSPDqbr0aMHAMTHxxuyGYcOHWLDBrlPKDE2ZhdCRExJSbG0tJRIJJs3b27cEWpra3NzcxMTE2NjY8PDw11dXeuN/1q3bp0Bpk01yJ07NwMDVwYHFxYWsoKLFy9KJBJ7e3vDD6Zbv349AFhZWe3du1e3R66trVWpVPHx8TNnziwpKeG71t4/ZY4hRMQvv/wSAGxtbRs44167o/Px8ZFKpfVS9+yzz/bu3fvdd9/9/vvvjx8/rqvbSx348UcEQK3BdHPnzgWAiRMncmnOxx9/DAAODg7iDWrj3LlzJyMjY/Xq1TExMUFBQWz+GiOTyd58801dNdgATHAZ/AaaNm3aypUrXVxcjh071q5dO+0/0mg0eXl54l5lCoXi+vXr9X7c1dVV3MJFLpfL5XJe+wH/jZAQOHAA1q2DiRNZQW1g4Fkbm5rY2O4hIYZvDiKOGTNmy5YtMpksKyuLzS/7W3V1defPnxe3ssrJybl69Wq9z8hkMl9fX2dn582bN1dVVS1fvvy9997TwzfQPfMNYW1t7aBBg9LT0728vFJSUq5evaq9SWC9NQIdHBx8fHxY3gICArp27fq3e30ahWvXoF07kEqhuBhatAAAOH4cAgPB1RWuXuW1Y0xVVVVISEhWVlZQUNC+ffseuwVVWVnZmTNnxH+O7Ozse/fuaX/A2tra09NTPA/6+/uLU7GSkpJGjhxpYWGxffv2IUOGGOIrNRHnnpir0tLSzp07P9qDWVpadurUKSoqasGCBcnJyX/88QfvljbWF18gAGoNpsOYGATADz/k1yZExOvXr7OrjxEjRgiCoNFoLl26lJ
ycHBsbGxUV9djLCldX1/Dw8BkzZmzYsCE3N1ccdfBYbP6Kvb396dOnDfalGs2sQ4iIixcvtrCwsLGxEe/ojh07ZiTT/3TA1xcBcOfO+7+trcXWrREAH16QjovTp0/b29sDQPv27W1tbetF7plnngkMDJwyZcp3332XkZHxT59yCYIwduxYAGjTpo34RMpomXsIBwwYAMb0OkGXlEoEQCenB7P6du1CADSawXSLFi0StwlwdXUNDQ2NiYlhHV3T3+mr1eo+ffoAQEBAgJGfVc06hCUlJVZWVtbW1rdu3eLdFj2YMQMB8O23H5SMGYMA+H//x69ND3nrrbcAYPz48bp6naNWqw8cOCD+9tatW56engAQGRn55MtXvsw6hEuWLAGAYcOG8W6IHggCtm+PAHj48P2Sykps3hwlErx8mWvL7nt0MF0TsbUUpFJpWlqaWKhSqVhn+5///EcnteiDWYeQ7QyxdetW3g3RgwMHEADd3VGcLblpEwIYz3JoO3bsAAB/f38dHvOxLyH37NnDdsLasGGDDuvSIfMN4fnz59k/WOOWMzJ2b76JADhr1oOSsDAEQK2ZzXxFRUUBwJdffqnDYwqCMGrUKACQyWQlJSViOVv+RyqV7t+/X4fV6Yr5hpCt+PR0Da34BxYsQDc3VKnu/7akBK2s0NoajePu99GV6crLy9W6WBbq3r17bJnJoKAg7XFL77//PgA899xz541vMUYzDaEgCGxnCO37eFOjvWzH0qUIgBER/FrzkDVr1gDAK6+8IpbMmzfPyclp/fr1TT+4+BJy5MiR4toldXV1Q4cOBYDOnTvfvn276bXokD5D+O67OGAA9u6NR47osZZGOXz4MHuJZC7T2/bvx4gI3L6ddzvuCwkJAYC1a9eKJZ07dwYA7WcqTZGbm8vWLtFeQaeiosLPzw8AevfurZNeV1f0FsJdu3DMGETES5eMcCPzt99+Gx5egs3UGPEZUFyZTtwS49ixY+xVoQ7Pibt27WJzZeLi4rSrbtu2LQD861//0lVFTae3EM6fj+IbcA8PNJ5ZBYhqdQ0bZ/hUjGlqDOM+A7KV6aKiosSSmJgY0MN8X/YKysbG5ojWmUihULBxv9oTLPnSWwjnzXvwIM7D48GgjU2bkPcqgMnJ6OmZO2rUV3yboUdGfAZERHZNKG7XI65Mp1AodF7XtGnTAMDZ2fnixYti4bZt29iWQdv1c31++fLlHTt2NHxpaX1ejrJxw5cuYUDA/cKrV9HGBgFQb1uyNMTIkQiAX3zBsQl69ldnQCOgVCoBwMnJSXx0uWvXLgB44YUX9FGdRqMZPHgwAMjlcu0N4RYuXAgAtra2x5q8GKNarc7Nzd2wYUNMTExoaKg4mUP7MvjJ9BZCQcB338X+/bFvX9TaGgnj41EiQUtL1MOWLA1RVoZ2dmhhYdIr8D32DGgc2Pv0t956SywZM2YMACxYsEBPNZaVlbEN4QYMGKA9437KlCnsRrTgH/5XYB3d/PnzIyMjO3bsaGFRf7trFxeX/v37p6SkNPCABnlFUVKCWluy4OzZCID29sjjlmzdOgTAfv0MX7MB/dUZkDdBENq3bw8AGRkZrERcme6yPgfT5efns9nDkydPFgtramrYQ9quXbtqbxlUz191dCIrKyu5XB4VFRUbG5ucnHzt2rV/2jz9h1CpxPbtMSICxRG0gnB/JLG7O+p0S5aGCA1FAPzxRwNXSxARf/vtNwBwd3cXX99t2rQJAIKDg/Vd9fHjx9kSGN98841Y+Oeff7JNewYPHiw+mC0qKkpLS2M7pcrl8sduIBcUFBQTE7N69eqMjIyqqqomtk3/Ibx48f4+y//+94PCqirs2RMBsHt3NOA0k6IitLREGxts2HbRRMcmT55cbyx1WFgYAKxcudIAtScmJrJdGZOTk8XCvLw8tjpeQEBAr1692NtFbVKp1M/Pb9y4cV9++eXevXt1u5MXY5DL0YMH0doaAVD77/rmTezQAQEwKgp1sS
VLQyxejAAYGWmY2shD1Go1+++em5vLSthUMqlUarCpZGzGvfauOIiYnp5uZ2fHFsLUR0f3tww1bG3tWgRAqRS1N+tTqfDZZxEA58wxTCu6dkUA4xk3Yl62bdsGAN26dRNLli1bBgBDhw41WBsEQdi6dWu9fbiSkpIAwMPDQ08d3d8y4NjR//1fBEAHB/zviRARMTUVraxQItE0+Hluo6lUCICOjsb22sxcDB8+HAC++urB61m2K0FiYiLHViFiREQEACxZsoRXAwwYQkG4/4ZOJkPtrWF/+OFep07B7u76Hks9cyYC4MNXIsRA7ty5w6ZNiCu+XLhwQSKRcJ9Kdvv27WbNmllaWl6/fp1XGww7i+LePXzpJQTA4GDt/mjGBx+wYQ362ytPEFAmQwD87/58xKC+//57AHj11VfFkjlz5hjDGM7Vq1ezV4gc22DwqUzXrqGbGwLg+PFiWV1d3WuvvcaGTehpmolajcuWYUSEwZ4BkYewNZe0ZyqxdwP79u3j2CpE7N27NwBs3LiRYxt4zCfMzsbmzdHa+pDWG5vy8nI2pLBPnz5GNc2ENF1hYSFbV1Jc0Ono0aMA8Pzzz/OdSnblyhULCws7O7uKigqOzag/4sYQ/P1x8+Z/+/j0iYlhD6YAwN7ePiUlpU2bNgcPHmSDbnUlPR2cneHuXQCAHj3ul7z1FgBAZSX4++uwKvJ4P/30kyAIERER4ls4tk/w6NGjH30VbkhxcXGsYeL7CS54hBBAMmSIS3Q0Io4fPz4rK4sVtmnTZufOnXZ2dmvWrPn666+bWIUgwIULwLaQaN8efvihiccjjccix8aIMjExMXPmzJn43+0xeNm8eTM83DA+OPbCbGati4vLlStXxMKtW7eyaSbiVJcGKivDEydwwwaMicGgIGzeHAFw/nxMS8PZszEkBGtqMDAQETEtDd3ccMAADA01wtl2pubUqVMA4OjoaGx3GSdPngSAli1b1tTU8G0Jn56QWb58+auvvlpcXDxw4MCysjJW+Prrr8+fP18QhNGjR//+++9/9bOCIFy8WLd1K8yZAxER4OEBLVpA9+4wYQIsXw6ZmVBZCW5uYGUFAGBhASNHQlzcgx8fNAhSU2H7dv1+QQIAbm5ugwYNkkqld9ktgdFg/fOoUaMe3ejO0PieA8rKyry9vQEgLCxMe5rJhAkTAMDT01MsLCsrO3HiBBvMHhQU1Lx588DAawAo/pJKUS7HcePw888xORnFBe/S0jA2FqursV+/+4tupqXdf1tYUUE9od7dvXvX398fAEJCQrj3OaK6ujq2zkVWVhbvtqAV31OAg4NDcnJyjx49UlNTP/zwQzaOCQBWrVpVUlLSq1ev+fPns/3o8vPz6/2si8vZwYNdfX3Bzw98faFTpydt9dWsGfTvDwkJ+vsq5PHs7OxSUlICAwP3798/derUtWvX8m4RAMCBAwcKCws7dOjARu1wxvssgIh4+PBhtkndt99+i4jFxcVsFqY2W1vb7t27T5o0admyZQcOHDC2VevIk4kruyxatIh3WxAR33jjDQCIjY3l3RBE49mpNyEhITo62sLCYuvWrZMmTaqrq7O1tfX29mabcnp5efn4+FhbW/NuJmm8n3/+ma26vXXr1mHDhnFsSXV1tYuLS1lZ2blz59hSi5zxPgs8MHv2bABgo2l1u0UBMRKfffYZ6Ghll6ZISEgAgED2rNwI8Hw6Ws+8efN+/fVX9vaW7fBITMzHH388derUqqqqiIiIRzedN5hH31vyZSyXo0x5ebmLi4tarS4oKGjTpg3v5hDdq62tHThw4L59+7p27Xro0CHDD1W5ffu2q6urIAiFhYVsqUXujKgnBIDExMSqqqqQkBBKoKmSSqWJiYmdOnXKzs4eOXJkXV2dgRuQkJBQU1PTv39/I0kgGFsIje06geiDk5PT7t27W7ZsuWvXrhkzZhi4diP8P2ZEl6NFRUXu7u5SqbS4uPjR9XaIicnIyHj11VfVavWKFS
vYAEadq62trTca5sqVKzKZzM7Orri4mO+gbW1G1BPGxcWx/asogeagV69eq1atAoD3338/PT296QfUaDRKpTIpKWnu3LlDhgzp0KHDe++9V+8zmzZtQsRhw4YZTwIBgPOIGW1GeJ1A9GrixIl5eXmff/7566+/npmZyQYwNtyNGzdycnJOnz7NxlSpVKqamhrtD1y4cKHej8THx4Px/R8zlstRpVLp7e3t5OR0/fp1eilvPhBx9OjR8fHxMpksKyuLrZP9WBqNJi8vT6VSKZVKhUKhUqkuX75c7zOurq5saAcb49GlSxftNeoVCkX37t1btWpVVFTEdrE3EsbSlI0bNwLAqFGjKIFmRSKRrF27Nj8//9ixY6+//np6ejobwKht48aNS5YsebSjc3Bw8PX19fX19fPz8/X19fb2fvJFJrvUio6ONqoEgrGEUBDeT0tr3adPP3pHb35sbW137NgRGBh4+PDhCRMmbNmyRSKRaH+gurqaTUp8ckf3BLW1tUql0mjvd4zjcvTAAQgJAXd3yM+Hh/8BiJlQKpVBQUFlZWWffvrprFmztP+ouLj4jz/++NuOTltpaal41apUKk+ePFlVVSWTyTw9Pffu3auH5jeJcfSEbL7t+PGUQLPl5eW1ZcuWIUOGzJkzx8PDY/To0eIfubi4uLi4POFna2trz549yx7PsOc0xcXF2h+wsLDo2LFjSEgIex5rbIygJ6yuBldXuHMHlEqQyzk3hnC1ZMmS6dOn29jY7N+/v2fPnn/1scd2dNofcHBw6NixI7tqDQgI8Pf3N6p3EvUYQQi3bYPISOjeHf56MQtiPt55550VK1Y4OztnZWV16NAB/slzUfGOseG3i8bACC5H2bWo8d0uEy6WLVt28eLFvXv3hoSEBAcHq1SqR5+LtmjRwsfHhz0X9fPz8/b2ZjOGn1K8e8LSUnB1hdpaKCgAGrRNAACgvLw8ODjY2tpaoVCwkkY/F30q8O4Jk5JArYb+/SmBROTg4HDkyJGzZ5D9yOsAAAMjSURBVM9mZ2c35AXg0453T9inDxw6BBs2wPjxPJtBCD9cQ1hQADIZ2NhAcTHY23NrBiFccb2wjosDQYChQymBxJxxDWG3bjB4MF2IEjPH+56QELPHqSd87z0IC4M+feDoUT4NIMRoGPYVhVoN167BuXNQWgqpqXD5MgwfDqdOGbQNhBgZPYewtBSUSlAoQKEAlQpyc6FNG5g4EXr3BgDw8ICKClCr4ZEpZISYD52GUK0GpRJOn4acHMjJgdOn4c8/H67NCmxtQaMB7RtRmjlBzFuTQigOZmfDakfa2Pw7M/OhT7RoAd7e4OUFcjkEBEC3bmBnB7t3w/r1MHUqXL4Mjo5AU+mJefsHT0fVarVSqWTztc6cOXPq1Kk/H+7o+ri7/2ZnB76+4O8Pvr7g6wtt2z7mQIgQEwPnz0NNDSxaBC++2PSvQcjTq6EhXLlyZUxMjEaj0S50cnJiy3uw8exeXl42Njb6aSchJquhl6Nt27ZFRA8PD3GvMrlcLpfLJXRHR0jTNLQnVKvViEgdHSE6RyNmCOHMdGZGEvKUohASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwh
mFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsLZ/wOAov9RxgL2wgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxU9f4/8PfMsIOgLMqiJmgpbqG4hFhaooZimUZqprhc52Z2qZt9HZe6WAmOZkZlGW6FpRVZJlaoiKVAKJomIpoI4sIS+zLAMMzy++PzaH6kcubMzDnnMzO8n396D+e8ufGac+ZzPp/3R6TT6QAhRI+YdgEIdXUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcowhAhRhiFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGiDEOIEGUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcowhAhRhiFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGiDEOIEGUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcowhAhRhiFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGizI52ATQ1NzfX1NRUV1dXVVXV/FNwcPDSpUv9/f1p14hsn0in09GugXutra11HZSXl5eVldX9U2lpaUNDQ2dnCAgIaGhouHDhwoABA4SsHHVBVhbChoYGcteqrq7ueOOqqqrq+C8qlYrN2VxcXLy8vLz/5tXBl19+mZaWFhYWlpmZKZFI+P69UFdmKSHU37vue9ci/8g+XU5OTj068Pf39/Pz6/FPAQEB3bt37+wMDQ0Nw4cPv3Xrllwul8lk3P2iCN1N6BBWVlZu2bKF3K863ru0Wi2bH3d3d/fx8bnrruXl5XXXPzo6OppfakZGxuTJkx0cHHJzc4cPH27+CRG6L0FDmJ2dnZCQ8PPPP9/7P+nvXfe9a5F/9Pb2dnBwEKxaAFixYsUnn3zy8MMP5+bmCnxp1HUIGsLIyMgjR45ERETMnTv3rruZZX7vamlpGTFixLVr19atW7dhwwba5SDbJFwI//jjj5EjR7q4uJSUlHh7ewtzUfPl5OQ8+uijIpEoKytr7NixtMtBNki4l/UbNmzQ6XTLly+3ogQCQFhY2Ouvv65Wq+fPn69QKGiXg2yQQHfCq1evDhkyxN7evri42OregLe1tY0ZMyYvLy82NvaDDz6gXQ6yNQLdCRMSErRarZXOQXF0dNy7d6+Dg8NHH3109OhR2uUgWyPEnfDGjRsPPfSQSCT6888/AwMD+b4cTxISEtatW9e7d++8vLwePXrQLgfZDiHmjm7atEmtVsfExAifQIVCQV5IkneSHd9M6v8xIyPjwQcfNHiq1atXp6WlZWVlrVy5cs+ePQIUj7oI3u+EFRUVgYGBKpUqLy9vyJAhXJ3W4OzQ8vLy0tLStrY2g6fKzs4eN24cm4sWFRWFhIQoFIoDBw7Mnj3b7F8CIQAB7oTvvvuuUqmMjo5mmcD6+vq71jRU/63jv7S3t7M5m6ur632n1JApoz4+Pl5eXr6+vix/l/79+yckJMTGxi5fvnz8+PG9evVi+YMIMeD3TlhTU9OvXz+FQnHu3LnQ0FCDx3t7e9fU1LA5c7du3Ui0SJw6m8Lm5ORk9i/xDzqdLjIy8ujRo0
8//fQPP/zA7clR18TvnTAxMVGhUEyfPp1NAgHAw8OjubmZeeK1v79/QEAAJ7NDTSASiXbv3j1s2LBDhw7t27dv/vz5VMpAtoTHO2FjY2O/fv3q6uqysrLCw8PZ/IhOpxOJRDzVw6HPP/988eLFHh4ely5d6tOnD+1ybJNGo2lpaWlra7Ou2R0m4PE94ccff1xXVzdx4kSWCQQAq0ggACxatGj27NkNDQ1LliyxkLVgNqa2tjYiIkIqlfr6+k6ePHnv3r22PF1Jx4/m5uaePXsCQHp6Ok+XoKuyspIMzGzfvp12Lbbmzz//JC+NPDw87O3tyR9qt27dFi5cePToUbVaTbtAjvEVwsTERAAYM2YMT+e3BGRgxtXV9dq1a7RrsR1ZWVk+Pj4AQBZV19bWJicnR0RE6J+SvLy8pFJpZmamVqulXSw3eAmhSqXq27cvABw6dIiP81sOMjAzbtw42/t4pmLPnj1k3WZkZGRjY2PH/+nmzZtyuXzQoEH6h7gHHnhAJpPZwCcgLyHcuXMnAAwePFij0fBxfstRX19PBmY2b95MuxbrptVq4+LiSLpiY2MZ/nLy8/NlMpmfn58+jYMHD5bL5RUVFUIWzCHuQ6hWq8kD/VdffcX5yS1Qenq6SCRydHTMy8ujXYu1am1tnTdvHgDY2dlt27aNzY9oNJrMzEypVOru7k6iKJFIIiIikpOTm5qa+C6YW9yHcN++fQDQv3//9vZ2zk9umV588UUACAkJaWtro12L9SkrKxs9ejQZevn555+N/fHW1tbU1NTo6Gh9/xFnZ+fo6OjU1FSVSsVHwZzjOIRarXbYsGEAsGvXLm7PbMkUCgW5+b/55pu0a7EyeXl5DzzwAAAEBQUVFBSYc6p7h3A8PT2tYgiH4xAePHgQAHr37t3V7gnZ2dkSicTOzu706dO0a7EaR44c8fDwAICwsLC//vqLq9PeunUrMTExJCTkriGcP//8k6tLcIvjEIaFhQHAhx9+yO1prcL//d//AcCgQYNaWlpo12IFkpKS7OzsAOC5557j6f+xzoZwysvL+bicybgM4bFjxwCgZ8+ezc3NHJ7WWiiVSvIo/uqrr9KuxaKp1erY2FgAEIlEMpmM72fF+w7hhIeHJyUl3fUWhBYuQzhhwgQAkMvlHJ7Tuly4cMHBwUEsFp84cYJ2LRaqqakpKioKABwdHb/44gshL22xQzichTAnJ4fMM6qvr+fqnNbo7bffBoB+/fo1NDTQrsXi3LlzZ8SIEWTWy8mTJ2mVYWlDOJyFMDIyEgD+97//cXVCK9Xe3k7aky5dupR2LZbl9OnTZP30gw8+yHKaC99vucgQDvlcoDiEw00IL1y4IBKJXF1dKysrOTmhVbty5YqzszMAfP/997RrsRQHDhxwcXEBgIiIiLq6OjY/cv369SFDhqSmpvJdm06ny8/Pj4uL69evH5UhHG5CSBqurFy5kpOz2YD333+fjFFxOPJuvRITE8ViMQD861//Yvnt69dff/Xy8gKACRMm8Fzd/0eGcGJjY8mlAUAsFgswhMNBCK9cuSIWix0dHUtLS80/m23QaDSPP/44ADzzzDO0a6Gpra0tJiaGDITGxcWx/KmvvvqK9CV58sknqXy1vncIx8nJKSoqKiUlhY8hHA5CuHDhQgBYvny5+aeyJbdv3yb7H+7fv592LXTU1NRMnDiRrPb64Ycf2PzIXdO4qa9NqaurE2AIx9wQ3rx5097envS356QgW7J7924A6N69+61bt2jXIrTCwkKy7Mjf3//cuXNsfkSpVD7//PPkPR7LadyCKSkpiY+P79gxcMCAAVx95zc3hGTuckxMDBfF2KBZs2YBwOTJky18+iK37lqYy+ZHqqqqxo8fT6Zx//jjj3xXaDIyhEPaWM+aNSs7O9v8c5oVwvLycmdnZ7FYnJ+fb34pNqmyspK0+UhKSqJdi0AYFuZ25tKlS2RkMjAw8PLly3xXaD6NRjNnzhwAeOutt8w/m1khfO
211wDg2WefNb8OG0Ymtbu6uhYWFtKuhV/sF+Z2dPToUTKN+5FHHrGihblffvklAMyePdv8U5kewurqajc3NwBg+cTflZEVq+Hh4dRHGvhjwsJcXYdp3NHR0dY18T0vL498MzT/VKaH8M033wSAadOmmV+EzaurqyNdMLZs2UK7Fl6YsDBXrVbLZDL9NG6r64TS3t7u6OgoEonMf4liYggbGhrI9mCZmZlmVtBFHDt2zFa7YJiwMLepqWnGjBnw996PfFfIE7Jk8bfffjPzPCaGcOPGjQAwceJEMy/fpUilUgAYMWKEtbRdYMOEhbkWMo3bfGQegvmNZ00JYWtrK1koeezYMTMv36UoFIoBAwYAAPu5IxbOhIW5Fy5c6N27N5nGbbFL3VnaunUrJ9NUTAkh2bd99OjRZl67C8rKyiJdMM6cOUO7FrOYtjBXP4170qRJLKdxW7Ljx48DwLhx48w8j9EhVKlU5AsAy4lI6C4rV64kS2b27NmTlZVVVlZGuyKjmbYwVz+Ne+nSpbbxQF5dXU3GoswcVTI6hF2nsS9PlEqll5dXx72cHB0dg4KCyP4ncrk8JSXl3LlzFts804RvdO3t7WRmlVHTuKlTq9UXLlxIS0tjOMbf3x8Arl+/bs6FjAuhvrFvl52UbD7yDOPq6vrss8+OGTOms32/RCJR7969H3vssUWLFr3zzjv79u3LycmhvjDKhIW5NTU1ZEGJq6vrwYMH+a6QQ2VlZWTqL8PDNlnL/t1335lzIeNCuH//fuhijX3Za2hoYPPV6IknngCA+Ph4/b+0trYWFRWlp6cnJSXJZLLo6OjQ0FCyLPhe971tCtNZy7SFuWQat5+f39mzZ/mukHNkyiHD9NfVq1ebP9JmRAi7ZmNf9ubPnz9s2LDc3FyGY06fPg0A7u7ubP6Ia2trz507l5KSIpfLpVJpeHg4eTd7Xz169AgNDY2OjpbJZElJSenp6UVFRdz9cqYszO04jfvmzZscFiOYSZMmAcDhw4c7O4DclmbOnGnOVYwIIdkJrAs29mWjqKjIzs7O3t6+pKSE4TAynrFu3TqTL3RXMiMiIoKCgsh7gns5OTnde9s0dnaYmQtzIyMjrbfnFZkdvWHDhs4OuHz5MgAEBgaacxUjQkga+37wwQfmXM9WLVu2DADIxr2d+eOPP0QikYuLC+edeFQq1b0PtOQdeme3zfDw8I7J7OzObP7CXKsewEtOTgaA6Ojozg5Qq9XOzs4ikcicJoNsQ0ga+3p7eysUCpMvZqtu377t6OgokUiY3z4/99xzIGxr4KqqqtOnT+/fv3/Dhg1LliyZOHFinz59yFPlvTw9PUePHj1nzpw1a9bs3LkzIyPj119/HThwIBizMLe1tZUszDVqGrfF+uOPPwBg4MCBDMeEhoaCefM32YaQfBxu3LjR5CvZsFdeeQUA5s6dy3BMYWGhRCKxt7en/u3ovrdNfXfqjsgqmZEjR7LsHlReXj5mzBgA6Nat208//cT3LyKAtrY20s2Z4d6zZMkSADDnE4dVCCsqKnr27Onq6mq9D/f8IUu6RCLR+fPnGQ5bvHgxAEilUsEKM1ZFRUV2dvYXX3zx1ltvxcTEjBs3TiwWOzg4sPyPfunSJTKLw1oW5rJEBiMZZjiRneHN+S/LKoTNzc0PPfSQnZ0dm2eSn376ad68eVb9TcAoa9asAYCnnnqK4Zhbt245ODhIJBLr2tuZhOrq1asGj7TShblsvPDCCwCwY8eOzg745ZdfAGDs2LEmX4Lt4ygZJgoODmYeW1MoFORlbmJiosk1WZH6+nrSUo2518iKFSsA4IUXXhCsME6Q1UbffPMN82EqlYp8dXz++edbW1uFqU0w7777LgCsWLGiswPq6upI52uTbzxsQ6hUKocOHQosOvympqaSwfGu0HjmnXfeAYBJkyYxHFNRUUEG0C5duiRYYZx44403AGDt2rUGj7x69WpCQoJNNrMiQ5KPPvoowzFkEqLJi0KMeE
Vx/vx5e3t7sVj866+/Mh9Jvv+MHDnSNubpdkahUJCX0cx7MJF9C62xC/C3334LANOnT6ddCE2VlZUA4O7uzvARQ17/pqSkmHYJ46atkfc/gYGBzF20GhoayNeJd955x7SyrMJ7771n8MtATU1Nt27dAIB5Jo1lunbtGgD06dOHdiGU9erVCwBu3LjR2QHr1q0DgDfeeMO08xsXwvb2dtJKxOBYUEZGhlgstrOzs8YZg2wolcqAgAAAYG6SST62pk6dKlhhHNJoNOQtRXV1Ne1aaJoyZQowrt375ptvAGDGjBmmnd/opUwFBQVkbrHBF0Fk0efgwYNt78u6Tqf79NNPAeDhhx9meEpRKBRkkcSpU6eErI1DZJu3X375hXYhNJEvFAwtRq9evQoAffv2Ne38pqys37JlCwD4+fnV1NQwHNba2krahq9atcq04iyWWq0mjSqYRw43b94MAGFhYYIVxjnSF6eLjHV3xmCLUY1G4+rqCgC1tbUmnN+UEGo0GrIz9rx585iPPHfuHBnLsep+Pvfau3cvAAwYMIChj2hraytZ8XnkyBEha+PWxx9/DACLFy+mXQhNbFqMkqlCBscs78vEbmvFxcVkvMHgSyTyndXgWI4V0Wq15G3NZ599xnDYtm3bACAkJMSqB+6zsrLIQDftQmhi02KUzOD/8MMPTTi/6c1/k5KSAMDLy4t5N9P29vZRo0aBDe2dduDAATJmyLCkS6VSkc0VzFxzTV1jYyNpl9rFl3EbbDH60Ucfgal7pJu1F8W0adMAICoqivmwy5cvOzk5iUQilr2ZLRz5TPn4448ZjtmzZw+ZYGQD0/eCgoIAwJamg5rAYIvRU6dOAcCoUaNMOLlZISwtLfX09ASA3bt3Mx8pl8vJihjmsRzLl5aWBgC9evVimL6nVqvJNC72ncgs2cyZM6HLdxUi74QZnubq6+tFIpGzs7MJjwzm7k9Ilve7ubkxN5zSaDSPPfaYNc6fvMujjz4KAO+++y7DMV9//TUABAUF2cYjHHnVKZPJaBdCE5sWo+QLCMuNADriYLtsslZ1/PjxzI9eRUVF5M2vybN7qDt58iRZ/Mo8yDRy5EiwoQ0Jv//+ewB48sknaRdCE5sWo08//TQAfP3118aenIMQVldXs1w5Qca7vb29mcdyLBaZObF+/XqGYw4fPkxeotrMFIWioiLyG9EuhDKDLUbJPmVr1qwx9swchFCn0x05ckQkEhlcOaHVakmfRpMn+FB0/vx5smKlqqqK4bDw8HAA2Lp1q2CF8U2r1ZJ199S7ntJlsMUoGTY3YbNAbkKo+3uRv8GVE3fu3CF9+5hfslkgMj7BPPsnIyODvLax2P7ZpiGfLMePH6ddCE0GW4wWFhYCQEBAgLFn5iyETU1N/fv3Z7Nygkw3cXd3Z+4OaFEKCgrEYrGTkxPz1hGksS9DhzwrtXz5cgB47733aBdCk8EWo/r57szPSvfiLIQ6nS4zM5Plyono6GgAeOKJJ6xlNsn8+fMB4OWXX2Y45syZM8C6sa91IbPVFy5cSLsQmti0GCVtQZnXl96LyxDqdLpXX32VzcqJqqoqMpZj2jQfgbFs7EuaQbBZh251cnJyyJIR2oXQxKbF6L///W8AeP/99406M8chVCqVLFdOHDp0CABcXFzY9BGii01j34sXL5LGvjY5eqFQKEjntS7efN1gi9FPPvkEABYtWmTUaTkOoU6n+/3331l2wSBTgUJDQy25CwbLxr5z5swBYRv7CozsxnXx4kXahdBERh8ZZixmZ2cDwIgRI4w6Lfch1P39wsTgyon6+vq+ffvCP7cosjSkse+cOXMYjrl+/bqFNPblz7PPPmszE/FMZrDFaGNjo1gsdnR0NOq+wksI9V0wXnzxReYjMzIyRCKRvb09yy7rAtM39v39998ZDiONrZYtWyZYYcJ7++23AeD111+nXQhNbFqMkncERnXW4yWEOp2uoKCA5cqJl19+2WK7YJDGvsxTC6y0sa
+xyHf4yZMn0y6EJjYtRp955hkA2LdvH/vT8hVC3d/NHQyunCDtvQFg9erV/BVjgoaGBjaNfcmHyPz58wUrjIqSkhIA8PHxoV0IZQZbjK5fv97Yli48hlDfBcPgH2hOTo5EIhGLxRbVEGnDhg1gu419jaXVaslUJyud98sV0mL022+/7eyAgwcPgpH99XgMoa5DFwyDKyfIg19QUJCFTPhqbm5m09h31apVYJ2NfU1AFqMdPXqUdiE0rV27FhhbjBYXFwOAr68v+3PyG0KdTrd9+3Y2KydUKhV5CcPQ9F9ITU1NcXFxzJNx6+vryS4o1tjY1wTkwXvz5s20C6HJYItRrVZL/irYvzHmPYTsV07k5+eTsZy0tDS+q+IEefqfMmUK7UIEsnPnzq7w7ZdZWVnZjh07mN+Xjh8/HgDS09NZnpP3EOo6dMEwuHIiISGBzEM3rX+jkPSNfW2smyOD3NxcABg6dCjtQizdSy+9BABbtmxhebwQIdT93T7V4MoJtVodFhbm4OCQmpoqTGEms4HGvsZqaWmRSCR2dnYW+DLJopBGhAsWLGB5vEAh1LFeOVFYWMi85a0lUCqVZJ21tTw5c2XQoEEAYPn/geg6ffo0AAwfPpzl8cKF0LpWTjAjfTqsvbGvCcgU2c8//5x2IRaNzHe3t7dnOd9dDELx9vbesWMHAKxataqgoECw63Kuvb2d7N765ptvikQi2uUIavjw4QBw8eJF2oVYNFdX1/79+7e3t5ONYgwSLoQAMGPGjEWLFimVyoULF7a3twt5aQ7t27evpKRk0KBBpOFFl/Lwww8DhtCQvLy8qqqqp556iuwSYxjft+a76PcPteSVEww0Gs3gwYMBYO/evbRroeD27dsA4OXlRbsQy3Xo0CHS5IJ9xyehQ6iz+JUTzMi72sDAQNto7GsC8mLmzp07tAuxRImJiRKJBADmzp3LfgyZQgh1Ot1//vMfsNSVE8xIY99PP/2UdiHUPP7448Bii9iuRq1Wr1ixAgBEIlFcXJxRI3Z0QqjfP9TSVk4wI419fX19re6zg0OkjdDGjRtpF2JBGhsbp0+fDgCOjo5GLWIi7Lj+UsqKk5NTcnJyWFjY5s2bIyMjycxgulpbW+s6KC8vLysrq/un4uJikUgUExPj5OREu15qcID0Ljdu3IiKiiooKPD29j548CCZs2YUkU6n46MyNt544434+PigoKCLFy+S77J86Jiu+0arvLy8tLS0ra2NzdlEIpGPj8/x48eHDRvGU8EW7vz586GhocHBwVb9nokrp0+fnjlz5l9//TV06NDDhw+TPWGMRTOEZJLauXPnXnrpJfL62yj6dHUWrbKyspqaGpVKxeZsTk5OPTrw9/f38/Pr8U+enp6LFy8+evRo9+7d09LSHnnkEeN/aavX1tbWrVs3rVbb2Njo4uJCuxyavv3225iYmNbW1ilTpqSkpJDFEyagGUIAKCgoCA0NbWtr++mnn8hiC/g7XQzRqqurq66uZvmm0WC6/P39AwICHB0d2ZxNpVI9//zz3333nZub26FDh0jL7a5m6NChly9fPnv2LNkvtQvS6XSbN29eu3atVquVSqXbtm2zt7c3+WyUQwgAmzZtWr16dffu3X19faurq0kvDDY/2L17dx8fH69/8vb2vusfHRwcuC1Yo9EsXbo0OTnZxcXl+++/nzp1Krfnt3zz58/fv3//rl27li5dynxkaWnpgQMH5syZQ2Ys2oa2trZly5Z98cUXEokkPj5eJpOZeUL6IWxoaPD393dzc6usrCT/or933feZkPyjt7c35+liT6fTxcbGbtu2zcHB4auvvpo1axatSqggn5uxsbEffPABmyPFYnFYWNjChQvnzZtHOi1Yr5qamlmzZp06dcrNzW3//v2k7bq5uBypNQlZEzRixIgrV65UVlZayybvWq32tddeAwCJRNLVJjSTPcMnTJhg8Mjjx4/PnDlT/6jv4uIyb968w4cPW3K7ZwbXrl0jTc
kCAgKYu2AahXIIW1tbrXpNkFwuBwCxWLxz507atQinrKwMALp3787ylXRdXV1ycnJUVBSZTQIAPXr0WLBgQXp6uhUtQzl+/DjpvhcSEnL79m0Oz0w5hNu2bQMrXxO0adMmABCJRLa0MahBPXv2BABjO47fvn07MTGRzDoi+vTpExsbe+HCBZ7q5MquXbvI0Mvs2bObm5u5PTnNEKpUKvJehWH3U6uwfft2sVgMADKZjHYtAomIiAAAkxsg5Ofnx8XFBQUF6dM4ePBguVzOvP0jFVqtNi4ujhQZGxvLx9clmiH87LPPACA4ONhavgcy+PLLL+3s7LpODleuXAlmb4eq0WgyMzNjY2PJpHDyYB8eHp6UlNTQ0MBVqeZQKBRkwZqdnd327dt5ugq1EGo0moEDB4IN7THyzTffkCeW5cuX28DHCrPk5GQAiI6O5uRsSqUyNTV1wYIF+rf/Tk5OUVFRKSkpFDdjKy0tJS9Ce/ToYey+n0ahFkKyJigoKMiW1gT9+OOPZFrpCy+8YEu/1120Wi3pLeDh4bFx40YO96Kqr6+3kCGcixcvki3D+vfvf+XKFV6vRS2Etrom6JdffiGvwubMmWOlA/HMTpw4MWbMGPLoqP9GFxoampiYWFlZydVV7ty5k5iYGB4eftcQjjA9pn7++Wd3d3cACA8P5/CX6gydENr2mqDc3FzSZ3X69Om29AtevnyZtMwDAB8fn61btx45cmTBggX6yfeOjo5RUVHJyckKhYLDi947hBMXF3fjxg2uLnGXxMRE8vli1MJcc9AJIfmEs+Ex/d9//50MNkycOJF5p1SrcOvWLalUSh4R3dzcZDJZx1+qpaUlJSUlKipKP3/Sw8NjwYIFqampXD2T64dwyAYhPA3htLe3d1yYy9VpDaLT3gIAvLy8LGTvF54UFBSQeQjjx4+vr6+nXY6JampqZDIZ+aJrb28vlUorKio6O7i6ujopKSk8PFzfhM7f3z82NpZhk3dj8TeE09jYOG3aNDB1Ya45KISQrDwwc3TbKhQXF5PnqJEjR1ZVVdEuxzjNzc1yuZzMERGJRNHR0devX2f5syUlJXK5nGxzTwQHB8fFxbE/g0F1dXW7du16/PHH9V9NPT09169fb9rZiouLSf8ub29vDj8yWBI6hGfOnAEAd3f3uro6gS9Nxc2bN8nfYnBwcGlpKe1yWNFoNCkpKaQpHgBERESYPBySn58vk8l69ep11xAO+x2LDOo4hLNmzRoTzpCTk0MmAA0dOpR5m+PdvEsAAAdaSURBVAaeCB1CMut87dq1Al+XooqKCrIMPzAwsLi4mHY5BqSnp5MGFgAwatSojIwM88+pVqvT09M7DuFIJJKIiAhuh3AuXbpkwpTOlJQUZ2dnAJgyZQqtbw2ChvDixYsikcjFxYXDD0KrUFtbS4b1+/bta7Fb2+fk5Oib/TzwwANJSUmcTzm4dwjH3d2d2yEc9rRarVwuJ19fpVIpxfe6goaQ7GTw6quvCnlRC1FfX08emXx9ffPy8miX8w9XrlzRv3vw9vaWy+VKpZLXK9bU1PA9hMNMqVQuWLCA3JPlcrkwF+2McCG8fv26RCKxt7fncIKFdWlubp48eTKZBXLmzBna5eh0Ot2dO3ekUimZ9erq6iqTyQSetEmGcMgivY5DOIWFhfxdtLq6mtzz3dzcLGETPuFCuHjxYgBYtmyZYFe0QEqlkkwI9vDwyM7OplhJbW2tTCYjX4fIuwfm/cz5RoZwOnbB4HwIh+i4MNdC9ngTKIS3bt1ycHCQSCQW+41IMCqVijz7ubq6Hjt2TPgC2trakpKSyFtv8u6B19uOUdRqdWZmplQq1XfB4HYIJz09naeFueYQKIRkIkIX3+5cT61WL1myhLwXPnjwoGDXJe8eAgMDyd/3pEmTLHY7kHuHcJydnaOjo80ZwuF1Ya45hAhhRUWFs7OzSCS6dOmSAJezClqt9pVXXiGPgikpKQJcMT09PSQkhPxBDxkyRJ
iLmu/eIRw/Pz9jh3AEWJhrDiFCuGrVKgB45plnBLiWdfnf//5Hnrh2797N31XOnDkzceJE8ifYt2/fpKQktVrN3+V4QoZwyBpUYtCgQWyGcPQLcx0cHD777DNBijUO7yGsqakhz/e5ubl8X8sakVZRIpEoMTGR85NfvXo1Ojqa3EO8vLzkcrkNrOowagintLQ0NDSUTGrjdWGuOXgP4fr16wFg6tSpfF/Iem3bto3k5O233+bqnJWVlbGxseTdg4uLi0wms95J5PdF1lXcdwhHvzBAyIW55uA3hAqFgqzoOXnyJK8XsnY7duzgqlVUU1OTXC4nf5p2dnZSqdQCuydxSKFQ7Nu3b9q0aeQTBwC6desWExMTHx8v5MJcc/AbQtLYNywsjNer2Ib9+/eTP6MVK1aY1spBpVIlJSXpZ0tHRER0qZGwu4ZwvLy8AGDRokUUu9SwxGMIrb2xr/BSU1PJyj2pVGrUCJ5Wq01JSenfvz+JX1hY2KlTp/ir08IVFha+9dZbu3fv3rt3L+1aWOExhDbQ2Fd4aWlpZBbLvHnzWL4QS09P17fTDQ4OtpZ3D0iPrxDaTGNf4Z08eZJ8mZkxYwbzYObZs2cnTZpE4te7d28rffeA+AqhLTX2Fd7Zs2fJV5rIyMiWlpZ7DygpKZFKpWQsx9PTUy6X3/cwZBV4CaHtNfYVXn5+vp+fHwA89thjHVc2VFVVyWQyss+Rg4NDbGxsF+lRYMN4CaFNNvYV3tWrV3v37g0Ao0aNqq6uVigUcrmcPKmKxeLo6Gj+2v4hIfESQltt7Cu8oqIiMt+6b9+++m5/Tz311OXLl2mXhjjD/U69P/7444wZM3x9fW/cuEEG3JE5ysvLJ0+erFKpCgsLx44du2nTpgkTJtAuCnHJjvMzksmQq1atwgRyws/P78SJE3Z2dr/99ltUVBTtchD3OL4TnjhxYtKkSV5eXiUlJfrWWgghBmLDhxgjPj4eAP773/9iAhFiics7YW5u7tixY93d3W/evEmaCCCEDOLyTrhhwwYAePnllzGBCLHH2Z0wLy8vJCTE2dn5xo0bpKk4QogNzu6ECQkJOp1OKpViAhEyCjd3wqKiooEDB4rF4uvXr5O1zAghlri5E8bHx2s0mkWLFmECETIWB3fC27dvDxgwQKPRXLlypeOWdAghNji4E27atEmlUs2dOxcTiJAJzL0T/vXXX4GBgUqlMi8vb+jQoVyVhVDXYe6dcOvWra2trTNnzsQEImQas+6EtbW1/fr1a2pqys3NHT16NIdlIdR1mHUn/Oijj5qamqZOnYoJRMhkZoXQ3d3d09Nz7dq1XFWDUBdk7sBMS0uLi4sLV9Ug1AVxv7IeIWQUjtcTIoSMhSFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGiDEOIEGUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcowhAhRhiFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGiDEOIEGUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcowhAhRhiFEiDIMIUKUYQgRogxDiBBlGEKEKMMQIkQZhhAhyjCECFGGIUSIMgwhQpRhCBGiDEOIEGUYQoQowxAiRBmGECHKMIQIUYYhRIgyDCFClGEIEaIMQ4gQZRhChCjDECJEGYYQIcr+Hzug5ZdXOmSzAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVwT19oH8JMQICCLiKAIKLhXxaqgoixCxd3qRYsLauuK2sVrq22ovre2tbVoP61Le9uitlSlCChUrTurLIIKiohLVRRFUARkE5KQZd4/DjemrFlm5oTk+X76h4bJzEPlxzOZM+cMh6IoBAAgh0u6AAAMHYQQAMIghAAQBiEEgDAIIQCEQQgBIAxCCABhEEIACIMQAkAYhBAAwiCEABAGIQSAMAghAIRBCAEgDEIIAGEQQgAIgxACQBiEEADCIIQAEAYhBIAwCCEAhEEIASAMQggAYRBCAAiDEAJAGIQQAMIghAAQBiEEgDAIIQCEQQgBIAxCCABhEEIACIMQAkAYhBAAwiCEABAGIQSAMAghAIRBCAEgDEIIAGEQQgAIgxACQBiEkA0URZEuAeguCCGzpFJpaGjo2rVrSRcCdBcHfkkz6tatWx4eHkKhcN++fStXriRdDtBF0AmZNWTIkH379iGE3n333czMTNLlAF0EIWTcokWL3n//fYlEEhQU9PTpU9LlAJ0Dp6NskEgkAQEBaWlpXl5eycnJJiYmpCsCOgQ6IRuMjY1jY2MdHR0zMzMFAgHpcoBugU7InuzsbD8/P7FYHBERsXTpUtLlAF0BnZA9np6eu3btQgitXbs2NzeXdDlAV0AnZNuqVav279/fp0+fnJyc7t27ky4HkAchZJtYLPbx8bly5UpAQMDZs2eNjIxIVwQIg9NRtpmamsbFxdnb2ycmJn722WekywHkQSckIyUlZfLkyTKZLCYmJigoiHQ5gCTohGT4+/tv376doqjly5ffvHmTdDmAJOiEJC1duvTAgQMDBw68fPmytbU16XIAGRBCkoRCobe399WrV2fNmnXs2DEOh0O6IkAAnI6SZGZmFhcXZ2tre+LEiW3btpEuB5ABnZC8hISEadOmURT1119/TZ8+nXQ5gG3QCcmbNGnS559/LpfLFy9efP/+fdLlALZBJ9QJFEXNmzfv6NGjbm5uWVlZXbp0IV0RYA90Qp3A4XB+++23IUOG3LhxY9WqVaTLAayCEOoKS0vL+Ph4a2vrw4cP79y5k3Q5gD1wOqpbjh8/HhgYyOVyT58+PXnyZNLlADZAJ9Qts2fPDg0NlclkwcHBDx8+JF0OYAN0Qp0jl8tnzpx55syZkSNHZmZmmpmZka4IMAs6oc7hcrl//PFHv379rl27tnr1atLlAMZBCHWRjY1NfHy8ubn5oUOHwsPDSZcDmAWno7orKipq0aJFxsbGSUlJPj4+pMsBTIFOqLuCg4P//e9/SySSefPmlZaWki4HMAU6oU6TSqWTJk1KTU0dN25camoqLFiql6AT6jQejxcTE+Pk5JSVlbVx40bS5QBGQAh1nb29/dGjR01NTX/44YeIiAjS5QD6welo53Dw4MF33nmHz+enp6d7eHiQLgfQCULYaYSEhOzbt6937945OTl2dnakywG0gRB2GhKJ5I033sjIyJg4ceK5c+dgwVK9AZ8JOw38VBkHB4ekpKTNmzeTLgfQBjphJ3Px4kV/f3+JRBIdHT1v3jzS5QAaQCfsZMaPH79jxw6KolasWFFQUEC6HEAD6ISd0rJly37//fcBAwZcvny5a9eupMsBWoEQdkoikcjb2zs3N3fmzJnHjx/ncuGMphODf7xOic/nx8XFde/e/eTJk1999RXpcoBWoBN2YomJiVOnTqUo6vjx4zNnziRdDtAQdMJOLCAgYOvWrXK5PDg4+Pbt26TLARqCTti5URQ1f/78I0eODB48+NKlS1ZWVqQrAmqDEHZ6L1++9PT0vHnzZmBgYFxcHDxVptOB09FOz8LCAi9Y+ueff3733XekywFqg06oJ06cOBEYGMjhcE6dOjVlyhTS5QA1QCfUE7Nmzdq0aRMsWNoZQSfUH3K5fNasWadOnRoxYkRmZqa
5uTnpioBKoBPqDy6Xe+jQof79++fl5cGCpZ0IhFCv4AVLu3TpEhkZ+dNPP5EuB6gETkf1UHx8/FtvvcXj8RITE319fUmXAzoAnVAPzZkz58MPP8QLlpaUlJAuB3QAOqF+kslk06dPP3/+vKenZ2pqqqmpKemKQJugE+onIyOjqKgoV1fX7Ozsjz76iHQ5oD3QCfVZXl7e+PHjhULh/v37V6xYQboc0DoIoZ47dOjQ22+/zefz09LSRo8eTboc0Ao4HdVzS5YsWbNmjUgkmjt3bnl5OelyQCugE+o/iUQyceLE9PR0f3//8+fP83g80hWBf4BOqP/wgqWOjo4pKSmffvop6XJAc9AJDUVWVpafn19jY2N0dPT8+fNJlwNegU5oKMaNG4dnGy5btuzatWukywGvQCc0LCtWrPjtt99cXFxycnJsbW1JlwMQghAaGpFI5OPjk5OTM2nSpDNnzhjyU2XkcnlNTY2xsbGFhQXZSiCEBufx48ceHh7l5eWfffbZF198QbocTQiFQpFIJBQKq6qq8B+a/VWV16uqqhBCwcHBa9eu9fb2JvjtQAgNUXJy8pQpU2Qy2ZEjR+bOncvOQXHnwQGorq5ubGx8+fLly5cvxWKx8utisbi+vr6d1yUSCS31cDgcKyururo6Ozu7J0+eEBy5gRAaqB07dggEAktLy+zs7CFDhrS/MS2dp7q6mq4fNj6fb2ZmxufzbWxs8B+a/VWV17t27crhcNzc3AoKCk6ePDljxgxaatMAhNBAKRYsdXR0nDZtWkNDg3LnqampEYvFuFPR2Hm6du1qampqbm5uZWVlampqaWnZpUsXU1PTdl7HgVF+3cTEhJZ6sO3bt4eGhgYFBcXGxtK4W7VACA1XRETEihUrbGxsXrx40f6WNHYedr411ZWWlvbu3dvIyKi0tJTU5WIIoSGSSqUbN27cvXs3/mtYWJijo6OpqamNjU3LjkRv59FBU6ZMOX/+/M8//7xmzRoiBUAIDU5FRcWCBQuSkpKMjIxkMpmbm1t+fj7pokiKiopatGiRp6dnVlYWkQLgjhnDcvXqVQ8Pj6SkpF69ei1cuBAhBCsFBwYGdu3aNTs7+86dO0QKgBAakIMHD3p7ez969MjLyysnJ+fGjRsIQoiQmZnZW2+9hRA6ePAgmQooYABEItG6devwv3hISIhYLH769CmHwzE3NxeJRKSrIy89PR0h5OjoKJVK2T86dEL9V1JS4ufnt2fPHj6f/+uvv4aHh5uYmJw7d46iKH9/f1gDCiHk5eXVr1+/kpKSlJQU9o8OIdRzGRkZHh4e2dnZzs7OaWlpy5cvx6+fO3cOwbno/3A4nLfffhshdODAAQKHZ7/5AtaEh4cbGxsjhCZMmFBWVqZ4XSaT2dnZIYT+/vtvguXplKKiIi6Xa2Zmhu/sYRN0Qv0kEomWL1++evVqqVS6bt26xMREe3t7xVdzcnLKy8tdXFwGDhxIsEid0qdPHx8fH6FQGB8fz/KhIYR6qLi42NfXNyIiwsLCIiYmZvfu3c3uTj579ixCaNq0aYQK1FHvvPMOInJGynLnBUxLSUnBTa9///43btxodZvx48cjhI4dO8ZybTqurq7OwsKCw+Hcv3+fzeNCJ9QfFEVt3749ICDg+fPn06dPv3LlyrBhw1puVl1dffnyZWNjY39/f/aL1GUWFhaBgYEURUVGRrJ6YDYTD5hTV1eHR5w5HI5AIJDJZG1teeTIEYSQn58fm+V1FomJiQghFxcXuVzO2kGhE+qDe/fueXp6Hj161MrKKi4uLiwsjMtt818WBifa4e/v37t376KiooyMDNYOCiHs9E6dOjVmzJibN28OHjw4Ozs7MDCw/e3Pnz+PIIRt4HK5ixcvRixfnmGt5wLayeVyRdObPXt2TU1Nh28pKChACPXo0YPN063O5e7du3jli/r6enaOCJ2ws6qtrZ0zZ05oaCiHwwkLC/vzzz+trKw6fJfiXFQH59fqiAEDBowdO7a
2tvbYsWPsHBFC2CnduXNn7Nixx44ds7W1PXPmjEAgUDFU8IFQFSwPGMKk3s4nOjp65cqV9fX1I0aMiI+Pd3V1VfGNQqHQ1tYWT6FQvoEGNFNTU+Pg4CAWi4uKipydnZk+HHTCzkQmk4WGhgYHB9fX1y9atCgzM1P1BCKEUlNThULhqFGjIIHts7a2fvPNN+Vy+R9//MHC4SCEnUZFRcXUqVO3b99uZGQUFhYWGRlpbm6u1h7wuejUqVOZKVCv4DPSiIgINg7GzvUfoKWrV6+6uLgghOzs7JKTkzXbyeDBgxFC6enp9NamlyQSiYODA0Lo0qVLTB9L004okaDwcDRpEurZE5maInt75OeH9uxBYnErG9+4gRoaNP0tAVBkZKSXl1dRUdH48ePz8vI0u92sqKjozp07VlZWY8eOpb1C/cPj8YKDgxE7l2c0SW5REfXaaxRCrfzXvz/V7OZXuZzq1Yvi8SgvLyosjMrJoWCESmUSiUQgEOB/Kbwshca7+uWXXxBCc+fOpbE8/YbHVLt168b0CiDqh7C+nurXrylyb71FnT9P3blDJSdTwcFNLzo7U8rTIsvLqdGjKS73VVCdnKgVK6jYWOrFCxq/E/1TUlKCpzuYmpru27dPy73hO2n27t1LS20GYuTIkQihI0eOMHoU9UO4eXNTljZsaPNL69c3/1JFBRUbS4WEUE5Or9JoZES5u1MCAZWQQDU2avgd6KmMjAz8mcTJySk7O1vLvUkkEmtra4RQUVERLeUZiF27diGEZs6cyehR1AyhREL17EkhRPXoQTU0NP+qWNyUMWtrSihscyeFhdSuXVRAAGVq+iqQtrZUUBAVHk49fqz2N6F38FpMCCFfX99nz55pv8MLFy4ghF577TXtd2VQKioqTExMeDze06dPmTuKmiG8fLkpM6tXt77Bhx82bZCY2PHe6uuphARKIKCGDPnHB8u+fal166iEBMrwVuMTCoWKtZhCQkIaaTpB2LRpE0Loww8/pGVvBmX27NkIoe+//565Q6gZwr17m3Kyf3/rG0RFNW3w7bfq7bmwkAoPp4KCKCurV2k0N6cCApou5xiA4uLiMWPGIITMzMwOHDhA457d3d0RQmfPnqVxnwYCLzkzbNgw5g6hZgi3bGmKR0JC6xtcvNjmx0IVSSRUejolEFDu7hSH84/2GBJCxcZSrC+GxY7U1NQePXoghPr375+fn0/jnsvLy7lcLp/PZ21agD4Ri8Xdu3dHCOXl5TF0CDXHCevqmv7Q1mO+Fa/X1qq3ZwUeD3l7o7AwlJODnj1DsbEoJAT17IkePEB796J581D37sjDA33+OcrNRfpy4+vevXsnTZpUVlY2bdq0y5cvu7m50bjzs2fPyuXyCRMmqHuHDUAImZiYLFiwADE6YKheZjdsaOpLFy+2vsH1600brFyp/W+IV2QyKieHCgujAgIoY+NX7dHOrulyTmkpnYdjUV1dXVBQEFJhWQqN4VmqjH6q0W9XrlxBCNnb29P1Eb0ZNUP4xRdNP/1tfbrIyGhzAIMulZVUTAy1fDnl6PgqjVwuNWYM9Z//UJmZFInHCWjm3r17eC0mS0vLuLg4Jg4hl8vxWe6tW7eY2L+BwOcmJ06cYGLnaoYwIqLph/7nn1vfIDKyaYM9e7QvrmP4cs7MmRSf33RcY+NBDg4zZ84MDw/X8TGxU6dO2djYIIQGDRp08+ZNho6Sm5uLEHJycmJo/wZix44diLH7jdQM4bVrTT/rb7/d+gYffNC0QWam9sWpob6eOnWK+uCDsunTlU+2hw4dumHDhvPnzwvbGbdknfKyFG+++Saj665//fXXCKFVq1YxdwhDUFJSYmRkZGJiUl5eTvvO1QyhXE717k0hRHXtStXWNv9qY2PTKaKDA8FzwgcPHoSHhwcFBXXt2lWRRjMzs4CAgLCwsBzSox21tbX4DjIjI6MtW7Yw8SFQma+vL0Lo6NGjjB7FEOApYP/9739p37P6t61t29bU69a
saf6lrVubvvTFF7QUpyWpVJqTk7NlyxZ3d3flJQB79uy5ZMmS2NjYqqoqlku6c+fOa6+9hhDq1q0bfjgZo2pra42NjXk8Hvvfqf45fPgwQmjMmDG071n9EAqFr6ZQBAZSSUlUYSGVkUGtXNn04uDB7d2zRsjz589jY2NDQkJ69eqlSKORkZG7u7tAIEhPT2e6I1EUdfz4cXwD5+uvv15YWMj04aj/DTR7e3uzcCy9JxQK8blVWw8X0JhGU5mePKGGDWt9KtOwYdSTJ/SWSLuCgoKwsLCAgAB8fybWvXv3oKCg8PDwJwzUL5VKFWsxLVy4kLVB89WrVyOEtm7dys7h9N6qVasQQqGhofTuVtOZ9Y2N1E8/UW+8QdnbU8bGlJ0d9cYb1M8/N02GiImhAgOpX36hdPv65MuXLxMSEtatW9enTx/lyzlDhgwRCAQJCQnazN9TqKiomDRpEkKIx+OFhYVpv0PV9e3bFyF05coVNg+qx/Cy3L169aL3qdrMLG+xcGErt5u1vJCjSwoLC/HlHEtLS0Uau3TpEhAQsGvXrocPH2q222vXruG1mOzs7JKSkmgtuQN37tzBHZ6FM23DMWjQIIQQvZ/nmQnho0dUeDg1Zw5lbf0qjXw+NXky9d13VEEBIwelSUNDQ0JCgkAgcHd3V17Ms2/fviEhIbGxsbUq/zZRrMXk7u7O/qAlngsXHBzM8nH129atW2n/v8rwQk9SadPtZl5e/5hc36MHFRREHTig45Prnz59euDAgaCgIDyqjvH5fMVoR1uLySsvS7FkyZKGlnMvmYefAUrvbAygeKo2jRecWVxtrby8aXK98u1mypPrJRL2ilGTRCJJS0vbvHmzh4eH8mhHqzNcnj9/jtdiMjU1JbWchEgk6tKlC4fDKe20d9XqLPyPu7+t2XzqI7TkYfuT64uLyVSlmvLycjza4ejoOGfOnGZfvXLlSu/evRFCjo6OWVlZRCqkKAo/emnkyJGkCtBjv//+O0LIx8eHrh2SXndUMbm+2fJtnWFyvVwuf/HP02nFshQ+Pj6MLojQoQ0bNjBxMR1QFPXy5Ut6n6pNOoTK2p9cz9gtzrQQiUQrV67E56g0LkuhsaFDhyKEUlJSyJahr95++22E0JYtW2jZmy6FUKHDyfUqPIiPTcXFxXhFXT6fHxERQbocqri4mMPhdOnShekFMw1WUlISQqhPnz60DP/oZAiVlZVRsbHUkiVUt26v0sjjUe7u1JYturCU8IULF/CEvd69e+vIsPj+/fsRQrNnzyZdiN6Sy+V4+Dc1NVX7vel8CBWkUuriRWrLFmrsWMrI6FUge/W6/umn0dHRFRUV7BcVHh5ubGyMEJo6dWplZSX7BbQKT9X/6aefSBeiz/7v//4PIbRs2TLtd9V5QqissrJptMPZmUJok7s7QojL5eK7sRMSElj4SCYUCvGDe/CyFPTex6QNqVTarVs3hBBdlw1Aq+7du8fhcCwsLOrq6rTcVecMobLr1//73XcTJ040NTVVDN/Z2NgEBQXt37+fibuxKYoSiUQjRoxACFlaWsbHxzNxCI1lZmYihAYOHEi6EP2HH1Jw6NAhLffT+UP4P/X19YrbzZTvxsa3m504cYLGyfWFhYVjx47t3r17ge7dgvfZZ58hhD744APShei/8PBwhFBAQICW+9GfECpTTK7H8/cwGifXZ2dnI4Ts7e0luneXD14++OTJk6QL0X/V1dVmZmZcLvfRo0fa7Ec/Q6igPLle+W5sV1dXfDe2xncA4gdunjp1it6CtVRZWYmXQtH+gwpQBV6S9Ouvv9ZmJ3oeQmWKyfX4aUeYxpPrt23bhhCaN28ecwVrAC/BoP0JElDRmTNnEEIDBgxo61Z+VRhQCBVkMllOTk7LyfV2dnZ4cn1JSUmHO3ny5ImRkRGfz3+hSxNBli5dihD6Vt0HgQBNyWQyJycnhJA29wkbYgiVaTO5Hs+X/+WXX9gsuB1yuRyvoEPvoyxA+z7++GO
E0JqW656pzNBDqEwxud5C6Ukb7Uyuj4yMRAiNGzeORLGtyMvLQwg5ODhoc2oE1HXz5k2EkLW1tcazRiGErWhoaDhz5sz69evx1ReFwYMHr1+//u7du4rN8NXX27dvky0Y2759O0Jo+fLlpAsxOHhULDY2VrO3Qwg7oJhcr1hK+PLly4qv4gd6bt68mWCFCm+88QZCKCYmhnQhBmfPnj0IoRkzZmj2dg6lL08XY5pUKs3KykpOTv7Pf/6jmFyflpY2YcIEJyenoqIiIyMjguXV19fb2tpKpdKysjJbW1uClRigyspKR0dHmUz2+PFj5WvvKlLz+YQGjMfj+fj4bNmyRXl5Cx8fn379+j158iQ1NZVcaQghlJycLBaLR48eDQlkn62t7fTp06VSaVRUlAZvhxBqhcPhLFmyBDH6BEnVnDt3DiE0ZcoUsmUYLHw3f0REhAbvhdNRbRUVFfXt29fc3Pzp06fKa5ayrH///oWFhVlZWZ6enqRqMGRSqdTJyamsrOzq1asjR45U673QCbXl4uLi4+NTX18fFxdHqoaHDx8WFhba2NiMHj2aVA0GjsfjzZ8/H2l0TgQhpAE+FSF4Rnr69GmE0OTJk8leHDJw+McgMjKysbFRrTdCCGkwb948CwuLCxcuPHjwgEgB8IFQF4waNWr48OGVlZX4hlLVQQhpYGFhgRd0wffQsKyxsTElJQUhFBAQwP7RgTLNrtJ17gszFRUVX375ZV8l+NkP7EtISJg8ebKrq2thYaHynCkWJCcnT5w40c3NLT8/n83jgpbKysqcnJy4XG5JSUn37t1VfBeP0ZqY9vfff//www/Kr9jY2PRtwcXFRXlwjwkTJ050dnZ++PBhZmamt7c3o8dqBp+L4oc5A7J69OgxefLk06dPR0dHv//++yq+q3N3wuLi4iNHjjz4n6KiIrFY3HIzPp/fMpl9+/Y1MzOjsZhNmzZ98803K1eu3LdvH4277dCIESOuX7+emJg4ceJENo8LWhUbGzt//nwPD48rV66o+JbOHcKWqqqqHrSAJ0C03Jjetnn37t3BgwdbWlo+ffqUtbPiZ8+e9erVy9zcvLKyUnmpK0CKSCTq1atXVVVVfn6+m5ubKm/RtxC2SiQSlZaWNkvm7du3GxoaWm5sYmLi5OTULJmDBg1Snt/UFk9Pz0uXLkVFRS1cuJCB76MVv//++7Jly2bMmHHy5El2jgg6tGbNmvDw8E8++QTPa+mQQYSwLbS3zZ9//vndd9+dMmXK2bNn2fkWFi5cGB0dvWfPng8++ICdI4IOZWVljR8/vlevXo8fP1Zl5NagQ9gqtdqmqampo6OjIpM9evRYvXq1VCotKipydnZmulS5XN6zZ8/y8vK7d+8OGDCA6cMBFdXU1PTu3dvOzi4xMdHFxaXD7SGEqlKrbZqZmQ0ZMoTpi7SXLl3y9PR0dXUldZMAaOnWrVuBgYF3796dOHFiYmKiKm/p3EMUbLKxsXF3d2+2snDLtnnt2rX79+8LhcLc3Nzc3FzljZu1TWzw4MFdunTRrCQYnNA1f/3115IlS2pqal5//fW9e/eq+C7ohDSTSqXOzs7Pnj07ePAgn89v1jlbfUurnzZdXV07HPT38vK6ePHisWPHZs+ezcC3AtQgk8m2bt26detWuVy+cOHCffv2qf67FUJIvw0bNnz//ffvvffejz/+qPx6q582b926JRQKW+6kw7ZZXV1tZ2fH4XAqKiqsrKzY+MZAG168eLFw4cLz58/zeLyvvvpKIBCo9XYIIf0KCgrc3Ny6detWWlra4didVCotLi5+8OBBYWGhcjirqqpabszlchXJbGhoiImJmTBhAvFJ/QYuLy9v7ty5Dx486N69e3R0tAa3TEAIGTFy5Mi8vLy4uLg5c+ZotgdF27x58+atW7dwMh8/fiyVShXb2NjYmJubnzhxYtSoUTQVDtQTFRW1atWqhoaGUaNGxcfHN1u9VlUari8F2rVz506E0KxZs+jdrUQ
iKSwsPHfu3PDhwxFCPB4PIWRiYvLll1+y8EhGoEwikShOO5csWaLxoqMULHnIkLKyMmNjYx6P9/TpU9p3ju8MtrW1vXbtmkAgwMMew4cPz83Npf1YoFXPnz/HC0yampru2rVLy71BCJkya9YshNDOnTvp3e3WrVsRQmZmZhkZGfiVjIyMgQMH4sYoEAjaWbcf0CInJwefdjo6OmrzCAoFCCFT8JIzI0aMoHGfkZGRHA7HyMgoLi5O+fWGhgZFS3Rzc9P+AYygLQcOHMCTb7y9vek6zYEQMkUsFuNpnXl5ebTsMCkpCT9Das+ePa1uAC2RUSKRaNWqVfhDYEhICI0fwiGEDHrvvfcQQh999JH2u8rNzcXTODZt2tTOZsotcdiwYVeuXNH+0ICiqCdPnuC1JPl8/m+//UbvziGEDLp8+TJCyN7eXsvfmg8ePOjZsydCKDg4WJUnLmVmZg4aNEjREkUikTZHB2lpafj/v7Ozs/KTSOgCIWTWsGHDkHZPkK+oqMCJ8vf3V/0ME7dEPI8GWqI2wsPDjY2NEUJ+fn5lZWVMHAJCyKywsDCEUFBQkGZvb2ho8PLywpdbqqur1X07tERtCIVC/ORjDocjEAikUilDB4IQMqukpMTIyMjExKSiokLd90qlUnzDjYuLi8YX4pRb4tChQ5k4m9JLjx498vDwQAhZWFgcOXKE0WNBCBmH1+T96aef1H0jnixva2ur/UNIL168iB94Ci1RFWfOnOnWrRtCaMCAATdu3GD6cBBCxuHHZY0dO1atd3311VfNBuW1JBQKoSV2SC6Xh4WF4f9LM2bMqKqqYuGgEELGCYVC/JRf1RvaH3/8weFwuFxus0F57WVlZUFLbEttbe3cuXMVHwJlMhk7x4UQsmHlypUIoU8//VSVjRWD8rt372aiGOWWOGTIkEuXLjFxlE7n77//Hjp0KELIysrqzz//ZPPQEEI2pKen41sNO7zClp+fb21trXpiNZaVlfXaa68pWlH51KcAAAloSURBVKJQKGT0cDrur7/+wmcrgwcP1v4TuLoghCzBN5SdP3++nW0Ug/ILFy5UZVBeS8otsX///mlpaUwfUQfhD4H4HqP58+e/fPmS/RoghCz54osvEEKLFi1qa4OKigr8ac3f35/Nj2rZ2dm4JXK53JCQkPr6etYOTVxNTQ1ensfIyCgsLIyFX3ytghCypKioiMvlmpmZtTrmrjwoz84VOWXKLbFfv34XLlxguQAirl+/3q9fPzwIlJCQQLASCCF7/Pz8EEK//vprs9dlMhkelHdyciouLiZSG0VR165dGzFihKIlEjkxY83hw4fxklkjR47Ei8cSBCFkT0REBELI19e32et4UL5bt27sXxJoprGxMSwsDN8q2a9fv9TUVLL1MEF5WYrFixfrwuk3hJA9L1++tLCw4HA49+/fV7z49ddf4wky6enpBGtTlpeXp68tsby8HK+GxuPxwsLCSJfTBELIKvw45c8//xz/NSoqCg/KHz16lGxhzSi3xL59++pHS8zNzcVPhrC3t09JSSFdzisQQlbhhxO4uLjI5fLk5GS8KilDg/Lay8vLGzlyJL6DpLO3xIMHD+JlKby8vEpLS0mX8w8QQlbJZLLevXsjhCIiIvCgfGhoKOmi2oNbIr6Dp2/fvjrVQFQkFovXrVunWJZCB1f9gBCybfPmzQghfGmOnUF57V2/fl25JdbV1ZGuSFUlJSXjxo3DaxPu37+fdDmtgxCyDa95gS+TdqL7p5u1xOTkZNIVdSw9Pd3BwQGP/ejyLbIQQlY1NDR4e3vjEO7du5d0OWq7fv06XnJf91tieHg4/pXh6+v77Nkz0uW0B0LIHplMhmfK2NjYIIQmTZpEuiJNKLdEV1dXHWyJQqFw+fLlTKxNyBAIIXvw5QFra+uLFy+amZlxudzHjx+TLkpD+fn5+HmputYSHz9+PHr0aLwsRUxMDOlyVAIhZMm2bduUB+XnzZuHEPrmm29I16U5iUS
i3BKTkpJIV0Slpqba29vjSSH5+fmky1EVhJANLQflT506hRAaOHBgp7g62g4daYlyuXzXrl34MVXTp09/8eIFkTI0AyFknGJQXvnxPRKJBF+4y87OJlgbLXBLxN+ji4tLYmIiywXU1dUFBQWxvywFXSCEzMrPz8dTtgUCQbMvbdy4ESG0du1aIoXR7saNG3iNQNwSa2tr2Tnu3bt38QrLlpaW8fHx7ByUXhBCBhUXFzs5OSGEFixY0PLXc0FBAb5S2olGC9vHfks8efIk/h03aNCgW7duMX04hkAImVJZWYlnrPv5+bUVMzzmxvTasixjpyUqL0sxa9YsDZYn1x0QQkYIhUI8KD9s2LB2Zsrv3r0bITRz5kw2a2OBckvs06cP7fPWa2pq/vWvf+FlKbZs2dLZL25BCOmnGJR3cnJqfySwoqLCxMSEoadqE1dQUICH7Ohtibdv38anGLa2tufOnaNln2RBCOmnGJRXZagKLzT03XffsVAY+5q1xPYXm1NFdHQ0vvd9xIgRDx48oKVI4iCENPvmm2/woLyKKwjGx8fjs1amCyOooKBgzJgxuCUuWbJEs0E8qVQqEAg4HA5CKDg4WBeWpaALhJBOhw8fxoPyql9raWxstLOzQwhdu3aN0drIUm6JDg4OJ06cUOvtFRUVkyZN0rVlKegCIaRNSkpKy0F5VeCFntavX89QYbrj5s2buCUihIKCglRsiVevXnV1dUUI2dnZ6eD94tqDENLjxo0beMDqk08+Ufe9OTk5iI6nancKUql0165d5ubmuCUeP368/e0jIyPxxu7u7o8ePWKnSJZBCGlQXFzs7Ozc1qC8Ktzc3BBC6p6kdV737t3z9fVVtMTKysqW2yivTaiby1LQBUKorerqahyhdgblO7Rjxw6E0Ny5c+mtTZfJZLLw8PC2WuLz58/9/f3xshT79u0jVSQ7IIRaUXFQvkPPnj3j8XgmJibl5eU0lqf77t+/37IlZmRk4LvbHR0d9eAG9w5BCDUnk8neeust/LOi/fTcadOmIYR+/PFHWmrrRHBLxKN/9vb2q1atwnMUfXx89PIehpY4FEUhoJH169fv3r3b2to6LS1t+PDhWu4tJiZmwYIFo0ePVqwE1YkIhUKRSCQUCquqqpr9oa3Xm21QWVnZ2NiIEOLz+SKR6MMPP9yxYweeH6j3IIQa2rFjh0AgMDExOX36NF5ZXUsikcjBwaG6uvrGjRt4bg7TGhsb6+vr6+rqxGJxbW1tQ0ODWCyuqqoSi8UNDQ2qv05XPcbGxsbGxitXrhw/fvz8+fPp2q3uM4jfNLSLjo7+9NNPuVxuZGQkLQlECPH5/Hnz5u3duzcyMjIsLKz9jTXoPM3++uLFC7FYTEvluHgzMzM+n29jY9PsDyq+bmtri0dZDRB0QrWlpqZOnTpVLBbv3Llz/fr1NO754sWLXl5elpaWCxYsqKmpEYvFLTsVnrNDy+GMjY0tLCwsLCxMTU2tra1xMKytrU1NTVV5HYfH2toazycCGoMQqqegoMDHx6e6uvrjjz/G4wo0oihq9erViYmJDx8+bH9L7TtPt27d+Hw+vfUDzUAI1YDXVC8uLp4/f35UVBRDHeDo0aMvXrxQ7jwmJiZdu3ZVdCToPHoGQqiqmpoaX1/f/Px8Pz+/s2fPGuwHGEA7CKFKGhsbp02blpycPHTo0IyMDHybKAC0gBObjsnl8kWLFiUnJzs6Op45cwYSCOgFIezYhg0bjh49am1tffr0aXyjNgA0ghB24Ntvv921a5eJiUlcXJz2t8UA0BJ8JmxPdHT0okWLKIo6ePDg4sWLSZcD9BPcMdOm1NTUpUuXyuXynTt3QgIBc6ATtk4xKL9x48Zvv/2WdDlAn0EIW8HOoDwAGISwOcWg/IQJE86dOweD8oBpEMLmAgICkpKS3Nzc0tLSYEgQsAAuzDQXGhpaWlqqeNwPAEyDTtgKmUxmZGREugpgKCCEABAG1/0AIAxCCABhEEIACIM
QAkAYhBAAwiCEABAGIQSAMAghAIRBCAEgDEIIAGEQQgAIgxACQBiEEADCIIQAEAYhBIAwCCEAhEEIASAMQggAYRBCAAiDEAJAGIQQAMIghAAQBiEEgDAIIQCEQQgBIAxCCABhEEIACIMQAkAYhBAAwiCEABAGIQSAMAghAIRBCAEgDEIIAGEQQgAIgxACQBiEEADCIIQAEAYhBIAwCCEAhEEIASDs/wGvfu65Xz5SwAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAfO0lEQVR4nO3deVxU5f4H8M8ZYGBYZkBwAculDCUELCi3yiWza6bp1SwzxQU001xzuZk/10wzS5LcklQy62aamXk1c7lJ5C4qqCC4ghsoMwMIA7P8/jg2FzeWYeZ8mZnv+y84Hc750IuPz5mZ5zxHMJlMYIzRkVEHYMzZcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkFmJyYTVq9GmDby94e2N1q2xZg11JvsgmEwm6gzMIYwbh7g4+Puje3cIArZuxc2bmDgRn35Knay24xIya0hORvv2aNIE+/ejfn0AuHYNrVvj0iX89RfatKHOV6vx5SizhoQEAPjggzsNBNCgAaZNA4BVq8hS2QkuIbOGv/4CgC5d7tr48sv/+0/s4fhylFmDjw8KC6HXw8XlfxtNJri4wNsbWi1dMjvAJWTW4OoKmQylpfduVyhQVga9niKT3eDLUWYNXl4oK0NZ2V0bjUbodPD2JspkN7iEzBoaNwaA8+fv2njhAkwmNG1KksiOcAlZ9WVl3bulfXsA2LHjro07dwLA889LksmOcQlZNR0+jBYtMHz4XRtjYiAImD8fV67c2ZKXh/nzIZNh2DDpM9oXV+oAzK7o9Rg+HHo9fH3v2h4ZiSlTMH8+wsPRqxcEAVu24MYNzJqFiAiirHaD3x1l1TFzJmbNQnAwUlKgUNz7X9etQ3w8UlMBIDwcY8bgzTelz2h3uISsyk6eRFQU9Hrs2YMXXqBO4zj4NSGrGr0ew4ahtBTvvssNtC4uIauaRYtw6BAaN8a8edRRHA2XkFVBRgZmzQKAFSvg41PRnkYjpk7FxYvS5HIMXEJWGaMRMTEoLsawYXfmZFdg2TIsWICuXWE0ShLOEfAbM6wyX3yBsWMRGIi0NPj5VbTnxYsIC0NBATZtQu/eUuWzezwSsgpduHDntsClSytpIIARI1BQgDff5AZWC5eQPZzJhBEjUFiIt95Cr16V7Pz119ixA/7+iIuTJJzj4MtR9nBffYXhwxEQgLQ01KtX0Z5XryI0FPn5WL8e/ftLlc9B8EjIHuLKFUyZAgBffllJAwGMGoX8fHTvzg20AJeQPYTYqx490K9fJXt+9x1++gkqFZYvlySZo+HLUfYg69Zh4ECoVEhLQ8OGFe2Zl4fQUNy4gYQEDB0qVT6HwiMhu09eHiZMAIDFiytpIIDRo3HjBjp
3xpAhEkRzSDwSsvv064cNG/Dii9i5E4JQ0Z5bt6JHD3h54cQJPPaYVPkcDZeQ3e2XX9CzJ7y8cPJkJStTaDQIDUVODuLjMWqUVPkcEF+OsnLUaowcCQALF1a+Nsz48cjJQdu2d36EWYpHQlZOdDQSE9GhA/bsqeRCdPdudOkCuRzHjiEkRKp8jolHQnaHfvt2JCbC0xOrVlXSwKIixMbCZMKcOdzAmuORkAGAVqt9PipqZVDQM6+9Jhs/vpK9R43C0qV46ikcPAhXXqaopngkZAAwefLkE2fPji0pMY0ZU8muyclYvhxyOdau5QZaBZeQYe/evStXrnR3d09ISHAp/zCJ+xUXY/BgGI2YNg1hYVIFdHBcQmd3+/bt2NhYk8k0ffr00NDQSvaePh1nzyI8HFOnSpLOKfBrQmc3YcKEzz//PCIi4tChQ25ubhXtevAg2rWDIOCvvxAVJVVAx8cldGoHDhxo3769IAj79++PjIysaNfSUkRGIjUVU6fi44+lCugU+HLUeel0umHDhhkMhilTplTSQCA5Pt50/jyaN8eMGdLEcx5cQuc1e/bstLS05s2bf/jhhxXvefz48Y5Tp3by9y9evRoeHtLEcx5cQid1/PjxhQsXymSyVatWeVTYK71eP2zYsLKysla9eyvatpUsofPgEjojc6/GjRv33HPPVbzzJ598cuTIkSZNmsydO1eaeM6G35hxRnPnzp0+fXrTpk1PnDjhXeGTdNPT01u1aqXT6bZv3961a1fJEjoVHgmdzpkzZz766CNBEFauXFlxA41GY0xMTElJSWxsLDfQdriEzsXcqxEjRnTp0qXinePi4pKSkoKCghYsWCBNPOfEl6POZdGiRe+//37Dhg1TU1N973nQ590uXLgQFhZWWFi4efPm1157TbKETohHQidy/vz5GTNmAFi6dGnFDTSZTMOHDy8sLHz77be5gbbGJXQWJpMpNja2qKgoOjq6Z8+eFe+8cuXKnTt3BgQEfPbZZ9LEc2Z8Oeosli5dOmrUqAYNGqSlpdWpU6eCPa9cuRIaGqpWqzds2NC3b1/JEjotHgmdwqVLl6ZOnQogPj6+4gYCGDlypFqt7tmzJzdQGlxCpzBixIiCgoJ+/fr16dOn4j0TExO3bNlSp06dFStWSJON8eWo41uzZs2QIUP8/f3T0tLq169fwZ65ubmhoaG5ubmJiYkDBw6ULKGT4+UJaiQlJeXYsWM+Pj6+vr4qlUqpVCqVSh8fn4o/BJfStWvXJk6cCCAuLq7iBgIYOXJkbm5ut27duIFS4hJaLjo6Ojs7e/fu3Q/8rx4eHn5+fn5+fgqFwvy12f0bFQqFr6+vUPEyZ9UnCMILL7xQVlY2YMCAivfcsGHDxo0blUolX4hKjC9HLbRp06Y+ffooFIoePXro9XqtVpufn6/VagsKCrRa7e3bty04ppubm1KpVKlU5kFVHFdVKpWvr6/5W/ELPz8/8Vt3d/dKj1xcXKxQKCrY4ebNm6GhodevX1+xYsXw4cMtCM8sxiW0hFqtbtmyZU5OzrJly9555537dxBrqVarNRqNWEuRRqPRaDTau6nVavELnU5nQRh3d3exluIlsbml9/dZFBgYeH8h33777W+//bZjx467d++2+mjMKsYltER0dHRiYmKHDh327Nlj3T/Z4uLi/HJKSkoq3nLr1q3qVnfKlCm9evVq06aNecu2bdu6d+/u6el5/PjxZs2aWfHXYVXBJay2//znP6+88krt+ZMtLi6+f1AVFRQUiBfJ5m9v3LiRm5vbrFmz1NRUcXVDrVbbsmXLy5cvL168eOzYsdS/jVMyserQaDSPPvoogEWLFlFnsYRerw8ODgbwzTffiFtiY2MBtGnTRq/X02ZzWlzC6hkxYgSA1q1b2++f7OrVqwE0a9asrKwsIyNDJpO5u7ufOnWKOpfz4svRati7d2/nzp3lcvmRI0cqXye3tjIYDKGhoenp6WvXrh00aNAff/yRkZERExN
DncuJUf8rYDeKiorEV4Bz586lzlJTa9euBfD444+XlZVRZ2EmnjtaVdOmTcvMzIyIiJg8eTJ1lpoaMGBAixYtsrKyvvnmG+osjCdwV82BAweWLFni6uqakJBQyVrx9sDFxUVca3TWrFmlpaXUcRyNyWSKiYn5/fffq/EDrGIlJSXiK8Bp06ZRZ7EavV4fEhICYNWqVdRZHM2qVasA1KtXr7CwsCr7cwkr969//QtAixYtiouLqbNY0/r16wE0btxYp9NRZ3EcOTk5fn5+AL7//vsq/giXsBIpKSlubm4ymSwpKYk6i5UZDIawsDAAK1asoM7iOHr16gXg1VdfrfqPcAkrUlZW9vTTTwOYMGECdRab+Pe//w2gUaNGJSUl1Fkcwbp16wCoVKrs7Oyq/xSXsCKzZ88G0LRp04KCAuosNmE0GsPDwwEsXbqUOovdy83NrVevHoDVq1dX6we5hA91+vRpDw8PQRB27txJncWGNmzYACAoKOj27dvUWexbv379ALz44otGo7FaP8glfDCDwdCuXTsAI0eOpM5iW0ajMSIiAkB8fDx1Fju2ZcsWAF5eXllZWdX9WS7hgy1cuBBAw4YN8/PzqbPY3KZNmwAEBgbyYGgZtVrdsGFDAF9++aUFP84lfIBz5855eXkB2LZtG3UWKRiNxqioKABxcXHUWexSdHQ0gHbt2hkMBgt+nEt4L6PR+OKLLwIYPHgwdRbpbN68GUCDBg2Kioqos9iZ33//XRAEhUKRkZFh2RG4hPf68ssvxT/HmzdvUmeR1DPPPAPg888/pw5iTzQaTaNGjQB8+umnFh+ES3iXixcv+vj4ANi4cSN1Fqn98ssvPBhWl7jC0LPPPluT+0u5hHf5xz/+AaBfv37UQWi0bt26hv+oO5W9e/cKgiCXy0+ePFmT43AJ/+frr78G4O/vf+3aNeosNLZt2wYgICDAUScnWJH5/tI5c+bU8FBcwjuuXLkizrv99ttvqbNQEldh++STT6iD1Hbjx48HEB4eXlpaWsNDcQnv6N27N4Du3btTByG2fft2cTDUarXUWWqv/fv3u7i4uLq6Hj58uOZH4xKaTCbTd999J867vXz5MnUWes8//zyAjz/+mDpILWW+v/SDDz6wygG5hKa8vDxx3i3f3irauXOn+NqYB8MHmjZtGoDmzZtb6/5SLqGpf//+ADp37lzdebcO7IUXXgDw0UcfUQepdcz3l+7bt89ax3T2Eoofjnl6emZmZlJnqUV27doFwNfX1xmmzlZdWVlZZGQkgPHjx1vxsE5dQrVa/cgjjwD44osvqLPUOh07dgQwe/Zs6iC1yJw5cwA0adLEuh/hOHUJhw4dCqBt27aWzbt1bPv27RPfrLp16xZ1llrBfH/pb7/9Zt0jO28Jd+3aJQiCu7t7WloadZZaqlOnTgBmzJhBHYSewWBo3749gHfeecfqB3fSEhYWFj722GMAFixYQJ2l9kpKShIHQ2eby36/RYsWiesP2OJFspOWcNSoUQCeeuqpmk93cGxdunQBMH36dOoglM6dO+ft7Q3g559/tsXxnbGEycnJMpnM1dX16NGj1Flqu+TkZABKpdJpB0Oj0Sj+SzRo0CAbncLpSlhSUiKuPD1z5kzqLPaha9euDrb6eLUsXboUQN26dW/cuGGjUzhdCd9//30AYWFhvOx0FR06dEgQBG9vb9v9FdZa2dnZvr6+AH788UfbncW5SnjgwAEXFxcXF5eDBw9SZ7En3bp1AzB16lTqIFIT7y99/fXXbXoWJyqhTqdr2bIlgClTplBnsTOHDx8WBMHLy+v69evUWaQjPtJYgvtLnaiE//d//wcgODiYF/azwKuvvgpg8uTJ1EEkcvXq1Tp16gBYt26drc/lLCU8fvy4XC6XyWR//PEHdRa7dOTIEXEwdJJlB/75z38CeOWVVyQ4l1OUsKysTFxXc8yYMdRZ7FjPnj0BTJw4kTqIzX3//ffiBzOXLl2S4HROUcJ58+aJD+L
jpVNq4ujRo4IgeHh45OTkUGexoby8vPr16wNYuXKlNGd0/BKeOXNGnHe7Y8cO6ix2T1wEZNy4cdRBbOitt94C0KlTJ8nuLxVMJlNVnqqdmpq6YcMGpVLp4+Pj5+enLMfHx0f8LKUWMhqNHTp0SEpKiomJ+eqrr6jj2L3U1NSIiAi5XJ6ZmSk+fcHB/Prrr6+++qqnp+eJEycef/xxaU5a1RImJiaKC+4/jIeHh5+fn0KhEL8wu3+LeWP9+vVdXFys9Is8WFxc3Lhx4wIDA9PS0sTF1FgN9e3bd+PGjWPGjImLi6POYmUajaZly5bZ2dlxcXFjxoyR7LxVLWFKSsrmzZu1Wm1BQYFGo1Gr1dq/FRQUaLVaS84tCL6+viqVyjyiKpVKlUrl6+tbfotSqRTHXvFbT0/PKh7/woULYWFhhYWFP/30k/gQY1ZzaWlp4eHhcrn87Nmz4i3RDiMmJiYhIaFNmzZJSUm2Hh7Kq2oJK5Wfn39PLdVqtUajMX+r1WrN7RW3FBUVWXAiV1dXpVIpttfcUnN7zVt8fHxmzJiRlJQ0YMAA8SHGzFreeOONH374YdSoUfHx8dRZrGb37t1dunSRy+VHjx598sknpTy11UpomeLi4vy/lZSUlP/2YVvy8/OrePB69eqVlJRkZWUFBATY9LdwNqdOnQoLC3N1dc3IyGjcuDF1HCsoKiqKiIjIysqaP3/+lClTJD47cQktUFpaKo6r+fn55jFWVP4iOTc39+DBg3K5/Ny5c4GBgdSpHU3//v2///77kSNHijcZ2Lv33nsvPj6+VatWBw8edHNzk/r00rwJS0Kc9DB27FjqIA4oPT3d1dXVzc3t3Llz1Flqivz+UkcuYWpqqkwm8/Dw4HW1bWHAgAEAhg8fTh2kRsz3lxIupePIJTSZTK+//jqA0aNHUwdxQBkZGQ4wGE6ePBlASEhISUkJVQYHL2FaWppMJnN3d+fB0BYGDhwIICYmhjqIhY4ePerm5kZ+f6nLzJkzpX4ZKqG6deuePn36+PHjOp2ue/fu1HEcTVhY2LJly1JSUgRBuHz58oULF65evXrr1q3i4mKj0ejq6irlp23Vpdfre/TokZOTM2nSpIonotia/b07Wl0ZGRlPPvmki4tLenp6kyZNqOM4mhYtWhQVFWVnZz9sh6rPoBK/DggIkMvlEiSfOXPmrFmzgoODU1JSFAqFBGd8GMcvIYABAwasX79+xIgRy5cvp87iUDZv3ty7d2+FQtGnTx+j0Xj/x0V6vd6Cw3p5eZWflmyeUyVuKT+nyjyPqrpzEk+ePBkVFaXX6/fs2SM+/YaQU5Tw7NmzTz75pCAI6enpTZs2pY7jIDQaTWhoaE5OTnx8vLiO6/1u375tnkRVfk6VuMU8p8q8RZxTZdnf5D21rGBGpJeX16BBg44dOzZ69OglS5bU7H+DFThFCQEMHDhw3bp1sbGxK1eupM7iIIYMGbJmzZp27drt27dPJpNZ8cjFxcX3T5aqeE5Vbm5utUZdpVKpUCgyMzPFVX1pOUsJMzMzQ0JCBEE4c+aMuAA+q4ldu3a99NJLcrn82LFj4uds5MpPSzbPqbpnizj25ubmXrp0qUGDBufPn5fm9WclCN+ZlZj4DtjQoUOpg9g985M8Fi5cSJ3FEgaDITw8HMDy5cups5hMDv85YXmZmZnim+bp6enUWezbyJEjATz77LN6vZ46i4V++OEHAI8++ijhZ/RmTlRC098PJBw8eDB1EDu2d+9eQRDkcvnJkyeps1jOaDRGREQAWLp0KXUWJyvhhQsX5HK5i4vLmTNnqLPYpaKioieeeAIO8QTfH3/8EUBQUBD5OrTOVUKTyRQTEwNg4MCB1EHs0oQJEwCEh4c7wCPljEZjq1atACxZsoQ2idOV8OLFi+JgePr0aeosdmb//v0uLi6urq6HDh2izmIdP/30E4DAwEDawdCaH+/YhUaNGg0ZMsRgMMyZM4c6iz3R6XTDhg0zGAyTJk0SV1J
2AK+99tozzzxz9epV4pX4CP8BoHLp0iV3d3eZTGbXby1IbNq0aQCaN29eXFxMncWafv75ZwANGjQoKiqiyuCMJTT9/Sb7m2++SR3EPqSkpLi5uclksn379lFnsb5nn30WwGeffUYVwElLmJOTo1AoZDLZ8ePHqbPUdmVlZZGRkXDchbe3bt0KoG7dulRPSXDSEppMptGjR8P2z390AHPnzgXQpEkTB36SR+vWrUE3Ach5S3jlyhWFQiEIQkpKCnWW2sv8JI/ffvuNOosNbdu2DUBAQIBWq5X+7E737qhZYGBgbGysyWTit0kfxmg0xsTElJSUDB8+/KWXXqKOY0PdunV77rnn8vLyli1bRnB66Xtfe1y9etXT01MQhGPHjlFnqY0WLVoEICgoKD8/nzqLze3YsQOAv7+/9IOhU5fQZDKNHz8eQK9evaiD1Drnzp0T77XbvHkzdRaJiLfYz5s3T+LzOnsJr127Jg6GtOtt1TZGo7FLly5wsvl9v//+uzgYajQaKc/r7CU0mUwTJ04E0LNnT+ogtYj40qhu3bo3btygziKpDh06AJg7d66UJ+USmnJzc318fAAcOHCAOkutkJ2dLT719ccff6TOIrXdu3cD8PX1lfJlsPO+O2oWEBAgTqCZPXs2dZZa4d1331Wr1X379u3Tpw91Fql16tSpY8eOarX6iy++kO6sktW9NsvLyxMHw/3791NnIbZmzRoA/v7+165do85CIykpCYBKpbp165Y0Z+SREAD8/f3FCTSzZs2izkLp2rVr4h2Dixcvrl+/PnUcGu3bt+/cubNGo5HueeDSdL32y8vLUyqVABxyjnIViQ+Te+WVV6iDEPvzzz8BqFSqmzdvSnA6Hgnv8Pf3f++99wA47QSaH374YdOmTUqlktcpb9eu3UsvvaTRaD7//HMpzidB0e1Ffn6++K7gf//7X+osUsvLyxOvP1esWEGdpVZITk4G4O3tLcGHNDwS/o+vr++YMWPglG+Tjh079vr16506dYqNjaXOUiu0bdv25ZdfLiwsXLx4sc1PZuuW2xe1Wi0+WmTv3r3UWaTz66+/AvD09MzMzKTOUoscOnRIEARvb+/r16/b9EQ8Et5FpVKNHTsWzvQ2qVarfeeddwDMmzfv8ccfp45Ti0RFRXXr1q2wsPCzzz6z7ZlsWnF7pFar69SpA2DPnj3UWaQgrgHZpk0b+11O23YOHz4sCIKXl5dNB0MeCe+lUqnEWys+/PBD6iw2t2fPnoSEBHd394SEhNr8VF0qkZGR3bt3Lyoq+vTTT214Gtv1234VFBTUrVsXwK5du6iz2FBRUZF4/fnxxx9TZ6m9jhw5IgiCp6en7aYQ8Uj4AN7e3uPGjQMwffp06iw2NHXq1KysrFatWon3kbAHevrpp3v27Hn79u1PPvnEVuewUbntXUFBQb169QA46toqycnJ4nLaR44coc5S2504cUImk3l4eOTk5Nji+DwSPpi3t7c4i3LGjBnUWaxPp9PFxMQYDIYPPvjg6aefpo5T24WFhfXq1aukpGThwoU2OYEtmu0YCgsLxcFw+/bt1FmsbMqUKQBCQkIcbDlt20lNTRUHw+zsbKsfnEtYEfFlQFRUlNFopM5iNUePHhWX0/7zzz+ps9iTvn37AnjvvfesfmQuYUUKCwvFGZW//vordRbrKCsre+qppwBMmjSJOoudEQdDd3f3y5cvW/fIXMJKiMv+RUZGOsZgKL7EfeKJJ8ifjGmP3njjDQCjRo2y7mG5hJUoLi4OCgoC8Msvv1BnqalTp06Jj6NywttErOLUqVMymUwul58/f96Kh+V3Ryvh4eExadIkADNmzDCZTNRxLGcwGKKjo3U63bvvvisusMmqKyQk5I033igtLV2wYIE1j2vFQjuq4uLihg0bws6XwZ0/fz6Axo0bkzxuwWFkZGS4urq6ubmdO3fOWsfkElaJuPZWWFiYwWCgzmKJ9PR0hUIBR/y4RXpvv/02APFBJlYhmOz
5EksyOp2uWbNm2dnZvXv3joqKUiqVPj4+SqVSqVT6+vqqVCrxW/EPvbYxGo0dO3bct2/f0KFDExISqOPYvczMzJCQEEEQzpw589hjj9X8gFzCqlqzZs3WrVs3btxY8W4eHh5+f1MoFOW/feCWOnXqeHh42DT5kiVLxowZExgYmJaWJt6yzGooOjo6MTFx2LBhq1atqvnRuITVcPr06S1btqjVaq1WW1BQoP1bfn6++K1Op7PgsB4eHuJAKg6q5YdZlUrl6+tbfotSqfTz8/Px8XF1da3KwS9evBgWFlZQULBp06bevXtbEI/dLysrq0WLFiaT6dSpU8HBwTU8GpfQmnQ6ndjP/Px8bTkFBQVqtVqj0Zi/1Wq1Go1G7LNer7fgXJ6enuZaikU18/HxMV8kf/TRR8nJyf3791+/fr3Vf19nNnTo0NWrVw8ePHj16tU1PBSXsFYoLi7Ov1tJSck9G8tvuXnzZmlpaVWO/Mgjj+h0urS0NPEOSWYtFy9eDA4ONhgMaWlpzZs3r8mhuIT2qrCw0DyumgfV8tfJ4mjctWvX119//dFHH6XO64BiYmISEhIGDRq0du3amhyHS8iYhcyDYWpqaosWLSw+Ds+YYcxCjRs3Hjx4sMFgmDt3bk2OwyMhY5a7dOlScHCwXq8/efJkSEiIZQfhkZAxyzVq1Gjo0KEGg6EmjzDhkZCxGrly5UqzZs10Ol1KSkpYWJgFR6jSB76MsYcJCgoaO3asTCYTZ/lbgEdCxojxa0LGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWJcQsaIcQkZI8YlZIwYl5AxYlxCxohxCRkjxiVkjBiXkDFiXELGiHEJGSPGJWSMGJeQMWL/D6bWq3986bHQAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3dd1hUV/oH8O/Qi4BSpIgSFVSwxhgL2KNsVDaWrL9gQaxYUFbdGDSJ4ia2GGOwIXYxriYas2rMJtFYEjVKLEQFXEIACzgUgaGMDDAz5/fHZUeaOsLMnBnm/Tw+PszcO/e+8NzvnFvOvUfEGAMhhB8T3gUQYuwohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohAaushKLF6NlS9jY4M03kZ7OuyDy0iiEBu7997FvH2Jjce4cpFK8+SYqKnjXRF6OiDHGuwbSUOXlcHbG+vWYOxcAHj1C69Y4dgxjxvCujLwEagkNWWIiSksxaFDVSw8P+PoiPp5rTeSlUQgNWU4OALRs+fQdNzfk5vIqhzQMhdCQWVkBqHEQWFEBkYhXOaRhKISGzMMDALKzn76TkwM3txrz0DG/3qMQGrJ27WBvj9Onq15mZuKPPxAQ8HSGo0fRowcKCrhUR9REITRkFhaYPx9r1uDrrxEfj5AQdO2KwMCqqUolNm3C7dtYuJBrleQF6BIFN4wxmUxmbW3dqKXI5Vi6FHFxkEoxZAi2b0ebNk+nZmSgWzeUluLrr/H2240smGgJhZCbBw8eeHl5+fr6Jicna3E1W7diwQK4uCAxscZ5VKI3aHeUm6ysLAB2dnYaW2JiYj1vhocjMBB5eZg9W2MrIhpFIeRGCKGHcIaz8ebNQ7duOHOm9vsiEXbuhL09jh/HV19pZl1EoyiE3Dx69AhAq1atNLM4Ly8whmnTUFhYz6QNGwAgPLzG9QyiHyiE3Gi4JVyyBP37IysL//hHPVNnzcKIEcjPR1iYZlZHNIdCyI2GW0ITE+zeDWtr7NuHf/+7nhl270aLFvj2Wxw4oJk1Eg2hEHKj4ZYQQMeOWLMGAMLD67lA7+GB6GgAiIjAw4caWylpNAohN0IINdYSCiIiMGgQxGIsWFDP1ClT8PbbKCrC9OnUnU1/0HVCbuzs7EpLSyUSiYODgyaXm5GB7t1RUoKjR/G3v9WempeHLl2Qm4udOzFrlibXSxqKQshHcXGxg4ODjY2NVCrV/NK3b8e8eXB2RmIiXF1rTz1+HGPHwtYWt26hfXvNr528JNod5UMr+6Iqc+bgzTfx+HH9F+jHjMH//R+kUsyaRTul+oBCyIeGT43WIhJVnQs9cQKHD9czw7ZtcHUtKig4umuXVgogL4NCyId2W0IArVpVXaCfNw+ZmbWnOjv/sXev85070xYv/vPPP7VVA1EPhZAPzV+fqGv6dIwaBYmk6jFQNXUYOXLCpElSqXTKlCkKhUKLZZAXoRDyIeyOajeEAHbtgqMjTp3C/v11J27ZsqV169ZXrlyJFq4fEk4ohHxofXdU4O6OzZsBYOFCVmen1MHBYc+ePSKR6IMPPkhKStJuJeTZKIR8aPfETHWTJrF33rnao8fomTPrXo4aPnz4jBkzysvLp0yZUllZqfViSL0Y4cHT0xNARkaGDtaVk5Pj4uICYPv27XWnlpaWtm/fHsCqVat0UAypiy7Wc6BUKi0tLRUKRVlZmaWlpQ7WeOL
EiTFjxtja2v7+++/e3t61pp47d27YsGFmZmZXrlx57bXXdFAPqY52RznIzc2Vy+XOzs66SSCA0aNHBwcHS6XSqVOnKpXKWlOHDh06b968ysrKGTNmVNBQFjpHIeRAR2dlatq+fXurVq0uX768ZcuWulPXr1/v4+Nz69atVatW6bIqAgohF7q4SFhH8+bNY2NjASxdurTuo6VsbGzi4uJMTU3Xrl177do1XRZGKIQc6O7UaE1BQUFTp06VyWQzZsyoe4G+X79+CxculMvloaGhMplMZ1W5uaF796cvnZ1x6VLV+7t3V70pkUAkqnq/6aEQcsClJRRER0e3adPm6tWrn332Wd2pq1ev7ty58927d6OionRZ1d27+OUXXa5Qv1AIOeDVEgJwcHDYu3evSCRasWLFnTt3ak21tLQ8cOCAubn5hg0bLl68qNVKCgrw558QbuQKDER9B6rGwox3AcaIY0sI4I033ggLC9uxY0doaGh8fLy5uXn1qT179lyyZMmaNWvWr18/YMCAl114WRkKC2v8E4vx6FHtN/PyIJcDwNmzADBlCqZNQ2YmPD018zsaFgohBxxbQsFnn3129uzZhISEtWvXrlixotbUqKgoe3v7iIgI4aVCoSh4EU/PnefPd6/7sMXncHSEo2PVzy1bYvRoxMai1qnZ2bMxZ05Df0nDQSHkgMsliupsbW33798/cODAjz/+eOTIkb169RLev3bt2v79+4Vcff311/n5+QUFBUVFRS9coI1NkZBAKyu0aFHjn4cH3N1rv+nigpoNMObPx9ixWL68xptRURg3DgBKSuDvr5FfXR9RCHVNJpMVFhZaWFg4OztzLCMgIGD+/PmbN28ODQ29ceOGlZVVdHT0d99999NPP9Wa08TExMnJyfG5nJzat2jxtGVrAH9/eHriyJEaY5x6eKBLFwCQSBq+ZP1HIdS1rKwsxpi7u7uI95C6a9eu/eGHH5KTkz/++OPly5evX79eLBbPnz/f39//f9FycnR0bN68uW7qCQ/H1q1Vow8bFQqhrnE/IFSxsbHZt2/fwIED8/PzY2NjxWJx79696+1PoxsTJmDJEjx5wmv93FAIdY37AWF1/v7+ycnJXl5ewo0Uy2sdk+mWtTVmzMCnn0Imw9KlKC3lWItO0XVCXeN7faKuDh067N27Nysrq3v37qNGjdJ9AdnZGDy46uf168EY7OywYQNkMnTtWvV+8+ZgDP376746XdDHEGZnZ69YseKTTz6JjIwsLi7mXY6G6ejBFmqrrKz89NNPASxfvpz7YaqgTx+8+y4UCoSGoqyMdzU6wPuGxhpu3749bdo04QYfYRzptm3bXrlyhXddmhQcHAzg4MGDvAupsmfPHgC+vr4KhYJ3LU/JZKxrVwawRYt4l6J9+hLCixcvBgUFCd/EJiYmQUFBBw8e9Pf3B2BmZhYVFSWXy3nXqBlCN5Tz58/zLoQxxuRyeYcOHQD861//4l1LbTdvMnNzZmLCLlzgXYqWcQ5heXl5XFxc1//t+zdr1iwsLOzu3bvC1MrKyqioKBMTEwBDhgzJzMzkW61GtGvXDkBKSgrvQhhj7ODBgwDat29fWVnJu5Z6REUxgLVty4qLeZeiTdxCmJubu27dOtVJQjc3t6ioqPz8/Lpznj17VjiCcnZ2PnHihO5L1SxhN7ukpIR3IUypVHbp0gXA3r17eddSv8pK1qsXA9i8ebxL0SYOIUxNTY2IiLCxsRHi16NHjx07dpSVlT3nI7m5uUFBQcL8ISEhUqlUZ9VqVn5+PgAHBwfehTDG2NGjRwG0adOmvLycdy3PlJTErKyYSMR++IF3KVqj0xBevHhx/PjxpqamwoHfsGHDTp48qVQq1fmsUqmMjo4Wztn4+fndunVL29Vqw+3bt4X6eRfCGGPCM53qfQSbXlmzhgGsVStWUMC7FO3QRQjLy8uPHDnSu3dvoSmztLQMCQlJSkpqwKISExOFPSihr6PGS9U24Rhs4MCBvAt
hJ0+eBODu7v78fRB9oFCw/v0ZwKZP512Kdmg3hEVFRdHR0a1btxbi17Jly6ioqLy8vMYs88mTJ6q7bMaMGVPvYaQeSkhICAsLs7S09PLysrKy2rFjB996hDPPhvJF9t//MmtrBrBTp3iXogXaCmFaWlpERIStra2Qlg4dOkRHRz958kRTyz927FiLFi0AtG7d+ueff9bUYjVOoVB8++23Q4YMEf4OZmZmPj4+ws+TJ08uKiriUtWPP/4IwNXV1YCOrj/7jAHM3Z0ZyLfuS9B8CK9fvx4SEiIc+AEICAhQ/8Dvpdy/f79///4ATE1NIyMjKyoqNL6KxpDJZHFxcX5+fsLfwc7OLiIi4t69e4yxuLi4Zs2aAfDy8rp8+bLuaxOuVX7yySe6X3WDKRRs0CAGsEmTeJeiaRoLoUKhOHnyZL9+/YRtzsLCIiQk5M6dO5pafr2EC4lC4Pv06ZOWlqbV1akpJycnKipKdbugu7t7VFRUQc2zCunp6cLfSuiKoMveKhcuXADg6OhYbGhX39LTmZ0dA9jRo7xL0SgNhLC4uFh4hpewzbm4uERGRgp3zenG+fPnhaEd7O3tDx06pLP11pWSkhIRESFcCQTQs2fPuLi4Z10H59UVYdiwYQA++ugj3axOs7ZtY5aWslGjonJzc3nXojGNCmFGRkZkZKTqpk9vb+/o6GguhxkSieSdd95RXUgsLS3VcQF1u92dOXNGnQ/+9NNP7u7uQleEkydParvOq1evCt9WBYZ5vl+pZMHBYQD+9re/8a5FYxoYQuHAz8zMTNsHfi8lLi5O6APQqVOnhIQEHaxR6HYnXDWp2+1OTbm5ucI9RCKRKCwsTKvfYsKKPvzwQ+2tQtsyMzOF732+ez0a9HIhFA78AgICVAd+48ePj4+P11JxDZCcnNy9e3fhauS6deu0d6ylfrc7dVTvitC5c+fbt29rtlpBQkKCSCSytbU19H253bt3A2jevPnDhw9516IB6oawpKRkx44dHTt2FLY5BweHiIgI/fwTlJWVRURECHuGgYGBYrFYs8uv2+0uLi5OI+dmb9y4IdzTYG1trY0reOPGjQOwZMkSjS9Z90aOHAkgKCiIdyEaoG4Iw8PDhW3Ox8cnJiZG/68vHT9+3MnJSegh8P3332tkmfV2u9PIklWqd0UYN26cBrsiJCUlmZiYWFlZ6fKcmfZkZWU5OjoC2LdvH+9aGkvdECYlJQ0ePPjEiRN6devn82VnZwcGBgrHWhEREQ3upqzBbndqOnr0qNAVoU2bNr/88otGljlhwgQAERERGlmaPvjiiy+EnbL79+/zrqVR9OWmXi0RjrWEJ7336tUrNTX1pT4ukUiqd7tzdXVtfLc7Nd27d0849jY1NW38Pc2pqalmZmbm5uaGvr3W8vbbbwN44403tHpSsLIyXyZLLS2Nl0j+k59/MCdn86NHKx88iEhPn5yW9k7jl28Uw2X/9ttvEydOTEtLs7Ozi4mJmTx58gs/kp6evmnTpj179kilUgDdunULDw8PCQlRXQPUAblcvmrVqlWrVikUir59+x46dKht27YNW9TUqVPj4uJmz54tDFHYZOTl5XXt2jUnJyc2Nnb27Nnqf1CpVNZ6kv+QISUiUY5CUSCXFwj/q34AnpkRkci0Z8+KioqszMxFJSUXlMoyK6tOHh7/dHAIUr8YowghgOLi4rlz5x46dAjA+PHjd+7c+axn2l66dGnz5s3ffPONMIJfQEBAZGSk6hqg7p0/fz4kJCQrK8vBwWHHjh2qy6Hqe/DggTBOfUpKSoNjrLdOnDgxZswYW1vb33//3dvbWyKR5OTkPGvMDOHB/gUFBYV1xs24caMdY+n1rsLMzNHU1FH1f62X9vZvpqYOMzGx9fTcaGpqn5e3PTt7XZcuKRYWr6j7OzS+MTUgqk6br7zyyq+//lp9knD1pW/fvsKfRTjw03a3OzXl5eW99dZbQmEN6IoQFhYGYOrUqVoqjzvh2VkBAQEKhUL
YQX0hkUjk5OTk4+PTp0+fESNGTJo0KTn5w6ysqJycTfn5ByWS/5SWXpXJ/qisfMzYi3d0b9ywlEie3t8hFq+Wyf5Qv35jaQlVUlJSJkyYkJCQYGZm9sEHH6xYsUIqle7du3fjxo0PHjwA4OLiMn369IiICP15KqHgwIEDc+fOffLkia+v7+HDh7tXH9722TIzM729veVyeVJSkuoKUxOTn5/fpUuX7OzsTZs23b9//7vvvnv+yBkCDRaQlORnZeXr5bXLzKxBi23AF4+hk8lkCxcuFHYvfXx87OzshD+Fn5/frl279Pkm16SkpG7duuF/9zSrczZiwYIFACZMmKCD8jj69ttvAdjY2HB5gpZUei0pqfONGxYpKYOzszfK5YUv9XFjDKHgzJkz7u7uwk09etLtTh1CVwThW2P06NGPHz9+zszZ2dnW1tYikUhLXXD0SmhoKIB+/frxejqmVHpTLF59547PrVse5eUZ6n/QeEPIGBOLxdnZ2Yb4uJpvvvlG2KFydXX94dmPQHr33XcBjBs3Tpe18VJYWOju7m5mZqapK6sNo1A8SUzs+PDhu+p/xKhDaNAePHgwcOBA/K8rQt1+c48fPxb2tK9du8alQt0bOnSovb39c76VNEIul8hk6VLptaKiH/PzD+fl7UpNHaFUPm1+U1P/ev/+HPUXaHQnZpoShUKxYcOG5cuXV1ZW9u7d+9ChQ8LgSoIPP/xw9erVo0aNOnXqFMcidSY1NdXX19fMzCwtLa0Bg16VlkrMzfNV1wZrXSpUKArk8qqpjCmqf9DFZVZBwdfNmwe5uS01MbErLj7z4MHc9u2POziMUHPVFEKDFx8fP3HixPT0dHt7+5iYmEmTJgEoKip65ZVXJBLJpUuXVHe9NG2hoaHCCeSYmBjVm2VlZYV1iMXiR48eVX/n8ePHs2Z1mz79hjorMjW1/98FQydTU0cHhxHW1l2zst5/8uSaUllhZeXj6rrY0fHFHUJUaHxCg9enT5+bN2/OmTPnyy+/nDx58o8//hgTE7N582aJRDJs2DAjSWB6evqhQ4fMzc1nzZrVt29f1dV59dsYiURkadlOyNWzrssLP4hE9aTGx+eHBhdPLWHTceDAgXnz5kmlUh8fn7y8PIlEcuHChUGDBvGuSxdmzZq1e/fu6dOnb9myRfWMPwBWVlYt6vDw8HB3d6/+jouLi9DBmAsKYZOSnJw8YcKE27dvm5iYdOvWLSEhgXdFuvDw4UNvb2+FQpGcnNyhQ4f4+HjVFXk9GXHx+fRxkFDSYL6+vkOHDhV+FovFEomEbz268cknn1RUVAQHBwu3RPfp08fHx8fJyckgEggYZY+ZpkqpVP79738HYGFh4evrCyA0NJR3UVonFouFDgl60tG3ASiETYRSqRSefmBpaXn8+PG0tDShq/qxY8d4l6ZdixcvhoE/fI1C2BTI5XKh05aNjc3p06eFNzdt2gTAxcUlJyeHb3na8/jxY+G75vr167xraTgKocGTy+XCbcq2trZnz55Vva9QKAYPHgwgODiYY3latWzZMgB//etfeRfSKBRCw1ZeXj527FgADg4Ote6QZIxlZGQIPdeOHDnCpTytkkgkwp3ZXMbz0CAKoQGTyWTCzb4tWrR41tNfhedZODs7Z2dn67g8bVu5ciWAwMBA3oU0FoXQUEmlUmFUiZYtW/7+++/Pmk2pVI4YMQLAW2+9pcvytK2kpEQYcofvPRMaQSE0SKWlpcKYh66uri88NZ+ZmSk8QPHgwYO6KU8H1q5dC2DQoEG8C9EACqHhKSwsFJ6F07p16z/+UOtZJvv27UMTem68VMpGjPjS3NxGzVF39ByF0MAUFBQIjyH28vL6888/1f+gcP5m+PDhBvEAgefbuJEBbORIjQ38zBeF0JDk5OQIz5jp0KHDy7Zpjx49EsYF2LNnj5bK0w2ZjLVqxQD23Xe8S9EQCqHBEIvFnTt3BtCpU6eGjSchPHbV3t5eGLXbQG3dygDWowc
z/Ba9ivGGsMFDU3Bx//594QG+PXr0aMzAZuPHjwcwdOhQA90prahgr7zCAPbNN7xL0RwjDeH9+/c7duxoKGcLMzIy2rVrB+C11157/uPVXigvL8/V1RXAtm3bNFWeLu3axQDm58cMZ1yiFzPGEKalpXl5eQHo27ev/g8y9d///tfT0xNAQEBAUVFR4xd4/PhxoZepmmdW9Ydcznx8GMAOH+ZdikYZXQhV2/Trr7+uGv0vMTHxpc406kxycrLwIPCBAwcWFxdrarETJ04E4O/vz+sRnQ1z4AADmLc3M6iqX8y4QqjapgcMGFB9mx4zZgyAdu3ahYWFHTlypKSkhGORKgkJCUKnkMGDB2u2pMLCQuGbaOPGjRpcrFYpFKxLFwaw/ft5l6JpRhTC52zTs2bNqj44gY2NzciRI7ds2cKxebx+/bpwRWHkyJHaeDL/6dOnRSKRpaVlYmKixheuDV99xQDWpg3TxMDk+sVYQvjCbVqhUFy5cmXdunUBAQEmJk+f+sGlebx06ZK9vb1wk45MJtPSWqZPnw6gT58++r9TqlSynj0ZwHbs4F2KFhhFCNXZpi9dutS6devffvuNMZaXl3fkyJGwsDA3NzdVGq2trYcNG7Zu3TqtDpTNGPv555+F+4+Cg4MrKyu1t6KioqI2bdoAWLt2rfbWohHHjzOAeXoyrX0j8dT0Q6japt95551nbdPnzp0TnpM3d+7c6u8rFIrr16/rsnn8/vvvhcGAJ02apNUECs6ePSvslOr5iDGvv84Atnkz7zq0o4mHUJ1tWs3tXgfN46lTp6ysrACEhYXp7NrJ3LlzhT4AdUez0BPff88A5urKnjSRvqK1NeUQqrNNN2C711LzeOTIEeH5s/PmzdNld5bS0lKhL87KlSt1ttKX0r8/A9inn/KuQ2uabAjV2aYbv91rqnk8fPiwmZkZgPfee68BZTTSpUuXTE1NzczM9HD8pnPnGMCcnJjmrpLqnaYZQnW2ac1u941pHnfv3i18JDIysvGVNMzChQsB+Pn56dtAxbdvs6Agtno17zq0qQmGUJ1tWqvb/Us1j7GxsUIlH330kcYrUV9ZWZmfnx+AZcuWcSzjWQyzt7m6mloI1dmmdbbdv7B5XL16tUgkEolEn3/+uVYrUceVK1dMTU1NTEwuXrzIuxbj0qRCuGHDhhdu01u3buWy3efk5MTFxQUHBwt9BlTNo4mJSWxsrC4reY7IyEgAHTt2fMLvRKSrK+vW7elLJycmfCe4urJdu6reLCxkAGsy3xVNJ4Tr1q0DIBKJtmzZ0ph5tE0ul//666/Lly/v1atXTEzMF198wauSumQyWdeuXQEsWrSIVw2urszcnP38c9VLCqHBENJlamq6d+/exsxDbt68aW5ubmJicuHCBR2vWipl5eXM1ZWNGsVUQ0sYQwibwki9wuDspqam+/btCwkJqXee5cuXr1q16vnzEACvvvrqsmXLPvroo2nTpt26dUvobNQYZWUoLHzxv0ePIJFg/34AmDIF06YhMxOeno3/hQyAYYeQMbZo0aJNmzZZWFgcPnx43LhxDZuHCEpLSzdu3Dhv3rz//Oc/169fX7p06bZt2+qds6SkpKCa/Pz86i+dnEbGx4cVFKCgAOXl6q7d2hoyGQC0bInRoxEbi1WraswwezbmzGnM76enDDiEjLEFCxZs27bN0tLyq6++Gj16dMPmISqxsbFRUVGXL1+Oi4t77bXXtm/fLpVKnZycCuqoqKh4znIGDHBJSgoTfrayQosWL/7n4YEWLQAgKgoA5s/H2LFYvrzGYqOiIHyFlpTA318bfwA+DDWECoVi5syZ+/fvt7GxOX78+PDhwxs2D1GRyWQbN24EsHjxYj8/vxUrVnz55ZdxcXH1ztysWTPHapycnKq/dHHxbt4cjo5wdISlZUOK8feHpyeOHEH1wXY9PNClCwA0sQGIDTKECoVi6tSpBw8etLW1PXnypGqA6Jedh1S3c+dOsVjcs2fPwMB
AAO+9915oaGhMTIxjfSwsLLRdT3g4tm6FlZW216MH+J4XaoDy8nLhuK7ewcDUn4dUV1FRITz86sSJE3WnTp8+fdq0abp5hL6rKzt/njHGnjxhjo7Myqrpnx01sBCqMxhY9XmuXr2q4woNlDCCWvfu3et2ZE9LSzMzMzM3N8/IyNBBJaoQMsaWLHkaNgqhXpBKpcJx3XMGA1NnHlJLRUVF27Zt8YyxRGfOnAlgxowZui/MSBhMCNUZDOylBgwjKsKYTZ06dap7R+WDBw8sLCxMTU1TUlK41GYMDCOE6gwGVlhY2K9fv+fPQ+qSy+UdO3YEUG8HuvDwcACTJ0/WfWHGwwBCqM5gYA0eMIwIo8S0a9eu7qM9xGKx0MWcdiu0St9DmJ+fL3Qpfs5gYI0ZMMzIKZVK4c+7e/fuulMXL14MYPz48bovzKjoewjlcvnEiROfMxhY4wcMM2bHjh0TduDrjlH1+PHjZs2aiUSi69evc6nNeOh7CBljlZWVqkEjatHUgGFGq1evXnjGCE3Lli0D8NZbb+m+KmNjACF8FtWAYT179mzkgGHG6dSpUwDc3Nzq3sIrkUiaN28O4PLly1xqMypPn7lgWFJSUgYMGJCenv7666+fOXOm+u3qRE1r164F8N577wmPXa3u888/l0gkf/nLX/ybUkdpvcX7W6AhnjW4ElHfmTNnADg5OdV9DFxRUZEwPM4vv/zCpTZjY3gduBMTE4cMGfL48ePAwMB///vfNjY2vCsySP4xMUcHDhQHBTVr1qzWpG3bthUUFAwePHjAgAFcajM2IsYY7xpeTlFR0fDhw11cXI4dO2ZlFH3steDKFfj7w8EB9+/DwaH6lCdPnrRt2zY3N/enn3564403eBVoVAyvJXRwcDh9+rSNjY0O7qZpslauBIBFi2olEEBsbGxubm6fPn0ogTpjeC0haaybN9GrF+zscO9e1d3sKuXl/V999fLdu6dOnRo1ahSn+oyO4bWEpLFWrgRjCBMgf5AAAAPwSURBVA+vnUAAe/ZcfPjw10mT/EeO5FGZkaKW0MjcuoVXX4WNDdLT0bJljUmVlejQAffu4ZtvMHYsp/qMkaFeJyQNtGoVGMOcObUTCCAuDvfuwc8P9Dgs3aKW0JjcvYsuXWBhgbQ0eHjUmKRQwNcXqak4fBjBwZzqM1LUEhqT1auhVGLmzNoJBHDoEFJT4e2N8eN5VGbUqCU0Gmlp6NQJIhFSU+HlVWOSUonu3ZGYiP37ERrKqT7jRS2h0VizBnI5pk6tnUAAX3+NxES0aYOJE3lUZuyoJTQODx7AxwcKBe7ehY9PjUmMoVcv3LyJHTsQFsapPqNGLaFxWLcOFRWYNKl2AgGcPImbN+HpSTuivFBLaATEYrRvj/Jy3L6Nzp1rT+3dG9euYfNmLFjAozhCLaExWL8eZWUYP76eBP7wA65dg6srZs7kURkBKIRNX3Exdu2CSIT3369n6po1APDuu6hzXy/RGdodNQJ37uD0afzjH7XfP38eQ4fCyQkZGWj0YKCkwagDtxHo2hVdu9bzvjAG56JFlEC+qCU0Vlevol8/ODjg3j00b867GqNGx4TG6tYtWFkhIoISyB21hE1dZSUiI3HwIEpLMXAgYmLQrl3VJLEY1tYUQu6oJWzq3n8f+/YhNhbnzkEqxZtvQjXcvLs7JVAfUEvYpJWXw9kZ69dj7lwAePQIrVvj2DGMGcO7MvIUtYRNWmIiSksxaFDVSw8P+PoiPp5rTaQ2CmGTlpMDoMZN9G5uyM3lVQ6pF4WwSROey6o6CBR+Fol4lUPqRSFs0oQ76LOzn76TkwM3N17lkHpRCJu0du1gb4/Tp6teZmbijz8QEMC1JlIbdVtr0iwsMH8+1qyBtzdat8bSpejaFYGBvMsiNVAIm7p//hPl5Zg7F1IphgxBXBxMTXnXRGqg64SEcEbHhIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwo
hIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLC2f8Dga3RtNmn+pMAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAbxklEQVR4nO3de1BU5/kH8GeXq7iLXEQRRKsIGO8Ea1KxxbYkHRWNTcWJaWg1tttmJkMnTaZrJklJp/Ozm0w7ZZJpm41GpYkZXR3NYCbaMmq8JdFIYgyigHgB8YKCXOW6+/7+eO2KIHDYPWefA3w/42Qmybk8HvjunvOc95zXIIQgAOBj5C4AYLhDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCHoV2lpaVFR0VtvvVVbW8tdi4YMQgjuGgAeYN++fU8//XRoaOjly5eDgoKWLVuWlZW1ePFiPz8/7tJUhhCC7ggh3njjjVdeecXlci1YsCAkJGT//v1Op5OIJk6cuGbNmtWrV0+cOJG7TPUIAD1paWnJysoiIoPBYLVanU6nEKKqqspms8XHx8tfWqPRmJqaarfbm5ubuetVAUIIOlJRUZGSkkJEZrN59+7dPRc4efKkxWIZOXKkTGNYWJjFYiksLPR9qSpCCEEvDh06NGbMGCJKSEg4c+ZMH0vW1dXZ7fbU1FT3Cd20adNsNtutW7d8Vq2KEELQBbvdHhAQQESLFi26ffu2wrXOnDljtVpHjx4toxgUFJSZmVlQUOByuTStVl0IITBrbW1ds2ZNt4vAgW7B4XBkZGS4G6dxcXFWq/XixYsa1Ks+hBA4XblyZd68eURkMpl27Njh5dYqKyttNtukSZPc/Zv09HSHw9HW1qZKtRpBCIHN0aNHo6OjiSg+Pv706dNqbdbpdBYUFGRlZY0YMUKmMTw83GKxnDp1Sq1dqAshBB52uz0wMJCI0tLSqqurtdjF7du37XZ7cnKyu3+TkpJit9sbGhq02J3HEELwtY6OjuzsbJkKi8XS3t6u9R5PnjyZnZ0dGRkpdxocHKyr/g1CCD5VXV29cOFCmYTNmzf7ctctLS0OhyM9Pd1gMMg0JiYm5uTkXL582Zdl9IQQgu989dVXcrhZbGzsiRMnuMqoqKiw2WzugW9+fn6yf+OD7+QHQgjBR7Zu3So7JQs
WLLh27Rp3OXf7N5mZmfL+JBFFR0dnZ2er2CJSCCFkc/PmzcrKyqKiIu5CNNfZ2Wm1Wt0XgXq7YVBbW2u322fNmtWtf9PY2OibAhBCHrI7n5aWJn/kubm5NTU13EVp4tatW+np6XI4y4YNG7jL6YscmGo2m2UUQ0NDs7KyCgoKtN4vQsjgnXfekd35yZMnjxo1Sv7IR44cuXr16iNHjnBXp6ZvvvlG3jofN27cZ599xl2OInfu3OnWv5k6darNZrtx44ZGe0QIfapnd163LTvvbdu2TT7u8PDDDw/Gv865c+esVuvYsWPlzyUwMDAjI8PhcHR0dKi7I4TQd/ruzuutZecNl8uVk5MjP1Z+/vOf37lzh7siz3V2dnbr38TExFit1vPnz6u1C4TQRxR2590tO3m+ytiy81h9ff2yZcuIyN/f32azcZejmqqqqvXr1yckJMifi8Fg+NGPfrRt2zbvt4wQ+oIH3XnZsps9ezZXy84zJSUlDz30EBFFRkbu37+fuxxNyP6NyWSSY+4+//xzLzeIEGrL++58t5ad2WyWLTudDLnq6uOPP5Z9pjlz5gyWx4g8VldX99vf/paIFi1a5OWmEEINqdid79m/0bplNyAul8tmsxmNRiJ66qmnhsarX/p19uxZIpo0aZKX20EItaJRd76kpKRry87dv1G9ZadcY2Pjk08+KYux2Ww6/IrWSHt7e0BAgNFo9PJDByHUxPbt2zXtzvfWsisrK1N9X30rKyubPn06EUVERPznP//x8d7ZTZ06lYi8fFIRIVSZj7vzV69ezc3NnTFjRrf+TVNTk6b7lfbt2xceHk5ESUlJZ8+e9cEe9Wb58uVE5GWPdIAhzMkRRKLbQJ7ISJGUdO9fm5rEyy+L+HgRGCgiI8WKFaLPN2cNJYzd+a4tOyIaNWqUxWLRdPxNbm6ufKdLRkZGXV2ddjvSs5dffpmIXn/9dW82onYIOzrEggWCSDzxhMjNFS++KEaOFGaz+Pprb6ocFPTQna+vr8/Ly5PdIEm+C1DdR9dbWlp+8YtfkBevZhoytmzZQkSrVq3yZiNqh/Bf/xJE4vnn7/3fgwcFkViwwJsq9c/dnZ89e7Zn3fkPP/zwb3/7m1ppOXv2rNVqla/xpP+9CzA/P7+zs9PLLVdWVs6dO1feLNm1a5cq1Q5eX3zxBRElJyd7sxG1Q5iaKojElSv3LTB/viASFRVe1KlfanXn5X15dbudbW1t+fn5mZmZ/v7+Mo2xsbFWq/XChQuebfDw4cOyMTtlypTh8BBWv+rq6ogoJCTEm9MBj0L4wQfi4sV7f8LD74UwJETExnZf64UXBJHIz/e4St1Sqzvvcrk++uijpUuXutMyfvz4V155Ra0Bij3nckhPT8/LyxvQR4Zn7+cd8saNG0dEly5d8ngLHoWw5x8Zwjt3BJGYPbv7WuvXCyKxaZPgu5elhbKyMtmWVLE7f+3aNe26nU6n88iRIxaLJSQkRG5czuXw1Vdf9b1ia2vrs88+i4vAB/rhD39IRPv27fN4Cx6F8E9/Ert33/tjNt8Xwlmzuq/1f/8niMTf/y4iIoTFIobEI3Nad+c17XYOaC6HqqqqRx55hIhGjhzpcDhUKWAoee6554goNzfX4y2ofU1oNosxY7qv9bvfCaK7/5R/kpPF22+L2lpPy2bms+681t3ObnM59HwX4LFjx+TpVlxc3MmTJ1XZ6RCTm5tLRM8995zHW1A7hAsXCiLR7UrmkUeE0Siqq0VxsbBaRVTU3SgGBYnMTJGfL7xu2fkMV3deu26n6H0uh/Xr18snqn7wgx/oZJCqDu3bt4+IFi5c6PEW1A7hhg2CSKxZc+//Hj4siMTSpff+S1ubyM8XmZnC3/9uGsePF1ar8LRl5zPs3fneup3l5eWqbL/bXA7y0Y0XXniBcWCq/l26dImIoqOjPd6C2iHs7BRpaYJILFkicnPFSy8Jk0lERooHjmm8ckXYbGLy5LtRNBpFerrIyxO6fBB
bV915TWeudTqd+/btmzlzJhEtW7bM+w0ObS6XS16613p6eaXBsLXmZvHqq2LKFBEYKKKixKpV3c9Ou3E6xZEjwmIRISF30xgWJiwW0V/Lzpf02Z13uVyy26nFzLVbt24lop/97Gfeb2rIk9NdePx0r54GcNfVCbtdpKTc69+kpIjcXME6/eqg6M5rMXNtYWEhEU2fPl3FOoeqVatWEZHHb/XXUwjdioqE1SpGj74bxeBgkZkpCgqEzx9UG3TdeRVnrm1ubjYajYGBgbgg7Nfrr79OROvWrfNsdV2GUGptFQ6HyMgQfn530xgXJ6xW4cXQhAEZvN15tWaunTBhAhGVlpZqU+bQsW3bNiJavny5Z6vrOIRuFy6I114TcXEyip888khGRsauXbs0fRege/a8Qd2d93Lm2scff5yI8ofieEN1nTp1ioimTp3q2eqDIYSS0yn27hUrVqz435XP2LFjX3rppeLiYnX309HR0fXVTIPxtZ/dyG7nypUrg4KC5N9rzJgx77zzTr8ryvcUv/nmmz4oclBraWnx8/MLCAjw7Ldl8ITwf+T0q3PmzOk2ulKV6Vdv3rwphwIGBQVt2rTJ+w3qSteZa7ds2dLv8v/85z+JaO3atT6obbCTpxueDWAcfCF0k9OvRkRE9DbkaqC6vp/3+PHj6larKydOnFAyIvzAgQNElJqa6oOSBrvFixcT0e7duz1YdxCHUOr5LsCkpCSbzXb9+vUBbefDDz+UzxakpqbqYfY8Pbh69SoRRUREcBcyCPz+978nor/85S8erDvoQ+hWWlqak5PjwVwOOp89j1dYWBgRqft2jCHJbrcT0erVqz1Yd+iEUOo5/eq4ceOys7O//fbbBy5fU1Mjn1Hw9/d/6623fFyt/snbpIcPH+YuRO8OHTpERI8++qgH6w61ELrV1NT0O/3qN998M3nyZCKKioo6ePAgX7H69ctf/pKI3n33Xe5C9O7GjRty2KAH6w7ZELp1ezrWPf2qw+EY1LPn+cb69euJ6MUXX+QuZBCIjIwkIg8aCkM/hFJDQ8PGjRvnz5/v/mKUr2ZavXp1S0sLd3X6tWvXLiJasmQJdyGDgPzt8uCUykjDg9lsXrt27bFjx+T0qyaTyWQyPfvss5s3bw4ODuauTr/ka97PnTvHXcgg4PGxGi4hdJM3MCwWS0NDg/t5POjNlClTAgICLl682Nrayl2L3iUlJRFCqJz80CopKeEuRO8CAgImT57scrnKysq4a9E7fBMODM6ylMOxUgghHBj38RJCcNeidwihQpMnTw4KCqqoqGhubh7QisM0hFFRUZGRkQ0NDdeuXeOuRe88vtQZbvz9/ePj44UQpaWlA1pxmIaQFHzAO53O8+fPHzt2zIdF6RG+CZXz7FgN3xDKacz6OF5VVVUJCQkrVqzwYVF6JA9USUkJTt375VnDb/iGUJ5l9XG84uLiTCbT9evXb9++7cO6dCcsLGzs2LHNzc2VlZXcteidZ6fuwzeE/Z45GAyGxMREwp0MnJEqhtPRgVFyvPDLJ+E4KDR16lSDwVBSUuJ0OpWvNXxDOGnSpODg4MrKyqampt6WQWNQ6vfUHaTQ0NCYmJjW1taKigrlaw3fEPr5+U2ZMqXvhjK+ASQcB+U8OFbDN4Sk4Hjhl0/CcVAOIRyYfhvKiYmJfn5+5eXl7e3tPqxLdyZOnBgSEnL16tX6+nruWvTOg1P3YR1CebzOnj3b2wLBwcETJ07s7OwsLy/3YV26YzQaExISCJeFCuCbcGDQIFUOx0EhhHBgZEO5tLS0j4YyfvkkPPyl0Pjx400m040bN2praxWuMqxDaDKZYmNj29ra5GSrD4TuvIS7NQoZDIaB/s4M6xASGqSK4TgoN9BjhRD2c7zk8OU+mjfDRFJSktFoPH/+fGdnJ3ctejfQs4bhHsJ+zxzw5KEUEhIyYcKE9vb2CxcucNeid/gmHBg0SJXDcVAIIRwYhFA5HAeF5BiPCxcutLW1KVl+uIc
wNjY2NDT05s2bNTU1vS2DBqmE46BQUFDQd77zHeVjPIZ7CN0NZTRI+4XjoFBtbW1ra2tiYmJjY6OS5Yd7CAl3KRSTx6G4uJi7EF0rKiqaN29eVVVVTEyMnNOqXwhh/2dZ7lfZ9fHk4XAQHR0dHh5eV1dXXV3NXYtO7dmzJzU1tby8PDk5ecuWLQrXQgj7/6JzP3mIt1Bj3ExvhBBvvPHG8uXLGxoaVq1adfToUfd8tf1CCAfQIMUte5yZP1BjY+OTTz65bt06g8Fgs9ncU68r5K9dZYNFQkKCv7+/bCgHBQU9cBkMX5bQIO2prKxs+fLlxcXFERER27dvlxM/Dwi+CSkwMHDSpEnyVb+9LYPTMAnfhN3s3bt33rx5xcXFs2bN+vLLLz1IICGEEhqkCuE4uMmLwIyMjLq6uszMzM8++0xOve4BhJBIWQj7ffJwOIiPjw8ICLh06VJLSwt3LZyamppWrly5bt06IYTVat2+fbucet0zCCGRgrNNs9ksX2V3+fJlH9alOwEBAfHx8cN8usLKysqFCxfu3LnTbDbv3r3bZrMZDAZvNogQEmEE6UAM80bx4cOH586dW1hYmJiYePz48SeeeML7bSKERF0mh+ljzpMhH0KFQxGG/HHow7vvvpuenl5dXb148eLjx4/LXxvvIYRERBEREVFRUU1NTVVVVb0tM7TvUpw6dWrmzJkbNmzod8nheZeitbV1zZo1v/nNbzo7O61W6549e8LCwtTaOEJ413BukG7dunX+/PmXLl3atm1bv/OfDeHj0Juqqqq0tLQtW7aYTKYdO3bYbDajUdXgCBBCCPHrX/+aiN5+++3eFpATg40ZM8aXVWlNfq7L3wSLxdLW1tbvKnKiuJCQEKfT6YMK2R09ejQ6OpqI4uPjv/32Wy12gRDe9de//pWInn/++d4WcLlcoaGhRHTr1i1fFqadmpqaxx57jIj8/f1tNpvCtRobG81ms9FozM7OPn/+vKYVsrPb7YGBgUT0k5/8pLa2VqO9IIR3ffzxx0SUnp7exzJz584lomPHjvmsKu2cPn1a3lwePXr0wYMHFa5VXl4+a9YsIgoODpbfnykpKXa7vampSctiGbS2tv7qV79ynyN0dHRoty+E8C75EPT48eP7WOaZZ54hovfee89nVWkkPz9ffqsnJydfunRJ4VqHDh0aM2YMESUmJu7YscNisZhMJvlrOmrUKIvFcuTIEU3L9pnq6uq0tDT5WbNlyxatd4cQ3uV0OkeMGGEwGOrr63tb5s9//jMR/eEPf/BlYepyuVzuvsLTTz/d3NyscEW73R4QEEBEixcvvn37tvyP9fX1eXl5XQdMTps2zWazVVdXa/Y30FxhYeGECRPkJ/KJEyd8sEeE8B55ovXll1/2tsCOHTuIaNmyZb6sSkUNDQ3Lly8nIj8/P+UXgS0tLatXryYig8FgtVof2I85e/as1WqV35NEFBQUlJmZmZ+f39nZqerfQHMffPDBiBEjiGjBggXXr1/3zU4RwntWrlxJRO+//35vCxQVFRHRQw895Muq1FJaWjpt2jQiioyMLCgoULjWlStX5s2bR0Qmk2nnzp19L9zW1pafn5+Zmenvf/cRudjYWKvVWl5e7nX5muvo6OjaKG5vb/fZrhHCe/74xz8S0auvvtrbAh0dHSUlJZpeo2vkk08+kTeXZ82adeHCBYVredydr6qqstls8fHx8nfaaDSmpqba7XblZ78+duvWrR//+MfyO3zjxo0+3jtCeM/WrVuJaMWKFdyFqKnrReDKlSuVtzFV6c6fPHnSYrG4nzAICwuzWCyFhYWebU0jp06dmjRpEhHFxMR8/vnnvi8AIbynsLCQiGbMmMFdiGoaGxtXrFjhvpxzuVxK1lK9O19XV2e321NTU7v1b/Rwx3Xbtm3yVRTf+973rl69ylIDQnhPU1OTwWAIDAwcjCecPZ0/f37mzJlEFBoa+tFHHylcS9Pu/JkzZ6xW6+jRo7v2bwoKChR
+OqhLjhaSTyE988wzd+7c8X0NEkJ4H9mbLisr4y7EW59++mlUVJS8p1dcXKxwLd9051tbWx0OR0ZGhp+fn0xjXFyc1Wq9ePGiRnvsqb6+funSpQMdLaQRhPA+jz/+OBHt2bOHuxCv2O122Z9csmRJXV2dwrV8352vrKy02Wzyekz2b9LT0x0Oh5IhrN44d+6cHIY+evTo/fv3a7ovJRDC+2RnZxPRm2++yV2Ih5Tc0+uJsTsvhHA6nQUFBVlZWfIjgIjCw8MtFsupU6e02N2ePXtGjRpFRHPmzPHld28fEML7/OMf/yCitWvXchfiicrKyu9+97sK7+m58Xbnu7p9+7bdbk9OTnb3b+TA1IaGBlW237VR/NRTT+nnfglCeJ/9+/cTUWpqKnchA3bkyJGxY8cS0ZQpU5Tf02Pvzj/QyZMns7OzIyMjZRSDg4O97980NDT89Kc/HehoId9ACO8jn6wPCQk5cOAAS8vOM+6BnQO6p6eH7nwfWlpaHA5Henq6+zVKiYmJOTk5ly9fHuimysrKpk+fTkQRERH//e9/tajWGwhhdxs3bpRdDd+37DzQ9Z5edna2wrGa+unOK1FRUWGz2dxTO/j5+cn+jcJr171794aHhxPRzJkz9TmADiHs7tatWzk5ObJTL3/kixcv3rlzp9YtOw9UVVU9+uij8oQtLy9P4Vq66s4rJ/s3mZmZ8jufiKKjo7Ozs0+fPt3bKvIiUN4IWbp0aR/Px/BCCB/M3bJzz+yhacvOA13v6fXx5Ec3XbvzBw4c0LRCjdTW1trtdvnIS9f+TWNjY9fFWlpasrKyBtooZoEQ9uOBLbvc3NyamhrGqt5//33Z0P/+97+v/J6eDrvz3pADU81ms/y5hIaGZmVlyQdEKioqUlJSiEi+n5e70n4ghEoVFRVZrVZ1W3Ye8OyeXtfu/KpVq/TTnfdeY2Pje++913Vganx8vHxvQFJSknyXrM4hhAOjYsvOAx0dHQsXLpQfAZs3b1a4lp678yo6d+6c1WqV92lmzJixaNEi9xsAdA4h9JCXLTuPvfbaazExMV988YXC5cvKyuSzvPrszquuvb19//79NTU1er4I7AYh9Iq7ZScfvVPSsvN+jzdv3lS48N69e+WzvLrtzoNACNUiW3azZ8/uu2XnM4OlOw8CIVRdt5ad2WyWLTtf9m9aWlrk2xn1350HgRBqpGf/ZurUqTab7caNG1rvenB150EghForKSlxt+yIKDAwMCMjw+FwaPTwvvv9vAkJCWfOnNFiF6A6hNAXOjs7uw25iomJsVqt6s7l8MD384L+IYQ+dfXq1dzc3BkzZnTr33g5l4OcPQ8XgYMUQshD9m9Umcuh6/t5d+zYoXqpoDWEkFNvczkovxPY9f282t2cBE0hhLpQXFzswVwO7vfzpqWlDeo5WIY5hFBHepvLoeeL69va2uTUwqT97HmgNYRQj3rO5ZCenp6XlyeffqiurnYP4/bB7HmgNYMQgkCXXC7XwYMHN23atGvXrtbWViKKjIx87LHHPv300+vXr0+YMGH37t0PP/wwd5ngLYRwEKivr9++ffu///3vY8eO+fn5BQYGpqSk7Ny50z0GAAY1hHAw+frrr0+cOJGcnJycnOy+7w+DHUIIwMzIXQDAcIcQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgg
hADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMz+Hwxq3SsJIQvTAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxU5f4H8M+wDIuKIIiAC6ik7KhkekXN676gaYqmhll2IbXf2L1qmNWluyW3uollGaZdSdMb5gZaiRkiLqkgyK64gBuIqIAiDDDz/P54bByHRcCZeYbx+371R5w5c853hM+cc57znOeRMMZACBHHRHQBhDztKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohKRZcnNzk5KSlEql6EKMEIWQNMuaNWtGjBjx4Ycfii7ECEkYY6JrIIaurq6ua9euJSUlGRkZvr6+ossxNnQkJI938ODBkpISDw8PSqAuUAjJ48XGxgKYNWuW6EKME4VQx86exR/+AAsL+Pri+HHR1bRGbW3t7t27AcycOVN0LcaJQqhLjGHGDAwdiqIizJ+PqVNx/77omlosISHh9u3bvr6+Xl5eomsxThRCXUpLw8WL+Nvf0KkT/vIXSKVISBBdU4vxc1E6DOoOhVCX8vLg7g5rawCQSODri9xc0TW1jFwu37NnD4AZM2aIrsVoUQh1qaoKHTo8/LFDB1RWiqumNfbv319eXt6/f38PDw/RtRgtCqEude6M8vKHP1ZUPJLJtoDORfWAQqhLvXvj/PmHR7+MDKjfZzP4LmDV1dXx8fGgc1EdoxDqkrc3+vZFRAQqKhAVBRMTjBr14KWsLAQGYvNmofU9xo8//lhRUTFw4EB3d3fRtRgzM9EFGLvYWLzyChwd4eGB3bthYfFgeVoafvsN+fkYNw6OjkJLbBSdi+oH9R0VZ8oUxMdj9mxs3Sq6lAbcv88GDgzIzU2/ePGim5ub6HKMGZ2O6svdu6ire2TJ2rVo3x7btmHPHkE1NWXfPklOzukXXsihBOoahVAvDh6Etzc++eSRhT164J//BIBFix5pRDUM338PAMOH050JnaPTUb04dAgjR0IqRVoaPD0fLlcqMWwYjh3D4sVYu1ZcfZoqK9GlC+7fR0EBevQQXY2xoyOhXowYgfnzIZdj4UKof+uZmGDjRlhYYN06HDkirj5NcXGorERgICVQHyiE+rJ6Nbp2RVISNmx4ZLmHB8LDoVTi9ddRXS2oOE2xsQBAzaL6QaejerR9O2bOhI0NsrPRrdvD5TU1GDAA2dl4/338/e/i6nvg7l106QK5HFeuwMVFdDVPAToS6lFwMKZNQ0UFFi58ZLlUio0bYWqKVauQni6ouId270ZVFYYNowTqCYVQv774Ara22LsXO3Y8snzQICxciLo6hIVBoRBU3AP
8XJQeo9cbOh3Vu+hovPEGnJyQkwM7u4fLKyvh64tLl7B6Nd56S1R1ZWVwckJdHa5dQ5cuoqp4utCRUO9CQzFqFIqLsXz5I8vbtcPataxDh//t3Xvp0iVBxWHXLsjlGDGCEqg/FEK9k0iwbh2srPDNNzhw4JGXJk5cMWvW7IMHw8LCRJ2hULuo/tHpqCCrVmHlSri5ISsL7dqpFpeWlnp5ed28eXPTpk2vvPKKnou6cwdOTlAqce2awfYqN0J0JBRk+XIMGICCAvztb+qLHRwc/vOf/wD4y1/+cuPGDT0XtWMHamowahQlUK8ohIKYmeGbb2rd3N4/dCglJUX9lZCQkMmTJ9++fXvJkiV6LurHHwEgOFjPu33qMSLOO2+/DcDf37+mpkZ9eWFhYYcOHQDs3r1bn/XU1LB9+9jt2/rcJ2EUQpEqKyv5Q+sffvihxktRUVEAXFxc7ty5I6Q2ojcUQsESExMlEomFhUVOTo76coVCERgYCGDhwoU6LeDOHQawNWse/Ojvz9LSdLpDoomuCQUbMWLEq6++KpfL33jjDabWUm1iYrJhwwYLC4vo6Ojk5GSd1mBtjfXrdboH0hQKoXiffvpp165dDx8+vP7RKHh4eKxYsUKpVL7++uvVunzAwtYWdnYG9SjV04VCKF7Hjh35FeDbb7999epV9ZdWrlzp7e197ty5f/Jn8J/AnTtITcX27fj3vxEWhsmT8eyzePFFAKipQVgYoqOfcA+klehmvaF48cUXd+3aNWnSpL1796ovP3HiRGBgoEQiOXnyZP/+/ZveSG0tu3JFUliIwkIUFDz4r7AQV69qDnDD9e2L336DuzuuXkXv3sjMxMiR2LQJ/fpp8ZORx6AhDw3FF198cejQoX379m3fvj1Y7VbdoEGDFi1a9Pnnn//888+qENbU1Fy9evX69etFRUUX1VhZOWRnn2hw+3Z26NXrwX/OznBxQa9e6NPnwTMblpaYORPffguJRPcflTyKjoQGZP369WFhYQ4ODrm5uQ4ODgAUCsW5c+fy8vKSkpKsrKwKCwsLCwsLCgqKiooa/MU5ODhaW99wdYWbG9zcwP/H1RU9ekAqbXinZWVwd0dpKXJzMX062rXDF1/gwAEsW/ZwkFSiUxRCA8IYGzNmzMGDB1999dVvvvnmzp07W7duffPNN+uvaW5u7uDg4OLi0utR3bt3Nzc3b9FOVSEEMHw4zpzB8OHYuxcTJ2LHDlhaauWTkaZQCA1Lfn6+v79/VVXV/v37d+3a9dVXX9nb2/fv39/Nzc3V1dXV1bVnz56urq4uLi6mpqZa2aN6CL/7Di+/jJ07sXgxiorw/PPYuxft22tlP6RxAu9RkgatWrXKwsJixYoVJiYmUqk0Oztb/zXk5jIXFwaw4cPZ3bv63//ThW5RGJxly5alp6fv379fqVQuXbpUyCTVHh5ITETXrjh8GBMm4O5d/ZfwFKEQGhwzM7P9+/enpaX16NHj3XffFVVGnz44cgQ9e+LIEYwfj4oKUYUYP7omNDjFxcUeHh7l5eVxcXGTJ08WW0xhIUaOxMWLCAhAQgI6dRJbjnGiI6HBWbJkSXl5+bRp04QnEICrKxIT4e6O1FSMHo1btwTXc+/evfXr1/v7+7/77ruff/65kRxCRF+UkkckJCQAsLa2vnTpkuhaHioqYl5eDGD9+rGSEjE1XLlyZfny5R07duR/tyYmJgCCgoJKRBWkPRRCA1JdXd23b18AH3/8sehaNBUXM29vBjBPT3b9ul53nZaWFhISorr/GRAQEBMTs2PHDnt7ewCOjo4//vijXgvSNgqhAYmIiADg7e2t8aC9gbhxg/n6MoD17cuuXdP57pRK5YEDB4KCgiQSCT/0BQUFHTt2TLXC5cuXn3/+eQASiUQmk8nlcp3XpBsUQkORn59vaWkpkUgSExNF19KokhLm789cXGqHDn35ypUrOtpLdXV1TEyMt7c3P/R16NBBJpMVFBTUX1OpVEZFRfGDZEB
AwNmzZ3VUkk5RCA3FhAkTALz22muiC3mM0lI2btyfALi7uxcWFmp34yUlJZGRkS6/T4Lh7OwcERFx+3GD3pw4caJ3794ArKysoqKitFuSHlAIDcK2bdsAdOrUqU00M9y5c2fQoEEAevTocf78ea1sMz8/XyaTWVtb8/j169cvJiam+afl5eXlL7/8Mn/v9OnTH5tbg0IhFK+8vLxr164ANmzYILqW5iorK/vDH/4AoHv37vn5+U+yqeTk5ODgYN4V1sTEZPTo0XFxca3bVExMTPv27fm3Q3Jy8pNUpU8UQvH+7//+D8CgQYMUCoXoWlrg3r17f/zjHwE4OTm1ooNrTU1NbGwsP6ICsLCwCAkJeex2cnNzm17h0qVL/NvBzMwsIiKirq6upYXpH4VQsNTUVFNTUzMzs/T0dNG1tFhlZeWoUaMAdOnSJTMzs5nvqqioiIqK6vH7TNyOjo7h4eHXH3ffIzk5mbeUHjlypOk1a2trIyIi+I3EwYMHX7x4sZmFiUIhFEmhUAwePBjAsmXLRNfSSpWVlWPGjOFZysjIaHrlS5cuhYeH29ra8vg988wzUVFR9+/fb+It1dXVGzZsUG8p/e9//9ucwg4ePMgbeDp27Lht27bmfyL9oxCK9OWXX/LLqrtt+Xmh6upq3sPOzs7u1KlTDa6TmpoaEhJiZvZgOJXAwMC4uDilUtnEZsvKyqKiovjVMj/pjYiIuHXrVvMLu3nzpqrrX0hIyL1791r2wfSFQihMcXGxnZ0dgJ07d4qu5UnJ5fIXXngBgK2t7YkTJ1TLFQpFXFzc6NGjeRLMzc2Dg4PVV2jQ+fPn1VtK/f39o6Ojq6qqWldbTEwM35SHh8fp06dbtxGdohAKM3fuXADjx48XXYh2yOXyadOm8dO/48eP8xvuHh4ePEg2NjYymezy5ctNb0S9pVQikfCW0qYPmM2RnZ3t5+fHm38iIyMNrQGMQihGUlKSRCKxsrK6cOGC6Fq0pqamZsaMGbwDuqqnda9evdasWdP0+TY/YPJWTVVLaVZWlhZrq6qqkslkvAfcmDFjHtsOpE8UQgHkcrmnpyeAf/3rX6Jr0bK6urqgoCDeIsJ7WtfW1jaxPm8pdXV15fHr3LlzeHj4NZ31TN21a5eq2/e+fft0tJeWohAKwIfT7tOnT3V1tehatO/rr7/mDxk1vdr169cjIiL4VTHvBBcVFVVZWanr8oqLi8eNG6fq9m0IvwIKob4VFBS0a9cOwMGDB0XXohN///vfAbzzzjuNrXD69Gn1R5MCAwNjY2P1eVedd/uWSqUABgwYILzb91Mdwk2bNr322mtDhgwJCwv74osvDh8+rIfJAIOCgniLua53JMrChQsBfPbZZ/Vf2rdv3/Dhw1UtpXPmzElNTdV/hdzJkyf55JDCu30/vSGMjIwE0L7eqJrOzs6jR4+WyWQxMTEpKSnaPV3ZsWMHbz80qIYB7eL3Knbs2FH/pT/96U/4/dEkrT+B0QoVFRWG0O37KQ3hzp07TUxMTExMtmzZkpycHB0dLZPJAgMD+YmiOnNzcy8vr+Dg4IiIiLi4uCdpzKysrHRzcwOwbt06LX4WQzNw4EAAx48fr/9SXl7e6tWrKyoq9F9VE2JiYvjk5G5ubuoPDevN0xjC06dP87D9+9//rv/qtWvX4uLiIiMjQ0JCvLy8eBdEdba2toGBgaGhoVFRUcnJyc3vh7F06VIAzz77bJvoVdxqvI+LIRzomu/SpUtDhgyRSCSrV69uujlXF566IQ+LiooGDRp05cqVV155ZdOmTY9dv6amJj8/PzU1NScnJzs7++TJkyUlJRrrODs7BwQEeHt7e3l5BQQEeHp61o9uVlbWgAEDlErlyZMnBwwYoK2PY2iUSqWFhYVCoaiurpY2NgeNQZLL5e3bt1coFFVVVRb6nQrn6QphVVXViBEjTp48OXTo0F9++aV1/9bXr1/ngVQlU2MaXalU6u7uHhAQwJPp5+fXuXPnoUOHHjt
2TCaTrVmzRkufxhAVFRW5uLg4OjreuHFDdC0tU1xc7Ozs7ODgcPPmTX3vW89HXoGUSuVLL70EoGfPnlp8gF0ul6enp2/evPntt98eN26camgGdXyeMxcXl/Lycm3t1zClpKQA6Nevn+hCWiw1NRWAv7+//nf9FE0S+t577/3vf/+zsbGJi4vr3LmztjYrlUr9/f39/f1VS8rKyrKyslRHyzNnzpSWlnbp0mXMmDE2Njba2q9hun79OoAGv4kMnMDKn5YQfv/996tWrTI1Nd22bZuPj0+D68yZM+fEiRP8uo5f4Hl5eUlaPnWtra3t0KFDhw4dyn9UKpXR0dGLFi1KT09/os/QFrSJEK5duzYxMXHRokX8iWSOQqhbR48efeWVVxhjn3322cSJExtb7cyZM3zSadWs8XZ2dn5+fr6+vr6+vv7+/t7e3vXvKz6WiYnJggUL/vrXv545cyYjI4N35zdWbSKER44c2blz54svvqi+kEKoQwUFBS+++KJcLl+yZMmiRYuaWDMtLU29ITQlJaW4uDgpKSkpKUm1jkZDqIeHR3Mm65RKpcHBwevWrfvuu+8ohMI1WGRRUREAZ2dnAQXp/zJUnyoqKnx9fQGMGzeuFfd/bt++nZycHBUVFRoaGhgYaGVlpfGvJ5VKvby8QkJCIiMj4+Li+FTyDTp69CgAFxcX475JyE80Wj1cmn7wQUrz8vLUF06aNAnA7t279V+PMYeQP1YDwNPTUyudQmtra7OysrZt27ZixYpJkyaphipS5+zsPH369Abf/swzzwD45ZdfnrwSg9WvXz8AKSkpogtpCn/QXqPjTv/+/QGcPHlS//UYcwjffPNNAA4ODtoaoLa+8vLylJSUmJgYmUw2evRofisiMDCwwZX/+te/Apg/f76OijEEjo6OAAy5Z+zt27cB2NjYaCzv0qULgKtXr+q/JKMNIX+qTSqVHjp0SG87VSqVFy5caGzwwvz8fIlE0qFDBz08NSdETU2NiYmJqampIZ9yZ2VlAfDw8FBfWFtbyyvXf581Zqxz1ickJCxcuFAikWzYsIFP3KMfEomkV69e6vcM1bm7uw8aNOju3bt79uzRW0n6VFRUpFQqnZycmtNYJUpjrTJKpbJLly6q8eD0yQhDmJeXN2vWrLq6uvfeey8kJER0OY/gD85s2bJFdCE60XabRsVWbmwhvHXr1uTJk8vKyqZPn/7BBx+ILkfT7NmzpVJpQkJCcXGx6Fq0j0LYOo8PYVkZJBLcu/fgRwcHaHT84Ct89tmDH/v101xBb2pra4ODg8+fPz9gwICYmJj6jzII16lTp/Hjx9fV1X3//feia9G+a9euoY2EUON+oKGHsDmsrbF+vVa29EQWL16cmJjo4uKyZ8+e+o/nGggjPiMVeb+72QzuTr22QmhrCzs7HDmilY210qpVq77++msrK6vdu3d369ZNZClNmjx5sq2tbUpKSnZ2tuhatIz/fasGrjdMDRYp9hiunRDW1CAsDNHRWtlYa+zcufO9994zMTHZunUrH17BYFlaWvIRcvnEoMaErglbp7kh7N0bTk5wcsLt2w28yhhmzMCvvzb86iefYOJErFiBrVuRkYHa2ieotyFpaWnz5s1TKpWRkZFTp07V8tZ1QHVGqlQqRdeiTYYfQsZYcXGxRCIxqGvC5t4VOX4cfH6O3+eo0mRpiZkz8e23qP/oz6FD+Okn/PTT77s0Q58+8PaGlxcCAuDtjZ49G3hXMxUVFU2ZMqWysnL+/PnLly9v5Vb0a/jw4W5ubgUFBUeOHFENAWgEDD+EN2/erKmpsbe3t7S0VF8u9kS6uSF0dAR/iKeJtISGYvp01G8QWbsWr72GzExkZiIjAxcuICcHOTkPV+jUCX5+8PWFry/z8zvZ/CeGqqqqpk6devXq1WHDhn311VfN/CzCSSSS2bNnr1q1asuWLUYTwqqqqjt37kilUj7OvGFq8Guiurr6zp075ubmwip/bJ+aO3cYwFTzedj
bs7Q0zRXs7R/8/7BhzMZGcwUNcjnLymIxMSw8nAUFMScnBjz4z9W1hlfl7OwcFBQUHh4eExOTlZXVYDcopVI5a9YsaHu4Cv3IyckB0LFjx6anyGxDzp8/D8DNzU10IU3Zt28fgHHjxqkvvHDhAgBXV1dBRWl7eIuwMPw+mGqjpFJ4ez9yWnv9+oODZHFx8a+/9svJySkqKtq7d6/q4Vpra2s+YpLq+Vp7e/uVK1d+//33NjY28fHxWhyuQj88PT0DAgJSU1P37dvH22naOsM/F0UjraDCK398CG1toT4gW2kpACiVeP113L2LsjJ8/PGDhQDmzsXcuS0uwsUFLi4YNw5AdyCttrb27NmzWVlZZ86cyczMzMzMvHz58qlTp06dOqV6i4ODQ2lpqbm5+Y4dO7wbu041bC+//HJqauqWLVuMI4Rt6E592wthgy5exMCBWLgQBw/iv/+FdkfxMzc39/Hx8fHx4YOjAaioqMjPz1eNMpienl5aWurg4DBixAjVLLBtzpw5c5YvX/7jjz/yzyK6nCcl/E+5OfhNeUMLYSvvE7q7Y+FCANi9G2qD5eiKjY1NQEDAvHnz1qxZc+DAgZKSknXr1pWWlvLrkDbK0dFxzJgxtbW127dvF12LFjT4983J5fL4+Hi9V9QAA+wugye5Wa9Q4M9/hrs7pkzRYj3NIpFIXnvtNXt7+/T09MzMTI1Xy8vLDeRX/lj8huHmzZtFF6IFTRxPVqxYMWXKlHnz5t1TdUEWpMEihZ9ItzKEjGH+fIwejSVLtFtPc/Ghk1CvE2ZNTU2vXr2mTp16+fJlMZW1xNSpUzt06HD8+PFz586JruVJNRFCT09PKyurzZs3DxgwgI+xK4phXhO2MoTx8UhMxObNeOklREZqt6TmUvU7USgUqoVSqXTs2LFKpfK7774TU1ZLWFtbT5s2DW2/C1tSUlJubq5UKs3IyKj/amho6OnTp/v165efnz948OAPPvhASFchhUJRUlJiYmLi5OSkvlx8l1dR90a0gg+dpDHlLb+x4enpKaqqFjlw4ACA3r17K5VK0bW0RllZWWhoqPoQyQsWLGhwpqrq6urw8HD+fNnIkSN1NzF9E0pKSrKysjQW8nnR9DA/bGPadgj50Emvvvqq+sLa2lo+aI/AWWCbT6FQ8Gc+hMyM94Ti4+O7d+8OwNzcPDw8fOPGjfwJsr59+zb2j5+QkMCbQBwcHAxhZMS7d+8CsLKyElhD2w4hHzrJxsZGY+gkmUwG4M9//rOowlpk2bJlABYtWiS6kBYoLi5WDR0yZMiQ7OxsvjwnJ4ePemhubh4REaFQKOq/98aNG3x4UolEEhoaKnbYq7y8PADu7u4Ca2jbIWSMDRo0CMC2bdvUF548eRKAo6OjkMGzWoqP/9WpUyftTs2tO7GxsbybpbW1dWRkpEbSqqqqZDIZP0EdNWpUg6edSqUyKiqKT2Do5eV15swZfdWu6ddffwUwfPhwUQUwIwjh559/DmDSpEkayz08PAD89NNPQqpqKT42vpDhn1vk4sWLY8aM4QfAiRMnNjEd7/79+3n7R+fOnePj4xtcJyUlpU+fPgAsLS2joqKEXBXz1vWXXnpJ/7tWafMhLC0tlUqlZmZmxcXF6sv/8Y9/AJg7d66owlrko48+AjBjxgzRhTRKoVBER0fzp1s6deoUHR392LcUFxdPmDBBddrZYFf1+/fv82sHAFOnTi0tLdVB7Y0qKSnh3ylBQUH63K+GNh9CxtjkyZMBrFmzRn1hQUGBRCKxtrbWGO1crOzs7OHDh1++fFlj+bVr10xNTS0tLW/evCmksKadOXPmueee41EJDg5ufpHqp53e3t4ZGRkNrvbDDz/Y2dkBcHJy+vnnn7VXeKPy8/NlMhkfD5/3GQwJCbmrelZIv4whhHzksoEDB2osHzZsGICYmBghVdVXVFTEp69YsmSJxktKpbJHjx7dunXr2LFjSEhIXFycoVzNVlVtiYw0NzcH4Obm1rq
EnDp1it9MsrKyioqKanCdwsJC/vuSSCQymUwulz9Z3Y1KTEwMCgrid0pMTEyCgoJWrlzJG3V79uwppI3aGEJYVVVla2sLIDc3V315dHQ0gDFjxogqTF1VVdXgwYN5W2L9Bhh+r8XCwkJ1t83JyWnx4sVJSUkNNjDqyZEjzMPjmqOjna1taGjok5xTVFRUhIaG8o82bdq0W7du1V+ntrY2IiKCj949cODA/Pz8Jyhdk0KhiIuL478C/k8dEhKiatTNzc3ljbpmZmaNNerqjjGEkDG2YMECAO+99576wrKyMisrKxMTkytXrogqjFMqlXPnzuUHE41rV8ZYbGysRCIxNTWNj4/PysqKiIjo27evKo0ODg6hoaHJycl6bbcoK2NhYUwiYQDz8bl16pRWtrp9+3Z+2tm9e/fG5gg5fvx4r169AHTo0GHz5s1PvtOKioqoqCjVFFqdO3cODw+v32ar3pegsUZdHTGSECYmJgJwdXXV+EudPn06gI8//lhUYVxERAT/q6p/UXTq1Cl+ZfLZZ5+pL+dp5GdxXLdu3WQymT7SuHcv696dAczcnIWHM63eOCkoKOATiZuYmMhkspqamvrrlJWVzZ49W3UJ2uq+LNevX4+IiOBnSfxmYFRUVNO3JRMSElSNunrrS2AkIVQqlW5ubgAOHz6svnzXrl0A/P39RRXGHj3QabxUUFDAO/csWLCgsbefOHFi6dKl6nMhurm5vf3222m66A9UXMxCQh6MNTJkCPv9bE271E87n3vuucYmrouJiWnXrl379u3PnTvX0l2kpqaGhISoZncJDAyMjY1t5lxRN27cUG/U1UNfAiMJIWPsnXfeARAaGqq+UC6X87YvUbeDGzvQMcYqKir47UH+VOFjN8WPjfxUDcCMwYOZqyuTyVhysnZqjY1lDg4MYNbWLDKS6Xh6s2PHjvXs2ROAjY3Nli1bGlwnNzd37969zd8mv/BTPedtbm4eHBz822+/tbQ2jUZdXf/xGE8IGxs6aeHChQCWL1+u/5KaONApFIopU6YA8PDwaNHplkKhOHz48Jtvvrl3zJiHI2T17cvef5/V65rcXBcvsrFjH2xqwgTW+C147SorK1MNnvCEdwiqq6tjYmI8PT351mxsbGQyWRN9CZpDoy/Bk2yqacYTQsbYgAEDAPzwww/qC0VNFl9RwcaOPefm1qfBA91bb70FwN7evvVtgAoFS05mMtkj49V5ebGICJaT04KNREez9u0ZwJ3jaSwAAAeiSURBVOzsWDNuwWsdP+1s9R2CGzduREREqMYHcXNzi4yM1NYjEfrpS2BUIfz000/5P5bGcv59ps/J4uvq2KRJDGCDBt0qKyvTeHXjxo38ZOnXX3/Vzs54Grt00UxjXl5Tb8zIYM8992D94GAmbtjI1t0hOHfunEwms7Ky4iHhU3Hp4v6qqi9Bt27ddDHxs1GF8MaNG2ZmZlKpVKNLB2+c1Odk8W+9xQBmb8/qH+eSkpL4xcb69eu1vFdVGjt31kwjb9vIy2ODBzOplPn4sGPHWFgYH+yVGUAP2xY9bZicnBwUFMT7iPMb7gcOHNBpeRp9CRps1G01owohY2z8+PEAvvzyS/WF58+fl0gk7du3b/BhU63buPFB837941xeXh7/Tl2xYoUOK5DLWXw8CwlhHTs+iKKPD1MqmY8PW7aM3brFPvmEOTqy69fZypVMUF+tBqmeNmzwDoFcLo+NjVVN+GNpaRkSEqLRQ0N3dNeXwNhCyDvFDxkyRGM57yqxdetWXReQlMSkUgaw+se5W7eU/v79eZcRPfXJqKpiu3ezOXPYf/7DUlOZtTXjDe5KJevWje3apY8aWqjBpw3Ly8ujoqJUM9516dIlIiJCz729uePHj6sadbXSl4AZXwgrKyv5aAVnz55VX7527VoAEydO1One8/KYnR0DWP3jXE0NGzGC9emTPnbsC/o5IGv67jvm5/fwxwkT2IcfCiijGRQKxUcffcRP2j09PefMmaO
a8tXPz2/Tpk2661naHNrqS6BibCFkjM2bNw/ABx98oL6wsSeetOjWLdanDwPYtGms/nGOX385O7N6T1Doy4YNLDDw4Y8zZ7J33xVUSrPwOwRmZmb8DDAwMDAuLs5wRuJRNeq6ubkdPXr0STZlhCFMSEjgfZQ0fmHr1q07evSojn6LNTVs5EgGsP79Wf3j3EcfMYBZWbETJ3Sx8+bZs4f5+Dz8cfx4FhkprppmuXv37s8///zll19mZmaKrqUBubm5/fv3b2mjbn1GGEKFQsGHrzt+/LjednrzJnv2Webiwq5e1Xxp3z5masokEvboEBx6l5XFLC0ffkO4uLB9+4QWZAw0GnWv1v/1N4MRhpAxtnTpUgCLFy/W504rKxu4LZeW9uBOuEFcf/n7s6VLWXk5W72adeum3Z7ZT7MDBw6ohpDbs2dPS99unCFMT08H0KlTJ7FX8EVFrEcPBrB58wRWoebsWTZ4MLOwYP7+LCVFdDVGRdWoy7vgtajbt3GGkDHm6+sLoBVfS9py/z4bPJgBLDCQDjlPBd7tmz+Z3aIh5Fo/IYyBUw2SL2TvjGHBAvz2G9zcsHMn1J6YJ0ZLIpEsWbLk2LFjffr0ycnJ4f20mkWn3w0C8aGTLCwsLl68qP+9v/8+A1iHDqyRkY2IMbt3796SJUuKioqaub6EqU/Da1zGjRuXn59/6dIlOzs7Ly+vgIAAb29v/j+qXr+6EBuLl16CiQl270ZQkO72Q4yEMYdQoVAsW7Zsw4YNGtPimZube3h4+Pr6+vn5+fr6+vr68gkVtOLUKTz/PKqqsHYtFi/W1laJMTPmEHKMsYsXL2ZkZGRmZmZmZmZkZFy4cEF9NjUAdnZ2fn5+Pj4+gweHu7t39/FB+/at3N369Vi0CG+8gbVrtVA8eRoYfwjrq6mpyc/PT01NzcnJyc7OTklJKS4u5i95ed3LyWkHwNkZAQHw9oaXFwIC4OEBU9MGNlVWBjs7rFkD/uRnv37YtAn37+O55/D7+CaEPMbTGML6ioqK+EHy7Nm3UlPNcnIglz+ygpUVvL3h5wdfX/j6ws8PnTsDQFkZunZFz57IygJ+D2G/fgI+Amm76OsaAJydnZ2dnceOHct/rKvDuXPIzMSZM8jMRGYmCguRkoKUlIdvcXKCry++/hq2trCzw5EjGDpUTPGkraMQNsDMDF5e8PLCrFkPllRUID8f2dlITUVODtLTUVyMykrY2KCmBmFhiI6mEJJWotPRVrp0CVeuwM8P7u64ehW9eyMzEyNH0ukoaTE6ErZSz57o2RNlZQBgaYmZM/Htt1CbuZ2Q5qIQakdoKKZPx+/PfxPSAkbbd1TPPD3h4IBz50TXQdogCqHWhIWhokJ0EaQNooYZQgSjIyEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEYxCSIhgFEJCBKMQEiIYhZAQwSiEhAhGISREMAohIYJRCAkRjEJIiGAUQkIEoxASIhiFkBDBKISECEYhJEQwCiEhglEICRGMQkiIYBRCQgSjEBIiGIWQEMEohIQIRiEkRDAKISGCUQgJEez/AVrU8kFx0LYSAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": 
"display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO2deXxNV/f/PzeJSDTGmtqUmkoJTUhpiaGlSmuqFsVDTU+pChlEyECCECEila+pWkNLKU350arS1vDQ4ClRRKqoueakEpmTu39/7OPktk9Lhn3uPufe9X75YzevnM9ZPed+ss/dZ621TYwxEAQhDwfZARCEvUMmJAjJkAkJQjJkQoKQDJmQICRDJiQIyZAJCUIyZEKCkAyZkCAkQyYkCMmQCQlCMmRCgpAMmZAgJEMmJAjJkAkJQjJkQoKQDJmQICRDJiQIyZAJCUIyZEKCkAyZkCAkQyYkCMmQCQlCMmRCgpAMmZAgJEMmJAjJkAkJQjJkQoKQDJmQICRDJiQIyZAJCUIyZEKCkIw9mrBlS5hMyr8ePR72m9OmKb/WsqW1giPsD3s0oSW7duHECdlBEPaNvZsQwMKFsiMg7BsyITZswLVrsoMg7Bi7NmH16gBQUIDFi2WHQtgxdm3CgQOVwYoVyMyUGgphx9i1CTt1QsOGAHDvHj76SHY0hL1i1ybMyIC/vzL+4AMUFkqNhrBX7NqE2dkYPRrVqgHApUv44gvZARF2iZPsAGSSlwc3N4wbh5gYAIiNxeDBjz5qxgyt4xJM9erfpqcflB1FKXBzcwsODpYdhfWwaxMyBgATJyIuDgUFOHoUe/fipZcecdTs2dpHJpQuXXbv22ekl6G1a9euX7++i4vLG2+8ITsWa2DXj6Mcd/fiCTA2VmooBAAgPT19yJAhvr6+WVlZsmOxBnY9E6pMnoxPPwWAHTuQmormzR/2yzNnWicoYVSr1rNr1yqyoygp+fn5CQkJBQUF165dmzdv3mzDPXiUAWZ/eHgwgAFs9uziH3brpvxwzJjiH06dqvzQw8P6Ydovq1ev5h9OFxeXCxcuyA5Hc+hxVGHyZGWwbh1u3pQait0zYsSIdu3aAcjNzbWHFRoyoULPnmjRAgDy8pCQIDsa+8ZkMsXHx5tMJgCbN2/eu3ev7Ii0hUyoYDIhMFAZL1uG7Gyp0dg97du3Hzp0KB/7+/sXFRXJjUdTyITFDBuGOnUAIC0N/FuJA10eecTExDz22GMAfv75549sOquQPmXFVKyICROU8aJFMJtRoYLUgOwbd3f3qVOn8vH06dP/+OMPufFoB5nwT4wfD1dXADh/Hlu2KGNCFlOmTGnQoAGA27dv2/C7CjLhn6hZEyNGKOPYWKXgkJCFi4tLDE8pBBYvXpySkiI3Ho0gE/6VgACYTABw6BDOnpUdjd0zaNCgLl26ACgsLAwICJAdjiaQCf9K06bo00cZb9woNRQCABAfH+/o6Ahg9+7dO3bskB2OeMiEf4P64p56z+gBLy+vMWPG8LGfn19+fr7ceIRDJvwbOnfG88/LDoKwICoqqlq1agDOnTu3ZMkS2eEIhkz496iTIaEHatWqFR4ezseRkZE3btyQG49Y7MuEmzcjL69EvzlgAOrX1zgaojRMmjSpWbNmADIyMiIjI2WHIxIT45WtdsB336F7dzRujNhY2EexqK3x9ddf9+7dG4CDg8ORI0e8vb1lRyQGe5kJi4qU1NDz57Ftm+xoiDLRq1ev1157DYDZbPbz87OZ+cNeTLh0KU6eBIDKlTFnjuxoiLISFxdXoUIFAAcPHkxMTJQdjhjswoTp6cXl8GFheOIJqdEQ5eDZZ5+d8CDBNzAwMNsmql3swoTTp+PuXQBo1Ki40ShhUCIiImrVqgXgypUrcXFxssMRgO0vzJw+DU9PpbHv1q3o1092QES5Wb58+fjx4wFUqlQpNTW1vsEXsm1/JgwMVBzYtSs50EZ49913PT09AWRnZ4eFhckOp7zY+Ey4dSv69wcAR0ckJ6NVK9kBEYI4cOBA586dGWMmk2n//v0dO3aUHVHZsWUT
5uejZUulEsLXV2bnmMxM7Nol7ewuLidzc3+VdfY+ffo4OztroTxgwAC+QOrt7X3kyBEH4/ZBkNnqTWPmzVMaFlavzm7flhlJSooSiZR/XbrIzMG7c+eORlf10qVLlSpV4mdZs2aNRmexAob94/Eobt5EdLQynjULNWtKjYbQgPr166sVhlOnTs3IyJAbT5mx2cfR0aOVZk3Nm+PnnyV3i7l6FX5+0s5ev/4nly//P1lnX7t2rZubm0bi2dnZzZs3v3z5MoCQkJC5c+dqdCJNsU0THjuGtm1hNgPAzp3o0UN2QIRmfPrpp++88w4AZ2fnU6dOPfPMM7IjKjU2+DjKGPz9FQf260cOtHGGDRvm4+MDID8/f9q0abLDKQs2OBN+9hn+9S8AcHbGyZNo2lR2QITGHD16tF27dmazGcC333776quvyo6odNjaTJiTg9BQZRwQQA60C7y9vYcNG8bHAQEBhUbb99zWTDhvHi5dAoDatRESIjsawlrMnz+/SpUqAE6fPr1y5UrZ4ZQOmzLhlSvFu3xGR6NqVanREFakTp06arvu8PDwuzxh3yDYlAmDgpSNXFq3xsiRkoMhrMzkyZObNGkCIC0tzVjtum1nYebHH9GxIxiDyYR9+9Cpk+yACKuTmJg4YMAAAE5OTsnJyS1btpQdUYmwkZnQbIafH/jfk8GDyYF2yltvvdW9e3cAhYWF/sapHLWRmXDlSowdCwCurkhNxdNPyw6IkERKSoqXlxdfIN2+fTtvDKVzbGEmzMzEjBnKeOpUcqBd4+Hh8e9//5uP/f3980rY4lIqtmDCmTPBm8E+9RSCgmRHQ8hmzpw5jz/+OIDz588nGGHrc8Ob8Px5/N//KePYWDz2mNRoCB1Qo0YNtV33rFmzrl+/LjeeR2J4E/r7K021O3TAoEGyoyH0ga+vL18azczMnKF+V9Erxl6Y4U21ATg44NAhtG0rOyBCN3z33Xd8pdTBweHw4cPP63iLHwPPhIWFUDeNHD2aHEj8iVdeeaVXr14wQrtuA5twyRKcOgUAlStj1izZ0RD6Iz4+vmLFigB+/PHHTZs2yQ7nHzGqCdPSoGYmzZhBTbWJv6FJkya+vr58HBQUlJWVJTeef8KoJgwPV5pqN26MiRNlR0PolYiIiLp16wK4evVqrJrdrzMMuTCTkgIvL6Wl77ZtxVvME8T/snLlyrFjxwJwdXVNTU19Wn/JHIacCQMCFAd260YOJB7BmDFj+NJoTk5OqFrxrSeMNxN++SXeegsAnJyQnAyDJMoTMjl48GCnTp0YYyaTad++fZ10luBvsJkwPx9qL58JE8iBRInw8fEZOHAgAMaYn58f70ajHwxmwthYpa19jRqYPl12NIRxiI2N5e26k5OT165dKzucP2EkE968iZgYZTx7Nh5/XGo0hKGoV69e0IPs/pCQkHv37smNxxIjmTA4GLzTeYsWSvUgQZScadOm8Z0Mb968Ga3ukaADDLMwc/Qo2rVTWvp++y2M1lqS0AXr16/nzRGdnZ1PnjzZVB8tMY0xE1o21e7fnxxIlJGhQ4fypdH8/Pzg4GDZ4SgYYyb89FO88w4AODvj1CkYcLsBQi8cO3asbdu2fIF0586dPXSwTYIBZsLsbDwo0cTkyeRAoly0adNmxIgRfBwYGFhQUCA3HhjChHPn4vJlAKhTB8bc8IPQF9HR0Wq77hUrVsgOR/cmvHIFixYp45gYVKkiNRrCJqhTp07Igz0SZsyYcefOHbnx6N2EgYFKU+02bTB8uOxoCFshMDCQ72SYnp4+c+ZMucHoemHmwAF07qw01d6/Hx07yg6IsCG2bt3av39/AI6OjsnJya1atZIViX5nQrMZ/v5KU+1//YscSAjmjTfe4EujRUVFctt163cmXL4c48cDQKVKSE1F/fqyAyJsjtOnT3t6evJ23Vu3bu3Xr5+UMHQ6E2ZkIDJSGYeEkAMJTWjRosW4ceP4ODAwUFa7bp2aMDISN28CQL16CAyU
HQ1hu8yePZu36/7tt9/i4+OlxKDHx9Fz5+Dhgfx8ANi0CQMHyg6IsGkSEhImTZoEoHLlymfOnHnC6l3D9DgTTpqkONDHBwMGyI6GsHXef/99vjSamZmp9s+3JrqbCXfsQK9eAODggCNH4O0tOyDCDvjhhx+6desGwMHBISkpqV27dtY8u75mwoKC4m+A775LDiSsRNeuXfv27QvAbDb7+/tbeWbSlwkTEnDmDABUqVK8OkoQViAuLo63605KStqwYYM1T60jE96+XdxUOyICdetKjYawMxo3buzn58fHwcHB1mzXrSMThofjjz8AoEkTTJggOxrC/ggPD+dLo9euXVuwYIHVzquXhZmff4a3N4qKAODrr/H667IDIuySVatWjRkzBoCrq+vp06cbNGhghZPqZSb091cc2L07OZCQxsiRI9u2bQsgJydnmrWqV3UxE27erGyy6+SE48fh4SE7IMKOSUpK8vHx4b7Yu3dvly5dtD6j/JkwNxdqx52JE8mBhGTat28/ePBgPvb39y/iT2haIt+ECxbg4kUAqFEDMtIVCOKvzJ8//7HHHgNw/Pjx1atXa306ySa8dq24qfbcuahRQ2o0BAEAeOqpp6ZMmcLHYWFhWrfrlmzCadPA38d4eGDMGLmxEEQxwcHBfCfDW7duRUVFaXoumQszhw6hQweldn7XLnTvLisQgvgbNm7cOGTIEADOzs4nTpxo1qyZRieSNhMyBj8/xYEDBpADCd0xePDgzp07A8jPz1c3k9ECaTPhmjUYNQoAXFxw+jQaNpQSBUE8jOTk5LZt2/IF0m+++aZnz55anEXOTHj/PsLClHFQEDmQ0CmtW7ceOXIkH2vXrluOCefOxe+/A4C7O6ZOlRICQZSIuXPnVq1aFUBqaurSpUu1OIUEE/72W3FT7ehouLlZPwSCKCm1a9cOe/DYNnPmTC3adUswYVAQcnMB4MUXMWyY9c9PEKXDz8+P72SYnp4+Y8YM4frWXpjZswdduwKAyYSkJLzwgjVPThBlZPv27bz03tHR8dixY88995xAcavOhEVFUDsdv/MOOZAwDH369OFLo1q067bqTLh0qVKt6+aGM2fw5JNWOzNBlJfU1FRPT0++QJqYmPjmm2+KUrbeTJiejogIZRwSQg4kDEbz5s3H840ZgKCgoFy+sCEC65kwMhJ8YalhQ2qqTRiSyMjImjVrArhw4cIidYm/3FjpcTQ1FZ6e4K86ExMhbiYnCKuydOnSCRMmAHBzcztz5syTIp7orDQTBgYqDnz5ZXIgYWDGjRvHl0bv378fGhoqRNMaM+H27ejbFwAcHXHsGISu7hKEtVHbdZtMpqSkpBfKvcqv+UyYnw81AX3sWHIgYXi6du3Kt/hljAlp1625Cc+dUzadr1YNs2ZpfTaCsAaxsbEuLi4ADh06tH79+nKqWeNxNCsLMTF44gk8WOAlCMMTEhIyb948AO7u7r/88otbOXKgddHykCAMx/3795s1a/b7778DCA8Pn61u4VB6yIQEUUbWrFkzatQoAC4uLqdPn25Y1rpY+S0PCcKgjBgxgu9kmJubG6w2zy09NBMSRNk5dOhQhw4duIn27Nnz0ksvlUGEZkKCKDsvvvji0KFD+djX17ewsLAMIqU2YcuWMJmUfz16POw3p01Tfq1lyzIERhDGICYmhrfrTklJ+fjjj8ugUK6ZcNcunDhRHgGCMDzu7u5THzRKCg0NTUtLK61CeR9HFy4spwBBGJ4pU6bwnQzT0tLK0K67vCbcsAHXrpVTgyCMjYuLy/z58/k4ISEhJSWlVIeX3YTVqwNAQQEWLy6zBkHYCAMHDuQ7GRYWFgYEBJTq2LKbcOBAZbBiBTIzyyxDEDZCfHy8o6MjgN27d+/YsaPkB5bdhJ06KZ2z793DRx+VWYYgbAQvL68xD7YW8/Pzy8vLK+GBZTdhRkZx67QPPkCZXpAQhE0RFRVVrVo1AOfOnVuyZEkJjyq7CbOzMXo0qlUDgEuX
8MUXZVYiCBuhVq1a06dP5+OZM2feuHGjJEeV3YR5eXBzw7hxyn/GxpZZiSBsB19fX96uOyMjY/PmzSU5pOwm5DmnEyeiQgUAOHoUe/eWWYwgbARnZ+eOHTuaTKaePXvyPUYfSXnfE7q7Y/BgZUyTIUHcunUrMTGRMbZz584NGzaU5BABCdyTJyuDHTuQmlp+PYIwMGFhYffu3QPw7LPPvvfeeyU5RIAJPT3RrRsAMEZZbIQBOHz4cFZWlhbKx48fX716NR/HxcVV4F/VHoWYUiZ1Mly3DjdvCpEkCE1IT0/v3bt306ZNP/zwQ7PZLFbc39+f763dq1ev1157rYRHiTFhz55o0QIA8vKQkCBEkiA0ISIi4s6dO7///ntMTEx+fr5A5c8//3zfvn0AKlSosLA0z4RiTGgyFW8vsWyZ0uOQIPRGamrq8uXL+VhtWyiEnJycadOm8fGkSZOaNWtW8mOFVdYPG4Y6dQAgLQ38qdiBqvYJnREYGMj3Nnv55Zd5A19RzJ8//+LFiwBq164dHh5eqmOFGaViRWXvQQCLFsFsRsm+lBKEldi2bdvOnTsBODo6xsfHC1S+du3aggUL+FjNXCs5Imer8ePh6goA589jyxZlTBB6ID8/f8qUKXysbuoiiuDgYL7c6uXlNXr06NIeLtKENWtixAhlHBurFBwShB744IMPfv31VwDVq1efOXOmQOWkpCT1pbxazVQqBH9vCwiAyQQAhw7h7Fmx2gRRRm7dujVnzhw+Vjf6FILZbFb3hBk0aBCv6y0tgk3YtCn69FHGGzeK1SaIMhIaGsqzWCy3vBbCmjVrjhw5AsDV1TUmJqZsIuJXMNUX99R7htADycnJZchiKQmZmZnqQqja66kMiDdh5854/nnhqgRRRvz9/XlmTJ8+fXr27ClQec6cOdevXwfg7u5enjb4mrzLUydDFcaQk6PFqQjiYWzcuHH//v0AnJ2dY4WW+fz222/qew61/2/Z0MSEAwagfv3i/8zJQadOKGUHKoIoL5ZZLH5+frzWVhSBgYG8i0z79u3VTvhlhGlMcjIzmRjAHB3Z8eOanGLPHpaYqIkyYWgiIiL4h7x27dp//PGHQOXvv/+eKzs4OBw+fLicapqbkDH22msMYADr2JGZzSKVb99mAwYwgNWqxdLTRSoTRufKlSvqI+LKlSsFKhcWFrZq1Yorjxo1qvyC1jBhaiqrUEHx4ebNIpVzcljDhoqyv79IZcLoDH7Q8aF169aFhYUClRMeFAq5ubn9/vvv5Re0hgkZY/7+ilXq1WNZWSKVN29WlJ2c2KlTIpUJ4/Ljjz+aeNYIsG/fPoHKaWlp6rv+6OhoIZpWMmF6OqtVS3HL7NmCxV96SVF+5RXByoQRKSoqatu2LffJ22+/LVbc19eXKzdq1CgnJ0eIppVMyBhbtkyxSqVK7NIlkcqnTjEnJ0X8q69EKhNG5KMHDeFdXV0vXLggUDklJUV9179lyxZRstYzYWEh8/RUrDJ8uGDxceMU5SZNWG6uYHHCQGRkZDzxxBPcJxEREWLFezzYFrdr164CZa1nQsbYDz8oVjGZ2H/+I1L57l1Wo4YivnChSGXCWKj1Sk899dT9+/cFKm/dupUrOzo6njhxQqCyVU3IGHvrLcUq3t6sqEikclycolylCrt+XaQyYRTOnTtXsWJFbpXPPvtMoHJeXp76rn/ChAkClZn1TXjpEqtUSXHLmjUilQsKmIeHojx2rEhlwij07duX+6R9+/Zmoa+k582bx5WrV69++/ZtgcrM+iZkjIWFKVapU4fduydSedcuRdnBgf33vyKVCf3z3XffCcxiseTmzZtVq1bl4osXLxaozJFgwqwsVq+e4pbQUMHir7+uKPv4CM7OIfRMQUGBmsUyevRoseJqx4rmzZvn5+eLFWdSTMgYW7tWsYqzM/v1V5HKZ8+yihUV8U2bRCoTeuaDDz7gPqlcubKQLBaVY8eOOTxoHLhz506ByipyTGg2Mx8f
xSpvvilYPCBAq+wcQp+kpaU9/vjj3CcxMTEClc1mc6dOnbhy3759BSpbIseEjLGffmIODopbvv1WpPK9e6xuXUV51iyRyoQ+mfCg2WajRo1yhb4mXr9+PVd2dnY+c+aMQGVLpJmQMfbOO4pVWrRgBQUilZcvV5RdXQVn5xB6IyUlxcnJiVtl69atApWzs7OffvpprhwcHCxQ+S/INOGNG6xKFcUtS5eKVC4qYt7eivKwYSKVCb3x6quvapHFwrQsR/wLMk3IGIuKUqxSowa7c0ek8oEDSjGx8OwcQj98+eWXGmWxWJYjfvzxxwKV/xfJJszNZU2aKD708xMszut9AdamjeDsHEIP5OXlPfPMM9wnEydOFCv+9ttvc+XWrVsXafzpkWxCxtgXXxQXBJ48KVL58uXi7JzVq0UqE3ogOjpazWK5I/Q56uDBgxqVI/4t8k3IGOveXbFKt26ClcPDtcrOIeRy48aNKlWqcJ8kJCQIVC4qKnr+QdPOIUOGCFT+J3RhQsuCwO3bRSpnZbH69RXlkBCRyoRcRo4cyX3SokULsVksK1eu5Mqurq4XL14UqPxP6MKEjLH33lOs0rix4ILATz7RKjuHkMXRo0c1ymKxLEeMjIwUqPwQ9GLCu3fZ448rblmwQKSy2cw6dlSU+/cXqUxIwWw2d+zYkfvkjTfeECseFBTElYWXIz4EvZiQMbZokWKVypWZ0Ow/DbNzCOuzbt06NYvlV6HPNpbliBs2bBCo/HB0ZMKCAtaypWKVf/9bsPiIEVpl5xDWJDs7u/6D7u5Tp04VK967d2+u3KFDB7HliA9HRyZkjO3erVVBoGV2zpIlIpUJazJ9+nTukzp16ojNYrEsRzxy5IhA5UeiLxMyxnr1UqzSoYPggsA5c7TKziGsw+XLlytVqsStsmrVKoHKBQUFLVu25MpjxowRqFwSdGdCy4LAjRtFKuflsWeeUZQnTRKpTFiHQYMGcZ+0adNGbBaLur+S8HLEkqA7EzLGJk9WrPLUU0zsAlViolbZOYTWHDhwgGexmEym/fv3C1S+e/euWo44f/58gcolRI8mzMgoLgicOVOwuHbZOYR2WGaxDB06VKy4uoF248aNxZYjlhA9mpAx9uGHxQWBYpMWLLNztm0TqUxox4oVKzTKYrEsR9wm6QOhUxMWFbHnn1esIvoPHxs/XqvsHEILMjIy6taty30yU/SjkVqO2E3eo5FOTcj+XBAo9CuAhtk5hBZMfrD9er169bKEdg1KTEzkyk5OTmLLEUuFfk3IGBs0SLFK69aCCwLj45Vs0h49km7cuCFSmhDK2bNn1SyWzz//XKCyZTniJKnL5bo2oWVBoNDXQiw/n/Xrd61Bg1elvBciSk6vXr00ymKZO3cuV65Ro4bYcsTSomsTMsamTy8uCBTb5mP37t2yMiSIEmJ5j/4rNIXKshxxiewUKr2bMDu7uCBQdKqgtFxBoiQUFBR4eHjwG/Tuu++KFR8xYgRXbtGiRYHsZGK9m5Axtm5dcUGg2NaPllnzG8Wm5xDlZtGiRWoWy3Wh+2z99NNPajnitzooqzGACc1m1qmT4sN+/QSLS6kfIx6JZRZLbGysQGXLcsT++igwNYAJGWNHjxYXBIrdDsDyHZTVKqmJR/Lee+9plMXy6aefcmXh5YhlxhgmZIyNHFlcECh2Y5wPP/xQo2wMomycOnVKzWLZLrTpUFZWllqOGKKbpkOGMaFlQaDQ5loSumsRD6d79+78drzyyitilcPDw7lynTp17umm/Z5hTMgYi45WTFi9OhO7WaqV+0wSD+GLL75Qs1hOCi11sSxHXK2nRrRGMqFlQaCvr2Bxa3ZcJv6J3NzcJk2a8BvhJ7ol+4ABA7iy8HLEcmIkEzLGtmxRTOjoyMTm+l25ckX9M6n13gPEPxEVFaVRFotlOeJ/dLY5icFMyBh79VXFh6I34WEzZszgnwCtd+Eh/pbr16+rWSxLhW7TVVRU5O3tzZWH6W+bLuOZ
8ORJpSCwadOcr79OEqhstf3oiL9l+PDh/OJ7eHiIzWJZvny5ugB+SX8bVhrPhIyxyZOzO3f+zNHRuUmTJkbcmZX4XyyzWHbt2iVQ+d69e+qr4Fm63LrZkCa03KN83rx5ApWts0c58RfMZrOPjw+/7G+++aZY8YCAAK4svBxRFIY0IWNs8eLF/MoKb4917NgxjfY5IP6JtWvXapTFcvbsWWdnZy6+adMmgcoCMaoJCwsLW7VqxS/u6NGjxYqPGjWKKzdv3lzsjj/E/5KVlVWvXj1+wUNDQ8WKv/7661zZx8dHt4UyRjUh+3PL5MOHDwtUvnHjRtWqVbn44sWLBSoT/0toaCi/1MKzWHbt2qV+Qn766SeBymIxsAkZY3379uVXuX379mL/zs2bN48rV69e/bbY9BzCgt9++83FxYVf6rVr1wpUtixHHDt2rEBl4RjbhJYFgevXrxeonJeX17RpU648YcIEgcqEJW+99Ra/yN7e3mKzWBYuXMiVq1SpIrYcUTjGNiFjLDg4mF9rd3d3sQWBW7du5cqOjo4SW3HZMD/88AO/wsKzWG7dulWtWjUuvnDhQoHKWmB4E1purRoRESFWvEePHly5q/D0HLunsLDQ09OTX97hw4eLFR83bhxXFv4mWQsMb0LG2EcffcSvuKur64ULFwQqp6SkVKhQgYtv2bJFoDKxbNkyfmErVaokNovl+PHjjo6OXPyrr74SqKwRtmDCoqKitm3b8ov+9ttvixX39fXlyo0aNcrJyRErbrekp6fXqlWLX9ioqCix4i+99BJXFl6OqBG2YELG2I8//qgWBO7du1egclpaWs2aNblydHS0QGV7xt/fn19S4VksmzZt4spOTk6nTp0SqKwdNmJCxtiQIUP41ffy8iosLBSonJCQwJXd3Nysv3md7ZGamqo+5G/evFmgck5OToMGDbhyQECAQGVNsR0TXrly5bHHHuM3YOXKlQKVLbNzRo0aJVDZPnnttdf4xezYsaPYt7uzZ8/myjVq1Lh7965AZU2xHRMyxiIiIvg9EF4Q+MkYmBEAAAYHSURBVP3333Nl4dk59sZXX32lXkmxWSxXr151c3Pj4suWLROorDU2ZULLgsCgoCCx4v369ePKwrNz7If8/PxmzZrxyzhu3Dix4sOGDePKwssRtcamTMgY27BhA78Tzs7Ov/zyi0Dl8+fPq9k569atE6hsP8TGxvILKDyLJSkpSV2ZE1uOaAVszYSMsc6dO/Ob0bt3b7HKU6dO5crCs3Psgbt376pZLHFxcQKVzWZzu3btuPKAAQMEKlsHGzShZUHgN998I1DZMjtn+vTpApXthG3btjVu3LhJkyZ5eXkCZdesWcNvSsWKFc+ePStQ2TrYoAkZY2PGjOF3RXhB4KpVq7iy8OwcOyE3N1ds2W5mZuaTTz7Jb0pYWJhAZathmya8efOmWhAYHx8vUFlt112vXr39YnfxJspESEgIv9F169bVT1PtUmGbJmSMzZ8/n98b4QWBBw8ejIiI0Ge3Envj/PnzajniJ598IjucMmKzJrQsCBw/frzscAhN6N+/P7/FL774onHfG5kYY7BRtm/fzkvvHR0djx079txzz8mOiBDJnj17unbtCsBkMiUlJb3wwguyIyojDrID0JA+ffr07NkTQFFRkZoxTNgGlvd0+PDhxnUgbNuEAOLi4niu8J49e7788kvZ4RDCWLFixYkTJwC4ublFR0fLDqdc2LgJmzdvPn78eD4OCgrKzc2VGw8hhPT0dDVPOCQkRH1FYVBs3IQAIiMjeUHghQsXFi1aJDscQgCRkZF37twB0LBhw8DAQNnhlBdbXphRWbp06YQJEwC4ubmdOXPG6H847ZzU1FRPT8+CggIAiYmJb775puyIyovtz4QAxo0bx5dG79+/r7aaJQxKYGAgd+DLL79sAw6EncyEAH744Ydu3brB+MvZdo7la6ejR4+q/doMjV3MhAC6du3KX+wyxvz9/e3kT4+NkZ+f
HxQUxMdjx461DQfCfkwIIDY2lqc4HTp0SN2HkDAQixcv/vXXXwFUr1591qxZssMRhh2ZsFGjRurr3WnTpt2/f19uPESpuHXrlrqjfUREhNoCzwawIxMCCAsL40uj165di4mJkR0OUQrCwsLu3bsH4Nlnn33//fdlhyMSe1mYUVmzZg3fftDFxeX06dMNGzaUHRHxaJKTk9u2bVtUVARgx44dar8228C+ZkIAI0aM4K0QcnNz1c1kCJ3j7+/PHdi7d28bcyDscCYEcOjQoQ4dOvD/8T179qhd0wl98vnnnw8ePBiAs7PziRMn1H5tNoPdzYQAXnzxxaFDh/Kxr69vYWGh3HiIh5CTkzNt2jQ+njhxou05EPZpQgAxMTG8XXdKSsrHH38sOxziH5k/f/7FixcB1K5dOzw8XHY4mmCnJnR3d1f7F4aGhqalpcmNh/hbrl69umDBAj6OiopSOybaGHZqQgBTpkzhm4ekpaWpL6AIXREcHJyVlQXAy8tr9OjRssPRCntcmFHZvHnzoEGDADg5OR0/ftzDw0N2REQxSUlJPj4+/PO5d+/eLl26yI5IK+x3JgQwcOBAfmsLCwsDAgJkh0MUYzab1RTfQYMG2bADYeczIYDjx48///zz/B3U119//frrr8uOiACAVatW8Q7Orq6up0+fVncdtEnseiYE4OXlpbbr9vPzy8vLkxsPASAzM1NdCFW/utsw9m5CWCy7nTt3bsmSJbLDIRAVFXX9+nUA7u7u9pDVZO+Po5y4uLjJkycDcHR0bNOmjbOzs+yI7Jfc3Nzk5GSz2Qxg/fr1alqFDUMmBICCgoJWrVrdvHnzjz/+kB0LATc3N5PJ1LJly4MHD6q7DtowTrID0AUVKlTYuHGju7t77dq1ZcdCoFKlSj/99FNWVpY9OBA0E/6FAwcOyA6BgLOzs7rppz1AJiQIydDqKEFIhkxIEJIhExKEZMiEBCEZMiFBSIZMSBCSIRMShGTIhAQhGTIhQUiGTEgQkiETEoRkyIQEIRkyIUFIhkxIEJIhExKEZMiEBCEZMiFBSIZMSBCSIRMShGTIhAQhGTIhQUiGTEgQkiETEoRkyIQEIRkyIUFIhkxIEJL5/5hXX1UbFbyUAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3de1xT9f8H8Pc2GOMiiAjIBqhcxBtpICJewBRNETW1UBOVTC3vt4zKvlGWaXaRNPNaqKnlXTE1tSglvA3kovwEEZTLBshdbhtj+/z+OIY4LnLZ9tnG+/no4YMdtp2XxovPzu1zWIQQQAjRw6YdAKGODkuIEGVYQoQowxIiRBmWECHKsIQIUYYlRIgyLCFClGEJEaIMS4gQZVhChCjDEiJEGZYQIcqwhAhRhiVEiDIsIUKUYQkRogxLiBBlWEKEKMMSIkQZlhAhyrCECFGGJUSIMiwhQpRhCRGiDEuIEGVYQoQowxIiRBmWECHKsIQIUYYlRIgyLCFClGEJEaIMS4gQZVhChCjDEiJEGZYQIcqwhAhRhiVEiDIsIUKUYQkRogxLiBBlWEKEKMMSIkQZlhAhyrCECFGGJUSIMiwhQpRhCRGiDEuIEGVYQoQowxIiRBmWECHKsIQIUYYl1Hml8tLgR8EWiRZdk7ouzFooUUiY5WfKznS70800wXRp9lK6CVHzsIQ6b17mvIfSh0l9ku70uZMqSV0tWs0sn2wxOc89b5H1Irrx0AsZ0A6A2qWotuhM6ZkYt5ju3O4AENE9IrE6kXYo1DpYQt32sOahAhQeJh7MQycjJycjJ7qRUGvhx9H2EYth4kQwMYFu3eCLLzS//pLaEiOWEZfF1fyqkargSNg+M2eCpSVkZ0NGBowfD/b2EBKiyfXbGNpIibRaUW3MNtbkepEK4UjYDhkZcPUqfPUVWFmBlxcsXAg//aThCM5GzqZs05uVN5mHhbWFcx7NUYBCwzFQe2AJ2yE5GXg8cHN7+tDDA+7c0XAEM7bZ/K7z14rWZkgzCmoLQjJDDFgGbPzfqlPw42g7VFWBhcWzhxYWUFGh+RSb+JtWi1YPShnEYrGmdZ4Wbh8OAKmSVL80PwAol5fLifxwyWFrjnVqv1TNx0MvxCKE0M6gs6KiICAAJE8PjsPx4/DOO1BURDWTsisVV0anjbY2sE7sk2hjYEM7DmoEfm5pBxcXkEohOfnpw9hYGDCAaqBGjDAbMdJsZJ4sD7cVtRaWsB0cHWH0aPjgAygqgthY2LsXFiygnUkZG9iHehyyNbS9+OTid/nf0Y6DGoElbD2hEMaNgx07AAAOHQIAcHCAwEBYuxZmzqQbrVG2hrYR3SNYwPpI/NH1yuu04yBlWMLWi4mBixchPh4AwNYWzp6FqirIy4NJk6CsjHa4xo03H7/CZoWMyIIfBZfJtTRkh4UlbD2hEADAy0t5+dSpYGkJSUmaT9QSmwWbvU29M6QZC7K07jNzB4clbL1btwAalLCsDO7fBy4XevemEuqFDFmGh3ocMueYHys59nPRz7TjoGewhK1UUgLp6cDjQb9+zy2PjQWFAgYOBK72nsbpbOS823E3ACzPXn5Pco92HPQUlrCVhEIgBDw8wNBQeTkADB5MJVTLTbecPqfLnEpFZdDDoGpFNe04CABL2GpNla2pDUXt86Pjj715ve9W310rWks7CwLAErZaU2VrdENRK5myTY/2PMpj87YXbD9Veop2HIQlbK1GS5iXBzk5YGEBvXpRCdVa7sbum/ibAGBe5rxHNY9ox+nosIStIRKBWAydO4OLy3PLmWHQ0xPYOvPvudxm+WSLyW4VbgveXCCTyWjH6dB05odGK9R95mSxnluuI3tl6mMBa6/tXtFM0Z8n/ly/fj3tOB0alrA1mtog1J29MvV1Net66OAhDofz5Zdf/vXXX7TjdFxYwtZotGyEQGxsI8t1ga+v77p16xQKxaxZs/Lz82nH6aDwesIWIwS6dIHSUhCJgM9/tvzBA3B1hW7dIDeXXri2UygUY8aMiYqKGj9+/Llz51hKn7SR+uFI2GJpaVBaCnZ2zzUQdPWzaB02m71//34rK6sLFy6Eh4fTjtMRYQl
bjNkr4+2tvFzHSwgA9vb2+/fvZ7FYoaGhN2/epB2nw8ESttT39+597+OTMnq08jd0cNdoQxMmTFiyZIlMJps1a9aTJ09ox+lYcJuwpYYOHXr9+vVLly6NGTOmbqFcLq/28jJLTgaRCLp2pRiv/aRSqY+PT3x8fFBQ0JEjR9S6LolEIhKJxGJxVlaWSCQSiUTZ2dkmJiZr164doH1ThKgblrBFZDKZhYWFRCIpKiqytLSsW56UlDRgwAB3N7eklBSK8VTlwYMHHh4e5eXl+/btmzt3bjvfTSKRiMVisVicm5ubkZHBfMH8+ejRI4VCecIbY2NjiUQSERHR/lXrFpzysEXu3r1bXV3dq1ev+g0EgFu3bgFAv5dfppRLxVxcXL7//vt58+YtWbLE29u7dwuujSwpKWHGMZFIlJOTk5OTU/ewtLS0qVdxuVw+n29vb+/g4MDn8x0cHOzt7ePj4zds2NDyVesNLGGLMGXzarD3RSgUNrpcd7311ltRUVEHDx4MCgq6efOmsbExAJSUlDQ6oD148KCs6Rk9jIyMBAKBnZ0dn89n/nRycmK+6N69O4fDUXr+tGnTMjMzmVXfunWLx+Op96+qNbCELdJU2Zjlg3V8r4ySH3/88caNG3fu3OnTpw+HwxGJRFKptKknm5ub1w1ojo6OAoGg7osuXbq0dtU7duwQCoV37tz54IMPOs7xEtwmbJEBAwYkJSXFxMQMHTq0bqFEIjE3N1coFGVlZaamphTjqdzOnTvXrl1b8d+E4jwer/44VveFs7Nz586dVbvquLi4oUOHymSyU6dOTZ48WbVvrqUIepHKykoDAwMDA4Oqqqr6y2NiYgDgpZdeohVMfSZOnAgA8+fPT0tLq66u1vDav/nmGwCwtLTMzMzU8KqpwOOEL3b79u3a2lp3d3dmA6mO/m0QMtLS0s6dO2dkZPTFF1+4uLhoftts9erVEydOLCkpCQ4OlsvlGl675mEJX6zj7JVhfP/99wqFYs6cOba2tlQCsFisn376ic/nR0dHf0Hj1qsahiV8sQ61V6a0tHT//v0AsHTpUo2ttLi4WGnEs7a2Pnz4MIfDWb9+fVRUlMaSUIElfLFGS1hWVpaWlsbj8fr3708pl1rs3r27oqLi1VdffemllzSzxqtXr7700ksbNmxQWu7n5xcaGqpQKObOnVukZfe6UjHaG6XarqioiMVimZiYyGSy+ssvXboEAD4+PrSCqYNMJnN0dASACxcuaGyl//zzD4fDYbPZUVFRDfMMGzYMACZMmKBQKDQWScNwJHwBoVBICPHw8DAwMFBaDnq3QXjixImsrCw3N7exY8dqbKV+fn7vv/8+sxWqNOIZGBj8+uuvXbp0OXfu3Pbt2zUWScOwhC/QofbKMMfHV69ezX5+xqpff/11x44dlZWValrv+vXrhw0blpOTM3fuXPL8gWsHB4c9e/YAwHvvvRfP3IRH/1AeibUec8Ts8OHDSssFAgEApKamUkmlDteuXQOALl26VFRU1F8ul8udnZ0B4PTp0+pbe1ZWFnOGzbZt2xp+99133wUAFxeXJ0+eqC8DLVjCF7hz587OnTuZcxo3bNhw5syZ2tpasVgMABYWFnK5nHZAlQkKCgKAdevWKS0/efIkAPTs2bO2tlatAY4fPw4ARkZGt2/fVvpWdXU1c4nTW2+9pdYMVGAJX+DJkyfh4eHM7gpms3DhwoWxsbHOzs6jR4+mnU5lsrOzDQ0NDQ0Ns7Ozlb7l6+sLAFu3btVAjHfeeQcAXF1dG454ycnJJiYmAPDLL79oIIkmYQmb9PDhw9DQ0LpzI11dXVetWsWcI/rJJ58QQmpqamhnVJn33nsPAIKDg5WWx8XFAYC5uXlZWZkGYtSNePPmzWv43d27dwOAmZlZSkqKBsJoDJawEbdv3549e3bd7tBhw4ZFRkYyu8jPnj3LLA8PD6cdU2UqKiqY7bFbt24pfSs4OBgA3nvvPY2FqRvxDh482PC7b775JgB
4enpKpVKNRVI3LOEzcrk8MjLS39+f6R6Xy33jjTca/lz+8ssvLBaLzWYfOXKESk6V27p1KwD4+voqLReJRFwul8PhZGRkaDLPrl27mBGv4X6v0tLSnj17AsCaNWs0GUmtsISEECKRSPbv39+nTx+mfubm5suXL8/Kymrq+V9++SXT0osXL2oypzooFAo3NzcAOHnypNK31q1bBwBBQUGaTzVz5symRrxbt25xuVwWixUZGan5YOrQ0UuYn58fFhbW9b85mnr27Llp06bS0tIXvnDVqlUA0KlTp7i4OA3kVJ8zZ84AQI8ePZR2flZVVTH/LNeuXdN8qroRr9FPwps2bQIAa2trkUik+Wwq14FLmJxM5s8f5enJ1G/IkCHHjh1r+V545pxG5kdBp48Wjhw5stFNXOYz4aBBg6ikIs2OeHK5nDmnx8/PT90HTjSgQ5bwzz9JQABhsQjAmSFDpk6d+u+//7bhbWpqasaNGwcATk5Oubm5Ko+pAUlJSSwWq1OnTkqDv0Kh6Nu3LwD89ttvtLIRQjZu3NjUiJefn29nZwcAGzZsoJJNhTpSCWtqyNGjZPBgAkAACI9HZs8m9+615y0rKyt9fHwAwN3dvaSkRFVJNYYZzFevXq20/MKFCwAgEAjoHoaRy+XMLK+NjnhRUVFsNtvAwKBtv0PboKS2ZNbDWeYJ5laJVgsyF1TLn845cLr0tG2SrUm8yZKsJW14245RwrIyEh5OHBye1s/WloSFkcJClbx3QUEBs2Nj5MiRmp8Joj3y8/N5PB6Hw0lPT1f61quvvgoAmzdvphKsvuZHvPfffx8AHBwcioqKNBBmSvqUoSlDH0kfiWvEvqm+i7IW1f/umpw1WMLGPHxIQkNJ585P69erFwkPJ89PFdN+WVlZDg4OADB58mQd2kT53//+BwDTpk1TWp6SksJcvaWZn+wXunjxYlMjnkwmYz6JTJ06Vd0xCmWF7Dj29YrrzMN0SfrJkuf2J2MJG4iLI7NnEwODp/UbNoxERhK1XZOWmJhoYWHB5XLDwm6qaRWqJZFImNkrGv5kL1iwAACWLGnLz5OarF27tqkRLzMzk5mReceOHWpae4GsILEq8eKTixAHUkWTJwl01BKKRCQwkBgbE1tb8vnnhBAil5PISOLv/7R7XC554w3S4IC7Ovzzzz8+PhcByPr1GlhbezHXB3l6eiotLyoqMjExYbFY99q3taxazY94x44dAwAej5eQkNDmVRTXFt+tvnv5yeVdBbvCxGELMxf63/fvm9zXPMEc4gDiYGXWSqPbRs28Q5tLqOOT/86cCZaWkJ0NGRkwfjzY28PQofDaa6BQgKUlvPsuLF2qfDtBtfHz8/vgA5g2DT75BDp3hmXLNLPaNtq2bRsArFmzRmn5jz/+WFVVNXHiRK2aiN7AwODgwYMeHh4nT57cuXMnc2VTnddff33+/Pl79+598803hUIhc9ZbQ7W1tXl5efVvQaOYrhB2FubU5IhlYhmRNbV2S46lgCvoathVSqTVimpjtnFTz2yjtv7i0ALp6QSA1J3L++GHZPhwQghZsoRs3UrKy6mE2r2bABAOhxw7RmX9LcLMzcHn85V2ftbU1Njb2wPAX3/9RStbM44ePQpNjHjV1dXMpDhvv/22SCSKjY09evRoeHh4aGjo7Nmz/f39nZyclOZGAAC/U37MKAdxYJlg2Te5r/99/4WZC8PEYbsKdkWWRsZWxpbWPj14Uy4vN403/fvJ38zDAlnB7Iez5eTZtWwd8uNoZCTh8Z49PHaMWFjQS/PMZ589/SB8+TLtKE0YP348AGzcuFFp+YEDBwCgf//+Wjuhy/z5842MjBq9mikpKcnIyKiZqdA5HA6fzx8yZMjUqVOXL1/+9ddfn4g9EV0enSHNkCgkLVn7iuwVg+4NSpekP5Y9nvBgwluPnru4sc0l1OVp8I8cgRUrIC/v6cPLl2H8eKitpZrpqZUr4fvvwdwc/vkHtO2WTffv3+/Tpw+
Px8vKyrKysqr/rcGDBwuFwoiIiJCQEErpXqCqqur+/fsDBw5s+C2FQsHsubGysnJycrK3txcIBA4ODgKBQCAQODo62traNhwMW0WikKwWrf6t+DcWizWt87Rw+3ATtkmqJNUvzQ8AyuXlANCJ08mCbZHaL7UV79uG4mqLv/4iRvU2lI8dI1260EvzHLmcBAURAGJnRzR7BcKLMRfOLlq0SGn5P//8AwA2Nja6dbSzzunTp0EjMwConC5P9OTiAlIpJCc/fRgbC1pzk1c2G375BcaOhdxcGDMG8vNpB/pPSUnJwYMHWSzWsgY7jphZnpYsWaKj9yTbsmULAKxcubLhTde0He3fAu0zejQJDCSFhUQoJFZWpMF0THQ9eUI8PAgAGTSIaMkERcxFWBMmTFBanpGRweFwjIyM8vLyqARrp6SkJNDgDACqpeMlzMt7dpxw0ybaaRrx+DHp1YsAkFGjiKRFG/9qVLfz83KDXUbLly+HJiaV0Alz5swBnb3SV8dLyLh+nbz8Mpkzh3aOxqWnk27dCACZMYPQnZzt4MGD0NjOz9raWicnJwBITEykla09mjkJVifo8jZhHQMDiI+Hu3dp52ickxNcugSdO8Nvv1E+gs9MY7Fq1SoWi1V/OYfDSU5OjoyM1Nj9J1Rr27ZtEolkypQpzK8SnaPLhyjq5OWBnR3Y2GjRDpAGoqIgIAAsLSEhATRzx7G8vLy6U0NEIlFiYuL58+etrKxycnJ0dNdLo6RSaffu3fPz85Xuo6xDdPy0NYaNDXC5UFAAUikYGdFO07hRo+DYMejXT8UNLCkpycjIEIvFubm5zJ/Mw8zMzIaz1nfr1q22tlYikehTCQ8cOJCfnz9o0CAdbSDoyUgIAD16QGYmpKeDLnwgYbFgwQLYvRsAIDwcEhJg374mn1xTAyIRiESQnQ1iMWRnQ04OiMVgbX3x3LkJzdzI1sbGhs/n29vbOzg48Pl8Pp+/devWxMTEadOmMXNd64cBAwYkJSUdPnyYmRtKF+nFSAgA9vaQmQkika6U8NQp2LgR6s5Xqa6GrKznOpaVBSIRiMXPzghS4uvLl8vllpaWdnZ2fD7fycmp/heOjo6dOnVSesnIkSM9PDxOnDixa9cu5pC9rrt06VJSUpJAIHj99ddpZ2k7PSohAOTk0M7RIoTAhAmwZw988MHTJd98A5980viTDQ3Bzg4cHMDeHvh8cHQEPh/s7cHRsbetbY2hoWHL1+vk5LRnz56goKCVK1cOGTJkgNac29BmzAH6ZcuWterfQdtgCel45x2YMQPee+/pw549wcUFBAJwcACBAASCZ2Xr1g2e35dZpy0/dm+88cbbb7/9008/NX/Vj05ITU29dOmSiYnJ/PnzaWdpF30poUAAACAS0c7RUn36QK9ecOrU04fBwRAcrKFV//DDD0KhMCkpadWqVcy8hjpqy5YtCoUiJCRE6TR0naMXxwlB90ZCAFi+HL7/vqlRTo14PN7hw4eNjY13797966+/anr1KlJcXNzUSbA6B0tIzYQJkJcHSUkUVt2vX79vvvkGABYtWvTw4UMKCdpt586dlZWVAQEBWjUDQNtgCalhs2HJEqB1sGDx4sXTp08vKyubPn16TU0NnRBtJZPJduzYAQDMzQh0nb6U0M4OOBzIy9OSi3qbsm8f3L797OG8eaBQUAuzc+fOHj16CIXCsLAwaiHa5MiRIzk5Of379x81ahTtLKpA++RV1eHzCQBpcKNZ7fHoETE2JhyOFl3me+PGDUNDQzabfenSJdpZWsHLywsAfv75Z9pBVENfRkLQgU+k778P1dUwcyb07Ek7yn+8vb0/+eQThUIRHBycm5tLO06LREdHC4VCa2tr3T1FRgmWUEOuXYNjx8DYGDZsoB3leR999JG/v//jx49DQkIUFD8ctxhzgH7x4sV6cwas/pQweeDAs35+V0tKaAdphEIBK1cCIfDBB+DoSDvN89hs9sGDB21tbS9dusTsMtVmjx4
9ioyMNDIyUpp6VKfpTwnPGRlNunLl7P37tIM0Yv9+EArB3h4azLWrFWxtbSMiIlgs1rp1665fv047TnPCw8Plcvmbb77ZrVs32llURn9KyEzckKN9H0crKmDdOgCAzZuh6UkxKRs/fvzKlStra2tnzJhRopWfJgCgvLx83759ALBixQraWVQJS6h2X34JubkwZAjMmEE7SrO++uorb2/vrKyshQsX0s7SuL1795aVlY0ePVoPTj1/Du3dsyqTnp4OAD169KAd5DlZWcTEhLBY5KYu3KzpwYMH5ubmALBnzx7aWZTVTYRz9uxZ2llUTH9KKJFIWCyWoaGhnO5sSs97/XUCoLVzUDXiyJEjAMDj8bRt0ifmQmRXV1et+v+rEvpyZT0AANjY2BQUFOTl5dlqZhaXF7l2DYYPB2NjuHdP63aKNiMkJGT//v39+vUTCoXGxqq+AxFAdXW10mQcdV8sW7bsww8/bPRVw4cPj4mJ2b59++LFi1UeiS59uZQJAADs7e0LCgpycnK0oYQKBaxYoaWHJZq3ffv2mzdvJicnr1mz5scff2zbmxQUFIjF4uzs7JycHLFYXP+GZBUVFU29qqlN+ri4uJiYGEtLS2Z+UT2jbyWMj4/Pycnx9PSknQUiIiA2VnsPSzTD1NT06NGj3t7eO3bs8PX1ndH0DiWZTJaQkFA3oZtIJMrKyhKLxTk5ORKJpKlXmZmZOTo6CgQCPp/PfFF355auXbs2+pLvvvsOABYuXGhmZtb+v6C20bcSQmO/TUtKStavX+/l5eXl5eXi4sJS/zV85eXwv/8BAHz9Nejixevu7u6bNm1asWLFokWLhgwZ0qNHj0afVlpaOnjw4Ea/xePxGs58w+fzmT9bFUYsFh8/ftzAwGDJkiWt/YvoBL0qoUAgAABRg+vrhUIhc7cTADA3N3d3d/f09PT09Bw+fLiapotlDkv4+MD06ep4e01YtmxZVFTUmTNnpk+f/u+//zY6iUvXrl2HDBliY2PDjGP29vZ1NyRT4cbktm3bampqZsyY4eDgoKr31C609wypEnMkd/bs2UrLHzx48Pnnn0+aNMnOzk7pry8QCF577bUNGzZERcWVlKgmRkYG4fF05rBEM4qLi7t37w4AH3/8Ma0MlZWVzOwVN27coJVB3fSqhH/++ScAvPLKK808RyQSRUZGhoWFBQYG1t8CGTnyE+Z2goGBZNMmEh1NKivbGGPaNAJA5s5t48u1ytWrVzkcDpvNbngPGc3Ytm0bAAwdOpTK2jWj1YcokpOTeTyes7Nzu8Zf9UhNTe3du7erq+v9lp1BSgi5f/++UCgUCoUFBUGnTg2rvzfBwADc3cHLCwYPBi8v6NsXWnKb15gYGDECTE0hNRVaue2jpT799NPPPvvM1tY2ISFBTWdsSqXSoqKihkcscnNzMzMzFQrFnj173n77bXWsWhu0roQJCQmjR4+2tLSMiYnRhsMASiorK83MzIyNjauqqtrwcpkM7t6FW7dAKIRbt+D//g/qT29tagp//glDhjT3DgoFDB4McXHwxRdPzxfVAwqFYsyYMVFRUePGjTt//nybd2tVVFQ0esRCJBIVFhY29So2m61QKMaPH3/u3DkN7FGjonUlrKqq8vf3v379uru7+9WrVzt37qy+ZG1jaWlZWloqFosbbv61VmUlxMdDXNzT/1JS4PFjaH5yvb17YcECcHCAlBSd3CnaFJFINHDgwMLCwm+//Xb16tXNPJM5EN9wQGP+bOpVXC7Xysqq4U5UJycnDoczaNCgwsLC7777Tj9mlGmo1R9HCwsLR4wYkZKSMnLkyAsXLmjVhZUZGRne3t5yuby8vLxXr16e/xk8eDCXy23nm5eUgKVlc08oLwc3N8jNhSNHICionWvTOufPnw8MDDQwMIiOju7Vq1ejHXvw4EFZWVlT78ActFDqGPNFjx492OwmryWov2pvb2/1/P1oastpa9nZ2cOGDcvOzp48efKJEye05BbhsbG
xAQEBBQUFNjY2RUVF9e+UYmZm5uHhwRwnHDx4cE/1TC8RGgqbN4OPD8TEUJhNVANWrFixdetWQ0NDmUzW1HMsLCzqbkHj4OBgb29fd0TesvnfYc1avnz5tm3bnJ2db9++zZxirk/aeO7o3bt3fX19S0pK3n33XWbyObqioqKmTJny5MkTf3//kydPstns+Pj4uP/cu3ev/l/TwsKif//+zHFCX19flWzcVlVBjx5QVAQ3b8KgQe1/P20klUr37t27ZcuW4uLiRgc0Z2dnNW2hSKVSHx+f+Pj4oKAg5hRzvdLm/apXrlxhPouuX79eBbtp2+HEiRNMkuDg4JqamoZPePz48blz5z799NMJEybY2Ngo/Qv07Nlzxgzp5s3k77/Jkydtj5GXRyIi2v5yXdHov7AGpKWlMfeZ2rdvH5UA6tOu44RnzpwxMDAAgK1bt6oqUGtt27aN2ZxYvnx5Cy9yqX+osEuXLs7OfQFI3X9OTmT2bBIeTqKjSVWVuuOjVoiIiAAAU1NT5qON3mjvwfrdu3cDAIfDOXbsmEoCtZxCoWBmrWWxWGFhYW17E7lc/n//J46IIIsXEy8vwuWS+oXkcomXF1m8mEREkLt3SW3tc68FIAsWPP16yxY9OTqv5YKDgwHA3d29So9+QargjJnPPvsMALhcriZPqqitrV2wYAEAGBgY7N27V1VvK5WSmzfJDz+QuXNJ376EzX6uk506ET8/8t575OJFQghhsUjXrqSwkBAsoaaUl5e7ubkxH3xoZ1EZ1Zy2xky8Y25uHh8fr5I3bJ5EIpk2bRoAmJiYnDt3Tn0rKi8n0dEkPJzMnk369iUs1tM2Mn1jvti4kRAsoQbFxsZyuVwWi3X69GnaWVRDNSWUy+VBQUEAwOfzM9Q8yXtxcfHw4cMBwNLS8t9//1XrupTk55PffydhYYSZ5QSAXLtGHB2JTIYl1Khvv/2W+QHIzMyknUUFVHYCt1QqHTt2LAA4Ozvn5eWp6m2ViKGzNkQAAA3lSURBVMViZqYtPp+flJSkprW0EAApKSH+/uToUSyhRikUikmTJgHAiBEjapW21HWQyqY85HK5x48f9/DwSE9PDwwMbGYKgzZLSUkZMmRIYmJi3759b9y44e7urvJVtAGte312ZCwWa+/evQKBYCibTb78knacdlNtpx8/ftyrVy8AGDVqlEQiUeE737p1y9raGgC8vb0LCgpU+M5txoyEcjlxdibz5uFIqGll//xD2GzC4ZArV2hnaRfVX0+Ynp7OXPAyY8YMVc1Od/nyZeZA7cSJEyvbfJ2fqjElJIR89x0xN8cS0rBuHQEgAgHRjt/LbaOWi3oTExOZ05cWL17c/nf75ZdfmLkV5syZQ+t0jUbVlbC0lJiZYQlpkMnIsGEEgAQEEIWCdpo2UteV9X///beRkREAbGR24bdVeHh43QkxCp39V0ZqlJVFrKwIAKF32lY7qXF6i1OnTnE4HGYbug0vVygUoaGhAMBisb7++muVx0P648QJAkCMjMjt27SjtIV655hhLrDgcDgnTpxo1QtlMhkznQGXyz18+LCa4iH9sWgRASAuLu06B58StU/09PHHHwOAsbFxdHR0C19SWVkZEBAAAKamphcuXFBrPKQnqqvJgAEEgISE0I7SapqYbW3p0qUAYGFhkZCQ8MInFxcXDxs2DAC6dOly7do1DcRDeiI5mZiYEADyyy+0o7SOJkpYW1vLnOopEAgePXrUzDNFIhFzCL5Hjx4pKSkayIb0yp49BICYmRGd+uHR0Lyj1dXVvr6+AODq6pqfn9/oc5KTk5kplvv165edna2ZYEjfvPkmASCenkQqpR2lpTQ3+W9ZWdnAgQMBYPDgweXl5UrfvXHjBjMVr5+fX2lpqcZSIX1TXk5cXQkAWb2adpSW0ugM3CKRiLm1iL+/v7TeL6rIyEgTExMAmDx5sj5drInouHGDGBoSFovExNCO0iKangY/LS2NmVhp1qxZzMH3/fv3Myf
EhISEyGQyDedB+unbb8lXXxEduacvhTv1xsbGvvLKKxUVFe+//z6fz1+1ahUhJDQ0dNOmTRpOgpBWoFL9P/74o+5WW2w2e/v27VRiIP0nEpHAQGJsTGxtyeef007TOJVdT9gqr7zyipeXl6GhIZvNfuedd/TvLuRIW8ycCRwOZGfD2bMQHg779tEO1AgKH0crKiqmTp16+fJlHo8nkUgMDQ0jIyPHjRun4RhI/2VkgLMzpKSAmxsAwEcfQXQ0REfTjqVM0yNhcXHx2LFjL1++bGtrGxMTExoaKpPJpk2bdu3aNQ0nQfovORl4vKcNBAAPD7hzh2qgxmn0dtmPHj0aN25campqz549L1686Orq+vLLLxcWFv7000+TJk2Kjo7u06ePJvMgPVdVBRYWzx5aWIAaZl1pP82NhMnJySNGjEhNTXV3d//3339dXV0BgMVi7dq1a8qUKUVFRWPHjs3KytJYHqT/rK2htPTZw7Ky5zqpNTRUwitXrgwbNiwnJ2fkyJHR0dH8evew5XA4hw4dGj58eE5OTkBAQHFxsWYiIf3n4gJSKSQnP30YGwsDBlAN1AQN7IE9ffq0sbExAEyZMqW6urrR55SWljJzGXp7e1dUVGggFeoQRo8mgYGksJAIhcTKimjltalqL2FERARz05glS5Y0P+9TTk5O9+7dASAwMBBPnUEqkJtL8vKeHSfctIl2oMapt4R1J8GEhoa25Pn3799nbl02e/ZsnFEGtUtpKenUiYwaRZr48KU91FVChULB3GGcw+Hs3Lmz5S+8efOmmZkZAHz00UdqyoY6hK+/JgBk7FjaOV5MLSWUSqXTp08HACMjo6NHj7b25efOnWNOavvuu+/UEQ/pv9pa0rMnASDnz9OO8mKqL2F5eTlzU4rOnTtfaevUyAcPHmSxWCwWa//+/aqNhzqEI0cIAOnVSycupFBxCfPy8jw8PACgW7du7bxN2ubNmwHA0NDwjz/+UFU81FH4+BAAsmMH7RwtosoSZmRkMIfgnZyc0tLS2v+Ga9asAQATExOc8Qm1glBIAIilJdGRY10qK2FSUhJzCH7QoEFNzSLTWgqFIiQkBAC6du2K8z6hlpo+nQCQDz+knaOlVFPCqKgoc3NzABg1alRZWZlK3pNRU1Mzfvx4AOjZs6dYLFbhOyP9lJNDDA2JoSHRnbnCVFDCkydP8ng8AJg2bZpqb4fGqKysHDp0KAD079+/uLhY5e+P9Mr77xMAMmsW7Ryt0N4S/vDDD8wNW5YtW6aqG6E1VFhYyFxg4efn19SJbwiRysqnN4e5eZN2lFZoVwmZE2JYLFZYWJiK8jQpOzvb0dERACZNmoQntaHG/fADASDDh9PO0TptLGFtbe0777zDnBCze/du1WZqyt27d7t06QIACxcu1MwakS5RKEjv3gSAtPLuQ9S1pYQSieSNN95gTog5fvy4yjM148aNG6ampgCggbEX6ZizZwkA6d6d1NbSjtI6rS5hSUkJM6G9paVly2+0pEJnz55lLssIDw/X/NqR9ho1igAQHTzVsXUlzM3NZaayt7OzS0xMVFOmFzpw4ACLxWKz2W04MRXppYd37lS5uZFOnYgO3kOhFSVMT093cXEBgD59+mRmZqovU0ts2LABALhc7qVLl+gmQdrgrbfeAoD9n3xCO0hbtLSEQqGQudLPy8vr8ePHas3UQsylUubm5nFxcbSzIJry8/N5PB6Hw3nw4AHtLG3R0hIyN+4MCAiorKxUa6CWUygUc+bMAQBra+vU1FTacRA1YWFhADB16lTaQdqopSUUi8Vr166tqalRa5rWqqmpefXVV5lTxnNzc2nHQRRIJJJu3boBwNWrV2lnaSMKM3CrVlVVlb+///Xr11966aUrV6507tyZdiKkUT///PPbb7/t4eERFxdHO0sb0bkXhQqZmJhERka6ublZWVkxJ9ChDmXr1q0AsHr1atpB2k7nR0KGSCSytrbmcrm0gyCN+vPPP8eMGcPn8x8
+fKi7//c1Og2++ggEAtoREAXh4eEAsHTpUt1tIOjNSIg6oLS0tN69exsZGWVnZ1tZWdGO03a4EYV01ZYtW5jDVDrdQMCREOmokpISBweHqqqq5ORkXb+ZF46ESCft2rWrsrJy3Lhxut5AwJEQ6SKZTObs7JydnX3p0qUxY8bQjtNeerJ3FHUQNTU1YrE4IiIiOzu7X79+/v7+tBOpAJYQaR2pVFpUVJSbm5uRkSEWi+t/kZmZKZfLAaBLly6+vr4sFot2WBXAj6OIjvLy8uzs7JycHLFYnJWVJRKJRCJRVlaWWCwuKipq6lWGhobdunUzNja+f/++paVlQkICM/OQTsMSIjWqrq5udEDLyMgoKSlp6lVcLtfKyorP5zs5OdnZ2dX/wtHR0cDAgBAyZcqUM2fOeHt7R0dHM7cP0l1YQqQCiYmJmZmZdQNadna2SCTKycmprq5u6iWmpqaOjo58Pt/e3t7BwYHP5zs4ONjb2/P5fObK1eaVlJQMHDgwKyvr448//vzzz1X6t9E0LCFSAR8fnxs3bjRczuPx6o9j9Yc1Ozu7dm7RRUdHv/LKK4SQixcv6vQeGiwhUoHQ0NB79+4pDWiOjo4mJiZqXe+nn3762Wef2draJiQkMFcV6iIsIdJhCoVizJgxUVFR48aNO3/+vI7uLMUzZpAOY7PZBw4c6Nq16x9//LFlyxbacdoIR0Kk886fPx8YGGhgYBAdHe3t7U07TqvhSIh0XkBAwNKlS2Uy2axZs548eUI7TqvhSIj0gVQq9fHxiY+PDwoKOnLkCO04rYMjIdIHRkZGR48e7dSp09GjR/ft20c7TutgCZGecHFxYSZ9Wrp0aUpKCu04rYAlRPojJCQkODi4srIyKCiomZN1tA2WEOmVHTt2uLm53blzJzQ0lHaWlsIdM0jfxMXFDR06VCaTnTx58rXXXqMd58VwJET6xtPTc+PGjYSQefPmZWZm0o7zYjgSIj1ECHnttdciIyNHjBjx999/czgc2omagyMh0kMsFmvv3r18Pj86Olr7L3TCkRDprStXrowePZoQcvny5VGjRtGO0yQcCZHe8vPz+/DDD5kJggsLC2nHaRKOhEifyeXyMWPG/P333wEBAb///rt2XuuEIyHSZxwO58CBA1ZWVufPn9+2bRvtOI3DkRDpv99//33SpElcLvfatWseHh604yjDkRDpv8DAwEWLFkml0unTp5eXl9OOowxHQtQhSKXSIUOGJCQkzJ07V9sus8ASoo4iLS3N09OzvLz8wIEDs2fPph3nGfw4ijoKV1dXZh6axYsXp6am0o7zDI6EqGMJDg4+dOiQh4fHtWvXjIyMaMcBwJEQdTQ7d+7s1avX7du3P/zwQ9pZnsKREHU4t27dGj58eG1t7e+//x4QEEA7DnA+/fRT2hkQ0iiBQMDj8QBg8eLFZmZmtOPgSIg6JObHXkvOYsMSIkQZ7phBiDIsIUKUYQkRogxLiBBlWEKEKMMSIkQZlhAhyrCECFGGJUSIMiwhQpRhCRGiDEuIEGVYQoQowxIiRBmWECHKsIQIUYYlRIgyLCFClGEJEaIMS4gQZVhChCjDEiJEGZYQIcqwhAhRhiVEiDIsIUKUYQkRogxLiBBlWEKEKMMSIkQZlhAhyrCECFGGJUSIMiwhQpRhCRGiDEuIEGVYQoQowxIiRBmWECHKsIQIUYYlRIgyLCFClGEJEaIMS4gQZVhChCjDEiJEGZYQIcqwhAhRhiVEiDIsIUKU/T+75buP/xWOZQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAUl0lEQVR4nO3df1iN9x/H8fcp/SAZk8zv1mzRNhPbaH5c4mu2+VlM5EeF6zuENWsSXehKirDyY3XNmFyzGobMtbFZl6Ehme98J22WkS8iGqNWqvP5/nH73s63c5Bz7vu8z328Hn+5Pm3355bzvM+v+/7cOiEEAQAfB+4dAHjcIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBkiBGCGCAGYIUIAZogQgBki1Kz166lTJ3ruOYqL494VsIhOCMG9D/Do/viD+valEyfIzY169KBVq6hfP+59AjPhmVCbcnJo0CDy8KCGDWnsWNq3j3uHwHyIUJvKyqh587t/9vCga9dY9wYsggi1qUULKi29++fSUvL0ZN0bsAgi1Kb+/WnvXiotpcpK2rKF3nyTe4fAfA24d0AVNTU1ERERKm28S5cuU6dOdXR0VGn79dK+PcXHU58+pNdTWBj5+zPuixCipKRk0aJFKm1/4MCBgYGBzL9wVQm7k5ub+9xzz6n3G/Px8fH19a2oqOD56xUWithY8dln90ZKS0VsrEhN5dkfITZu3Pjiiy+q9wv38/MLCAjQ6/Vcf0G12dtXFHq93t/fPy8vb/DgwUOHDlV8+0KIlStXnjlzJi4ubsGCBYpv/+G++oqGDaMhQ+irr+6OFBZS587k40OFhdbfndu3b/v4+Fy6dCk8PLxHjx6Kb7+ysjI+Pv769euZmZljxoxRfPs2gfsooLD169cTUZs2bW7fvq3SFIcOHdLpdA0bNjx37pxKUzzIrl2CSAwZcm/k9GlBJHx8GHZGiLlz5xJRz5491XumWrduHRG1bdtWvX9TXnYV4V9//dWqVSsi2rx5s6oTjR49mohCQkJUncU0W4qwqKjI1dVVp9MdPXpUvVlqa2tfeeUVIlq4cKF6szCyqwjnzJlDRP7+/mq/fyguLm7UqJFOpztw4ICqE5lgSxGOGDGCiMLCwtSeKDc3l/PVh8rs59PRs2fPpqamOjg4pKSk6HQ6Vedq167d+++/Hx8fHxkZeezYMQeHx/GbnpycnJ07dzZu3DghIaHOj/bv33/s2DGzt+zi4jJr1izDkddee2306NFffPFFTEzM559/bvaWbRT3UUAxw4cPJ6Lw8HDrTFdRUdG+fXsi2rBhg3VmvMs2nglramq6dOlCREuWLDH+6XvvvWfJY9Ld3d14mxcuXHBzcyOiH374Qf2/n1XZyTNhTk5Odna2u7u74VH55MmT4eHhK1as6Gfxyc01NTVpaWlZWVn79+93cnIiooYNGy5ZsmT8+PExMTFBQUFPPPGEhVNoS3p6+smTJ729vU32FhAQYMnXeq6ursaDbdu2jYqKiouLi4yMzM/Pt6tXH9xHAQXU1NRI31MlJSUZjg8cOJCIIiMjLZ/izp07Pj4+RLRy5Up5UK/
X9+nTh4iio6Mtn6K+bOCZsKyszMPDg4i2b99utUmFEBUVFR06dCCiTz75xJrzqs0eIly9ejUReXt7V1ZWyoNbt24loieffPLatWuKzLJ7924iatKkyeXLl+XB48ePOzg4ODs7//bbb4rM8nA2EOHMmTOJqH///labUSa9IfT09Lxx44b1Z1eJ5iOUj8o7d+6UBysrKzt27EhEaWlpCs715ptvEtE777xjOBgWFkZEI0aMUHCiB+GOsKCgwMnJydHR8eeff7bOjHVIrz7mzJnDMrsaNB/hjBkzjI/KixcvJqLnn3++urpawblOnz7t5OTk4OCQn58vD5aUlDRp0oSI9uzZo+Bc91Odt6dq1GuVCdPkkZoLv1aNeq1yZqAVZhdCvPHGG0Q0ffp060xn7KeffpJeffz6669c+6AsbUd46tQp6ah88uRJefDy5ctSFd9++63iM0ZGRhJR7969Db+KTExMJCJfX987d+4oPmMdN27sys+nM2fuPRP+/ffp/Hz65RdrPBNmZ2cTUbNmzUpLS60w3f1MmjSJiIYNG8a4DwrSdoSDBg0iohkzZhgOTpgwgYhGjhypxox//vlnixYtiGjr1q3yYFVV1bPPPktEq1evVmNSQ4wRVlVVSWfGp/KdLC65cuWK9HH0N998w7snitBwhDt37jQ+Kh8+fFin07m4uJw5c0aledPS0oioXbt25eXl8uCOHTuknVHqc6D7YYxw2bJlRNS5c2crPOE/1NKlS21nZyyk1Qjlo7Lhk49er5dO5J8/f756U9fW1nbv3p2I4uPjDcdff/11Ipo5c6Z6Uwu+CG3tyUd+AKxatYp7Xyyl1QiTkpKMD4QZGRlE9NRTT928eVPV2Q8ePKjT6Ro1anT+/Hl58NSpUw0aNKjzBlVxXBFOnjyZiIYOHarqLI/E5EshLdJkhCUlJdJR2fADyVu3brVu3ZqINm3aZIV9GDlyJBFNmDDBcFC6nF/VL9BYIrTZDySlDwUiIiK4d8QimowwPDyciIYPH244OG/ePCLq3r17bW2tFfbh/Pnz0oUUBw8elAfLysqaN29e50tLZbFE2LdvXyL64IMP1JvCPOxfWipCexGaPCqfPXtWurDNMAm1zZ8/3zj7VatWGZ++oyDrR5iZmWnLJ6kwnr6jFI1FKJ+uWeeEiaCgICKaOHGiNXemvLxcupBi48aN8qB8IuvSpUvVmNTKEdr+6ZryKVM7duzg3hczaSzCzZs3Gx+Vc3JyiKhx48YXL1608v5s2rSJiFq2bGn4UdC+ffuIyN3d/dKlS4rPaOUIFy5cSER+fn41NTVqbF8R8snDf//9N/e+mENLEcpH5fXr18uD8oVtCQkJ1t8lvV7fu3dvIoqJiTEcHzZsGBFNmjRJ8Rlv3TpQUNCtuPjepSFVVX8UFHQrKlL+5AStXMInv/pITEzk3hdzaClCaXUzPz8/w/dgH330ERE9/fTTXEdB6dq2OhdS/P777y4uLg4ODqouvqI2aXWzMWPGcO/Iw33//fdcr4YsV68IT5wQvXrd/XNoqDA4YUucOCF0OiF9RBId/X8/UtaFCxeMl3WR3w9s27ZNrYnrYeLEiUQUFBRkOKjygje1xcUzCwq6FhR0LSoaVVur/AHoxx9/1NayLqoueGP8OH9AFI9KgcuTvbzoww8t38xDREVFVVRUjBkzRvpgRrJ79+7r16/369dP+taOS2JiYuPGjb/77ruLFy/Kg7Gxsa1atTp8+HBWVpbiM5aX51dUnOjc+afOnU84O3coLz+i7Pb1ev27774rhJgzZ470FsD2rVixwsXFZdOmTXl5eWpsX73HuQLLW/ToQf/6F12/buJHL7xAp0495H/39NRfvfrwpRDc3NxcXV2lE2VkEyZM6NixI/vSEq1bt87KyurWrZu04KLE3d09Pj5+ypQpoaGhISEhD91IixbNv/nG1C/RiJubv7d3ZnV1SXn5ETe3Hm3bLr99+8fjx+u1ttWNG73+8Y/c+vyXrq6
unp6e0vO5Jnh7e0+fPj0lJaWeaxC/+GKvf//7UH3+y7AwevfdBz3OLVTfZ8L8fPLyIi8v2rat7o+EoH/+k9LSFN4zo1lMrxTu7+/v6+ur7tz1MHjwYMMC1ebs3OGZZ74sK/v89OlXi4un6fUVVps6Ozu7VL4hFJ8tW7bcunXLmjMaP84fEMWjbrpeL4gf8J4wOFhUVoqXXhLvvafie0INfUggkVcizszMVGeGu281L1z44D//mafspmtra1999VUiWrBggeF4bGwsGa0tYH1Hjx7V6XReXl6GZ0QUFRWp92GY8ePctt4TEpGLCwUF0e7dimzMtOTkZDc3t6ysrAMHDqg4jXLi4+MvX77s7+8fHBys+MZLS9cWF08nEkTk6OgmxB1lty8v35qcnHzu3Dl5fNy4cU5OTuvWrTt+/LiyM9afEGL27NlCiJCQEBcXF3l89uzZVVVVoaGh0uFDDWo9zut5GKgT/dWrIiREREaKsDARHCyEEFevCldXFZ8JhcEXx9Y5O9QS8lcUeXl5amy/trb87Nlxv/zSqaCg65kzQ6qrVbmMYOzYsUQULP0D/4+0xmGvXr24bpNk8gQJ6SsKlU6QEP97JhQGj/MHRPGoy9+Y+T1hUZGQLpodPNi8DZjD9k+hkkk3hJo8ebLiW2b5sn7//v3y4M2bN1u2bEn/v7aA1cinCmZkZMiD91vz0srMjsKiL+tTUkR6uiUbeGSaWPHOnk5bk2792bVrV8PT1kyuLWAdLCfNPxIzojAzQr1eREcL6y79epfNXlYjqa6ufuGFF4ho2bJlamzf+idwe3l5EdG6devkwfutLaA2rsvH6snsKMyMMDNT9OwpoqNFdLSw8uliNnuBqSQ1NZWInnnmGbu5lEk62aDOqw+TawuozeSF1CbXvGRhdhRaOndUZrMr3l2/fl06KmdnZ6s0BeNFvVFRUYaDo0aNIqLx48erN68h6VoZ4yVFjNe81BxNRmhriw7Jpk+fTkQDBgxQbwqu5S0cHR2dnZ0LCwvlQfkmjVa4kLqmpuall14iosWLFxuOm1zzUnM0GaGwyRXvpIWeGjRoYJcLPU2ZMoWIhhguv3+fj0nU8OBlJrHQEw8bXPFOWvJw1qxZqs5iU0semlxbQHHsCy6rTasRCptZkl2yfft2svfFf5OTk41ffZj86lxZJm89YHLNS43ScITCZla8k4/Ka9asUXsuW1gGPyUlRR6839oCSrnfTXiM17zULm1HaCMr3i1ZsoSIfH19lb0JlEm8N4TZtWuX8asPk2sLKEW6Hd3UqVMNB02ueald2o5Q2MCKd/Kt0fbu3WuF6Wzk1mjTpk0zHAwNDSWiwECF90G6MWvTpk2vXr0qD8o3ZrXNL4rNoPkI2Ve8U+nxd182eZNQNY5E8i3KP/zwQ3mQ5xblKtN8hEKINWvWENOKd4/n7bJnzZpFRAEBAYaDCQkJyr4mX758ORF16tTJ8KMXk2teap09RMi14p38mcTcuXOtN6sNRCi/+vjyyy/lQfnTqbVr11o+xZUrV5o2bUpEX3/9tTxocs1LO6DAGjPsHB0dU1JSBgwYkJCQMHHiROm2MIaOHj26zbIVCBYsWODu7l5ncPPmzYcOHWrZsmVMTIwlG9ecZs2axcXFRUREREVFvfXWW66urkTk7OyclJS0du1aw5W4zObm5jZjxoyCggLpgxlJUlLS+fPn/fz8wsLCLJ/ChnAfBRTzgBXv0tPTLfwtGV+UJH9P/emnn1rjryezgWdCwbHmsnyWnOGal/bBHp4JJStWrNizZ09GRsa0adPqLHDQs2dP6S6zZpM+dTCUmJhYXFzcrVs3adHRx4306qN///6JiYlhYWHGrz4UJ615GRISosgzrW3hPgooKTo6mlRcb/cea567XJdtPBNKAgMDiSg0NFTtiXJzc7W1EvEjsasI5QXOPvvsM1Unevvtt4lo3Lhxqs5imi1FWFRUJN2R7siRI+rNUltb+/LLLxPRokWL1JuFkV1
FKITYsGEDEbVp0+b27dsqTXHo0CHpqGzN61nvsaUIhRDSh1I9e/ZU79XHxx9/TERt27ZV79+Ul/28J5SEhoamp6fn5eUFBwdLqy0pSwixcuVKIURMTIz0wcxjbt68eRkZGUeOHJk8eXI9l75+JJWVlfHx8US0fPlyadUpO8R9FFBebm6udJ6xSnx8fHx9fSsqKnj+eoWFIjZWGL7eLi0VsbEiNZVnf4TYuHGj9D2tSvz8/AICArhWWLQCnbjP8vKaVlNTExERodLGu3TpMnXqVEfHh98/4zEhhCgpKZHWZVPDwIEDAwMD7fgXbp8RPhbWr6fkZNLradw4WriQe2/AfIhQm/74g/r2pRMnyM2NevSgVauoXz/ufQIzKXMvCrC2nBwaNIg8PKhhQxo7lvbt494hMB8i1KayMmre/O6fPTzo2jXWvQGLIEJtatGC5JsElpaSpyfr3oBFEKE29e9Pe/dSaSlVVtKWLWRwqQFojr19Wf+4aN+e4uOpTx/S6yksjPz9uXcIzIdPRwGY4eUoADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMDsv/gMeV7mM17WAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "xyHuYSjnc_9m", - "colab_type": "text" - }, - "source": [ - "Notice the visual distinction between the train/validation splits. The most-common scaffolds are reserved for the train split, with the rarer scaffolds allotted to validation/test." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "51_rHNLPc_9n", - "colab_type": "text" - }, - "source": [ - "The performance of common machine-learning algorithms can be very sensitive to preprocessing of the data. One common transformation applied to data is to normalize it to have zero-mean and unit-standard-deviation. We will apply this transformation to the log-solubility (as seen above, the log-solubility ranges from -12 to 2)." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "apAo3BJlc_9o", - "colab_type": "code", - "outputId": "e32b6085-f24e-460c-ca14-8abd8427f3e9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 119 - } - }, - "source": [ - "transformers = [\n", - " dc.trans.NormalizationTransformer(transform_y=True, dataset=train_dataset)]\n", - "\n", - "for dataset in [train_dataset, valid_dataset, test_dataset]:\n", - " for transformer in transformers:\n", - " dataset = transformer.transform(dataset)" - ], - "execution_count": 12, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.047 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.011 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.010 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "hqEjFjU4c_9q", - "colab_type": "text" - }, - "source": [ - "The next step after processing the data is to start fitting simple learning models to our data. `deepchem` provides a number of machine-learning model classes.\n", - "\n", - "In particular, `deepchem` provides a convenience class, ```SklearnModel``` that wraps any machine-learning model available in scikit-learn [6]. Consequently, we will start by building a simple random-forest regressor that attempts to predict the log-solubility from our computed ECFP4 features. To train the model, we instantiate the ```SklearnModel``` object, then call the ```fit()``` method on the ```train_dataset``` we constructed above. We then save the model to disk." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "kSpYdUDkc_9r", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from sklearn.ensemble import RandomForestRegressor\n", - "\n", - "sklearn_model = RandomForestRegressor(n_estimators=100)\n", - "model = dc.models.SklearnModel(sklearn_model)\n", - "model.fit(train_dataset)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "63-ylGaPc_9t", - "colab_type": "text" - }, - "source": [ - "We next evaluate the model on the validation set to see its predictive power. `deepchem` provides the `Evaluator` class to facilitate this process. To evaluate the constructed `model` object, create a new `Evaluator` instance and call the `compute_model_performance()` method." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "OG3FfI20c_9u", - "colab_type": "code", - "outputId": "9b4c3226-91e8-409f-8757-c33f88970d94", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 51 - } - }, - "source": [ - "from deepchem.utils.evaluate import Evaluator\n", - "\n", - "metric = dc.metrics.Metric(dc.metrics.r2_score)\n", - "evaluator = Evaluator(model, valid_dataset, transformers)\n", - "r2score = evaluator.compute_model_performance([metric])\n", - "print(r2score)\n" - ], - "execution_count": 14, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.1564123392340252]\n", - "{'r2_score': 0.1564123392340252}\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "eDfVtZztc_9w", - "colab_type": "text" - }, - "source": [ - "The performance of this basic random-forest model isn't very strong. To construct stronger models, let's attempt to optimize the hyperparameters (choices made in the model-specification) to achieve better performance. 
For random forests, we can tweak `n_estimators` which controls the number of trees in the forest, and `max_features` which controls the number of features to consider when performing a split. We now build a series of `SklearnModel`s with different choices for `n_estimators` and `max_features` and evaluate performance on the validation set." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "pT9oo7rUc_9x", - "colab_type": "code", - "outputId": "2c36589c-173e-40fd-ddc5-f58409a14f1f", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 765 - } - }, - "source": [ - "def rf_model_builder(model_params, model_dir):\n", - " sklearn_model = RandomForestRegressor(**model_params)\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", - "params_dict = {\n", - " \"n_estimators\": [10, 100],\n", - " \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", - "}\n", - "\n", - "metric = dc.metrics.Metric(dc.metrics.r2_score)\n", - "optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", - "best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 15, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'auto'}\n", - "computed_metrics: [0.11429830055719803]\n", - "Model 1/8, Metric r2_score, Validation set 0: 0.114298\n", - "\tbest_validation_score so far: 0.114298\n", - "Fitting model 2/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.255910681043121]\n", - "Model 2/8, Metric r2_score, Validation set 1: 0.255911\n", - "\tbest_validation_score so far: 0.255911\n", - "Fitting model 3/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'log2'}\n", - "computed_metrics: [0.15376331917321695]\n", - "Model 3/8, Metric r2_score, Validation set 2: 0.153763\n", - 
"\tbest_validation_score so far: 0.255911\n", - "Fitting model 4/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': None}\n", - "computed_metrics: [0.08206916361536953]\n", - "Model 4/8, Metric r2_score, Validation set 3: 0.082069\n", - "\tbest_validation_score so far: 0.255911\n", - "Fitting model 5/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'auto'}\n", - "computed_metrics: [0.15969497770354368]\n", - "Model 5/8, Metric r2_score, Validation set 4: 0.159695\n", - "\tbest_validation_score so far: 0.255911\n", - "Fitting model 6/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.2339461932073177]\n", - "Model 6/8, Metric r2_score, Validation set 5: 0.233946\n", - "\tbest_validation_score so far: 0.255911\n", - "Fitting model 7/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'log2'}\n", - "computed_metrics: [0.25608787938563005]\n", - "Model 7/8, Metric r2_score, Validation set 6: 0.256088\n", - "\tbest_validation_score so far: 0.256088\n", - "Fitting model 8/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': None}\n", - "computed_metrics: [0.1463569411025908]\n", - "Model 8/8, Metric r2_score, Validation set 7: 0.146357\n", - "\tbest_validation_score so far: 0.256088\n", - "computed_metrics: [0.9424695369155991]\n", - "Best hyperparameters: (100, 'log2')\n", - "train_score: 0.942470\n", - "validation_score: 0.256088\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ytSp1h9Zc_9z", - "colab_type": "text" - }, - "source": [ - "The best model achieves significantly higher $R^2$ on the validation set than the first model we constructed. Now, let's perform the same sort of hyperparameter search, but with a simple deep-network instead." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "TS0-7gVYc_90", - "colab_type": "code", - "outputId": "4dc9e664-84eb-45e3-eea1-1a12f13f2116", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 479 - } - }, - "source": [ - "import numpy.random\n", - "\n", - "params_dict = {\"learning_rate\": np.power(10., np.random.uniform(-5, -3, size=1)),\n", - " \"decay\": np.power(10, np.random.uniform(-6, -4, size=1)),\n", - " \"nb_epoch\": [20] }\n", - "n_features = train_dataset.get_data_shape()[0]\n", - "def model_builder(model_params, model_dir):\n", - " model = dc.models.MultitaskRegressor(\n", - " 1, n_features, layer_sizes=[1000], dropouts=[.25],\n", - " batch_size=50, **model_params)\n", - " return model\n", - "\n", - "optimizer = dc.hyper.HyperparamOpt(model_builder)\n", - "best_dnn, best_dnn_hyperparams, all_dnn_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 16, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/1\n", - "hyperparameters: {'learning_rate': 0.0004948656946205111, 'decay': 4.283485107240641e-06, 'nb_epoch': 20}\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. 
Please use tf.compat.v1.placeholder instead.\n", - "\n", - "computed_metrics: [0.27192770328945703]\n", - "Model 1/1, Metric r2_score, Validation set 0: 0.271928\n", - "\tbest_validation_score so far: 0.271928\n", - "computed_metrics: [0.7855675808634082]\n", - "Best hyperparameters: (0.0004948656946205111, 4.283485107240641e-06, 20)\n", - "train_score: 0.785568\n", - "validation_score: 0.271928\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Qcn6BidDc_93", - "colab_type": "text" - }, - "source": [ - "Now that we have a reasonable choice of hyperparameters, let's evaluate the performance of our best models on the test-set." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "s8TqBD6pc_94", - "colab_type": "code", - "outputId": "4b6c5108-1d72-4147-9d38-bec0c5a21e87", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 51 - } - }, - "source": [ - "rf_test_evaluator = Evaluator(best_rf, test_dataset, transformers)\n", - "rf_test_r2score = rf_test_evaluator.compute_model_performance([metric])\n", - "print(\"RF Test set R^2 %f\" % (rf_test_r2score[\"r2_score\"]))" - ], - "execution_count": 17, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.3447598620339224]\n", - "RF Test set R^2 0.344760\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "U-clxvGhc_96", - "colab_type": "code", - "outputId": "131320ec-fb59-4331-a4f0-6dffc701b586", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 51 - } - }, - "source": [ - "dnn_test_evaluator = Evaluator(best_dnn, test_dataset, transformers)\n", - "dnn_test_r2score = dnn_test_evaluator.compute_model_performance([metric])\n", - "print(\"DNN Test set R^2 %f\" % (dnn_test_r2score[\"r2_score\"]))" - ], - "execution_count": 18, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.36426843023910316]\n", - "DNN Test set R^2 0.364268\n" - ], - "name": 
"stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "k5yQytmUc_98", - "colab_type": "text" - }, - "source": [ - "Now, let's plot the predicted $R^2$ scores versus the true $R^2$ scores for the constructed model." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "887Zb1-5c_98", - "colab_type": "code", - "outputId": "3c26ea66-5a84-4b3b-a634-5a4cdce47a33", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "task = \"measured log solubility in mols per litre\"\n", - "predicted_test = best_rf.predict(test_dataset)\n", - "true_test = test_dataset.y\n", - "plt.scatter(predicted_test, true_test)\n", - "plt.xlabel('Predicted log-solubility in mols/liter')\n", - "plt.ylabel('True log-solubility in mols/liter')\n", - "plt.title(r'RF- predicted vs. true log-solubilities')\n", - "plt.show()" - ], - "execution_count": 19, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAEWCAYAAABv+EDhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3debwcVZn/8c+XcJGELSJxNBdiUCSMAkkgoBJRWQRkkQgK7oIOCI6CLEEQf7I4DhmCOzMyoCwqKiAYgqAsE8QVISGBgBJF9osoCGHLBW6S5/dHnQ6dTlV1dXdVVy/P+/W6r9yurlt1uu5NnXOeOuc5MjOcc871n7XKLoBzzrlyeAXgnHN9yisA55zrU14BOOdcn/IKwDnn+pRXAM4516e8AnC5knShpP8I3+8saUmbzmuStmjHucrSaZ9R0i8l/Vur+0qaIOlZSaNq95X0IUnXpRy3bX9jvcgrgA4k6X5Jw+E/xaPhprp+1fsXSnoxvF/5OrjMMscxs1+b2aR6+0k6RNJv2lGmvFRXdK41Zvagma1vZiti3rvYzPaovK6tBLP+jbl4XgF0rv3MbH1gCjAVOKnm/TPDf5rK1yV5F0DS2nkfs1/4tXPdwCuADmdmjwLXElUELQst13MkXS/pGUk3SXpN1fsm6d8l/QX4S9i2r6RFkpZK+p2kbav2nyrptnCsS4B1q957h6SHq15vJukKSY9J+qeksyX9K3AO8JbQk1ka9n2ZpLMkPSjp76HMo6uONVPS3yQ9IunjKZ/3YEnza7YdI2lu+H5vSX8M5R+SdHyGa3g48CHghFDmq8L2+yV9TtIdwHOS1q5tsdb2HNKubZ0ybCTpe+FaPiDpC5LWCu+NkvQVSY9Luk/Sp0M5YislSVuEv4Onws9cUvXeTpJuDe/dKmmnhGOcKukHVa8nxpzzdZJukfS0pCslbZyyb+U4q3qHkn4VNt9e6fXG/I2Nl3R5uC73STqq6r0dJc0P5/+7pK9muda9
zCuADidpU+BdwD05HvZDwJeATYBFwMU1788A3gS8QdJU4Hzgk8ArgP8F5oYb9DrAHOD7wMbAZcCBCZ9jFPAz4AFgIjAI/NjM/gQcAfw+9GTGhh+ZBWxJVPFtEfb/YjjWXsDxwDuB1wO7p3zWq4BJkl5fte2DwA/D998FPmlmGwBbA/NSjgWAmZ1LdM0qvbD9qt7+ALAPMNbMlqcdJ+3a1isD8C1gI+C1wNuBjwKHhvcOI/qbmQJsR/T7TPMl4Drg5cCm4diEG/TVwDdD+b4KXC3pFRnKF+ejwMeBVwPLw3EzM7O3hW8nx/V6QwV4FXA70d/LbsBnJe0ZdvkG8A0z2xB4HXBpk5+jZ3gF0LnmSHoGeAj4B3BKzfvHh1bjUkmPN3jsq83sV2b2AnAyUet7s6r3zzCzJ8xsGDgc+F8z+4OZrTCzi4AXgDeHrwHg62Y2YmY/AW5NOOeOwHhgppk9Z2bPm1ls3F+SwnmPCeV4BvhP4P1hl4OAC8zsTjN7Djg16YOa2TLgSqIbM6Ei2AqYG3YZIaroNjSzJ83stqRjZfRNM3soXLt60q5tolCZvh84ycyeMbP7ga8AHwm7HER0o3vYzJ4kqkzTjACvAcbX/F72Af5iZt83s+Vm9iPgbmC/pAPV8f2q39n/Aw4KnyUvOwDjzOx0M3vRzO4FzuOlv5sRYAtJm5jZs2Z2c47n7kpeAXSuGaFV+g6iG9YmNe+fZWZjw9cmsGrEROWh8M9Tjv1Q5RszexZ4gujmvMb7RDeG46oqm6XAZmH/8cCQrZ5R8IGEc24GPFCvVRyMA8YAC6rO+YuwnXDe6jImnbPih4QKgKj1PydUDBD1WPYGHghhkLdkKF+ah+rvskratU2zCVHFW/25HyBq9cKa12fV94pGzVT+Ru4Km08ABNwi6a6qkNp41ry21edpVO3vbIA1/65b8RpgfM31/DzwL+H9TxD1Ku8O4ax9czx3V/IHVR3OzG6SdCFwFnW68mZ2MWuGc+Ksau0rGl20MfBI9aGqvn8I+LKZfbn2IJLeDgxKUlUlMAH4a8w5HwImSFo7phKoTUn7ODAMvNHMhmKO9bfqzxDOmeZ6YJykKUQVwTGrTmx2K7C/pAHg00Rhgc1ij5Je5qTty4gqs4pXAZWYdeK1reNxXmq1/zFsmwBUrtXfiEI5Fas+j5n9Gli/6r3Kc6bDACS9FbghxNsfCeeoNoGoMq71HGt+zlq1v7OR8FmyXO8sHgLuM7PXx71pZn8BPhBCRQcAP5H0itAj6UveA+gOXwfeKWlyTsfbW9JbQwz/S8DNZpbUcj0POELSmxRZT9I+kjYAfk8Uyz1K0oCkA4hCPXFuIboxzQrHWFfS9PDe34FNQ3kws5XhvF+T9EoASYNVsdxLgUMkvUHSGNYMj63GzEaInk/MJqrsrg/HXCf0mjYK+zwNrEw7VpW/E8Xf61kEfFDRg9m9iOL1FWnXNu3zrCC6Bl+WtIGih/jHApWHsJcCR4drNhb4XNrxJL0vPGsCeJKoElsJXANsKemDih5oHwy8gehZTtznfJuiMf0bseaoNYAPV/3OTgd+Ejf0s460634L8IyiB/GjwzXfWtIO4XN+WNK48Pe1NPxM1t93T/IKoAuY2WPA9wgPQXPwQ6Kb5hPA9sCHU849n6h1eDbRzeEe4JDw3otELalDwrEOBq5IOM4KotjxFsCDRK3gytyFecBdwKNVzzM+F851s6SngRuASeFYPyeqFOeFfeo+uA2feXfgspoeyEeA+8M5jiB6QF49OSmpd/FdomcHSyXNSTnv0eFzLw3HXrVv2rXN4DNEre57gd+Ez3d+eO88ooe6dwALiW7ky4Gkm+0OwB8kPUv0bORoM7vXzP4J7AscB/yTKFS0r5mt8czJzK4HLgnnXEB8JfF94ELgUaLRYkfF7FPPqcBF4bofVFOGFaG8U4D7iHoX3yF6WA6wF3BX+JzfAN6f8VlNz5IvCNNfQjjpYTP7Qtllce0h6V3AOWZW
G85xfc57AM71mBD+2DuEbQaJens/LbtcrvN4BeBc7xFwGlFYaSHwJ/ILH7oe4iEg55zrU94DcM65PtVV8wA22WQTmzhxYtnFcM65rrJgwYLHzWxc7fauqgAmTpzI/Pnz6+/onHNuFUmxs+U9BOScc33KKwDnnOtTXgE451yf8grAOef6lFcAzjnXp7pqFJBzrn3mLBxi9rVLeGTpMOPHjmbmnpOYMbXZpQBcJ/IKwDm3hjkLhzjpisUMj0QJRIeWDnPSFYsBvBLoIR4Ccs6tYfa1S1bd/CuGR1Yw+9olJZXIFcErAOfcGh5ZGp8mP2m7605eATjn1jB+7OiGtrvu5BWAc24NM/ecxOiBUattGz0wipl7TiqpRK4I/hDYObeGyoNeHwXU27wCcM7FmjF10G/4Pc5DQM4516e8AnDOuT7lFYBzzvUprwCcc65PeQXgnHN9yisA55zrU14BOOdcn/IKwDnn+pRXAM4516e8AnDOuT7lFYBzzvUprwCcc65PeQXgnHN9yisA55zrU6VWAJL2krRE0j2STiyzLM45129KqwAkjQL+G3gX8AbgA5LeUFZ5nHOu35TZA9gRuMfM7jWzF4EfA/uXWB7nnOsrZVYAg8BDVa8fDttWI+lwSfMlzX/sscfaVjjnnOt1Hf8Q2MzONbNpZjZt3LhxZRfHOed6RplrAg8Bm1W93jRsc845F8xZOMTsa5fwyNJhxo8dzcw9J+W2VnNqD0DSKEkX53KmNd0KvF7S5pLWAd4PzC3oXM4513XmLBzipCsWM7R0GAOGlg5z0hWLmbMwn7Zyag/AzFZIeo2kdcKD2tyY2XJJnwauBUYB55vZXXmewznX+Yps4Xa72dcuYXhkxWrbhkdWMPvaJblcoywhoHuB30qaCzxX2WhmX2315GZ2DXBNq8dxznWnSgu3cpOrtHCBjqgEyq6cHlk63ND2RmV5CPxX4Gdh3w2qvpxzriVpLdyyFR1+yWL82NENbW9U3R6AmZ0GIGmMmS3L5azOOUfxLdxWFB1+yWLmnpNW6yEBjB4Yxcw9J+Vy/Lo9AElvkfRH4O7werKk/8nl7M65vtZKC3fOwiGmz5rH5idezfRZ83JvmXdC5TRj6iBnHLANg2NHI2Bw7GjOOGCb3CqgLM8Avg7sSRihY2a3S3pbLmd3zvW1Zlu47Xh2MH7saIZibvZ5hV+ymjF1sLAeR6aJYGb2UM2mFbE7OudcA5pt4bbj2cHMPScxemDUatvyDL9UK7o3kyRLD+AhSTsBJmkAOBr4U7HFcs71i2ZauO0Iz1TKVPQooDJHQmWpAI4AvkGUp2cIuA74VJGFcs51n7Qhk3kPp2xXeKbI8EtFmQ+bs1QAk8zsQ9UbJE0HfltMkZxz3SatFQvk3sItenRMO5X5sDnLM4BvZdzmnOtTaa3YpPdOndv8xP+iR8e0U9Fj/dMk9gAkvQXYCRgn6diqtzYkSt3gnHNAc63YpcMjzFk41PRNux3hmXYoszeT1gNYB1ifqJKongH8NPDewkvmnOsaaa3YtJZsJ8z4LVuZvZnEHoCZ3QTcJOlCM3ug8JI457pWvVbsZy9ZFPtznTDjtxOU1ZtJCwF93cw+C5wtyWrfN7N3F1oy51zXqDdk8rSr7uLJZSNr/Fy7J1W51aWNAvp++PesdhTEOddeeQ/NTGvFnrLfG3tm1E4vSQsBLQj/3tS+4jjn2qGdk48qFc3wyApGSawwY9Dz/neEtBDQYmCN0E+FmW1bSImcc4Vr1+Sj2opmhdmqln8ZN/+y8/t3mrQQ0L5tK4Vzrq3aNfmoE1IqV3T64jNlSKsAzgV+AfzczO5uU3mcc23QrlQKSRXK0NJhps+a19aWeCdVRp0ibR7Ax4AngVMl3Sbp25L2l7Rem8rmnCtIuzJdJlUogravtNUJ+f07TWIFYGaPmtmFZvZ+YBrwPWB74DpJN0g6oV2FdM7lq12Tj+IqGrHmw8V2LANZZsqFTpUlGRxmthL4ffj6oqRN
iBaJcc51qXZMPoqbHxAXeoJiWuLVD33HjhlgYC0xsvKl6qffh6LWrQAknQn8BzBM9ExgW+AYM/tBwWVzzvWA2opm+qx5bXn+UPvQ98llIwyMEmNHD/DU8IiPAiJbD2APMztB0nuA+4EDgF8BXgE412fyGEbZruRncQ99R1YY671sbRadskeu5+pWWSqAyj77AJeZ2VOSCiySc64ZlZvz0NLhQiZc5TWMsl0rbflD3/qyVAA/k3Q3UQjoSEnjgOeLLZZzrhFxE64g37HuWYZRZu0htOP5Q6cs6t7J6i4IY2YnEq0LMM3MRoDngP2LLphzLru4m3NFXiNs6rWoK5VQu4d3Jmnnou7dKi0VxAEx26pfXlFEgZxzjasX1sgj7FGvRZ3WQ6i8386JX+0KNXWztBDQfinvGV4BONcx0oZXVt5vVb2Ht2mzfstKwdArq4YVJS0b6KHtLIhzrnlxN+eKVsIetTH9A7cf5Ma7H4ttUSdVQqOkhlMwFJm0zRPCvSTLPICNgFOAt4VNNwGnm9lTRRbMOZdddbgjr1FAcaN+Ll8wlDhjOKmHkPRsIqnHUGTSNk8It7oso4DOB+4EDgqvPwJcQDQfwDnXorxapHmHOxpNnpYUc69USrWSwlJ5jjZq9TP1uiwVwOvM7MCq16dJil/gMyNJs4meMbwI/BU41MyWtnJM57pRJ7dImxlHn1QJNTLxK+too2aumc8NWF3dYaDAsKS3Vl5Imk40J6AV1wNbh0Vl/gyc1OLxnOtK9UbOlCmv5GmNJp6rd95WrpknhFtdlh7AkcBF4VmAgCeAQ1o5qZldV/XyZuC9rRzPuW7VyS3SrCkbsoRjGglPNTvaKMs1a1caim5RtwIws0XAZEkbhtdP51yGjwOXJL0p6XDgcIAJEybkfGrnytVps1UbGfVT2T/vEFbcA+3qFn4r18znBqxOZonL/kY7SGOBjwITqaowzOyoOj93A/CqmLdONrMrwz4nE601cIDVKwgwbdo0mz9/fr3dnOsatTdQiFqkReTmL6IsSZk9B8eO5rcn7lpIeQ7cfpDLFwx1xDXrFpIWmNm02u1ZQkDXEIVpFgMrs57QzHavU6BDiNYd3i3Lzd+5XtRJLdJmRsikTf4qqjw33v0YZxywTUdcs26XpQJY18yOzfOkkvYCTgDebmbL8jy2c90mLT7ezklLzcTWk8IxIip7K2Xt5OcjvSLLKKDvSzpM0qslbVz5avG8ZwMbANdLWiTpnBaP51zPaXdytWZGyMzccxJxyeENWh7JlHTesWMG1rgun71kEVNPv660xHPdKksP4EVgNnAyLy3lacBrmz2pmW3R7M8616tqW/vPvbC86UlLzfQcmhkhM2PqIJ+9JH5aUKst9aTymBE7u/jJZSMdM4eiW2TpARwHbGFmE81s8/DV9M3fObemuNb+0uGR2H3r3Vib7Tk0u1D8YEFj65PK81TCdYHOmUPRLbL0AO4BPE7vXIHS8vnXqndjbSXdQTPpJIocWx9XnqTUEhX+jCC7LBXAc8AiSTcCL1Q21hsG6pzLLutNK8uNtd0PT+NGMu2y1ThmX7uEYy5ZlPvD67TMp9C/s3qbkaUCmBO+nHMFSRpN8/IxA4xZZ+2GYvlJxxo7ZoDps+YVMqKouqVedH6jyjFOnXvXGmGyfp7V24y6E8E6iU8Ec70qzwlhcccaGCUwGFn50v/3esdvdghq2uSwSnbQvCohz+2fTdJEMK8AnOsQcTczaG6SWNyIoriHykkzdlupkDY/8WqS7iq16wP4DN728ArAuS6TZ68g6aYs4L5Z+6yxvZUUD0k/W1mkppljutYkVQBZhoE650qQNJrnuEtvZ/MTr2b6rHmZJz41Oskr64PkOQuHmD5r3mrlmbnnJEYPjFptv9EDo2Jv/mnncsWrWwFI2lLSeZKukzSv8tWOwjnXz5JujCvMVpsBO+W0+jNgk27KSQ9Ms1QYSfMNgNjx+0nzBdaSGq7QXD6yjAK6DDgHOA/I
NlDZOdeypNE8tZYORzNg5z/wRGLq5kaTziUNtVz24vJVOX7S5hv89sRdM60MBqzqGXTSamj9IksFsNzMvl14SZxzq6k33r3a8MgKLr75wVVx/ribaSOTvJKGWlanW2h0vkFtJbRWzDOBfl6ftwxZngFcJelTOSeDc87VUZsKYZTi0q69pDbC3mpahBlTB1nvZWu2ESvHbSZ53Iypg/z2xF25b9Y+rPRnAqXL0gP4WPh3ZtW2lpLBOeeySZtglUWrN9O0fP9fP3hKSykgOm01tH5UtwdQlQBuc08G51x5Kj2Cl48ZyPwztQ9ta0fsNPLz1Sp9kWaSx1U0+mDa5S9xHoCkXc1snqQD4t43sysKLVkMnwfgXGTOwiGOu/T2xKGVsPqcgWbnFMxZOMQxlyyKnUPQyvj9ykS1ypq/K8wYDDmE0tYgds1pZh7A28O/+8V87Zt7CZ3rYs20rlsxY+pgYgwd1myNp43YqXeepLM0G16qHj4K0Sig0QOj2GWrcVy+YKhtC+C4lGcAZnZK+PfQ9hXHue5TdPKzJEkx9LiWeSsZQgdzjtUnVUY/+sNDPiqozXwmsHMtarZ13apGYujNjNhp9DxZe0FpE9wa2d+1LssoIOdcirIWL48bq7/uQHybLuuiLWnZNSvbx44ZwAyOuWQRs69dsuoYWXtBST2XpFxBPiqoON4DcK5FrbSu8/DC8pWrvq9M1KptfWdZ7jFtKcnK+P2vHTyF50dWsnR4ZLV9Tp17V+ZeUFKP4gNv2sxHBbVZ3R6ApAXA+cAPzezJ4ovkXHfZZatxq83ChfbduBpZ/rHeTOAsx0raJ2luQlwvKC0txbTXbOz5/dsoSwjoYOBQ4FZJ84ELgOusm/JIO1eAOQuHYlelEnDg9o2vrduMPMNPWY7V6HGTekFJlVFleyUUVR1m8oogf1kmgt1jZicDWwI/JOoNPCDpNE8J4fpVJVwSt8iKATfe/Vjdn2912OichUOslZAeopnwU5ZQVtI+Lx8zkFv4Ji0U5fKV6RmApG2BrwCzgcuB9wFPA54W2vWluFBItbSWch43uMox4h6a1rvxJlU+WUb7JO1zyn5vbGlWcLWyRlX1o6zPAJYC3wVONLMXwlt/kDS9yMI5V5Z6a83WC4WktcAbids3cgyIRtLUW+e33midtM9db588wjSNLEbjzwtak+UZwPvM7N7qDZI2N7P7zCw2TYRz3SzLTTItV3+9FngecfukfVeatfSgN0vK6EbSSjcjS5K4sibf9ZosIaCfZNzmXE/IEoKIC4VAFAuPG15ZHXLZaHR8MrdG4vbNDj0ta85CI7KEojxMlI/EHoCkrYA3AhvVJITbEFi36II5V5YsN8msK2zFtVQHRomBtcTIypfi940+MM06satWN6RgznJtu6Ei6wZpIaBJREnfxhIlgKt4BjisyEI5V6asN8ksoZC4lurICuPlYwYYs87aTcevG13isaLZiqPd6l3bbqjIukFaMrgrgSslvcXMft/GMjlXqjxvkkkt0qXLRlj4xT2aLiM0F4tvtuLoNN1SkXW6tBDQCWZ2JvBBSR+ofd/Mjiq0ZM61Ue2IkgO3H8wlL30jLdV2jWop+iFuO/RKRVa2tBDQn8K/ha3AIuk44CxgnJk9XtR5nEsTF6e/fMFQ0+PYqzWShM1HtTSmFyqysqWFgK4K/15UxIklbQbsATxYxPGdyyqPcflJsrZUiyxDt/Jx/sVLCwFdBYmLAWFm727x3F8DTgCubPE4rke16wZQ9IiSLC3VPMrQSzdM7xG1R1oI6KyiTippf2DIzG5XQi6Tqn0PBw4HmDBhQlFFch2mnTeAThhR0moZ4q7XzJ/czqlz7+Kp4ZGOrhDiKi7vEbVHWgjoplYOLOkG4FUxb50MfJ4o/FOXmZ0LnAvRovCtlMl1j3beADphREmrZUgablpJVtfOFnQjPZGkir6R9NKueWkhoEvN7CBJi1k9FCTAzGzbtAOb2e4Jx90G2ByotP43BW6T
tKOZPdroB3C9qZ0TfTphREmrZchyXdrRgm6055ZU0fvqYO2RFgI6Ovy7b54nNLPFwCsrryXdD0zzUUCuWrvDMp0woqSVMqTlJqpWdAu60Z5b2vrAowdG+Tj/giXmAjKzv4V/HwBeACYD2wIvhG3OFaaRBc/Llkdu/1Yl5SaqVXQLutGeW1J5Kumk80gv7ZJlSQf9b8AXiXL/C/iWpNPN7Pw8CmBmE/M4justnRCWySKPh9V5jN6JW7j92eeXt5RvqBmN9tzSnn10Qq+s16neyo6SlgA7mdk/w+tXAL8zs7Y3xaZNm2bz5xc2L825hk2fNS/2hjdK4isHTa57A6utQCC6AebR2i1jWGgzn6eXhq92KkkLzGxa7fYs6wH8kygBXMUzYZtzfS8thp2lJ1D0JLR230ib6bl5S788aaOAjg3f3kO0+teVRKOB9gfuaEPZnOt4aQ9fs9zIs8TMs7aQO6Ul7Tf07pG2IMwG4euvwBxeGgp6JXBfweVyrhB5P7Ct9/C12aUjK9uzrh8ct98xlyxiYokPpl3nS5sIdlo7C+Jc0YqYXTxj6iDzH3iCH9wcn9Kq+kYe1zqvNwEsa4gobr9Ki83TKLgkWUYB3UhMTiAz27WQEjlXkKLi7Tfe/VjsdhH1EFpZiD3rsMp6PY3hkRWcOveujggRVeuUsFW/yvIQ+Piq79cFDgSWF1Mc54pT1OzipJ83ohv89FnzMi/EXrkhHnPJIsaPHc1GowdWpXOoVhs6yjIRbOnwSC6pIfK6aXvCt/LVXRTezBZUff3WzI4F3lF80ZzLV7MLqTd73MGwPWvFExfHf+7F5QystXrCxLjx/FknglVrZhH1rM8ksvCF3ctXtwKQtHHV1yaS9gQ2akPZnMtVUbOL6x03a8WTlNBt/XXXrjsjdsbUwVUzZyEKP2XRaO8n6aZ93KW3N1wJ+MLu5csSAlpA1JsVUejnPuATRRbKuSIUNbu43nGzZvpsdf3guFBSpTzLXlzOk8vqh5LqaXXeQ+25y07D3e/qVgBmtnk7CuJcOxQ1Rj3tuFkrnjxviLXlSZqh22jvp9V5D9U6IQ13v0ubCHZA2g+a2RX5F8e53pSl4inyhphX7yeujNUaCd90S76nXpbWA9gv5T0DvAJwLkdF3xDz6P1Ufv64S2/PJV+/zxouV9pEsEPbWRDnXHfcECvl8/BN98syEWwj4BTgbWHTTcDpZvZUkQVzznUuD9/0hiyjgM4H7gQOCq8/AlwApD4jcM71tm7orbh0WSqA15nZgVWvT5O0qKgCOdftGl0Uffa1SxhaOrxqHdxBb027NslSAQxLequZ/QZA0nTAZ2o4F6OR9Aa1+1YeqnpKBNcudWcCA0cC/y3pfkkPAGcDRxRbLOe6UyPpDeL2rfczzuUpy0SwRcBkSRuG108XXirnClRkBspG0hvUGzPfaEoEz6zpGpVlFNDRRA99nwHOk7QdcKKZXVd04ZzLW9EZKBuZzVsvg2famPram/0uW43j8gVDnlnTNSRLCOjjodW/B/AKolFAswotlXMFaSRE08zqYY0knEvL4Jk2pj4uI+fFNz/omTVdw7I8BK4kFtwb+J6Z3SUpa7JB5zpKo6mZG21RNzI+vnrfRkYBpa3+Ve9zOVctUzZQSdcBmwMnSdoAWFlssZwrRtYQTSurhzUyPr6ZsfSN3NQ9s6ZLkyUE9AngRGAHM1sGrAN4mgjXlbKGaDo5V33STb22W+6pGVw9WVYEW2lmt5nZUkmnmtk/zeyOdhTOubxVL5yStsBKUauH5SGpEvvQmyfU/VzOVcsSAqr2buDUAsrhXNuUnZq5VZ6Hx+Wl0QrAH/66vtDpN1nPw+Py0GgFsH0hpXCuA/lN1vW6LBPBvlnzGuApYL6ZXVlQuZwrhc+mdf0kyyigdYEpwF/C17bApsAnJH29wLI511ZxE6xOumJxpglgznWjLCGgbYHpZrYCQNK3gV8DbwUWN3tiSZ8B/h1YAVxtZic0eyznWjVn
4VDsMoeNLnTeyvnL6Hl4j6e/ZakAXg6sTxT2AVgP2NjMVkh6oZmTStoF2B+YbGYvSHplM8dxLg+Vln/cGrdQ/Nj/ovMTddp5XfhK21YAABBYSURBVOfIEgI6E1gk6QJJFwILgdmS1gNuaPK8RwKzzOwFADP7R5PHca5laWmZofix/43kJ+qF87rOkSUd9HclXQPsGDZ93sweCd/PbPK8WwI7S/oy8DxwvJndGrejpMOBwwEmTJjQ5OmcS5bWwm9m7H+jYZWyZh138mxn1x5Zh4HuAOwcvl8JPJKyLwCSbgBeFfPWyeG8GwNvDse+VNJrzdbsg5vZucC5ANOmTUvKeeVc05LyA42SGp5N20xYpZEU0nkq67yuc9QNAUmaBRwN/DF8HSXpP+v9nJntbmZbx3xdCTwMXGGRW4gqlU1a+yjONScptcJXDprccCy8mbBKIymk81TWeV3nyNID2BuYYmYrASRdRPQc4PMtnHcOsAtwo6QtiRLMPd7C8VwPyBo6yXvkSp6zfpsJq5Q167jTZzu74mUNAY0Fngjfb5TDec8Hzpd0J/Ai8LG48I/rH1lDJ0WNXMlr1m+zYZWyZh37bOf+lmUU0BnAQkkXhtb/AuDLrZzUzF40sw+HkNB2ZjavleO57pc1dNLpI1c8rOK6SZZRQD+S9Euih7UAnzOzRwstles7WUMnnT5yxcMqrpskVgBh8fdqD4d/x0sab2a3FVcs12+yhk66YeSKh1Vct0jrAXwl5T0Dds25LK6PZc2/38l5+p3rNokVgJnt0s6CuP6WNXTiIRbn8qNuGnwzbdo0mz9/ftnFcM65riJpgZlNq92eZRSQc865HuQVgHPO9aksqSAk6cOSvhheT5C0Y72fc84519myzAT+H6JcPbsCpwPPAJfz0rwA5zqGL3DiXHZZKoA3mdl2khYCmNmTktYpuFzONaxfFzjxSs81K8szgBFJo4jG/iNpHFGPwLmO0ulpIorg6xi7VmSpAL4J/BR4ZVjA5TdA3XTQLrs5C4eYPmsem594NdNnzfP/vE3q9DQRRejHSs/lJ0suoIslLQB2AwTMMLM/FV6yPtGvYYsidEOaiLz1Y6Xn8pNlFNAEYBlwFTAXeC5scznwFlx+Ws3E2Y09saTKrZcrPZefLA+BryaK/wtYF9gcWAK8scBy9Q1vweWnlTQR3doT89xIrhVZQkDbVL8OWUI/VViJ+kw/hi2K1GwmzrSeWCdXAJ4bybUi64pgq5jZbZLeVERh+pG34DpDN/fEPP20a1bdCkDSsVUv1wK2Ax4prER9xltw5ZuzcAgJ4vIijh0z0NBx/PfoukmWHsAGVd8vJ3omcHkxxelP3oIrTyX2vzIhKW7WZLnd+gzB9bfUCiBMANvAzI5vU3mca6u42H+1p4ZHmj5ONzxDcP0tcRiopLXNbAUwvY3lca6t6sX4sz6M7+ZnCK5/pfUAbiGK9y+SNBe4DHiu8qaZXVFw2ZwrXNIoLGjsYbyP5nLdKEsqiHWBfxJlA90X2C/861zXi5s8BjB29ABnHLBN5vBNq5PQnCtDWg/glWEE0J28NBGsonvWkXQuRV6jsHw0l+tGaRXAKGB9Vr/xV3gF4HpGXqOwfDSX6zZpFcDfzOz0tpXEOedcW6U9A4hr+TvnnOsRaRXAbm0rhXPOubZLDAGZ2RPtLEhRfHq+c87FazgZXDfx6fnOOZcsyzyAruWLrTjnXLJSegCSpgDnEE0yWw58ysxuyfs8Pj2/t3g4z7l8ldUDOBM4zcymAF8Mr3Pny+X1jko4b2jpMMZL4bxuWLbRuU5VVgVgwIbh+40oaH0Bn57fOzyc51z+ynoI/FngWklnEVVCOyXtKOlw4HCACRMaW4vep+f3Dg/nOZe/wioASTcAr4p562SiOQbHmNnlkg4CvgvsHnccMzsXOBdg2rRpDaeg8On52XVyjN2zbTqXv8IqADOLvaEDSPoecHR4eRnwnaLK4bLp9CGzvnayc/kr6xnAI8Db
w/e7An8pqRwu6PQY+4ypg5xxwDYMjh2NgMGxoxtK1+ycW1NZzwAOA74haW3geUKM35WnG2LsHs5zLl+lVABm9htg+zLO7eJ5jN25/tPTM4Fddj5k1rn+09O5gFx2nTJktpNHIjnXa7wCcKuUHWPv9JFIzvUarwBcx0gbidQpFYD3UFwv8QrAdYxOH4nkPRTXa/whcB+Ys3CI6bPmsfmJVzN91ryOTaDW6cn7On2uhHON8gqgx3VTFs1OH4nU6T0U5xrlFUCP66ZWaztn+zbTK+r0HopzjfJnAD2u21qt7RiJ1Gws3/MRuV7jPYAe563WNTXbK/J8RK7XeA+gx3mrdU2t9IrKnivhXJ68B9DjvNW6Ju8VORfxHkAf8Fbr6rxX5FzEKwDXdzol75FzZfMKwPUl7xU5588AnHOub3kF4JxzfcorAOec61NeATjnXJ/yCsA55/qUzKzsMmQm6THggSZ/fBPg8RyL0y5e7vbycreXl7s9XmNm42o3dlUF0ApJ881sWtnlaJSXu7283O3l5S6Xh4Ccc65PeQXgnHN9qp8qgHPLLkCTvNzt5eVuLy93ifrmGYBzzrnV9VMPwDnnXBWvAJxzrk/1VQUg6RJJi8LX/ZIWlV2mrCR9RtLdku6SdGbZ5clC0qmShqqu+d5ll6kRko6TZJI2KbssWUj6kqQ7wrW+TtL4ssuUhaTZ4W/7Dkk/lTS27DJlIel94f/jSkldOSS0ryoAMzvYzKaY2RTgcuCKssuUhaRdgP2ByWb2RuCskovUiK9VrrmZXVN2YbKStBmwB/Bg2WVpwGwz2zb8ff8M+GLZBcroemBrM9sW+DNwUsnlyepO4ADgV2UXpFl9VQFUSBJwEPCjssuS0ZHALDN7AcDM/lFyefrB14ATgK4ZJWFmT1e9XI8uKbuZXWdmy8PLm4FNyyxPVmb2JzNbUnY5WtGXFQCwM/B3M/tL2QXJaEtgZ0l/kHSTpB3KLlADPh269udLennZhclC0v7AkJndXnZZGiXpy5IeAj5E9/QAqn0c+HnZhegXPbcimKQbgFfFvHWymV0Zvv8AHdb6Tys30e9pY+DNwA7ApZJeax0whrdOub8NfImoJfol4CtE/8FLV6fcnycK/3Scen/fZnYycLKkk4BPA6e0tYAJsvy/lHQysBy4uJ1lS5PxftK1+m4egKS1gSFgezN7uOzyZCHpF8B/mdmN4fVfgTeb2WPlliw7SROBn5nZ1iUXJZWkbYD/A5aFTZsCjwA7mtmjpRWsQZImANd0+vWukHQI8ElgNzNbVmf3jiLpl8DxZja/7LI0qh9DQLsDd3fLzT+YA+wCIGlLYB26IBOhpFdXvXwP0UOzjmZmi83slWY20cwmAg8D23XDzV/S66te7g/cXVZZGiFpL6LnLe/utpt/t+u5EFAG76fDwj8ZnA+cL+lO4EXgY50Q/sngTElTiEJA9xO18FxxZkmaBKwkSpt+RMnlyeps4GXA9dH4DG42s44vu6T3AN8CxgFXS1pkZnuWXKyG9F0IyDnnXKQfQ0DOOefwCsA55/qWVwDOOdenvAJwzrk+5RWAc871Ka8A+oikFSFT5J2SLpM0poVjXSjpveH770h6Q8q+75C0UxPnuD8uE2fS9qJJ+mW9rI/V16Vm+zRJ3wzfHyLp7PD9EZI+WrW9oQye9a593jJegzdLOi/83n8Wtr1b0onh+xntLLNL5hVAfxkOWTm3JppPsNpY6zBLumFm9m9m9seUXd4BNFwB9BIzm29mR8VsP8fMvhdeHgI0VAFkuPZleBfwi+oNZjbXzGaFlzOAhiqAZv82XTqvAPrXr4EtQivt15LmAn+UNCrkZ781JHH7JEQZVCWdLWlJyI/yysqBqluFkvaSdJuk2yX9X0gBcQRwTOh97CxpnKTLwzlulTQ9/OwrQh77uyR9B1C9DyHp2NCjuVPSZ6u2/79Q1t9I+pGk42N+dj1JV4ey3inp4LB9N0kLJS0OSexeFvOzz1Z9/15J
F1a9vbuk+ZL+LGnfsM+q1nDNcU6VdHzoNUwDLg7XaR9Jc6r2e6ekn8b8fPW1f1ZRMrjbJd0s6V8SzndR+J0/IOkASWeGz/oLSQNZrkH4O7kwXLfFko6pens34Iaa/Q8Jfz87Ae8GZofP+brw9QtJC0K5tgo/c6GkcyT9AeiKNTC6jVcAfSi0pt4FLA6btgOONrMtgU8AT5nZDkSJ5w6TtDlRKodJRC23jxLTopc0DjgPONDMJgPvM7P7gXN4aV2AXwPfCK93AA4EvhMOcQrwm7DmwU+BCXU+x/bAocCbiBLlHSZpqqJsqQcCk8PnTApZ7AU8YmaTQ6/oF5LWBS4EDjazbYhmyx+ZVo4YE4EdgX2Ac8IxU5nZT4D5wIdCPv9rgK3CNSV8zvPrHGY9olm0k4ly1B+WsN/rgF2JbsQ/AG4Mn3UY2CfjNZgCDJrZ1mGfCwAUheZGzOyphM/5O2AuMDP8PfyVaIH1z5jZ9sDxwP9U/cimwE5mdmydz+6a4BVAfxmtaBW0+UQLnXw3bL/FzO4L3+8BfDTs9wfgFcDrgbcBPzKzFWb2CDAv5vhvBn5VOZaZPZFQjt2Bs8M55gIbSlo/nOMH4WevBp6s83neCvzUzJ4zs2eJFvjZGZgOXGlmz5vZM8BVCT+/GHinpP+StHO4aU0C7jOzP4d9LgrlasSlZrYypBu/F9iqwZ8npPr4PvBhRStkvYX6aZJfJFoIBmABUUUU5+dmNkL0+UfxUrhmcfiZLNfgXuC1kr6lKJdPZS2CPYDr6pRzlfB73wm4LPw9/C9QnUPqMjNbkfV4rjEeV+svw6F1uYqi3CvPVW8iao1dW7Nfnss5rkWUzfT5mLIURtEqX5XK4BwzO0fSdsDewH9I+j8ga4rf6hwqtS382vwqzeZbuYCovM8T3QiX19l/pCpH1AqS/39XFhZaKan6Z1am/MxqzOxJSZOBPYlCfAcRpfp+F/DVLMcI1gKW1v5dVnkuYbvLgfcAXK1rgSOrYsFbSlqPKKRwcIj9vpqQnbTGzcDbQsgISRuH7c8AG1Ttdx3wmcoLRQnjCOf4YNj2LqDeAjK/BmZIGhPK+J6w7bfAfpLWDS3MfQHM7KGq5SnPUTTiZpmZ/QCYTRQKWwJMlLRFOMdHgJtizv13Sf8qaa1w3mrvk7SWpNcBrw3HzGK16xR6Wo8AXyCEWNqk7jUIoZ61zOzyUL7tFNXg2wL11tpe9TnDKmb3SXpfOK5CxeLawHsArtZ3iMIAt4X/0I8Rjdr4KVHc+I9E4aPf1/6gmT0m6XDginBj/AfwTqJW7E8UrbT1GeAo4L8l3UH0N/grolbkacCPJN0F/I466/Ga2W3h4estlbKb2UIARQ+17wD+ThTaiItJb0P0MHIlMAIcaWbPSzqUKCSxNnAr0TOMWicShVseIwqprV/13oOhTBsCR4Rjpn2UiguJnhkMA28xs2GixVHGmdmfshwgDxmvwSBwQfg9Q7SO7/bAwgyZan8MnCfpKOC9RKuXfVvSF4CB8H7XrcbWjTwbqOtJktY3s2cVzXX4FXC4md1WdrkapWi+wEIz+27dnUsWbuD3mNmPyy6Ly8YrANeTJP2QaMTSusBFZnZGyUVqmKQFRDHwd5rZC2WXx/UerwCcc65P+UNg55zrU14BOOdcn/IKwDnn+pRXAM4516e8AnDOuT71/wGnQuk628mxYQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "sai82xRPc_9-", - "colab_type": "code", - "outputId": "d4ef4b9a-8a29-4d4f-af34-87857170bbc2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "task = \"measured log solubility in mols per litre\"\n", - "predicted_test = best_dnn.predict(test_dataset)\n", - "true_test = test_dataset.y\n", - "plt.scatter(predicted_test, true_test)\n", - "plt.xlabel('Predicted log-solubility in mols/liter')\n", - "plt.ylabel('True log-solubility in mols/liter')\n", - "plt.title(r'DNN predicted vs. true log-solubilities')\n", - "plt.show()" - ], - "execution_count": 20, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAEWCAYAAABv+EDhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZwcVbn/8c83IUBYIxBEooGIEi5bWIICUZFFAgISA4JeXMAlgj83kGCQq4BXJRqvCxeuiLIpi+glBjAqiEFQFDEhwcAVlB0nqGEJ6wBZnt8fdTr0dKqrq5fqqu563q/XvGa6urrqVM3MOXWes8nMcM45Vz7D8k6Ac865fHgB4JxzJeUFgHPOlZQXAM45V1JeADjnXEl5AeCccyXlBYDLlCST9Lrw83mSPt+Fcx4r6XdZnydPRbtGSVuH3/Va7e4r6XOSvh+3r6RfSPpAwrG78jfWL7wAKABJD0oalPSMpGWSfi/peEnDqva5OPwjvKFq2+skWdXr30h6QdJrqrYdIOnBrl1MAjM73sz+s9F+4To+3I00dUp1QefaY2ZfMbPY37+ZHWxml0B8IZj2b8xFvAAojsPMbENgK2Am8Fnggpp9ngC+1OA4zwGZPAGlebpz8fzeuSLyAqBgzOwpM7sGOBr4gKQdq96+BNhZ0j4JhzgbeI+kbdKcLzy5flLS/ZIekzSrUvMIT1i3SPqmpMeBMyStI+nrkh6W9M9Q5R5Zdbzpkh6VtETSB2vOdbGkL1W9PlzSIklPS7pP0kGSvgy8GThH0rOSzgn7bifpV5KekHSPpKOqjrOppGvCcW4D6l57CCF8vGbbHZKmKvJNSf8Kx1pcc//rHfPm8OMdIc1HS3qrpL9L+qykfwAXxT2x1oTIEu9tgzTsLelPkp4K3/euem+cpJtDDfMGSedKujThWMeGv4dnJD0g6ZiwfZik/5D0ULhHP5C0cZ1jPCjpgKrXZ8Sc84Ph7+RRSSc32Lfy3m8kfVjSvwHnAXuFe74svF/7N3Zo+Bur1Kx3rnrvs5IGwnXeI2n/evekX3kBUFBmdhvwd6LMsOJ54CvAlxM+OgB8DzizidO9E5gI7AYcDlRn3G8E7gdeGc47E9gW2AV4HTAG+AKApIOAk4G3Aa8HDqAORaGsHwDTgVHAW4AHze
w04LfAx81sAzP7uKT1gV8BlwObA+8G/kfS9uFw5wIvAK8KaR9S8NS4AnhPVTq2J6p1zQUODOnYFtgYOAp4POFYAJjZW8KPE0KarwyvtwA2Ccef1ug4JNzbJJI2Cek/G9gU+AYwV9KmYZfLgdvCe2cA70s41vrhOAeHGunewKLw9rHha1/gtcAGwDkprquefYn+Tg4EPltdYDRiZn8Bjgf+EO75qNp9JO0KXAh8lOjavwtcEwra8cDHgT3CdU4GHmzjWnqSFwDFtoQoA6n2XWCspIMTPncWcJikHVKe56tm9oSZPQx8i6oMElhiZv9tZiuIMtlpwIlh/2eICqR3h32PAi4yszvN7DmizKaeDwEXmtmvzGyVmQ2Y2d119j2UqHC4yMxWmNlC4CrgXZKGA0cAXzCz58zsTqKaUj0/BXaRtFV4fQww28xeBJYDGwLbATKzv5jZownHamQVcLqZvWhmg0k7ShLJ9zbJIcDfzOyH4f5cAdxN9DcwFtiD6P68ZGa/A65Jke4dJY00s0fN7K6w/RjgG2Z2v5k9C5wKvFuth7fODL+zxcBFDP2764RpwHfN7I9mtjK0HbwI7AmsBNYBtpc0wsweNLP7Onz+wvMCoNjGEMX9VwsZ1X+Gr1hmtpToyeyLKc/zSNXPDwFb1nlvNLAesCBUqZcBvwzbCZ+rPVY9rwHS/sNtBbyxcs5w3mOInrBHA2ulPW/IWOfycsb6HuCy8N48ovt2LvAvSedL2ihlGuMsNbMXUu7b6N4m2ZI1r/khor+fLYEnzOz5qvdW36sQZno2fH0uFNxHEz1dPypprqTt6pznIaJ7/8qU11gr6e+uE7YCPlPzd/MaYEszuxf4NNFDyr8k/UhSp89feF4AFJSkPYj+geO6+l1EFDaZmnCIWURV7N1TnO41VT+PJap5VFRPF/sYMAjsYGajwtfGZrZBeP/RmGPV8wj1Y/W1U9Q+AtxUdc5Rodp/ArAUWNHEeSGEgSTtBawL3Lj6xGZnm9nuwPZE4ZjpDY6VpPY6niPK5AGQtEXVe43ubZIlRJldtbFE4cBHgU0krVf13up7FXrNbBC+vhK2XWdmbyMKqd1NFFKMO89Yonv/z5g0DblWosK6VtLfXRqNpjJ+BPhyzd/NeqGGhJldbmZvIromA77a5Pl7nhcABSNpI0mHAj8CLg3V4yFCOOZ0op5CscxsGfBfwCkpTjtd0isUdR/9FHBl3E5mtoooM/impM1DesdImhx2+TFwrKTtQ4ZzesI5LwCOk7R/aFwcU/Wk+U+iGHPFz4BtJb1P0ojwtYekfzOzlcBsogbq9UJMv24/8eDnRP/0XwSuDNdFOOYbJY0gysBeIAqHpFGb5jh3ADtI2kXSulSFyFLc20bXs62kf5e0lqSjiQqwn5nZQ8B8ovuzdij0Dqt3IEmvVNQ4vz5RuORZXr4HVwAnhkblDYhCVFeGv8dai4jCQyMkTQSOjNnn8+F3tgNwHHX+7hL8E3i1pLXrvP894PjwO5Wk9SUdImlDSeMl7SdpHaLf8yDpf9d9wwuA4rhW0jNETy2nETXkHZew/xVET3dJvk0U62zkamAB0T/tXNbsflrts8C9wK2SngZuAMYDmNkviNoQ5oV95tU7SGjkPg74JvAUcBMvP11+GzhS0pOSzg5hmwOJwjZLgH8QPa2tE/b/OFGD5D+Ai4lqSHWFMNpsokbqy6ve2ogo03iSKCTxOFFNqjI46RcJhz0DuCSEGo6K28HM/kpU6NwA/I01a3d1722D63mcqJ3kMyHNpwCHmtljYZdjgL3Ce18iymhfrHO4YcBJRPf5CWAf4ITw3oXAD4GbgQeIMs5P1DnO54lqeE8SdUi4PGafm8L1/hr4upld3+haa8wD7gL+Iemx2jfNbD7wEaKw3pPhXMeGt9chanR/jOjvZnOiNo1SkS8IU26KBpK9PsREXQlIuhK428ySamiuBLwG4FyfC6GtbUKo7SCirr5z8k6Xy5+PTnSu/21BFP
LalGhsyQmhK60rOQ8BOedcSXkIyDnnSqqnQkCbbbaZbb311nknwznnesqCBQseM7M1BhX2VAGw9dZbM3/+/LyT4ZxzPUVS7Oh4DwE551xJeQHgnHMl5QWAc86VlBcAzjlXUl4AOOdcSfVULyDnXLw5CweYdd09LFk2yJajRjJ98nim7Dom72S5gvMCwLkeN2fhAKfOXszg8mji14Flg5w6O5pF3AsBl8RDQM71uFnX3bM6868YXL6SWdfdk1OKXK/wAsC5HrdkWfxyw/W2O1fhBYBzPW7LUSOb2u5chRcAzvW46ZPHM3LE8CHbRo4YzvTJDRcTcyXnjcDO9bhKQ6/3AnLN8gLAuT4wZdcxnuG7pnkIyDnnSsoLAOecKykvAJxzrqS8AHDOuZLyAsA550rKCwDnnCspLwCcc66kvABwzrmS8gLAOedKygsA55wrKS8AnHOupLwAcM65kvICwDnnSsoLAOecK6lcCwBJB0m6R9K9kmbkmRbnnCub3AoAScOBc4GDge2B90jaPq/0OOdc2eRZA3gDcK+Z3W9mLwE/Ag7PMT3OOVcqeRYAY4BHql7/PWwbQtI0SfMlzV+6dGnXEuecc/2u8I3AZna+mU00s4mjR4/OOznOOdc38lwTeAB4TdXrV4dtzjnXk+YsHGDWdfewZNkgW44ayfTJ4wu9VnNiDUDScEmXZXTuPwGvlzRO0trAu4FrMjqXc85las7CAU6dvZiBZYMYMLBskFNnL2bOwuI+1ybWAMxspaStJK0dGmo7xsxWSPo4cB0wHLjQzO7q5Dmcc93Ra0++WZh13T0MLl85ZNvg8pXMuu6ewt6LNCGg+4FbJF0DPFfZaGbfaPfkZvZz4OftHsc5l5/Kk28l86s8+QKFzfg6pbrgszr7LFk22NU0NSNNI/B9wM/CvhtWfTnnXOKTbz+rDfnUs+WokV1LU7Ma1gDM7EwASeuZ2fPZJ8k510vqPeEW+cm3E+IKvlojRwxn+uTxXUpR8xoWAJL2Ai4ANgDGSpoAfNTMPpZ14pxzxRIX699y1EgGYjL7Ij/5dkJSASfoibaQNG0A3wImE3romNkdkt6Saaqcc4VTL9Z/xO5juGrBwJCn4aI/+XZCvYJvzKiR3DJjvxxS1LxUA8HM7JGaTcn1Hudc36kX67/x7qWcNXUnxowaiYgywLOm7lToJ99OmD55PCNHDB+yLW3BN2fhAJNmzmPcjLlMmjkvt66iaWoAj0jaGzBJI4BPAX/JNlnOuaJJivVP2XVM32f4tSrX22z31yL1mkpTABwPfJtonp4B4HrA4//OFVAW/fErx6zX06XfY/1JWin4ijReIE0BMN7MjqneIGkScEs2SXLOtSKLJ8vaY9YqQ6y/04rUaypNG8B/p9zmnMtRFv3xk7o65h3rL0ocvVn1akx51KTq1gBC98+9gdGSTqp6ayOiqRuccwWSxZNlvc8Kcu3pUqQ4erOmTx6/Rq0qr5pUUg1gbaK+/2sxdATw08CR2SfNOdeMLJ4si/S0Wq2XRx9P2XVMYXpN1a0BmNlNwE2SLjazh7qYJudcC7J4sizS02q1IsXRW1GUXlNJIaBvmdmngXMkrdEBwMzekWnKnHNNabVbYreP2QllHX3caTKL79wlaXczWyBpn7j3Qw2hqyZOnGjz58/v9mmdy5RPpdy8uN5JI0cML8UAtFZIWmBmE2u3J4WAFoTvXc/onSuLbjdm9kthU9SaSa9JCgEthvqznJrZzpmkyLkS6eagoF7uOROn03H0fikcm5E0EOzQrqXCuZLqZmNmkUagFk2/FY5pJXUDPR+YCow0s4dqv7qUPuf6Wje7WdYrVOIaU8uml7uVtiOpAPgA8CRwhqTbJX1H0uGS1u9S2pzre+3MKNmseoWKoGdG0Wal17uVtqpuAWBm/zCzi83s3cBE4AfA7sD1km6QdEq3Eulcv+rmoKDpk8ejmO0Gff+k20hRB7xlLc1kcJjZKuAP4esLkjYjWiTGOdemLA
cF1TZs9uLC5d1Q1AFvWWs4GZykr0naSNIISb+WtBQ4yMwu60L6nHMtql20fGDZYGwNAPr/SbeRIk3P0E1pagAHmtkpkt4JPEjUMHwzcGmWCXPOtSeuYdOIYv7VNYFuPukWuatlUaZn6KY0BUBln0OAn5jZU1K95wjnXFrtZIZpPlsvrGNET7jdzoTL2tWyyNIUAD+TdDcwCJwgaTTwQrbJcq6/tZMZpvnsnIUDDJNYGTPVS16Llvs4hOJp2AZgZjOI1gWYaGbLgeeAw7NOmHP9rJ1+540+Wykg4jL/PBs2y9rVssiSpoKYGrOt+uXsLBLkXBm0kxk2+my9VbyGS4kNm1nH530Gz+JJCgEdlvCe4QWAcy1rJzNs9Nl6BcQqs8TMP+v4fFm7WhZZ0mygx3UzIc6VSaPMMOlpvNFn0xYu1eeIay/odHy+1Rk8i9xzqNc1bASWtDFwOvCWsOkm4Itm9lSWCXOunyVlho2exhtlpGmetGvPEddeAJ2Pzzfb1dJ7DmWr7oIwq3eQrgLuBC4Jm94HTDCzNdoIsuYLwrgymDRzXuwT/KiRI1h0+oGpjtHoqbneOWrl1WOool46s05Xv9U6ml4Qpso2ZnZE1eszJS1qMzGziNoYXgLuA44zs2XtHNO5flHvqXvZ4HLmLBxIlRE1etJO82RfhPh8Hj2HylTraNgNFBiU9KbKC0mTiMYEtONXwI5hUZm/Aqe2eTzn+kZSQ3CnJm2rd47hUqGmQshjkrYyTQ2dpgZwAnBJaAsQ8ARwbDsnNbPrq17eChzZzvGc6yfTJ4/n01fGV7I79eRbr50gi0y/nXBKHj2HyjReoWEBYGaLgAmSNgqvn+5wGj4IXFnvTUnTgGkAY8eO7fCpnctXvczxzGvv4snnl6+xf6eefDuxpm6ajL3dcEoea/+WabxCmkbgUcD7ga2pKjDM7JMNPncDsEXMW6eZ2dVhn9OI1hqYao0SgjcCu/5SmznCy0/hQNee0FuRlPY0jc15Ny4nSXttvaSdRuCfE4VpFgOr0p7QzA5okKBjidYd3j9N5u9cv0mKNVcyx7x7otR7yk87r08vhlPyqHXkJU0BsK6ZndTJk0o6CDgF2MfMnu/ksV359GqXvUaZY97TE8eFb068chHzH3oidcbeq+GUvO99t6TpBfRDSR+R9CpJm1S+2jzvOcCGwK8kLZJ0XpvHcyUVt+jJqbMX98Qat0VfhrDeegKX3fow6609PPYztWnv5prHrnlpagAvAbOA03h5HQkDXtvqSc3sda1+1rlqRZpiuNmaSCs9XLpZ20laT+C5l9acbG7EcK2R9jKFU3pRmgLgM8DrzOyxrBPjXLOKEmNupbdLs5ljtwco1Qvf1LP+2mvFpqMs4ZRelKYAuBfwOL0rpKLEmFutiTSTOXa7tjN98nhOvHJR3YXkaz01uGa3VVdsadoAngMWSfqupLMrX1knzLk0ihJj7kZNpNu1nSm7juGYPceusZB8vQVhjajbZy+0v7hImhrAnPDlXOEUJcacpibSbvw+j9rOl6bsxMStNhmS7n23G81VCwZiF53p53lz+lHDgWBF4gPBXFE1GjwU976AY/Ycy5em7LT6GEkFRJEGKFXSWq+NoMgDvcqo3kAwLwCc65CkDLzR9MuvWG8Ez76wguWrXv5/jMvcizbmYdyMubFtBAIemHlIt5Pj6mhnJLBzLoWkBt1Gcfq4eX/iGniL1qOmKI3wrjVpGoGdK5w5CweYNHMe42bM7YmGx1YzxIFlg4W+xqI0wrvWpFkScltgOrAVQyeD8wCfy0Un+8N3K6TSbJfKatUjnKG1xtWsrrMojfCuNWlmA70DOA9YAKxufTKzBdkmbU3eBuCgczNMttKo2k5Gesz3/sAt9z2ROn1xWmlcLVLjsctHvTaANCGgFWb2HTO7zcwWVL4ySKNzqXSqP3yzKz+1M+/QnIUD3P7wU4n7jBguRo0cUbefPbTW579MK1y55qQpAK6V9LEOTwbnXM
s6NYlaswVJOxlp3Gdh6BKMs46cwKLTD+SBmYcwpoMTxRVlugxXPGl6AX0gfJ9eta2tyeCca0enlglstgdLmoy0Xoio3mdXmcV2l+zkUojt9NQpWrdT11lploQc142EOJdWpxoem81kG2WkSY3TzWbCnWxcTXOdcRk90NXJ51z31W0ElrSfmc2TNDXufTObnWnKYngjsOu0Zp5wGzWmJjVOd3MR9to0V0bsDpdYabY6PZXz1ruuddYaxrKYCd6GS6wy8xpBD2llINg+wDzgsJj3DOh6AeB6T9FDCM0MrGr0VJ4UIsqju2Rtxr7SbPWTf/V567VtxLVZVI4DXiPoB3ULADM7PXw/rnvJcf2k2/PXd0NSgdEozNPtUbztrtubRl6L77jO8JHALjNl635YtFGx9TL2gWWDQ0YW12uHeMV6I9a4nmbO44rP5wJymSlT98NKqGtw+cq6sfY0n+9keChpRa/q2li99onTD9sBeDlsNSxcV9x5XG/yAsBlpiwThaWNtaf9fKdCZftuN5pLb3247vuV2lhlZHG9AqhRY7HP+9O70swFtAC4ELjczJ7MPkmuX8Q9WY4YJp5/aQXjZswtZKNwK9pdqjGrpR5vvHtpw30qtbE07RM+70//SVMDOBo4DviTpPnARcD11ksLCbi2tBqeqM0wNh45gudeWrF66uN+aBSG9kNdWYXK0ny+soxjM7/TXv5duaEaNgKb2b1mdhqwLXA5UW3gIUln+pQQ/a+d+W8gyjBumbEfD8w8hPXXWYvlK4c+N3SzUTjNFNKtTDPd7tQUnZraotXPN/s7df0jVS8gSTsD/wXMAq4C3gU8TTROwPWxTvbkybNROE1B1mph127vn6x6D8Udt95Ec73QO6vX1oDoBWnbAJYBFwAzzOzF8NYfJU3KMnEuf53MtLNsFG4UpkoTZ281Ft9ubDyr2Hq949Zbl6DIvbPqNZTPf+gJbrx7qbdJtChNG8C7zOz+6g2SxpnZA2YWO02E6x+dzLQ7OcFZtTS9aNIUZO0Udu3GxrOKrccdt95i7kXunVWvcL7s1odXF2b90qbUTWlCQP+bcpvrQ3FhBIDnX1rRdBV8yq5jOGvqTowZNXL1FMidmAsnTZgqTZw9q1h80RRtwFoa9Qrh2ppML4SyiqRuDUDSdsAOwMY1E8JtBKybdcJcMVQy5zOuuWvIxGBPPr+8paetLJ500zy5p6l9ZFVDKZpe7M6ZNKitVpFDWUWTFAIaDxwKjGLohHDPAB/JMlGuWKbsOoZZ192zxsyQRZkHJk2YKk2m14sZY6t6rTtnXOEs1qwBQP/V2LKUNBnc1cDVkvYysz90MU2ugIo8rUPaJ/e0g516KWMsi7jCed/tRnPVgoG+r7FlKSkEdIqZfQ34d0nvqX3fzD6ZacpcoeQ5rUOjHj6dfnIv+hTWZRVXOE/cahP/XbUhKQT0l/A9sxVYJH0G+Dow2swey+o8rn15xcfTzpPTqSf3fpzCup95ja09SSGga8P3S7I4saTXAAcC9WercoWRV3w8q3lyinK+XuY1pd6XFAK6lvg2FgDM7B1tnvubwCnA1W0exzWpnbl9uv0P3u22h0bn80wv4jWl/pAUAvp6VieVdDgwYGZ3SPUGp6/edxowDWDs2LFZJak0iviPm5SpdrvtIel8Rbx3efGaUvdk+dCRFAK6qZ0DS7oB2CLmrdOAzxGFfxoys/OB8yFaFL6dNLni/eM2ylS73faQdL40964sNYQi9wrrJ1k/dNQdCSzpx+H7Ykl/rvpaLOnPjQ5sZgeY2Y61X8D9wDjgDkkPAq8GbpcUV1i4DivaP26jUbxZjR6uJ+l8acJD7cyc2kvKMmo6b1kvq5oUAvpU+H5oR84UmNliYPPK61AITPReQN1RtFW60hRI3W57qHe+RveuaLWrLJVl1HTesn5gq1sDMLNHw/eHgBeBCcDOwIthm+tBRZsHpqhPknFTDze6d0WrXWWp2zWzssr6/6PhZHCSPgzcBk
wFjgRulfTBjpwdMLOt/em/e4r2j7vvdqPXmKM+7yfJeqEcIPHeFbUwy0r1Yj+3zNjPM/8MZP3AlmY66OnArmb2OICkTYHfE60M5npQUQbPzFk4wFULBob0NRZwxO7x6etWA2tSKCcpo/OwiOu0rMffpCkAHieaAK7imbDNubbEZbRG/GLm3eyC2Woop0yTybnuyfKBLWkg2Enhx3uJVv+6muj/83CgYS8g17869STeTEbbzQbWdhrK2/1nLUs3UlcMSTWADcP3+8JXhY/cLbFOPok3k9HWKywGlg0yZ+FARzPJZkI5cRk2tFYL8IFmrtuSBoKd2c2EuN7QySfxZjLapAVBOp1JVo5z5rV38eTz0RoI66y1Zn+JuAx7+k/uAMHylbZ6W9r0lakbqSuGNL2AbpQ0r/arG4lzxdPJro7N9EiqtzQlZLcM4AvLV63+edng8iGDuuYsHOAzP75jjQx7+Spbnfk3m75e7UYa12XW9YY0jcAnV/28LnAEsCKb5Lii6/RAsrQx88o+n75yUez7rWSSSfH2RiMwT529mJWWfmaSNOmrd2+HSYybMbeQbQIetuptDWsAZrag6usWMzsJeGv2SXNFlOdAsim7jmFMh/raN5q2IelpPK5waCRN+urVclaaFXZqiaynKnDZShMC2qTqazNJk4GNu5A2V0B5DyTrVAHUKONKGtSV9DQ/YpgYMXzo0La06au9t8NjZsotSuZaCfvUa5cpetjKRdKEgBYQdf8UUejnAeBDWSbKFVueA8k61de+Uby90aygcRnfcIlZ75rQVvqq7+24GXObSnu31IZ94vTr6Od+07AAMLNx3UiIc2l1ogBq1JbRqKCJKxyqa0KdKCCLNnFfRaMQmI9+7h1JA8GmJn3QzGZ3PjnOdUeaLqj1Cppujfgt6tQSSTWQMQVsqHb1JdUADkt4zwAvAFzPajcT70YYrKhTS9SrmYwZNZJbZuyXQ4pcq2RNdGXL28SJE23+/Pl5J8O5UotrA6gNgblikbTAzCbWbm/YBiBpY+B04C1h003AF83sqc4m0TnXC4paM3HNS9ML6ELgTuCo8Pp9wEVE6wM450qoKFOKu/akKQC2MbMjql6fKSl+OKZzQb/Oall7XftuN5ob717ad9fpyiFNATAo6U1m9jsASZMAH+Xh6urX6QHiruvSWx9e/X6/XKcrj4YjgYETgHMlPSjpIeAc4Phsk+V6Wb9OD5BmCoh+uE5XHmkGgi0CJkjaKLx+OvNUuZ6W9ayWWYeX6h0/bfrzHqnrXFppegF9iqjR9xnge5J2A2aY2fVZJ871pixHsGYdXko6ftKaBNVauc5+bTNxxZYmBPTB8NR/ILApUS+gmZmmyhVWmrnfs5wxNOvwUtLxk9YkqGjlOhvNTOpcVtIUAJUpCd8O/MDM7qra5kokbUaV5YyhWYeXko4fd13v3XNs29fZr20mrvhSzQYq6XpgHHCqpA2BVQ0+4/pQM0sWZtVPPOsJ0tJMEtfp6+rVlcBc70tTA/gQMAPYw8yeB9YGjss0Va6QipBRZb0gTR4L3iStPeBcltKsCLbKzG43s2WSzjCzx83sz91InCuWImRUWS9Ik8eCN3musubKranJ4CTdbma7ZZieRD4ZXL58ErDseC8gl6WWJ4OrPU6H0uN6kE8Clh2fW8flodkCYPdMUuF6hmdUzvWPNAPBzq55DfAUMN/Mrs4oXS5DHm5wzkG6XkDrArsAfwtfOwOvBj4k6VsZps1lwAcdOecq0oSAdgYmmdlKAEnfAX4LvAlY3OqJJX0C+H/ASmCumZ3S6rFces305e9FWdRu+rHG1I/X5JqXpgB4BbABUdgHYH1gEzNbKenFVk4qaV/gcGCCmb0oafNWjuOaV4S+/FnJYp6gfpzauh+vybUmTQjoa8AiSRdJuhhYCMyStD5wQ4vnPQGYaWYvAiquJTgAAA/ISURBVJjZv1o8jmtSEfryZyWLKRX6cZqGfrwm15o000FfIOnnwB
vCps+Z2ZLw8/QWz7st8GZJXwZeAE42sz/F7ShpGjANYOzYsS2ezlVMnzw+ti9/Pww66kTtpjY0Um/2z16uMfVzLdA1J2030D2AN4efVwFLEvYFQNINwBYxb50WzrsJsGc49o8lvdZiRqWZ2fnA+RANBEuZXldHP/flb3eeoLjQiIC4P7perjFlPZ+S6x1puoHOJMqkLwubPilpLzP7XNLnzOyAhGOeAMwOGf5tklYBmwFLU6fctaxf+/K3W7uJC40YrFEI9HqNqZ9rga45aWoAbwd2MbNVAJIuIWoHSCwAGpgD7AvcKGlbognmHmvjeK5JWfUCybN3Sbu1m3ohECOaE6hfakz9XAt0zUkbAhoFPBF+3rgD570QuFDSncBLwAfiwj8uG1n1AilC75J2ajf1QiNjRo3klhn7tZu0QunXWqBrTppeQGcBCyVdHJ7+FwBfbuekZvaSmb3XzHY0s93MbF47x3PNyaoXSK/3LvFZOV3ZpOkFdIWk3xC1AwB81sz+kWmqXKay6gXS671LPDTiyqZuARAWf6/29/B9S0lbmtnt2SXLZSmrXiD90LvEQyOuTJJqAP+V8J4B/RUULZGseoF47xLnekvdAsDM9u1mQlz3ZBXq8BCKc72lqRXB8uYrgjnnXPPqrQiWpheQc865PuQFgHPOlVTDAkCR90r6Qng9VtIbGn3OOedcsaUZCfw/RBPA7Qd8EXgGuIqXxwW4EvGFRJzrH2kKgDea2W6SFgKY2ZOS1s44Xa6AijDVQzu88HJuqDRtAMslDSdMiChpNFGNwJVML0/14GshO7emNAXA2cBPgc3DAi6/A76SaapcZuYsHGDSzHmMmzGXSTPnNZUB9vJUD71ceDmXlTRzAV0maQGwP9HU6FPM7C+Zp8x1XLshnF6e6qGXCy/nspKmF9BY4HngWuAa4LmwzfWYdp+Ce3m2zDRrIbdTO3KuF6VpBJ7LywsjrQuMA+4BdsgwXS4D7T4F9/JUD43mKer1Bm7nWpEmBLRT9eswS+jHMkuRy0wnQji9Oltmo8IrqXbUi9frXBppVwRbzcxul/TGLBLjstXvs3U26uaZVHh5G4ErozSLwp9U9XIYsBuwJLMUucx0MoSTtk99t/rel7mB27lWpakBbFj18wqiNoGrskmOy1onQjhpM9tuxtXbDeH0e+3IuTiJBUAYALahmZ3cpfS4HpA2s+1mXL3MDdzOtSppSci1zGyFpEndTJArvrSZbTfj6mVu4HauVUnjAG4L3xdJukbS+yRNrXx1I3GumNL0qW9mv07o5TEKzuUlzVQQ6wKPE80GeihwWPjuSiptZtvNTHnKrmM4a+pOjBk1EgFjRo3krKk7+RO9cwmS2gA2Dz2A7uTlgWAVvbOOpOu4tPHybsfVPYTjXHOSCoDhwAYMzfgrvAAoubSZrWfKzhVXUgHwqJl9sWspcc4511VJbQBxT/7OOef6RFIBsH/XUuGcc67r6oaAzOyJbibE5c+XTHSuXJqeDM71J58O2bnySTMOwJWAL5noXPnkUgOQtAtwHtEgsxXAx8zstuRPtcbDGun4dMjOlU9eNYCvAWea2S7AF8LrjquENQaWDWK8HNbwpf7W1M1pG5xzxZBXAWDARuHnjclofQEPa6Tnc+k4Vz55NQJ/GrhO0teJCqG96+0oaRowDWDs2ObWovewRno+HbJz5ZNZASDpBmCLmLdOIxpjcKKZXSXpKOAC4IC445jZ+cD5ABMnTmxqCoq8Vnnq1XYHn7bBuXLJrAAws9gMHUDSD4BPhZc/Ab6fRRryWOXJu1M653pFXm0AS4B9ws/7AX/L4iR5TBHs7Q7OuV6RVxvAR4BvS1oLeIEQ489Ct8Ma3u7gnOsVuRQAZvY7YPc8zp21vNodnHOuWT4SuMO8O6Vzrlf4XEAd5t0pm9ervaac63VeAGTAu1Om572mnMuPFwAuc0lP+Em9prwAcC5bXgC4TDV6wvdeU87lxxuBMzBn4QCTZs5j3Iy5TJ
o5r9STzzUaF+GT0DmXHy8AOsxnIB2q0RO+95pyLj9eAHSYjwQeqtETfidGa3uNy7nWeBtAh3lMe6g08zG102vKexE51zqvAXSYx7SHyno+Jq9xOdc6rwF0WB4zkBZdluMivMblXOu8BtBhecxAWmZe43KudV4DyICPBO4er3E51zovAFxP87mXnGudFwCu53mNy7nWeBuAc86VlBcAzjlXUl4AOOdcSXkB4JxzJeUFgHPOlZTMLO80pCZpKfBQeLkZ8FiOySkqvy/x/L7E8/tSXz/dm63MbHTtxp4qAKpJmm9mE/NOR9H4fYnn9yWe35f6ynBvPATknHMl5QWAc86VVC8XAOfnnYCC8vsSz+9LPL8v9fX9venZNgDnnHPt6eUagHPOuTZ4AeCccyXV0wWApF0k3SppkaT5kt6Qd5qKQtInJN0t6S5JX8s7PUUi6TOSTNJmeaelCCTNCn8rf5b0U0mj8k5TniQdJOkeSfdKmpF3erLU0wUA8DXgTDPbBfhCeF16kvYFDgcmmNkOwNdzTlJhSHoNcCDwcN5pKZBfATua2c7AX4FTc05PbiQNB84FDga2B94jaft8U5WdXi8ADNgo/LwxsCTHtBTJCcBMM3sRwMz+lXN6iuSbwClEfzsOMLPrzWxFeHkr8Oo805OzNwD3mtn9ZvYS8COih6m+1OsFwKeBWZIeIXrKLe2TS41tgTdL+qOkmyTtkXeCikDS4cCAmd2Rd1oK7IPAL/JORI7GAI9Uvf572NaXCr8imKQbgC1i3joN2B840cyuknQUcAFwQDfTl5cG92UtYBNgT2AP4MeSXmsl6PPb4L58jij8UzpJ98XMrg77nAasAC7rZtpcfnp6HICkp4BRZmaSBDxlZhs1+ly/k/RL4KtmdmN4fR+wp5ktzTdl+ZG0E/Br4Pmw6dVEIcM3mNk/cktYQUg6FvgosL+ZPd9g974laS/gDDObHF6fCmBmZ+WasIz0eghoCbBP+Hk/4G85pqVI5gD7AkjaFlib/pnVsCVmttjMNjezrc1sa6Kq/W6e+Ue9XojaRd5R5sw/+BPweknjJK0NvBu4Juc0ZabwIaAGPgJ8W9JawAvAtJzTUxQXAhdKuhN4CfhAGcI/rmXnAOsAv4oq0txqZsfnm6R8mNkKSR8HrgOGAxea2V05JyszPR0Ccs4517peDwE555xrkRcAzjlXUl4AOOdcSXkB4JxzJeUFgHPOlZQXACUiaWWYOfVOST+RtF4bx7pY0pHh5+8nTZgl6a2S9m7hHA/GzdhZb3vWJP1GUuIi4dX3pWb7RElnh5+PlXRO+Pl4Se+v2r5lk2lKvPedlvIe7Cnpe+H3/rOw7R2VmTUlTennCdZ6iRcA5TJoZruY2Y5E4wOG9PUO4ymaZmYfNrP/S9jlrUDTBUA/MbP5ZvbJmO3nmdkPwstjgaYKgBT3Pg8HA7+s3mBm15jZzPByCtFMm6m1+rfpknkBUF6/BV4XntJ+K+ka4P8kDQ/zw/8pzA//UQBFzgnzpN8AbF45UPVTYZhL/XZJd0j6taStiQqaE0Pt482SRku6KpzjT5Imhc9uKun6sIbB9wE1ughJJ4UazZ2SPl21/fMhrb+TdIWkk2M+u76kuSGtd0o6OmzfX9JCSYslXShpnZjPPlv185GSLq56+wBF61P8VdKhYZ/VT8M1xzlD0smh1jARuCzcp0Mkzana722Sfhrz+ep7/6ykL4fruVXSK+uc75LwO39I0lRJXwvX+ktJI9Lcg/B3cnG4b4slnVj19v7ADTX7Hxv+fvYG3kE0ieMiSduEr19KWhDStV34zMWSzpP0R3yq90x4AVBC4WnqYGBx2LQb8Ckz2xb4ENGcSnsQTST3EUnjgHcC44me3N5PzBO9pNHA94AjzGwC8C4zexA4D/hmqH38Fvh2eL0HcATw/XCI04HfhTUMfgqMbXAduwPHAW8kmvjuI5J2VTT76RHAhHCd9UIWBwFLzGxCqBX9UtK6wMXA0Wa2E9
Fo+ROS0hFja6JphQ8BzgvHTGRm/wvMB44J61v8HNgu3FPCdV7Y4DDrE43inQDcTDRSPs42RFOnvAO4FLgxXOsgcEjKe7ALMMbMdgz7XASgKDS33MyeqnOdvyeaWmF6+Hu4j2jx9U+Y2e7AycD/VH3k1cDeZnZSg2t3LfACoFxGSlpElNE8TDR7KsBtZvZA+PlA4P1hvz8CmwKvB94CXGFmK81sCTAv5vh7AjdXjmVmT9RJxwHAOeEc1wAbSdognOPS8Nm5wJMNrudNwE/N7DkzexaYDbwZmARcbWYvmNkzwLV1Pr8YeJukr0p6c8i0xgMPmNlfwz6XhHQ148dmtsrM/gbcD2zX5OcJU3f8EHivohW69qLxNM0vAZVaxgKigijOL8xsOdH1D+flcM3i8Jk09+B+4LWS/lvRXEJPh+0HAtc3SOdq4fe+N/CT8PfwXeBVVbv8xMxWpj2ea47H1cplMDxdrqZo7pfnqjcRPY1dV7Pf2zuYjmFEs5O+EJOWzChaDaxSGJxnZudJ2g14O/AlSb8Grk55uOo5VGqf8GvnV2l1vpWLiNL7AlFGuKLB/sur5nxaSf3/78pCQaskVX9mVcJnhjCzJyVNACYThfiOIlpL4GDgG2mOEQwDltX+XVZ5rs521wFeA3C1rgNOqIoFbytpfaKQwtEh9vsqwmyjNW4F3hJCRkjaJGx/Btiwar/rgU9UXkiq/PPfDPx72HYw8IoGaf0tMEXSeiGN7wzbbgEOk7RueMI8FMDMHglhh11C5r8l8LyZXQrMIgqF3QNsLel14RzvA26KOfc/Jf2bpGHhvNXeJWmYpG2A14ZjpjHkPoWa1hLgPwghli5peA9CqGeYmV0V0rebohJ8Z2BRg+Ovvk4zexp4QNK7wnEVChbXBV4DcLW+TxQGuD38Qy8l6rXxU6K48f8RhY/+UPtBM1sqaRowO2SM/wLeRvQU+7+KVuT6BPBJ4FxJfyb6G7yZ6CnyTOAKSXcBv6fBur1mdntofL2tknYzWwigqFH7z8A/iUIbcTHpnYgaI1cBy4ETzOwFSccRhSTWIpoe+LyYz84gCrcsJQqpbVD13sMhTRsBx4djJl1KxcVEbQaDwF5mNki0OMtoM/tLmgN0Qsp7MAa4KPyeIVqNb3dgYYqZZ38EfE/SJ4EjgWOA70j6D2BEeN9XbesCnw3U9SVJG5jZs4rGOtwMTDOz2/NOV7MUjRdYaGYXNNw5ZyEDv9fMfpR3Wlw6XgC4viTpcqIeS+sCl/Tiik6SFhDFwN9mZi/mnR7Xf7wAcM65kvJGYOecKykvAJxzrqS8AHDOuZLyAsA550rKCwDnnCup/w9WnjpOnG4CeQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zjq2lDjvc_-B", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "LnfcVQsNc_-C", - "colab_type": "text" - }, - "source": [ - "# Bibliography\n", - "\n", - "[1] John S. Delaney. ESOL: Estimating aqueous solubility directly from molecular structure. Journal\n", - "of Chemical Information and Computer Sciences, 44(3):1000–1005, 2004.\n", - "\n", - "[2] Anderson, Eric, Gilman D. Veith, and David Weininger. SMILES, a line notation and computerized\n", - "interpreter for chemical structures. US Environmental Protection Agency, Environmental Research Laboratory, 1987.\n", - "\n", - "[3] Rogers, David, and Mathew Hahn. \"Extended-connectivity fingerprints.\" Journal of chemical information\n", - "and modeling 50.5 (2010): 742-754.\n", - " \n", - "[4] Van Der Walt, Stefan, S. Chris Colbert, and Gael Varoquaux.\n", - "\"The NumPy array:a structure for efficient numerical computation.\" Computing in Science & Engineering 13.2 (2011): 22-30.\n", - " \n", - "[5] Bemis, Guy W., and Mark A. Murcko. 
\"The properties of known drugs. 1. Molecular frameworks.\"\n", - "Journal of medicinal chemistry 39.15 (1996): 2887-2893.\n", - "\n", - "[6] Pedregosa, Fabian, et al. \"Scikit-learn: Machine learning in Python.\" The Journal of Machine Learning Research 12 (2011): 2825-2830." - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/04_Introduction_to_Graph_Convolutions.ipynb b/examples/tutorials/04_Introduction_to_Graph_Convolutions.ipynb deleted file mode 100644 index acca8bee95f10047363303121f38aa18ff8bbc47..0000000000000000000000000000000000000000 --- a/examples/tutorials/04_Introduction_to_Graph_Convolutions.ipynb +++ /dev/null @@ -1,812 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "04_Introduction_to_Graph_Convolutions.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "ubFUlqz8cj1L", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 4: Introduction to Graph Convolutions\n", - "\n", - "In the previous sections of the tutorial, we learned about `Dataset` and `Model` objects. We learned how to load some data into DeepChem from files on disk and also learned some basic facts about molecular data handling. We then dove into some basic deep learning architectures. 
However, until now, we stuck with vanilla deep learning architectures and didn't really consider how to handle deep architectures specifically engineered to work with life science data.\n", - "\n", - "In this tutorial, we'll change that by going a little deeper and learn about \"graph convolutions.\" These are one of the most powerful deep learning tools for working with molecular data. The reason for this is that molecules can be naturally viewed as graphs.\n", - "\n", - "![Molecular Graph](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/basic_graphs.gif?raw=1)\n", - "\n", - "Note how standard chemical diagrams of the sort we're used to from high school lend themselves naturally to visualizing molecules as graphs. In the remainder of this tutorial, we'll dig into this relationship in significantly more detail. This will let us get an in-the guts understanding of how these systems work.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/04_Introduction_to_Graph_Convolutions.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "EoCLxSnBcj1N", - "colab_type": "code", - "outputId": "9f0d0869-0cba-41ac-afe4-0b227975d254", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 7382 0 --:--:-- --:--:-- --:--:-- 7382\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.37 s, sys: 553 ms, total: 2.92 s\n", - "Wall time: 1min 50s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "iEMqPVorcj1R", - "colab_type": "text" - }, - "source": [ - "Ok now that we have our environment installed, we can actually import the core `GraphConvModel` that we'll use through this tutorial." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Ph78CIgAcj1S", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc\n", - "from deepchem.models.graph_models import GraphConvModel" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "BX2erW0ncj1W", - "colab_type": "text" - }, - "source": [ - "Now, let's use the MoleculeNet suite to load the Tox21 dataset. We need to make sure to process the data in a way that graph convolutional networks can use For that, we make sure to set the featurizer option to 'GraphConv'. The MoleculeNet call will return a training set, a validation set, and a test set for us to use. 
The call also returns `transformers`, a list of data transformations that were applied to preprocess the dataset. (Most deep networks are quite finicky and require a set of data transformations to ensure that training proceeds stably.)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "JMi2V8Jncj1W", - "colab_type": "code", - "outputId": "f6ce921a-dcd3-418b-e0bc-550a3a8854a7", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 476 - } - }, - "source": [ - "# Load Tox21 dataset\n", - "tox21_tasks, tox21_datasets, transformers = dc.molnet.load_tox21(featurizer='GraphConv', reload=False)\n", - "train_dataset, valid_dataset, test_dataset = tox21_datasets" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/tox21.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "TIMING: featurizing shard 0 took 20.421 s\n", - "TIMING: dataset construction took 22.914 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 2.796 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 1.314 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 1.340 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 2.580 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.283 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.282 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QfMW0Y4Kcj1Z", - "colab_type": "text" - }, - "source": [ - "Let's now train a graph convolutional network on 
this dataset. DeepChem has the class `GraphConvModel` that wraps a standard graph convolutional architecture underneath the hood for user convenience. Let's instantiate an object of this class and train it on our dataset." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Y9n3jTNHcj1a", - "colab_type": "code", - "outputId": "cb905258-d2bf-4839-af98-e697efc25b8e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "n_tasks = len(tox21_tasks)\n", - "model = GraphConvModel(n_tasks, batch_size=50, mode='classification')\n", - "\n", - "num_epochs = 10\n", - "losses = []\n", - "for i in range(num_epochs):\n", - " loss = model.fit(train_dataset, nb_epoch=1)\n", - " print(\"Epoch %d loss: %f\" % (i, loss))\n", - " losses.append(loss)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/layers.py:194: The name tf.unsorted_segment_sum is deprecated. Please use tf.math.unsorted_segment_sum instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/layers.py:196: The name tf.unsorted_segment_max is deprecated. Please use tf.math.unsorted_segment_max instead.\n", - "\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. 
Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:108: The name tf.losses.softmax_cross_entropy is deprecated. 
Please use tf.compat.v1.losses.softmax_cross_entropy instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:109: The name tf.losses.Reduction is deprecated. Please use tf.compat.v1.losses.Reduction instead.\n", - "\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/math_grad.py:424: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Use tf.where in 2.0, which has the same broadcast rule as np.where\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n", - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n", - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. 
\"\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Epoch 0 loss: 0.191648\n", - "Epoch 1 loss: 0.175314\n", - "Epoch 2 loss: 0.174189\n", - "Epoch 3 loss: 0.127369\n", - "Epoch 4 loss: 0.155123\n", - "Epoch 5 loss: 0.153067\n", - "Epoch 6 loss: 0.155021\n", - "Epoch 7 loss: 0.141685\n", - "Epoch 8 loss: 0.145031\n", - "Epoch 9 loss: 0.146506\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ozuyj_umcj1c", - "colab_type": "text" - }, - "source": [ - "Let's plot these losses so we can take a look at how the loss changes over the process of training." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "qbDXnYs7cj1d", - "colab_type": "code", - "outputId": "fb3d8126-c300-4280-d8d5-683897022e68", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 279 - } - }, - "source": [ - "import matplotlib.pyplot as plot\n", - "\n", - "plot.ylabel(\"Loss\")\n", - "plot.xlabel(\"Epoch\")\n", - "x = range(num_epochs)\n", - "y = losses\n", - "plot.scatter(x, y)\n", - "plot.show()" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEGCAYAAAB/+QKOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAVWUlEQVR4nO3dcZBd5X3e8e/jlbDXdmzRsOMJK2IpMVEtB2y5CxNCTMZOGeGmBZVADHEyodMZJklJ7LaoRf2jndB2cKK0k7qhHUjqtJnaZhhX1qiNnSVjOzgzcVwtCBCSqo6qyrCL06xnKmM3G1sSv/6xd2G1OoK7sGfPXd3vZ+aO7nnPOVe/PSPdZ8/7vuecVBWSJC31uq4LkCQNJgNCktTIgJAkNTIgJEmNDAhJUqN1XRewUi655JLatGlT12VI0pry2GOPfaOqxprWXTABsWnTJqamprouQ5LWlCRfO986u5gkSY0MCElSIwNCktTIgJAkNTIgJEmNLphZTK/W3gMz7J48ynMn57h0wyg7t29hx7bxrsuSpM4NdUDsPTDDrj0HmTt1BoCZk3Ps2nMQwJCQNPSGuotp9+TRF8NhwdypM+yePNpRRZI0OIY6IJ47ObesdkkaJkMdEJduGF1WuyQNk1YDIskNSY4mOZbknob11yV5PMnpJLcsWfdrSZ7uvT7URn07t29hdP3IWW2j60fYuX1LG3+dJK0prQ1SJxkB7geuB6aB/Un2VdXhRZs9A9wB3L1k358E3gu8B3g98EdJPl9Vz69kjQsD0c5ikqRztTmL6WrgWFUdB0jyEHAT8GJAVNWJ3roXluy7FfhyVZ0GTid5CrgBeHili9yxbdxAkKQGbXYxjQPPLlqe7rX140nghiRvTHIJ8H7gsqUbJbkzyVSSqdnZ2ddcsCTpJQM5SF1VjwCfA/4E+DTwFeBMw3YPVtVEVU2MjTXezlyS9Cq1GRAznP1b/8ZeW1+q6l9W1Xuq6nogwP9c4fokSS+jzYDYD1yeZHOSi4DbgH397JhkJMn39t5fCVwJPNJapZKkc7Q2SF1Vp5PcBUwCI8AnqupQknuBqaral+Qq4LPAxcDfSvKrVfUuYD3wx0kAngd+tjdgLUlaJa3ei6mqPsf8WMLitn+66P1+5ruelu73l8zPZJIkdWQgB6klSd0zICRJjQwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCktTIgJAkNTIgJEmNDAhJUiMDQpLUyICQJDUyICRJjQwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCktTIgJAkNTIgJEmNWg2IJDckOZrkWJJ7GtZfl+TxJKeT3LJk3a8nOZTkSJKPJ0mbtUqSztZaQCQZAe4HPghsBW5PsnXJZs8AdwCfWrLvjwLXAlcCPwxcBfx4W7VKks61rsXPvho4VlXHAZI8BNwEHF7YoKpO9Na9sGTfAt4AXAQEWA/8nxZrlSQt0WYX0zjw7KLl6V7bK6qqrwBfAr7ee01W1ZGl2yW5M8lUkqnZ2dkVKFmStGAgB6mTvAN4J7CR+VD5QJL3Ld2uqh6sqomqmhgbG1vtMiXpgtZmQMwAly1a3thr68ffBv60qr5dVd8GPg9cs8L1SZJeRpsBsR+4PMnmJBcBtwH7+tz3GeDHk6xLsp75AepzupgkSe1pLSCq6jRwFzDJ/Jf7w1V1KMm9SW4ESHJVkmngVuCBJId6u38G+F/AQeBJ4Mmq+q9t1SpJOleqqusaVsTExERNTU11XYYkrSlJHquqiaZ1AzlILUnqngEhSWpkQEiSGhkQkqRGBoQkqVGb92JSn/YemGH35FGeOznHpRtG2bl9Czu29XVXEklqjQHRsb0HZti15yBzp84AMHNyjl17DgIYEpI6ZRdTx3ZPHn0xHBbMnTrD7smjHVUkSfMMiI49d3JuWe2StFrsYurYpRtGmWkIg0s3jK56LY6FSFrMM4iO7dy+hdH1I2e1ja4fYef
2Latax8JYyMzJOYqXxkL2Huj3BrySLjQGRMd2bBvnvpuvYHzDKAHGN4xy381XrPpv7o6FSFrKLqYBsGPbeOddOY6FSFrKMwgB5x/z6GIsRNJgMCAEDM5YiKTBYReTgJcuynMWk6QFBoReNAhjIZIGh11MkqRGBoQkqZEBIUlqZEBIkhoZEJKkRq0GRJIbkhxNcizJPQ3rr0vyeJLTSW5Z1P7+JE8sev1lkh1t1ipJOltr01yTjAD3A9cD08D+JPuq6vCizZ4B7gDuXrxvVX0JeE/vc/4KcAx4pK1aJUnnavM6iKuBY1V1HCDJQ8BNwIsBUVUneuteeJnPuQX4fFX9RXulSpKWarOLaRx4dtHydK9tuW4DPt20IsmdSaaSTM3Ozr6Kj5Yknc9AD1In+T7gCmCyaX1VPVhVE1U1MTY2trrFSdIFrs2AmAEuW7S8sde2HD8NfLaqTq1YVZKkvrQZEPuBy5NsTnIR811F+5b5Gbdznu4lSVK7WguIqjoN3MV899AR4OGqOpTk3iQ3AiS5Ksk0cCvwQJJDC/sn2cT8GcijbdUoSTq/VFXXNayIiYmJmpqa6roMSVpTkjxWVRNN6wZ6kFqS1B0DQpLUyICQJDUyICRJjQwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCktTIgJAkNTIgJEmNDAhJUiMDQpLUyICQJDUyICRJjQwISVIjA0KS1KivgEjypiSv673/oSQ3JlnfbmmSpC71ewbxZeANScaBR4CfA/5jW0VJkrrXb0Ckqv4CuBn4d1V1K/Cu9sqSJHWt74BIcg3wYeD3e20jfex0Q5KjSY4luadh/XVJHk9yOsktS9Z9f5JHkhxJcjjJpj5rlSStgH4D4qPALuCzVXUoyQ8AX3q5HZKMAPcDHwS2Arcn2bpks2eAO4BPNXzE7wG7q+qdwNXAn/dZqyRpBazrZ6OqehR4FKA3WP2NqvqVV9jtauBYVR3v7fcQcBNweNHnnuite2Hxjr0gWVdVf9jb7tv91ClJWjn9zmL6VJK3JHkT8DRwOMnOV9htHHh20fJ0r60fPwScTLInyYEku3tnJJKkVdJvF9PWqnoe2AF8HtjM/EymtqwD3gfcDVwF/ADzXVFnSXJnkqkkU7Ozsy2WI0nDp9+AWN+77mEHsK+qTgH1CvvMAJctWt7Ya+vHNPBEVR2vqtPAXuC9SzeqqgeraqKqJsbGxvr8aElSP/oNiAeAE8CbgC8neTvw/Cvssx+4PMnmJBcBtwH7+vz79gMbkix863+ARWMXkqT29RUQVfXxqhqvqr9R874GvP8V9jkN3AVMAkeAh3szoO5NciNAkquSTAO3Ag8kOdTb9wzz3UtfSHIQCPDbr/JnlCS9Cql6pZ4iSPJW4J8B1/WaHgXurapvtljbskxMTNTU1FTXZUjSmpLksaqaaFrXbxfTJ4BvAT/dez0P/O7KlCdJGkR9XQcB/GBV/dSi5V9N8kQbBUmSBkO/ZxBzSX5sYSHJtcBcOyVJkgZBv2cQvwD8Xm8sAuD/Aj/fTkmSpEHQ7602ngTeneQtveXnk3wUeKrN4iRJ3VnWE+Wq6vneFdUA/6CFeiRJA+K1PHI0K1aFJGng9DsG0eSVL6CQ1rC9B2bYPXmU507OcemGUXZu38KObf3eb/LC4/EYPi8bEEm+RXMQBBhtpSJpAOw9MMOuPQeZO3UGgJmTc+zacxBgKL8UPR7D6WW7mKrqe6rqLQ2v76mq13L2IQ203ZNHX/wyXDB36gy7J492VFG3PB7D6bWMQUgXrOdONl/mc772C53HYzgZEFKDSzc096Cer/1C5/EYTgaE1GDn9i2Mrj/7IYaj60fYuX1LRxV1y+MxnBxHkBosDLwOwqydQZg9NEjHQ6unr9t9rwXe7lsXoqWzh2D+N/f7br7CL2etiJW43bekDjh7SF0yIKQB5uwhdcmAkAaYs4fUJQNCGmDOHlKXnMUkDTBnD6lLBoQ04HZsGzcQ1Am7mCRJjVo
NiCQ3JDma5FiSexrWX5fk8SSnk9yyZN2ZJE/0XvvarFOSdK7WupiSjAD3A9cD08D+JPuq6vCizZ4B7gDubviIuap6T1v1SZJeXptjEFcDx6rqOECSh4CbgBcDoqpO9Na90GIdkqRXoc0upnHg2UXL0722fr0hyVSSP02yo2mDJHf2tpmanZ19LbVKkpYY5EHqt/fuD/IzwG8m+cGlG1TVg1U1UVUTY2Njq1+hJF3A2gyIGeCyRcsbe219qaqZ3p/HgT8Ctq1kcZKkl9dmQOwHLk+yOclFwG1AX7ORklyc5PW995cA17Jo7EKS1L7WAqKqTgN3AZPAEeDhqjqU5N4kNwIkuSrJNHAr8ECSQ73d3wlMJXkS+BLwsSWznyRJLfN5EJI0xHwehCRp2QwISVIjb9YnSWtU288rNyAkaQ1a+rzymZNz7NpzEGDFQsIuJklag1bjeeUGhCStQavxvHIDQpLWoNV4XrkBIUnLtPfADNd+7Itsvuf3ufZjX2Tvgb7vIrRiVuN55Q5SS9IyrMbgcD9W43nlBoQkLcPLDQ6v9rPD235euV1MkrQMqzE4PCgMCElahtUYHB4UBoQkLcNqDA4PCscgJGkZVmNweFAYEJK0TG0PDg8Ku5gkSY0MCElSI7uYJK0Zbd/eWmczICStCYNyBfMwsYtJ0pqwGre31tkMCElrwjBdwTwoDAhJa8IwXcE8KFoNiCQ3JDma5FiSexrWX5fk8SSnk9zSsP4tSaaT/FabdUoafMN0BfOgaC0gkowA9wMfBLYCtyfZumSzZ4A7gE+d52P+OfDltmqUtHbs2DbOfTdfwfiGUQKMbxjlvpuvcIC6RW3OYroaOFZVxwGSPATcBBxe2KCqTvTWvbB05yR/DXgb8AfARIt1SlojhuUK5kHRZhfTOPDsouXpXtsrSvI64F8Bd7/CdncmmUoyNTs7+6oLlSSda1AHqX8J+FxVTb/cRlX1YFVNVNXE2NjYKpUmScOhzS6mGeCyRcsbe239uAZ4X5JfAt4MXJTk21V1zkC3JKkdbQbEfuDyJJuZD4bbgJ/pZ8eq+vDC+yR3ABOGgyStrta6mKrqNHAXMAkcAR6uqkNJ7k1yI0CSq5JMA7cCDyQ51FY9kqTlSVV1XcOKmJiYqKmpqa7LkKQ1JcljVdU4U3RQB6klSR0zICRJjQwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCktTIgJAkNTIgJEmNDAhJUiMDQpLUyICQJDUyICRJjQwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCktTIgJAkNTIgJEmNWg2IJDckOZrkWJJ7GtZfl+TxJKeT3LKo/e299ieSHEryC23WKUk617q2PjjJCHA/cD0wDexPsq+qDi/a7BngDuDuJbt/Hbimqr6T5M3A0719n2urXknS2VoLCOBq4FhVHQdI8hBwE/BiQFTVid66FxbvWFXfXbT4euwKk6RV1+YX7zjw7KLl6V5bX5JcluSp3mf8WtPZQ5I7k0wlmZqdnX3NBUuSXjKwv5lX1bNVdSXwDuDnk7ytYZsHq2qiqibGxsZWv0hJuoC1GRAzwGWLljf22pald+bwNPC+FapLktSHNgNiP3B5ks1JLgJuA/b1s2OSjUlGe+8vBn4MONpapZKkc7QWEFV1GrgLmASOAA9X1aEk9ya5ESDJVUmmgVuBB5Ic6u3+TuCrSZ4EHgV+o6oOtlWrJOlcqaqua1gRExMTNTU11XUZWgF7D8ywe/Ioz52c49INo+zcvoUd2/qe3yBpGZI8VlUTTevanOYqLdveAzPs2nOQuVNnAJg5OceuPfMnj4aEtLoGdhaThtPuyaMvhsOCuVNn2D3pEJS02gwIDZTnTs4tq11SewwIDZRLN4wuq11SewwIDZSd27cwun7krLbR9SPs3L6lo4qk4eUgtQbKwkC0s5ik7hkQGjg7to0bCNIAsItJktTIgJAkNTIgJEmNDAhJUiMDQpLU6IK5WV+SWeBrr+E
jLgG+sULlrHUei7N5PM7m8XjJhXAs3l5VjU9cu2AC4rVKMnW+OxoOG4/F2TweZ/N4vORCPxZ2MUmSGhkQkqRGBsRLHuy6gAHisTibx+NsHo+XXNDHwjEISVIjzyAkSY0MCElSo6EPiCQ3JDma5FiSe7qup0tJLkvypSSHkxxK8pGua+pakpEkB5L8t65r6VqSDUk+k+R/JDmS5Jqua+pSkr/f+3/ydJJPJ3lD1zWttKEOiCQjwP3AB4GtwO1JtnZbVadOA/+wqrYCPwL8vSE/HgAfAY50XcSA+DfAH1TVXwXezRAflyTjwK8AE1X1w8AIcFu3Va28oQ4I4GrgWFUdr6rvAg8BN3VcU2eq6utV9Xjv/beY/wIY2gczJNkI/CTwO13X0rUkbwWuA/4DQFV9t6pOdltV59YBo0nWAW8Enuu4nhU37AExDjy7aHmaIf5CXCzJJmAb8NVuK+nUbwL/CHih60IGwGZgFvjdXpfb7yR5U9dFdaWqZoDfAJ4Bvg58s6oe6baqlTfsAaEGSd4M/Bfgo1X1fNf1dCHJ3wT+vKoe67qWAbEOeC/w76tqG/D/gKEds0tyMfO9DZuBS4E3JfnZbqtaecMeEDPAZYuWN/bahlaS9cyHwyerak/X9XToWuDGJCeY73r8QJL/3G1JnZoGpqtq4YzyM8wHxrD668D/rqrZqjoF7AF+tOOaVtywB8R+4PIkm5NcxPwg076Oa+pMkjDfx3ykqv511/V0qap2VdXGqtrE/L+LL1bVBfcbYr+q6s+AZ5Ns6TX9BHC4w5K69gzwI0ne2Pt/8xNcgIP267ouoEtVdTrJXcAk87MQPlFVhzouq0vXAj8HHEzyRK/tn1TV5zqsSYPjl4FP9n6ZOg78nY7r6UxVfTXJZ4DHmZ/9d4AL8LYb3mpDktRo2LuYJEnnYUBIkhoZEJKkRgaEJKmRASFJamRASMuQ5EySJxa9Vuxq4iSbkjy9Up8nvVZDfR2E9CrMVdV7ui5CWg2eQUgrIMmJJL+e5GCS/57kHb32TUm+mOSpJF9I8v299rcl+WySJ3uvhds0jCT57d5zBh5JMtrZD6WhZ0BIyzO6pIvpQ4vWfbOqrgB+i/k7wQL8W+A/VdWVwCeBj/faPw48WlXvZv6eRgtX8F8O3F9V7wJOAj/V8s8jnZdXUkvLkOTbVfXmhvYTwAeq6njvhod/VlXfm+QbwPdV1ale+9er6pIks8DGqvrOos/YBPxhVV3eW/7HwPqq+hft/2TSuTyDkFZOnef9cnxn0fszOE6oDhkQ0sr50KI/v9J7/ye89CjKDwN/3Hv/BeAX4cXnXr91tYqU+uVvJ9LyjC660y3MP6N5YarrxUmeYv4s4PZe2y8z/xS2ncw/kW3hDqgfAR5M8neZP1P4ReafTCYNDMcgpBXQG4OYqKpvdF2LtFLsYpIkNfIMQpLUyDMISVIjA0KS1MiAkCQ1MiAkSY0MCElSo/8PGdewWsDjOHYAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kDDroutEcj1g", - "colab_type": "text" - }, - "source": [ - "We see that the losses fall nicely and give us stable learning.\n", - "\n", - "Let's try to evaluate the performance of the model we've trained. For this, we need to define a metric, a measure of model performance. `dc.metrics` holds a collection of metrics already. For this dataset, it is standard to use the ROC-AUC score, the area under the receiver operating characteristic curve (which measures the tradeoff between precision and recall). Luckily, the ROC-AUC score is already available in DeepChem. \n", - "\n", - "To measure the performance of the model under this metric, we can use the convenience function `model.evaluate()`." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "MeX-9RNWcj1h", - "colab_type": "code", - "outputId": "ad45b1ae-1d03-4b4b-df82-23f438c46912", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 122 - } - }, - "source": [ - "import numpy as np\n", - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)\n", - "\n", - "print(\"Evaluating model\")\n", - "train_scores = model.evaluate(train_dataset, [metric], transformers)\n", - "print(\"Training ROC-AUC Score: %f\" % train_scores[\"mean-roc_auc_score\"])\n", - "valid_scores = model.evaluate(valid_dataset, [metric], transformers)\n", - "print(\"Validation ROC-AUC Score: %f\" % valid_scores[\"mean-roc_auc_score\"])" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Evaluating model\n", - "computed_metrics: [0.8666525843373674, 0.9178571070166988, 0.9217018714002001, 0.9003978552985897, 0.8045825622157947, 0.8844489336147652, 0.912573326118501, 0.8530769166394683, 0.900605847595053, 0.8579473656818695, 0.9302700559756689, 0.8916651283772445]\n", - "Training ROC-AUC Score: 0.886815\n", - "computed_metrics: 
[0.7917182011576709, 0.7799272486772487, 0.8574614299367406, 0.8338205388917133, 0.6889772727272727, 0.72897490362678, 0.7396157840083074, 0.8472956152093756, 0.807581822462915, 0.777170981661273, 0.8805480370787324, 0.8253229974160206]\n", - "Validation ROC-AUC Score: 0.796535\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Wz43oG9rcj1j", - "colab_type": "text" - }, - "source": [ - "What's going on under the hood? Could we build `GraphConvModel` ourselves? Of course! The first step is to define the inputs to our model. Conceptually, graph convolutions just require the structure of the molecule in question and a vector of features for every atom that describes the local chemical environment. However in practice, due to TensorFlow's limitations as a general programming environment, we have to have some auxiliary information as well preprocessed.\n", - "\n", - "`atom_features` holds a feature vector of length 75 for each atom. The other inputs are required to support minibatching in TensorFlow. `degree_slice` is an indexing convenience that makes it easy to locate atoms from all molecules with a given degree. `membership` determines the membership of atoms in molecules (atom `i` belongs to molecule `membership[i]`). `deg_adjs` is a list that contains adjacency lists grouped by atom degree. For more details, check out the [code](https://github.com/deepchem/deepchem/blob/master/deepchem/feat/mol_graphs.py).\n", - "\n", - "To define feature inputs with Keras, we use the `Input` layer. Conceptually, a model is a mathematical graph composed of layer objects. `Input` layers have to be the root nodes of the graph since they consitute inputs." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "llRfKl-gcj1k", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import tensorflow as tf\n", - "import tensorflow.keras.layers as layers\n", - "\n", - "atom_features = layers.Input(shape=(75,))\n", - "degree_slice = layers.Input(shape=(2,), dtype=tf.int32)\n", - "membership = layers.Input(shape=tuple(), dtype=tf.int32)\n", - "\n", - "deg_adjs = []\n", - "for i in range(0, 10 + 1):\n", - " deg_adj = layers.Input(shape=(i+1,), dtype=tf.int32)\n", - " deg_adjs.append(deg_adj)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "oD2teCkKcj1m", - "colab_type": "text" - }, - "source": [ - "Let's now implement the body of the graph convolutional network. DeepChem has a number of layers that encode various graph operations. Namely, the `GraphConv`, `GraphPool` and `GraphGather` layers. We will also apply standard neural network layers such as `Dense` and `BatchNormalization`.\n", - "\n", - "The layers we're adding effect a \"feature transformation\" that will create one vector for each molecule." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "71_E0CAUcj1n", - "colab_type": "code", - "outputId": "91ef7ffb-110f-4e23-b092-fbb2497f55bf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 207 - } - }, - "source": [ - "from deepchem.models.layers import GraphConv, GraphPool, GraphGather\n", - "\n", - "batch_size = 50\n", - "\n", - "gc1 = GraphConv(64, activation_fn=tf.nn.relu)([atom_features, degree_slice, membership] + deg_adjs)\n", - "batch_norm1 = layers.BatchNormalization()(gc1)\n", - "gp1 = GraphPool()([batch_norm1, degree_slice, membership] + deg_adjs)\n", - "gc2 = GraphConv(64, activation_fn=tf.nn.relu)([gp1, degree_slice, membership] + deg_adjs)\n", - "batch_norm2 = layers.BatchNormalization()(gc2)\n", - "gp2 = GraphPool()([batch_norm2, degree_slice, membership] + deg_adjs)\n", - "dense = layers.Dense(128, activation=tf.nn.relu)(gp2)\n", - "batch_norm3 = layers.BatchNormalization()(dense)\n", - "readout = GraphGather(batch_size=batch_size, activation_fn=tf.nn.tanh)([batch_norm3, degree_slice, membership] + deg_adjs)\n", - "logits = layers.Reshape((n_tasks, 2))(layers.Dense(n_tasks*2)(readout))\n", - "softmax = layers.Softmax()(logits)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "oC20PZiccj1p", - "colab_type": "text" - }, - "source": [ - "Let's now create the `KerasModel`. To do that we specify the inputs and outputs to the model. We also have to define a loss for the model which tells the network the objective to minimize during training." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "31Wr0t2zcj1q", - "colab_type": "code", - "colab": {} - }, - "source": [ - "inputs = [atom_features, degree_slice, membership] + deg_adjs\n", - "outputs = [softmax]\n", - "keras_model = tf.keras.Model(inputs=inputs, outputs=outputs)\n", - "loss = dc.models.losses.CategoricalCrossEntropy()\n", - "model = dc.models.KerasModel(keras_model, loss=loss)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ivbKO0PTcj1s", - "colab_type": "text" - }, - "source": [ - "Now that we've successfully defined our graph convolutional model, we need to train it. We can call `fit()`, but we need to make sure that each minibatch of data populates all the `Input` objects that we've created. For this, we need to create a Python generator that given a batch of data generates the lists of inputs, labels, and weights whose values are Numpy arrays we'd like to use for this step of training." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "wgk6_WBwcj1t", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from deepchem.metrics import to_one_hot\n", - "from deepchem.feat.mol_graphs import ConvMol\n", - "\n", - "def data_generator(dataset, epochs=1, predict=False, pad_batches=True):\n", - " for epoch in range(epochs):\n", - " for ind, (X_b, y_b, w_b, ids_b) in enumerate(\n", - " dataset.iterbatches(\n", - " batch_size, pad_batches=pad_batches, deterministic=True)):\n", - " multiConvMol = ConvMol.agglomerate_mols(X_b)\n", - " inputs = [multiConvMol.get_atom_features(), multiConvMol.deg_slice, np.array(multiConvMol.membership)]\n", - " for i in range(1, len(multiConvMol.get_deg_adjacency_lists())):\n", - " inputs.append(multiConvMol.get_deg_adjacency_lists()[i])\n", - " labels = [to_one_hot(y_b.flatten(), 2).reshape(-1, n_tasks, 2)]\n", - " weights = [w_b]\n", - " yield (inputs, labels, weights)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VSTbjm9Hcj1v", - "colab_type": "text" - }, - "source": [ - "Now, we can train the model using `KerasModel.fit_generator(generator)` which will use the generator we've defined to train the model." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "59WW4rhwcj1w", - "colab_type": "code", - "outputId": "743bd3b6-61e9-4043-924c-fb17c54c0d2a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 479 - } - }, - "source": [ - "num_epochs = 10\n", - "losses = []\n", - "for i in range(num_epochs):\n", - " loss = model.fit_generator(data_generator(train_dataset, epochs=1))\n", - " print(\"Epoch %d loss: %f\" % (i, loss))\n", - " losses.append(loss)" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n", - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n", - "/tensorflow-1.15.2/python3.6/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n", - " \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. 
\"\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Epoch 0 loss: 0.187834\n", - "Epoch 1 loss: 0.178864\n", - "Epoch 2 loss: 0.171304\n", - "Epoch 3 loss: 0.138762\n", - "Epoch 4 loss: 0.157564\n", - "Epoch 5 loss: 0.155848\n", - "Epoch 6 loss: 0.150968\n", - "Epoch 7 loss: 0.141407\n", - "Epoch 8 loss: 0.145868\n", - "Epoch 9 loss: 0.142054\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "4KKBu75ccj1z", - "colab_type": "text" - }, - "source": [ - "Let's now plot these losses and take a quick look." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "SaPi5y8icj11", - "colab_type": "code", - "outputId": "14b92c06-1e5a-4d6d-8404-0674e9277745", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "plot.title(\"Keras Version\")\n", - "plot.ylabel(\"Loss\")\n", - "plot.xlabel(\"Epoch\")\n", - "x = range(num_epochs)\n", - "y = losses\n", - "plot.scatter(x, y)\n", - "plot.show()" - ], - "execution_count": 12, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAX/UlEQVR4nO3df7RdZZ3f8ffHJOgVxVjJsEwAExVToyixF/w1MksZGiwjpA4o1B9DV1scp8xoK+nA/GGVjsPMZNQpynSBjqMuRUptZMWKDVYdddUfzeVnDDRdMY2QGxzD1IjoVZLw7R/nXDm52Qn3mnuyT+55v9a6i7Of/eN8cxbJ5+7n2ed5UlVIkjTVE9ouQJI0mAwISVIjA0KS1MiAkCQ1MiAkSY0MCElSIwNCOsol+aMkH227Ds09BoTmhCTbk/xmz/ZFSX6U5DdaqueKJF9vaD8+ySNJXjhb71VVf1JV/3K2ridNMiA05yT5HeBa4Nyq+toMz50/S2V8CnhFkmVT2i8CNlXVd1uoSZoRA0JzSpK3Ae8HVlXVN7ttT0vy10keSDKe5I+TzOvuuyTJ/0zywSR/D7wnyXOSfCXJ3yd5MMmnkyzseY8/7F7nJ0m2JDlrah1VtQP4CvCWKbveCnyye53fSnJnkt1JvpnkRT3vsb37PncDP00y/2Dvm+Q9ST7Vc+55STZ3r/u3SZ4/5bqXJ7k7yY+T/OckTzrcz11zkwGhueTtwFXAWVU11tP+cWAv8FxgJfCPgd4umZcC24ATgPcBAa4GFgPPB04C3gOQZDlwGXB6VT0VWAVsP0g9n6AnILrnngbckGQl8DHgbcAzgOuA9Ume2HP+xcC5wELgOdN53yTPAz4DvBNYBNwCfD7JMT2HvQE4B1gGvAi45CD1a8gZEJpLzga+DWyabEhyAvBPgHdW1U+r6ofAB+l09UzaWVUfqqq9VTVRVVur6ktV9Yuq2gV8AJgcy9gHPBFYkWRBVW2vqu8dpJ7PASckeUV3+63AF7vXvBS4rqq+U1X7quoTwC+Al/Wcf01V3V9VEzN43zcCX+jWvwf4C2AEeEXPMddU1c6q+n/A5+mElnQAA0JzyduB5wEfTZJu27OABcAD3S6X3XR+W/+1nvPu771IkhOS3NjtznmIznjC8QBVtZXOb+fvAX7YPW5xUzFV9TPgvwBv7dbzJrrdS9263jVZU7euk+jctRxQ1wzedzHw/Z7zHu1eZ0nPMT/oef0z4ClN9UsGhOaSvwPOAl4F/FW37X46v5kfX1ULuz/HVdULes6bOqXxn3TbTq2q44A30+l26hxcdUNV/Tqdf+QL+LND1PQJOl06ZwNPpfMb+2Rd7+upaWFVPbmqPnOwuqb5vju7+wHoBtNJwPghapQaGRCaU6pqJ52QOCfJB6vqAeBW4P1JjkvyhO4g9KEef30q8DDw4yRLgDWTO5IsT/Ka7ljBz4EJ4NFDXOsbwG7geuDGqnqk2/4R4HeTvDQdxyY5N8lTmy4yg/e9CTg3yVlJFgDvohOQ3zxEjVIjA0JzTlXdB7wGuCDJ1XT6/o8B7gF+BHwWeOYhLvFe4CXAj4EvAOt69j0R+FPgQTpdNb8GXHmIWopOt9KzeKx7ie4g+r8CPtytaSuHHiye1vtW1RY6dzwf6h77OuB1PcEkTVtcMEiS1MQ7CElSIwNCktSorwGR5JzuNz63JrmiYf+ZSW5PsjfJBVP2/VmS73Z/3tjPOiVJB+pbQHSnMrgWeC2wArg4yYoph91HZ2DuhinnnktnkPA0Ot9yvTzJcf2qVZJ0oH5OAnYGsLWqtgEkuRE4n86TJABU1fbuvqmP660Avl5Ve4G93flozqHzCF+j448/vpYuXTqb9UvSnHfbbbc9WFWLmvb1MyCWsP83VHfQuRuYjruAf5/k/cCTgVfTEyxNli5dytjY2KEOkSRNkeT7B9s3kNMIV9WtSU6n8+WeXcC36MxFs58kl9KZ04aTTz7
5iNYoSXNdPwepx+l8xX/Siczg6/5V9b6qOq2qzqYzzcH/aTjm+qoararRRYsa75AkSb+ifgbERuCUJMu6Uw1fBKyfzolJ5iV5Rvf1i+hMSXxr3yqVJB2gb11MVbU3yWXABmAe8LGq2pzkKmCsqtZ3u5E+BzwdeF2S93YnUVsAfKM7IedDwJu7A9aSpCOkr2MQVXULnQVLetve3fN6I52up6nn/ZzOk0ySpJYM5CD1kXTzHeOs3bCFnbsnWLxwhDWrlrN65ZLHP1GS5rihDoib7xjnynWbmNjTeUBqfPcEV67rLEZmSEgadkM9F9PaDVt+GQ6TJvbsY+2GLS1VJEmDY6gDYufuiRm1S9IwGeqAWLxwZEbtkjRMhjog1qxazsiCefu1jSyYx5pVy1uqSJIGx1APUk8ORPsUkyQdaKgDAjohYSBI0oGGuotJknRwBoQkqZEBIUlqZEBIkhoZEJKkRgaEJKmRASFJamRASJIaGRCSpEYGhCSpkQEhSWpkQEiSGhkQkqRGBoQkqZEBIUlqZEBIkhoZEJKkRgaEJKmRASFJamRASJIaGRCSpEYGhCSpkQEhSWpkQEiSGvU1IJKck2RLkq1JrmjYf2aS25PsTXLBlH1/nmRzknuTXJMk/axVkrS/vgVEknnAtcBrgRXAxUlWTDnsPuAS4IYp574CeCXwIuCFwOnAb/SrVknSgeb38dpnAFurahtAkhuB84F7Jg+oqu3dfY9OObeAJwHHAAEWAH/Xx1olSVP0s4tpCXB/z/aObtvjqqpvAV8FHuj+bKiqe6cel+TSJGNJxnbt2jULJUuSJvXzDuJXluS5wPOBE7tNX0ryqqr6Ru9xVXU9cD3A6OhoHdkqZ8/Nd4yzdsMWdu6eYPHCEdasWs7qldPKUknqm37eQYwDJ/Vsn9htm45/Cny7qh6uqoeBLwIvn+X6BsLNd4xz5bpNjO+eoIDx3RNcuW4TN98x3Y9KkvqjnwGxETglybIkxwAXAeunee59wG8kmZ9kAZ0B6gO6mOaCtRu2MLFn335tE3v2sXbDlpYqkqSOvgVEVe0FLgM20PnH/aaq2pzkqiTnASQ5PckO4ELguiSbu6d/FvgesAm4C7irqj7fr1rbtHP3xIzaJelI6esYRFXdAtwype3dPa838tg4Q+8x+4C39bO2QbF44QjjDWGweOFIC9VI0mP8JnXL1qxazsiCefu1jSyYx5pVy1uqSJI6BvIppmEy+bSSTzFJGjQGxABYvXKJgSBp4NjFJElqZEBIkhoZEJKkRgaEJKmRASFJamRASJIaGRCSpEYGhCSpkQEhSWpkQEiSGhkQkqRGBoQkqZEBIUlqZEBIkhoZEJKkRgaEJKmRASFJamRASJIaGRCSpEYGhCSpkQEhSWpkQEiSGhkQkqRG89suQIPj5jvGWbthCzt3T7B44QhrVi1n9colbZclqSUGhIBOOFy5bhMTe/YBML57givXbQIwJKQhZReTAFi7Ycsvw2HSxJ59rN2wpaWKJLXNgBAAO3dPzKhd0txnQAiAxQtHZtQuae7ra0AkOSfJliRbk1zRsP/MJLcn2Zvkgp72Vye5s+fn50lW97PWYbdm1XJGFszbr21kwTzWrFreUkWS2ta3Qeok84BrgbOBHcDGJOur6p6ew+4DLgEu7z23qr4KnNa9zj8AtgK39qtWPTYQ7VNMkib18ymmM4CtVbUNIMmNwPnALwOiqrZ39z16iOtcAHyxqn7Wv1IFnZAwECRN6mcX0xLg/p7tHd22mboI+EzTjiSXJhlLMrZr165f4dKSpIMZ6EHqJM8ETgU2NO2vquurarSqRhctWnRki5OkOa6fATEOnNSzfWK3bSbeAHyuqvbMWlWSpGnpZ0BsBE5JsizJMXS6itbP8BoXc5DuJUlSf/UtIKpqL3AZne6he4GbqmpzkquSnAeQ5PQkO4ALgeuSbJ48P8lSOncgX+tXjZKkg0tVtV3DrBgdHa2xsbG2y5Cko0qS26pqtGn
fQA9SS5LaY0BIkhoZEJKkRgaEJKmRASFJamRASJIaGRCSpEYGhCSpkQEhSWpkQEiSGhkQkqRGBoQkqZEBIUlqNK2ASHJskid0Xz8vyXlJFvS3NElSm6Z7B/F14ElJlgC3Am8BPt6voiRJ7ZtuQKSqfga8HvirqroQeEH/ypIktW3aAZHk5cCbgC902+b1pyRJ0iCYbkC8E7gS+Fx32dBnA1/tX1mSpLbNn85BVfU1umtDdwerH6yqP+hnYZKkdk33KaYbkhyX5Fjgu8A9Sdb0tzRJUpum28W0oqoeAlYDXwSW0XmSSZI0R003IBZ0v/ewGlhfVXuA6l9ZkqS2TTcgrgO2A8cCX0/yLOChfhUlSWrfdAeprwGu6Wn6fpJX96ckSdIgmO4g9dOSfCDJWPfn/XTuJiRJc9R0u5g+BvwEeEP35yHgb/pVlCSpfdPqYgKeU1W/3bP93iR39qMgSdJgmO4dxESSX5/cSPJKYKI/JUmSBsF07yB+F/hkkqd1t38E/E5/SpIkDYLpPsV0F/DiJMd1tx9K8k7g7n4WJ0lqz4xWlKuqh7rfqAb4t32oR5I0IA5nydE87gHJOUm2JNma5IqG/WcmuT3J3iQXTNl3cpJbk9yb5J4kSw+jVknSDB1OQBxyqo0k84BrgdcCK4CLk6yYcth9wCXADQ2X+CSwtqqeD5wB/PAwapUkzdAhxyCS/ITmIAgw8jjXPgPYWlXbute6ETgfuGfygKra3t336JT3XQHMr6ovdY97+HHeS5I0yw4ZEFX11MO49hLg/p7tHcBLp3nu84DdSdbRmTn2fwBXVNW+3oOSXApcCnDyyScfRqmSpKkOp4upn+YDrwIuB04Hnk2nK2o/VXV9VY1W1eiiRYuObIWSNMf1MyDGgZN6tk/stk3HDuDOqtpWVXuBm4GXzHJ9kqRD6GdAbAROSbIsyTHARcD6GZy7MMnkbcFr6Bm7kCT1X98Covub/2XABuBe4Kaq2pzkqiTnASQ5PckO4ELguiSbu+fuo9O99OUkm+gMin+kX7VKkg6UqrmxMNzo6GiNjY21XYYkHVWS3FZVo037BnWQWpLUsulO1icNnZvvGGfthi3s3D3B4oUjrFm1nNUrl7RdlnTEGBBSg5vvGOfKdZuY2NP56s347gmuXLcJwJDQ0LCLSWqwdsOWX4bDpIk9+1i7YUtLFUlHngEhNdi5u3k9rIO1S3ORASE1WLyweaqxg7VLc5EBITVYs2o5Iwvm7dc2smAea1Ytb6ki6chzkFpqMDkQ7VNMGmYGhHQQq1cuGYhA8HFbtcWAkAaYj9uqTY5BSAPMx23VJgNCGmA+bqs2GRDSAPNxW7XJgJAGmI/bqk0OUksDzMdt1SYDQhpwg/K4rYaPXUySpEYGhCSpkQEhSWpkQEiSGhkQkqRGBoQkqZEBIUlqZEBIkhoZEJKkRgaEJKmRASFJamRASJIaOVmfpGlxbezhY0BIelyujT2c7GKS9LhcG3s49TUgkpyTZEuSrUmuaNh/ZpLbk+xNcsGUffuS3Nn9Wd/POiUdmmtjD6e+dTElmQdcC5wN7AA2JllfVff0HHYfcAlwecMlJqrqtH7VJ2n6Fi8cYbwhDFwbe27r5x3EGcDWqtpWVY8ANwLn9x5QVdur6m7g0T7WIekwuTb2cOpnQCwB7u/Z3tFtm64nJRlL8u0kq2e3NEkzsXrlEq5+/aksWThCgCULR7j69ac6QD3HDfJTTM+qqvEkzwa+kmRTVX2v94AklwKXApx88slt1CgNDdfGHj79vIMYB07q2T6x2zYtVTXe/e824G+BlQ3HXF9Vo1U1umjRosOrVpK0n34GxEbglCTLkhwDXARM62mkJE9P8sTu6+OBVwL3HPosSdJs6ltAVNVe4DJgA3AvcFNVbU5yVZLzAJKcnmQHcCFwXZLN3dOfD4wluQv4KvCnU55+kiT1Waqq7RpmxejoaI2NjbVdhiQdVZLcVlWjTfv8JrUkqZEBIUlqZEBIkho
ZEJKkRgaEJKmRASFJamRASJIaGRCSpEaDPFmfJA2kYVmf24CQpBkYpvW57WKSpBkYpvW5DQhJmoFhWp/bgJCkGTjYOtxzcX1uA0KSZmCY1ud2kFqSZmByINqnmCRJBxiW9bntYpIkNTIgJEmNDAhJUiMDQpLUyICQJDUyICRJjQwISVIjA0KS1MgvyknSUarf61IYEJJ0FDoS61IYEJKOGsOyktt0HGpdCgNC0lAZppXcpuNIrEvhILWko8IwreQ2HUdiXQoDQtJRYZhWcpuOI7EuhQEh6agwTCu5TcfqlUu4+vWnsmThCAGWLBzh6tef6lNMkobPmlXL9xuDgLm7ktt09Xtdir7eQSQ5J8mWJFuTXNGw/8wktyfZm+SChv3HJdmR5MP9rFPS4DsSvzFrf327g0gyD7gWOBvYAWxMsr6q7uk57D7gEuDyg1zmPwBf71eNko4uw7KS26Do5x3EGcDWqtpWVY8ANwLn9x5QVdur6m7g0aknJ/lHwAnArX2sUZJ0EP0MiCXA/T3bO7ptjyvJE4D3c/A7i8njLk0ylmRs165dv3KhkqQDDepTTL8H3FJVOw51UFVdX1WjVTW6aNGiI1SaJA2Hfj7FNA6c1LN9YrdtOl4OvCrJ7wFPAY5J8nBVHTDQrbnH6RSkwdDPgNgInJJkGZ1guAj4Z9M5sareNPk6ySXAqOEwHJxOQRocfetiqqq9wGXABuBe4Kaq2pzkqiTnASQ5PckO4ELguiSb+1WPjg5OpyANjr5+Ua6qbgFumdL27p7XG+l0PR3qGh8HPt6H8jSAnE5BGhyDOkitIeV0CtLgMCA0UI7EBGSSpse5mDRQJgeifYpJap8BoYHjdArSYLCLSZLUyICQJDUyICRJjQwISVIjA0KS1ChV1XYNsyLJLuD7h3GJ44EHZ6mco52fxf78PPbn5/GYufBZPKuqGqfDnjMBcbiSjFXVaNt1DAI/i/35eezPz+Mxc/2zsItJktTIgJAkNTIgHnN92wUMED+L/fl57M/P4zFz+rNwDEKS1Mg7CElSIwNCktRo6AMiyTlJtiTZmmSo171OclKSrya5J8nmJO9ou6a2JZmX5I4k/63tWtqWZGGSzyb530nuTfLytmtqU5J/0/178t0kn0nypLZrmm1DHRBJ5gHXAq8FVgAXJ1nRblWt2gu8q6pWAC8D/vWQfx4A76CzprrgPwL/var+IfBihvhzSbIE+ANgtKpeCMwDLmq3qtk31AEBnAFsraptVfUIcCNwfss1taaqHqiq27uvf0LnH4ChXZghyYnAucBH266lbUmeBpwJ/DVAVT1SVbvbrap184GRJPOBJwM7W65n1g17QCwB7u/Z3sEQ/4PYK8lSYCXwnXYradVfAv8OeLTtQgbAMmAX8DfdLrePJjm27aLaUlXjwF8A9wEPAD+uqlvbrWr2DXtAqEGSpwD/FXhnVT3Udj1tSPJbwA+r6ra2axkQ84GXAP+pqlYCPwWGdswuydPp9DYsAxYDxyZ5c7tVzb5hD4hx4KSe7RO7bUMryQI64fDpqlrXdj0teiVwXpLtdLoeX5PkU+2W1KodwI6qmryj/CydwBhWvwn836raVVV7gHXAK1quadYNe0BsBE5JsizJMXQGmda3XFNrkoROH/O9VfWBtutpU1VdWVUnVtVSOv9ffKWq5txviNNVVT8A7k+yvNt0FnBPiyW17T7gZUme3P17cxZzcNB+ftsFtKmq9ia5DNhA5ymEj1XV5pbLatMrgbcAm5Lc2W37o6q6pcWaNDh+H/h095epbcA/b7me1lTVd5J8FridztN/dzAHp91wqg1JUqNh72KSJB2EASFJamRASJIaGRCSpEYGhCSpkQEhzUCSfUnu7PmZtW8TJ1ma5LuzdT3pcA319yCkX8FEVZ3WdhHSkeAdhDQLkmxP8udJNiX5X0me221fmuQrSe5O8uUkJ3fbT0jyuSR3dX8mp2mYl+Qj3XUGbk0y0tofSkPPgJBmZmRKF9M
be/b9uKpOBT5MZyZYgA8Bn6iqFwGfBq7ptl8DfK2qXkxnTqPJb/CfAlxbVS8AdgO/3ec/j3RQfpNamoEkD1fVUxratwOvqapt3QkPf1BVz0jyIPDMqtrTbX+gqo5Psgs4sap+0XONpcCXquqU7vYfAguq6o/7/yeTDuQdhDR76iCvZ+IXPa/34TihWmRASLPnjT3//Vb39Td5bCnKNwHf6L7+MvB2+OW61087UkVK0+VvJ9LMjPTMdAudNZonH3V9epK76dwFXNxt+306q7CtobMi2+QMqO8Ark/yL+jcKbydzspk0sBwDEKaBd0xiNGqerDtWqTZYheTJKmRdxCSpEbeQUiSGhkQkqRGBoQkqZEBIUlqZEBIkhr9fytsohgssAHxAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "skrL9YEEcj13", - "colab_type": "text" - }, - "source": [ - "Now that we have trained our graph convolutional method, let's evaluate its performance. We again have to use our defined generator to evaluate model performance." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "f3prNsgGcj14", - "colab_type": "code", - "outputId": "3be59cd4-eedd-418d-843a-3f6850589dba", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 102 - } - }, - "source": [ - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)\n", - "\n", - "def reshape_y_pred(y_true, y_pred):\n", - " \"\"\"\n", - " GraphConv always pads batches, so we need to remove the predictions\n", - " for the padding samples. Also, it outputs two values for each task\n", - " (probabilities of positive and negative), but we only want the positive\n", - " probability.\n", - " \"\"\"\n", - " n_samples = len(y_true)\n", - " return y_pred[:n_samples, :, 1]\n", - " \n", - "\n", - "print(\"Evaluating model\")\n", - "train_predictions = model.predict_on_generator(data_generator(train_dataset, predict=True))\n", - "train_predictions = reshape_y_pred(train_dataset.y, train_predictions)\n", - "train_scores = metric.compute_metric(train_dataset.y, train_predictions, train_dataset.w)\n", - "print(\"Training ROC-AUC Score: %f\" % train_scores)\n", - "\n", - "valid_predictions = model.predict_on_generator(data_generator(valid_dataset, predict=True))\n", - "valid_predictions = reshape_y_pred(valid_dataset.y, valid_predictions)\n", - "valid_scores = metric.compute_metric(valid_dataset.y, valid_predictions, valid_dataset.w)\n", - "print(\"Valid ROC-AUC Score: %f\" % valid_scores)" - ], - "execution_count": 13, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Evaluating model\n", - "computed_metrics: [0.8675160312395302]\n", - 
"Training ROC-AUC Score: 0.867516\n", - "computed_metrics: [0.7743495839421475]\n", - "Valid ROC-AUC Score: 0.774350\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tvOYgj52cj16", - "colab_type": "text" - }, - "source": [ - "Success! The model we've constructed behaves nearly identically to `GraphConvModel`. If you're looking to build your own custom models, you can follow the example we've provided here to do so. We hope to see exciting constructions from your end soon!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true, - "id": "j1FrVn88cj17", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
- ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/04_Molecular_Fingerprints.ipynb b/examples/tutorials/04_Molecular_Fingerprints.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6c2928dace2058f846acd68423b99d1fcc2e3f9b --- /dev/null +++ b/examples/tutorials/04_Molecular_Fingerprints.ipynb @@ -0,0 +1,324 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "socSJe925zFv" + }, + "source": [ + "# Tutorial 4: Molecular Fingerprints\n", + "\n", + "Molecules can be represented in many ways. This tutorial introduces a type of representation called a \"molecular fingerprint\". It is a very simple representation that often works well for small drug-like molecules.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/04_Molecular_Fingerprints.ipynb)\n", + "\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "OyxRVW5X5zF0", + "outputId": "affd23f1-1929-456a-f8a6-e53a874c84b4" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "CMWAv-Z46nCc", + "outputId": "9ae7cfd0-ebbf-40b0-f6f1-2940cf32a839" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Jk47QTZ95zF-" + }, + "source": [ + "We can now import the `deepchem` package to play with." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "colab_type": "code", + "id": "PDiY03h35zF_", + "outputId": "cdd7401d-19a0-4476-9297-b04defc67178" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import deepchem as dc\n", + "dc.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "B0u7qIZd5zGG" + }, + "source": [ + "# What is a Fingerprint?\n", + "\n", + "Deep learning models almost always take arrays of numbers as their inputs. If we want to process molecules with them, we somehow need to represent each molecule as one or more arrays of numbers.\n", + "\n", + "Many (but not all) types of models require their inputs to have a fixed size. 
This can be a challenge for molecules, since different molecules have different numbers of atoms. If we want to use these types of models, we somehow need to represent variable sized molecules with fixed sized arrays.\n", + "\n", + "Fingerprints are designed to address these problems. A fingerprint is a fixed length array, where different elements indicate the presence of different features in the molecule. If two molecules have similar fingerprints, that indicates they contain many of the same features, and therefore will likely have similar chemistry.\n", + "\n", + "DeepChem supports a particular type of fingerprint called an \"Extended Connectivity Fingerprint\", or \"ECFP\" for short. They also are sometimes called \"circular fingerprints\". The ECFP algorithm begins by classifying atoms based only on their direct properties and bonds. Each unique pattern is a feature. For example, \"carbon atom bonded to two hydrogens and two heavy atoms\" would be a feature, and a particular element of the fingerprint is set to 1 for any molecule that contains that feature. It then iteratively identifies new features by looking at larger circular neighborhoods. One specific feature bonded to two other specific features becomes a higher level feature, and the corresponding element is set for any molecule that contains it. This continues for a fixed number of iterations, most often two.\n", + "\n", + "Let's take a look at a dataset that has been featurized with ECFP." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "saTaOpXY5zGI" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_tox21(featurizer='ECFP')\n", + "train_dataset, valid_dataset, test_dataset = datasets\n", + "print(train_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "F922OPtL5zGM" + }, + "source": [ + "The feature array `X` has shape (6264, 1024). That means there are 6264 samples in the training set. Each one is represented by a fingerprint of length 1024. Also notice that the label array `y` has shape (6264, 12): this is a multitask dataset. Tox21 contains information about the toxicity of molecules. 12 different assays were used to look for signs of toxicity. The dataset records the results of all 12 assays, each as a different task.\n", + "\n", + "Let's also take a look at the weights array." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 102 + }, + "colab_type": "code", + "id": "YEDcUsz35zGO", + "outputId": "5a05747f-8b06-407d-9b11-790a1b4d1c8f" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[1.0433141624730409, 1.0369942196531792, 8.53921568627451, ...,\n", + " 1.060388945752303, 1.1895710249165168, 1.0700990099009902],\n", + " [1.0433141624730409, 1.0369942196531792, 1.1326397919375812, ...,\n", + " 0.0, 1.1895710249165168, 1.0700990099009902],\n", + " [0.0, 0.0, 0.0, ..., 1.060388945752303, 0.0, 0.0],\n", + " ...,\n", + " [0.0, 0.0, 0.0, ..., 0.0, 0.0, 0.0],\n", + " [1.0433141624730409, 1.0369942196531792, 8.53921568627451, ...,\n", + " 1.060388945752303, 0.0, 0.0],\n", + " [1.0433141624730409, 1.0369942196531792, 1.1326397919375812, ...,\n", + " 1.060388945752303, 1.1895710249165168, 1.0700990099009902]],\n", + " dtype=object)" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "train_dataset.w" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "E8UCFrrN5zGf" + }, + "source": [ + "Notice that some elements are 0. The weights are being used to indicate missing data. Not all assays were actually performed on every molecule. Setting the weight for a sample or sample/task pair to 0 causes it to be ignored during fitting and evaluation. It will have no effect on the loss function or other metrics.\n", + "\n", + "Most of the other weights are close to 1, but not exactly 1. This is done to balance the overall weight of positive and negative samples on each task. When training the model, we want each of the 12 tasks to contribute equally, and on each task we want to put equal weight on positive and negative samples. 
Otherwise, the model might just learn that most of the training samples are non-toxic, and therefore become biased toward identifying other molecules as non-toxic.\n", + "\n", + "# Training a Model on Fingerprints\n", + "\n", + "Let's train a model. In earlier tutorials we use `GraphConvModel`, which is a fairly complicated architecture that takes a complex set of inputs. Because fingerprints are so simple, just a single fixed length array, we can use a much simpler type of model." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "e5K3rdGV5zGg" + }, + "outputs": [], + "source": [ + "model = dc.models.MultitaskClassifier(n_tasks=12, n_features=1024, layer_sizes=[1000])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_Zcd7jTd5zGr" + }, + "source": [ + "`MultitaskClassifier` is a simple stack of fully connected layers. In this example we tell it to use a single hidden layer of width 1000. We also tell it that each input will have 1024 features, and that it should produce predictions for 12 different tasks.\n", + "\n", + "Why not train a separate model for each task? We could do that, but it turns out that training a single model for multiple tasks often works better. We will see an example of that in a later tutorial.\n", + "\n", + "Let's train and evaluate the model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "LJc90fs_5zGs", + "outputId": "8c9fd5ab-e23a-40dc-9292-8b4ff3a86890" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "training set score: {'roc_auc_score': 0.9550063590563469}\n", + "test set score: {'roc_auc_score': 0.7781819573695475}\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "\n", + "model.fit(train_dataset, nb_epoch=10)\n", + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", + "print('training set score:', model.evaluate(train_dataset, [metric], transformers))\n", + "print('test set score:', model.evaluate(test_dataset, [metric], transformers))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "aQa88cbj5zGw" + }, + "source": [ + "Not bad performance for such a simple model and featurization. More sophisticated models do slightly better on this dataset, but not enormously better." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "MhZxVoVs5zMa" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "01_The_Basic_Tools_of_the_Deep_Life_Sciences.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/05_Creating_Models_with_TensorFlow_and_PyTorch.ipynb b/examples/tutorials/05_Creating_Models_with_TensorFlow_and_PyTorch.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..14feaa5f9214ca0c638c343b239f6210a1e1dd88 --- /dev/null +++ b/examples/tutorials/05_Creating_Models_with_TensorFlow_and_PyTorch.ipynb @@ -0,0 +1,262 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tutorial Part 5: Creating Models with TensorFlow and PyTorch\n", + "\n", + "In the tutorials so far, we have used standard models provided by DeepChem. This is fine for many applications, but sooner or later you will want to create an entirely new model with an architecture you define yourself. DeepChem provides integration with both TensorFlow (Keras) and PyTorch, so you can use it with models from either of these frameworks.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/05_Creating_Models_with_TensorFlow_and_PyTorch.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. 
This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are actually two different approaches you can take to using TensorFlow or PyTorch models with DeepChem. It depends on whether you want to use TensorFlow/PyTorch APIs or DeepChem APIs for training and evaluating your model. For the former case, DeepChem's `Dataset` class has methods for easily adapting it to use with other frameworks. `make_tf_dataset()` returns a `tensorflow.data.Dataset` object that iterates over the data. `make_pytorch_dataset()` returns a `torch.utils.data.IterableDataset` that iterates over the data. This lets you use DeepChem's datasets, loaders, featurizers, transformers, splitters, etc. and easily integrate them into your existing TensorFlow or PyTorch code.\n", + "\n", + "But DeepChem also provides many other useful features. The other approach, which lets you use those features, is to wrap your model in a DeepChem `Model` object. Let's look at how to do that.\n", + "\n", + "## KerasModel\n", + "\n", + "`KerasModel` is a subclass of DeepChem's `Model` class. It acts as a wrapper around a `tensorflow.keras.Model`. Let's see an example of using it. For this example, we create a simple sequential model consisting of two dense layers." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import tensorflow as tf\n", + "\n", + "keras_model = tf.keras.Sequential([\n", + " tf.keras.layers.Dense(1000, activation='relu'),\n", + " tf.keras.layers.Dropout(rate=0.5),\n", + " tf.keras.layers.Dense(1)\n", + "])\n", + "model = dc.models.KerasModel(keras_model, dc.models.losses.L2Loss())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For this example, we used the Keras `Sequential` class. Our model consists of a dense layer with ReLU activation, 50% dropout to provide regularization, and a final layer that produces a scalar output. We also need to specify the loss function to use when training the model, in this case L2 loss. We can now train and evaluate the model exactly as we would with any other DeepChem model. For example, let's load the Delaney solubility dataset. How does our model do at predicting the solubilities of molecules based on their extended-connectivity fingerprints (ECFPs)?" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "training set score: {'pearson_r2_score': 0.9787445597470444}\n", + "test set score: {'pearson_r2_score': 0.736905850092889}\n" + ] + } + ], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_delaney(featurizer='ECFP', splitter='random')\n", + "train_dataset, valid_dataset, test_dataset = datasets\n", + "model.fit(train_dataset, nb_epoch=50)\n", + "metric = dc.metrics.Metric(dc.metrics.pearson_r2_score)\n", + "print('training set score:', model.evaluate(train_dataset, [metric]))\n", + "print('test set score:', model.evaluate(test_dataset, [metric]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## TorchModel\n", + "\n", + "`TorchModel` works just like `KerasModel`, except it wraps a `torch.nn.Module`. 
Let's use PyTorch to create another model just like the previous one and train it on the same data." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "training set score: {'pearson_r2_score': 0.9798256761766225}\n", + "test set score: {'pearson_r2_score': 0.7256745385608444}\n" + ] + } + ], + "source": [ + "import torch\n", + "\n", + "pytorch_model = torch.nn.Sequential(\n", + " torch.nn.Linear(1024, 1000),\n", + " torch.nn.ReLU(),\n", + " torch.nn.Dropout(0.5),\n", + " torch.nn.Linear(1000, 1)\n", + ")\n", + "model = dc.models.TorchModel(pytorch_model, dc.models.losses.L2Loss())\n", + "\n", + "model.fit(train_dataset, nb_epoch=50)\n", + "print('training set score:', model.evaluate(train_dataset, [metric]))\n", + "print('test set score:', model.evaluate(test_dataset, [metric]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Computing Losses\n", + "\n", + "Now let's see a more advanced example. In the above models, the loss was computed directly from the model's output. Often that is fine, but not always. Consider a classification model that outputs a probability distribution. While it is possible to compute the loss from the probabilities, it is more numerically stable to compute it from the logits.\n", + "\n", + "To do this, we create a model that returns multiple outputs, both probabilities and logits. `KerasModel` and `TorchModel` let you specify a list of \"output types\". If a particular output has type `'prediction'`, that means it is a normal output that should be returned when you call `predict()`. If it has type `'loss'`, that means it should be passed to the loss function in place of the normal outputs.\n", + "\n", + "Sequential models do not allow multiple outputs, so instead we use a subclassing style model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "class ClassificationModel(tf.keras.Model):\n", + " \n", + " def __init__(self):\n", + " super(ClassificationModel, self).__init__()\n", + " self.dense1 = tf.keras.layers.Dense(1000, activation='relu')\n", + " self.dense2 = tf.keras.layers.Dense(1)\n", + "\n", + " def call(self, inputs, training=False):\n", + " y = self.dense1(inputs)\n", + " if training:\n", + " y = tf.nn.dropout(y, 0.5)\n", + " logits = self.dense2(y)\n", + " output = tf.nn.sigmoid(logits)\n", + " return output, logits\n", + "\n", + "keras_model = ClassificationModel()\n", + "output_types = ['prediction', 'loss']\n", + "model = dc.models.KerasModel(keras_model, dc.models.losses.SigmoidCrossEntropy(), output_types=output_types)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can train our model on the BACE dataset. This is a binary classification task that tries to predict whether a molecule will inhibit the enzyme BACE-1." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "training set score: {'roc_auc_score': 0.9996116504854369}\n", + "test set score: {'roc_auc_score': 0.7701992753623188}\n" + ] + } + ], + "source": [ + "tasks, datasets, transformers = dc.molnet.load_bace_classification(featurizer='ECFP', splitter='scaffold')\n", + "train_dataset, valid_dataset, test_dataset = datasets\n", + "model.fit(train_dataset, nb_epoch=100)\n", + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", + "print('training set score:', model.evaluate(train_dataset, [metric]))\n", + "print('test set score:', model.evaluate(test_dataset, [metric]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Other Features\n", + "\n", + "`KerasModel` and `TorchModel` have lots of other features. 
Here are some of the more important ones.\n", + "\n", + "- Automatically saving checkpoints during training.\n", + "- Logging progress to the console, to [TensorBoard](https://www.tensorflow.org/tensorboard), or to [Weights & Biases](https://docs.wandb.com/).\n", + "- Custom loss functions that you define with a function of the form `f(outputs, labels, weights)`.\n", + "- Early stopping using the `ValidationCallback` class.\n", + "- Loading parameters from pre-trained models.\n", + "- Estimating uncertainty in model outputs.\n", + "- Identifying important features through saliency mapping.\n", + "\n", + "By wrapping your own models in a `KerasModel` or `TorchModel`, you get immediate access to all these features. See the API documentation for full details on them.\n", + "\n", + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/tutorials/05_Putting_Multitask_Learning_to_Work.ipynb b/examples/tutorials/05_Putting_Multitask_Learning_to_Work.ipynb deleted file mode 100644 index 1f602c90792ebcc8b22faf7875a5f60b32fe7e4a..0000000000000000000000000000000000000000 --- a/examples/tutorials/05_Putting_Multitask_Learning_to_Work.ipynb +++ /dev/null @@ -1,640 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "05_Putting_Multitask_Learning_to_Work.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "ElXOa7R7g37i", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 5: Putting Multitask Learning to Work\n", - "\n", - "This notebook walks through the creation of multitask models on MUV [1]. The goal is to demonstrate that multitask methods outperform singletask methods on MUV.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/05_Putting_Multitask_Learning_to_Work.ipynb)\n", - "\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Fc_4bSWJg37l", - "colab_type": "code", - "outputId": "d6d577c7-aa9e-4db1-8bb2-6269f2817012", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 14733 0 --:--:-- --:--:-- --:--:-- 14733\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 3.1 s, sys: 736 ms, total: 3.84 s\n", - "Wall time: 2min 19s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "9Ow2nQtZg37p", - "colab_type": "text" - }, - "source": [ - "The MUV dataset is a challenging benchmark in molecular design that consists of 17 different \"targets\" where there are only a few \"active\" compounds per target. The goal of working with this dataset is to make a machine learnign model which achieves high accuracy on held-out compounds at predicting activity. To get started, let's download the MUV dataset for us to play with." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "FGi-ZEfSg37q", - "colab_type": "code", - "outputId": "1ac2c36b-66b0-4c57-bf4b-114a7425b85e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 85 - } - }, - "source": [ - "import os\n", - "import deepchem as dc\n", - "\n", - "current_dir = os.path.dirname(os.path.realpath(\"__file__\"))\n", - "dataset_file = \"medium_muv.csv.gz\"\n", - "full_dataset_file = \"muv.csv.gz\"\n", - "\n", - "# We use a small version of MUV to make online rendering of notebooks easy. Replace with full_dataset_file\n", - "# In order to run the full version of this notebook\n", - "dc.utils.download_url(\"https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/%s\" % dataset_file,\n", - " current_dir)\n", - "\n", - "dataset = dc.utils.save.load_from_disk(dataset_file)\n", - "print(\"Columns of dataset: %s\" % str(dataset.columns.values))\n", - "print(\"Number of examples in dataset: %s\" % str(dataset.shape[0]))" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Columns of dataset: ['MUV-466' 'MUV-548' 'MUV-600' 'MUV-644' 'MUV-652' 'MUV-689' 'MUV-692'\n", - " 'MUV-712' 'MUV-713' 'MUV-733' 'MUV-737' 'MUV-810' 'MUV-832' 'MUV-846'\n", - " 'MUV-852' 'MUV-858' 'MUV-859' 'mol_id' 'smiles']\n", - "Number of examples in dataset: 10000\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "c9t912ODg37u", - "colab_type": "text" - }, - "source": [ - "Now, let's visualize some compounds from our dataset" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "KobfUjlWg37v", - "colab_type": "code", - "outputId": "01025d0f-3fb1-485e-bb93-82f2b3e062f9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "from rdkit import Chem\n", - "from rdkit.Chem import Draw\n", - "from itertools import islice\n", - "from IPython.display import Image, display, HTML\n", - "\n", - "def display_images(filenames):\n", - 
" \"\"\"Helper to pretty-print images.\"\"\"\n", - " for filename in filenames:\n", - " display(Image(filename))\n", - "\n", - "def mols_to_pngs(mols, basename=\"test\"):\n", - " \"\"\"Helper to write RDKit mols to png files.\"\"\"\n", - " filenames = []\n", - " for i, mol in enumerate(mols):\n", - " filename = \"MUV_%s%d.png\" % (basename, i)\n", - " Draw.MolToFile(mol, filename)\n", - " filenames.append(filename)\n", - " return filenames\n", - "\n", - "num_to_display = 12\n", - "molecules = []\n", - "for _, data in islice(dataset.iterrows(), num_to_display):\n", - " molecules.append(Chem.MolFromSmiles(data[\"smiles\"]))\n", - "display_images(mols_to_pngs(molecules))" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVgT1/oH8DeEIJugbNaNVnCDisXSuoGoiGIV0NoHtVq011asbS9uVSzWH1ptpS5Vr14V14qKXmrVgmvBhQIuFYtF7MWtiooFXAAFakKS8/vj9MYYQphMZnKivp+nf/QZJzMnwDeZOXPOeySEEEAIsWPFugEIvegwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhC9Dx4/Pjx1atXp0yZsmPHjlu3brFujnEkhBDWbUDIJCqVasSIEYcPH1YoFHRLy5Ytg4KCAgMDg4KCunXrZmVl0V82GEL0bFOr1dHR0SkpKQBgY2MzaNCg3NzciooKzQ5ubm6BgYF9+vQJCgoKCAiwtrZm11j9MIToGUYImTx5clJSkpOTU3V1NSGkrq5OKpX+8ccfOTk5ubm5GRkZ169f1+xvb2/frVs3+iUZHBzs7OzMsPEaGEL0DIuLi1u8eLGdnd2hQ4ciIyMfPnxYUVHRrFkz7X3u3LmTm5tLM/nrr79q/uClUmmnTp2CgoJCQ0P79evn7u7O4h0AYAjRs2v+/Pnz5s2TyWR79+4dOnToyy+/fPPmzRs3brz88ssNveThw4e//PJLZmZmTk7O2bNnNfeQAODl5UXvIQMDA1999
VWzvIO/YQjRM2nVqlWxsbFSqXTHjh2jRo0CgK5du164cOH8+fOvvfYalyPU1NTk5+fTL8ns7OyqqirNP5m7X4cg9Kz57rvvJBKJRCLZsGGDZmNQUBAAZGVl8TigQqE4efLkkiVLIiIiXFxctAPi4uIyatQo4dquB4YQPWN++OEH2sO5bNky7e3h4eEAkJaWZvoprl27tnXr1piYGC8vLwDw9PQcPny46YdtiAWFsLy8fPPmzfv376+srGTdFmShjhw50qRJEwD46quvdP5p7NixAJCcnCzsGa9du0Z7ei5duiTskTUs5SHmd9995+3tHRsbGx4e3qxZM29v70mTJiUnJxcXF7NuGrIUubm5I0aMkMvlU6dOjY+P1/lX+rxB+9ZOEF5eXhEREQCwf/9+YY/8hEjhNkpKSgq99x08eHBQUBD9qNPw9vYeP378pk2bioqKWLcUMXPmzJmmTZsCwMcff6x3BxrLhQsXCn7qXbt2AUD//v0FPzLFPoRpaWkymQwAFi1aRLfU1dXl5eWtWLEiKipK5y7Zw8MjPDw8MTExOztbLpezbTkym4KCAldXVwB47733VCqV3n2++eYbAJg5c6bgZ6+srLSxsbG2tq6oqBD84IR5CI8ePWprawsA8fHxendQKpWFhYVJSUnR0dGenp7agXRwcAgMDIyLi8vIyKitrTVzy5HZXLlypWXLlgAwbNiwurq6hnZbt24dAEycOFGMNoSEhADArl27xDg4yxCePn3a8AVGfSUlJampqTExMb6+vhKJRBNIa2vrgICA2NjY1NTUe/fuidpsZE63bt165ZVXAGDAgAF//fWXgT137twJACNHjhSjGd9++y39Hhbj4MxCWFBQQC81o6OjG7rAMKy0tDQtLS0uLi4wMJBe0Gp4eXnFxMRs3br1+vXrQjccmU95eXnnzp0BoGfPno8ePTK886FDhwBg0KBBYrTk6tWrAODi4mLgq5g3NiG8fPnySy+9BADDhw/X+6727Nlz7Nixmpoajgesqqo6ePBgfHx8nz596PWtdiDHjx9/9+5dQd8BEl1lZWW3bt0A4LXXXnvw4EGj+588eRIAevToIVJ7OnXqBAA///yz4EdmEMKbN2/S0X2hoaGPHz/Wuw+9AtG+yOSeovr9OjY2NmPHjn348KFwbwKJq6amho6A6dixY2lpKZeXXLx4EQA6d+4sUpNmzJgBAHFxcYIf2dwhLCsro58ovXr1onNP6lMoFFOnTg0ICJBKpZovNIlE0qVLl8mTJ9Op0xxPp1Qq8/PzfXx8AGD37t3CvQ8kIrlcHhYWBgCenp7FxcUcX1VSUgIAL730kkitOn78OAD4+voKfmSzhrCiosLf3x8A/P39uVxgVFdXZ2dnJyYmhoaG2tnZaV9ktmzZMioqasWKFXl5eY3eUtLO6/fff1+g94FEpFAo6AC0Vq1aXbt2jfsLq6urAcDOzk6khimVSvqY5MqVK8Ie2XwhfPjw4ZtvvgkAnTp14niBoU37IpP+LDScnJxCQ0Ppw0O917f0QsXd3Z1fDxAyG5VKNXr0aABwc3O7ePGisS+n/XMN3eOY7t133wWAlStXCntYM4Wwtra2X79+dPhLSUmJiUdTqVQFBQWrV68ePXp069atdR4eDhgwoP5nVfv27QHg1KlTJp4aiUetVsfExNBP1bNnz/I4Av10Li8vF7xt1Pbt2wFg4MCBwh7WHCFUKBRDhw4FgNatW//xxx+CH58+PIyNjQ0ICKAzXO7fv6+zz5QpUwBgzpw5gp8dCWXatGkAYG9vn52dze8I3t7eYlwuaty/f9/a2lomkwk7x0D0ECqVSlMuMIxVVlZ25MiR+tszMjJoZ7fYDUD8yOXyadOmyWSygwcP8j7I66+/DgD8vkU5Cg4OFryTT9wQqtXqiRMnAoCzs3NeXp6o5zJMoVDQUfb4+N4yVVVV2djYSKVSUwY89e/fHwAyMzMFbJiOxYsXC97JJ+5UppkzZ27YsMHe3j49P
T0gIEDUcxkmk8kGDhwIAAcPHmTYDNQQJyen4OBglUp15MgR3gehE/8En82kjfbcHjhwQKVSCXVMEUM4d+7cZcuW2djY7N69u0+fPuKdiCPNj491Q5B+pv+C6MVOZWWlYG2qx8fHp3379nfv3v3ll1+EOqZYIVy5cuXChQulUun27dvfeustkc5ilCFDhkil0qNHj9IHSsjS0Lmzhw4dUiqV/I4g0rxeHbSXUcBPc1FCuGXLlmnTpkkkkvXr10dFRYlxCh7c3d27d+8ul8uPHj3Kui1IDy8vLx8fn4qKitzcXH5HMMPlKPzvGzs9PV2oAwofwh07dnz44YcAsGbNmgkTJgh+fFPQH5+IdQqQaUwsJGGGy1EA6Nu3b7NmzQoKCm7cuCHIAQUOYVpa2vvvv69WqxctWvTRRx8Je3DT0d/xgQMHyLNQbfWZW13IdPRKz8QQiv1NKHgnn5AhPHbs2KhRo5RK5dy5c+Pi4gQ8slD8/PxeeeWVP//889y5c6zbYsiuXbsmTZrk6enZqlWrkSNHrly58ty5c2q1mnW7RBcYGOjq6lpUVHTlyhUeLzdPCEHwSyoBH3ckJSVZW1v/85//FPCYgvvkk08AICEhgXVDGpSenk7HQDo6Omr/ptzc3IYNG7Z06dLTp0+LMbXUQtDKhcuXL+fx2szMTBCzIpPGvXv3pFJpkyZNBJkfJ2QIO3ToAAA5OTkCHlNwdP51QEAA64bod+zYMTop+fPPPydaVWjbtWunHUh7e3taXyctLe05K9NKq1QMGDCAx2vPnj0LAK+//rrgraqvd+/eALB3717TDyVkCOnYP/rXY7EeP37ctGlTiUTCfVKi2RguuqMzRFYTSKlU6uvrGxMTk5qaKt7YZbOprKyUyWQymYxHaTN6Eevt7S1Gw3R8/fXXAPDhhx+afighQ0i7/v38/LQ3/vDDD8OGDcvNzRXwRCYaPnw4ACQlJbFuyFM0RXcMVPXTqKqqysjIoPV1bGxstL8kvby8oqOjk5KSCgsLzdNywdEJN//5z3+MfWF5eTkAuLq6itEqHQUFBQDQsmVLtVpt4qGEDKFCoaAParSnSsycORPEqQbJ28aNGwEgIiKCdUOeuHLliuGiOwZopj6Hh4frrHpp1NRny7F06VIAGDdunLEvlMvlAGBtbS1Gq+qj9wi//PKLiccReAD3yJEjAWDVqlWaLSdOnAAAHx8fYU9kirKyMisrKzs7O+6FpER18+ZNWlPHQNEdjhQKxalTp5YsWRIZGakz9dnFxSUiImLJkiWnT58WquUiKSoqol9oSqXS2NfSCgwNVU4RVlBQkJub27fffmvicQQOYXJyMgCEhYVptohXFMAUdI7//v37WTeElJWV0ap+vXr1arSqn7F0VheiunbteuLECWFPJLiOHTvy6+SjFxSmTxxv1Jo1a+iduc79Fw8Ch/D+/fv1u27HjBkDACtWrBD2XKaYP38+AHz00Udsm1FRUWFUVT9T3LhxY9u2be+9955EImnevLmFP+SYPn06AMyePdvYF9JPtN9//12MVmls27bNyspKIpHY2tqa3skn/HzCwMBAANizZ49mS0pKCr3WEvxcvP36668A0KpVK9Pvqnl7+JCEhr4FAL6+vuYsi+rr6wsAx48fN9sZeaCdfF26dDH2hT169ACAkydPitEqat++fXSBxG+++ebtt982vZNP+BAuWrQIACZMmKDZYkqns0jUanWbNm0AID8/n0kDamtJv37Ey+vCm28GmuHaSRsdzDRjxgxzntRYejv5uBg0aBAAHDp0SKSGZWZm0ge5dLzHpk2bTO/kEz6EhYWFAODh4aHdHde3b18ASE1NFfx0vE2aNAkAvvzyS/OfWqEg4eEEgLRuTUSoudOI7OxsAOjUqZO5T2wkuhK9dicfF7RrcOfOnWI06dSpU3QYk2ZYmCCdfMLPonj11Ve9vb3Ly8vz8vI0Gy1w+gKrOb4qFYwbB/v3g5sb/PQTPD0Sxhx69erl5
uZ26dKly5cvm/vcxuD3NyPe8NHffvttyJAh1dXV48ePX7lyJd3o4eHxxhtv/PXXX8eOHeN9ZFHmEw4ZMgSe/vGJURTARAMGDLC3tz979mxpaanZTkoITJ4Mu3aBszMcPgy+vmY78xNSqXTw4MFgYZ+J9Q0ZMsTa2vrEiROPHj3i/iqRQnj58uWwsLCKiooRI0Zs3LhRe8SS6Z/mooSw/mdY586dO3TocP/+/TNnzohxRsPUavXu3bt1NtrZ2YWEhKjVanNWnZk1CzZsAHt7SE8HhjV3LPDCpD4XF5eePXvK5XJaKY8jMUJ48+bNgQMHlpWVDRo0KCUlhfbKaNAfZlpaGuE9P06Yi+WnyeVyOgbyxo0bmo2sRpZqSsrWLzoaHx9vZWXl6OhonmElc+cSAGJjQ0wo6icMsZeeFUpiYiI83cnXqFWrVoExK142qqSkhD5lDQwM1DsGQNPJ9+uvv/I7hVglD0eMGAEAa9eu1WzRO7LUDGbNmgUAdnZ2Ok+oCwsLXV1dtdecAQBXV9fIyMilS5eeOnVKoVAI2IwVKwgAkUqJhXROibr0rFD0dvIZRoeLjB07VpAG3L17lz7R6d69u4FZS3T+Ou9OPrFCuHnzZgAYOnSoZgvvTmdTzJs3DwBkMhmdTa9x9erVVq1aAUBYWFhRUVGj04VM/MbYvJlIJEQiIZs2mfZ+hCPq0rMCousXcB9ql5aWRi8RTT91VVXVG2+8Qb85DFdDpfVmunfvzu9EYoVQb9dt/ZGlovrXv/4FAFKpVOfz/vbt2zRvISEhOiswX79+PTk5OSYmxsfHR2c57ujoC9Onk337iLHP1b//nkilRCIha9aY/p4EI+rSswKKjY0FgC+++ILj/llZWQAQFBRk4nlrampose327dvfuXPH8M61tbX29vZWVlZ//vknj3OJWIG7Z8+emhtWqv7IUvF89913dF2KDRs2aG/XXGD06NHD8LRo7elCTZo08fJ6DEDof15eJDqaJCWRRmcLqdUkMpIAkG++Mf09CUy8pWcF9NNPPwGAv78/x/3Pnz9v+l2PXC6ndTrbtGnDsWQ7LV+0ideljoghXLBgAQDExMRotugdWSqGH374gXZhLVu2THt7ZWUlLQTu5+dXf9EYA6qrqzMy1AkJJCSE2NsTTRoBSNu2ZOxYsnYtKSwkdAxcfj6RSMilS4QQEhdHdu4k4jw6NtX8+av79ftywQKuq3AyobeTzwBaAc3T05P3GZVKJa3T6e7u/t///pfjq5KSkgDg7bff5nFGEUNIP5N0Zj3WH1kquCNHjjRp0gQAvvrqK+3tNTU1tBB4hw4d+F02UAoFOXWKLFlCIiOJq+tTgXR1JR9+SPLzSbt2hA4Oj4sj339v4hsSy/HjBICIsPKswPr06SORSPz8/L7++uuGlqDUqKioAAAnJyd+51Kr1R988AEAODs7G9XbWVJSIpFIHBwcdG5wuBB3QRi6Nv25c+c0W+qPLBVWTk6Og4MDAEydOlV7u+YCo23bthw/Uzm6do1s3UpiYki7dgSAjBpF8vPJ6NGkZ09y755Fh1Cp/PtDxJImmek6duyYTCazsnryQNva2jogICA2NjY1NbX+5YxKpaLzG/g9bTJleTa6JhSPYavihnDy5MkAMG/ePM0WHp3O3J05c0ZvjRbNBYaHhwf3CwwebtwgRUUkP5+MGkW2bCELFlh0CAkh775LAIjQK88KRvMLlUgkNjY2MTExfn5+2oGUSqXdunWLjY39/vvvNVc3Tk5OAMCjQ3v27NkAYGNjc/jwYR6t/b//+z8A+PTTT419obghpGN53njjDe2N7dq1c3R0FHyOb0M1WtRqNS0E3qxZM96PU41CQyiXk27dyLRpFh3CHTsIABF65VlhaP9CaSffjz/+SAh5+PBhRkZGQkJCaGgondCgQct5NG/eHIxfA++rr76iT7PS09P5NZguEcPjdlTcED5+/NjR0VEikdy+fVuzsaioSPBVxa9cudKyZUsAG
DZsmHafu1qtpg9SmzZtanotEI5oCAkhCxaQ9u0tOoQPHhBrayKTEUsrm6jzC124cCEATJw4UWc3hUKRl5dH6+vQ7GmPuwgPD09MTMzOzm503MW///1vALCyskpJSeHdZrVaTRdvLygoMOqFoq/UGxkZCQA6zwmEdevWLU2NFp3bYjp3zs7OzpxzWDUhvHeP2NtbdAgJIcHBBMCyGln/F6q3k0+HUqksLCxMSkry8PCApzk6OoaGhiYkJGRkZNTvOElOTqa3kaYX4KOrsHz99ddGvUr0EK5fv55+nol0/PLy8oZqtHz55Zf0AoNVLZlp00j79kTkshWmWryYABBBV541SUNFd2gsuaz3TBe0WL9+PR0I5evrqzPuQrtfZ+/evfRp1uLFi01v/N69ewGgd+/eRr1K9BDeuXNHIpHY29vX1tYKfvDKysqGarTQgbxSqVSk+Z1chIYSAGLCBY45/P47ASDu7sT4ymbCM/AL/fjjj3U6+RpCC+knJydrtpSUlOzcufPTTz/t2rWrTr8OrdrK5bBcVFdX29raWllZlZWVcX+V6CEkhNABeAeFnjtQU1MTFBQEAB07diwtLdX+p61bt9ILjPXr1wt7UqMsX04AyJgxDJvASYcOBICIWZaFEwO/UEIInXGm08mnF41rQ6MjHz16pN2vM3369Pj4eAFa/z/0SdjWrVu5v8QcIZwxYwYAdOrUad26dYWFhULVViovL/f39/fy8tLu9SGE7Nmzh15gLF26VJAT8XbtGgEgzZsTyx6eSaZOJQBE0D9Fo9XW1vbv35/2LhYX6xnEo7eTT6/PP/8cABYuXMjlpIJfoNE+nqioKO4vET2E9+7d69ixI33aQzk5OWnukk3sJq2oqNB58v7TTz/R4TILFiwwreHC8PEhACQri3U7DMrMJACka1dmDVAoFHRqbOvWra9du9bQbsOGDaM3e4aPRmchsir6fvPmTYlE4uTkJJfLOb5E3BBWVVXRMrve3t6JiYljxoyh0x81HBwcQkJCEhISMjMzTa+anJubS4fLTJkyRZD2m27mTAJALGkRAD3kcuLkRKytiTE3MoJRqVSjR48GADc3t4sXLxrYc8OGDVw6+datW6f3eYbZdO3aFQAyMjI47i9iCGtra+nKHt7e3tpV/URaXSg/P5/OV/zHP/7BsJqojqwsAkAsaREA/bKz2fTiauoeODk5nT171vDOmk4+w6XN6OJqI0eOFLSlRpgzZ45R3wRihVChUNCe4tatWxuYxaszXUj7S9Ko1YUuXLhAi+2PHTvWolY+0YzPvHyZdVMs0meffQYA9vb2Wdwu2Wknn84UbR10CUrzzJjT6+TJk/QPmOP+ooRQqVTSopHu7u6GLzC01dTU8Ftd6OrVq3R0RWRkpAVOUR0zhgAQXivPmo/O9CvzPLungy1tbGy495zTUgmTJ082sA/NQI8ePYRoIx8qlcrNzQ04V+MXPoRqtXrixIkA4OzszOXRql51dXV5eXkrVqyIioqi70ejadOm2v06t27dotPkBwwYwGMWiRmkpBAAwmvlWfMx//QrWrpTKpUaVRKaFrNt27atgTuOixcvAtPqxqdOnbK1tY2MjOT4Byl8COkDCXt7e6GmbKvV6sLCwrVr144dO9bT01M7kPb29vQqtHfv3uZZDYuHykoikxGZjFhyZTOd6VfTp5M+fcicOeTgQVJVJfzptmzZQusebNy40agXakqbnT9/vqF97t+/Hx0d3bx5cyYV33/77Tc67nzcuHEc74wEDuEXX3xBLzDEWwxAu1/HysqqTZs27u7uFl66r29fAmApddb00pl+NXDgk5nKUinx9SUxMWTrVqLvAZ7Rdu/eTYvc8VvZj3bkGH4MSNeylkqlW7Zs4dlKXi5fvkzXZnv77be53xkJGcIVK1bQd/69uYYDFxcX29jYSKVSw8WwmFuyhDg4VH/22bnGdzU7+rBaZ/pVcjLJyCAJCSQ0lNjaPlU9oGVLEhX1d30dHp3Qa
WlpMpkMABYtWsSvwbSeWs+ePQ3vRp8WSiQS0xfx5OjmzZt0FvvAgQONegAuWAg3b95MLzA2b94s1DG5oKvwbN++3ZwnNdalS7dtbW3d3d15LD0rqqoqEhBA4uIMTb+qrSUnTpAFC0hYGGnaVDeQEyZ8snLlyvz8fC5v7ejRo3QGYP1CzNxxL222evVqOlI0Li6O9+k4Kisro4WzeNwZCRPC7du307Ga2tV+zYPe348ePdrM5zVWhw4dACA3N5d1Q56oriaBgQSAdOxIsrM5Tb9SKklhIUlKItHRpG1b4uHxp+b+XHu6kN6xYKdPn6YDpz755BMTW06ffnH5uN+2bRsdwzhr1izxnh7fvXv31VdfBQB/f38ed0YChPDHH3+k7zMxMdH0oxmLVtdydnYWtmC24FitAtAQuZy89RYBIG3aECPnoD9x6VLlxo0bx48f7+3trd1h1qRJk6CgoPj4+AMHDlRVVRGtafLR0dGmP8hdu3YtALzzzjtcdt63bx99BP3RRx+J8QxZUyO4S5cu/G6LTA2hZs3EuXPnmngo3uiH0DOx9Kz5VwHQS6kkUVEEgHh4EKFq7pSUlOzatUvvdKEuXbrQui9hYWELFizQLkXL+1wSicTR0ZHjrdeBAwfs7OwAYMyYMcJ+WNfW1tK1N729vRutEdwQk0KoWTORR3EbAT3fS88KTq0mH3xAAEizZkSkmjs604Vob2FoaCj9BouMjDT9FP7+/gBw5MgRjvtnZWXRD4KIiAihnicrFAq6CmCbNm1M+bXyD+Fvv/1Gq3pwfx4iErr0bPv27Rm2gQszrwLQkGnTCABxcCA5OeKeKC8vb/jw4TNmzMjKyrpz505dXR0d/GniurbU3LlzQWvFXC7Onj1LB37079/f9PLT2sPCOI6MaQjPEF6+fLlFixbGPg8RiVKppD/cS3TYlaUy5yoADZk9mwAQOztihot3vdXH6Kwaw4M/uaALXb788stGverixYt0IaDu3bsbVYJdh1qtpuVknJ2dtcvq8sMnhMXFxfyeh4jnvffeg3pF7y2N2VYBaMjChQSAyGSEb1E/46jVavoXf+HCBc3G+fPnNzr4kwuVSkUfi3MZ36/tjz/+oOsNduvWjcdkHWr69OnAt0ZwfUaHsLS0lPfzEPHs2rWLXmawbkgjzLAKQENWryYAxMrKrAtj0JLy2tXHzp071+jgT45oOVkeffLFxcUdO3YEgM6dO9+6dcvYl8fHx4MJNYLrMy6EmuchhtdMNL9nZelZsVcBaMiOHYeaN1dLJMTIoZqmql99TDP4Mz8/38SD79mzB/iuglZaWvraa6/RC1qj6lAvX75c8GFhRoTQ9Ochonpel541HR2r2anTqFWrzP0oVW/1MTr40/T6I5qDZ2Zm8hiKVFFR0atXLwB46aWXOJbr1dQI3rFjh/HtbRDXENbW1nJfM5GJZcuWwbOw9Cx9rr1+/Xrz3E5riu7Mnz/fDKerr371MTr4U5D5flOmTOnSpQsAODg40GWVGxqvo1d1dfXAgQMBwMXFpdHFgLdt20aHha1bt87khj+FUwhVKlVYWBiIsKSRgJ6VpWcTExPpB7Dh1YUEoSm6ExsbK8bxuVi9ejU8XX3MxHVtdcyePZsOCdSwsbHp3bt3XFxcenr6g8aKdjx+/Hj48OG0n9NAL8u+ffvosLBvRFjtles34Zo1a1q0aGHi8xCx0R4jQTqsRLVq1aqGVhdKTU0V5E+TaBXdef/99xkW3SkuLq5ffYzWVhNwrH9paWlaWhqtk0JnaWh4eXnFxMRs3fWIAlwAAAYOSURBVLq1oSVi6urqxo0bR3s79fa1ZGRk0CEHCQkJQjVYm3H3hGK0QEB0PrEZhswLotHVhWh9HX75KSoqoksyvPPOO8ynbtDqY5mZmZotdOjMiBEjxDid9ngdOlpN5wdL66Ro/2CVSiV97mdjY6PT43Ly5Ek6LMyogQFGMUfxX7M5fvw4APha/tqz9RhYXahFixbcVxeir
l69Sh/QRUZGWsK4dtqnr71s661btyQSSXNnZ4XIN8badVLoCHINDw8PzQ9WLper1Wr6IS6VSjVf0efPn6e/jvHjx4t3NfFchVCpVNIftOCLH5qTZnWh6Ojotm3bav/dNDpdiBBy+/ZtWnQnJCTEQoru6K0+dvHdd1VuboTz4E/TqVQqzQ+WjjbR0PTr0O9DiUSyfPnyS5cu0WFhI0aMELWj4bkKISGElpFdabFrzxrv2rVrXFYXojuXl5f7+vrSvkedNaoYUqlU7u7uwW3b3i8qerJ17lwCQES7xmvU1atXt2zZMmHCBPrgXvv+nP4Pnf04aNAgsfuxn7cQbt++HQAGWubasyYzMF3I399/0qRJtGuqa9euInW38lb96acEgGgvP3bmDAEgRg7+FElZWZmmX4eu0wQArq6ugYGBZhgWJiGEwHPkwYMHLVq0sLKyunfvnvYCGM+f6urq06dP5+Tk5Obm5uTkPH78GAA6d+6sUql+/vlnOq7SguzeDVFREBwMWVl/b1GroXVrKC2FCxegSxemjXvKo0ePcnNzf/zxx1mzZrm5uZnjr0jslJsfHVSwe/du1g0xn7/++isrK2vhwoUFBQWWOZSCPHpEmjQhUinRHms1YQIBIHwrPj03rBpN6TOHFiDZv38/64aYj62tbXBw8Jw5c/z8/Ggxcovj6AjBwaBSwZEjTzaGhwMAvEi/Kb2ewxBGREQAwIEDB9RqNeu2IC1DhwI8HblBg8DWFk6fhnv3WDXKEjyHIfTx8Wnfvv3du3fppFJkKSIiAAAOHoS6ur+3ODhAv36gUsGhQwzbxdxzGEJ4Ia9InwFeXuDrC1VVcPLkk431vx5fPM9nCOnQxPT0dNYNQU+rfxMYGQkAcPgwKBRsmmQBns8Q9u3bt1mzZgUFBbQqKbIU9UPo6Ql+fvDwIeTksGoUc89nCGUyWVhYWGBg4IMHD1i3BWnp3RtcXaGoCK5cebLxhe8jfT5DCAApKSk5OTmvv/4664YgLVIpDB4M8HTkwsPBxQX+N07lBfS8jZhBlm7XLnj3XQgJgaNH/96iVgMh8L8Rmy8ga9YNQC+YwYNh0CAYNuzJFqvn9nKMI/wmROxs2gRLlgAhMGYMJCSwbg0zGELEyPXrEBwM+fng6Ai9esG330L//qzbxMaLfiWAmDl2DMLCwM0NbG1h1KinxpS+YDCEiJEHD8DV9e//d3V9kYePYggRI+7ucPfu3/9/9y5Y5uQPs8AQIkZCQuDIESgvB7kcUlNhyBDWDWIGH1EgRjw9YeFCCA4GQmD8eOjVi3WDmMHeUYQYw8tRhBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWLs/wFFO9mssTqu9wAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": 
"display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAfVUlEQVR4nO3de1hU1foH8HcYEFQEAkTFW3nX1FTwlve8paFmhpo8pKRNHk9qap7xlIY9nlOodcIiFa2TSKlhadnJTnlLPVopiuUNxQsgogIGaFwH5v39sXD/RkSc294L7ft5+qM9zOy18OE7e+2110XHzAQA8rjIrgDAnx1CCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCCZWiEsLS29cuWKyWRS6fwADwy1QtirV6/AwMDjx4+rdH6AB4ZaIfT39yeinJwclc4P8MBQN4TZ2dkqnR/ggaFWCOvXr0+4EgJYwVWl8/r5+RFCeP+7ceNGfHx8Zmamr69vRESEr6+v7Bo9gNQKIa6E97vk5OR169bFxsbm5eXp9fry8vKYmJiEhITu3bvLrtqDBh0zcJvS0tLNmzcPHTq0Q4cOS5cuzcvL69OnT0xMTK9evVJTU/v167dixQrZdXzgsDp2795NRAMGDFDp/PeN69d54kSeOZMjIrisTHZtqnP58uWoqKgmTZqIPwwvLy+DwXD8+HHxU5PJZDQaxY8mTZp08+ZNubV9kKgVQvGE8NFHH1Xp/PeN5cv588+Zmd98k7/9VnZtqmA2m3fs2BEaGurqWnFv0r59++jo6CpjtnXrVm9vbyJq27atkk9wkFohvHLlChE1aNBApfPfN2bP5kOHmJnj4nj1atm1uU1eXl5sbGyHDh1E9mrVqhUaGrpjx47qP3XmzJlOnToRkaen54YNG9St4v3TjnCEWiEsLS0dOHDboEE/m80qlXCfeO89Fn+pixfzDz/Irk2FI0eOGAyGunXrivgFBgZGRkZeu3bN8j0lJSUJCQlDhgw5fPhwpY8XFha+8MIL4rMGg6GkpMT5VSwv5+Tkmt+OcAq1QsjMPj5MxNevq1dCjbd0Ka9ezRMm8MyZbDCw7C+k4uJikSuRHxcXlyFDhiQkJJhMJsu3paenL1y4sGHDhkrMqjxbXFxc7dq1iSgoKOjChQtOq2VeHsfGcocO7OPDM2bc1o64dMlppdQkKoawdWsm4jNn1CuhZisoYJ2O3d25vJyZOSWFO3TgF1+UUpdz584ZjUbRZU1E3t7eBoPh1KlTld62f/9+y5vDdu3a3e3mUDhy5EiLFi2IyM/Pb/v27Y7W8tAhnjKFPTyYiIn4kUd4/vz/b0ds2sSNGvHYsZyX52hBNYyKIezdm4n4wAH1SqjZTpxgIm7btuLwu++YiAcP1r4iM2bMcHGpeBbVo0ePTz75pKioyPIN+fn5sbGxjz76qE03h8pnn3nmGSLS6XRGo7HMjju34mJOSOA+fSqy
5+LCQ4ZwQgKXlXFubsU9ocHAP/zA9eoxEbdvzydP2lxKDaZiCEeNYiL++mv1SqjZtm1jIn7yyYrDDz9kIu2vhGvXru3evbubm1toaOj//ve/Sj+98+bQaDResrHVZzabo6Oj3dzciGjgwIFXrlyx8oO5KSk8dy77+lbEz9+f//Y3rqZle+YMd+7MRFy7Nn/8sU2VrMlUDGFEBBM9SP9WNoqOZiKeMaPi8NVXmYjfekvjWjz33HNEtGrVKssXrbw5tMmPP/7YqFEjImrcuPGdabdUXl4uHoo08fY2e3oyEQcFcWwsFxTcu5iiIp42rSK04eFcWGh3hatXUFDw0UcfjRgxIiYm5qefflKpFEHFEM6fz0QcFaVeCTXb7NlMxMuXVxyOG8dEvGmTxrXo2bMnEe3fv1955dNPP1VuDn19fefNm5eSkuKUsq5duzZ48GAicnV1jYqKMt/REXXt2rV//vOfzZo1E6XXqVPn9Guv8ZEjNpcUF8e1azMRd+vG5887pfKKM2fOzJkzx8fHR/mGqlWr1ooVK5xbiiUVQ7h0KRPx/PnqlVCzjR7NRPzllxWHXbsyEf/yi8a1CAgIIKLLly8rr4jBTEFBQbGxsQXWXHxsYTKZIiMjxS3omDFjcnNzxeuJiYkGg0H0phJRq1atoqKicnJy7C/p6FFu2ZKJ2Nubt2xxvObi+hwSEqLT6UQlg4KCVq1aNW/ePPHK008/nadOn5CKIfz4YybiiAj1SqjZOnZkIj56tOJQPLHJztayCjdv3iQiDw+PctFDy8zMZrP5qFIrdWzbtu2hhx4SYVu0aFHnzp3Fn7Wrq+vYsWN37Nhx50XSHr//LjoeNvfu/frrr9vTJ8TMzFevXo2KimrevLmopIeHR3h4eFJSkvKGr776SlwY27Rp89tvvzmh5rdTMYRff81EHBKiXgk1m7jbEZeC69eZiOvV07gKv/32mxiGpnG5zJySktKlSxcicnd3F2OnjEZjamqqk4sxm/PfffchHx8ieuKJJyqNN7gncX328PAQ8WvdunVUVNT1qh5tnz17VnyVeHh4fPTRR06qfQUVQ3jwIBNxr17qlVCDXb3KROzrW3F4+DAT8WOPaVyLr776iohGjhypcblCQUFBrVq1iGjdunWqjKq5Ze/evaJPKCAgYNeuXfd8/40bN2JjY5Xrs+iX2rZtW/XX56KiohdffFF8JDw8vNB5fUIqhrCkhNPT1eu+qtnEN1BwcMVhQgIT8dixGtfiX//6FxG9/PLLGpcrZGRkiGBoUFZWVpbo7HV1dY2MjLxbnJKTk41Go2gqE1HDhg2NRmNaWlo1Z167du3Zs2eVw7i4uDp16hBR165dzzupT0jFEMbHs5jJ9PbbfMfwwwfchg0bWj700Nu3BnydX7kyp1u3mwsXalyNmTNnEtG7776rTXH5+fn79u3LzMwUh/v37yeiXlq1hcrKypQ+odGjR//+++/Kj5RxsJadLnFxcaWlpdWfc8+ePS4uLl5eXl8qHWzMSUlJrVq1IiIvL68vvvjC8ZqrG8IRI3j37j9jCJcsWUJERqNRHL700ktE9MEHH2hcjZCQECLaunWrNsWJfte+ffuKw7i4OCJ67rnntCld+Oabb8SFrnnz5ocOHcrMzIyKimratKnIXr169QwGg/WdKzdu3JgwYcKdQ9Xz8/OfffZZMU5o1qxZ9wxz9dRdgXvyZPr4YyIiZsrPV7WomuXixYtE9Mgjj1geimGWWrpw4YJlNbQpTvk1Kx1qIyQkJDExsVu3bmlpaX369GnatOmCBQsuXbrUuXPnVatWZWZmxsbGiqlY1qhXr96mTZtiY2Nr1aq1Zs2avn37pqWlEZGXl1dCQkJ0dLSrq+v7778/ZMgQMXfPPqqE8JdfSKxr4e5OwcF08CB9+y21aEFr1hCzGgVWraTkwsWLYZcvv3bhQmh5uabfAZVSp3EYBGZOTU3Vstwqv3o0/q2JqEWLFgcPHhw3bpyvr69OpxPjYI8d
OzZ9+nRPT087TmgwGA4cOPDwww8fPnw4ODj4+++/JyKdTjd79uxdu3YFBgbu27evS5cuO3futLPGjlxGq7R5M9euzX368Cef8NatXFjIjRrxkCEVI40GDNBu8O31659eufI2M5eUpJWV3dCoVGZmFg+dxEiUsrKyWrVq6XQ6J/anWSMzM5OI/P39NStx0qRJRBQXFycO+/XrR0S7d+/WrAKWPvjgAyKaMmWKs06Yk5Pz5JNP0q2h6sqj16ysrKFDhxKRXq+PjIy0fCRrJSeHMDqaXVyYiKdN40rt5IQEbtCAidjNjWfNYlXXKCkuvlBSkmo2m7KyYlJTp6al/bWsTLv5L6WlpXq9Xq/Xi1sIcTkKDAzUrALCgQMHiKhHjx6aldirVy8i2rdvnzhs3LgxEV28eFGzCliaO3cuEUU5ddik2WyOiorS6/VE9MQTT1y9elW8btknNGrUKMs+IWs4LYQmE0+fzkSs03FkZNXvyc3lWbNYr2cibtyYb31jOlF5fv6O8+dDjxxxTU2d+scfB8vK8pk5O/uj7OzVhYUarYmSkpJCRM2bNxeHe/bsIYvuCs3Ex8cT0YQJEzQrUQyRy8jIYOaioiIXFxc3Nze7B7I46OmnnyaihIQEp5959+7dDRo0IKImTZocPHhQef0///mPWJe1WbNmv9gyPtE594S5uTRsGK1eTXXr0tattHhx1W/z8aEVK+jQIerZky5fpsmTadQoSk11QgXKynKuXl124kTrlJShubmbdTpXna4Wszk9fXpGxt9u3txdXHz+9OmuV68uJVL9rrTSvZCUG0LSvF+koKAgOzvbw8NDPDdPTU01m83NmjUT1w3tVbotX7hw4cqVK8vKyhw/86BBgxITEx9//PGMjIz+/fsvXbpUvP7UU08dO3asZ8+e6enp/fv3t2FtSMe/GM6f5/btmYgbNbL2UURZGb//Pnt7MxEHBNxcvvw9uzt5CwoSU1MNR4/WSUykxEQ6frzllStRJlPlIZqXLs1PTNQlJtL586Fq3x+uXr2aiF544QVxuHDhQiKKvFvzQDVTpkwhojVr1mhTnBgi165dO3G4fft2Iho6dKg2pd/Jy8uLiMQYtLy8PCKqW7euc8asMvOtNSDFg8exY8cqY7uLi4tnzZpFRBMmTLByYUhHQ3jwIAcEMBF36sTVDjyoQmYmP/ccDxgwl4hat279gy3rIBUUFKSlxZ861U1kLzHRJSUlJC9vO/Ndb4tzc79OSvJJTKTjx1sXFv5qW11tIdbnXLJkiTgU3RXr1q1Tr8QqDRgwgIh27typTXFff/01WQyRi4mJobuvT6M2sRORt7e3ODx69CgRdezY0ekFJSQk1KtXj4g6depkeSERY+K+//57a07iUHN0w4aN06cnZWXRU0/RwYN0a5qYtRo1og0b6M03R3fo0CElJWXYsGHjx4+/du1a9Z86d+7cggULmjVr9vnnsYWFR/V6n4CAWR07nmvV6htv7xHVPHTx8Rndvv3h2rUfKylJSU7umZPzkW3VtVpwcPDIkSN/+umnV199NSIi4uzZsySvOarxQ0LpzycsK6PBE8vQ0NAjR4507tx5ypQpYm0B4caNGzaUaN8XgNlsXrx4sU6na9iw8fz5uQ7ee5eWlkZHR4tFFnx8fKKjo++8mzeZTFu2bBk6dKgy8ujJJwfl5HxqNhfbVFZ5eVFqqkFcPy9eDC8vd+aEuj/++GPNmjVdu3YVNRTD8xs0aLB8+fL8/HwnFnRPJSUler3e1dXVwcEc1hNtsHfeeUccjh07log+FwsWam7jxo1ENG7cOHG4fPlyInrllVdUKq6wsNCyoVupe/ye7AlhSUnJ888/T0R6vd6JM47Pnz8/cuRI8efbrVs3pX/pntO97JCTEyduI0+d6lpcfM7xyp89e9ZoNCqbFgUEBBiNxsTExOpnmqvnzJkzRPTII49oVqIYIrfl1vxaMY/pzjVLtfHWW28R0auvvioOZ8yYQUSqzo63dO7c
ObLoHr8nm0N4/fp1cbPh6em5bds2Wz9+T1988YXYDkGv10+cOHHcuHFiOgwRtWvXbsWKFc6a3VxQkHT8eKtffnEbPry33cNwq5yOHRsbqzyXv9tMc7X997//JaLBGi7uJhZrO3bsmDgUq+U7NHfeAWLO0YcffigOxUN2Nf5cq/TDDz8Q0cCBA618v20hPHfuXNu2bYkoMDDwiB1Lg1inoKAgMjLSzc1NTGd2cXEJCQlx2nRsC2VleTExs+jWGAibljm6cuXKnddn5U+wEmWmeevWrX/9VcU+IcXKlSuJaNq0aZYvTp061XKohxOZzWYxwUe0usVuXPU0n8SsEA2Q7777Thy2adOGiE6cOKFN6ZW6x+/JhhAeOHBA7DrYuXPn9PR0u6png7/+9a9E1KtXL8v1UZzOcrm+/v37W1NWYmJieHi4chfepk2bu03HtpSamip29vPw8FD7sUFpaeno0aOJ6LXXXlNePHbsmHhkN3z48Gxnr7Ihhi/7+fmJw0OHDhFRly5dnFuK9USPyJkzZ5i5vLxczO7XbCepSt3j92RtCDdt2iS6GUaMGHHjhirP2T777LO9e/cq97LDhg0jdUY83Gn//v2BgYFEVL9+/bsteltpOrZer7f1+lxYWDh16lTxcaPx09sX4HUOcX0WM3caNWrk6+trOdN8z549YnH7Jk2aHHDqqsxiiFz37t3F4eeff05EzzzzjBOLsJ7JZHJzc3NxcSkuLmbmS5cuEVHDhg01q8D48eOJ6LPPPrPy/daGUHR2zZkzR43GDDOXl5eLxqe4xppMJvH4RZkhqrasrCwR+zuH4Z4+fdpoNCpr4FkzHbsa69evf/jhXgEB5V268DkndAkxM5vN5l27do0bN85yezNxk+bq6vrOO+8o3xQZGRl9+vRR+oqcUzzz8ePHp02bpnz3v/3220Q0b948Z53fJuKBRNOmTcXh3r17iah3796aVSA4OJiILEe0Vc+qEKanp+v1+tq1a1e6BjrxJi0pKcmyN+/w4cPiDspZ57eG5TDckJCQq1evVpqO3adPn4SEBMc7/X/91SQ26vDx4a++cuhUYgX7jh07ihoqK9ibzea7zTS3HOqh0jJ+BoOBiGJiYpx+ZmuIKUX9+/cXh5988gkRhYWFaVYB0UmuDO++J2uvhKJHdOXKleLw3Llz48ePDw8Pt6eOVRED7SZPniwOxeIoU6dOddb5racMwxWXYiLy9vaeNWvW6dOnnVjKjRs8fnzFePdZsyrPOLFGUhK//PIcZQX7pk2bLlmy5M4l6CvNNFdeV3UZP7HcixO2iLHLmjVryGIS0xtvvEFEixYt0qb0/Px8IqpTp471lyhrQyha+R06dBCnzszMdHNzc3Nzc1aviVgsQFlMTrR+tR/qJVy8eHHgwIHTpk2rZs9ap4iN5Vq1mIj79WMr/yFLSjghoWJ+Zr9+LynX52q6dtPS0nr06CH6hKKjo5XXlWX8ateu7dxl/Fq2bElEycnJTjyn9f7+978T0ZtvvikOw8PDiejf//63NqWLNp1NQ+SsDaHJZBLTw5TZYuPGjbOpC6h6Yui96M4ym81iUoyzVrOyj6qr9CkOHeLmzZmI69e/xyaiFy/yggVcv37F9OiHHuI33kg9Y93Wc8qoYiIKDw9XFt4uKiqaNm2a8rrj047FDjOenp7+/v5FanQ9WWHixIlEtH79enEo7oH37NmjTelffvklEY0aNcr6j9jwiEJc1pV1e0TLu2nTpo7sIiKI4R3K3tonT54UnXsOnvZ+ce1axZVNr+e33qq8lWh5Oe/YwaGh7OpaEb9u3Tg2lv/4w+aC4uPjRfO1ffv2lpsTKsv4devWzb4vvqKiori4OLHvBRG5uLhI3NFeXPaVTWnE97vdHWm2EkPkZs+ebf1HbAjh5cuXKzVBRf+b44t5ffbZnkaNmjz77LPiUDzrnDhxooOnvY+YzRwVxS4uPH78ba/HxHCLFhXZ8/Dg55/nn392qKDTp0+LTerr
1atn+fgnKSlJtCErLe93TxkZGZGRkeIBMt3afvSk1P0DRWVEv3pRUZFOp9NybrEYImfZ7L8n20bMiO0g//GPf4jD6OhocsacscmTmYg//LCijTR9+jyyGHP057FzJ69de9tirWKxghYtOCqKs7KcU0qlZfyUzt7c3NwxY8aIIN1z7IGyvZkyZ1elHWZsJaYveHh4iM6LkpKSbdu2fazhBn0jRowgG4fI2RbCHTt2WDZB8/Ly6tatq9PpHLwFF1/2ypDsZs3Y3//aiRMaDbOsUSot1nr2LG/fzmo8mhXL+BFR3759laaN2WxeunTp5s2bq/lgbm5udHS0MkfJ3d29yu1HZfn1119FD6KsCohxnTa1xm0LodlsFmUoTVDxRGjOnDk2ncdSRgYTsZcXi/ZCampFr4M6gwJquvh43rSJw8K0WDH58OHDDz/8MBH5+/tbM/200vZmLVu2jIqKcvoIOAdt3bpV9BhLKd1sNouBZTb1qNs8i0I0QYcNGyYOjx07RkQ+Pj5/2NFRwMzMGzcyEY8YUXEYH89EbEvf0gMlPp63buX33uNRo7RYtjw7O3v48OFU7XJ9otNFmSSpbOsrawWn6l27dm3MmDEuLi72rT7oIDFETulitJLNIVSaoErn+OOPP275iM9WM2bcto20wcBEvGyZfSe774kQisVatZmLJ5bxEwNrRo4caXk3KCZJ+vn5ifiJSZKyljC03qJFi+xefdBWlc6/b98+sn2InD2TesVkrblz54pDsbSe3UPmO3ViIlbuKdq1YyJH+wDBVsoyfk2bNj1w4ED1kyRrvp07d4pHzc2aNftZhT8mZYeZwMBAy2GM69atI6JJkybZdDZ7Qqg0QUVXWElJifiF7fhtb95kd3f28ODiYmbm7GzW6bhOHdbkOXlNFB/Pa9cyM8+bp/UuOunp6WL1XuWuz9PT86WXXrrbJMkaTvl13N3dbXpgcM/TLly4UExGET3Jlv8+kZGRRLTQxu237Fxjpnfv3kSk9PwuWLCALEZ+2qSggBMTK/7/yy+ZiDWcDl7jSAwhM5eUlMycOXPx4sVWTpKs4cRQdZGWsLAwu7sthP3794eGhirzVNq1a3fnkMawsDDLXFjJzhCuX7+eiLp27SoO09LS9Hq9u7u7rfsVV/LKK0zEixc7co77W3w8DxnCs2dzt27S9pMzm81aLoejti1btoi1Ntq1a2fH5HoxT0WMSyGLeSqV3ia2H61Tp07btm2VoZ1WsjOExcXFogmqLMck1vlZunSpfScUgoKYiK3Y8PiBJfdK+KBKTk4Ws708PT03btxo5afEOFhlnkpgYKDRaBSL/CtKS0s3bdok5hgJdvRQ2r/4r7jQK01QseKyI5Opb95kV1d2c2PZgy5kQghVUlhYGBERoYwTqmZ0fnFxseh0EW/W6XTikUylMdIObj9qyf4QpqamiiZoVlYWM5eXl1tOV7ODycT79rFTp9QA3CY2NlasNxMcHHzhwoU732A2m8WqUETk6+s7b948sb+dJXFzqCwyVOXNoU0cWgZfNEGXOeOh3p95g3vQ0pEjR8SYOz8/P2U5Nktz584Vj2QqdeTc7ebQ8ftnh0L47bffElGLFi1sGppgMvGVK3z8OP/4I2/ezCtX8t69f+oN7kFjOTk5YplpsdRlpaE/d7ZUq7w5vHTpkrPqo2MHNrBm5rZt26akpGzfvl0MHs/Nzc3Ozs7Jybl+/XpOTk5OTk5WVpZe3/PUqWdzcuj6dcrKory8yud5+WXq2ZPc3Oibb6hjRxoyhIKD7a4UwL0x87Jly15//fXy8vJBgwZt3LhRjFWwJGZgrFmzRkyd1el0gwcPNhgMY8eOVR5UOK02jli2bBkReXl5NWzY8G41e+yxmWJGnPhPr+cGDfjRR7l/f37mGZ4+nTdt0nrMJAAz//jjj1WuAanxJEmHroRElJOTs3HjRmXpBG9v74CAAH9/fz8/P39/f39///r16wcGdvDyCvH3Jz8/ql+fbu3X
cJtPPyVPTxo+nFq2pG3bcCUEjVy+fHnChAkHDhxwdXVdsmRJUFDQ2rVrt2zZUl5eTkRBQUEGgyEsLExpi6rB0RASETOfPHlSpM5ydyiA+4LJZFqwYMF7772n0+nMZjMReXh4TJw48S9/+YtYKUNtTgghwANgy5YtycnJmzdvDgsLi4iIUOaOaAAhBJDMoZ16AcBxCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZAghgGQIIYBkCCGAZP8HC7DHrnnCuoMAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVyU1f4H8O8sCAOyyiaCWyoqhPuKCyYuGBZalPkTlzK01MG85rjl5LW8lJVT11JCU7Tu9aKW4ZKKC4oSGi6UIq4oiCwDSOwww5zfHwefJjZneWYO4vf96tULxpnnHGA+85znnPOcIyCEAEKIHSHrCiD0rMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDFmvhBWVVUdOnQoMzNTrVabrVCEWj5zhPDhw4cffvihl5dXcHDw6NGj/f397927Z4ZyEXoqCAghJjo0IeTkyZObNm06cOBAbW0tAPj6+iqVyry8PBcXl127dk2YMMFERSP0NCEmUFpaGhUV9fzzz9Mi2rRpExoaGh8fTwgpKCiYNGkSAAgEAqlUWlNTY4oKIPQU4TmEt27dkslk
jo6ONH7u7u4ymSwrK0v7ORqNRqFQWFhYAMCoUaOys7P5rQNCTxd+QlhbWxsfHx8cHCwQCGj8BgwYEBMT08yJ7vTp0x4eHgDg4uJy5MgRXqqB0NPI2BAWFxdHRUX17NmTZs/S0jIsLOzKlSvaz0lJSQkPD1+0aFG91+bn59PLQoFAIJPJ1Gq1kZVB6GlkeAivX78ulUptbGxo/Lp27RoZGVlQUMA9obKycseOHQMHDqRPsLW1LS8vr3cQjUYTGRkpEokAYMyYMQ8fPjS4Pqgp8+bNCw8P//PPP1lXBDVO7xDW1tbGxcUFBgZyLU9/f//Y2Fjt89jDhw/lcrmLiwt9goODg1QqzcjIaOqYp06dat++PQC4urrS/hvEo3bt2gGA9ucjalH0DuGQIUNotOzs7KRS6Y0bN7T/NTExMTQ0VCwW0+f0798/Kiqq4Qmwoby8vHHjxgGASCSSy+W1tbX6Vgw1xc7ODgDwTNhi6RfCiIiITp06eXp6RkZGPnr0iHu8srIyJiam0TEJ3anVarlcLhQKAWDs2LG5ubl6vRw1xdraGgB0+ShETOgXQn9/fwA4deoU98jt27dlMpmTk1MzYxJ6OXHihLu7OwB4enqePXvW4OMgDh0NwiHZFku/GTOenp7Z2dn37t3r1KkT/bt26NChoKAAAEaMGLFw4cKpU6fSP7kxsrOzp02bdvbsWbFYvGrVqjVr1tDTIzKMUCgkhGg0Gu4yHulCrVYrlUo6xys/P1+pVObn5+fm5r7++usDBw6kV9q80COENTU1EolEKBRWVlZyV31yuTw7O3vhwoV9+/blq04AoFarP/roo3Xr1mk0muDg4JiYGO5ki/SiVqstLCxEIhHOm6+nsrLy0aNHOTk5Dx8+1P6C+zo/P59Ot6yne/futra2ycnJxp9vKD1CePv27e7du3fu3DkjI4OXsp/o0KFDs2bNKiwsVCgUERER5im0lamqqpJIJFZWVpWVlazrwl5JScnYsWPz8vKUSmVVVVXzTxYKhS4uLi4uLq6uru7u7vRrR0fHTz75JDMzc+XKlR9//DEvtdIjhCdOnAgMDAwICDh16hQvZevi/v373bp1k0gkeXl5EonEbOW2GqWlpXZ2dra2tiUlJazrwtiVK1cCAgKqq6tp/KysrBwdHT08PNq3b+/o6Kj9Nf3C1dWVa/FpS0pKGjVqFCHk5MmTo0ePNr5ijZTRFHr/Eb0aNBsrKyu1Wm1paYkJNAxthTb6ZnrWREZG/vnnnzNnzly3bp2Li4vB76jhw4fLZLL169fPmTPnypUrdATIGHp0eNy/fx8AOnfubGSRT5ScnJyWlkbfPUyS35rYqVQ1Xl65HTuyrghjd+/e3bdvn4WFxbp16zp27GjkZ/ratWsHDRqUkZGxePFi4+umRwjNlodp06b5+PjQzNNCzZD81kqkUllkZbUpKGBdEcY2bNigVqtnzpzZ0YjPo/Pnz7/22ms1NTVisTgmJkYikWzfvn3Pnj1G1q3FnQnVanV2drZQKPTy8jJboa0Z7RR9tpuj+fn5MTExAoFgyZIlAFBWVtZot2fzamtrZ86cuWfPng8//BAAevXq9cknnwDAu+++m5OTY0z1WtyZ8MGDB2q12sPDo02bNvA4hNgcNZxKBQDAU2f6U0qhUFRWVk6ZMqV3794AIJfLvb29jx8/rtdBRCLR9u3bRSLRJ598cvr0aQBYuHDhpEmTCgoKZs+erdd4e306DuqrVCqxWCwSiUw98SIhIQEARowYQb+lt+H//PPPJi20NUtLIwCkVy/W9WCmpKTEwcEBAJKSkgghhYWFbdu2BYBLly4ZcLSVK1cCQJcuXehc3OzsbDpq//XXXxtcQ13PhFlZWfQExdcAZVPqnW/xmtBY9Ez4DDdHN2/eXFxcPGbMmGHDhgHApk2bysrKgoKC+vXrZ8DR1q5dO3jw4IyMDDpw7eHhER0dDQDvv/9+enq6gVXUMax0bHDkyJEGx11HtMG9atUq+i39
0CouLjZ1ufzLySGhoeTNN4lCwbIaFy8SANK/P8s6sFNVVUUXcKCrN5SXl9M77BISEgw+5vXr1+mc+NjYWPrIzJkzAaB///7V1dUGHFDXM6HZuka1LwKVSmVZWZmjo6O9vb2py+Xf1q2wYAFs2wZHjtSdjph4tq8Jd+7c+fDhwz59+owfPx4Atm3bplQqBw8ebMwge8+ePet1yXz99dfdunW7dOnSunXrDDigfiE0Q7NQuzv0qewa/fxz+OEHAICsLKC94a6uUFjIrD7PcAg1Gs1nn30GAMuXLxcIBGq1+osvvgCAFStWGHnkBQsWaHfJtG3bdseOHSKRaP369WfOnNH3aLqG0Gy9lNqn3KdspL62FhYsgKVLYe5cyM4GLy+gaxwrleDszKxWFhbQtSt06MCsAuz8+OOPN2/e7Nq166uvvgoAu3fvvnfvnre390svvWTkkQUCQXR0dLt27Y4dO7Z582YA8Pf3X7p0qUajmTNnTmlpqX6H07HZOmbMGAAw9doTtbW1bdq0EQgElZWVhJANGzYAwOLFi01aKD/KysjkyQSAWFqS//yHEEJyc8nrr5PwcPLNN6wr94waOnQoAGzevJkQotFo6I0+3333HV/H/+mnnwDAysrq6tWrhJCamppBgwYBwNy5c/U6jq4h7NKlCwDcvHlT75rqIzMzEwDat29Pv124cCEAbNy40aSF8uDhQzJgAAEgTk7kzJm6B69fJ8wXB2ghnUMsxMfHA4Crq2tFRQUh5MCBAwDg6elpWN9JU2bNmgVaXTJpaWl0QtyePXt0P4hOIaT3pHEnKNNJTEwEgGHDhtFvJ0+eDAA//fSTSQs11tWrpFMnAkCee46kp9c9eO4ccXYmAwaQsjKWdVu3jtBuwIkTiRnvrK+qqioqKlIqlWYrsaHAwEAAWL9+Pf12xIgRAPDFF1/wW0ppaWm3bt0AYPXq1fQRhUIBAM7Ozjk5OToeRKcQ0gtCDw8P7Qf/9a9/1Vtf1Hi7du0CgGnTptFv6aI1hg2qmsmJE8TBgQCQoUNJfn7dg7GxxMqKAJApUwjblV3Cw8ndu4QQMnMmyckhqamksNCkBWo0mtjY2Oeee278+PHW1tZyubyqqsqkJTbq8uXLAoHA1taWroSUnJwMAE5OTiUlJbyXdfbsWZFIJBQKT58+TQjRaDRBQUEAMGHCBI1Go8sRdAohnaQzZMgQ7pFDhw4BgEQiiY6ONqzqjaI9vMuXL6ff0pGJoqIiHovg0YHvv9fQBL72GuHaCAoFEQgIAJFKCfM149atIydPEkJIUBApLyfe3sTJiSgUJqrYmTNnBg8eTPsaXF1d6Rfe3t7Hjh0zRXHNCA0NBYBly5bRb2mT6oMPPjBRcTKZbOzYsdzSSg8ePLCxsRk/fjxtCT+RTiFMT0+fMmWKjY3NoUOH6COVlZVSqZT+lmfMmFFaWmpY7euZO3cudyVdWFgIAHZ2drwcmV8ajWbNmjUCgeDDQYPI8uWEfuCpVGTePAJARCLy1Ves60gI+XvnUE4OeeEFAlB33ua1fXHjxo3Q0FC6ho2Hh0dUVJRarU5ISPD19aVvkuDg4Hv37vFYYjPu3LkjEoksLS3pNidpaWlCodDa2jqfa6rwTaVSaZ/0srKyLCwsxGKxji1SXTtmpk+fTntmV6xYoVKp6IN79+6lJytvb+/U1FR9q97QqVOnIiMjaV/TpUuXAMDPz8+A42zeTA4cIISQF1+s+3bWLEJI3f+NVF1dHRYWBgAikeivGYMlJSQoiAAQGxvSkme6xsWRjh0JABEKSVgYMfqyraCgQCaTWVpaAoCNjY1MJtNu8tXU1CgUCltbWwAwW+s0PDwcAN5++236LZ3O0nAXBtN57733AGD69Ok6Pl/XENbbSunBgwf08Rs3bvj5+dGO2qioKEOq3IQff/wRACZPnmzAaxuGcOJEkplZF8Jr10henoG1KioqCggIAIC2bdseoGUQUpGZSXx9CQBxdye//Wbgoc2mvJzI
5aRNm7ruXENbp9XV1QqFgn4KC4XCsLCwpnYxyM7Oph9bANC9e3eT7v9z5coVS0tLoVDILUudkJAwadKkZhaA55cBE8T1W3f0/PnzdP6Ks7PzL7/8Qh/UbpqGhYWV8dQfSCc3GPYBtnkzeeklEhFB6Hl082aybx9ZvrwuhCNGEADSvj0JDSUKBUlJIbpdP5O7d+/26tWLDqJcvHiRPpiamtrRyytj+HDSuzcxV4uLB1evkoAA2jqtGT36d3362GjvCx21AoDAwEDtdlBKSkqjY3GnTp3y8fHhWqf379/n4ad47OrVq5GRkf7+/kKhsHPnzj169ODx4HpZu3YtAAQFBen+Er2XwVcqlbTzp95WSjt37qSbw/Ts2fOPP/7Q97D1VFRU0FXx161bZ8DLG54Jf/2VLFpEpkwhhJDgYNK2bd3FEf3P2Zm89BL59FNy7hypqlI1eswLFy64ubkBgK+vL/cGOnjwIP3YmzJxItFakvypERdHvLw2jB5NT2W67Fdx4sSJ/v370yz5+Phw3QSEkKysrPDwcKFQaGVl1egVYMPWqTGjdjU1NfHx8YsWLdKe2CiRSGh7be/evQYf2WCGTRA3ZFcm7abp6NGjuUbI9evX6aCCRCLZunWrAUcmhGRnZ8vlcmdnZwBwc3Pr37+/AYOT2iEsKyOrVpFffyVXr5IOHeqeoFaTq1dJVBQJCyNduvyVRgsLYmvrNmDAAKlUGhsby13KJycn04nz48eP5zZ1iI6OpgsozZo1i98hYLP688/VMhn9Qdzc3GJiYprqWL9+/TrtdaSj3rT3hf7To0ePli1bZmVlRS9Mli1b1syNL9qt0x49ehw9elSv+hYVFcXGxoaFhdG7BClnZ+ewsLDY2NjS0tJNmzYBgKOjozErwRvmq6++AoDBgwfr9SrDt0ZLSEjgdvnkfo8VFRW0h5M2TXXf/0Cj0cTHx7/88st0mzQA6Nu3L+3mDg0NNWZ/mBUriEhEmp91k5FBdu0i77xDXnyxUHu1b4FA0KtXr7lz50ZHRw8dOnTOnDn0nmaNRiOXy+kT5HK5jsNBLVl6ejod3QaAkSNH1utmUyqVUqmUBrVt27ZyuZzrfK+pqYmKiqJ/KYFAEBoaeufOHV1KPHnyJL3PnbZOMzMzm39+RkZGVFRUcHAwXXKB6tq1q1QqTUxM1H6HaDSaF198EQDGjRtnzj+NSqWi52R9p5cYtUkot8tnva2Utm7dSifv9OnT54mbAZWUlERFRXF92dqbyVy7do1+2q1YscKwGt68SSwtiVBIkpN1fUlxcfHhw4dXr14dEBBAz36c8ePHE0KqqqreeOMNWtWYmBjDKqYHM049i4uL8/T0BACxWCyVSouLi8vLyyMjI+mqfmKxODw8XHujnri4uO7du9NfzrBhw86dO6dXcbR1StvzNjY2DVuntbW1KSkpcrl8wIAB3F9BJBL5+/tHRkamc/OTGsjLy6OfC5s2bdL3l2CwnTt30ssxfc8Zxu7UW2+XT25gJC0tzcfHZ86cOc289ubNm9ob3Ldv314mk3H9rtSpU6foJ59hywdMnEgAyLx5BryUEELUavXVq1ejoqLCwsLc3NycnJzeeOONkSNHAoCtrS3XNWVa5p16VlxcHBERQU967dq1c358/0dISIj2m/7ChQujRo2i/+Tt7c3d3mqABw8ecK1Tb2/v+Pj4ioqK+Ph4qVRKm1qUjY0N3RDhkW7X3tzsauN7KHTBTRDfvn27vq/lZ8/6kydPNtzls6SkpNHmqL4b3H/33Xf08y8uLk6vWv3vf3Wd8LzMYaSL3/Tt21cikXTq1OnatWs8HFQX9aaemUVqaqq/v7+1tbVEIhk4cKB2N8P9+/fDwsLo365du3YKhYIbNzbG4cOH6SRMgUBARx2pLl26SKXS48ePG7C40ezZs8GIG971YswEcX5CSAjJy8ujFxXN7PJZXFysUCi4rm0rK6uGG9w3avXq1fTkc/nyZR3rU15eN63622/1+0Ga
kpaWBo8nYek+N5cH2lPP+Hi764jeFCeRSLjLqqKiovfee482TKytrVetWsXvVMyampoNGzbY2tqKxeLevXvL5fKUlBRjLuq42dXcaimmY8wEcd5CSJrd5fOJG9w3T6PR0BaLh4fHE6/gqfffJwBk4EDepknSHeAcHR35OZyO8vJIbCyZMKFu6tm1a+TyZWKWnthHjx4BgIODA/dIbm6ura2tUCgMDQ010dh3cXEx8DpXkZtdbcyiMk/ETRA3bP4mnyGkjh8/TsfTPD09z5w5Qze4p9kTCASBgYH1NrjXUU1NzdixYwHAx8fnies+XbtGLCyIUEjOnzf0x2hAo9HQ+7nMOhpBm9SvvVb3rY8PASBXr5qhZKVSCQDOzs7aD37//fe83zrzxEKNtHz5ctBapNAU6ATxNWvWGPZy/kNICMnKyqJ7+tKBIwCwt7ePiIgw8p7g4uJi2ok6YcKE5q9DXnnlAQB55x1jSmsE7Seo13VkWt9/TwAINwuxRw8CQB5PyDKphw8fgtYN1qazb9++tWvX0mzTQt3d3Xk8PnfD+5tvvsnjYTnGTxA3SQgJISqV6tVXX3VycrK3t1coFHzdZpGRkUFPs2+99VZTz/nvf/8rkUgmTfqS91ugaPeXWe9v3LGDAPw18ZxOLNBtIM5I9CbSjh07mrqgOXPmAMC2bdvI46UVvLy8+C2Cu+HdmF7cphg/QdxUa8KKxeK5c+fu3bt33LhxPO7v2blz54MHDwYEBGzbtq1bt260paGttLR06dKllZWVr7zS9vHYB2/o0FNeXh7Px21GvbXS6MYSZlk6TaVSgVn2VKPbb9EJWLRQ3heY7tWrV2RkZERExLvvvjtixAjak98ouiZAMzv4ZmVlFRcXcwPIWVlZu3fvFovFdJcLw5jwV0zfsvn5+fweduDAgbt37w4JCVm5cqWnp+eMGTO0/3Xt2rXZ2dmDBg2i3dP8MtFP1Jx662ebcf1C7WyYlHbwTJf8RYsWHTly5Jdffpk9e/aRI0e44TEA+Pjjjw8dOpSXl5eXl1deXv7EQxUUFHBbO3322Wc1NTUzZswwZmHOpy+EABAcHLxhw4YlS5a89dZbHTp0oCvBAUBaWtpXX30lEomioqK0Z5/xxcOjY6dO3UpLRbwfuUn1Tn1mDKGJTkrNF2S65AsEgq1bt/r5+R07dmzLli3vvPMO90/37t379ddf6dd0+95mdvD19PTk5s0VFRV99913AoHgH//4h1GV47FxXE9NTY1AIBCLxcbM/GwGvX/KycmJm8lB02i62zf/9S8CQB6vmWAWn39OAMiSJXXf2tsTAGKWTQHoTdV9+/Y1dUEvv/wyAOzfv58QcvnyZZMWSm9Stba21p79k5aWdu7cuZs3b+rbc0HnDwcHBxtZKxOeCS0sLBwcHGhjmu5cw6+NGzdmZmbu378/KCgoOTn52LFjp06dcnV1/ec//8l7WZSbGwCAOVuj9U992Bw1zpQpU8LCwnbt2vV///d/v/76Ky2U3iPalIqKivz8/NzcXKVSqVQqc3Nz8/PzlUplXl7e+fPnAWDZsmVG1sq0l91ubm6PHj3Kz883RQiFQuF//vOfF154ITk5OSgoiO4K8Omnn2rf4cIvunaROftlGF4TtrLmKOff//53YmLixYsXP/roo7Vr11ZWVjbsg6nXGdPUobp06bJp0yY6l9gYpg2hq6trenp6Xl5e8x82BpNIJD/99NOwYcNo28nf35/2F5sIDaE5z4S/tW1bEhDg7uxM70hPHT68jUbTyyz7nJk5hPTsZ4ZC7e3td+zY8cILL2zYsOGjjz7SaDTNP18ikbi4uLi7u7u6ujo7O9MvXFxcXF1dO3TowK0VYAyThxBM3J3o7u6+cePG0NBQoVDYrVs37V4v3pk/hPuVyvUJCR8FBvoAqNXqvqdPi0QitSl/Rg49KTEZojB1oQ4ODiKRyMbGprKy0srKSrvfpWFnTPv27U36poJWEEJCyJdffqlWq0UiUUxM
zMCBA+ni+abAXRMSAmYJwt/eoGa7SKNaa3O0rKxs2rRpKpUqJCRky5Yt3E3kDPHfj6+NhpBOCDSRXbt2JSQkuLm5ffvttwKBYPHixT///LOJyrKyAjs7qK6GkhITlVBfw06L1hpCszVH58+fn56e7ufnR0ezTFeQ7swRQtNNMSkpKaGTZjZs2PDmm2+uWbOmtrZ2+vTpFy5cMFGJZu6b0W4TMglhK2uOfv311z/88IOtrW1sbCydyNYSmCOEpmuOfvDBBzk5OSNGjKDzZuRy+ezZsysqKkJCQujUR96Z+bKwYVPNDKmgmAxRmLTQ1NTU999/HwC2bNni7e1tiiIM8xSH8I8//vjmm2/EYvGmTZvopbNAIPj222/HjRuXk5MzadIkenMav155BSIiwMWF9wM3zsxNtYZFMxknNEWhxcXFU6dOraysXLRoEV1OvuV4WjtmCCELFy5Uq9VLlizp06cP97iFhcXevXtHjhz5+++/T5ky5ejRo9qLcxnPiGm6hjDD5OammK24hk1u3s/2hJC33nrr7t27gwYNojvPtihP65lwx44dZ86ccXd3X7NmTb1/srOzO3z4sKenZ0JCwvz583ksdMsWoNPCTTA5vHHmnE1ST8PW76NHj8aOHWvAnuzN27VrV3x8vLu7O5isOSr48suX8/I82rf/3//+p72ATQth2hA6ODhYWlr++eefVVVVPB62pKRk1apVAPD555/TvRDq6dChw88//9y2bdvt27fT7daMl5UFAJCXV/eFedA3ZXp6+pUrV44dOwZMz4RffPHFyZMnAwICwsLCcnNz+SpoxIgRgYGB9P5vk5x+z58HmWxmUtLtqChufaOWxci5p09E17Hkd+OBBQsWAMDIkSObXwXo0KFDYrFYIBAYszpodTWJjSWBgcTamnz22d/2tPjiC5KYaPCBdXLgwAG6suBzzz0HAL6+vvxuCNmUoqIiOhv++eefL3y8r+gTlwk1Ht3mNjw8nLcjFhXV3QnNTYJveUweQrpvwW/87VV06dIlkUgkFot12YwtKioKACwsLI4fP65vQdnZ5IMPiJtb3Qr5jo5kyZK/9rRITSViMREIyKxZJtmaPicnJzw8nLYG7ezspk6d6uTkBACWlparV6/WcfdJA1RXV3/++ee0LLpphLOzc3R0NHcrTFZWFrdMqJ+fXyJ/n0Px8fGdO3d2d3e3sLCgS4wau7mQRkNCQggAGTzYPKtjGcbkIZw4cSIAaG8bYgyNRjNs2DAAWLp0qY4vobc829vb674IbEoKCQsjFhZ18evZkygUpKysbmMZuqdFdTWJjKzbFdvBgSgURP/FqxrX1KLXhYWFUqmU3ifp6elpisW/4+Li6CkXAMaOHbt//3765wOA/v37J2stY378+PGePXsCgEAgoK1TY8q9fPkytxqYu7s7dy+onZ3dzJkzDx8+bMCio4QQ8umndR+f5toXzTAmDyGdUW3AssSN2rp1K/07PXHBNU5tbe3UqVMBoHPnzs2/V8rLy3/44XyfPn9tDvPaa+T06eYOfucOCQ6ue36fPuTsWd1/lMarGhsb26lTJ/oWDAwMbPjBkZKSwm1JPXbs2OvXrxtV5GPJycl05UwA6Nmzp/ZaLHFxcbRKdOcm5eOllOn+hHQZSwcHB8NWAX7w4EF4eDidueLo6BgZGVlZWfngwQOFQuHv789N2nR0dAwLC4uLi9OjiORk0qYNEQjI/v361srMTB7CpUuXAsD8+fONb0EVFRXRfad2796t1wsrKiro+XPAgAGNtnDu3Lkjk8natWtnb+9gY1Pm6kpkMqL7ZWxcXN11h0BAwsIM3IE0Pj6eG2upt+h1PWq1etOmTfSOLSsrq40bzxjzq711K4N+SAFA+/btv/3224YLUpaXl8vlctqv6OTkpFAouNbpnTt3goOD6cv79OlzVufPobKyssjISHqFaWFhER4e3nC1snv37ikUCm4jCrFQWOPjQ8LDSZPPIZcAAAhySURBVGLi
E/aULCysW/tZJtOxPgyZPIQHDx6kXQt0WeXw8PCYmBjD+mnoeMOoUaMMWJVZqVTSrUtefPFF7k2m0WiOHj06efJkrv0zZMiQPXtuGXD5UFFBPvigrnXar1/FN998o/vaqteuXePex15eXlFRUbqsRUBbp126eFtaVnXtSh7vGqyHwkIikxFf33KhUGxtbS2TyZpfmfPGjRt0/x/6MXFea1HXuLg42vFIW6d5zX4OqVSqqKgoOiYBAMHBwbdv326+qr///vvKlSv/SbcWof916UJWrCBN9Qv8+98EgIwYYc41yw1m8hASQrZv396/f/96k2W7d+8+a9as6OjotLQ0XUKVkpIiEonatGmTlpZmWDVu375NT6SLFy+mW0FxN4PRraB0/xRvyq1bJCiI9Ov3DwDo16/fE3cparQxpleJSUllXPt58uS6TSueqLKSREbWrZUhEpGVKw81tdN1Q3FxcXSZo3r7ilZUVKxZs4aONERERDT18vj4eG4HriFDhujdr3P1KpHL/7anZO/eRC4nDXdo+v57Ys4VYo1gjhBSZWVliYmJkZGRwcHBjn9fjdDOzi4wMFAul9MdeRq+tra2dujQoQAgM6J1kZGRsW3bNtqm4ibvenl5rV+/3uBlWxsVFxdH195q5rRAG2O0+5E2xpo/ezSjtpbExJB27QgAkUiIXE6aCbJGQ2Jj/3oPBwYSAxbULisrk8vldCpSvdbprVu3pk+fzo1qaPvtt98CAgLor71Hjx6xsbGG7zNRW0sSEsj8+cTZ+a80rlxptj3k+GW+EGrT3nKs3vipWCxuuFEuHWnw9PQ0ZhFhOr4fEhISEhJiYWHh6+u7c+dOXnYUaqiiokIul9PTgqOjo0Kh4FqntbW1MTEx2o2xW7duGV9iTg4JCyMCAQEgzz1HDh5s5DlJSWT48L/OH40+R3c3btygW5oDwKBBgy5cuNDUMzMzM+k22jS0kZGRVVVVRpXNUatJYiKRSomzM3n7bXPuIccjNiGs5/79+z/88MOCBQv8/Py0W60CgaBnz54zZsyg/fVGLp9ML00PHjxICElKSgLTr/F+69atoKAg+rP069cvKSkpPj6e7ihOG2Nnzpzht8TTp8nzz9fFTHvROZWKvPRS3eOenmT7dt72yYmLi/Py8qKt0/Dw8Hq91o8ePZLJZPTDqE2bNlKpVMfdBfVWWclkDzletIgQaistLeVardySTb6+vuPGjTPmsDU1NdbW1gKBgLaU6KaqISEhPNW6OXv37qUXUVyHe/fu3fft22ei4lQqolAQOzvyzjt1M3vo/6dPJzY2RCYjPO1I8Bft1qm7uzvd9b7hNtp3dbxgNRijPeSM1+JCqE2lUl24cGHjxo2XLl0ysgFDN6/y8fGh386bNw8ANmzYwEc1n4x28YeGhvr4+PDZGGtaQQHZvJlMnEgyM+tC+PChgWMnOkpNTeXWHfPz8+NGO1944YWLFy+asGBObi55/fW6PeSeKi06hDz67LPPQGtSIu2gS0pKMnM1jNnyUl+bN/9tpqt5xMbGurm5ubm50UsJU2zA0vqY6b4Y5s6dOwcAdMO24uLitLQ0S0tLOq/VnEy9blc9Hh5QXm6+FXEAIDQ0dPz48Xfv3i0oKBgzZozZbrx6qj0rvyO62QANYVJSkkajGTx4cAu8tYx38+bB4wF2M7G3t+/Xr59Zi3zKPRMhvH37dm5urpubG52drH1WbMW4+5kfPGBaD/Qkpr2pt4U4e/YsAHATlJ+REG7ZAgcPAgA8nhKHWqhn4kz4emrq+KFDcyZMAACVSvXbb78JBILhw4ezrhdCAM9ICCVHj0quX/f48ksAEF65kt29++/e3vS+1dYtOhqOHzfrehzIAM9Ac7SoCG7cAIkE+vYFAFFiokNq6iiT7dzUorz9NigU4OXFuh6oWc9ACM+dA40GhgwBuvbhuXMAAK39ghA9RZ6B5ihN3eNeGUhKAngmQsj1jtLuGdRiPQNnwrNnAR6n7tYtyM0F
Nzd4vJIKQsy19hBWV8PFiyAUwtChAA3Oigi1AK09hCkpUFUFvr5Ae2LwghC1PK09hPVShyFELU9rD6GtLfTuXZe6wkJIT+fGKhBqIQSEENZ1MJf0dAgPBxsb+OUX1lVB6C+tPYS5uSCVgq0t+PlBRATr2iDUiNbeHN26FRYsgG3b4MgRUKlY1wahRrT2EGZlQceOAACurlBYyLo2CDWitYfQywvu3QMAUCrB2ZlxZRBqTGu/JszLg4gIsLeHvn3hnXdY1wahRrT2ECLU4rX25ihCLR6GECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWLs/wF4qmiGe4rccQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deXhTVf4/8E+W7ktKW0o3wLIWUAqUYqE4pVCWgbKItohYnAcxjM+MRR2/XkfGKaMjc4UHH3BmnAkIDov+eAoqwyJoO9iyiEBYCxToAi3d6b4lpUnO749TQkwXkjS555Z+Xn/49LlJ7/lU8s6995x7z5EQQgAhxI6UdQEI9XUYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsSYnHUBfZROpzt9+nR+fv7YsWMnTJjAuhzEEoaQgXv37i1duvT48eMymUyr1UZGRiqVymXLlnl4eLAuDTEgIYSwrqFvOXnyZFJSUllZWVBQUGxs7A8//FBTUwMAPj4+y5cv/+1vfztq1CjWNSJhESQglUrl5OQEAL/61a/KysoIIVqtNi0tLT4+3vgvEhkZqVKpWlpaWBeLBIIhFEhjY+OSJUsAQCKRpKSk3L9/3+wN169f5ziuX79+NIr9+vVLSUnJz89nUi0SEoZQCDdv3nzyyScBwMvLa+/evd28s6GhQaVSjR8/nkZRKpXGx8enpaW1tbUJVi0SGIbQ4fbv369QKAAgPDz82rVrFv6WWq1WKpXu7u40jUFBQRzHFRYWOrRUxASG0IHa2to4jpNIJADw4osvNjU1dfq2zMxMjUbT6Ut1dXUqlYoeRU0PjDqdzpGFI0FhCB2loqJi+vTpACCXy3me7+ptZWVlzs7OCoVCqVRmZ2d39Ta1Wp2cnEw7dQBg6NChPM9XVlY6pnYkKAyhQxw/fjwoKAgAQkJCfvrpp27eeenSpaioKBotiUQyY8aMvXv3duy2ocrLy3meDwsLo+93dnZOTExMT083GAyO+TuQEDCE9mcch4iNjaXjEI9ErwA9PT1pugIDAzmOKygo6PTNer0+PT09MTFRLm+/12LkyJE8z1dXV9v170ACwRDaU2NjY1JSknEcwtouzfr6epVKFRERYWHXaGFh4Zo1a+ghFwA8PDxWr15tj78DCQpDaDc3btwYM2YMAHh7e+/bt68nu6IHRjc3N5qukJAQjuOKioo6fbNOp0tPT09ISJBIJJMmTdqxY0dPmkbCwxDax7fffkvHIUaNGnX9+nW77LO2tlalUo0ePZpGUSaTJSQkdHMFuG3bNgCIi4uzS+tIMBjCnqLjEDQny5Yt62ocwmYGg4FeARq7RocPH87z/L1798zemZubCwCDBg2ybwHI0TCEPVJSUhITEwMALi4umzZtcmhbxcXFf/nLX0JDQ2kU3dzcvvzyS9M3tLW1OTk5SaVSrVbr0EqQfWEIbZeVlUU7RUJDQ7sfh7Aj067Rq1evmr06ZMgQAMjJyRGmGGQXGEJbGAyGTZs20fPDadOmlZeXC19DSUlJx40zZ84EgEOHDglfD7IZTm9hNULIkiVL3njjDZ1O995772VkZAwYMED4MoKDgztuHDp0KADk5+cLXg6yHT5Zb7W8vLympiY3N7fdu3cvXryYdTm/gCHsjfBIaLUrV64cOXJk6tSpYksgAAwbNgwwhL0NhtBq9GhTUlLCupBO0Nry8vJYF4KsgHPMWK2pqcnLy8vV1bW5uVkqFde3WEtLi6enp7Ozc3N
zs0wmY10Osoi4PkO9gqenZ0BAgFarLSsrM25ctGhRYGDg3bt3GRYGAO7u7oGBga2treI8UKNOYQht0fGs7969exUVFWK4GMMz0l4HQ2iLjp2Q4umWFE8lyEIYQltgCJEdYQht0XEkQDwfffFUgiyEIbRFxw+6eAboxFMJshCG0BYdOz/oFvowEVvYMdPrYAhtERAQ4O3tXVdXR5eRAID+/fsrFIqGhobq6mq2tfn5+fn4+DQ2Nt67d49tJchCGEIb0YeGTM/66BYxHILwsrB3wRDaqKszUjF89PGMtHfBENoIRymQvWAIbYQhRPaCIbRRx5EA8YwNYAh7Fwyhjbo6EorhSkw8XwfIEvgok40MBoO7u/v9+/cbGxvpWvMGg8HDw0Or1TY2NhontGeCEOLh4aHRaBoaGry8vBhWgiyBR0IbSaXSJ554ghBy+/Zt0y0AUFBQwLIyAIlEIpJKkCUwhLYTQ98MIWTDhg0dz4HxjLQXwRDaruNFoMAf/bq6ukWLFr3zzjtJSUkGg6H72pBo4WxrtmN7JLx8+fJzzz2Xn5/v6+u7bt06s4k2qqqqZDJZbW2tAJWgHsIjoe0YhnD37t1TpkzJz88fP378uXPn5syZY3xJq9W++uqru3fvVigUb7/9tqMrQT2HIbRdx5PP4cOHjx49mt5E6iCtra2rV69OTk5uaWlJTk4+deqUaXN3796dNm3a559/7urqun79ej8/P8dVguyG5fTfvZxWq5VKpXK5vKvVre3u7t270dHRAODq6rplyxazV//3v/8FBAQAwODBg8+ePStMSajnMIQ9MnDgQADIy8sToK1jx47R+fYHDRp05swZ05cMBgPP8/SycN68eTU1NQLUg+wFQ9gjcXFxAPD99987tBWaMTqP6Ny5c83Wpq+urv71r38NADKZLDU1Va/XO7QYZHcYwh5ZuXIlAPzzn/90XBP19fXPPvssAEgkEo7jzDJ24cIFek3o7+/v6O8C5CAYwh557rnnJBLJiBEjDh065IhD0MWLF2mPq5+f39GjR81e3bFjh7u7OwBERkbevn3b7q0jYWAIbaTRaOhhkN44SrtD/vrXv5aVldmriZ07d9KMTZgwoaCgwPQlOg5B21Uqla2trfZqFAkPQ2iLoqKiSZMm0V7KTz/9VKVSjRkzhkZCJpMlJCSkp6cbDAab96/ValNSUugO6WiE6auFhYXG1rdt29bjvwYxhiG02nfffefr6wsAw4YNu3z5Mt1oMBjoKtZ0+V4AGD58OM/z9+7ds3b/RUVFTz/9NM3Y1q1bzV49fPhwx9ZRr4YhtILpSEBCQkKnIwFlZWU8z9OHGADAxcUlMTExPT3d8lbOnDnj4uIyaNAgs7E+S1pHvVHvDKFOR06cELjNqqoqeneYJSMBer2eHhiN65OFh4fzPG9hcg4fPlxbW2tz66h36SUhfP99EhdHYmPJ+vWEELJ/PwEgI0cSnidVVQK0f+HChbCwMBtGAoqLi3meDw0NpVH08vJSKpUXLlywqvXz588bW//hhx+srB2JXW8I4cmTJCaG6PWkrY2MG0cuXSK7dpGgIAJAAIiHB3nlFXLunOPa37Fjh5ubW09GAlpbW9PS0uLj4yUSCU1jZGSkSqVqamqyvPWJEyfeuXPHhtaRyPWGEH7yCfnTn9p/Xr2a0L4KnY6kp5OEBCKRtKcxMpKoVKSx0Y4tazQa+44E3Lx5k+M4433VCoVCqVReuXKlq9bpKAiOQzzeekMIN24ka9a0/7x6Ndm+/Rev5uYSjiP9+7dH0dubKJXk0qWeN1tYWBgVFeWIkQCtVksPjMbb6OmBUaPRdGzdzc1tu9mfjB4vvSGEJ0+SyZPbT0fHjyfXr3fyHo2G7NpFYmLaowhAnnnmwr59Wq3WtjaNIwHDhw933EjAtWvXUlJSjLNCDRgwgOO4goKCQ4cO9evXj7be1XESPTZ6QwgJIWvXtnfMrFxJli8nP/3U5TtzcgjHEV9fg0Lh7+7u4+O
jVCqvXbtmeVOmIwHz588366V0hPr6+n/+859PPfUUjaJUKqWXjs8++2x9fb2jW0fM9ZIQGsXFtR/oIiLIv/5FGho6f1tjY97u3ePHjzd+rGfNmvXNN9+0tbV1v/uqqqrZs2ezGglQq9VKpdLFxcXHx2f16tU9uecG9SK9LYTFxYTnSWhoexS9vIhSSbru8acfa+PtnUFBQRzHddXHKJKRgEWLFgHAV199xaoAJLDeFkKqtZWkpZH4ePOu0S56/Ovq6lQqlen5Xnx8fFpamk6nM75HPCMB77zzDgB8+OGHDGtAQuqdITS6eZNwHPHza4+iQkGUSpKd3el7DQbDjz/+uGTJEmdnZ5rGIUOGHDhwQGwjASqVCgB+85vfsC0DCaaXh5Bqbibbt5NJk2gU9QEBc+Lj09LSupr6paKigud5+izs3r17jSMBX3zxhbB1dy4jIwMAnnnmGdaFIIE8FiE0On+evPpq5syZ9LAWGBj43nvvdXVuqdfrP/roIxGOBNy5cwcAgoODWReCBPJ4hZAQQkhDQ4NKpYqIiDC7AjTrGs3MzKQjAYsXLxbVSIBer3dxcZFIJJbc1IYeA49hCI1o1yh9OJ0eWziOKyoqoq/+4Q9/8PX1ff3110U4EjBixAgAyO7i4hY9Zh7nyX/pvWAlJSX0yffS0tKPP/44LCxs5syZBw8ezM7OrqmpmT17tvGmavHA5Vz6lMc5hBS9aSY7Ozs9Pf25556TSqUZGRkLFizw9vYGsS6Zgsu59CmPfwgpiUQSHx+/b9++O3fufPDBBwsXLqRTWYvzaIPrXfcpfW5VpuDg4Pfffx8A9u/fD2L9oGMI+5S+ciTsSMzXXRjCPqXvhnDIkCESieT27dt6vZ51LeaGDBkilUoLCwvb2tpY12Lu558hIaH956lT4epVptU8FvpuCN3d3QMDA+/fv19cXMy6FnMuLi4hISE6na6oqIh1Lcjh+m4IAc9IbXX2LMyZA3PmwLVrrEt5LPTpEIp5JEDMXxCTJsHRo3D0KDyYdhz1CIZQpB90MdcmcrW1tVu3bs3MzKypqWFdi0X63BCFKTF/0MV8lBazvXv3/v73v6+srHR1dQ0NDT148GB4eDjroh4Bj4SiDqEIa4uOhkOH2n8+eRKefJJpNSZu3bo1Y8aMpKSkysrKKVOmDB06NC8vLyYm5tixY6xLexTWN6+yVF3dPG5czbBhAq04b5X6+noAcHd3F9v95adPEycnQldqe+65rp6gFtT9+/d5nnd1dQUAX19flUplMBiamproRCFyudyhq7j2XJ8OISGkXz8CQCoqWNfRGX9/fwAoKSlhXcgvnD5NoqPJihWEiCOEWVlZo0aNAgCJRJKcnGy6DJbBYEhNTaUHm5SUFNEu4NGnT0cBYMgQAADxnfQBiPiM9Mknobqa/f+0mpqaVatWTZs2LScnZ8SIERkZGTt37qTfXJREIlm7du22bducnZ0//fTThISEhoYGhgV3pa+HcNgwAAyhlQiBNWvgww+BPgT25pswcybs3QuC3XpECNm5c+fIkSO3bNni6uqampp65cqV6dOnd/rmFStWHDt2rH///keOHHnmmWdEeP9DXw/h0KEAAOLsgxTbUGFTE3AcaDQAAFFRUFsLd++CTgdffQUZGZCUBOHhsGEDVFU5toxbt27Fx8e//PLLVVVVcXFxFy9eXLt2rYuLSze/EhMTc/r06fDw8CtXrkRHR587d86xJVqL9fkwY9u2EQDy0kus6+jMjh07AGDp0qWsCyGEkJwcMno0ASALFpBXXiGEkLNnCQDJzibl5YTnSVhY+5R3zs4kMZGkpxO79yi1tLSkpqbSyfICAwN37Nhh1a/X1NTQo6Wrq+uXX35p5+J6oK+HMDOTAJDJk1nX0ZmTJ08CQFRUFOtCyP79RKEgACQ8nHS1pIBeT9LTSWIikcvb00jXj6yutk8N//vf/+isH1KpVKlU2jYtUFtb2+9+9zsAkEgkqampIul57ushvHu
XAJCAANZ1dKasrAwAfH19GdbQ1kY4rn2O5aVLu5pd+RdKSgjPk4ED26Po6koSE8nJk7bXUFpampycTE/cIiIiTp8+bfu+CCGEbNq0ia6gvGTJkpaWlh7uref6eggNBuLmRgCImOZbe8jLywsAqu11NLFSZSWZMYMAELmc8Lx1v6vTkQMHHk6SHhV1yPJ1UY30er1KpaITkbi7u/M8/8jVRCx05MgRutvJkydXsB6h6ushJKT9UufiRdZ1dHDixAlvb++AgICwsDCe501HwARw/Hj7asghIeTUKdv3c/MmeestEhu7hB7KfHx8UlJSLFkn69KlS08//TT9rYSEhMLCQtuL6MyVK1cGDx4MAGFhYVat22V3GEIyfz4BIPv2sa7jlz755BO5XA4A9AubdicsX778p26WhbMflYo4OREAEhtLysrssMOu1kXt9GywubmZ4zh6xhgSErJ37147VNCZ0tLSSZMmAYCXl9ehQ4cc1MojYQjJm28SAPLxx6zreKCxsXHJkiW08yAlJUWr1aanpycmJtIPJQCEh4fzPF9TU+OY1klSEgEgEglJSSFdrCRgu5ycHI7j6MTnxgNjXl6e8Q0HDhwYNGgQAMjl8pSUlIauVr+zE41G8+KLLwKATCbbvHmzQ9vqCoaQ/OMfBIAolazrIIQQcuPGjTFjxtDvZrMjQHFxMc/zoaGh9OPr5eWlVCovdL0snA2uXbu2ePFOurLO/v123LE5Oku66QKS8fHx//73v5999lm6ZcKECefOnXNgBSbo3W10+lmlUmmvy07LYQhJRQW5fp3Yuq62PX377bcKhYIe6653uio4Ia2trfS8zjhnsQ0dHp3as2cPXbj7hRfO3rrVw51ZynQBSbo0nY+Pz2effSb8fZ579uyhBcyaNauurk7Ipvt6CEXyTEBbWxvHcTRXL774oiWJunnzJsdxfn5+NIoKhUKpVNq2rA1tne5n2bJlwq+BUVNT87e//U0mk8nlcoaLQ/70008DBgwAgCeffLKAfiYEgSFk/0xARUUFvZNDLpfzVg4FdNXhodFoLG89Li7Ottbta+zYsQCQlZXFsIaCgoLRo0cDgJ+fn2CVYAjJypVk4UKSl8cmhMePHw8KCgKA0NDQbno+58+fn5iYmJ6e3tUbrl27lpKSQs8nAWDAgAEcxz3y6zwrK8uS1oXx+uuvA8AHH3zAtoy6urrZs2fT7ugzZ84I0CKGkLzyCjl7lrz8Mnn+eZKdTV57jSiVAg0bqlQqJycnAIiNjS0vL+/qbSUlJcau0cjIyK1bt3Z1xkg7PMaNG2fa4dFxWThCiMFg2LRpE2192rRp3bQumH379gHAjBkzWBdCdDrdggULfH19Dx8+LEBzfTeEDQ3ko4/IqVPttyMvWECefpqcPk1cXdvvt4qJIbt2EYtP66xtvSExMdE4DvHIHrnc3FyO4/r370/T5e3trVQqL1261NX7O10Wzjjebdo6x3E6nc6ef5utKisrJRKJu7s78xXLCSHz5s0DgA0bNgjQVh8N4dWrJDycAJBXXzV/JuDGDcJxxNe3PYo+PkSpJFev2rP1nJwceuHh7e29z5q7BIxXgGZdo83NzZ2+v7a2li4LR98sk8ni4+M/++wzY+tff/21nf4m+6CFnerJHTr2UFhYKJPJXFxcKisrBWiuL4bw22+JtzcBIKNGkS4GAohGQ9LSSExMexTpgTEtzQ6D11999RXtkY+IiMjNzbVtJzdu3OA4ztfX1zjkrVQqu7n36sSJE4mJifTkk3bER0REmA6Ri8Rrr70GAOvWrWNbxpo1awDgJaGecOtbIaTPBNBQLVtm0TMBajVRKomHR/tvBQYSjiO3b9vW+sORgJdeeqmrw5flNBpNWlpaTEwM3Sdd/i0tLe1+F18VpaWlH374Ic/z69atE8PTAx3t2bMHAGbPns2whra2tuDgYAA4ceKEMC0KHUKG//YlJe1HNhcXsmmTdb9bX09UKjJ2bHsUp0693VWHR1e
Ki4unTJkCAC4uLpusbf5RTIe8ASAwMJDjuNu2fVUwVVZWJpFIPD09u/oeEUBaWhoAjB49WrCnDYULYVZWVnR0dHR0dDd37jqy9fZnAkJDSU+eR8vKIkuXkgkTfkc/7k888cS6dese2bWYlZUVGBhIRwJ6/jhcV2prazdv3kynHqNXgAsXLvzxxx8d1JyDjBw5EgB+/vlnVgXQMdt//OMfgrUoRAgNBsP69evpMwHGuUACAgI4jsvPz3d862TTpvZnAqZNI3bpiqcdHrQXAQCcnZ3pIF7H704mIwH0wEiv/VJTUwVo0Y6USiUAfMzohvrc3FzaQ1tbWytYow4PoVlveFNT044dO8zu3O3mMqbHrZPExPZnAjiO2LcrXq/X0+cbaMYAYPjw4aYP/jU0NDz//POsRgIqKip4nr97966Qjfbcl19+CQBz585l0vpbb70FAK+++qqQjTo2hKZ98Wa94WaXMUFBQRzH2fe+wezs7Oef/5QOMxw4YMcdmystLeV5nj4hSo/2iYmJ27dvN/7t33zzjQObf7yUlpbS/2nCj15qNBp6L65arRayXQeG0LQvvqve8Lq6OpVK9dRTT5kdGHv+D/Dll196eHhIJJLExHRhuuKNB0Z6dwsdKB8zZkxqauq2bduEqOBxQSdcFTgJ5MH0dsLPrOWQENrQF08PjHQ5AdqBkZqaatvkH3YfCbBcWVnZqlWrli5d+s4776xfvz41NZXOmBYRESFYDY+BFStWAMDGjRsFbnfy5MkAIPw3pv1D2JO++PLycp7nh9C56bvt8HBE6z1XVVUFAF5eXsYttbW1AODp6SmS2fV6BXpEWrBggZCNtrRcPn9+5KZNM4X81qbsHEIL++LnzZvXzT3+xvM62qEKACNHjuR5vqqqqvvWMzMzBRgJ6B6duMH0die6RQx3SPcWdKb6fv36Cflob2Hha2o1FBWlCNaikd1CaNoXHxcX181n7tKlS8YrwHnz5h08eLCrK8CSkhKe5+mMIwDg6ura1eM8lrfuaJGRkQBg+lhQVFSUGO6H7F1oL1c3d6jbl17fePGit1oNLS0Mnii1Twit7Yvv9B7/oqKiTt+s0+nS09MTEhLM7lpubGy0rXWHSkpKAoBdu3YZt7zwwgsAsHPnToZV9TrLly8HAMFmXqqs/JdaDTdvxgrTnBk7hDAnJ4fepWFtX3zHe/wTEhK6uQLs9HGeb7/91rbWHeSPf/wjAKxdu9a4hd4N3OsGzdnatm0bACxevFiY5q5fn6BWQ3X1V8I0Z6anIaQjAQAwbtw4m29/Mb3Hv+OQtxmNRrNr1y7jXcu09fHjxws5KUg3Pv/8cwBITk42btm+fTsALFu2jGFVvU5eXh4A+Pn5CXBZ2NR0Wq2GS5f8DQY2s33ZHkLTkYDk5OSe9ymVlZXxPP/EE0+YDnl3M6HDlStXFi9eLJPJQkNDLZ9SxdEyMzMBYMqUKcYtWVlZABAdHc2wqt6I9gVkO37Gkdu3X1arobiYc3RDXbExhMXFxXRQxe4jAWZD3tDtXLd37tyhd9vYsYAeunv3LgAEmCwxU1xcDAD9+/dnWFVvtGzZMnD8jdQ6Xe2FC+5qtUSrtfHZzp6zJYTGkYCBAwc67m73goKCd999l05BRwffVq1aZfa9qNfrXVxcJBKJ8LP0dcVgMNA7p41rdxkMBtoFJfBslr3dli1bACAxMdGhrZSXf6JWw61bcxzaSvesCyEdCaDDd3FxcQIsZ2M2123H7jL65Itt8206CL1l1LR7nXY+2Xe27MfezZs36TmFQ+9zuHp1tFoNtbWOnG/8UaxYLps+D/HGG2/o9XqO4zIyMgICAiz/ddsYb5q5du3am2++SXuuTYlwYXdaUp7JGtwiLFL8RowYERwcXFlZSdPoCI2Nx7Ta605OwQrFPAc1YQlLQ3j58uXIyMivv/5aoVB88803PM9LpYKudz9q1KhPPvnEx8fHbLvYFnaHziL34qRJf502bXRFBbuieqVnnnk
GAGjPliPcu6cCgP79lRKJ3EFNWMLSIL333nt5eXkTJky4ePHiokWLHFqTVUR4kPlNRMR/J01aVllp3LKkX781mZmjL19mWFVvFBsbCwAHDhwoLS3VarV2339o6IagoDX+/ivtvmerWPQFcOzYsWPHjoWFhZ06dcr4oINIdDz3Yy5iwICIs2fBy+vhpqFDAQDEVGSvEBsbGxwc/N1334WEhACAh4eHr6+vr6+v3wP0Z/rfceP69++vkMv9ZDJfC49szs6DgoP/CgCVlf+orv6Pt3e8QpHg6TnF8oOTXVhUa0BAgFardXJyElsCQZRHQhg2DADAtCQaQlEV2RuMHj364sWLM2bMqK6urqmpaW5ubm5upoNAHW3dGjV+/Dn6s0ymkMt95XJ/mcxXLveTy32NP7i6jnJ1HXPnzm8AdIQYZDLvwYM/b2g42tJyvqXlfHn5x05Ogd7ev1Yofu3tPUsmUwjwZ0oIIY98k0aj8fDwcHJyamlpMQ7fiURrayu9aUaj0RjvuWGsrQ3c3IAQaGkBOqeOTgfu7qDXP9yCrNfc3EzTWP0A/bmmpqampub1112Cg6/qdDV6fTUh+q524uf3sr//ioqKzUOHfg0A1dU7vL1ny2SKpqZT9fUH6+r+e/9+IX2nRCJzcxunUCT4+Mx3d58AIHHQ32VRCAEgJCSktLT09u3bxjtaMjMzr169mpSUJEAfafcGDx5cVFSUm5tLO2lEISwM7tyBmzdhxIj2LUOHQkEB5ORAeDjTyvoEvb5ep6vS6ar1+hqdrsb0By+vab6+L+TnJzo5BXl5TVMo5srl/qa/q9Fcra//rqHhSFPTSUJ0dKNX69gRu6bA3LkwfTo8mJPFbiwcyqD9VBkZGcYt9KL5hx9+cMTIiVXoyl5Hjx5lXYiJ+HgCQL777uGWmTMJAGG3MDoyc/9+aXX1/7t2bWxzc+fjt3p9U13dgTt3lJcvh1R+8mAydrmcxMQQniddz3duLUsvQDuOBIjnYuzUtYIAAAp3SURBVEyEoxSd9MR0vFBEgtPpagCgru7b2tp9Tk5Bvr4veHvH19X9t9M3S6UeCsX8wYNVY8fe9Z37d/jwQ4iOBoMBTp2Cd9+FMWNgxAh44w344Qdobe1JVZaGsGPkxBNC8VTyUMeeGOybYU2nq7l82e/q1eGenjE1Nbvz8xfl5y/SaLLLyv6SnR1WWLiqtnavXt/Y2a9KZCPHw5/+BKdPQ0UF7N4NS5eCnx/k5sLmzTB7Nvj5wcKF0EWP0SNZOkbZcSRAPMcfDCGyhEZzBQDkcn+5PGDo0P0A0NKivnFjskTicv/+naqqLVVVW6RSV0/PaQrFXIVirovL0E724u8Py5bBsmVgMMDFi5CRAQcPwk8/wfffQ79+thVmXQg7HgnFMEAnnkoe6iqEoiqyj6EhdHMba9zS0nKZEJ2v75LAwD/W1x9qbMxobMxsaDja0HD07t0UF5chXl7xPj4J3t6zJJIOfdpSKURGQmQkcByUlsKlS/BgmWRrWRrCbq4JCSHGiSeYoJXQh3rZVvLQ0KEgkUBBARgMQO/vo1vu3AG9HkQ2zNNHaDTZAODm9lTHLW5uY9zcxgQGcjpdVWPjj/X1B+vrD7e2FrS2bqmq2jL+9XESD39ISIBnn4UHMx79QnAwBAfDn/8MJ0+CwQDz5sH//Z8VlVneh9Nx1jA6XXFZWZm9uolsRue8KCkpYV2IiQEDCAAxnTiHLknzYLlcJLCcnElqNTQ2Zhm33Lw5Xa2GurpO1sQ2GHSNjSeKi/+Y/3MskUgerlMZEUHefZccP07MFuQ6eZLExBC9nrS1kXHjiDVTVFlxe474+2bEfka6di18/jl4e7OqqG8zaDTXoPMj4diO75ZIZJ6eU0NC1g15OhPKy2HHDkhMBIUCLl8Gnodf/Qr69YP582HLFigtBQA4exbi4kAqBbkcYmPh3DnLK7NDCMXw0RfP18FDGzfCmTMQFfVwi1IJr7wCHR4EQQLQavMMhmZ
n50EyWXv3SVtbqU53Ty73dXYOfcQvBwTA8uWQlgaVlZCRAW+9BSNHQlMTHDoEq1bBwIGwcSMQAqb3vVhzxWFFCHGo0DrR0TBpEnh4wJ//DNOnw7RpsGED65r6ro69Mh23PJqzM8yYARs3wo0bUFAAKhUkJoK7O4wdC08/DceOgcEAOh0cPw7R0Zbv1YrHqMR/OiqGSsydOgXHjsHx42AwQFQUzJoFERGsa+qLOvbKtLRcMdtinbAwUCpBqYSWFnB2BrkcZs+G+HgwGGDFCniwVKslrA4hDhVax3ipIJW2XypgCFl4cNzrpGu0p7t+MIc1pKbatoPHqmMmNzeXdSEd9OBSAdlRW30uWNwrIzArQhgcHOzu7n7v3r2Ghga6JTAw0MPDo6qqqq6uzjHlWWrAgAFeXl51dXV0FSQR6cGlArKbpqbwSTcmJA9zdWl/hIUYdK2aWwBSN7cxbEsDq0IokUjCwsLA5NAnkUjoMmYFBQWOKM4qtBIxHJZ/ISam/VIhPt7aSwVkN9nZoNdLXL0k0vbrL8n1G+NjpeM+mi6V2nibix1Z9xi/SM5Ia2tr29raOq1NDOMl5lJT4dgxyMyE3/+edSl91ZUrAABjTc48L1+G5maZVIgH5x/JPiEU8qN/8eLFiRMnvv3224+sDaF22dkAAE891d0WdnrZkXDbtm1TpkwpKCg4c+aM2fRbdOkCDCHqRMcjYcct7FgXQobj9a2tratWrVq5cqVWq1UqlcePHzeddaqoqEilUg0cOHDJkiWOrgT1PlevAvzyuCemEFo3DT4dAxg0aJBxCz0RHThwoFX7sVZRUdGkSZMAwNXV9fPPPzd79eDBg/Tm8hEjRnS10ijquwoLCQAxXZCnupoAEA8PIuBy3N2wLoRtbW1yuVwqlWq1WuOWr7/+2qFLQRw+fNjX1xcAhg0bdvnyZdOXDAaDcS7w+fPn19bWOq4M1FsdPEgASHz8wy3HjhEAIprF6qxelYmOBNy4ccMR1ZgxzVhCQoLZ6mhVVVWzZ88GAJlMlpqaKsBqkqhX+ugjAkDeeuvhls2bCQBRKtnV9AtWzzQsWHdodXX13Llz3333XYlEkpqa+t///refyfQBFy5ciIqK+v777/39/Y8ePbp27VqB18ZAvYa4u0bBhum+hemJuXjxYlRU1NGjR/39/Y8cOWKWsZ07d06dOvX27dsTJ05Uq9Xx8fEOLQb1bjk5ACLulQErO2YIIRs2bACAxYsXO27VuB07dtB1NidOnHj79m3TlzQazSuvvEIrVyqVra2tDqoBPT7u3ydXrhDjR0WvJx4eBIB0tvYzE1aH8Ny5c0lJSQAwbNgwnucrKyvtWI1Go1m5cmVXGbtz587EiRMBwM3N7YsvvrBju6hvqa4mJ06wLuIhW5bL3rNnz8CBA2lUXF1dk5OTT5061fNSCgsLo6Ki6D63bdtm9qrpOISo1uVFvcD775O4OBIbS9avZ11KJ2wJISFEr9enp6cnJibSpbMBIDw8nOf56upq23Z46NAhmrHhw4ebZUyn06WmptJrwgULFuA4BLJOD6ZgEoaNITQqLi7meT40NNT0wHj+/HmrdpKVlUWnKly0aFFdXZ3pS/fu3Zs1axYAyOVynucdunw5ejx98gn505/af169mmzdyrSaTvQ0hFRra2taWlp8fLxx2s/IyEiVStXU1GTJrxsMhsTExI5jfWq1mi4C1b9/f9O1aBCywsaNZM2a9p9XrybbtzOtphP2CaHRrVu3OI7z929fa0qhUCiVSksu4Toe4lQqlbOzMwBMnTpVXBOKot7l5EkyeXL76ej48eT6ddYFmbNzCCmtVksPjMaBEHpgbGlpseTXNRrNihUrcBwC2c3ate0dM3//O+tSOuGQEBpdv36d4zjjnS4BAQEcx9GZ87uSm5s7duxYAPD09NyzZ49Dy0NIDBwbQqqhoUGlUo0bN45GUSqVxsfHp6WltZlNJE7IgQMHfHx86DhEdna2ALUhxJwQITR
Sq9VKpdL9wRRxQUFBHMcVFhaSX45DLFy40KyPFKHHmKVr1ttRTU3Nf/7zH5VKdevWLQCQy+Vz5swpLy9Xq9VyuXz9+vVvvPGGWBZXQsjxGITQ6Pz585s3b96zZ09bW1tISMj9+/f37Nkzffp0VvUgxATLEFLl5eW7d++eM2eOn59fUFAQ22IQEh77ECLUx+GDsAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjGEIEWIMQ4gQYxhChBjDECLEGIYQIcYwhAgxhiFEiDEMIUKMYQgRYgxDiBBjGEKEGMMQIsQYhhAhxjCECDGGIUSIMQwhQoxhCBFiDEOIEGMYQoQYwxAixBiGECHGMIQIMYYhRIgxDCFCjP1/nSApovDRQ24AAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAd6klEQVR4nO3da1BTZ/4H8F/CRbkIAt4qiFUsFxVrQUHxiuK2Kottt/TfnW10utOlTDsbdzta2r5Jnd3ppm53jZ3ZOrzpNm6729LZ7S5Wa0W8V7lI8VIFKXhB8IpAq6AQk+f/4sEDRIkn4SRPcvh+Ji80OSd5SM73ec5zOYmGMUYAII5WdAEAhjqEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDCEEEAwhBBAMIQQQDD1hPDpp0mjof/8p/eeWbMoP7/30cTEftuHhvY+CiCQekJIRFot/fnPogsB4CRVhXDRIqqooEOHRJcDwBmqCuHo0fTMM2gMwcf4iy6AkqxWWr+e5s6l2lr7HiARMUa3bokoFoBDqmoJiSg9nTIy6P33H/BQXR2NGNF76+jweOEAHkRtISSidevok0/oyhX7+2Nj6eDB3ltQkIjCAdxHVaejXE4OTZxIH3xAfn797g8Kovnze/+rVWH9Az5JhUeiVkuvv05bttiH0IFz5+hnP6PISJo4kTZvdmfhAO6jwhAS0Zo1FBBAFRVObJ+aSi0t9M03ZDBQebk7CwfQnwpD+NlntHUr/eY3ZLXK2p4xysmh9etJq6XEREpOplOn3FxEgD40jDHRZVDYlCnU0ED19RQX5/S+zc2UlERHjtC0aW4oGcCDqLAl5CMuNpvTO7a307PPkl6PBIJHIYQ9rl6lxYtp3jz64x/dUSiAASGEREStrZSZSb/6Ff31r24qFMCAVBhCjYaIyKmu7ssv01NP0fr1bioRgCMqDKGzLeGlS/Tll/TRRzRqVM/NYHBf6QDsqXDFjLMhHD/euWYTQFloCQEEQwgBBFNhCF0YmAEQSIUhREsIvgUhBBAMIQQQDCEEEAwhBBBMhSHE6Cj4FhWGEC0h+BaEEEAwhBBAMIQQQDAVhhADM+BbVBjCkJD/I9JYrTtEFwRAFhWGsKurg4jU9y1yoFYqDKFWqyUiGzqF4CMQQgDBEEI
AwRBCAMFUGEKNRkMIIfgOFYaQt4QYHQVfodoQoiUEX4EQAgiGEAIIhhACCIYQAgiGEAIIpsIQ8nlCTFGAr1BhCNESgm9RTwjPnj372WefEUIIvkYNv0/Y2dm5cePG9957jzGWmpqKEIJv8fkQfv755+vXr7948aJGo3nxxRfDwsIQQvAtPhzC2tratWvX7tq1i4hSUlI2b948f/58wunokHf58uXy8vKOjo6nnnoqKipKdHEezif7hG1tbWvXrk1OTt61a1dkZKTJZKqoqOAJpHshbGtrE1pGEMBisWzevDkxMfGFF15YvXr1lClT3nvvva6uLtHlehjmU6xWq9lsHjNmDBH5+/vn5eVdv37dbpvdu3fPmzdv+PDhBQUFN2/eFFJO8LwdO3bEx8fzo3rp0qWLFi3i/37sscf+97//iS6dI74UwvLy8vT0dP7OLl68+MSJEw/czGaz/fKXv+SbTZgw4ZNPPrHZbB4uKnhSfX19bm4u/8Tj4+O3b9/O7y8pKZk+fTq/PzMzs7q6Wmw5B+IbIWxubtbpdHwWPiYmxmw2P3SXsrKyuXPn8g9g1qxZBw4c8EA5wcM6OjoMBsPw4cOJKCQkxGAw3Llzp+8GFoulsLCQnzpptVqdTnf58mVRpR2It4ewu7vbZDKNGDGCiIKCgpw6w7RYLEVFRbGxsTyK2dnZZ8+edWtpwZOKi4snTpxIRBqNxnG6WltbCwoKhg0bJmX19u3bniyqY14dwpKSkqSkJBcidOvWLYPBkJGRYbVaOzo6jEZjaGgoEQUGBur1+h9//NGtxQZ3q66uXrBgAT8wUlJSvv32Wzl71dXVSWetEyZMMJvNXtJP8dIQ1tXVZWdn8/crISHh66+/lrmj1Wr96KOPxo4dS0R+fn7Sx9PU1JSXl8cHTh955JHCwsK7d++6rfjgLq2trXq93s/Pj4iioqJMJpOzn2Npaenjjz/OD6309PTDhw+7qajyeV8If/qp4U9/CgwMJKKRI0eaTCaLxSJz16NHj2ZkZPD3d/bs2WVlZXYbVFZWSjMZTzzxxL59+5QuPbgLHxgfPXo0HxjX6/Xt7e2DeSpeU2s0mtzc3PPnzytbWqd4UwhtNlZUxCZMYBrNy9On63S6K1euyNy1paVFr9fzhm78+PEOzjRsNltRUdGkSZOks9z6+nrl/gZwi/3790vNV2Zm5smTJwf/nLzPwgd1goODBU5oeU0Ijx5lGRmMiBGxOXO6jx6VuZ/FYjGZTOHh4UQUEBAgs8vX2dlpNBrDwsKkvVyuVsGtXBgYd0pjY6P0/NHR0YWFhVarVdmXeCgvCOGNG0yvZ35+jIiNG8cKC5nsd6G0tFSaCMrKyjp9+rRTr3zp0qW8vLy+HQz5p77gbl1dXX0Hxg0GQ2dnp5tey25C6+DBg256oQcSGkKLhRUWslGjGBELCGB6PZM9bnnx4kWdTsfftSlTpmzbts3lUlRVVS1cuPDeqc6aXbtcfiZQTHFx8eTJk6Uuw7lz51x4kjt37jQ3N8vcmPdThExoiQvh3r1sxoye88+lS9n338vcr7Oz0/H8rGuKi4vj4uIyMs4TsawspkSnA1xRV1e3YsUKaWB8586dLj/Vxo0bne3sCZnQEhHCpiam0zGNhhGxuDhWVCR/1+Li4kcffVQa1Lpw4YKC5bpz585779nCw3sb5hs3FHx6eAg+UsKn1EeOHGk0Gru6ugbzhL/+9a+lWcFPP/1U/qyghye0PBvCzk5mNLLQUEbEgoOZwcBkL1yoqal58sknpdkF9521t7QwvZ75+zMiFhHBjEamREMLjthsNrPZPG7cOGlx2dWrVxV55iNHjsyZM8e1zl7fS3PcOqHlwRAWF7NJk3rOP7OzmeyZmdbW1t/+9rf+/v5ENGrUKM+MX9XUsBUregobH+9Uaw3Oqaqq6ju7e+TIEWWffzCdPc9MaHkkhGfOsOXLe47oxET2zTdyd7RamdncNWPGqOBgXkHef+GSW5WUsGnTevu
tx4978sXVj8/u8tFpftbnvuq1b2ePL0KW39lz94SWm0N48yYzGFhgYM+5ncnE5M8BHDrEUlJ4Ag6//PJAFy65W3c3Kyxko0czIqbVMp2OyV5BAAPiFzfwy97lz+4OHh9U57OCznb23Deh5c4QXr3Kxo5lRMzPj+Xns5YWuTteutQ7chMdzcxmJnqhbWsrKyjoqUxCQ53qzIK9PXv2JCcnS7O7p06d8nABKioq5s2bJ63/dqqzV1VVJV0unJiY+NVXXw2+PG5uCZ99lqWlsfJyudt3dzOTiYWFMSIWGMj0euZNl8afOcNyc3vOTmNjvaFy8DF2s7tF4rravLPHR9pd6OzxCS2pHhnkMjr3n47KP05LStjUqb0jNw0N7iyZ63bv7p3gnDOHKT2OoE68W8W7ZMHBwV5yRR8vFV+U42xnj1/pKq2XzMvLu3btmmvF8IJla4yxH37obWLi49mOHaIL9BBWKzObe861NRqWm8sUnbBUG2l2l7c5ys7uDl5zc7PLnT0+tsSH7iMiIoxGowtLR5QO4eHDbPFiFhzMRoxgK1Y8fB1MRwczGNiwYb2drcHNz3pSeztbt66nozh3Llu1ihGxf/+7d4PUVPbKKz3/XrWKJST02z0kpPdRtaqtrZVmd2fOnOnNXzLSd/ViYmKi9EU1ctTU1EirfOLj4509zVY0hN99x4YPZ+nprKiI/eMfLCGBRUQ4aiOKi1lsbE9r4rPDjj/8wJ55hu3axVatYlotmzOn96GhHMK2traCggJ+XSj/WkqfuIrarrP3vezVlHzfhIQEvu+yZcvq6upk7qhoCLOzWWQkk86qGxqYvz/Lz3/Alt99x+bP7zn/TE1lXnB18+CtWsUyM5lWy6RVGUM2hC0tLfy7lfz8/F577bUbPrX8bzCdve7u7sLCwtGjR/v7+//973+XuVBOuRBaLGzYMKbT9btz0SIWG9vvnr4XLkVFMZNJ/oVLXm7VKvb88+wXv2A5OT33DNkQbtiwITY2dtasWcd9dn1D34UEvLMnfyFrS0sLX3cqs/FX7hu4m5upq4vufftqj4QEamyk7m4iIpuNtm6lhAT64APSakmvp4YGWruWtD75LeAPZLXS+vW0bRvV1j7gUcbo1q3em4odOHCgsbHRaDTOmDFDdFlcFBUVtXnz5pMnT65YsaKtre3NN99MTk7+4osvZO7L0yvztxiUC8Dt20REwcH97gwKIiLq7CQi+vFHWreOWlpoyRKqrqbNmyk8XLFX9xrp6ZSRQe+//4CH6upoxIjeW0eHxwvnKar5ndakpKTt27eXlJRMmzatrq7u+eefz8rKOnHixEN3dOoHUZQLYUgIEdnX8K2tpNH0PBQRQX/7G335JZWW0rRpir2u91m3jj75hK5csb8/NpYOHuy98QpKlVT2mzxZWVnV1dWbNm2KiIgoLS1NTU199dVXHf/EhVPVkHK/yhQdTUFBVFPT7876epo8mQICev5771sf1S0nhyZOpA8+ID+/fvcHBdG9K2OISE2n4fZUFkIiCggI+N3vfrd69eqNGzdu2rSppqaGX/c4EEEtoVZLy5fTjh1040bPPefPU1kZ5eQo9hI+Qqul11+nLVvsQzh0qC+EXGRkpNFoPHbs2JYtWxxvKSiERLRhA929S08+SUVF9M9/UnY2jRlDBQVKvoSPWLOGAgKookJ0OQRRawi5pKSkxMREx9uIC+H06bR3L4WF0UsvUX4+xcXRwYM0dqySL+ELKipo82ZauZKsVtFFEcRxj6i0tPSLL75obW31bKE8yqkQKv1LvWlptGePws/pI/77355/HDhAb75J69ZR34NQelSi4lkKx4fg22+/XVFRUV5enpaW5tlyeY64lhCI6N6Ii0rPxWRxfAiq+2SVc2p0FCFUHkKIEKIlFAwhdHwIqmYq3wGEUDCEEC0hQiiYRkNEpOqK/iEct3UIof3Gbi7MUISWEC0hQigYQogQYnRUMIQQIURLKBhCiBAihIIhhI5PxjB
FYb+xmwszFGF0FC0hQigYWkKEECEUDCFECDE6KhhCiBCiJRQMIUQIEULBMDAjZ3QUIezd2M2FGYrQEsppCTFF0buxmwszFCGEOB1FCAVDCBFCp065EULlIYQIoVOn3Aih8rTabUQam23IfeGqBNcT4nRUsKGwNtIxtIQIoWBD4SBzDN8xgxAKhhCiJUQIBRsKB5ljCCFCKNhQOMgcQwixgFuwoXCQOYbRUbSEgg2Fg8wxhBAhFGwojP45Jmd0FCHs3djNhRmKhkJN7xgWcCOEgiGEGJhBCAUbCgeZYwghRkcFGwoHmWMYmBH6S70wNA4yxxy/A6tXr05PT58xY4ZnC+VRCKFgCKHjd2DGjBk+ncDu7u7a2lrHfwL6hIJhikLF1VBpaWlKSsqyZcva29sdbIYQCqbiQ1AmVb4D9fX1OTk5WVlZp06dioyMbGpqcrAxQiiYKg9Bp6jsHejs7HznnXeSk5O3bdsWEhJiMBiOHTs2ffp0B7u4c3T06acpMbHfPaGhlJ/f+98jRygzk0JCKCyMVq6kU6ece35VUNkh6AI1nZBv27Zt6tSpGzZs6Orq0ul09fX177zzzrBhwxzvJa4lrK6mJUvo9m36+GP68ENqaKAFC6ixUcmX8AUIoTregWPHji1cuDAnJ+fChQspKSmHDh3aunXruHHjHrrj3bt3GxoaSP47wJyyahVLSOh3T0gIe+WVnn9nZ7PISNbe3vPfhgbm78/y8517Cd9XXV1NRI8++qjogohx8eLFBQsWREREREdH79+/X3RxXNHa2qrX6/38/IgoKirKZDLdvXtX5r579+5NTk4mopdeeqmlpUXOLsqF0GJhw4Yxna7fo4sWsdhY517CxxUXF8fGxoaHh2u12rS0tMOHD4suked0dHQYDIagoCAiCg4O5rX8c88919DQILpoclmtVrPZPHr0aCLy9/fX6/XtUqPyMOfPn3/uuef4Xx0XF/fVV1/J3NH5EMbHs5s3e29SCM+fZ0TsD3/ot31eHiNiXV3OvYpvOnnyZGZmpvQZREVFEZFWq12zZk1TU5Po0rldcXHxpEmT+J+fnZ195swZo9E4YsQIIgoICHDqaBZl//79jz/+OP8TMjMzT548KXPHzs5Oo9EYGhrKax+DwXD79m35r+t8CInsbzyENTWMiP3lL/22X7uWEbG2Nudexde0tbUVFBQEBgYSUWRkJD97uXXrlsFgGD58OP9gCgoKbt68KbqkblFbW/vUU0/xY3fmzJkHDhyQHmpubs7Ly3PtvM6TmpubdTodH0+KiYkxm83y97Wrfc6fP+/sqzsfwthYdvBg7y0oqCeEjY2MiG3Y0G97nY5pNKy7m+l0zGBgd+44Wz4vx89exowZwxs9nU53/fr1vhs0NjZKn250dHRhYaHVahVVWsU9sPa5f7OjR48uXLiQH6ZJSUnbt2/3fFEH0tXVZTKZeIsdFBRkMBg6Oztl7uug9nGKcn1Cq5UFBbEXXuj36Ny5LC6OVVf3tJlTprAvv3StoF6ooqJizpw5/DNYtGjR8ePHB9qyrKxs7ty5fMtZs2a5/Gl5D5vNZlf7XLt2zfEuxcXFkydP5m9CVlbW999/75miyixSdnb2uXPnZO7Yt/aJiIgYZAuv6Ojos8+ysDAmjQidO8c0Gvb73zPGWEkJS07uiWJmJquudrnE3uDSpUt5eXl8ID46OtpsNttsNse72Gy2oqKi2NhY6SM/e/asZ0qruMrKSpm1jx3e7ISHh/OOYl5e3kOj6yZ1dXUrVqzgf0JCQsLOnTtl7shrn7Fjx8qvfR5K0RCePMmCg1lqKvv8c/bpp2zaNDZ2LLtypedRi4UVFrIxYxgR02qZTscuXx5k6T2vu7vbZDKFhYURUWBgoF6vd6qn19HRIfXg+e4//vij+0qrOBdqn/u1tLRIEwCRkZFGo7HLg0N3vK/OZ9tHjhzp1KtXVlZKZzQLFy48duyYIkVSNISMsfJylpnJgoPZiBEsJ4fV1dk
/Q1sbKyhgw4YxIhYSwgwGJvsUXLiSkpKpU6dKTZnLI+9NTU3SoTxq1CivHa7o6/7a56effhrME54+fXr58uX8zYyPjy8qKlKqqAPhjRifbeeN2NWrV2Xue/nyZekjGz9+vGu1z0CcDKFS6upYbm7P2WlMDDObmXJ/kjv88MMPubm50hGjyNBCZWXl/Pnz+XM+8cQT+/btG/xzusnu3bunTZs2+NrnfiUlJdIzL126VP6ZrbOqqqoyMjL4C82ePfvIkSMyd+xb+/C5lkHWPvcTFEJuzx42c2ZPFNPS2LffiizMAPgENJ9p4It37yg6xms3wF1fX6/gkw9efX294rWPne7u7sLCwlGjRkkN1BWpC6OEvme/jzzyiFMD1H1rn6ysrJqaGgULJhEaQsaY1crMZjZuHCNiGg3LzWXOT7O4D1/+QkQajUan0112TyeWT/X2rWvbvGBm1d21j50bN25I442hoaGKvJzFYiksLOSrJvgbK78H3rf2eeyxx+Qvf3GB6BByt24xg4ENH86IWHAwKyhgSrf4zqqurl6wYAH/DFJTU791fyvNxzyk4QqTyWSxWNz9ogMpLi6eOHGiu2uf+9XW1kqH/pQpUwbZUSwrK+MztCtWrDhz5ozMvTxc+zBvCSHX2NgzuU/Exo9nhYVMxLz2jRs37BbvenJ6vaqqatGiRfwoTExMdGsF/EB9a5+UlBQP1D73Kykp4WugiSgzM7N6EBNab7311rZt2+Rv37f2yc3NbWxsdPml5fOmEHIHD7JZs3o6irNnd3lwAfRgFu8qq7i4OC4uTuqKyF/EOBiDuXRAcfxMsu9iAHe3xqdPn162bJlU+xw6dMitL9eX94WQMWazsaIiNnEiI3ptxgxlh+MGsm/fPumre5YsWeKZ494BPijH57X9/f3dOq/tPbWPHb4whc/p8TND+WvK5Otb+zhYfOc+XhlC7tatpvff51fEDB8+/K233lJ8aJhramqSlndOmDDBqcW77sZH9vz9/YkoIiLCaDQq3j9x+dIBj+k7P8RXVys1R2dX++Tl5cm8AlBZXhxCxpib57X7Lt514QoUj6mpqZHWWCk4rz2YSwc8b8+ePTNnzuRvQlpa2uA7q+Xl5WlpaVLtc+LECUXK6QJvDyFXWVkpjRZMnTr166+/Hvxzurx4VxQF57UHc+mAQLzh4kte+MCJa5+at9U+vhFCru+8Nv/mOdee58yZM1LDkpiYKH/xrnB8XpufPrk8r+1ztY+d+y/UlN9P4T1tqfbxkos8fSmE7F4V7vK8tt3iXbFzcS5rbW21m9eWeRbt8qUDXqjvhZrjx4+Xsw6mpKQkKSlJqn285yoWHwshd/36dbvhrIdmaTCLd73TmTNnpOGK2NhYx8MVg7l0wJuVl5dLK0JTU1MHulCzrq5u5cqVUu2jSHdGQT4ZQu67775bvHix9M46mJM9evRo38W7ZWVlniynW+3evVuaWZkzZ87965LVV/vY4Rdq8hl2um99uU/UPj4cQs5uXttujMtu8a6yV6B4CT5cwS8z5cMVFy5c4A9VVVXNmzdPqn3kXzrgc/iFmryzx6+0am9vLyoqmjBhAt1bfKfsunAF+XwI2b3e9siRI6XZnqtXr1oslr4Xcfvc5bPOunnzZt8Vj2+88carr77q2qUDvuvixYsvvvgi7yjyQBLR3LlzKysrRRfNETWEkLt27Vp+fj6f1w4PD4+JieGfwcqVK+vuv7ZYpS5cuKDT6YiIxy8wMPCNN95w0yIHr8UntFavXu0r36ylYar4wQCz2XzlypU1a9a0t7evW7du586dfDJj06ZN2dnZ77777k8//fT222/zYVXV27dv3/Hjx1taWnQ6XXx8vOjiCMAYs1gsRMTHkL2cSkI4e/bso0ePVlZWzpo1i4hOnToVFBQUExPDP4OYmJjm5uampqbo6GjRJQWwp5Jf6rX7BRJpZckDHwXwKir5fULHMUMIwZshhACCqSSEjn+VEiEEb6aSEDqOmZp
+OBbUZ0iEEC0heDOEEEAwhBBAMJWEkPf6EELwRSoJIY/ZQEMvGJgBb6aqEKIlBF+EEAIIhhACCIYQAgimkhBi2Rr4LpWEEMvWwHcNiRCiJQRvhhACCIYQAgimkhA6XrY2LiRk0siRfugTgldSyXfM/Eur/RcRDRCzz+/cofZ2QksIXkklLSFptUQ0YMwcPwogFEIIIBhCCCAYQgggmFpCqNEQDTgwgxCCN1NLCB3HzHFEAYQaGiFESwheDCEEEAwhBBBMLSHkvT6EEHyQWkLIYzbQ0AsGZsCLqSuEaAnBByGEAIIhhACCIYQAgqklhFi2Bj5LLSHEsjXwWUMjhGgJwYshhACCIYQAgqnki55o2TIKCqJ58x786IsvUmoqLVjg2TIByKLBl8MDiKWW01EAn6XGEB45QpmZFBJCYWG0ciWdOiW6QACOqC6E1dW0ZAndvk0ff0wffkgNDbRgATU2ii4WwIBU1yf8+c/p8GE6e5bCw4mIzp6lhAR6+WXaskV0yQAeTF0hvHuXQkPp+edp69beOxcvpnPn6MIFccUCcERdp6PNzdTVRfHx/e5MSKDGRuruFlQmgIdQVwhv3yYiCg7ud2dQEBFRZ6eA8gDIoK4QhoQQEd261e/O1lbSaHoeAvA+6gphdDQFBVFNTb876+tp8mQKCBBUJoCHUFcItVpavpx27KAbN3ruOX+eysooJ0dosQAcUdfoKBF9/z2lp1NSEr3xBt29S+++Sy0tdPw4jR0rumQAD6aulpCIpk+nvXspLIxeeony8ykujg4eRALBm6muJQTwNaprCQF8DUIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQgggGEIIIBhCCCAYQggg2P8DZ8/X67zoa8YAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3de1yUVeIG8GdmEBCVwUsYaeQVd71hmmLrDYUsXN3SoNJNs0zatjJ/1kc2JK3UxLTWLpuSba3uaptmpbVpGd6wTMVbVt4AlbyhyDDcBOdyfn8cGkcU5TLvewZ8vp/+wMPwngPNM+95z3vecwxCCBCROkbVDSC60TGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGERIoxhESKMYREijGEdU9hYaHqJpAnMYR1TG5ubps2beLj4+12u+q2kGcwhHXMa6+9lpeXd+rUKR8fH9VtIc8wCCFUt4Gq6vTp0x06dLhw4cL27dt79+6tujnkGTwT1iWvvvpqSUnJqFGjmMD6hGfCOiM7OzssLMxms+3Zs6d79+6qm0MewzNhnTFz5syysrLRo0czgfUMz4R1Q0ZGRufOnZ1O588//9ypUyfVzSFP4pmwbnj55ZdtNtv48eOZwPqHZ8I64NChQ126dDEajYcOHWrbtq3q5pCH8UxYByQlJTkcjvj4eCawXuKZ0Nvt37+/R48evr6+R44cad26termkOfxTOjtEhMTnU7nU089xQTWVzwTerWdO3dGREQEBARkZWUFBwerbg5pgmdCr5aYmCiEmDx5MhNYj/FM6L22bt06YMAAs9mclZXVrFkz1c0hrfBM6L2SkpIAPP/880xg/cYzoZdat25dTExMixYtsrKymjRporo5pCGeCb3USy+9BCAhIYEJrPd4JvRGn3/++ciRI0NCQjIyMgICAlQ3h7TFM6HXcTqd8jSYmJjIBN4IeCb0Oh999NGYMWNCQ0MPHz7s5+enujmkOZ4JvYvD4XjllVcAvPjii0zgDYIh9C5Lly49ePBghw4dHnnkEdVtIZ0whF7EZrPNmjULwIwZMxo0aKC6OaQTBSHMz89/9tlnX3vtNf2r9nLvv/9+VlZWp06dRo8erbotpB8FAzOHDx/u1KlTx44dDx8+rHPV3szpdIaFhWVmZq5cuTI2NlZ1c0g/Cs6EVqsVQGBgoP5VezOj0XjXXXf5+Pi0b99edVtIV8pCaDab9a/ay/n7+9vt9kcffdRms6luC+lHQQgLCgrAEF7N7Nmz27Vrt2/fvjfffFN1W0g/7I56kYCAgMWLFxsMhunTp2dkZKhuDumE3VHvMmTIkLFjx164cGHixImczHSDYHfU67zxxhstW7bctGnThx9+qLot1bZ06dI333yzffv2TzzxxMqVK+UHLl2H0N2UKVMAzJs3T/+q64rly5cDMJvNJ06cUN2Wali8eLHRaHSfbefr6zto0KDZs2fv3LnT4XCobqCXUhDCCRMmAHjvvff0r7oOuffeewHExcWpbkhVyQQCmDVrVnp6enJycnR0tPu8n+bNm8fFxaWkpBw7dkx1Y72LghDGxcUB+Pjjj/Wvug45efKk7LF/+umnqttyfe+9957RaDQYDH//+9/dywsLC9evX5+QkNC5c2f3/le7du3i4+NXrFh
htVpVtdl7KAjh0KFDAaxdu1b/quuWd955B0BISIjFYlHdlmtJSUkxGAwGg2HBggXXeFlmZmZKSkpcXFxQUJArjT4+Pr169UpISEhLS7th+6sKQhgREQHg+++/17/qusXhcPTr169r14lTp3pvCBctWiQT+NZbb1XxR+x2+1X7qy1atJD91ePHj2vaZm+jIIR33/1jz57f/PKL976xvMfBg+cbNhQGg9i8WXVTrmbhwoUygW+//XbNjlBYWLhmzZr4+Pg2bdrcsP1VBSG85RYBiF9/1b/mOunllwUgwsLEhQuqm3K5N954A4DBYHjnnXfcy/fs2XP27NkaHPDQoUNvv/32iBEjGjdu7Eqjn59fVFTU3Llz7Xa7hxrudRSEsFEjAYiCAv1rrpNsNtGjhwBEYqLqpriZP3++TOA//vEP9/IdO3YEBQV169bt/PnzNT74lf3VW2+99c0336x1q72U3iG024XBIIxG4XTqXHMdtn27MJmEj4/YvVt1U4QQQsybN08m8N1333UvT09Pl+sUx8bGXrx40SN15ebmLl26tEGDBkaj8dy5cx45prfRO4R5eQIQQUE6V1vnPfusAESPHsJmU9wS+TS20Wj88MMP3cvT09ObNm3q2QS6REZGAli1apVnD+sl9A7h0aMCEKGhOldb5xUXi3btBCDmz1fZjLlz5wIwmUz/+te/3Mu/++47OSP/gQcesGnwOTFz5kwAf/3rXz1+ZG+gdwj37hWA6NZN52rrg7VrBSACAkRGhpoGJCcnywQuWbLEvXzr1q2aJlAI8f333wPo1KmTFgdXTu8QbtkiANG/v87V1hMPPywAMXiwgivqGTNmyAQuXbrUvXzr1q1yof6HHnpIowQKIWw2m5w/lJ2drVEVCukdwi++EIAYNkznauuJ3FwRHCwAcfnlmOamT58uE/jvf//bvTwtLU2jBK5evXrUqFFffvmlq2TEiBEAKnSD6we9H2WSj7bwMaaaad4cf/87AEybhrIynSqdPn36K6+8InuhDz/8sKs8LS0tJiamsLDw0UcfXbZsmY+Pjwcr3b9//6effvq///3PVRIVFQUgNTXVg7V4C51D/49/CED85S86V1uvJCaKAwd0qmvatGkATCbT8uXL3cs3b94sb6k/9thjWsz53LZtG4CwsDBXyf79+wHccsstHq9LOb1DOGeOAERCgs7V1jdpacLPT8iHDQcNEhrdP0tMTATQoEGDCk9ybNq0qVGjRgAmTJig0axru90u53m7nntyOp0333wzgF9++UWLGhVS0x3l+jK1Fx6OuXM1PH5iYuKrr77q6+u7YsWKkSNHusq/+eabmJiY4uLixx9/XD7BpEXtJpNp4MCBADZu3ChLDAbDkCFDUB97pJ7sx1fFnDl44QV49PLhBtWnDzIycPo0AHz2GRISEBgIsxmBgZe+CAqC2XypMCioqEmTI2azOSgoKDAwsLKrOCHElClTFixYIBMoHy+Wvv766/vuu6+0tHTixImLFi3SKIFSVFTUmjVrUlNTx48f7ypZvnx5amrq008/rV29+tMvDVu3IjoamZlo1QqRkfjkE7RooVvl9ZAQmDoVc+fCYIDVCosFFst1fuT227fv2RPt+mdAQIDZbA4MDAwMDHQl02w2//jjj6mpqX5+fqtWrfrjH//oev26detGjhxZWloaHx8vn2DS6FeTXCMxQghZV3R0NIBNmzY5HA6TyaRp7XrS9ZQke1BvvaVnnfVZZCTmzEF+PsaNw4QJyM+H1YqCAhQUXPrCYrn0dbNmJqczvKCgID8/32q1lpSUlJSUnJYnUzfBwcH+/v4rVqyokMD77ruvrKxs0qRJCxYs0DqBADp37hwSEnL69OkDBw7IB/NDQ0Pbt2+fmZm5a9euPn36aN0A3egaQvceFHnE1KmIjobRiKZN0bTpdV8eCex1/aOoqMhqtRb8RibTYrHMmDGjtLT09ttvd//JDh06yIdu33jjDR0SiN8uApctW5aamup
aHSM6OjozMzM1NdUbQ5iYiO++g92O8eMxcWI1flCf8Z/cXJGWJp56SmzcKJ59VkRGajWgdyNwOERMjHj9dVFaqsnxhw8fDqDC3DQhRE5Ojib1Ve6DDz4AcO+997pKVqxYASAqKkrnllzfxo3lU5lKSkTHjtV6Xlbz0VGbDXPn4rbbcPAgAERG4sAB5OcDQGoq9uzRuv56aMMGrF2LBQu0Gt+q7LZ4cHCwJvVdryXyIlCWDBkyxGg0fvfddxcuXNC5MVdRXIxXXoHcOGTnTkRFwWBAw4aIiMDevdf74Uu0DeH27ejZE3/7G0pKsHt3eeHUqdi7F8ePIzYW/ftjxQpNm1AP/fOfADBhAjQam5Bv/W+//VaTo1dHaGhohw4drFZrenq6LGnevHn37t1LS0vl3XyVioowbBhmzMDkyQBgMMC1YroQ1ft/o9HJubhYJCQIk0kAon178e23FV9QViYmTBCAMBhEQoK4URfaqrbz54W/vzAahXaLd7puix/QbWJO5f7yl78AmD17tqvkueeeA5CodqGBoiIRGSkA0bq1OHJECCHS0kRkpHA6xYULIixMnD5d9YNpEsK1a0WbNgIQPj5i0iRRVFTpK1NSRIMG5VO68/O1aEt9s2CBAMQ992hbi9wquMLiMUqsXLkSwJAhQ1wlX331FYA+ffooa5MrgbfeWp5AKSlJDBkiBg4U1Zxl7uEQ5uWJ+HhhMJQ/Bp6efv0f2bxZ3HSTAESnTuLgQc82px4KDxeAWLlS21ref/99ACNHjtS2mirIzc01Go3+/v4lJSWypLi42M/Pz2Qy5eXlKWiQ1SruvLP8yfQKT3Z+/LGo0WpUngzhihXlcWrYUCQnV6M9GRmia1cBiGbNxPr1HmxRfbN9uwBEixZajYu6HD9+HEBQUJA3rHEmb5asd3tnDBgwAMDnn3+ud1Py80XfvuUJzMy87FsvvCAAMX58DY7qmYGZ7OyTw4eLBx7AuXOIjsZPPyEhoRqXpu3bY9s2jByJvDzcc4+2UyLrNDkkM3483PZc0YS8LZ6fn7/bNZ6mjpwo4z5aq+axJqsVd9+NH37Abbdh0ya0a3fpW0lJmDMHJhOGDq3JkWv50eB0OlNSUpo0aTJw4N6gIJGSUvOHvp1OkZxc3pV9/HFRVlbLplWVw1GSlTU6I2PUkSPDsrOf1anW6isqEoGBAhA//6xHdfHx8QDmzJmjR2XXtG7dOgC9e/d2laSlpQHo3Lmzfo2wWESfPgIQbdqIrKzLvpWYKADRoIGo6a4htQrhzz///Ic//EGGedy4v545U5uDlfvvf0VAgDyx7zvjkSNeT0FBalbWaPl1Ts6bdruXLvn8wQcCEP366VTdxx9/DCA6Olqn+ip35UWgzWaTq9rotHWcxSJ69y5P4NGjl31L9kJ9fcVnn9X48DUM4cWLF5OTk+VOdCEhIZ988kmNW3ClXbvE0KFnfHz8Q0NDd2u51KbNds5i+cxutx4+HH38+BN5eR97bQKFEP36CUB88IFO1V05IqLQoEGDAKxevdpVMmzYMAAVltvQQm5u7uNDh9patxZhYcI9805n+UKUvr6idlenNQnh999/36VLFwAGg2Hs2LG1WWu5MufOnZNLTfr7+1dYWaiWHI5iq3X9iRMJv/zSKz3dmJ6O0tLDQoiysl9zc//188/dSkuzrnsQ/f3yix0QTZpc636Px/Xo0QO/Pceg1vbt23/66Sf3ktdffx3A+BoNhFTduXPnwsPDAYwbOFCcPHnpG06nmDSpPIFuHw01U70QFhcXJyQkyKdI2rdvr+n/nrKyMrmdqMFgSEhIqM0T3E7nxcLCtFOnXjp4cMCuXQ3S0yH/27274eHD0WfOzLNYyv+OR4+Os1hq3q/QznPPPde1651JSWk6Vwrlt8UrsXfvXgCtWrXSroqzZ892794dQKdOnU5WSOAzz5QncM2a2ldUjRCuXbv2tttuA+Dj45OQkHBBlw1KUlJS5G4Ew4YNy6/O7Xyn02m
17jtz5vUjR4bt3t3YFbxdu0wHDkScOJFYUJDqcFwQQly8eCYj408ZGfdlZPzp6NFHnM4Lv/46xWLxoq05y8rK5LzN7du361mvvC0eERGhZ6VVVFJS0qhRo8aNG4eHhyckJKxfv96zy73l5OR069bt6gl86ikBCD8/8cUXHqmrqiGcLCfIAb179963b59H6q6izZs333TTTfLPcfB6t/NPnjy5YsWK+Pj4Vq1azZw50JW9/fvbHTsWn5e3wm6/zh1ei2V1ejp27fK1WHS/DVUJOWukm+5LJiu+LV65kpISudWsv7+/a5y/efPmDz744D//+c/ar02ak5PTtWtXAL/73e9OnTrlKnc4HK9NmWIPCRENG4pvvqllLS5VCuGpU6diY2N9fHySk5OV3L3NyMiQf5RmzZqtv+J2fk5OzkcfffT444+3bdvW/e7L2LF3Hz06/vz5f1+8eOqqh63MiROJv+Wwtt19j7jnnnsAKNmWSNlt8cqVlJTcddddAIKDg3fs2HHt7bir1XuSzpw5I99sv//970+7TQF1OBzy+mjE737n9OiFWJVCeOzYMQCBgYEK508UFhbK5YZMJlNycnJRUZH86/fq1ct9pZMmTZpER0cnJyenp6c7a7FO9YkTL8gc5ud7oNNfG7/++qvJZPL19VWyJ9FLL70E4JlnntG/6qsqLi6W9+5btmxZYajGI9txnzlzRg46hoeHu++y6HA45FI3AQEB3175OELtVLU72q5dOwA7d+6sUF6bN3p1ORyOadOmyce63ZcYCQgIGDp06Ny5c9PT0z24At+JEwnekEMZg9GjRyupXcFt8coVFxfLBddatmz5c+VTFmq8Hffp06flGTU8PNz9I89utz/yyCPynabFYGRVQ3jl/InVq1eHh4cnJSV5vE3XNnnyZD8/P9cn3Pr167UbIjpxYupvOfzy+q/WgMPhkINhHv/0rSK9b4tXzpXAm2+++RoJrKCwsHD9+vWTJk2qcKly5XbcOTk5HTt2BHD77bfn5ua6jmC328eNGwegUaNGGzZs8PwvVvUQXjl/4ssvvwTQt29fLZolFRYWTpw48bvvvnMvlFfk83XaIsyZnf1Mejp27fJTkkM5Y6tt27YarbFbFbrdFr+GoqKiwYMHA2jduvUR96eHquMa/dUZM2bs2LHjoYceujKBY8eOlQncuHGjZ36ZK1Q1hFfOnygsLGzQoIGPj08Nrn2raPHixQAGDhzoKsnOzjaZTP7+/lrMEKiEMzv7aXlTsaBA79NRXFwcgJkzZ+pcrzt9botfQ1FRkZy5ceutt9Y4ge5sNtuWLVuSkpIiIiLcr2uCg4Pj4uLWrVsnX2a32+XeG40aNdq0aVPt661MNe4TyqkD7mfkfv36AVjjifuVV9W3b19cvuKQ3KBrzJgxGtVYCefx40+kp2P37gCrVZMOyVXl5ubKOwTuY+5nz559/vnnS7V+lsnNNW6Lz58//9NPP9XuU1gIUVRUJOeshYaGZmiwM2NhYeGaNWvi4+PbtGkjoxgbG/v111/b7fY///nPAMxm87Zt2zxer7tqhHDKlCkApk2b5iqRO2Y9+6wmTx7IDUDMZnNxcbEscV0gqZhI5Tx+/Mndu1uMHNlNowuDK8lT0PDhw90LZeewb9++7vevNOV0Olu2bAng0KFD7uUFBQVy2MNkMrmuzz27UXZ+fr78IA4NDc2s8PyeBg4ePChPfSNGjBgzZox8+/3www9a11uNEMp9qtznT2zevBlA165dNWhY+fSAJ5980lWydu1apRdIjuTk/5Odky1btmhXTVFR0VdfffXcc8/JWTIfXD5l+8cff5RjDLfccotuE2gefPBBAO+++657YV5e3syZMwcOHOg+AhkUFHT//fcvWrQoK6u2U3Dz8/MjIiIA3HbbbTokUDpx4oS80TV8+PCgoCAdEiiqFcKioiJfX1+TyWSxWGRJWVlZ48aNDQaDxz+Vy8rK5CyZdLcVMmJjY3H5mj86czqdTzz
xhBaX6e6j6n5uT+waDIZWrVqlX75MSG5urhwn9PPz+1CX7ULfe+89APfff/9Vv+t+z/aqI5CuN0zVWSwWVwJrn+dqCQsLA7Blyxbdtn+q3gTu/v374/InSuRkjmXLlnm2Wf/9738BdO/e3VVy7tw5eWfisol8unM4HPKWUaNGjTZv3lybQzmdzr17977++uvDhg2Te/1JJpMpIiIiMTHxs88+k5dD/v7+FZbitdlsCQkJ8vWTJk3SehLF0aNHUbXVLk6dOrVkyZK4uLhmzZq5/0bV6q9aLBa5wHabNm2OVnh+T3tPPvkkgFmzZulWY/VCKMdFJk2a5CqZN28egMcee8yzzZLzkt5+++0KFY0YMcKzFdWAw+GQw9aBgYE1uGR3n9161fOG+0RNm832zDPPuMJWoR++cOFC2RWMiYnJz9dwqGbz5s0NGzZs0aLFTTfdJO93X/ej0G63b9u27eWXX+7Xr5/79k/NmjWLi4tbvHhxZT9osVh69+4NoGPHjr9WZx1rT/nkk08ADB48WLcaqxfCLVu2AOjSpYurRK5BEhoa6sE2HT9u69nzuYCARu5vRzmVwUsmMboGr6t44X727FkZvHbuC5MAN99887XncEiuR0liYmIqdO22bt3asmXL/v3f79hRaNF7On/+/Pjx4+UsJfeTm9Fo7NWr1wsvvLBhw4ay661EcmV/1b2P4y4vL++OO+4AEBYWpmp6gMVikfMEi/R6drN6Ibx48aLsOLn+QA6HQ168eeQGjjR9ugDExImXHujeunWrfMt6dvCtNtyHsK86QOLZ2a1paWlynCYsLKzCmrxHj2bfcYcTEEFBYu1aD/xqLmvWrJGna39//xkzZpSVlbnudzdp0sT1GwUEBFT9N8rMzFy4cOGVG10IIfLy8mRKw8LC1F50yGZ847nnJK6t2k/Wx8TEAPjPf/7jKpE3lBctWuSRBjkcIjRUAML9RkBCwkc+Pv5/+9vfPFKFp9jtdtdA9o4dO4QQNpvNNb7i6+vreps2bNhQvk3T0tJq/NhbdnZ2z549ATRt2rTC+6OoSNx/vwCEySSSkz3wq506der++++Xje/fv/+VS3GXlJS4PmLcN2lq2bKlPLdXd6zu7NmzV39+T4WpU6cCSNBrV/dqh3D+/PkAHn30UVfJokWLAMTFxXmkQV99JQDRrt2lVdusVtGokWjZ0n7kyNlr/qgCNptNjtkGBgb2799f7uQu+fj43HnnnUlJSRs3bvTUvfXCwsJRo0bht0dJ3L8l16ozGgUgRo8WNV4Xxul0LlmyRPY8zWbzggULrntDKCcn58qrXNlflYMx1/31XU/QVnh+T5Wvv/4awB133KFPddUO4Z49eypcBB45cgRA8+bNPXL7Tn6iv/rqpZKUFAGIyMjaH1sTdrv9oYcecl3t1GZcviqcTueLL75oMBgMBmNCQlaFt/eXX5Yvi9izp6jBo60ZGRny5geA4cOH12BcRPZXhw8f7v64rXt/9cofqewJWoXk08xGo1Gfx8eqHUKn0ykvAg8fPuwqlFN+ar8y2rlzws9P+PhctqaOXO5R6fzh67h48WJeXt6yZcv0WaNRCLFy5cq77koFxJ13Vtx6ZN8+0batAES1Nmuw2RyzZ8+WyQkJCVm1alUtW1hZf1WORS1ZskTO/nU9v1fhCVrl5GzV2v8dqqImq6098MADABYuXOgqeeyxxwDMmzevlq2ZN08A4k9/ulTy448CEGaz+G3uGpXbt698151bbhE7dlz2rXPnREyMuOLZz0rt2SN69RJ9+vxJLp/n/hiBR5w8efLDDz8cM2aM/PiW5M3DkJAQXPH8njeYNWuWv49Pii7DEDUJYUpKCoDY2FhXybJlywDcU+u9gjp3FsBlS8jJdeWeeqqWB66fcnPF4MECEP7+4mrDjddXXCymTCnfwe4Pfziuw7TYzMzMBQsWREdHy7Pu4MGDe/To4W0JFEJYd+xwNm4swsJ0qKsmIczIyKhwEXjmzBmDwRA
QEFCbEYiffhJGowgJEa7hw7Iy0aKFAISWKwDXbTZb+ep7gJg0qXrbPG7eLMLCqrSDnRbkFNljx44VFhbqWnEV2WzCbBZATa6tq6mGK3DLOcS7du1ylciefS1nch0/ftl2osuXl48x0LW5tnmMiRFVGQ+yWC7tYNe9e8XeLJUbMUIA1d1ssAZquCuTHENz3xYnOjrax8fnwIEDNTugFBqKqKhL/3TtC03XFh+PDRsQHIy1axERgYMHr/XiL75A16547z34+2PGDOzcid699Wpo3SLfizrs/VSz7C5fvhzA3Xff7So5c+aMa7kOj8jKEkajaNhQeNmal94rO1v07CkA0bTp1RfFPHVKjBpV3ncdMIBbsl7P/v3lA18ar2ZWwxDm5OTU/iLw2pKSBCDGjdPo8PVTYaEYOVIAonFjUWGwY/Hi8luITZuK99/X+n1VLzidIiREAJrMynVTw+5ocHBwly5dSkpKfvjhB4+emMs5HFiyBGBftJoaN8aqVZg+HQsX4uBB+Pvj5EkAiIzEsWMoKMDw4fjxR0yYALdbd1QJgwGDBwOa90hrvlPvlfunetDZs2jfHmFhGDBAi8PXZwYDXn4ZDz8MAOHhl7Y9fvpprFuHL75A69YKW1fX6HJZWPMQyi2LV69evW/fvqNHj1osFqfT6almhYRg40bs3s0P7Frp0wcZGTh9GgB8fHD33aobVOdERwPApk1wOLSrxOf6L6nEoEGD2rZte/jwYbmLndS4cWOz2Rz4m6CgILPZbDabQ0Imm0ytAwNhNsNsRlAQ5NeBgWjY8NIxt25FdDQyM9GqFSIj8ckncJsRTdUmBKZOxdy5/CyrqdBQdOiAjAzs2oU+fTSqpOYhbNKkSVZW1pAhQ3JzcwsKCgoKCiwWS1FRUVFR0Ul5IeKmXbsJWVlXP06DBuVpbNoUjz9e3oN6660at4suExmJOXOQn6+6HXVXVBQyMpCa6o0hlDZs2OD+z4KCAqvVWuDGYrFYrdbS0uE0WVQAAALFSURBVFa5uSgogNWKggLk58NqLf+6rAy5ucjNBYBHH72sB0UeMXVqea+KaiIqCikpSE3FCy9oVINBCKHRoauorKw8jVYrzp/HmjWIjcXnn2PfPqxciRYt1LaObnh5eVi6FHfdhS5dNKqh5gMznuLnh+BgdOiAXr0QEAAAkZE4cIA9KPIOzZph8mTtEghvCOFVTZ2KvXtVN4JISkzEoEHo1w+LFwPA1q24777yb3XtWn4pVQu1vSb0rP790b8/AERFQXU3mQgAsGkTfvgBmzahtBTh4YiJ8XgN3hVCIq+zcyeiomAwoGFDRERg714EBWHbNtxzDwAcP177GhhComsyGC71yoSA3Ertzjvx+ecA0LVr7Wvw0mtCIm/Rty9SUyEESkuxcyduv93jNfBMSHRNcqAiOhp2OxITcfPNyMjwbA3q7xMS3eDYHSVSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUowhJFKMISRSjCEkUuz/Ac+vw8Xk3Z2xAAAAAElFTkSuQmCC\n", - 
"text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3da0BU1doH8GdmGBARBMwLmHm/kamAdzRCMVDR11KSPAffMqPs6HjJxONt7I7msUE9JpkpZlpYaZMKhcc0VLwhmngD8ZYGisp9hIGZ5/2wcDcOAsOw96w5vM/vkzMMe63B+c/ee+21nyVDRCCE8CPn3QFC/r+jEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCBsvoxHefBNCQyEoCDIyePeG1IhC2Hjt3g1lZZCUBCtXwty5vHtDakQhbLzS0+HZZwEA/P3h0iXevSE1ohA2XjIZIFb9W6Hg2hVSGwph49WvHxw8CACQng49evDuDamRDIUvS9LIIMJbb8HVq1BZCWvXgqsr/PYb9O0LPXvy7hl5BO0JG6+KCnjlFUhMhH37oEcPWLUKJk+GhATe3SLmKISNV7duMGgQZGdXPRw8GAAgNZVjj8hjUQjrlpQEs2cDAFy7BqGhvHtjOT8/AIAjR6oeDhkCAJCaCgYDty6Rx6EQNl5muz5vb2jfHoqK4MIFjp0i1Tnw7sB/h1274OJFePAAnJ15d8VybNcn7AnZM9evw5Ej0KsXr06R6mhPaJHx4yEpCeLjefejXvr1AycnyMiAwsKqZ+i00C5RCP87WTIv1MkJ/PzAaIRjx6qeqb5vJHaAQvjf48QJGDq06ozOwnmhZqnr0weaNYOsLMjLk767xFKWnhNmZmYeOHBAeFhYWGg0GoWHJSUlFRUVwsMHDx6UlZUJD8vLy3U6nfCwsrKyuLi4Y8eOK1ascHd3t7rrNhMaWjUo2qEDJCXx6EFODrzzDmzbBoiwfDls3mzpvFCz408HB+jXDw4cgNRUGDdO+n4Ty6BltmzZIm67TzzxRLNmzS5evGhhBzhKTEQvLywrw+JiDAmxbdsVFajRoJsbAqCzM6rV+OABIuK77+IXX1S9pmPHGn89NxcB0M0NKyurnlm4EAFwwQKJ+03qwdI9Yffu3aOiooSHbm5uCpM5wc2aNVMqlcJDZ2fnJk2aCA8dHR1dXFyEhw4ODq6urmvWrNFqtTNmzEhOTrY+yrbi5QVffQUREbZt9cABmDmz6pQvLAxWr4aOHat+1K8ffPMNvPZaHfNCW7eGDh3g2jU4dw569wao2jdiaqpM8t4Ti/FK/71795544gkA+OGHH3j1wUKJifjeexgcjEVFttoT3ryJkZEIgADYpQvu2WP+gqwsfPNNDAnBESPwwoXaNjV5MgLgZ5+xR/q7d2f17ftUixZ6vV6arpN64zYw4+np+e677wLArFmzSktLeXXDQkoljBoFWi0AwL/+BWPHwr59IMnU94oKiI2Fnj3hq6+gaVN
QqyEjA0aP/usFN2/ClCnQqxfMmQNJSVXzQmt2feTIjUOHrjh/vuqNtGiR9ODBjXv3Tp8+LUHviVU4fgEYDIb+/fsDwNKlSzl2o06Jifjxx1hUhM8/j6Gh2L171S6qTx/84gvU6URraM+ePXsnTKja+qRJ+Mcfj/xYp0O1Gp2dq07zdu2yZJtpaWkA0KVLF+GZV199FQA0Go1o/SYNwzOEiHj48GGZTObk5JSZmcm3J4/14AG+8AJ++il+/DEi4oIFGBqKt29jTAw++WRVWJo3R5UKr11rUEPZ2dnjxo0DAEeFoiA0FPfvN3+FVosdOyIAymQYHo43bli45crKymbNmgFAbm4ue+bzzz8HgEmTJjWox0Q8nEOIiFOmTAGAsWPH8u6IOYMBX3oJAbB7978GFwXl5ZiQgAEBVVGUyzEsDJOT692KTqdTq9VsHMvFxUWtVpeXlz/yiqwsHD36r51vSkp9mxg+fDgA7Hq458zIyACAtm3b1ruvRBr8Q5ibm8uuFu7evZt3Xx7x9ttVx32nT9f2sqNHcfJkdHSsiomvL27aZHzALiTURavVduzYEQBkMll4ePgNs/1baSmq1ejkhADo7o4azWO+DCywePFiAIiOjmYPjUajp6cnAJg3RzjhH0JEXLVqFQB07tzZws+uDaxfjwCoVOIvv1j0+tzcv45R+/ff4+7urlKprl+/XtPrs7KyRj8cbunbt29Ktf1b5Y4dVZuTy3HaNLxzx+r3smfPHgAYNmyY8MyoUaMA4JtvvrF6m0REdhHCioqK3r17A8AHH3zAuy+IiD/9hAoFymQYH1+/Xywrwy1bMDxcxdKlVConTZp0+PBh09eUlpaq1WonJycAcHd312g0lY/u3y5evBgSErI7MBAB0M8Pjxxp4NvJz8+Xy+VNmjQRDnTfe+89AJg1a1YDt9z4pKenX7ly5e7du7Zs1C5CiIgpKSkymczZ2fnq1at8e3LiBLq4IAB++KH1Gzl58mRkZKQwgcHX1zcuLk6n0xmNRl9fXwCQy+XTpk3Ly8sz/a3CwsI5c+aw33rKy6t80yY0GBr6fhAR0cfHBwCOHj3KHu7btw8A+vfvL8rGGyI7O3vmzJmrV6/+5ptvjEYjx578+eefkZGRMpmsT58+7u7uMTExNjsus5cQImJERAQATJw4kWMfsrOxdWsEwGnTRNhaTk5OTExM27ZtWRRbtWoVHR29fPlyPz+/I4/u34xGY0JCQrt27Vg+IyMjzfLZQNOmTQOAVatWsYclJSUODg5KpbK0tFTEVupFp9MtXbqUjUg5OjoCwKBBg440eLdvBb1ev2LFCldXVwBo2rTp008/zf6/OnfuvGPHDht8NdhRCG/evMkG0xMTE7l04O7dqmuAo0ZhRYVomy0rK9u0aZMfKzYBoFQqIyIi7t27J7wgPT196NCh7Kf9+/c/duyYaG0/tHHjRgAIDw8XnmE75AMHDojeliVMR6T+/ve/f/7550899RT7C4SFhV2+fNlmPdm/f7+QurCwsCtXriBicnJynz592JMDBgw4dOiQpH2woxAiYkxMDAB07dq1rKzMxk3rdOVDhiAA+vtjcbEkTQjHqB4eHuyCQX5+vkqlYrNw27RpExcXZxDp+NPM+fPnAcDb27uwsDArK2vp0qVz58599dVXT9c+8iuBzMzMx45IlZaWxsTEsN2RUqlUqVT5+fmS9uTmzZuRkZGsJ127dt27d6/pTw0GQ3x8fJs2bYSxa5ZPKdhXCMvLy3v06AEAn3zyiS3bNRgMEyZMCAiI69LFmJMjbVvfffcdAAQEBMTHx7dq1QoAHBwcVCpVQUGBdI0ajcZ27doNGzZs+PDhzZs3Zx+s4OBgrVZrszMx0xEpDw+P6iNSiHjr1q2oqCj2reTp6RkTE2N+1VQMer1eo9Gww66mTZuq1eqavvRLSkpiYmLYKx0dHSX6b7KvECIiu6nC1dX15s2bNmt01qxZ7JNx4UK21G0lJCSwIx8PDw8ACAo
KysjIkLpRRDQYDGzCmqen56RJk5wfVsvp3r372rVriyXa+z+k1Wrbt2/Pwh8ZGXn79u1aXpyWlhYUFMS6161bt4SEBBF7kpyc3OPhbNuwsLBaLiMJbt68GRUVJZfLAaBFixYajaZCxNMVOwwhIr7wwgsA8Le//c02zbGrlI6Ojvv27bNBc2vWrAGAt95666uvvvr2229t0CKzdOlS9sXPxkgLCgo0Gk2HDh3Yx9HV1TUqKur8+fOit3v2LE6Z8k/h/Or48eMW/mJycnKvhwWphg8ffurUqQb25PLly+Hh4cJXz88//1zTK7///vvU1FSzJ0+ePBkYGCj8uohfDfYYwuvXr7P7D3/99Vep29JqtQqFQiaTbdmyReq2GDZ/5d1337VNcwwbmFEoFLsenfZtMBi0Wm1wcLBMJmMDsyIeo5aUoFqNjo7Yvn2Wl5f3Y48/a6fX6+Pi4thBu1wuDw8Pv2bVJN265waayM/Pb9GiBdtPZmebHxlptdouXbqwKI4YMUKUk2p7DCEisrucnn76aUlvezt27FjTpk0BYPny5dK1YoZdLYiLi7NZi/v372fXANauXVvTa06fPj116lThVuwXXri+bh2WlFjZotGI8fHYpg0CoEKBM2bg/fvW/z/m5+dHR0ezvjVt2jQ6OrqoqMjyX9dqtWyHz46EhYnsNSkpKVm8eDH7YDRp0iQ6OtrsPJB9NbRs2VK4nvTnn39a88YestMQlpWVde3aFQBiY2MlauLy5cvsKzYqKkqiJh4rLCwMTKZTS+3s2bNsJGbhwoV1vjgvL++jjz4aMOBlubxq3mxUVB33DFeXnv7XvPb+/VGsCy7Xr19nF9PZMG9cXFyd+9XMzEw2QY+NxNbrSgM7D2RDRC1atKg+RHT//v3o6Gg2zuTi4hIdHW31ebWdhhARf/zxRwBwc3Nr4NfMY+Xl5bGQjxkzRtyT7DqxWyiFySuSunnzJpsAEBERYfkRpsGAWi0GB/91g0hwMGq1WOcG8vNRpUKFAgGwTRuMj6/7V+rr2LFjAQEBLFc+Pj57qtccQETLRmItkZaW9txzz9VyHpiZmRkeHs6+Gtq2bWvJV0N19htCRBwzZgwATJ06VdzN6nS6wYMHA0C/fv1KrD7kshZLhQ1m5xUWFrIZuYGBgdZdd01Px6gobNq0Ko3duqFG8/hjVHb82aoVAqCDA6pUWFjY0P7XhM0u6tSpE8tGcHDw2bNnTV+g1WrZpX9LRmItkZycLFzQHz58eHp6utkLUlNTh7DqkgB+fn71Hcuw6/UJs7Oze/XqVV5e/u6777Zu3dr0R/n5+aYP2VG78LCoqMhgsuyJWUXGM2fOZGZmdurUKTU1lR2R2gwiOjs7sxqQzlKW1K+oqBg9evS+fft8fHwOHTrELodYJy8PvvwS1q2DGzcAANzc4JVXYM4ceDiwCiUlMHw4nDgBADB8OKxZAz4+DX4DddHr9Z999plarS4sLHRwcJg6dep7771XUFCgUql++eUXAPD391+7du2gQYNEaa6iomLTpk1Lliy5c+eOXC6fMGHCypUrhVk+AGA0Grdt27Zw4cI//vgDAMaPH79q1aqOQmGu2jXwS0Jq4eHh7AxYRM7OzkqlksuNPPfv3weA5s2bS9qK0Whkt0p7eXlZN5xYXWXlI8eor776SBnI8ePR21uS48/a3b59+80333RwcAAAFxcXdgrXqlWrjRs3SjH3yPQ88LFDRDqdLiYmxs3NTS6Xb9iwwcJTALveE+r1+p49e165cmXAgAF9+/Y1/ZHZV3vz5s3ZtVSm9oqMO3fu3L59u5+f3/HjxxW2Xcz9woULPj4+3bt3v3jxonStLFq06KOPPnJ1dT148CCbIyqikyfh3/+GUaNg+XKYPh0iImDiRNiyBVxcwKSupU1dunRpwYIF+/fvr6iomDhx4qpVq1ghP4ncuHFj8eLFW7duRURvb2+1Wv3aa6+ZfpByc3OffPJJg8Gg1+tNP3g1Ev3bQkQrV64
EgJ49e4o7dqLT6diY9fr160XcrCV+/fVXAHj22Wela2LDhg0AoFQqk5KSpGuFQxnIWt29excAPD09bdbib7/9xsbYAMDPz8/syge7oGLhzVD2uxZFfn7+Rx99BACffvopO94Qi7Oz8yeffAIACxcuZP95NpObmwsAbFqwFBITE6dPnw4Aq1evDgkJkagVxrQMJHdsR2S6NIPUhg0bduzYsYSEhI4dOzo4OJgNLrD+GCxbj9V+Q6hWq+/fvx8aGirFh2nixImhoaH3799fsmSJ6BuvBQuh2SCTWE6dOvXSSy9VVlaq1eo333xTiibMvP46bNkCMjuo5l2vD71Y2N0V586dS0hIkD36V2C7jcrKSku2Y6chzM7OjouLUygUK1askKiJ1atXOzk5ff7558ePH5eoiepu374N0oTw2rVrY8aMKSkpmTx5slqtFn371WVlwaZNYDJAyFP1D/3XX38dERFhg0UWnJ2d2dx0U41hTzh37ly9Xj9t2rRnnnmGPfPjjz9mZmY2ZJs6nW7JkiWrV69mD7t27apSqYxG44wZM2x2GMNCKPrhaGFh4bhx43Jzc4OCgjZt2iSTft/ElqmaNQsGDoTERKlbq1v1D/2RI0e+/fbbrKwsO+lPbaQ5a20QtgZbs2bNch7e25eXl+fu7q5UKi9dumT1ZtnlI1dX11u3brFnioqKWO2JL7/8UoR+W4DNovrpp59E3GZ5efmIESMAoFevXlLfCGuKFUMV9a1Yj+0DFQqF8MzMmTMBYPXq1Vz6w75nLZzsZXd7QqPROG/ePAD45z//Kewxli1bVlBQMHLkyG7dulm95ZEjR44fP764uDg6Opo94+rqunz5cgBYsGBBQUFBg/teN9H3hIj42muv/ec///H29t67d68t13vMzQUAkOb0tt7YrTDs2qDwDFh8ViY6dnj837on3Lx5MwA8+eSTQg2iixcvKpVKhUJhNjvJCuwmKZlMZjqxiE0OtE39P29vbwD4w2yRiQZ45513AMDNzc32hSratkUAy+vxS05IHXvIvsptXKJBwM4SLZwpYV8h1Ol0bCoQuxLKsNsOpk+fLkoTy5YtA4Cnn35auPaYkZHBQi7159hoNCqVSplMJkoFnaKiIvZelErlLxaWKBaP0YhKJcpkaPNiQDVit2sJ9zqw452YmBgunWFTW6vfjvhY9hVCdhuhr6+vMOdo//79AODq6pojUu0X4SYp07MFlUoFAEOHDpW04EpeXh407ILyrVu3EhISVCpVQECAUqls3bq1QqGw/ZQDRMzLQwC04bXxurE7AIUDqIULFwK/ctLsvMnCIQw7CmFubi4rtvXbb7+xZwwGg7+/v+jfZ8JNUkKwCwsLvby8AODrr78WsSEzZ8+eBQAfHx/Lf+XBgweHDh365JNPxo8fb3ZhQ6lUurm5AcA777wjXZ9rkpGBANizp+1brhH78AiTOVk5DxtXMBD07NkTACwsF2JHIZw6dSoATJgwQXiGFWUwPT8US/WbpL788ksAaNOmTaFkN+Gwa1ZBQUG1vywnJ0er1arV6uDgYNNVxwGgefPmwcHBarU6OTlZp9OlpaUpFApHR8cL9b3xtsH27UMArOut2BQblBLGh9lRFa+lL1l1HAtHMewlhGfOnGGfJ2GhQp1Ox26927Ztm+jNXb58uUmTJjKZTKjnYzQa2U2G0u1Ytm7dCgAvv/yy2fOVlZUZGRlxcXGRkZE+j94FpFAofHx8IiMj4+LiMjIyqh8tR0VFAUBwcLBEfa7J1q0IgBERNm62NmzStlC5/IMPPgCARYsWcekMu9+g+p2Hj2UvIXz++ecBYO7cucIzbNrHgAEDJDpPY+cMfn5+wnga27EolUopio7hw/nos2fPRsSioqLk5GS1Wh0WFmZ2XaFZs2YBAQHR0dFarda0UPdj3bt3j334vv/+eyn6XJN//QsBcPZsW7ZZB3a4Lkyk/vjjjwFgwYIFXDrDTqNOnjxpyYvtIoS7d+8GAA8PD2E
1nFu3brGCa9XXDBNLaWkpu5fCtObSG2+8AQAjRowQvTmj0ciOt/v169ezZ0+zSS1du3adMmXK+vXrf//99/reCLdu3ToAaNeunS2rBMyfjwBVCxjbCXb5R5iJweboz5s3j0tnBgwYAAAWrmjAP4SVlZWsdoDpKuqvvPIKPLp2ghRYHV5PT0/hGEbYsXz33XcN335JSUlKSopGowkPD2ebZbWc2bCKv7+/SqVKSEhoYP0Fg8HA7qlZsmRJw/tsoTlzPhw4cPz27ebFOTliF7eEYr6snOycOXO4dIZVuzBbFa8m/EP473//GwA6d+4sXOE5ffq0XC53dHTMysqSunV2i4bpRcjPPvusITuWq1evfv311zNmzPDz8zO7A4vdkf3GG28cPXpU3FKOR44ckclkTk5ODZnWVy/s72a2fgNfrJaEsGJEbGwsAKhUKi6dGTZsmOk4f+04h7CoqIgdyu/cuVN4Mjg42GYHEufPn1cqlXK5XKgMbTAY2LHE4sWLLdlCRUXFyZMnNRpNZGSkUNDadFglKioqPj4+IyODLfTT8ErSj8WOHUJsdYOtpO/FOqwmr/DFzb7c33rrLS6dYdOwLKz4xDmE8+fPB4DAwEDhGa1Wy84P6xyTEMvbb78NAIMHDxZGgI4fP852xTXtWHJzc4WrCGb1mtzc3NhVBK1Wa1Y0ln3dSFHBERFv377NBnjEnR1eE/ZehBMwe9C9e3cAEK7WrF+/nh13cOkMm1KfnJxsyYt5hvDq1avsOsGJEyfYMxUVFez80JaT34uKitg5/ebNm4UnzXYs7CpCfHx8VFSUj4+P2bBKp06darmKIGxBoVDI5XLpypxqNBp2YC/1ErPCe5G0Pnp9sU+OsLoOK/MxTZTVXuuPHa5bWGGEZwjZ0rxTpkwRnmGLpXTv3t3G/7vsCt7rr78uPCPsWCIiIkaMGMFmYwhcXV2Dg4OXLl26d+9eC+8eysnJAYBWrVpJ9iawsrKSHSW+//770rWCiKw+QMuWLSVtpb5YkdUzZ86wh5s2bQKAV155hUtn2BqMNdUmNsMthCdOnGCL1N8wmYd/7dq1iIiIH3/80cadMRqN1a+FzJkzhxWQZ7y8vMLDwzUaTUpKihWL5p0+fRoAnnnmGZG6/HgpKSnsryppcWHbvJf6YnXlhNPULVu2AEBkZCSXzowdOxYAtFqtJS8Ws4BSvbRp08bf37+8vJxNi2Hat2+/fft223dGJpMJC1YziJiamlpYWDhs2LC5c+cOHjy4gTUppC7xxAwdOjQiImL79u3z5s1jq5FKQdJKOVYzq3AxePDguLg4YQUl6ej1+oMHD44cObKWztSO2029Dg4OmZmZZ8+eTbSH6gjVJCQkHD16tHXr1nv27Kk+edoKNvvgrlq1ys3N7fvvv5fuDytRkY4GMqso0aVLl6ioqOHDh0va6P79+319fUeNGnXmzJlaOlM7biFs06bNokWLAGDWrFnl5eW8uvFYer2e9e3DDz80Oxu0ms0+uDb4wwp79S+++OLcuXNSNGEFGxdcu3bt2osvvjhixIjz589369atrKzM6s7wLG8xe/bsnj17ZmVlseuq9kOj0WRnZ/fu3ZuNkYpCujpr1c2ePbtHjx7S/WHZeyksLHzjjTeGDh3KKhpzx44AHzx4IHVDer0+Njb2mWee2blzJ1tyND09feDAgcILLl++fOzYMbOq8LWR+AS1Dvv27QOAZs2a2XKF+trduXOHjcfUspyyFSZPngwAX331lYjbrAW7bUrcP+yNGze2b9+uUqnYQrYbN258+eWXAcDR0dFm76sWn3/+eadOnby8vOLj46W7OVur1QoLQoWFhd14tLxHaWnpokWL2NSo6rfL1IT/tLUJEybUq8dSe+uttwBgzJgx4m72tdeuBgae/vXX6+JuthYN/8NWVFQI91iZLTC0cuXKkpISo9HIbnaRyWRqtVq8vlujoKCAXaUAgMGDBx85ckTc7WdlZbHbUAGgR48e1UuK1Hd
JYAH/EN64cYPdMLF//37effmrqJRwzVcsTz+NAPj77+JutTbW/WELCgrYPVbVJwOxq6Pz5s175513TK+OxsbGslOgqVOn8r18bzAY4uPjWZEEqGHReSuwJUfZDdbu7u4ajcZsxsWlS5dCWSVWAF9fXwvnbQv4hxAR33//fZB+hXpLsKJSUkw4bNECAbDB61XWj4V/2Ozs7NonA2k0mpMnT7Jlhvr37y+TyZYtW2Z6yLdz505W4uX555+XrjSBhUpLS2NiYtiImqOjo0qlakg51tr3byUlJQ1fEtguQlheXs4K45jezWR7QlEpyw8kLKTXo1yODg4owZp5tanpD1tcXJySkhITExMWFubp6WmaOhcXl4CAAHaPlXCHl6mNGzeyIZD//d//Nc32sWPH2KIovXv3toczfNNF5z09PasvOl+nOvdvwpLAcrk8MjLyzp071nXVLkKIiElJSSDZCvWWkKioFHPzJgKgl5foG66b8IdNT083rdRmGjwvL6+wsLCYmBgLJwP9/PPPbD8zYsQI00nq2dnZbBZ1hw4dJKpOUF+mi85369at+qLzj1Xn/u3ChQvC1fl+/fodPXq0IZ20lxDiw0NBXpP9WFGpdu3a6XQ60Td+8iQCoK+v6Bu2SFBQkNlBpqOj46BBg+bMmbNjxw7r7oQ4c+YMW0GgV69epiOEd+7cYStUnw4NRcvuprOBOhedNzNjxgwAUCgU06dPN7ubJz8/X6VSsWOBFi1aaDSahi8JbEchFIovWXgrpIiEolLbt2+XYvu7dyMAhoZKse063Llzp3379o6Ojh4eHuPHj1+xYkVKSooot1lcvXqVFfbz9vY2/VjrdLrfZs1CAHRywm+/bXhDotDr9XFxceyAWS6Xh4eHC/fgV5eTkxMSEpKWlmb6pNFojI+PZ1d6HRwcoqKihGosDWRHIUREtlqgr6+vFWe3DcHG2QcOHCjR9aWNGxEAbb+P1+l0rM6Cv79/cXGx6Nu/f/9+YGAgO5FOTEz86wdGY1UVGpkMeV+6MMUWnWfjnI9ddL4mp06dYn9JAHj22WeFezVEYV8hFBayXrdunc0atUFRqQ8/RAC0ceEvg8Hw4osvsjM0seqXV1dWVsbmITzmkr1Gg3I5AuC0aV9jVdAAAAvtSURBVCjZXZRWuH79emRkJDtE9/b2jouLq+VL//79+yqVig3wSDQTwL5CiIhs7r+Hh8djh+akwOamvfTSS9I1oVIhAH76qXQtPMasWbMAoHnz5r9LfHXSYDCw1VfGdelSaTas9cMP6OyMAPg//4NiV3BuoKNHjwYEBLCdm4+PT/V6OeyqY8uWLQFAqVSqVCqJrr7YXQjx4SJ+tilMYJuiUpMmIQBKUMS4RqzGqaOj43/+8x/btLhx3bqKp55CAHz99Uf2e6mp2LIlAmD//ra+TloXo9HIFp1nUQwODhZqZp84cUKYDhoUFCT65A1T9hjCzMxMJycnuVxuYdnGhrBNUanAQARAm80ISkhIkMvlMpnM1lM6d+3Cpk0RAEeORNOdxuXL2LUrAmDHjnjxok27ZIHy8nKNRsMmDDs4OEyZMuX1119nc6/btm0bHx8vdQfsMYT4sABUv379qo//fvDBB8HBwQMHDvQ30bNnz04mvL29PUywap8bN2402xRbGcbT01PqolJ5efj77yjByMhjHDt2jE1e4bM037Fj2KoVAmDv3mi6DOPduzhkSNVKTnZz6cLU3bt32bUHuVwul8vZ8acUo1nV2WkIi4uL2WWo6slhwwBW2LBhg+l2Kioq2MIPa9aske6NJCailxeWlWFxMdqgHOHly5fZOUxUVJTkjdXkyhXs3h0BsG1bNB1FLCnBsWNRoUCbly+xXEZGRmxs7Jo1ay5fvmyzRmX4cHlhe7N9+/bJkye3atXq0qVLpqs1nDt3Licnx9XV1bS0btOmTdn8BsbJyYntDRilUimUvhasWbNGpVJ179797NmzZjNIRJSUBIsWwfTpEBEBEydCUpJ
E7QAA3L17d8iQIWyy/65du8xKD9vU/fswfjykpICHB+zcCYGBVc8bDJCaCo9WEiF2uidkgoKCAGDmzJmibzk/P5/VpbewFI/VEhPxvfcwOBiLijAkBKW7/KnT6dhUlX79+tlyUYoalZVVjUc5OqKUqz42AnYdQukWsmZD6s8995y4m60uMRE//hj/9S/cuhVDQnDZMvT3x/h4FPd2kcrKyvHjxwNAx44dRZ99bj2jEaOjH7lkbzDgG29gSAg+9xxatnbf/wf2ezjKzJ49OzY21tfXd/ny5ezqalFRkWnpjtLSUr1eLzwsKyszLXCg1+tLS0uFhwaDoaioqKSkZMeOHQaD4cSJE35+ftJ1HhF+/hlOn4Z//AMmTgS5HP74A1hNlrZtYfp0iIqCli1FaGjmzJlr165t0aLF4cOH2RRqO/LppzBvHgQFQVIS7N0LP/wAmzdDWhr885/wyy+8O2cfeH8L1KGgoKB169amZRFFMXr06Pnz50va86IiDAjAZcuq1g9bsABDQ7GsDOPj8ZlnEKDqSC08HFMbtrRRTEwMADRp0uTQoUOi9Fx8e/Ygu9li2TIURtqeeopjj+yKve8JAeDu3bsbNmxgN/sBgNmQjIuLi6Ojo/CwSZMmpveDmw3JKBQKts5779692RmURCoqYMwYSE6GXr3g9GlQKMxfcOgQrF4NO3cCq0zp7w8qFbz8MtR3hOjbb79lw8UJCQmsnoVde+89aNsWXnsNAKBTJ7hyhXeH7APvb4FGyGjEV15BAGzZEmufh5OdjW+/je7uVTvGJ5/EmBiD5fP1Dh48yMaEP7XxjDir7dmDrCT2qVM4ahTv3tgLCqH4li5FAGzaFC281fPBA4yPx169EAAHDbrp5OQUHh5e532i58+f9/DwAH7rYFrDaMQ338SQEBwxAh8un0QohCJjdy0pFLhrV/1+0WjEX37B119fKxSrDAwM/O677x47wf/PP/9s3749AIwdO9bGt30R0VEIxbR/Pzo6IgCuXWv9RrKzs6Ojo4XSL15eXmq12vQYtbS0lM0tHjBgQKmd3ZpArEAhFM3Zs9i8OQLgwoUibK24uDguLk4oyuDk5BQZGXnmzJnKyspx48YBQOfOnRu42D2xExRCcdy8ie3aIQBGRKCI93wajcakpKTRo0cLx6hsSm2rVq1sObmRSOq/4BKF/SsoKH7uuWZnzsjYFWmTKyaiyc7O3rBhw/r16xFRqVT+9NNPgwcPFr8ZwgOFsKEqKirGjBlz756fQvHhzz8rPDwkbGvv3r1jxowZOnRoSkqKhM0Q2+K5KlMjgIhTp05NTk7Ozf3qu+/+lDSBAFBcXAwAQpl30jhQCBtk8eLFW7dudXV13b1791NPiTy3rjrbLPdLbIxCaL0vvvjio48+UiqVO3bsYAumS82WixwSm6EQWikxMXH69OkAEBsbGxISYptGKYSNEoXQGqdOnXrppZcqKyvVajWLom3Q4WijRCGst2vXro0ZM6akpGTy5MmsdLfN0J6wUaIQ1k9hYeG4ceNyc3ODgoI2bdpkttCK1GhP2ChRCOtnyZIlZ8+e7dOnz65duxyluCpfM0S8c+eOTCZjq5qQRoNCWD+urq5yuXz+/Pns5mBbunfvXkVFhbu7u2ldOdIIUAjrp3nz5kajcdmyZeXl5TZump0Q0rFo40MhrJ/Zs2f36NEjKysrNjbWxk3TCWFjRSGsH0dHxzVr1gDA+++/f+vWLVs2TUOjjRWFsN6Cg4NffPHFkpIStmCGzdCesLGiEFpDo9G4uLhs27bt119/tVmjtCdsrCiE1mjXrl10dDQAzJw5s6KiwjaNUggbKwqhlebPn9+tW7dz586tW7fONi3S4WhjRSG0kpOT0+rVqwFg6dKlOTk5NmiRhZD2hI0PhdB6ISEhYWFhRUVFCxcutEFzdJ2wsaLyFg2SnZ3dq1ev8vLygwcPDhs2TLqGjEajk5OTwWAoLy+XbjVFwgXtCRukc+fObL37WbNmma4
VJbq8vLzKysonnniCEtj4UAgbauHChR06dEhPT9+wYYN0rdDQaCNGIWwoZ2fnlStXAsCiRYvu3r0rUSs0NNqIUQhFMGHChNDQ0Pv37y9evFiiJmhP2IhRCMWxevVqJyenDRs2HD9+XIrt056wEaMQiqNr164qlcpoNP7jH/8wGo3ibrykpOTkyZNAe8JGii5RiKakpKRHjx63bt368ssvX3311QZu7c8//0xLSzt8+PChQ4dOnDih1+uff/75BQsWBAUFidJbYj8ohGLatm3b3/72t1atWl26dMnd3b1ev6vX69PS0lJTU48cOXLkyBHTWTgODg6+vr4LFix48cUXxe4y4Y9CKLKgoKADBw6oVCpL7vq9ffv28ePH2R7v8OHDDx48EH7k5uY2YMCAgIAAf3//Z599tnnz5lL2mvBEIRTZuXPnfH19jUZjWlpanz59zH5qMBguXrwoHGdeuHDB9O/fqVOngICAoUOHBgQE+Pj42LiUG+GFQii+2bNnx8bGDh069LfffpPJZMXFxWfOnGGpO3z4cH5+vvBKFxeXvn37stQNGTKkRYsWHLtNeKEQiq+oqKhHjx45OTlBQUF37ty5cOGC6Xhpp06dhgwZMnjw4ICAgF69eikUCo5dJfaAQiiJzZs3x8bGnj59GgAcHBz69OnDzu4CAwPbt2/Pu3fEvlAIJYGIBoNhzZo1gwYN8vPzo0qhpBYUQkI4oxkzhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeHs/wAgxn8cvQQd4wAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxUVf8H8M8w7IIIKeOwqbivIIg+prjigqI+lbsYmoWZiuaSabllPzVXNKMsNfVBLTX3HRMTzQRCcEtTUBm2IWQRZJ85vz8uTRMgwnBnLjN8369evOwy3POl/My5c88954gYYyCECMdI6AIIqe8ohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIjEJIiMAohIQIrH6FMDY2NicnR+gqCPkXEWNM6Bp0JCMjw93d3cTEJDw8vHnz5kKXQ0iZetQTvvvuu8nJyU5OTs7OzkLXQsg/6ksIQ0JCjh8/3qhRo//9739isVjocgj5R724HL17966Xl1dBQcGBAwfGjx8vdDmE/Ivh94SFhYWTJk0qKCiYNm0aJZDUQYYfwgULFsTFxbVq1Wrz5s1C10JIJQz8cvTs2bPDhw83Nja+evVq9+7dhS6HkEoYck8ol8unTp3KGFu7di0lkNRZBtsTKpXKIUOGXLx4cfDgwefOnROJREJXREjlDLYnzPnqK9GjR/b29nv37qUEkrrMQHvCqCj06sXMze8eOdLJx0foagipiiH2hHl58PdHSYlo2jRKIKn7DLEnDAjA3r3o1AlRUTA3F7oaQl7B4EJ46BDGjoWlJaKj0b690NUQ8mqGdTn6+DHeew8AgoMpgURfGFBPWFqKPn1w/TrefBM//SR0NYRUlwH1hCtW4Pp1ODnh22+FLoWQGjCUnjAiAv37gzFcvIj+/YWuhpAaMIieMCsL/v5QKPDpp5RAoncMoidMS4O/PwoK8MsvMDYWuhpCasYgQghAqUR2NuzshK6DkBrT58vRhQsxaBC8vXH0KIyMKIFET+ntxdvFi7h/H2FhyMlB164YOhQWFkLXRIgm9LYnjIoquwdjY4OWLfHokdAFEaIhvQ0hY1B9mmUMRnr7i5B6T2//7nbvjkuXACAnB48fo2VLoQsiREP6fHd00SLExKC4GP7+6NwZ//mP0AURogl9DiHnwgX4+qJLF9y8KXQphGhCby9HVfr2hUSC2Fj8/LPQpRCiCf0PoZkZZs4EgI0bhS6FEE3o/+UogMxMuLggPx+3b6NjR6GrIaRm9L8nBGBnh4AAMAZaY5voIYPoCQE8fIh27WBigidP0LSp0NUQUgMG0RMCaN0aI0agqAghIUKXQkjNGEpPCCAiAn36wM4OiYlo0EDoagipLkPpCQF4e6NHD2Rm4n//E7oUQmrAgEII4MMPAWDzZiiVQpdCSHUZVgjfegvNmini49MuXBC6FEKqy7BCaGwc+8knbRwcxq9dK3QphFSXAd2YAQDk5ua6uLhkZ2ffuHGD9iQkesGwekLA2tr63XffBRAcHCx0LYRUi6H1hACSk5NbtGjBGIuPj3dxcRG6HEJewdB6QgCOjo5jxowpLS3dunWr0LUQ8moG2BMCiImJ8fT0tLa2lslkNjY2QpdDSFUMsCcE4OHh0a9fv9zc3J07dwpdCyGvYJg9IYBTp06NGDHCyckpISHBxMRE6HIIeSnD7AkBDB8+vH379klJST/RNmmkbjPYEIpEorlz5wLYsGGD0LUQUhWDvRwFUFBQ4OLikpGRMWHChB49ejj8TSqVmtNe9qTOMOQQlpaWtmvXrqioKCkpqdy3zM3NuTSW++rq6urs7EyfIYkuGXIIly5d+vnnnzdp0iQwMPD58+dJSUlpaWlJSUlyuby
4uPhlPyUSiSQSSdOmTR0dHaVSKfdV1YtKJBIjWu2b8MpgQxgREdG/f3/GWFhY2IABA8p9Vy6Xy+VymUyWlpaWnJycmprKfU1JSZHL5QqF4mWnFYvFEonE0dHxwoULjRo10vIvQeoFwwxhdna2u7v706dPP/3001WrVtX0x7OyslJSUrhMlvv69OlThUJhampqZWX1888/u7u7a6N+Ur8wQzRu3DgAXl5excXF/J65qKgoMTFx4sSJACZPnszvyUn9ZIA94bfffjt9+nQrK6uYmJjWrVurf+vw4cO5ublNmzZ1cnJq2rRpkyZNNGvi6dOnrVq1EolE8fHxzs7OfFRN6i9DC+HDhw89PDzy8vJCQ0MnTZpU7ruvv/769evXVf9qamr62muvVXqbVCqVSqVSkUj0sobGjRt38ODBRYsWraUJxKR2DCqERUVFPXv2vHnzZkBAwO7duyu+YMOGDbdv3+Y+4KWkpGRlZVVxNktLS0dHR1W3uWDBAgcHB9V3o6Ojvby8GjZsKJPJGjZsyPvvQuoPgwrh3Llzt2zZ0rJly5iYmOoEo6io6NmzZ5XegElISCgX0fj4eFdXV/Uj3t7eV69e3bJlS1BQEM+/CalPDCeE586dGzZsmFgsjoiI+A8fexXm5uZyQ4vc6MXs2bPLPWdz7NixN954o3nz5g8fPjQ2Nq59i6R+MpAQpqenu7m5paWlffHFFx999JFuGlUqlR07drx///6hQ4dGjx6tm0aJ4TGEhz8YY++8805aWlrfvn3nz5+vs3aNjIxmz54NYN26dTprlBgeQ+gJN27cuGDBgsaNG8fFxanfO9GB/Pz8Zs2aZWRkXLt27fXXX9dl08Rg6H1PGBMTs2TJEpFItGPHDh0nEIClpeX06dMBbKQtSomm9LsnfPHiRbdu3e7fvz9r1qwvv/xSkBrS09ObNWtWXFz84MGDVq1aCVID0Wv63RMGBQXdv3+/Y8eOAn4qs7e3nzBhglKp3LJli1A1EL2mxz3hTz/9NHr0aHNz8xs3bnTp0kXASu7cudOlSxcLC4vExMTXXntNwEpIRX/88YeJiYmjo6OFhYXQtVROX0Mok8nc3d0zMzNDQkJmzJghdDkYOnTo+fPnV69evXjxYqFrIWUKCgpmz5598+bNmJgYvHwmt1QqbdasmZWVlVB1aieECxciNhaFhZg3D2+8wfvplUrlwIEDL1++PGzYsFOnTlXxhKfOhIWFDR48WCKRPHnyhNbOqCNmzpwZEhLy2muvWVtbp6amFhUVVfFie3t7iUTi7OzMfVWf1S2RSMRisfbq1EIIL17Eli04eRI5OejaFXfvgu/LgBUrVqxcudLR0TE2NrZx48b8nlxjXbt2jY2N3b17d0BAgNC1EJw5c8bPz8/U1PT69etdu3YFkJGRwS2tkJaWJpPJ5HK5+mILpaWlLzuVkZGRRCJRX6PIw8NjxIgRfJWqhRCuWQMzM8ybBwCDBmHTJoSEYPduODhAKv3nq6tr2R+cnVGTNV2uXr3ar18/xtiFCxcGDhzIc/G1sGfPnilTpnTq1OnWrVt1oXOuz5KTk93d3TMyMjZt2vQht3Xsq7xyJrf6i729vfv3779y5UpeqtVCCFevhpkZuCdXfHywZQs+/RTHjr309UZGsLeHg8OSjh2fWVio3my4iwGJRKL+Fzo7O7tr165PnjxZvHjx6tWrea68dkpKSlxdXZOSki5cuDBo0CChy6m/lErl4MGDf/755yFDhpw9e7b2b4glJSXp6emqLjQhIWHbtm0KheL+/fvlJqxqiP95wmFhbNgwxhjLzmaurqyggDHGMjPZnTssLIzt2cPWrmVBQWzMGNarF3N1ZcbGDGCAS2WbRhgbGzs6Ovbo0WPUqFEzZ8708PAA0K1bt6KiIv4rrzXufWHIkCFCF1KvrVmzBoC9vX1qaqr6caVSmZaWxksT06ZNAzBjxgxezsZ3T5ibC6USq1cjJgbFxfjoIwwf/oofKS2FXI6UlBO
pqerX6NxVe3p6uvpr7e3t8/LyoqOj27dvz2fZPMnKynJxccnLy4uNjXVzcxO6nPooOjq6V69eJSUlJ06c8PPzU/9WRkZGkyZNajOTW+XBgwcdOnQwNzd/+vQpD3cleInyP/z9WfPmLCqKr/MVFhY+efLk2rVrhw8fDg4OlkqlAA4dOlTuZTKZjK8Wa2nWrFkApkyZInQh9VFubm6bNm0AfPjhhxW/++eff75yFNfc3NzV1bV3795jx46dM2fO+vXr//e//4WHh//555/lzubr6wtg1apVtS+b1xD++CMDWIMG7I8/+Dytmq+++gqAl5eX6khRUdGAAQMsLCzS09O11GiNJCQkiMViU1PTlJQUoWupdyZPngygc+fOBdyHoMoUFhYmJydHR0efOHFi+/bty5cvDwwM9PPz8/T05N7iK9WhQ4dy57l48SIAe3v7KtqqJv5CGB/PGjZkANuxg7dzVvDixQuu97927ZrqIHfVsWLFCu21WyNvvvkmgCVLlghdSP3y448/ArC0tPyjFn1AXl7e/fv3f/nll9DQ0I0bN3744YcTJkzw9vYOCAio+GJu5GPnzp2aF80Y4y2EJSWsZ08GsLfe4ueEL/fJJ58AePPNN1VHwsPDATRp0iQ/P1/brVfHtWvXANja2ubm5gpdS32RkJDALWjy3Xff6azRvXv3AujYsaNSqazNeXgK4eLFDGBOTuzZM35O+HJyudzc3NzIyOjhw4eqg927dwfw7bffarv1ikpLSyse5OoJCQnRfT31UElJSc+ePcu9NetAcXExt+Dl2bNna3MePkL4yy9MLGZGRiw8nIezVcPUqVMBzJo1S3Vk//79ANq2batQKHRTA0epVI4aNSowMLBcJ9y/f//mzZvfuXNHl8XUW0uWLAHg5OT0TPt9QDncgpc+Pj61OUmtQ5iZyVxcGMCWL6/tqart9u3bIpHI0tIyIyODO1JSUuLi4gLg1KlTOiuDMcZNYrS1tVW/PcutttigQYP79+/z2diCBczHh/XuzY4c4fO0eu7KlStisdjIyOjSpUu6bz0nJ4e7DL5586bGJ6l1CP/7XwawXr1YSUltT1UTQ4YMAbB69WrVEW4zUG4TGN24c+cONzvmhx9+UB189OgR93/l+++/57OxsDDm58cYY9nZrEULVjc+/QouMzOTe/NdtmyZUDVwC16+/fbbGp+hdiEMCWEAs7Fhjx/X6jw1d+HCBQASiUR1g/j58+c2NjYAIiMjdVBAQUEBN4nxvffeUx0sKSnhVlscPXo0z+2tXs02biz7s48Pu3WL5/Prp7FjxwLo3r0775uOVN/jx4+NjY1NTEw0HqzWPIR/3LnzsG1bBrAff9T4JLXB7Yi0e/du1ZF58+YBmDRpkg5a5yYxtm7dWv0WKLfaorOzc2ZmJs/t/d//sQ0byv48cCCjT5uMff311wBsbGwSEhKErYRb8PLjjz/W7Mc1DGFBQUHnzp2trawiFi/W7Ay1x3306tSpk+oGsUwmMzExMTExefr0qVabPn36tEgkMjMzi4mJUR28fPmyWCw2Nja+evUq/01WfCK3oIB99x07epT/tvTB3bt3LS0tAezbt0/oWlhkZGRtBqU0DCH3cFarVq2eP3+u2Rlqr7i42MnJCcCFCxdUB8ePHw9g4cKF2ms3KSmJe2Bg8+bNqoN//fUXt9bbZ599pq2GP/qI+fiwPn0Yd/Np/34GsA4dWO0GqfRRYWEhdx00depUoWspwy14uXXrVg1+VpMQnjlzRiQSmZiY3LhxQ4Mf51HFWQvR0dEAGjZsmJ2drY0WFQoFN4lxyJAhqh6YG6gA0Lt370qHDXnz4AHLyir7c3Fx2X3pM2e02GKdxN0LadmypYB9QDk//fQTgBYtWmjwF6DGIUxLS5NIJAA2qD6iCCczM5NbGiQ2NlZ1sE+fPuW6KR5xsS83TYYbqGjUqNGTJ0+00WiZlSuZkRFbs+afI+vWMYANHKjFRjVSVFR0/PhxZ2dnV1f
XXr16jRkzJigoaO3atXv27AkLC4uPj6/NTDRuiqCJiclvv/3GY821pFAouAUvDx8+XNOfrVkIFQqFj48PgMGDB+t4WPxluAtj9cuS48ePA2jWrFkJ36MmUVFRpqamIpFIfTRSNVBx4MABfpsr79w5BjAHB6b6G5yTw2xsGMDUPprWBQsWLKh6sgL3CapDhw4+Pj6TJ09etGhRcHDwwYMHIyIi4uPjq/gfJ5fLmzZtCmDdunW6/I2qg3svVp9dUE01C+EXX3zBPaVZd6YIcLMWzMzMVCUplUputuGPvN62VU2TmTdvnupgpQMVWuTmxgC2Z88/R+bOZQDz99dF69Vz4cIFIyMjY2Pj06dP37p16/Tp0zt37ly5cuWMGTNGjhzp5eXl4OBQ9bpJYrHYwcHBy8tr5MiRM2bMWLly5c6dO0+fPh0bGzt48GAA/fr1qyN9gLoXL15wU6V+/fXXGv1gDUIYHR3N9QMnTpyoYXnaxc1a+OSTT1RHuJvX3bp147EVf39/VJgmww1UtGrVSkfPau/axQDWufM/N2OePGHGxszEhCUm6qKAV0lPT+cmBH3++edVvzIzM/POnTthYWF79uxZu3ZtUFDQmDFjevXq5erqWkVEra2tbW1t604fUA634GVNR4mrG8K8vLy2bdsCmDNnTs1r0y5u1oKdnV1eXh53JD8/n7uBGRERwUsT3DSZBg0aqE+TqXSgQrsKC5lUygAWFvbPwbFjGcA++khHNbycUqnk1iDz9vauzQ2q4uJimUz266+/HjlyZOvWrUuWLAkICBg8eHCLFi3EYrGrq2stZy1oT1pamrm5uVgsfvToUfV/qroh5Jbx69SpUx2ZLlQO9xD9V199pTqydOlSAP/9739rf/L4+HjuSbQdalMlKx2o0IXPP2cAGzr0nyNRUQxgDRuynBydVlJBcHAw92FPS+O0lc5aSE1N/fjjj1euXKmNFjXAJSUoKKj6P1KtEBYUFAwYMMDS0vLevXua1qZdhw4dAuDq6qp6A05LS5s+fXpt5ndyVNNk3lKbKlnpQIWOPHvGGjRgAIuL++dg794MYMHBOq3k327fvs3doOL3o3g53F0J9VkLsbGx3GWqlgalaoqbXdCgQQPV7IJXqm5PqFAoavOcuLaVlpa2bNkSwFG+nyDhrvLLTZOpdKBCdz74gAHsnXf+OXL0KANY8+Y6foxehXuCCsD777+v1YYqnbXQr1+/OjJmxuEWvFy7dm01X6+FJQ8Fwm2K1Lt3b35Pu379egsLC/XPlqqBipMnT/LbVnXFxzOxmJmZMdVbgELBWrcubtbs9vHjglTEbdLYoUOHFy9eaLutOXPm4N+zFk6ePMm9UQr4GLe6c+fOAXBwcKjmcKjhhDAvL4+7QXz9+nV+z6y+WGWlAxUC4GaQLV2qOnBp1y5jI6MePXrovpYjR44AMDMz0821UsVZC6pBqf379+ugAHWlpaXLly8v97y+QqFo3Ljx4MGDq7n4mOGEkDG2aNEiAGPHjtVeE5UOVAggIoIBTCIp/vs+mWoJLK08Pv5yMhnz9d1sbGy8ZcsWnTU6ZswY/HvWwvbt2wF4enrqrAbOZ599BqDce9+mTZsANG3atJp/SQwqhMnJyaampmKxeM+ePXfu3OF9PlGlAxVCiZs8uY+zs/oyNtwSWG+88YbOaigtZX37cp9Pb+vyBlXFWQuFhYXc05SXL1/WWRk3btwwMTExMjIKUxsxunXrFrct17Fjx6p5HoMKIWNs6tSp3PP1HDMzM6lU6unp6efnFxgYuHz58u3bt584cSI6OrqmEa10oEJA3DtC69atVc+OVLoEllatWsUAZm/PdH9/qlevXvj3rIXly5cDGDlypG4KyM7ObtGiBYBFixapDr548aJdu3YAPvjgg+qfytBCqFQq9+7d279///bt21tbW1fxbBQAKyurdu3a9evXz9/ff+HChcHBwT/++CP3+GK5Cwmh1vOqQmlpqaurK4DjajdjuCWwZs6
cqYMCIiOZiQkTidjp0zporTzug6j6rIX09HQLCwuRSKSbgbSJEydyF8Dqd1/effdd7gZVjYbTDS2E5XBruV6+fDk0NHTDhg1z584dP368t7d3q1atXrl5sp2dXceOHQcPHhwQEMC977q4uPA/Zb4WNm/eDKBPnz6qIxWXwNKS3FzWujUDmDZnblal0lkL7733ng6GSRhj33//PSqs5cXNZjIzM1Of01MdBh7CquXn58fHx0dERBw8eDA4OHjRokWTJ0/28fHp0KFDuc2TTUxMrKysjtSxZc6eP3/eqFEjAOqTeiougaUNkyYxgHl4MAF3x9q2bRv+PWvh/v37RkZGFhYWf/31l/baffToEXeRpb60ikwms7OzA7Bt27aanrBeh7Bqcrk8Li6OmwTQqVMnAMt1uKxjNS1cuBDA+PHjVUcqLoHFu927y/Yc4XdJx5qqdNbC8OHDoc31DYqLi3v06AFgzJgxqoMKhaJ///4AfH19NbhBRSGsll9++QV1aaV9laSkJO6GcHx8vOpgxSWwePToUdmeI/wu6agZbtlf9ScKL126BJ72aakU965Xbi0vbsteiUSi2f6HFMLq4t7/tm/fLnQh5U2aNAn/3gys4hJYfCkpYf/5DwMY70s6aqbSWQteXl5auokdHh7OTZVU34+o0oGKGqEQVteBAwcAtGnTpq5NJ42LixOJRNbW1ll/Lz9TVFTk4OBgZGTE+yMsixYxgDk762DPkeqaMmUKgNmzZ6uOhIaGQgt7IqimSqrvSVjpQEVNUQirq6SkpFmzZgAEe2T05bgPJOvXry8pKUlKSpoyZcrEiROXLVt27Nix69evy2Qyvh6qPHKE2dsz3T6T8woVbwgXFxdzy3Kf4W8JLKVSOXLkSFSYKlnpQEVNUQhrYOPGjQD69esndCHlnTp1CoCjo+Onn37KrbxYjkgkatq0qbu7u5+f33vvvbd8+fI9e345eZL9/jtLSWE16jD+njhdh3BrXqxRWwJr3bp1AAbytwTW1q1bUWEtr127dlUcqNAAhbAGVCvtC77WYzlKpfKtt95avHgxtzXKggULVq5c+f77748YMaJbt25SqbTighF9+hwAGPePsTFzdGTdu7ORI9kHH7DPPmO7drHTp9mtW+zMGWZlVXbx6ebGGGPh4Uy1xLmjo2C/srrz58/j37MWcnJyuP9TvCx68LJNR7iBij3qS/5ohEJYM/PnzwcwceJEoQspLysri7taVl9rR6W0tDQ5OTkyMvL48eMhISHLli1bsiRy2DDWpQuTSJgqjRX/WbaMeXmVzdaosyFkjLm5uZXLw9y5c93d3Wu/volqLa/AwEDVwUoHKjRGIawZbqV9Y2Njba+0X1Pc0uNeXl4afPwrKmKJiezaNfbTT2zrVrZ4MXv7bTZoEOvYkYWEsPnzmY8Py87+J4RSKRsyhA0Zwqyt+f9FNMNdGXbu3Fl1Q5ivIYr3338fQPv27dWnSnJrOvK16QiFsMYmTJgAYMGCBUIX8o9vv/0WgJWV1Z9//snvmcPD2fz57OhRtmoVc3cvO1IHe8LCwkLu1qXG4wSVSklJsbGxKfckWqUDFbVBIawxba+0X1MPHz7kPpyEhobyfnIuhEol69uXdepUdqQOhpAx9vnnnwMYqr4EFh8eP3588OBB1b9Wf03H6qMQaqJv374ANm3aJHQhrLCwsGvXrqjdJpVV4ELIGNu3j5mblx2pmyF89uxZgwYNAMSpL4HFq5cNVNQShVATJ06cgHZW2q+puXPnAnB1dc0Rer3DuuCDDz4A8I76Eli84tYx4n3TERFjrOKwEqkaY6xjx45//PHHDz/8MG7cOKHKOH/+vK+vr1gsjoiI4HYIrucePXrUtm1bS0tLa2trBwcHqVRa6VfNTn737l0vL6+CggLe/6dTCDW0ffv2999/v1u3blFRUYIUkJ6e7ubmlpaWtnbtWm5xHQLgt99+8/f3j4+Pf9kLzM3NbW1tHRwcXF1dy+XTycmJG12
sqLCwsEePHrdu3Zo+ffo333zDb80UQg0VFhY2b95cLpdfuXLF29tbx60zxkaOHHnq1Km+ffv+/PPPVe+vUt/k5+cnJyenpaUlJSVV/Jqfn1/Fz9rY2Dg6OnKxVJFKpd98801oaGj79u2jo6O5HYJ5RCHU3PLlyz/77LNRo0YdO3ZMx01v2rRp/vz5tra2sbGx3HOSpJoKCgpSU1NTUlIqfpXJZLm5uZX+1GuvvZaXl3fjxg3uqQB+UQg1l56e3rx588LCwnv37nHL++jG7du3u3fvXlhYeOzYMW6HYMKXjIwM9W5TLpfLZDK5XD5o0KChQ4dye2Lzj8ebPPUQt7DPjBkzdNZiXl4eF3jdrOZEdIB6wlp58OBBhw4dzM3Nnz59yq29q23vvvvuzp07O3ToEB0d/cq1qoheMBK6AP3Wtm3boUOH5ufn837HrFJHjhzZuXOnubn5/v37KYEGg3rC2rp06dLAgQPt7e2fPn3KLb2sJUlJSW5ubpmZmV999RU3Kk0MA/WEtTVgwICuXbump6fv379fe60olcq33347MzPT19eX26ObGAwKIQ/mzZsHYP369dq7rFi1alV4eLiDg8PevXtFIpGWWiGCoMtRHpSUlLRs2VImky1btmzYsGFOTk4SicTY2Jiv81+7dq1fv35KpfL8+fM+Pj58nZbUERRCfgwfPjwmJiYtLU11xNbW9mXPLrq4uFQ/ojk5Oe7u7k+ePPn444/XrFmjnfKJkCiEPIiMjOzdu3dpaenrr79eWFiYkpIil8uVSuXLXm9sbCyRSJycnJo2bVrxa7mhjokTJx44cMDT0/PXX381NTXV/m9DdI1CWFt5eXmenp5//vnnvHnzuOXYAJSWlqanp5d7apF7/CItLe2vv/6q4oTcdm7cE4w5OTlhYWHW1tYxMTHc/ifE8FAIa2vy5MmhoaGdO3eOjIys5hBFcXFxRkbGy55g5JZSV73Yzs5u1qxZ3ELrxCDxdvOgfjp48GBoaJYFmCAAAA88SURBVGiDBg0OHjxY/UFCU1NT7vF8T0/Pit8tKChQdZ67du26ePHi3bt3ea2a1C3UE2ouISGha9euz58/37Fjx7Rp07TRhFwub968eUlJyYMHD1q2bKmNJojgaJxQQ6Wlpf7+/s+fP3/rrbe0lEAAEolk/PjxCoWCW1iBGCTqCTW0ZMmSNWvWODk5xcXFcbtDasmdO3e6dOliYWGRmJjIbcdHDAz1hJq4cuXKunXrjIyM9u7dq9UEAujUqdOgQYPy8/O/++47rTZEhEIhrLFnz/InTZqkUCiWLl3KbYfEu3v37qlHjlt7/8svvywuLtZGc0RYdDlaY2+8gdTU1EaNFpw6tYfHZ9NUMjIyHB0dlUplfHy8aukKd3f3uLi4PXv2vP3227y3SIRFPWHNfP01jh3D/fvSb77Zp40EAmjcuPHo0aNLS0vVb8Zw64tu2LCB3jQND/WENXDvHry8kJ+P/fsxYYIWG4qLi3N3d7e2tpbJZNwifMXFxc2bN09NTQ0LC6NnuA0M9YTVVVSEiRORn4933tFuAgG4ubn169cvNzd3x44d3BFTU9NZs2YBUD0ZRwyHEAvb6KXZsxnAWrViz5/rormTJ08CcHJyUm11lpmZaWVlBW3utUAEQT1htZw9i23bYGKC0FBYW+uixeHDh7dv3z4pKenw4cPcEVtb24CAAAA0cG9ohH4X0ANyedl2tuvX67Td7du3A/D09FQdSUhIEIvFZmZmKSkpOi2FaBP1hK/AGKZOhVyOQYMwb55Omw4ICJBIJL///vuVK1e4Iy1atBg5cmRRUVFISIhOSyHaRCF8hQ0bcOYMmjTBnj0w0u1/LTMzM26vZvWbMdzAfUhIyIsXL3RaDdEeobviOu3ZM2ZtzUQidvKkMAWkp6dbWFiIRKJ79+6pDnK7oIWEhAhTE+Eb9YRVsbPDlStYuxZ+fsIU0KRJE39/f8bY1q1bVQe5xd02b95cxQoaRI/QYH15ly9
jxAg8fQo7O7i7IzZW4Hq4lfbNzMwSExO55WcUCkXbtm3j4+NpQxjDQD1hJdq3R3Cw0EX8rW3btr6+vgUFBV9//TV3RCwWz549GzRwbyjqb0+YkYHkZMhkSE5GSgoSE5GcjNJSLFuGU6cQF4fDh9G3r/A9IYDw8PABAwaor7Sfm5vr4uKSnZ3922+/9ejRQ+gCSa3o0xozly9jxw6EhgKAkxOSkl596ZiVhZQUpKYiIeFff5DJUOlukGIxFAoAmDkTX36JOrLUdf/+/b28vKKiovbt28fN4re2tg4MDFy3bt3mzZt/+OEHoQsktaJPIawUd+n42WcA8PPPOHUKycllXVxqKkpLX/qDNjZwcoKzMxwc4OwMR0c4OsLFBdxyhKNGITi4qh/XsTlz5vj7+69fv37q1KlGRkYAgoKCgoODDx8+nJCQ4OrqKnSBRHN6FsJLlzB0KAA8f152pE8fXL+OnBwAiIws/1nO1hZSKRwcyr66upb9oWVLNGpUeROXLwOASITAQGht7ZgaGzt27JIlSx48eHD+/HlfX18Ajo6OY8eODQ0N3bZt26ZNm4QukNSCwEMkNREeziZNKvuzo2PZkfnz2dGjbNUq5u7OIiPZ+vVs3z525QqLj2eFhbVqLj2dBQWx0aNrWzZf1q1bB+Djjz9WHYmLixOJRNbW1llZWQIWRmrJEEKoVLK+fVmnTjw3l5nJrKwYwOrIpIXs7OzY2NhyB7n1NSZMmHD58uU//vgjLy9PkNpIbejZ5WiltHTpaGuLgAB89RW2bMHOnTyfXAM2NjZubm7lDvbs2TMuLu7AgQMHDhzgjpibm1e6C41UKnV2dm7YsKHOCyevoN9DFDEx+P57+Ppi2DCtnP/xY7RuDWNjPH4MqVQrTdRGYmKiu7t7VlZW9+7dTU1NuVX0CwoKqvgROzs7qVRabhcaiUTi7OwskUhMTEx0VjxR0e+e8OJFbNsGpVJbIWzRAiNH4uhRhIRg1SqtNKExpVI5ZcqUrKwsX1/f06dPq3YOzcrKSklJ4Xa2SE5O5r6mpaXJZDK5XJ6ZmZmZmVnpuvoikUgikTRt2pTbi2bGjBkeHh66/Z3qKf3uCceMweHD+P57TJmirSauXUPv3rCzQ2IiGjTQVisaWLly5YoVKyQSSVxcnEQiqeZPcRGtdC+axMTEUrUxmbNnzw7l7kQTLdPvEDZrhsRE3L2LDh202ErPnvjtN4SEoO7sFc/tiKhQKHjcu1ehUMjlclUXOmrUKGkdvAQ3RHocwvR0SCSwtkZWFsRiLTZ06BDGjkXr1rh/X9dTCitFe/camDrwd0pTkZEA4Omp3QQCePNNtGyJhw9x8qR2G6qmGTNmPHnyxNPTkzYtNAx6HMKoKADo3l3rDYnFmD0bAOrCpIVdu3YdOHDAyspq//79tHu2YdDjEHI9oZeXLtp65x00aoSICNy4oYvmXubRo0fcUtwhISFt2rQRshTCHz0O4e+/A7oKobU1AgNZjx5p+/YJtjVScTEmT56Wm5s7ceLEyZMnC1UG4Z2+3phJSEDLlmjSBOnpOmoxKSnZ1bUFY+zRo0fNmjXTUatqFi7E6dP5jRvPPnVqMz34Ykj0tSfkPhDqcjqrk5PjuHHjyu3TojNhYdi0CQ8fWn7xxU5KoIHR7xDq5lpUZeHChSKRaMeOHdnZ2bps96+/EBAApRIrVqBnT122THRBX0OYnf2rp+ez7t1LdNloly5d+vfvr75Piw4whmnTkJoKb298/LHOmiW6o5efCRUKRaNGjfLy8tLT05s0aaLLps+cOTN8+HBHR8fHjx+Xe9z5yJEjFhYW3FPRPFa1dSvmzEGjRoiLw99bhhKDopchvHXrlpubm6ura3x8vI6bZox16tTp3r17+/btmzhxovq3Gjdu/OzZM+7PZmZmqiehHR0dnZzWSSRmjo6QSuHoCBub6jZ39y68vFBQgB9+wLhx/P4qpM4QbCZjLXBXg+PGjROkdW43eQ8PD/WDCoUiICB
g0KBBHTt2tLW1Vf8vbG5uLhIpAab6x9KStW7NvL3Z1KlVNVRQwLp0YQCbPl27vxERll5OZYqKigLgpePbMn+bPHny0qVLY2JiLl++3K9fP+6gkZHR7t27Va8pKCjgnoROSkp69izv8WNRWhqSkiCXQybDixd4+BAPHyI5uaqG5s7FrVto3x60goyBE/pdQBNdu3YFcOXKFaEK4B7a9PPz0+zHc3PZvXvs0iV27txLX3P0KAOYmRm7eVPDIom+0L/PhIWFhQ0bNlQqldnZ2dzOtbqXmZnp4uKSn5//+++/c+8I/FIo0LEjHjxAcDDmzOH99KRu0b/L0Zs3b5aUlHTp0kWoBAKws7Pz8PCIjo728PCoYk0XFxcXa4329RWLER6O7dsRFMR77aTO0b8QRkZGosIHQqVSaaTDqX4rVqyIiIgwMTExMzMrLCxMSEhISEio9JWNGzceOHBtTs60pk3h5AT1rxIJjNX+81dcTXzFCt38NkRg+hfCindl5HJ5z54958yZM2vWLLGWJxcqFIpZs2Z98803YrF427ZtgYGBBQUFla4WkZCQIJPJMjIykpLMr12r5FRGRrC3Lxu0cHDA4MH/Wk2c1CNCfyitMW4KT0xMjOrI6tWrud/F3d09PDxce00XFhaOHTsWgJmZ2aFDh175eqVSmZaWFheXd+oU++47tmIFmz6djRjBPD2ZVMqMjJj6uMWPP7L585mPD8vOZm5u2vslSJ2jZzdmcnJybG1tzczMnj9/rv7AysmTJ+fOnctdE/r5+QUHB7ds2ZLfpvPy8t58882wsLBGjRqdOHHC29u7licsLYVcjuRkcKMXbdrg3Dn07o07d/DTT7h5k5eqiT4Q+l2gZu7evdu6dWtra+vLly+X+1ZRUVFwcDA3w8DExCQoKCgnJ4evdlNTU93d3QFIpdKKy2DzQquriZO6TM9CyBgLDAzk3j7GjBnz+PHjct9NTk4OCAjgbtL07j18506mUNS2xfj4+FatWgFo167d06dPa3u6l+BCyBjbt4+Zm2upEVIX6V8I8/Pz165dy936NzU1rbTHi4yM7NWrV5cuVwHWtSv75RfNm4uKirK3twfg5eWVnp5eq9IJqYz+hZCTnJwcGBjI9XhSqXT79u2lpaXlXnPiBGvRouy2h58fi4+vcSsXLijbtBkEwNfXl/ZaIVqiryHkREdH9+7dm7s69fDwqPggW34+W7uWWVszgJmasqAgVv3Pifv2MVNT5upaNH36R8XFxTyXTsjf9DuEnBMnTjRv3pyLop+fX0JCQrkXJCWxyZOZSMQAJpWyCvd0KrF1a9kQQlAQD58qCamCIYSQMfbixYsVK1ZYWloCaNGi9dKliooXj1FRrHdvZmHBqr63olSy5csZwEQi9sUX2iuZkDIGEkKOTCabOHFir16hAHNwYNu3l+/ElEp2+3ZVZygtZe+9xwBmbMx27tRqsYSU0bPB+uqIjsbcueCeFPP0RHAw/v7Y+Ar5+Rg7FqdPo0EDHDoEX1+tlklIGQMMIQDGcPgwFi7E06cA4OeHL7/E3x8bK5eVhZEjcfUq7Oxw8iRef10nhRJiqCHk5Ofjyy/x+efIy4OFBYKC8MknqHRqUUoKfH1x6xaaNcO5c2jXTue1knrMkEPISU7G4sUIDQVjcHDA8uV4991/7XBWUIAOHfDkCTp3xrlzcHAQrlZSLxl+CDnXruHDD8uWDPbzK7/JWWgoQkJw8iRee02Q6ki9pq+L/9ZUr164cQMHD8LFBW3awNoamZkA4O4OAP7+uHqVEkiEUV9CCEAkwpgxuHsXw4eXTZ9VVxe24CX1U737q2dlBSMj9OmD69eRkyN0NYTUwxCqzJyJL7+ESCR0HaTeq78hHDUKFy+itFToOki9V39DKBIhMBCPHgldB6n36ssQBSF1Vv3tCQmpIyiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAi
MQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwCiEhAiMQkiIwP4fgeF+INCkyY4AAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3de1xUZf4H8M8wM9xEERVlQC6SElRogpnlpYvg+ivNrGjbEql+imsp0hXt9Stst63RvODWq8JLha5WlLqL7aqB5CqV5oApIF64KPebgFyGgbk8vz8OTshFucycZwa+79f+wZ4Z5nwhPp7vec5zniNhjIEQwo8N7wIIGewohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiHp3u7dCAiAnx+io6HX865mwKIQkm4UFeGNN5CainPncOYM9uzhXdCARSEk3ThyBA8/DIUCMhkWL8bhw7wLGrAohKQbVVUYObLt61GjUF3NtZqBjEJIuuHqisrKtq+rqqBQcK1mIKMQkm6EhCA1FSUl0OuxezcefZR3QQOWjHcBxPJkZ+PYMdxzDzZtQkgI9Ho8/jieeIJ3WQOWhDHGuwZiYbZsQXQ0VqzARx+1bSkrw0svYfJkvPMO18oGJmpHSSdXrgCAt/fvW3Jz8c9/4tAhXhUNbBRC0knnEHbeQkyHQkg6ESLn5dVxC4XQPCiEpBM6EoqrlyGkyYQDnlqN6mrY2WHMmN83FhYCFEJz6U0Iu5xM2NqKzz6jQA4cxl5UIum4kUJoHr0JYZeTCd98E8uXY84clJebqUQiqi7zVlQE3HiWSEynNyHscjLh449DoUBqKiZOpDm+A0HnzrOqCk1NGDECQ4fyKmpg600Iu5xM+OCDOHMGc+agqgr/8z9YvZpaU+tGozKi600Iu5tM6OqKQ4egVMLGBuvWISQEpaXmqJWIgUIout6EcOzYtsmEAQGwt0dIyO8vSSSIiUFKChQKHD2Ku+/m0ppmZUEiwdmzABAXh7g48Uuwft2FkE4IzaaXlyieew45OVi3DgcP4p57kJFxw6sW0Jr6+uLDD0Xe58BSXAzQlXpR9eli/eTJmDoVubm4/35s2XLDS7xb00mTUFjYNphHekun0w0rL3/Qy8swdqxxY0VLS6uPj97Hh19dA1yfQujjg//+F1FRaGlBdDSeeAJ1db+/KrSmhw9jzBgcPYrg4MvHjpmq3C5pNPjyS3z7LQAYDIiKoka0j4qLixs0mjyDwUYuN26ce/Kk3eXLp6kdNZu+Tluzs8OWLdi3D8OHY/9+3H03Tp684Q2zZyMzE3PmVHh6jn/oodWrV+vN0JqWlWHtWnh54YUX8NZbMBgAYOFC/Pgjrl2DwYD4eBq
s7YUrV64A8L6x8ywsLOy8kZgS66eCAnbvvQxgdnYsLq7jqzrd5vfes7GxAfDwww+XlZX1d3fXHTvGwsKYTMYABrDgYPbll+y339iCBYwx9vHHzM+PzZ7NAPbAA6ykxFS7HeB27twJ4NlnnzVuaWhoAODg4GAwGDgWNrD1O4SMMY2GRUW1pWHhQlZb2+H1H3/8UaFQAHB1dT106FA/d5WYyKZNa9ubXM7CwlhycturmZltIWxqYiNHshUrmLs7A9ioUezgwf7sdrD461//CmDNmjXGLVlZWQD8/f05VjXgmSKEgn372PDhDGDe3uzEiQ4vVlRUzJkzB4BEIomJidHpdL39+NJSFhvLRo1qi9/o0SwmhhUW3uK7KivZ3LkMYBIJi4pira293a3JGYqKXjt3LujcueCLF+fqdDW867nBkiVLAHz66afGLf/+978B/OEPf+BY1YBnuhAyxi5eZJMnM4DZ25d98UWHFw0Gg1KplEqlAB588MHS0tIefuqxY/VPPsmk0rb4TZ3Kdu1iLS09LcpgYHFxTC5nAJs1i3Nr2tSUce5cMGMGxlhZ2bqamq95VtNJaGgogP/85z/GLZ988gmAyMhIjlUNeCYNIWtrTbXu7re5uCxcuLC2H62pRqNJSEiYOHFicPAagNna3tB59tbRo22tqasrn9bUYNDqdNdaWgrPnvWur081GHrdC4jAz88PQHZ2tnFLTEwMgL/97W8cqxrwTB1CxhhjKd99N2zYMADjx4/PyMjo8OotW9OCgoI33nhjxIgRwtCRu7uHUtnc/zEdXq2pVltRVqY8e9arqOgVxphafbawcGVOzrSCgvDW1nKDgX+LLDAYDA4ODgDq6+uNG5955hkAu3bt4ljYgGeWEDLGCgoKpk6dCsDOzi6u06ipTqd75513jKOmFRUVwvbjx4+HhYXJZG0LMQYHB8fHxzc3N/eths4xE7k1bWpSFRREpKfbqVRQqXD+/HShCuHV8vKNFy/+ITNzXEPDT+ato2cqKioAjBw5sv3G++67D8CxY8d4VTUYmCuEjDGNRhMVFSXE6SatqZeXV2lpaUJCQmBgoPBmW1vbsLCwn37q75/mkiVswQJW02ns48cfmULRNrrz44+5/dxLZwZDa01N4sWLIUL2VCqbixdD6uqSGDNUVcVfvvyikMPKys/OnvVQqZCebltevskYTl5+/fVXAEFBQe03uru7A7hy5QqvqgYDM4ZQsG/fvuHDhwPw9vY+0WnU9MSJEy+++KKx83Rzc4uJiSkuLu7/fouKmLMzA5ivLzt1quOrQms6Zcp/pFJpVFRUq4l609bW8rIy5dmznkL8Tp92LiyM0mjyjW/Q65sLCp7Pyrrj3Lm7L1x4qLn5QnFxjEolUalw6dJ8rfaqScrom2+//Vb459K4paWlxcbGRiaTabVajoUNeGYPIeu+NT1w4IDQkQKYMWPGN998Y6owCC5fbruiKJczpZJ1uNqs1zOlcrNxtLakf71pU5Pq8uXIjAwHIX5ZWbdXVMTp9Y09+d7a2n/99tsIlQpnz3pybE03bNgAYNWqVcYtubm5wr+evEoaJMQIIeuqNc3LywsICHB0dAwLC/vll1/Mtl8WFcUkEgZ03ZoePXpU6LhcXV0P9n7YVKPR7Ny5MyUlXMheerosL+/J+vofe/s5LS1XcnLuEz6hpCSWMX1vP6H/jh079tprr7333nvz5s177733li5dKlwknDVrlvjFDCoihVDw9ddfG0dNP/vsMwAPPPCACPvdv5+5uDCAeXmxznmvrKycO3euMFrb89a0rKxMqVSOHTsWwKxZPqdPuxQWRrW0XO5zkQaDlmNrWltbu2HDBl9fX+EfSicnJwC33377tm3bjh49KmYlg5CoIWTXW1O5XL5mzRoAzz//vDj7zc1lwcFtU1y3by/o8KrBYIiLixNGZR944IGbt6Y//fTTM888I79+n8HkyZN37Nih1fZxCLcD8VvTCxcuREVFDRkyRPhxfH1
9lUrliRMnhHEye3v7zoPbxLTEDiFjrLm5+ciRI2+//TaA2NhY0fYrtKYKRdGIEaMWLFhQ06k3vXlr2tLSkpiYKAzZA7CxsZk3b15yn2cPdE+c1lSv1ycnJ8+bN09yfWnD6dOnJyYmGi/bqtXqyMhI4aXw8PDGxh6d35I+4BBCweLFiwHs2LFD5P3u35/i7Ows/JN/qtOwaZetqdB5CvkEMHr06JiYGLOO2hsMLYWF0SqVJDXV9YUXnuv870V/1NXVxcXF+Vy/SdfJySkyMjIrK6vLNyckJAgHSX9//7Nnz5qwDGLELYQPPPAAgJSUFPF3ffny5WnTpgGQy+VKpbLDTTp6vf4vf/mLMGo6ZcqU+fPnt5888OWXX2o0GnHqrK3dv2DBLAA+Pj4nT57s/weeP3++fed52223KZXKWyY8JydHaE0dHByoNTUHbiEU/iW+dOkSl70Lo7VCJ9Zda+rm5iYMI0mlUjN1nrd05coVoQGWyWSxsbF9u6lPr9cnJSWFhIQIP69EIgkJCWnfed6SWq1eunSphbSmBoMhOTm5sLBwwEwh4BNCnU4nl8slEkmfp6SZxP79+11cXIRLYZ0vk+zatUsYIeznJcR+0mq1MTExQn4ee+yxq1d7MWoqdJ7Gm+KHDh0aGRnZfn52r3BvTZubm40zq4KCgpydnb/99lvxyzA5PiEsKioCoFAouOy9vdzc3ODgYGEYsPDG2xM//vhjAMuWLeNVW3v/+te/hHlFnp6ePZnQd/r06cjISEdHRyF+48ePVyqVnWcO9lZWVtYdd9wBYPRo3127enw7Wb/l5ua+8sorwtQrAB4eHhMnThSO6q+++qpp53iIj08I09LSAEybNo3L3jsQWtM333yzw/Y33ngDwPvvv8+lqs7at6adT2UFxs7TOIQbEhKSlJRkwsUpGhsbIyIiZszIB1h4ODN3ZyrM6RdO0YXT8oSEhNbWVuGqkq2trXDqnpeXZ946zIlPCHfv3g3gj3/8I5e9d6nzn+nTTz8NYPfu3Vzq6dJNWtPKykqlUul1fU20YcOGRUZGnjt3zkyVJCSwIUMYwPz9mTk60/r6+vj4+DvvvFP4cezs7LqcWXXq1Klx48YBsOrWlE8I33//fQCdDz4WRRhBTUtL411IR8bW1MvL66effsrIyIiMjBRuBQTg5+cXFxfX0NBg7jJyclhgIAOYgwOLjzfZx+bm5sbExAjn6sI5S2xsbFVVVXfvr6ure+qpp4xXlVp6vuaCxeATwmXLlgH4+OOPuey9h4QVAIqKingX0gXjnHjj5RMbG5vHHnssOTlZzGXR1Gq2dGnbsiP9bE0NBpaczMLC2IwZz7XvPHtyA4e1t6Z8QihcED9w4ACXvfeERqOxsbGRy+V9WJNKHC0tLatWrXrppZecnZ2joqLy8/Nv/T3m0c/W9No1tmUL8/NrC3NQ0MmlS5eeOXOmt5/z66+/WkJrqtFohJlVKpWqh9/CJ4QBAQEALHkGxqVLlwCMGzeOdyG3JtrkgZto35pu3drT77p4kcXEtK3RBzB3dxYby6qr+14G39a0qKhozZo1rq6uwpF8xYoVPfxGPiEUJunX1dVx2XtPpKSkAHjwwQd5F2I1et6a6vUsOZnNm9d2ixnApk9niYnMJHcOc2lNVSpVeHi4cU5/UFBQfHy8Wq3u4bdzCGF1dTWA4cOHi7/rntuxYweAiIgI3oVYmc8/Z46ODGB33snarRfV5to1Fh/PAgLasmdvz8LDzTK4Kk5rKiwIOGnSJCF7wrIsfZhZxSGE6enpACZNmiT+rnvunXfeAfD222/zLsT65OSwu+5iy5axzEwGMOHkbvNm9vjjbaeOALvtNrZxY+e12k3JrK1pSUlJbGzsqFGjhPiNGTMmJiam8JZrUXeDQwj37dsnXOYSf9c9FxERAWD79u28C7FKjY1MrWaZmczXly1axBhjmzez8PDfO09xRrvM0ZoKnWeHBQF73nl2qa9PZeqHLh/9Y2msoki
LNWQIhMuW7R8XOWkScnKQloawMFyfAGNeEolk1apVaWlp48aNU6lUQUFB3333Xd8+qqWlZefOnZMmTZoyZcquXbtsbGzCwsLS0tJUKlX7i7R9wy2EXpb9vLvGxh/uuqve2/te3oVYt/aPi5RK4e/PoYZ77rnn9OnTTz311LVr155++ulVq1a1trb2/NtLS0vXrl3r4eERERFx9uxZYUHAvLy8xMTE6dOnm6bE/h+ge2vhwoUAEhMTxd91D+n1zNaWSSSM6z0eVk94SJZezyZPZmvXss2beRbToTXtyWVV0y5FfRMcQhgUFATAJHepmklxMQOYmxvvOqyc8Ul1wuMi+YZQ0JNR0/Y3TMF0S1HfBIcQCmNK5eXl4u+6h376iQHs3nt512HlMjNZaChrbm57XKQlhJAxVldX9+STT6KrUdO8vLyYmBhzLEV9c2KHsKmpCYC9vb0lP/l1zx4GsLAw3nVYP+GahPknk/dO59a0uxumxKlHZpozyx4zjsoYF/myQFeuAACNjPZTdTWamuDiAicn3qXcSBg1nTp16jPPPKNSqSZOnNjY2AjA3t4+IiJixYoVkydPFrMePiG08KH/wkKAQthvFv5v2X333Xf69OlZs2bV1NTI5fKoqKgVK1YYr7+LSexLFFYRQgv/67EWlv9rHDFiRHBwcFlZ2Ycffrh27VouCQSvEHa+SKjRaOLj4w0Gg8j1dMny/3qsglX8GoU/SOMqrFxYypEwOjr6z3/+8yOPPFJVVSVySZ0J7ahlzyawAkIILfzXaAmtGZ8Qjh49usP2sLAwNze3w4cPBwYGCrcR8VJTg4YGDBuG60t7kT6y/COhXq8vKSmRSCTCg314ETuEa9ascXFxWbly5dmzZ9tvnz17tkqlmjlzZkVFxdy5c9euXcurNbX8Px1rYfm/ydLSUq1W6+bmZm9vz7EMsUPo7+/v4eGRm5s7bdo04Z49Iw8Pj9TUVOERMe+++25oaGh5ebnI5cEa/nSsheX/JgsLC8G7F4X4IbzttttOnToVFRXV3Ny8ZMkSYU1146symWzt2rU//PCDm5tbamrq3XffLX5r6uaG8HDMni3ybgeaxkbU1MDBAddXe7BElnBCCPCYwC3YtWuX8UmUnVf1KS4unjlzJgCpVBobG6vXc3hyLemPrCwGsNtv513HTVnI0pscbmUSLFq0SJiscOHChXvvvXfLli3tX+XSmmZlQSKBcK4aF4e4OGRlYcqUtldXr267JYf0hFVMeLCQIyG3EAK4/fbbT548GRUVpdFooqOjLaE19fXFhx+aeyeDguWfEMJibm3lGUIA9vb2W7Zs2blzp5OT0z/+8Y8pU6bwHTVtfyc46Y/a2s9nzlwSGHicdyE3Q0fC34WHh3NsTQ0GfP89oqPbvjbeCS7IzISPD3x88OmnJtznwJeZmXL8+A4Xlyu8C7kZYXSU+5GQ28BMZ83NzVFRUUJVixYt6vw0hZSUFDc3NwAeHh7Hjh3r/x47rMC3Z0/HO8EzM1lwcNubY2Is5Y44q3D//fcDMMl/JjMR5mZZwtKbFnEkFPS8NS0pKXnooYf605pmZ2P5cri7Y9ky5OTgttuwcSPGjQMAGxv87/9iz57+/0CDmoWcbt2EhfSisJB2tL2etKarV682GAzvvvvu66+/3qsPNxiQkoL58xEYiM8+Q1MTpk9HYiIuXMCrr/5+29sLL+DqVVP9QIORVqstLy+XSqUeHh68a+mWhVypByypHW2vJ63p+PHjz58/38MPrKtjcXFs3Li2ztPJiUVGssxMU9dNGGOM5eXlAfDy8uJdyM1s3rwZwMqVK3kXwulZFD0ktKbo5oJ+Tx6axRjLyipdurRtbXaATZjA4uKYBT8FYyBITU0FMHPmTN6F3Ex0dDSADRs28C7Eks4JO7t5a2pci65LBoPhwIEDoaGhQUETDhzQNzcjJASJicjJwapVcHY2c+mDm+Wcbt2E5Zy1WnQIcasL+l26evXqunXrxo0b99h
jj6WkpNjbS19++fT580hOFm/t50HOikJoEUXyPhT31M1bU8Hp06cjIyMdHR2FH238+PFKpbKmpkbkUsmLL74IYGvPn1TIw8iRI2EZS29aTQgZY+fPn584cSIAe3v7uLg443a9Xp+UlBQSEiKs4GZjYxMSEpKUlGTJqyoObLNnzwZw+PBh3oV0y7i8miX8kVhTCFmnUdPi4uK4uDhjRzFs2LDIyMhz587xLnOwGz9+PICej12LLzs7G4Cfnx/vQhizuhAKEhIShgwZAkBYvxVAQEDAJ5980vlKBhGfwWAQblRvamriXUu3Dh48CCA0NJR3IYxZ+OhodxYvXnzq1CkXFxdHR8cZM2YkJSVlZ2cvX77cydJWmR18Ghsb33rrLQcHB2dnZ+GRzJbJgkZlxF/811QCAgJ0Ol1DQ8P333/vTBccLIBWq42Pj3/33Xerq6slEgljbMaMGT/88IM/l+eh3YpFhdAqj4QAampqGhoahg0bRgm0BCkpKUFBQStXrqyurp42bdqhQ4dmzJhRVFQ0ffr0kydP8q6uC5ZzkRDWG0JLWLOVAEhPT3/ooYdCQ0OzsrImTJiQmJj4888/z5kzJyUl5fHHH6+pqQkJCeG7hmWXLOpIaJUDM4yx/fv3A5g3bx7vQgavoqKiyMhI4UlGI0aMUCqVGo2m/Ru0Wu3zzz8PwM7Obu/evbzq7JKw0GhBQQHvQhiz0tFRxlhcXByAl19+mXchg1FjY2NsbKzwoHZbW9uoqKja2tou32kwGIQpmlKpdMeOHSLX2Z3W1lapVCqVSkV7+NnNWWsIX3nlFQDr16/nXcjgotfrExIShFurhU4kLy/vlt+lVCoBSCSSjRs3ilDkLeXn5wPw9PTkXUgbaw2h8LDVb775hnchg0hycrLxIdJTp049fvx4z7/3o48+srGxARATE2O+Cnuovr5+3759X3/9Ne9C2lhrCKdMmQLgxIkTvAsZFLKzsx955BEhfl5eXgkJCX2Y7bVr1y7hxpeXXnqJFpJtz1pD6OrqCqCsrIx3IQNcSUmJcfTFxcWl8+hLryQlJQlnks8++6yFnI9ZAqsMoVqtlkgkdnZ2ljD7dqBqbGxUKpVDhw4FIJfLIyMjKysr+/+xR48eHTZsGIBHH31UrVb3/wMHAKsMYU5ODoAJEybwLmRgEkZfFAqFcfTl0qVLJvx8lUolNDKzZs2qozUOrDSEhw4dAjB79mzehQxAycnJkyZNEuJ3zz33/Pe//zXHXnJycjw9PQEEBQVVVFSYYxdWxCpnzFjWdIeBIicnZ/78+aGhoWfOnPH09ExISDh58uSsWbPMsS9/f//jx4/7+fllZGTcd999wjWDQYtCSFBdXb1q1arAwMDvv//eyckpNjb24sWLixcvFm6SNhNvb+/jx49Pnjw5Pz9/xowZWVlZ5tuXpeN9KO6LRYsWAfjyyy95FzJACNd75HJ5VFRUdXW1mLuuq6sTnoE3YsSIX375RcxdWw46Eg523333XX5+vq+vb1ZW1pYtW4SVV0Tj7OycnJy8cOHCmpqa0NDQ5ORkMfduISiEg52Li0tNTY2bm5ufnx+XAuzs7BITE59//vnGxsb58+fv3buXSxkcSRhjvGvoHZ1O5+DgYDAYmpubjctbkD6rra0dOXKkg4NDfX29lN+CkIyxV199NS4uTiqVbt26VVivbZCwviNhSUmJTqdzd3enBJqEi4uLj4+PWq2+cOECxzIkEsnmzZuVSqVer1+yZMnGjRs5FiMy6wuhRd0TPTAEBwcDSE9P510IYmJiPvroI4lE8vrrr69evZp3OSKx1hDSCaEJTZ48GUBGRgbvQgBgxYoVn3/+uUwmW7du3Q8//MC7HDFQCEnbkdBCQgggIiJiyZIl9vb2wtPLBjzrC6EFPVZuoDCGsM8PXTUtvV5/5MgRjUYzfPhw3rWIwfpCSEdCkxs1apSnp2djY2Nubi7vWgBg7969ly5d8vX1XbhwIe9axEAhJAAQFBQEyxi
bAbB+/XoAMTExHC+ZiMnKQsgYKyoqAiDMwSemIoTw9OnTvAtBSkpKenr66NGjw8PDedciEisLYVVVlVqtHjlypHCzKTEVy7lKIRwGo6OjhXvwBwMrCyFdJDQTYQ53RkYG3xlUZ86cSUlJGTp06PLlyzmWITIrexYFnRCayZgxY/7v/+Z7elZoNAUODr68yvjggw8YY8uWLRsk46ICawqhWq1OTEwEhdA8IiIM1679qtFk8AphQUHB3r175XL5ypUruRTAi3W0o6WlpWvXrvX29v72228XLlw4qGb3isbRMRiAWs3tkv2GDRt0Ot2iRYsG2+mGpR8JU1NTP/7446SkJL1eD2DatGnh4eHCQ7OJaTk6Tga/EFZWVn7xxRcSieS1117jUgBHFhpCjUaTmJi4YcOGzMxMALa2tk888UR0dPT999/Pu7QBy9ExCIBazWeA9KOPPmpubl6wYMGdd97JpQCOLO5+wvz8/K1bt27fvv3q1asA3NzcIiIiVq5c6eHhwbu0ge/MmdE6XVVg4BVbW1EbwqamJm9v76tXr6alpU2fPl3MXVsCCzoSpqWl/f3vf9+/f79OpwMQHBwcFRX1pz/9SS6X8y5tsHB0DKqvP6xWZ4gcwm3btl29enXGjBmDMIGABSz01NDQEB8ff9dddwn12NnZhYWF/fzzz7zrGoyKi9eoVCgpeVvMnba2tgrD3QcOHBBzv5aD55EwLy9v27Zt27Ztq6mpAaBQKCIjI19++WVheWYivuunhaKOzXz11VdXrlzx9/c3PnNmsOEQQsbYkSNHtm7dum/fPmHMkzpPC3E9hCrR9sgY27RpE4DVq1cLz04bjMQ87NbX18fHxxuHv+zs7MLDw3/77TcxayA3ZfjttxEqFVpbS8XZ3/fffw9g7NixLS0t4uzRAol0JMzNzd2+ffvWrVtra2sBuLu7L126dMWKFaNGjRKnANIzEgeHuxsaUtXqDGfnR0XY37p16wC8+uqrg3rZLrNGXK/XJycnz5s3z7ig+vTp0xMTE7VarVn3S/qsqOh1lQqlpe+KsK+TJ08CcHFxqa+vF2F3FsuMR8KvvvrqnXfeEW7WHjJkyHPPPbdy5UrjKCixTEOHztJocuzsJoiwrw8++ADASy+9NMhvTDPXxfr09PT169cnJiaOGzdu2bJlS5YsEXl9ddIZYy063VWdrlarLdNqS/X6ti+MW8aP/7cweU0EFy5cuOOOO2xtbQsKCtzc3MTZqWUy15Hw4MGDiYmJTz75ZGJi4uAd9RKZWo2KCpSXo6oK5eW/f1FR0TxrzIWnUvT6upt/gFZbCtxdXPxGQ8OPgEQmc/X13SOVupij2PXr1xsMhhdeeGGQJxDmC6Fw419ISAgl0LxaW/HQQ0wFFYoAAAapSURBVG3Za2rq7l1Sl2n6hXUApFIXuVwhk7nI5e5yuUIqdbG1dZfLFcL/lclGq9W/NTQcDQhQAZLy8vX19T+4uPzR5FWXl5fv2bNHKpW+8sorJv9wq2PeENKNf2Zna4szZ9ri5+AAV1e4u8PVFaNHQ6GAqyvGjIGbm8xzzCQvV5ns1mcEMtkona66oeGok9MsN7c3r137T37+M7a2XnZ23ra2bf+TSof1s+pNmzZpNJqnn356wgQxTj4tnLnOCf39/S9cuJCdnX3HHXeY4/MHtd278d570OvxyCPYuBEZGXB2hkIBEw1vNDdnVldva2o6ZW8/QSodU1m5ocMbpNLhtrbetrZednY+whe2tl42Nl4ODoqefH59fb2Xl9e1a9dUKpWwts0gZ5YQMsacnJzUanV9ff0gH/gyvaIi3Hsv0tPh6orQULz4Iky/KhkDJAAqKjY1Nh53cXmqtbWwtfVKa2thS8uV1tbLBoO68/fU198/d65q7NixCoXC3d3d9zqFQuHr69t+1SalUrlmzZqQkJDB+TTCzszSjtKaaGZ05AgefhgKBQAsXozDh/sUQoNWW6nTVWm15TpdhVZbpdWW6XQVcrnCzs6
3qemkt/d2QGJjM8TGxn7EiOc6fLNOV90ulpeFLwoKXFpbW/Pz8zs/gF4qlSoUCh8fH29vbw8Pj23btgF48803+/TzD0BmCSGdEJpRVRWMF3tGjUJ1dRfv0WpRVYXKSpSVobJS+KIkQq12zNdqy3W6Kq22EuhixXsHhzv9/VWNjb9kZ99lY2Mrlbp4e2/r/DaZbJRMNkqYaGoUEACNpqWkpCQ/P7+0tLSsrCz/usLCwuLi4uLi4rS0NAB+fn6BgYGhoaH9/l0MEBRCa+PqCuOTW6qqoFDg+HHs3XtD5LpKZut9AfW+Odf/n0QmGy2Xu8pkY+RyhUzmKpePqavb29Skqq8/6OPzhfCmmpo9ly7NGTHiT+7u7/WkNDs7O6EF7bBdq9WWlJRcuc7f3z8sLKwvP/sAZcYQDrblekQSEoKYGJSUwM0Nu3dj+XJkZmLLlhveI5Vi9Gi4ukKhaPvC3d11oudIxTC53F0mc5XJXCWSjv/pm5pOAKr2a3/pdDUtLfl6/bV+liyXy318fHx8fPr5OQMVHQmtzdix2LQJISHQ6/H443jiCWRnY/NmuLrCzQ1jxrSl7vpkXSOnW30wYzoAN4ZTB8Cill8YkCiEVui55/Bcu8GSwEAEBvb/UzuHsKtYEtMzy3QWeoSgNaIQ8mKWENKR0Dp1bD4phOIwfQgbGhpqa2sdHR3phl3r0t2REBgUDwnkyPQhpMOglerquKcHIJFQCM2LQkjadHUk1IPaUfMzVwjpIqHVoYEZXuhISIw6XxWk64RiMP3vl0Jopby/u4PVucpXy3H9hgfnrOEOp2faz3bGGK6VDXSmPxLSRUIrNWRnhtPmo7L63/8khiaVj3zr+JD0Fo5VDQbUjpLr9HoAkLVrjnS6jluIGZg4hK2treXl5TKZTKHo0U3WxIJ0jhyFUBQmDmFRUZHBYBg7dqyM/stZHQohJyYOIfWiVoxCyIlZQkgXCa0ShZATOhKS6yiEnFAIyXUUQk5MHEK6SGjFuguhlCZwmxcdCcl1wnXC9pHrfOWQmIEpQ2gwGIqLiyUSiaenpwk/lohBr8fw4ZDLb1ichtpRUZjy96tWq+fPn9/Q0NB+uWViHaRS1NR03PjPf0KnM9Xq+qQ75noWBbFWHR50QSeE5kfPLSPtFBXhjTeQmopz53DmDPbs4V3QoEAhJO0YH3Qhk7U96IKYH4WQtNOTB10QU6MQknZcXVFZ2fa18KALYn4UQtJOSAhSU1FSAr0eu3fj0Ud5FzQo0CUg0k7nB10Q86NLFIRwRu0oIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4YxCSAhnFEJCOKMQEsIZhZAQziiEhHBGISSEMwohIZxRCAnhjEJICGcUQkI4oxASwhmFkBDOKISEcEYhJIQzCiEhnFEICeGMQkgIZxRCQjijEBLCGYWQEM4ohIRwRiEkhDMKISGcUQgJ4ez/AVgsU6C7g+/GAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVxU9foH8M/MAAMCKqAoIuCCpYiVKKTiVmKp4ZKmWVewsrh5VcrSa167P/Tel0V6NbTypqVJau6iuHZRUXFhM9xXxIVAAQVB2Zl5fn+ccRhxG5jlO+jzfvXqxRxnzvdr9vGcOed8n0dGRGCMiSMXPQHGnnUcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTDAOIWOCcQgZE4xDyJhgHELGBOMQMiYYh5AxwTiEjAnGIWRMMA4hY4JxCBkTjEPImGAcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTDAOIWOCcQgZE4xDyJhgHELGBOMQMiYYh5AxwTiEjAnGIWRMMA4hY4JxCBkTjEPImGAcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTDAOIWOCcQgZE4xDyJhgHELGaiKilJSU6dOnp6amnj9/3tTDyYjI1GMwVi+o1erDhw9v27Zt48aN6enpANzd3UtLS7du3dqjRw/TjcshZM+68vLy3bt3x8TEbNmy5ebNm9LGli1bDh48+Pz583v37rW3t1+/fv3AgQNNNQNi7JlUUlISGxsbEhLSqFEjbRxat24dHh6ekJCgVquJqKqq6qOPPgJgZWW1bNkyE82EQ8ieLbdu3YqOjh45cqS9vb02ez4+PhEREampqQ++X61WR0REAJDJZHPnzjXFlDiE7JmQm5sbHR0dHBxsbW0tBU8ul3fp0iUiIuL8+fNP/PjChQvlcjmA8PBw6SBpRBxC9jTLyMiIiooKDAyUyWRS9hQKRWBgYFRUVFZW1kM/olarT5w48eD2lStXSgEODQ2trKw04iQ5hOwpdOrUqYiIiC5dumhPOO3s7IKDg6OjowsKCh76kaqqqoSEhPDw8JYtW8pksqtXrz74nt27dzs6OgIYPHhwSUmJsWbLIWRPleXLl7du3VqbPScnp5CQkE2bNhUXFz/0/dIdiA8++KBJkybaT3l6eu7bt++h709OTm7atCmAbt263bx50yhz5hCyp0dMTMxrr70GoEmTJiEhIbGxseXl5Q99Z3FxsXRptGHDhtrstWnTRvfS6KOcPXvW09NTupyTmZlp+LQ5hOzpERoaCuDzzz9XqVQPfUNtL40+SnZ29gsvvADAy8vr3LlzBk6bQ8ieHs899xyAo0eP1th+7dq1xYsX17g0GhgYGBkZeeHChbqNlZ+f37NnTwDOzs6HDx82ZNocQvaUuHXrlkwms7Ozq6io0G4sKiry9/fXXhq1sbEZMGDAkiVLcnJyDB+xuLj4jTfeAGBvb79z584674dDyJ4SO3bsANCrV68a2zt27Ki9NHr79m3jDlpVVfXhhx9K8V69enXddmJV+wfdGLNESUlJALp161Zj+4YNG7y8vOzs7EwxqEKhWLJkibu7+6xZs959992srKzPP/+8tjvhpUzsKZGYmAjg5ZdfrrG9ffv2JkqgRCaTzZw5MyoqSiaTTZky5YsvvqBaLorgVRTsaUBELi4uBQUFf/75p7u7u5A5rFy58oMPPqisrBw7duzPP/9sZaXvaSYfCdnT4Ny5cwUFBR4eHqISCGDMmDE7duxwdHSMjo4eMWJEaWmpnh/kELKngfSF8MFzUTMLCgr6/fffnZ2dY2Njhw0bpudpJoeQPQ0sJIQAunfvfvDgwcaNGzdo0CAzM1Ofj3AI2dNAuirz4KVRITp06NCgQYPNmzfreUbKF2ZYvVdcXNy4cWOZTHb79u0GDRqIng4yMzM9PT0bN26cn5+vfU7gMfhIyOq9lJSUqqqqF198UVQCAwMDhw4dqq1PIx2Wu3f
vrk8CAfDNelbvif1CmJeXd/jwYXt7+8aNG9dtPnwkZPWe2BBKx72AgADtjcHafkHlELJ671EPrAkZvbKyMi0tTSaTBQQE6LkHDiGr365evZqdne3i4uLt7S1kAjUelzt+/HhJScnzzz/v5OSk5x44hKx+05776XkVxLjUanVqaioAf3//GvPRfyccQla/if1CeObMmcLCwlatWrVo0aLO8+EQsvpN7G36B0fnIyF7tlRWVh47dkwmk3Xt2lXIBGoc927dunXp0qUGDRr4+vrqvxMOIavH0tLSSktLO3TooP9VEOOqcdxLTEwkIn9/f/3XMYFDyOo1seeid+7cOXv2rI2NzUsvvSRtqdvNEg4hq8cedRWksrLSDKMnJyerVKrOnTvb2tpKWx61uv/xOISsHnvokXDx4sU9evTIy8sz8+hElJKSAg4hexbk5eX9+uuvb7zxxs2bN5VKpW6DwfLy8vnz56empvbq1evq1asmnUaN4/CZM2du377t5eWlvV2hLyOVfmPM5M6fPx8ZGRkQEKC9Ly9d//D09Dx79qz2bTdu3PDz8wPg5uZ27Ngx082nWbNmADIyMqSXS5cuBTBq1Kja7odDyCzdY1osXblypVevXgCcnZ0PHTqk/cidO3ekphSNGzc+cOCAKWZ16dIlAK6urtotYWFhAObNm1fbXXEImSVSqVSpqakRERFSZXuJk5PTyJEjo6Oj79y5o31nWVnZiBEjANjb2+/YsUO7vby8fNSoUQCUSuWGDRuMPsNVq1YBGDJkiHaL1J1C9+8CPXEImQXRNgnU/Vr1xBZLup3lly5dqrv9448/xr0Svcadanh4OIDZs2dLL4uKihQKhbW1dR36FnIImXglJSVSozLtulgAXl5e4eHhcXFx+rTF1e0sP2fOHN1fioyMlLZHREQYcc7z5s3z8/OLj4+XXu7ZswdAQEBAHXbFIWTCFBQUrFu3LiQkxMHBQZs9Hx+fadOmPbFJ4EM9qrP8okWLpO2TJk16VNc0A3311VfS/uvwWQ4hM7e8vLzo6Ojg4GAbGxvd7EVEROhe5KybVatWaTvL67ZniomJkW6p/+Uvf9HdbixDhw4FsHLlyjp8lkPIzCo7O1vbJNDKyqpfv34//PDDn3/+acQhHtVZPj4+XurL269fv6KiIiOOSETNmzcHkJ6eXofPcgiZ+ZSVldnZ2Tk6Ovbr1y8qKurGjRsmGkjbWf7ll1/W7Sx/4sQJ6ZKPv79/bm6usYbLyMgA0LRp07p9vP6EUKWisDB6/XV65RU6fVr0bFhdHD58GECnTp3MMNajOstnZGS0a9cOQPv27a9evWrgKNI9zFatWjVr1kz3dkWt1J8QbtlCH35IRJSURIMGiZ4Nq4v58+cD+Oijj8wzXHZ29osvvogHOstfv35d2t6mTZuysrLa7raqqmrPnj0TJ05s2bKl9jvtK6+8UudLPvUnhDNnUnS05uc2bWjnTnqgNTmzcG+//TaAn3/+2WwjPqqzvPRITbT2/yg9lJaWxsXFhYeHS0+rSTw8PMLCwmJjYw252FN/QjhrFi1frvm5XTtq04YACgyk2Fiq/bVsJoSXlxeAU6dOmXPQsrKy4cOH44HO8nreAikuLpbuYUoXdSRt2rQJDw+v232UB9WHEJaV0axZFBNDY8cSESUl0eDBNGUKNWpEAAHk40M//0y1P69g5pSTkwPA0dGxqqrKzEPrdpb/7bff9PnIrVu3oqOjR44caW9vX+M+SmpqqnGnZ/EhVKlo1CgCaNQoGj+eXn+dgoLo/HkioqIiiooiLy9NFF1dKSKCdC6FMYsSExMDICgoSMjouo/UzJ0791Fvu3bt2uLFi4ODg7X3UeRyeWBgYGRk5IULF0w0N4sP4SefEECNGlFa2sPfUFFBq1aRn58mivb2NGFCSXqWeWfJnuyLL74A8OWXXwqcw4IFC6RHZ6ZNm6Z7JpmRkREVFRUYGKhdJKVQKAIDA6OiorKzs009K8sO4cyZBJC
dHemzGiUhgUaOJIWiSmHTrlVFcDDFxZl+hkxvffv2BbB161ax01i5cqV0lBs7duzx48cftUjq9u3bZpuSBYfwxx8JIIWCNm6sxadOn079Yr1SqTku9uhBGzeSaZ4WZLVQVVUlPSCak5Mjei60Y8cOe3t73Yrdzs7OoaGhMTExdVgDYThLDeHmzaRQkExGdbqcnZNDERHUpIkmiq1bU1QU3b1r9FkyfaWlpQHw9vYWPRGNI0eOzJs3r23btuPHj4+LizPF06T6s8gQxseTrS0B9PXXhuymrIyio6l9e00UGzWi8HAy6lOKTF/R0Vl9+34zefK3oidiiSwvhCdOUOPGBNDf/maU/alUtHEj9eihiaKdHa1dS+7uVFFBBQU0dKhRBmFP8P77BNB334meh0WyrGprVZcuoX9/3L6Nd9/Fd98ZZZ9yOYYPx6FDSE1FSAiCg9GwIZo3x6pVRtk900tiIgAI6tpi6WREJHoOGnl5eQNffTXG0dGjQQNs3w6l0hSjqFSIi0NKCg4dwurVeP99bN5sinHEUasxfjyuXkVFBb7/Hj4+oieEwkI4O8PGBoWF0FlCyDQs5UhYVFT0+uuvHz116h0i9ebNJkogAIUCAKytERSEHTtMNIhQ27ZBrcauXYiMxNSpomcDAElJUKvRpQsn8OEsIoQVFRVvvfVWWlqat7f3pi1b5DrFDkwnLAy//AIRjSVNLC0NvXoBQEAAzp0DgNJSjByJ48dFzYjPRR9PfAjVavWYMWPi4uJatGgRFxfn6upq6hFVKty6BRsb+PmZeigRZDJov2JIx/3//AcbNqBPH+zfL2RGSUkAh/AxRF8Z0pSOa9SokUmLJeu6cIEAatvWPKOZ3fbt1U+6v/kmEVF5OY0eTQAplbRunZmno1ZrbtgavID2qaVvF7VDhw5FRkZ6eHi4u7t7eHi0bNnS3d3d09PTzs7OkL8CIiIiFi5caGdnt3XrVmmdpRn8+ScA6CzIfLoMHIht2zBgAFQq/PADANjY4Lff4O6OefPwzjvIz8df/2q26aSn4+ZNuLnB09NsY9Yz+obw9OnT27Zte3C7ra1tixYt2rRp4+bmpvuDt7e3bpuOh/rxxx//9a9/KRSKlStXSsXMTSQpCQ0b4rnnNGdnT3kIZTIsWgQACxYgNhZTpmg2/uc/aNoU06fj449x+TIiI80zHekLoaAOgvWDviEMDg7esmVLZmZmVlbWn3/+ee3aNemHsrKyjIwMqdBNDU5OTtLR0t3dvWXLltIP0hYHB4fNmzdPnDhRJpMtXrxYWnNpOkOGIDcX16+jeXPgqQ+h5OJFTJmCqirk5GDOHM0FqGnT0Lw5PvwQ33yDu3excCHkJr8owF8In0jfELZo0WLIkCEPbs/Ly8vKysrMzNTmU/ohMzOzoKCgoKDg1KlTD36qYcOGlZWVKpUqMjJy3LhxBv0OnqS8HHl5sLaG9oqPxYXQFHf22rXDunV491385z+4cQPLlkFaIDd2LJycMHo0fvgB2dn47Tfca3BpIpWVaNCAQ/g4JrxZX1paev369YyMjOzsbN0f0tPTCwsLmzRpIpfLs7OzFdI5IgDgzJkzTk5Obm5uRpxGRgbatkWrVrh8WbNl6FDExiImBsOGGXEcA8TGYutW/PQTkpMxaxa2bzfanuPjMWwYiooQFIRNm+DoqNm+fz+GDkVhIV59FTEx0CncYApVVZDJoPPnzO4n5HLQ9evX27ZtC2DRokXajb/88ouVlVVoaKhxx9q/nwDq2bN6i7QAOCXFuOMYoEYNK+NKTSVXVwIoIIDy8qq3nzxJ7u5lbdu+9corJlpetHMnP6OrFzH3CZs3bz5nzhwAX375ZX5+vrSxT58+VlZWK1asSE5ONuJYmZnA/SefFnc6+uCdPSPq0gVHjsDbG8nJ6N1b858DgK8vDhwY26zZhvj4Xr16XblyxcjjAgA/o6sXYTfrhw8fHhQUlJ+fL3XSANC6devJkycT0aeffkrGO0mWIuf
hoXn54FdE8bp2RXw8ACQnw9fX+Ptv0wYJCXjpJZw9i27dcPKkdvuCTZv8/PwuXLjQvXv3Y8eOGWW0khJs3IhlywBg6FCsWQOLeTzZUgk8Ch87dkyhUNjY2JyXCjcR3blzR/pCuHr1amONMmkSARQVpXl56RIB1KqVsXZvmIQEOnyYSktr1rAyhYIC6t2bAHJyooMHtZuN1dQ2P5/WraOQEHJw0JTd2r6dvv6a5s6llSv5dPRxBD8xIxWiG6rzR/Tzzz8DaNmyZXFxsVGGGDu2xNaWtK1a9+0jgHr1Msq+Dda3LwGkLbuSl0eXLplwuLIyeustsrb+qmfP7du3azeXl5dLZXmVSuX69etrtcvsbPrvf6l/f7K21qzYlMupe3eaM4e2bKGvv6bCQurXj954gwYNIoN7Lj2dBIcwJydHuqf/+++/S1tUKlXXrl0B/Pvf/zbKEP7+/gCSkjS1IjduPNm376HJky8aZecGqaoiBweSyUjbmWTBAgJo4kSTDrpt+nQA1tbWK1as0G5Wq9WTJ0+G3k1tr1yhqCgKCiIrK032FAoKDKSoqOraBTt3akojTJ1KbdsSQE2aUGKiSX5b9Zr4Z0e//vprAD4+PtqGrAcPHpTJZA4ODllZRqhcKPWs0u5K6ts6depUw/dsqLQ0Aki37Ir0hOdPP5l0WN0KnN98843uLz2xqe2JEydmzZrl59fVxUWlrVQwdCgtX063bj1u0LIyGjFCU5JSp7E8I7KEEJaXl3t7ewP48ccftRtHjBgB4L333jNw5xUVFXK53MrKSlv1eeLEiQAWLFhg4J6N4L//JYDGjKne0ro1AXTihBkG/+6777RNbXU7mSxbtszKygrAxIkTpe1qtToxMfHvf/+71MxI0r//yXfeofXra1E+q6qKPvqIALKyIp3G8swCQkhE69evB9C0aVNtsceMjAxbW1u5XJ6cnGzIni9fvgzA09NTu2XYsGEANtaqjKKJvPfefXVXcnIIIEdHMleV+I0bN0rNa0NCQnTLjW3evNnW1nbMmDEHDhwIDw/X7T3k4uISEhISGxtbh2ZGRKRWU0QEASST0f2N5Z9pFhFCIurTp0+Ns8Rp06YB6N69uyE9Nw4cOACgR48e2i3SF86kpCSDpmsUUh047UMDmzcTQP36mXMKe/bskfqcBAcHa6+EJSQkDBkyxMXFRZs9T0/PTz75ZN++fUZpI7FwIcnlBFB4OPfyIbKcEKalpcnlchsbG23F/6KiIul2xdq1a+u8299++w3A22+/rd0i9bUyyrdNgxQUkFxOtrZUXq7ZMn06ATRjhpknkpKSom1qm5eXd/To0bFjx0rZM27vIV2rVmmupoaGktCSnxbBUkJIRB988AGAN6V1qEREtGTJEgAeHh51vl0hPZfz+eefSy/Ly8vlcrm1tbX5GwPVtGuXprWb1iuvEECxseafS3p6uvQUoY+Pz5gxYwAMGDDgzJkzJh10925ydCSABg8mI92Nqq8sKIQ3btyQTo3+97//SVtUKpXUJ2D27Nl126e0bH/+/PnSS2nJlZeXl1EmbBCpzca9vx2oqooaNiSATNbG/fGkprYeHh4dO3YEkJCQYIZBk5OpaVPq2PFOnz6Dbj7D7bQsKIRENHv2bAAvvvii9kgVHx8PwMHBoW7NcVQqVVZW1q17l8+lr4iBuscfUQYOJKC62MTx4wQY/+nt2sjPzz958qSVlZW1tbWxnpR4orNn1Z069QHg6+v757NaHd2yQlhWViadF+neL37zzTcBfPDBB4bv/8GviEKoH6y7sngxAfTuu0LnRfv27QPQpUsXcw76qM7yzw7x1dZ0KZVK6XnuL7/8srCwUNo4d+5cpVK5fPnyP/74w8D9Z2ZmAmgpegFFenq6t0o177XXquuuWMb688TERADdzFuLws3NLT4+vmfPnlevXu3Ro8eRI0fMObolsKwQAhg1alTv3r1zc3OlJ2kAtG3bdtq0af/85z/bt29ft30WFRXt3bt
3zpw5P/30k/FmWndHjhy5VFBwWKe86vhr15b27FnQo4fAWQFISkoC8LLZ/y5wcnLavXv38OHD8/Pz+/fvv2vXLqMPsWsXWrZEZSVu37aYxdxaog/FD/HHH3/UuF1RWxUVFadOnYqOjg4LC/Px8ZHrVFJxdnZ2dHTcs2ePcedcK3/7298AaB8Zu337tlwuVyqVdbsDbkQtWrQAYLq+0I9Xh87yDyotpcuX6eBB2rCBFiyg6dMpNJT+/W/auZO6dKFffrHEFcaWGEIieu+99wCMGDFCz/dXVlampaX99NNPYWFhnTt31jYclyiVyoCAgAkTJixdunTUqFHSH/OaNWtM+lt4DD8/PwD79++XXv7vf//D/U8UCCGt63VxcTH6XUH96dNZvrS0NCMj4+DBg+vXr1+wYMGcOQmhofTaa9SxI7m4aJ4mr/FPz560cyf961/0+uuUn29xIdS30JOZRUZGbtq0aePGjbt37w4KCnroe7Kzs48ePXro0KGDBw+mpaWVlJRof0mhUPj4+HS5p2vXrrb3yhm9//77Xl5ec+fOfeedd7Kzs6WlA+ZUWloqXYTUdmmWvomZ/ySwBu0XQpm43gAymWzmzJkNGzacMmXK1KlTjx075u/vf/369evXr0sFiq5fv64txSDp02fK/v09tS9tbODqCg+P+/7dpg1KSiy3AYmFhrBZs2ZTp0795z//OXXq1NTUVKkYlJQ6SWJi4s2bN3U/4ubmJkWuZ8+e3bt3t7e3f+ieZTLZnDlz3N3dJ0+e/Nlnn+Xk5ESaqwKnJCUlpbKy0s/PTztDUd/EarCQaQD47LPPXF1d33vvvQMHDqx6oDyGjY1Ns2bNWrZsKf27Xbue771XHblmzR6+T+lrZlgYhg+vrndlISw0hACmTJmydOnSY8eOjRgxQqVSpaSk5OTk6L7B3d3d39/f39+/a9eu/v7+Tk5O+u/8k08+cXZ2Hjdu3DfffJObm7tkyRJp6YAZ1LgCSUTS//1mvib5oKNH/4BlhBDAmDFjZsyYce3atdGjR/v6+rq7uzdv3tzd3b1Zs2aGdCtp2BBNmmi6RJm+5KreRJ8PP86aNWtatWqlnWrDhg0DAwPDw8Ojo6MzMjIM3//WrVsbNGgAYNiwYaWlpYbvUB9SpePoe+XVLl68CMDNzc08oz9KeTk5Olb4+KQWFOi9NsmUbt68KZPJ7O3ttatMjUKl0iwv3rLFiHs1lEWHUK1Wl5WVTZ8+ffXq1enp6aYYIjExUVou0LdvX+1CKpNyd3cHoC2rs2LFCtz/xKwQSUkEkI+P2FlU27p1q/SHYvQ9f/stAfTqq0bfcd1ZziH5IWQymXT7fvTo0dKTNEb38ssv79u3z93dfd++fX/9a+T9J7zGJ5Und3Z21i6QtZBvYpbWMcJ0p+jjxqFRI+zdCyMVlzMCiw6hefj6+h46dCg4eNzmzTMDA/GwthpGo70Qqr0CaSGXRi3jiZ1qNf6zZGVlxcfH37171/A9Ozri/fcBYOFCw3dmJKIPxZbi1i3q3p0AataM/vjDVKOkpqaOGzcuNDR0zJgxv//+e2Zmpo2NjUKhuHPnjqmG1E+bNgTQ8eNiZ6GhVqsbN24MnWWf33//PYCxUt/FOjl7lq5c0fx8+TIpFKRU0vXrBs/VGDiE1e7epQEDCCAHB7q3msporl27tnjx4uDgYO2DBDY2Nm3bth00aNDAgQONPFgtSYU17O3JqBdB6k5qIqS74iwkJAT3N02ole+/J5mM/vrX6i1vvkkAPaKclblxCO9TXk7vvKPpaWvAgv5qZ8+enT17dteuXbXnn0qlctCgQfPmzZOWDri5uZmtRfGjxMYSQK+8InYW1aTas7qLXaSv0H/U9RTlwgWSy8nOjrSLFqUOJa6uZK6L4o/DIaxJraYpUzSFNP/73zru4ejRY//4xz86dOigPe13cHAYOXLk6tWrCwsLpbcZq/S14WbMIICmTxc4hft89NFH0FmKfevWLZlM1qBBA0N
uV7zxBgH01VfVW/z9CaBlywycrBFwCB8uMpJkMgJo2jR9P6JSUUICTZtG7dpRYOCPUvacnZ1DQkLWrVt392G1AcvLy6VnWZVK5QZtkXCz69ePANq8WdT4NXXq1AnA4cOHpZfbt28H0Lt3b0P2GRdHALVoUV3T59dfCSBfX/HFpjiEj7R8uaa89IQJpFOYs6byctqxgz76SNOATPrntdf+nDBhwu7du5/4l3dVVdX48eOhd+lro1OpNIU1LOQqRVFRkUKhsLa2Likpkbb83//9H4C///3vBu75hRcIoFWrNC/Ly8nNjWQy2r+/yMA9G4hD+DgxMWRrSzIZxcXV/KWSEoqNpZAQatSoOnutWlF4OCUkPC60D/XE0temc+IEAdS6tZmHfaQ9e/YACAgI0G6RTtoNLxX7008EkG7ZgIUL0729hw4ePNjAPRuIQ/gE+/fTvHn39bvs25eGDiU7u+rsvfQSzZplaOFsbenrSZMmqWobYgNkZtKsWRQZabYBn0CqMxQeHi69VKvV0lPBhlegKSmhpk0JoIMHNSegeXl5dnZ2MplMbFkNDqFedJeEvvqqpvdQly4UEUFG/OOLiYmRlly9++67Fc9qOc4hQ4YAWHXvrPHMmTO4v4a6ISIjbwYGLho9OkS7RVpGPGnSJKPsv244hHqpsSQ0OprqVPztyeLj46U2Vf369SsqMu13FctsZy1VZ750r0XcsmXLAIwcOdIoO8/JyVEqlQqFQrsA4NSpU1L3oYKCAqMMUQf82Jq+dJeEhobCzc0ko/Tt2zchIaFFixZ79uzp169fXl6eSYa5x9LaWV++fDknJ6dp06Zt2rSRthj32VpXV9dRo0apVKpFixZJWzp27BgUFHT37t2lS5caZYg64BDWQlgYfvkFpl533qlTp4MHD7Zr1y4lJaV3797Xrl0z7v5Pn8bMmejQARcuWFw76wfLvRm9ANynn34KYMmSJUVFRbpbvvvuu6qqKmONUiscwlpo2BB+fqHk2uAAAAu/SURBVOYYqHXr1gkJCZ07dz537ly3bt2OHz9u4A6rqrB7NyZMgLs7fH0xaxbOnUNy8n2H97t3UVBghMkbosZxr7i4+PTp09bW1n7G++/u5+fXu3fvoqKiX3/9VdoycODADh06XL16dfPmzcYapXZEnQezJ7pz507//v0BODk51a0ufWkpxcZSWBg1a1Z9LdfTk8LCKDaWtm2rbmc9ZAgNGEA+PnTtmtF/H7UgxW/37t3Sy7179wLw9/c37iibNm0C4O3trb0K/cMPP0BcaXYOoV527tS0jbh40azXMMrKyrSP1Oh/o+z27dsbN1556y2yt6/Ono8PzZhBqanVb9NtZz1wIHXqpImoqM7yZWVlSqVSLpdrn+yTas9ONHb/cJVKJS1Pjb3Xfqe4uFha2y2kZx6HUC+iQkhEVVVVH3/8MQCFQvHTYztp37x5Mzo6Ojg4WKlUdus2XJu9iAg6ffrJAxUUUK9eBJCzMx06ZLT560+qve3r66vdInV0XbFihdHH+vbbb729vTfrPKon9cN8V0QnAg6hXnbupFat6PXXqVcvMVfzH/NIzZUrV6Kionr37i3VpJPiOmDAGwsX1vrcUmxn+W+//RbAhx9+qN0iFSO+ePGi0ccqLy+v8UTEtm3brKysxo8fb/SxnohDqBeBR0Kt77//XrfL/KVLl6KiogIDA3UXSQUFBUVFRV034DFQgZ3lR48eDUB7tJdanZunGPGuXbscHBwAzDB7k1biEOrJEkJIRGvXrlUqlQCk3roSBweHUaNG6S6SMpCozvJSZb2TJ09KL8vLyw8cOLB+/XpTjxsdHS2ttA4NDRXyoBKHUC8WEkIi2rt3b+fOndu2bfv4RVKGM3NneamorKOjo5mbKEdFRUmnEuHh4aLq/3MI6x/pXNS4BTkfSttZ/rPP8k03XFlZ2Y4dOwYOHAigVatWZiu3U1VVJXXmUSgUP/zwg3kGfSgOIXuc3bupQ4dyV9cXBg8
ebNz2vSUlJbGxsSEhIdKzsgCaN28OwN3dfeXKlaY+KJWVlY0cOVL6Ir3WKIVMDMAhZE+QlJTSpEkTAD179szPzzdwb7du3Vq+fPnQoUPt7Oy0X2tfeumlWbNmrV27tse9Do3+/v5HjhwxyvwflJ+f36tXL+kpCLGFRSQcQvZk6enp0t1tHx+fzMzMOuwhNzdXuodpY2MjxUwul3fp0iUiIkJ3LZ9arV63bp2Hh4f0hpCQkBs3bhjv90FEdOVKptRt1svL68yZM8bded1wCJle6tZZ/vLly9J9FG2fVoVCERgYGBUVpa0p+qC7d+9GRERI14EbN24cGRlZrq0MY5iTJ6ldu2Jf326+vr51+9vEFDiETF/5+fk9e/YE4OzsrK3C9FCnTp2KjIwMDAzUnnDa2toGBwcvXrw4JydHz+EuXLgQHBwsffz555/fYfDTA3Fxmmo6b755w1i3c4yCQ8hqoaysTOoqZW9vXyMVKpUqNTU1IiLi+eef12bP3t4+ODg4Ojq6zguUt27dqu3bER6+o87NuNavJ1tbAuittyyi1qguDiGrnRqd5auqqhISEsLDw6VuUxIXF5eQkJDY2FijnEZWVFRERUX5+fVWKKpsbCg8nGqb6Kio6nueZizfoy8OIau1tWvXjh07VnqW1VGn7W2rVq0mT56ckJBgikJVWVnqkBBNMVgPD1qzRq9HCLSlnM389E+tcAhZ7ZSXl0sLjiIjI4cNGyaTyTw8PMaNG5eQkGCGJ05SUjR9ewAKCKDHLzwqK6O339Y0NVi92tRTqzsOIasdqd5Ex44diUitVl+5ckX6lrhmzRrzTECtpuhozTJluZxCQuih13ry86l3bwKocWPav988U6sjLm/Bake36ItMJvPy8pK2GLECxePJZAgNRXo6IiJgbY0VK9C+PRYsgG6BmMJC9OqFAwfg4YGDB9G7t3mmVkccQlY7NcrAXL16NTs728XFxdvb25zTcHDAzJlIS8Nrr6GgAJ9+ij590LIlKitx+zbGjkW/fujYEYcOoWNHc86rLjiErHZqlD/TvpSZugrdw3TogN9/R1wcOnTAgAH3VXCcPx+HD8PDw/yTqjUOIauF3Nzcy5cvOzo6+vj4SFuMWxe0boKCcPw4/P3vq+CoUKBhQ4GTqgUOIasFqQxMQECAtpSGJYQQgNT+WLeCYz1iJXoCrD6pEbnKysq0tDSZTObv7y90XtXCwjB8OHRuXtYDfCRktVAjhGlpaaWlpR06dJAaJ1kCsxVoNiI+EjJ9qdXq1NRU6ITQ6DXqDTFgAAYMwKJFuHABU6eKnk1t8JGQ6evc6dNFRUWtW7eWGifBYr4Q6kpLw5YtOHVK9Dxqg4+ETF8+SUmqZs2yhg7VbrGoI6GkRQsAuH5d9Dxqg0PI9JaYKM/J8WjdWvMyL++UtfXFoKCOlnQ7XGpZl50teh61waejTG+JiQCgPe4lJtqdP/+CSqW9XWEJpBDWryMhh5Dpp6gIZ89CqcSLL2q2JCUBOpm0DNLpKB8J2dMoKQlqNfz8oFRqtkgHRku6KgM+ErKnWY3jnlqN1FTA4kLYvDkUCuTkQFDX3brgEDL9SCHURu7MGRQWolUrNG8ucFIPsrJCkyZQq5GbK3oqeuMQMv0kJwP3XZW576UlqXdfCzmETA/p6cjNhasrvLw0W2ocGC1JvftayCFkepAid69GffUWSz0S2tggN7dC9ET0xSFkeqhx3LtzB2fOwMYGnTsLnNSjeHjMraiQZWZ+JXoi+uInZpge+vdHaSn69dO8TE6GSoWuXatvV1iSpk0dAFyvP+ejHEKmh8GDMXhw9UtbWwwebLFLhqRO99n158oMh5DpR63G+PG4ehUVFfj+e8TGip7QI7m5uaFeHQn5OyHTz7ZtUKuxaxciIy18uV69OxJyCJl+0tLQqxcABATg3DnRs3mc5s2by+Xy3NxclUolei564RAy/chkmjJmACxp2cSDrKysmjRpolKpcuvJUzMcQqafrl0
RHw8Aycnw9RU9myeoX2ekHEKmn4ED0aABBgzAjBmIjBQ9myeoX9dm+Ooo049MhkWLRE9CX3wkZEww6Uh48eJF0RPRi2LmzJmi58CYkd29ezcrK2vDhg3p6enPPfecq6ur6Bk9Dh8J2VNoyJAhvXv3VqvVK1as6NSpU3BwcLx0VckiyUh73Zmxp0tGRsaCBQuWLl1aXFwM4KWXXpo8efI777xjLXWusBgcQvaUKywsXL58+dy5c7OysgC4ubmFhYV98sknllO6n0PIngkVFRVr1qyZO3fuqVOnADg6Or7//vuff/65p6en6KlxCNkz5uDBg99888327duJSC6XDxo06MsvvxRbyZ9DyJ5Fx48fnzdv3po1ayorKwEEBgZOmzYtODhYSL9hDiF7dt24cePHH39cuHBhQUEBgHbt2k2YMCEsLMzOzs6c0+AQsmfdnTt3li1bNn/+/GvXrgFwdXUdP378pEmTXFxczDMBDiFjAKBWq7dv3z579myp35tSqRw1atQ//vGP9u3bm3poDiFj99m7d++8efN27twpXbl58803Z8yY0dmUJa34iRnG7vPqq69u3779woUL4eHhSqVy48aNZ8+eNemIfCRk7JFycnKWLl06depUkz5kwyFkTDA+HWVMMA4hY4JxCBkTjEPImGAcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTDAOIWOCcQgZE4xDyJhgHELGBOMQMiYYh5AxwTiEjAnGIWRMMA4hY4JxCBkTjEPImGAcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTDAOIWOCcQgZE4xDyJhgHELGBOMQMiYYh5AxwTiEjAnGIWRMMA4hY4JxCBkTjEPImGAcQsYE4xAyJhiHkDHBOISMCcYhZEwwDiFjgnEIGROMQ8iYYBxCxgTjEDImGIeQMcE4hIwJxiFkTLD/B19czSwHIR6HAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAdvElEQVR4nO3de1wV1doH8IeboBvZeCUIxbxViEl5Fy1fRfGWZoplHNTXCyV+vORreXk19Gi6S+tgWImKHkjD8HJOJqZpminHS5z8oGhYRwEVAa0NqCAq7PX+sXTOfhFxX2b2w9Hf9+Mf7Nl71gzqbz8za2atcRJCEADwcebeAYDHHUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjAzNXB27t161bJPcXFxcXFxSX3uXbt2oYNG/z9/Z2cnBy8ewCO5ySEUKWhsrKyvXv3VomTeczkD+Xl5Za0ptPpli9fPnnyZFX2DaA2UyeEubm5q1evNhgMD/2km5ubXq/X6/Xe3t7e3t766pw5c+a9997T6XQnT55s2bKl/bsHUJupE8I333xzzZo1AQEBvXr1UrJUJWby53r16lnS4KhRo7Zs2dKnT599+/bhoBQebSqE8MKFC23atKmsrDx9+vTTTz+tym79/vvv7dq1u3Llytq1aydOnKhKmwC1kwohjIqKWrt27ZgxYxITEy35fGlpabWnjh07duzbt6/yseTk5DfeeMPLyyszM7NZs2Z27iRArWVvCKstgzdv3jQYDFWSVlRUJH+oqKiotqkZM2b85S9/MV8yYsSI7du3Dxo0KDU11Z6dBLCE0WicN29eu3btLly48M477zRt2tQx27U3hNWWwfLy8rp16z5olbp165p3wyinji+++OLLL79s/smCgoJ27doZjcbExMQxY8bYs58ANdu+ffuUKVMKCgp0Ol1paalOp5s8ebJjomhXCGs4G1y0aJGXl5eStAYNGig/16lTx/JNJCUljR07Vq/Xnz59+sknn7R5VwEepKioaM6cOWvWrCGikJCQ2bNnf/HFF1u3bhVC6HS6CRMmzJkzx9fXV8M9EHaYNGkSEY0ZM8aeRh5q2LBhRDRkyBBNtwKPp5SUlCZNmhBRvXr1DAZDZWWlXJ6RkREeHi575nU63bRp0y5fvqzRPtgewtzc3Dp16ri4uGRlZam4Q/fLy8tr0KABEX355ZeabggeKwUFBSNGjJClqFevXr/++uv9n3FMFG0PoWPKoLRu3ToiatSoUUFBgQM2B4+8lJSUxo0bE5GXl1dsbKxSAKuldRRtDKHDyqBiwIABRDRs2DDHbA4c5/ZtEREhQkPFa6+J8nKtt5afnz98+HBZAMPCwnJzc5W3iouL9+zZ86AVtYuijSGUF9AdUwal3Nzc+vXrE9GWLVsctlFwhORksXChEEK8/77YsEHTTaWkpDRs2JCI9Hp9fHy8yWRS3vr222+bNWvm7u5+5syZGlrQIoq2hDAnJ8fBZVD6/PPPiahx48aFhYWO3C5oa84c8d13Qghx8KB4+22xc6eIjhZbt4rff1dxI5c
vXx46dKgsgIMGDbp48aLyltFoVC6AdevWrdqTwyrUjaItIZRlcOzYsTZv1TYmk6lfv35ENHr0aAdvuvYrKirKycnJyMj48ccfv/nmm40bN3766adLly599913o6KiRo0aFRYW9s033+Tn53Pv6X3mzBHyIPDgQTFzphg/XhDd/dOypYiKEikporjY5uZNJlNiYqLs2/P29o6Pjzd/NzU11d/fn4jq1q1rMBgqKiosb1mtKFp9nTA3N7dt27bq3ilquZycnPbt29+4cWPbtm2vvvqqg7fO4tixY7m5ucqgsGpHYBYXF1vSlL+//5NPPpmWlubi4qL1blth82Y6fZoWL6Zly6hZM2rXjnbtov376R//IGXgm5sbde1KffpQ377UrRtZfKk5Jydn0qRJ+/btI6IhQ4asXr1audpsfnmwR48eCQkJzzzzjA27f/LkySVLlth1XdHa1HKVQcXKlSuJ6Iknnvjjjz+49sFhli1b1qNHD0v+Hb28vJo1axYUFBQSEjJo0KDRo0e/9dZbs2fPNhgMq1evTk5O3rp1q5+fHxEtX76c+9cyk5sr8vNFRIQYOFBERIhbt/79VlmZ2LdPzJsnunUTrq5KeUwPDe3fv7/BYPjpp59qKFwmkyk+Pl72IzRo0KBKAdyxY4f827ChAFYrPT395ZdfVqpiTk6O5etaF0Kus0FzlZWVL774orpfBMnJokuXuz9s2CCSk4X8jxoXp3VPQU2OHj1KRB4eHsOHD580adKsWbMWL14cFxeXlJT09ddf//DDDydOnDh//rzRaDTvYKjB3r17nZyc3N3dT58+rfXOW8RkEi+9JHx8xJEjD/lkSYnYsUNMny7at5/XqZPy1ePp6RkaGmowGNLT083/Es6fP9+nTx/5mfDw8CtXrihvFRYWhoeHy7d69ux59uxZFX+hjIyMoUOHNmrU6JVXXrF8LetCyF4GpXPnzul0OiL6+9//bmdTxcXi+nWRnCyCg8XRo7UrhAMHDiSiefPmqdjmuHHjiKh79+72f/erID5eEInGjcXVq5avVFhYuHnz5kmTJrVq1cr8WMDX1zciImLdunUffPCBp6cnEfn4+MijRIVyebDK/TEqMhqNRKTX6y1fxYpzQt6zwSo++uijWbNm+fn5ZWZmynNuIiorK7t/6EZ5eURhoa6khO7/U1RERBQTQ888Q/n5lJlJ/fpReTl5eNDChdSiBV24QO++S+PGMfyC6enpXbp00el058+fl/dVqaKkpCQoKOjSpUuxsbHTp09Xq1lbFBRQYCAVFdHmzfTaa7a1kZ+ff/jw4X379u3evfvChQtyYcOGDY1GY3h4+GeffSYjR0SFhYXR0dHbt28nov79+8sx6Kr8HlWYTCZXV1cnJ6c7d+44O1s2kZrlea0lZVCqqKjo2rUrEfn6+rZu3bpJkyZubm7V/oKBgTeUzrb7/9SvLxYsEMnJ4osvxIQJIja2tlRCeXOCumVQ2rlzJxHVq1fvt99+U71xK4wYIYjEwIFqtXf69Om4uLjmzZsT0YwZM8zfSklJadSoEVV3eVAL8kS02OIeXUtnW8vNzU1KSnJxcZk7d66Fq6jLZDKZf6+4uLiMHDny1KlT+fn5ykIPDw/zQVJy6Ebz5jecnHR6PVX506AB6fUkm9y8mYho0iSKjqapUx39q90vPT19z549np6eM2bMUL3xwYMHR0REbNq0aeLEiQcOHOCZPSQ1lbZto/r1KT5erSYDAwMDAwN/++23Tz75pEWLFsrygoKCiRMnXrt2bfDgwfHx8VqMxZkyZcqlS5cSExO9vb2JyNvb+/r16yUlJXq93pLVLQ3hkiVLbt++PXbsWK4D0aVLlx44cGDFihXPP/88EVVWVq5fv76srGz+/PmRkZFyXKK7u7s9m+jalVxcSAiybEY4DS1YsEAIMX36dBUPRM2tXLly3759Bw8e/Pzzz6Ojo7XYRE2uXaO33iIiWrqU1J4zQf6/N79m88QTT8TFxRGRdkN
Sv/322+zsbKPRKEOo1+svXrxYXFwsy/LDWVIur1y5UqdOHVdXV0tuJtBCSUmJvNvowIEDcsmmTZuIKCAg4JZ5p7bdLl0SYWFi8GAVm7TakSNHiMjT09O8T091X3/9NRHpdLpz585pt5XqvfWWIBJduwoNOodWrFhBRG+//bbqLdcgODiYiP75z3/KlyEhIUT0448/Wri6RSeOOp2ud+/eHh4eMujSuXPnhg0bdvnyZeu+NGyycuVKo9EYEhLSu3dvIqqsrFyyZAkRLViwwKohwg/l7Ew//USpqZSUpGKr1lm0aBERaVcGpaFDh4aHh5eWlk6aNEmoNPesRY4epTVrqE4dSkggDe4ZkJWwpKRE9ZYt36jV+2BJUk0mU1hYGBGNGjVKWThy5EgiGjp0qJXfGlZzWBmUEhMFkdDrxaVLqrf9cMeP/+rs7Fy/fv3fVb1zslpXr16VczesXbtW621J5eXl/wwPF87OYsECjTaxZcsWIhoxYoRG7VdL3pX6t7/9Tb584403iGjjxo0Wrm5p72hOTo7s81EuvChjbTdt2mTtTlvlz3/+MxGFhITIlxUVFfL2ooSEBI22OGyYIBIsQ/kHDBCtW5/68MPNjtlccnIyEXl5eV24cMEBm3vvvfeIaPHgwdoNWfruu++IqG/fvhq1Xy15trnhXk+6nDl+1apVFq5uxSWKTz/9lIiaNGminKskJCSQxmNt7y+DGzdu1K4MSnl5okEDQSQcPJT/yBFBJDw9rbp2bS95C25oaKjWHfe//PKLu7u7s7PzoUOHtNvKsWPHiKhTp07abeJ+U6dOJaLY2Fj5cs6cOUT0/vvvW7i6FSE0mUyhoaFEFBERoSyUl7O0Oyh1fBmU1q0TRKJRI+HIofxhYYJIzJ/vuC0KIfLz8+XX3F//+lfttlJZWSm7KyZPnqzdVoQQWVlZRNSmTRtNt1LF/PnziWjRokXy5bJly4jo3XfftXB1625bO3/+vLwhaPv27XLJpUuXZG9NSkqKVU1ZgqUMKgYMEETCYUP5WcqglJSURPc61jXaxCeffEJEvr6+RUVFGm1CKigoIKKmTZtqupUqli9fTkQzZ86ULz/77DMievPNNy1c3epRFLGxsfJv02g0yiWrV68mbcbacpVBKTdX1K8viIRjhvKzlEHFK6+8QppNaZeXlyc7DJWuC+3cvHmTiOrUqaP1hsytXbuWiCZMmCBffvnll0T02muvWbi61SGsrKzs1asXEY0bN04uMZlM/fv3J6LXX3/d2tZqwFsGpc8/F0QiJOSnqxqXJ8YyKGnazSYnrRw5cqTqLVdL3rNx8+ZNx2xOCJGSkmLeJSsnjB8wYICFq9sysv7s2bNygu1du3bJJdnZ2bLvdNu2bTY0WC3eMiiZTGLixPWurq5aj+XnLYOSRt1ssgNWr9dfctQ1H3ndxZFzCOzZs0d2bsmXhw8fJqLu3btbuLqNEz3Jg2A/Pz/lEF/eGaTWWNvaUAal7OxseRqs4vdLFexlUKFWN1t5eXlhYeGvv/76/fffy1sO1qxZo7ybkZFx5KEDCO3Qpk0bInLkkFfZJdu5c2f5MjMzk4gCAwMtXN3GECqdXfJ+C7nkpZdeIqLIyEjb2jRXG8qgQnYqaDeWvzaUQelB3WxlZWV5eXmZmZmHDh3asWNHSkpKYmJibGxsTEzMtGnTIiMjhwwZEhISEhgY6Ovrq4wsk5o2bfrUU08p1z9OnDjh6enp4+NjPtegujp16kREx44d06j9+1Xpkr148aIsURaubvvkv7/88ouHh4eTk9Pu3bvlEqXv1M7z7/vL4BdffEFErVq1un37tj0t20YZy6/FFI8mk1i8WAQEiFoyWYec0q5+/fqBgYH+/v7yLMNabm5ujRs3btWqVVBQkIuLi5OT01dffSXbr6ioGDRoEBEFBwffuHFDi19BXkj7Ts7g5hByKI/SJXvt2jUi0ul0Fq5u17Moli5dKg8Rr127Jpd8/PHHVfpObfCgMrh+/Xp
79tYeKo7lr1ZtGOYuXb9+XafT+fj4mIfKw8PD19c3MDAwJCQkNDR0yJAhkZGR06ZNi4mJiY2NTUxM3LFjx6FDhzIzM8+dO5eXl2d+3V8+7q5u3brHjx+XS4xGozxifPXVV7W4Q0BObq/FNbMHKSsrIyJ3d3f50mQyyam07ty5Y8nqdoXwzp07nTt3JqLo6Gi5pLKysmfPnkQ0fvx429qsrKyUo6XuL4MW/koakbfn+/n52fP9Yq7KxDa1hMFgIKLnn3/+5MmTcpY3+9uUT0zw8/PLy8uTS7KysuRxr3KBW0Xjx48nB94QK1XpkpXH5BbeAGxXCIUQZ86ccXd3d3Jy2rt3r1yiHKampaXZ1ub169eTkpLkz7WhDErK98vEiRMt+bzJJIxGcf68OHFC/PCD+PprkZQk4uLEkiXinXdEamrViW1qgxs3bsiuxRpmg7fB7du3ZX9B9+7dy+/dNbp79+4qR6pqefvtt4loxYoV6jZbsxYtWvj6+iqpk6OKLRwmZumg3gd59tln582bFxMTExUVdfLkSU9Pz2eeeebDDz90cXHp3r27bW16enpGRkbKn5OTk7Oyslq1aqUs4bJp06aePXump6cnJCQEBAS0atWqyhOIzTVseDYj44GPSSUiFxfq0IHGjKE1a6hfP4f9Eg+xatWqK1eudO/eXV74VYubm9vWrVu7dOly5MiRqKgo+TzZsLAwg8Ewf/7iNWuCn36aOnRQbXMso5mys7PNX8o6b+k+2P8dcOfOnY4dOxLR9OnT7W/NXO0pg+Xl5XKe5sjISNlpVLP27UuIhLe3CAgQzz0nevUSQ4aIiAgRHS3mzhUGg0hLqzqxDTuNyqDixIkT8rz6448/VhbOmHGVSAQECBUHMMubuqZOnapai9aTlX///v2WfNjeSkhErq6uCQkJnTt3jouLGzFihLyfRhW1pwyuW7fu0qVLQUFB69aty8vLi46OLioqateuXZMmTfTVq2c2/rl6cnKw2jOxjUZlUBEcHJyUlDRy5Mh33nmnbdu2gwcPJqJlyxofPUpHj9Lw4fT992Tf/CR3VVsJMzIyDh8+PHHiRDvnQLGQEOL+fajp06qQN5K3bdu2rKzMzqaMRmN2dvbPP/8sJ5asPWVQuV6/a9cuInruuefsaVZWQiFE587if/9XaNPtaimty6BCDikMDV2jTLqbny/8/QWRuHcfpL3kvIZVnqInx936+PgYDAZN72grLS2dPXu2k5PT6NGjLbwWqloIb926FRQURESzZs0yXy6v8547d0651FvlOm94eLj5pV5X138X5+bNm7du3Zq3U1QIsWrVKiIKCgpS5oqVsy2qdep//LhwdhYNGwrNnsf8cLJT1PI7rWxmMpmmT/+Hk5N4+mmhDKj4+WdRr54gEnFxKmxi//79RNS7d2/zhdu3b+9w77zT399/1apV5RoMLP7++++feuopInJzc1u2bJmFa6kWQiHE8ePHXVxcnJ2dg4KCWrZs2bBhQ9sePKLX65s3b96+fft58+YpVyC53F8G5e25jRs3vn79ulpbYRzLLxxYBqWyMtGxoyAS/foJ5Qv2yy+Fk5NwdRUHD9rV+JkzZ/70pz8RUcOGDVNTU83fMplMO3bskP0XqldFWQDlrJzPPfdcenq65euqGUIhRJs2bao8j8bDw6NBgwbKpd4q13nj4+PNL/Xm5eWx3BNTA63LoMQ1ll+Sg1AdUAYVOTmiaVNBJO4NwRNCiLlzxYABNj4ErbS0dMOGDfJWSlmI5A9dunRxQBR3794tZzd0c3ObPXu2tbc3qxnCtLQ0Iqpfv/7+/fv/9a9/Xb16tbYlylqOKYPS2rUMY/mFWRl05H1eQojDh4W7uyASykX1ykphw7MhMjMzZ8+erXRZe3l5RUVF7d+/Pz4+Xj53iYg6dOiQkpJifneOWlEsLi6OioqSEygHBwf//PPPVv8C6oawb9++RBQTE6Nim7wcUwYlk+nundwOG8svOb4MKjZsEETCzc2
WQ9CSkpL4+PgXXnhBOebq2LFjfHy8+ZdjeXm5VVH09/ePjY21PIq7du1q1qyZPNyz5/lqqoVQlkG9Xq/WXV38ystHd+nimDIo5eQ4dCy/4CuDiqlTBZHw8RGWz/b2449izBjRtm2QTE7jxo1nzpxZw4PmZRSVsyRVolhUVBQVFSU/371795ofc/9QqoXw0SuDYtUq4eqa+frryj+YdmVQ8dlndx8WpvZUIdVjLINSRYUYOFAQieBgUfOYCqNRxMeL9u3vPsnnv/5rWb9+/b766isLT8AsieK2bduUHtSlS5c+qKmdO3fKB1qo9YBRdUIohxI/YmXw7tWre1Na7f32W3nyUFpaqt1mTSbRr58gEm+8od1G7mIvg9Iff4jWrR94faKyUuzdKyIjRd26d+Pn6ytmzxa2Td5fWloaGxv70Cj26tWr2v/JRqNRKYAhISFqjRtWJ4TyqagLFy5UpbVaIS5OEImgIKWvwNSjx289eyarciWrRtnZokWLgm7dRipT2mmEvQwqzpwRH3wgqoxqyssTBoNo2fJu9pydRWioSEkR9l82fmgUq5WSkiJnCVD9AaMqhPBxKIMiNfXuYaI2Z4NVfPLJKrJ7WGbNakkZNGc+tut//ke4uNyNX8uW4v33xb1RUKqxPIqFhYXyoQ9E1KtXL9Ufi6RCCB+HMii6dhVEwlGjY5Sx/No9krX2lEGF+diu+Hjh4yPCw8WOHdqOeK4SxeDg4CpRVJ6w7eXlFRsbq8UTtu0N4WNUBn18hJZng1VoOpb/xo0b8shKGQVaGyQni48/FuPH3x1gqdnjKqpRbRQvX748fPhwuSQsLEy7SXHsDeFjUQa7dBFE4qOPHLwjqo/lV8h5SXr06KFus3ZiH9t148aN5cuXy6N0ee5HRN7e3gkJCZo+qMOuED4WZXDnTseXQcnasfwWqp1lUNwL4dGj4oUXOAdYKlVxwoQJAwcOdMDzquwKIcqg1rKysqpMaWezsrKy/Pz8rKysKVOm1MIyKP7/2C72Uc43b97U4vSvWk7C1qe0pqWl9ezZU6/XZ2dnV5lq8j/YiRP03ns0fjzJk4HUVBoyhHx86Px5qlePZY8MBsPcuXMDAgJOnTplPgHhzZs3i/6/8vLy+xeavytXdHJy0uv1W7ZskVMDAj+b4/sIlsH7sZZB6c6dO3I2W39//2effdbPz0922FjL3d29adOmbdq06dSpk3bPkwQb2FgJH80yWMW1axQRQenpdO4cVxmUTp06lZCQsHLlSvOFcoyYom7dulWWVHn3kf1n+s9nYwj79Olz4MCBhQsXxsTEqL5PbO7cof/+byospEaNKDHx7oQnRiNZMLOT1ioqKo4fP67MYGPbxNhQO9kSwke2DG7eTGfPUkwMLV1Kfn40bhz3DsFjwdmGdRYsWEBEM2fOfKQSSEQZGdSjBxFRz5508iT33sDjwuoQpqWlHThwQK/XT5s2TYsdYqYcFzg5se4HPEasDqFSBr0fOrHmf5wOHejQISKitDR6/nnuvYHHhXXnhMrZYE5OziMYwtu3afz4uz0x69dTnTrcOwSPBetm4C4uLg4ICBg/fvwjmEAiqlOHNm7k3gl47FjdO3r79u2Kiop6rNfNAB4ltt+2BgCqsOUSBQCoCCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADO
EEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRghhACMEMIAZghhADMEEIAZgghADOEEIAZQgjADCEEYIYQAjBDCAGYIYQAzBBCAGYIIQAzhBCAGUIIwAwhBGCGEAIwQwgBmCGEAMwQQgBmCCEAM4QQgBlCCMAMIQRg9n8OhMFC1fsNhQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3deVzUdf4H8PfMAKLigZAHKR4gGp5loWZraSiG9wHlrqhpC2v1QzMV3XZlO1RqM821AtM1tbVE12wzjzTz2lJ3UBBB7lsZUEBAQI6Z9++PD06jog4z3/Fj+no++iPG4T0flNfM5/v5fg4VMxMAyKOW3QCAhx1CCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUIIIBlCCCAZQgggGUJ4i9paysqiqVMpKIh+/FF2a+DBp2Jm2W2QqrSU0tIoIYFiYigxkc6epSVLSKejadOoZ0968UX6979lNxEecHayG3BP1dRQYiKdO0epibVvx02g+HjKzb35SenpVFZGHTuSnR095O9QcE88mCFcvZp0OoqIoBkzaPJkOneOzp6lc+coOZnq6sRT7Jd2OKvJzyMHB/L0pAEDqFcv8vamXr2oWzf6y18oJ4ecnMjeXu4PAg+DBzOERJSeTpWVlJVF48f/+qBGQz16UN++1Ls3lT7+dZvH2lG3bqS+5cI4JITCwqiyksrK6ORJGjjwXrYcHjYP5jXh6tXUsSOVlND+/VReXp+6Pn3I25scHc2u8pe/0LJlNHQoHTliw7behsFgiI2NzcrK8vT09PT0bNas2b1vA9wbD2wIx4yhlStJpaJPP7W0SmkpeXhQURHt308jRyrZvruprq729fVVq9VHjx4Vjzg7O3e7ztvbu1evXp6enq1atbqXrQIbeWC7o0Q0bJh1Q5utWlFYGC1aRIsWka9vA71WmwkNDT1+/Lirq6u/v39WVlZ6enpJSUlMTExMTIzp09q1a+d5i9atW9+zdoIiHsxPwhdeoMpK2rSJunSxrtC1a+TlRbm5tG0bBQYq0ra72rJly/Tp0x0dHY8fPz5gwAAiMhgMeXl5abeoqqq69dtdXV1FGmfNmjVs2LB702awxoMZwtatqbSUioqoTRura61bRyEh1L07JSaSnc07DrGxsU8//XRVVdWGDRt
mzZp15yeXlJRkZGRkZGQkJCQkJiZmZGSkpqaWlZWJP/Xz85s4cWJISIjyrczKoiVLyM6OZs6k559Xvv7Dhh84Oh0TsbOzQuXq6vixx5iI161TqOJtFRUVde3alYjmzJljcZH8/Pxjx47NmzePiDw8PAwGg4ItrBcWxvHxXFvLkyYpX/zh8wCG8PhxJuKBA5mZDQb+v//jlStZr7ei4rZtTMQdO+orKxVqYwP0ev0LL7xARD4+PteuXbO+WufOnYnoxx9/VKR5N5g+nUtKmJknTlS++MPnAZw7mpZGROTpSUSk09E//kEREdaNqgQElIwfv7ht25Vr1yrSwgYtXbp07969Li4u27Zta9KkiZXV1Gr17NmziSgqKkqJ1t2oUyfKyaG6OkxmUIbsdwHl/fWvTMTh4czMR48yEQ8ebG3N/fv3E5Gzs3NxcbHVDWzAd999p1arNRrN/v37lap58eJFe3t7e3v7ixcvKlWTdTqOieHERJ46lYOC+MgRxSo/xB7AT8LUVCIiDw+iGz8VrTFy5Mjhw4eXlJSsXLnS2lq3SEtLCwoKMhgMy5cvH6ncDckOHTqMHj26trZ206ZNStWk7dtpwAD6+GPaupXefZc2bKBPPlGs+MPqAQyhCF737kRE6elE1wNppYiICJVKtXr1ap1Op0C566qqqgIDA69cuTJ+/PiFCxcqWJmIxNDounXrDAaDMhXFX6h4V0tIoM2b6dtvlan8EHsAQ2j6eyI+Fa3/JCSip556asKECRUVFe+9954C5a6bM2fOmTNnvLy8Nm3apFKpFKxMRH5+fp6enpmZmQcPHlSmomnXwvTdDqzwoIWwqKioadPBzzyz1NWViEilyvHyuta9u16R4itWrLCzs1u3bl26CLrVPv74402bNjk5Oe3cudMWc9BUKtWsWbPc3Drt2VOmTEXTdzUFuxk3Ki0t3bhx44cffrhr165z585du3ZN8Ze4v8i+KFXYL7/8QkRPPvmk+FL8ZhcVFVlTMyEhoaamRvz/yy+/TER/+MMfrG0o888//+zg4KBSqbZt22Z9tdvR6SodHevs7PjCBatr6fXcpAmrVCxu1bzwAhPxt99aXfcGZ8+ebdWqVadOnUx/S52dnQcMGBAQEBAeHh4dHa3VasvKypR9XYketBBu2bKFiF566SVmLigoEP9+1hSMiYlp3br12LFjxb277OxsBwcHtVodEBCwbNmybdu2xcTElJaWNrasTqd79NFHiWjBggXWNM8cU6YwEb/7rtWFMjKYiDt1qv+ye3cm4sREq+v+qqysrGfPnkQ0ePDgefPmjR49ukePHg4ODg1+fnTo0GHo0KGzZs1avnx5dHT06dOnf6PJfNAmcKelpRGRp6en8f+7W3HREh8f7+fnd+XKFXt7e41GQ0QZGRlE1K5du+3bt2/fvt34TNNVDkLv3r3bt2/fYNm6urrAwMALFy4MGTJk+fLlFjfPTCEhtGMHrVtHS5aQRmNFIdMLQr2esrNJraauXRVpJBEx88yZM5OSkvr163fw4EHj6i29Xp+Tk5Oenm6cNJuampqRkZGfn5+fn29caCK0bdu2e/fua9eu7d+/v1INs7UHLYTias3Dw4NuDKQFUlNT/fz8Ll++PGrUqK1bt9rZ2Z04cWLcuHE1NTU+Pj7+/v6mvxYNrnJwcXERc6m7d+9uXOXg4uKycOHCo0ePtm/ffvv27fa2v9/9/PPk5UUpKbR/P/n7W17nikdO5edPtXR92onIUJCtcndTsboxCzTvYtmyZTt37nR2dt65c6fp+kmNRtO1a9euXbv6+vqaPl9MnTXOm83IyEhJSSksLCwsLAwKCgoPD58yZYpSbbMt2R/FChs4cCARHTt2jJmXLl1KREuXLrWgTlpamugujhgxoqqqipnPnDnj7OxMRNOnT9ffMgsuLy/v8OHD69evX7x48ZQpU/r379+iRYsG/8JbtmypUqkcHBx+/vln639eM33wARPxuHFWFcnNna/VUn7
++8xcWrpPq6XkpOeVaR/zwYMHNRqNWq3+/vvvjQ+mpKQYrwbDwsI2bdqk1WqvXLlyhzp5eXlz584lIl9fX6XaZmsPWghdXFyIKD8/n5lnzpxJRJs2bWpskawsHjs22c7OcdiwYZWVlcwcFxcnKk+ePLm2ttbMOsXFxVqtNjo6OiIiIjg4eMiQIWKxX5s2bYYNG9bYVlnj0iVu0oQ1Gs7OtrxIWto4rZZKSv7NzIWFa7Vays4OUaR52dnZrq6uRPT222+bPr5nz54G38hMkxkVFXXs2LESMZeVmZmvXLnSvHlzlUqVkpKiSPNszYYhrK6ujoqKGjhwYGBgYERERF1dne1eSygpKSEiJycn49IBnU7X2FGTvDz28GAiDgpKKy8vZ+akpCRxdTdhwgTjMKnFjh8/rlKpWrVqdfXqVStLNcpLL/06m88y5855a7VUWRnHzDk587Ra0uk+sL5hVVVVTz75JBGNHj36pi6GXq/Pzs7+8ccfo6KiFi5cOHHixD59+jRt2rTBZJp2ecQqsEWLFlnfvHvAJiGsq6vbuHGju7u76d/RE088odVqbfFygsFgWLFiBRG1a9cuLy/PsiIFBeztzUT8+OMsZommpqa6ubkR0ciRI61f3CAMGTKEiDZs2KBINTMdPsxE3L+/xQX0p0831WpVev1VZk5NHaPVUknJN9Y3TEw079Kly+XLl838FmMXIzw8PCAgYMCAAS1btvz888+NTzh58iQRubq6KvVPZq7MTH7pJZ42jQ8eNP+blA/hgQMH+vXrJ4LXq1ev6Ojo77//vkuXLkSkVquDg4MtGNC/q2PHjj3zzDNEJG4MOjg4BAUFpaamNqpISQk//jgTcd++LH4fMjLYz+9C06Ztnn/+eXFlqIjNmzcTkY+Pj1IFzWEw8L//zRb/TlZXZ2u1FBfnJr48d66nVkuVlfFWtkos8nB0dIyJibGy1E2dlCeeeIKIvvrqKyvLNo5FKy2VDOHx48d/97vfifh17tw5KirK2AWtrKwMDw8XN3zc3NwsuE67nVOnTj1/fXG3m5vb0qVLAwMD1Wo1Ednb28+ePTstLc2cOrW1PGAAE3GvXlxYyMycm8tduzIR/+EPF5XtOlZVVbVp04aIrP/Na5RVqzgsjJk5pPGXcmVlP2q1lJQ0lJmZ9TExTbRalV5fYU17Tp8+LfqWGzdutKZOgz777DMieu655xSv3ADjpZZFKy2VCWF8fHxAQIBIgqura0RERIPdgLi4uEGDBomnjRkzJtuaUQLm8+fPBwQEiPmWzs7OERERFRX1vxPp6enBwcF2dnbi4zcgICDRjHvK69dzjx71M0sKCurX0w8cyLa4A/zGG28QUYgFabDCqlU8ZQpXVHBICG/bxqtX8+7dnJTE1dV3/96ysp/Onx+cm/sGM1dXZ2q1dPZsR2saY9xG4LXXXrOmzu2Ul5e3bNmSiBISEmxR/1dVVTxwIEdEMDO/9RbHxXFtLQcGml/A2hBmZmYGBweLTx4nJ6ewsLBbe5uZmZkfffSRGFQ0GAxRUVHib6dZs2aWDdjk5uYaM9asWbOwsLAGl/mlpaXNmjVL3IhTq9Xz50fF3633JN46Cgu5V6/6KyjrZrzdVlJSkkqlcnJyskXn/HZWreLt23ndOg4J4ZEjmaj+P42Gu3blESN4zhyOjLxWUrKrqirBYLhtz7WurrS4eNvly5stboler/fz8yOiQYMGVZvzHmCR4OBgIpo/f76N6tebOZOJ2NOTy8s5J8eClZaWhzAvLy80NFT0MB0cHIKDgwsKChp85tixY4moX79+J06cEI9cvHgxKChIfCT279//5MmTZr5oUVFRWFiY6MPY29sHBwdfuNucyOzs7NDQUCcnpy5dUlQqHjOGT5260/NLSur7pX36sNkjBZZ49tlniSgyMtKGr3GjVas4NZX/9CeeM4e/+IJfe439/NjDg+3sfg2kj0+5VktaLWm16rNn3ZOTh2dlBet0H5SU7CwrO5iVNSs
7+9XLlzda2ZLFixcTUdu2bXNzc5X4yRoWGxtLRC4uLgpez9/ss8+YiJs25dOnmZlzc7nx90UsCaFpEuzs7IKCgjIzM+/w/EOHDnl5ed06MLN7926xD4p4/M4T/65evRoRESHus6lUqoCAgEbdBbpw4UpoKDdtykQsonj9DeEGpaX81FNMxF5erOB69AZt3bpVvDfZ9mWYmbmykidM4AULODWVt227ua9UU8MpKbxnD69Zw+vXZ6Sm+p875xUTY389jfX/ZWX9sbR0LzNXVSVeurQhPz+iuHhHZWWsXl/eqMZ8++23KpXKzs7up59+Uu5HbJi4+bFlyxabVD95kps0YSL+4gtm5mvX2MeHW7XiY8caVaZxIayoqLgpCcnJyeZ8o+nATIcOHYwDMxUVFeHh4aLH6Obmtn379lu/t6amJioqyjgP09fX1+LxjMJCDg/nVq3q3/WHDLl5JHnUqPqehQJrDu6murq6bdu2RHTqzh/NSpgxo/6z3fwNrwyG2mvX0ktL9xcWfpKb+0Zq6piqqoSLF9/JzHz5ypX/nD8/2DSfcXHtk5J+l5n5cn7+Mp1u+x3mtaSkpIgR7A8//FCxH+/2Pv/8cyJ65plnlC99+TJ36cJEHBpa/0hwMBOxuztfutSoSuaGUK/Xf/LJJ8YkjBw50oIkxMXFDR482Dgwk5WVJR6PjY0deP3QFdMBG71eHx0d7XF9xdqgQYMOHTrU2Be91aVL/Oc/c8uW9VEcNoz//vf6YcNJk7h/f87IsP5FzCKW0s+ePdumr/LJJzf0mCxWVnaY2cBsSEubePny5tzcN9PSxick9Dp92tE0kCdP9m9wXsuBAwfS0tJ69+4tpj3YZC/GW1RUVIjPjPi7jgc0Sl1d/VX1oEH141pbtjAROzry//7X2GLmhjAhIUHsBu3j42PNLnoGg2HTpk1igN50YEav1xsHbJo3bx4REbF//37jRHhvb+/o6Ghl/9nKyjgigtu04dGjbxg2vCe/G/XS0tLUanWzZs1MZ10p68SJ+h6TNXeFamoulpUdLir6MitrVk7O3IKC1Tf+uaG6Oqes7NClS+vy8sKOHAnt169f8+bNqSEqlcrLy+terjl69dVXiSjU+HmlBENYGBNx+/b1XabYWG7WjInYogkY5oZw2rRpRDR37lxFkpCfn9/gwExeXt7EiRNN/826du26efPmWydMK6W0lLOzbxg2vMfETc61a9faonhBAXfsyEQ8d65VdS5dWq/VUmbmDGauqclPS5uYn7/srt914cKFI0eObNiwYcmSJQEBAY8//niTJk3UavU9WEJpKi4ujohat25tvINlpV27dj3bv39+hw58+DAzc3Exd+vGRBb/9pgbwsmTJxNRg9dsFrtpJo3x3XHXrl0ODg6Ojo5r1qyx3fi1KdNhw3ssOjqaiPr06aN45bo6HjGifsdHK/8W8/IWa7V08eK7zFxWdkirpaSk31lQZ/fu3UTk4eEh3lVPnTo1adKkezA+LO5OfyGGT6yTlJQk+muffvwxM7NeX7/DgI+PxdORzA2hv78/Ee3evduyl7mdq1evvvnmm+KO3+bN9fedxDknjo6Oyr7WHYgQ3jpseA/U1taKuamKr2xauJCJuF07tnQi7a/S06dotVRU9BUzX7q0TqulzMyXLahj3Bf84MGDbMs3oJts3LiRiAZbvf9seXl5r169iOjFF18Uj/xv5Uom4kcesWZ9irkhFOf7iHGR5cuXt2/ffs2aNRa/6k3OnDnz+uuvGzu6xcXFZPW2FL8hS5YsIaIZM2YoWHPHjh3PPfeeg4Ph6FEFqiUm9tdqqaLif8ycl7dIqyVzuqMNeuedd4goICCAbfkGdJPKykqxFvS0FWNTBoMhMDCQiHr06CFus4ktm2cPHcoHDljTPHNDKD7Qf/nlF2ZesGABEX3wgQLLWBqUl5cn7ljYqP79Jjs7W6PRNG3a1MoNqYzOnz8vlhRHRSmwyoGZz5xpodVSXV0xM6elTdJ
qqbjYws2p8vPzTfcFt8UbUINCQ0OJ6NVXX7W4wt///nciatGihZgCmZmZKZaYRogJa1YwN4R9+/YlotjYWGZ+7bXXbDeWwMypqaniysFG9e9DYg7X6tWr7/7UuykvL/f29qbru11Zr6bmolZLsbGu4suEhL5aLVVUWD71XIy9rVixgm3wBmR04sSJ9evXG788ffo0ETk5OXXr1s3X1zc4ODgiIkJs3FZpxjk/x48ft7e3V6lUO3bsYObKykqxSmPcuHHWD1WaG0KxXZK4NS9WTNpuOdzZs2eJqHfv3jaqfx/auXMnEXXv3t3K9W/GLlPPnj2Vug1QXn5Uq6Xz58UFleH06eZaLdXV3WmPiTvbt2+fGPcWwzOjRo1S6g3IqLCwUGyaaFzKJDYjt2vohEk7OztPT89Ro0a9/vrrq1ev3r17d1JSkumIYH5+focOHYhoyZIl4hGxaUP37t3vvNeGmczd6EkMloipaqb/bwu2rn8fGjt2bMeOHcvKyhwdHZ2dncWp9MaN23r27Hm72243WblyZXR0dIsWLXbu3Hm7TW4aq/n3Wf3X9K2d+gz1JH25rnW6+7WONRqN5VsVjxw50tPTMy0t7cCBA35+fiEhIfv27YuMjAwNDVVkD3K9Xj9t2rTc3NzBgwdPmjSJiL788suoqChHR8dDhw61bNnypgOPc3Nzxf+YFtFoNO7u7mJvro4dO3bu3Nnd3f1vf/sbEa1du/aLL75o3rz5N998o8yWzWaGVXR/L126xMzjx48nom++UeZ641aHDx8moqFDh9qo/v1JHEfR4Fu1Wq12d3cfPnx4cHDwBx98sHPnzrNnz97aifrpp5/s7OyMXSbFvPUWE/Hf/sZ8fX3+kCFWlhQbPU6cOJGZa2trxZ5aRxUZRGJetGgRmWywEBsbK/Zuu13fraamJj09/cCBA1FRUWFhYQEBAd7e3g3+Q3z99ddHjhwRsy+Ng/nWMzeE4scQa1vFyUH79u1TqhE32bt3LxGNGjXKRvXvZzU1NampqXv37v3HP/4RGhrq7+/v5eV1u91vH3300WeffXb27NkrVqz4/PPP27VrR0R//vOfFW5TYCAT8ZdfMjOvX89EbPU4ik6nc3BwsLOzEzn561//Sgrta75r1y4xO/zw4cPMXFxc3K1bN2r80s2ampqUlJQ9e/asWbMmNDT0hRdeEB96opc7b94865tqZG4IxYpB0YkXy+eP2OxsOnGBNBGnwJq4cOGC6Vv1gAEDbu2guri4+Pr6Kr+h1hNPMFH9qpMlS5iI33nH+qpiFfi7777LzDk5ORqNxsHBoVBsamCp5ORkEZWPPvqIlT78WMwFf+qpp+bMmWP9fl+mzAqhOJGjSZMm4kuxPMR2c///9a9/EdHvf/97G9V/MOj1+qysrIMHD0ZGRi5YsGDChAlarfZSI+fvm0WsOhGjlwEBTMRbt1pfVRwU1alTJ/GuMWbMGLJuacWtd9LFB2ybNm3uvNTOTFevXhUJV3guuJkhFHfPW7duLb4UP6riTTESbzm2XlsAZikoYCI2Tpzo35+J7rIs2jwGg0GsMhXTsL777jsi8vDwsHjEv8E76Wq1WsHrJlvMBWczT+q9abgSo6MPkZsOITQ9/NE6KpXqlVdeISKx4Zq/v3/nzp3T09PFsFxjGYeFv/nmm5YtW2ZlZc2cOVMcfizuwSpC3OfYvHlzZWWlUjXJzPMJGwyh6WkBykII7yP29uTnR888Q0Sk01F5Obm4kLOzIrVffvllR0fHPXv2ZGdnq9VqsQHpjh07Glvn559/XrJkiUql+uc///nYY49VVVVNnjy5qKho3LhxYqRUKX379h00aNCVK1fErFfFmPNxGR8fT0S9evUSX4pVkrZbAifuxlh2hgTYUGUl79nDiu7kOXXqVCIKDw9n5vz8/H379jV22Vp+fr6YgLp48WLxiLJ30m8i5oIPGjRIwZpmhfDUqVNkcvKmGDG33d7GYWFhdH1aE8hn0a7SZhKdzw4
dOph/woepuro6sZ358OHDxQDPmjVriMjJyencuXNKN5bZZC54UlycUjUb0R0V/U9m7tixo4uLy+1uXlkP3dH7S2QkvfUWbdxIn36qeO1nn33W29s7Pz//dme/3JlGo/nTn/7Uo0ePr7/+WqPRnDhxQqwu2LBhgxg+VFzTpk13vPFGRc+ePSIjlarZ6GtClUqVnp5++fJlRWYY3eHlbHfNCY2Tn08dO5KdHTHborwYnnn//fdTU1Nra2sb++3Tpk2Lj49/5JFHCgoKpkyZUlNTM3/+fDFSaiPDp0xplpxMX35J5eWKFLRkYEZZmZmZ06dPF2fN34OXg0br1IlycqiujmxznumMGTM8PDwSExPF3CA3N7cRI0aEhIS8//7727dvj4mJuetQpL29fV1d3YsvvnjhwoWnn35anAtkQ489RkOGUHk5bdumSD2zJnBrNJpWrVqdOXMmMzOzq3LHIxcUFLz33nvr1q2rqanR6XQ//PCDeFz8pSOE94uQEAoLIzs7eu01W5Rv06ZNamqqv79/cnJyTk6OOATb9AkajaZTp06eN/Lw8HA0OSQ4LCzsyJEj4vBj210o/SokhI4fp6goeuUVBaqZee0oJhmJ/dGsn7NTXl4eEREh9uoQZ0UYj20pKyvr2bMnESm4ch9+K6qrq5OTk7///vvVq1e//vrro0aN8vT0bPBEcZVK1alTp2HDhv3xj3+cPn26SqWyt7dXagr43V27xo88wkSsxGl/5oawqKgoODhYXAf26dPH4v0IxMmhYqoxEfn6+oqFwsY/Elubtm7d2pxtueFhUFtbe9MqhwEDBtw0ZODu7n6v37Xnz2ciDg62vlLjduA+fPiw+JhSqVRBQUHmn+rI13fyNfZmn376aeMUcHGoqNgCSPzR1KlTxVoSd3f3b7/9tlGNhIdBXV1dRkbGDz/88Omnny5evNgWtwTvIimJVSp2cmKrj/Rp9FkUVVVV4eHhTZo0IaL27dubedLggQMHxAYZRNS7d+/o6GjTPzIeKio2+RWP325bboD7xXPPMRF/9pmVZSw8lSklJWX48OEiIf7+/neYpW48Q5eunxxqnBLx3//+d+jQocbuhOmhokJdXd3HH38sVoi3bNkycmtkncHmB98DmGvHDp461YJ9729i+dFoYkN7V1dXImratGl4ePitG/Xq9XrxAejm5hYZGWkc0THzUFHBeI6az399Hj//+KkKmx+fAnAvWXtIqE6nM25o37dv31sHbPbt22d6hq45h4o2aM+pPZ3iO1EM2Z22ezPvzat6Jc+vBrCQ6Zy+Vav4l1+Yma8vaDSTMsdl//TTTz169DAO2DS4fV1hYWFYWJg5h4reToW+IiwvTHNaQzH06NlHd5QoupMKgAXCwjg+nmtredIkyTmu+2IAAAMoSURBVCHk6ycQNjhgU1xcbDxUVNwVtGalc2xlrE+SD8UQxdCYtDE51TkKtB7AMtOns1hONHEir1rFL77Ic+dyQECjaigWQiE5OVlsmC8GbBITEyMiIsSs80YdKnpndYa6VQWrnM44UQytyMdiC5Dnrbc4Lo5razkwUP4noZFer4+MjBRrDo2bEY0cOVKrxNwCUznVOaG5odnV2VMzpr6S/cqWoi0V+op5ufPC8sKUfSGA28rJ4alTOSiIjxyxOIQqts3UeJ1O98Ybb3h5ee3fv3/58uXG+xmKW6ZbNqLFCJ/mPr9U/DK4+WBdrW514eqIRyNs9HIAijN3B+7Gat++/VdffUVEb7/9to1eQrhYe9HdwZ2IBjcfbNMXArARs5Yy3c/cHdwzazKJ6D+l/5HdFgBL2Ko7es9crrv8Zt6bjmrHIU5D2tu131u291TFqUDnwLlt58puGoBZfvMhBPit+813RwF+6xBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMk
QQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMkQQgDJEEIAyRBCAMn+H4o5oyW+17pvAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kDUrLw8Mg37y", - "colab_type": "text" - }, - "source": [ - "There are 17 datasets total in MUV as we mentioned previously. We're going to train a multitask model that attempts to build a joint model to predict activity across all 17 datasets simultaneously. There's some evidence [2] that multitask training creates more robust models. \n", - "\n", - "As fair warning, from my experience, this effect can be quite fragile. Nonetheless, it's a tool worth trying given how easy DeepChem makes it to build these models. To get started towards building our actual model, let's first featurize our data." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "eqEQiNDpg37z", - "colab_type": "code", - "outputId": "e1b919ac-1bb3-4224-ff91-65d2e3d16f3b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 357 - } - }, - "source": [ - "MUV_tasks = ['MUV-692', 'MUV-689', 'MUV-846', 'MUV-859', 'MUV-644',\n", - " 'MUV-548', 'MUV-852', 'MUV-600', 'MUV-810', 'MUV-712',\n", - " 'MUV-737', 'MUV-858', 'MUV-713', 'MUV-733', 'MUV-652',\n", - " 'MUV-466', 'MUV-832']\n", - "\n", - "featurizer = dc.feat.CircularFingerprint(size=1024)\n", - "loader = dc.data.CSVLoader(\n", - " tasks=MUV_tasks, smiles_field=\"smiles\",\n", - " featurizer=featurizer)\n", - "dataset = loader.featurize(dataset_file)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from medium_muv.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 0 took 38.166 s\n", - "Loading shard 2 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "TIMING: featurizing shard 1 took 8.325 s\n", - "TIMING: dataset construction took 46.915 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QQfINH2Ag371", - "colab_type": "text" - }, - "source": [ - "We'll now want to split our dataset into training, validation, and test sets. We're going to do a simple random split using `dc.splits.RandomSplitter`. It's worth noting that this will provide overestimates of real generalizability! For better real world estimates of prospective performance, you'll want to use a harder splitter." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "-f03zjeIg372", - "colab_type": "code", - "outputId": "5472a51a-42e9-43bc-e73e-d947ae3c6a33", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 136 - } - }, - "source": [ - "splitter = dc.splits.RandomSplitter(dataset_file)\n", - "train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(\n", - " dataset)\n", - "#NOTE THE RENAMING:\n", - "valid_dataset, test_dataset = test_dataset, valid_dataset" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Computing train/valid/test indices\n", - "TIMING: dataset construction took 0.529 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.254 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.272 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6nRCpb08g375", - "colab_type": "text" - }, - "source": [ - "Let's now get started building some models! We'll do some simple hyperparameter searching to build a robust model." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "BvfbTbsEg376", - "colab_type": "code", - "outputId": "9f96de90-ad90-4492-cced-0f5e74dcacb6", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 853 - } - }, - "source": [ - "import numpy as np\n", - "import numpy.random\n", - "\n", - "params_dict = {\"activation\": [\"relu\"],\n", - " \"momentum\": [.9],\n", - " \"batch_size\": [50],\n", - " \"init\": [\"glorot_uniform\"],\n", - " \"data_shape\": [train_dataset.get_data_shape()],\n", - " \"learning_rate\": [1e-3],\n", - " \"decay\": [1e-6],\n", - " \"nb_epoch\": [1],\n", - " \"nesterov\": [False],\n", - " \"dropouts\": [(.5,)],\n", - " \"nb_layers\": [1],\n", - " \"batchnorm\": [False],\n", - " \"layer_sizes\": [(1000,)],\n", - " \"weight_init_stddevs\": [(.1,)],\n", - " \"bias_init_consts\": [(1.,)],\n", - " \"penalty\": [0.], \n", - " } \n", - "\n", - "\n", - "n_features = train_dataset.get_data_shape()[0]\n", - "def model_builder(model_params, model_dir):\n", - " model = dc.models.MultitaskClassifier(\n", - " len(MUV_tasks), n_features, **model_params)\n", - " return model\n", - "\n", - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)\n", - "optimizer = dc.hyper.HyperparamOpt(model_builder)\n", - "best_dnn, best_hyperparams, all_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, [], metric)" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/1\n", - "hyperparameters: {'activation': 'relu', 'momentum': 0.9, 'batch_size': 50, 'init': 'glorot_uniform', 'data_shape': (1024,), 'learning_rate': 0.001, 'decay': 1e-06, 'nb_epoch': 1, 'nesterov': False, 'dropouts': (0.5,), 'nb_layers': 1, 'batchnorm': False, 'layer_sizes': (1000,), 'weight_init_stddevs': (0.1,), 'bias_init_consts': (1.0,), 'penalty': 0.0}\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling 
BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:108: The name tf.losses.softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.softmax_cross_entropy instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:109: The name tf.losses.Reduction is deprecated. Please use tf.compat.v1.losses.Reduction instead.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. 
ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. 
ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n", - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "computed_metrics: [nan, nan, nan, 0.3168604651162791, 0.525, nan, 0.7647058823529411, 0.26775147928994086, 0.18300653594771243, nan, nan, nan, 0.5405405405405406, nan, 0.24614197530864193, nan, nan]\n", - "Model 1/1, Metric mean-roc_auc_score, Validation set 0: 0.406287\n", - "\tbest_validation_score so far: 0.406287\n", - "computed_metrics: [1.0, nan, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]\n", - "Best hyperparameters: ('relu', 0.9, 50, 'glorot_uniform', (1024,), 0.001, 1e-06, 1, False, (0.5,), 1, False, (1000,), (0.1,), (1.0,), 0.0)\n", - "train_score: 1.000000\n", - "validation_score: 0.406287\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/root/miniconda/lib/python3.6/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric mean-roc_auc_score: Only one class present in y_true. 
ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QhZAgZ9gg379", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. 
Join the conversation!\n", - "\n", - "# Bibliography\n", - "\n", - "[1] https://pubs.acs.org/doi/10.1021/ci8002649\n", - "\n", - "[2] https://pubs.acs.org/doi/abs/10.1021/acs.jcim.7b00146" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/06_Going_Deeper_on_Molecular_Featurizations.ipynb b/examples/tutorials/06_Going_Deeper_on_Molecular_Featurizations.ipynb deleted file mode 100644 index 9fdfcc85f11426808151e13c3afec24b483ff4c9..0000000000000000000000000000000000000000 --- a/examples/tutorials/06_Going_Deeper_on_Molecular_Featurizations.ipynb +++ /dev/null @@ -1,780 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "06_Going_Deeper_on_Molecular_Featurizations.ipynb", - "provenance": [] - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "tTuYGOlnh117", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 6: Going Deeper On Molecular Featurizations\n", - "\n", - "One of the most important steps of doing machine learning on molecular data is transforming this data into a form amenable to the application of learning algorithms. This process is broadly called \"featurization\" and involves tutrning a molecule into a vector or tensor of some sort. There are a number of different ways of doing such transformations, and the choice of featurization is often dependent on the problem at hand.\n", - "\n", - "In this tutorial, we explore the different featurization methods available for molecules. These featurization methods include:\n", - "\n", - "1. `ConvMolFeaturizer`, \n", - "2. `WeaveFeaturizer`, \n", - "3. 
`CircularFingerprints`\n", - "4. `RDKitDescriptors`\n", - "5. `BPSymmetryFunction`\n", - "6. `CoulombMatrix`\n", - "7. `CoulombMatrixEig`\n", - "8. `AdjacencyFingerprints`\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/06_Going_Deeper_on_Molecular_Featurizations.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tS3siM3Ch11-", - "colab_type": "code", - "outputId": "30d477f6-86c0-4615-afa1-19d136de4416", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 21867 0 --:--:-- --:--:-- --:--:-- 21867\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - 
"/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.9 s, sys: 650 ms, total: 3.55 s\n", - "Wall time: 2min 9s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "omxBgQVDh12B", - "colab_type": "text" - }, - "source": [ - "Let's start with some basic imports" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Sp5Hbb4nh12C", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from __future__ import print_function\n", - "from __future__ import division\n", - "from __future__ import unicode_literals\n", - "\n", - "import numpy as np\n", - "from rdkit import Chem\n", - "\n", - "from deepchem.feat import ConvMolFeaturizer, WeaveFeaturizer, CircularFingerprint\n", - "from deepchem.feat import AdjacencyFingerprint, RDKitDescriptors\n", - "from deepchem.feat import BPSymmetryFunctionInput, CoulombMatrix, 
CoulombMatrixEig\n", - "from deepchem.utils import conformers" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_bC1mPM4h12F", - "colab_type": "text" - }, - "source": [ - "We use `propane`( $CH_3 CH_2 CH_3 $ ) as a running example throughout this tutorial. Many of the featurization methods use conformers or the molecules. A conformer can be generated using the `ConformerGenerator` class in `deepchem.utils.conformers`. " - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "4D9z0slLh12G", - "colab_type": "text" - }, - "source": [ - "### RDKitDescriptors" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "oCfATWYIh12H", - "colab_type": "text" - }, - "source": [ - "`RDKitDescriptors` featurizes a molecule by computing descriptors values for specified descriptors. Intrinsic to the featurizer is a set of allowed descriptors, which can be accessed using `RDKitDescriptors.allowedDescriptors`.\n", - "\n", - "The featurizer uses the descriptors in `rdkit.Chem.Descriptors.descList`, checks if they are in the list of allowed descriptors and computes the descriptor value for the molecule." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "WUfNkB5Wh12I", - "colab_type": "code", - "colab": {} - }, - "source": [ - "example_smile = \"CCC\"\n", - "example_mol = Chem.MolFromSmiles(example_smile)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Xxb9G_W9h12L", - "colab_type": "text" - }, - "source": [ - "Let's check the allowed list of descriptors. As you will see shortly, there's a wide range of chemical properties that RDKit computes for us." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "3dt_vjtXh12N", - "colab_type": "code", - "outputId": "3494245f-150e-46d0-a61a-64d1bb281f58", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "for descriptor in RDKitDescriptors.allowedDescriptors:\n", - " print(descriptor)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "EState_VSA5\n", - "NumRotatableBonds\n", - "Kappa1\n", - "MinPartialCharge\n", - "BalabanJ\n", - "VSA_EState1\n", - "SMR_VSA4\n", - "NumHAcceptors\n", - "PEOE_VSA6\n", - "Chi1n\n", - "SlogP_VSA7\n", - "Chi0\n", - "SMR_VSA3\n", - "NumSaturatedCarbocycles\n", - "EState_VSA1\n", - "SMR_VSA8\n", - "NumAromaticHeterocycles\n", - "TPSA\n", - "VSA_EState9\n", - "VSA_EState4\n", - "NumAromaticCarbocycles\n", - "MaxAbsEStateIndex\n", - "SMR_VSA7\n", - "PEOE_VSA14\n", - "Kappa3\n", - "Chi3n\n", - "SMR_VSA2\n", - "SlogP_VSA8\n", - "SMR_VSA1\n", - "PEOE_VSA12\n", - "NumAliphaticRings\n", - "NumAliphaticCarbocycles\n", - "MaxPartialCharge\n", - "SlogP_VSA6\n", - "EState_VSA4\n", - "HallKierAlpha\n", - "EState_VSA3\n", - "PEOE_VSA13\n", - "SlogP_VSA10\n", - "HeavyAtomCount\n", - "NumAliphaticHeterocycles\n", - "Chi0n\n", - "Kappa2\n", - "SlogP_VSA4\n", - "VSA_EState7\n", - "NHOHCount\n", - "PEOE_VSA3\n", - "VSA_EState3\n", - "RingCount\n", - "SlogP_VSA5\n", - "EState_VSA8\n", - "Chi4v\n", - "Chi4n\n", - "PEOE_VSA1\n", - "EState_VSA10\n", - "VSA_EState8\n", - "PEOE_VSA2\n", - "SMR_VSA5\n", - "PEOE_VSA10\n", - "Chi0v\n", - "MinEStateIndex\n", - "Chi1\n", - "NOCount\n", - "PEOE_VSA9\n", - "VSA_EState10\n", - "PEOE_VSA8\n", - "SMR_VSA10\n", - "ExactMolWt\n", - "SlogP_VSA11\n", - "PEOE_VSA11\n", - "SlogP_VSA12\n", - "NumSaturatedRings\n", - "VSA_EState5\n", - "EState_VSA7\n", - "MolMR\n", - "BertzCT\n", - "PEOE_VSA7\n", - "EState_VSA11\n", - "EState_VSA6\n", - "Chi2v\n", - "Chi3v\n", - "NumAromaticRings\n", - "MaxAbsPartialCharge\n", - "MolWt\n", - 
"MinAbsPartialCharge\n", - "PEOE_VSA4\n", - "VSA_EState6\n", - "SlogP_VSA9\n", - "NumValenceElectrons\n", - "MinAbsEStateIndex\n", - "Chi2n\n", - "HeavyAtomMolWt\n", - "MaxEStateIndex\n", - "MolLogP\n", - "FractionCSP3\n", - "NumHDonors\n", - "NumHeteroatoms\n", - "Chi1v\n", - "LabuteASA\n", - "Ipc\n", - "SMR_VSA6\n", - "SlogP_VSA1\n", - "SMR_VSA9\n", - "EState_VSA9\n", - "SlogP_VSA2\n", - "EState_VSA2\n", - "NumSaturatedHeterocycles\n", - "VSA_EState2\n", - "NumRadicalElectrons\n", - "SlogP_VSA3\n", - "PEOE_VSA5\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "KfyDpE81h12Q", - "colab_type": "code", - "outputId": "8691486b-0771-40f3-8203-e5a5515ee73d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "rdkit_desc = RDKitDescriptors()\n", - "features = rdkit_desc._featurize(example_mol)\n", - "\n", - "print('The number of descriptors present are: ', len(features))" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "The number of descriptors present are: 111\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "hE6G0Gboh12T", - "colab_type": "text" - }, - "source": [ - "### BPSymmetryFunction" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "41RwzbTth12U", - "colab_type": "text" - }, - "source": [ - "`Behler-Parinello Symmetry function` or `BPSymmetryFunction` featurizes a molecule by computing the atomic number and coordinates for each atom in the molecule. The features can be used as input for symmetry functions, like `RadialSymmetry`, `DistanceMatrix` and `DistanceCutoff` . More details on these symmetry functions can be found in [this paper](https://journals.aps.org/prl/pdf/10.1103/PhysRevLett.98.146401). These functions can be found in `deepchem.feat.coulomb_matrices`\n", - "\n", - "The featurizer takes in `max_atoms` as an argument. 
As input, it takes in a conformer of the molecule and computes:\n", - "\n", - "1. coordinates of every atom in the molecule (in Bohr units)\n", - "2. the atomic numbers for all atoms. \n", - "\n", - "These features are concantenated and padded with zeros to account for different number of atoms, across molecules." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "PpbPi0Qah12V", - "colab_type": "code", - "colab": {} - }, - "source": [ - "example_smile = \"CCC\"\n", - "example_mol = Chem.MolFromSmiles(example_smile)\n", - "engine = conformers.ConformerGenerator(max_conformers=1)\n", - "example_mol = engine.generate_conformers(example_mol)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "j7WrM5phh12X", - "colab_type": "text" - }, - "source": [ - "Let's now take a look at the actual featurized matrix that comes out." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "IuPE4MXZh12Y", - "colab_type": "code", - "outputId": "5f2cab7c-980b-472e-c63c-75c4415683c1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 357 - } - }, - "source": [ - "bp_sym = BPSymmetryFunctionInput(max_atoms=20)\n", - "features = bp_sym._featurize(mol=example_mol)\n", - "features" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "array([[ 6. , 2.33166293, -0.52962788, -0.48097309],\n", - " [ 6. , 0.0948792 , 1.07597567, -1.33579553],\n", - " [ 6. , -2.40436371, -0.29483572, -0.90388318],\n", - " [ 1. , 2.18166462, -0.95639011, 1.569049 ],\n", - " [ 1. , 4.1178375 , 0.51816193, -0.81949623],\n", - " [ 1. , 2.39319787, -2.32844253, -1.56157176],\n", - " [ 1. , 0.29919987, 1.51730566, -3.37889252],\n", - " [ 1. , 0.08875543, 2.88229706, -0.26437996],\n", - " [ 1. , -3.99100651, 0.92016315, -1.54358853],\n", - " [ 1. , -2.66167993, -0.71627602, 1.136556 ],\n", - " [ 1. , -2.45014726, -2.08833123, -1.99406318],\n", - " [ 0. , 0. , 0. , 0. 
],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ],\n", - " [ 0. , 0. , 0. , 0. ]])" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 7 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "2w0oGOgMh12b", - "colab_type": "text" - }, - "source": [ - "A simple check for the featurization would be to count the different atomic numbers present in the features." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "1rbcGUf6h12c", - "colab_type": "code", - "outputId": "bcbc2fd8-0724-4d76-961a-b7bae46b3916", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "atomic_numbers = features[:, 0]\n", - "from collections import Counter\n", - "\n", - "unique_numbers = Counter(atomic_numbers)\n", - "print(unique_numbers)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Counter({0.0: 9, 1.0: 8, 6.0: 3})\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "f8T557NOh12e", - "colab_type": "text" - }, - "source": [ - "For propane, we have $3$ `C-atoms` and $8$ `H-atoms`, and these numbers are in agreement with the results shown above. There's also the additional padding of 9 atoms, to equalize with `max_atoms`." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "o-5L6sx0h12f", - "colab_type": "text" - }, - "source": [ - "### CoulombMatrix" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "SF3l5yJ4h12f", - "colab_type": "text" - }, - "source": [ - "`CoulombMatrix`, featurizes a molecule by computing the coulomb matrices for different conformers of the molecule, and returning it as a list.\n", - "\n", - "A Coulomb matrix tries to encode the energy structure of a molecule. 
The matrix is symmetric, with the off-diagonal elements capturing the Coulombic repulsion between pairs of atoms and the diagonal elements capturing atomic energies using the atomic numbers. More information on the functional forms used can be found [here](https://journals.aps.org/prl/pdf/10.1103/PhysRevLett.108.058301).\n", - "\n", - "The featurizer takes in `max_atoms` as an argument and also has options for removing hydrogens from the molecule (`remove_hydrogens`), generating additional random coulomb matrices(`randomize`), and getting only the upper triangular matrix (`upper_tri`)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "evLPEI6mh12g", - "colab_type": "code", - "outputId": "80baf653-ac25-4d0e-a833-a13133ac0a6a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "example_smile = \"CCC\"\n", - "example_mol = Chem.MolFromSmiles(example_smile)\n", - "\n", - "engine = conformers.ConformerGenerator(max_conformers=1)\n", - "example_mol = engine.generate_conformers(example_mol)\n", - "\n", - "print(\"Number of available conformers for propane: \", len(example_mol.GetConformers()))" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Number of available conformers for propane: 1\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "pPIqy39Ih12i", - "colab_type": "code", - "colab": {} - }, - "source": [ - "coulomb_mat = CoulombMatrix(max_atoms=20, randomize=False, remove_hydrogens=False, upper_tri=False)\n", - "features = coulomb_mat._featurize(mol=example_mol)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Uyq3Xk3sh12l", - "colab_type": "text" - }, - "source": [ - "A simple check for the featurization is to see if the feature list has the same length as the number of conformers" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ShTPO4wIh12l", - "colab_type": 
"code", - "outputId": "440c4962-74a2-49bd-df91-4072debc46fe", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "print(len(example_mol.GetConformers()) == len(features))" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "stream", - "text": [ - "True\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "P-sGs7W2h12p", - "colab_type": "text" - }, - "source": [ - "### CoulombMatrixEig" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "9NTjtDUzh12p", - "colab_type": "text" - }, - "source": [ - "`CoulombMatrix` is invariant to molecular rotation and translation, since the interatomic distances or atomic numbers do not change. However the matrix is not invariant to random permutations of the atom's indices. To deal with this, the `CoulumbMatrixEig` featurizer was introduced, which uses the eigenvalue spectrum of the columb matrix, and is invariant to random permutations of the atom's indices.\n", - "\n", - "`CoulombMatrixEig` inherits from `CoulombMatrix` and featurizes a molecule by first computing the coulomb matrices for different conformers of the molecule and then computing the eigenvalues for each coulomb matrix. These eigenvalues are then padded to account for variation in number of atoms across molecules.\n", - "\n", - "The featurizer takes in `max_atoms` as an argument and also has options for removing hydrogens from the molecule (`remove_hydrogens`), generating additional random coulomb matrices(`randomize`)." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "XnNZB-Kxh12q", - "colab_type": "code", - "outputId": "eec2d4ba-c135-4039-940f-10703240fa3c", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "example_smile = \"CCC\"\n", - "example_mol = Chem.MolFromSmiles(example_smile)\n", - "\n", - "engine = conformers.ConformerGenerator(max_conformers=1)\n", - "example_mol = engine.generate_conformers(example_mol)\n", - "\n", - "print(\"Number of available conformers for propane: \", len(example_mol.GetConformers()))" - ], - "execution_count": 12, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Number of available conformers for propane: 1\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ga1-nNiWh12t", - "colab_type": "code", - "colab": {} - }, - "source": [ - "coulomb_mat_eig = CoulombMatrixEig(max_atoms=20, randomize=False, remove_hydrogens=False)\n", - "features = coulomb_mat_eig._featurize(mol=example_mol)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "_8PBHQYLh12v", - "colab_type": "code", - "outputId": "7770b03d-4bbe-4ee3-fffa-a23216e4caf0", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "print(len(example_mol.GetConformers()) == len(features))" - ], - "execution_count": 14, - "outputs": [ - { - "output_type": "stream", - "text": [ - "True\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "tm9ac-k8h12x", - "colab_type": "text" - }, - "source": [ - "### Adjacency Fingerprints" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "iBZBDvMYh12y", - "colab_type": "text" - }, - "source": [ - "TODO(rbharath): This tutorial still needs to be expanded out with the additional fingerprints." 
- ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "wssi6cBmh12z", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/06_Introduction_to_Graph_Convolutions.ipynb b/examples/tutorials/06_Introduction_to_Graph_Convolutions.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..12cdf4043bbc3be6a4c4092584058255af16a224 --- /dev/null +++ b/examples/tutorials/06_Introduction_to_Graph_Convolutions.ipynb @@ -0,0 +1,477 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ubFUlqz8cj1L" + }, + "source": [ + "# Tutorial Part 6: Introduction to Graph Convolutions\n", + "\n", + "In this tutorial we will learn more about \"graph convolutions.\" These are one of the most powerful deep learning tools for working with molecular data. 
The reason for this is that molecules can be naturally viewed as graphs.\n", + "\n", + "![Molecular Graph](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/basic_graphs.gif?raw=1)\n", + "\n", + "Note how standard chemical diagrams of the sort we're used to from high school lend themselves naturally to visualizing molecules as graphs. In the remainder of this tutorial, we'll dig into this relationship in significantly more detail. This will let us get a deeper understanding of how these systems work.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/06_Introduction_to_Graph_Convolutions.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 156 + }, + "colab_type": "code", + "id": "EoCLxSnBcj1N", + "outputId": "d0555806-a13b-4522-c845-c36a7f910fca" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 211 + }, + "colab_type": "code", + "id": "3Jv2cmnW91CF", + "outputId": "bd523c54-3038-4654-89ad-356ad1e207ca" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "BX2erW0ncj1W" + }, + "source": [ + "# What are Graph Convolutions?\n", + "\n", + "Consider a standard convolutional neural network (CNN) of the sort commonly used to process images. The input is a grid of pixels. There is a vector of data values for each pixel, for example the red, green, and blue color channels. The data passes through a series of convolutional layers. Each layer combines the data from a pixel and its neighbors to produce a new data vector for the pixel. Early layers detect small scale local patterns, while later layers detect larger, more abstract patterns. Often the convolutional layers alternate with pooling layers that perform some operation such as max or min over local regions.\n", + "\n", + "Graph convolutions are similar, but they operate on a graph. They begin with a data vector for each node of the graph (for example, the chemical properties of the atom that node represents). 
Convolutional and pooling layers combine information from connected nodes (for example, atoms that are bonded to each other) to produce a new data vector for each node.\n", + "\n", + "# Training a GraphConvModel\n", + "\n", + "Let's use the MoleculeNet suite to load the Tox21 dataset. To featurize the data in a way that graph convolutional networks can use, we set the featurizer option to `'GraphConv'`. The MoleculeNet call returns a training set, a validation set, and a test set for us to use. It also returns `tasks`, a list of the task names, and `transformers`, a list of data transformations that were applied to preprocess the dataset. (Most deep networks are quite finicky and require a set of data transformations to ensure that training proceeds stably.)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 89 + }, + "colab_type": "code", + "id": "JMi2V8Jncj1W", + "outputId": "56ab5eb6-07be-4d8f-c19b-88d1f73f2f46" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "\n", + "tasks, datasets, transformers = dc.molnet.load_tox21(featurizer='GraphConv')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "QfMW0Y4Kcj1Z" + }, + "source": [ + "Let's now train a graph convolutional network on this dataset. DeepChem has the class `GraphConvModel` that wraps a standard graph convolutional architecture underneath the hood for user convenience. Let's instantiate an object of this class and train it on our dataset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 245 + }, + "colab_type": "code", + "id": "Y9n3jTNHcj1a", + "outputId": "2caab2e5-5e5a-4f97-a440-753692341d35" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.28185401916503905" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "n_tasks = len(tasks)\n", + "model = dc.models.GraphConvModel(n_tasks, mode='classification')\n", + "model.fit(train_dataset, nb_epoch=50)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "kDDroutEcj1g" + }, + "source": [ + "Let's try to evaluate the performance of the model we've trained. For this, we need to define a metric, a measure of model performance. `dc.metrics` holds a collection of metrics already. For this dataset, it is standard to use the ROC-AUC score, the area under the receiver operating characteristic curve (which measures the tradeoff between precision and recall). Luckily, the ROC-AUC score is already available in DeepChem. \n", + "\n", + "To measure the performance of the model under this metric, we can use the convenience function `model.evaluate()`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 69 + }, + "colab_type": "code", + "id": "MeX-9RNWcj1h", + "outputId": "642d3f81-33de-46bb-fc7a-8b5edda99881" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training set score: {'roc_auc_score': 0.96959686893055}\n", + "Test set score: {'roc_auc_score': 0.795793783300876}\n" + ] + } + ], + "source": [ + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", + "print('Training set score:', model.evaluate(train_dataset, [metric], transformers))\n", + "print('Test set score:', model.evaluate(test_dataset, [metric], transformers))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "l-LBxrKN6CMs" + }, + "source": [ + "The results are pretty good, and `GraphConvModel` is very easy to use. But what's going on under the hood? Could we build GraphConvModel ourselves? Of course! DeepChem provides Keras layers for all the calculations involved in a graph convolution. We are going to apply the following layers from DeepChem.\n", + "\n", + "- `GraphConv` layer: This layer implements the graph convolution. The graph convolution combines per-node feature vectures in a nonlinear fashion with the feature vectors for neighboring nodes. This \"blends\" information in local neighborhoods of a graph.\n", + "\n", + "- `GraphPool` layer: This layer does a max-pooling over the feature vectors of atoms in a neighborhood. You can think of this layer as analogous to a max-pooling layer for 2D convolutions but which operates on graphs instead. \n", + "\n", + "- `GraphGather`: Many graph convolutional networks manipulate feature vectors per graph-node. For a molecule for example, each node might represent an atom, and the network would manipulate atomic feature vectors that summarize the local chemistry of the atom. 
However, at the end of the application, we will likely want to work with a molecule level feature representation. This layer creates a graph level feature vector by combining all the node-level feature vectors.\n", + "\n", + "Apart from this we are going to apply standard neural network layers such as [Dense](https://keras.io/api/layers/core_layers/dense/), [BatchNormalization](https://keras.io/api/layers/normalization_layers/batch_normalization/) and [Softmax](https://keras.io/api/layers/activation_layers/softmax/) layer." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "71_E0CAUcj1n" + }, + "outputs": [], + "source": [ + "from deepchem.models.layers import GraphConv, GraphPool, GraphGather\n", + "import tensorflow as tf\n", + "import tensorflow.keras.layers as layers\n", + "\n", + "batch_size = 100\n", + "\n", + "class MyGraphConvModel(tf.keras.Model):\n", + "\n", + " def __init__(self):\n", + " super(MyGraphConvModel, self).__init__()\n", + " self.gc1 = GraphConv(128, activation_fn=tf.nn.tanh)\n", + " self.batch_norm1 = layers.BatchNormalization()\n", + " self.gp1 = GraphPool()\n", + "\n", + " self.gc2 = GraphConv(128, activation_fn=tf.nn.tanh)\n", + " self.batch_norm2 = layers.BatchNormalization()\n", + " self.gp2 = GraphPool()\n", + "\n", + " self.dense1 = layers.Dense(256, activation=tf.nn.tanh)\n", + " self.batch_norm3 = layers.BatchNormalization()\n", + " self.readout = GraphGather(batch_size=batch_size, activation_fn=tf.nn.tanh)\n", + "\n", + " self.dense2 = layers.Dense(n_tasks*2)\n", + " self.logits = layers.Reshape((n_tasks, 2))\n", + " self.softmax = layers.Softmax()\n", + "\n", + " def call(self, inputs):\n", + " gc1_output = self.gc1(inputs)\n", + " batch_norm1_output = self.batch_norm1(gc1_output)\n", + " gp1_output = self.gp1([batch_norm1_output] + inputs[1:])\n", + "\n", + " gc2_output = self.gc2([gp1_output] + inputs[1:])\n", + " batch_norm2_output = 
self.batch_norm1(gc2_output)\n", + " gp2_output = self.gp2([batch_norm2_output] + inputs[1:])\n", + "\n", + " dense1_output = self.dense1(gp2_output)\n", + " batch_norm3_output = self.batch_norm3(dense1_output)\n", + " readout_output = self.readout([batch_norm3_output] + inputs[1:])\n", + "\n", + " logits_output = self.logits(self.dense2(readout_output))\n", + " return self.softmax(logits_output)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "oC20PZiccj1p" + }, + "source": [ + "We can now see more clearly what is happening. There are two convolutional blocks, each consisting of a `GraphConv`, followed by batch normalization, followed by a `GraphPool` to do max pooling. We finish up with a dense layer, another batch normalization, a `GraphGather` to combine the data from all the different nodes, and a final dense layer to produce the global output. \n", + "\n", + "Let's now create the DeepChem model which will be a wrapper around the Keras model that we just created. We will also specify the loss function so the model know the objective to minimize." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "31Wr0t2zcj1q" + }, + "outputs": [], + "source": [ + "model = dc.models.KerasModel(MyGraphConvModel(), loss=dc.models.losses.CategoricalCrossEntropy())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Wz43oG9rcj1j" + }, + "source": [ + "What are the inputs to this model? A graph convolution requires a complete description of each molecule, including the list of nodes (atoms) and a description of which ones are bonded to each other. In fact, if we inspect the dataset we see that the feature array contains Python objects of type `ConvMol`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "test_dataset.X[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Models expect arrays of numbers as their inputs, not Python objects. We must convert the `ConvMol` objects into the particular set of arrays expected by the `GraphConv`, `GraphPool`, and `GraphGather` layers. Fortunately, the `ConvMol` class includes the code to do this, as well as to combine all the molecules in a batch to create a single set of arrays.\n", + "\n", + "The following code creates a Python generator that given a batch of data generates the lists of inputs, labels, and weights whose values are Numpy arrays. `atom_features` holds a feature vector of length 75 for each atom. The other inputs are required to support minibatching in TensorFlow. `degree_slice` is an indexing convenience that makes it easy to locate atoms from all molecules with a given degree. `membership` determines the membership of atoms in molecules (atom `i` belongs to molecule `membership[i]`). `deg_adjs` is a list that contains adjacency lists grouped by atom degree. For more details, check out the [code](https://github.com/deepchem/deepchem/blob/master/deepchem/feat/mol_graphs.py)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "o-cPAG0I8Tc4" + }, + "outputs": [], + "source": [ + "from deepchem.metrics import to_one_hot\n", + "from deepchem.feat.mol_graphs import ConvMol\n", + "import numpy as np\n", + "\n", + "def data_generator(dataset, epochs=1):\n", + " for ind, (X_b, y_b, w_b, ids_b) in enumerate(dataset.iterbatches(batch_size, epochs,\n", + " deterministic=False, pad_batches=True)):\n", + " multiConvMol = ConvMol.agglomerate_mols(X_b)\n", + " inputs = [multiConvMol.get_atom_features(), multiConvMol.deg_slice, np.array(multiConvMol.membership)]\n", + " for i in range(1, len(multiConvMol.get_deg_adjacency_lists())):\n", + " inputs.append(multiConvMol.get_deg_adjacency_lists()[i])\n", + " labels = [to_one_hot(y_b.flatten(), 2).reshape(-1, n_tasks, 2)]\n", + " weights = [w_b]\n", + " yield (inputs, labels, weights)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "VSTbjm9Hcj1v" + }, + "source": [ + "Now, we can train the model using `fit_generator(generator)` which will use the generator we've defined to train the model." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 245 + }, + "colab_type": "code", + "id": "59WW4rhwcj1w", + "outputId": "660ecb20-a2f4-4ae5-e0c8-bc72e309ee72" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.21941944122314452" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.fit_generator(data_generator(train_dataset, epochs=50))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "skrL9YEEcj13" + }, + "source": [ + "Now that we have trained our graph convolutional method, let's evaluate its performance. We again have to use our defined generator to evaluate model performance." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 69 + }, + "colab_type": "code", + "id": "f3prNsgGcj14", + "outputId": "dc95fbba-f5bf-4f7b-8d56-efdc37345d80", + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training set score: {'roc_auc_score': 0.8425638289185731}\n", + "Test set score: {'roc_auc_score': 0.7378436684114341}\n" + ] + } + ], + "source": [ + "print('Training set score:', model.evaluate_generator(data_generator(train_dataset), [metric], transformers))\n", + "print('Test set score:', model.evaluate_generator(data_generator(test_dataset), [metric], transformers))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "tvOYgj52cj16" + }, + "source": [ + "Success! The model we've constructed behaves nearly identically to `GraphConvModel`. If you're looking to build your own custom models, you can follow the example we've provided here to do so. We hope to see exciting constructions from your end soon!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "collapsed": true, + "id": "j1FrVn88cj17" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "04_Introduction_to_Graph_Convolutions.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/07_Going_Deeper_on_Molecular_Featurizations.ipynb b/examples/tutorials/07_Going_Deeper_on_Molecular_Featurizations.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..53ea539609e4ea667c9e5b34ff2da404a8d1d6b1 --- /dev/null +++ b/examples/tutorials/07_Going_Deeper_on_Molecular_Featurizations.ipynb @@ -0,0 +1,524 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "tTuYGOlnh117" + }, + "source": [ + "# Tutorial Part 7: Going Deeper On Molecular Featurizations\n", + "\n", + "One of the most important steps of doing machine learning on molecular data is transforming the data into a form amenable to the application of learning algorithms. This process is broadly called \"featurization\" and involves turning a molecule into a vector or tensor of some sort. 
There are a number of different ways of doing that, and the choice of featurization is often dependent on the problem at hand. We have already seen two such methods: molecular fingerprints, and `ConvMol` objects for use with graph convolutions. In this tutorial we will look at some of the others.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/07_Going_Deeper_on_Molecular_Featurizations.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "tS3siM3Ch11-", + "outputId": "3a96e0a7-46c1-4baa-91da-f98ca5a33d6d" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "D43MbibL_EK0", + "outputId": "e7b205ae-9962-4089-d49a-6d0ebe4c8430" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "omxBgQVDh12B" + }, + "source": [ + "## Featurizers\n", + "\n", + "In DeepChem, a method of featurizing a molecule (or any other sort of input) is defined by a `Featurizer` object. There are three different ways of using featurizers.\n", + "\n", + "1. When using the MoleculeNet loader functions, you simply pass the name of the featurization method to use. We have seen examples of this in earlier tutorials, such as `featurizer='ECFP'` or `featurizer='GraphConv'`.\n", + "\n", + "2. You also can create a Featurizer and directly apply it to molecules. For example:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Sp5Hbb4nh12C" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 0.]\n", + " [0. 0. 0. ... 0. 0. 
0.]]\n" + ] + } + ], + "source": [ + "import deepchem as dc\n", + "\n", + "featurizer = dc.feat.CircularFingerprint()\n", + "print(featurizer(['CC', 'CCC', 'CCO']))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_bC1mPM4h12F" + }, + "source": [ + "3. When creating a new dataset with the DataLoader framework, you can specify a Featurizer to use for processing the data. We will see this in a future tutorial.\n", + "\n", + "We use propane (CH3CH2CH3, represented by the SMILES string `'CCC'`) as a running example throughout this tutorial. Many of the featurization methods use conformers of the molecules. A conformer can be generated using the `ConformerGenerator` class in `deepchem.utils.conformers`. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "4D9z0slLh12G" + }, + "source": [ + "### RDKitDescriptors" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "oCfATWYIh12H" + }, + "source": [ + "`RDKitDescriptors` featurizes a molecule by using RDKit to compute values for a list of descriptors. These are basic physical and chemical properties: molecular weight, polar surface area, numbers of hydrogen bond donors and acceptors, etc. This is most useful for predicting things that depend on these high level properties rather than on detailed molecular structure.\n", + "\n", + "Intrinsic to the featurizer is a set of allowed descriptors, which can be accessed using `RDKitDescriptors.allowedDescriptors`. The featurizer uses the descriptors in `rdkit.Chem.Descriptors.descList`, checks if they are in the list of allowed descriptors, and computes the descriptor value for the molecule.\n", + "\n", + "Let's print the values of the first ten descriptors for propane." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "colab_type": "code", + "id": "3dt_vjtXh12N", + "outputId": "c6f73232-0765-479c-93b0-ba18cbf6f33a" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MaxEStateIndex 2.125\n", + "MinEStateIndex 1.25\n", + "MaxAbsEStateIndex 2.125\n", + "MinAbsEStateIndex 1.25\n", + "qed 0.3854706587740357\n", + "MolWt 44.097\n", + "HeavyAtomMolWt 36.033\n", + "ExactMolWt 44.062600255999996\n", + "NumValenceElectrons 20.0\n", + "NumRadicalElectrons 0.0\n" + ] + } + ], + "source": [ + "rdkit_featurizer = dc.feat.RDKitDescriptors()\n", + "features = rdkit_featurizer(['CCC'])[0]\n", + "for feature, descriptor in zip(features[:10], rdkit_featurizer.descriptors):\n", + " print(descriptor, feature)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Of course, there are many more descriptors than this." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "KfyDpE81h12Q", + "outputId": "46673131-c504-48ca-db35-5d689e218069" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The number of descriptors present is: 200\n" + ] + } + ], + "source": [ + "print('The number of descriptors present is: ', len(features))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "41RwzbTth12U" + }, + "source": [ + "### WeaveFeaturizer and MolGraphConvFeaturizer\n", + "\n", + "We previously looked at graph convolutions, which use `ConvMolFeaturizer` to convert molecules into `ConvMol` objects. Graph convolutions are a special case of a large class of architectures that represent molecules as graphs. They work in similar ways but vary in the details. 
For example, they may associate data vectors with the atoms, the bonds connecting them, or both. They may use a variety of techniques to calculate new data vectors from those in the previous layer, and a variety of techniques to compute molecule level properties at the end.\n", + "\n", + "DeepChem supports lots of different graph based models. Some of them require molecules to be featurized in slightly different ways. Because of this, there are two other featurizers called `WeaveFeaturizer` and `MolGraphConvFeaturizer`. They each convert molecules into a different type of Python object that is used by particular models. When using any graph based model, just check the documentation to see what featurizer you need to use with it." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "SF3l5yJ4h12f" + }, + "source": [ + "### CoulombMatrix\n", + "\n", + "All the models we have looked at so far consider only the intrinsic properties of a molecule: the list of atoms that compose it and the bonds connecting them. When working with flexible molecules, you may also want to consider the different conformations the molecule can take on. For example, when a drug molecule binds to a protein, the strength of the binding depends on specific interactions between pairs of atoms. To predict binding strength, you probably want to consider a variety of possible conformations and use a model that takes them into account when making predictions.\n", + "\n", + "The Coulomb matrix is one popular featurization for molecular conformations. Recall that the electrostatic Coulomb interaction between two charges is proportional to $q_1 q_2/r$ where $q_1$ and $q_2$ are the charges and $r$ is the distance between them. For a molecule with $N$ atoms, the Coulomb matrix is a $N \\times N$ matrix where each element gives the strength of the electrostatic interaction between two atoms. 
It contains information both about the charges on the atoms and the distances between them. More information on the functional forms used can be found [here](https://journals.aps.org/prl/pdf/10.1103/PhysRevLett.108.058301).\n", + "\n", + "To apply this featurizer, we first need a set of conformations for the molecule. We can use the `ConformerGenerator` class to do this. It takes an RDKit molecule, generates a set of energy minimized conformers, and prunes the set to only include ones that are significantly different from each other. Let's try running it for propane." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "evLPEI6mh12g", + "outputId": "c0895d51-a38d-494e-d161-31ce5c421fb3" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of available conformers for propane: 1\n" + ] + } + ], + "source": [ + "from rdkit import Chem\n", + "\n", + "generator = dc.utils.ConformerGenerator(max_conformers=5)\n", + "propane_mol = generator.generate_conformers(Chem.MolFromSmiles('CCC'))\n", + "print(\"Number of available conformers for propane: \", len(propane_mol.GetConformers()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It only found a single conformer. This shouldn't be surprising, since propane is a very small molecule with hardly any flexibility. Let's try adding another carbon."
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of available conformers for butane: 3\n" + ] + } + ], + "source": [ + "butane_mol = generator.generate_conformers(Chem.MolFromSmiles('CCCC'))\n", + "print(\"Number of available conformers for butane: \", len(butane_mol.GetConformers()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we can create a Coulomb matrix for our molecule." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + }, + "colab_type": "code", + "id": "pPIqy39Ih12i", + "outputId": "ca7b18b3-cfa4-44e8-a907-cbffd4e65364" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[[36.8581052 12.48684429 7.5619687 2.85945193 2.85804514\n", + " 2.85804556 1.4674015 1.46740144 0.91279491 1.14239698\n", + " 1.14239675 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [12.48684429 36.8581052 12.48684388 1.46551218 1.45850736\n", + " 1.45850732 2.85689525 2.85689538 1.4655122 1.4585072\n", + " 1.4585072 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 7.5619687 12.48684388 36.8581052 0.9127949 1.14239695\n", + " 1.14239692 1.46740146 1.46740145 2.85945178 2.85804504\n", + " 2.85804493 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 2.85945193 1.46551218 0.9127949 0.5 0.29325367\n", + " 0.29325369 0.21256978 0.21256978 0.12268391 0.13960187\n", + " 0.13960185 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 2.85804514 1.45850736 1.14239695 0.29325367 0.5\n", + " 0.29200271 0.17113413 0.21092513 0.13960186 0.1680002\n", + " 0.20540029 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 2.85804556 1.45850732 1.14239692 0.29325369 0.29200271\n", + " 0.5 0.21092513 0.17113413 0.13960187 0.20540032\n", + " 0.16800016 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 
]\n", + " [ 1.4674015 2.85689525 1.46740146 0.21256978 0.17113413\n", + " 0.21092513 0.5 0.29351308 0.21256981 0.2109251\n", + " 0.17113412 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 1.46740144 2.85689538 1.46740145 0.21256978 0.21092513\n", + " 0.17113413 0.29351308 0.5 0.21256977 0.17113412\n", + " 0.21092513 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0.91279491 1.4655122 2.85945178 0.12268391 0.13960186\n", + " 0.13960187 0.21256981 0.21256977 0.5 0.29325366\n", + " 0.29325365 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 1.14239698 1.4585072 2.85804504 0.13960187 0.1680002\n", + " 0.20540032 0.2109251 0.17113412 0.29325366 0.5\n", + " 0.29200266 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 1.14239675 1.4585072 2.85804493 0.13960185 0.20540029\n", + " 0.16800016 0.17113412 0.21092513 0.29325365 0.29200266\n", + " 0.5 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. ]\n", + " [ 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0.\n", + " 0. 0. 0. 0. 0. 
]]]\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/peastman/workspace/deepchem/deepchem/feat/molecule_featurizers/coulomb_matrices.py:141: RuntimeWarning: divide by zero encountered in true_divide\n", + " m = np.outer(z, z) / d\n" + ] + } + ], + "source": [ + "coulomb_mat = dc.feat.CoulombMatrix(max_atoms=20)\n", + "features = coulomb_mat(propane_mol)\n", + "print(features)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Uyq3Xk3sh12l" + }, + "source": [ + "Notice that many elements are 0. To combine multiple molecules in a batch we need all the Coulomb matrices to be the same size, even if the molecules have different numbers of atoms. We specified `max_atoms=20`, so the returned matrix has size (20, 20). The molecule only has 11 atoms, so only an 11 by 11 submatrix is nonzero." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "P-sGs7W2h12p" + }, + "source": [ + "### CoulombMatrixEig" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "9NTjtDUzh12p" + }, + "source": [ + "An important feature of Coulomb matrices is that they are invariant to molecular rotation and translation, since the interatomic distances and atomic numbers do not change. Respecting symmetries like this makes learning easier. Rotating a molecule does not change its physical properties. If the featurization does change, then the model is forced to learn that rotations are not important, but if the featurization is invariant then the model gets this property automatically.\n", + "\n", + "Coulomb matrices are not invariant under another important symmetry: permutations of the atoms' indices. A molecule's physical properties do not depend on which atom we call \"atom 1\", but the Coulomb matrix does. 
To deal with this, the `CoulombMatrixEig` featurizer was introduced, which uses the eigenvalue spectrum of the Coulomb matrix and is invariant to random permutations of the atoms' indices. The disadvantage of this featurization is that it contains much less information ($N$ eigenvalues instead of an $N \\times N$ matrix), so models will be more limited in what they can learn.\n", + "\n", + "`CoulombMatrixEig` inherits from `CoulombMatrix` and featurizes a molecule by first computing the Coulomb matrices for different conformers of the molecule and then computing the eigenvalues for each Coulomb matrix. These eigenvalues are then padded to account for variation in number of atoms across molecules." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 51 + }, + "colab_type": "code", + "id": "ga1-nNiWh12t", + "outputId": "2df3163c-6808-49e6-dba8-282ddd7fa3c4" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[60.07620303 29.62963149 22.75497781 0.5713786 0.28781332 0.28548338\n", + " 0.27558187 0.18163794 0.17460999 0.17059719 0.16640098 0.\n", + " 0. 0. 0. 0. 0. 0.\n", + " 0. 0. ]]\n" + ] + } + ], + "source": [ + "coulomb_mat_eig = dc.feat.CoulombMatrixEig(max_atoms=20)\n", + "features = coulomb_mat_eig(propane_mol)\n", + "print(features)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "wssi6cBmh12z" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series.
You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" + ] + } + ], + "metadata": { + "colab": { + "name": "06_Going_Deeper_on_Molecular_Featurizations.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/07_Uncertainty_In_Deep_Learning.ipynb b/examples/tutorials/07_Uncertainty_In_Deep_Learning.ipynb deleted file mode 100644 index 2d5670f17b7747bad7dc87d37ef18b6024b2a5e3..0000000000000000000000000000000000000000 --- a/examples/tutorials/07_Uncertainty_In_Deep_Learning.ipynb +++ /dev/null @@ -1,457 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "07_Uncertainty_In_Deep_Learning.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "Gn1RVu2xkMdA", - "colab_type": "text" - }, - 
"source": [ - "# Tutorial Part 7: Uncertainty in Deep Learning\n", - "\n", - "A common criticism of deep learning models is that they tend to act as black boxes. A model produces outputs, but doesn't given enough context to interpret them properly. How reliable are the model's predictions? Are some predictions more reliable than others? If a model predicts a value of 5.372 for some quantity, should you assume the true value is between 5.371 and 5.373? Or that it's between 2 and 8? In some fields this situation might be good enough, but not in science. For every value predicted by a model, we also want an estimate of the uncertainty in that value so we can know what conclusions to draw based on it.\n", - "\n", - "DeepChem makes it very easy to estimate the uncertainty of predicted outputs (at least for the models that support it—not all of them do). Let's start by seeing an example of how to generate uncertainty estimates. We load a dataset, create a model, train it on the training set, predict the output on the test set, and then derive some uncertainty estimates.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/07_Uncertainty_In_Deep_Learning.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "p0MdAUAvkMdD", - "colab_type": "code", - "outputId": "1d1b90f3-c60f-4f6d-a0f0-2360abb6b46e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 35845 0 --:--:-- --:--:-- --:--:-- 35479\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 3.12 s, sys: 699 ms, total: 3.82 s\n", - "Wall time: 2min 19s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "BUFgitSSkMdG", - "colab_type": "text" - }, - "source": [ - "We'll use the SAMPL dataset from the MoleculeNet suite to run our experiments in this tutorial. Let's load up our dataset for our experiments, and then make some uncertainty predictions." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "4mHPuoOPkMdH", - "colab_type": "code", - "outputId": "f38bfb12-e0b9-4838-a01b-499c5a629dcf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 768 - } - }, - "source": [ - "import deepchem as dc\n", - "import numpy as np\n", - "import matplotlib.pyplot as plot\n", - "\n", - "tasks, datasets, transformers = dc.molnet.load_sampl(reload=False)\n", - "train_dataset, valid_dataset, test_dataset = datasets\n", - "\n", - "model = dc.models.MultitaskRegressor(len(tasks), 1024, uncertainty=True)\n", - "model.fit(train_dataset, nb_epoch=200)\n", - "y_pred, y_std = model.predict_uncertainty(test_dataset)" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/SAMPL.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 2.714 s\n", - "TIMING: dataset construction took 2.763 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.038 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.023 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.022 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.036 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.023 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.021 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.030 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.009 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.009 s\n", - "Loading dataset from disk.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from 
tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. 
Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_DlPZsaekMdL", - "colab_type": "text" - }, - "source": [ - "All of this looks exactly like any other example, with just two differences. First, we add the option `uncertainty=True` when creating the model. This instructs it to add features to the model that are needed for estimating uncertainty. Second, we call `predict_uncertainty()` instead of `predict()` to produce the output. `y_pred` is the predicted outputs. `y_std` is another array of the same shape, where each element is an estimate of the uncertainty (standard deviation) of the corresponding element in `y_pred`. And that's all there is to it! Simple, right?\n", - "\n", - "Of course, it isn't really that simple at all. DeepChem is doing a lot of work to come up with those uncertainties. So now let's pull back the curtain and see what is really happening. (For the full mathematical details of calculating uncertainty, see https://arxiv.org/abs/1703.04977)\n", - "\n", - "To begin with, what does \"uncertainty\" mean? Intuitively, it is a measure of how much we can trust the predictions. More formally, we expect that the true value of whatever we are trying to predict should usually be within a few standard deviations of the predicted value. But uncertainty comes from many sources, ranging from noisy training data to bad modelling choices, and different sources behave in different ways. 
It turns out there are two fundamental types of uncertainty we need to take into account.\n", - "\n", - "### Aleatoric Uncertainty\n", - "\n", - "Consider the following graph. It shows the best fit linear regression to a set of ten data points." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "iLgia0GVkMdM", - "colab_type": "code", - "outputId": "18cf655d-be31-48b5-ff42-80ab279a6bba", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 265 - } - }, - "source": [ - "# Generate some fake data and plot a regression line.\n", - "x = np.linspace(0, 5, 10)\n", - "y = 0.15*x + np.random.random(10)\n", - "plot.scatter(x, y)\n", - "fit = np.polyfit(x, y, 1)\n", - "line_x = np.linspace(-1, 6, 2)\n", - "plot.plot(line_x, np.poly1d(fit)(line_x))\n", - "plot.show()" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXhV9b3v8feXDBASQgqEKQQIUyCACMSo1AelFE0RcbheLbX0OJU+am17rXjUXhnkOlSqh1o5WA4O5VbLaT0OiSCBCiJVi4DIhcQQUiIhQUgAwxgy/u4fiZ5IgQSyd9YePq/n4TF770XW58Hkk5211nf9zDmHiIgEv3ZeBxAREd9QoYuIhAgVuohIiFChi4iECBW6iEiIiPRqx926dXP9+/f3avciIkFp8+bNB5xziad7zbNC79+/P5s2bfJq9yIiQcnMdp/pNR1yEREJESp0EZEQoUIXEQkRKnQRkRDRbKGb2YtmVmZm25vZ7iIzqzWzG30XT0REWqolV7m8DDwHLD3TBmYWAfwaWOWbWCLSFt7cUsr8nB3sraikd0IMM69K5brRSV7HkvPU7Dt059z7wKFmNrsX+C+gzBehRMT/3txSykOvb6O0ohIHlFZU8tDr23hzS6nX0eQ8tfoYupklAdcDi1qw7Qwz22Rmm8rLy1u7axFphfk5O6isqfvGc5U1dczP2eFRImktX5wUXQD8q3OuvrkNnXOLnXPpzrn0xMTTDjqJSBvZW1F5Ts9L4PPFpGg6sMzMALoBk82s1jn3pg8+t4j4Se+EGEpPU969E2I8SCO+0Op36M65FOdcf+dcf+A14G6VuUjgm3lVKjFREd94LiYqgplXpXqUSFqr2XfoZvYn4Aqgm5mVALOBKADn3PN+TScifvPV1Sy6yiV0mFdriqanpzvdnEtE5NyY2WbnXPrpXtOkqIhIiFChi4iECBW6iEiIUKGLiIQIFbqISIhQoYuIhAjP1hQVkdCnuzm2LRW6iPjFV3dz/OoGYF/dzRFQqfuJDrmIiF/obo5tT4UuIn6huzm2PRW6iPjFme7aqLs5+o8KXUT8QndzbHsqdBHxi
+tGJ/HEDSNJSojBgKSEGJ64YWRYnxB1zrEqdx9FB4775fPrKhcR8ZvrRieFdYE3VVh2jLnZuazfeYDpl/Rj3nUjfL4PFbqIiB8dPVnD79YU8uLfioiJjmDWlDSmX9rPL/tSoYuI+EF9veONLaU8uTKf8qNV3JTehwcyh9Itrr3f9qlCFxHxse2lh5n11nY+Ka5gVHIC//GjdC5MTvD7flXoIiI+cuh4NfNzdrBsYzFdY6N56sYLuHFMH9q1szbZvwpdRKSVauvqefXjYp5eVcCxqlpuG5fCz787mM4xUW2aQ4UuItIKG3YdZHZWLvn7jjJuYFfmTB3OkB6dPMmiQhcROQ9fHK7k8RX5ZG/dS1JCDP9+yxi+N6InZm1zeOV0VOgiIuegqraOJeuLWLi2kNp6x88mDuauywcSEx3R/F/2MxW6iEgLrcnfz6PZeXx+8ARXpvXgkSlpJHfp6HWsr6nQRUSa8fmB4zz6dh5r8ssYkBjL0tszGD8k0etY/6TZQjezF4EpQJlz7p9mVc3sFuBfAQOOAnc557b6OqiISFs7XlXLwrWFLFlfRFSE8fDkodw6LoXoyMC8DVZL3qG/DDwHLD3D60XA5c65L83se8Bi4GLfxBMRaXvOObK27uWJFfnsO3KSG8Yk8WDmULrHd/A62lk1W+jOuffNrP9ZXv+wycO/A31aH0tExBt5e48wJzuXj4sOMSIpnoW3jGZsvy5ex2oRXx9DvwN450wvmtkMYAZA3759fbxrEZHzV3GimmdWF/DHv++mc0wUj18/kpsvSiaijaY8fcFnhW5mE2go9MvOtI1zbjENh2RIT093vtq3iMj5qqt3LNtYzG9ydnC4soYfXtKP+yYNIaFjtNfRzplPCt3MLgCWAN9zzh30xecUEfG3zbsPMTsrl+2lR8hI6cLcqcMZ1ive61jnrdWFbmZ9gdeB6c65gtZHEhHxr7IjJ3nynXxe31JKz/gOPDttNNdc0MvTKU9faMlli38CrgC6mVkJMBuIAnDOPQ/MAroC/974j1HrnEv3V2ARkfNVXVvPyx8W8ey7hVTX1nP3FQO5Z8IgYtuHxkhOS65ymdbM63cCd/oskYiIH6wrKGdudi67yo8zcWh3HpmSRv9usV7H8qnQ+LEkInIGxQdPMG95Hqvz9tO/a0deuvUiJgzt7nUsv1Chi0hIqqyuY9F7hTz//i4i2xkPZKZyx2UptI/0/iZa/qJCF5GQ4pzjne37eGz5Z5RWVDJ1VG8emjyUXp1jvI7mdyp0EQkZBfuPMicrlw//cZChPTvxnzMu4eIBXb2O1WZU6CIS9A5X1rDgrwUs/Wg3ce0jmXftcKZl9CUyIjBvouUvKnQRCVr19Y7XNpfwVE4+B49XMy2jL/dfmUqX2OCb8vQFFbqIBKVP91QwOyuXrXsqGNvvW7x8WwYjkjp7HctTKnQRCSrlR6uYn5PPnzeVkNipPc/cNIrrRycF/ZSnL6jQRSQo1NTVs/Sj3SxYXUBlTR0zxg/g3u8MolOHKK+jBQwVuogEvA8LDzAnO5eC/ccYPySRWVPSGNQ9zutYAUeFLiIBq7SikseW57Fi2z6Su8SwePpYJqX10OGVM1Chi0jAOVlTx+L3d/Hv7xUCcN+kIcwYP4AOUaE75ekLKnQRCRjOOVbl7ef/LM9jz6FKrh7Zi4evHkZSQuhPefqCCl1EAkJh2THmZueyfucBhvSI49U7L2bcoG5exwoqKnQR8dTRkzX8bk0hL/6tiJjoCGZNSWP6pf2ICrMpT19QoYuIJ+rrHW9sKeXJlfmUH63ipvQ+PJA5lG5x7b2OFrRU6CLS5raXHmbWW9v5pLiCUckJ/MeP0rkwOcHrWEFPhS4ibebQ8Wrm5+xg2cZiusZG89SNF3DjmD60a6fLEH1BhS4ifldbV8+rHxfz9KoCjlXVctu4FH7+3cF0jtGUpy+p0EXErzbsOsjsrFzy9x1l3MCuzJk6nCE9OnkdKySp0EXEL744XMnjK
/LJ3rqXpIQYFt0yhswRPTXl6UcqdBHxqaraOpasL2Lh2kJq6x0/mziYuy4fSEy0pjz9TYUuIj6zJn8/j2bn8fnBE1yZ1oNHpqSR3KWj17HChgpdRFqt6MBx5r2dx5r8MgYkxrL09gzGD0n0OlbYabbQzexFYApQ5pwbcZrXDfgtMBk4AdzqnPvE10FFJPAcr6rlubWFvLC+iKgI4+HJQ7l1XArRkZry9EJL3qG/DDwHLD3D698DBjf+uRhY1PhfEQlRzjmytu7liRX57DtykhvGJPFg5lC6x3fwOlpYa7bQnXPvm1n/s2xyLbDUOeeAv5tZgpn1cs594aOMIgHrzS2lzM/Zwd6KSnonxDDzqlSuG53kdSy/ytt7hDnZuXxcdIgRSfEsvGU0Y/t18TqW4Jtj6EnAniaPSxqf+6dCN7MZwAyAvn37+mDXIt55c0spD72+jcqaOqBhMYaHXt8GEJKlXnGimmdWF/DHv++mc0wUj18/kpsvSiZCU54Bo01PijrnFgOLAdLT011b7lvE1+bn7Pi6zL9SWVPH/JwdIVXodfWOZRuL+U3ODg5X1jD9kn7cNymVzh015RlofFHopUByk8d9Gp8TCWl7KyrP6flgtHn3IWZn5bK99AgZKV2YO3U4w3rFex1LzsAXhZ4F/NTMltFwMvSwjp9LOOidEEPpacq7dwisrlN25CRPvpPP61tK6RnfgWenjeaaC3ppyjPAteSyxT8BVwDdzKwEmA1EATjnngdW0HDJYiENly3e5q+wIoFk5lWp3ziGDhATFcHMq1I9TNU61bX1vPRBEc++u5OaOsfdVwzkngmDiG2vkZVg0JKrXKY187oD7vFZIpEg8dVx8lC5ymVdQTlzs3PZVX6ciUO788iUNPp3i/U6lpwD/dgVaYXrRicFbYF/pfjgCeYtz2N13n76d+3IS7dexISh3b2OJedBhS4Spiqr61j0XiHPv7+LyHbGA5mp3HFZCu0jdROtYKVCFwkzzjne2b6Px5Z/RmlFJVNH9ebhycPo2VlTnsFOhS4SRgr2H2VOVi4f/uMgQ3t24j9nXMLFA7p6Heu8heOk7tmo0EXCwOHKGhb8tYClH+0mrn0k864dzrSMvkRGBO9NtMJtUrclVOgiIay+3vHa5hJ+vTKfQyeqmZbRl/uvTKVLbLTX0VotXCZ1z4UKXSREfbqngtlZuWzdU8HYft/iD1MzGJHU2etYPhMOk7rnSoUuEmLKj1YxPyefP28qIbFTe565aRTXj04KuSnPUJ7UPV8qdJEQUVNXz9KPdrNgdQEna+v4yfgB3DtxMHEhOuUZipO6rRWa/6dFwsyHhQeYk51Lwf5jjB+SyOxr0hiYGOd1LL8KtUldX1ChiwSx0opKHluex4pt+0juEsPi6WOZlNYj5A6vnEkoTOr6kgpdJAidrKnj9+t2sWhdIQD3TRrCjPED6BClKc9wpkIXCSLOOVbl7Wfe23mUfFnJ1SN78fDVw0gK4xOB8t9U6CJBorDsGHOzc1m/8wBDesTx6p0XM25QN69jSQBRoYsEuKMna/jdmkJe/FsRMdERzJqSxvRL+xEVxFOe4h8qdJEAVV/veGNLKU+uzOfAsSpuGpvMzMxUusW19zqaBCgVukgA2l56mFlvbeeT4gpGJSew5EfpjEpO8DqWBDgVukgAOXS8mvk5O1i2sZiusdE8deMF3DimD+3ahcdliNI6KnSRAFBbV8+rHxfz9KoCjlXVctu4FH7+3cF0jonyOpoEERW6iMc27DrI7Kxc8vcdZdzArsyZOpwhPTp5HUuCkApdxCNfHK7k8RX5ZG/dS1JCDItuGUPmiJ5hM+UpvqdCF2ljVbV1LFlfxMK1hdTWO342cTB3XT6QmGhNeUrrqNBF2tCa/P08mp3H5wdPcGVaDx6ZkkZyl45ex5IQoUIXaQNFB44z7+081uSXMSAxlqW3ZzB+SKLXsSTEtKjQzSwT+C0QASxxzj15yut9gT8ACY3bPOicW+Hjr
CJB53hVLc+tLeSF9UVERRgPTx7KreNSiI7UlKf4XrOFbmYRwEJgElACbDSzLOdcXpPN/jfwZ+fcIjNLA1YA/f2QVyQoOOfI2rqXJ1bks+/ISW4Yk8SDmUPpHt/B62gSwlryDj0DKHTO7QIws2XAtUDTQndAfOPHnYG9vgwpp/fmllLd3D8A5e09wpzsXD4uOsSIpHgW3jKasf26eB1LwkBLCj0J2NPkcQlw8SnbzAFWmdm9QCzwXZ+kkzN6c0vpN5bfKq2o5KHXtwGo1D1ScaKaZ1YX8Me/76ZzTBSPXz+Smy9KJkJTntJGfHVSdBrwsnPuaTO7FPi/ZjbCOVffdCMzmwHMAOjbt6+Pdh2e5ufs+MZaigCVNXXMz9mhQm9jdfWOZRuL+U3ODg5X1jD9kn7cNymVzh015SltqyWFXgokN3ncp/G5pu4AMgGccx+ZWQegG1DWdCPn3GJgMUB6ero7z8wC7D3Naudne178Y/PuQ8zOymV76REyUrowd+pwhvWKb/4vivhBSwp9IzDYzFJoKPLvAz84ZZtiYCLwspkNAzoA5b4MKt/UOyGG0tOUd2+tXNMmyo6c5Ml38nl9Syk94zvw7LTRXHNBL015iqeaLXTnXK2Z/RTIoeGSxBedc7lm9iiwyTmXBfwS+A8z+180nCC91Tmnd+B+NPOq1G8cQweIiYpg5lWpHqYKfdW19bz0QRHPvruTmjrH3VcM5J4Jg4htr5EO8V6Lvgobrylfccpzs5p8nAd827fR5Gy+Ok6uq1zazrqCcuZm57Kr/DgTh3bnkSlp9O8W63Uska/pbUUQu250kgq8DRQfPMG85XmszttP/64deenWi5gwtLvXsUT+iQpd5Awqq+tY9F4hz7+/i8h2xgOZqdxxWQrtI3UTLQlMKnSRUzjneGf7Ph5b/hmlFZVMHdWbhycPo2dnTXlKYFOhizRRsP8oc7Jy+fAfBxnWK55/u/lCMlI05SnBQYUuAhyurGHBXwtY+tFu4tpHMu/a4UzL6EtkhG6iJcFDhS5hrb7e8drmEn69Mp9DJ6qZltGX+69MpUtstNfRRM6ZCl3C1qd7Kpj91na2lhxmbL9v8YepGYxI6ux1LJHzpkKXsFN+tIqnVubzl80lJHZqzzM3jeL60Uma8pSgp0KXsFFTV8/Sj3azYHUBJ2vr+Mn4Adw7cTBxmvKUEKGvZAkLHxYeYE52LgX7jzF+SCKzr0ljYGKc17FEfEqFLiGttKKSx5bnsWLbPpK7xLB4+lgmpfXQ4RUJSSp0CUkna+r4/bpdLFpXCMAvJw3hx+MH0CFKU54SulToElKcc6zK28+8t/Mo+bKSq0f24uGrh5Gk2wpLGFChS8goLDvG3Oxc1u88wJAecbx658WMG9TN61gibUaFLkHv6Mkann13Jy998Dkx0RHMmpLG9Ev7EaUpTwkzKnQJWvX1jje2lPLkynwOHKviprHJzMxMpVtce6+jiXhChS5BaXvpYWa9tZ1PiisYlZzAkh+lMyo5wetYIp5SoUtQOXS8mvk5O1i2sZiusdE8deMF3DimD+3a6TJEERW6BIXaunpe/biYp1cVcKyqltvGpfCLSYOJ7xDldTSRgKFCl4C3YddBZmflkr/vKOMGdmXO1OEM6dHJ61giAUeFLgHri8OVPL4in+yte0lKiGHRLWPIHNFTU54iZ6BCl4BTVVvHkvVFPLemkDrn+NnEwdx1+UBiojXlKXI2KnQJKGvy9/Nodh6fHzzBlWk9eGRKGsldOnodSyQoqNAlIBQdOM68t/NYk1/GgMRYlt6ewfghiV7HEgkqKnTx1PGqWp5bW8gL64uIjmzHryYP41/G9Sc6UlOeIueqRYVuZpnAb4EIYIlz7snTbHMTMAdwwFbn3A98mFNCjHOOrK17eWJFPvuOnOSGMUk8mDmU7vEdvI4mErSaLXQziwAWApOAEmCjmWU55/KabDMYeAj4tnPuSzPr7q/AEvzy9h5hTnYuHxcdYkRSPAtvG
c3Yfl28jiUS9FryDj0DKHTO7QIws2XAtUBek21+DCx0zn0J4Jwr83VQCX4VJ6p5elUBr2zYTeeYKB6/fiQ3X5RMhKY8RXyiJYWeBOxp8rgEuPiUbYYAmNkHNByWmeOcW3nqJzKzGcAMgL59+55PXglCdfWOZRuL+U3ODg5X1jD9kn7cNymVzh015SniS746KRoJDAauAPoA75vZSOdcRdONnHOLgcUA6enpzkf7lgC2efchZmflsr30CBkpXZg7dTjDesV7HUskJLWk0EuB5CaP+zQ+11QJsME5VwMUmVkBDQW/0ScpJeiUHTnJk+/k8/qWUnrGd+DZaaO55oJemvIU8aOWFPpGYLCZpdBQ5N8HTr2C5U1gGvCSmXWj4RDMLl8GleBQXVvPSx8U8ey7O6mpc9wzYSB3XzGI2Pa6QlbE35r9LnPO1ZrZT4EcGo6Pv+icyzWzR4FNzrmsxteuNLM8oA6Y6Zw76M/gweDNLaXMz9nB3opKeifEMPOqVK4bneR1LL9ZV1DO3OxcdpUfZ+LQ7jwyJY3+3WK9jiUSNsw5bw5lp6enu02bNnmy77bw5pZSHnp9G5U1dV8/FxMVwRM3jAy5Ui8+eIJ5y/NYnbef/l07Mvua4UwYqitXRfzBzDY759JP95p+D/aT+Tk7vlHmAJU1dczP2REyhV5ZXcei9wp5/v1dRLYzHshM5Y7LUmgfqZtoiXhBhe4neysqz+n5YOKcY8W2fTy2PI+9h08ydVRvHp48jJ6dNeUp4iUVup/0Toih9DTl3TshxoM0vlOw/yhzsnL58B8HGdYrngXfH01GiqY8RQKBCt1PZl6Vetpj6DOvSvUw1fk7XFnDgr8WsPSj3cS1j2TetcOZltGXyAjdRMufwu3EurSOCt1PvvqmC/Zvxvp6x2ubS/j1ynwOnahmWkZf7r8ylS6x0V5HC3mnnlgvrajkode3AQTd15G0DRW6H103Oimov/E+3VPB7Le2s7XkMGP7fYs/TM1gRFJnr2OFjXA4sS6+pUKXf1J+tIqnVubzl80lJHZqzzM3jeL60Uma8mxjoXxiXfxDhS5fq6mrZ+lHu1mwuoCTtXX8ZPwA7p04mDhNeXoiVE+si//oO1UA+LDwALOzctlZdozxQxKZfU0aAxPjvI4V1kLtxLr4nwo9zJVWVPLY8jxWbNtHcpcYFk8fy6S0Hjq8EgBC5cS6tB0Vepg6WVPH79ftYtG6QgB+OWkIPx4/gA5RmvIMJMF+Yl3algo9zDjnWJW3n3lv51HyZSVXj+zFw1cPI0nHZUWCngo9jBSWHWNudi7rdx5gSI84Xr3zYsYN6uZ1LBHxERV6GDh6soZn393JSx98Tkx0BLOmpDH90n5EacpTJKSo0ENYfb3jjS2lPLkynwPHqrhpbDIzM1PpFtfe62gi4gcq9BC1vfQws97azifFFYxKTmDJj9IZlZzgdSwR8SMVeog5dLya+Tk7WLaxmK6x0Tx14wXcOKYP7drpMkSRUKdCDxG1dfW8+nExT68q4FhVLbeNS+EXkwYT3yHK62gi0kZU6CFgw66DzM7KJX/fUb49qCtzrhnO4B6dvI4lIm1MhR7EvjhcyeMr8sneupekhBgW3TKGzBE9NeUpEqZU6EGoqraOJeuLeG5NIXXO8bOJg7nr8oHERGvKUyScqdCDzLuf7efRt/PYffAEV6b14JEpaSR36eh1LBEJACr0IFF04DiPZueydkc5AxJjWXp7BuOHJHodK+BpCTcJJyr0AHe8qpbn1hbywvoioiPb8avJw/iXcf2JjtSUZ3O0hJuEmxa1gpllmtkOMys0swfPst3/MDNnZum+ixienHO89WkpE59ex6L3/sGUUb1Y88vL+fH4ASrzFjrbEm4ioajZd+hmFgEsBCYBJcBGM8tyzuWdsl0n4OfABn8EDSd5e48wJzuXj4sOMSIpnoW3jGZsvy5exwo6WsJNwk1LDrlkAIXOuV0AZrYMuBbIO2W7ecCvgZk+TRhGK
k5U8/SqAl7ZsJvOMVE8ccNIbkpPJkJTnudFS7hJuGnJ7+5JwJ4mj0san/uamY0Bkp1zy8/2icxshpltMrNN5eXl5xw2VNXVO17ZsJsJv3mPVzbsZvol/Xjv/glMy+irMm+FmVelEnPKgh1awk1CWatPippZO+AZ4NbmtnXOLQYWA6Snp7vW7jsUbN59iNlZuWwvPUJGShfmTh3OsF7xXscKCVrCTcJNSwq9FEhu8rhP43Nf6QSMAN5rnFDsCWSZ2VTn3CZfBQ01ZUdO8sQ7+byxpZSe8R14dtporrmgl6Y8fUxLuEk4aUmhbwQGm1kKDUX+feAHX73onDsMfL3sjZm9B9yvMj+96tp6XvqgiGff3UlNneOeCQO5+4pBxLbXFaQi0jrNtohzrtbMfgrkABHAi865XDN7FNjknMvyd8hQsa6gnLnZuewqP87Eod15ZEoa/bvFeh1LREJEi94WOudWACtOeW7WGba9ovWxQkvxwRPMW57H6rz99O/akZduvYgJQ7t7HUtEQox+z/ejyuo6Fr1XyPPv7yKynfFAZip3XJZC+0jdREtEfE+F7gfOOVZs28djy/PYe/gkU0f15uHJw+jZuYPX0UQkhKnQfaxg/1HmZOXy4T8OMqxXPAu+P5qMFE15ioj/qdB95HBlDQv+WsDSj3YT1z6SedcOZ1pGXyIjdN8VEWkbKvRWqq93vLa5hF+vzOfQiWqmZfTl/itT6RIb7XW0gKdb24r4lgq9FT7dU8Hst7azteQwY/t9iz9MzWBEUmevYwUF3dpWxPdU6Oeh/GgVT63M5y+bS+jeqT3/dvMorrswSVOe5+Bst7ZVoYucHxX6Oaipq2fpR7tZsLqAk7V1/GT8AO6dOJg4TXmeM93aVsT31EQt9GHhAWZn5bKz7BjjhyQy+5o0BibGeR0raOnWtiK+p0swmlHy5QnufmUzP1iygZO1dSyePpY/3HaRyryVdGtbEd/TO/QzOFlTx+/X7WLRukIAfjlpCD8eP4AOUZry9AXd2lbE91Top3DOsSpvP/PezqPky0quHtmLh68eRpIOBficbm0r4lsq9CYKy44xNzuX9TsPMKRHHK/eeTHjBnVr/i+KiAQAFTpw9GQNz767k5c++JyY6AhmX5PGDy/pR5SmPEUkiIR1odfXO97YUsqTK/M5cKyKm8YmMzMzlW5x7b2OJiJyzsK20LeXHmbWW9v5pLiCUckJLPlROqOSE7yOJSJy3sKu0A8dr2Z+zg6WbSyma2w0T914ATeO6UO7dpryFJHgFjaFXltXzysbinl61Q6OV9dx27gUfjFpMPEdoryOJiLiE2FR6Bt2HWR2Vi75+47y7UFdmXPNcAb36OR1LBERnwrpQv/icCWPr8gne+tekhJiWHTLGDJH9NRNtEQkJIVkoVfV1rFkfRHPrSmkzjl+NnEwd10+kJhoTXmKSOgKuUJ/97P9PPp2HrsPnuDKtB48MiWN5C4dvY4lIuJ3IVPoRQeO82h2Lmt3lDMgMZalt2cwfkii17FERNpM0Bf68apanltbyAvri4iObMevJg/jX8b1JzpSU54iEl5aVOhmlgn8FogAljjnnjzl9fuAO4FaoBy43Tm328dZv8E5R9bWvTyxIp99R05yw5gkHswcSvf4Dv7cbVjRmp8iwaXZQjezCGAhMAkoATaaWZZzLq/JZluAdOfcCTO7C3gKuNkfgQHy9h5hTnYuHxcdYkRSPAtvGc3Yfl38tbuwpDU/RYJPS45LZACFzrldzrlqYBlwbdMNnHNrnXMnGh/+Hejj25j/7a1PS5nyu/Xs3H+UJ24YyVv3XKYy94OzrfkpIoGpJYdckoA9TR6XABefZfs7gHdO94KZzQBmAPTt27eFEb/pskHduP3bKdz7ncF07qgpT3/Rmp8iwcenZw7N7IdAOjD/dK875xY759Kdc+mJied3BUrXuPb87ylpKnM/O9PanlrzUyRwtaTQS4HkJo/7ND73DWb2XeBXwFTnXJVv4
olXtOanSPBpySGXjdv+q3UAAAQgSURBVMBgM0uhoci/D/yg6QZmNhr4PZDpnCvzeUppc1rzUyT4NFvozrlaM/spkEPDZYsvOudyzexRYJNzLouGQyxxwF8a75NS7Jyb6sfc0ga05qdIcGnRdejOuRXAilOem9Xk4+/6OJeIiJwjjVOKiIQIFbqISIhQoYuIhAgVuohIiDDnnDc7NisHzvcGXt2AAz6M42/BlDeYskJw5Q2mrBBceYMpK7Qubz/n3GknMz0r9NYws03OuXSvc7RUMOUNpqwQXHmDKSsEV95gygr+y6tDLiIiIUKFLiISIoK10Bd7HeAcBVPeYMoKwZU3mLJCcOUNpqzgp7xBeQxdRET+WbC+QxcRkVOo0EVEQkTQFrqZ/U8zyzWzejMLyMuVzCzTzHaYWaGZPeh1nrMxsxfNrMzMtnudpTlmlmxma80sr/Fr4OdeZzobM+tgZh+b2dbGvHO9ztQcM4swsy1m9rbXWZpjZp+b2TYz+9TMNnmd52zMLMHMXjOzfDP7zMwu9eXnD9pCB7YDNwDvex3kdJosrv09IA2YZmZp3qY6q5eBTK9DtFAt8EvnXBpwCXBPgP/bVgHfcc6NAi4EMs3sEo8zNefnwGdehzgHE5xzFwbBtei/BVY654YCo/Dxv3HQFrpz7jPnXCCvWNzs4tqBxDn3PnDI6xwt4Zz7wjn3SePHR2n4pgjYG7e7BscaH0Y1/gnYqxHMrA9wNbDE6yyhxMw6A+OBFwCcc9XOuQpf7iNoCz0InG5x7YAtnWBlZv2B0cAGb5OcXeMhjE+BMmC1cy6Q8y4AHgDqvQ7SQg5YZWabGxeiD1QpQDnwUuPhrCVmFuvLHQR0oZvZX81s+2n+BOw7XWk7ZhYH/BfwC+fcEa/znI1zrs45dyENa/JmmNkIrzOdjplNAcqcc5u9znIOLnPOjaHh8OY9Zjbe60BnEAmMARY550YDxwGfnltr0YpFXgnylZBatLi2nB8zi6KhzF9xzr3udZ6Wcs5VmNlaGs5XBOIJ6G8DU81sMtABiDezPzrnfuhxrjNyzpU2/rfMzN6g4XBnIJ5bKwFKmvx29ho+LvSAfoce5L5eXNvMomlYXDvL40whwRoWrn0B+Mw594zXeZpjZolmltD4cQwwCcj3NtXpOececs71cc71p+Frdk0gl7mZxZpZp68+Bq4kMH9Q4pzbB+wxs9TGpyYCeb7cR9AWupldb2YlwKXAcjPL8TpTU865WuCrxbU/A/7snMv1NtWZmdmfgI+AVDMrMbM7vM50Ft8GpgPfabxU7dPGd5SBqhew1sz+Hw0/6Fc75wL+csAg0QP4m5ltBT4GljvnVnqc6WzuBV5p/Fq4EHjcl59co/8iIiEiaN+hi4jIN6nQRURChApdRCREqNBFREKECl1EJESo0EVEQoQKXUQkRPx/9P/Fa7z+OPsAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "7fTPkHSakMdP", - "colab_type": "text" - }, - "source": [ - "The line clearly does not do a great job of fitting the data. There are many possible reasons for this. Perhaps the measuring device used to capture the data was not very accurate. Perhaps `y` depends on some other factor in addition to `x`, and if we knew the value of that factor for each data point we could predict `y` more accurately. Maybe the relationship between `x` and `y` simply isn't linear, and we need a more complicated model to capture it. Regardless of the cause, the model clearly does a poor job of predicting the training data, and we need to keep that in mind. We cannot expect it to be any more accurate on test data than on training data. This is known as *aleatoric uncertainty*.\n", - "\n", - "How can we estimate the size of this uncertainty? By training a model to do it, of course! At the same time it is learning to predict the outputs, it is also learning to predict how accurately each output matches the training data. For every output of the model, we add a second output that produces the corresponding uncertainty. Then we modify the loss function to make it learn both outputs at the same time.\n", - "\n", - "### Epistemic Uncertainty\n", - "\n", - "Now consider these three curves. They are fit to the same data points as before, but this time we are using 10th degree polynomials." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hVoRaGn6kMdQ", - "colab_type": "code", - "outputId": "ec9c4dfb-f902-4b73-f785-e71b3e8cfc85", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 211 - } - }, - "source": [ - "plot.figure(figsize=(12, 3))\n", - "line_x = np.linspace(0, 5, 50)\n", - "for i in range(3):\n", - " plot.subplot(1, 3, i+1)\n", - " plot.scatter(x, y)\n", - " fit = np.polyfit(np.concatenate([x, [3]]), np.concatenate([y, [i]]), 10)\n", - " plot.plot(line_x, np.poly1d(fit)(line_x))\n", - "plot.show()" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAr8AAADCCAYAAABAOqrYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdeZhcZZX48e9be3dXL+l9y74RkoCBGPY9sokSQRFwQQeHmRF3RWFcZhx0QHH054riMqKjIggCAgGBCIhsSQhkIzsk6X1J7921v78/qqq7013VXVX31pKu83mePKSrqm+9Hbqqzj33vOcorTVCCCGEEELkA0u2FyCEEEIIIUSmSPArhBBCCCHyhgS/QgghhBAib0jwK4QQQggh8oYEv0IIIYQQIm9I8CuEEEIIIfKGLZNPVllZqefNm5fJpxQip23evLlLa12V7XXEIq9XIY4mr1chjh1TvV4zGvzOmzePTZs2ZfIphchpSqmD2V5DPPJ6FeJo8noV4tgx1etVyh6EEEKILFFK/Uop1aGU2j7utnKl1JNKqb2R/87K5hqFmGkk+BVCCCGy59fAxRNuuxl4Wmu9GHg68rUQwiQS/AohhBBZorV+Djgy4ebLgbsjf78bWJfRRQkxw0nwK4QQQuSWGq11a+TvbUBNrAcppW5QSm1SSm3q7OzM3OqEOMZldMObES/s66J7yMe7TqzP9lKEEEKkyb0bD9Pa5+H0RRW8bXYZdmt+52i01loppePcdxdwF8Dq1atjPkaIdBr0Bli/rZWeYR+9w376RvxcurKOMxZVZntpU0o4+FVKWYFNQLPW+jKl1HzgHqAC2Ax8SGvtS8ciQyHNzQ9sIxjSEvwKIcQMtflgD196YCtaw/eegkKHldMXVnL7lSupdDuzvbxMaldK1WmtW5VSdUBHthckxERaa2783as8uyd81cFmUViU4sUD3Tz9uXNQSmV5hfElc0r9aeCNcV9/C/ie1noR0ANcb+bCxnvlrSMcOjJMx4CHUEhOboUQYqbxBULcfP9W6ksL+MfN5/PTD57ElSc18vSudn730qFsLy/THgaui/z9OuChLK5FiJgefr2FZ/d0csslx7H96xex95uX8I11KzjQOcTmgz3ZXt6UEgp+lVKNwDuBX0S+VsD5wJ8iD0lrQf69mw4D4A9qeobTklwWQgiRRXc+s5+9HYN8Y90KGsoKuHhFHbeuW8HJc2axfnvr9Ac4Riml/gC8CCxVSjUppa4HbgfeoZTaC6yNfC1Ezugd9nHrIzs5sbGUj521ALfThlKKd55QR5HDyh83Hs72EqeUaOb3/w
FfBEKRryuAXq11IPJ1E9Bg8toAGPD4eWxbKzUl4Utebf2edDyNEEKILNnbPsCP/raXd59Yz3nHVR9138UratnVNsBbXUNZWl16aa2v0VrXaa3tWutGrfUvtdbdWusLtNaLtdZrtdYTu0EIkVW3r99Fz7Cf/75iJVbLWHlDkdPGZSfU8+i2Vga9gSmOkF3TBr9KqcuADq315lSewOhu1Ee2tuLxh/iXsxcC0NHvTWUZQgghclB0T0eR08bX3nX8pPsvWVkHwPrtbZlemkjBc3s6ufOZ/bx8oJsRXzDbyxFp8MqbR7hn42GuP3M+y+tLJ91/1dtnM+wL8sjrLVlYXWISyfyeAbxbKfUW4Q1u5wPfB8qUUtENc41Ac6xv1lrfpbVerbVeXVWV/Ej0P248zJIaNxcuD3d6kcyvEELMHH/a3MTmgz189Z3Hx9zU1lBWwImNpTw+g0sfZopBb4BP3bOFbz2+i/ff9RIr//MJLv/xP9ja1JvtpQmT+AIhbnlgKw1lBXxm7eKYjzlpThmLqt38cVPulj5MG/xqrW+JXIqZB1wNbNBafwD4G/DeyMPSUpC/p32A1w73ctXq2VQXuwBol+BXCCFmBK01v3z+TVY0lHDFSfEr5y5eUcfrTX00945kcHUiWb998SC9w37u/qc1/PK61fzLOQto6xvhE7/fwlAOXwIXiXvg1Sb2dw7x9Xcvp9ARu2GYUor3r57NlkO97G0fyPAKE2OkgeKXgM8ppfYRrgH+pTlLGnPfpsPYLIp1qxpw2CxUuh0S/AohxAyx8a0edrcP8OFT503ZFumSFbUAPC6lDzlr2BfgF38/wNlLqjhnSRUXLKvhpouO44fXnMThnmH++7E3pj+IyGnBkOanz+5nZUMpFyyrnvKx7zmpAZtF5ezGt6SCX631M1rryyJ/P6C1XqO1XqS1fp/W2tRiXH8wxAOvNrN2Wc3opbDqYhftUvMrhBAzwm9fOkiJyzZt//Z5lUUcV1sspQ857PcvH6J7yMenzl901O1r5pfzsTPn87uXD432gxXHpse2tfJW9zAfP3fhtD18K91O1i6r4YEtzfgCoSkfmw05OzrnpQPddA/5eO/JjaO31Za6aOuTzK8QQhzrOgY8PL69lfeePJsCh3Xax1+yoo5NB3vokKt/OcfjD/Kz5w5w2oIKVs8rn3T/5y9cyqJqN1/80+v0DfuzsEJhlNaanzyzn4VVRVy0vDah77nq7Y0cGfLxwv6uNK8ueTkb/B4ZCvfznV9VNHpbTYmTjgF54xMiFUqp2UqpvymldiqldiilPp3tNYn8de/Gw/iDmg+cOiehx1+yshat4YkdUvqQa/648TCdA14+ecGimPe77Fa+e9WJdA36+I+Ht2d4dcIMz+zu5I3Wfv71nIVYLIlNbjt5bvhEaHdb7tX95mzw642kyZ22sSXWlLjoGvTlZApdiGNAAPi81vp44FTgRqXU5N5SQqRZIBji9y8f4sxFlSyscif0PYur3SyoKpKWZznGGwjy02f3s3ruLE5bUBH3cSc0lnHjeYt48LWWnMwEiqn95Jl9NJQVsG5V4iMdSgvsVBU72dcxmMaVpeYYCH7HLofVlIQ7PnQOSt2vEMnSWrdqrV+N/H2A8LjytAynEWIqG3Z10NLn4YOnzk34e5RSvGNZDa+8eUQSIDnk8e1ttPZ5+MT5i6atA/34uQupL3Vx+/pdhEI6QysURr3y5hE2vtXDP581H7s1ubBxcbWbvRL8Js7rDzfHdozL/NZGgl+p+xXCGKXUPGAV8HJ2VyLy0W9fOkhtiYu10+wYn2hpbTGBkOZwz3CaViaSteVQLwV2K2ctnr6Pv8tu5fMXLmVrUx9/2Zq7AxDE0X78t31UFDl4/9sTK1Eab1G1m/0dg2idWyc7uRv8xih7qI6MOJYND0KkTinlBu4HPqO17p9wn6GJjEJM562uIf6+t4tr1szBlmQWaX5leA/Igc6ZOer4WLSjpY9ldcVHjbidyntWNX
B8XQl3PLEbb0AmwOW6jW8d4dk9nfzz2QsS2pg60aJqNwPeAB0DuXXF/pgKfkczvxL8CpESpZSdcOD7O631AxPvNzqRUYjp/GHjIawWxdVrZif9vQsi9cEHOnPvMmo+CoU0O1v6Y464jcdiUfz7pcto6hnhNy8cTOPqhFFaa779+C6qi51cd9q8lI6xqDr8mt3bnluv2ZwNfn2BEA6b5agaolmFDuxWJb1+hUiBCr+Yfgm8obX+brbXI/KPLxDiT5uauOC46tE9HMkoLbBT6XZI5jdHHDwyzJAvyIqGkqS+78zFlZyzpIofbthL77AvTasTRj2zp5ONb/XwyQsWp5T1hbHgd19HbnV8yNng1xsIHpX1hfAZY3jQhWR+RXb1Dfv5/L2vH2slOGcAHwLOV0q9FvlzabYXJfLHX3e20T3k49pTkq8djFpQ6ebNLgl+c8GOlj6ApDK/UTdfchwD3gA/3LDP7GXNCKGQpnvQy86Wfva0D+APZm6T54Nbmjn9tqf56P9uxGpRFNhSDxWr3E4KHVbueGI3829+lDNu38CDW5pNXG1qYg9mzgHeQOioTg9RNSVOCX5F1r38Zjf3v9qEy27hm+9Zme3lJERr/TyQWGGeEGnw+5cP0VBWwNkJbI6KZ35lEU/vajdxVSJVO1r6sVkUi2sSa1c33rK6Eq5++2x+/cJbvGdVAysakg+gj1UPbmnmjid209I7Qn1ZATddtJSLltfy3N5OntzZzov7u+kY8OAPjm0Ss1sVC6vcHFdbzKUr61i7rCbhfrvJru2WB7YxEmk6EAxpvvrQDmxWS1JtzqIeeq0Fjz9ItLlHc+8ItzywDSCl48X6t0vlOLkb/PpDkzK/EJ7ylosNk0V+aekdAeDeTYf5t3MX0jirMMsrEiK3vdk1xAv7u/nChUsMfWgvqCrij5t89I34KS2wm7jCmcusgGHi8Zp7R7BbFOu3taV0vJsvXsZTb3Twpfu38tCNZyS9ATJZWmu2HO7l1YM9dA366B700jPso9hlZ35lEfMri0YDzHQEljA5uGzuHeFz976G1aLwBzUlLhtnLa5iTkUhNcVOakpceAMhdrUNsLutn+f3dfPgay0sqCzi+rPmc+VJjTy+vc20/793PLF7dG1RI/4gdzyxO6Vj3vHEbiZ2tUv1eLH+7VINpHM3+I1R9gBQXeziuT3SIFtkV3PvCHarQqH48d/2cdsVJ2R7SULkpPGBEkCxy1jAOn7T26o5swyvL5cppS4Gvg9YgV9orW9P9hhmBgyxjucP6ZSPV1po57/evZx/+92rnHTrkwx4AqYG59Fg8No1cxjwBvjL6y2jv4d2q6K8yMGsQgcDngH+PO5SfKXbwblLq1m7rJozF1fx1M72tAaXIQ0FVgu//uhq1swvn7KXbiAYYv32Nu567gBf/vN2bl+/C48/OJolNvr/N5rYSfT2TB7PzMA8h4Pf0FE9fqNqS10MegMMegO4nTm7fDHDtfR6mD2rkLOXVPF/Lx3k385ZxJwKyf4KMd7EQAng9vW7KC2wpxw8RDs9vOcnL9BgQqCUq5RSVuDHwDuAJmCjUuphrfXOZI6Tjkyemcfz+INYFPR7AoD5wXlz7wh3/HU3FgVnL6nic+9YwrlLqygvchy1od7jD/JW9xC7WgfYsKuDv+5o40+bm7BZFCGtDV+211rz4oHu0eB7omFfkDMWVU57HJvVwrtOrOeyE+p46cARPvyrl48qjwBj/z8q3c6Yg8TqywqSPlb0+2L9zKkcz8xAOoc3vIVw2mPX/AJS9yuyqimSAfi3cxditSh+uGFvtpckRM6ZKlBKxYNbmvnek3tGv44GIrmwgSYN1gD7tNYHtNY+4B7g8mQPksuZPIDv/HVP3MviqYj1Owfhq8a//ugarjy5kQq3c9I0OpfdynG1Jaxb1cAPrlnFq199B/fccCpOmyXm+v77sTemHdzg8Qd5bk8n33hkJ+d+5xmu/fnLxBuCl2wwqJTitIUVkwLfqF
T+f/QO+whqPWljSIHdyk0XLU36eAA3XbR00lX8VI8X798olUA6Z1OnvjhlD9H2OO39noRnwgthtpbeEc5bWkVNiYsPnDKXu198ixvPW8S8SBN+IYT5gdIdT+zGM2G0sZEsV45rAA6P+7oJOCXZg5iZeUvH8cz8HdFax82sJpsws1ktnLqggmFf7EEcHQNeTr3taeaWF1FX5qK21EWB3Ur3oI/uIS8d/V62NveF27ZaLayZX85n1i7GH9D8x8M7jgrQjQSXDXH+fyTbSjAU0nzu3tcZ8Pj59NrF3LepyZQyj3WrGghpzefvfR0dWW+qx7vpoqWTriSl+m+Xs8GvNxCKWdYwPvgVIhu8gSCdA14aysJlDv967gJ+/8pBfrBhL9+96m1ZXp0QuSOXA6WZQCl1A3ADwJw5sdvHmRkwpON4Zv2OHOgcHC1HiPc8qYi3vrICO6cvrKS5d4RXD/XQ1hfuzlBWaKfS7aS8yMGHT53LWUuqWDOv/Kg+uQ6bxbQa4lj/PwD6PX4e3drKO0+oS+g4dz67nw27Ovj6u5dz3enz+MzaJSmtJ5YrTmrkV/94k1mFDn57fdLnb6Oi/0b/+fAOekf81BQ7ueXSZTOv20NFUayyh2jwK4MuRHa09oZPvOrLwr+L1cUu3rmynr/t7sjmsoTIOTddtJTP3fvaUZeNcyFQOkY0A+PH4DVGbhultb4LuAtg9erVMa9/RwODbz76Bp2DXsqLHHztsuMNZfIAPn/f6wRD2nDddbzgbXl9CVrrSeUJE/WN+Lnruf38/O9v4rJZeP/q2Tz0WvNRVwjSEez/57uXH/Uzh0KaoNZTblaLWreqwbQrFdHjjA+mP3zaXB7b1sqNv3+V379SwacvWMKa+eUxv79zwMsdT+zivs1NvOvEej582lxT1jXR4upiXj7Qbfg461Y1MOAN8NUHt/OXT51JdXHyw3Igl4PfQBCnffIvkdtpw+200dYnmV+RHdEsU8OssQ/c8iI7I3EujwmRbma3sjLL/MoiQhpKXDZTdvKbnXXMcRuBxUqp+YSD3quBa1M50LpVDSyrK+Gi//cc/3X5ci47od7Qws5bWk0wpPnixUv5+LmLDB1rYvBWV+pidnkhf93ZzjcefYMbz1tEeZFj0veN+IL8+oW3+Omz++kb8XP52+r58juXUV3s4rSFFaa9HmIFl7GOZ7EoLFlqox4rmL7+zPnc/eJB7nxmP1f97EVOXVDOx85cQMOsAopdNoocNu7bfJgfPL0PbyDIP5+1gM+uXTLtyUaqFlW7+fOWZlOaFXgjr/9YsyASlcPBbwhnnDOomhInHQMS/IrsiGaeGsZlm1x2K55AMKFMhRBmirW7/ab7Xucf+7p4+7xybFZFfVkBJ8+dlVBWykw/3LCP0gI7/7j5fFO680Q/4L/x6E66Bn1UFDn4qoEsJuTuiYPWOqCU+gTwBOFWZ7/SWu9I9XiuSDLJ4zc+KWxHa+qT3WKZGLyFQpqvPLSdXz7/Jr9+4S3OXFTJu06sp6zAzvaWPna09LP5YA9Hhnyct7SKL1y09Ki1mJlZTcfxMsFmtXD9mfO5ds0c/vDKIX767H4+9ptNkx53/nHVfOWdy0ZbCKZLdI/W/o5BTpxdZuhY3khWP9a+sETldvAbI/ML4dIHyfyKbIkGv7WlY5dbXHYrWoMvGHsyoRDpctv6NyZdMvaHNPdtbuK+zU2jt5W4bJwT6V164fG1R9UgpsPOln6eeqOdz65dYmpbynWrGjh9UQVrvvk0nzx/keHA18weuGbTWj8GPGbGsVyR7knegPErVDtb+oFwaUI6WCyKb65bwQdOmcMjW1v5y+stfOG+1wFQKnxF4azFlXzglLlxL+eLsAKHlX86cz7XnjKHVw/20DfiZ8AToN/jZ1ldSULt1cwQnQK4z4Tg1zejg19/MG4QUVvi4uU3j2R4RUKEtfSOUF3sPOr3M/oi9Pgl+E3FxOzbFy5cwntOasz2snJWKKT5x/4u7n7h4J
T7H/5x8/n4AyF2tw/w9BvtbNjVwV9eb6G2xMXnLlzClSc1Yk3TJKsf/20fxU4bHzljnunHrnI7KXbaONA1ZOg4ZveszWWuyPuSKZnfln5qS1xUup2GjxWPUorl9aUsry/lixctZWtTH/5giGV1JRRJj/+kuexWTs9QoBvL3PJC7FbF3o5Bw8eKzoEwcpU1Z3+Dwhm02FF9dYmLjgEPoZBO2whCIeJpjgRo441mVfxBkJGrSYmVffvsva9z2/pdrGgoZV5FEdesmc3imuIsrzT7tNY8ubOd7z65h11tA1QUOXA7bQx6A5Me21BWMFqaM6+yiIuW1xIKaV460M23n9jNF/+0lV89/ya3XLqMc5ZUmbrOfR0DPLa9lY+fuzAtI4iVUsyvKuJNg8FvPnWPcI6WPRjP/O5o6Utb1jcWpZThbKHILpvVwvzKIvaZEvzGboWbjJwccqG1Dpc9xPnhakuc+IOanmFfhlcmRHi6W8OE4LfAbl5WJd/Ea0rf7/HT0jvC714+yDt/8Dw/eWYfgWB+/vtqrXlmdweX//gf3PDbzXgDIf7nfSfywi3n8411K0Z//6LibQKzWBSnL6rkzx8/nR9du4phX5DrfvUKN933eswAOhX+YIgv/mkrRQ4b15+5wJRjxrKgsogDncaCXzOb5uc6p82CUmObhVLl8QfZ3zmU0eBXzAyLqt3s6xgwfJxwfGjsCuu0wa9SyqWUekUp9bpSaodS6uuR2+crpV5WSu1TSv1RKTV5O2aK/EGN1sSc8AZj7c7apNevyLBoE/XxnR5gLPMbK4ibiR7c0swZt29g/s2PcsbtGwxN2IqXZfP6Qzz+mbP5x83nc8Gyar79+G6uvPMF9rYbf/M8lrx8oJurfvYiH/nfjXQP+vj2e0/gyc+ezZUnN+K0WVm3qoHbrlhJQ1kBinDG97YrVk552V4pxWUn1PPk587mE+ct4v5Xm7j0+39n80Hj5WTffXIPrx7q5b+vWBlzl75ZFlS5ae4dMdRl5aaLliZ84nCsU0rhtFkmDQlJVmufh2BIM7dCBvqI5MyrKOJwzwjBiSPzkuT1x0+OJiqRsgcvcL7WelApZQeeV0qtBz4HfE9rfY9S6qfA9cCdhlYTfcJIQb4jXreHyEajjn4vy411bBEiKV2DPnyBEPWlR/cWdJl4STHXmb1JqKzQTs+wf9Lt0exbpdvJnR88mUe2tvC1h3bwzh8+z+1XrOSKGV4T/NrhXv7nr7v5+94uqoud3Hr5ct7/9jk4Yrzpp7ob3Wmz8oWLlnLu0io+e+9rvO+nL7J2WQ3bm/to7fMk3f3g2T2d3PnMfq5ZM5t3n5jeN+cFVeHg682uIY5PMQsZ/bm+dP9WvIGQ4Z61uc5ltxp+j2qNnKzOxOy4SK/aUhfBkObIkI+q4tTrxeO1wk3GtMGvDg+vjhZp2CN/NHA+Yz0H7wb+E9OC38hOvim6PYBkfkXmjfX4LTzqdtdo2cPMD37N3CQUCIawKIUi/KYSFSv7dtkJ9Zy6oIJP/n4Ln7v3dXa1DfCli49L24atbAiFNBt2dfDzvx/g5TePMKvQzpcvXcaHTps7+juWDqvnlfPYp87i+l9v4q8720dvT+bEpqPfw+f++BpLatx87bLlaVtr1PxK48EvhH+u3750EJfdwu8+dqpZy8tJLpvx4DdWq0chEhEdSNHe7zEY/Bove0how5tSygpsBhYBPwb2A71a62iRWBPhOeSxvnfa8YsTTdfDrTryjyYjjkWmtYxmPeJkfg1eUjwWmLlJ6KHXWuge8vGxM+ezfnvbtL1WK91OfnP9Gm59ZCd3PXeA3W0D/OCaVWnZVJVOE7tbXH/mfIIhzR82HuJA5xD1pS6+fOkyrl4zm2JXZn62Ypc95vS0RE5sBr0BPn3Pawz5Atxz7alpb6MGY8HvgU7jG2g8/iBlx9jvUCpcdovhfQktvR6UgprS9HV6EDNTdUn4dyY8pyH1HtHRbg9GJBT8aq2DwNuUUmXAn4
HjEn2CRMYvTjTd9A671UJpgZ0jQ7LhTWRWvKyH05Y/mV+zRswGgiF+9Ld9LKsr4cvvXMZXLjs+oe+zWy381+UrOK62hK89tJ3Lfvh3vnfV21g9L339Ps0chPDglmZuvn/r6IlSc+8I//XITgBObCzl+1e/jUtX1mV8IAXEP4Fp7h0hEAxhm7AmrTWPbmvl1kd20jHg5dtXnpCxrhyFDht1pS7e7Da26Q2m7is/k5hR9tDSO0Kl2yktHUXSolftO6Zoz5iIcCvcDAS/UVrrXqXU34DTgDKllC2S/Z00c9wIX3D6BsYVRQ66JfgVGdbcO0KRwzop05hPZQ9mjZh9ZGsrb3YN8dMPnpRSv8ZrT5nD0lo3n/nja1z1sxe58bxFfOqCxaYHjbFqnG++fys9wz7OXFTJkC+IPxgiFNKENIS0xh8M4Q9qAsEQQ74gXYNeuga8dAx4eWxbK4EYGz5qip089IkzTV17suKd2ACc+51n+MApc6l0O3DZrdgsit+9fIjn93WxvL6En37wZFbNmZXR9daUuAx/kEL4devKg2DOabcavjrV0je51aMQiahyR6/aGwx+AyGKXcY69U773UqpKsAfCXwLgHcA3wL+BrwXuAe4DnjI0ErG8fqnrvkFKC9ycGRQgl+RWc094Tf+icFa9DKvNw9anUUznrc+spPuIR+VbgdfeWdyI2aDIc0PN+zluNpiLjy+NuW1nDw3XKv69b/s5Icb9vHcnk6+9q7jOXxkxHCmdsDj543WAf7j4R2Tapw9gRBf/8vOpI5XYLdSWeyIGfgCdAwYD+KMinVi47JZ+OCpc9l8qIdvPb7rqMcXu2z81+XL+cApc7NSe11T4jTc6xfyJ/PrtFlMyfwurZWe2yJ5DpuF8iIH7QPGSla9gRCVGaj5rQPujtT9WoB7tdaPKKV2Avcopb4BbAF+aWgl44zV/Mb/4cqLHBzsHjbrKYVISEvf5DZnEA4QIH9ana1b1cDbZpdx7nee4eZLliUdWD66rZX9nUP8+NqTDA+qKXbZ+c77TuT846r58p+3ceWdL2JREI0xE9m0FQiG2Nc5yKsHe9l8sIcth3oSmh72w2tWUeS04rBasahwOymLArvNgsNqwWZV4aDX7RydSnXG7RtMKRtJh+i/T6wTB601XYM+PP4g3kAQjz/E7FmFlBZmr1a2utjFSweMt2fzTDFRdCZx2a30GeiPr7WmpdfDeUurTVyVyCfVxU46DO7XylS3h63Aqhi3HwDWGHr2OEZbnU1V9uB28Oqh3nQ8vRBxtfR6OKFx8qShY6XsQSl1MfB9wAr8Qmt9e6rHqitzoRQ09SR3Eqq15kcb9rK42s0lK1LP+k506co6zllSxem3b6Bv5OjWaSP+IF//yw7KCu0MeYMM+QL0j/jZ2z7IG2397G4bGD3prihysGrOLN6zqoHlDSX8+wPbY3aWaSgr4F0ptPMyq2wkXeK1TVNKGdqhnQ41JU76RvzhsgUD3TC8/lBau2nkCpfNQruBq1O9w35G/EHqcuBETRybakpchq9yZarPb8aNlj1M8cOVFznoGfbJiGORMcO+AEeGfDFb/LiOgQlvkas3PyZcutQEbFRKPay1Tu76fYTTZqWm2EVTT3JdHl5v6mNP+yDfvvIE01+7RU4b/SOTewYD9Az7+cj/bjzqtvIiB8vqivnQqXNZ3lDCSXNmMae88KiylpsvCZgarE6VXRXJqY5soOkc8DK7vHCaR8cWCml8QeMfpscCl92KJ5D6CfrYhl/XNI8UIraaEie72rj6vuAAACAASURBVPoNHSNjrc4yLbGyByfBkKbf46esMH1ThISIaukNZ/9iBb9Wi8JuVYY+WDJgDbAvctUGpdQ9wOVASsEvQOOsgqQzv+u3t2KzKC5abl7Wd7x4m7aqi8PDMoqcVoocNopdNkoL7NNutktHsJrqUApxtGjby44BT8rBb/TzJi8yv3ZjNb+tfeH3wFwo0RHHpupiF50DXoIhnfI+AV
8gw90eMiVa9jBdtweA7iGfBL8iI1qmmWxkRgP5NGsADo/7ugk4xcgBG2cVsPGtnoQfr7Xm8e1tnLawIm21ovHKCv790mWcPDe1bgQSrOamaOskI7vHE/m8mSlcdutosJ+K6d4DhZhOTYmTkIbuQe/olZtkmbFBNSdf7b5pJrwBozPju6Xjg8iQ0Ut+MTa8QaSNUA6XPSRCKXWDUmqTUmpTZ2fntI+fXV5IW7+HQDCxn/uN1gEOdg9zyYo6o0uNa92qBm67YiUNZQUowpn6265YKcHrDGTGwKPoazY/Mr/GTtBbekdw2CyjySchkhUNeFOt+9Va53fZQ4U7/OI7MpT99kAiP7T0jmBR4X6ssRQ4LKMDWnJUMzB73NeT+nMnO5SmcVYBwZCmtS+xy86Pb2/FouDC5TVJLTxZkqnND7MKHditytAGmmgw6MpCqzOl1PuA/wSWAWu01pvG3XcLcD0QBD6ltX7C6PO5bOEJb1rrlHprN/eOUF/qSul7hYDxV2s8rGhIfspbInMgEpGTmd/Eyh7CAYgMuhCZ0twzQm2Ja9KUqyiXzZrrrc42AouVUvOVUg7gauBhIwdsnBUOeBPd9LZ+extvn1dOpTu3ugaIY5PFoqhyOw1lfhNJtqTRduAK4LnxNyqljif8+lwOXAz8JLJh1RBnJLudaulDa59HSh6EIWNXa1I7YR17vc7E4DdyGWqqVmezisL1gjLoQmRKc2/sHr9RZowOTafINMZPAE8AbxDu2b3DyDEbI/8eiWx629cxyN6OQVPbmwlRXRLeQJOqbGZ+tdZvaK13x7jrcuAerbVXa/0msA8TWotGSztSHcYT3fApRKqqDJYqjQ1Bm6FlDxYFtil2AjptVoqdNsn8ioxp6Rth1ez4G6bCO6lzu+ZXa/0Y8JhZx6srLYj0+p0+8/vEjjYALk5jva/IP0anvOVot4cG4KVxXzdFbjMkGuB7AkFKSW7DqT8Yor3fQ32ptDkTqbNbLVS6HSmXKpm1QTU3M7+B8LSd6eqKyt0OjkjwKzJAa017n5e6Kd74jfbQPBY5bBZqSxLr9bt+eyur5pRRKx+ewkTVxcaa5kczv+nq9qCUekoptT3Gn8tNOHZSG1RdttSH8bT3ewhp6fQgjKsqdqU85c2ssoeczfwm0saivEiCX5EZ/SMBfMHQlBOunDYrXXlYhpNIr9/DR4bZ3tzPv196XIZWJfJFTYmT3uHUp7yNlT2kJ/OrtV6bwrdNuzk1cuykNqgaGcYT7XMuwa8wqqbESfuAwbKHmZj59QUSm7ZTUeSQsgeRER2RF+pUwa/LnvPdHtKicVbhtJnfx7eHSx7S2eJM5Kfq4rEpb6kwK5NksoeBq5VSTqXUfGAx8IrRg46WPaTwPtXaJz1+hTlqil0GNrxFr9QYO1nNqVd7VKI93MKZX2l1JtIvelk1+kEbS65veEuX2bMKaO0bwT9Fr9/121tZXl+S8hQuIeKpLhmb8paKdGd+p6KUeo9Sqgk4DXhUKfUEQGQj6r2Epy8+DtyotTb85jKW+U3+UM2jAy6kbEkYU1PipHvQm3B/+PFmdreHBEfXlRc5OTLkQ+tpr/YIYUj0gzX6QRtLgT3nW52lReOsQkIa2vpiBx+tfSO8eqiXS1dK1leYL3pCarh1Una6PfxZa92otXZqrWu01heNu++bWuuFWuulWuv1Zjzf2Ia3VMoeRigrtFPoyMlqSXEMqSpxhae8pXDl3qzXa24Gv/7QlG3OoiqKHPiDmn5PIAOrEvmsoz+a+Z267CHXuz2kQ7Td2eE4db/RkoeLpcWZSIOaaOY3xQ00YxvecqrbQ1o4DWx4a+n1UF8qJQ/CuBoD7c68Jr1eczP4TbDmNzriWDa9iXTrHPBSYLfidsbPekS7PeTblYjpBl2s397G0ppiFla5M7kskSeiU97aDdb8ZqPPb6YZqfmVHr/CLNEpbx0pXK2Z8RPeEqr5lRHHIkM6BrxUFTunbL/nslvReuzFmS9qS1
1Y4vT67RjwsPGtI1yyUrK+Ij2iU95S+SCFcCZJKXDEmdw4k0Q/V1MZchEOfqXeVxg3OuI4hTr9sW4PMzXzm8BZeGV0xHEetpcSmdUx4Jmy5AHGzkTzrfRhrNfv5LKHJ3a0ozVS7yvSqrrElfqGt8iVxun6ys8EoxvekuxHPuDx0+8JSOZXmKLS7UCp1Or0Z3TNb6KtzsYyvxL8ivTqGPBOudkNxo8Ozc9Nb7Eyv+u3tbKgqojF1VLyINKnuthY5jfHprulTaplD6190uNXmMdmtVBR5EypTn+GT3hLrNVZRaTmV3r9inTr7PdO2eYMjDWQP9Y1ziqgeULw2z3o5eU3j3Dpirq8yKqJ7KkpcaXcNN/jTyzZMhOMnaAn9x4VbXPWIGUPwiQ1Jc6UJjOOtTqbiWUP/sRanbnsVgodVsn8irQa8QUZ8AamHHAB4VZnQH62OysvnNTr98md7QRDWup9RdpVF4envHlTGC/uCeRP5tdutWC1qKTLHloj093qpNuDMElNiSvFbg/hz5hEOoJNJTeD30Birc5ARhyL9ItOjpou+DWyk/pY1zirgJAe+5AEeGx7G3MrCjm+riSLKxP5wMjucW8eZX4BXLbkWzK29I5gtahp9z0IkajqYmeKNb9B7FaF1WLsamJOvuITLXsAGXEs0m90wMW0wW/qPTSPddFev9FNb92DXl7Y18XFK2ql5EGknZEpb/mU+YXUJlG29I5QW+LClgcdMURmVJe46B5KfspbMvHhVHJyVIs3EEx4J195kYPOQWl1JtInkdHGYGx60rFudqTX729ePMidz+7n5TePENSad51Qn+WViXwQfW2mmvl15cGAi6hw8Jvce1Rbv2d0mIgQZqgpcaI1dA36qC1NvJbcGwgaLnmABDK/SqnZSqm/KaV2KqV2KKU+Hbm9XCn1pFJqb+S/swyvBgiFNP6gTvgyVHmRU1qdibSK7kidrtuDkelJx7raUheFDiuP72ijrc/Dh06dy73/chorGkqzvTSRB6KBWSo1hJ4kki0zgdNuSbrmt3Ng+g2/QiRjbCx5cq9Zs8qUEsn8BoDPa61fVUoVA5uVUk8CHwGe1lrfrpS6GbgZ+JLRBY1N70iw7MEdLnvQWsvlVZEWnYNerBZFeaFjysflc9mD3Wrh4U+cSYHDSoO0QxIZNqvQgc2iUto97vGHqCjKo8yvzZp0O8bOQS+nLqhI04pEPkr1hDXRCcDTmfYIWutWrfWrkb8PAG8ADcDlwN2Rh90NrDO8GsZP70i87MEXCDHky7+AQ2RGR7+XSrcDyzQF9tGyh1SmJ80Ei6rdEviKrLBENmOluoEmH0YbR7nsyW148waC9A77p93wK0Qyxqa8JfeaTXQC8HSSesUrpeYBq4CXgRqtdWvkrjagJs733KCU2qSU2tTZ2Tntc4w2ME6i5hfgiJQ+iDTpSPCSXz63OhMi26pSnPIWvoyaR5nfJDe8RcsKJfgVZqoocmBR0JlK5teEk9WEj6CUcgP3A5/RWvePv09rrQEd6/u01ndprVdrrVdXVVVN+zzRBsaJzlkfG3Qhm95EeoSD3+nf+PO57EGIbKtJccpb/mV+rUnV/I62enRL8CvMY7NaqHAnf7XGrJrfhI6glLITDnx/p7V+IHJzu1KqLnJ/HdBheDWMz/wmdiY+mvmVdmciTToHPNNudoP8nvAmRLalOuXNk3eZ3+TKHroGE+tzLkSyUmlV6wua83pNpNuDAn4JvKG1/u64ux4Grov8/TrgIcOrYSxwSDSyr4ycjUqvX5EOgWCI7iFfQlkPq0VhtyY/PSkTlFJ3KKV2KaW2KqX+rJQqy/aahDBTqlPePP48y/zakit7iGZ+KyX4FSarcDs4kuRV+3DNb2Yyv2cAHwLOV0q9FvlzKXA78A6l1F5gbeRrw8a6PSRZ8yvBr0iDcCeRcD1hIpL9YMmgJ4EVWusTgD3ALVlejxCmSmXKWyAYIhDSWRtyMdVJqVLqFqXUPqXUbqXURW
Y9pzPJPr+jwa976m43QiSrvMiZdOzm9ZtT8zttqzOt9fNAvG3uFxhewQRj3R4SezMqdFhx2iwS/Iq0iH6QJjrWM9kPlkzRWv913JcvAe/N1lqESIeqcVPeZpcXJvQ90T0mWRxv/CRwi9Y6oJT6FuGT0i8ppY4HrgaWA/XAU0qpJVprw2fWTpslqVZnnYNeSgvseVUaIjIjlbIHsya85dy1nmS7PSilwv+A0u1BpEGio42jXPbkPliy5J+A9dlehBBmir5GOwcS/yyIBr/Zyvxqrf+qtQ5EvnwJaIz8/XLgHq21V2v9JrAPWGPGc6ay4U3qfUU6lBc5GPAE8CUxFTWTZQ8ZlcqZeLnbId0eRFpEL/lVJ1j2UGC3Zq3VmVLqKaXU9hh/Lh/3mC8THlzzuzjHSKo1oRC5oqIouv8j8c+CaIlSFjO/440/KW0ADo+7ryly21FSeb267Bb8QU0wFLNB0ySdA17p9CDSIpWyVbOGXCQy4S2jUgp+U6gbESIRHUnWuyXbQ9NMWuu1U92vlPoIcBlwQaQ9Yaxj3AXcBbB69erEPh2FyAHRD9JkrgJGX6vpzPwqpZ4CamPc9WWt9UORx0x5UhpPKq/X6M/qDQQpdEwfAnQOejmxUfbHCvONb1VbW5pYgilc82v89Zp7we/omXjiP1xFkYP9HYPpWpLIYx0DHsoKE693S7aNUKYopS4Gvgico7UezvZ6hDCbw2ahxGVLOosEpLXbQ4onpc3A7HEPa4zcZpgrkljy+ENMM7EdkLIHkT7JZn611ngDwYTnQEwlJ671jJda5tchmV+RFh39iQ24iEq2ni6DfgQUA09GOrb8NNsLEsJslW7naF/aRHhSSLaYadxJ6bsnnJQ+DFytlHIqpeYDi4FXzHjOZIbxDHkDDPuCEvyKtKhwJxf8BkKakDanTCnnMr++QHLdHiAc/I74g4z4ghQ4ZEeqME+io42jnDYrXTm4+VJrvSjbaxAi3SrcyW1+Hu0rn70+vz8CnIRPSgFe0lr/q9Z6h1LqXmAn4XKIG83o9ADJBb8y3U2kU3m0Tj/B1+xocjQTrc4yLZUfbnzdSKMjsRY3QiSic8DLgsqihB9/jHR7EGJGqihycqAr8RK4aHehLHZ7iHtSqrX+JvBNs58zWuKRSHmWTHcT6VRWYMeiEs/8plIWG08Olj2Ef7hkajqio2ejZ6lCmEFrnXS9WzY3vAmR71LO/OZGt4eMiG4WSqQ8azTzK8GvSAOLRTGrMPFev2b25c65V7w3EMJhtWCxxJurMVn0snR7EpN9hJhO34gfXzCUZPBryVqrMyHyXYXbyZFhX8JtvLKd+c0Gly2JsofBaLcbCX5FeoT3bCUWu5lZ9pB7wa8/hCPJqL563GQfIczSkWSPXwj3+c3Fbg9C5INKtwOtoWc40cuo+Zf5jZY9eBN4n+oc8GJRY7vyhTBbMg0LRoegzdSyh2TfiCqKnFgtivZ+CX6FeZIdbQxj3R7itNEVQqRRsr1+PfmY+U1yw1uFO/z5KkQ6VLqdCZc9+GZ62UOyP5jVoqhyO0eDFSHM0DmY3GhjCH+waA2+oGR/hci00SlvCbY7i2Y/8zL4TbDmVzo9iHRKLvObfDeweHIu+PUFUpveUV3ipF02vAkTRU+mkqn5ddoS30kthDBXdBJjopmkHBtvnBHJdHvoHJQBFyK9yosc9A77CSSQMPKa2Jow517xqZQ9QHjTW4eUPQgTtfd7KbBbcTsT7wg4OjpUNr0JkXEV7iQzv4EQVovCbsLEqGNFUhveZLqbSLPooIueYf+0jx2r+Z2RwW/yZQ8ANSVOqfkVpmrpHaG+zEWk+XxCxurpJPMrRKZF+4Ymk/nNp6wvJP4eFQppuiTzK9IsmRHHM7rswesPpfSD1ZS46Bn2j54ZCGFUc+8IDbOSG5oSvaQo7c6EyDyLRVFe5Ex4yqInEMyrel8YX5o19XtU34gff1BLza9Iq/JxQ8qmM8Mzv8GkW53B2KYkGXQhzN
LcO0JDWUFS31OQxE5qIYT5Kt2OpDa8ufIs82uxKBxWy7Qb3mS6m8iE6CbVhDK/M7vmN9WyBxl0Icwz7AtwZMhH46zkgt9k2ggJIcxX4U58YpQnxQ3Wxzqn3TJtn1+Z7iYyIZWyh2QmAMeTm8FvClH92IhjqfsVxrX0jgAknfkd3UkdkJpfIbKhvMiZROY3/2p+IXySPl2JoEx3E5kwq9AOJNabe7TswYQT1px71fsCqdf8gmR+hTmaeiLBb5KZX2cSO6mFEOarKHIkMeQiPzO/Lrtl2g1vkvkVmWCzWigrtCdW82viRMacC35TbXVWXujAJlPehEmaU878SvArRDZVuh0MeAMJbX72+IN5V/ML4XZn071HdQ54cdgslLgSb/UoRCoSHXThDYSwKLCZMHEw5171qdb8WiyKqmKnZH6FKZp7RrBZ1OgVhURFyx6mq6cTQqRHtNdvoh+m+dbtASJj2BMIfqvczqRaPQqRikSv1oSTo1ZTfienjTKVUr9SSnUopbaPu61cKfWkUmpv5L+zDK8kwutP/TJUdYmLDqn5FSZo7h2httSV9Ez76AeptDoTIjsqoq2TEvkwzdua3wTKHqTHr8iQiiJnwierZnR6gMQyv78GLp5w283A01rrxcDTka8N01qHW52luJOvptg5OpJWCCOae5JvcwbS6kyIbItmfrsS2PSW7cyvUupWpdRWpdRrSqm/KqXqI7crpdQPlFL7IvefZObzuuzWaVudyXQ3kSnl7gTLHvypVQbEMu1RtNbPAUcm3Hw5cHfk73cD68xYTCCkCenUi5mrS5y0S+ZXmCA84CL54FcmvAmRXZXuxDO/Hn9wtFQpS+7QWp+gtX4b8AjwtcjtlwCLI39uAO4080mdNmtCG94k+BWZUFHkoGfYRyikp3ycL5haQ4RYUn3V12itWyN/bwNqzFjM6Oi6FN+Maopd9A77JesmDPEHQ7T3e2hMIfNrtSjsVjVtVkUIkR7RzG8iu8fD442zl/nVWveP+7IIiH76Xw78Roe9BJQpperMel6X3YJ3is/JQDDEkWGfTHcTGVFe5CCkoXfEP+XjUm2IEIvho2itNWMv2EmUUjcopTYppTZ1dnZOeSyfwbnN0c1JMuVNGNHW5yGkk29zFpXITmohRHoUOaw4bZYEN9CEsp35RSn1TaXUYeADjGV+G4DD4x7WFLlt4vcm/Pk63nQb3o4M+dBa2pyJzBgbdDF17BbeE5bd4Lc9ehYa+W9HvAdqre/SWq/WWq+uqqqa8qBG5zZXRQZdyKY3YcRoj9+ywpS+32mf/pJitiilPq+U0kqpymyvRYh0UEpRUeSga5rgV2udkcyvUuoppdT2GH8uj6zjy1rr2cDvgE8kc+xkPl/Hc9ktUw7i6ZAevyKDoiOOpzth9aY4ByKWVBv4PQxcB9we+e9DZizG6NzmmmIZdCGMG+3xm2rmd5pLitmilJoNXAgcyvZahEinCrdz2rKH6B6TdGd+tdZrE3zo74DHgP8AmoHZ4+5rjNxmiumuTsl0N5FJiY44zmjZg1LqD8CLwFKlVJNS6nrCQe87lFJ7gbWRrw0bm9ucatlDJPMrgy6EAc2RzG9daXI9fqNcdmuutjr7HvBFpihTEmImqEhg93g0+Mtyt4fF4768HNgV+fvDwIcjXR9OBfrG7bMxLFr2EK5anCz6GVotmV+RARXRTarTBr/mdXuYNvOrtb4mzl0XmLKCcYyWPcwqdGC3Ktql5lcY0Nw7TFWxM+UPxXAPzdwKfiOXWJu11q9L03ox01UUOdnbPjjlYzwmjko14Hal1FIgBBwE/jVy+2PApcA+YBj4qJlP6rJbCGnwBzUO2+T3g+ZeD0pBbYoJACGSMaswwcyvP/tlD2lhtNuDxaKocjtlxLEwpLk3tR6/UQVZqvlVSj0F1Ma468vAvxMueZjuGDcQbq3EnDlzTF2fEJlS6XbQNehFax13GtRosiWLmV+t9ZVxbtfAjel63tGWjIEgjhjBf2
vvCNXFTuwp9twXIhkOm4Vily2hsodYv6+pyK3g12+s2wNEprxJza8woLlnhOUNpSl/v8tuZdAbMHFFiYlXW6iUWgnMB6JZ30bgVaXUGq1124Rj3AXcBbB69WopjxDHpAq3A28gxJAviNsZ+2MuRzK/WeEcN4ynxGWfdH9rn4e60tQTAEIkq6LIkdGyh5x61fuCxsoeIFz3K90eRKpCIU1Lb2o9fqMSaSCfSVrrbVrraq31PK31PMJtk06aGPgKMVOM7R6PnwiJZn6zWfObLdHPWG+c96mW3hHqy6TkQWROeZFj+lZnGR5vnDFGuz1AuNevdHsQqeoa9OILhlLu9AC52+1BiHwR3UAzVbuz6AlqPga/0Z/ZG2MYj9aalr4R6iXzKzKovMg5faszE1sT5lbwa3DIBYR3p/aNyJQ3kZqmaJszA5nf6RrIZ1skA9yV7XUIkS4JZX79xq80HqtckZ851hWq8JTUEHUG3gOFSFZlAh1aZmzZQ/Qs1EhBc3VkypvU/YpURNucGc385mirMyHyQiKtk6LJlnzO/MY6SY/2Oa+XTg8ig8JlD7647feCIU0gpGd65tdY2QPIlDeRmmYzMr85VvMrRL6JNs2fKvPryefM72jwO/l9qrUv/NlZL5lfkUHlRQ4CIU3/SOzN4j6D3cAmyqlXvdeE3bfRQRdS9ytS0dwzQonLRnGMHdCJKnBY8QTiN5AXQqSXy26l2GmTzG8c0al2sTK/rX2RIT+y4U1k0NjVmtixm9E5EBPlVvA7+sMZqfmNjjiWzK9IXnPvCA2zCg0dw2W3ojX4gpL9FSJbKtyOKTfQjE14y6mPwYwY3+d3oubeEexWRWWRTHcTmVMe+X2LV/drxp6w8XLqVe8LhFAK7NbUJ1DNKrRjtyo6ZMqbSEFzj7EBFzB2ZiqlD0JkT4XbGTeLBOPLHvIw82ubouyhN9zj12KRSZAic6rc4eC3M07sZkZlwHg5FfxGd/IZGb+qlKK62DU6m1yIRGmtae4dodHAZjcY10ZINr0JkTUVRVNnfsfKHnLqYzAjpit7qJPNbiLDxkpWY8duYxMZZ2zwa/wsvKbESbtseBNJ6h8JMOgNGM78TrWZRAiRGRVuJ11Tbngz9zLqscQ5RbeHll6P4fdAIZJVXuTAblW0xdmvNaPLHsya21xfVsDhIyMmrEjkk8M9w4CxNmcwllWRdmdCZE91sZPuId/oLvGJvIEgdqvCmoeX96PvUd4J/zbBkKat3yOb3UTGTXfVfmZvePOb08B4UbWbwz3DOT1oQOSena39ACytLTZ0nLF6Ovn9EyJb6stcaB3/MqrHHxp9reYbh9WCUpPfozoHvARDmjqZ7iayYKqr9tGaXzMSpJBrwa9J0zsWVbvRGvZ3DpqwKpEvtjf34XbamF9RZOg4BQ4JfoXItmgAF+1bO5EnEDStfvBYo5SK9CM/+j3KjD7nQqSqttRFW5zXqxlzIMbLqVe+N2DO3OZF1W4A9nVI8CsSt7Wpj+X1JYZ3OY9uJolzuVUIkX7RTVvRvrUTha805mfmF8LvUxP3JUiPX5FN4bKH6fr8zsia35ApZ+LzK4uwKNgvwa9IkD8Y4o3WflY2lBo+llPKHoTIurqy6TO/+djpIcpln5z5be0N/1tJ2YPIhpoSFwPeAEPeyVPevDN6wptJZQ9Om5W5FUXsleD3mOQNBHlqZzs/eHovD25pZltTX8wXg5n2tg/iDYRY2Wg8+HVNsZNaCJEZbqeNYpeN1t7cz/wqpT6vlNJKqcrI10op9QOl1D6l1Fal1ElmP6fLbp204a25dwS300aJy2b20wkxrdrS+O3OzO7zm1O/4d5AiLKC1MfKjrewyp3RsoedLf1sPtTDvIpCltQUU13sNNSvON9orXlmdycPv97CUzvbGYgR7J6zpIr/uepEKt3mTx7a3twHYErmd3QntbQ6EyKr6ksLaIlbQ5gbmV+l1GzgQuDQuJsvARZH/pwC3Bn5r2mcNsvkzG+kx6
98dolsqBmd0OtlQZX7qPvMLnvIreDXH8RRbE5gs6jazbN7OvAHQ9it6XmD01rz3N4ufv7cAZ7f13XUfcUuG2uX1XDTRUupl80DU9rXMcBXH9zBiwe6KS2wc8nKWi5dWcfb55XT0jvC/s5Btjf38/O/H+CyHzzP+98+mz9tbqKld4T6sgJuumgp61Y1pPz8D25p5ut/2QHAB3/xMl+8+DhDx4tmfqXVmRDZNdUGGo/fnD0mJvge8EXgoXG3XQ78RmutgZeUUmVKqTqtdatZT+q0WyftS2jt84yWiwiRadUl0eA3RubX5LKHnAp+fSaVPQAsrnbjD2oOdg+PboAzU3u/h4/+70Z2tvZTU+LkSxcfx6Ura2nuHWFfxyA7W/r585Zm1m9v5YazFvAv5yykyJndf26tNUeGfLT2eWjr8xAIhWicVUjjrAJKC+yjZ/sPbmnmjid2mxZcxjPsC/CjDfv4+d8PUGC3cuvly7l6zZyjTlYW1xSzuKaYi1fUccnKWq771St8/+m9o/c3945wywPbAFJa44NbmrnlgW2jgWpLn8fQ8UDKHoTIFfVlLna09MW8zxsI4S7K7nuyUupyoFlr/fqEbGsDcHjc102R20wLfl0xMr8tvSMsry8x6ymESEptaQLB70wtezDrTLy5J1zntfa7z9JgcgA35A3wT7/eyMHuIb7zvhN594n1o73n5lYUcfrCSgA+ecFijj+58AAAE2pJREFUvrV+Fz/YsI8/bjrMf79nJRcsqzFlDYka8PjZsKuDR7e28ve9XXGzkcVOGyfPm0VNsYuHXmsezQgYDS7jeWF/F1+6fyuHj4xw5UmN3HLpcdOWMyyvL8VumfyLP+IPcscTu1Na3x1P7J70b2LkeABPbG8D4Lb1u/jNiwfTdvIghJhaXWkBXYO+mJ2EPP7g6IlqOimlngJqY9z1ZeDfCZc8pHrsG4AbAObMmZPU97rsVnqHx8Y/ewNBugZ9stlNZI3baaPIYaU9RseHaPDrMOlKfs4Evw9uaaa1b4T7X23ipQPdhgKGB7c085Nn9o1+bWYAFwxpPn3PFt5o7eeX172d846rjvvYhrICfnDNKq47fR5feXA719+9ietOm8vy+lK+//ReUy/bT8zUzq0o5GfPHmDDrg58wRC1JS6uPLmBhVVu6kpd1JYWYLMomnpGaOoZ5q3uIZ7d08kzuzsnHd9oMDh+fbWlLuZXFvHC/m7mVRRyzw2ncuqCioSP1RanYX1LnE0t04n3fake78EtzXzlwe2jX6fr5EEIMb3RTFKflzkVhUfdZ9YG6+lordfGul0ptRKYD0Szvo3Aq0qpNUAzMHvcwxsjt0089l3AXQCrV6/WyayrwG7lsGdsb0W0PCTaIk6IbKgpccXJ/AZx2iym1aMbCn6VUhcD3weswC+01rencpzopedQ5KVrNGC444ndk2qZzArgok3A33tS45SB73gnz53Fgzeezrcf380vn38TBUTfpcy+bN/cO8Ln7n2NkIayQjsfOm0ul66sY9Xsspj9a1eM2+CltWb+LY/FfJ5mA8Hg+PW19nlo7fNw7pIq7vzgyaMDIRJVX1YQcy2p1lWbfbx0ZJKFEKmpj2QxW/pGJgW/mcr8xqO13gaMfogopd4CVmutu5RSDwOfUErdQ3ijW5+Z9b4Aq+fN4vEdbWx86whvn1cuAy5ETogb/Jo0ATgq5SMppazAjwnvSj0euEYpdXwqx5oqYEhFOrJ5tzyw7agg6dFtrTy4ZdKJeFxOm5WvXnY8FUUOJp6eG/lZY/3bhTSUuGz840vn89XLjufkubMSGtyglJryje9jd2/k6TfaCYYSTzB8+/FdMUst9nYMJh34Atx00VIKJnxgWRR8/sIlSR8rejzrhH+bAruVmy5amtLxzP7dE0KkLjqsIdagC48/lNXgdxqPAQeAfcDPgY+b/QQfOGUulW4n33tyDzCux68EvyKLakqcMa/whudAmPd6NRJGrwH2aa0PaK19wD2Ed6
gmzeyAIV7WLh3ZvGQdGfLFvD3VnzVeRnbAE0hpg12s4NJps/COZTW83tTH9Xdv4sxvbeA/HtrO49tb6Ynx84RCmm1NffzkmX1x2wyl+vOuW9XAbVespKGsAAWUFdgJ6fjlEIkcr6GsAIfNgiKc9bjtipUpZ2nN/t0TQqQuegm/pTf+ZdRcobWep7Xuivxda61v1Fov1Fqv1FpvMvv5ChxW/vWcBbywv5uXDnSPTXeTsgeRRTWl4Slv4UYnY8x+vRope4i1G3VSH8JECvLNvvR800VLj7rUDrmTzYv3sxY4rGxv7juqDCGeEV+Qv2xt4f9eOjjl86QiGvTF6vbgD4Z4+o0O7t10mHs3NXH3iwdRKjxRr8BuxWpRWJTi0JHh0SDfZlEEYmSKjQSD61Y1jK5Ta80n/7CF//nrHs5bWs2yuuR2KvsCIdr6PHz0jHnccumylNcUZfbvnhAidYUOG6UF9kntzrTWePzmZpKORR88dS4/e+4A33tyDwuq3FQUOXI5Gy7yQE2xC18wRO+wn1lFjtHbza7RT/uGt0QK8s0OGKKB0Tce3UnXoI+KIgdfvez4lLN5lW4nnYOTdx+mEsDF+lmtFoU/GOKyHz7PibPLuHbNbI6vK6WmxEmF20kwpNnbMcD25j5eO9zHY9ta6Rvxs6TGzZUnNfDo1tajapyNBlvjg8vx7FYLF6+o5eIVtfgCIbY29fLi/m62t/QRCGqCWhMMac5dWsVZiys5Y1ElL+zrTmswqJTi1stX8ML+br7y4Hbu+5fTEirxiNrTPoAvGEropCMRU508ZJtS6pPAjUAQeFRr/cUsL0mItKsrdU0qe/AFzW2bdKxy2a18/NyFfP0vOznQNTRaJiJEttREev229XuODn79IRwm9uU2EvwmtBs1EekIGNatauDsJVWcdOuT/Nu5C1M+ltaaYpdtUvCbagAX72c977hqHni1if976SBfun/b6OMtKhoch88b3E4b5yyt4sOnzmXN/HKUUpy1uCrjwZbDZmH1vHJWzyuf8nGZCAZnFTm45ZLjuOlPW7l302GuXpN4yx8zJ7tFxTt5yCal1HmEy5JO1Fp7lVKJ7dYU4hhXX1YwqezBE5m+KFlOuGbNHH767H7a+728bXZZtpcj8tz4Ecfjr+TmUtnDRmCxUmo+4aD3auDaVA+WjoChvMhBRZHD0Jjj9dvbONA1xFWrG/nHvm5TArh4P+tHz5jPR06fx87Wfpp7Rmgf8NLR7yEQ0hxfV8LKhlLmlBdOymzmYrA1XibW996TG7lvcxO3P76LdxxfQ0WCI5C3NvdR7LIxd8JO8Bno34DbtdZeAK11R5bXI0RG1Ja6eO1w71G3eSNXonJhvHG2hbO/i/iPh3dIpweRddXFsQdd5EzZg9Y6oJT6BPAE4VZnv9Ja7zBtZSZZWO1mb4rBrzcQ5Lb1b7C0ppj/fs9KbGkakzyeUorl9aUsrzcvE5kPlFJ8c90KLvn+37lt/S6+874TE/q+LYd6WdlQmg+z7JcAZymlvgl4gC9orTdOfJCRpvlC5KL6UhdHhnxHtTYbmxYlmV+A9799Nk+90c7ZSyqzvRSR56pLopnfo6+2ewMhSgvspj2PoWhOa/2Y1npJZEfqN81alJkWVbvZ1zE4aedgIn774kEOHxnhK5cty0jgK4xZXFPMP5+9gD9tbuLlA93TPv6FfV280drPO47P7NS9dFFKPaWU2h7jz+WET3TLgVOBm4B7VYyIX2t9l9Z6tdZ6dVVVVYZ/AiHMF51Y1jpu05tHMr9Hcdmt/Pb6Uzj/uJnxXiiOXU6blfIix+TMr9/csocZ/8pfXO2mb8RP12DsFmPxDPsC/PTZ/ZyxqIKzFksQcKz41PmLaZxVwE1/2kq/xx/3cVpr/ufJPdSWuLgmiRrhXKa1Xqu1XhHjz0OEu7E8EGmh9AoQAiTNI2a8aOuu1nFddqI1v5L5FSL3VBc7JwW/PpPLHmZ88Luo2g3A3o6BpL7v/146SNegj8
+uTW14gsiOAoeV//f+t4Un592/LW7G/9k9nWw+2MMnzl+UL5teHgTOA1BKLQEcQFdWVyREBkSHNozP/HoDkvkVIleFp7xNLnsw82R1xr/yo8HvrtbEg99hX4CfPXuAsxZXTtvNQOSe1fPK+cKFS3l0Wyv/9/KhSfdrrfnuk3tonFXAVatnxzjCjPQrYIFSajvhgTTX6VRqgYQ4xoxmfse1O4v2Wi9xmVdDKIQwR22MEcfhCW+S+U1YbYmLRdVuHtnakvD3/PbFg3QP+fjM2sVpXJlIp385ewHnLq3i1r/sHG1nFvXkzna2NvXxqfMX48iTPp9aa5/W+oORMoiTtNYbsr0mITLBZQ/XEI6fNvnQay3UlbpM6+8thDBPTYmTrkEvgUg/7o4BDz3DPirH9f01asZ/8iuluGp1I68e6mVfAqUPQ94AP3sunPU9ea5kfY9VFoviu1e9jfIiBzf+/lX+vreTniEfoVA46zuvopArTsrd9nBCCPPUlrhGp7x1Dnh5dk8n61Y1YE1iII4QIjOqS1yENKN7te7b1EQwpE1tmTrjg19g9E3uvs1N0z72Ny8e5MiQj8++Q2p9j3XlRQ5+eO0q2vs9fOiXr7Dq1ic55ban2dU2wGfWLpEOHkLkifoy1+g4+odfbyEY0lyRw73RhchnteOmvIVCmj+8cojTFvz/9u4tRuqzDuP499llYQlsF+UMy6GWU8EINlu8wENBerC2tkYxVK0XmmAixBIbq+gNjemNseqNMcHDhfFA2gBNQxO1G4kNScv5DLahSnWXBlrlIEKlwM+L/WO2dXd2687M+2b/zyfZ7MzsZOYZwgO/nXn/738s7xk/umrPUfPTG+dgQkszS+dOYPPeLr5+x9w+h55zF99kw3Mv85E547ll+rvqnNJq4daZ72bHt5ZzuOsch7vOcajrHE2NDdy7cErqaGZWJ5NbR7LrxBkANu/t5H1trcye2JI4lZn15vopjk+df4PnLl6m88wlvnHXvKo+RymGX4AV7W10HDvFH196jY/e3Pteho9uPcL5N678X6cttny1jmxiyaxxLJnlnb3MymhSazPnLr3Jvr+e4cjJ86y/d37qSGbWh4nFiS5On3+D7cdfZ+yo4dy5YFJVn6M0n/sumzeBcaOH8+Tu3pc+dBw9xea9Xay+7SYfBGFmNoRMGdP9TtKPtr3MsAb5kx+zjI0dPYLGBnGw8xwdx07z6fa2qh+cXprht6mxgfsXTaXj2Cn+fuGt+8edvXiZdVsOMW9SC2uWeYcHM7Oh5PpZ3jqOneK2uRMYO3pE4kRm1pfGBjF+9Ai27Ovi6rXggVurfyKq0gy/ACvap3HlWvDU/rdue7b+6SOc+ddlHv/MwtJsfWVmVhZTiuEX4FMZ7fIiab2kLkn7i6+7e/xsnaTjkl6UdGfKnGb1NrG1mSvXgiWzxjJz3KiqP35p1vwCzJ3UwsK2Vn694xXGjGwigJNnL/HU/pOsXT6bBVO83MHMbKiZ2Nr9Tu8NzcNYdvOExGn+xw8i4ns9b5A0H1gJLACmAB2S5kTE1RQBzeptYkt3Zz+7eEZNHr9Uwy/A5z4wg0c2HeThJw/897aF08aweumshKnMzKxWRgxr5Kbxo1g6d0JVT5FaQ/cBGyPi38BfJB0HFgPPp41lVh/zJrVw9NXz3D6/9w0KBqt0w++K9jaWzB7H1avdZ3aVuo8EbvKer2ZmQ9YzX/0Qw/I8qcUaSV8AdgMPR8QZYCrwQo/7dBa3mZXC2uVz+MrSWTVbilq64VcSU8eM7P+OZmY2ZDQ3pXnHV1IH0Ns+Td8Gfgx8B4ji++PAF9/BY68CVgFMn179g4LMUmloEM0Ntets6YZfMzOzeomI5QO5n6SfAFuLq13AtB4/bitue/tjbwA2ALS3t8fgkpqVhz/rNzMzS0DS5B5XPwkcLi4/DayUNELSjcBsYGe985kNVX7n18zMLI3vSlpE97KHE8CXASLiiKQngKPAFWC1d3owqx4Pv2
ZmZglExIMVfvYY8Fgd45iVhpc9mJmZmVlpKKJ+a+QlvQa80s/dxgGv1yHOQOWUJ6cs4Dz9GUieGRExvh5h3in3tSqcp285ZQH3NQXnqcx5+jaovtZ1+B0ISbsjoj11jutyypNTFnCe/uSWpxZye43OU1lOeXLKAvnlqYXcXqPzVOY8fRtsFi97MDMzM7PS8PBrZmZmZqWR4/C7IXWAt8kpT05ZwHn6k1ueWsjtNTpPZTnlySkL5JenFnJ7jc5TmfP0bVBZslvza2ZmZmZWKzm+82tmZmZmVhPZDL+S7pL0oqTjkr6ZQZ6fSzot6XD/9655lmmStkk6KumIpIcS52mWtFPSgSLPoynzFJkaJe2TtDWDLCckHZK0X9Lu1HlqJafOuq8V82TXV3Bn68197VtOnXVfB5Rl0H3NYtmDpEbgJeB2oBPYBTwQEUcTZvowcAH4RUS8N1WOIstkYHJE7JXUAuwB7k/15yNJwKiIuCCpCdgOPBQRL6TIU2T6GtAO3BAR96TKUWQ5AbRHRC77IVZdbp11Xyvmya6vRS53tk7c137zZNNZ93VAWU4wyL7m8s7vYuB4RPw5Ii4DG4H7UgaKiOeAf6TMcF1EvBoRe4vL/wSOAVMT5omIuFBcbSq+kv0WJakN+Djw01QZSiirzrqvFfNk1VdwZxNwXyvIqbPua33kMvxOBf7W43onCf+zyJmkmcD7gR2JczRK2g+cBp6NiJR5fgg8AlxLmKGnAH4vaY+kVanD1Ig7OwDua5/c2fpyXwcoh866r/0adF9zGX5tACSNBjYBayPifMosEXE1IhYBbcBiSUk+upJ0D3A6IvakeP4+fDAibgE+BqwuPuKzknFfe+fOWq5y6az72q9B9zWX4bcLmNbjeltxmxWKtT+bgF9FxObUea6LiLPANuCuRBGWAJ8o1gBtBJZJ+mWiLABERFfx/TSwhe6PHIcad7YC97Uid7b+3Nd+5NhZ97V31ehrLsPvLmC2pBslDQdWAk8nzpSNYgH8z4BjEfH9DPKMlzSmuDyS7oMo/pQiS0Ssi4i2iJhJ99+bP0TE51NkAZA0qjhgAkmjgDuALI5orjJ3tg/ua2XubBLuawU5ddZ9raxafc1i+I2IK8Aa4Hd0LzR/IiKOpMwk6TfA88BcSZ2SvpQwzhLgQbp/49pffN2dMM9kYJukg3T/o/psRCTf/iQTE4Htkg4AO4FnIuK3iTNVXW6ddV8rcl8rG/KddV/7lVNn3dfKqtLXLLY6MzMzMzOrhyze+TUzMzMzqwcPv2ZmZmZWGh5+zczMzKw0PPyamZmZWWl4+DUzMzOz0vDwa2ZmZmal4eHXzMzMzErDw6+ZmZmZlcZ/AElq3iA3AK7uAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "P_1Ag-VPkMdT", - "colab_type": "text" - }, - "source": [ - "Each of them perfectly interpolates the data points, yet they clearly are different models. (In fact, there are infinitely many 10th degree polynomials that exactly interpolate any ten data points.) They make identical predictions for the data we fit them to, but for any other value of `x` they produce different predictions. This is called *epistemic uncertainty*. It means the data does not fully constrain the model. Given the training data, there are many different models we could have found, and those models make different predictions.\n", - "\n", - "The ideal way to measure epistemic uncertainty is to train many different models, each time using a different random seed and possibly varying hyperparameters. Then use all of them for each input and see how much the predictions vary. This is very expensive to do, since it involves repeating the whole training process many times. Fortunately, we can approximate the same effect in a less expensive way: by using dropout.\n", - "\n", - "Recall that when you train a model with dropout, you are effectively training a huge ensemble of different models all at once. Each training sample is evaluated with a different dropout mask, corresponding to a different random subset of the connections in the full model. Usually we only perform dropout during training and use a single averaged mask for prediction. But instead, let's use dropout for prediction too. We can compute the output for lots of different dropout masks, then see how much the predictions vary. 
This turns out to give a reasonable estimate of the epistemic uncertainty in the outputs.\n", - "\n", - "### Uncertain Uncertainty?\n", - "\n", - "Now we can combine the two types of uncertainty to compute an overall estimate of the error in each output:\n", - "\n", - "$$\\sigma_\\text{total} = \\sqrt{\\sigma_\\text{aleatoric}^2 + \\sigma_\\text{epistemic}^2}$$\n", - "\n", - "This is the value DeepChem reports. But how much can you trust it? Remember how I started this tutorial: deep learning models should not be used as black boxes. We want to know how reliable the outputs are. Adding uncertainty estimates does not completely eliminate the problem; it just adds a layer of indirection. Now we have estimates of how reliable the outputs are, but no guarantees that those estimates are themselves reliable.\n", - "\n", - "Let's go back to the example we started with. We trained a model on the SAMPL training set, then generated predictions and uncertainties for the test set. Since we know the correct outputs for all the test samples, we can evaluate how well we did. Here is a plot of the absolute error in the predicted output versus the predicted uncertainty." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "r3jD4V4rkMdU", - "colab_type": "code", - "outputId": "387de7a2-73e9-40e9-fab4-5ea39fba4db8", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 279 - } - }, - "source": [ - "abs_error = np.abs(y_pred.flatten()-test_dataset.y.flatten())\n", - "plot.scatter(y_std.flatten(), abs_error)\n", - "plot.xlabel('Standard Deviation')\n", - "plot.ylabel('Absolute Error')\n", - "plot.show()" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3de7gcdZ3n8feHECTIJQhnHAjEoGJQFBI43IQdJQ6CipJFEBhUcHAzjLLo7MizYXQR2J01LF52EMWJyCKug6yImShoYCWIsHI5ISEQQjQDMiSwy+ESLiZqEr/7R9WBTqe7T5/TXdVVXZ/X8/Rzuquqq77d55z6Vv2uigjMzKy6tul1AGZm1ltOBGZmFedEYGZWcU4EZmYV50RgZlZx2/Y6gLHafffdY9q0ab0Ow8ysVJYsWfJ0RAw0Wle6RDBt2jSGhoZ6HYaZWalIeqzZusyKhiRtL+keSfdLWiHpogbbnClpWNKy9PHxrOIxM7PGsrwj+D0wKyJekjQRuEPSTyLirrrtrouIczKMw8zMWsgsEUTSZfml9OXE9OFuzGZmBZNpqyFJEyQtA54CbomIuxts9kFJyyVdL2nvJvuZI2lI0tDw8HCWIZuZVU6miSAiNkfEDGAv4FBJb63b5EfAtIg4ALgF+HaT/cyPiMGIGBwYaFjpbWZm45RLq6GIWCdpMXAc8GDN8mdqNrsS+G95xGNmViYLlq7l0kWreGLdBvacPInzjp3O7JlTurb/LFsNDUianD6fBBwDPFy3zR41Lz8ArMwqHjOzMlqwdC3n3/AAa9dtIIC16zZw/g0PsGDp2q4dI8uioT2AxZKWA/eS1BH8WNLFkj6QbnNu2rT0fuBc4MwM4zEzK51LF61iw8bNWyzbsHEzly5a1bVjZNlqaDkws8HyC2qenw+cn1UMZmZl98S6DWNaPh4ea8jMrMD2nDxpTMvHw4nAzKzAzjt2OpMmTthi2aSJEzjv2OldO0bpxhoyM6uSkdZBWbYaciIwMyu42TOndPXEX89FQ2ZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhXnRGBmVnFOBGZmFedEYGZWcU4EZmYV50RgZlZxTgRmZhWXWSKQtL2keyTdL2mFpIsabPMqSddJWi3pbknTsorHzMway/KO4PfArIg4EJgBHCfp8LptzgKei4g3Al8BLskwHjMzayCzRBCJl9KXE9NH1G12AvDt9Pn1wLskKauYzMxsa5nWEUiaIGkZ8BRwS0TcXbfJFOBxgIjYBDwP7NZgP3MkDUkaGh4ezjJkM7PKyTQRRMTmiJgB7AUcKumt49zP/IgYjIjBgYGB7gZpZlZxubQaioh1wGLguLpVa4G9ASRtC+wCPJNHT
GZmlsiy1dCApMnp80nAMcDDdZstBM5In58E3BoR9fUIZmaWoW0z3PcewLclTSBJOP8rIn4s6WJgKCIWAt8CviNpNfAscGqG8ZiZWQOZJYKIWA7MbLD8gprnvwNOzioGMzMbnXsWm5lVnBOBmVnFORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFZZYIJO0tabGkhyStkPSpBtu8U9Lzkpaljwsa7cvMzLKzbYb73gT8bUTcJ2knYImkWyLiobrtfhERx2cYh5mZtZDZHUFEPBkR96XPXwRWAlOyOp6ZmY1PLnUEkqYBM4G7G6w+QtL9kn4iaf8m758jaUjS0PDwcIaRmplVT+aJQNKOwA+AT0fEC3Wr7wNeFxEHAl8FFjTaR0TMj4jBiBgcGBjINmAzs4rJNBFImkiSBL4bETfUr4+IFyLipfT5TcBESbtnGZOZmW0py1ZDAr4FrIyILzfZ5k/T7ZB0aBrPM1nFZGZmW8uy1dCRwEeAByQtS5f9HTAVICK+AZwE/LWkTcAG4NSIiAxjMjOzOpklgoi4A9Ao21wOXJ5VDGZmNjr3LDYzqzgnAjOzinMiMDOrOCcCM7OKcyIwM6u4lolA0gRJD+cVjJmZ5a9lIoiIzcAqSVNzisfMzHLWTj+CXYEVku4BfjuyMCI+kFlUZmaWm3YSwX/KPAozM+uZURNBRPxc0muBQ9JF90TEU9mGZWZmeRm11ZCkDwH3ACcDHwLulnRS1oGZmVk+2ika+ixwyMhdgKQB4H8D12cZmJmZ5aOdfgTb1BUFPdPm+8zMrATauSP4qaRFwLXp61OAm7ILyczM8tQyEaSTxlxGUlF8VLp4fkT8MOvAzMwsHy0TQUSEpJsi4m3AVlNNmplZ+bVT1n+fpENG38zMzMqonTqCw4DTJT1G0rNYJDcLB2QamZmZ5aKdOoI5wGP5hGNmZnlrp47ga2kdgZmZ9aHM6ggk7S1psaSHJK2Q9KkG20jSZZJWS1ou6aCxHsfMzDqTZR3BJuBvI+I+STsBSyTdEhEP1WzzHmDf9HEYcEX608zMctJOIjh2PDuOiCeBJ9PnL0paCUwBahPBCcA1ERHAXZImS9ojfa+ZmeWgadGQpFkAEfEYyTATj408gIPHchBJ04CZwN11q6YAj9e8XpMuq3//HElDkoaGh4fHcmgzMxtFqzqCL9Y8/0Hdus+1ewBJO6bv/3REvDCG2F4WEfMjYjAiBgcGBsazCzMza6JV0ZCaPG/0uvEOpIkkSeC7EdGoZ/JaYO+a13uly6wCFixdy6WLVvHEug3sOXkS5x07ndkzt7ohNLOMtbojiCbPG73eStoH4VvAyoj4cpPNFgIfTVsPHQ487/qBaliwdC3n3/AAa9dtIIC16zZw/g0PsGCprwPM8tbqjuD1khaSXP2PPCd9vU8b+z4S+AjwgKRl6bK/A6YCRMQ3SEYxfS+wGlgPfGzMn8BK6dJFq9iwcfMWyzZs3Myli1b5rsAsZ60SwQk1z79Yt67+9VYi4g5GKUJKWwt9crR9Wf95Yt2GMS03s+w0TQQR8fM8A7Fq2XPyJNY2OOnvOXlSD6IxqzbPNGY9cd6x05k0ccIWyyZNnMB5x07vUURm1dVOhzKzrhupB3CrIbPeazsRSNohItZnGYxVy+yZU3ziNyuAUROBpLcDVwI7AlMlHQj8VUR8IuvgrBrcn8Cst9qpI/gKyXhDzwBExP3An2UZlFWH+xOY9V5blcUR8Xjdos0NNzQbo1b9CcwsH+3UETyeFg9FOmTEp4CV2YZlVeH+BGa9184dwdkknb6mkIwDNANw/YB1RbN+A+5PYJafdhLB9Ig4PSJeGxF/EhEfBt6cdWBWDe5PYNZ77RQNfRWon0Ky0TKzMXN/gvFzayvrlqaJQNIRwNuBAUn/o
WbVzsCExu8yG7s8+xP0y8lzpLXVSEX7SGsroJSfx3qrVdHQdiR9B7YFdqp5vACclH1oZt3VT01V3drKumm0Qed+LunqdHpKs1IauQtoNMhdWYe+dmsr66Z26giulrTVRDQRMSuDeMy6qr4IpZEynjw9eqt1UzuJ4DM1z7cHPghsyiYcs+5qVIRSr4wnz/OOnb5VgnNrKxuvURNBRCypW3SnpHsyisesq0a72i/rydOtrayb2hl07jU1L7cBDgZ2ySwisy5qVoQCMKXkJ0+P3mrd0k7R0BKSyepFUiT0KHBWlkGZdUuzIpQvnPg2n0TNUu0UDbUzUb1ZodT2F9hl0kS2n7gN69ZvdBGKWQOtOpSd2OqNEXFDq/WSrgKOB56KiLc2WP9O4J9J7jAAboiIi0cL2Gw09S2F1m3YyKSJE/jKKTOcAMwaaHVH8P4W6wJomQiAq4HLgWtabPOLiDh+lP2YjUmrzlZOBGZba9Wh7GOd7Dgibpc0rZN9WPGUYYgGd7YyG5tRRx+VtIukL0saSh9fktStVkNHSLpf0k8k7d+lfVpGyjJEg4e2Nhubdoahvgp4EfhQ+ngB+B9dOPZ9wOsi4kCS0UwXNNtQ0pyRRDQ8PNyFQ9t4NCtyuXDhih5F1JiHtjYbm3YSwRsi4vMR8Uj6uAh4facHjogXIuKl9PlNwERJuzfZdn5EDEbE4MDAQKeHtnFqVrSybsPGQt0VzJ45hS+c+DamTJ6ESPoLdLu56IKlazly3q3sM/dGjpx3a6E+v9lYtdOPYIOkoyLiDgBJRwIdF7ZK+lPg/0VESDqUJCk90+l+LTutOmcVrSI2y85WHgLa+k07dwR/DXxN0m8kPUbSEujs0d4k6Vrgl8B0SWsknSXpbEkj7z0JeFDS/cBlwKkRsdXgdlYcrYpWqlQR6yGgrd+006FsGXCgpJ3T1y+0s+OIOG2U9ZeTJBUridkzp3DRj1bw3PqNW62rUkWsWyVZv2mn1dCn0iTwIvBlSfdJenf2oVkRff79+1e+ItatkqzftFM09JfpXcC7gd2AjwDzMo3KCiuPitjxyLPy1q2SrN+0U1ms9Od7gWsiYoUktXqD9beijXqZd+Wth4C2ftPW6KOSbgb2Ac6XtBPwx2zDMmtfL4aUKFoyNOtEO4ngLGAG8EhErJe0G9DR8BNm3eTKW7POtNNq6I/pmEEfTucuviMifph1YGbt8vy9Zp1pp9XQ10n6DTwAPAj8laSvZR2YWbtceWvWmXaKhmYBbx7p7CXp28BDmUZlNgauvDXrTDuJYDUwFXgsfb038OvMIjIbB1femo1fqxnKfkQyAc1OwEpJ96SvDwPuySc8s3Ipw3wNZvVa3RF8scU6jwlkVseD0VlZtZqh7OeNlks6CjgNuD2roMygfFfXniLTyqqdOgIkzQT+AjiZZLL5H2QZlFkZr67dn8HKqlUdwZtIrvxPA54GrgMUEUfnFFullO3qN2tlvLp2fwYrq1b9CB4maTp6fEQcFRFfBTa32N7GqSxzAeepjFfX7s9gZdUqEZwIPAkslvRNSe/ilQHorIs80cnWyjjUc1FHZjUbTavK4gXAAkmvBk4APg38iaQrgB9GxM05xdj3ynj1m7Xzjp2+RR0BlOPq2v0ZrIxGHWIiIn4bEf8UEe8H9gKWAv8x88gqpIxXv1nz1bVZftpqNTQiIp4D5qcP65JuXv2WvdK57PGbldGYEoFlo1tj5RS9yeVoJ/mix2/Wr5SOJVcag4ODMTQ01OswCunIebc2bL44ZfIk7pw7qwcRvaL+JA/JXU9tcU+R4zcrO0lLImKw0bp25iwe70GvkvSUpAebrJekyyStlrRc0kFZxVIVRa50bqdlVJHjN+tnWRYNXQ1cDlzTZP17gH3Tx2HAFelPG6cidWiqLwZqFBdseZIvUvxF4PoSy0tmdwQRcTvwbItNTgCuicRdwGRJe2QVTxUUpUNTow5yzTqg1J7ks4h/wdK1HDnvVvaZeyNHzru1N
J303MnQ8pRZImjDFODxmtdr0mVbkTRH0pCkoeHh4VyCK6OiNLlsVAwUbN0bsf4k3+34y3wydSdDy1MpWg1FxMtNVgcHB8tVu52zInRoalamHyQn91ZFHd2MvwjjFY23eMf1JZanXiaCtSSznY3YK11mJdesrD/v1j+tTqZ5lL930hzW9SWWp14WDS0EPpq2HjoceD4inuxhPNYlRamraHbS3GXSxFyKjDop3inKd2jVkGXz0WuBXwLTJa2RdJaksyWdnW5yE/AIyZzI3wQ+kVUslq+i1FU0O5lK5FL+3knxTlG+Q6uGzIqGIuK0UdYH8Mmsjm/dNdailCLUVTTrsf031y1ruH23y987Ld4pwndo1VCKymLrrTIP/dDoZHrpolW5lL+XdQRVq55e1hFYSfRbU8a8yt9dvGNl4TsCG1W/NWXs1iB/7R7LJ34rOieCHOQ5VEAWx+rHpozdPkF7OAgrMxcNZSzP3q1ZHctNGVsrcw9mM3AiyFye5etZHctl3a31Wx1KXso6DlQ/ctFQxvIsX8/yWP1U1t3tYpx+q0PJQ5lbovUj3xFkLM/5iD338eiyKMZp9b37qrcx30UVixNBxvIsX3dZ/uiyOAE1+96P3m/AdQdN+C6qWJwIMpZn+XqVy/LbvfLO4gTU7Htf/PCwr3qb8N1rsbiOoOQalXdXbX7fsZQ3Z9UUtlEdSl5DWZSRe10Xi+8IMpZl00I3W0xcuHBF21feeRaf+aq3uSrfvRaR7wgyluXkKEWYeKXXFixdy7oNGxuua3TlnWevYl/1ttZPLdHKzokgY1lWirnCjZbl7c2uvLtxAmqnCWqeScesE04EGctyeIayDP2Q5fALrZJeVlfeY6mT8FWvlYHrCDKWZZl0GZqLZl2P0Szp7brDxMxOwG4Db/3GiSBjWVaKlaHCLeuTZrNk+Pn379+V/TfiIjnrNy4aykGWxQNFL3rI+qTZi3L4shTJmbXLicAy1c5Js9M6hLyTYaPWQAKO3m8gtxjMuslFQ5ap0eoxytgXYvbMKXzw4CmoZlkAP1iyttBxmzWTaSKQdJykVZJWS5rbYP2ZkoYlLUsfH88yHsvfaPUYZa14XfzwMFG3bCxxezA6K5LMioYkTQC+BhwDrAHulbQwIh6q2/S6iDgnqzis91oV3Yy1DiHvmcCaHa+Tug8PwWxFk+UdwaHA6oh4JCL+AHwPOCHD41kJNatg3Uba6io572KkVsfrZPiIst4FWf/KMhFMAR6veb0mXVbvg5KWS7pe0t6NdiRpjqQhSUPDw8NZxGo90qgOAWBzxFYn+bxPoK2O10kfjiI1P3URlUHvK4t/BEyLiAOAW4BvN9ooIuZHxGBEDA4MuGVGPxmpQ5ggbbWu/iSf9wm01fE66cNRlMHoylhRb9nIMhGsBWqv8PdKl70sIp6JiN+nL68EDs4wHiuo2TOn8Meor3pN1J6M8z6Bjna82TOncOfcWTw6733cOXdW2+X7RekR7iIqG5FlIrgX2FfSPpK2A04FFtZuIGmPmpcfAFZmGI8VWDsn+bxPoFkdryg9wotURGW9lVmroYjYJOkcYBEwAbgqIlZIuhgYioiFwLmSPgBsAp4Fzswqnl7Lu7VL2bQzZHPevYizPF6rllR5/a24h7SNUDS5JS+qwcHBGBoa6nUYY1LfXBCSk1wnV4H9klhqP8fkHSYSAc9v2NjWZ+qX76BWFn8rRTiW9Z6kJREx2Gidh5jIQbcnkOmXduj1n+O59RuZNHECXzllxqifo1++g3p5Tjbk+RJshBNBDrpdFtsvM5N18jn65Tuol3e5fdEHLbR8OBHkoNtlsf1SydfJ58jjO+hF0ZPL7a0Xet2PoBK63fqkKO3QO9XJ58j6O+hVG/uiNC21anEiyEG3mwv2y8mik8+R9XfQrOjpwoUrMu2JW5SmpVYtbjVUUv3SYqaTz5Hld7DP3Bu3Gl20EbeysbJo1WrIicCsgSPn3dqwrL6RKZMncefcWRlH1Fy/X
BRYtlolAhcNWSH1ejC0ZoPhNdLLSnqPF2Td4FZDNqpezAHQ6z4CjdrYr//DJp5bv3GrbXtZSd+vzWgtX04ELfiWu/OT8ni+w6Kc3Orb2DfridvLSvp+aUpsveWioSZ8y53oZITKzy14gL+5btmYv8OintyK2KKnX5oSW2/5jqCJolyV9tp4T8oLlq7lu3f9a9N5fVt9h0XuVFW0nrjtDNZnNhrfETRR1KvSvI33ivPSRauaNr8c7TssYz+JXlVuF/EuxcrHdwRNFPmqNE/jveJsdbIf7Tvs5mBoedTz9Lpyu2h3KVY+TgRN+JY7Md6TcrNEKmj4HTY6YXfaNj+vE/R4ihH7qSFCP32WqnKHshb66Q+8101AIUkCpx8+lf8y+21tbRskRR3jjbVZp7BudwBr1gtZwKPz3rfV8n6aB6CfPku/83wE49Qvt9y9KLoYy51EoyvqkRNrJ7HmVc8z1mLEfmqI0E+fpcqcCCqgV/+s7SbS0U7M4401r3qesRYj9lNDhGYxr123gX3m3lj6O+mqqESroV4PV9BrRT/xtHNiHk+sebU+GmvLnaK3/R/L/0urmKvc/6Zs+j4RuGNY8U887YzrM55Y82xaOXvmFO6cO4tH572PO+fOanmMIjePHev/Szu/u3Y7IFrvZFo0JOk44B+ACcCVETGvbv2rgGuAg4FngFMi4jfdjMFlmMVvAVVbn7B23YaXK4pHdBJrEet5ijxX8Fj/X+o/y3j7jlhvZZYIJE0AvgYcA6wB7pW0MCIeqtnsLOC5iHijpFOBS4BTuhlH0YtF8lDkE8+I2hN2P7XWaqaICQrG9/9S+1matdQqyt2nNZblHcGhwOqIeARA0veAE4DaRHACcGH6/HrgckmKLrZpdcewRFFPPI2UKdZ+0+n/S9HvPq2xLOsIpgCP17xeky5ruE1EbAKeB3brZhBFLo81K5pO/1885EU5laL5qKQ5wByAqVOnjum9ZSgWMSuKbvy/+I6ufLJMBGuBvWte75Uua7TNGknbAruQVBpvISLmA/Mh6Vk81kD8h2nWPv+/VE+WRUP3AvtK2kfSdsCpwMK6bRYCZ6TPTwJu7Wb9gJmZjS6zO4KI2CTpHGARSfPRqyJihaSLgaGIWAh8C/iOpNXAsyTJwszMcpRpHUFE3ATcVLfsgprnvwNOzjIGMzNrre97FpuZWWtOBGZmFVe6+QgkDQOPdWl3uwNPd2lfWSl6jI6vM46vM0WPD4oT4+siYqDRitIlgm6SNNRsooaiKHqMjq8zjq8zRY8PyhGji4bMzCrOicDMrOKqngjm9zqANhQ9RsfXGcfXmaLHByWIsdJ1BGZm5jsCM7PKcyIwM6u4vk0Eko6TtErSaklzG6x/naSfSVou6TZJe9Wsu0TSg+mjqzOm1RzjKklPSXqwyXpJuiyNf7mkg2rWnSHp1+njjEbv73F8P5W0TtKPs4itk/gkzZD0S0kr0uWF+v2mf5f3SVqWxnh2keKrWb+zpDWSLi9afJI2p9/fMkn1A10WJcapkm6WtFLSQ5KmZRVnWyKi7x4kg9z9C/B6YDvgfuAtddt8HzgjfT4L+E76/H3ALSTjML2aZBTVnTOI8c+Ag4AHm6x/L/ATQMDhwN3p8tcAj6Q/d02f71qU+NJ17wLeD/w4w9/xeL+/NwH7ps/3BJ4EJhcovu2AV6XPdwR+A+xZlPhq1v8D8E/A5UX6/abrXsrq766LMd4GHFPze94hj5ibPfr1juDlaTIj4g/AyDSZtd4C3Jo+X1yz/i3A7RGxKSJ+CywHjut2gBFxO8mIq82cAFwTibuAyZL2AI4FbomIZyPiOZKkVaT4iIifAS92O6ZuxBcRv4qIX6f7eAJ4CmjY27JH8f0hIn6fbvMqMrpr7+T3K+lg4LXAzVnE1ml8eRlvjJLeAmwbEbek+3kpItbnEHJT/ZoI2pkm837gxPT5vwV2krRbuvw4STtI2h04mi0n2MlLs8/QzmfLQ1HiaGbU+CQdSnIF/
i85xjWiaXyS9pa0PF1/SZqwChGfpG2ALwGf6UFMtVr9freXNCTpLkmz8w/tZc1ifBOwTtINkpZKulTShIZ7yEm/JoJ2fAZ4h6SlwDtIZkvbHBE3kwyd/X+Aa4FfApub7sVKKb16/A7wsYj4Y6/jqRURj0fEAcAbgTMkvbbXMdX4BHBTRKzpdSAtvC6SIR3+Avjvkt7Q64DqbAv8G5Jz0CEkRdhn9jKgfk0Eo06TGRFPRMSJETET+Gy6bF368+8jYkZEHENSvverfMLeQrPP0M4UoHkoShzNNI1P0s7AjcBn01v2XmjrbxR4kOSkkbdm8R0BnCPpN8AXgY9Kmpd/eM2/v4gY+fkISVn8zLyDSzWLcQ2wLC263gQsIKlr6Jl+TQSjTpMpaff0NhfgfOCqdPmEtIgISQcAB5BhWWgLC0n+ySTpcOD5iHiSZMa3d0vaVdKuwLvTZUWJrygaxpf+PfyQpOz2+gLGt5ekSQDp7/coYFVR4ouI0yNiakRMI7mivSYitmqV16v40v+LV0HyPw4cCTzUg/iaxkhyfposaaRualYPYwQynqGsV6K9aTLfCXxBUgC3A59M3z4R+IUkgBeAD6dZu6skXZvGsLukNcDn02MTEd8gKZ56L7AaWA98LF33rKT/TPLHBHBxRLSqsMo1vvS9vwD2A3ZM33tWRHQ1WXUQ34dIWnvsJunMdNmZEbGsIPG9GfhS+ncp4IsR8UA3Y+swvlx0+P39o6Q/klzozouITE6yHfwPb5b0GeBnSk40S4BvZhFjuzzEhJlZxfVr0ZCZmbXJicDMrOKcCMzMKs6JwMys4pwIzMwqzonACkXSZ/XKyKDLJB2WLv+0pB26eJzfpO3Mx/v+M9Vg5M10+XA6dMCvJS2S9PYOjnOxpD9vI5Y9a15fmY5nY9aWvuxHYOUk6QjgeOCgiPh9eqLeLl39aeB/krTH7kVsEyKi3aFGrouIc9L3HQ3cIOnoiFg51uNGxAVtbHYmSQ/kJ9L3fHysx7Fq8x2BFckewNMjo29GxNMR8YSkc0mGjF4saTGApCvSgcVWSLpoZAfplf5FSsb0f0DSfuny3ZSM/75C0pUknbVG3rNA0pJ03Zya5S9J+pKk+4EjJH1M0q8k3UPSY3VUEbGYZM7aOek+36BkvoYlkn4haT9Ju0h6TGlPd0mvlvS4pImSrpZ0Urr8Akn3KpknY37aY/UkYBD4bnoHNUnJ/BqD6XtOS7+HByVdUvfZ/l7S/UoGZyvSeEaWMycCK5Kbgb3Tk+3XJb0DICIuI7naPToijk63/Ww6sNgBJIMHHlCzn6cj4iDgCl4ZJfPzwB0RsT/JEBNTa7b/y4g4mOSEeq7SIUZI5qO4OyIOJBmh9CKSBHAUyXDl7bqPpKc1JEnh36fH+wzw9Yh4HlhGMvghJHdFiyJiY91+Lo+IQyLircAk4Ph0mIwh4PR0fKwNIxunxUWXkAxhMAM4RK+Mxvlq4K70s90O/LsxfB7rM04EVhgR8RJwMMnV8zBwXc0wEPU+JOk+YCmwP1uemG9Ify4BpqXP/4ykaImIuBF4rmb7c9Or/rtIBgnbN12+GfhB+vww4LaIGI5kjovrxvDRBCBpR+DtwPclLQP+keQuiHR/I7Olndpk/0dLulvSAyQn9/1HOe4hNTFvAr5L8j0A/AEYmUGu9nuyCnIdgRVKWloxOXgAAAG1SURBVA5/G3BbesI7A7i6dhtJ+5AO4RsRz0m6Gti+ZpORiV02M8rfuKR3An8OHBER6yXdVrOv342hXqCVmcBKkguvdRExo8E2C4H/Kuk1JMnw1tqVkrYHvg4MRsTjki5ky888VhvjlfFlRv2erL/5jsAKQ9J0SfvWLJoBPJY+fxHYKX2+M/Bb4Pm0bPs9bez+dpLx6ZH0HpJpPgF2AZ5Lk8B+JFMKNnI3SRHUbpImAie3+ZneQXKH882IeAF4VNLJ6TpJOhBevhu6l2QKyB83SEAjJ/2n0zuLk2rW1X43te5JY95dy
cQnpwE/byduqxZfBViR7Ah8VdJkYBPJqI0jlbfzgZ9KeiIijlYyodDDJDNA3dnGvi8CrpW0gmTSoX9Nl/8UOFvSSpLhnhvOT5AOcXwhyURF60jK9Js5RdJRwA7Ao8AHa1oMnQ5cIelzJCNVfo9kVjxIioO+TzKiZf3x10n6JknroP/LK6PPQnLH9A1JG0jmC6iNeS7JVKwCboyIf24Rt1WURx81M6s4Fw2ZmVWcE4GZWcU5EZiZVZwTgZlZxTkRmJlVnBOBmVnFORGYmVXc/wcXF0mzW7RppQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "rdGOqq_DkMdX", - "colab_type": "text" - }, - "source": [ - "The first thing we notice is that the axes have similar ranges. The model clearly has learned the overall magnitude of errors in the predictions. There also is clearly a correlation between the axes. Values with larger uncertainties tend on average to have larger errors.\n", - "\n", - "Now let's see how well the values satisfy the expected distribution. If the standard deviations are correct, and if the errors are normally distributed (which is certainly not guaranteed to be true!), we expect 95% of the values to be within two standard deviations, and 99% to be within three standard deviations. Here is a histogram of errors as measured in standard deviations." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "IrD6swafkMdY", - "colab_type": "code", - "outputId": "9e8d5c78-2a0d-4f20-dcd1-7f7c80d9878d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 265 - } - }, - "source": [ - "plot.hist(abs_error/y_std.flatten(), 20)\n", - "plot.show()" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAM1UlEQVR4nO3dfYxlhVnH8e+vLNgipFR3UhHYDiYNCZJWcIJUktqUNsHSgIn8scRWqDWbaLTUmDRbTWz0L4ym8aVGsilYVEJrKCpC0W5aGmLSortIW2BpixXbrdTdlgh9MeKaxz/mtl1uZubevefu3PuE7yeZcF/OzHl2z+yXM+fecyZVhSSpnxctegBJ0mwMuCQ1ZcAlqSkDLklNGXBJamrHdq5s586dtbq6up2rlKT2Dh48+LWqWhl/fFsDvrq6yoEDB7ZzlZLUXpJ/3+hxD6FIUlMGXJKaMuCS1JQBl6SmDLgkNWXAJakpAy5JTRlwSWrKgEtSU9t6JuYQq3vvnflzn7zpqjlOIknLwT1wSWrKgEtSUwZckpoy4JLUlAGXpKYMuCQ1ZcAlqSkDLklNGXBJasqAS1JTBlySmpoY8CS3JjmS5JHjHvu9JI8n+UySv05y1skdU5I0bpo98A8AV449th+4qKpeBXweePec55IkTTAx4FX1APD02GMfrapjo7ufAs49CbNJkrYwj2PgvwDcN4evI0k6AYMCnuQ3gWPA7VsssyfJgSQHjh49OmR1kqTjzBzwJDcAbwZ+rqpqs+Wqal9VrVXV2srKyqyrkySNmek38iS5EngX8FNV9e35jiRJmsY0byO8A/gkcEGSw0neDrwPOBPYn+ThJDef5DklSWMm7oFX1XUbPHzLSZhFknQCPBNTkpoy4JLUlAGXpKYMuCQ1ZcAlqSkDLklNGXBJasqAS1JTBlySmjLgktSUAZekpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlMGXJKaMuCS1JQBl6SmDLgkNWXAJakpAy5JTRlwSWpqYsCT3JrkSJJHjnvsB5LsT/KF0X9fdnLHlCSNm2YP/APAlWOP7QU+VlWvBD42ui9J2kYTA15VDwBPjz18DXDb6PZtwM/MeS5J0gSzHgN/eVU9Nbr9VeDlc5pHkjSlwS9iVlUBtdnzSfYkOZDkwNGjR4euTpI0MmvA/zPJ2QCj/x7ZbMGq2ldVa1W1trKyMuPqJEnjZg343cD1o9vXA387n3EkSdOa5m2EdwCfBC5IcjjJ24GbgDcm+QLwhtF9SdI22jFpgaq6bpOnrpjzLJKkE+CZmJLUlAGXpKYMuCQ1ZcAlqSkDLklNGXBJasqAS1JTBlySmjLgktSUAZekpiaeSi9Y3XvvzJ/75E1XzXESSfoe98AlqSkDLklNGXBJasqAS1JTBlySmjLgktSUAZekpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlODAp7k15I8muSRJHckefG8BpMkbW3mgCc5B3gHsFZVFwGnALvnNZgkaWtDD6HsAF6SZAdwOvAfw0eSJE1j5oBX1VeA3we+BDwFPFNVHx1fLsmeJAeSHDh69Ojsk0qSnmfIIZSXAdcA5wM/DHx/kreML1dV+6pqrarWVlZWZp9UkvQ8Qw6hvAH4t6o6WlX/C9wF/OR8xpIkTTIk4F8CLktyepIAVwCH5jOWJGmSIcfAHwTuBB4CPjv6WvvmNJckaYJBv9S4qt4DvGdOs0iSToBnYkpSUwZckpoy4JLUlAGXpKYMuCQ1ZcAlqSkDLklNGXBJasqAS1JTBlySmhp0Kr0mW91770LW++RNVy1kvZK2j3vgktSUAZekpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlMGXJKaMuCS1JQBl6SmDLgkNWXAJampQQFPclaSO5M8nuRQktfMazBJ0taGXo3wD4G/r6prk5wGnD6HmSRJU5g54EleCrwWuAGgqp4DnpvPWJKkSYbsgZ8PHAX+LMmrgYP
AjVX1reMXSrIH2AOwa9euAaub3aKuyb1IQ/7MXktc6mHIMfAdwCXAn1bVxcC3gL3jC1XVvqpaq6q1lZWVAauTJB1vSMAPA4er6sHR/TtZD7okaRvMHPCq+irw5SQXjB66AnhsLlNJkiYa+i6UXwVuH70D5YvA24aPJEmaxqCAV9XDwNqcZpEknQDPxJSkpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlMGXJKaMuCS1JQBl6SmDLgkNWXAJakpAy5JTRlwSWrKgEtSUwZckpoy4JLUlAGXpKYMuCQ1ZcAlqSkDLklNGXBJasqAS1JTBlySmhoc8CSnJPmXJPfMYyBJ0nTmsQd+I3BoDl9HknQCBgU8ybnAVcD75zOOJGlaOwZ+/h8A7wLO3GyBJHuAPQC7du0auDpth9W99w76/Cdvumoh6x6yXqmjmffAk7wZOFJVB7darqr2VdVaVa2trKzMujpJ0pghh1AuB65O8iTwQeD1Sf5yLlNJkiaaOeBV9e6qOreqVoHdwMer6i1zm0yStCXfBy5JTQ19EROAqvoE8Il5fC1J0nTcA5ekpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlMGXJKaMuCS1JQBl6SmDLgkNTWXU+mlZeC1xPVC4x64JDVlwCWpKQMuSU0ZcElqyoBLUlMGXJKaMuCS1JQBl6SmDLgkNWXAJakpAy5JTRlwSWpq5oAnOS/J/UkeS/JokhvnOZgkaWtDrkZ4DPj1qnooyZnAwST7q+qxOc0mSdrCzHvgVfVUVT00uv0N4BBwzrwGkyRtbS7XA0+yClwMPLjBc3uAPQC7du2ax+q05IZcl3tRhs7s9cS1CINfxExyBvBh4J1V9ez481W1r6rWqmptZWVl6OokSSODAp7kVNbjfXtV3TWfkSRJ0xjyLpQAtwCHquq98xtJkjSNIXvglwNvBV6f5OHRx5vmNJckaYKZX8Ssqn8EMsdZJEknwDMxJakpAy5JTRlwSWrKgEtSUwZckpoy4JLUlAGXpKYMuCQ1ZcAlqSkDLklNzeV64NIL3ZDriXst8R6W8Zrx7oFLUlMGXJKaMuCS1JQBl6SmDLgkNWXAJakpAy5JTRlwSWrKgEtSUwZckpoy4JLUlAGXpKYGBTzJlUk+l+SJJHvnNZQkabKZA57kFOBPgJ8GLgSuS3LhvAaTJG1tyB74pcATVfXFqnoO+CBwzXzGkiRNMuR64OcAXz7u/mHgJ8YXSrIH2DO6+80kn5vy6+8EvjZgvkVz/sVqM39+d8OH28y/Cecfs8l2ntYrNnrwpP9Ch6raB+w70c9LcqCq1k7CSNvC+RfL+RfL+bfHkEMoXwHOO+7+uaPHJEnbYEjA/xl4ZZLzk5wG7Abuns9YkqRJZj6EUlXHkvwK8A/AKcCtVfXo3Cab4bDLknH+xXL+xXL+bZCqWvQMkqQZeCamJDVlwCWpqYUHfNLp+Em+L8mHRs8/mGR1+6fc3BTz35DkaJKHRx+/uIg5N5Lk1iRHkjyyyfNJ8kejP9tnklyy3TNuZYr5X5fkmeP+7n9ru2fcSpLzktyf5LEkjya5cYNllnYbTDn/0m6DJC9O8k9JPj2a/7c3WGap+0NVLeyD9Rc//xX4EeA04NPAhWPL/DJw8+j2buBDi5x5hvlvAN636Fk3mf+1wCXAI5s8/ybgPiDAZcCDi575BOd/HXDPoufcYv6zgUtGt88EPr/B98/SboMp51/abTD6Oz1jdPtU4EHgsrFllrY/VbXwPfBpTse/BrhtdPtO4Iok2cYZt9L6cgJV9QDw9BaLXAP8ea37FHBWkrO3Z7rJpph/qVXVU1X10Oj2N4BDrJ/hfLyl3QZTzr+0Rn+n3xzdPXX0Mf6ujmXuz8IDvtHp+OPfAN9dpqqOAc8AP7gt0002zfwAPzv68ffOJOdt8PyymvbPt8xeM/oR+b4kP7roYTYz+tH8Ytb3Ao/XYhtsMT8s8TZIckqSh4EjwP6q2vTvfwn7s/CAvxD8HbBaVa8
C9vO9/5vr5HsIeEVVvRr4Y+BvFjzPhpKcAXwYeGdVPbvoeU7UhPmXehtU1f9V1Y+xfib5pUkuWvRMJ2LRAZ/mdPzvLpNkB/BS4OvbMt1kE+evqq9X1f+M7r4f+PFtmm0eWl8uoaqe/c6PyFX1EeDUJDsXPNbzJDmV9fjdXlV3bbDIUm+DSfN32AYAVfVfwP3AlWNPLXN/Fh7waU7Hvxu4fnT7WuDjNXpFYQlMnH/seOXVrB8n7OJu4OdH74S4DHimqp5a9FDTSvJD3zlemeRS1r/fl+Yf32i2W4BDVfXeTRZb2m0wzfzLvA2SrCQ5a3T7JcAbgcfHFlvm/pz8qxFupTY5HT/J7wAHqupu1r9B/iLJE6y/YLV7cRM/35TzvyPJ1cAx1ue/YWEDj0lyB+vvEtiZ5DDwHtZfyKGqbgY+wvq7IJ4Avg28bTGTbmyK+a8FfinJMeC/gd3L9I8PuBx4K/DZ0XFYgN8AdkGLbTDN/Mu8Dc4Gbsv6L6d5EfBXVXVPl/6Ap9JLUluLPoQiSZqRAZekpgy4JDVlwCWpKQMuSU0ZcElqyoBLUlP/D5hLEGXbe+UFAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "bucmsdGSkMda", - "colab_type": "text" - }, - "source": [ - "Most of the values are in the expected range, but there are a handful of outliers at much larger values. Perhaps this indicates the errors are not normally distributed, but it may also mean a few of the uncertainties are too low. This is an important reminder: the uncertainties are just estimates, not rigorous measurements. Most of them are pretty good, but you should not put too much confidence in any single value." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "4NwKVrwCkMdb", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on GitHub\n", - "Starring DeepChem on GitHub helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
- ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/08_Introduction_to_Model_Interpretability.ipynb b/examples/tutorials/08_Introduction_to_Model_Interpretability.ipynb deleted file mode 100644 index c284a6ccd440cc17b55a64e5b67909d806080744..0000000000000000000000000000000000000000 --- a/examples/tutorials/08_Introduction_to_Model_Interpretability.ipynb +++ /dev/null @@ -1,38005 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "08_Introduction_to_Model_Interpretability.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "cB0MgPvpkP1g", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 8: Introduction to Model Interpretability" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6NGHK1xmkP1i", - "colab_type": "text" - }, - "source": [ - "In the previous sections of this tutorial series, you have learned how to train models with DeepChem on a variety of applications. You have also learned about modeling the uncertainty associated with a model. But we have not yet really studied the question of model explainability.\n", - "\n", - "Often times when modeling we are asked the question -- How does the model work? Why should we trust this model? My response as a data scientist is usually \"because we have rigorously proved model performance on a holdout testset with splits that are realistic to the real world\". Oftentimes that is not enough to convince domain experts.\n", - "\n", - "[LIME](https://homes.cs.washington.edu/~marcotcr/blog/lime/) is a tool which can help with this problem. 
It uses local perturbations of featurespace to determine feature importance. In this tutorial, you'll learn how to use Lime alongside DeepChem to interpret what it is our models are learning. \n", - "\n", - "![Selection_110.png](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/lime_dog.png?raw=1)\n", - "\n", - "So if this tool can work in human understandable ways for images can it work on molecules? In this tutorial you will learn how to use LIME for model interpretability for any of our fixed-length featurization models.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/08_Introduction_to_Model_Interpretability.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "xdgY3YQLkP1m", - "colab_type": "code", - "outputId": "104c7f23-2627-4ea6-8360-1c71527b8a6f", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 34425 0 --:--:-- --:--:-- --:--:-- 34425\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 3.16 s, sys: 757 ms, total: 3.92 s\n", - "Wall time: 2min 24s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "1zuqJlT-kP1p", - "colab_type": "text" - }, - "source": [ - "## Making of the Model\n", - "\n", - "The first thing we have to do is train a model. Here we are going to train a toxicity model using Circular fingerprints. The first step will be for us to load up our trusty Tox21 dataset." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "57IdQLKOkP1q", - "colab_type": "code", - "outputId": "fb10dc45-32ba-4408-931d-7e0b0ca8eacf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 476 - } - }, - "source": [ - "from deepchem.molnet import load_tox21\n", - "\n", - "# Load Tox21 dataset\n", - "n_features = 1024\n", - "tox21_tasks, tox21_datasets, transformers = load_tox21(reload=False)\n", - "train_dataset, valid_dataset, test_dataset = tox21_datasets" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/tox21.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "TIMING: featurizing shard 0 took 33.641 s\n", - "TIMING: dataset construction took 33.962 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.403 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.203 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.204 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.340 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.048 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.049 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "bOA0VkCskP1u", - "colab_type": "text" - }, - "source": [ - "Let's now define a model to work on this dataset. Due to the structure of LIME, for now we can only use a fully connected network model." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "u0ZLMRiHkP1v", - "colab_type": "code", - "outputId": "60c60616-81b4-4389-bcba-4ae82edd92e9", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 88 - } - }, - "source": [ - "import deepchem as dc\n", - "\n", - "n_tasks = len(tox21_tasks)\n", - "n_features = train_dataset.get_data_shape()[0]\n", - "model = dc.models.MultitaskClassifier(n_tasks, n_features)" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "RW58esW_kP1x", - "colab_type": "text" - }, - "source": [ - "Our next goal is to train this model on the Tox21 dataset. Let's train for some 10 epochs so we have a reasonably converged model." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "cnp0tJ2NkP1y", - "colab_type": "code", - "outputId": "1adee531-d874-4925-d8d0-a86b589092f5", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 445 - } - }, - "source": [ - "num_epochs = 10\n", - "losses = []\n", - "for i in range(num_epochs):\n", - " loss = model.fit(train_dataset, nb_epoch=1)\n", - " print(\"Epoch %d loss: %f\" % (i, loss))\n", - " losses.append(loss)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. 
Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:108: The name tf.losses.softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.softmax_cross_entropy instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:109: The name tf.losses.Reduction is deprecated. Please use tf.compat.v1.losses.Reduction instead.\n", - "\n", - "Epoch 0 loss: 0.225362\n", - "Epoch 1 loss: 0.146278\n", - "Epoch 2 loss: 0.125541\n", - "Epoch 3 loss: 0.115947\n", - "Epoch 4 loss: 0.112123\n", - "Epoch 5 loss: 0.101710\n", - "Epoch 6 loss: 0.100300\n", - "Epoch 7 loss: 0.101758\n", - "Epoch 8 loss: 0.090115\n", - "Epoch 9 loss: 0.090089\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "IJc49NbMkP11", - "colab_type": "text" - }, - "source": [ - "Let's evaluate this model on the training and validation set to get some basic understanding of its accuracy. We'll use the ROC-AUC as our metric of choice." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "5TWg2RelkP12", - "colab_type": "code", - "outputId": "d206634f-8c01-44c9-c251-80165ae7fda2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 156 - } - }, - "source": [ - "import numpy as np\n", - "\n", - "metric = dc.metrics.Metric(\n", - " dc.metrics.roc_auc_score, np.mean, mode=\"classification\")\n", - "\n", - "print(\"Evaluating model\")\n", - "train_scores = model.evaluate(train_dataset, [metric], transformers)\n", - "valid_scores = model.evaluate(valid_dataset, [metric], transformers)\n", - "\n", - "print(\"Train scores\")\n", - "print(train_scores)\n", - "\n", - "print(\"Validation scores\")\n", - "print(valid_scores)" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Evaluating model\n", - "computed_metrics: [0.9911460306475237, 0.9962989723827874, 0.9757023239869564, 0.986256863445856, 0.9259520300246388, 0.9873943742049194, 0.9918725451398143, 0.9379407998794907, 0.9928536256898868, 0.9772374789653557, 0.965923828259603, 0.981542764445936]\n", - "computed_metrics: [0.599564636619997, 0.8016699735449735, 0.810107859645929, 0.7260421962379258, 0.6494545454545455, 0.7463417512390842, 0.694942021460713, 0.8004415322107142, 0.7417588886272664, 0.722559331175836, 0.8338163788354211, 0.7412575366063738]\n", - "Train scores\n", - "{'mean-roc_auc_score': 0.975843469756064}\n", - "Validation scores\n", - "{'mean-roc_auc_score': 0.7389963876382316}\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "xMBwqFmDkP15", - "colab_type": "text" - }, - "source": [ - "## Using LIME\n", - "\n", - "LIME can work on any problem with a fixed size input vector. It works by computing probability distributions for the individual features and the covariance between the features. We are going to create an explainer for our data.\n", - "\n", - "However, before can go that far, we first need to install lime. 
Luckily, lime is conveniently available on `pip`, so you can install it from within this Jupyter notebook." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "WV50QNwSkP15", - "colab_type": "code", - "outputId": "2bde98a9-4334-47f7-d811-27a3095c3293", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 688 - } - }, - "source": [ - "!pip install lime" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Collecting lime\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/27/ee/4aaac4cd79f16329746495aca96f8c35f278b5c774eff3358eaa21e1cbf3/lime-0.2.0.0.tar.gz (274kB)\n", - "\u001b[K |████████████████████████████████| 276kB 2.8MB/s \n", - "\u001b[?25hRequirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (from lime) (3.2.1)\n", - "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from lime) (1.18.5)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from lime) (1.4.1)\n", - "Requirement already satisfied: tqdm in /usr/local/lib/python3.6/dist-packages (from lime) (4.41.1)\n", - "Collecting pillow==5.4.1\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/85/5e/e91792f198bbc5a0d7d3055ad552bc4062942d27eaf75c3e2783cf64eae5/Pillow-5.4.1-cp36-cp36m-manylinux1_x86_64.whl (2.0MB)\n", - "\u001b[K |████████████████████████████████| 2.0MB 8.8MB/s \n", - "\u001b[?25hRequirement already satisfied: scikit-learn>=0.18 in /usr/local/lib/python3.6/dist-packages (from lime) (0.22.2.post1)\n", - "Requirement already satisfied: scikit-image>=0.12 in /usr/local/lib/python3.6/dist-packages (from lime) (0.16.2)\n", - "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->lime) (2.4.7)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib->lime) (0.10.0)\n", - 
"Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->lime) (1.2.0)\n", - "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->lime) (2.8.1)\n", - "Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.6/dist-packages (from scikit-learn>=0.18->lime) (0.15.1)\n", - "Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.12->lime) (2.4)\n", - "Requirement already satisfied: imageio>=2.3.0 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.12->lime) (2.4.1)\n", - "Requirement already satisfied: PyWavelets>=0.4.0 in /usr/local/lib/python3.6/dist-packages (from scikit-image>=0.12->lime) (1.1.1)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from cycler>=0.10->matplotlib->lime) (1.12.0)\n", - "Requirement already satisfied: decorator>=4.3.0 in /usr/local/lib/python3.6/dist-packages (from networkx>=2.0->scikit-image>=0.12->lime) (4.4.2)\n", - "Building wheels for collected packages: lime\n", - " Building wheel for lime (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", - " Created wheel for lime: filename=lime-0.2.0.0-cp36-none-any.whl size=284181 sha256=784faa7c9728629fe2d9ea8f11d74cd9e8e8e3c2346e8e9ecf7dc9f16ce42f0b\n", - " Stored in directory: /root/.cache/pip/wheels/22/f2/ec/e5ebd07348b2b1ac722e91c2f549fcc220f7d5f25497a61232\n", - "Successfully built lime\n", - "\u001b[31mERROR: albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.9 which is incompatible.\u001b[0m\n", - "Installing collected packages: pillow, lime\n", - " Found existing installation: Pillow 7.0.0\n", - " Uninstalling Pillow-7.0.0:\n", - " Successfully uninstalled Pillow-7.0.0\n", - "Successfully installed lime-0.2.0.0 pillow-5.4.1\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.colab-display-data+json": { - "pip_warning": { - "packages": [ - "PIL" - ] - } - } - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "E9ksPtOskP18", - "colab_type": "text" - }, - "source": [ - "Now that we have lime installed, we want to create an `Explainer` object for `lime`. This object will take in the training dataset and names for the features. We're using circular fingerprints as our features. We don't have natural names for our features, so we just number them numerically. On the other hand, we do have natural names for our labels. Recall that Tox21 is for toxicity assays; so let's call 0 as 'not toxic' and 1 as 'toxic'." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "0yO0QUHlkP18", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from lime import lime_tabular\n", - "feature_names = [\"fp_%s\" % x for x in range(1024)]\n", - "explainer = lime_tabular.LimeTabularExplainer(train_dataset.X, \n", - " feature_names=feature_names, \n", - " categorical_features=feature_names,\n", - " class_names=['not toxic', 'toxic'], \n", - " discretize_continuous=True)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kAW-JA6jkP1_", - "colab_type": "text" - }, - "source": [ - "We are going to attempt to explain why the model predicts a molecule to be toxic for NR-AR\n", - "The specific assay details can be found [here](https://pubchem.ncbi.nlm.nih.gov/bioassay/743040)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "4Uu16LYakP2A", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# We need a function which takes a 2d numpy array (samples, features) and returns predictions (samples,)\n", - "def eval_model(my_model):\n", - " def eval_closure(x):\n", - " ds = dc.data.NumpyDataset(x, n_tasks=12)\n", - " # The 0th task is NR-AR\n", - " predictions = my_model.predict(ds)[:,0]\n", - " return predictions\n", - " return eval_closure\n", - "model_fn = eval_model(model)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "WIIfzqzQkP2C", - "colab_type": "text" - }, - "source": [ - "Let's now attempt to use this evaluation function on a specific molecule. For ease, let's pick the first molecule in the test set." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "VGPZDfmMkP2D", - "colab_type": "code", - "outputId": "0f7aef84-92b3-49a3-e2e6-bd37900cfd1c", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 184 - } - }, - "source": [ - "# Imaging imports to get pictures in the notebook\n", - "from rdkit import Chem\n", - "from rdkit.Chem.Draw import IPythonConsole\n", - "from IPython.display import SVG\n", - "from rdkit.Chem import rdDepictor\n", - "from rdkit.Chem.Draw import rdMolDraw2D\n", - "\n", - "# We want to investigate a toxic compound\n", - "active_id = np.where(test_dataset.y[:,0]==1)[0][0]\n", - "print(active_id)\n", - "Chem.MolFromSmiles(test_dataset.ids[active_id])" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "41\n" - ], - "name": "stdout" - }, - { - "output_type": "execute_result", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcIAAACWCAIAAADCEh9HAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3dd1QUVxcA8LvrAtuLQAQUNCsCglggKpZEVBQ1FtSYqJFo1ACWiCUKNrDEWBI/NJaAmBg0xq6xRQ0mERsqiCJFAaUrSNvCNtgy3x9jNgYBYdss8H4nxzMss+/dIXD3zcyb+0gYhgGCIAiiKzLRASAIgrRsKI0iCILoBaVRBEEQvaA0iiAIoheURhEEQfSC0iiCIIheUBpFEATRC0qjCIIgekFpFDFjRUXg7w80Gjg5wY8/AgCoVEAiwdOnr3YICIAtWwgMEEEAgEJ0AAjSsE8+AWdnKCmBpCQICABXV/DxITomBKkLjUYRc5WdDXfuwObNwOXCiBEwaRIcOEB0TAhSD5RGEXOVkQE2NtCx46sve/WC9HRCA0KQ+qGTesRcyWTAZv/7JYcDUumr7X79gEwGAKiuRqf5COFQGkXMjFwOVlZAJoOtLYhE/74uEgGH82r73Dno0gUAYO5c0weIIHWgk3rEzCxbBoMHQ0YGODtDRQUUFr56/cED6NXr1badHXTqBJ06AZVKVJgIooXSKGJOxGK4cAESE8HLCw4dgkGDIDwchEL48084exbmzCE6PgSpB0qjiDkpLoa0NFi0CFQqWLcOXryAnByws4PZs+GHH8Dbm+j4EKQeJFT9HjEXhYXQowf06gWxsVBWBkFBkJUFZDIsXAjffQcWFkTHhyD1Q6NRxGzk5wODATdvgpcXJCZCcjJERgKFAjk5KIci5gyNRhHzIJXC+fMwahSEhUFsLGAY9OwJ+/eDpSXweODkRHR8CNIglEYR87BkCezYAUOGQGwsFBRAcDDk5oKDA+TmgpUV0cEhSGPQST1iHry9wdoaEhKgTx/IyICHD2HZMoiKQjkUMX9oNIqYgcOHgUKBoUMhNBSOHgUA8PGBv/9G00KRFgE9xYQQrbQUFiwAkQjGj4c9eyAwEObNA09PlEORlgKd1CNE69ABvv8erK3h3Dnw8ICcHEhNhe++IzosBGkqlEYRQtXUwPDhwGBAejp89BGIxbB3L1Cp/ylKgiDmDZ
3UI4SKjYW//4a//4aPP4bdu2HqVLCzQ6fzSMuCbjEhhMEwTC6V0g8cgFWrQCIBOzt4+hQYDKLjQpDmQSf1CGF+/vlnD0/PKy4ukJUFAQEQHIxyKNISodEoQpiRI0fGx8cDQGBg4A8//MCgUqFdO6KDQpBmQ6NRhBjPnj07derUjh07mEzm06dPaTQayqFIC4VGowgBamtrvby85HL5vn37unTpgmGYs7Mz0UEhiI7QnXqEACUlJWQyOTc396OPPiooKGCj6U1IS4bSKEKA69evJyYmRkVFdezYEeVQpKVDJ/WIqR0+fHjGjBnu7u63bt3icrlEh4Mg+kK3mBBT69Kli6urq4eHB8qhSOuATurNy507dw4dOrRr1y4yuXV+wiUnJ1+/fj0pKUmlUhEdC4IYBjqpNxe3b9/evHnzhQsXAODUqVOTJk0iOiLD02g0Xl5eVVVVGRkZLBaL6HAQxDBa55DHuIqKwN8faDRwcoIffwQAUKmARIKnT1/tEBAAW7Y0vb2bN2+OGzdu0KBBFy5cYDKZixYtGjhwoBHiJh6ZTN6+ffuBAwdQDkVaE3RS33yffALOzlBSAklJEBAArq7g46NzY+PHjz9//jwAcDic0NDQ0NDQ9u3bl5WVKZVKC2Ou4yYQCKRSKYPB4HK5JBLJeB29bubMmdu3b7exsTFNdwhiGiiNNlN2Nty5AydOAJcLI0bApElw4IA+adTT0/PatWvz588PCwvj8Xjl5eXr1q2LioqKioqaPXu2AQN/XVxcXGJiYkxMjPYVKpXK4/F4PB6NRsO3tRsNbeMb77zzDoXSpN+iu3fv8ng8lEOR1gel0WbKyAAbG+jY8dWXvXrBiRP6tBcWFhYWFsZms1+8eLF+/fp9+/bJ5XISifTw4UMDRFuf9PT0oKAgDMPs7OwUCoVQKAQAhUJRUlJSUlKiQ4McDodOpzMYDA6Hw2QyGQwGPsjFN1gsFpvNdnJy+vDDD/v27Wvoo0EQ4qE02kwy2X8qCnM4IJW+2u7XD/Db69XVTR+f4pPPlyxZEh0drVAoSCTSxIkTIyIievfubdC4X9FoNCEhIbW1tfPnz9+zZ4/2dblcLhAIBAKBQqF4c7veF/FtsVgsEolEIlHj/Xp7e1++fPn77783xkEhCLFQGm0mW1t4PWWIRMDhvNo+dw66dAEAmDu3ua0KhcLa2tqxY8euX7/ey8tLpVLFxcWRyeTAwEBDBP2vXbt23bp1y8HBYdOmTa+/TqPRaDSag4ODDm0KhUKpVCqTycRisVgsxreFQqFEIpHJZBKJpLKy8tixY/fv3x8xYsT48eMNdCgIYjYwpFmePcMAsIKCV19On47Nm4cplRgAlpPz6sUJE7DNm5vVakFBQXp6OoZhtbW1cXFxLi4uAGBnZyeTyQwYe35+PpPJBIDffvvNgM02xY4dOwDA09NTrVabuGsEMTY0Gm0mPh8GD4bwcNi7F+7fh7NnISFB/1adnJwAICYmZsuWLfn5+QDg6uq6evVqw96sDw4OlkgkU6dOnTBhggGbbYqQkJDt27enpaWdOnVqypQpJu7d2EpLS6Ojo1NSUhgMBpvNZrFYdS4QM5lMDofDYDDodDqHw2GxWE28L4e0COj/ZfMdOQKzZ4OdHdjawqpV4O0NBnog5+TJk/n5+e7u7mFhYdOnT6dQKGVlZe+8845BGj948OCVK1fat2+PDwxNzMrKatWqVfPmzVuzZs3EiRNbUxIpLCz09PRksVjPnz9v1hubNSPi9RdtbW2NOhkOaS70FJOuiorAyQk6dIDSUkM1mZSUlJeX99FHH5HJ5IcPH37zzTfx8fF5eXn6P3teUVHh7u5eXl4eFxf32WefGSTa5lIqlW5ubrm5uYcOHZoxYwYhMRjDhAkTzp075+fnt3DhQqlUKpFIRCIRfoFYJBJVV1fLZDKpVCoQCPCN6upqsVisVqv16XThwoVBQUGenp6GOgpEHyiN6grDoH17EAqhtBQ6dDBgw7dv39
6wYcOVK1cAgMlknjlzxs/PT882p02bdvTo0WHDhl29etVkk+3f9PPPP3/++efOzs6ZmZmtYzyFV6vicDiZmZnNvUHXxBkRb75YXl5Oo9G4XG52djYVraJqDoi+ONuSvf8+BoD98YcBm9RoNB4eHgDAYrFWrlxZXl6uf5sXL14EADqd/uzZM/1b04dKpXJzcwOA2NhYYiMxiIqKCvySy4EDB0zZr0aj6dWrFwDs3LnTlP0iDUFpVA8LFmAA2HffGbbVM2fOrF27trKyEsOwsrKy8PDwFy9e6NyaWCx2dHQEgKioKMPFqLtff/0VAJycnBQKBdGx6OvTTz8FgKFDh2o0GhN3ffbsWQCws7OTSqUm7hp5E0qjeoiOxgCwzz4zRttlZWWRkZH45PylS5fq3M78+fMBoF+/fiqVyoDh6UytVuMjqd27dxMdi15+//13fIz/9OlTQgLo378/AGzbto2Q3pHXoTSqh8REDADr3dvgDavVaj6fDwAkEmns2LFJSUm6tZOYmEgmkykUyoMHDwwboT5Onz4NAPb29rqNpKZkZHgnJ2v/26Sdw2tCEonk3XffBYDt27ebvnfc5cuXAcDa2lokEhEVA4JDaVQPEglGJmNWVlhtrcHb3r59e0BAwP379zEMU6lUQqGwuS0oFAp3d3cAiIiIMHh4eurXrx8AfKfT9ZApGRlXKisNHlKzfPnllwDQt29fYsf4Q4YMAYANGzYQGAOCoTSqL2dnDABLSzNS82q1+vjx466urnPmzGnue9esWQMArq6ucrncGLHpAz8jtrGxEYvFzX2vodKoRqMp0Gkke+fOnXbt2lEolJSUFP3D0MeNGzcAgMPhVBL9udLGoTSqn0mTMADs8GFjtK1Wq7UTA7t3715TU9P09z569MjS0pJMJt+4ccMYsenvgw8+AICvv/66uW/UP40WFhZu2bLF2dnZwcGhucPJmpoafCrF6tWr9YnBUPDJcGYSTJuF0qh+IiMxACwszEjNL1myhM/nx8TEKJXKpr9LrVYPGDAAABYuXGikwPR3/fp1AOByuVVVVc16o85pVCgUxsbGvv/++9qZs46OjjnaSghNExERAQAuLi5mMsa/d+8eiURiMpkvX74kOpa2C6VR/Zw+jQFgo0cbqXmJRIIPl5KTk7du3drEd23fvh2fVKTDKbMpDRs2TIdLt3VuMR1uWvpITk6m0Wh49mQwGJ999tnVq1ebWyfl8ePHVlZWZDL5+vXrzXqjUX344YcA8NVXXxEdSNuF0qh+nj7FALCOHY3Xg1KpHDduHH7X/tGjR2/dX1vG6ezZs8aLyiBu3boFAE0ZSdW8lvB0G40qlUp7e/tBgwbFxMTo9umiVqvxNbLmz5+vw9uNJzU1lUwmU6nU4uJiomNpo1Aa1Y9Gg7HZGABmiMeNGvLxxx8zmczly5eXlZW9deeRI0cCwPTp040XjwGNHj0aAFasWNHIPqfKy0c+fDghLe1SZaVao9H5pL66uvr1L9PT01esWBEZGdnEt+MlXRwcHAQCgQ69G9XkyZPN/BpO64bSqN4GDMAAsL/+Ml4PhYWF+FOhb532dODAAXwuYUu5UpacnEwikWg02vPnzxva58O0NO0pfEBa2qhHj36vqNC5x6qqqpiYmEGDBuEn+DwerynPUxFYqrUp0tPTyWSypaVlbm4u0bG0RWiBZb317AkAkJpqvB4cHR05HM7KlSsdHR3xOS71qqioWL58OQDs3LnTUOX1jM3b2zsgIEAul2/btq3eHWoxrKK2VvtlUU1NeW3tnhcvjpeV1Wg0ze1OJpN17tw5ODj41q1bPB4vJCTk4sWLVlZWb30jgaVam8LDw2Pq1Km1tbWbN28mOpa2CKVRfVX27/9wyJBLL18atRcLC4sbN25UV1fHx8c3tM+CBQsqKipGjx6NP+vdUmzcuJFMJkdHRxcVFb353SyZTPVGEbLS2tptRUWR+fnN7YtOp48ZM8bPzy8uLq64uPiHH37ApzQ0jt
hSrU20bt06CoXy008/ZWVlER1L20P0cLjFw4eH7733nrE7SkpKunfvXkPfvXDhAgAwGIyWeFo3depUAAgODn7zW7+Ulr5+X/71/6ZkZOjQV50yIvfu3ctotJ3y8nJbW1sAiIuL06E7U8JX5A4MDCQ6kDYH1RvVl1gs5nK5VCpVLBYbu6i7Wq0+cuRIp06dfH1968Tg4eFRXFz8/fff488ptiw5OTnu7u4kEunJkyd4MQGtsNzcPwWCN99CIZH2duvmxWLp1uPz589Pnjz5888/P3z4MDAw8ODBgw3taSalWpuioKDA1dVVpVKlpaV1796d6HDaEHRSry82m925c2e5XJ6Tk2OC7jZu3IjPEHz9xRUrVhQXF/fv3x+v59TidOvWbcaMGUqlcuPGjXW+laZdv/q/ljk66pxDAWDatGmLFy9++PChnZ1dF3w91/r8/vvvR48epdPpsbGxZp5DAaBz586zZ89Wq9Xr1q0jOpa2BY1GDQBfRuLo0aOffPKJsfs6c+aMUCicOXMmmfzqI/D69eu+vr4WFhYpKSn4c4otUUFBgYuLi1qtfn0k9bK29sO0tDd3Hmttva7h3NcUcXFxly9fDgwMHDlyZEPnENXV1R4eHkVFRVFRUYsXL9anO5MpKSnp2rWrQqFISUnp3bs30eG0FWg0agB4Ac20+v7gDW7ixImff/65TCbDv6ypqQkJCcEwbNWqVS03hwJA586dP//8c7VavWHDBu2LVx49enPPngzGqs6d9exu5syZR44cGTNmDIVC0Wg0mvpu+oeHhxcVFfXr168FXSext7fHfx8iIyOJjqUNQWnUAPACIo/q+5s3BoVCMWrUKHxNtPXr1z9+/NjNzS08PNw0vRtPREQEjUY7duxY6j+zx5J//VX23/vOthYW27p2tTTQ+fWzZ88iIiL4fD5eTP51d+7ciY6OplAoMTEx7dq1M0h3prFq1SoWi3Xu3Lm7d+8SHUubQegNrlbiyZMnAODk5GSyHr/99tuUlJTU1FQLCwsymXzz5k2TdW1UoaGhABAQEIB/2a9fPyCTecOGdf/1V+/k5Pfu3n0kkRiwO+01xLlz577+ujmXam0K/DPV39+f6EDaCpRGDUCtVjMYDABobrEifahUqvfeew8AQkNDTdapsZWVleEPC929e1cmk1laWr76tCeRqHz+/v37Ddtdbm7u559/fu3atTqzoMy5VGtTCAQCfFHua9euER1Lm9DK0+j9+/cfP37crEqdusHLuSckJBi1F4VCkZqaevTo0YiICB8fH3wIXOdR8ZZuxYoVADB69Gi8kp7WggULTBOA+ZdqbQr82ujgwYOJDqRNaM1pdPfu3fgkFQqF4uzsPHr06MWLF+/du/fq1asFBQWGXc1x7ty5ALBr1y4DtllTU5Oenn78+PHIyMgpU6a4u7vXuUjHZrM//vhjA/ZoDioqKvCF/IKDg7VHOmjQION9Fubl5S1fvlwikWAtpFRrU4hEImtrawCIj48nOpbWrzWnUby+XEPodHqvXr2mTJmyatWquLi4xMREfVZi2LlzJwAEBQXp3EJVVdWtW7diY2OXLVs2atSoLl26vDlRkUKhuLi4BAQErFy5csOGDW8t6tFC4aWR8SwAAI6OjkattHLz5k0AOHjwINZySrU2Bf58fd++fU2//nNb05rnjQ4dOvTatWvNeguPx+Pz+Xw+393d3cPDg8/nu7q64lfrGnft2rWhQ4f279//zp07TelIIBBkZGRkZmbm5ubiG3l5eXX+X1hYWDg6OuKR4P92796dTqdrd5g4ceJvv/0WGhpqzs9660AkEvH5/KqqKgCwsrK6fPlynae2DAvDsMjIyE8++YTJZPbo0UMikZw9e3b8+PHG69E0pFJp165dX758eeHCBby0M2IkrTmN9u3bNzk5Wf92eDyeNqviGdbNza3O+bVQKGzfvj2dTheLxdqJ8VrapIn/m56eXlpaWmcfS0tLZ2dnbcbk8/k9evRovPhQen
p6r169LCwscnJyHB0d9T9S8xEcHBwbG/v6LyeVSqXRaFQqlcfjaTca2q7zIofDefN/ypv8/f3/+OOPTz/99JdffjHmwZlOVFTU0qVLe/bs+eDBg6b8BBDdtOY02r17d3wqksHh48TXB618Pt/X17eoqCg7O5vBYGgzZkZGRlpamlgsrtMCh8NxdnbWvt3d3b179+46/KLjT3yHhIT88MMPBjo44qnV6n79+qWkpHA4HCqVKpVKJRKJPg2SyWQOh8NkMhkMBoPB4HK5+AaLxWKz2QwGg06nZ2ZmxsXFWVtbZ2ZmtpQyg2+lUCi6detWXFx88uRJX1/f/fv34wfOZDK5XC6dTmcwGGw2m8Vi4T8EouNtqVpzGnV0dCwuLjZZdxYWFkqlEv+3zrdsbW09PDzc3NzwE/Pu3bs7ODgYpNNGinq0XN99993y5cudnJzS09NZ/zw4L5fLFQqFXC4XCAQCgUC7Xe+LdXbAy12/tV82m7169Wp8qkCrsXfv3gULFnh4eBw6dMjLy6vxnXk8njbPcjiciV27LhSLgcMBFgvodGAwgMd7tcFiAYfzapvDASYTLCxMc0RmqDWnUR6PJxQKTdkjnU6XyWTaiwD4vx4eHvb29sbrdNasWXFxcbNmzcJL37d0+fn5np6eEonk3Llzjd8kbDq1Wi0Wi8VisVQqlclkQqFQIpHIZDKJRCIUCmUymVQqvXTpUkZGxpQpU44fP26QTs2EUql0c3PLzc3dtWtXcXFxdXU1/kMQCAT4RnV1tUgkkkqlCoWiznu/6d9/ZdMfhbK0BAYDgoJgyxYDH4PZa81p1MLCQqVS6dkIhUJhsVj4aSCTyWQymTwej/kPDofDZrPx7TVr1mRmZl68eHHMmDEGib+J8vPzXV1d6xT1aKEwDPP394+Pj58xY8ahQ4dM2XUrLurx008/zZkzp1u3bpmZmY3UctRoNCKRqLq6Gv9cEQgE9iqVh0AA1dUgFoNUClIpiEQgkbzaFgpBKgWZDMRiEItBrQYAWLQIvvoK5s6F69fB1hYiI2HOHNMdKkFacxoNDg7et28fvq1Nglwul8Vi4dtsNhu/ZNZIfqRSqU3szsbGprKysqSkxM7OzmjHVL+QkJCYmJipU6ceOXLExF0bFv4Hb2Njk5GRYfoLlEuXLo2Kiho/fvybj9i3aCqVys3N7dmzZ6dOnZo0aZKxuqmpAbyq4dix4OwM338PSUkQEABXrsDgwcbq1EwQMs3KNDQaTUFBQVVVlQnmzZWVlQEAh8Mxdkf1ev78OY1GI5PJDx8+JCQAgygpKeHxeADw66+/EhJAeXk5fin2zp07hARgJBqNxsvLy8nJqfE6/4aRlYWRSJh2qecZM7DZs43eKdFa8xwIEonk5OTE4/FMUHAXnxLg5uZm7I7q5eDg8MUXX2g0mvXr1xMSgEF8+eWXAoFg9OjR06ZNIyQAGxubBQsWAEArqzIXFxeXkpIilUptbGyM3llGBtjYQMeOr77s1QvS043eKdFacxo1JXwdMVdXV6ICWLlyJZ1OP3PmzL1794iKQR8XLlw4efIkg8HYs2cPgWGEhYVxudwrV64kJCQQGIYBadeL/d///meK6yQyGbDZ/37J4UAD6xe0JsZdO6jtIDyN2tnZLVy4cNu2bevWrfv999+JCkM3YrF43rx5ALB169Z3332XwEi4XG5oaOj69evXrFnTyFrWLcj8+fPx9WKfPHlCIpG082S5XG6dibR0Op3NZrPZbHw+KZfL7clmv0OlApMJHA4wGNCU+wS2tiAS/fulSAQcjvGOzky05ltMpjRu3LgLFy4Y9xL+21RWVvL5fLFYnJCQ8MEHHxAVhg6CgoJiY2N9fHxu3bpF+MM2YrGYz+dXVlbGx8f7+fkRG4yeLl68OHbsWAaDkZaWtnv37v/973/NenvewIFdbt/+92syuZ45pHQ6sNnAZr968f33YeBAKCgAJycAgE8/BQ4H9u416GGZHZRGDcPFxSUnJyc9PZ
3YlTwiIiI2btz4/vvv16kyZ84SEhKGDh1qYWHx4MEDvFgy4bZs2bJy5cq+ffvevXvX/Feya0i968Vq58niE2nxbaFQKJVK8afF8DmkMplMJBLF2dg4pKeDTAYCAchkUFPz9l5v3oTwcHB0hL174f59mDABEhLA29u4h0o0lEYNoLa2lsFgYBgmkUiaPkHKGLRFPf78889hw4YRGEkT1dTU9OnT5/Hjxxs2bFi7di3R4bzSOop64NPg+vfvf+vWLcOsg6JS/TuHVCYDoRAkEpDJQCIBkejViwsXApkMs2fD9evQoQN8/TUEBhqgazNH7ESB1iEzMxMAunbtSnQgGIZhX3/9NQAMHDiQ6ECaJCwsDAA8PT1NUFq7WfDz3549e6rVaqJj0UVCQgKJRLK0tExPTyc6ltYPpVEDOH36NAB8+OGHRAeCYRgmkUjwG7K///470bG8xcOHD/G1pG7dukV0LHXJ5fJOnToBwMmTJ4mOpdkUCgX+PNv69euJjqVNQBOeDIDw2/SvYzAYeHGNtWvXYmZ8xUalUs2ZM0epVIaGhg4cOJDocOqiUqmrVq0CgMjIyHqXXzZn2vVi8cE+YnRE5/HWYNasWQAQExNDdCCvyOXyjh07AsDp06eJjqVBW7duBYDOnTub7VpStbW1eNGsw4cPEx1LM7S+9WLNH0qjBoCvLmfs9eyaZdeuXQDg4eFhnpf2cnNz8bVUL1++THQsjfnxxx8BoFu3bkqlkuhYmqRVrhdr/lAaNYD27dsDQGlpKdGB/Kumpgafx37kyBGiY6lLo9Hg8zFnzpxJdCxvoVKp8Gs1P/74I9GxNMm2bdvMfIzfKqE0qq+XL18CAJfLJTqQumJjY81zJIWX3bKxsSkrKyM6lrfDFxTp3LmzQqEgOpa3yMvLw8f458+fJzqWtgWlUX3hD1/7+PgQHUhd2pHUgQMHiI7lX9oyTkePHiU6liZRq9U9e/YEgL179xIdS2M0Gs2IESMAIDAwkOhY2hyURvWFj63M8/z04MGD+EjKfGZl4g/LjhkzhuhAmuHkyZMAYG9vL5PJiI6lQfv3729BY/xWBk140pdZzXaqY/r06e7u7gUFBWayvsipU6dOnz7NZrOjo6OJjqUZJk2a1Ldv35KSErMNu7S0FC/jtGvXLltbW6LDaXuIzuMtHv6woNlOLcJXFnJwcCB8JCUUCvFpWGZ+dlyv8+fPA4CNjY1YLCY6lnpMnjwZAEaPHk10IG0USqP6cnZ2BgBT1BXXiUajwVcW2rFjB7GRzJkzBwAGDBhgnnOw3mrAgAEAsHnzZqIDqQtP8SwWq7CwkOhY2iiURvVSU1NDoVDatWtnzrdx8ZWFbG1tCZwE8/fff5NIJCsrK7P9vHmr+Ph4AOByuQKBgOhY/qUd4+/Zs4foWNoudG1ULzk5OSqVis/nW1lZER1Lg8aPH+/j41NeXr6XoLKPcrn8iy++wDAsIiLCTErh6cDPz8/X11coFO7YsYPoWP711VdfPX/+3MfHJyQkhOhY2i5UKE8vp0+fnjx58tixY/ETK7P1xx9/+Pv7W1tb5+bmsl9f48EkVqxY8e233/bs2TM5OdnCwsLEvRvQrVu3Bg8ezGazc3Nzra2t693n3LlzSqWSw+GwWCy8jDyPx6PT6cb4oMVLtVpaWqakpLTcz6dWAC0iohdiV7JrupEjRw4ZMiQhIWHHjh0RERGm7Do1NXXHjh1kMjk6OrpF51AAGDRokL+//5UrV7777rvNmzfXu8+iRYsKCgrefJ1CobBYLHwNDwaDga/sXe8aHq8v8uHk5ESj0ertqKamJiQkBMOwNWvWoBxKLJRG9WLOs53q2LBhw5AhQ7Zs2VJeXm5ra1vvHy2dTmcymVwu11Al31Uq1ezZs5VK5bJly6SEm24AAAy+SURBVPBbNC3dpk2b/vjjj127di1evLhDhw5v7jBmzJjS0tLq6mqxWIyXkRcKhRKJRKlUCgQCgUDQrO5u377d0M8tIiLiyZ
Mnnp6eeEEvhEAojeqlBaVRJpNJoVBqa2t3797dlP2pVCqPx+PxeDQaDd/WbjS0jW+88847FMqr36tvv/02JSWlS5cu69atM+KxmZC3t/e4cePOnTu3devWepc2augCdG1trXa5DplM1tAaHq8v8lFdXY0/8fWm1NTUqKgoMpkcExNjaWlpyCNEmg9dG9ULj8cTCoVlZWVmPuc5KSlpxIgRIpEIANq1axcSEmJlZfXmHy3+F97cEdOb8EEulUotKSmpqamJj48fPny4IY7DLKSlpfXu3dvS0jInJwcv7WxiKpXKx8fn/v37S5cu3b59u+kDQOpAaVR3paWl9vb27du3r6ysJDqWxjx48MDPz6+qqqpTp07FxcUAMH369MOHDzf+Lrlcjp+EKhSKN7frfRHfFovFarUab6RDhw61tbWlpaWtbMT0ySefHD9+fP78+Xv27DF971u3bg0PD+/cuXN6ejqTyTR9AEhdhE63atmuXbsGAAMGDCA6kMY8ePAAr+M3efJk7eLPJBLp3r17xutUIBAUFxfv2LEDv/nWEh9balxWVhaFQrGwsHj27JmJu87OzsZvOl25csXEXSMNQfNGdWf+t+kfPnyIj0MnTZqEFx7FX8cwDH8E20i4XO69e/cWL148duxYANi4caNcLjded6bn4uIybdo0pVIZFhaWm5tbUVEhk8lM0C+GYfPmzZPL5bNmzRo5cqQJekSaAt1i0p2Z319KTU318/OrrKycOHHi0aNHLSwsXl9TKCEh4fz58+PGjTNG1xkZGTQabcKECZs3b05ISEhKSoqOjl6yZIkx+iLKmjVrjhw5cvXq1a5du2pfpFKpTbwRV+fF9u3bN2Vp7tjY2D///NPGxubbb7815sEhzYPSqO7MOY0+evQIz6Fjxow5cuQIPmGzztJsy5cvHzVqlMHncubl5fXp08fLy2v16tUUCiUiImLcuHHffPPN3LlzWSyWYfsi0KlTp1QqFYlE4vP5IpFIKpUq/gEAJSUlzW3Q0tJSO1efwWBoJ5lqp6OpVKqoqCgA2LNnj42NjeEPCdEVSqO6w0/qzTCNPnr0aPjw4RUVFaNHjz59+rT2+Zk6aTQrK+vHH3807EOEEokkOzubx+PdvXt3z54948aNGzt27IABAxITE/fs2RMeHm7AvgiUnZ29ceNGEol07NgxvFgyrs79t0ZuxNXZoaqqqqampra2tvFpEl5eXp06dfr444+Nf4hIM6A79TqqqalhMBgkEkkqlZrVbegnT54MHTq0tLR01KhRv/322+vPII4ZM+bSpUuv7/zOO+/k5OQY6vFQpVLp4uLSo0eP5cuXP3jwwMfHp3///gBw9erVESNGcLncvLw8LpdrkL4IhGGYn5/fX3/9NXv2bHzNO4NQKBTaufoymaze6WgTJ0708fExq983BADdqdfV48ePAYDD4WRmZhIdy78eP35sZ2cHAKNGjZLL5XW+6+/v/+YvAL6cvUHcvn0bv4lMIpHGjBmTlZWl/Zavry8AREZGGqovAuHFm+3s7KqqqoiOBTELKI3qbsOGDQBAJpOnTJny+PFjosPBnjx5Ym9vDwD+/v5v5lAMw14//dSi0WgGqVM5efLkoKCgBw8eREZGcjgcCwuLvLw87Xdv3rwJAGw2u6KiQv++CPTixQt8QH3ixAmiY0HMBUqjuisqKlq0aBF+gxVPpk+ePCEqGG0OHTlyZL05FMOwhh4lmj17tp695+XltWvXDgAsLS2/+OKL+/fvnzlzps4++Fg4PDxcz76IFRAQAABjx44lOhDEjKA0qq/CwsI6yfT1k1nTyMrKcnBwAIARI0Y0sljIsGHD6k2j7dq10+fSxM6dO7ds2XL37t1PP/0UT6YUCiU0NLTObsnJySQSicFglJaW6twXsY4dO4aPqYuKioiOBTEjKI0aRkFBwaJFi/D7OXgyzc7ONk3X2dnZTcmh2D8XKN9kZWWl83KSEokEr7zJ4/EiIyNTUlLwD5VNmza9uf
P48eMBYMmSJbr1RSyhUIj/nGNiYoiOBTEvKI0aUn5+vjaZWlhYBAYG5uTkGLXH7OxsfA2JDz74QCKRNL7zBx98UCeBuru7L1iwQM/HCm/cuIE/rQQATCZz0aJF9+7dE4lEb+756NEjMplMpVJb4mhu1qxZADBkyBCNRkN0LIh5QWnU8PLz84OCgvBicXgyffr0qTE6ysnJwXPo+++/35R1lk6cOMFkMl1cXIKDg48cOWLYk+vr169rZwLQaLT79+/Xuxs+53HevHkG7NoE/vrrL3wtKbOamIGYCZRGjSUvL69OMjVsGYv8/PwuXboAwODBg5u+Vh1+90mpVAKAdqSMP7WpQwxvtrNo0aLAwEAXFxelUlnvWwgs6qEzqVSKP/G5ZcsWomNBzBEqTWIsXbp0iYmJyc7ODgoKwjDs0KFD3bt3Dw4OxkvV6amgoMDX1zc/P3/QoEGXLl1qerW0pjy4rQ97e/uDBw+mpKRoKzfXoS3qsWnTJqNGYkBr16599uxZz549ly5dSnQsiDlCadS43n333ZiYmKysrKCgILVavW/fvq5duwYHBz9//lznNgsLC4cOHapDDjUZBoPRyHc3bNhgaWkZFxeHFyUwc0lJSTt37qRQKD/99FNLX0sKMRKURk2Bz+fHxMSkpaUFBgbiyZTP5wcHB7948aK5TRUWFvr6+ubl5Q0cOPDSpUstsdhHly5dZs6cqVarN27cSHQsb6FSqYKDg9Vq9dKlS729vYkOBzFXRF9VaHMyMjICAwO1k9WDgoKeP3/exPcWFhby+XwAGDBggFgs1jkG/Jomj8eztra2tra2tLTU59qoDu0UFBRYWVmRyeTU1FQd+jUZPNG/++67b50FgbRlKI0SIz09PTAwkEwmA4CVlVVQUNCLFy8af0thYSF+o2PAgAH1TidqOjz93bhxo6ioqKioyN/fX580qls7CxcuBIDJkyfr0K9pZGVlUalUEokUHx9PdCyIWUMn9cTw8PA4ePBgamrqlClTamtr9+3b5+zsHBoaWlpaWu/+xcXFQ4cOffbsmbe398WLFw1Sk8nOzq5Tp06dOnXS876Tbu2sWrWKTqefPn06KSlJn96NRKPRzJ07V6FQzJkzx8/Pj+hwELOG0iiRevTocfz4cTyZyuXy77//vt5kqs2hXl5e8fHxDS2627LY29vPnz8f+6fCi7mJjo6+ceOGnZ3dtm3biI4FMXcojRLP09NTm0xlMpk2mb58+RIASktLR44c+fTp0z59+rSaHIoLCwtjsVgXLlxITEwkOpb/ePHixerVqwFgz549rekHjhgL0VcVkP/AkymJRAIAJpO5cOFCFxcXAOjTp09lZSXR0Rkenq38/PyIDuQ/JkyYAOZ93RYxK6j6vTlKTU3dtGnTiRMnAKBdu3a9evW6evVqqxwWCYVCPp8vEAj+/vvvhiqnmNjRo0enTZvG4XAyMjLwZ20RpHEojZqvxMTEU6dODR8+vH///vha863Sxo0bIyIiBg0ahJd2JlZlZaWHh8fLly/3798/Z84cosNBWgaURhGCSSSSrl27lpWVXb58ud5lTkzps88+O3TokK+vL16LhNhgkJYC3WJCCMZkMpctWwYAq1evJvZD/a+//vrll19oNFpsbCzKoUjToTSKEO/LL7/s2LHj/fv3z58/T1QMMpnsiy++wDBs/fr1zs7ORIWBtEQojSLEo9FoK1asAIA1a9ZoNBpCYli1alVubm6vXr0WL15MSABIy4WujSJmoba21tXVNT8//9ixY3hpZ1O6d+/ewIEDSSTS3bt3vby8TNw70tKh0ShiFiwtLcPDwwFg7dq1KpXKlF1ryzgtX74c5VBEB2g0ipgLpVLp5uaWm5vr5+fn7e3NZrMZDAadTudyuUwmk06nM5lMLpdLp9MZDIZBqgrgNmzYEBkZ2a1bt9TUVBqNZqhmkbYDpVHEjJw4ceLs2bOHDx9uys5UKpVGo1GpVB6Pp91oaLvOixwOBy+vlZWV1b
t375qamvj4+OHDhxv5+JDWCaVRxLw8e/bs8uXLEolEKBTKZDKpVCoWi6urq6VSqUwmEwgEUqlUKpVKJBJ9eiGTyRwOh8lkisVikUgUHBwcHR1tqENA2hqURpGWSi6XKxQKuVwuEAgEAoF2u94X6+wgFArx33wmkxkWFvbll19yOByiDwhpqVAaRdoitVotFovFYnFNTQ1e/AVBdIbSKIIgiF7QhCcEQRC9oDSKIAiiF5RGEQRB9ILSKIIgiF7+D7q/LvvYTNB9AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 9 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "UJ3hePSwkP2F", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# this returns an Lime Explainer class\n", - "# The explainer contains details for why the model behaved the way it did\n", - "exp = explainer.explain_instance(test_dataset.X[active_id], model_fn, num_features=5, top_labels=1)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "BPs0Txu4kP2H", - "colab_type": "code", - "outputId": "3d7c93c4-9a95-4fbb-89a9-9d067250380a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 188 - } - }, - "source": [ - "# If we are in an ipython notebook it can show it to us\n", - "exp.show_in_notebook(show_table=True, show_all=False)" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "display_data", - "data": { - "text/html": [ - "\n", - " \n", - " \n", - "
\n", - " \n", - " \n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "F6SGgGagkP2M", - "colab_type": "text" - }, - "source": [ - "This output shows the fragments that the model believes contributed towards toxicity/non-toxicity. We can reverse our the hash function and look at the fragments that activated those fingerprints for this molecule." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "4ja4_jCKkP2N", - "colab_type": "code", - "outputId": "39a21a49-da47-48cf-bc6a-a64ee1c93c1f", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "def fp_mol(mol, fp_length=1024):\n", - " \"\"\"\n", - " returns: dict of \n", - " dictionary mapping fingerprint index\n", - " to list of smile string that activated that fingerprint\n", - " \"\"\"\n", - " d = {}\n", - " feat = dc.feat.CircularFingerprint(sparse=True, smiles=True, size=1024)\n", - " retval = feat._featurize(mol)\n", - " for k, v in retval.items():\n", - " index = k % 1024\n", - " if index not in d:\n", - " d[index] = set()\n", - " d[index].add(v['smiles'])\n", - " return d\n", - "# What fragments activated what fingerprints in our active molecule?\n", - "my_fp = fp_mol(Chem.MolFromSmiles(test_dataset.ids[active_id]))\n", - "\n", - "# We can calculate which fragments activate all fingerprint\n", - "# indexes throughout our entire training set\n", - "all_train_fps = {}\n", - "X = train_dataset.X\n", - "ids = train_dataset.ids\n", - "for i in range(len(X)):\n", - " d = fp_mol(Chem.MolFromSmiles(ids[i]))\n", - " for k, v in d.items():\n", - " if k not in all_train_fps:\n", - " all_train_fps[k] = set()\n", - " all_train_fps[k].update(v)" - ], - "execution_count": 12, - "outputs": [ - { - "output_type": "stream", - "text": [ - "RDKit WARNING: [02:41:10] WARNING: not removing hydrogen atom without neighbors\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "code", - 
"metadata": { - "id": "PAe3ZOhUkP2Q", - "colab_type": "code", - "outputId": "2f8ba2f1-8dc9-4e08-c4da-df0b201837c3", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 167 - } - }, - "source": [ - "# We can visualize which fingerprints our model declared toxic for the\n", - "# active molecule we are investigating\n", - "Chem.MolFromSmiles(list(my_fp[242])[0])" - ], - "execution_count": 13, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcIAAACWCAIAAADCEh9HAAAABmJLR0QA/wD/AP+gvaeTAAAKkklEQVR4nO3df0zU9R/A8dfxQ0zwR3mpIUpUQBrp/AWK1TTtmz9abm30321YcS3XmG0u/miOP/wH19rOrTbFtWJf+4f+cJ41ty/YEjIRxfkrEUIFIawwARHsQO6+f3zYR7wOvXjf3fvD3fPx14379RLZc5/P3efz/th8Pp8AAMYrTvcAADCxkVEAUEJGAUAJGQUAJWQUAJSQUQBQQkYBQAkZBQAlZBQAlJBRAFBCRmFJ/f3y6aeSny92uyQlSVqavPWWHD6seywgABvn1MNyLl2S11+Xjo4AdxUUyDffSGJixGcCxsTWKCymu1vWr5eODrHZZNs2+eknuXxZDh6UvDwRkW+/lQ8+0D0i8AC2RmEx27fLnj0iIl98Idu23f/50JCsXSvHj4uI1NWNVBWwADIKK/F4ZM4c6emRnBw5f15stgfuPXNGli0TEXnnHfnySy0DAv/ETj2spK5OenpERN5+27+hIrJ0qWRliYgcORLpwYCxkVFYydmzIzeWLw/8gBUrRERu3JCurgiNBDwKGYWVdHaO3Jg/P/AD0tP9HwnoRkZhJX19IzeSkwM/ICVl5Mbt25GYBwgCGYWVmJ+HjvXNp/nzOP50YRX8LcJKpk0buWFulvoxfz59eiTmAYJARmEl8+aN3Lh2LfADWltFRGy2MT88BSKOjMJKFi0auXHyZOAH1NeLiGRm3t9uBXQjo7CSvDyZOVNEpLJSvF7/exsa5OpVEZE334z0YMDYyCisJDFRtm4VEblyRT777IG7hodlxw4RkYQEee89DbMBY+BkUFhMb6/k5IwsTfL++1JYKHa7NDVJWZnU1oqIfPyx7N6te0rgPjIK62lqkg0bRr5N8vPuu1JeztFOsBQyCksaGJDPP5eDB6W5We7ckVmzZNUqKSqS117TPRngj4zCAtxuKSyULVvkq6+CfcrWrXLokHz9NV83QTt2jmABg4PS3T3mIfcB9fVJd7cMDoZtJiBYZBQAlJBRAFBCRgFACRkFACVkFACUkFEAUEJGAUAJGQUAJWQUAJSQUQBQQkYBQAkZBQAlZBQAlJBRAFBCRgFACRkFACVkFACUkFEAUEJGAUAJGQUAJWQUAJSQUQBQQkYBQAkZBQAlZBQAlJBRAFBCRgFACRkFACVkFACUkFEAUEJGAUAJGQUAJWQUAJSQUQBQkqB7AEBOJyX9Lzd3XmqqI+in/Dc1tT039z9JScvDOBcQFLZGoV+rx/NJff2hzs7gn3Kos/OT+vpWjyd8UwFBIqMAoISMAoASMgoASsgoACghowCghIwCgBIyCgBKyCgAKCGjAKCEjAKAEjIKAErIKAAoIaMAoISMAoASMgoASsgoACghowCghIwCgBIyCgBKyCgAKCGjAKCEjAKAEjIKAErIKAAoIaMAoISMAoASMgoASsgoACghowCghIwCgBIyCgBKyCg
AKCGjAKCEjMLS2tra2tradE8BPEyC7gEAyc7O3rFjx6JFi0b/sLGx8aOPPmpoaIiLi1u4cKHL5Vq8eLF575YtWzIyMrKzsyM+LODP5vP5dM8APODmzZs7d+7cv3//8PDw9OnTRaS3tzc+Pr6oqGjXrl12u133gMAD2KmHhdy7d6+8vHzBggV79+612WxOp7OlpaW1tbWkpCQ+Pn7v3r2ZmZm7d+/2eDy6JwVG8QHWUF1dnZOTY/xZrlu37sKFC6PvbWpq2rx5s3FvVlbW4cOHdc0J+CGj0K+5ubmgoMBIZGZmZmVl5ViPrKqqeuGFF4xHrl+//uLFi5GcEwiIjEKnvr6+0tLSpKQkEUlJSSktLf37778f/pTBwUGXy2V8ZpqYmFhcXNzT0xOZaYGAyCj0GB4erqiomD17tojExcU5HI7ff/89+KffvHmzuLg4Pj5eRGbOnOlyue7duxe+aYGHIKPQoK6uLi8vz9g3z8vLO3HixPhe58yZM6+88orxOkuWLDl27Fho5wSCQUYRUe3t7Q6Hw2aziUhaWlpFRYXX61V8TbfbnZGRYcT0jTfeuHr1akhGBYJERhEh/f39ZWVlKSkpIjJlypSSkpK+vr5QvfjAwEBZWdnUqVNF5LHHHispKbl9+3aoXhx4ODKKSHC73enp6eYG47Vr18LxLh0dHeambmpqakg2dYFHIqMIr4aGhpdeeskI6NKlS2tra8P9jvX19atWrTLeccWKFT///HO43xExjowiXDo7O51Op/Flut1uj+SX6V6vt6KiYs6cOSJis9kKCgquX78embdGDCKjCD3j0M5p06aZh3b29vZGfow7d+6UlpZOnjxZRJKTk0tLS+/evRv5MRD1yChCzO12P/vss+aJRpcuXdI7T0tLi3mK1Lx58yoqKvTOg+hDRhEyjY2NGzduNIKVnZ39/fff657ovqNHj5oL8a1du/bcuXO6J0L0IKMIgVu3bhUXFyckJIjI448/7nK5hoaGdA/lzzhv6sknnzTPm/rzzz91D4VoQEahZGhoaN++fUabEhISnE6nxdt069atkpKSSZMmGcUvKyvzeDy6h8LERkYxfkePHn3xxReNPeVXX311Au0pX758edOmTdb8/AETDhnFePz666/m9zbPPffcQ5a2s7KqqqoFCxaY34b98ssvuifChERG8e9E2VFE/zw2i2X38G+RUQTL75h2h8Nx48YN3UOFRldXF8vuYdzIKIISC2dYNjQ0vPzyy+Z5qzU1NbonwsRARvEIo9f7mDt3btSv9+F2u59++ulwr6KCaEJGMSZj9TljabuYWn1u9Jp+xj88hGv6IfqQUQTmt1EWg2shx9pmOMaNjMIfV+YY7eTJkytXrjR+G7m5ueO+3gmiGBnFfVwnLqDRV9+LskMUEBJkFD4fVy0OgnHArHEtaOOA2UdeCxoxgozCV1VVtXDhQvNknosXL+qeyLqam5uj4PQthBYZjWlNTU2bN282opCVlfXdd9/pnmhiqK6uzsnJMX5v69atO3/+vO6JoBMZjVHd3d3mQkczZsxgoaN/y1jaym63m0tbdXV16R4KepDRmGN8YTJr1ixz2c0//vhD91AT1V9//WUutPrEE09Yc6FVhBsZjS0//PCDuQj8mjVrzp49q3uiaNDY2Lhhwwbjt/r8888fOXJE90SIKDIaK65fv+5wOLgkUfi43e5nnnnGPGGhpaVF90SIEDIa/UYvbTdlypSJvrSdlXk8HitcEhURRkajmdfrraysnD9/vnm59ra2Nt1DRb/Ozk6n0xkXFyciTz311L59+4aHh3UPhTAio1Hr1KlT+fn5xj7m8uXLjx8/rnui2HL69OnVq1cbv/9ly5bV1tbqngjhQkaj0G+//WZuDaWmprI1pIuxN5Cenm7uDbS2tuoeCqFHRqOK8dnc1KlTRWTSpEnFxcUxsrSdlfX39/t9Nj0wMKB7KIQSGY0eft8UX7lyRfdEuK+9vd1cdi8tLY1l96KJzef
zCSa+/fv3O51OEVm8eLHL5VqzZo3uiRDAjz/+uH379nPnzolIeXl5UVGR7okQAmQ0Sty9ezc/P7+wsPDDDz80VrqDNXm93gMHDuzZs6empiY5OVn3OAgBMho9fD6fsc8I6+M/K5qQUQBQEqd7AACY2MgoACghowCghIwCgJL/A3XsPxPOBS/fAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 13 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "fK7Sy_vJkP2S", - "colab_type": "text" - }, - "source": [ - "We can also see what fragments are missing by investigating the training set. According to our explanation having one of these fragments would make our molecule more likely to be toxic." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "0_kZg3NCkP2T", - "colab_type": "code", - "outputId": "e759509b-34cb-481a-af58-492212b22aa1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 167 - } - }, - "source": [ - "Chem.MolFromSmiles(list(all_train_fps[242])[0])" - ], - "execution_count": 14, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAcIAAACWCAIAAADCEh9HAAAABmJLR0QA/wD/AP+gvaeTAAAKR0lEQVR4nO3da0zV9QPH8c8PAUvxkpblBbqKd0s28zIqrUzmjrUe6OYYuDUntX8BTgW7DNCsodA6uGri1oOjtDnqEVijQW46lZGzojBQMwtNpgtmQoLczv/BoWkm1y/yPQfer4dw0I8HfHNuv99xvF6vAAB9FWR7AAAENjIKAEbIKAAYIaMAYISMAoARMgq/Vl5eXl5ebnsF0BWHFzzBnzmOI4mfUvgzbo0CgBEyCgBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGgm0PALoSFfW17QlANxyv12t7A9Apx5Ekfkjhz7hTDwBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGyCgAGCGjAGCEjAKAETIKAEbIKAAYIaMAYISMAoARMgoARsgoABghowBghIwCgBEyCgBGgm0PALqybJntBUB3HK/Xa3sDAAQw7tQDgBEyCgBGyCgAGCGj8CPr1slx5DiaMkXNzbe5wMKFchxFRw/4MqBzZBT+6I8/9NlntkcAPUNG4aeys8WrSBAQyCj8zqRJkvTzzzpwwPYUoAfIKPzOlCl6+mlJ2rHD9hSgB8go/E5dnVJSJOnoUR07ZnsN0B0yCr/T2KgVKzRrliTt3Gl7DdAdMgq/09Ymx9GmTZJUUKCqKtuDgC6RUfgd3xP0sbGaPFler7KybA8CukRG4adCQpSUJEl5eaqpsb0G6BwZhf9KSNCYMWpultttewrQOTIK/zV6tBISJGn3bl29ansN0AkyCpu8Xu3dqzVrOr1AcrJCQ3X1qnbvvs1n16zR3r0c7ATLyCisKSvTokVau1b796uo6PaXmThRsbGSlJOj5mYNG3bjU0VF2r9fa9dq0SKVlQ3EYOC2yCgsuHhRCQlavFhlZZo0Sbm5euGFTi+8ebMcRxcvKi9PI0bc+Pjy5crPV0RER45Xr9bvvw/AduBWZBQDqrlZOTmaPl179ig4WImJqqzU+vUK6vwnccYMuVySlJWle++98XHH0apVqqxUerqGD9fnn2vmTGVkqKnpjv8rgJuRUQycwkLNmKHkZNXXy+VSZaVycjR6dPdf6Ds2tKpKx4/f+qkRI5SRodOnFRenxkZt3aqpU3nAFAOKjGIgVFYqJkYvvqhff9WMGSoqUmGhHnmkp18eHa1FiyTp7NnbXyA8XHv36uBBPf64LlzQ2rVaulTl5f0zHugaGcWdVVenpCTNmaOvv9a4cXK79dNPWr6813/O5s3dX2bJEn33nTweTZigQ4cUFaX4eF2+3IfVQC+QUdwpLS3as0fTpmnXLjmO1q/XqVNKSvrXs+0999JLiozs/mJBQYqP16lTSk1VSIj27dO0adqxQ9ev9+UvBXqC96nHHVFSog0bVFEhSc89J7dbs2cP9IbTp7VxY8e5nyMj9cEHHU9VAf2LW6PoZ2fOaPVqLVumigpNnar8fJWUWGiopMhIFRaquFizZun0aa1cqWXLdPKkhSUY3Lg1in7T0KDsbGVm6vp1hYVp40a9+aaGD7c9S2pp0SefKD1df/2lkBC99pq2btXYsbZnYbAgo+gH7e3Ky1NKii5dUlCQYmOVlaX777c9699qa7Vtmz7+WG1tGjdOaWl6/fU+PlAL3IyMwlRZmZKSOg7HXLBAOTlasMD2ps59/72Sk3X4sCQ
98YTcbj3zjO1NCHA8Noq+u3BB8fEdh7RPmSKPR6Wlft1QSfPm6dAhFRTo4Yf1ww9askQrV+rcOduzEMi4NYq+uHZNO3dq5041NmrECL3xht55R2Fhtmf1RmOjdu3Se++pvl6hoXr1VW3frlGjbM9CACKj6B2vV198oc2bO84D4nLpo4/04IO2Z/XVxYvaskV5efJ6NWmS0tO1bl1XB/gD/0VG0QsnTig5WUeOSFJUlHJyFB1te1N/OH5cSUkqLZWk+fPldmvxYtubEDj4tYseqalRQoIWLNCRI5o4Ubm5+vbbQdJQSfPn6+hR5ecrPFzHjys6WqtXq7ra9iwECG6Nohu+F12mpenq1Y4XXb77bo9OyxSI/v5bWVnasUNNTRo5Ups2acsW3XWX7Vnwb2QUXSks1IYNHedVcrnkduvRR21vuvPOn9fbb2vfPkkKD9f27YqPt70Jfow79bi9ioqK2NhtL7+ss2c1e7aKi1VYOCQaqn9Ou1dcrNmzdf68XnmlLTb2fxW+EwQA/8GtUdyqtrY2LS1tz549ra2tMTHlLtfchAQFB9ueZUNrq3JzdeDAp0VF64KDg9evX79t27bx48fb3gU/4wX+0dLSkpube99990nyVePy5cu2R9lXV1eXmpoaGhoq6Z577snMzLx+/brtUfAjZBQdvvnmmzlz5vh+uT777LM//vij7UX+paqqasWKFb7rZ9q0aV9++aXtRfAXZBTeM2fOrFq1yheIxx57LD8/3/Yi/1VcXDxz5kzfdfX888+fPHnS9iLYR0aHtIaGhvT09OHDh0saOXJkenp6Y2Oj7VH+rrm52e12jxkzRlJISEhiYuKVK1dsj4JNZHSIam9v93g8DzzwgCTHceLi4mpqamyPCiR//vlnYmLisGHDJI0fP97tdre2ttoeBTvI6FBUVla2cOFC3z3TJ5988tixY7YXBaoTJ0489dRTvmsyKirq8OHDthfBAjI6tFy4cCEuLs5xHEmTJ0/2eDzt7e22RwW8goKChx56yBdTl8t17tw524swoMjoUHHt2rXMzMywsDBJd999d2pqan19ve1RgwdX71BGRocEbi4NDG7sD01kdJC7+cG7efPm8eDdALjloefS0lLbi3BnkdFBi6eSLeKFEEMKGR2EeGGjn/jvy3Kbmppsj0L/I6ODDYfZ+BsOEhv0yOjgcfNB35GRkQcOHLC9CDeUlJRwyoLBiowOBpyCKCBwAq3BiowGtra2No/HM2HCBElBQUFxcXGXLl2yPQpdqa2tTUxMDA4O9v3Oc7vdLS0ttkfBCBkNYAcPHpw7d67vfuLSpUvLy8ttL0JPVVZWxsTE+L5306dP/+qrr2wvQt+R0YBUXV0dFxfn+08YHh7u8XhsL0JfFBQUPPrPG7O4XK5ffvnF9iL0BW8iEniuXLkSERFRX18fFhb21ltvbdiw4S7euzJgNTU1ffjhh++//35DQ8OoUaOqq6vHjh1rexR6h4wGpJSUlN9++y07OzsiIsL2FvSDmpqajIwM39ODtreg18hoQPJ6vb4DtzGY8G0NUGQUAIzwPvUAYISMAoARMgoARsgoABj5P2kJ4RysfUYRAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 14 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Tp5vzQj7kP2V", - "colab_type": "code", - "outputId": "b6aa4ad5-223c-4c94-d8c4-241161df9ff1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 167 - } - }, - "source": [ - 
"Chem.MolFromSmiles(list(all_train_fps[242])[2])" - ], - "execution_count": 15, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcIAAACWCAIAAADCEh9HAAAABmJLR0QA/wD/AP+gvaeTAAAPlklEQVR4nO3df0yTdx4H8E+x3jiUIUrEHwg7pkVBRRiw+AMvAZwkywR1OkQiU7dx6ra2YGsBf+FE/MEEjTHG6Z1mp567uLntcpJsYDRDXJxUDqS0TMwph8iJCIgI/XV/sGy5HbdZ2j6fp+37Ff9SE9+p8e372z59HonVaiUAABgqL+4AAACuDTUKAGAX1CgAgF1QowAAdpFyB3BtJpPp5s2b3CkEMnr06EmTJnGnABAdCT6pt0dra+v48eO5UwgkNTU1OTk5KyuLOwg4TH9/f0lJyYYNG0aOHMmdxYWhRu3S3t6ekJDAnUIIZrO5sbHRZDJdu3YtOjqaOw44RnFxsUqlSkhIKC8v587iwlCj8KxUKlVxcfGcOXO++eYbiUTCHQfs1dbWJpPJOjs7L1y4kJyczB3HheEjJnhWW7duHT9+/JUrVz755BPuLOAAubm5nZ2dixYtQofaCWsUbHDs2LG33347KCiooaFhxIgR3HFg6LRabUxMjFQqra2tlclk3HFcG9Yo2GDNmjUxMTHNzc3FxcXcWcAuCoXCYrHI5XJ0qP2wRsE2V65cmTdvnre3t06nCwkJ4Y4DQ3HmzJn09PSxY8caDAY/Pz/uOC4PaxRsM2fOnOXLl/f29ubm5nJngaH48e9u165d6FCHwBoFmzU3N0+dOrWnp+fy5cvx8fHcccA227dvLygoiIqK+u6777y8MKQcAC8i2CwoKCgnJ4eI5HK5xWLhjgM2+PF97dLSUnSoo+B1hKHQaDQhISFarfbEiRPcWcAGKpWqp6cnLS1t/vz53FncBw71MESnT59euXIlPqZwIfh40EmwRmGIVqxYER8f39bWtmvXLu4s8OssFotCobBarWq1Gh3qWFijMHS4hNuFHD9+/K233sJXJ5wBaxSGLioqKjMzs7+/X61Wc2eBX9Ld3b1lyxYi2rdvHzrU4bBGwS73798PCwvr7OwsKytbuHAhdxwYnFqt3rdv3+zZsysrK3FbGYfDGgW7BAYGDlzLrVQqjUYjdxwYxK1btw4ePOjl5XXgwAF0qDOgRsFeSqVSJpPpdLojR45wZ4FBKJXKvr6+1atXx8bGcmdxTzjUgwN8/vnnqamp/v7+BoMhICCAOw78pLy8PCkpydfXV6/Xe86TGgSGNQoOkJKSsnDhwo6OjoKCAu4s8BOTyaRUKolo8+bN6FDnwRoFx6ivr581a5bFYtFqtTNmzOCOA0REBw8elMvlL7744s2bN5977jnuOG4LaxQcIzw8PCsry2w2KxQK7ixARPTw4cMdO3YQ0f79+9GhToUaBYfZsWNHQEBARUXF+fPnubMAbdmypb29PTExcdGiRdxZ3BwO9eBIhw4deu+990JDQ+vr67GAGNXX10dGRlqtVrzHIgCsUXCkdevWzZgxo6mpqbS0lDuLR1MqlSaTaf369ehQAWCNgoNVVFQkJibiChtGn3322ZIlS/z9/RsbG8eMGcMdx/1hjYKDJSQkpKSkdHd35+fnc2fxRP39/Zs2bSKiDz74AB0qDKxRcLympqbw8HCj0VhVVRUXF8cdx7Ps3r07Nzc3PDy8pqZGKpVyx/EIWKPgeKGhoQPP7x24wSV3HA9y//79oqIiIiopKUGHCgY1Ck6Rn58/bty4p099zp3r5M7iQTQaTVdXV2pq6iuvvMKdxYPgUA/OcuZMc3p6UFAQ6fXk48OdxgNUV1fHxsZKpdK6uropU6Zwx/EgWKPgLG+8ERQXR83NtGcPdxQPYLVaBx7Ump2djQ4VGN
YoOFFVFc2dS97eVF9PL7zAncatnTp1KiMjIzAw0GAwPP/889xxPAvWKDjR7NmUlka9vaTRcEdxa729vXl5eUS0e/dudKjwUKPgXHv30ogRdPYsXbrEHcV9FRUV3blzJzo6etWqVdxZPBFqFJwrKIhUKiIihYLMZu407uju3bsffvihRCIpLS318sK/aAZ40cHp1Gp64QW6cYP++EfuKO4oJyfnyZMn6enp8fHx3Fk8FD5iAiGcPUtpaTR2LBkM5OfHncaNVFZWxsfHe3t7NzQ0BAcHc8fxUFijIIQ33qD586mtjXbu5I7iRn78nlhubi46lBHWKAhEq6XYWBo2jP7xDwoL407jFo4ePZqVlTVp0qSGhgYffMOBD9YoCCQqilavpv5+2riRO4pb6Orq2rZtGxEVFxejQ3mhRkE4hYXk50d/+xuVlXFHcX0FBQWtra1z585dtmwZdxZPh0M9CKq4mFQqmjaNampo+HDuNC7r+++/nz59utFo/Pbbb2NiYrjjeDqsURDU+++TTEY6HR0+zB3FlSkUir6+vrVr16JDxQBrFIT25Ze0aBH5+5PBQAEB3Glc0Ndff71gwQJfX1+DwTBu3DjuOIA1CoJ77TVKTqaODtq6lTuKCzKZTAqFgoi2bduGDhUJrFFgoNNRZCRZLFRdTTNncqdxKaWlpUqlcvLkyXV1dXiEtUhgjQKDadNo3Toym0mh4I7iUh4+fLhz504iKi0tRYeKB2oUeGzfTgEBdPEiffopdxTXkZ+f397enpSU9Oqrr3JngZ/gUA9sDh+mDRvod7+j+nry9uZOI3o3b96cNWsWEWm12unTp3PHgZ9gjQKbrCyaOZNu36aSEu4orkCpVJpMpnfffRcdKjZYo8CpooISE2nkSNLracIE7jQidu7cuddff3306NEGg2HMmDHcceC/YI0Cp4QEWryYHj+mvDzuKCLW19en0WiIqLCwEB0qQlijwKypiSIiqK+Prl6luDjuNKJUWFi4efPmiIiIGzduSKVS7jjwc1ijwCw0lBQKslpJLif8n/6/Wltb9+7dS0QlJSXoUHFCjQK//HyaMIGuXqVTp7ijiI9are7q6lq6dOmCBQu4s8DgcKgHUThxglavpokTqaGBRo7kTiMa169fj4uLGz58eF1d3eTJk7njwOCwRkEUMjMpLo7+9S/as4c7imhYrVa5XG6xWHJyctChYoYaBVGQSKi0lIYPp6dPuaOIxscff1xZWRkYGLhp0ybuLPBLcKgHEWlpwdWjP3j8+HFYWFhLS8vJkydXrVrFHQd+CdYoiAg69EdFRUUtLS0vvfRSRkYGdxb4FVijAKJz+/bt8PDwvr6+y5cvz5s3jzsO/AqsURCXgACSSMjLi3S6n//Sxo0kkVBaGkcsYalUqqdPn2ZkZKBDXQJqFMTIavXcj+wvXrx47tw5Hx+fgVuLgvihRkGMvLzo9Gm6c4c7h+DMZrNSqSSivLy84OBg7jjwTFCjIEaJiWQ0UnExdw7BHT16tKamJjg4eKBMwSWgRkGM1q4lLy86fpz+/W/uKAJ69OjR1q1biWj//v0+Pj7cceBZoUZBjCZOpMWL6ckTOniQO4qACgoKHjx4MG/evCVLlnBnARugRkGMTCbSaIiIDh2i7u7Bf49EItCPuXOPSARx7NixYcOGHTp0SCKRCPlqg51QoyBSMTGUmEiPHtGRI9xRhPLyyy+XlZVFRkZyBwHb4PaFIF4aDZWXU0kJvf8+/e/jhAX84sgfiP4g2B8GLgdrFMQrKYliYujePTp5kjsKwP+HGgVRG7i30d69ZDZzRwH4P1CjIGpLlpBMRrdu0V//SsOGcacBGAxqFETNy4tUKiKi3btxV3wQKdQoiN2qVTRhAtXU0PXr3FEABoMaBbH7zW9o4IuRf/87dxSAwaBGwQVkZdGoUWQ0cucAGAxqFESkpWXwn/f1pQ0bhI0C8Mxw93sQi6oq+v3vSS6nffu4owDYAmsURMFiIYWCjEb67W
+5owDYCGsUROFPf6I1a2jiRNLracQI7jQAtkCNAr/ubgoLo3v36M9/ppUrudMA2AiHeuBXWEj37tHs2ZSezh0FwHZYo8CsqYkiIqivj65epbg47jQAtsMaBWY5OfT0KWVmokPBVWGNAqeKCkpMpJEjSa+nCRO40wAMCdYosDGbSaEgIsrPR4eCC0ONApsjR6i2lkJDfyhTABeFQz3w6OggmYwePKBPP6XFi7nTANgBaxR4bNtGDx5QQgI6FFwe1igw0OkoMpIsFqquppkzudMA2AdrFBhkZ5PRSFlZ6FBwB1ijILQvvqCUFPL3J4OBAgK40wDYDWsUBNXf/8OzlbZvR4eCm0CNgqAOHCCDgaZNo3XruKMAOAgO9SCctjaSyaizky5coORk7jQADoI1CsLJy6POTnrtNXQouBWsURCIVksxMSSVUm0tyWTcaQAcB2sUBKJQkMVCcjk6FNwN1igI4S9/oRUraOxYMhjIz487DYBDYY2C0/X2kkZDRFRYiA4FN4QaBafbs4f++U+KiqLVq7mjADgBDvXgXM3NNHUq9fTQpUs0fz53GgAnwBoF51KpqKeH0tLQoeC2sEbBiaqqaO5c8vYmnY5CQrjTADgH1ig4y8DlTVYrqdXoUHBnWKPgLGfONKenBwUFkV5PPj7caQCcBmsUnKK7uzs7OzYqKqmk5BE6FNwbahScYufOna2trd7eT5YuxZWi4OZwqAfHu3XrVkREhNForKqqiouL444D4FxYo+B42dnZfX19b775JjoUPAHWKDhYeXl5UlKSr6+vXq8fP348dxwAp8MaBUcym81KpZKINm/ejA4FD4EaBUc6fPhwbW1taGioXC7nzgIgEBzqwWE6OjqmTJnS3t5+/vz5lJQU7jgAAsEaBYfZsmVLe3t7QkICOhQ8CtYoOEZ9fX1kZKTVatVqtTNmzOCOAyAcrFFwDKVSaTKZ1q9fjw4FT4M1Cg5w/vz5xYsX+/v7NzY2jhkzhjsOgKCwRsFe/f39arWaiHbs2IEOBQ+EGgV77d+/v7GxMTw8PCsrizsLAAMc6sEu9+/fl8lkXV1dZWVlCxcu5I4DwABrFOyi0Wi6urpSU1PRoeCxsEZh6Kqrq2NjY6VSaV1d3ZQpU7jjAPDAGoUhslqtCoXCYrEolUp0KHgyrFEYolOnTmVkZAQGBur1ej8/3JsZPBfWKAxFb29vXl4eERUVFaFDwcOhRmEoioqK7ty5Ex0dnZmZyZ0FgBkO9WCzu3fvTp06tbe399KlS/Hx8dxxAJhhjYLNNm7c+OTJkxUrVqBDAQhrFGxVWVkZHx/v7e2t0+lCQkK44wDwwxoFG1gsFoVCYbVaNRoNOhRgANYo2OCjjz565513Jk2a1NDQ4OPjwx0HQBRQo/Csuru7ZTJZa2vr2bNnly9fzh0HQCxwqIdnVVBQ0NraOmfOnGXLlnFnARARrFF4Jo8ePQoODu7p6bl27Vp0dDR3HAARkXIHANcwatSoGzdufPXVV+hQgJ/BGgUAsAveGwUAsAtqFADALqhRAAC7oEYBAOyCGgUAsMt/ALHNRzoMuI65AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 15 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "bgzEgQrikP2X", - "colab_type": "code", - "outputId": "351d45d5-7af8-41c6-b0ac-358953d54225", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "Chem.MolFromSmiles(list(all_train_fps[242])[4])" - ], - 
"execution_count": 16, - "outputs": [ - { - "output_type": "stream", - "text": [ - "RDKit ERROR: [02:41:18] non-ring atom 0 marked aromatic\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "UStW3HMakP2c", - "colab_type": "code", - "outputId": "b1b6266b-28d7-4ac2-edce-2460ba367f27", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "Chem.MolFromSmiles(list(all_train_fps[242])[1])" - ], - "execution_count": 17, - "outputs": [ - { - "output_type": "stream", - "text": [ - "RDKit ERROR: [02:41:18] non-ring atom 0 marked aromatic\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "z5o3gkGxkP2f", - "colab_type": "code", - "outputId": "3e3a91a2-9df7-455e-dede-5b97cad671c1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "Chem.MolFromSmiles(list(all_train_fps[242])[3])" - ], - "execution_count": 18, - "outputs": [ - { - "output_type": "stream", - "text": [ - "RDKit ERROR: [02:41:18] non-ring atom 0 marked aromatic\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "2Qy-7X61kP2h", - "colab_type": "text" - }, - "source": [ - "Using LIME on fragment based models can give you intuition over which fragments are contributing to your response variable in a linear fashion." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "5kZkMHOBkP2i", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/08_Working_With_Splitters.ipynb b/examples/tutorials/08_Working_With_Splitters.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..2c9fc762b13241f7095d1299e04450d66b1cb327 --- /dev/null +++ b/examples/tutorials/08_Working_With_Splitters.ipynb @@ -0,0 +1,207 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "tTuYGOlnh117" + }, + "source": [ + "# Tutorial Part 8: Working With Splitters\n", + "\n", + "When using machine learning, you typically divide your data into training, validation, and test sets. The MoleculeNet loaders do this automatically. But how should you divide up the data? This question seems simple at first, but it turns out to be quite complicated. There are many ways of splitting up data, and which one you choose can have a big impact on the reliability of your results. This tutorial introduces some of the splitting methods provided by DeepChem.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/08_Working_With_Splitters.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "tS3siM3Ch11-", + "outputId": "3a96e0a7-46c1-4baa-91da-f98ca5a33d6d" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "D43MbibL_EK0", + "outputId": "e7b205ae-9962-4089-d49a-6d0ebe4c8430" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "omxBgQVDh12B" + }, + "source": [ + "## Splitters\n", + "\n", + "In DeepChem, a method of splitting samples into multiple datasets is defined by a `Splitter` object. Choosing an appropriate method for your data is very important. 
Otherwise, your trained model may seem to work much better than it really does.\n", + "\n", + "Consider a typical drug development pipeline. You might begin by screening many thousands of molecules to see if they bind to your target of interest. Once you find one that seems to work, you try to optimize it by testing thousands of minor variations on it, looking for one that binds more strongly. Then perhaps you test it in animals and find it has unacceptable toxicity, so you try more variations to fix the problems.\n", + "\n", + "This has an important consequence for chemical datasets: they often include lots of molecules that are very similar to each other. If you split the data into training and test sets in a naive way, the training set will include many molecules that are very similar to the ones in the test set, even if they are not exactly identical. As a result, the model may do very well on the test set, but then fail badly when you try to use it on other data that is less similar to the training data.\n", + "\n", + "Let's take a look at a few of the splitters found in DeepChem.\n", + "\n", + "### RandomSplitter\n", + "\n", + "This is one of the simplest splitters. It just selects samples for the training, validation, and test sets in a completely random way.\n", + "\n", + "Didn't we just say that's a bad idea? Well, it depends on your data. If every sample is truly independent of every other, then this is just as good a way as any to split the data. There is no universally best choice of splitter. It all depends on your particular dataset, and for some datasets this is a fine choice.\n", + "\n", + "### RandomStratifiedSplitter\n", + "\n", + "Some datasets are very unbalanced: only a tiny fraction of all samples are positive. In that case, random splitting may sometimes lead to the validation or test set having few or even no positive samples for some tasks. 
That makes it unable to evaluate performance.\n", + "\n", + "`RandomStratifiedSplitter` addresses this by dividing up the positive and negative samples evenly. If you ask for a 80/10/10 split, the validation and test sets will contain not just 10% of samples, but also 10% of the positive samples for each task.\n", + "\n", + "### ScaffoldSplitter\n", + "\n", + "This splitter tries to address the problem discussed above where many molecules are very similar to each other. It identifies the scaffold that forms the core of each molecule, and ensures that all molecules with the same scaffold are put into the same dataset. This is still not a perfect solution, since two molecules may have different scaffolds but be very similar in other ways, but it usually is a large improvement over random splitting.\n", + "\n", + "### ButinaSplitter\n", + "\n", + "This is another splitter that tries to address the problem of similar molecules. It clusters them based on their molecular fingerprints, so that ones with similar fingerprints will tend to be in the same dataset. The time required by this splitting algorithm scales as the square of the number of molecules, so it is mainly useful for small to medium sized datasets.\n", + "\n", + "### SpecifiedSplitter\n", + "\n", + "This splitter leaves everything up to the user. You tell it exactly which samples to put in each dataset. This is useful when you know in advance that a particular splitting is appropriate for your data.\n", + "\n", + "An example is temporal splitting. Consider a research project where you are continually generating and testing new molecules. As you gain more data, you periodically retrain your model on the steadily growing dataset, then use it to predict results for other not yet tested molecules. 
A good way of validating whether this works is to pick a particular cutoff date, train the model on all data you had at that time, and see how well it predicts other data that was generated later.\n", + "\n", + "## Effect of Using Different Splitters\n", + "\n", + "Let's look at an example. We will load the Tox21 toxicity dataset using random, scaffold, and Butina splitting. For each one we train a model and evaluate it on the training and test sets." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Sp5Hbb4nh12C" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "splitter: random\n", + "training set score: {'roc_auc_score': 0.9560766203173238}\n", + "test set score: {'roc_auc_score': 0.8088861019955839}\n", + "\n", + "splitter: scaffold\n", + "training set score: {'roc_auc_score': 0.9582835670901536}\n", + "test set score: {'roc_auc_score': 0.6803307954037949}\n", + "\n", + "splitter: butina\n", + "training set score: {'roc_auc_score': 0.9578120869103354}\n", + "test set score: {'roc_auc_score': 0.6057007877463954}\n", + "\n" + ] + } + ], + "source": [ + "import deepchem as dc\n", + "\n", + "splitters = ['random', 'scaffold', 'butina']\n", + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", + "for splitter in splitters:\n", + " tasks, datasets, transformers = dc.molnet.load_tox21(featurizer='ECFP', split=splitter)\n", + " train_dataset, valid_dataset, test_dataset = datasets\n", + " model = dc.models.MultitaskClassifier(n_tasks=len(tasks), n_features=1024, layer_sizes=[1000])\n", + " model.fit(train_dataset, nb_epoch=10)\n", + " print('splitter:', splitter)\n", + " print('training set score:', model.evaluate(train_dataset, [metric], transformers))\n", + " print('test set score:', model.evaluate(test_dataset, [metric], transformers))\n", + " print()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All of them produce very 
similar performance on the training set, but the random splitter has much higher performance on the test set. Scaffold splitting has a lower test set score, and Butina splitting is even lower. Does that mean random splitting is better? No! It means random splitting doesn't give you an accurate measure of how well your model works. Because the test set contains lots of molecules that are very similar to ones in the training set, it isn't truly independent. It makes the model appear to work better than it really does. Scaffold splitting and Butina splitting give a better indication of what you can expect on independent data in the future." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "wssi6cBmh12z" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "06_Going_Deeper_on_Molecular_Featurizations.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/09_Advanced_Model_Training.ipynb b/examples/tutorials/09_Advanced_Model_Training.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6100ab27d43990d283ae0f6c25c0d0a2ee866be1 --- /dev/null +++ b/examples/tutorials/09_Advanced_Model_Training.ipynb @@ -0,0 +1,326 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "tTuYGOlnh117" + }, + "source": [ + "# Tutorial Part 9: Advanced Model Training\n", + "\n", + "In the tutorials so far we have followed a simple procedure for training models: load a dataset, create a model, call `fit()`, evaluate it, and call ourselves done. That's fine for an example, but in real machine learning projects the process is usually more complicated. In this tutorial we will look at a more realistic workflow for training a model.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/09_Advanced_Model_Training.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. 
This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "tS3siM3Ch11-", + "outputId": "3a96e0a7-46c1-4baa-91da-f98ca5a33d6d" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "D43MbibL_EK0", + "outputId": "e7b205ae-9962-4089-d49a-6d0ebe4c8430" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "omxBgQVDh12B" + }, + "source": [ + "## Hyperparameter Optimization\n", + "\n", + "Let's start by loading the HIV dataset. It classifies over 40,000 molecules based on whether they inhibit HIV replication." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Sp5Hbb4nh12C" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "\n", + "tasks, datasets, transformers = dc.molnet.load_hiv(featurizer='ECFP', split='scaffold')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's train a model on it. We will use a `MultitaskClassifier`, which is just a stack of dense layers. 
But that still leaves a lot of options. How many layers should there be, and how wide should each one be? What dropout rate should we use? What learning rate?\n", + "\n", + "These are called hyperparameters. The standard way to select them is to try lots of values, train each model on the training set, and evaluate it on the validation set. This lets us see which ones work best.\n", + "\n", + "You could do that by hand, but usually it's easier to let the computer do it for you. DeepChem provides a selection of hyperparameter optimization algorithms, which are found in the `dc.hyper` package. For this example we'll use `GridHyperparamOpt`, which is the most basic method. We just give it a list of options for each hyperparameter and it exhaustively tries all combinations of them.\n", + "\n", + "The lists of options are defined by a `dict` that we provide. For each of the model's arguments, we provide a list of values to try. In this example we consider three possible sets of hidden layers: a single layer of width 500, a single layer of width 1000, or two layers each of width 1000. We also consider two dropout rates (20% and 50%) and two learning rates (0.001 and 0.0001)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "params_dict = {\n", + " 'n_tasks': [len(tasks)],\n", + " 'n_features': [1024],\n", + " 'layer_sizes': [[500], [1000], [1000, 1000]],\n", + " 'dropouts': [0.2, 0.5],\n", + " 'learning_rate': [0.001, 0.0001]\n", + "}\n", + "optimizer = dc.hyper.GridHyperparamOpt(dc.models.MultitaskClassifier)\n", + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", + "best_model, best_hyperparams, all_results = optimizer.hyperparam_search(\n", + " params_dict, train_dataset, valid_dataset, metric, transformers)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`hyperparam_search()` returns three arguments: the best model it found, the hyperparameters for that model, and a full listing of the validation score for every model. Let's take a look at the last one." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'_dropouts_0.200000_layer_sizes[500]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.759624393738977,\n", + " '_dropouts_0.200000_layer_sizes[500]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7680791323731138,\n", + " '_dropouts_0.500000_layer_sizes[500]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.7623870149911817,\n", + " '_dropouts_0.500000_layer_sizes[500]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7552282358416618,\n", + " '_dropouts_0.200000_layer_sizes[1000]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.7689915858318636,\n", + " '_dropouts_0.200000_layer_sizes[1000]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7619292572996277,\n", + " '_dropouts_0.500000_layer_sizes[1000]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.7641491524593376,\n", + " '_dropouts_0.500000_layer_sizes[1000]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7609877155594749,\n", + " '_dropouts_0.200000_layer_sizes[1000, 
1000]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.770716980207721,\n", + " '_dropouts_0.200000_layer_sizes[1000, 1000]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7750327625906329,\n", + " '_dropouts_0.500000_layer_sizes[1000, 1000]_learning_rate_0.001000_n_features_1024_n_tasks_1': 0.725972314079953,\n", + " '_dropouts_0.500000_layer_sizes[1000, 1000]_learning_rate_0.000100_n_features_1024_n_tasks_1': 0.7546280986674505}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "all_results" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can see a few general patterns. Using two layers with the larger learning rate doesn't work very well. It seems the deeper model requires a smaller learning rate. We also see that 20% dropout usually works better than 50%. Once we narrow down the list of models based on these observations, all the validation scores are very close to each other, probably close enough that the remaining variation is mainly noise. It doesn't seem to make much difference which of the remaining hyperparameter sets we use, so let's arbitrarily pick a single layer of width 1000 and learning rate of 0.0001.\n", + "\n", + "## Early Stopping\n", + "\n", + "There is one other important hyperparameter we haven't considered yet: how long we train the model for. `GridHyperparamOpt` trains each for a fixed, fairly small number of epochs. That isn't necessarily the best number.\n", + "\n", + "You might expect that the longer you train, the better your model will get, but that isn't usually true. If you train too long, the model will usually start overfitting to irrelevant details of the training set. 
You can tell when this happens because the validation set score stops increasing and may even decrease, while the score on the training set continues to improve.\n", + "\n", + "Fortunately, we don't need to train lots of different models for different numbers of steps to identify the optimal number. We just train it once, monitor the validation score, and keep whichever parameters maximize it. This is called \"early stopping\". DeepChem's `ValidationCallback` class can do this for us automatically. In the example below, we have it compute the validation set's ROC AUC every 1000 training steps. If you add the `save_dir` argument, it will also save a copy of the best model parameters to disk." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1000 validation: roc_auc_score=0.759757\n", + "Step 2000 validation: roc_auc_score=0.770685\n", + "Step 3000 validation: roc_auc_score=0.771588\n", + "Step 4000 validation: roc_auc_score=0.777862\n", + "Step 5000 validation: roc_auc_score=0.773894\n", + "Step 6000 validation: roc_auc_score=0.763762\n", + "Step 7000 validation: roc_auc_score=0.766361\n", + "Step 8000 validation: roc_auc_score=0.767026\n", + "Step 9000 validation: roc_auc_score=0.761239\n", + "Step 10000 validation: roc_auc_score=0.761279\n", + "Step 11000 validation: roc_auc_score=0.765363\n", + "Step 12000 validation: roc_auc_score=0.769481\n", + "Step 13000 validation: roc_auc_score=0.768523\n", + "Step 14000 validation: roc_auc_score=0.761306\n", + "Step 15000 validation: roc_auc_score=0.77397\n", + "Step 16000 validation: roc_auc_score=0.764848\n" + ] + }, + { + "data": { + "text/plain": [ + "0.8040038299560547" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model = dc.models.MultitaskClassifier(n_tasks=len(tasks),\n", + " n_features=1024,\n", + " layer_sizes=[1000],\n", + " 
dropouts=0.2,\n", + " learning_rate=0.0001)\n", + "callback = dc.models.ValidationCallback(valid_dataset, 1000, metric)\n", + "model.fit(train_dataset, nb_epoch=50, callbacks=callback)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Learning Rate Schedules\n", + "\n", + "In the examples above we use a fixed learning rate throughout training. In some cases it works better to vary the learning rate during training. To do this in DeepChem, we simply specify a `LearningRateSchedule` object instead of a number for the `learning_rate` argument. In the following example we use a learning rate that decreases exponentially. It starts at 0.0002, then gets multiplied by 0.9 after every 1000 steps." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Step 1000 validation: roc_auc_score=0.736547\n", + "Step 2000 validation: roc_auc_score=0.758979\n", + "Step 3000 validation: roc_auc_score=0.768361\n", + "Step 4000 validation: roc_auc_score=0.764898\n", + "Step 5000 validation: roc_auc_score=0.775253\n", + "Step 6000 validation: roc_auc_score=0.779898\n", + "Step 7000 validation: roc_auc_score=0.76991\n", + "Step 8000 validation: roc_auc_score=0.771515\n", + "Step 9000 validation: roc_auc_score=0.773796\n", + "Step 10000 validation: roc_auc_score=0.776977\n", + "Step 11000 validation: roc_auc_score=0.778866\n", + "Step 12000 validation: roc_auc_score=0.777066\n", + "Step 13000 validation: roc_auc_score=0.77616\n", + "Step 14000 validation: roc_auc_score=0.775646\n", + "Step 15000 validation: roc_auc_score=0.772785\n", + "Step 16000 validation: roc_auc_score=0.769975\n" + ] + }, + { + "data": { + "text/plain": [ + "0.22854619979858398" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "learning_rate = dc.models.optimizers.ExponentialDecay(0.0002, 0.9, 1000)\n", + "model = 
dc.models.MultitaskClassifier(n_tasks=len(tasks),\n", + " n_features=1024,\n", + " layer_sizes=[1000],\n", + " dropouts=0.2,\n", + " learning_rate=learning_rate)\n", + "model.fit(train_dataset, nb_epoch=50, callbacks=callback)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "wssi6cBmh12z" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "06_Going_Deeper_on_Molecular_Featurizations.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/09_Creating_a_high_fidelity_model_from_experimental_data.ipynb b/examples/tutorials/09_Creating_a_high_fidelity_model_from_experimental_data.ipynb deleted file mode 100644 index a20ab3639f3a3391dac77837890c8a4df8066cf0..0000000000000000000000000000000000000000 --- a/examples/tutorials/09_Creating_a_high_fidelity_model_from_experimental_data.ipynb +++ /dev/null @@ -1,2111 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "09_Creating_a_high_fidelity_model_from_experimental_data.ipynb", - "provenance": [] - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "6MNHvkiBl55x", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 9: Creating a high fidelity dataset from experimental data" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "S2FM7Mwil554", - "colab_type": "text" - }, - "source": [ - "Suppose you were given data collected by an experimental collaborator. You would like to use this data to construct a machine learning model. 
\n", - "\n", - "*How do you transform this data into a dataset capable of creating a useful model?*" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "xpVK4q5Ol558", - "colab_type": "text" - }, - "source": [ - "Building models from novel data can present several challenges. Perhaps the data was not recorded in a convenient manner. Additionally, perhaps the data contains noise. This is a common occurance with, for example, biological assays due to the large number of external variables and the difficulty and cost associated with collecting multiple samples. This is a problem because you do not want your model to fit to this noise.\n", - "\n", - "Hence, there are two primary challenges:\n", - "* Parsing data\n", - "* De-noising data\n", - " \n", - "In this tutorial, will walk through an example of curating a dataset from an excel spreadsheet of experimental drug measurements. Before we dive into this example though, let's do a brief review of DeepChem's input file handling and featurization capabilities.\n", - "\n", - "### Input Formats\n", - "DeepChem supports a whole range of input files. For example, accepted input formats for deepchem include .csv, .sdf, .fasta, .png, .tif and other file formats. The loading for a particular file format is governed by `Loader` class associated with that format. For example, with a csv input, we use the `CSVLoader` class underneath the hood. Here's an example of a sample .csv file that fits the requirements of `CSVLoader`.\n", - "\n", - "1. A column containing SMILES strings [1].\n", - "2. A column containing an experimental measurement.\n", - "3. 
(Optional) A column containing a unique compound identifier.\n", - "\n", - "Here's an example of a potential input file.\n", - "\n", - "|Compound ID | measured log solubility in mols per litre | smiles |\n", - "|---------------|-------------------------------------------|----------------|\n", - "| benzothiazole | -1.5 | c2ccc1scnc1c2 |\n", - "\n", - "\n", - "Here the \"smiles\" column contains the SMILES string, the \"measured log\n", - "solubility in mols per litre\" contains the experimental measurement and\n", - "\"Compound ID\" contains the unique compound identifier.\n", - "\n", - "### Data Featurization \n", - "\n", - "Most machine learning algorithms require that input data form vectors. However, input data for drug-discovery datasets routinely come in the format of lists of molecules and associated experimental readouts. To \n", - "transform lists of molecules into vectors, we need to subclasses of DeepChem loader class ```dc.data.DataLoader``` such as ```dc.data.CSVLoader``` or ```dc.data.SDFLoader```. Users can subclass ```dc.data.DataLoader``` to\n", - "load arbitrary file formats. All loaders must be passed a ```dc.feat.Featurizer``` object. DeepChem provides a number of different subclasses of ```dc.feat.Featurizer``` for convenience.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/09_Creating_a_high_fidelity_model_from_experimental_data.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tbLbuh6wl8tX", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - }, - "outputId": "9b09fbf5-13a7-4fd1-fa5d-9932f28b120b" - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 2814 100 2814 0 0 21813 0 --:--:-- --:--:-- --:--:-- 21813\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.79 s, sys: 609 ms, total: 3.4 s\n", - "Wall time: 3min 40s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "-rrEZ5ihl56A", - "colab_type": "text" - }, - "source": [ - "## Parsing data" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "a0AhOo1nl56D", - "colab_type": "text" - }, - "source": [ - "In order to read in the data, we will use the pandas data analysis library. \n", - "\n", - "In order to convert the drug names into smiles strings, we will use pubchempy. This isn't a standard DeepChem dependency, but you can install this library with `pip install pubchempy`." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "fYBi59mkl56F", - "colab_type": "code", - "outputId": "172f8a13-7050-406b-fdc0-db4a58ec2858", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 190 - } - }, - "source": [ - "!pip install pubchempy" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Collecting pubchempy\n", - " Downloading https://files.pythonhosted.org/packages/aa/fb/8de3aa9804b614dbc8dc5c16ed061d819cc360e0ddecda3dcd01c1552339/PubChemPy-1.0.4.tar.gz\n", - "Building wheels for collected packages: pubchempy\n", - " Building wheel for pubchempy (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for pubchempy: filename=PubChemPy-1.0.4-cp36-none-any.whl size=13825 sha256=bd54eb755f3e83b75a2579701aadc27284d998fe33b1fc1e22342e2c109939d8\n", - " Stored in directory: /root/.cache/pip/wheels/10/4d/51/6b843681a9a5aef35f0d0fbce243de46f85080036e16118752\n", - "Successfully built pubchempy\n", - "Installing collected packages: pubchempy\n", - "Successfully installed pubchempy-1.0.4\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Gj-VYSail56Q", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import os\n", - "import pandas as pd\n", - "from pubchempy import get_cids, get_compounds" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zwhTD4OBl56V", - "colab_type": "text" - }, - "source": [ - "Pandas is magic but it doesn't automatically know where to find your data of interest. You likely will have to look at it first using a GUI. \n", - "\n", - "We will now look at a screenshot of this dataset as rendered by LibreOffice.\n", - "\n", - "To do this, we will import Image and os." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "5OxowmHIl56W", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import os\n", - "from IPython.display import Image, display" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "2nRrzbyUl56c", - "colab_type": "code", - "colab": {} - }, - "source": [ - "current_dir = os.path.dirname(os.path.realpath('__file__'))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "6CrNCoe0l56s", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# data_screenshot = os.path.join(current_dir, 'assets/dataset_preparation_gui.png')\n", - "# display(Image(filename=data_screenshot))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Ud2cRDy_l566", - "colab_type": "text" - }, - "source": [ - "We see the data of interest is on the second sheet, and contained in columns \"TA ID\", \"N #1 (%)\", and \"N #2 (%)\".\n", - "\n", - "Additionally, it appears much of this spreadsheet was formatted for human readability (multicolumn headers, column labels with spaces and symbols, etc.). This makes the creation of a neat dataframe object harder. For this reason we will cut everything that is unnecesary or inconvenient.\n", - "\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hVJDAGT8mbl1", - "colab_type": "code", - "outputId": "3665e5d9-91c2-4804-b6e3-a61562471d4a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 309 - } - }, - "source": [ - "!wget https://github.com/deepchem/deepchem/raw/master/datasets/Positive%20Modulators%20Summary_%20918.TUC%20_%20v1.xlsx" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2020-05-31 02:53:46-- https://github.com/deepchem/deepchem/raw/master/datasets/Positive%20Modulators%20Summary_%20918.TUC%20_%20v1.xlsx\n", - "Resolving github.com (github.com)... 
140.82.112.4\n", - "Connecting to github.com (github.com)|140.82.112.4|:443... connected.\n", - "HTTP request sent, awaiting response... 302 Found\n", - "Location: https://raw.githubusercontent.com/deepchem/deepchem/master/datasets/Positive%20Modulators%20Summary_%20918.TUC%20_%20v1.xlsx [following]\n", - "--2020-05-31 02:53:46-- https://raw.githubusercontent.com/deepchem/deepchem/master/datasets/Positive%20Modulators%20Summary_%20918.TUC%20_%20v1.xlsx\n", - "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", - "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 42852 (42K) [application/octet-stream]\n", - "Saving to: ‘Positive Modulators Summary_ 918.TUC _ v1.xlsx’\n", - "\n", - "\r Positive 0%[ ] 0 --.-KB/s \rPositive Modulators 100%[===================>] 41.85K --.-KB/s in 0.02s \n", - "\n", - "2020-05-31 02:53:47 (1.69 MB/s) - ‘Positive Modulators Summary_ 918.TUC _ v1.xlsx’ saved [42852/42852]\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "pMvd0XzRl567", - "colab_type": "code", - "colab": {} - }, - "source": [ - "raw_data_file = os.path.join(current_dir, 'Positive Modulators Summary_ 918.TUC _ v1.xlsx')\n", - "raw_data_excel = pd.ExcelFile(raw_data_file)\n", - "\n", - "# second sheet only\n", - "raw_data = raw_data_excel.parse(raw_data_excel.sheet_names[1])" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "ei2QwtnVl57D", - "colab_type": "code", - "outputId": "42c4aa3e-4247-4794-e6c6-c1b884753eb8", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "# preview 5 rows of raw dataframe\n", - "raw_data.loc[raw_data.index[:5]]" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "execute_result", - 
"data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
Unnamed: 0Unnamed: 1Unnamed: 2Metric #1 (-120 mV Peak)Unnamed: 4Unnamed: 5Unnamed: 6Unnamed: 7
0NaNNaNNaNVehicleNaN4ReplicationsNaN
1TA ##PositionTA IDMeanSDThreshold (%) = Mean + 4xSDN #1 (%)N #2 (%)
211-A02Penicillin V Potassium-12.86896.7470514.1193-10.404-18.1929
321-A03Mycophenolate Mofetil-12.86896.7470514.1193-12.4453-11.7175
431-A04Metaxalone-12.86896.7470514.1193-8.65572-17.7753
\n", - "
" - ], - "text/plain": [ - " Unnamed: 0 Unnamed: 1 ... Unnamed: 6 Unnamed: 7\n", - "0 NaN NaN ... Replications NaN\n", - "1 TA ## Position ... N #1 (%) N #2 (%)\n", - "2 1 1-A02 ... -10.404 -18.1929\n", - "3 2 1-A03 ... -12.4453 -11.7175\n", - "4 3 1-A04 ... -8.65572 -17.7753\n", - "\n", - "[5 rows x 8 columns]" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 9 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kfGr4zPSl57Q", - "colab_type": "text" - }, - "source": [ - "Note that the actual row headers are stored in row 1 and not 0 above." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "adUjxQF2l57Z", - "colab_type": "code", - "outputId": "acdaa261-e58b-43a3-f2f4-6869ae4ea364", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 119 - } - }, - "source": [ - "# remove column labels (rows 0 and 1), as we will replace them\n", - "# only take data given in columns \"TA ID\" \"N #1 (%)\" (3) and \"N #2 (%)\" (4)\n", - "raw_data = raw_data.iloc[2:, [2, 6, 7]]\n", - "print(raw_data.loc[raw_data.index[:5]])\n", - "\n", - "## collapse multiindex so that drug names and number indexes are columns\n", - "#raw_data.reset_index(level=[1, 2], inplace=True)\n", - "# reset the index so we keep the label but number from 0 again\n", - "raw_data.reset_index(inplace=True)\n", - "\n", - "## rename columns\n", - "raw_data.columns = ['label', 'drug', 'n1', 'n2']" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "stream", - "text": [ - " Unnamed: 2 Unnamed: 6 Unnamed: 7\n", - "2 Penicillin V Potassium -10.404 -18.1929\n", - "3 Mycophenolate Mofetil -12.4453 -11.7175\n", - "4 Metaxalone -8.65572 -17.7753\n", - "5 Terazosin·HCl -11.5048 16.0825\n", - "6 Fluvastatin·Na -11.1354 -14.553\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "_AmIYJGjl57j", - "colab_type": "code", - "outputId": "d9cdb418-295a-4ec8-e757-07747257ad81", - "colab": { - "base_uri": 
"https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "# preview cleaner dataframe\n", - "raw_data.loc[raw_data.index[:5]]" - ], - "execution_count": 11, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
labeldrugn1n2
02Penicillin V Potassium-10.404-18.1929
13Mycophenolate Mofetil-12.4453-11.7175
24Metaxalone-8.65572-17.7753
35Terazosin·HCl-11.504816.0825
46Fluvastatin·Na-11.1354-14.553
\n", - "
" - ], - "text/plain": [ - " label drug n1 n2\n", - "0 2 Penicillin V Potassium -10.404 -18.1929\n", - "1 3 Mycophenolate Mofetil -12.4453 -11.7175\n", - "2 4 Metaxalone -8.65572 -17.7753\n", - "3 5 Terazosin·HCl -11.5048 16.0825\n", - "4 6 Fluvastatin·Na -11.1354 -14.553" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 11 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6Htu9Bw6l57p", - "colab_type": "text" - }, - "source": [ - "This formatting is closer to what we need.\n", - "\n", - "Now, let's take the drug names and get smiles strings for them (format needed for DeepChem)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "3hGrrqu5l57q", - "colab_type": "code", - "colab": {} - }, - "source": [ - "drugs = raw_data['drug'].values" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zJAABOqPl57y", - "colab_type": "text" - }, - "source": [ - "For many of these, we can retreive the smiles string via the canonical_smiles attribute of the `get_compounds` object (using `pubchempy`)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "yfCp2htdl570", - "colab_type": "code", - "outputId": "847e66c8-eb78-42e8-fc03-e35c496685d5", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "get_compounds(drugs[1], 'name')" - ], - "execution_count": 13, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "[Compound(5281078)]" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 13 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "rsesx-l8l58L", - "colab_type": "code", - "outputId": "fd80dfc8-b365-4844-ef9f-f1dc91afef41", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 34 - } - }, - "source": [ - "get_compounds(drugs[1], 'name')[0].canonical_smiles" - ], - "execution_count": 14, - "outputs": [ - { - "output_type": "execute_result", - "data": { - 
"text/plain": [ - "'CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3CCOCC3)O'" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 14 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "x4qqWsWZl581", - "colab_type": "text" - }, - "source": [ - "However, some of these drug names have variables spaces and symbols (·, (±), etc.), and names that may not be readable by pubchempy. \n", - "\n", - "For this task, we will do a bit of hacking via regular expressions. Also, we notice that all ions are written in a shortened form that will need to be expanded. For this reason we use a dictionary, mapping the shortened ion names to versions recognizable to pubchempy. \n", - "\n", - "Unfortunately you may have several corner cases that will require more hacking." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "jGch_fRUl587", - "colab_type": "code", - "colab": {} - }, - "source": [ - "ion_replacements = {\n", - " 'HBr': ' hydrobromide',\n", - " '2Br': ' dibromide',\n", - " 'Br': ' bromide',\n", - " 'HCl': ' hydrochloride',\n", - " '2H2O': ' dihydrate',\n", - " 'H20': ' hydrate',\n", - " 'Na': ' sodium'\n", - "}\n", - "\n", - "ion_keys = ['H20', 'HBr', 'HCl', '2Br', '2H2O', 'Br', 'Na']" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "t-YXuLu2l59L", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import re" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "OVjTiTyJl59T", - "colab_type": "code", - "colab": {} - }, - "source": [ - "def compound_to_smiles(cmpd):\n", - " # remove spaces and irregular characters\n", - " compound = re.sub(r'([^\\s\\w]|_)+', '', cmpd)\n", - " \n", - " # replace ion names if needed\n", - " for ion in ion_keys:\n", - " if ion in compound:\n", - " compound = compound.replace(ion, ion_replacements[ion])\n", - "\n", - " # query for cid first in order to avoid timeouterror\n", - " cid = get_cids(compound, 
'name')[0]\n", - " smiles = get_compounds(cid)[0].canonical_smiles\n", - "\n", - " return smiles" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "H-qPqmm3l59s", - "colab_type": "text" - }, - "source": [ - "Now let's actually convert all these compounds to smiles. This conversion will take a few minutes so might not be a bad spot to go grab a coffee or tea and take a break while this is running! Note that this conversion will sometimes fail so we've added some error handling to catch these cases below." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "PMlMlVJTl59t", - "colab_type": "code", - "outputId": "75129524-31a7-4e54-913a-f262f35d06a3", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 85 - } - }, - "source": [ - "smiles_map = {}\n", - "for i, compound in enumerate(drugs):\n", - " # print(\"Converting %s to smiles\" % i)\n", - " try:\n", - " smiles_map[compound] = compound_to_smiles(compound)\n", - " except:\n", - " print(\"Errored on %s\" % i)\n", - " continue" - ], - "execution_count": 18, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Errored on 162\n", - "Errored on 237\n", - "Errored on 303\n", - "Errored on 399\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "CgPwj-Pvl594", - "colab_type": "code", - "colab": {} - }, - "source": [ - "smiles_data = raw_data\n", - "# map drug name to smiles string\n", - "smiles_data['drug'] = smiles_data['drug'].apply(lambda x: smiles_map[x] if x in smiles_map else None)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "xV3mQWwrl5-v", - "colab_type": "code", - "outputId": "af23230b-9000-4e62-f6e7-13195c0f25c2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 204 - } - }, - "source": [ - "# preview smiles data\n", - "smiles_data.loc[smiles_data.index[:5]]" - ], - "execution_count": 20, - 
"outputs": [ - { - "output_type": "execute_result", - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
labeldrugn1n2
02CC1(C(N2C(S1)C(C2=O)NC(=O)COC3=CC=CC=C3)C(=O)[...-10.404-18.1929
13CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3C...-12.4453-11.7175
24CC1=CC(=CC(=C1)OCC2CNC(=O)O2)C-8.65572-17.7753
35COC1=C(C=C2C(=C1)C(=NC(=N2)N3CCN(CC3)C(=O)C4CC...-11.504816.0825
46CC(C)N1C2=CC=CC=C2C(=C1C=CC(CC(CC(=O)[O-])O)O)...-11.1354-14.553
\n", - "
" - ], - "text/plain": [ - " label drug n1 n2\n", - "0 2 CC1(C(N2C(S1)C(C2=O)NC(=O)COC3=CC=CC=C3)C(=O)[... -10.404 -18.1929\n", - "1 3 CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3C... -12.4453 -11.7175\n", - "2 4 CC1=CC(=CC(=C1)OCC2CNC(=O)O2)C -8.65572 -17.7753\n", - "3 5 COC1=C(C=C2C(=C1)C(=NC(=N2)N3CCN(CC3)C(=O)C4CC... -11.5048 16.0825\n", - "4 6 CC(C)N1C2=CC=CC=C2C(=C1C=CC(CC(CC(=O)[O-])O)O)... -11.1354 -14.553" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 20 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ES-ak26xl5-1", - "colab_type": "text" - }, - "source": [ - "Hooray, we have mapped each drug name to its corresponding smiles code.\n", - "\n", - "Now, we need to look at the data and remove as much noise as possible." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ghu-RpSCl5-3", - "colab_type": "text" - }, - "source": [ - "## De-noising data" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "axbec0-Dl5-4", - "colab_type": "text" - }, - "source": [ - "In machine learning, we know that there is no free lunch. You will need to spend time analyzing and understanding your data in order to frame your problem and determine the appropriate model framework. 
Treatment of your data will depend on the conclusions you gather from this process.\n", - "\n", - "Questions to ask yourself:\n", - "* What are you trying to accomplish?\n", - "* What is your assay?\n", - "* What is the structure of the data?\n", - "* Does the data make sense?\n", - "* What has been tried previously?\n", - "\n", - "For this project (respectively):\n", - "* I would like to build a model capable of predicting the affinity of an arbitrary small molecule drug to a particular ion channel protein\n", - "* For an input drug, data describing channel inhibition\n", - "* A few hundred drugs, with n=2\n", - "* Will need to look more closely at the dataset*\n", - "* Nothing on this particular protein" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ls_jIMqUl5-5", - "colab_type": "text" - }, - "source": [ - "*This will involve plotting, so we will import matplotlib and seaborn. We will also need to look at molecular structures, so we will import rdkit. We will also use the seaborn library which you can install with `pip install seaborn`." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Xe0sqLZ0l5-6", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - }, - "outputId": "4e0be4fd-83a4-40c8-d636-153bc81943ae" - }, - "source": [ - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "\n", - "import seaborn as sns\n", - "sns.set_style('white')" - ], - "execution_count": 21, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/statsmodels/tools/_testing.py:19: FutureWarning: pandas.util.testing is deprecated. 
Use the functions in the public API at pandas.testing instead.\n", - " import pandas.util.testing as tm\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "mC-lBTuXl5--", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from rdkit import Chem\n", - "from rdkit.Chem import AllChem\n", - "from rdkit.Chem import Draw, PyMol, rdFMCS\n", - "from rdkit.Chem.Draw import IPythonConsole\n", - "from rdkit import rdBase" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "UOtjGja5l5_D", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# i will use numpy on occasion for manipulating arrays\n", - "import numpy as np" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "9fKzIHFnl5_K", - "colab_type": "text" - }, - "source": [ - "Our goal is to build a small molecule model, so let's make sure our molecules are all small. This can be approximated by the length of each smiles string." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "d95zFS4Ll5_K", - "colab_type": "code", - "colab": {} - }, - "source": [ - "smiles_data['len'] = [len(i) if i is not None else 0 for i in smiles_data['drug']]" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "HZjb8u_fl5_S", - "colab_type": "code", - "outputId": "d4ff3ed7-3580-4f50-f1b5-1a04b35bef1d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "smiles_lens = [len(i) if i is not None else 0 for i in smiles_data['drug']]\n", - "sns.distplot(smiles_lens)\n", - "plt.xlabel('len(smiles)')\n", - "plt.ylabel('probability')" - ], - "execution_count": 25, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "Text(0, 0.5, 'probability')" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 25 - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEKCAYAAAAFJbKyAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXxU9b3/8deZmWSyTfZkEpIQhQTLLuKCqKChMRXkEdZWb2t/UtFblCtoi1VrqaXaW9v0At5rFUu19dpaL8hSjRUhiEFEBQEjiyQBQxJIJtskmWyznt8fIZGYkEUymSWf5+ORh5mZc04+x4R5z/l+v+f7VVRVVRFCCCH6SePpAoQQQvgWCQ4hhBADIsEhhBBiQCQ4hBBCDIgEhxBCiAGR4BBCCDEgOncePD8/n6effhqXy8XixYu57777urxus9l45JFHOHbsGJGRkaxdu5bk5GT27dvHH/7wB+x2OwEBAaxatYrrr78egLvuuouqqiqCgoIAeOmll4iJiXHnaQghhLiA24LD6XSyZs0aXn75ZYxGI4sWLSIjI4O0tLTObTZt2kR4eDg7d+4kNzeXnJwc1q1bR1RUFM8//zxGo5HCwkLuuece9u7d27lfTk4OEydOdFfpQggheuG2pqqCggJSU1NJSUkhMDCQOXPmkJeX12Wb3bt3M3/+fACysrLYv38/qqoybtw4jEYjAOnp6VitVmw2m7tKFUIIMQBuu+IwmUwkJCR0PjYajRQUFHTbJjExsb0QnQ6DwYDZbCY6Orpzmx07djBu3DgCAwM7n3v88cfRaDTceuut3H///SiK0mst1113HUlJSYNxWkIIMSxERUUB8Oc//7nba27t47hURUVF5OTk8NJLL3U+l5OTg9FopKmpiQcffJDt27czb968Xo+TlJTEli1b3F2uEEL4lQULFvT4vNuaqoxGI5WVlZ2PTSZTZ/PThdtUVFQA4HA4sFgsnSlXWVnJ8uXLeeaZZxg5cmSXfQDCwsK4/fbbu13F
CCGEcC+3BcfEiRMpKSmhrKwMm81Gbm4uGRkZXbbJyMhg69atQHuT1LRp01AUhcbGRu677z5+8pOfMHXq1M7tHQ4HdXV1ANjtdvbs2UN6erq7TkEIIUQP3NZUpdPpWL16NUuXLsXpdLJw4ULS09NZv349EyZMYNasWSxatIhVq1aRmZlJREQEa9euBeDVV1+ltLSU5557jueeew5oH3YbHBzM0qVLsdvtuFwurr/+er773e+66xSEEEL0QBkO06ovWLBA+jiEEGKALvbeKXeOCyGEGBAJDiGEEAMiwSGEEGJAJDiEEEIMiASHEEKIAfHqO8eF5zS02LBYHb1uY9DriAgJ7HUbIYT/keAQPbJYHeQX1vS6zYwxsRIcQgxD0lQlhBBiQCQ4hBBCDIgEhxBCiAGR4BBCCDEgEhxCCCEGRIJDCCHEgEhwCCGEGBAJDiGEEAMiwSGEEGJAJDiEEEIMiASHEEKIAZHgEEIIMSASHEIIIQZEgkMIIcSASHAIIYQYEAkOIYQQAyLBIYQQYkAkOIQQQgyIBIcQQogBkeAQQggxIBIcQgghBkSCQwghxIDoPF2A8F8NLTYsVsdFXzfodUSEBA5hRUKIwSDBIdzGYnWQX1hz0ddnjImV4BDCB0lTlRBCiAGR4BBCCDEgEhxCCCEGRIJDCCHEgLg1OPLz88nKyiIzM5MXX3yx2+s2m42VK1eSmZnJ4sWLKS8vB2Dfvn0sWLCAuXPnsmDBAvbv39+5z9GjR5k7dy6ZmZk89dRTqKrqzlMQ35DLpWJzuHCpqvyOhPAzbhtV5XQ6WbNmDS+//DJGo5FFixaRkZFBWlpa5zabNm0iPDycnTt3kpubS05ODuvWrSMqKornn38eo9FIYWEh99xzD3v37gXgySef5Ne//jWTJ0/m3nvvJT8/n5kzZ7rrNEQ/OJwuDp4x89HpWg6WmCmta6GioRW7sz0wArQKE5MiuOayaEZGh6AoiocrFkJcCrcFR0FBAampqaSkpAAwZ84c8vLyugTH7t27Wb58OQBZWVmsWbMGVVUZN25c5zbp6elYrVZsNhv19fU0NTVx5ZVXAjBv3jzy8vIkODzoUKmZx974nJMmCxoFxiaGc2VKJLMnJuJSXZyubqa+xU7B2QYOldZzdWoU86ckSXgI4cPcFhwmk4mEhITOx0ajkYKCgm7bJCYmthei02EwGDCbzURHR3dus2PHDsaNG0dgYGC3YyYkJGAymdx1CqIXqqryx/eKee1AGQnhQay/40puviKeiOCAzm3KzS2d93HMmZRI3okqPiiuITo0kJuviPdU6UKIS+TVNwAWFRWRk5PDSy+95OlSxNd8esbMlsNn+d7VKfxi7jjC9L3/Kel1Wm6bkECT1cG7x01EhQYyfXQ05eaWi+4jd5YL4Z3cFhxGo5HKysrOxyaTCaPR2G2biooKEhIScDgcWCwWoqKiAKisrGT58uU888wzjBw5ssdjVlZWdjumcL+aJitvFVRw1chI/nPBRDSa/jU7KYrCgilJ1LfY2HroLAuvSqK4qvmi28ud5UJ4J7eNqpo4cSIlJSWUlZVhs9nIzc0lIyOjyzYZGRls3boVaG+SmjZtGoqi0NjYyH333cdPfvITpk6d2rl9fHw8YWFhHDlyBFVV2bZtG7NmzXLXKYgeOF0q/3ewDK1G4Yk5Y/sdGh10Wg1zJ4/A5nTxztHKvncQQngdtwWHTqdj9erVLF26lNmzZ3PbbbeRnp7O+vXrycvLA2DRokXU19eTmZnJyy+/zE9/+lMAXn31VUpLS3nuuefIzs4mOzub2tpaAH75y1/yxBNPkJmZyciRI5kxY4a7TkH04NAZM+XmVuZNSSI+POgbHSMxIphRcaH887NzOF0yVFcIX6Oow2CQ/YIFC9iyZYuny/ApF3ZsX+i594pxulT+IyONmVfEkRwVMuBjAJyoaOR/PzrDHdekMCk5ssdtZoyJ7fX4Qgj3uth7p9w5LvrtbH0rZ+tbueayqEse
TntFgoEREUHsK7747LlCCO8kwSH67UBJHTqNwpUpUZd8LI2ikD0liTJzK2V1Fx9ZJYTwPhIcol+sDiefldUzKTmC4EDtoBxz1rfi0ShwvKJxUI4nhBgaEhyiXz4vb8DqcHHNZdF9b9xPoXodqTGhFJosg3ZMIYT7SXCIfjlcVk9cmJ6R0YPbWT3GaKCioY3GVvugHlcI4T4SHKJPNoeL0toWxiYaBn2OqTHGMACKquSqQwhfIcEh+lRS24xTVRkdFzbox04ID8IQpOOkqWnQjy2EcA8JDtGnU1VNaDUKqTGhg35sRVEYE2+guMoiNwMK4SMkOESfTlU3MTI6hECde/5cxiQYaLO7ep3wUAjhPSQ4RK+arQ7ONbS5pZmqQ1pcGArI6CohfIQEh+jV6Zr22WvT4t0XHMGBWlKiQyiukn4OIXyBBIfoVXFVE3qdhqTIYLf+nJHRIVQ0tOFwutz6c4QQl06CQ/TqVHUTo2JD0Q5w+vSBSokOweFSqWhoc+vPEUJcOgkOcVH1LTbqmm2McmP/RoeUqPYrmjLpIBfC60lwiIsqN7cCDPrd4j2JCA4gPEgnEx4K4QMkOMRFna1vRaNAQsQ3W7BpIBRFITkqhLLzYSWE8F4SHOKiztW3YgwPIkA7NH8mKdEh1DXbaLY6huTnCSG+GQkO0SNVVSk3t7p9NNWFUqKln0MIXyDBIXpU2dhGq91JUtTQBUdSZDAKUFYnzVVCeDMJDtGjLyra7+IeyisOvU5LQkSQXHEI4eUkOESPTposaBWFhHD3d4xfKDkqhHJzCy5VJjwUwltJcIgefVFhwRihRzdEHeMdUqKCabO7qG2yDenPFUL0nwSH6EZVVU6aLCRFuv/+ja8bcb5prKJB+jmE8FY6TxcgvE9ZXSuWNkef/RsOZ+9ToVvtzgH/7HiDHo2CTD0ihBeT4BDdFJytB+hzRFWr3cXhU3UXfX3KyMgB/2ydVkO8IUiuOITwYtJUJbo5dq4RnUbBGK73yM9PjAiSKw4hvJgEh+imsNJCakwIOo1n/jwSI4KwtDkwN0sHuRDeSIJDdFNYZeGy2MFfX7y/Es/3rcjCTkJ4JwkO0UWLzUFZXSuXx3gwOM7fO1IkwSGEV5LgEF10fMq/3INXHCF6HRHBAXLFIYSXkuAQXRSaPB8c0N7PIVccQngnCQ7RRZHJQqBWw4iooZ1q5OsSIoIorW2h7RvcCyKEcC8JDtFFocnCqLhQj42o6pAYEYxTVSkyyVWHEN5GgkN0UWhqYozR4OkyGHF+1cHjFQ0erkQI8XVuDY78/HyysrLIzMzkxRdf7Pa6zWZj5cqVZGZmsnjxYsrLywEwm83cddddTJkyhTVr1nTZ56677iIrK4vs7Gyys7Opra115ykMK81WB2frWxljDPN0KUSFBhIcqOXE+endhRDeo1/BsXz5cvbs2YPL5er3gZ1OJ2vWrGHjxo3k5uby1ltvUVxc3GWbTZs2ER4ezs6dO7n77rvJyckBQK/Xs2LFCh555JEej52Tk8P27dvZvn07MTEx/a5J9K6jMzrdC644NIpCWlwox881eroUIcTX9Cs4/u3f/o0333yTW2+9lZycHE6fPt3nPgUFBaSmppKSkkJgYCBz5swhLy+vyza7d+9m/vz5AGRlZbF//35UVSUkJISrr74avd4zU14MV4Wm9k/33tBUBZAWb+BERSOqrM0hhFfpV3BMnz6dP/zhD2zdupWkpCSWLFnCHXfcwRtvvIHdbu9xH5PJREJCQudjo9GIyWTqtk1iYiIAOp0Og8GA2Wzus57HH3+c7OxsnnvuOXlTGURFJgt6nYaR0UM/nXpP0uLDsFgdlJtlwkMhvEm/+zjMZjNbtmxh06ZNjB07lh/+8IccP36cH/3oR+6sr5ucnBzefPNN/va3v/Hpp5+yffv2If35/qyoqonRcWFoNYqnSwEgPb69r+WYNFcJ4VX6FRwPPPAA3//+92lra+OFF17ghRdeYPbs2fziF7+g
ubm5x32MRiOVlZWdj00mE0ajsds2FRUVADgcDiwWC1FRUb3W0nGMsLAwbr/9dgoKCvpzCqIfikxNXtEx3mFUXCgaBY5XSHAI4U36FRzf/e53efvtt/n3f/934uPjgfYRUQBbtmzpcZ+JEydSUlJCWVkZNpuN3NxcMjIyumyTkZHB1q1bAdixYwfTpk1DUS7+adfhcFBX177+g91uZ8+ePaSnp/fnFEQfms6PqPKGjvEOQQFaLo8N5YQEhxBepV8LOa1bt46ZM2d2ee573/te55t+jwfW6Vi9ejVLly7F6XSycOFC0tPTWb9+PRMmTGDWrFksWrSIVatWkZmZSUREBGvXru3cPyMjg6amJux2O7t27eKll15ixIgRLF26FLvdjsvl4vrrr+e73/3uNzx1caEiL+sY7zBuRASHzvTd7yWEGDq9Bkd1dTUmk4m2tjaOHz/e2RHd1NREa2vfHZYzZ87sFjgrVqzo/F6v1/Pss8/2uO/u3bt7fP5iVzji0nTcod3Rr+AtxiWG8+Zn52hotRMRHODpcoQQ9BEcH3zwAVu2bKGyspL//M//7Hw+NDSUhx9+2O3FiaFTeH5EVYqXjKjqMDax/QroREUj00bJPTtCeINeg2P+/PnMnz+fHTt2kJWVNVQ1CQ8orGoiLd57RlR1GDciHIDj5yQ4hPAWvQbH9u3byc7O5uzZs7z88svdXl+yZInbChNDq8hk8co35nhDELFheukgF8KL9BocHf0YLS0tQ1KM8IzGNjsVDW2ke9FQ3AuNTTTIkFwhvEivwXHHHXcA7XNVCf/V0TE+Jt67RlQ5nC7KzS2kRIfw0elaSmqa0Gm/GkFu0OuICAn0YIVCDE+9BsdTTz3V685PPPHEoBYjPMNbh+K22l0cPlWH06lid6psOXSOhIivFpiaMSZWgkMID+g1OMaPHz9UdQgPKjQ1ERygJTkq2NOl9KgjLCoaWrsEhxDCM/ocVSX8X1GVhbT4MDReNqKqQ2yYHp1GoaKhjSmeLkYI0XtwPP300/z85z/nxz/+cY+vv/DCC24pSgytQpOFG9JiPV3GRWk1CgkRQVQ0yCy5QniDXoMjOzsbYMhnwBVDp6HVjqnR6nX9G1+XEB7E8fNrc/Q2n5kQwv16DY4JEyYAcO2112Kz2Th9+jSKonD55ZcTGCidkr6socWGxeqgoLwegOjQAMrNXw27ttqdniqtR4mRwRw8Y6axzSFTjwjhYf2a5HDPnj388pe/ZOTIkaiqSnl5Ob/61a+6zUMlfIfF6iC/sIYDX7bPNlzdaCO/sKbz9SkjIz1VWo9GXNBBLsEhhGf1Kzh++9vf8sorr5CamgpAaWkp9913nwSHHzBZ2gjUaogI8e4344TwjuBo41sJ4R6uRojhrV/rcYSGhnaGBkBKSgqhoaFuK0oMnapGK/HhejRe3m+gD9ASHRpIRUObp0sRYtjr9Yrj3XffBdr7Ou69915uu+02FEXhnXfeYeLEiUNSoHAvk6XN66ZSv5jEiCAq6mVklRCe1mtwvPfee53fx8bGcuDAAQCio6OxWq3urUy4XavNiaXNQbzBN26qS4wI4vi5RqwOJ3qd1tPlCDFs9RocF67BIfyPqbG92ccYrvdwJf2TGBGMClQ2tJEaI02lQnhKvzrHrVYrmzdvpqioqMuVhgSLbzNZ2oMjPtw3rjhGRLZPiXKuvlWCQwgP6lfn+KpVq6iuruaDDz7g2muvxWQySee4H6hqtBKo0xDpI8Nbw4N0GPQ6ys3SzyGEJ/UrOEpLS1m5ciXBwcHMnz+fDRs2UFBQ4O7ahJuZLG3EG/Q+cye2oigkRQVLcAjhYf0KDp2uvUUrPDycwsJCLBYLtbW1bi1MuF9VoxWjj3SMd0iOCqamyUqbl93ZLsRw0q8+ju9973s0NDSwYsUKli1bRktLCytWrHB3bcKNGlrtNFkdxPtIx3iH5KgQVOCsDMsVwmP6FRyLFy8G2uesysvLc2tBYmh8WdMMgNFHOsY7
JJ/vID8rzVVCeEy/gsNsNvM///M/HDp0CEVRmDp1Kvfffz9RUVHurk+4SUdwxBt864ojRK8jOjSQsgsmZBRCDK1+9XE8/PDDREdH8+yzz7J+/XqioqJ46KGH3F2bcKMva5rR6zQ+OWFgUmSwXHEI4UH9Co7q6moeeOABUlJSSElJ4f7775fOcR/3ZU2zT42oulByVDD1rXbMzTZPlyLEsNSv4LjhhhvIzc3F5XLhcrl4++23ufHGG91dm3ATVVU5VdVEQoR3rjHel+SoEABOVDZ6uBIhhqde+zimTJmCoiioqspf//pXVq1aBYDL5SIkJISf/exnQ1KkGFymRiuNbQ4SInyrY7zDiMggFODEOYunSxFiWOo1OA4fPjxUdYgh1PFJPcHHRlR10Ou0xIfrOV4hVxxCeEK/RlUB5OXlcfDgQaB9WO4tt9zitqKEe31R0f5J3VeDA2BkdCjHzjXicqloNL7XTyOEL+tXH0dOTg6vvPIKo0ePZvTo0bzyyiv84Q9/cHdtwk2+qGzEGK4nONB3pya/LCaEJquDwipprhJiqPXriuP9999n+/btaDTtOTN//nzmzZvHT37yE7cWJ9zjiwoLaT6yeNPFdMyOe6DELEvJCjHE+nXFAdDY+FV7ssUin/J8ldXh5FR1E6PjfDs4okICiA0L5GBJnadLEWLY6dcVx49//GPmz5/Pddddh6qqHDhwgJ/+9Kfurk24QXFVEw6X6vPBoSgKk5IjOVhi9nQpQgw7fV5xuFwuFEXh9ddfJzMzk1tvvZXXX3+d2bNn93nw/Px8srKyyMzM5MUXX+z2us1mY+XKlWRmZrJ48WLKy8uB9ilO7rrrLqZMmcKaNWu67HP06FHmzp1LZmYmTz31FKqq9vdcBV91jKfF+/56KpOSIzhb3yoTHgoxxPoMDo1Gw8aNG4mPj2fWrFnMmjWLuLi4Pg/sdDpZs2YNGzduJDc3l7feeovi4uIu22zatInw8HB27tzJ3XffTU5ODgB6vZ4VK1bwyCOPdDvuk08+ya9//WveffddSkpKyM/P7++5Cto7xvU6DUlRvnnz34UmJUcASHOVEEOsX30c06dP589//jMVFRXU19d3fvWmoKCA1NRUUlJSCAwMZM6cOd1m1t29ezfz588HICsri/3796OqKiEhIVx99dXo9V0n4KuqqqKpqYkrr7wSRVGYN2+ezNY7QF9UWhhjNKDT9Lt7y2uNigslTK+T5iohhli/+jjefvttFEXh73//e5fne3vTNplMJCQkdD42Go3dVg00mUwkJia2F6LTYTAYMJvNREdH9+uYCQkJmEym/pyCOO9EhYVbruj7itEX6DQapoyM5IBccQgxpPodHH//+9/59NNPURSFq6++mjvuuMPdtYlBVm2xUtNk5YoEg6dLGTTXXBbN2l2FNLTafXKmXyF8Ub/aK372s59x6tQp7rrrLn7wgx9QXFzc5zxVRqORysrKzscmkwmj0dhtm4qKCgAcDgcWi6XXNT6+fszKyspuxxQXd/RcAwATkiI8XMngue7yaFQVPj4tszULMVT6FRxFRUU8/fTTTJs2jWnTpvHUU09RVFTU6z4TJ06kpKSEsrIybDYbubm5ZGRkdNkmIyODrVu3ArBjxw6mTZvW6zTf8fHxhIWFceTIEVRVZdu2bcyaNas/pyCAo+XtwTF+hP/cMDdlZBTBAVr2Fdd4uhQhho1+NVWNGzeOI0eOcOWVVwLw2WefMWHChN4PrNOxevVqli5ditPpZOHChaSnp7N+/XomTJjArFmzWLRoEatWrSIzM5OIiAjWrl3buX9GRgZNTU3Y7XZ27drFSy+9RFpaGr/85S957LHHaGtrY8aMGcyYMeMSTn94KTjbwKjYUAxBATS02j1dzqAI1Gm4blQ0H0hwCDFk+hUcx44d44477mDEiBEAnDt3jssvv5y5c+cC8Oabb/a438yZM5k5c2aX51asWNH5vV6v59lnn+1x3927d/f4/MSJE3nr
rbf6U7b4mqNnG7jmsp4HHviyG9NieSr3BBUNrST66BojQviSfgXHxo0b3V2HcLOaJisVDW2d9z74kxvSYgHYV1zLoqnJHq5GCP/Xr+BISkpydx3CzT4/638d4x2uMBqICQ3kw+IaCQ4hhoDv3wUm+sUfO8Y7aDQK09Ni+aC4RqagEWIISHAME59f0DHuj25Mi6HKYqW4qsnTpQjh9yQ4honPzzb4ZTNVh45+jr1FMrpKCHeT4BgGOjrGJ/pxcCRHhTAqLpT3TlZ5uhQh/J4ExzDgzx3jF/r2WCMfn66jyerwdClC+DUJjmHg846O8ST/6xi/UMa34rE5XewtrPZ0KUL4NQmOYeBwqZm0+DDC/bRjvMPVqVFEBAew64Q0VwnhThIcfk5VVQ6X1XPVyEhPl+J2Oq2Gm6+IY8/JKpwuGZYrhLtIcPi50zXN1LfYuWrkxWcd9iezxhqpbbZxpKz3hcaEEN9cv+4cF76nocWGxepg94n2ha5GRAZTbm7pfN1qd3qqNLeamR6HVqOQd8LE1NThEZZCDDW54vBTFquD/MIadh6vQq/TUFrXQn5hTeeXzemfTTkRIQFcc1kUu07IypBCuIsEh58rrWshJToETS/rnPibrPEJFJqa5C5yIdxEgsOPWe1OTI1tjIwO8XQpbuFwuig3t3T7mpISiQK88WmZp0sUwi9JH4cfKzO3ooLfBker3cXhU3U9vpYaE8q/jlbys9vGDnFVQvg/ueLwY6V17Z3hKVH+GRy9mZQcQUltCycrLZ4uRQi/I8Hhx8rqWogL0xMcqPV0KUNu/IhwNAq8VXDO06UI4XckOPyUS1UprWvx22aqvhiCApgyMpK3CipkjQ4hBpkEh58qqWmm1e7ksthQT5fiMRnfMvJlTTPHzjV6uhQh/IoEh586UtY+seHlwzg4br4ijkCths2flnu6FCH8igSHn/qsrJ7wIB1RIf49sWFvIoIDyBxvZNuRs1gd/nmnvBCeIMHhh1RV5Uh5PZfFhqIMoxv/evK9q1Oob7Gz87jcSS7EYJHg8EOldS3UNtmGdTNVhxvSYhkREcTrB+RmQCEGiwSHH/r4y/ab4i6LkeDQahQWXZ3CB8U1XSZ5FEJ8cxIcfuiTL+uIDA4g3qD3dCleYfHUZFQV6SQXYpBIcPihT76sY1JKxLDv3+iQEh3CTemxvPZJKXany9PlCOHzJDj8TEVDK6V1LVyZ7P8r/g3Ekhsuw9Ro5e3PKzxdihA+T4LDz3x8ur1/Y3KKBMeFbh4Tz+Wxoby8r8TTpQjh8yQ4/MyHp2qICA4gLT7M06V4FY1GYckNl3GkrJ5DpWZPlyOET5Pg8COqqrKvuJbrR8Wg1Uj/xtctvCoZQ5COlz740tOlCOHTJDj8SGldC2frW5meFuPpUrxSqF7HHdek8K+jlTI0V4hLIMHhRz48VQvA9NGxHq7EO/S0QuB3JiSgAH94t5BycwsNLTZPlymEz5EVAP3IvuIajOF6RseFcra+1dPleNzFVgicMjKSNz87xxUJBm6flEhESKAHqhPCd8kVh59QVZX9p2qZPjpW7t/ow4z0OJwulQ+KajxdihA+ya3BkZ+fT1ZWFpmZmbz44ovdXrfZbKxcuZLMzEwWL15MeflXd/Zu2LCBzMxMsrKy2Lt3b+fzGRkZzJ07l+zsbBYsWODO8n3KSZOF2mYb00dL/0ZfYsL0TE6J5OMva6mXpiohBsxtweF0OlmzZg0bN24kNzeXt956i+Li4i7bbNq0ifDwcHbu3Mndd99NTk4OAMXFxeTm5pKbm8vGjRv51a9+hdP51bTYf/3rX9m+fTtbtmxxV/k+Z1/x+f6NNOnf6I+ZY+JwOFX+IZMfCjFgbguOgoICUlNTSUlJITAwkDlz5pCXl9dlm927dzN//nwAsrKy2L9/P6qqkpeXx5w5cwgMDCQlJYXU1FQKCgrcVapf+LC4htSYEJIigz1dik8whgcxMTmCzZ+WU22xerocIXyK24LDZDKR
kJDQ+dhoNGIymbptk5iYCIBOp8NgMGA2m/vc95577mHBggW8/vrr7irfp1gdTj48VcuM9DhPl+JTvv0tI3aHyuxiqccAABb5SURBVHPvFfe9sRCik8+NqnrttdcwGo3U1tayZMkSRo0axTXXXOPpsjzqYImZVruTmWMkOAYi1qDnOxOM/O3jM9w+OZGE8KBu2xj0Ohl1JcTXuO2Kw2g0UllZ2fnYZDJhNBq7bVNR0T7pnMPhwGKxEBUV1eu+Hf+NiYkhMzNTmrCA9wurCdRquF46xgfse9eMxKXCb9/+gvzCmm5fFqvD0yUK4XXcFhwTJ06kpKSEsrIybDYbubm5ZGRkdNkmIyODrVu3ArBjxw6mTZuGoihkZGSQm5uLzWajrKyMkpISJk2aREtLC01NTQC0tLSwb98+0tPT3XUKPuP9k9Vcc3kUoXqfu4D0uDiDnusuj+ZQqVn6OoToJ7e90+h0OlavXs3SpUtxOp0sXLiQ9PR01q9fz4QJE5g1axaLFi1i1apVZGZmEhERwdq1awFIT0/ntttuY/bs2Wi1WlavXo1Wq6W2tpYHHngAaB+1dfvttzNjxgx3nYJXa2ixYbE6qGps46TJwqyxo7tMo2G1O3vZW1xo5pg4DpaY2XXCxJ3XjvR0OUJ4Pbd+RJ05cyYzZ87s8tyKFSs6v9fr9Tz77LM97rts2TKWLVvW5bmUlBT++c9/Dn6hPshidZBfWMPBkvY7o3VaDfmFX93QNmWkTKveX4agAKanxbDnZDU3N7SSGCEj04Tojdw57uMKTRbCg3QYZZnYS3JTWhxBARp2Hjf1vbEQw5wEhw9zulSKq5sYYzTINCOXKDhQy4z0OL6otHCmttnT5Qjh1SQ4fNiZ2mba7C7GGA2eLsUvTB8diyFIx7+OVqKqqqfLEcJrSXD4sGMVjeg0igTHIAnUafj2WCOldS0cPdfo6XKE8FoSHD5KVVVOnGskLT6MQJ38GgfL1NQojOF6dhyrxOFyebocIbySvOP4qEJTE/WtdsaPCPd0KX5Foyh8Z3widc02Pj7dfS0PIYQEh8/KL6xGAb6VIMEx2MYYw0iLDyPvCxN1zTLtuhBfJ8Hho/KLargsNlTuFncDRVG4fVIidofK83tOebocIbyOBIcP+rKmmS9rmhmXKFcb7hJvCOLG9Fj+dbSSAyXSZCXEhSQ4fNA7R9sngBwn/RtudcsV8RjD9fxi21HsTukoF6KDBIePUVWVLYfKmZgUTpRM9+1WgToND317DF9UWli3q9DT5QjhNSQ4fExBeQNFVU18Z0Kip0sZFm5Mj+V7V6fwxz2n+Ph0rafLEcIrSHD4mM2flqPXacj4lizaNFRWzx1HanQID71+hIYWu6fLEcLjJDh8iNXh5J+fnSNrfAKGoABPlzNshOp1rL9jClUWK/f//VOsDpmyXgxvEhw+JO9EFQ2tdhZOTfZ0KcPO5JRInlk4iX3Ftaz8xxGcLpnLSgxfEhw+ZPOn5RjD9dyYFuvpUoalhVOT+cXt4/jX0Uoe2VyAzSEjrcTwJHeP+YjiqibeO1nF8lvS0GpkCnVPuefGy2lqc7B2VyGnqpv44/evYkTkxRd+6lip8WIMeh0RMjpO+BgJDh/x/J5T6HUa7p5+madLGfZWfDudMcYwVm0uYM6ze3nkO99i8dRkdNruF/AdKzVezIwxsRIcwudIU5UPKKtrYduRs9x57UhiwmSlP29w28RE/rn8BkbFhfHYls/JWpfPWwXnpO9DDAsSHD7gT3tPo1Hg3ptGeboUcYFRcWFs/vH1bLhrKgDL/36Ym3Pe45X9JbTZZeSV8F8SHF6uqrGNfxwoY8GU5F7b0oVnKIpC1vgE3n1oJi/8YCpxYXpWbz/Gzb/fw/9+dEamKhF+Sfo4vJiqqqzefgxUWHbzaE+XI3qh1Sh8Z0IC35mQwIenavivdwv5xbajjI4L5TvjE0mKktAX/kOuOLxY7ucVvHOskocyx3BZbKinyxH9NH10LJt+
fD1/+uHVNLTaef79YnYeN+GSdcyFn5Dg8FI1TVZWbz/G5JRI7r3pck+XIwZIURQyxxn533uuZXJyJO+drOLVj85glb4P4QckOLxQm93JQ68foanNQc6iST0O8xS+wRAUwKKpycydPIJCk4Xn3z+FuUVWFRS+Tfo4PORiN4ZZ7U4e2/I5B0rM/G7RJNKNBg9UJzo4nC7KzS0XfV2ngd5uILfanSiKwvWjYogL0/P3T87wwvun+H/XXyaDHYTPkuDwkJ5uDLO02dl0sJxT1U08etu3WHx1ioeqEx1a7S4On7r4CoBTRkZyuLS+19c7pMWH8e8zRvOXD0v4097TfP+6VGaMkeljhO+RNhAvoKoqn56pY92uIr6sbWbhVcnMmSTrbfgjY3gQP545mqiQQP7y4ZfsOFbp6ZKEGDC54vAgq8PJ4dJ6PjpdS5XFymUxIcybkkS8IcjTpQk3iggO4L4Zo3j1ozP8+q0TtNldLJs5Go3MQSZ8hASHB5ypbeaP7xWz/bNztNldJEUGs3hqMpNTItEo7W8efbWty+gc3xYUoOXu6ZfxflE1v99xkn3FNTyzcBIp0SFdtpNJEgeH/H8cXBIcQ+hQqZkN75/i3eMmNIrC+BHhTB8VQ0p0CIrS9dNmf9rWhW/TaTU8OXcc3x5r5OncE2Sty+cH01JZPDW5c1DEhX1hLlWlsdVOXbONxjYHTW12UmNDiA7RE6LXMiIimBGRwaTGhBAUoPXkqXkdmWxycElwDIHDpWZ+v+MkH56qJTxIx/03jyZzrJHjFRZPlyY8TFEU7rx2JDPGxPGb3BO89MGXvJh/mtSYEBIjggjSaSk1t9DU5sDcYsPu7PsmQkWBlKgQ0uLDGB0XSmpMKHEGPbFhemLDAokN0xOq9/9/+k6XSlldC0VVTZSbWyipaSZMH0BCRJAsTXCJ/P+vx4OKqyz8fsdJdhwzERMayBNzxnLntSMJ1evam6EkOMR5SZHBPPf9q6hpsrLt8FkOl9ZTZWnjy5pmVCA2TE96fBixBj0xoXrCg3QYggKYPjoarUbBYnVQ1WilsrGN0roWSmtbOFPXwgfFNT0uOBUUoCEmVE9MWCAJ4UEYI4JIiwtlQlIExvCufWy+1Ixjd7rYW1TNP4+cY9eJKpp6aJ7S6zRcHhvKuMRwJiZFoJerswGT4HCDsroWns0r4o1D5YQE6ng4cww/uvFywobBpzxxaWLD9Cy9YBbkcnNLr00sKAoHz3w1HFiv05IebyA9vr2py6WqjB8RToBWQ02TlZomGzVNVmqbrNQ226i2WDle0ciuEyY6ZoSPCA5gdFwoo+PCGB0Xxu2TE70+OGqbrLz2SSn/+9EZTI1WIoIDmDMxkatSI0k3GmixOviguJb6Fhuna5opMln4otLCmwXnGD8iguAADYkRwXIl0k9ufSfLz8/n6aefxuVysXjxYu67774ur9tsNh555BGOHTtGZGQka9euJTm5fT3tDRs2sHnzZjQaDU888QQ33XRTv47pScVVTbzw/im2Hj6LVqPwoxsu5/5b0ogO9e5/dMJ/aRSFOIOe5KiQi25Tbm7hvS+qqWhopbSuhZLaFr6otHDo/P0p/zhQyswxcUxPi2XaqBgiggOGqvw+HT3bwF8+LOGfn53D5nBxU3osv86ewM1XxBOo++pug3JzC6V1rYyMDmFSciSq2t6Mdai0noKz9Tz0f5/xzDsnmX9VEguvSiYtPsyDZ+X93BYcTqeTNWvW8PLLL2M0Glm0aBEZGRmkpaV1brNp0ybCw8PZuXMnubm55OTksG7dOoqLi8nNzSU3NxeTycSSJUvYsWMHQJ/HHGpVjW28d7KKTQfLOXjGTFCAhh9en8p9M0aRGCF3BgvP688IPa1GITkqhOSoEKaPbr9SqWxo41R1E3XNNv7vYDl/3X8GAGO4ntSYUGLDAgkO0KHTtM8OHBSgJShAi16nQa/TEBigJSRAS1JkMKPiw4gL03d5M/8mVFXlVHUz7x6v5J9H
zvFFpYWQQC3zrxzB3MkjOicDrbK0dTvHCymKwsiYUEbGhHbeM7XnZBUv5p/m+T2nGBUbyqTkCMYmhhMbpicqNABFUbA7XNidKnanC5vThd3pwu5w0dhmp8nqwOlS0SgKGo2CVlHQKKDRKARqFUC54LX25zWKglajEBqoxRAUgFajEByoJd6gJ84QRHiQrtvAGW/gtuAoKCggNTWVlJT2u5/nzJlDXl5elzf53bt3s3z5cgCysrJYs2YNqqqSl5fHnDlzCAwMJCUlhdTUVAoKCgD6POZgUlUVi9VBY6udxlYHljY79a12SmqaOVXdxOHSeoqqmgAYFRfKo7d9i0VTk4mVVfqEF/kmI/Q0isKIyPZRWjPGxBJvCOJwqZmDZ8ycrm7mTG0zJysttNqcNFkdtNicOPqx+mFUSADG8CDiw4MwGvQYw4MwhuuJCg0kTK8jJFBH+9RsCla7k8Y2B7XNVkprWzhd08yhM2Zqm9vn+rpqZCRPzh3H/KuSsbTZyS+sobSutd/n2CFAq2HGmFj+3/TLqLK08c8j5/jodB0fna5j25FzfZ6TO+l1GuIM+vNBoifeEPTV9+F64sKCiAwJOB/WWgJ1GgJ1Grc3ubktOEwmEwkJCZ2PjUZj55v/hdskJranvU6nw2AwYDabMZlMTJ48ucu+JpMJoM9jDqafbirgjUPlPb4WGxbI2MRwFk5N5sa0WMaPCPfKTwZCDIZAnYbrRsVw3aiYbq919MO41PZP4naniuP8f60OJ6kxIbhUqGq0YrK0UdXYRpXFSmGlheoma7+W2w3UakiJDubmK+K59vIobkiL7dL8ZmmzD8p5xhuCWHrTqM5+psY2O3VNNurOT0wZqNUQoG1/c9ZpFPS69sfVljY+Ol2HRqPgUlVUlS7/HT8inM/KG7o93/7VHmxxYXqcLpVmq4PqJivVFitVFitVjW1UN1k5Xd3Mx1/WUd/S/3OdkBTOW/9x06D8v7nQsOitPXv2LAsWLPhG+17Vy2vNwLvnvwbbZi9/3RtqGA7n0BdvOMe+fNTDczpgxPmvgfjy/NfrA9zP3efYl7f7eP2DfhzDcP5rQAphwXtrB7oXAFFRUURFRfX4mtuCw2g0Uln51Tw8JpMJo9HYbZuKigoSEhJwOBxYLBaioqJ63bevY/bk448/vtTTEUIIcZ7bJjmcOHEiJSUllJWVYbPZyM3NJSMjo8s2GRkZbN26FYAdO3Ywbdo0FEUhIyOD3NxcbDYbZWVllJSUMGnSpH4dUwghhHu57YpDp9OxevVqli5ditPpZOHChaSnp7N+/XomTJjArFmzWLRoEatWrSIzM5OIiAjWrm2/pEpPT+e2225j9uzZaLVaVq9ejVbbfpNOT8cUQggxdBRVlYWQhRBC9J+sxyGEEGJAJDiEEEIMiATHEMvPzycrK4vMzExefPFFT5fjdhkZGcydO5fs7OzOIdH19fUsWbKEW2+9lSVLltDQ0ODhKi/dY489xvXXX8/tt9/e+dzFzlNVVZ566ikyMzOZO3cux44d81TZl6yn8/7v//5vbrrpJrKzs8nOzub999/vfG3Dhg1kZmaSlZXF3r17PVHyoKmoqOCuu+5i9uzZzJkzh7/+9a/A8Pi9o4oh43A41FmzZqmlpaWq1WpV586dqxYVFXm6LLe65ZZb1Nra2i7PPfPMM+qGDRtUVVXVDRs2qL/73e88Udqg+uSTT9SjR4+qc+bM6XzuYue5Z88e9Z577lFdLpd6+PBhddGiRR6peTD0dN7PPvusunHjxm7bFhUVqXPnzlWtVqtaWlqqzpo1S3U4HENZ7qAymUzq0aNHVVVVVYvFot56661qUVHRsPi9yxXHELpwGpbAwMDOKVOGm7y8PObNmwfAvHnz2LVrl4crunTXXHMNERERXZ672Hl2PK8oCldeeSWNjY1UVVUNec2DoafzvpjephLyRfHx8YwfPx6AsLAwRo0ahclkGha/dwmOIdTTNCwdU6n4s3vuuYcFCxbw
+uvt9/vW1tYSHx8PQFxcHLW1tZ4sz20udp5f/ztISEjwu7+Dv/3tb8ydO5fHHnuss6nGn//+y8vLOXHiBJMnTx4Wv3cJDuFWr732Glu3buVPf/oTf/vb3zhw4ECX1xVFGRZzfA2X8wS488472blzJ9u3byc+Pp7f/va3ni7JrZqbm3nwwQd5/PHHCQvrOh27v/7eJTiGUH+mYfE3HecXExNDZmYmBQUFxMTEdF6iV1VVER0d7ckS3eZi5/n1v4PKykq/+juIjY1Fq9Wi0WhYvHgxn3/+OeCff/92u50HH3yQuXPncuuttwLD4/cuwTGEhtuUKS0tLTQ1NXV+v2/fPtLT08nIyGDbtm0AbNu2jVmzZnmyTLe52Hl2PK+qKkeOHMFgMHQ2bfiDC9vtd+3a1Tm7w8WmEvJVqqry85//nFGjRrFkyZLO54fD713uHB9i77//Pr/5zW86p0xZtmyZp0tym7KyMh544AGgfWGv22+/nWXLlmE2m1m5ciUVFRWMGDGCdevWERl58fUSfMHDDz/MJ598gtlsJiYmhv/4j//g29/+do/nqaoqa9asYe/evQQHB/Ob3/yGiRMnevoUvpGezvuTTz7hiy++ACApKYk1a9Z0vkE+//zzvPHGG2i1Wh5//HFmzpzpyfIvycGDB/n+97/PmDFj0GjaP4M//PDDTJo0ye9/7xIcQgghBkSaqoQQQgyIBIcQQogBkeAQQggxIBIcQgghBkSCQwghxIBIcAghhBgQCQ4hejBlypRL2v/pp5/uNr3KN3HvvffS2Nh4STU99NBDlJSUXHItQnSQ4BBikJnNZj777DOuueaaSz7Wn/70J8LDwy/pGHfeeScbN2685FqE6KDzdAFCeLuNGzfyr3/9C5vNRmZmJg8++CDl5eXce++9TJ06lcOHD2M0GvnjH/9IUFAQ7777LjfddFPn/jk5OezevRutVsuNN97Iz372Mx599FH0ej0nTpygtraW3/zmN2zbto0jR44wefLkzokBMzIy2Lx5c7f5vHqqqaWlhZUrV1JZWYnL5eL+++9n9uzZXH311Tz66KM4HA50OvknLy6d/BUJ0YsPPviAM2fOsHnzZlRVZdmyZRw4cIDExETOnDnDf/3Xf/HUU0+xYsUKduzYQXZ2NocOHSIrKwtov/rYuXMn77zzDoqidDY7ATQ2NvL666+Tl5fHsmXLeO2110hPT2fRokWcOHGCsWPHDqimuro64uPjO1eWtFgsAGg0GlJTU/niiy+YMGGCm/+PieFAmqqE6MW+ffvYt28f8+bNY/78+Zw+fbqzvyA5ObnzzX38+PGcPXsWgOrq6s4rBIPBgF6v5/HHH+fdd98lKCio89i33HILiqJwxRVXEBsbyxVXXIFGoyEtLa3zWAOpacyYMXz44Yf8/ve/5+DBgxgMhs59oqOjfXbRIOF95IpDiF6oqsp9993HHXfc0eX58vJyAgMDOx9rtVqsVisAer2+83udTsfmzZvZv38/77zzDq+++iqvvPIKQOf+iqJ0OZZGo8HhcAy4JoAtW7bw/vvvs27dOqZNm8by5csBsNlsXUJLiEshVxxC9OLGG2/kjTfeoLm5GWhfQ6KvFQtHjx5NaWkp0L7Ij8ViYebMmTz++OOcPHnSbTWZTCaCg4PJzs7mnnvu4fjx4537lJSUdE5vLsSlkisOIXpx4403curUqc5P9yEhIfz+97/vnEa7JzfffDP/+Mc/WLx4Mc3Nzdx///2dVyCPPvqo22o6c+YMv/vd79BoNOh0Op588kkAampq0Ov1xMXFXfLPFgJkWnUh3OLOO+9kw4YNlzyUdjD85S9/ITQ0lMWLF3u6FOEnpKlKCDd49NFHOXfunKfLANo76OfPn+/pMoQfkSsOIYQQAyJXHEIIIQZEgkMIIcSASHAIIYQYEAkOIYQQAyLBIYQQYkD+PyVAxNJRKlTvAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "UmKR_T4Vl5_X", - "colab_type": "text" - }, - "source": [ - "Some of these look rather large, len(smiles) > 150. Let's see what they look like." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "X2H-4P1ol5_Y", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# indices of large looking molecules\n", - "suspiciously_large = np.where(np.array(smiles_lens) > 150)[0]\n", - "\n", - "# corresponding smiles string\n", - "long_smiles = smiles_data.loc[smiles_data.index[suspiciously_large]]['drug'].values" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "FDX7tagnl5_e", - "colab_type": "code", - "outputId": "c8ff34b3-0299-4302-ec80-5e2dfde22606", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "# look\n", - "Draw._MolsToGridImage([Chem.MolFromSmiles(i) for i in long_smiles], molsPerRow=6)" - ], - "execution_count": 27, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABLAAAADICAIAAAB3fY8nAACMMklEQVR4nO3dZ1wT2RYA8DNphN4RBLErYhcLiogFO3asa11dsKLurqJrwS72XlDsva4FK/aui12xoGIDRJQmNZCc9+FiHkKICIEAOf/f+7AvM7lzg8ncue0cDhGBEEIIIYQQQojm4am7AoQQQgghhBBC1IM6hIQQQgghhBCioahDSAghhBBCCCEaijqEhBBCCCGEEKKhqENICCGEEEIIIRqKOoSEEEIIIYQQoqGoQ0gIIYQQQgghGoo6hIQQQgghhBCioahDSAghhBBCCCEaijqEhBBCCCGEEKKhqENICCGEEEIIIRqKOoSEEEIIIYQQoqGoQ0gIIYQQQgghGoo6hIQQQgghhBCioahDSAghhBBCCCEaijqEhBBCCCGEEKKhqENICCGEEEIIIRqKOoSEEEIIIYQQoqGoQ0gIIYQQQgghGoo6hIQQQgghhBCioahDSAghhBBCCCEaijqEhBBCCCGEEKKhqENICCGEEEIIIRpK0zuEiOquASGEEEIIIYSoiUDdFShg6enQty/07w+dOwPHZT7y7Rv8/TeYmkKNGvD5Mzg6gqOjumpJCCGEEEIIIWrAYcmeI+vUCQICAACqVz89ZUord3ehUAgAN24k9u+vGxoKenrw5g2Ym8Phw9C9u5orSwghhBBCCCGFqUR3CBGhZk14+hS0tD5ZW1u9eVO2bNlx48ZFR0evX3+Q457Y2vJ27AA7O3XXkxBCCCGEEELUoUQvGU1NhS5dICwMYmN9EbW0tN69ezdp0qTU1FSBQODre9rLq4NQqO5KEkIIIYQQQoialOgZQiY+PmnLlsoLFoRHRABAqVKlAODgwYNNmzZVd80IIYQQQgghRJ00IMqogYHO2LFvQkPXr19frly5yMjIgQMHUm+QEEIIIYQQQkr0ktFMtLS0PD09OY7z9PSMiYlRd3UIIYQQQgghRP00YIYwE2vr6k2a9NPWbqTuihBCCCGEEEKI+mnKDCFjZOR044ZTid81SQghhBBCCCG5oVkzhBYWAABRUequByGEEEIIIYQUAZrVITQ3B1tbKFNG3fUghBBCCCGEkCJAA9JOfBcZCcOGwcCBkJYGnz+DoyM4Oqq7ToQQQgghhBCiPpqyh/Dw4RQPD/HXr/DqFTx9CjzNmhklhBBCCCGEEAVKfocwPj5+woQJV68mx8dvb9MGtmyh3iAhhBBCCCGEAJT4JaMfP350cnJ6//69rq7uunX/9e9fjePUXSdCCCGEEEIIKRpK+AzhxYsXeTxezZo1Dx48WKVKFXVXhxBCCCGEEEKKkBK+evLo0aNv3761srLatWuXuutCCCGEEEIIIUWLmjuEaWng6QktWoCvb4GUb2FhAQBnz549fPhwgVyAEEIIIYQQQootNS8ZFQjg0CH4+hW2b89HKevWwdOncPs2XLkC2tqZj7AOIQB8/vw5HxcghBBCCCGEkBJIzTOEHAcNGwIA3LqV1yJCQmDBAti0CYKCYP36LAfNzc0BgOO4r1+/SqXS/FSVEEIIIYQQQkoY9e8hdHFJatQoPCTkZh7fX6ECiMWQkgIAsHhxakJC5oOsQygUCqVSaUxMTK4K/LEEQgghhBBCCCmp1N8hrFv32u3b1idO/J3H9/P5MH068Hj3ypRxFQoXr1ghP4KI8fHxAGBiYvLPP/88fvz4/fuUtDSlpX37Bk5OMHAg+PnBixd5rBIhhBBCCCGEFAfqz0MYGxtramoqEoni4uJEIlFeipDJxrZvv/LsWQAwMjJ68uTJvXv3AgICTp069eHDB2NjY/ncoLX1h4gIG0tLGDIEnJzA2Rn09DKVI5VCp05w6hSIxeDkBIcOgaHhL1dm0yZo2hSqVs3LByGEEEIIIYSQQqT+PISpqamGhoZ8Pt/e3r5Xr149egyoV+8X08fzeM08PFaePSsUCmNjY8uWLSvfLmhjY+Ps7GxhYREREREWFvbxoyXHQXg
4SCSwbBkMHAhTp8KoUSAQAAAs/Oef0aGhOlpakJIC4eHwq13lIUPg8GHg86F5c8g5qOmZM3DyJJibg0AAkyb92hUIIYQQQgghRIXUPEP45s2bNm3avH79WldXNzExEQBcXIJCQx26doV+/aBRo9yWk5KSUqZMmejoaH19/YSEhDp16ri5uXXq1KlevXrcj53L9HT49Al0dKBzZ7h+HQCgV6+rAwbEvXv3bvTo0WZicVDFimWjouD2bShX7tc+jLs7HDoEANCqFZw7l9NZzs5w7RpcuQLOzr9WPCGEEEIIIYSoljpnCB88gGnTDr5+/bpBgwYTJ07s2bMnj8d79MguJgZWrgSRKLcdwo8fsV+/Ll++fNHR0Zk2bdqwYcMMc17qKRCAjQ0AwNWrcOgQLF2aGhDQff/+Lzwej+O4LykpNUJD31y8aP6rvUHIFI3mh3WoWX379tNTCCGEEEIIIaQwqC2ozKVL0Lw5BARM9PRcfeHChVKlSrm6ular9ltMjC4ADB4Mnz/DgAEQGfmTcq5dgwYNOKnUz9bW9vLly3/99ZeS3mBmHAfu7nD5Mm/ZsrkWFhYCgYD1Cf02bjRnqTB+0VxDw5Hly8+tUeNopUpKTtPXTy5dOl1PT5aHSxBCCCGEEEKICqlnyWh6OtjbQ0gI9O0LW7eCPJRMTIz0+HH+lSuwcSO0bQuBgXD4MHTrlmM5Z85Ap06QlgZt2sDu3Ummpjp5q09sbOywYcMOHTpkbm7+4sULY2PjPBRSp06dhw8fAoCnp+f6bBkR5SwtLSMjIyMiIiwtLfNWW0IIIYQQQghRCfXMEJ45A23bwuDBsHMnZA4samzMHzgQ/P2B48DREQDg9u0cCwkIgGvXoFIl8PKCEycgz71BADAyMtq5c2eDBg2ioqJ69+6dJYX9+/fv//jjjwULFvTt2zc8PDynQr6xxaAAekrXgyYkJPz0HEIIIYQQQggpBOrpEF6+DHv2QJ8+wMv5+o0bp9apc/HLl+0Kj377BhMmgJER3LkDK1ZkhAnND7FYfPDgQXNz88DAQB8fH/bip0+fvLy8qlSp4u/vP3v27L1791avXt3ff3uWWVWJRLJhw4avX78aGRnxeLzHjx+z/IfZnTp1KjExkeO4jx8/KqlMbGxscnJyfj8SIYQQQgghhCilng6hTAZfv8KtW8rOadgw4eHDVnv2jEhPT89+dMECeP4cDh0CXV2V1crW1nbfvn0CgWDevHmbN2+eMWNGlSpVVq1alZaW1rNnz7Nnz7q7u8fGxu7cWd3ZGZ49AwBIS4ONG6FHjxOenp5xcXHGxsYCgeDs2bM1atQICAjIXPj169dbtmzZoUMHjuMQsW7duvPmzUtLS8tSh/fv33t6epYpU6ZNmzadOnUKCwtTWNXERHjxQmUfnBBCCCGEEKKhUB3270cAbN/+J6dVqlQJAO7du5fl9Y8fo01MZByHN2+qvm5Lly4FAIFAAAAcx7m7uz99+lR+9PDhs9bWCIBiMa5ZgxUrIgBynMzN7bdjx44h4uPHjx3ZalcANze3sLCwW7duubm5sVdMTU1nzJgxcuRIHo8HADVr1rx9+zYrOSwsbNSoUSKRiF1dLBYDgImJyZYtW2QymbwCqano54dWVlijBu7cicuW4fz5qv8jEEIIIYQQQjSBejqE798nOzmtq1mzc+auThapqamVK1c2Nzc3MjIaMGDAsWPHJBIJOzR48GArqwbjxr0ooOr16NHD0NCwQoUKQUFB2Y/GxKCHB5qb48qVaG6Odna4bRump///BKlUumLFCrZLUEcnY2ejkZHRrFmz4uPj2TlXrlyxs7MDAB6PN3DgwPHjx2tra7P/27Nnz5CQkLCwsG7fw+k0a9bsxYsXEolk/fr17duvBEAAbNwYP30qoD8AIYQQQgghRCOoLTG9jY1NWFjYixcvqlSpkv1oQkJC9+7dAwMD9fT0Er7n9zM1Ne3cuXOdOnXGjx8vEAiePn1aSWmChzw7ceKEm5t
bw4YNb+cc0+brVzA1hefPoXJl4PMVnBAeHj5q1CihUHj69OmRI0d6e3tnCV6anJw8Y8YMNiGZnp7OcVzHjh3nzp1bq1Yt+Tm7d+8eN25cVFSUWCw2MjL69OkTAHTtGjVsmFnHjqr6uIQQQgghhBANpbYOYY8ePQ4fPjx69GhfX1/dHzcCRkdHu7m53bx509LS8uTJkyKR6MCBAwcOHAgODmYn8Pl8Ly8v1pUqCMnJyWZmZikpKeHh4aVKlcrptBMnwNQUvq8PVUwikaSmpurr6+d0wv379+/evfvixYv+/fvXrl07+wkxMTGTJk3at2+fgYGBnp7epEmT+vfvz1MSjYcQQgghhBBCckdtHcITJ06sXbv25MmTYrHY1dW1U6dOXbt2tbCwePfuXdu2bV+8eFG+fPkzZ85UrlxZ/pZnz54dPnx41qxZUqn0yZMnbMllARk5ct/589WmTq01YEDBXeQXREREJCUllStXjq9wOpIQQgj5FSfiTtxLvmcttO5r3Febp63u6hBCCFEbtXUIAWD37t1r1qy5deuWTCYDAKFQ2KBBg5cvX3758qVevXonT57MPjuXmpqqra0tEAiSk5MLtGu0ciWMHQt9+8Lu3QV3EUIIIYQQQghRJ3V2CJkvX76cPHnywIEDZ8+elUgkNWvWNDU1PXLkiKGhYfaTX79+XalSpXLlyoWGhhZord68gYoVwdgYoqIUbxEkhBBCCCGEkOIu3wnd883MzGzgwIEDBw6Mjo4+ceJE+/bt9fX1tbS0FJ784cMHAChTpkxB16pCBVi4EJycgOMK+lKEEJLJpk3QtClUrarwICIuWLAgMjIyy+t///136dKlObphEUIIKe6oHSx06u8QypmYmAz42Y69QusQAoC2NqxfDxcugKvrTyLHEEKICkRGQp06IJGASATbtkGbNtmOR7Zs2TImJiYiIiLLoZs3b3p4ePz++++FVVdC/u/ly5d3797t27evuitCCCnmqB1UkyLUIVQgPh6CgiAyEqOifL58iYyMvHbtGhRWh3DJEnj7Fl69gooVC+FqhBCNJ5XCp08gFkN0NKSkZD/+zz//BAcH161bVygUvn//Xv56ly5djh49GhoaynKoFmKNCQGJRNK7d+8HDx6cP39+5cqV8uy7mYWGhmpra1taWhZ+9QghxQm1g2pStLMXPHkCrVpBv37c2LH+S5du2LAhODhYIBCwnO8FSiaD8HDgOLC2LuhLEULIz92/f3/r1q0ikWjv3r0NGzasksnKlSudnZ0/f/48b948dVeTaByRSDR+/HgdHZ1NmzbVr1//8ePHmY9eu3atV69eVapUWbx4sbpqSAgpGagdLDhFe4bwxAkQi0EmA4nEWihkc8NSqfTz588FfeWoqIS6db9xnJ5YnGMKQUIIUaE0c+7JHT7H41DGr1CZyzLCOW7cOJlMNnbs2CpVqhw4cCDLe1esWFG/fv3ly5cPHTq0SpUqhVZnolEQ8eXLl7a2ttraP6SpGDhwYP369Xv37v3kyZNGjRrNnz/fw8Nj165dq1atevToEQBoaWmlpaWpqdaEkGKD2kF1KdozhBMmQEoKSCQgEDStWbNmzZoODg6IeObMmYK+8vv3z27fLp2W1qKgL0QIId+hjCeVcSjjSRF+iP+8Z8+eK1euWFhYTJkyReE769atO3jwYIlEMmHChEKpKtFEkZGRdnZ2ZcuWzX7I3t7+5s2bAwcOTE5OHjdunLm5+R9//PHo0SMrK6uZM2e+e/duxYoVhV9hQkhxQ+2gehTtGUJDQwgOBnNzMDNbBgAAUqnUwsIiIiLx9euoihXNC+7KLHpNXFzctWvXmjRpwuMV7Z4zIaSYunAB/vsPAOLa6SWYRXCcmMcTInKxsUdSUp49emRw/358WlraqlWrAGDevHlKtkbMnz//0KFDx44dO336dLt27QrvIxCNoTyum56e3rZt21q1ajVy5EihUFinTp0RI0YMHDhQLBYXbjUJIcUKtYNFQNH
uEHIcVKuW+QU+n//bb8/Xrzc/eRLGjCnAK9+/f19XV/fVq1fOzs5lSpd+0727wM0NWrQAkagAr0oI0SibNsG5c7B3LwAklKr3qeY9ALCxWREWNu3r1y0AcO+e0+TJ1wHA1tbW3Nzc0NBw//79vXr1yl5SRETEunXrXFxcgoODKXQHKSCsQ2hra6vknG7dug0aNEhbW/v+/fuFVS9CSLFF7WDRoP7E9L9q+3YYNAjat4eTJwvqEomJiVWqVImMjJTJZFpaWr3NzLZ+/AgAYGQET55QnBlCiMocPw7XrgFAjLthgmVEdPRORBHHpRsYtBaJygYFmdy4ES2RSLZu3RobGwsA5ubmL1++NDIyylLM4MGDt23bpqWllZqa+u+//3bt2rWwPwjRACtWwNy5kX/8kTZ3rk1O5zx9+rRGjRp2dnbPnj0rzLoRQoorageLgKI9Q6hIu3bA48HNm5CWBkJhgVxiyZIl4eHhpqamiYmJKSkpwy0t4eNH0NYGkYh6g4QQVerUCTp1AgBjAL208M+fV3OcGDHF2LifkVFna2vo0gUAoGbNmkOHDtXS0oqKipo9e/aSJUsyl3H37t0dO3bw+fzU1NSWLVtSK0gKyPv3EBVVKttjWJZz3sPPZhEJIeT/qB0sAorf1jgLC5g7F0aNggsXCqT8z58/sy/Z/v37P3/+vHfv3hp164K+PiQnQ58+sGkTvHhRIBcmhJAcDB48uEGDBqmpqRzHrVq16kWmuxAissBriMjn85cvX66+apISjiX9Ut7Xy82yUkII+VXUDhao4tchBIBJk8DEBLp1YzPMKjZt2rT4+PiuXbu2bNlSX1+/d+/eehs2wOfPcOwYjBoFQ4dC1aqqvyohROMJv3IOLfTqthQ4tNAzvPHDzZnH461YsYLjOD6fn56efurUKfmhkJCQp0+famlpyWSyESNG1KxZs9ArTjRFy5bQqxe8fKnsHOWBZwghRAlqB9Wl+C0ZZSIjITkZ3N0hKAhsctzL8MueP3++efNmgUAwd+7cHw6IxWw6mxBCCooM4VsCJxZDSgqXLstysHHjxn369NmzZ0/t2rWNjY23bdsmP9SzZ88NGzYYGxv7+PgUbo2JZhkxAj5/hunTISQE1q0DXd2sJ8TGxrK8UNQhJITkBbWDalJcO4Tz5sG9exAc/H7cuDm7dq3S0tLKf5lxcXFjx45NT0//7bff7O3t818gIYT8Ao4DkQh4PBCJgM/PfnzBggVXrlyJiIgYPHhwlkP29vbDhw83MzMrjHoSDWZrCzo6sGMHxMZGzpsXVaNGDfZ6SEjI6tWrN2/enJCQMHr06JYtW6q3noSQYonaQTUpflFG5b5+hdatG96//9/QoUP9/f3zWdqzZ8+6dev24sULHo9naGh45MiRZs2aqaSehBDyCzZtgqZNc1qanpaWNn369PDw8Cyvz5gxw9bWlq+o+SREtZ4+heHD00JCGsXFPZs/f37VqlVXrFhx9uxZROQ4ztXVddq0ac7OzuquJlGT16/hyhU4fBiaNIEJE0BQXCceiDpRO1joinGHEAAePHjg5OSUlJQ0evToBQsW6Ojo5K2cI0dg9erh58/71axZs1SpUufOnROJRH5+ftmHHwghhBANl5SUNGbMmM2bN3NcxlOEjo7OgAEDvLy8aH2NptuxAwYOBIEA0tPh2zfQ01N3hQghP1csg8rI1alTx9/fv1evXqtXr65SpcqGDRukUukvlSCTwbRp0L073LixbNSoqbdu3Tp9+rS3t7dEIhkyZMjYsWNlsqwrmAkhhBBNpqOjs2nTJh8fH5FIZGRktHDhwo8fP65fv556g4QQUhwV7w4hAPTt23f06NH169cPCwvz9PR0cHAIDAzMzRvT0uC//+DbN9i7F/h8mDNHe/Xq2To6Onw+39fXd/369UKhcOXKlb169UpLSyvoT0EIIYQUL1WrVk1NTW3Tps2ECROMjY3VXR1SJCQJhZIKFeIMDCQVKiDHqbs6hJBcKfY
dQgBwdna+c+fOzp07y5Yt+/DhwzZt2rRr127r1q3379///PlzlpPDw8N37bri7g5mZuDoCLt3Q/fusHw5/PnnD6d5enqePHlST0/v8+fPkZGRhfdhCCGEkOKAUg6S7A6lpWm9eWMWH6/15k1icd6URIhGKSGbfTmO++2333r27Llu3brp06dfv36dRb4GALFYXKZMGRsbG2Nj4zdv3jx8+FBHxyI9/VNqKlSvDg0awIgRist0dXVt0qTJ2bNnr1y50q9fv8L7MIQQzSSVwtu3sHUrNGsGTk6gaFN0RESEl5fX27dvs7y+bt26atWq6WbPA0BIgaGUg4QQFaN2UE1KSIeQEYlEY8eOjYqKmjt3ro2NjYmJSVhY2NevX0NCQkJCQvT09BISEkQikaurY7du0S1amPx0WPP169cAUKtWrcKoPSFEw0VGQqVKIBbDnDlw9Ch07pzl+KNHjxo0aKCrqxsTE5PlUNu2bYcPH541gSohBen9+/dAHULyo7i4OABgoYbS09PVXR1S3FA7qCYlYcloFmfPngWA1atXP3z48MuXL4mJic+ePatRo0ZCQgIAdO3a9ciRI4MG/bw3KEtM7G9q2sTWtmoOcW8JIaQw/f333xKJxM3NLUvoDl9f35iYmCVLlrx69UpddSMaiHUI9SiMJAEAAIlEsnjx4ilTpgCAtrY2ADg5Od06f17d9SIlCrWDBaREzRACwMePMl3daVWqrGnTpg17RUdHx87OrkqVKk+ePAGAr1+/5rIo3qNHM+7cmVGnDgiFBVVdQgj5DoUc1reXxUfzDEzAUJhluO7gwYOBgYEmJibLli1bt27dmzdv5IeGDBny9OnTHTt2TJo06eDBg4VcbaKB0tLSDh48+Pz5cwDo2rXrhAkTJk2aJBaL1V0vojbnzp0bO3ZscHAwALi6uvbp02fp0qXBwcHV/voLzM1hxQqgCLQkF6gdVBssWVasQAB0d8/6+uZJk3xq1fKvWvVynz65LWv9egTAgQNVW0NCCFFIIgkLCoK7d8VBQRATczTzoZSUlEqVKgHAunXrFL43IiLCwMAAAFh+cEIKyOfPuGJFcOnSpdkjhEAgAAAjsTilYUPctg1lMnVXkBS26Oho+RB8tWrVTp8+zV5PTU09t3YtGhggAIpEePu2eutJigVqB9WlpC0Z/fdfAIBu3bK+PkQonPHo0dAXL5pdupTbsmJjQVsbatZUYfUIISQPFi1a9OrVq+rVqw8bNkzhCZaWlhMnTgSA8ePH074dUhAiImDoULC1hb//rozIq1KlSrNmzdiXzc/CQuvOHRg0CIYPV3c1SWEzMjKKjY01MjLy9fV98OBB27Zt2esikajViBHw5g14eUHNmvDwIfj5wa1b6q0tKb6oHSxQHJagoMCIMHo0HD8Ojx+DoeGPx1atAi8vAAA+HyQS4OWuJ7xxI6SmQv364Oio8toSQgisXAmBgQAQ5WUVW+59UtJ9obCUVBojEpXj842uXbM9dOh9WlrapUuXUlNTL1y40KJFi5xKSklJqVat2tu3b3fs2NG/f/9C/Ayk5DtxAq5fhxUrICUFOnaEv/760KyZDcdxhw8f3urre/TZMy4hAbS04OBBcHNTd2VJYXvx4oWpqamZmVmOZ6SmgpZWIdaIFCvUDhYBJWoPIceBkxNUrgzPnmXrwZUuDVZWwOOBnR2kpCiMY6vAH38UQDUJIQQAADZtgrt3ISAAACQ96sWb3AOAUqVGR0auTEi4BgCfPjkFBFwHAGtr64SEhPPnz1+8eHHWrFnZS7p///6oUaNsbW3T09OrVKlSuB+DFB/x8WBgkOPRTZugaVMwMwNT0yxHkpOBx4MZM6B7d6hYEQAyIot27969e6dOsHYt+PiAkxNERsKLF0CR2DTMz2PvUW+Q5ITawaKhRM0Qyh0+DN27KzrAWjtqqwghRcTTp/DmDQAk1Ram6EWGhg7mODFiipXVVF3dhm/eiD5+lCDipEmTnj17xnGcUCh8/Ph
x9qbOxcXlypUrPB4PAO7cuePg4KCGz0KKgIQEEAhAQXiXkBDw8oLERLh8GThOwTtv3IBVq+DCBShdGu7fz3JwwADYuRO2boVBg3K4cFQUpKQApaAghPwqageLgBI1QyinuDcIAEOHFmo9CCFEuerVoXp1ANABEKaFy1/W0WlgaNipbl2oWxcAwMTEpFmzZnw+XyKRTJw48ciRI5nL2Ldv35UrV0QikUQi8fT0pFZQA504AY8fg60tBATAtWswdSoMG5axNyIhIeHumjUu06eDRAJaWtCvH8yYoWBg9NYt2LsXBAKIioLwcPgeNoZ5/x4AQFm6JnNz1X4iQoimoHawCCiZM4SEEFLspKdEvD/tkp4eKxAYWTRcrle6Q+ajPXv2PHjwoFAoTEtLO336tDxyQ3Jysr29/du3bwHAwMDgxYsXlpaWhV95UhSkp0PjxhAUBADw22+7hw+3jYmJGTNmTNiHD18tLAyiokAqhWrV4MkTBRvpnz0De3sQiUAiSd+yRTB4cOaD3bo9/fix1J49RpUqZRtHlslyuy2fkJ8aOhREIqhaFdq3p/VcGojaQXUpmTOEhOTKiRPw77/QuDFNHZOiQBCNFbqFgFgMKVFwNB06/3B06dKlJ0+eTE5ONjIySkxMlL8eFRVlZWUVERGRmpo6Y8YMagU1mUAAt27B1q2wY0f07t0Ddu2SaWlppaamisXi37W1DyKCmxusXau4/1at2pfWrc9/+LAlMdHg5Mn9mTqEMpns5Ml6aWlpNjZJCh4b5syBhw+heXMQiaB2bYrBRvLu2jXYsgV4PLC0BAcH6hBqIGoH1YU6hESDGRrCpk1w/jwMGgQC+i2QIq1MmTLjx4+fO3eujo7O6dOnT58+nfloampq1apVR40apa7qkSKCz4ehQ6FvX/GqVfNmz56to6MTFRWVkpISoq8fdfWqeZMmSt47rWLF9YGBAGCckJCeni74fld8+/atRCKxtLRUkHr+2jWYPRtkMhg9GnIO/UfIT6Wnp8v++UeECIgQGQlGRuquESlyqB0sOJr+EBwaCosWgYMDfPsGKSkwaZK6K0QUSkgAmQwOHID0dFWOQDs5QbVq8OwZHDwIffqopkxC8kxLC1q2hE+fwNJS4Y4stqX+7t27GzduzHKod+/egwYNEolEhVJRUtTp6Oh4e3sbGRkNHz7cysrK29t71KhRgp8Ne7Vr1279+vUikSgmJsbFxUUqlX7+/DkqKiohIUFHR+fLly8DBw7s1KlThw4ddHV1ASAmJma/r6+Hjg43ciT1Bkk+rVq1asWjR7tsbZ3CwmD8eMoCraGoHVQTjd5DKJNB8+Zw9SqMHg2rVqm7NoQ5dQpkMujY8YcXhwyBK1fgwAGoV0/Fl9uwIW3+/KNOTu47d+bl7SdOwNWroKMD06YpDtxHyK/6WTDk/fv3x8bGZnmxR48eptlSBRANd/jw4R49enTv3v3QoUM/Pfnr169LlixZuHChvr5+li+YlpZW5cqVnzx5wv6vgYFBx44du3fvvmPHjmPHjo10c1tz+DAIhQXxEYiG+PTpk52dXVxcHACMd3FZGhAAenrqrhRRH2oHC51GdwhXrEgcN07XygoeP86edYmow8yZsGQJJCTA6NGwcGFG6PT9+6B3H9DRgaAgqFZNtRdMT04uW6FC+KdPly5dcnFxyUsR7u5w6BBMnAgLFqi2boQQkh/Hjh3r0qVL586djx49quS0b99g7Vo4f/5FYKAdx3Ht2rXr3LmztbW1mZmZubl5qVKl9PX1ASA4OPjw4cOHDx++/z0phUgkEolEjx49Kl++fGF8HlKCXL0K48cDAFhbxyYk9Pj69evTp095PJ5EIjlw4IC7u7u6K0iIZtGYJaOnToG5OZQqlW5mJtDWBoDHjx9Pm9aiefPjEyc2VklvcPdueP8emjenHfV5hQh37kBCAvB4snMnXz6/WrbqVj7f5L3ZrPJOtfkDR6q8NwgAAm1tzxEjfHx8pk+fvmfPntI
/RlrPFU9POHYMFi6Mq1zZcNgwldeQEELyhq0RTU9PV3LO1avQuTPExgKfX7V//3/GjeueU7h2e3t7e3v7qVOnhoaGHj58eMeOHSEhISYmJmUo9yD5FSdOwI4d0KAB3L0LACCR8B8/vgAAhoaGiYmJTZs2pd4gIYVPM2YIExJAX5/958hatba/fm1qaiqTyT5+/Dhy5Mg1a9bk/woPHkCvXmBtDWPG5JwFkSgnk8GiRTBtGvD5IVv14it94fG0tbTKJycHGxv0qFD5YAFd9u3bt7Vq1eLz+bGxsVZWVg7fOTo6mucys9a2bWEbNtR++HDD9u3dv//zJycnP3jwICgoiM/njxw5soAqTwghOTl79mzbtm3btGlz5syZ7EelUti0Cd6/h7Vrwd4e5s6FX1okkZaWVr169ZCQkLVr144YMUJllSaF5cQJMDVV2xB2fDyEhAAA8HhJMTG3ZDLZihUrAgICevTocfBgQTX3hJCcaEaHcMYMWLAA+HyQShuLxbdiYwFAR0fHzMwsODiYbY7Pp2/fwMIC0tIgPBwsLPJfngb777+Ye75v6h/mOAGPpycW26WkvLSzuykWVymgC06ePNnX19fY2BgRsyxJf/funW2WTMx37sDmzVC7NrRvD+XKyV+eO3fu1KlTxWLxqFGjYmJigoKCgoOD2cB8xYoVX716VUCVJ6RYS01N5TiOYgAUkAsXLrRq1app06ZXr17NfnTcONi0Cfbtgzp1IA9rIwDg0KFD7u7u5ubmr169MjAwyG91SWEJDYXUVLh1Cy5ehGXLwMRE3RUCAICwsLAqVaqIRFqBgS/q18/daCwhxV8RaQc1I5lsx46QkgKJiZCSEvn9NZbGRCW9QQDQ14eWLUEqhRMn0lRSoOZq0MDwj50WFl6IstKlZ+jo1JVK4168cI6LO1UQVwsNDV2+fDmPxztz5kxMTExYWNixY8d8fHxcXV2FQmHFihXZHvf/Gz4c/Pxg5EiYPz/zy1OmTBk0aBDHcUuWLNm8efOjR48AoHbt2r///vvff/+tEcMuhPyiQYMG6erqBgYGqrsiJVaZMmV69Ohx8+ZNT0/PT58+ZT7k7w8rVkBqKmhr57E3CAA9evRwdnaOiopaQDuoi4/bt6FxY2jfHh48gO3boWpV8PdHmUym7nqBtbX1rFkHeLyQ8eOpN0g0RdFpBzWjQ1i3Lrx6BTdvwvHjM5YtW7hw4dixY01NTR89enTx4kVVXaR//5d2dv0OHOiiqgI1Fo+nXbr0LKHQMixsUkzMQR6Pn57+OW3TPJgwASQS1V5rwoQJKSkpAwcObNCgAQCULl26U6dOM2bMCAwMrFSpUnp6+sePH394g7wC2cZynJyckpOTbW1tV65ceePGjfj4+AcPHmzatGn48OEcBSAlJBsDAwOpVPr8+XN1V6TEqly5cvPmzTmO27BhQ9WqVefPn5+cnAwAZ86c2bChg6Fh3KZN+c0WsXjxYo7jli5d+v79e9VUmhSk48eDWrSAyEioWhUGDYK2beHLF9i69W3Dhg3v3Lmj7tqBp2cHkcj02jU4ckTdVSGkUBShdhA11dy5c62sbDw8/lVVgV++fBEIBEKhMDo6WlVlaqyPwSOCgiA4uM7z505BQRB8wgTFYgTAevUwMlJVV7l27RrHcTo6Ou/fv89+tHXr1gBw+vTpzC+m160r09aWCgQ4fnyW89u0aQMAmzZtUlX1CCnZVq9eDQB//PGHuitSwj1//rxnz56sxbexsfHx8WHLO+fPX6mS8vv06QMAAwYMUElpJP/S09PXrFkTEBDw9OnTxMRE+etsOYyLy/yhQ1EiyXhx716sWbM1APD5/GXLlmUtKyAA//kHd+zAAwcKp/Jr16JYjNOno7c3zpiBvr7o64tLluChQ4VzfUIKVdFpBzW3QxgTk2RmlgqA//2nsjIbNmwIAK6urlu2bAkOfiaVqqxkzbJ1q9RA/OFk65SUEJksPTxsprRRTQRAsRgbNMD0dJVcRCq
V1q9fHwBmz56t8IRJk97UqhW7dWti5hfLli3LnqsmT56c+fWoqCg2HPDlyxeVVI+QEu/jpUtXatX62q6duiuiEc6fP1+3bl0AYAsW+vTpI5PJVFJyaGioWCzmOO4/FbamJK/2799fp06dzOP+FhYWDRs2rF27NvvX9/VdkOUtsbGxPj4+fD5/6NChMTExWUsMDkaRCMVivHatEOqfloZbtuCQIQjww/8aNsRdu3DWLLx58yclyGSyiIiIZcuWXb9+XVVfcrldu3DZsp/XgZBcKjrtoOZ2CBFx4kQEwB49VFPay5cvjYyM5HEpq1atY2iIrVvj1KmYlKSaS2iEV69QTw8BcMuW/794+zZWrIiGhjh1Kj5/rpLrxMbGurq66urqxsbGKjxh+nQEQB+fH160srJi/74+Px7w8/MDgPbt26ukbiT//vvvv3Xr1oWGhqq7IiRnHz8iAJqbq7semkIqlY4bNw4AHB0dk5OTVVjyhAkTAMDFxUWFZZI8ePnyJZv+rV+/vra2to6ODp/PZ22WoaGhlpbWhg0bsr9rzpw5NjY2fD5fKBRmnlH8v9GjEUBmbf0+JKTAPwMiIr59i76+GfOE3t44fjxOn46tWmGZMj9/oFq2bJmRkREb+ChTpoyXl9fVq1dVUqtRo7BfP3z2rNhMV1I7WAwUmXZQozuEERFoY4Nz5qigqPDwSJaZ19bW1tfXt2fPnp07T2XDWsbGuHMnDSnlWkoKentjr15ZX4+NxRs3VHid9PT0qlWrspFyqaLJXD8/BMDBg/HJEzx+HDdvPv/XX3/Jw0A1a9Zs586dDx48SE1NRURXV1cA2Lx5swprSPKjV69eALBjxw51V4QoZWCAAEjz6oVl8ODBADB37lzVFhsTE2NmZgYAx44dU23JJPeSk5PZJHCvXr1Onz6deZLQ2dmZpZdcsmRJ9je2a9eOnda4cWPFRaenp/Tt+0eNGpUrV5avgpHJZC9fvty3b9+kSZMePHjw+fPngvtoiCiTYf36CIDz5ik77fbt2yKRiOO4Ll26ZM6QaW9vv3jx4vxUIDkZDQ0RAF+8yE8xhYraweKhaLSDGt0h/PoVFy/GHTvy21uLi0MHB1nDhn5aWloAsGLFCvZ6eDj++y/6+mKvXiqbhyz51q7FmTNRReN5yj1+/NjY2BgARowYkeVQTAxOnIgzZ2K1ahnrVZo3n8naFWtr68wNrVAotLOz4/F4AoGg6K8XnTULe/ZENzf8POBPbNIEHRzw1i11V6pATJ8+HQCmTJmiktJYt5+oWGIi+vrivn2YkKDuqmiEtLQ01m179uyZygtfsWIFAFSoUCEqKkrlhZPcGDZsGABUqlQpLi4uNTU1JCQkMDBw48aNU6ZM2b59+9GjRwGgYsWKWQZApVKpoaEha84mTJiQU+GJiYlsU0z16tVHjhzZtGlT/e/pnQGgSpUq9erV+/btW4F+wAsXEAAbNYrNqfMZHR1drlw5APjzzz/ZK0FBQV5eXqVKlQKABg0aXLp0Kc9XP3QIAbB+/TwXoAbUDhYDRaYd1OgOYe/eCIBjxuSrkORkdHHJ2ODm5HSuYcOGCT/+o375ggIBCoX49Wu+LqQpFi5EACys5dSXL18Wi8WZdxKGhr4fNw719REAPT3RzQ3t7LBdO5w27Zavr+/evXvPnTt36NChmTNnuru7V6lShS3IqVSpEgDUrFlz//79Kt+0oBLh4bhpU8YIKwB+cWiT8V/DhuHUqahwmVBxdmDv3irlyi0YNSqf5Xz58sXNzc3T07Nq1aq+vr6fPn1SSfU0hLLfwocPWL8+Vq2Kq1apah04UY5FNre3t1d5yTKZbO3atXw+v1y5cvr6+l5eXmFhYSq/ClFi9+7dACAWi+/du6fwBKlUytYxnTx5MvPrDx48AABtbW0AOHLkiJJLhIeHm5qa2tjYyPuBZcqU6dy588SJE9lmmQ4dOqSlpanyU2UzYsRejuONHj06+yGZTNalSxcAaNiwYZauS1p
a2ujRowHA09Mzz5ceNGics/O2tWtVFtauEFA7WBQUl3ZQczuEe/ZIAVBPD1+/zlc5z56hhUXGfK+9PX79mpL9nFatEAC3bcvXhTRFdHTGHsIHDwrngkePHhUIBADg7e09YMAAgUBQr94XjsM2bfD69Z+/PSkpKSgoaP78+aVLlwaAWrVGODpiEVw25eGBIhFOmoT79+OxYxhz8T5eu4ZBQdiwIQLg7t3qrqCq3buX8ZtUSiqVxsTESOQR9350//59Nt4sHwsXCoXdunU7fvx4uoqCG5VU4eHhffr0GTx4sKur67Zt25J+3Pfz5to1NDdHAKxSJb+3YJKTtWtx6FCcMSNq9+5z5869ePHCw8MDAKZNm6ba64SGhrq4uLAfiDzslra29ujRo9++favaaxGFJBJJxYoVAcDf31/JaSxdZMeOHTO/ePz4cWNjY4FAwHFcpNIg3lKplDVzI0eOPHv2rHwqeOnSpQDAVkh5eHjk/+Mo8ezZM4FAIBAIDhw4EBQUFBQU9Po7NhtmbGyscMvcqlWrAEBhTzI34uPjtbW1eTzehw8f8vUBChm1g2pVvNpBDe0Qfvz4sWzZeo0avVOwv9rf/5e66bt2oZ8fPnyI//yDipIXICJu3vypadPN/fuPzVttNY103LhQJ6e135d8qFJkJE6ciNmWPaxbt04+RCoSiSZM2Pnw4S+XnZSUtHTpUheXL2zirajFYLe1zQjUdvz4jwdWrUIA7NAhN4UkqHtJwy9ITEQeD0UizHnE+uXLl7Vr1+7WrZuxsbGHh8eDH8cg9u7dq6urCwD16tV7/fr1sWPHunTpwsYOAKB06dIhhRVfoWhSHHwCERFv377NVlaz6XcAMDMzGzdu3KNHjxDRz89PKBQGOztju3aomUl6WCh/Pz9cswZziEb95cuXgwcPXrx4Me8/OrZ2BSDY2Zn9K7DlDDnNIOXN/v372cJ7CwuLI0eOzJ07l91FWUgPoVA4ZMgQhZu0iQpFRETUqFHDwMBg6tSp23Iee46OjtbR0eE47vDhwydPnty+ffuSJUu8vb3d3NzYalLlV7l06RL7UZctWzbzt+j169cWFhbyL9jy5ctV9sGyefLkib6+vnyNa2ampqYcx504cULhG1mvdXy2fFE/9fXr13PnzrHcKk2bNs33Jyhc1A4WsJLUDmpih1Amk7Vv3x4AOnfunPXYtGno6IgNG2Kul1x37owAeOeOsnMiIiJ4PJ62tnZxep5Wn9DQUIFAoKWlFR4erspy16/HsmURAJ2dMdMul/j4+LZt27JdEJMmTcrnSqeEBFy+HEuVQk9PXLQI169XdNKJE5iPnQx58ORJokiEWlqKZqqjomRmZsEuLpFKF4E8ePBg0KBBZcqUqVChgpeXV2BgYDHYTlCuHBoa5jS+c/z4cfZUYWJiIn+kaNy4sb+/f1xcnI+PD3ui7d+/f+ZRvYiIiOXLl9eqVUtPT69ly5Y5Dan+xOPHGB//q2NPRcejR4+aNGnStm1be3t7X1/fLHvGdu7cycZWxGLxjh07Vq5cmTkIPmsgOY6bMWWKqlLIFEuJiWhtjQDYtKnsx69BfHy8r6+voaGhQCDQ0dEpV65clmyoueLvjxUqsA7hkXLldHR0tLW1tbW1K1eurKpPIJPJevTowf5Ze/Towb4GgYGBTk5O8mkEPp9fp06dvXv3quqiGiiXN5nq1auzP3vlypUV9sCfPXvm5+fXqlUr9nyfhbW1tVgsvn//vpJLeHp6sh+vWCyOi4vLfOj27ds6OjrsKI/HO3z48K98xF/QokULAODxePr6+vXq1atXr16F79j9fM+ePQrf6OvrCwATJ07MzVW+fsWZM7FrVyxbFs3NS8k7P5UrV1bSASiiqB0sGCWvHdTEDuHDhw+1tbVNTEwULINu2zZjY1X2vmIORo9GAJw06Sensd3YBwortWtxx9Iot2nT5tChQ9evXw8NDVVBkHQPDwRgvaLb3bs/ffoUESMiIurVq8cGF6/
nZoVo7sTHY3IyduqEQuGP6XzT0nDyZOQ4NDbG2bPzeR98/PjxxIkTJ0+e7Ovrq7yVWrhwoa6uhbX1uGbNgrN/63t27658WPe///5jrYU8yCoAGBkZ9ezZc/PmzUV3+D86GpcsQR0d7NkTAwPx+zp+mUzm6+vL4/EAoFevXhcvXmRj20KhkH009jGFQuGqVatyKpvFr3uet39BMzPkOKxYEYth8LcDBw7o6ekBAHv+Yw1enz59zp49m5qa6u3tzV5kcwU9e/Zk73ry5Im3t7epqam5ubmOjo6SSQwNcuwY2tggx3k3aODt7Z2amhofHz979mw24QYAzs7ObIUeAAwaNOjrr25DHzgQa9VCY+MN33toAFCzZk1VVf/evXstWrTQ0dHx8/PLcujq1atubm56enpr1qwBgNatW6vqohrl1atXTZo06dWrl5ub208zB6xcuVI+F3HmzJksRxMTE2vUqAEAWlpafD6/atWq7dq169+///jx4+fNm+fv789GyUuXLv0+h5VOaWlpbBow8+86s7179/J4vKZNm2praxfQDzz4wAEdoZDdnwcPHpzl6MaNGwHA2Nj4w4eP2d87e/ZsAJg6dWpuLuTvjzxexsNg6dLOOjo6dnZ2LDJN7969i2aYgBxRO1gASmQ7qMoOYXAw+vnhokW4bFnIhAkTPDw8Fi1aVATnxC5evKivr9+gQQMFx3r0yLgHtGyZy9JY2KuGDZUNkMTFxZUvX7527dqUDSaXfH19jYyMsgxhmpiYNGjQID4+Po+F3ruHNjYI8Mzams/jGRoabtiwge14qVix4suXL1X6CRARJ01CABQI8NChUEQMDw9fN3Ag8njI4yHHYfPmeRsZ+vr1q5+fn3wYnu3cqF+/vpK5zebNm7OT62cLkRYUFLRr1y4AqF27tsJ27s6dO6w36ObmxlpcjuPkY8w2Njb79u3Lw6coJLt2ZTTsuroN7ey8vb2fPn3q7u7OPoW3t7dMJrtz507btm1Zu8iG7vT19XV1dS9cuKCkYBaoXXkMBsVCQ+UDE8Vr62bmB4h+/frFxMTs37+/Xbt28j8dS4Amz3s2atSozCPHUVFRO3fuZPF4U1IU7LXWRDExwf/8w8bgy5UrJx+kb9my5fXr11+8eMESvrMFWqVKlcrbA8S3b9+ePn26c+dOALC0tFRV3dmmrLp163p5eSkcFYqIiLh79y4A1KlTR1UX1RxXr141NTWVP5fr6OjMnTtXybqM+Ph49gMERaufWLoR1lj07t07+9slEkmrVq0AoHr16gpz8544cULe4cxpAvDhw4ebNm0CAFdX11/5rLkTG4uWlrGWlsPKljU2Nla43bFXr94uLt5t26Zmb8rGjx8PADNnzvzpdRwd0cMDp0/HUqUQAF1cvAHA0dHx8ePHhoaGDRrMnDatYAPnqB61g6pTgttBFXQIAwLw5k3ctSsjzzsANm78nv0hypUrV6pUKRUv/Muf9PT0TZs2sZtaUFBQlqOPJk26XK/e9mrVjvTvn+sCsVmzqXy+6PHjxwpPSElJYYscqlSpUvTTEhQF79+/Z2PkLi4uXbp0cXR0tLGxYY2ioaGhrq7umzdv8lh0eHh0t24WurpsDIz9gB0dHQsuTrqvLzZuvE8kEk2YMIGNLx5iy1b5fJwzJ6ftQzlJTk5u3769/EZjbGzs6em5evVqNlJVunTpO9nWLkdFRS1ZskQgEPB4vG7dumVJxLRt2zZtbW1XV1eRSGRhYWFmZtazZ89t27ZFf1/RfuXKFbaPvFevXhKJ5OTJk61bt2ZPFQDQoUMHHo+npaWV9NNUwWoUGorTpz/t3JnVmf2jGxkZZdlqEhYW5uvrW7FixYCAAEtLSwBQPnwzZ87BZs1WrVz5y1/FlGPHkMdDsRgBitFSmfj4+K5du7J2ztfXN/Mh9qdjd3tDQ8OWLVtqaWlt3LgxSwknT56E7zt12fy8ZpHJMDAQ/fxw/fosaY6uXLlia2vLEkI0btz43Llz7HW
JRDJz5kz2c5P/6C5evJi366empuromJUt66yq6Y29e/fKuys5NW3v379nD5equWTJExaGXbrg4sV440Z6ps7egQMH2C+lXbt2ISEhAwYMYKMGlSpVOnXqVE6FjRo1iv1CjY2NMy/p9Pf3BwA2rFC5cuUsqz3lYmNj2Sxi27ZtswcLHTBgAADUqlXL2tpayYIdlpJ306ZNuf0L5N6YMRldCI4L27JF4SmRkTILCwRAtt5FJsO7d3Hu3EUODg4cx9WtW/fQzzLK37mDAGhpienpOGUKjhiBO3a8+u+//9hHvnDhBbtzK43dUyRRO6gKJbsdVM0MIfuJXbqEHh7455+4cOEnX19fPz+/xo0bA0CnTp1UchWVePfunVAoZD+G7MsexowZw34t1apVy32ZLPmPwpEnqVTKVj+WLl2apgdzQyqVsv6zSCRq1apV5vnAV69e1apVCwDGjh37k1Jev8YNG7I/eCFiUlJS79692Q3RxMSkS5cuBb0lYN68+Wwcjn0oIZ8f0aoV5ump7q+//rKxseHz+SxiFat59+7d5c+LYrF4165diJiSknLs2LGePXuyxzU2oVe+fHn5aF9SUtKgQYPYt531MDNPyQoEgmbNmg0bNoy9sV+/fpmfDxISEo4ePTp8+PCnT5+yZMfnz59XwV+qIEml0sDAwL59+xoYGIjF4tu3byscBZBKpVKptGXLlgCg5MELEdevRwCUr1pKTU0NCAjo37//tGnTZsyYoWQgf9q0aRY6Ou6WlsfatPnVEQE1+u233wDA1NRU3l3JIj4+nuM4LS2tmJiY27dvZz/hzZs38hmGnz6WlUC3biEA1qql8CBbYtSxY8fss/QhISFshr9Lly66urpOTk6/dt1MW3RYWu2YmF+uu0Lnzp2D7yumclozlpyczO5OqrlkydOrV8Y4ukhkbWzcrFmzKVOmzJw5kz2ieHh4yG+8Fy5csLe3Z/fn6dOnKywsODiY/Qb37dv3+vVr+bzEvHnzWAP0012C8vAw2dMznD9/3tDQsEuXLkoSS0RGRgoEApFI9MvLm39KJsPBgzOmuRwclCyuOXoUAbBSJRwxgi0JQkdHd9YIzpo166fXGT8eAVDJI4a/PwKgiYnKfkeFidrBfCrZ7WDB7iEMCwtjT5kHDx4s0Av9EvYczOfzeTxelg765MmT2Q3Xysoq98vEjxw5AgAmJibjx4/fu3dv5kDbY8eOZTPIyu/CRG7WrFnyUed27dpl/lfo1q0bfF+vqGyuNSoKS5dGF5ecMj/KZLLatWsDQKtWrQonaPLixYvNzMxYv6tly5YRERF5KEQikYhEIh6Pd+jQocwrsb9+/cpu3Kx8juMcHR3l25AEAkGHDh0WLlzItkpyHOfh4ZGQkNC6dWv5mDF78khNTX39+vXy5ctdXV1Z91IgEAiFwmHDhinZJfjXX38peUApgtg8DABMynnj78iRIwFg2bJlSsq5fJmlSMbAQBw2DNu0ecmKZV3oVq1aKVx2hYgdOnRgZzo7O+fzsxSaN2/e1K9f39TUVPnkPNtP8urVK4VHpVKpfLvFnDlzCqamRRhbQjNunMKD8hXgCkKdIUql0p07d758+ZIN6/zCRS9dwj//xC5dcMIERKxUCQFQVavjHz58KO8QXr16NafT2PqFvC/1L9mGDUMA5PEiLC0hEz6fn31Tt0QiWb58uUgkcnd3zykRXPfu3WvWrCkvxNbWtlmzZl26dGG39C05TKxl9t9//2lpaY0cOTJLx+/Jkycs+oiStCVsH2OXLl1+epU88vTEqlWzj/NmsWEDbtiAtWohANrY4LRpN06ePJmbMARSqbROnY4uLkdu31Z28sKFOH8+Llv204oUXdQO5kGJbwcLPKjM2rVrAcDS0jK6aIRVRcRnz57xeDz29Pz7778jYlhY2P79+z08PNiPhH2VtbW1+/Q50qMHTpqEBw9mrIzNLi0trWPHjvJVfIylpWXnzp3Z2gmxWHypcENKFhuXLuHx43j
3bmp4OOuY3blzRygUsrFMCwuLLB0nllWZ9RXnz5+fY7FdumSEEs2hs3f16j0WJ60wwyWzLRx2dnZ57oI+fvwYvq80yDJoJ5FI/vjjDwAoXbq0QCBgX2MW+Ur+N0xLS/P19WV/vQoVKvj4+LAxKrFYnH15T3x8/KFDh9gOlu7du3t7e+dUq2PHjgFAs2bN8vahCl/Tpk3Zj7Rbt245nbNixQr4WQrjL1/wjz+wf/+M8X1dXVmDBs7z5s07ceIES9xcvXr17EnYUlJS5H31n090FxkxMTHsxqh8mIyNMhzPmtjk/9hADAAMKGpZWQpeVNu2yOPh5cvZDyUmJsrzNCjZ48QSiNfKYY5RMRb0DAArVkRER0cEyFV61dwIDw+H72sTsm8qS0tLmz179s2bN9k+7ewbNAgi4uLFWKcO8vl3vidy1NLSEolEOS25TEpKEolEfD4/p+dsRFy8eHGzZs1sbW0zP5a0bt2aPe381OvXrzmO09fXz7525vTp02wMcf369YiYnp7+5MmTHTt2/PXXXy1btrxx4wZbFFawQWVzMVI/fDi2bYubNuHdu7k5/f9YYJUKFSrkcj6giE3w/AJqB/OgxLeDBd4hlEqlzZo1k3e9MktMTFTX1qPu3bvzvpOHcWPMzc2bNWtmZWUFAM2aPWTf8tatceNG7N4ds4TtkMlkQ4cOBQATE5ONGzfOmjWrY8eO5ubmrCgLCws9PT2KLKoAS8Pl7MzuIg9cXPh8funSpdk9gvUJFUZar1u3Lvvbli5dWuFqhO1+fmGOjmhsjO/eKbyyRII1amCTJrvnz1+h4g+lVHh4OBtouHbtWt5K2L17t/zx6+NHBVHUVq1aFRISoq2tzXHczRyGLu/evct2ibBdlBUrVnyYc8rF33//nf21rayscjonNjaWz+cX9W2EmbAF3qB0WfjZs2cBwMXFJacTvnzBiRNx+3Y8fx4bNsTp0zE4+P9HP378yAJMW1lZsefg9PT0q1evenl5sb66mZkZj8crRg0hIrIdsDlFIGTYkvtFixbldMLy5cvZTqTy5csrL6qEYZNpreztFY5Sse8bGza+/GOPce7cud26dduxY0dsbOzVq1cBoEmTJr9w4SVLEAA5DoVCTE/ftAn/+guvXMnnp8kgkUhYjoFNmzZl2RDx8uVLR0dH1giy/clsofv2p9sj05SlPtdQ8fEPz52bNm1aixYtypcvDznPwrFMgPXq1ctNqRKJ5PXr1+fPn9+0adObN29yGQ56zpw5ANA/hzAKbDuiUCg8e/asfIsN06xZM47jdHR01BtKMDUVTUwQAJ88ye1bZDLZq1ev9u/fzx4w/vnnn4KsYJFA7WDelOx2sDDSTjx//pyFSrO3t7ezsytTpoyxsTEbDR02bJhaMhS9ePFi3rx5QqGwUqVKAGBgYODq6urr6xsUFCS/acbHx9+/n7hnD86ahVu24LhxCID6+rh4cbp8aT5bYqqtrZ3lKf/Vq1e7du3asmWLwkBYJMOMGdihA9aqdcrNjX0fzMzMxGLx+fPnlyxZovAdO3bskD85jRs37s6dO2FhYfKVLU+fPmUTaI+PHcvpmgsXZmwwyH8ai181depUAHB0dMw8vCST4eXLuH073rz5k7FM+XpmY2PjnM558eIFANja2iopJyUlZfLkyUuXLh04cGCM0m0Q27Ztk3dBFc6mRkdHv3//ni1G3bBhg7LaFxmLFi0CAI7jhEJh9uxJISEhjo6ObNpTV1f38OHD2c959Cgjwdvvv6OZGVatitmTMMXExLB1vLq6uh07dpSvzwEABwcHNoIIAGvmzVP4r56UlBQVFVWkgpv//vvR2rUvBQYqi4rGcgwMGzbs+vXrCv90UVFRzs7ObMyCx+O1a9fu05EjWMQirRWE6dOnKxlr/+eff+B7erfz58/7+vq6ubm5ubk5OjrKvzkikah+/foA0LZt29xfN+Xo0dhate7Y2l5q3vzzu3cxMTh0KNaurbLEV15eXoaGhmX
Lll2+fDkbEpLJZH5+fmyZKLt1cBxnZ2cnEokEWgKT+yb8e/yWL1t+SaP4aopduXIFAKysrFNTFfwjsd6al5dXwVWAjRjmlN4dESdOnAgA+vr6LMNE5lxEVlZWIpHov//+K7jq/RTbQ1i79s/P3LVrl5eXV7NmzeTRWQFg5MiRz549K/Baqhu1g3lTstvBQspDuG7dOpbSNDOxWCwWi4VC4aNHjwqnGkxcHI4aNZvV4bfffnv06FFuvnAfP+KAAWwp4poqVaqcPn2a/asLhUIlt86f8/fH4GAMDPy1tzx6VMiZzQuaRCL58OEDW3CSJRJmltPmz58/aNAg+TQsAPB4PEtLy7p167Ld8EoWxshk2KQJAmC2LE2F4du3b2x4icVofv78+cyZ8SzmqLs7zp+Pbdoo26fu5uYmH4jN6ZyDBw8CgJubm0oqzCIEsrnE7MGyPn/+XLt27QoVKlhbW7NVYdbW1pPGjsVDhzCHKHZFwfHjx9kYULNmzbKM15w8eZLteTYwMODxeGyQwtjY2MPD48GDB+ycAwcOtG79FwA2aIChoVitGgLg6tUKLpSWlvbHH3/o6emxx2J7e3sfHx957I3NmzfbWlu/tbbGnj3/PziRmnrl1Kn+/fvr6+u7u7vXq1cv+2KbgvKz7MCengiAK1cqK4Ptpra1tWXrkbL86YKCgtjmCktLy7Zt24rF4orGxigWo5kZjh2LedpYW5R9+/bt6tWry5cv79mzJ+sg7c4hujqbl8j8YJ3l/2pra7Mw5QDg7u6e+zrcv38fvo+gXbx4USLJeIbbuVM1n/Hly5dVq1aVdwYWLlwYHx/fsWNH+L4/uVSpUmzpVExMzJ6zezq+6qh1T6vs47K7ondt+rIpSVo8lhUUsr59j5UtK9m1S8Ehli2w4DL9PHr0CABMTEyUhAORyWT9+vWrWLGi/Ptpbm7epk0bb29vVj1zc/OcNlAVgoEDPRo18l2z5oPy01asWCEP0gMANjY2bm5u06ZNK7xbrlpRO6iYZreDhZeY/uPHj3fu3Hn69Onbt2+jo6PZTqrRo0cDQIMGDQontgciBgVhpUpoafnR1rb8LoV3XKXOnMH69R3ZHYTjOI7j8ptZ8uJFLFsWOQ5z3pSf1d69aGGBWlpYiFvgCgfLdGRjY5N9TEXuzZs3bGt73bp169ata2VlJU//UqpUKT09vc+fP+f03oAA9PHBnPubBY5l7ipVqlSDBg0AwNl5IwDa2uKyZch6hnZ2iv9VX716xfrAffr0UZJB3sfHBwAmT56sqgpXqFCB/W0HDRqU+fVPnz6x0AXsLm9mZsZWWQ9p2hQBUCjEdu1+bfdGYWED8Hp6emvXrs08Qbp8+XK25YZ1gCtXrjx58mQ2WM40bty4c+fOrHX09j7PGq9//0Vj468dOkxWGMldJpO9fPlyx44dCnPSJJ09i/r6CIAtWuD58zh0KJqYnP6eMZJt4CxduvS9e/cK6m/BpKayVdS4cCHmHAl56VIEwJEjcyzm+fPnVapUYVP0kClNAgA8f/589+7drFvi5OTE9rV+/fr1v02bsE4dFmIRly0rRsHHlZDJZB4eHtWrV5ffl+Rq1aqVfeuXr6+vsbEx++6xIKJeXl7btm179OjR+fPnR40alWVTwy9leIuNjZX3zVhAkc2bWRCICCWxIn+JVCo9cOAAWybAnn7kXcTevXtnjzYZkx4TlEj7CZXZsAEBMPvSYKlUyp7UP3z4SW8nz9g6FOXbxhAxKSnp48ePs2bNOn78eOb9CxKJhE37VKtWrfDDRiQkJFy4cEFXV5fjOOVB3cPDw1kG9hkzZpw5c0bJM0NJRe1gVtQOFmaHUKGEhAS2Yj6nJYKq9f59RtYTBwd89UpZKnklWKQvAwMDY2PjRo0a5atC4eEoEiHHZdQpyxJ/hffTe/cykp0DYPZYXkUmck/eyGSy6tWrA0BOffW0tLQmTZoAQPfu3TO/GBYWdufOHdZFWbNmTU7lN2iAbdqgGvN
iSiSSihUrsnucgYHBhAmLLl7M+Gf/+BHr1WN7sofJk0PExsZu27bN1dWVLe0Qi8W9e/dWuFvv2bNnnz59YikocpqIyAO2jbBz586ZgwS+f/++cuXK8pudnZ3dx48fZTLZ/fv3n61ejc2aoUCAbduivz9+HxUrIi5fvmxhYcEaM1b/nj17BgYGpqSkODo6svUbAODm5iZ/cA8KCvLy8pI3SzweL0v2oXbt3CDP204ePcIyZdDHBwcPZvtp4zp0mDdv3ps3b+Lj41nOX11d3Szb01NSUo4cOdKnT58VK1YsWbIkvytqgoIQAHV0EADPnmWv3bt3jz3cGxsbly8vLVUKbWzQ0xO3b8c3bxQsOAwICGDDNPb29kOGDJEvDeLz+aVKlerXrx/7vyyYbdY3372Lc+bgjBnFMLeXAn379mXRFFiKTvZlmz9/frVq1QDAxcVFHu0wPT19+PDh7K/0559/3r9/P6dO2pMnT3x8fCpVqlS+fHmO47J8A+WiozEgAP/4A1u0wJo1sVOna+ypjs/n8/n8+vXr+/n5hYd/dndfqqOjo/JMcVevXmUTRGXLljU0NPTz81Nt+ZojIQGNjREAd+3CPXtw5UpcvPjU8OHDWUImGxsblV8xPPzriRMn5syZwzqcec51iYhxcXEsO5SLi4uSaUbVSEzEwEBcuDB9wIDq9vbyIDoNGjRQ/r6lS5cCQI8ePQq2ekUVtYMKUDuo9g4hfs/SqKOjU9BrDLZtw5kzsX9/9PBQwWLdmzdvsjGAnQoX3+R+gvuvvxAgvaxJmH/9qKjv34OQEOzZEytWxPXrFSTT69kTASR2Fm/31IiLy/ji4r172KwZNmmCGzcqzL9XXLA967Vr1/727duIESNmzpzp7+8fEBDAxpamTZvGWkSFaY7YgskKFSoonHCOjESOQx0dLOhGSjn2Edzc3LL36759wxEjdgCASCQaM2ZMjx495MNLurq6rVq1YmFpnJyc5Gs8oqOj/fz8nJycOI77559/2JOownG4vDlx4gRbAMby1/v6+gYHB79//75MmTLyFSDh2XvYsbG4bBmWKoUTJ6qqJrkklUobN27s7e39+vXrLIfS1q1zLFMGAFq3br1x40ZXV1f5HE7FihXZQwyPx5s1a1b2piUxMZGlkMke0uPWrVscx2lra+dx2D4qCjduxF270Mfnhy353xfbsOZkzZo1WXbkAwBrewYMGJCvBy8/v4xJXQD8npPq9u3b8nFNfX0Zi601eDDevo0VKqCNDU6Zgq9evUVEmUzm6+vL/pJ9+vRhkQlTU1NZGkw2N1WmTBmBQLBiRc5hnE6fRgBs3jzvn6JoCAoKAgATE5MhQ4awv55QKGzYsOHRo0c/fPhga2vLhlfS09NTUlJ69erFnsZyH3hs2bJl7E89evTozDFCvnxBHx80MMCpU9HVNSPin7PzCzbwZG9vL/+qi0Qi9lW3sbFR7fO6VCp1d3dnHcLCDOBcIk2ahCNGYJkyGf+UzZrtZ/98lpaWAoFAVXlfd+3Cdu2wVCls0iRU/vXg8/kLFy7MT7Hv3r3LWDAyZIhK6pmjBw8y/kAAjU1NeTyejo6OmZlZmTJllM9PshU6JTjgH7WDv4zawaLQIcTvqR4bN2589erVM2fO7N+/f9OmTStWrJg3b96dO3dUsqP02ze0sUFX15xS0+UF20Oop6f3/5y8UikeO4YNG6KFBeYy3XlsbNycjvdu84OC4MEDi9TUjx/fTZKVt82IYKMoryW+fftlcYugO1xQEDx5Uk0i+Rj2bAwa6CMAWlgomewuFlJSUlhbMmHCBMjEwcHh6tWrLHtkTs2hVCplUYIUpvvcuxcB8FciMhQItiy2eQ6/ealUyiblTExM2H3ZycnJz8+PTdA9evSoXLly7GHu3r17Z86cYTMA7LmTTSEOGzZMtc95//77r4ODg3w0EQAqVarE4sHWr18/xwTEZ85k5CcqXCw3CfvTvejXD3fvxuRkTE1l+b4SqlefMmm
SfLzgw4cPvr6+bJFCvXr19PX1s0fPl3v27Bkbbsh+aNSoUatXr1ayzjnPZDIZG0EAAH19/cw/h4ULF+7YsYOFQ2jRokX2+EBJSUkHDx68d++e8qfzzZMn727Q4L9KlRKyZTyPziCLiMDJkzPiB1SujABYrVoQx3HNmjVjwST5fL7Crb/v37+fNWvWtWvXrivPdfD2LQKgpWVu/iZF2ZQpUwBg1KhRd+/eXbZs2Y0bNzJnP3vy5An7Xf/+++9sZZ2RkdGVXwz6efjwYTYW2bVr16SkpJiYmGnTplWr9ooFE/Xxwf/+w8BAfPgQw8JS5beCsLCw1atXt2zZUj6LIhAIRo8ercKADaFz55YyNzcyMgr+8Xkus7i4uKgLF3DGDGzXDvv1Q3//Yj18WdBGjcJevXD0aFyx4tGaNWsOHTo0cOBA1snPZ+SF1FQ8dgxbtcroT9WundSyZcu//vprzJgxbNtYPpeZ3L17VywWe3p6FsRd8f/Wr0cdHbbGas737ItsklPJ/OSrV69YXo3iEhk7D6gdlKN2MPeKRIcwKirKwMAgc5gQOVdX18GDB/9acYrCR7KcwA4OKouuxvTt2xcAatWqFRcXt2nTptGtWmWs/9TSwvbtc7kUOCpqQ1AQ3LsnunfP4MED06Ag+LzMCYcMUbK7NCxsalAQ3L2r9eCB2f37+kFBEDelJf71F+acm6gYmTNnDotWzPD5fG1tbUtLS3ajnzJlipL3rl69WuHwFSKOGzerceOla9aoeej68+fP7KaWUxDw2bMzIh6JxeLsIYmjoqJYHhc9PT0WKVcefILH4zVv3lx5ytT8VHv//v0DBgxgT7Rt27Zt0KBBjr1BRExMRJEIBYLCDzATFBTk4eHhyNISA6CxMc6enbEaRNFTjlQqPXPmzPPnz7MPpmYmkUiEQiGPxyv8x4gtW7a0aNFCT08vy458RHz06BHboW5vb8923qenpwcGBg4YMIC1kTVq1NDT0wsICMip8IYNG7Lvj5KEVIjYrx8CoL9/Rlxcb+9tbL66dOnSBgYGZ7+vsckjmex6gwbrnZ3ji/mid7YuVMkEzpUrV+TbS6ytrfM2mX/p0iV2M6xYsSL7Dyenge3bY26CO0ZFRW3atMnZ2dna2hoA+vbtm5uE3T+3eTMCvC1X7koOn33JkiU1a9bk8Xh7mjXL+GGamBTNbcZFmUwmY08d5cqVyyk3fW5s346lSuG0aXj4cNYx5AULFgCAtrb2jRs3sr4tPByfPs1N+WFhYTweTywWK9xUpkp166JQiLVr/zdp0vLlyy9fvvzs2TO2Uian+UnWwha1FHAqR+0gtYO/qkh0CMPCwsRiMdvk0Lp16549ew4ZMmT06NHDhw9nbeexnLMIKDBwINasib6+su8P08+evXBwiOHxFM+35UdsbCyLuiEftAi2scn4+VWvnutipE+e2D9/3iwsbGpQED84uHZCQrYbcZY3SBMePbIJCen04cP4oCB48cIlKSnXOXeKPJlMdv78+VatWtnb28uzlwJAy5Yte/furTwWQmJiIltIsH379k+fPmUe/65SpQoA3Fb5l+DXsTtXTrGte/fuzT5v/fr1FZ6QnJzMJtXlfxw7OzsfHx/l93FVSU9Pv3bt2qdPn376HPnvoEGty5U7efJkIdQqO1lsLK5di/Xro5UV+vlh//6Y713pdnZ2AKAkc2PBkclkCpNPIuL79+/Zpn9zc/OuXbtm/sk0atSIjVwKBIIjisJfpaWlyfsns2bNUlKB5s1DOU529+7/X4mLi+vSpQsAeHh45O/DISKyqCQ55c8sFp4+fQoApqamSu5R27dv9/DwMDQ0rFGjxrsccqXmxuPHj3V1dVnUYicnpzxs+jpz5gxbauXo6JifrgUi4vPnKBIhAOaQe2b16tVskZ5IJPLu2hXHj8cDBzCH7zNRLikpiYXjdnBwkGf8+/xZdvYs+vri7t0YEPDzOdd27RAA169XfHTkyJEAYGZmdunSpf3790+
ePPne8OFoaYkA2Lhxbiq5ePFiAOjZs+cvfLC8efcu+yaQu3fvsri+c+bMkb+YlJR0+/bt9evXs1+NuhqmQkbtILWDuVckOoTs7qPw3sGSpdja2maOaaHM6tWorc26ZCebN3dyclq+fLmLiwvH8aZMOf7zt/+6O3fuiEQigUDAJmoG2dujmRn6+ORycar0y8c3b/oGBXH37um+fTvk/n39kJBOyt+SFvXi5ct2QUHw8KHV69c9793TevdulCo+ShGVlJT0+vXra9eu3b9/Pzfnd+3aVb64XCgU2tjYNGrUqG3btqzfrqrYevnRrVs3ANixY4fCo/M7dbLV0wMAJXPjMpls7dq1GzZs8PLyunPnToHVNF9YdjVvb2811+P7foD869q1KwCoJXWqct++fWvfvr38a88GUF++fImIMpnMx8enoq3tZ1NT9PL6f+Sq5ORTR4+6u7sLhUJTU1MtLS0ln+vbt288Hs/U1CxLbrQDBw4AQKdOP7ll5QbbcM8iYRZTbOZBSdobttqKtRSrVq3KVaHJybh0afYNCGwBlb6+/sGDB/Nc4cePH7Ml6OXLl3+au5mfrOLicNYs/PIFlyzBnH/pLPz68uXLU4pArq0SIDIyko1E16xZs1OnTjY2Ns7Ofmwguk0bXLEC3d0VJIWTi4qSiEQyoTDHW2NaWhqL5CEf6V5cvXpGFEQDA9y48aernxwcHABAycrDgnbixAk+n89xXN++fX/77bfq1avLF0uLxeKpU6cW7FrWIojaQWoHf0b9HcK3b7Fly8dWVvUUNkjp6eksG++YMWNyVdzu3RkTdCKRXaZkoyYmJlGq+z1kMW7cOHavsbGxWbZsWfr3QbufSEpCX1/U139xu/a9e9rPnjUICoK7d4VBQRAXd0rxW2Ji0NtbZqz35F75+/eNnj6tGRQEQUG8u3f5SUmFmsuxyAoJCWFtWLly5TJnQQUACwuLrl27qruCiIhz584VCASKQwWmpKBAgBwnNTX9ktP4bTFx5swZNppz7ty5Ag83VygmTZoEADNmzFB3RRSQSCQhISGrV69WOPP8bevWjO3yAwbgmTM4aBAaGq5u0gQAOI5ji2qqV6+e05wVi1Hu4OCQ5fXHjx8DQOXKlfNf/7lz51auXHnr1q35L6qQxX76dPTo0SlTprAbzoEDB9auXatwQTjLKi4QCDiOy74aXAF//4zstzY2uG2bvMy9e/dyHMfn87OE3cuDiIgItlbKyMho9erVuV8G9u3bt/QFC9DEBAGwXTslPQSZTMbWteZ3HpJkMmbMGJFIJN9o4+o6xtkZvbxw376MCPZDh+b43nXr1llblx0xQtlvLT4+3tbW1tzcnAUPa82mB9n/fpa3/dWrVwBgYGCg3k16q1atYoP18tHhWrVqDRo0aPny5SV492AhoHYwy+slph1Uf4dw2DAEwIEDc9xL8PDhQ7Zk+WpuMvVFRuKyZVi/fvT3TWgCgcDc3Hy1wpSZKnLz5k0AsLa2/rWn3n/+YbfXZN+RqalvU1Pf3runHRTEha+oLm3fAhVOZPXvjwDI5ydum5KW9jkh4U5QEO/uHUHUnAbSPt0VnK9pJJLuLVoAQN++fdkLKSkpb9++vX79+owZMwCgRYsW6q0gExcXt3Xr1izZYDPcv5899nExlZCQMGPGDBZ3S0dHx9XVdfny5cU37e+3b98aN25cuXJlFWb1KFTnzqGhIa5ahS1asDvPx169Fi1a9O7du7CwMLZr19LSMijohxxxSUlJBw4cYFGFhg0blqXI1NRUgUDA5/M1cebn/Hns3RvLlo2tVUs+6iQQCNj+JYUjmPJkfY1zt+4OP39GXV3U0kKAj40a2dvbnzhx4urVq+wZXVWNWkpKSr9+/VgYerFY7Obmtm3bNiVbv1JTU/38/CwtLZ+w3YAuLnjtmpLyQ0ND2VdLJbUlDEv8wyYJQ0JCMm+OuHMnowFZvVrxRhJnZ2clS1TkWIQ2ADA0NGzdsqXszz9x2zZ89Ej
xw0kmrLX95egPqsbmbWxsbDZu3BgUFKSJ96gCQO1gCW4H1dwhDAlBoRD5/J8sQGD56y0sLNzd3Vu3bj148M5atbBCBezcOce3fHjxYtq0aRUqVGDp4799+6byysulpaXp6+sLBFoREV9y+56AABw/Hhs3xkwh5r4+XJDerA5r/jF7aNqAABwxAp2cMud2+3LlL2ntKhn7N44cye8nKe4mTUovXXpmmzbZsz/fuXMHct6VV8iOHz9evXp1xQNsR46gUJjxD5pzVKHiIiIiYtKkSbVr184cpNQpWxSvoiA5OTkuLu5BDokTX716xfYnGBsbF+OJjogI9PfHVatw9mz8MeRafHw8SyLH0j1l2ZEPAH369IlQ9IVkD6ZPnpScPcy5EhCAnTuz5wlZqVKtW7X6559/Vq5cyabC2IKRuXPnZn5HaGgox3FsvmLRokW5usr9+2hnxwYBnU1M2D8Ey2s8btw4FX4amUyWOY4XALCe4ZIlS7Zu3bp48eKJEycOGTKkU6dODRs2rF27Njvnj44d8dy5nxZ+5MgRAGjTpo0KK6zhWHPGxgUU5ns8cgSbNdvGcdyePXvYK2lpaY8fP2ZbWFlW2+3btyu5BEugYm5uzlbc5canT9GnTp2aP38+y1Z3+vTpX/1cvyY5Wdm6WESWlXfp0qUFW42ShdpBTW4H1dwhjI/H2bPxzz9/cpqrq6tAIGB3GQBo3nwyW7lQvz5OmYING2L2aFhMSEgIW7Sm8ppnMWzYBwMD2fd7b14lJWHZshmrMtq1y9VbPn1CA4OMt6hiS2sxdvky8vkoEKCiwL7BwcEAULVq1cKvV3bLli3LaQ4BEVEiwYkTcdmyQq1TAYuMjGRBSg0MDGxsbFxdXdVdox98/PixYcOGrq6uAGBvb+/r6/vly/8Hdy5dusSWZlWpUkVJSP3iLjU1ddCgQaw/I987xHFco0aNli1bltNWfpamMj872YqrsDDcvBmfPMFMq0MvX74sFosBgMXu98+UYvjr169LlixhR38h6W5aGq5Zc7tDB/ZvIU8YrTDVan5cv35d3hvU0dFhF2I7wbLQ0tKqVKnSiRMnclnyzJkzAWDChAmqrbAmS5s4Mbx+/em2tqUMDDLfqTJjrYyWllaXLl0aNGjAvngMG7bQ1tZWErtizpxnTZsO+ftvZTG9mf37sUMHLF0aHR0/svIFAoFAIJg9e3beP+FPzZ2LDg6Y8yRkXBy6us4pX75KTjcukh21g6jZ7aA6O4QBAdi7988jZV+7dg0AWL61jh07njlz5saNl/fv4+vXGB6OVlbKgmWxrPeF8PQ5fz4C4IgR+S5o1y4sXRqnTPmFeNzz52OlSpjLIecSbNUqFAgwh3XtHz58AIDSpUsXcqUUmjhxIgDMmzdP3RVRg4SEBLFYzHFcwe3pzdHbt6hoZ9e1a9csLS0BwMzMjE2/sKel/v37X7hwYf369WxWp2PHjtlnnkseX1/fDh06cByXeUd+Tt6/f29ra9ukSZOfpFfSJEePHmVfGKFQePDgQYlE8uTJEz8/vwEDBlSqVMnQ0NDOzu5Xy/z27ZuPjw+bEbK3t89tiLVfxDYTsijqADBx4sRVq1YNHDjwzz//nD9/ft++fYVCIYvFl9MEQnYfPnxgM4qbN28uiDprIqkUra3ZKLBU6a74nj17yjfScxxXqVKlnj17sqgVbBK7a+3aMkVjEzJZxtC08mCHEgkeO4Zt2mSMSNvbpzZv3nzcuHF//vknC+jy01WpP/HiBSYno79/1iVkISFYtiwKBAiAOcSE3LpVvSm+izBqB3NBM9tB9e8h/KlzQ4caammx7Q0fPnzIcnT//owMK2FhCtrI5cuXA8DIkSMLupI3b7IklfkuSCbDhAQFd0AlUlMxNfXX3lLyBATgP//gv//mlGgyOjoaAAwMDAq5XgoNGDAAinlAxfxo2bIlFGYAurt3sVcvXLcObW3R1hZ9fCSZ9jH6+fmxqI8uLi5v375lK0O0tLTka1z
ZmnMfHx8VpvAu4qRSaWhoKCJ+/fr148ePz3O4sZw/f549btaqVasoBO8tOtatW8cmCatVq8aGMjPL88J1Dw8PABg/frxqaysXFBTEcZyuru7Ro0f/+eefLHFxGjVqxOpfLTftXGzstx07WrRowWYajY2Nq1Wrlqs4OuSnLl5EABSLEQB37lRy4rBhwwCgbdu2V65cybwplMV1721hITUxQTu77BHRL19GACxb9ifj0vv2oYUF/vMPHjyIWXIesflJkUh04cKFX/18/1erFuroYKdOWdeA3buHenoIgDzeM2fnnYr+CG3bIgAqWk6rqagd/EUa2A4W+Q5hYCACfDM03NWkyaQclpYOHRpbs6ZnZ0UbCkeNGgUAywp+9V1aGu7ciatX47JlP08BRAqDTIYvXuDu3ffmzm3WrBmb+ufxeEXhdtaqVSsAOHPmjLoroh5sCdnYsWML42K7dmXkoZk8GStWZEPZo2rXdnJyWrdu3Z9//slaOw8PD4lEEh8fP3/+fJavkg0fGBoaCoVCHx+fwqhqEXPjxg0rKyvWe3dwcPDz88u8E9vPz491ddq3bx8TE6O+ahZRPj4+bC8Kn8+3t7cfMGDA8uXLz5w5w+fztbS08hZ7oGnTNgBwJdO2c5Xr3Lmzwj7nq1ev2Mazny9tiIhAd3fWXWldrpxYLG7fvj3LvGpjY6N4ajE9HTVgzkFl4uNxyxasUwcNDDDnuWKJRMJ22WTf15Senr7T0xO1tTMCFri4ZJkyun8fe/fGmTN/UpGuXRWHO2DGjBkDAKampi9evPheJXz4ELduxYULc5EsMTgYATJiQmZ/hDt+HAWCQ1Wrsn7L5cuX2csREXjyJM6bh+PGYblymMNyWs1D7WBeaVQ7WOQ7hI0bIwByHGprY1iYwlPCw8NZDspNmza9ffv26dOnd+7cCQwMPHz4MNv/mvvdDnkmk+GAATh9usKpeKIOI0ZkhJCqWZPd1/h8ftWqVX9h906BqVatWkE/2BVlly9fBoDatWsX6FXS09MnTJjwr6MjAuAff2BqKspkeOFC4vDhut/Xw/D5fG1t7SzrmmQy2ZUrVwYPHjx37lw2J7Ny5coCrWoR5O/vzxYo2trayvdR6OvrDx069PLly7///jsbM/b29laYX4EgYnJy8sWLF7PEM6tevToA3L59+1dLY+GHGzd+VKB/8AcPHgiFQnd39+Tk5Myvs0EcoVDIcdybN2+UFbF+PRoYII+HrVo937OHrS77+vWri4sLAOjp6R09elR+btq9e/jnn1i6NHp6or8/rl9P46m5whYEZctOmdnx48fZrIXiw9++Yd26CIDa2tm33AQE4F9/obt79pTv/xcbm6atLePzMTxc8Qnp6emdOnUCAGtr6wEDBjg4ODg7/8YWl2pp4ZYtOHBgTs90iIifFixAgQAFAuTxUNHGrUubNsnXvpqZmbVr16569Rby1BjTp9PoPCK1g/mjae1g0e4QJiXhkCEZSyMmTlRy4oYNG+B7wt8sqlatWgiZuz98QAAsVaqgr0NyrX171NJCLS3p97seM2fOHPXWKyEhoW7duiKRiOO4WrVqPZw1C//9F6Oj1VurwpSSkqKtrV2g2wijo6Pbtm0LACbGxnEbN2Y5Ghsbu27dOjZ87uPjoySywtKlSwFg1KhRBVTPIigtLc3b25v9WAQCgZubW3x8/JYtW1hucTk9Pb0DBw6ou7LFD3uGyMOj1dSpCIDDhxdEpX7AwuVt27bt3r178hft7e3Zv3vTpk2VvXnGDOzZE1etyv4En5qaOnDgQPb0OXny5OXLl9epU+dS8+YZz++NGhXEZ9FkbK+gsunc8HCsXh2PHlW436RBAwTAQYNyXDW6efPm0qVtPTz8FR9GRMTLly/r6enJ9zGWLVupShXs1Qt9fXHcOATAevUwpwDwlSpVKq+n51+79ufffsup/PHjx0OmXa8AULVqWosWOH78T3MlagRqB/NMM9vBot0hZHx8sHt3VPrsKJP
Jzp49W7NmzTJlytjZ2Tk4OLi6unbp0oWlhNq2bVtB1/H8eQRAZ+eCvg7Jtdu3Mx41zMxG9ew5ZcqU6dOng4qSh+YH219kZGTE9m0nVqvG9kLgggXqrVhhYgswWrVqtXv37pyi5OXHnTt3tLS0LCwslEzDDh48mN3Ta9SokdM5LCpVy5YtVV7DounLly/sn4aNuwPAmDFj5Psinj9/7u3tra+vz3Hc+fPn1VvVYsrPzw8Afsv5GTcnrVoVUmpSlqQOAHr37s1eYRkI2Hjr2rVrlb25YUMEwEuXFB6UyWSzZ8+WJ4AGgN4ODujlhbduqfxTaLjExEQ9PT0ACPkxqn5WOU9r3L2bsU1v+XLF8SRbt24NAJnj6Gbn5eXFplCEQmFgYGDmYEhfvmDlygiAw4Z9zh4y9/bt2/A9r8aGDRtyrr60SZMmANCoUaNDhw79ZO5a81A7mDca2w4Whw5hPrCdzUOGDCnoC23ffsPZef2ECU8L+kIkt1JT8cABDA2Vv5Cent6q1W+NG++9dUudk/u1atUCgL1796akpFy5ckU6cyY2b45iMY4erTkrpoKDg1mYZgDg8XgODg7e3t5Xr15V4bqLI0eOKA9i4evryyogEoly2gv+5s0bKDLBaQvB9u3bAYBFktTS0soSGfLz58/9+vVj4/35ihVRsiUkYOfOOYXPfvDgAQBUqlTpl4oMCMAZM3DzZuV511SDZW/L/Pz34sWLgQMH8ng8oVCobFZfKkVdXQRQvt6Bba2sX7/+yZMni3sMhiKL5X7Mcb1o7gQEYPPm+ziOk2csTE9Pf/r06a5du0aNGsXj8fh8/sZs805yUqm0dOnS7LvUpUuX7Ce8fImtW78Ri41Gjx4tfzEqKiowMLBz584siolQKFQyYiiVSq2trQGgEFaBFVPUDuaBxraDJbxD+ODBw8qVa3brNvfnp+YP25Xr6+tb0Bci+fHnnwiAalz4cPHiRQCwsrKSZHmyS0pStl2jJHrx4sWSJUtat27NhoGZzZs3JyrdGKNC7JmJpedSOI6ekpKSnp7OJnI1IdA2IrJGbufOneXKlcu+gigxMZHH47G4kT+ZKdJY/v7o4sJSyePffycnJWU+GB8fP2/ePIFAUL58+YsXL+a+1Lg47NQJ27dXcWUV8vT0ZD9GeXciJSWFbSJqr7QG70JCRjVrttfNTXn5bAZSY0NqFY7o6OgaNWoYGhr+ZIbwZ1avXg0AQqGwa9eujo6OOpn2X7BkhlpaWjnNPsnOn/9at+4iW9uyhoa7d+9WeM61a9fYHbht27adOnViwYfkA4UCgaBJkyZKqnf+/HkAqFixYlGIFVdMUTuYnca2gyW8QyiVoqkpAmCm+LoFws3NDQozkj7JExaYwcREbZ0vd3d3ANDMaF05SUhIOHbs2IgRIwwNDQUCQU6PDir3/Plz+Sjg8ePHsxwNCwtzdHScOnWqpaWlUCjMfeK1Yq1BgwYAcO3atdQcfiG2trbscc3Ly6uQ61Y83L6NfD7yeMjjvWrSpHz58oGBgYgYHx8/e/ZsFvyMMTY2fvz4cS5LTUtDjkM+vzCClk2dOpXV0MrK6urVq15eXiwbtZWVVfny5ZXMNuzfvx8A3JR2CGUyGetIfPr0qQDqTjKkp6d36dKFdZY+f/6cn6J+++03ts2MqVChQvfu3X/77Tf5grp21avLFGZpGzo0Y9eGiUlqQkJO5e/atUssFsuHBfX19Zs2bTpmzJjx48cLBAKO45Rs+WF5NaZOnar8IyxatOjo0aM53dM0HLWD2WlsO1jCO4T4PTLy1q0FeImUlJT69etXqlTp6VNaMlrUDR2KmzdjnqK+51dYWJhQKBQKhR8VxUwjCxYsAABPT8/CuVxaWhoL7ePu7n7t2rXMh1ikafi+iWXgwIGFUyW1YxkylSwDYyEKAKBNmzaFWbFiIzERvb2Rz5caGlbW0wMAjuM6depUqlQp9ndzcnI6f/48W5bZvPnEd+9yW7CxMQIURhh9lrz
X0NCQbUJjatSowX4RNjY2bx4+VPjG0aNHA8CUKVOUFB4aGgoAFhYWBVN38n+JiYkNGzYEgKZNm2aJGftL2D9rixYtLly4kDmw/tixYwGgq7l5upkZVqqUNcpDaiqamGR0CPv2VVI+m6EqU6bMvn37Xr58mXmub8OGDXp6+o6OJ8+dU/BGJXk1MouOjhaJRAKBgMYgFKJ2MDuNbQd5UNI1bw62tpCWVoCXWLp0aVBQkLW1tTwUGymyunWD4GBo3x4+fizsSy9cuDA9Pb1u3brynRUks+bNmwPApUuXCudysbGxOjo6urq60dHR4eHh6enp7PUtW7a0aNEiIiJCLBanpqaWKVOGJdTSBFWrVgWAFy9e5HSCnZ0d+49nz54VUp2KFx0d8PWFK1c2tWwZkpDAcRyPxzt+/HhkZGTTpk0vXrx47dq1li1b7tq1a+BAv//+W9CuHXz9mquCW7W64+Jy7OvXiAL+AMDn801MTOLi4hISEsqUKePl5XXv3r3Hjx8/efKkefPmFU1NrVu0gKNH5eenf/mydu1aJyenNWvWlC5d+sqVK8nJyTkV/ujRIwCoU6dOQX8KoqOjc+TIEVtb22vXrg0aNAgRs5wgkUh+WohMJjt8+DAALF68uEWLFmx2l1myZMm2ESMOJyXxv32DV6+ga1f4fgsFAODxYMsWcHQEHR3o00fJJfbs2QMAY8aM6dWrV+XKleWZ0AHgjz/+GD8+5Nat9j17wvPnGS+mp8OTJ8E7d+7s1avX169f7ezsWCqXnBw8eFAikbRq1Uo+KEMyo3YwO81tB9XdIy1wx47hzJm4bx8GK46VlV/v379nUY9Pnz5dIBcgqta/PwKgk1NhRGiQO3DgAMdxLFKfk5PTk0ePCu/axURaWhoLPximJDuVijx48KBcuXLwfewTAGxtbadPnx4aGrpmzRoAEAgEAODs7BwZGVnQlSk6Dh48CEpX/b169ers2bMCgcDY2Dg/0w4lnkQi8fX1Zb93IyOj7K1DbCzWqoUA2Lx5rgp0cnKCQkleOnz4cABo1KjR1atXs2zNSklJifvjj4wdkqtW4Z496OaGIlHrcuUAQEdHh/2amjZtmlPsmRkzZgDA33//XdCfgjAPHz5kN1UXFxcPD4+uXbs6OTlVrVqVde1Onz4dn3Nqe0QMDAwEJTGQEhKwfn0EQLEYV61ScIK/P96/r6ShTUhI0NXV5TguNFP4t8ykUuzeHQFw3Dj09MSGDVFbG6tX/53dtC0tLU1MTJQHTWnRogUAbNmyRck5GovaQYU0th0s+R1CRLx0CXV0sEYN5Xlc86hHjx4A0KtXL9UXTQpGVBSWKYMAOHdurhds5c/du0/YnuyhQ4eWKVOGx3Ff69dHLy+MiyucChQXHTp0AIAC30a4b9/Oli3ZU29QUNCcOXMqVqzImkMej1ehQgX2315eXpoWBfHJkyfs+S8wMNDBwcHPzy9LXvU7d+6wXD4LNClLSp7t2LEDAHIKjBEejjVr4tmzuGMHTpuGyrfnsCj/Bb1NXSqVskViyjYLLV+OIhGeOpWxJlAovDlq1K5duxITEx89elS2bFkAqFChQvCPQ7BBQUFeXl56enqGhoZ5yLpB8uzUqVN6eno6P+bjZfc6AFizZo2S97K0mco2vUdEYPXqePiwwmSGP7Vr1y74WXLLxERcswZ37sxYNc1x2K3bIXd391mzZrG9XtWrV8+8ljWz8PBwPp+vpaWV0wkajdrBHGhsO6gRHcKEBGTJ3v74Q8Uls/EzHR2dtwUdtYao1PXr2KLFQY7jjhw5UtDXCg/HChVkTZtuYulPYmNjT86YgXw+AqCNDb56VdAVKEYKfBuhVIqTJyPHoUjk99dfKZn2kgYFBXl4eOjq6lavXt3ExGTTpk0FVYci7O7duwKBQE9Pjz3WA4ChoaGnp+eLFy8QcdeuXSz2gJOTE23IyY1Tp06B0n0mx4/jzJlYs2bGZqtOnQImTJhw8+ZNmUwWFxd39epVX19
fNzc3CwsLAChfvvy0adMKtMKfbtzoa2dXoUKFn5wXGor+/ujtjatW4Y8xS8LDw9ljurGx8fnz5589ezZ16lT5w6W8HzJ8+HBNe8pUF7ZM19TUtHz58pk7hEuWLAGAatWq5RSiMzU11cTEBAB+EhwhH5GOWDS+1atX//TMgACcPx8vXcLMcS7j4uJq1KgBAG3btlX4dWKJx7p165bnGpZM1A4qpbHtoEZ0CBHx8WO0tJQ5Oi7ZuXOnqspMTU1lK4lL2CCBhli8eDFbzfVSYYQ0FUlMRAeHjIVhEkmm9LsPHmDjxujggBs2aE76wZ+6ffu2vr7+8OHDC+oCb9+ioSEKBLhypcLjMTExT58+jSu5M7dJPyZCyOzgwYN63+OgsFZQR0eH/ffFixe9vb3Zix4eHhSvL5cS/v03sVq1TyNHKj8tMBA9PbFUKWzc2J39ke3s7DJvpgIA9mheqlSphJwDNqrAuHEIkKI0MMxPJSYmsqg5QqFQXn8bG5sJEyY8fPhw37592traVUxMvvXpg5oRxV691q5dCwC//fbb2LFjW7Vq1a9fPy8vr9mzZ3/48MHa2trOrt+VK18VvpENZ9jZ2RVQxeLi4kQiEZ/Pz89TdWhoKNscOHToUPmL0dHR586dW7x4MZvu3rt3ryrqW4JQO0jtoCKa0iFERH//7QBgYGCQz8w8TFxcXOfOnUuXLm1kZFRomdOICslksqpVq5qamlpbW588ebKArvLkCVpaYqVKisIDSqVYsoaX8k8qlRbsvMGGDejriyUrmWzuyWSyKlWquLq6btu27Ye7llSK06ZNrl8fAAYMGBAcHDx16lS2JIZN6bD/FolESgKvEQU2b0YAHDw4N+emp+OVK9f79+8vEAjYI0j16tW9vLy2bdvG4ig6OjpCgWa7lcnQ1hYB8j8+JZVK27Zta25urq2tPWDAgGPHjqWn/3847ObNm1EsW6O9Pb55k89rEeUmTnzQqNG8DRv2ZD+0eHEsAHbvrviNcXFxdevW1dfXf/7ra0FzycPDg+O4f//9Nz+F3Lp1iy2Ibd26ddeuXeWzOuyZXktLa8KECSqqb0lB7SC1g4poUIdQIpGw6I5OTk7//fdffoq6ffs2W33Bdtx26tRJyXgDKZrmzJkjX7/E4/EmTpyo2vGe69exbl1cuBCnT8d9+1RYMMmr+HgUiVBXFzU1i3FwcDBLQAwA1xo3Rg8PvHcvI+s5gMzMbHOmyBBSqTQwMHDAgAFisbhs2bJ6enqXLl1SY+WLpRUrEADHjMn9Oy5fvszGpCFbPP2zZ8+ytX8FNXIfHo6VK6O1tUp+ICyX/eLFixUfDg3FGjUQAKdPz/+1iBLlyiEAKoxiFhmJWlrI5ytO1CyVSrt27QoAFStWjMpfMsOczJs3j33b79y5k59yjh8/bmRkxMLnAICenl6TJk1GjRr1119/scBO69atU1Wdiz1qB6kdzIGmdAhlMlnmVKo8Hs/Dw+PLryd1kslky5cvZ7cYBweHAwcOmJqaCgTi/v3fRUcXRMVJgWBbC/h8/o4dO5YvX86WNjk4OLA14vkUHx//55/feLzczw2QwmJpiQCY++xvJU50dPSqVavc27fP2LUGgIsWoUCApqYYGKjwLWfOnAGAmjVrFnJVS4IHD3DlSrx4MffvCAgIAAD2vPIu2xfVxcUFAGbNmqXKSjKpqXj2LPr74/XrKimvUaNGbJFVjmfExeGcObhxI62ZLzgfPyIAGhkp2OgXE4OxsdivH1atijduKH57UlKSo6PjxqZNZU2aFEhQPsQRI0Z0rVQptWbNfG6nj4mJ8fPz27Nnz/Pnz6WZPq2/vz9bvXz27Nl8V7akoHaQ2kFFNKVDGD19eg1zc/bcr6ury3p0xsbGy5cvz7yURbkvX76wPdAcx3l5ebEJpeDg4E6dngFg9er44UNBfgbyKyIiItavX1+3bt2ePXsuX748KChIfmjFihXsH3HDhg3slTt37lSqVAkAtLW1Fy1aJM3
HLvlTp06VLVu2WbNRAgF6e2Om3dqkCGjeHAHwzBl116MIePIEvb2xalVcvx6XLMHXr3M6MS4ujuM4sVic+1sl+T9//1/q8Hw5fPhD48ZBZcved3FJ/Jp1c9fVq1dZhIOv2Q7l1+DByHGKkwfkgVR6xdFxiYtLrMrrSX7F48fo4oJdu2Z9/eNHrF0bnZywRw9csQKVLJn6FhmJ5csjALq75yd+TE4kEklaly4Z64cLJhbopEmT2Hahhw8fFkT5xQ+1g3LUDmaiGR3CBQsQINbYuLGFhYGBwb1794KDg1u1asWmjOvXr79v376ctjXLZBgc/H716tU9e/asXbs2W41w9OjRzOd8+IDVqyMADhxYKB+H/ExkZKS9vT38yMbGZsCAAUOGDOE4juO4LGtI4uPjnZ2d2fIYa2vrsWPHXr9+O/dLKr58+bJ79+6+ffuyazVq1Ojx45K24bgEWDZ5srWl5fLly9VdkSIjd19x39atT7i4JNJ2r0KwYQMCIFtgoGgRe5UqVbS1tXv16pX7Je7fvn17/fr1zZs33759q/BpJt3XFwFQVxfv3ctX5eVevEAALFNGNaWRvAoIwKlT0doaPTz+H8Hnv//+a9duJI8n1dJCAGzd+melPH2KRkZoYVFQGz5jY7FGDeTz8dChgiheJpP17t0bAGxtbSMiIgriEsULtYNZUTuIiBrRIbx1CzkOOQ4B0q2tb2ZawXLs2LGyZctyHMeStBoYGPTr96RvX/TxwcuX8cgR7NULLSywadNn8k2lbAtH9igy0dHo6YmbNuGyZbT4Rc1iYmLq1avHJveDgoL8/PwGDBgg3xlsYmLCcVz25Evp6enVqlXL3IFs2vR3GxscNw5v3VJ8IalUGhQU5Ovr6+rqynaTCoVCHo9nZmYmKcyc9yTX2FLhESNGqLsixU3LlgiABRZ7ifzf0qUZS5i0tLIcSUxM7Nevn3zveosWLRTOEyYlJW3cuNHT07NOnTo2Njby3TIA0Lx5c1NTUxblRd6fPHjwoGu5cqmVKmH+Ynv84MABBMCcMzuTQnPiBAoECIBWVrh3b+CbN290dXUBwNZ2JADWqJG7lYNXruDr17863f0L3r3DgwfzlswwN5KTk52cnLp27UohAJHawTwr6e2gBnQIEXHMGOQ45PFw//4sRw4fPswWCrKg3rVrx7K2+O+/sXv3jHa5QYPk/v37e3p6yrd2rFixQsnVCmaQiyhw4QI+e/bDK7GxsfXr1weAKlWqZB4LlMlkjx8/XrRokUAg4PF42R+k9u/YIf/3HTNmzN9//92mzWX2BWjfHrdtQ2NjNDbGRo1SKlSoULVq1bp165qamsqftMRiMetPshLyGTaNFBAWSL1FixbqrkhxM2oUAuDSpequhwa4cAFHj8Y2bXDIkB9ef/06ols3sUCgr6+/ePHiMmXKAEClSpUyR4BMTU318/OztrYGAJafjREIBNra2vb29lWqVJG/qKWl1aNHjwULFrCcWquXLVPlp3j0CCdNwu9r8ol6PX+Orq7o5BQKAM2aNWvRogUbU/DwOFpUEn/ExqK+Prq5YYFFmY6Li8vPZpCShNrBPCrp7aBmdAj9/XHOHMw2KYSILNLMnDlzEPHLly+3bkm2bsV//sHAQHzyBDduRHmOuvT0dNaaDihbNqJdO6QpIPUJCMCZM/G//zLip1lZ4bhxWzZs2HD//n0WyaBy5cphYWEK39uiRQsAOHDgwA+vSiSyypUfVK9e38REnulLJsNbt3DYMOzZE7t1yxgdKF8+NfNjlpWVlYeHx/79++Pj4xMTE83MzABATyg8PGhQwf8ZyC+LiorauXPnI4UR94gS/v5Yvz7Om4clNzNV0ZJlquTUKTQxQYC7PXqwLOFhYWEODg5sycPFixclEsns2bMtLS3ZrcnBwcHf379Hjx6Zlzzs3r0bEa9du+bq6sqCqzEcx3l4eKj+I/j6orm5wmaXFD6ZDHfu/NfCwkI+ajl06NAitJJlyxYEwJY
t1V0PjUDtYB6V9HZQMzqEOUhISNDT0+M47o2iNcEBAVlXRlzeuTO2Vi0UixEAt24tpFoSRQ4dwqQk7NsXLS1RIEA9PSv5842VldWHnMP7LFiwoFXZsuf/+eeHV9m+HS0tFIne/JhhZvZsBMDffsPoaPz6Fd+8kbx+/frZs2dDhgwBgPHjx2c+ecWsWcednVMNDVWSy4uQIsTfHxs2RB0dHDgw7to1maaGLFePDh0QADt3zpzJ/du3b506dWIr1Y2NjdndTygUtmnT5ujRoxKJ5PDhw5MnT166dOn27dtPnjwZGRl5/fp1fX191gmUZ41ftWpVgXQMli1jo2gyb28FMzORkTmGtiQFJjo6euTIkfv27Vu0aJG66qDw1pHcqRMC0JQyKepKdDuo0R3CHTt2AEDTpk1z+waJJCNvLwDa2RVExC2SB8+eJa9du9bJyYkFj9XX11fyfCN99AgBsubaatw445/Vygp/TCnJwgWdOpW1nOvXrwNAxYoVf3g1MhLFYrZhFd3d8/3JCCkyUlKwZUv23e7bpEmZMmW8vb3fKsxfRlQrIADHj8fJk7NHPpDJZD4+PqVKlZKvWZCPiwUqCp4eFxdnbGyspaXFzmnSpMn58+cLsOabN6NIdKBZs/bt28fHxyNiQkLCu0OHsEMHFAiwbFlctQobNMArVwqwDqSI8fX1tbe39/X1DQ8PZ69ERkbq6+v3a9pU9uuZwAgpVCW6HdToDmG7du3gVzOWslHPMmVw927qEBYpx48fZ9tBAeDy5cs5nieTobV11ky9SUn4xx9oYZFlgdODB8EuLrdq1UrNvq9BKpWy5TfPs2yCHzoUAVAgwD596BtCSprXr9OnT69cqRLrUSxZskTdFSJ47969Jk2ayGf8OI7T09ObOnXqunXrZs2a5eXl1a9fv9atW9euXbtu3brsHEdHx8JJy/bpyhW2P79atWo9evTQ09MbwsbYhELs1An/+gsBUCzGPXuyv1cmk129evXbt2+FUE9SaJo0acK+hHw+v3Xr1ps3b160aBEAdOrUSd1VIyR3Smg7qLkdwsjISIFAIBQKfy09fUICnjiBGzcWUCwskmcJCQlaWlocx9no6x/39VV26u+/o6Ulnjjxw4v+/nj/fpY47xMmTACA0aNHKyxm4MCBAJB17c3Tp+jlhdOn0zeElFTp6emnTp3q1atXZGSkuutCMsTExOzfv3/AgAF6enoAUKtWLchGW1tbLBYHBAQUZsVCQkIqVKjAFrVyHOfk5JTq54es2U1Lw5EjEeCRi8usWbPk66+Cg4N9fHwqVqwIANu2bSvM2pKClpiYuGfPns6dO8tnqln89jFjxlAIUFKMlLx2kEPE7M2GJpDJZBcvXnz69KmXl5e660JU46Knp/25c6XevYNateDevRzP+/YN9u0DqRRq1wZHx5zOQsTy5cu/e/fuxo0bjRs3zn7CgQMHevXq5eLicunSJVVUnxBC8ishIeHUqVOxsbFBQUEWFhZm35mbm7u6usbExLx69Yr1tQrNokWLJk6c6ODgcODAgfLly2c5+mXDBtsxY5Ilkh49ejRo0GDv3r0PHjxgh2xtbWfOnDl48ODCrC0pHLGxsceOHdu+ffuFCxc4jpPJZNra2q1atRo4cGCXLl3YBhBCSOFRd4+UENVZvDhjMRLH4ff9CXkWHx//22+/aWlpnT59WuEJt27d4vF4hoaG7u7ux44dK0IR2wghJJs+ffoAwJr1hR3508PDAwCUJMIOCAjQ09PT0dFhjyVGRkYDBgwIDAwsYTEbSHYLFiwAgHr16jVu3JjjOPYFMDExGTVqlLqrRohm4ampH0pIAWjXDgCAxwMtLXj0KJ+F6evrGxkZpaamduzYcfbs2TKZTH5IJpMtXbq0efPmMpksISHh4MGDnTt3fta1K4wZAzdvgqbOuhNCirLuk7tXC6p2stXJQr7uo0ePAKB27do5ndCxY8e///47KSmpbNmy//7776dPn7Zv3+7q6ir
vIZCSas+ePQDg4+Nz48aNd+/eLV++3MnJKTo6+t27d+quGiGaRXOXjJKS6Z9/4NMnGDMGvodPyA9EXLhw4ZQpU6RSacuWLXfv3l2qVKl3794NGTLk4sWLADBgwIAJEyYcPXr0+MGDt96942JjAQCWLIE//8z/1QkhRIU+pX0q/bi0mCf+WuurNk+7cC4qk8kMDQ0TEhK+fPliamqa02leXl6rVq1asGDBxIkTC6diRO2eP39erVo1IyOjT58+ybcUAsDTp0/T0tLq1KmjvqoRonEEPz+FkGJk3jwVFsZxnLe3d6NGjfr27XvhwoX69et7enouXbo0JibGwsJi48aNnTt3BoCaNWtOnToVnj6FAwdgxw5ISQE/P+UbFAkhpJBZCi3r6tS9l3TvasLVNgZtCueir1+/TkhIsLGxUdIbBICHDx+C0llEUvKw6cEePXpk7g0CQPXq1dVUI0I0F80QEvJzYWFhvXv3vn79ulgsTklJcXd3X7dunZmZmYJTEYGWORFCiqSz8WevJl61Fdn2N+5fOJOEBw8e7NmzZ/v27U+eVLZU1cTEJCYmJjw83MrKqhBqRYqCqlWrvnz58ty5c61atVJ3XQjRdNQhJCRX0tLS6tWr9+TJkzFjxqxcuVLd1SGEkGLg1q1bEyZMePv27Y0bN8qUKaPwnPfv35ctW9bc3Pzz58+FXD2iLtHR0W5ubm/fvv3w4QOfz1d3dQjRdBRUhpBcEQqFFSpUAAAayySEkFyqW7fuly9fPn782KRJE3k+iSzu378PtF5Uw5iYmNy4ceP58+fUGySkKKAOISG5JZFIAIDyIxFCSC5paWldv369efPmHz9+bNq06dGjRzMfffr06YwZMzw8PCwsLCIjI1NTU9VVT6IWBgYG6q4CIQSAOoSE5B7rEGbZ/k4IIUQJExOTM2fODBo0KDExsVu3bjNmzAgNDZ07d669vX2NGjVmzpz5+fPnhISEx48ft2rV6suXL+quLyGEaBzaQ0hIbjk7O1+7du3q1atNmzZVd10IIaSYmTt37rRp0xCR4zKePUqVKtW7d+/ffvtNLBa7ubnFREa+btXKYskSqFZN3ZUlhBANQjOEhOQWW85ES0YJISQPXF1djYyMeDweIrq6uh47duzDhw8rVqxo2LBhrVq1bt++HdSzp8WpU9CkCZw7p+7KEkKIBqEOISG5JRQKgTqEhBDy63bvlvbuPT4mJobFGk1KSnJzc2M3VcbKyqrqhg3QowfExsLateqrKSGEaBxaMkoIIYSQgiKTwdSp4OsL1aolurr6zJgxxd7e/tOnTwcOHHB3d1dw9qpVIBQCnw+1a4OjozqqTAghmoU6hIQQQggpKDdugLMz8PmwciUMHw4A4OfnN3z48PLlywcHB4vFYnVXkBBCNB11CAkhhBCieidOgKkpvHkD9+5Bhw7QsmXG61KptF69eo8ePZo/f/6kSZPUWkdCCCHUISSEEEJIwTh8GLp3V/D6+fPnWYyZjx8/6urqFnq9CCGE/B91CAkhhBBS2Hx8fLp27Vq3bl11V4QQQjQddQgJIYQQQgghRENR2glCCCGEEEII0VDUISSEEEIIIYQQDUUdQkIIIYQQQgjRUNQhJIQQQgghhBANRR1CQgghhBBCCNFQ1CEkhBBCCCGEEA1FHUJCCCGEEEII0VDUISSEEEIIIYQQDUUdQkIIIYQQQgjRUNQhJIQQQgghhBANRR1CQgghhBBCCNFQ1CEkhBBCCCGEEA1FHUJCCCGEEEII0VDUISSEEEIIIYQQDUUdQkIIIYQQQgjRUNQhJIQQQgghhBANRR1CQgghhBBCCNFQ1CEkhBBCCCGEEA1FHUJCCCGEEEII0VDUISSEEEIIIYQQDUUdQkIIIYQQQgjRUNQhJIQQQgghhBAN9T+GcJntL8/5hwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] 
- }, - "execution_count": 27 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "kazyeOPYl5_i", - "colab_type": "text" - }, - "source": [ - "As suspected, these are not small molecules, so we will remove them from the dataset. The argument here is that these molecules could register as inhibitors simply because they are large. They are more likely to sterically blocks the channel, rather than diffuse inside and bind (which is what we are interested in).\n", - "\n", - "The lesson here is to remove data that does not fit your use case." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "xkFF2eMgl5_j", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# drop large molecules\n", - "smiles_data = smiles_data[~smiles_data['drug'].isin(long_smiles)]" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QjSLGiv0l5_m", - "colab_type": "text" - }, - "source": [ - "Now, let's look at the numerical structure of the dataset.\n", - "\n", - "First, check for NaNs." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "H5wkbrWgl5_n", - "colab_type": "code", - "outputId": "243bea73-449c-4d9a-a99a-edbf1fea1492", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "nan_rows = smiles_data[smiles_data.isnull().T.any().T]\n", - "nan_rows[['n1', 'n2']]" - ], - "execution_count": 29, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
n1n2
62NaN-7.8266
162-12.8456-11.4627
175NaN-6.61225
187NaN-8.23326
233-8.21781NaN
23730.83696.16932
262NaN-12.8788
288NaN-2.34264
300NaN-8.19936
301NaN-10.4633
303-5.613748.42267
311NaN-8.78722
399-1.45559-6.47666
\n", - "
" - ], - "text/plain": [ - " n1 n2\n", - "62 NaN -7.8266\n", - "162 -12.8456 -11.4627\n", - "175 NaN -6.61225\n", - "187 NaN -8.23326\n", - "233 -8.21781 NaN\n", - "237 30.8369 6.16932\n", - "262 NaN -12.8788\n", - "288 NaN -2.34264\n", - "300 NaN -8.19936\n", - "301 NaN -10.4633\n", - "303 -5.61374 8.42267\n", - "311 NaN -8.78722\n", - "399 -1.45559 -6.47666" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 29 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Z6xL_ztsl5_u", - "colab_type": "text" - }, - "source": [ - "I don't trust n=1, so I will throw these out. \n", - "\n", - "Then, let's examine the distribution of n1 and n2." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "KhvgrLnjl5_v", - "colab_type": "code", - "colab": {} - }, - "source": [ - "df = smiles_data.dropna(axis=0, how='any')" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "txAjPzOAl5_2", - "colab_type": "code", - "outputId": "a4564f8e-817d-4df7-9ce1-2becb341767c", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "# seaborn jointplot will allow us to compare n1 and n2, and plot each marginal\n", - "sns.jointplot('n1', 'n2', data=smiles_data) " - ], - "execution_count": 31, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 31 - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAa0AAAGoCAYAAAD1m7qEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3df3CV5Z338c+dk4QeHtGAkpMhRRQXQh/Kjzy6o9YurJEQIY1EEPeP2l2ydHGYzrBRJ1t+WKSg/EptnelMGdxK6bZd14oYdWM3SiLG4TE62iAsY1PtYwoEcyhIACGbn/fzRzzHkJxzck6Sc+77Ouf9mnEG7oScbw54f3Jd9/e6Lsu2bVsAABggzekCAACIFqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADBGutMFAMnq/OVOXezoHnR93Jh0XTM204GKAPMRWkCcXOzoVv0fzwy6Pm/6dYQWMExMDwIAjEFoAQCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoAQCMQWgBAIzBjhiAC7DlExAdQgtIsO6eXp08d/mKax1dPXrnk3ODPpctn4ArEVpAgrV39arxT59dcS3/+iyHqgHMwjMtAIAxCC0AgDEILQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxCC0AgDFYXAyMglDbMHV09ThUDZC8CC1gFFzs6Fb9H89ccY1dLoDRx/QgAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBgsLgZiEGrnC4ndL4BEIbSAGITa+UKK3+4X3T29Onnu8qDr48ak65qxmXF5TcDNCC3Axdq7etX4p88GXZ83/TpCCymJZ1oAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGOwTgswUKhFx+lpUnfv4M9lITKSCaEFGCjUouP867PUeLxt0OeyEBnJhOlBAIAxGGkBIbAxLuBOhBYQQqI3xgUQHaYHAQDGYKSFlBFuyi9U110yTQNyvAmSCaGFlBFpym9g110yTQNyvAmSCaEFI8QySmIEEZ1QI7B4vneh/g6T6e8q3L/RZPoe3SClQ8st/8hiuSGHW0CaTAtLQ70fHV09eueTc4M+N9Qo6Rs3TaDzLwqhRmDh3rtYfjiI1Hk58O8w1tFeooMvltcLN5JnRDu6LNu2baeLGKmVK1fq3LnBNzQAMNX48eP1zDPPOF2G6yRFaAEAUgMt7wAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjJEVorVy50ukSAMARqXb/S4rQYgsnAKkq1e5/SRFaAIDUQGgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoAQCMke50AQDgdlWNLaqsadKptnZNyvKqoihPpfm5TpeVkggtAIigqrFF6/YfVXtXjySppa1d6/YflSSCywFMDwJABJU1TcHACmjv6lFlTZNDFaU2QgsAIjjV1h7T9UTr6bV1/nKn02UkjOOh1dPTo9LSUj344IOSpBMnTmj58uUqLCxUeXm5OjtT5y8DgPtMyvLGdD3Rem1bFzu6nS4jYRwPrX/7t3/TTTfdFPz9j370I61YsUKvv/66rr76au3bt8/B6gCkuoqiPHkzPFdc82Z4VFGU51BFqc3R0GptbdXBgwd13333SZJs21ZDQ4OKiookSffee69qa2udLBFAiivNz9W2pbOUm+WVJSk3y6ttS2fRhOEQR7sHt27dqoqKCl26dElS3wmcV199tdLT+8rKycmR3+93skQAUGl+LiHlEo6NtN544w1NmDBBX//6150qAQCMl2ZZGjcmdVYvOfad/v73v1ddXZ3q6+vV0dGhzz//XE888YQuXLig7u5upaenq7W1VT6fz6kSAcD1PGmWrhmb6XQZCePYSOuRRx5RfX296urq9OMf/1i33XabnnzySd16662qqam
RJL344osqKChwqkQAgMs43j04UEVFhX7xi1+osLBQbW1tWr58udMlAQBcwhUTobfeeqtuvfVWSdLkyZNpcwcAhOS6kRYAAOEQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAYLBe29b5y51Ol5EwhBYAGKyn19bFjm6ny0iYdKcLAPClqsYWVdY06VRbuyZleVVRlKfS/FynywJcg9ACXKKqsUXr9h9Ve1ePJKmlrV3r9h+VJIIL+ALTg4BLVNY0BQMroL2rR5U1TQ5VBLgPoQW4xKm29piuA5KUZlkaNyZ1Js0cC61PP/1U3/nOd7R48WIVFxfrl7/8pSSpra1NZWVlWrhwocrKynT+/HmnSgQSalKWN6brgCR50ixdMzbT6TISxrHQ8ng8Wrt2rV599VU999xz+vd//3d9/PHHevrpp3X77bfrtdde0+23366nn37aqRKBhKooypM3w3PFNW+GRxVFeQ5VBLiPY6GVnZ2tmTNnSpKuuuoqTZ06VX6/X7W1tSotLZUklZaW6sCBA06VCCRUaX6uti2dpdwsryxJuVlebVs6iyYMoB9XTISePHlSH374oebMmaOzZ88qOztbkjRx4kSdPXvW4eqAxCnNzyWkgAgcD61Lly5pzZo1Wr9+va666qorPmZZlizLcqgyIHass+I9QHw5GlpdXV1as2aNSkpKtHDhQknStddeq9OnTys7O1unT5/WhAkTnCwRiBrrrHgPnNBr2zp57rIkadyY9KRvynDsmZZt29qwYYOmTp2qsrKy4PWCggJVVVVJkqqqqnTXXXc5VSIQE9ZZ8R44oafXVv0fz6j+j2dSYjsnx0Za77//vl566SVNnz5dS5YskSQ9/PDDWrVqlcrLy7Vv3z5NmjRJTz31lFMlAjFhnVX477WlrV13bK9jyhAj5lho3XLLLWpqCv3TV2DNFmCSSVletYS4aafSOqtw74ElBa8zZYiRYEcMYJSEWmdlSbpzxkRnCnJAuPfAHvB5TBliuAgtYJSU5udq2c256t/vakt64f0WVTW2OFVWQoVaazYwsAJSadoUo8fxlncgmbzxh7+EHVWkylTYwLVmd2yvS/lp03iyJOVfnyVJ6u7p1fnLnUndQUhoASMwcE1SqJuzlNqjioqivCva4KXUmzaNJ1tS4/G24O/nTb8uqUOL6UFgmAJrklra2mWrr8Eg3FJ4W30jjlSZJuyPaVOMJkZawDCFWpNkK3TjgZS4rjk37kjBtClGCyMtYJjCTfnZ6mtACCXeXXOhRn/r9h91fETDGjaMFkILGKZwjQS5WV4dWlsQdqownjdqt+5IwVlhGC2EFjBMQ51/5cSN2q0jGs4Kix9L0q03jte86ddp3vTrkv4U4+T+7oA4CjyLCff8KFTXXLxv1CPdlSNez8OGeq8wfLakMRkefXX8WKdLSQhCCxiBSOdfOXGjHklQxnuHds4Kw2hgehCIo9L8XFUU5WlSllen2tpVWdMU16aIkZx+7NbnYUB/jLSAOHLifKnhjmjc+jwM6I+RFhBHJo1e6PAzkyUpPYXu5Cn0rQKJZ9LohQ4/M9mSunudriJxCC0gjkwavYzkeRiQKDzTAuLIibb3kaDDD25HaAFxFE3beyL3CnTjvoRALAgtYISGCoJIo5dEdhc60ckIjDaeaQEjMNINahPZXWhSJyOiZ6nv8MeT5y4H/zt/udPpsuKG0AJGIFwQlD93OKrzs+LRXVjV2KI7ttfpxrXVV9QQ7mu2tLWn7FlfycCW9H//9Jnq/3gm+N/Fjm6ny4obpgeBEYgULtFMv0XaK3A4z58iTQFGOlmZqUKYgpEWMAJDta4PNf0Wbm3
UnTMmDmvaMdzIb9PLx0K+Viy1Am5AaAEjMFQQSJFHY+HWRr3xh78M6/lTuNdqa++SpOBrDadWNwg39YnUwfQgMAL9W9rDTb0NNRoL1V340HOHQ37uqbb2kNOGgRoGHmnfX2VN05Drw9y46DmA7kdIhBYwYoHQGXhTlWJbSNw/jNIsSz324AjKGpsx6MZd8fwHkiV19USKrC9v8gNHcMOp1QmRuh9TObQsSfnXZ11xLZn3IiS0gFEykvOzBgZeqMDyZnhk2xp04+7qjRxWAR7LChtYuQYsNDZpH8dEsiU1Hm+74tq86dc5U0wCEFrAKBruNkihRhFSX9D02nYwAMNNGw7Fm+EJG1iWpENrC4b1dRNppKcyIzkQWkCCBKb/Wtra5fli+m/82AzZ9peNEgP12rY+2V4c/H2kZ2cDDQy8cH/WlJu+afs4Ij4ILSABwk3/nbscOqwCBgZKqBt3Rpo16JmWN8MTcod2k2/6I5l+RfIgtIAECDf9F4mlL3erCNycw924B167c8ZEbXr5mMq/mE4cPzZDj5XM1LKbc/XsOyfUY9vyWJaW3WzWru7sQj9YqEaMwLZOA40bk65rxmYmqLL4ILSAOOjfCXiNNyPs9F8kgXHTwNbucDfuwLWqxhZVPP/BFQ0a5y536ZHnP1Cavhzl9di2Xni/RbdMmUAQGCxUI0Y486ZfR2gB6NP/mZWlL0NnOIE1UCyt3ZU1TSE7Cnt6bQ0c69EyDtMQWsAoGPjMKrom9NgEpgqHep4Tawt4yxcLlgkumCCJl6ABiTOcZ1aSZFmxfX40exEOpxswluNUACcRWnAtk/aZi3V0k5vlVfP2Yv3k/rnD/p8w3F6EFUV5fR2FA3jSrJDXI30twG1cOz1YX1+vJ554Qr29vVq+fLlWrVrldElIINP2mYt07MdA/dvM3/vzZ+odweuGCsvA+7Pp5WPB52mB7kFJwY7CaL4W3C9U92A4ga5Ck7sIXRlaPT092rx5s37xi1/I5/PpvvvuU0FBgf7qr/7K6dKQIKbtM1dRlBc2DCQpzZJ67b4Fv//n+mtUWdMU8fOjlWZZunFt9aBnXJFaw01fZIwrxdI9GGByF6ErpwePHDmiKVOmaPLkycrMzFRxcbFqa2udLgsJZNo+c6X5ucryZoT8mKW+wJL62swP/emzqEdlQ+mx7YjPuEJNsYY7w8uURcZIba4MLb/fr5ycnODvfT6f/H6/gxUh0cL91O/m0cCme2aGPFsrHp2EoQx8LhWYYu3fvPHQc4dV/txhfSUjTVnejCvO8HLjCBYYyJWhBZg4Gggc6Dh+bOgR12iJ9hDHUFOsgQA9d7lLHd29+snfzdWhtQUEFozhytDy+XxqbW0N/t7v98vn8zlYERIt3Im+TtxcY+liLM3P1djM+D4qPrS2IGxw9R+JDjWVSscgTOTKRoxZs2apublZJ06ckM/nU3V1tZ588kmny0KCuWGfueF0Mcb7udujVUd154yJ+nXD8UEfu3PGxOCvo9k+auDehjBPLN2DAeH2JpTcvz+hK0MrPT1dGzdu1He/+1319PRo2bJlmjZtmtNlIQWF62L84SvHwt7kI7W/eyxLt00dr+az7cGdLWJtyvhNw/Gwz/be+MNfJPWF7aXO7qi+ntuXEyCy4XQPRuL2zkJXTg9K0vz581VTU6MDBw5o9erVTpeDFFTV2BI2UM5d7go7TRjqeVxAj23r3U/O6XKUgRJKoKkilMAor7Km6YqjSobCVCFM4drQApwUmBaM5IevHAt5vf/zuFC6em2du9wVMXyGy5Z0x/a6YX1dty4nAPojtIAQotlLMNJoy0mBXeZDyc3yRtXEAbiVK59pAU6LdtRR/txh/fCVY7Jt6Xx7V/AAxhfebxnWBrqjxZauOB5FunLJgMknGCO1EVpACLE0SJy7/GWHXktbu37TcDxhC4ojsdU3sgp3lAnH1ieH4XQPRhKps3Aoieg8JLSAECqK8gaNRqLlhsCS+gLr0NqCkB9
zw3ICjI7R7h4ciUR0HhJaSAr9j7cfjZFD4M+Oxqa2TmC6D8mKRgwYL9Qee6NxqGFpfm7ELZPcJnBUVt82UrbKnzusG9ZWK3/za65sGAGGg9CC8SIdYzJSodZchTtI0Wn/b1uxHrjtep273KX2ri9P6Tp3uUsV+z4guJAUmB6E8eJ5jElgmjBwBpXHstTV65anVlf63z/4nS53hT5SsqvHdu1ZZBiZ0W7EGIlYmziG07hBaMF44Tr9RmvdUeBGP9zGjEQJF1gBLB5OTm5qxIjVcBo3mB6E8RJxjEk0i43djsXDSAaMtGC8/lN48Vp3NNrbLSVahseimxBJgdBCUoj3uiOPZanHduezrKFY+vKZlsRO7jAb04NAFEwNrDR9udh5tJYCAE5ipIWUFOti5NxhnHvltLEZaYOaMwJLARhtJQ83dQ/GKn0YwyZCCylnOKcRVxTl6aHnDrtmi6ZohOsmpIswuZjePRgrpgeRcoZajFzV2KI7ttfpxrXVumN7naoaW1San2tUYEVCFyFMxkgLKSfSYuRIozATpwgHYk9CmI7QQsqJtBg53Chs08vHZLlz96aIsrwZ+l9j0ke8FGC0NyQGhovQQsoJdeyIN8OjO2dM1K8bjof8M23tXSGvu4U3I03dvba6eux+1zzadM/MEYfLcJ4BAvFCaCHlhFqMHDht2BSBdWO5/UY98RoNRXoGSGg5z+TuwYF7FUazFyGhhZQ0cDHyHdvrjNqm6U/bFgd/HWgcCYTVT/5u7qiGSTw3JE5miZpSNbl7cKBo9iIktACZdQP29Hu4loipu3hvSJyMmFKNH1reAZl1A+6/O0ekxpHRkogNiZNNPM94S3WEFqDQN2YnWZLGRNguILB+LNwIsa29a9S2ayrNz9W2pbOUm+WVpb7W/21LZzFiiIAp1fhhehDQ4OaMsZkeXep05hlXmiXdPnWC3v3kXNjPCUw3XePNCNvZOJqNEvHekDjZJHJK1YlGjEyPpTFx+CFv3JihI4nQQsoK9aD80NoCVTW26OHnDjtWV68tHfrTZ0N+XntXj76SEX40xk/1zqhqbNGlju5B1+M1pepEI8a86dfpq+PHJvQ1AwgtpKRID8ora5oU+Qxg92i73KXxYzN07vLg0ZZJz+mSxcB/VwHjx2bosZKRr5kDz7SQoiI9KDdphDIpy6vHSmaGbJS4c8bEQXsoIr7CnXA9NjOdwBoljLSQkiI9KA/3PCLRLCniJr2B6aZIi6VpuU4sGjDij5EWUlK4qbPAsy03/I/x7duuV+4XdQ7c9tCStOzmL5sjSvNzdWhtgT7ZXqxDawv0xh/+Qsu1AyL9u8LoYKSFlBRu/8H+I5d1+4+ofcCZVIHtk+Ltgduu1+OlsyT1tbcPHPnZkt74w18G/blAc0m4kSI/8cdXpH9X8TKc7sGRdv9F0+UXL4QWjDGa2+KEmlLr//XCtXjfuLZ6+N9ACGlW300t0F6f5c0YtMlttFNO4ZoA+uMn/vga6t9VPAyne9DJ7r+RIrRghGi2xYk11Iaz9igrTKfecH0lPU1P3Bt5oW60a37CNQEEsItFYrCmLb7cMHUPDCma04bX7T+qlrZ22foy1EazY66qsUWf/8/g9Tcjcbmrd8g6o91GKdLUH7tYIFkQWjDCUFNko7XXW2DH9FBt4pU1TerqHf3nWUPVGe02SuGm/nKzvDq0toDAQlJgehBGGGqKbDitxgOnE4dqE49nE8NQXzuaKScnmgCARHMktHbs2KE33nhDGRkZuv7667Vt2zZdffXVkqTdu3dr3759SktL06OPPqq/+Zu/caJEuMxQN+RY93oL9Yws1KnF/Q87DPcauVlenfpiWnK4RqNBwokmADivf/dgtF2BTnb/jZQjld9xxx165JFHlJ6ersrKSu3evVsVFRX6+OOPVV1drerqavn9fpWVlammpkYej3t234YzhrohxzrKGKppob/AKCjSa0RqMx/KaI6GaAJIPf27B03uCoyWI6H1zW9
+M/jruXPn6r/+678kSbW1tSouLlZmZqYmT56sKVOm6MiRI8rPz3eiTLhMpBtyrKOMWKb6AqOgoV5jYKBleCxlpFm6/MVar0A7eyx1ArhSxND6/PPPtXv3brW2tmrevHkqKSkJfmzTpk3atGnTiAt44YUXtGjRIkmS3+/XnDlzgh/z+Xzy+/0jfg2khlhGGdFu1TRwFBTuNWINTUIKGJ6IobVu3TpNmTJFRUVF2rdvn1577TU9+eSTyszM1AcffBDxC69YsUJnzpwZdL28vFwLFiyQJO3atUsej0f33HPPCL4FIHahpvoG8lhWTG3iTM0B8RcxtI4fP66f/vSnkqQFCxZo165d+vu//3vt2rVryC+8d+/eiB/fv3+/Dh48qL1798qy+nZW8/l8am1tDX6O3++Xz+cb8rWAWAXC5YevHAu5WDgjzVLl8jmEEOAyEddpdXZ2qrf3y73XVq9erfvvv18PPPCA2tqGf+hYfX29fv7zn2vXrl3yer/smiooKFB1dbU6Ozt14sQJNTc3a/bs2cN+HSCS0vxcjc0M/XPbVV9x7iiJSGvFgIEC3YP512cpPQVW3kYcad15551qaGjQN77xjeC1pUuX6rrrrtPjjz8+7BfdsmWLOjs7VVZWJkmaM2eONm/erGnTpmnRokVavHixPB6PNm7cSOcgRl3/9Vnh2tTbRnGrplhEs10V0N/A7sFkFzG0/uVf/kVS34irpqZGLS0t6u7u28ZmJM+hXn/99bAfW716tVavXj3srw1EEs2mspJzG8tG2tmD0AKibHlfvXq1xo0bp5kzZyozM1OSgs+hAJNEsz7LyV0kOEQQiCyq0PL7/XrmmWfiXQswItHs8j7UzT/3iz8n9Z1jlei1VLHu7AGkmqge2+Xn56upiRNP4V7R7vI+1M3/VFu7Nr18TBX7PojrjvHhRLujOxBAI0YI77//vl588UXl5uYGpwcl6ZVXXolbYUAson0WVFGUp4eeOxy2AcOW1NY+uAkjUc+V2D8QsaIRI4R//dd/jXcdwIiEm/ZraWsfNM337duu128ajse8wW2iniuxSBkIL6rQys3lfyC4W7hnQZYUvN7S1h4cZWV5M2RZfa3t0YYXz5UA56XADChSQahnQZY0KJACv29r79L/dPXqJ383V7lRhBHPlQB3ILSQFEKd7jvUCCrwnCpU4GWkWRo/NiPiScEAEs/ck8CAAQY+C7pje92QO7mfamun+QFGC3QPZnosow93jFbyf4dIWdHs5N7/rCy3h1Q069CQegLdg/OmX6drxmYO+fmmI7SQtPqPoFra2gc94zLpORV7EgJ9CC0ktf4jKJNHKuxJCPQhtGCc4YZPoqcARzMk2ZMQ6ENowSimTJONdp3sSQj0oeUdRok0TeYmo10nexIinED3YHdPr85f7nS6nLhjpAWjmDJNNtp10paPcAbuPZjsHYSEFowy3GmyRDdhhKvTVt/6seG8vglt+UC8MT0IowxnmizaY0viXWdAIo86AZINoQWjhNquaagtlpx4Dta/zlDc+BwOMAHTgzBOrNNkbn0O5vTrIzlYkm69cbzGZHjYxglIBk60iw9seQ9XFzBStqQxGR59dfxYp0tJCKYHkfScaBcPNSWZyNcHkhUjLSQ9J9rFI0395dKuDgwboYWUkOh28XBTkrlZXh1aW5CwOoBkw/QgEAfsYAHEByMtIA7YwQKJkmalxuGPAanznQIJxg4WSARPmpX0Wzf1x/QgAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGI6G1p49e5SXl6fPPvtMkmTbth5//HEVFhaqpKREx44dc7I8AIDLOBZan376qQ4dOqRJkyYFr9XX16u5uVmvvfaatmzZok2bNjlVHgDAhRwLrW3btqmiokKWZQWv1dbWqrS0VJZlae7cubpw4YJOnz7tVIkAAJdxJLQOHDig7OxszZgx44rrfr9fOTk5wd/n5OTI7/cnujw
AgEvFbZf3FStW6MyZM4Oul5eXa/fu3dqzZ0+8XhoAkKTiFlp79+4Neb2pqUknT57UkiVLJEmtra1aunSpnn/+efl8PrW2tgY/t7W1VT6fL14lAgAMk/DztPLy8vT2228Hf19QUKB9+/ZpwoQJKigo0K9//WsVFxfrgw8+0Lhx45SdnZ3oEgEALuWqQyDnz5+vN998U4WFhfJ6vdq6davTJQEAXMTx0Kqrqwv+2rIsPfbYYw5WAwBwM3bEAAAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABgj3ekCMLqqGltUWdOkU23tmpTlVUVRnkrzc50uC0Cc9Nq2zl/u1DVjM50uJSEIrSRS1diidfuPqr2rR5LU0taudfuPShLBBSSpnl5bFzu6Uya0mB5MIpU1TcHACmjv6lFlTZNDFQHA6CK0ksiptvaYrgOAaQitJDIpyxvTdQAwDaGVRCqK8uTN8FxxzZvhUUVRnkMVAcDoohEjiQSaLegeBFJHmmVp3JjUuZWnzneaIkrzcwkpIIV40qyU6RyUmB4EABiE0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGMOx0PrVr36lu+++W8XFxdq5c2fw+u7du1VYWKiioiK99dZbTpXnOlWNLbpje51uXFutO7bXqaqxxemSACDhHNkRo6GhQbW1tXr55ZeVmZmps2fPSpI+/vhjVVdXq7q6Wn6/X2VlZaqpqZHH4xniKyY3zskCEE6qHQLpyEjr2Wef1apVq5SZ2fcmX3vttZKk2tpaFRcXKzMzU5MnT9aUKVN05MgRJ0p0Fc7JAhBO4BDIVOFIaDU3N+u9997T8uXL9cADDwSDye/3KycnJ/h5Pp9Pfr/fiRJdhXOyAKBP3KYHV6xYoTNnzgy6Xl5erp6eHp0/f16//e1vdfToUZWXl6u2tjZepRhvUpZXLSECinOyAKSauIXW3r17w37s2WefVWFhoSzL0uzZs5WWlqZz587J5/OptbU1+Hl+v18+ny9eJRqjomRLzyUAAAvmSURBVCjvimdaEudkAUhNjkwPLliwQO+8844k6ZNPPlFXV5fGjx+vgoICVVdXq7OzUydOnFBzc7Nmz57tRImuUpqfq21LZyk3yytLUm6WV9uWzqIJA0DKcaR7cNmyZVq/fr2+9a1vKSMjQ9u3b5dlWZo2bZoWLVqkxYsXy+PxaOPGjSnfORjAOVkAQrEkpafQiltHQiszM1M/+tGPQn5s9erVWr16dYIrAgAz2ZK6e52uInE4udgwVY0tqqxp0qm2dk3K8qqiKI8RGICUQWgZhEXGAFJdCs2Emo9FxgBSHaFlEBYZA0h1hJZBwi0mZpExkLrSLEvjxqTOkx5CyyAVRXnyZly5BIBFxkBq86RZKbNZrkQjhlECzRZ0DwJIVYSWYVhkDCCVMT0IADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBi3vLsDO7QAQHULLYezcDmAkem1b5y93psyuGEwPOoyd2wGMRE+vrYsd3U6XkTCElsPYuR0AokdoOYyd2wEgeoSWw9i5HQCiRyOGw9i5HQCiR2i5ADu3AxguDoEEABgj1Q6BJLQAAMYgtAAAxiC0AADGILQAAMYgtAAAxiC0AADGILQAAMYgtAAAxiC0AADGILQAAMYgtAAAxiC0AADGILQAAMZwJLQ+/PBD3X///VqyZImWLl2qI0eOSJJs29bjjz+uwsJClZSU6NixY06UBwBwKUdCq7KyUt/73vf00ksv6Z//+Z9VWVkpSaqvr1dzc7Nee+01bdmyRZs2bXKiPACASzkSWpZl6dKlS5KkixcvKjs7W5JUW1ur0tJSWZaluXPn6sKFCzp9+rQTJQIAXMiR4y7Xr1+vlStXaseOHert7dV//Md/SJL
8fr9ycnKCn5eTkyO/3x8MNQBAaotbaK1YsUJnzpwZdL28vFwNDQ1at26dioqK9Oqrr2rDhg3au3dvvEoBACSJuIVWpBD6/ve/rw0bNkiSFi1apEcffVSS5PP51NraGvy81tZW+Xy+eJUIADCMI8+0srOz9e6770qSGhoadMMNN0iSCgoKVFVVJdu2dfjwYY0bN46pQQBAkCPPtLZs2aKtW7equ7tbY8aM0ebNmyVJ8+fP15tvvqnCwkJ5vV5t3brVifIAAC7lSGjdcsst2r9//6DrlmXpsccec6AiADBTr23r/OVOXTM20+lSEoIdMQDAYD29ti52dDtdRsIQWgAAYzgyPei0qsYWVdY06VRbuyZleVVRlKfS/FynywIADCHlQquqsUXr9h9Ve1ePJKmlrV3r9h+VJIILAFwu5aYHK2uagoEV0N7Vo8qaJocqAgBEK+VC61Rbe0zXAcDN0ixL48akzqRZyoXWpCxvTNcBwM08aVbKtLtLKRhaFUV58mZ4rrjmzfCooijPoYoAANFKnTHlFwLNFnQPAoB5Ui60pL7gIqQAwDwpNz0IADAXoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwRlLsiNHS0qKlS5c6XQYAjJrx48frmWeeierzUoll27btdBEAAESD6UEAgDEILQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxCK0offjhh7r//vu1ZMkSLV26VEeOHJEk2batxx9/XIWFhSopKdGxY8ccq/FXv/qV7r77bhUXF2vnzp3B67t371ZhYaGKior01ltvOVZfwJ49e5SXl6fPPvtMkrvewx07dujuu+9WSUmJvve97+nChQvBj7nlfayvr1dRUZEKCwv19NNPO1ZHf59++qm+853vaPHixSouLtYvf/lLSVJbW5vKysq0cOFClZWV6fz5847W2dPTo9LSUj344IOSpBMnTmj58uUqLCxUeXm5Ojs7Ha0PUbARlbKyMvvgwYO2bdv2wYMH7QceeCD465UrV9q9vb12Y2Ojfd999zlS39tvv23/wz/8g93R0WHbtm2fOXPGtm3b/uijj+ySkhK7o6PDPn78uH3XXXfZ3d3djtRo27Z96tQp+x//8R/tv/3bv7XPnj1r27Z73kPbtu233nrL7urqsm3btnfu3Gnv3LnTtm33vI/d3d32XXfdZR8/ftzu6OiwS0pK7I8++ijhdQzk9/vt//7v/7Zt27YvXrxoL1y40P7oo4/sHTt22Lt377Zt27Z3794dfD+dsmfPHvvhhx+2V61aZdu2ba9Zs8b+z//8T9u2bfsHP/iB/Zvf/MbJ8hAFRlpRsixLly5dkiRdvHhR2dnZkqTa2lqVlpbKsizNnTtXFy5c0OnTpxNe37PPPqtVq1YpMzNTknTttdcG6ysuLlZmZqYmT56sKVOmBEeJTti2bZsqKipkWVbwmlveQ0n65je/qfT0vo1i5s6dq9bW1mCNbngfjxw5oilTpmjy5MnKzMxUcXGxamtrE17HQNnZ2Zo5c6Yk6aqrrtLUqVPl9/uDf7eSVFpaqgMHDjhWY2trqw4ePKj77rtPUt8Iv6GhQUVFRZKke++91xXvJSIjtKK0fv167dy5U/Pnz9eOHTv08MMPS5L8fr9ycnKCn5eTkyO/35/w+pqbm/Xee+9p+fLleuCBB4I31IH1+Xw+R+qTpAMHDig7O1szZsy44rpb3sOBXnjhBc2bN0+Se95Ht9QRycmTJ/Xhhx9qzpw5Onv2bPAHvIkTJ+rs2bOO1bV161ZVVFQoLa3vtnfu3DldffXVwR9S3PLvDpElxd6Do2XFihU6c+bMoOvl5eVqaGjQunXrVFRUpFdffVUbNmzQ3r17XVNfT0+Pzp8/r9/+9rc6evSoysvLHfmpMVKNu3fv1p49exJe00CRalywYIEkadeuXfJ4PLrnnnsSXZ7RLl26pDVr1mj9+vW66qqrrviYZVlXjLAT6Y033tCECRP
09a9/Xe+8844jNWB0EFr9RAqh73//+9qwYYMkadGiRXr00Ucl9f2kG5hCkvqmIHw+X8Lre/bZZ1VYWCjLsjR79mylpaXp3Llzg+rz+/1xqy9SjU1NTTp58qSWLFkiqe99Wrp0qZ5//vmEvoeRagzYv3+/Dh48qL179wZvsol+H8NxSx2hdHV1ac2aNSopKdHChQsl9U1Tnz59WtnZ2Tp9+rQmTJjgSG2///3vVVdXp/r6enV0dOjzzz/XE088oQsXLqi7u1vp6elx/3eH0cH0YJSys7P17rvvSpIaGhp0ww03SJIKCgpUVVUl27Z1+PBhjRs3LjgdkkgLFiwI/gT5ySefqKurS+PHj1dBQYGqq6vV2dmpEydOqLm5WbNnz054fXl5eXr77bdVV1enuro65eTkaP/+/Zo4caJr3kOprzPv5z//uXbt2iWv1xu87pb3cdasWWpubtaJEyfU2dmp6upqFRQUJLyOgWzb1oYNGzR16lSVlZUFrwf+biWpqqpKd911lyP1PfLII6qvr1ddXZ1+/OMf67bbbtOTTz6pW2+9VTU1NZKkF1980RXvJSJjpBWlLVu2aOvWreru7taYMWO0efNmSdL8+fP15ptvqrCwUF6vV1u3bnWkvmXLlmn9+vX61re+pYyMDG3fvl2WZWnatGlatGiRFi9eLI/Ho40bN8rj8ThSYzhueQ+lvr/nzs7O4I13zpw52rx5s2vex/T0dG3cuFHf/e531dPTo2XLlmnatGkJr2Og999/Xy+99JKmT58eHE0//PDDWrVqlcrLy7Vv3z5NmjRJTz31lMOVXqmiokIPPfSQnnrqKX3ta1/T8uXLnS4JQ+BoEgCAMZgeBAAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AJi8Lvf/U7FxcWaMWOGjh496nQ5QMohtIAYTJ8+XT/96U/113/9106XAqQkFhcDIZw8eVL/9E//pJtvvlmNjY3y+Xz62c9+pptuusnp0oCUxkgLCOPPf/6zvv3tb6u6ulrjxo0LbvcDwDmEFhDGV7/6VX3ta1+TJM2cOVMtLS0OVwSA0ALCCByoKUkej0c9PT0OVgNAIrQAAAYhtIAYvP7665o3b54aGxv14IMPauXKlU6XBKQUdnkHABiDkRYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBj/H6mi1Vp2nJmIAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "dqNjNcTNl5_7", - "colab_type": "text" - }, - "source": [ - "We see that most of the data is contained in the gaussian-ish blob centered a bit below zero. We see that there are a few clearly active datapoints located in the bottom left, and one on the top right. These are all distinguished from the majority of the data. How do we handle the data in the blob? \n", - "\n", - "Because n1 and n2 represent the same measurement, ideally they would be of the same value. This plot should be tightly aligned to the diagonal, and the pearson correlation coefficient should be 1. We see this is not the case. This helps gives us an idea of the error of our assay.\n", - "\n", - "Let's look at the error more closely, plotting in the distribution of (n1-n2)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "guGcilXIl5_9", - "colab_type": "code", - "outputId": "3c8985b4-af4d-475f-bd0a-d5aa77f595f1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "diff_df = df['n1'] - df['n2']\n", - "\n", - "sns.distplot(diff_df)\n", - "plt.xlabel('difference in n')\n", - "plt.ylabel('probability')" - ], - "execution_count": 32, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "Text(0, 0.5, 'probability')" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 32 - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEGCAYAAAB/+QKOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3dfXyT9b3/8VfSNOltegdNSilVoIDSCkynnXNUi6FKB6VQ5n5n6mAydeqEqeg5c+Mce7zD0x0EtzkQcXM35xxRkEnUDluw6tChouX+Rqi9D7RN2/QuaZLr90dpRmkpbWmatP08Hw8ejyb55sonpO271/fuUimKoiCEEEKcR+3rAoQQQvgnCQghhBA9koAQQgjRIwkIIYQQPZKAEEII0SONrwsYLNdddx3x8fG+LkMIIYaViooKPvnkkx4fGzEBER8fz9atW31dhhBCDCuLFi264GPSxSSEEKJHEhBCCCF6JAEhhBCiRxIQQggheiQBIYQQokcSEEIIIXokASGEEKJHEhBCCCF6JAEhhBCiRyNmJbUQDS0ObHZnt/vDdRoiQrQ+qEiI4U0CQowYNruTomM13e6fPWWMBIQQAyBdTEIIIXokASGEEKJHEhBCCCF6JAEhhBCiRxIQQggheiQBIYQQokcSEEIIIXrk1YAoKioiIyMDk8nExo0buz3ucDhYuXIlJpOJJUuWUF5eDsBf//pXsrKyPP+mTZvG4cOHvVmqEEKI83gtIFwuF7m5uWzatAmz2cyOHTs4ceJElzZbtmxBr9ezc+dOli5dSl5eHgALFixg+/btbN++neeee47x48dzxRVXeKtUIYQQPfBaQBQXF5OYmEhCQgJarZbMzEwKCgq6tCksLCQ7OxuAjIwM9uzZg6IoXdqYzWYyMzO9VaYQQogL8FpAWCwWjEaj57bBYMBisXRrExcXB4BGoyE8PByr1dqlzdtvvy0BIYQQPuDXezF9+eWXBAcHM2XKFF+XInxENuATwne8FhAGg4Hq6mrPbYvFgsFg6NamqqoKo9GI0+nEZrMRFRXleVy6l4RswCeE73itiyklJYWSkhLKyspwOByYzWbS09O7tElPT2fbtm0A5Ofnk5qaikqlAsDtdvPOO+9IQAghhI947QxCo9GwevVqli9fjsvlYvHixSQlJbFu3TqSk5OZM2cOOTk5rFq1CpPJREREBGvXrvU8f+/evcTFxZGQkOCtEoUQQvTCq2MQaWlppKWldblvxYoVnq91Oh3r16/v8bnXXXcdr732mjfLE0II0QtZSS2EEKJHEhBCCCF65NfTXIW4kMbWdh5453NOnG7C4XSTZAjj0Vum+rosIUYUCQgx7FibHfzkT59T1dDG7CljCVBD/kELWo2ab00c4+vyhBgxJCDEsFLbZGdj0UkU4I93Xct1E2MAWPPuEV7c/RW6gAC+kRjV+0GEEH0iYxBiWCk8cpo2p4vf/uAbnnAAeNg0hW9MiGT7lxXU2Ow+rFCIkUMCQgwbja3tFJc3cE1iNBPHhnZ5TBOgZvV3r8TpUviivN5HFQoxskhAiGFjz8la3IrCtyf3PM4wJlxHQnQIxyy2Ia5MiJFJAkIMC3ani09O1TJ9nJ7o0AvvwTTVGE65tRVbW/sQVifEyCQBIYaFz7620tbu5oaksb22m2oIB+C4pWkoyhJiRJOAEMPCvtJ6xkcFMyE6pNd2cRFBhAdpOCrdTEJcMgkI4fccTjdVDa1Mjg27aFuVSsVUQzjHT9twuZWLthdCXJisgxB+r6K+FbcCCVH/PHtwutyUW1u6tLO3uwCYYgjn06+tlNa1cPmYrrOdhBB9JwEh/F5nECSc073U2u5m31d1XdrNmhAJwOTYMNQqOFptk4AQ4hJIF5Pwe6V1LUSFBBKm69vfM0GBASREh3CqRgaqhbgUEhDC75XVtXQ5e+iLuIggTtvsKIqMQwgxUBIQwq+dbmyjsc150dlL5zPog7A73TS0yno
IIQZKAkL4tUNVjUDXAeq+iA0PAsDS2DboNQkxWkhACL92sLKRALWKuIigfj3PoNcBYGmUjfuEGCivBkRRUREZGRmYTCY2btzY7XGHw8HKlSsxmUwsWbKE8vJyz2NHjhzhtttuIzMzk/nz52O3yw/6aHSospFxEUFoAvr3rRqi1aAP0sgZhBCXwGsB4XK5yM3NZdOmTZjNZnbs2MGJEye6tNmyZQt6vZ6dO3eydOlS8vLyAHA6naxatYonnngCs9nMq6++ikYjM3JHm3aXmyPVtn4PUHcy6IOw2CQghBgorwVEcXExiYmJJCQkoNVqyczMpKCgoEubwsJCsrOzAcjIyGDPnj0oisJHH33E1KlTmTZtGgBRUVEEBAR4q1Thp06eacbudDM+KnhAz48N13G60S4rqoUYIK8FhMViwWg0em4bDAYsFku3NnFxcQBoNBrCw8OxWq2cOnUKlUrFXXfdRXZ2Ni+99JK3yhR+rKS2GYAxYboBPd+gD8LpVqhqaB3MsoQYNfyy38blcvHZZ5/x+uuvExwczNKlS0lOTuZb3/qWr0sTQ6isrmMFdW/be/fGoO8Y2D55pplUuVa1EP3mtTMIg8FAdXW157bFYsFgMHRrU1VVBXSMO9hsNqKiojAajXzzm98kOjqa4OBgZs+ezcGDB71VqvBTX9e2EKbTEBw4sO7F2LMzmU7WNA9mWUKMGl4LiJSUFEpKSigrK8PhcGA2m0lPT+/SJj09nW3btgGQn59PamoqKpWKG264gWPHjtHa2orT6WTv3r1MnjzZW6UKP1Va18K4yCBUKtWAnq/TBBAVEsipMxIQQgyE17qYNBoNq1evZvny5bhcLhYvXkxSUhLr1q0jOTmZOXPmkJOTw6pVqzCZTERERLB27VoAIiIiWLp0KTk5OahUKmbPns2NN97orVKFnyqra2FCzMBmMHUy6IM4JWcQQgyIV8cg0tLSSEtL63LfihUrPF/rdDrWr1/f43OzsrLIysryZnnCj7ncCmXWFlInxVzScQz6ID48UYPD6UarkXWhQvSH/MQIv1Td2Ea7SyE+cmBTXDvFhutwuRVK61ou3lgI0YUEhPBLX5+d4jousn9bbJyvcwZUmVUCQoj+koAQfqlziuulnkFEhXQERLmcQQjRbxIQwi99XduCRq3yTFUdqLAgDdoAtXQxCTEAEhDCL5XWtRAfFYxGfWnfompVx06wZXWymlqI/pKAEH6ptK6l3xcJupC4yCAZgxBiACQghF8a1ICICPaMaQgh+k4CQvidhtZ26lvaBy0gxkUG0djmpKFFLj8qRH9IQAi/0/nX/qAFRETHTCjpZhKifyQghN/pnHF0qdtsdIo7u5ZCupmE6B+/3O5bjE4NLQ5sdifF5fUAaNQq7O2uSz5ubHjHVNn9FQ2kjI8AIFynISJkYNuICzFayBmE8Bs2u5OiYzV8XlpPcGAAn31dj8N16VeDC1Crzx7PStGxGoqO1WCzOwehYiFGNgkI4Xdsre1EBAcO6jGjQgOxtjgG9ZhCjHQSEMLvNLY50QcPbu9nVIiWumaZxSREf0hACL/T2NpOeNDgnkFEh2qxtjhwK5feZSXEaCEBIfyKy63QZHeiH+SAiArR4nIr2Npk7EGIvpKAEH6lye5EgUHvYurc9tvaLOMQQvSVBITwK42tHeMEEYPdxXR2SmudDFQL0WcSEMKvNJwNCP0gz2KKDAlEhZxBCNEfXg2IoqIiMjIyMJlMbNy4sdvjDoeDlStXYjKZWLJkCeXl5QCUl5dz1VVXea5LvXr1am+WKfxIY1tHQIQHDW4XkyZATViQhnrZj0mIPvPaSmqXy0Vubi6vvPIKBoOBnJwc0tPTmTx5sqfNli1b0Ov17Ny5E7PZTF5eHs8//zwAEyZMYPv27d4qT/ipxlYnASoVobrB/9aMDA7E2ipnEEL0ldfOIIqLi0lMTCQhIQGtVktmZiYFBQVd2hQWFpKdnQ1ARkYGe/bsQZFpiKOara2d8CA
NapVq0I8dGaKVHV2F6AevBYTFYsFoNHpuGwwGLBZLtzZxcXEAaDQawsPDsVqtQEc308KFC7n99tv59NNPvVWm8DMNbe2DPv7QKSokkPrWdlkLIUQf+eVmfbGxsezatYuoqCgOHDjA/fffj9lsJiwszNelCS9rbHViuMTrUF9IxNm1EE2yD5MQfeK1MwiDwUB1dbXntsViwWAwdGtTVVUFgNPpxGazERUVhVarJSoqCoDk5GQmTJjAqVOnvFWq8CON3jyDOHtcGagWom+8FhApKSmUlJRQVlaGw+HAbDaTnp7epU16ejrbtm0DID8/n9TUVFQqFXV1dbhcHds8l5WVUVJSQkJCgrdKFX6ixe7E4XQP+hqITpFn10LUy1oIIfrEa11MGo2G1atXs3z5clwuF4sXLyYpKYl169aRnJzMnDlzyMnJYdWqVZhMJiIiIli7di0Ae/fuZf369Wg0GtRqNU888QSRkZHeKlX4iTNNdmDwV1F3igyRMwgh+sOrYxBpaWmkpaV1uW/FihWer3U6HevXr+/2vIyMDDIyMrxZmvBDZ2xnA8JLZxBBgQEEBaqpl6muQvRJn7qYHnjgAXbv3o3b7fZ2PWIUO9PU8YvbWwEBEBmslTMIIfqoTwHxL//yL7z11lvMnTuXvLw8Tp486e26xChU03kG4aVBaujoZpKAEKJv+tTFdP3113P99ddjs9nYsWMHy5YtIy4ujiVLlrBgwQICA733Ay1Gj5omO0GBarQa7+0AExkSSElts9eOL8RI0uefRKvVytatW9myZQtXXHEFd955J4cOHeJHP/qRN+sTo8iZJrtXu5ego4uprd0tayGE6IM+nUHcf//9nDp1iqysLH73u98RGxsLwLx581i0aJFXCxSjxxnbEATE2ZlMlsY2phn1Xn0tIYa7PgXE9773vW6zkRwOB1qtlq1bt3qlMDH61NgcJEQHe/U1OtdCVDe0efV1hBgJ+tTF1LnD6rluu+22QS9GjF4ut0Jds2PIziCqGyUghLiYXs8gzpw5g8Vioa2tjUOHDnl2Wm1qaqK1tXVIChSjQ22THZeieHUGE0CYTkOAWoVFziCEuKheA+LDDz9k69atVFdX88wzz3juDw0N5aGHHvJ6cWL06PyL3ttnEGqVisjgQCyNdq++jhAjQa8BkZ2dTXZ2Nvn5+bKyWXhV55iAt7bZOFdESKB0MQnRB73+NG7fvp2srCwqKip45ZVXuj2+bNkyrxUmRheLl7fZOFdUsJaSOlkLIcTF9BoQneMMLS0tQ1KMGL0sDW0EqFSEDfK1qHsSGRJIbakDu9OFThPg9dcTYrjq9afx+9//PtCxF5MQ3lTd2EZ0qNYrlxo9n2cmU0MbiTGhXn89IYarXgPiySef7PXJv/jFLwa1GDF6WRrbGBOuHZLX6lwLUWFtlYAQohe9BsT06dOHqg4xylU3tGGMCBqS14o8O5W2vF6magvRm4vOYhJiKFga20gZHzEkrxURHIgKqJSAEKJXvQbEU089xeOPP869997b4+O/+93vvFKUGF1aHS4a25yMDdMNyetpAtREh2mpsEpACNGbXgMiKysLQHZsFV7VuSZhTPjQBASAUR9EhZxBCNGrXvdiSk5OBuDaa69l5syZ6PV6IiIimDlzJtdee+1FD15UVERGRgYmk4mNGzd2e9zhcLBy5UpMJhNLliyhvLy8y+OVlZXMmjWLl19+uT/vSQwznYvkxoQNzSA1dASEdDEJ0bs+bda3e/duTCYTTz31FP/5n//J3Llzef/993t9jsvlIjc3l02bNmE2m9mxYwcnTpzo0mbLli3o9Xp27tzJ0qVLycvL6/L4s88+y3e+851+viUx3Jy2dQTEUHUxARj0Oirr23C7lSF7TSGGmz6tSnr22Wd59dVXSUxMBKC0tJS777672xbg5youLiYxMZGEhAQAMjMzKSgoYPLkyZ42hYWFnjUWGRkZ5ObmoigKKpWK9957j/j4eEJCQgb85sTw0Hk
GMTZcR2nd0PxVb4gIwuFyU9NkJ1Y/NLOnhBhu+nQGERoa6gkHgISEBEJDe58/brFYMBqNntsGgwGLxdKtTVxcHAAajYbw8HCsVivNzc289NJLskBvlKhubCNUG0CozvurqDsZz4aCTHUV4sJ6/Yn829/+BnSMRfz4xz/m1ltvRaVS8e6775KSkuK1on7961/zwx/+8KIhJEYGS2MbhiH+K75zzUVlfSvfmBA1pK8txHDRa0Ds2rXL8/WYMWPYu3cvANHR0djtvW+XbDAYqK6u9ty2WCwYDIZubaqqqjAajTidTmw2G1FRUXz55Zfk5+eTl5dHY2MjarUanU7H7bff3u83KPyfpdE+5AHR+Xoy1VWIC+s1IM69BkR/paSkUFJSQllZGQaDAbPZzK9+9asubdLT09m2bRuzZs0iPz+f1NRUVCoVf/nLXzxtXnjhBUJCQiQcRrDqhjauvTx6SF8zTKchPEgjU12F6EWfOn3tdjuvv/46x48f73Lm0FuAaDQaVq9ezfLly3G5XCxevJikpCTWrVtHcnIyc+bMIScnh1WrVmEymYiIiGDt2rWX/o7EsOJ2K5y2DX0XE0B8ZLBMdRWiF30KiFWrVjFx4kQ+/PBD7r//ft566y0mTpx40eelpaV1m+m0YsUKz9c6nY7169f3eoyf/vSnfSlRDFN1LQ7aXQpG/dBNce0UHxlMuXQxCXFBfZrFVFpaysqVKwkODiY7O5sNGzZQXFzs7drEKNA5xdUnZxBRwdLFJEQv+hQQGk3HiYZer+fYsWPYbDZqa2u9WpgYHToXyRmGaCfXc8VHBmNrc9LY1j7kry3EcNCnLqbbbruNhoYGVqxYwU9+8hNaWlq6dBUJMVCV9R0BERcRhGuIVzXHRwWfraEVvdH7lzoVYrjpU0AsWbIE6NiTqaCgwKsFidGlsr4VjVpFbHgQVQ1D290zLrIjIMrrWplm1A/pawsxHPQpIKxWK7/+9a/5/PPPUalUXH311dx3331ERckCI3FpKutbMUYEEaD2/qVGz5cY3bGNS2mdXHNdiJ70aQzioYceIjo6mvXr17Nu3TqioqL42c9+5u3axChQWd/m+Ut+qEWHagnXafi6ttknry+Ev+tTQJw5c4b777+fhIQEEhISuO+++2SQWgyKivpW4n0UECqVisQxIZTUyhmEED3pU0B8+9vfxmw243a7cbvdvP3229xwww3erk2McE6Xm+rGNp8FBEBiTKicQQhxAb2OQcyaNQuVSoWiKPzhD39g1apVALjdbkJCQnjssceGpEgxMp222XG5FZ91MQFcFhNC/oFq2l1uAgP69PeSEKNGrwGxb9++oapDjEKd21yMi/Td9RgSY0JxuhUq61tJjJHdg4U4V5834C8oKODTTz8FOqa73nTTTV4rSowOnauYfdnFdNnZUCipbZGAEOI8fTqnzsvL49VXX2XSpElMmjSJV199tdvOrEL0V4XnDMK3XUyAjEMI0YM+nUG8//77bN++HbW6I0+ys7NZuHAhDz/8sFeLEyNbZX0rkSGBQ3olufONDdcRHBhASY3MZBLifH0elWtsbPR8bbPZvFKMGF0q69sYF+G7swc4O9U1JkTOIIToQZ/+dLv33nvJzs7muuuuQ1EU9u7dyyOPPOLt2sQIV2FtJeHsamZfSowJ4cTpJl+XIYTfuWhAuN1uVCoV//d//8f+/fsBeOSRRxg7dqzXixMjW2V9K6kTh/ZKcj25LCaUXUfO4HIrPtnyQwh/ddGAUKvVbNq0iXnz5jFnzpyhqEmMAo1t7djsTs+Oqr6UGBOKww8W7Qnhb/o0BnH99dfz8ssvU1VVRX19veefEANV6QczmDp5ZjLVyDiEEOfq0xjE22+/jUql4i9/+UuX+2XrbzFQ/hQQiWP+uRbi+sk+LkYIP9KnM4i3336bH/zgB0ybNo0rrriCO+64A7PZfNHnFRUVkZGRgclkYuPGjd0edzgcrFy5EpPJxJIlSygvLweguLiYrKw
ssrKyWLBgATt37uzn2xL+rsLq+0VyneL0QWg1apnJJMR5+hQQjz32GF999RV33HEHt99+OydOnLjoPkwul4vc3Fw2bdqE2Wxmx44dnDhxokubLVu2oNfr2blzJ0uXLiUvLw+ApKQk3njjDbZv386mTZtYvXo1TqdzgG9R+KOK+jYCA1SMDdP5uhTUahUTokM4JV1MQnTRpy6m48eP8/bbb3tup6amMm/evF6fU1xcTGJiIgkJCQBkZmZSUFDA5Mn/PIcvLCzkgQceACAjI4Pc3FwURSE4+J9/VdrtdlQqmVky0lTWtxIXEYzaT2YNTRobynGLTHUV4lx9OoO48sor+eKLLzy3v/zyS5KTk3t9jsViwWg0em4bDAYsFku3NnFxcQBoNBrCw8OxWq2e18jMzGTBggU88cQTaDS+W20rBldDi4OvzjQxJkxLubXF88/e7hqyGpwud5fXjosIoqS2mdONQ3vZUyH8WZ9+6x48eJDvf//7jBs3DoDKykouv/xy5s+fD8Bbb7016IXNmDEDs9nMV199xWOPPcbs2bPR6XzfHSEunc3u5OSZZlLiIyg6VuO5f9aEyCGrobXdzb6v6jy329rduBU4WNlIrN734yJC+IM+BcSmTZv6fWCDwUB1dbXntsViwWAwdGtTVVWF0WjE6XRis9m6Xed60qRJhISEcOzYMVJSUvpdh/A/9S0OWttdjAn3n8A36Du2HD9V04zsUyxEhz4FRHx8fL8PnJKSQklJCWVlZRgMBsxmc7cdYNPT09m2bRuzZs0iPz+f1NRUVCoVZWVlxMXFodFoqKio4OTJkwOqQfinsrqObpwxYVofV/JPMWFa1Co4KQPVQnh4rWNfo9GwevVqli9fjsvlYvHixSQlJbFu3TqSk5OZM2cOOTk5rFq1CpPJREREBGvXrgXgs88+46WXXkKj0aBWq/mP//gPoqN9vyWDGByldR07p/rDDKZOGrWaMWE6mckkxDm8OvKblpZGWlpal/tWrFjh+Vqn07F+/fpuz1u4cCELFy70ZmnCh8qsLQSoVESG+M8ZBHR0M508IwEhRCe5CK8YcqV1LUSHav1uYzyDXkdVQxstDllzIwRIQAgfKKtr9asB6k6x4R0D1bIeQogOEhBiSLncChXWVr8aoO5kPDuT6ZhFLoglBEhAiCFWYW3F4XL71QB1p+gwLdoAtQSEEGdJQIghdbKmo/smxg8DQn328qPHpItJCEACQgyxzllCY/1wDALg8jGhcgYhxFkSEGJInappJkynIVQb4OtSenT52FCqGtpoaG33dSlC+JwEhBhSJ2uaSIgO9tsdeiePDQPgcFWjjysRwvckIMSQOnWmmQnRIb4u44KmGsMBOFDR4ONKhPA9CQgxZFocTiob2kjw44CIDtVi1AexXwJCCAkIMXROnO6YHXRZjP8GBEByfIQEhBBIQIgh1PlLd4oh3MeVXJjT5SYxJphTZ5o5Vt1IubWFhhaHr8sSwickIMSQOVDRQERwIHERQb4u5YJa2920uxQUYMtnFRQdq8Fml72ZxOgkASGGTHF5AynxEX47g6lTfGTHFeUq6+Xyo2J0k4AQQ8LudHHMYiM5PsLXpVxUeFAg+iANFRIQYpSTgBBD4mi1jXaXQsowCAiAcZHBEhBi1JOAEEOic4B6OAVEjc2O3enydSlC+IwEhBgSnQPUCdHBvi6lT+Ijg1GAqvo2X5cihM9IQIghsb+igeR4vd8PUHfqHKiWbiYxmnk1IIqKisjIyMBkMrFx48ZujzscDlauXInJZGLJkiWUl5cD8NFHH7Fo0SLmz5/PokWL2LNnjzfLFF5md7o4Wj08Bqg76YNloFoIrwWEy+UiNzeXTZs2YTab2bFjBydOnOjSZsuWLej1enbu3MnSpUvJy8sDICoqihdffJG33nqLZ599lkcffdRbZYohcKy6aVgNUHcaHxVCWV2Lr8sQwme8FhDFxcUkJiaSkJCAVqslMzOTgoKCLm0KCwvJzs4
GICMjgz179qAoCldeeSUGgwGApKQk7HY7DoesZh2uhtsAdaeE6BBqmx3Uy0pqMUp5LSAsFgtGo9Fz22AwYLFYurWJi4sDQKPREB4ejtVq7dImPz+fK6+8Eq3W/65hLPpmX6mVyJBAv97FtScJUR3jEIer5AJCYnTS+LqA3hw/fpy8vDw2b97s61LEJdhzspbrLo8eNgPUneKjglEBhyrl2hBidPLaGYTBYKC6utpz22KxeLqNzm1TVVUFgNPpxGazERUVBUB1dTUPPPAAa9asYcKECd4qU3hZWV0L5dZWvjUxxtel9JtOE4BBH8RBuXiQGKW8FhApKSmUlJRQVlaGw+HAbDaTnp7epU16ejrbtm0DOrqSUlNTUalUNDY2cvfdd/Pwww9z9dVXe6tEMQT2nKwF4FuTxvi4koEZHxXM4apGFEXxdSlCDDmvBYRGo2H16tUsX76cefPmceutt5KUlMS6des8g9U5OTnU19djMpl45ZVXeOSRRwD405/+RGlpKb/5zW/IysoiKyuL2tpab5UqvOjjr2qJDtUyxRDm61IGJCE6BFubk1M1zb4uRYgh59UxiLS0NNLS0rrct2LFCs/XOp2O9evXd3vefffdx3333efN0sQQUBSFj0/Wkjpx+I0/dEqI6hhY/6Ksnoljh2fICTFQspJaeE1pXQuVDW3DcvyhU6xeR7A2gC/K6n1dihBDTgJCeM2erzrHH4ZvQKhVKqYZwyUgxKgkASG8Zs/JWsaG65g0zLtmrozTc7iqkbZ22dlVjC4SEMIrFEVhz1e1pE6MGbbjD52uHKen3aVwUNZDiFFGAkJ4xcHKRk7b7MxOGp7TW881PU4PIN1MYtSRgBBeUXjkNCoV3Dg11telXLIx4TriIoIkIMSo49dbbYjho6HFgc3u9Nx+50AVVxj1aAOGd/dSp5kJkXxRZr14QyFGEAkIMShsdidFx2o6vm5r53CVDdOVBmx2JxEhw3+jxZkJkbxzoJqaJjtjwnS+LkeIISFdTGLQHbN07H46zRju40oGz8yESAC+KJVuJjF6SECIQXek2kZEcCBGfZCvSxk0KeMjCFCrZBxCjCrSxSQGldPl5vjpJmYmRKJSqXC63JRbu4quicsAABclSURBVF6VzT7M1hM4XW7qmh1MHBPKxydrPe8nXKcZEd1nQlyIBIQYVCdrmnE43Z7updZ2N/u+quvSZtaESF+UNmCd7yEqRMuX5fXsPnoGtUrF7CljJCDEiCZdTGJQ7S9vQKdRD/vV0z1JiA7G7nRTY7P7uhQhhoQEhBg07S43ByobmD4ugsCAkfet1bmza2ldy0VaCjEyjLyfYuEzR6tt2J1uZiRE+LoUrxgbriNEG8DXtRIQYnSQgBCD5svyesJ0GiaOGXndSwAqlYrE6BBKauXiQWJ0kIAQg6LZ7uRotY2U+I7poCNVYkwotc0ObG3tvi5FCK+TgBCDoujYGZxuhRkJw2uGUn9dFtMxDiHdTGI0kIAQg8K8v5qokEASooJ9XYpXjYsKRqNW8bV0M4lRwKsBUVRUREZGBiaTiY0bN3Z73OFwsHLlSkwmE0uWLKG8vBwAq9XKHXfcwaxZs8jNzfVmiWIQfPa1lS/K6vnWCLj2w8Vo1GoSokP4WmYyiVHAawHhcrnIzc1l06ZNmM1mduzYwYkTJ7q02bJlC3q9np07d7J06VLy8vIA0Ol0rFixgkcffdRb5YlB9NtdJ4gIDuTay4fvpUX7IzEmhMr6Vlodw2tFuBD95bWAKC4uJjExkYSEBLRaLZmZmRQUFHRpU1hYSHZ2NgAZGRns2bMHRVEICQnhmmuuQaeTXTP93aHKRgqOnGbJNePRakZHj+VlMaG4FThUJVeYEyOb136iLRYLRqPRc9tgMGCxWLq1iYuLA0Cj0RAeHo7VKnvuDye/3X2CMJ2GRd+I93UpQ2ZCdAgqoLhcNu4TI5vsxSQu6PyLAHXq3KTuH6fqeHt/FfekTUIfFOiDCn0jKDAAY0Q
Qn30tASFGNq8FhMFgoLq62nPbYrFgMBi6tamqqsJoNOJ0OrHZbERFRXmrJNFP514E6Fyzp4yh2eHivj9/xmUxofzkxkk0to6udQFTDOF8eLyGhtZ2IoJHTziK0cVrXUwpKSmUlJRQVlaGw+HAbDaTnp7epU16ejrbtm0DID8/n9TU1BE/C2YksLe7uOePn9HW7mbjndeMqrOHTlcYw3EpCkXHzvi6FCG8xmtnEBqNhtWrV7N8+XJcLheLFy8mKSmJdevWkZyczJw5c8jJyWHVqlWYTCYiIiJYu3at5/np6ek0NTXR3t7Oe++9x+bNm5k8ebK3yhV91Opw8a9b97O/ooGX7ryGybEjc1uNixkfHUJkcCCFR04zf8Y4X5cjhFd4dQwiLS2NtLS0LvetWLHC87VOp2P9+vU9PrewsNCbpYkBqG2y84c9X1Pf4uC5nKswXWm4+JNGKLVKReqkGHYdPY3LrYzo7UXE6DU65iWKS3bGZud3739Fi8PJ2ttm8L1rEnxdks9dPymG+pZ29pXKzDsxMklAiIuqb3Gw+aNToFJxz+xJzJogEwkArr08Go1aRcGR074uRQivkIAQvWqxO9n8UQlt7S6WXX8ZY8Nl8WKnMJ2Gay+PpvCwBIQYmSQgRK/ePlBFXbOdO791GeMiR/ZGfAMx5woDRy02DsuqajECSUCIC/q81MrnpfV8J2ksl48J9XU5fmnxN+IJ0Qawseikr0sRYtBJQIge2Z0u8vKPERUSyE1TY7s85nS5Kbe2dPlnbx+dG9dFhmj5f9dO4K9fVlJulR1excgiASF6tPH9k5TWtbBgRny3Tfha290UHavp8s/hUnxUqe/ddcPlqICXPzzl61KEGFQSEKKbmiY7L77/FbOnjGGqMdzX5fi9cZHBZM2M53//UYa12eHrcoQYNBIQopvf7DpBW7uLe2ZP9HUpw8a9aRNpbXfxxFsHKatr7tYF19AiwSGGH9nNVXRRbm3hzx+XknP1eBJjQvm6ttXXJQ0LSYZwHrhpMr/edYIWh4sbzxu3mT1lDBEhWh9VJ8TASECILta9dxyAFTdPQVFG77jCQDw8dwpHLTb+dshCRHCgLCgUw550MQmPw1WNvPF5ObenJhIvax76TaVS8W+3TuPyMaFs+aycrZ+Xy2VJxbAmZxAjwMUu7NMXiqLwyzcPEBEcyE/TZdfcgdJq1Cy9/jIKj5zmg+NnOGqxcfM0A9dPiu7zMXr6PPvzWQoxWCQgRoDeLuzT118qb3xewadfW1mzOIWoUPlFdCkCA9RkTDeSPC6Cv35ZwbYvKvj4VC333TSZhTPHEd7L9TNaHE4+PlVL/gELKhVEhWgZG64j86o4CQgx5CQgBA0t7Tzz9mG+MSGSJVfLLq2DJT4qmHvTJnHMYuOTU3X88s0DPG0+zC3JRpLjI5gcG4bT5aamyc7R6ib+UVLLocpG3OcN/ahVUFzewL/eOo2E6BDfvBkxKklA+NildA/Z2to5Um3jk5O17Cutp8nuJCgwgDFhOuIigvo0yOxwuvnZa19gbXHwhx9di1qua9BnnSvKz3X+inKVSsVUo57l37mc2uZ2/vcfpfztkIVt+yq6tNNq1CSP0/PDb13GZWNCaGzt+J6wtrRzpLqRnYcs/O1QNT+5cTI/TZ9MYMCFhw8Ho8tReNdw+YwkIHzI6XLzeamVnYdO09DaTmu7i47f6QqTY8Mw6IPQBQYQHBhAgBoaW51YWxwctzRxsLKBktquv5yCAwNwON24zgbDjuJKbk9NJHtWPDFh3XdhbXe5+en/fE7hkdM8uTCZ5PiIIXjXI0dru5t9X9V1uW/WhMge26pUKmYmRDIzIZJnFinUNDn4+GQNhypthAVpCA/SoFGrPcfYV1oPQEyYjsmxYTwydwp//LiU9QXHef/YGdZ+bwYTx/Z8Nb/B6HIU3jVcPiMJiCF22tbGO/urKThyms9K6mg+b5aLClCAXUcvfK3jhOhgpsdFsPgb45keryciOJB
DlTYC1CpcboX6FgcnzzRz1GLjSfNh1rx7BNOVBjJTxpEcr2dsuI6iYzX86eOv+fBEDf8+/0puT0307hsXHiqVirHhOmZNiMLW1rdZTrH6INbeNpM5V8Ty+LYD3PL8B9z5rUQeSJ9MpB/9QhEjiwTEJerLjJPaJjvvHKjmzX0VfPa1FQVIjA7BNN3A9Dg9Da1OIoIDCdEGoFKpUBSFay+PwulWsDvd2Ns7zgrCdRoiQzSoVF27F+ztLs8lLwPUKmLCdMSE6Vhx82RK61oxF1fy7kELb++v7vK8yJBAnlgwnR9ef1mP72O0bsDnDX3pjurL82cmRPKHZd/kpQ9O8fKHp/jfvWVcnRhFcrwebUAAFlsbX9c2U1LTgq2tHU2AmqBANVEhWupa7GTNiJdxDB9zON3sPVVH4RELFdZWbHYngQFqdBo1J2uauDU5jhnjI9D00o04VLwaEEVFRTz11FO43W6WLFnC3Xff3eVxh8PBo48+ysGDB4mMjGTt2rWMHz8egA0bNvD666+jVqv5xS9+wXe+8x1vltonLrfC17XNHLPYsDTaOWOzU9Nkp6K+FRUq1CpQqeCyMaFEBmux2Nr4/GsrRy02FAUmRIdw07RYUuIjMOiDgK7dCZ1UKhVON+wrbehWQ0f7vnVrtLa7Kbe2MiMhiunxEVQ3tBEepKHF4SJ1YgzXXR7t+Sbs6ZT3QscV/def7qi+PP/bk8cwaWwYJ2ua+OpMMxveP4nTrRATqiUqVEuoLgCjPgiXotDqcFFubSUv/xh5+cdIiY8ga+Y4MqYbJSyGSLPdye6jZ8g/WM2uI6ex2Z2ogDFhOiJDAnG6FeqaHWz64BSbPjhFTKiWW1OMZKaM45rLonodc/ImrwWEy+UiNzeXV155BYPBQE5ODunp6Uye/M859lu2bEGv17Nz507MZjN5eXk8//zznDhxArPZjNlsxmKxsGzZMvLz8wkICPBWubjcCm3tLtraXbS2u2hsdVLV0Eq5tZWjFhuHKhs5Wm2j9Zy/+gLUKrQBapxuN26lYy2BooBytnsoXKdh5oRIbkk2MvdKI+FBAXxwvNZr76E3GrWa8VEhzJ4yhvFR8kthJDBGBPG9b45nfFQIdqcLFSq0GjXl1pZuYa8oCpePCeVgZSNvFVfypPkwT5oPM2lsKN+e3LEpY1JsOLHhHb+wQrQaAtSdf/TIxIW+cLrctLS7aGxtx9Jop7K+lYOVjeyvqGdviRWH00302V/8VydG0Wx3ERTY9XfaVeP1lNS28O6Bal7/rJw/fVxKuE7D9ZNjuGp8JEmxYUyICSEqREtEcCDaALVXJ5Z4LSCKi4tJTEwkIaFj2mRmZiYFBQVdAqKwsJAHHngAgIyMDHJzc1EUhYKCAjIzM9FqtSQkJJCYmEhxcTGzZs0a9DrrWxxkPF+EpdF+wTb6IA1XxOn5/rUJXBGnZ5oxnHGRwUSFaKlqaO32w/idpBjiI0NQnffDJdcLEN6i0/T+x5NKpWJCTAjXTx7Dj2dP5FRNM4VHTrP76Gm2fFre5Q+f7s+FAJUK9dnAUPcSGBebOKfQe4OLP3+gD3r3tRVF6TY9GSAwQMVUYzi3X5dIxnQD11wWTYBa1WOIQ8f1Rb4bH8l3rxpHs93JB8fP8P6xGj48cYb8g5YeX1ujVvHL717JD6+/rPc3MABeCwiLxYLRaPTcNhgMFBcXd2sTFxfXUYhGQ3h4OFarFYvFwowZM7o812Lp+T+nU0VFBYsWLRpQrfFn//WmvRi+pOPfxbzej9e+UNv+3D8Ybf2hBmk7OG0v5EJtr+jHMcQAHIYv6Ph3MRf6jMae/Xch24/A9n4X1qGiouKCj42YQepPPvnE1yUIIcSI4rWRD4PBQHX1P2fNWCwWDAZDtzZVVVUAOJ1ObDYbUVFRfXquEEII7/JaQKSkpFBSUkJZWRkOhwOz2Ux6enqXNunp6Wzbtg2A/Px8UlNTUalUpKenYzabcTgclJWVUVJSwlVXXeWtUoUQQvT
Aa11MGo2G1atXs3z5clwuF4sXLyYpKYl169aRnJzMnDlzyMnJYdWqVZhMJiIiIli7di0ASUlJ3HrrrcybN4+AgABWr17t1RlMQgghulMpclUYIYQQPfD9Uj0hhBB+SQJCCCFEjyQgztq8eTNTp06lrq5jOwNFUXjyyScxmUzMnz+fgwcP+rjC/luzZg233HIL8+fP5/7776exsdHz2IYNGzCZTGRkZPDBBx/4sMqBKSoqIiMjA5PJxMaNG31dzoBVVVVxxx13MG/ePDIzM/nDH/4AQH19PcuWLWPu3LksW7aMhobu264MFy6Xi4ULF3LPPfcAUFZWxpIlSzCZTKxcuRKHw+HjCvuvsbGRBx98kFtuuYVbb72Vffv2jajPzEMRSmVlpfKjH/1IufHGG5Xa2lpFURRl9+7dyl133aW43W5l3759Sk5Ojo+r7L8PPvhAaW9vVxRFUZ577jnlueeeUxRFUY4fP67Mnz9fsdvtSmlpqTJnzhzF6XT6stR+cTqdypw5c5TS0lLFbrcr8+fPV44fP+7rsgbEYrEoBw4cUBRFUWw2mzJ37lzl+PHjypo1a5QNGzYoiqIoGzZs8Hx2w9HmzZuVhx56SLn77rsVRVGUBx98UNmxY4eiKIryy1/+Uvnzn//sy/IG5NFHH1Vee+01RVEUxW63Kw0NDSPqM+skZxDAM888w6pVq7psi1FQUMDChQs79vGfOZPGxkZOnz7twyr774YbbkCj6ZioNnPmTM/akgttZTJcnLuNi1ar9WzjMhzFxsYyffp0AMLCwpg4cSIWi8Xz/QewcOFC3nvvPV+WOWDV1dXs3r2bnJwcoOPM/OOPPyYjIwOA7OzsYffZ2Ww29u7d63lPWq0WvV4/Yj6zc436gHjvvfeIjY1l2rRpXe4/f6sQo9F40e0+/Nkbb7zB7NmzgZ63QRlO7224138h5eXlHD58mBkzZlBbW0tsbCwAY8eOpbbWN5s8Xqqnn36aVatWoT57MSSr1Yper/f84TIcf67Ky8uJjo7m3/7t31i4cCGPP/44LS0tI+YzO9eI2WqjN0uXLqWmpvvGWCtXrmTDhg1s3rzZB1UNjt7e28033wzAiy++SEBAAAsWLBjq8kQfNTc38+CDD/Lzn/+csLCuV4pTqVTDckfVXbt2ER0dTXJy8ojaCsfpdHLo0CF++ctfMmPGDJ588slu42DD9TM736gIiN///vc93n/06FHKy8vJysoCOk6HFy1axJYtW7pt91FdXe2X231c6L112rp1K7t37+b3v/+95xt2uG9lMtzrP197ezsPPvgg8+fPZ+7cuQDExMRw+vRpYmNjOX36NNHR0T6usv8+//xzCgsLKSoqwm6309TUxFNPPUVjYyNOpxONRuO3P1e9MRqNGI1Gz4ait9xyCxs3bhwRn9n5RnUX09SpU9mzZw+FhYUUFhZiNBrZunUrY8eOJT09nTfffBNFUfjiiy8IDw/3nD4OF0VFRWzatIkXX3yR4OBgz/3DfSuTvmzjMlwoisLjjz/OxIkTWbZsmef+zu8/gDfffJM5c+b4qsQBe/jhhykqKqKwsJD//u//JjU1lV/96ldcd9115OfnA7Bt27Zh99mNHTsWo9HIyZMnAdizZw+TJk0aEZ/Z+WQl9TnS09N5/fXXiY6ORlEUcnNz+eCDDwgODubpp58mJSXF1yX2i8lkwuFwEBnZceWyGTNmkJubC3R0O73xxhsEBATw85//nLS0NF+W2m/vv/8+Tz/9tGcbl5/85Ce+LmlAPv30U37wgx8wZcoUTz/9Qw89xFVXXcXKlSupqqpi3LhxPP/8857PcTj65JNP2Lx5Mxs2bKCsrIyf/exnNDQ0cMUVV5CXl4dWO7yuq3348GEef/xx2tvbSUhI4JlnnsHtdo+ozwwkIIQQQlzAqO5iEkIIcWESEEIIIXokASGEEKJHEhBCCCF6JAEhhBCiRxIQYsR74YUXePnllwFYt24df//734GOKaaZmZlkZWX
R1tbGmjVryMzMZM2aNb4s94L279/Pk08+6esyxCgi01zFiPfCCy8QEhLCXXfd1eX+1atXc/XVV3tW0l999dX84x//6PPlbTtXAwsxUsl3txiRXnzxRd58802io6OJi4vz7Jj6r//6r9x4443YbDbeffddPvzwQ4qKimhubqalpYVFixZxzz33kJqayr//+79TWVkJwM9//nOuvvpqXnjhBUpLSykrK2PcuHH84he/uGC7yspKysvLqays5Ic//CF33nkn0LHK9uWXX0alUjF16lT+67/+i7q6uh6Pc65zF5v1dvxzzZo1izvvvJNdu3YRFBTEb3/7W8aMGeO1/3cxwvhqn3EhvGX//v3Kd7/7XaWlpUWx2WzKzTffrGzatElRFEV57LHHlHfeeafb14qiKDNnzvR8/dBDDyl79+5VFEVRKioqlFtuuUVRFEVZv369kp2drbS2tl603W233abY7XaltrZWufbaaxWHw6EcO3ZMmTt3rue6I1artdfjnOvjjz/2XFPhQsc/35QpU5SCggJFURRlzZo1ym9+85v+/4eKUUvOIMSI8+mnn3LzzTd79p8ayF4/f//73zlx4oTndlNTE83NzZ7jBQUFXbRdWloaWq2W6OhooqOjqa2t5eOPP+aWW27xbOTWuRXDhY4TGhp6wRp7Ov6526ADBAYGctNNNwGQnJzMRx991O//CzF6SUAI0QO3281rr72GTqfr9ti5Gx/21u7c/YUCAgJwOp0Der0L6cvxAwMDPbv4qtVqXC5Xn48vhMxiEiPON7/5Td577z3a2tpoampi165d/T7GDTfcwB//+EfP7cOHD19Su06pqam8++67WK1WoOPa0wM5jhBDQQJCjDjTp09n3rx5ZGVl8eMf/3hAu/A+/vjjHDhwgPnz5zNv3jz+53/+55LadUpKSuLee+/ljjvuYMGCBTz77LMDOo4QQ0GmuQohhOiRnEEIIYTokQSEEEKIHklACCGE6JEEhBBCiB5JQAghhOiRBIQQQogeSUAIIYTo0f8HlP4RyS644fMAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VTbA5r_Zl6AD", - "colab_type": "text" - }, - "source": [ - "This looks pretty gaussian, let's get the 95% confidence interval by fitting a gaussian via scipy, and taking 2*the standard deviation" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "j1h2EExUl6AF", - "colab_type": "code", - "colab": {} - }, - "source": [ - "from scipy import stats" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "J9xFhoRGl6AL", - "colab_type": "code", - "colab": {} - }, - "source": [ - "mean, std = stats.norm.fit(np.asarray(diff_df, dtype=np.float32))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "PcBDorCcl6AS", - "colab_type": "code", - "outputId": "3dad2352-5006-4258-fb6f-de2097813079", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "ci_95 = std*2\n", - "ci_95" - ], - "execution_count": 35, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "17.629011154174805" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 35 - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "N_6SzWXyl6Ak", - "colab_type": "text" - }, - "source": [ - "Now, I don't trust the data outside of the confidence interval, and will therefore drop these datapoints from df. \n", - "\n", - "For example, in the plot above, at least one datapoint has n1-n2 > 60. This is disconcerting." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "y5fC5Pu0l6Ao", - "colab_type": "code", - "colab": {} - }, - "source": [ - "noisy = diff_df[abs(diff_df) > ci_95]\n", - "df = df.drop(noisy.index)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "qR8D_BKel6Ay", - "colab_type": "code", - "outputId": "de0902ae-167b-4168-a566-fb3b26d7e2fc", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "sns.jointplot('n1', 'n2', data=df) " - ], - "execution_count": 37, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 37 - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAa0AAAGoCAYAAAD1m7qEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3df3CUVZ7v8c+TTodtVjSgpLNkEH8sximGHyndiw6zsEZCxBjJBHHr1rg7sMxgUVPFRq3MCLiIgPwc51o1VUvFUYbZGZdVEaNO3A2agLEcozUahOU6WZ1rFmhMMyABJFmSdJ77R+g2JN2dTkjyPKf7/aqyJE863V8ayCfnnO9zjmXbti0AAAyQ5nQBAAAkitACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABgj3ekCAEhnWtt17kJnn+tjRqXrqtEZDlQEuBOhBbjAuQudqvuvk32uz77pGkIL6IHpQQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxCC0AgDEILQCAMbhPCxhh0W4kvtARivrYzlCXjp1u7XOdm46RqggtYIRFu5E479rMqI9t6+hSwx+/7HOdm46RqpgeBAAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiD0AIAGIPQAgAYg9ACABiDvQeBYRJtY1wp9ua4APpHaAHDJNrGuFLszXEB9I/pQQCAMRhpAQaKds4WZ2whFRBagIGinbPFGVtIBUwPAgCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjMGOGMAQiLajO7u5A0OP0AKGQLQd3dnNHRh6TA8CAIxBaAEAjEFoAQCMwZoWkCSinbElcc4WkguhBSSJaGdsSdK3bxzXp7NRIsxgJkILSHKxwoxDI2EiQgsYgGj3Y0lm3pMVbTqR0RfcjtACBiDa/ViSmfdkRRuBMfqC2xFaQBTJNKICkgmhBUSRTCMqIJkQWgAiaJuH2xFaACLoNITbEVqGibXWkqo/CUd7P9LTpM6uvo+N9R6xQ3v/RrrTkL/niCWlQ8st/zAGUkestZZYPwlHe+5k+ocfa3f1hiMtfR4b6ybbCx0hvf/56T7Pga8NV6dhvIaX3n8mQ/Waw8Ut30+SnWXbtu10EZdr6dKlOn26719wADDV2LFj9dxzzz
ldhuskRWgBAFIDu7wDAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjJEUobV06VKnSwAAR6Ta97+kCC22cAKQqlLt+19ShBYAIDUQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGOkO10AACSjyoaAtlU36nhLmyZk+lRemKuSvBynyzIeoQUAQ6yyIaCVew6prSMkSQq0tGnlnkOSRHBdJqYHAWCIbatujARWWFtHSNuqGx2qKHkQWgAwxI63tA3o+uUIddk609o+5M/rVo6HVigUUklJiR588EFJ0tGjR7Vo0SIVFBSorKxM7e2p84cBIDlMyPQN6Prl6LJtnbvQOeTP61aOh9a//Mu/6MYbb4x8/NOf/lSLFy/Wm2++qSuvvFK7d+92sDoAGLjywlz5vJ5Lrvm8HpUX5jpUUfJwNLSam5u1f/9+3XfffZIk27ZVX1+vwsJCSdJ3v/td1dTUOFkiAAxYSV6ONpVOVU6mT5aknEyfNpVOpQljCDjaPbhx40aVl5fr/PnzkrpP4LzyyiuVnt5dVnZ2toLBoJMlAsCglOTlEFLDwLGR1r59+zRu3Dh961vfcqoEADBemmVpzKjUuXvJsd/pRx99pNraWtXV1enChQv66quv9OSTT+rs2bPq7OxUenq6mpub5ff7nSoRAFzPk2bpqtEZTpcxYhwbaT3yyCOqq6tTbW2tfvazn+m2227TU089pZkzZ6q6ulqS9Morryg/P9+pEgEALuN492Bv5eXl+uUvf6mCggK1tLRo0aJFTpcEAHAJV0yEzpw5UzNnzpQkTZw4kTZ3AEBUrhtpAQAQC6EFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMAahBQAwBqEFADAGoQUAMEa6Uy/8xRdf6Mc//rFOnToly7J0//336/vf/75aWlr00EMPKRAIKCcnR08//bSuuuoqp8oEALiIYyMtj8ejRx99VG+88YZeeOEF/eu//qs+++wzPfPMM7r99tu1d+9e3X777XrmmWecKhEA4DKOhVZWVpamTJkiSbriiit0ww03KBgMqqamRiUlJZKkkpISvfXWW06VCABwGVesaR07dkyffPKJpk+frlOnTikrK0uSNH78eJ06dcrh6gAAbuHYmlbY+fPntWLFCq1atUpXXHHFJZ+zLEuWZTlUGYDhVtkQ0LbqRh1vadOETJ/KC3NVkpfjdFlwMUdHWh0dHVqxYoWKi4s1b948SdLVV1+tEydOSJJOnDihcePGOVkigGFS2RDQyj2HFGhpky0p0NKmlXsOqbIh4HRpRumybR073aozre1OlzIiHAst27a1evVq3XDDDVqyZEnken5+viorKyVJlZWVuvPOO50qEcAw2lbdqLaO0CXX2jpC2lbd6FBFZgp12ar7r5M6d6HT6VJGhGOh9eGHH+rVV19VfX29FixYoAULFujtt9/WsmXL9O6772revHn63e9+p2XLljlVIoBhdLylLer1QEubrn+0SrM21zLqQh+OrWndeuutamyM/hPVr371qxGuBsBIm5DpUyBGcPWcLpTEOhciXNE9CCD1lBfmyuf1xH0M04XozfHuQQCpKTx6CncP2jEeF2saEamJ0ALgmJK8nEh4zdpcG3W6cEKmb6TLMoolKe/aTKWnyLxZivw2AbhdrOnC1vZOGjLisCU1HGlRZ5fTlYwMQguAK5Tk5WhT6VRl+ryXXD/d2sH9W4ggtIAUVNkQ0KzNta5rLS/Jy9Gfj+q7akFDBsJY0wJSTHgnivCNvW5rLY/VeEFDBiRGWkDKcf
tOFLEaL2jIgERoASnH7SOZaA0ZPq9H5YW5DlXkbpakmdeP1Zgo06rJKDV+lwAiYu1EkehIZrh3Zu99/xa7v8dnSxrl9eiq0RlOlzIiCC0gxZQX5l6ypiUlPpIZqfWwnvdvAT0xPQikmHBreU6mT5aknEyfNpVOTSgk3L4ehuTHSAtIQYMdybh9PQzJj5EWgITR2ec+aZaVMk0YEqEFYADo7HMfT5qVMk0YEtODAAaAzj44jdACMCB09sFJhBaASwz3fVjA5SC0AES4fV9CgEYMABHch2WeLtvWsdOtOna6VWda250uZ9gx0gJSTLzpv1j3WwVa2nT9o1VMF7pQqMtW3X+dlCTNvumapO8kJLQAQw1m7am/6b9Y+xJK3XvcMV0IpzE9CBgoHD6BlrZLwqS/wxxjTf898uLHqmwIxDzyvvfjmS6EUwgtwECDXXuKNf0Xsu3ICKrnvoQDfR5guDE9CBgo3h6A0aYNpe6gs+M8Zzj0eu5u4bEshey+X8W2TXAKoQUYomcYpcUIk8zR3j5rVuUvfSxZUkcoXmQp8vieXx/tNdi2yV0sSXnXZkqSOkNdOtPantTNGIQWYIDeDRSxwsS21WfasKOr/7AK81hWn68PX++ybboHXciW1HCkJfJxsncQElqAAaKtYUl9w+ShFw4M+jV8Xk/U15C67wX6fHPRoJ8bGCqEFuASlQ0BrX3tsFraOiRJY0d7VTTtL7TvD3+K2YbeO0y2VTfGfGxvY0d7NToj/ZK1r1hfzxoW3ILQAlygsiGg8pc+vmQq73Rrh35TfyTu1/UOk/LC3EumESXJm2b1WdPyeT16vHhK1Gm+3l/PGhbchNACXGBbdeOA1p7CWts7VdkQiIRPrKNDYl2b8cTeS0Z2jxdP0cJbcrTr/aMK2bY8lqWFt7Cru5v1bMSQupsxjp1ujfn4MaPSjV7zIrSAEdazC/Aqn1eW1T2qGozTrR19dqiIdXRIz2uxRnaPvPSx0vR1o0fItvXyhwHdOmkcweVSvRsx+mN6owY3FwMjpLIhoLx1e1X2woHIThYtbR2DDqywwexQEWtkF+qy+1xnBwy4CSMtYAT0blkfarFuKo41OhrojhaBi8/PaAtOY6QFjIBYLevxjB3tlc+b2D9Rr8fSQz1GcP3tRTiYbsBE9jYEhhuhBYyARNvQJSkn06emzUVqWDNPC2/5RkJf0x6y+2zRFG9ar7wwt7ursBdPmhX1en/PB4wU104P1tXV6cknn1RXV5cWLVqkZcuWOV0SMGix9vDrrXd7+SsfXd7IJtY0YHiar/d9YY8XT5EklcW4SZmNct2nd/dgf9INH6q4MrRCoZDWrVunX/7yl/L7/brvvvuUn5+vv/zLv3S6NGBQ4gXWaG+aWju6JElpVneQPPTCAf2ZN01tF68PVpplxTy8MVaXoRT7JmVuMnafwXQPmsyVmXvw4EFNmjRJEydOVEZGhoqKilRTU+N0WcCg5cT4Zp/p88rucQjI+faQWto6ZEuXHVhSd1j2t8ZV2RDQrM21uv7RKs3aXBvzXC1uMoYbuDK0gsGgsrOzIx/7/X4Fg0EHKwIuT6zDFc/8T8ewdRT2Fm1NKtphkg+9cEBlF0d6mT6vLHWH7qbSqXQPwnGunB4Ekk34m/0Trx++5L6sBJa5Biwn0xez8aP3mlS0rsZwSadbO+TzevR//nYGYQXXcOVIy+/3q7m5OfJxMBiU3+93sCKgr2jTavGU5OVodMbw/5xYXpgbczqy95pUf40VdAzCbVw50po6daqampp09OhR+f1+VVVV6amnnnK6LCCi983C4TUjSXFHJSPRfVf+0sf6X9ePjTrauuPm8ZFfVzYEYh4m2VOgpU2zNtdyjpZLDbR7sL+9CSV370/oytBKT0/XmjVr9IMf/EChUEgLFy7U5MmTnS4LiIg2rdbWEdIjL34sKXZwTYgzdSd1dw+OSk/T/3R09fvYWDq6bNX/v9NRP7fvD3+S9HXoJtKGLyUeyhh5A+
0eTISb9yd0ZWhJ0pw5czRnzhynywCiihUmIduO+8092tEhPXXZUmfIVuZor463tCV8f1e0OqIJj/QGs0NHeKqQ0IKTXLmmBbhZZUNA0feM6NbWEdITrx+O+rmSvBxtKp2qnEyfLHXfdNxbR5et063dbe+DCay4LOn6R6vijuBirYdJ3FwM5xFawABtq27ss2VSb6dbOxLap2/IQ6kftq24tedk+vTuo/kJN3IAI82104OAWyU62ih74YCeeP2wbFs609ahCZk+3XHzeL38YWDE7s0aiJ43D0ebxuTmYrgBoQUM0EAaJHrekxVoadPz9Uf6HaWNNEvqs81TrBOQWc9yn4F2DyYikQ7DaEai65DQAgaov2aKeNwWWOHpwGji7U0I9xiO7sHBGomuQ0ILGKDwN/JYO6Gbguk+mIhGDGAQSvJy4nbZudWfZ3hkXfz//3SEVPbCAd248g09VnnI6dKAhBBawCBF2wQ31gGKbvD0387Q4XV36ds3jtP59lBkqjJk2/pN/RGCC0ZgehAYpGgHKUa57coVPJalshcO6OEXD6grxsLarvePakPJ1JEtDJdtOBoxBmswDRwDbd4gtIDLdKHz63Ov2kNua7XoFr4fLFZg9XwMzOKmRozBGGjzBtODwGUYzHZIbhVtdw7AbQgt4DIk07ZG/3vmRKdLAPpFaAGXIVm2NfKmSc/XH0noXDDASYQWcBlMv89ptDdN3jRLHV3dayPhI0gILrgVjRhAD5UNgQFtXVSSl3NJ96BJMn1eWZbU2nFp7RxBYhY3dQ8ORmeoS2da2xNuxiC0gIsGexrx2nun6KEXDrhui6b+xAvaZFqrS3amdw9KA+sgZHoQuCjWacTbqhsjH1c2BDRrc62uf7Qqsv5TkpdjXGD1J1nW6pB8GGkBF8UaXYSvxxuJDfaEYTdiT0K4GSMt4KJYo4vw9VgjsbWvHTY6sDJ93shJyjmZPm0qncp6FlyLkRZwUayDD++4ebxmba6NeYaW25swLHWve2T6vDrf3qmOHrt2+Lwerb13CiEFYxBawEXRDj5080nD8YSnK3N6dUAOtDsS7md696D09Z6FiexDSGgBPfQ++HDW5lrjAkuS/rjp7sivw80jBFVySobuwbBEuggJLSAOE1u/e+4hONg2fsCtaMQA4jCx9btnU0is5pFHXvyYXS9gJEILiCPaQY9uYEkalR77n2/4HrJYI8WQbbNdE4zE9CAQR+/mjMzRXp1p7VBXP183nNIs6fYbxumDz0/HfEx4GvAqnzdmdyPbNSUHpxoxMjyWRg3xD3RjRvUfSYQW0Eu0Drt3H82XJH3vF+/p3T9+6Wh9XbYSqqGtI6Q/86bJ5/XEbCYxcc0Ol3KqEWP2TdfoG2NHj/jrMj0I9BBuXAi0tEXd9dzpwBqoltYObSqdGvOARxPX7JDaCC2gh0T2HzTJhEyfSvJy9NT90/uszbFdE0zE9CDQQ3/7D7pJeKeLWHqGUrQbp7lfCyYitIAeJmT6om7XFJ5Gm3XjONdMEX7vtmu17w9/UqClrU+AWZIW3nLpjdK9b5wGTERoAT3E2n8wPGJ5/oe3R23GGOld3h+47VptKJkqSVH3RbQl7fvDn0asHjhnoN2DQ9X1l0in33AgtGC0od5LL5FptOd/eHufr7v+0apBv2Y8s24cp//7xTmdbu1uW8/0eftscGvSlCaG3kC7B53q+hsqhBaMlegWRQMNtsFMo8WaVrxc14+/ImpIJvLadAYiGdE9CGMletJwvBb2oXLHzeOH9PnCnq8/0m+t0XbtoDMQyYrQgrESmRYbqhb28E7p1z9aFdkiqafhWj+ypX5rLcnL0abSqRzkiJTA9CCMlci02GDWe3pPJ/Y+UyvaNORwrh8l8tx0BiJVOBJaW7Zs0b59++T1enXttddq06ZNuvLKKyVJFRUV2r17t9LS0vTYY4/pr//6r50oEQbor9NPGvh6T7R1st/UH+nzuN779sV6nZxMn45fnJocLNamEE
/P7sFEOgOd6vobKo5MD86aNUu//e1v9frrr+u6665TRUWFJOmzzz5TVVWVqqqq9Oyzz+qJJ55QKGTeAXwYGYlMiw10vSfadGIsPUdA8V7nckKHtSn0J9w92HCkRaO8Hn1j7Oi4//V3yKLbORK53/nOdyK/njFjhv7jP/5DklRTU6OioiJlZGRo4sSJmjRpkg4ePKi8vDwnyoQB+psWG+hOEAOZ5usZRv29Tu8RoddjyZtmqbWje7/4cCv7QGoFUlHc0Prqq69UUVGh5uZmzZ49W8XFxZHPrV27VmvXrr3sAl5++WXNnz9fkhQMBjV9+vTI5/x+v4LB4GW/BlLbQNZ7BtK63nsEFOt1BhqchBQQW9zQWrlypSZNmqTCwkLt3r1be/fu1VNPPaWMjAx9/PHHcZ948eLFOnnyZJ/rZWVlmjt3riRp+/bt8ng8uvfeey/jtwAMnWjrZNGMHe0dULjQKAEMjbihdeTIEf385z+XJM2dO1fbt2/X3//932v79u39PvHOnTvjfn7Pnj3av3+/du7cKevisQl+v1/Nzc2RxwSDQfn9/n5fCxgq4WB54vXDkV0oevN5PXq8eMpIlgXgoriNGO3t7erq+vqM1uXLl+v+++/XAw88oJaWwR86VldXp2effVbbt2+Xz/f1ukB+fr6qqqrU3t6uo0ePqqmpSdOmTRv06wCDUZKXo9EZ0X+e81gW90DBVcLdg3nXZio9Be68jTvSuuOOO1RfX69vf/vbkWulpaW65pprtGHDhkG/6Pr169Xe3q4lS5ZIkqZPn65169Zp8uTJmj9/vu6++255PB6tWbNGHs/QHucMxNLz/qxYLepdtk1gwVV67j04+6ZrnC1mBMQNrR//+MeSukdc1dXVCgQC6uzslKTLWod68803Y35u+fLlWr58+aCfGxiM3vdnxcI9U4CzEmp5X758ucaMGaMpU6YoI6O7x9+KcXw3YKJE7s/ininAeQmFVjAY1HPPPTfctQDDIpFd3vu7Pyu8liV1n1/FfVSAMxIKrby8PDU2Nio3l58yYZZEjy/p7/6skG1r7WuHdb69Ux0hO+5zASOp5zZOKd+IEfbhhx/qlVdeUU5OTmR6UJJef/31YSsMGArxdnnvvd1Tf2taLW19W+CjPRcwkmjEiOIXv/jFcNcBDItY036BlrY+03ybSqdq7WuHo4bTYF4DwNBLKLRycvgpEmaKNe1nSZHrgZY2PfTCAdnq3nT3nul/oX1/+FPC2znRUQiMnBSYAUUqi7b7uiX1uQ8r/HGgpU0vfxhQeWGuchIIIzoKgZFFaCGpRTu+pL+zrcLrVNECz5tmaexoLycEAw4x+zQwIAG9N6udtbm236m/4y1tA96dHXCCJWnm9WM1yusx/oDHRCT/7xDoJZFOwfA6Fbuzw+1sKXL4YyogtJByeo6gAi1tfda4WKcC3IvQQkrqOYJKZMcMAO5AaCEpXE7wMAUImIPQgvES3aoJgPloeYfx4m3VBCQ7S1JnqEtnWtudLmVEEFowXqxtlNheCanAlvS7P36pcxc6nS5lRDA9COPF2qopke2VaMIAzMJIC8aLtnNFIm3r4bWwQEubbH29FlbZEBjGagFcDkILxou2VVMi2yuxFgaYh+lBJIXBtK2zFoZkkGZZmn3TNSmxhZPESAspLNaaF0eNwCSeNEvfGDtaV43O6P/BSYDQQsoa7FoYAOekxngSiIJd3AHzEFpIaWzhBJiF6UEAgDEILQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxCC0AgDEILQCAMQgtAIAxHA2tHTt2KDc3V19++aUkybZtbdiwQQUFBSouLtbhw4edLA8A4DKOhdYXX3yhd999VxMmTIhcq6urU1NTk/bu3av169dr7dq1TpUHAHAhx0Jr06ZNKi8vl2VZkWs1NTUqKSmRZVmaMWOGzp49qxMnTjhVIgDAZRwJrbfeektZWVm6+eabL7keDAaVnZ0d+T
g7O1vBYHCkywMAuNSwHQK5ePFinTx5ss/1srIyVVRUaMeOHcP10gCAJDVsobVz586o1xsbG3Xs2DEtWLBAktTc3KzS0lK99NJL8vv9am5ujjy2ublZfr9/uEoEABhm2EIrltzcXL333nuRj/Pz87V7926NGzdO+fn5+s1vfqOioiJ9/PHHGjNmjLKyska6RACAS414aMUzZ84cvf322yooKJDP59PGjRudLgkA4CKOh1ZtbW3k15Zl6fHHH3ewGgCAm7EjBgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAY6U4XgOFR2RDQtupGHW9p04RMn8oLc1WSl+N0WQCGWJdt60xru64aneF0KSOC0EpClQ0BrdxzSG0dIUlSoKVNK/cckiSCC0gyoS5b5y50pkxoMT2YhLZVN0YCK6ytI6Rt1Y0OVQQAQ4PQSkLHW9oGdB0ATEFoJaEJmb4BXQcAUxBaSai8MFc+r+eSaz6vR+WFuQ5VBABDg0aMJBRutqB7EEh+aZalMaNS51t56vxOU0xJXg4hBaQAT5qVMp2DEtODAACDEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGM4Flq//vWvddddd6moqEhbt26NXK+oqFBBQYEKCwv1zjvvOFWea1U2BDRrc62uf7RKszbXqrIh4HRJADBiHNkRo76+XjU1NXrttdeUkZGhU6dOSZI+++wzVVVVqaqqSsFgUEuWLFF1dbU8Hk8/z5gaOCcLQG+pdgikIyOtXbt2admyZcrI6H6Tr776aklSTU2NioqKlJGRoYkTJ2rSpEk6ePCgEyW6EudkAegtfAhkqnAktJqamvT73/9eixYt0gMPPBAJpmAwqOzs7Mjj/H6/gsGgEyW6EudkAUh1wzY9uHjxYp08ebLP9bKyMoVCIZ05c0YvvviiDh06pLKyMtXU1AxXKUljQqZPgSgBxTlZAFLFsIXWzp07Y35u165dKigokGVZmjZtmtLS0nT69Gn5/X41NzdHHhcMBuX3+4erROOUF+ZesqYlcU4WgNTiyPTg3Llz9f7770uSPv/8c3V0dGjs2LHKz89XVVWV2tvbdfToUTU1NWnatGlOlOhKJXk52lQ6VTmZPlmScjJ92lQ6lSYMACnDke7BhQsXatWqVbrnnnvk9Xq1efNmWZalyZMna/78+br77rvl8Xi0Zs0aOgd74ZwsAD1ZktJT6I5bR0IrIyNDP/3pT6N+bvny5Vq+fPkIVwQAZrIldXY5XcXI4eRiQ1U2BLStulHHW9o0IdOn8sJcRmAAkh6hZSBuMgaQqlJoJjR5cJMxgFRFaBmIm4wBpCpCy0CxbibmJmMg9aRZlsaMSp2VHkLLQOWFufJ5L70VgJuMgdTkSbNSZrNciUYMI4WbLegeBJBqCC1DcZMxgFTE9CAAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYtLy7CDu3A0B8hJZLsHM7gMHosm2daW1PmV0xmB50CXZuBzAYoS5b5y50Ol3GiCG0XIKd2wGgf4SWS7BzOwD0j9ByCXZuB4D+0YjhEuzcDgD9I7RchJ3bAQwUh0ACAIyRaodAEloAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGMQWgAAYxBaAABjEFoAAGM4ElqffPKJ7r//fi1YsEClpaU6ePCgJMm2bW3YsEEFBQUqLi7W4cOHnSgPAOBSjoTWtm3b9KMf/Uivvvqq/vEf/1Hbtm2TJNXV1ampqUl79+7V+vXrtXbtWifKAwC4lCOhZSIyGHYAAAmpSURBVFmWzp8/L0k6d+6csrKyJEk1NTUqKSmRZVmaMWOGzp49qxMnTjhRIgDAhRw57nLVqlVaunSptmzZoq
6uLv3bv/2bJCkYDCo7OzvyuOzsbAWDwUioAQBS27CF1uLFi3Xy5Mk+18vKylRfX6+VK1eqsLBQb7zxhlavXq2dO3cOVykAgCQxbKEVL4R+8pOfaPXq1ZKk+fPn67HHHpMk+f1+NTc3Rx7X3Nwsv98/XCUCAAzjyJpWVlaWPvjgA0lSfX29rrvuOklSfn6+KisrZdu2Dhw4oDFjxjA1CACIcGRNa/369dq4caM6Ozs1atQorVu3TpI0Z84cvf322yooKJDP59PGjRudKA8A4FKOhNatt96qPXv29LluWZYef/xxByoCADN12bbOtLbrqtEZTpcyItgRAwAMFuqyde5Cp9NljBhCCwBgDEemB51W2RDQtupGHW9p04RMn8oLc1WSl+N0WQCAfqRcaFU2BLRyzyG1dYQkSYGWNq3cc0iSCC4AcLmUmx7cVt0YCaywto6QtlU3OlQRACBRKRdax1vaBnQdANwszbI0ZlTqTJqlXGhNyPQN6DoAuJknzUqZdncpBUOrvDBXPq/nkms+r0flhbkOVQQASFTqjCkvCjdb0D0IAOZJudCSuoOLkAIA86Tc9CAAwFyEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYSbEjRiAQUGlpqdNlAMCQGTt2rJ577rmEHpdKLNu2baeLAAAgEUwPAgCMQWgBAIxBaAEAjEFoAQCMQWgBAIxBaAEAjEFoJeiTTz7R/fffrwULFqi0tFQHDx6UJNm2rQ0bNqigoEDFxcU6fPiww5V2+/Wvf6277rpLRUVF2rp1a+R6RUWFCgoKVFhYqHfeecfBCr+2Y8cO5ebm6ssvv5Tkzvd0y5Ytuuuuu1RcXKwf/ehHOnv2bORzbnxP6+rqVFhYqIKCAj3zzDNOlxPxxRdf6O/+7u909913q6ioSL/61a8kSS0tLVqyZInmzZunJUuW6MyZMw5X+rVQKKSSkhI9+OCDkqSjR49q0aJFKigoUFlZmdrb2x2uMMXYSMiSJUvs/fv327Zt2/v377cfeOCByK+XLl1qd3V12Q0NDfZ9993nZJm2bdv2e++9Z3//+9+3L1y4YNu2bZ88edK2bdv+9NNP7eLiYvvChQv2kSNH7DvvvNPu7Ox0slT7+PHj9j/8wz/Yf/M3f2OfOnXKtm13vqfvvPOO3dHRYdu2bW/dutXeunWrbdvufE87OzvtO++80z5y5Ih94cIFu7i42P70008drSksGAza//mf/2nbtm2fO3fOnjdvnv3pp5/aW7ZssSsqKmzbtu2KiorI++sGO3bssB9++GF72bJltm3b9ooVK+zf/va3tm3b9j/90z/Zzz//vJPlpRxGWgmyLEvnz5+XJJ07d05ZWVmSpJqaGpWUlMiyLM2YMUNnz57ViRMnnCxVu3bt0rJly5SRkSFJuvrqqyV111pUVKSMjAxNnDhRkyZNiowYnbJp0yaVl5fLsqzINTe+p9/5zneUnt69gcyMGTPU3NwsyZ3v6cGDBzVp0iRNnDhRGRkZKioqUk1NjaM1hWVlZWnKlCmSpCuuuEI33HCDgsFg5M9ckkpKSvTWW285WWZEc3Oz9u/fr/vuu09S9yxAfX29CgsLJUnf/e53XfPepgpCK0GrVq3S1q1bNWfOHG3ZskUPP/ywJCkYDCo7OzvyuOzsbAWDQafKlCQ1NTXp97//vRYtWqQHHngg8k20d61+v9/RWt966y1lZWXp5ptvvuS6G9/Tnl5++WXNnj1bkvveU8mdNUVz7NgxffLJJ5o+fbpOnToV+UFw/PjxOnXqlMPVddu4caPKy8uVltb9rfL06dO68sorIz/AuO3vZipIir0Hh8rixYt18uTJPtfLyspUX1+vlStXqrCwUG+88YZWr16tnTt3jnyRF8WrNRQK6cyZM3rxxRd16NAhlZWVOfbTYLw6KyoqtGPHDgeqii5erXPnzpUkbd++XR6PR/fee+9Il5dUzp8/rxUrVmjVqlW64oorLv
mcZVmXjLydsm/fPo0bN07f+ta39P777ztdDi4itHqIF0I/+clPtHr1aknS/Pnz9dhjj0nq/ik2PFUkdU8n+P3+Ya1Til/rrl27VFBQIMuyNG3aNKWlpen06dN9ag0Gg8Nea6w6GxsbdezYMS1YsEBS9/tWWlqql156yZXvqSTt2bNH+/fv186dOyPfVJ14T/vjxpp66ujo0IoVK1RcXKx58+ZJ6p7CPnHihLKysnTixAmNGzfO4Sqljz76SLW1taqrq9OFCxf01Vdf6cknn9TZs2fV2dmp9PT0Efu7ia8xPZigrKwsffDBB5Kk+vp6XXfddZKk/Px8VVZWyrZtHThwQGPGjIlMczhl7ty5kZ8MP//8c3V0dGjs2LHKz89XVVWV2tvbdfToUTU1NWnatGmO1Jibm6v33ntPtbW1qq2tVXZ2tvbs2aPx48e78j2tq6vTs88+q+3bt8vn80Wuu+k9DZs6daqampp09OhRtbe3q6qqSvn5+Y7WFGbbtlavXq0bbrhBS5YsiVwP/5lLUmVlpe68806nSox45JFHVFdXp9raWv3sZz/TbbfdpqeeekozZ85UdXW1JOmVV15xzXubKhhpJWj9+vXauHGjOjs7NWrUKK1bt06SNGfOHL399tsqKCiQz+fTxo0bHa5UWrhwoVatWqV77rlHXq9XmzdvlmVZmjx5subPn6+7775bHo9Ha9askcfjcbrcPtz4nq5fv17t7e2Rb7TTp0/XunXrXPmepqena82aNfrBD36gUCikhQsXavLkyY7WFPbhhx/q1Vdf1U033RQZZT/88MNatmyZysrKtHv3bk2YMEFPP/20w5XGVl5eroceekhPP/20vvnNb2rRokVOl5RSOJoEAGAMpgcBAMYgtAAAxiC0AADGILQAAMYgtAAAxiC0gAH493//dxUVFenmm2/WoUOHnC4HSDmEFjAAN910k37+85/rr/7qr5wuBUhJ3FwMRHHs2DH98Ic/1C233KKGhgb5/X798z//s2688UanSwNSGiMtIIb//u//1ve+9z1VVVVpzJgxka17ADiH0AJi+MY3vqFvfvObkqQpU6YoEAg4XBEAQguIIXyIpiR5PB6FQiEHqwEgEVoAAIMQWsAAvPnmm5o9e7YaGhr04IMPaunSpU6XBKQUdnkHABiDkRYAwBiEFgDAGIQWAMAYhBYAwBiEFgDAGIQWAMAYhBYAwBj/H8slXs3h7RpZAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "oORmeyHNl6A1", - "colab_type": "text" - }, - "source": [ - "Now that data looks much better!\n", - "\n", - "So, let's average n1 and n2, and take the error bar to be ci_95." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "7NsMKc6Nl6A3", - "colab_type": "code", - "outputId": "cee13ea9-a9a6-4e76-81b6-8eae6dbb0f22", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "avg_df = df[['label', 'drug']]\n", - "n_avg = df[['n1', 'n2']].mean(axis=1)\n", - "avg_df['n'] = n_avg\n", - "avg_df.sort_values('n', inplace=True)" - ], - "execution_count": 38, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:3: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " This is separate from the ipykernel package so we can avoid doing imports until\n", - "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:4: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " after removing the cwd from sys.path.\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "FIUv_SV2l6A7", - "colab_type": "text" - }, - "source": [ - "Now, let's look at the sorted data with error bars." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "YN1DgKJNl6BD", - "colab_type": "code", - "outputId": "b162d0ab-ab94-46ae-f09e-5cb1bfecdc5a", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "plt.errorbar(np.arange(avg_df.shape[0]), avg_df['n'], yerr=ci_95, fmt='o')\n", - "plt.xlabel('drug, sorted')\n", - "plt.ylabel('activity')" - ], - "execution_count": 39, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "Text(0, 0.5, 'activity')" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 39 - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAEGCAYAAACdJRn3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3de1SUdf4H8PcwOIaCghcG5aAbRpqGl1ZTM3MXwVEQQS5WW7ZyqMzcDOngT7t4Essou5ieLrhaWnE87iZCgeYFr8fyuhRaaulCAslwQEQRY5iZ5/cHOxMMM8M8yszzDPN+/RPzncH57MPO857v5fk+CkEQBBAREYngJXUBRETkfhgeREQkGsODiIhEY3gQEZFoDA8iIhLNW+oCnGn8+PEIDg6WugwiIrdSWVmJY8eO2X1Nlw6P4OBg5ObmSl0GEZFbSUhI6PA1HLYiIiLRGB5ERCQaw4OIiERjeBARkWgMDyIiEo3hQUREojE8iIhINIYHERGJxvAgIupiHs7+Dg9nf+fU92B4EBGRaAwPIiISjeFBRESiMTyIiEg0hgcREYnG8CAiItEYHkREJBrDg4iIRGN4EBGRaAwPIiISTbb3MI+IiEDPnj3h5eUFpVKJ3NxcXL16FYsXL0ZlZSWCg4OxZs0a9O7dW+pSiYg8jqx7Hps3b0Z+fj5yc3MBAOvXr8fEiROxe/duTJw4EevXr5e4QiIizyTr8LBUVFSE+Ph4AEB8fDz27t0rcUVERJ5J1uGRmpqKhIQEbN26FQBQW1uLwMBAAED//v1RW1srZXlERB5LtnMeW7ZsgVqtRm1tLVJSUhAaGtrmeYVCAYVCIVF1RESeTbY9D7VaDQDo27cvoqKiUFJSgr59+6K6uhoAUF1djT59+khZIhGRx5JleDQ2NqKhocH885EjRxAWFoaIiAjk5eUBAPLy8jB16lQpyyQi8liyHLaqra3FwoULAQAGgwEzZ87EQw89hPDwcKSlpeHLL7/EwIEDsWbNGokrJSKSl7ziShRfugqdwYhJWfuQoRmK+DHBnf4+sgyPkJAQfPXVV+3aAwICsHnzZgkqIiKSv7ziSizLPQ2dwQgAqLx6E8tyTwNApweILIetiIhIvNW7zuNms6FN281mA1bvOt/p78XwICLqIn67elNU++1geBARdRED/X1Etd8OhgcRUReRoRkKn27KNm0+3ZTI0Azt9PeS5YQ5ERGJZ5oUX/JlCXQGI4L9fTxrtRUREd2a+DHB2HL8EgBg6/yJTnsfDlsREZFoDA8iIhKN4UFERKIxPIiIupCHs7/DT5evOf19OGFO
RNRFhL+6C41NevTo7vxTO3seREQkGsODiIhEY3gQEZFoDA8iIhKN4UFERKIxPIiISDSGBxERicbwICIi0RgeRERdwMPZ36GxSe+y92N4EBGRaLLbnuTy5ctYsmQJamtroVAoMGfOHPz973/HunXr8K9//Qt9+vQBAKSnp2PKlCkSV0tE5JlkFx5KpRJLly7FiBEj0NDQgMTEREyaNAkAMG/ePKSmpkpcIRGRvLhqM8TWZBcegYGBCAwMBAD4+voiNDQUWq1W4qqIiOTJ2maIwwf0cupdBAGZz3lUVFTg7NmzGDVqFAAgJycHsbGxWLZsGerr6yWujojIc8k2PG7cuIFFixbhxRdfhK+vLx599FHs2bMH+fn5CAwMRFZWltQlEhFJytUrrFqT3bAVADQ3N2PRokWIjY3FtGnTAAD9+vUzP5+cnIxnnnlGqvKIiCQlxRyHJdn1PARBwEsvvYTQ0FCkpKSY26urq80/7927F2FhYVKUR0QkqfBXd+Fk2RWpy5Bfz+PUqVPIz8/H3Xffjbi4OAAty3ILCgpw7tw5AEBwcDAyMzOlLJOIyKVMvQ1bw1SNTXoYhJafj5VewYjl3+D12eGIHxPslHpkFx5jx47F+fPn27Xzmg4i8lQd3V7WFBqt3dAZ8MK/fwAApwSI7MKDiIjazmvc6qS4wShg9a7zDA8iIk/QUU9DjN+u3uyEitqT3YQ5EZEn6+zltwP9fTrt32qNPQ8iIglZToR3Rm/DROmlQIZmaKf9e60xPIiIXMDy2ozODAulouW/rSfOe6qUnrXaiojInVib2DYFQuvHzuhZtNb6Pcb+qY/T97ZieBARdaD1BLa9gPAknvW/log8nq05Bnu9BKn2j3KUUQAaftfDNGpVc73J6e/J8CCiLsNyiasn9BKMAmB5jWBp7Q3kFVc6bb4DYHgQkYzcTq+g9XOexMrF5TAKcNrFgSYMDyK6LY5OGFs+9oRegZScdXGgCf9KRNSOveEfnvTlo+F32z0tZ10caMK/NlEX8XD2d+atum/3m78nDv+4m8YmvdUhKxNnXRxowvAgkgHLEz+He6gj1nbSbc2Z8x0Aw4Potom5BsCVF41R19VRcKiUzt+2kP+vpS6ps1btdIVrAKjraH3DJ3tCApw73wEwPEgGrJ3oO+vETtQVWLuWw55+ft2dVosJP2EkSkeTshy2IepcHU2MWzJtkuhs/NR2cbe65JKTskTSMfU0rttZimuNi3IDAMPDrYkZ7iEi9yC2p2Hiqh6HCcNDhsQODRFR1yB2bqM1V1+f43Znn0OHDuH111+H0WhEcnIynn76aalLui32tnYgIs/g6CoqaxQAvFzc6wDcLDwMBgMyMzPx6aefQq1WIykpCREREbjrrrukLs1h9q4JICLPoNMbzGEhdl7DpLu3AipvZZveRo/u3hg+oJfTbwQFuFl4lJSUYPDgwQgJCQEAxMTEoKioSLbhYW/4iYi6HlsT3a0f32pYtKYAoPJWtmt3VXAAgPMvQ+xEWq0WQUFB5sdqtRparVbCimyzvF8xEbknUy/BILSc+Fv/bPn4VucrxJBqmMqSW/U8iIg6g71ho87uJXQGU2DIadNKtwoPtVqNqqoq82OtVgu1Wi1hRdax10EkHaNgPxAAwKB3RR+hc7h6Ca6j3GrYKjw8HGVlZSgvL4dOp0NhYSEiIiKkLqsNBgfRrTM6MDTU0XPuEwsd6+6tkO2CGnlWZYO3tzeWL1+OJ598EgaDAYmJiQgLC5O6LDNrE+REnsSRCWPLx3IZGpKL7t4K6P83pmZtUlwu3O4MN2XKFEyZMkXqMtpgb4O6oo6GfzoKCHKcZWDoDfI/lm4XHkRknaOTwJaPGQLO0XquwpH94twhMFpjeNwiXhlOzib2m787TQK7s9YrnwD53OvFldd4AAwPok5heaLnN3/30nrYiPd3d4xDq60+//xz1NfXO7sWt8E5DvdhvI1VO2Ke43d+eVKgZfjI7w5v+N3hDaXij8et
f5bzxLRcOdTzqKmpQVJSEoYPH47ExERMnjwZCoVMFx87GVdUOZeYb/BcwdO1KQD43sFbBDuiR3dvlw5ZAQ72PBYvXozdu3cjKSkJ27dvx7Rp0/Duu+/i0qVLzq5PVtjjsM2R9fn8Bu9ZTN/sHfnmb+05OWzB4Q5MmyG6msMXCSoUCvTv3x/9+vWDUqlEfX09Fi1ahLfeesuZ9VEnErNHD0/61Frr4R9HA4E9c9dw9US5iUN/3c2bNyM/Px8BAQFISkrCkiVL0K1bNxiNRkybNg1Llixxdp2Sk2uvo/V9ADgJS61ZTgIDHP7paqQKDsDB8Kivr8e6desQHBzcpt3LywvZ2dlOKcyTudumbeR81jbGc+Sk727XDpD7cCg8ysvL2wVHRkYGVq9ejSFDhjilME+i0xvQ1GqNPtfruzdbE72Wj/nNn27F2D/1kay30ZpD4XHhwoU2jw0GA3788UenFCRXP12+Zr4D4K2y1aNgWLiGmCt+b/c5ImeQcpjKkt0zYXZ2Nj7++GM0NTXhvvvuAwAIggCVSoU5c+a4pEB3Zdmb4BCTeJ25VJPf4MmdufL2so6yGx7z58/H/Pnz8c477+CFF15wVU1uzyB4dm+iM4dtiDxVj+7eOP2qRuoybLIbHhcvXsSQIUMwffp0q8NUI0aMcFph7sroRplxK3v08KRP5HxSXbshht3w2LRpE1auXImsrKx2zykUCnz22WdOK8xdSZkdHS3NtHzMEz2RPMltiMoau+GxcuVKAC17W1HHnHUydnTTNne5DwARtSeXVVSOcmjpUGxsLGbOnIkZM2Zg0KBBzq7JLRk6qcthmi/gen2irk/u8xr2OBQeH3/8MXbs2IG0tDQoFApER0djxowZGDhwoLPrk73Wy2/FsjXnQERdg7v1JsRwKDyCg4Px1FNP4amnnkJZWRk+/PBDvP322zh79qyz65M1y+W49vjZWXJKRO7BHeYiXMXhK94qKyuxY8cO7Ny5E15eXsjIyHBmXbJm/N9GgI4ux+XmoETy15V7Cc7gUHgkJydDr9dj+vTpeP/99xESEuLsumRLpzeIXlHFraWJpMFAcB6HwuPNN99EaGios2vBm2++if3796Nbt24YNGgQ3njjDfTq1QsVFRWIjo7GnXfeCQAYNWoUMjMznV6PNY4OU5kwN4hujTtPJnsCu+GRn5+PuLg4HDx4EAcPHmz3fEpKSqcWM2nSJLzwwgvw9vbG6tWrkZ2dbR4eGzRoEPLz8zv1/cQSOzHeeoktkSfiN/+uy2543Lx5EwBw48YNlxTz4IMPmn8ePXo0vvnmG5e8ryPETm6bJsi5xJa6AjnurUTSshsejzzyCABg4sSJ+POf/9zmuVOnTjmvKgDbtm3DjBkzzI8rKioQHx8PX19fpKWlYezYsU59/9byiitF9To4VEVywxM/dTaH5jxee+01bN++vcM2R8ybNw81NTXt2tPS0hAZGQkA+Oijj6BUKjFr1iwAQGBgIPbv34+AgACcOXMGCxcuRGFhIXx9fUW//61Y8bXj28+brt0g6kz85k9yYzc8iouLUVxcjCtXruDTTz81tzc0NMBgMNzSG27atMnu87m5uThw4AA2bdoEhaLlLKxSqaBSqQAA9957LwYNGoTS0lKEh4ffUg1i5BVXoq6xucPXKRW8doP+wBM9dXV2w6O5uRmNjY0wGAxt5j18fX2xdu3aTi/m0KFD2LBhA7744gv4+PiY269cuYLevXtDqVSivLwcZWVlLlsuvHrX+Q5fw46G++HJnej22A2P+++/H/fffz9mz57d7ja0zrBy5UrodDrzKi7TktwTJ05g7dq18Pb2hpeXF1asWAF/f3+n1wMAv129afd5rqhyHi7VJJIvh+Y8Xn75Zbz//vvo1atlf/n6+nqkp6dj48aNnVrMnj17rLZrNBpoNNKcRPx7dLM5bMUVVfbx2z1R1+VQeNTV1ZmDAwB69+6N2tpapxUlJ4IHdCp4kicisRwKDy8vL/z2
22/mXXQrKirMk9ldXf3NjifL5YBDPETkSg6FR1paGv72t79h3LhxEAQBp06dkmx7EFcb6O+Dyg7mPVyBV+oSkZw4FB4PPfQQtm3bhq1bt2L48OGIjIzEHXfc4ezaZCFDMxTLck/jZnPbpcnO6HdxLT8RuQuHwuPf//43PvvsM1RVVWHYsGH44YcfMHr0aI+4h3n8mJZVZou3fg8Bf4TG7VwIyCEmInJ3DoXHZ599hi+//BJz5szB559/josXL+K9995zdm2yET8mGK/kn0Fjk170hYDsTRBRV+RQeKhUKnTv3h0AoNPpMGTIEJSWljq1MHfHwCCirsyh8AgKCsK1a9cQGRmJlJQU9OrVi/cvJyLyYA6FxwcffAAAeO655zB+/Hhcv34dkydPdmph7qxHd2/2OoioS3P4HuYm999/vzPq6DJMcxxERF2Zl9QFdDWc6yAiT8DwcMDD2d85tMKKwUFEnkL0sBW1x+W4RORpGB63gYFBRJ6Kw1a3iCuqiMiTMTyIiEg0DluJxKEqIiL2PEThUBURUQuGRwcezv4OP12+JnUZRESywvCwo3Vw8MpxIqI/MDyIiEg02YXHunXrMHnyZMTFxSEuLg4HDx40P5ednY2oqChoNBocPnxYwiqJiDybLFdbzZs3D6mpqW3aLly4gMLCQhQWFkKr1SIlJQW7du2CUql0SU1cZUVE9AfZ9TxsKSoqQkxMDFQqFUJCQjB48GCUlJRIXRYRkUeSZXjk5OQgNjYWy5YtQ319PQBAq9UiKCjI/Bq1Wg2tVitViUREHk2SYat58+ahpqamXXtaWhoeffRRPPvss1AoFHj//feRlZWFN954Q4IqiYjIFknCY9OmTQ69Ljk5Gc888wyAlp5GVVWV+TmtVgu1Wu2M8gDw+g4iIntkN2xVXV1t/nnv3r0ICwsDAERERKCwsBA6nQ7l5eUoKyvDyJEjpSqTiMijyW611erVq3Hu3DkAQHBwMDIzMwEAYWFhmDFjBqKjo6FUKrF8+XKXrbQiIqK2ZBketixYsAALFixwYTWATm+ATi/gWOkVTMrahwzNUMSPCXZpDUREciO78JATnd6AJr1gflx59SaW5Z4GAAYIEXk02c15yImuVXCY3Gw2YPWu8xJUQ0QkHwwPO9pHR4vfrt50aR1ERHLD8LBDYaN9oL+PS+sgIpIbhocdKu/28eHTTYkMzVAJqiEikg9OmNuh8lYCaFltJQAI9vfhaisiIrDnYZdpma4AQKX0YnAQEf0Pex421FxvarNMV2cwcpkuEdH/sOdhw69XGtu1cZkuEVELhocVecWV0ButL9TlMl0iIoaHVfZ6F1ymS0TE8LDKXu+Cy3SJiBgeVtnqXfj7dONkORERGB5WZWiGwsvi+kAvBfDqrBHSFEREJDMMDxssry2/s29P9jqIiP6H4WEhr7gSy3JPw2BrV0QiImJ4WFq96zxuNhvatZfXcYkuEZEJw8OCrZVWOoPRxZUQEckXw8OCrZVWKiUPFRGRCc+IFjI0Q+HTTdmuPSSAFwcSEZlwY0QLphVVi7d+DwEtq65U3gr08+suaV1ERHIiq/BIS0tDaWkpAOD69evw8/NDfn4+KioqEB0djTvvvBMAMGrUKGRmZjqtjvgxwXgl/wwam/To0V1Wh4iISBZkdWZcs2aN+eesrCz4+vqaHw8aNAj5+flSlEVERBZkOechCAJ27tyJmTNnSl0KAGD4gF7YOn+i1GUQEcmGLMPj5MmT6Nu3L/70pz+Z2yoqKhAfH4/HH38cJ0+elK44IiJy/bDVvHnzUFNT0649LS0NkZGRAICCgoI2vY7AwEDs378fAQEBOHPmDBYuXIjCwsI2w1pEROQ6Lg+PTZs22X1er9djz549yM3NNbepVCqoVCoAwL333otBgwahtLQU4eHhziyViIhskN2w1bfffovQ0FAEBQWZ265cuQKDoWXLkPLycpSVlSEkJESqEomIPJ6sVlsBwI4dOxATE9Om7cSJE1i7
di28vb3h5eWFFStWwN/fX6IKiYhIduGRlZXVrk2j0UCj0UhQDRERWSO7YSu54TJdIqL2GB5ERCQaw8OOHt292esgIrKC4UFERKIxPKzIK65Ew+96XP9dj0lZ+5BXXCl1SUREssLwsGC6h7npFuaVV29iWe5pBggRUSsMDwvW7mF+s9mA1bvOS1QREZH8MDws2LqHua12IiJPxPCwYOse5rbaiYg8EcPDgrV7mPt0UyJDM1SiioiI5Ed225NIzXQP8yVflkBnMCLY3wcZmqHmdiIiYnhYFT8mGFuOXwIAXiRIRGQFh62IiEg0hgcREYnG8CAiItEYHkREJBrDg4iIRGN4EBGRaAwPIiISjeFBRESiMTyIiEg0ScJj586diImJwbBhw3D69Ok2z2VnZyMqKgoajQaHDx82tx86dAgajQZRUVFYv369q0smIqJWJAmPu+++G+vWrcO4cePatF+4cAGFhYUoLCzEhg0bsGLFChgMBhgMBmRmZmLDhg0oLCxEQUEBLly4IEXpREQEifa2GjJkiNX2oqIixMTEQKVSISQkBIMHD0ZJSQkAYPDgwQgJCQEAxMTEoKioCHfddZfLaiYioj/Ias5Dq9UiKCjI/FitVkOr1dpsJyIiaTit5zFv3jzU1NS0a09LS0NkZKSz3paIiFzAaeGxadMm0b+jVqtRVVVlfqzVaqFWqwHAZjsREbmerIatIiIiUFhYCJ1Oh/LycpSVlWHkyJEIDw9HWVkZysvLodPpUFhYiIiICKnLJSLyWJJMmO/ZswcrV67ElStXMH/+fNxzzz3YuHEjwsLCMGPGDERHR0OpVGL58uVQKltuCbt8+XI8+eSTMBgMSExMRFhYmFNr5E2giIhsUwiCIEhdhLMkJCQgNzdX6jKIiNyKI+dOWQ1bERGRe2B4EBGRaAwPIiISjeFBRESiMTyIiEg0hgcREYnG8CAiItEYHkREJJokV5i7SmVlJRISEqQug4jIrVRWVnb4mi59hTkRETkHh62IiEg0hgcREYnG8CAiItEYHkREJBrDg4iIRGN4EBGRaAwPKw4dOgSNRoOoqCisX79e6nIQERGB2NhYxMXFma9buXr1KlJSUjBt2jSkpKSgvr7eJbUsW7YMEydOxMyZM81ttmoRBAGvvfYaoqKiEBsbix9//NHlta1btw6TJ09GXFwc4uLicPDgQfNz2dnZiIqKgkajweHDh51a2+XLlzF37lxER0cjJiYGmzdvBiCPY2erNjkcu6amJiQlJWHWrFmIiYnB2rVrAQDl5eVITk5GVFQU0tLSoNPpAAA6nQ5paWmIiopCcnIyKioqXF7b0qVLERERYT5uZ8+eBeD6zwMAGAwGxMfHY/78+QA6+bgJ1IZerxemTp0qXLp0SWhqahJiY2OFX375RdKa/vrXvwq1tbVt2t58800hOztbEARByM7OFt566y2X1HL8+HHhzJkzQkxMTIe1HDhwQEhNTRWMRqNQXFwsJCUluby2tWvXChs2bGj32l9++UWIjY0VmpqahEuXLglTp04V9Hq902rTarXCmTNnBEEQhOvXrwvTpk0TfvnlF1kcO1u1yeHYGY1GoaGhQRAEQdDpdEJSUpJQXFwsLFq0SCgoKBAEQRBeeeUVIScnRxAEQfjiiy+EV155RRAEQSgoKBCef/55p9Rlr7b/+7//E3bu3Nnu9a7+PAiCIHzyySdCenq68PTTTwuCIHTqcWPPw0JJSQkGDx6MkJAQqFQqxMTEoKioSOqy2ikqKkJ8fDwAID4+Hnv37nXJ+44bNw69e/d2qBZTu0KhwOjRo3Ht2jVUV1e7tDZbioqKEBMTA5VKhZCQEAwePBglJSVOqy0wMBAjRowAAPj6+iI0NBRarVYWx85Wbba48tgpFAr07NkTAKDX66HX66FQKHD06FFoNBoAwOzZs82f0X379mH27NkAAI1Gg++++w6Ck66DtlWbLa7+PFRVVeHAgQNISkoC0NLz6czjxvCwoNVqERQU
ZH6sVqvtfpBcJTU1FQkJCdi6dSsAoLa2FoGBgQCA/v37o7a2VrLabNVieSyDgoIkOZY5OTmIjY3FsmXLzMNCUv6dKyoqcPbsWYwaNUp2x651bYA8jp3BYEBcXBweeOABPPDAAwgJCUGvXr3g7d2yu1LrY6PVajFgwAAAgLe3N/z8/FBXV+ey2kzH7b333kNsbCxWrVplHhpy9d901apVyMjIgJdXy2m+rq6uU48bw8MNbNmyBdu3b8c///lP5OTk4MSJE22eVygUdr/xuJKcagGARx99FHv27EF+fj4CAwORlZUlaT03btzAokWL8OKLL8LX17fNc1IfO8va5HLslEol8vPzcfDgQZSUlOC///2vJHVYY1nbzz//jPT0dHzzzTfYtm0b6uvrJZk33b9/P/r06YN7773Xae/B8LCgVqtRVVVlfqzVaqFWqyWsCOb379u3L6KiolBSUoK+ffuau7zV1dXo06ePZPXZqsXyWFZVVbn8WPbr1w9KpRJeXl5ITk7G6dOnrdbmir9zc3MzFi1ahNjYWEybNg2AfI6dtdrkdOwAoFevXhg/fjy+//57XLt2DXq9HkDbY6NWq3H58mUALUNJ169fR0BAgMtqO3z4MAIDA6FQKKBSqZCQkGDzuDnzb/qf//wH+/btQ0REBNLT03H06FG8/vrrnXrcGB4WwsPDUVZWhvLycuh0OhQWFiIiIkKyehobG9HQ0GD++ciRIwgLC0NERATy8vIAAHl5eZg6dapkNdqqxdQuCAK+//57+Pn5mYdoXKX1mPLevXsRFhZmrq2wsBA6nQ7l5eUoKyvDyJEjnVaHIAh46aWXEBoaipSUFHO7HI6drdrkcOyuXLmCa9euAQB+//13fPvttxgyZAjGjx+PXbt2AQC2b99u/oxGRERg+/btAIBdu3ZhwoQJTuvNWastNDTUfNwEQWh33Fz1N33hhRdw6NAh7Nu3D++++y4mTJiAd955p1OPG3fVteLgwYNYtWoVDAYDEhMTsWDBAslqKS8vx8KFCwG0jK/OnDkTCxYsQF1dHdLS0nD58mUMHDgQa9asgb+/v9PrSU9Px/Hjx1FXV4e+ffviueeeQ2RkpNVaBEFAZmYmDh8+DB8fH6xatQrh4eEure348eM4d+4cACA4OBiZmZnmD+xHH32Ebdu2QalU4sUXX8SUKVOcVtvJkyfx2GOP4e677zaPQaenp2PkyJGSHztbtRUUFEh+7M6dO4elS5fCYDBAEARMnz4d//jHP1BeXo7Fixejvr4e99xzD95++22oVCo0NTUhIyMDZ8+eRe/evfHee+8hJCTEpbU98cQTqKurgyAIGDZsGFasWIGePXu6/PNgcuzYMXzyySfIzs7u1OPG8CAiItE4bEVERKIxPIiISDSGBxERicbwICIi0RgeREQkGsODqJV169Zh48aNUpdhV0VFBb7++mvRv7d06VJ88803TqiIPBHDg8gBpqtypabX61FZWYmCggKpSyEP5y11AURS++ijj5CXl4c+ffpgwIAB5h1m586di2HDhuHUqVOYOXMmfv75Z/zlL3/B9OnTAQBjxoxBcXExjEYjMjMzcfToUQwYMADe3t5ITEw0v86anTt34oMPPoCXlxf8/PyQk5ODpqYmvPrqqzhz5gyUSiWWLl2KCRMmIDc3F7t370ZjYyOMRiN0Oh0uXryIuLg4zJ49G3PnzsXbb7+N48ePQ6fT4bHHHsMjjzwCQRCwcuVKHDlyBAMGDEC3bt1ccjzJMzA8yKOdOXMGO3bsQF5eHgwGA2bPnvApdhoAAALpSURBVG0OD6Blz6fc3FwALcM+1uzevRuVlZXYsWMHamtrER0djcTERLvv++GHH2Ljxo1Qq9XmLS5ycnIAAF9//TUuXryI1NRU81YSP/30E7766iv4+/u3uWIYALZu3Qo/Pz9s27YNOp0OjzzyCCZNmoSzZ8+itLQUO3bsQE1NDWJiYjqsi8hRDA/yaCdPnkRkZCR8fHwAoN0+
ZtHR0R3+G6dOncL06dPh5eWF/v37Y/z48R3+zpgxY7B06VLMmDEDUVFR5n/n8ccfBwAMGTIEAwcORGlpKQBg0qRJNrefOXLkCM6fP28OmuvXr+PXX3/FiRMnEBMTA6VSCbVajQkTJnRYF5GjGB5EdphCBWjZfttoNAIAjEYjmpubb/nfzczMxA8//IADBw4gMTER27Ztc7gOS4Ig4OWXX8bkyZPbtLe+bSxRZ+OEOXm0cePGYe/evfj999/R0NCA/fv323xtcHCw+b7T+/btM4fHfffdh927d8NoNKKmpgbHjx83/84777yDPXv2tPu3Ll26hFGjRuH5559HQEAAqqqqMHbsWPMqqtLSUly+fBmhoaHtfrdnz564ceOG+fGDDz6ILVu2mOspLS1FY2Mjxo0bh507d8JgMKC6uhrHjh27hSNEZB17HuTRRowYgejoaMTFxaFPnz52dzmdM2cOnn32WcyaNQuTJ09Gjx49APxx287o6GgMGDAAw4cPh5+fHwDg559/trql/1tvvYVff/0VgiBgwoQJGDZsGEJDQ/Hqq68iNjYWSqUSb7zxBlQqVbvfHTp0KLy8vDBr1iwkJCTgiSeeQGVlJRISEiAIAgICAvDhhx8iKioKR48eRXR0NAYOHIjRo0d30lEj4q66RJ3ixo0b6NmzJ+rq6pCcnIwtW7agf//+SE1Nlf11I0S3guFB1Anmzp2La9euobm5GU8++SQSEhKkLonIqRgeREQkGifMiYhINIYHERGJxvAgIiLRGB5ERCQaw4OIiET7f9U/Veki6ZHMAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "NxsJUoS0l6BH", - "colab_type": "text" - }, - "source": [ - "Now, let's identify our active compounds. \n", - "\n", - "In my case, this required domain knowledge. Having worked in this area, and having consulted with professors specializing on this channel, I am interested in compounds where the absolute value of the activity is greater than 25. This relates to the desired drug potency we would like to model.\n", - "\n", - "If you are not certain how to draw the line between active and inactive, this cutoff could potentially be treated as a hyperparameter." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": false, - "id": "MQPUH1ogl6BH", - "colab_type": "code", - "outputId": "b3117e20-e8d6-43b1-a9eb-e4281d32d0f5", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "actives = avg_df[abs(avg_df['n'])-ci_95 > 25]['n']\n", - "\n", - "plt.errorbar(np.arange(actives.shape[0]), actives, yerr=ci_95, fmt='o')" - ], - "execution_count": 40, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 40 - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAX8AAAD4CAYAAAAEhuazAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAASKUlEQVR4nO3dfUyV9f/H8dfh4NmclDcUh8WApNBaornlTC2dR/GkSDDUmqstmE4zFyNNJ7mxiStptmb6R8Os4R/O2YqkcXKmeIPTypvc0K0sN5nAVw4T0rzLo8fz+8M8v0wxgXNxec7n+fiL8znuXG9tPb28zuecyxEKhUICABglzu4BAAC9j/gDgIGIPwAYiPgDgIGIPwAYKN7uAe7H6NGjlZKSYvcYABBVWlpa9NNPP931uaiIf0pKiqqrq+0eAwCiSkFBQafPcdkHAAxE/AHAQMQfAAxE/AHAQMQfAAxE/AHAQMQfAAxE/AHAQMQfAB5Qr1b+oFcrf7DktYk/ABiI+AOAgYg/ABiI+AOAgYg/ABiI+AOAgYg/ABiI+AOAgYg/ABiI+AOAgSy/h6/H41G/fv0UFxcnp9Op6upqnTt3Tu+8845aWlqUkpKiNWvWqH///laPAgD4W6+c+W/cuFE1NTXhm7CvX79eY8aM0ffff68xY8Zo/fr1vTEGAOBvtlz2qaurU35+viQpPz9fO3futGMMADBWr8R/zpw5Kigo0JYtWyRJ7e3tSkpKkiQ9+uijam9v740xAAB/s/ya/+bNm+V2u9Xe3q6ioiJlZGTc9rzD4ZDD4bB6DADAP1h+5u92uyVJiYmJys7OVkNDgxITE9XW1iZJamtr06BBg6weAwDwD5bG//Lly7p48WL45/379yszM1Mej0dbt26VJG3dulWTJk2ycgwAwL9Yetmnvb1dCxculCQFg0FNnz5d48ePV1ZWlkpKSvTVV1/pscce05o1a6wcAwCiztajLTp6+pwCwRsaV7FLS7xDlT8yJWKvb2n8U1NT9e23396xPnDgQG3cuNHKQwNA1Np6tEWl1ccUCN6QJLWcu6LS6mOSFLG/APiELwA8YFZvP6Er14K3rV25FtTq7ScidgziDwAPmP+du9Kl9e4g/gDwgHlsQN8urXcH8QeAB8wS71D17eO8ba1vH6eWeIdG7BiWf8gLANA1t97UXfpVgwLBG0oZ0De6dvsAALonf2SKNh88LUnaMn9MxF+fyz4AYCDiDwAGIv4AYCDiDwAGIv4AYCDiDwAGIv4AYCDiDwAGIv4AYCDiDwAGIv4AYCDiDwAGIv4AYCDiDwAGIv4AYCDLvs//zJkzWrp0qdrb2+VwOPTKK6/ojTfe0Lp16/Tll19q0KBBkqRFixZpwoQJVo0BALgLy+LvdDq1bNkyPfPMM7p48aJmzJihcePGSZIKCws1Z84cqw4NAPgPlsU/KSlJSUlJkqSEhARlZGTI7/dbdTgAiDlW3MHrll655t/c3KxffvlFI0aMkCRt2rRJubm5Ki0t1fnz53tjBADAP1ge/0uXLqm4uFjvvfeeEhISNHv2bO3YsUM1NTVKSkpSRUWF1SMAAP7F0vhfu3ZNxcXFys3N1ZQpUyRJjzzyiJxOp+Li4jRr1iwdO3bMyhEAAHdhWfxDoZCWL1+ujIwMFRUVhdfb2trCP+/cuVOZmZlWjQAA6IRlb/geOXJENTU1GjJkiPLy8iTd3NZZW1urX3/9VZKUkpKi8vJyq0YAAHTCsvg/99xzOnHixB3r7OkHAPvxCV8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMBDxBwADEX8AMJBt8a+vr5fX61V2drbWr19v1xgAYCRb4h8MBlVeXq4NGzbI5/OptrZWJ0+etGMUADCSLfFvaGhQenq6UlNT5XK5lJO
To7q6OjtGAQAj2RJ/v9+v5OTk8GO32y2/32/HKABgJN7wBQAD2RJ/t9ut1tbW8GO/3y+3223HKABgJFvin5WVpcbGRjU1NSkQCMjn88nj8dgxCgAYKd6Wg8bHq6ysTHPnzlUwGNSMGTOUmZlpxygAYCRb4i9JEyZM0IQJE+w6PAAYjTd8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBAxB8ADET8AcBA8Va86Icffqjdu3erT58+SktL06pVq/Twww+rublZ06ZN0+DBgyVJI0aMUHl5uRUjAADuwZL4jxs3TosXL1Z8fLxWr16tyspKLVmyRJKUlpammpoaKw4LALhPllz2eeGFFxQff/PvlWeffVatra1WHAYA0E2WX/P/+uuvNX78+PDj5uZm5efn6/XXX9fhw4etPjwA4C66fdmnsLBQZ8+evWO9pKREkydPliR9+umncjqdevnllyVJSUlJ2r17twYOHKjjx49r4cKF8vl8SkhI6O4YAIBu6Hb8q6qq7vl8dXW19uzZo6qqKjkcDkmSy+WSy+WSJA0bNkxpaWk6deqUsrKyujsGAKAbLLnsU19frw0bNujTTz9V3759w+sdHR0KBoOSpKamJjU2Nio1NdWKEQAA92DJbp+VK1cqEAioqKhI0v9v6Tx06JDWrl2r+Ph4xcXFacWKFRowYIAVIwAA7sGS+O/YseOu616vV16v14pDAgC6gE/4AoCBiD8AGIj4A4CBiD8AGIj4A4CBiD8AGIj4A4CBiD8AGIj4A4CBiD8AGIj4A4CBiD+AqPBq5Q96tfIHu8eIGcQfAAxE/AHAQMQfAAxE/AHAQMQfAAxE/AHAQMQfiEJse0RPEX9EPUIIdB3xBwADWRb/devW6cUXX1ReXp7y8vK0d+/e8HOVlZXKzs6W1+vVvn37rBoBANCJeCtfvLCwUHPmzLlt7eTJk/L5fPL5fPL7/SoqKtL27dvldDqtHAUA8A+9ftmnrq5OOTk5crlcSk1NVXp6uhoaGnp7DAAwmqXx37Rpk3Jzc1VaWqrz589Lkvx+v5KTk8O/xu12y+/3WzkGAOBfenTZp7CwUGfPnr1jvaSkRLNnz9Zbb70lh8OhTz75RBUVFVq1alVPDgcAiJAexb+qquq+ft2sWbP05ptvSrp5pt/a2hp+zu/3y+1292QM/MutbY9b5o+xeRIADyrLLvu0tbWFf965c6cyMzMlSR6PRz6fT4FAQE1NTWpsbNTw4cOtGgMAcBeW7fZZvXq1fv31V0lSSkqKysvLJUmZmZmaOnWqpk2bJqfTqbKyMnb6AEAvszT+nVmwYIEWLFhg1aEBxJitR1t09PQ5BYI3NK5il5Z4hyp/ZIrdY0U1PuEL4IG29WiLSquPKRC8IUlqOXdFpdXHtPVoi82TRTfiD+CBtnr7CV25Frxt7cq1oFZvP2HTRLGB+AN4oP3v3JUureP+EH8AD7THBvTt0jruD/EH8EBb4h2qvn1u3xHYt49TS7xDbZooNlj6xW4A0FO3dvUs/apBgeANpQzoy26fCCD+QJQxcdtj/sgUbT54WhKfXI8ULvsAUYRtj4gU4o+oduss+KdTHRpXsSvmI8i2R0QK8UfUMvEsmG2PiBTij6hl4lkw2x4RKcQfUcvEs2C2PSJSiD+ilolnwfkjU7SqIEsu583/dVMG9NWqgqyY3+2DyCP+iFqmngXnj0zRyLQBGj14kPYv8xB+dAvxjzEm7X7hLBjoPj7kFUM62/0iKWaDyId/gO6J+TP/Vyt/CN/TNtaZuPsFQPfEfPxNYuLuFwDdQ/xjiIm7XwB0D/GPIabufgHQdbzhG0P46lsA98uS+JeUlOjUqVOSpAsXLuihhx5STU2NmpubNW3aNA0ePFiSNGLECJWXl1sxgrHY/QLgflgS/zVr1oR/rqioUEJCQvh
xWlqaampqrDgsAOA+WXrZJxQKadu2bdq4caOVhwFgAP4lG1mWvuF7+PBhJSYm6vHHHw+vNTc3Kz8/X6+//roOHz5s5eEBAJ3o9pl/YWGhzp49e8d6SUmJJk+eLEmqra3V9OnTw88lJSVp9+7dGjhwoI4fP66FCxfK5/PddlkIAGC9bse/qqrqns9fv35dO3bsUHV1dXjN5XLJ5XJJkoYNG6a0tDSdOnVKWVlZ3R0DANANll32OXDggDIyMpScnBxe6+joUDB48+sHmpqa1NjYqNTUVKtGAAB0wrI3fL/77jvl5OTctnbo0CGtXbtW8fHxiouL04oVKzRgwACrRgAAdMKy+FdUVNyx5vV65fV6rTokAOA+8QlfIAqx7RE9xXf7AICBOPNH1OMsGOg6zvwBwEAxHX+T7mcLAF0Rs/Hv7H62/AUAADEcf+5nCwCdi9n4cz9bAOhczMaf+9kCQOdiNv4m3892y/wxbH8EcE8xu8+f+9kCQOdiNv4S97MFgM7E7GUfAEDniD8AGIj4A4CBiD8AGIj4A4CBiD8AGIj4A4CBiD8AGIj4A4CBehT/bdu2KScnR0899ZSOHTt223OVlZXKzs6W1+vVvn37wuv19fXyer3Kzs7W+vXre3J4AEA39Sj+Q4YM0bp16zRq1Kjb1k+ePCmfzyefz6cNGzZoxYoVCgaDCgaDKi8v14YNG+Tz+VRbW6uTJ0/26DcAAOi6Hn23zxNPPHHX9bq6OuXk5Mjlcik1NVXp6elqaGiQJKWnpys1NVWSlJOTo7q6Oj355JM9GQMA0EWWXPP3+/1KTk4OP3a73fL7/Z2uAwB613+e+RcWFurs2bN3rJeUlGjy5MmWDAUAsNZ/xr+qqqrLL+p2u9Xa2hp+7Pf75Xa7JanTdQBA77Hkso/H45HP51MgEFBTU5MaGxs1fPhwZWVlqbGxUU1NTQoEAvL5fPJ4PFaMAAC4hx694btjxw6tXLlSHR0dmj9/vp5++ml9/vnnyszM1NSpUzVt2jQ5nU6VlZXJ6bx5S8WysjLNnTtXwWBQM2bMUGZmZkR+I53hJi4AcCdHKBQK2T3EfykoKFB1dbXdYwBAVLlXO/mELwAYiPgDgIGIPwAYiPgDgIGIPwAYiPgDgIGIPwAYiPgDgIF69Anf3tLS0qKCggK7xwCAqNLS0tLpc1HxCV8AQGRx2QcADET8AcBAxB8ADET8AcBAxB8ADET8AcBAUbHPv7vq6+v1/vvv68aNG5o1a5bmzZtn90iWKi0t1Z49e5SYmKja2lq7x+kVZ86c0dKlS9Xe3i6Hw6FXXnlFb7zxht1jWerq1at67bXXFAgEFAwG5fV6VVxcbPdYlrt19z+3263Kykq7x+kVHo9H/fr1U1xcnJxOZ2RvahWKUdevXw9NmjQpdPr06dDVq1dDubm5od9//93usSx18ODB0PHjx0M5OTl2j9Jr/H5/6Pjx46FQKBS6cOFCaMqUKTH/3/nGjRuhixcvhkKhUCgQCIRmzpwZOnr0qM1TWe+LL74ILVq0KDRv3jy7R+k1EydODLW3t1vy2jF72aehoUHp6elKTU2Vy+VSTk6O6urq7B7LUqNGjVL//v3tHqNXJSUl6ZlnnpEkJSQkKCMjQ36/3+aprOVwONSvXz9J0vXr13X9+nU5HA6bp7JWa2ur9uzZo5kzZ9o9SsyI2fj7/X4lJyeHH7vd7piPgumam5v1yy+/aMSIEXaPYrlgMKi8vDyNHTtWY8eOjfnf8wcffKAlS5YoLi5mk9WpOXPmqKCgQFu2bIno65r3J4mYdOnSJRUXF+u9995TQkKC3eNYzul0qqamRnv37lVDQ4N+++03u0eyzO7duzVo0CANGzbM7lF63ebNm/XNN9/os88+06ZNm3To0KGIvXbMxt/tdqu1tTX82O/3y+122zgRrHLt2jUVFxcrNzdXU6ZMsXucXvXwww9r9OjR2rdvn92jWObnn3/Wrl275PF4tGjRIv34449699137R6
rV9xqVmJiorKzs9XQ0BCx147Z+GdlZamxsVFNTU0KBALy+XzyeDx2j4UIC4VCWr58uTIyMlRUVGT3OL2io6NDf/75pyTpr7/+0oEDB5SRkWHzVNZZvHix6uvrtWvXLn388cd6/vnn9dFHH9k9luUuX76sixcvhn/ev3+/MjMzI/b6MbvVMz4+XmVlZZo7d254i1gk/+AeRIsWLdLBgwf1xx9/aPz48Xr77bc1a9Ysu8ey1JEjR1RTU6MhQ4YoLy9P0s0/hwkTJtg8mXXa2tq0bNkyBYNBhUIhvfTSS5o4caLdYyHC2tvbtXDhQkk33+OZPn26xo8fH7HX5yudAcBAMXvZBwDQOeIPAAYi/gBgIOIPAAYi/gBgIOIPAAYi/gBgoP8DuOP5dhZ3WlQAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "9rz2KjJ8l6BS", - "colab_type": "code", - "outputId": "1d0ab9c5-fe4c-4789-a585-c4e530e3d23b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "# summary\n", - "print (raw_data.shape, avg_df.shape, len(actives.index))" - ], - "execution_count": 41, - "outputs": [ - { - "output_type": "stream", - "text": [ - "(430, 5) (391, 3) 6\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "TiNqzX0Kl6BV", - "colab_type": "text" - }, - "source": [ - "In summary, we have:\n", - "* Removed data that did not address the question we hope to answer (small molecules only)\n", - "* Dropped NaNs\n", - "* Determined the noise of our measurements\n", - "* Removed exceptionally noisy datapoints\n", - "* Identified actives (using domain knowledge to determine a threshold)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "46rf9hMkl6BW", - "colab_type": "text" - }, - "source": [ - "## Determine model type, final form of dataset, and sanity load" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "vUK150zHl6BX", - "colab_type": "text" - }, - "source": [ - "Now, what model framework should we use? \n", - "\n", - "Given that we have 392 datapoints and 6 actives, this data will be used to build a low data one-shot classifier (10.1021/acscentsci.6b00367). If there were datasets of similar character, transfer learning could potentially be used, but this is not the case at the moment.\n", - "\n", - "\n", - "Let's apply logic to our dataframe in order to cast it into a binary format, suitable for classification." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "WwcvCbigl6BX", - "colab_type": "code", - "outputId": "5154a1d4-2a56-4cbf-bfe7-e89c5a1baeda", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "# 1 if condition for active is met, 0 otherwise\n", - "avg_df['active'] = (abs(avg_df['n'])-ci_95 > 25).astype(int)" - ], - "execution_count": 42, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:2: SettingWithCopyWarning: \n", - "A value is trying to be set on a copy of a slice from a DataFrame.\n", - "Try using .loc[row_indexer,col_indexer] = value instead\n", - "\n", - "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", - " \n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "2t7vmHnNl6Bc", - "colab_type": "text" - }, - "source": [ - "Now, save this to file." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "a6AGQoB2l6Be", - "colab_type": "code", - "colab": {} - }, - "source": [ - "avg_df.to_csv('modulators.csv', index=False)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Vs7Pkg7Il6Bp", - "colab_type": "text" - }, - "source": [ - "Now, we will convert this dataframe to a DeepChem dataset." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "GBneufPbl6Bq", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "NRpnbgyAl6Bv", - "colab_type": "code", - "outputId": "5a9a02f8-81eb-4669-dd83-61dd0ad27523", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "dataset_file = 'modulators.csv'\n", - "task = ['active']\n", - "featurizer_func = dc.feat.ConvMolFeaturizer()\n", - "\n", - "loader = dc.data.CSVLoader(tasks=task, smiles_field='drug', featurizer=featurizer_func)\n", - "dataset = loader.featurize(dataset_file)" - ], - "execution_count": 45, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from modulators.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "TIMING: featurizing shard 0 took 1.601 s\n", - "TIMING: dataset construction took 1.774 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "D9GElTwzl6B0", - "colab_type": "text" - }, - "source": [ - "Lastly, it is often advantageous to numerically transform the data in some way. For example, sometimes it is useful to normalize the data, or to zero the mean. This depends in the task at hand.\n", - "\n", - "Built into DeepChem are many useful transformers, located in the deepchem.transformers.transformers base class. \n", - "\n", - "Because this is a classification model, and the number of actives is low, I will apply a balancing transformer. I treated this transformer as a hyperparameter when I began training models. It proved to unambiguously improve model performance." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "-Ll5i93il6B1", - "colab_type": "code", - "outputId": "fc14f85f-3775-4027-ce04-1e1dd6019f89", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 0 - } - }, - "source": [ - "transformer = dc.trans.BalancingTransformer(transform_w=True, dataset=dataset)\n", - "dataset = transformer.transform(dataset)" - ], - "execution_count": 46, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.200 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "L57S8x7sl6B4", - "colab_type": "text" - }, - "source": [ - "Now let's save the balanced dataset object to disk, and then reload it as a sanity check." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "MwFyB7Ryl6B5", - "colab_type": "code", - "colab": {} - }, - "source": [ - "dc.utils.save.save_to_disk(dataset, 'balanced_dataset.joblib')\n", - "balanced_dataset = dc.utils.save.load_from_disk('balanced_dataset.joblib')" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Oydv-y4Fl6B9", - "colab_type": "text" - }, - "source": [ - "Tutorial written by Keri McKiernan (github.com/kmckiern) on September 8, 2016" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "F2E5bL1Jl6CD", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!\n", - "\n", - "\n", - "# Bibliography\n", - "\n", - "[2] Anderson, Eric, Gilman D. Veith, and David Weininger. \"SMILES, a line\n", - "notation and computerized interpreter for chemical structures.\" US\n", - "Environmental Protection Agency, Environmental Research Laboratory, 1987." - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/10_Creating_a_high_fidelity_model_from_experimental_data.ipynb b/examples/tutorials/10_Creating_a_high_fidelity_model_from_experimental_data.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..761caf04e4dcd6d3ea92ff2d565c0f183f5dcecf --- /dev/null +++ b/examples/tutorials/10_Creating_a_high_fidelity_model_from_experimental_data.ipynb @@ -0,0 +1,1753 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "6MNHvkiBl55x" + }, + "source": [ + "# Tutorial Part 10: Creating a High Fidelity Dataset from Experimental Data\n", + "\n", + "In this tutorial, we will look at what is involved in creating a new Dataset from experimental data. As we will see, the mechanics of creating the Dataset object is only a small part of the process. Most real datasets need significant cleanup and QA before they are suitable for training models.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/10_Creating_a_high_fidelity_model_from_experimental_data.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 323 + }, + "colab_type": "code", + "id": "tbLbuh6wl8tX", + "outputId": "5ddc020c-80ff-42fe-fe5b-85dd0b25446f" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 + }, + "colab_type": "code", + "id": "iR6NiQ6rLqbK", + "outputId": "5c2fb16e-80c3-40c7-9a05-2e9e3c397a99" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xpVK4q5Ol558" + }, + "source": [ + "## Working With Data Files\n", + "\n", + "Suppose you were given data collected by an experimental collaborator. You would like to use this data to construct a machine learning model. 
\n", + "\n", + "*How do you transform this data into a dataset capable of creating a useful model?*\n", + "\n", + "Building models from novel data can present several challenges. Perhaps the data was not recorded in a convenient manner. Additionally, perhaps the data contains noise. This is a common occurrence with, for example, biological assays due to the large number of external variables and the difficulty and cost associated with collecting multiple samples. This is a problem because you do not want your model to fit to this noise.\n", + "\n", + "Hence, there are two primary challenges:\n", + "* Parsing data\n", + "* De-noising data\n", + "\n", + "In this tutorial, we will walk through an example of curating a dataset from an excel spreadsheet of experimental drug measurements. Before we dive into this example though, let's do a brief review of DeepChem's input file handling and featurization capabilities.\n", + "\n", + "### Input Formats\n", + "DeepChem supports a whole range of input files. For example, accepted input formats include .csv, .sdf, .fasta, .png, .tif and other file formats. The loading for a particular file format is governed by the `Loader` class associated with that format. For example, to load a .csv file we use the `CSVLoader` class. Here's an example of a .csv file that fits the requirements of `CSVLoader`.\n", + "\n", + "1. A column containing SMILES strings.\n", + "2. A column containing an experimental measurement.\n", + "3. 
(Optional) A column containing a unique compound identifier.\n", + "\n", + "Here's an example of a potential input file.\n", + "\n", + "|Compound ID | measured log solubility in mols per litre | smiles |\n", + "|---------------|-------------------------------------------|----------------|\n", + "| benzothiazole | -1.5 | c2ccc1scnc1c2 |\n", + "\n", + "\n", + "Here the \"smiles\" column contains the SMILES string, the \"measured log\n", + "solubility in mols per litre\" contains the experimental measurement, and\n", + "\"Compound ID\" contains the unique compound identifier.\n", + "\n", + "### Data Featurization \n", + "\n", + "Most machine learning algorithms require that input data form vectors. However, input data for drug-discovery datasets routinely come in the form of lists of molecules and associated experimental readouts. To load the data, we use a subclass of `dc.data.DataLoader` such as `dc.data.CSVLoader` or `dc.data.SDFLoader`. Users can subclass `dc.data.DataLoader` to load arbitrary file formats. All loaders must be passed a `dc.feat.Featurizer` object, which specifies how to transform molecules into vectors. DeepChem provides a number of different subclasses of `dc.feat.Featurizer`." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "-rrEZ5ihl56A" + }, + "source": [ + "## Parsing data" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "a0AhOo1nl56D" + }, + "source": [ + "In order to read in the data, we will use the pandas data analysis library. \n", + "\n", + "In order to convert the drug names into smiles strings, we will use pubchempy. This isn't a standard DeepChem dependency, but you can install this library with `conda install pubchempy`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 190 + }, + "colab_type": "code", + "id": "fYBi59mkl56F", + "outputId": "8536d712-eedf-411c-859c-4db4f7204dfa" + }, + "outputs": [], + "source": [ + "!conda install pubchempy" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Gj-VYSail56Q" + }, + "outputs": [], + "source": [ + "import os\n", + "import pandas as pd\n", + "from pubchempy import get_cids, get_compounds" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "zwhTD4OBl56V" + }, + "source": [ + "Pandas is magic but it doesn't automatically know where to find your data of interest. You likely will have to look at it first using a GUI. \n", + "\n", + "We will now look at a screenshot of this dataset as rendered by LibreOffice.\n", + "\n", + "To do this, we will import Image and os." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "6CrNCoe0l56s" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArYAAAIOCAIAAADC+NmjAAAAA3NCSVQICAjb4U/gAAAAGXRFWHRTb2Z0d2FyZQBnbm9tZS1zY3JlZW5zaG907wO/PgAAIABJREFUeJzsnXd8FcX2wM/s7u0lN4VUWgoBAiiEDoJUnzRBREUEu88GYi9YUET0/RTFhj6fWFFABCyogIKgdKnSIQUIIQlpt7fdnfn9sTc3N7ek3psEmO8n4t7dKefMzs6cnTkzi8b8a3hcbBTDsoIgYpEAEKBQKBQKhXJ5goBhGI5jsIi5qCgdIORyultaKAqFQqFQKC0NARGLoiDK5BzHscjt4ltaIgqFQqFQKK0It4vnBBETQicXKBQKhUKh1IAThdrsA0KI1e5wOly8IGCMGYaRcZxKpdColQgxrSoihUKhUCiUMMIRgkNdszucZrNtVHri8I6J6QadTsFZXEKO0fpHftGmvGJ9lFalVISKaDLb0tLT0lI7GgzRCoXc5XIbjca8/Pz83LyoCESk1IJAWKfTIQruOK2aD21mYcJa7A6Rd8drWEGmak4JKZSG0mqqK2tzukTBHa1kRY62TgCt6NZQwgC6YdKYoBcsVrsOyNyhXVNVHBF4wCIQAggBwyJOlm8XXv7rmBWQVqsOjEiAGTbsao1GLQgCxpgQghBiGIbjOKvNtmXznwzgMEZsBgSiqLxw3saj9DYKUaERCcMLbjUHGHFBAzS/hKEQCWvmtcnt4jvGQ+HufQ6lJqiVgAlrdinadsns2j7KufPXYzZUy7MdqD6F0pw0qLqGHe/DnhanMkKMPj4utV0Mc2L7aScXaStBJDKj0eiw2QQCwLBpcQpRrjPZBbup0i1iAKZtrMKG9JbyEk4hbxcXxSMUUXkCadlbQwk7XFBHBIfTpSP4nWGdtaJdNAuBAdqz3KKhnWZvPml3uFRKuW9EAszoa0YSjO12m18st9sl49jR14zYsH5TuCKGghCuvKzMVu2JieQyzqDlNBoNhoY9NjxRmM2qSS+9f31y0eoFC/+s4MvMfFRSGz1fYkIKjDi/ALuMVkHeJCvB6OBM5UUATHq80psUAa68rNzmdDOcIj1B567fw29zc7xh0BNPDIuCkwv/3M3LBTcjCwwmYrnLph177y395O5d/6w9aiKEUwZN0CGozBUVvuo3Ws1acIsys6nC6XCKBACQXC5ro2dlSu2l5zVjdbMOY5nDLRBAMhmXZJAxCo2PmsjqQtayEhfhuiRpHKyn5rux3GyqcNodIgGG5WL1rE6jF6viEMJWlJZZ3UGeXGDkaQltiooLHQRSouWcJgoACJGVlxTbBKzVyNtER0njigLmLFary253CyIBYFhWo+QSolR8sPrTBIIrWDv1r65NARNFRcl5m4D1GnlsVbH4Pewrz+nueXpmP7l716tbT1tcpH7yh6KqzG0uAQMgjmOjNFyUVisiBAAikRtN0HPyg1OHZyWqAFvOrn/njeX5zri+Ex+7YVBWGwWIFTvef/Uz95i3Fg1VH/rqlQ+2yaL0TbcSGlQTmufWUJqNICYCIcRisS0YnKZ2mLAYrJUBAAANyz2XnTJ7e75CziGEpIhmi23kyOG82y2KYtBYgiCwLDtwYN9Nm7Y0PWItlFfC4Cff/nf3qq5atF3IP7H7j19/2nHGYGjYY+PmkajL7JemBEjt11W38o/2L3zxQBYy/vr8s1uMTlGh8QuwaXO5QtZ4F1CTQ4Y73bbygy4IjOtemrO5zOJWaAlwleV89qPvzb5SDudXPTrnN0WMtj5Wgp8cBBOCgohGatYDAhDUdnRjtdmeOO
eLl33Vb6B+deMQ1VaX9l933nlNn4x4FQC2Fecc/vPHVTvzrUipuZSsBJODI9Fdb39o/JDOMXLgK3L2/vzN8r1FJlatJwBOkbVWGmUZ1zzzn8k91Hnv3/96kZI4WLlTVFocqpG33TFhUEasDFxlp7b+uPy7rQW6aL0AiBC2stzd6/n/zc4MlqV12wtP7Jz1xfx+csj576PvHzAiVVxFORn9+pLpbcH8x4Inl57WGaJsgspqEbpfc/O4oVd0SdZxAM7yM0f3bVv/059GloTLSgilYJ0R61ldmwImisoKcu3/LbklGYy/zX9m+VmtIQoHtAbfFoRNDIegslqEK669ZdzQKzonaljgjWeP79r065rNOTEGtZNVVhqd7W6a88i/YgBAdDhBq7CZeZQ+de79g/UAwDscmNgqhcQRfZI5BL0Gtme2XcBuN9Mkq8XRwJrQDLeG0pxwgXfP5nBenahtx7pEhxMAHCI5axc66zxV4YSFb6/mVCwCgA5K1dAE9R6LS61WShHbtm2rUCjc7tp2WRBFUalUtU1JrqysCBpx06Y/AmONGDE8MGJtYEYX4+m9MAaG1cRnZI/PyB6Q+saLy3LUBh0PnNnqdFrNvEiAYaPUXIxeLSAWAFmcxGGqdAkiAGI5tmOM22Hdt3iJZmh8yV9bKhB0ZD1dM84pcyMWpyeAbwCTgzhMZXKFPDlWJyJkdHCWimJAsswkjQ0pLFanw2rmRYJY1qBho7Q6sUZPjwSej04xIAAAw79uHbh2/jaVgpjcMjF56C1XygEAopJ1QHjAIshDqAAClptNlU6HQySgiK5O3UnUJYVFLmAy4hQuhbbCItjMlQwnb9fGfzCQAFthsjptNuk9npNxiVEyItMAMH7qZySonYjFwAYVBhO50WzknU5ewASAYWVp8aoyt7xmCSvdMs8NFYnSZlHc/NqT4+OrRGE0iZnZnbhlO4jowhpj0ZlA+VPjtQ6iNBrL3W5eEDEAsBynUyucTqfLLRJAKpUsKUrFszIRy+sTDIKpzyjUor86nAyJLoEoFPKkWJ2IUKWdsVZeAEbeKVHjRGwtNdQpqN2yTnPmTe/KAhCHTVTFZAyY8WI75TPz99rtJjC42dhR98665aoUbzNMAEQit5ngXy8+Oz0VAbFcqJDHx3Uaedez8fyzSw6YGbWeAGAAwASCjpbhGr5HQRtvJ1bZnFG3zH98bHL1SWVsh+xhls3fbWHVpCGbqHAVZWU2lxD4OCTHxbi4+EAFa4rEVFjcNrMRkKxTosrJqMqNFrvNLlPIGFZfU5HAx1ZV7GSDxZW3j9W66/J9DiwZSTalzO2wVD3sf1YwKM57Pa/MCeBSqrhkg4pn5EFrvoNRBW0BXFhtc0ZNm//YmOoylxna9/jXHT2GDVj54hsbWZ2a4JirB8QAkJP/e/KFjeWYVahkXPr1vfUA/MGPHnh9l5GAXqsia7/9Td9PdezH0wQ4RkaAMdncTqvJLRAApFbJEqO1PGIwsHW1ReCqoyagYpPD7xkBmf9EMAGAEDLUfgsorYEgowguFz8kViXaLIBFADhjw3OPmeZ00vWI4g6ZhAWnLC93jcrUMAAAAj8kVr61zKxSKaSISUmJLpcT45AukFVZ4MSkpKLi4lARx48f5xt+7dqfRVEIjFgLXq2Mv81/aEkur0oeet+zMwdo44aP7/TtohIRLpj5jsNvnD6uX2aMDFsK9238/ssfD6mi1MVOmTp1yAPTR2QnaxDwluJj37+9+Iy1/cQZ1/aTuw3/7HrrjJSwYcz8/44BAPOW5x5eyjNdPAEOH9s58dXZWSx/8L2Zbx3i1bFMlxkrZmex9l3zZi4pIEz6qKnTx/XNMLCiuWDPhpVf/HJCE6X1GdVAROD1STrPj04TJnTc/nuZ02Ujve4a6ek0NXExclKMmcrgKqisRGO2cFdNf+zGoRkxnGgud1e1prjQ0eGl5Qs6g/HX559dUeSMGvnMF9OS4dzK2c/9Ttgob+lhAuUmoeP1s2
cOS4vXsACC8fSBX5Yu3VFQTiDJT/3nZy+TRemLgwmjiNJdsMt6T7rr2j6ZaYk6DsBduve/L//P2GHUYzVK+MNDDoebUwKAnQeSMmhIPADYd7774js7ykV5dMcuKYpzLpGLM9qSXlz+UjD5fzMqoobf+8Q1WW2TDUoE4Cw+8sfmPG2fqwZkRMuw5fSuX5Z8vokouXK7os5gSIWKrCRQ/V2FViNOqKFOWc5RMaNnAvAH3nto0SFeGS3ree9392eAZeuLs78kUbVYCchmt7W59pquLEDZ+uefXHFCSLj2ufl3d0kZPzF9w39POAkZ/cpjt7WvWasJcQisGJs9MhUBFH752Es/lci7//v1ucN03Qa3s20/olcRACE6htu74N9T3AKAost9b8wfroNzK2c9/UuRCIjlZIouvs8JJjWNCUzMRnu3+54ZmwwApj0rvlq1I7fYzka3y8hqU1oAgELMTgaDq6jgez/9UfDHYfaaPi8EUdAncabCCnEjH/385vbIuus/jy855GQ637HgheFR/OEvHllU5ltdK+zI77Fds/BjfvATQeM++dZ2WVRwpxxfSWraWB7JBCwXhShva7AwV7oq73nfq0vjYpTEdvbv9Z8tWW9TsSUO/5r/8bz/7bSwGaP9WwC1PspqtHV74JkxyQBg/PubT7/+M9fIJVx57bSHxmcoutz46HVH5v5YhFFslAoAUOa9b664FwDy3r//LUeUEgBkV97/ybL7AcihN2YtUg8d3TMd2p9fu+E0waiikk+6auKMCQO6JSiB2Au2L1v02S5GF1Nm5Otqi1hrbTUBlVvYjtc/6PeM7DxXDlBtvWFCMGaNle5AGYi+hpFHaZ0wJABBEDpyAnbasMuBXY5OOu7Fq7osOGn5vsCy4KRlzqDOnXScdAk7bR1lvCCI3ohqtYrnBUEQa//jeUGjUYaKCACVNQGAoBFrwVdJrZpVyZy7tuSJAMBGtdFAoUlsP+XpeTMGZ8bIQCSMLqXPpIdee6CnxWgVUaeZj0/onawBU8m5Yps8WmO14qAD+thqqqg0lhWbsW8vgC1HtuQRAFlm/7Yc8HZHxqB0FsB5ZHu+W97xxqdfunVAhgEsRivSt+s/5bEXJiVbzWbkK7gIuhg1ABAbD2AYMaWXxeoSYgfd1EsOokUEANDEaVAoFawmm9nkzn7wmftGZMRwQNxEHxvcXahmIfm3+gSLgjyujcJtNpodhDN07DPtqbtSkdNr/fmoj4vMQlBh7EaTwOsGXNM7M1HHARFEkGuxSd5nVkAJM8jTPSAAcFocAADq9t26tNOxLLGU551yyhR8ze7WT2JB0HTrk5FiUCLg3QSUid3GTJ0wJCOa4wkwuo4Db356eobZbBbFuoMZTdag6ncgottPHY1p24Y8AJBl9k1hQHC4M/p3AAD7oR3FmLiDPV/eG01ErNDKAQCspRVuxOLSPdsLAUCV2klPkEIl7vh27dpv3ntm/m/l1SoTBAScJhsAgCEzI4Zj1AnxKgAoPlEqk2GxqiOLjtWnJMWpVdW3nuHkHRMN8W0SanSMAU+LSBBWdZ3UXw0AF35c9P7P/9hFUaMilqKTu/cXgkojVkWqBwJSyEI9DqedlUEV9EGMUrOlv378VQ4Gbf8Hb+se03nSzOFR4Dz40Ydby31eQ1yiLPCxtdndJcHjblPqZG5AwUUO0Yj4nAz+/qOIi5G5RUCa9v0mv/jkcLfRygfU/Aorm3rTM4EtgNlsxuqs6/urAODCj+98sP44ksvUjH3f8vff3esCgMQRI9v4vnjZTRWVxoqiModXRGIzVhorKovLXTXEKzKTpImPv37PsG4JSgCBB3UbpcUikFKzUGdb5BDltdYEbdBnpCO4faeKCYFysxhUBspFAYdxoC8CaEQndjk8v93Ozmr3jO4pHxwouKdHSjfHedFu8QbWIgZjIiUirXjgeY+H4M6duwMfpAED+nmeKIU8VEQAEAN8ILxnfCPWQg2HLwcjS0gcM74zCwBCRZ
FVFDU9bxsTDyAc+WzuqxuKNX3uWfj4QH3fG69J2P8tJMezAFC84rW5q067EStvb4gK9rphXP/605/lCSzLxEfFIXt1zuZDW0+RTpmqrKvas4fPpAzppgBw/bM5z2noO2NMPED5T3Nf+PKEU9vznneeGZQ08l8Jaz61C4KDYQGAACJIEatFAFD0x5qikTf17nHdiPjDR8dd0xYgb83PzslTsxh1nAbxfHAVRq/av8LZ+4beSgDnng+ef+Ovcnm3exa/MFhXWxEFXBJFvU5R8NVTN33OaKL0Wn2X2+fe3VfVqX8Kt6vAX/0YfRxGXYILk7B/2QUpvHvXq7PfOORk5QpF8nV+JZwWLbODnGACAArG6Sj7c8m63nOuTU4eec//jbzjwtEdG37+edORcpVOWw/53btenf3mqZTb33x+fBwYf1/w0JKzHaa/umBcrLZHdgycMtcrWI5WywVTn92Z76eOUhY16NoZaenqble1Y4+eTRyQKQNw/vPnGZ0KuWurpSIrVxX9fdj8r6v0Hae/887VpyuxLqU9AIBcLWcgWuEqzfnj26NOnNjZGwcTIkN2cB75eNnJ+bdkDnjoP33uIpwKCSdWLPzpgkavcdbIkfj59WBMSE3LChPpZI0zKC49EQGA5fC+4lgNcjMcAqKUAcjUAgk0JmuByOWsLcTjYNDgimAK1ny0nXKNZv373/R9c3rXwbP+MxCpwLbjw8+PuTlfa5HHAAb/xzYtWq7UoMC4J3kRcZ7KBsBWGs1Ol2fmhOVkKbEaHhAA4AATGmOCA+baq2ohv++NR1/b69D3vuvtJwfrM0aPSPrj6xLpUnVVkesHvBCsBUhe87/cuLQEAADzwT1FcRrkQgwLbqRRHd1+Fnp3Al27FBXkOKsSfPuZNw+7GJblkKonAACYN7/z0Mc5AmLUSjXTu7o4RUXX2yemAJDTq1976btTNsJF6VlW00ZEnetsi9w8qbUm4BBNhPcZAQBwiXJQdAmUQadXC3W14ZTWQJAxQ4SQ2S1Ekepn5NAF41eFlvt6tl925Fz7ZPEKTXVQk1tgGI9DCkLIbnci5Pk5YuSIwPzMJpMU0mZzhooIANJYgi/SGb+I9cEw+rmvR1f/PL/+xxM8w6X26IAA8KlftpUkxUYXH9q02zxwlD62e6pm2c59uy6MHR+fOPX1D4Yd+nPt6p/+Pm0mKDFY2ky7KM4lU7tJjVEGZDm8/piYmaXtNaitlhnUUw1gP7jhuF2W1r0DAoDYCS8vnuANrUmIV5LTWCCIAQBMEEEKgwoAwH5u76o/RvUemzh2xo29u+nAuf+7PwsGjwNQqaJVSKYOqcLqiswkABBzN+4zRhvibTwf/MUnWFMngYE1mqHHTU/cP76zofq0XCP31dSjvtmF2A4hhYGS6oJpb5DZuShr6X6/Et5z1iLXyKokEKK03NFlr967rec1o4cOG9Q5PmvI9KwhI375zysrc4msXT3kR5xQcrBAGB/HqfRyOScWHC4Ux8WyKoMK4XoGK7Ay3YOqX50Pam+QOWUas/mfDSfJA5m67MHJarHPFRoAx6GNp9yMWlF7LdXIHKb8H+YudtwxaVCP5Had4tyel0KHyYGBwUQrxzbG4JMEIYQQxBABYtro5QBiSc5ZRUaaCrjOoyZcuev7YxaosejDf2CIEEKCnQwQTToljTgHDVBfFMjltB4J9jg4GbVSi4IpWDM7FWtzWva/93nfd+/prGLAum3JZ/ussqg2YPfNRQhaqTSaIHE10TrekwVrNPFdHn7/+d5VvhA5S+5/eacqWsMDIgHVS5IsRGkQ3o2To5SlR7cdcgwerIrtlCiHYu9V1N4gKyVRKClUCwA5PpMa3kLwvV0131JQuyjOLdcbjdWvVRolUup0FU65dwqWYMym9ExjAMiJVb/mJ0SrrYwcgWjhgW1Xd1tEJIsIIGhNIMAZLSGaCJ8ScorAtA8iAw9MQ2xNSosRxERgWJRrFXrJMRAMAKcc6PXz3Jy+HbKQJb
VPhwV/n34+WeikkmoOk2sXWEYmJcKwyGw2a7Ua6WdlRUWoXBFCFouVZdigEQEgcF2DdMYvYi34XRYdxqL847s2/bru70JB0cZ73bOgAqrn36KYwm+ee6Vg4oQJo3q27THinh79uy96ftHBGml7lEek6sWsRnZKmfvghqN8Vg9D3xEj9VdoAcy7N+XxCBiGAQCw7P9tW4GrKrRwoRSQGzGSRpggApKJINrMlrz16/LGTku7YogWoGLD2n9MpLsTQKXQqVlwhFTBoz7DyRhEavqFYM9vTslBYCPoxcRz8qybHx3fWQElf6349YAxeuhtE69UScEC1PdxMgsUxm+0lkUuucIWWMLLjlpA4eneMMFyOSEl//ywZM+qTwz97nz68eExyWOu7/rDG9twveQHBKJbBOAYFhFCkMALACxCqL7BsKzr1KDq18iREAQCKBR71x8TMrOi+w0bBFnRAPb9f57hsRLqrKVEryLlB9a+vm05L2Kk7f7QW48P04P51CkbEpTSSHjNV3wgxOrm2PTrZo5KBPvu/3v+v/sdmu63vvDimDbDbrvm18dWWznOZDNzrGjQqHlA/utZCMFAgAguAUAOaoPKwZs5GSMSRZQKAEBwCgyAWHGuAsAA2qye8Z+vKtTpeIw4N0Eib9crFELDvMx4UCoDH4cCASuADaogIcQlKpw2Y5UWciSIKekJ0uJabXpmvPywBfO+sRjgQ1QqKxI0fnGdmHcjmSezoHfFc9v87eoqEyHk8BsBBIiVSd0r8a/5DEIkRAtwAWS4LL8Uro4CfY/shKWrCnVRghsrsd3ZZWA7AABrYaGd+BZ8oC1FSECuAMAwqEpT6YqAAYDUpy1iGeQOXRMIig7VRPiVWlAZOIYaCBcHjDTy6Psnl8n+MiKCCMEiwWI7nfLl/qldHOeF0sIujvMv909tp1NKlwgif5mAk3HeiMUlFzDGQl1gjEtKLoSKCAD79x/w/QOAoBFr+fM+Ksbf5t869c6b73rsqQWfrD9YpNRqWJYRzx8+CwBMpzGDE4oqjJoew/vqAaD86Gm7yEVHoXOblr7z6L3P/O8EBtD0HtrOd2UP4e1WHgD0qe2154xCaaVTiWu4eOs47Di0bocVwDBwWn8NQNmmdfkqpVYsPl4IAKDWmQ78sGzFl0tXfL1m48Z1WysR5wZWEhsTRJAySgkAvM1NoGL3d39Lw4tnflhXIAJvcwEAqPRKIaQKNr7oWCEAoMxJo9raLWU1XpwFc5kNALSZWXGiwwE1wCIGAHmbJI3T5dImxSsAoGjHqp/+/HPr3lzv5FKA+lFIDF2eNj83DkyQCIrAErY5CSJACIiYtbg0GT27JiiJ6HaLoq28wgkAgFRaRe3yhw/ChFTfLyABNcc4Dvy6wwZgGHL3NbEA5u0bcrQqRqirihICBDjRaRdERt+x/21PzxqmB4Dz6345HaWSizWrsTc7QRAUbRK0AMA73RgQtheeqRABQKFTIGzkOky+/8H7JmWzdhvnt3EqAUKAAQGJFceKAACSr73uyjYai92dNur6kbEAIJ47Wa6SA2M5simHAEDCpNkPjumuRMjqxJw26co+XRRuO1s1XVHPP5UMAh8HnYoTQijoFJUutp1XC7MDqbJvfWS4AWyn9hQSSLz2sVs7O6yVvtW10oGDViqjDQLjWiwuFhNCgBAxSscee2fmlKl3Sn83v7RToVe7CapuRqqwOXBRqamk1FRWVhJ8WTaQ82Y+fsjYXgoAKDl0zuVX81kGhWoBjESObEd+2ucCgMRJD/97ZCZyiy5QZ095aHZfJQCUbdlUTPyXOBPib8b4WQgIIaH4+HkAQJnXDW9bVGG/UGq28qAiYn3aIjly11ITdG5TyGcEVd8aN+8Ug8nACUKDahH9a6m/IKMIchm3w4rGO1BHlgDBcpe1ozFfdNgAQDRXduTd2OUkWATE5NuZHTZWq/WMIshlnLGy0tomVqGobYgVIWS3OyorTTqtOmjEgYMGBsYyGY2BEe
uJRokMGp1nQxgAGbLbrQe/Wl82719x3e589ZvbCLAIAJz716w7z7vajXxz/kidtbLCjqLjGQAoP2sUodrbnxXO7zgh9OnOZd775pfTnByX+9bMt4s8fv4AAAzwRCj8aVPZ0OviAADyNmwqwqwGQeXeb7aMmXO1IWPKM0umCLzIyVg4982zr/5eglSeYXaMgXBKtQwAeKcAUQrx0Kofd2h6KvN+3G3CiAgOHgBAoVewpj1frR8XqMKGIpEhe7/+Y+zzww1pN8/9arLTySi9K0RZsfiPvdYhQ7UpN72y9Dqn6L1CAAkVJy/AwPaQdterc8Q5b+TlmyFFnzTptTd7njVzSVVLEFEQ9ReVOw4EFWb9eZHxmTQmBGy8Ere5akFACbMsEQkhAA5BAbED/v3Q2DZ+d/H0rsMmYFFJCPn9uprQtYGEOPYFiaYQ6tdIiQAhhEVOLBT8uLFsyHVxDACUbduQL4qaOmYZAICAzGJnBz/6nzuviFF5Sqly+0fv/FHKI7XOM6rknyORc5z96NajQlpW1NC5H/YoqmQTEvQMQOXubQUCl3LNLZMHxgJ0qNz9z++ldiCamrEJgBsUqh0r/pwyZ2hM1IBZ/zdglvd60YYVB52cUs4yzJaPlg949Zbuipj+0x7pP80bonT1U89vNbt4tgGb4bDgCnwcQKMMoSDYne52N1Rr8WVZx5n39tGAbduH739wfvD8hTeljbj3ll1z/nu03FtdnxDn/OdMkEol6q68JyDu1F1z1uRZiVzyaxH0OiVAtTpu8NQl4lM5DKOf95msLFr+/IoALeX9n337K0GpkgMAmLd/t7kU+9V8BThdoVsApVq7d8knWzo8dHVs7OA7nhp8R3Vc8fRPi9YUKNUGq6tGgoFeEQFDC4hU7ln217hnh+gzp8396haeJzJZ5YanH19pdu6psy0CcLNKTciakPvc96GaCL761jwlPvfan7uDyuDS1LYkmNJKCOpxDXKF4q0SmRkjQjAR3KLN7Bk2wKJoMxPBTQg2Y7SwhFMo5D7evqBQyHNz8908L4o46FoGUcRuns/NzVcqZKEilpeVB/6FiBgaXy0J1Lwi6tXMqW9en7d8d55JBBaBvfjAzx898+4ul8wg5xznim2MNjo+3sDZLxxa/8mbPxb5NGQkWuHY+eEH3x8qdQEoNEqh0qZS+Y7oEgBRoVIVrv/pmAgAjp2rdoJMdIOo0cDhT+fP+2bHyTIXACdjwVV59ozS6GpQAAAgAElEQVSTQT76iABIplEAAIgugahZJ6rc++6C/1v03VFQ6RGIkomg1CnUjD2oCpxKoVGjo1+8On/5rpwKN8iUShbcltK8vUcqAQxq5vgXC//3R045D4xSKQPBWlZw8MA5wokKFd703mebcowCYFuZQ8hZ9doXO3IqeVVix86ZbfWC7cLpQ8dNEK10BarfRikEFQapDDVuA2AgYpAS/qlYoVRjQgghCNycWLh1Z06xxeOgKlqKj/yx7MXXNhAVp1JxQeXHrP80SwjqYyAAACPkfR9MfV+vEwKACSGYCEqVqnD9T8cJAED+L5stLF8vh3mMicCL2mgVy5tL8v7esPy1R+Z8tqsUVFocqiITomBsyH74jRe/+O1YmUsenZSgJ+Zze374YO5Xp4hKW7p/72kX8AV795aKIpIHc0YgWpnDmfP9M6+u/PNkuTRCJVqLD/2+9Lm5qx0ycACjYq1g3v/6Ywu/3Hi0wOSZ7eZNRce27ywQkUDqoVoNgj4O1YnUlBBzHOfVYs8FruvUGf3VIB799ut/bKR8+8drSwH0w/89KUNevrG6utoZ1r9S/eeHik4h4uoxqxDr4VIfom741gEWmY/tyys2uYFRquTAV57e8d2iZz8+wCh9diORqgoItbQAcmRlhYKPnpr77g97cso8t8Vy/vgfyxc98tKPNhnrICzxS9D/+zqEEOJXqeM05NAnr7z67e6cCjcgmYxxl5fyMhbHqXGdbREhpNaawAp5a4I2EVEKh0
9LYo9R2oPK0MBaRGkZ0DWjrw76HLhdvF5wzI61pHG8VP08FxACgDxB/k651iRTKuT+W3e5XbyISdu2KQqFAny8e6T9EF0u97lz5xgWwhgxFBaTzS0SjYKoq4YQfMGEs7nc2GkXMABCGgWjUyl4xNgFpdtaLmKPm45KjqLUSifR2kwXeAIpesbJqd1Y4bSZBF4gAIhhUvScFQy+AQDAhdVO4wUBUKIOeJnGm6nd5RacDil9xDBRSsLV3DHQRbTOymIBUNso5GBr7ENCQG41G90C1ipBpdYKIVQAAAKs1cmL1RkhGYvitEoXYkSQ2Ww20eWWyoRhGKUctGq1COAUFS6bURBwig4cnM7iErHT7ikKQCyLEjTIxakC1Xcw8lDlKWK1X8kELWHfRepOUc47LALPV+WMlHJkUMt5xAFAKPndNTLSmaxOwe1SyFCUTmMSNG5zCUFsuyi5BXT1CWZDsqDq25lYX3UwYa0OB+/kdVfe/tazV0c79785e3Ehw1pIlM1SKhK/sWHQKzHn41FICGexWHhBZBhGIUM6JSsG7C1oFzQucwkGtp2Bs1dtZmcXZYLNIgjSLUAaOdKolAJibLyCt5YThknUyV0MJ21ZI7icDMvG65W+22/YeVZwWgRe2vYGqeVIr66xrNSNZS6HXeRdIgapCsk5FKPi3FUSilhdHx09tTrY4xBKwWot9KoLdlFwOTmOidOreGDNDpF32BDDJuvlRqL2VtcKFBtQqdRldiFUXCdT9yssAbnVVOkW/af3k/SaMotFqgN21mCzWUSBryolRisHtUolIBRY86GuFkAgcqfDLrhd0lWWRSoF0iiUkv9HsARZs9nGC4JKAVqNVnoizC4FbytHDJsUpXAhhgBndbhFlydHjmPidUoXYurTFtVeE1yMMlQT4duSOGWaUDLUeQsoLQ4aPXJoqGuCILrd7v5y+0CFM03O6xC2ECbXLdvhVO7m1TK5XMYFf8wEQXS5eY1ardVpVSolw7AYi3aH02qx2u12uVwW9ogUSvNDCGexM72fePWxLjJWwQGQnK+fe/O3Ql6eYBPjH3jlnh7+3lt84dpFH2w8Ty7+baQFrLZf6jpSKJTaNkpjWUalUuzhuV1WDcaSZxFiGIZlGaWSRQiFisuyjFold7ld9gt2v4gqpTwSESmU5ocQBKymjUxkFCoQKg7/8uUH64sZlQYTBEQel2yICYgi6FlM8CVQjwlc+jpSKBQ0cviQlpaBQqFQKBRKq6P+261TKBQKhUK5jOD8XWApFAqFQqFQ6CgChUKhUCiUoFATgUKhUCgUShCQ7/cVKRQKhUKhUCQ4h93W0jJQKBQKhUJpddD9rSgUCoVCoQSBmggUCoVCoVCCQE0ECoVCoVAoQaAmAoVCoVAolCBQE4FCoVAoFEoQqIlAoVAoFAolCNREoFAoFAqFEgRqIlymlF4oUSiULS3FRQwtwEsSelubzqVdhpe2doFQE4FCoVAoFEoQuKZEjvloTiNi4RNnjW8vbUq+FAqFQqFQIk2TTAQAqLh/QUOjGB6d3sRM68Ot02cEnrxh8uTJk69vhtxDEVQqP+Ry+RNPPN4tK6sZ5KkPp6ff53em49L/Bp6Uzjc6l/qUzNdLv4pQ+hPGj5869eZGJ14f/vnnH4xxz549/c4fOHCAENKrV6+mJB7p0qszo7AkHhYkCZtNnuPHT2zYsOHhh2c1T3aXHoSQPzZv3rdvf35+PgCkpqZmZ/caPmwYQigS2R0+cuS1114HgGeffaZ7t26RyEKi23K2QeGPTBUjJEkTaaqJcHGxavVqAGhZK8HbeN06fYb32Gg0PjTT08o88cTj7733/qxZM1uPlRBIoDUQ1GhoELU36/XpBRuX/q3TZ/y0di0ARNRKKCkpWbd+Q/fu3Tmu+qETBOGrpV+PGD68iSYCRL70KIEcO3b83ffemznzoZYWpF7k5ORkZGS0tBQ1qKw0Ll68mBeE8ePG3X3XXQCQm5u79uefd2zf8eCDD0
ZHG8Ke486dO70HETURoKrX31uKHtnKvDEID0ggoY4bak80J5eXiQAAq1avlgwFP1rPa1C3rKxZs2a2fishErTgS2qkrYRBgwZ9t2r1unXrx48f5z25bt16k8k0ZMhVEco0XNRpYXgDtJ7nKNIcPXbsvffenznzoYvlIc3IyGhVVgIhZPHixRkZGTfddKN3zKBPn969e2d/++3KxYsXz5nzbHjHEkRR/PvvPR3at2c5bvfuv++4/XZfez0S7LqAHt/GLByM+8eTWo4jKkMTuexMhIsCyUp4882Fn326pKVl8RAzbYp+7Gjzuo0VS7+NXC4t28FE1ErQaDRTptywfPmKq64abDAYAMBoNK75/vsbp0zR6/VhzChUd97EEfivl35Vu6FQZ4BLCbfb/e6771kslgULXpPOcBzXtm3btNTUkSNHdOzYsUWlq6aiovLcuQJBqB7EPnXqVKdOnVpQJC9/bN7MC4KvfSCBELrpphtfnvfKH5s3jxg+PIw5Hjz4j9VqHT9+HMuwX3/zzaFDh5o+elcL9bQPHt/WqhcNNNVEiJ73gOcoLgoEEYxW/xAGLXAslJmkXyg+uok5XiZ0y8pyu90tLYUHWVKifswo2+59FV+vjGhGLT7V/dPatZEbSBg5YsTGjRuXr1hx/333AcDyFStiY2NHjx4V9oxqmVIJe16XJ3K5/OGHZ/mOIgiCUFBQcPLkyTcXLhw+fPj1kyYxTEs2/fn5+ampqTEx0TEx1U1uqxpF2Ldv//hx44KOEyCExo8bt3nLlvCaCDt27kQIDRwwgGW5b5Yt27FzV0RNhHraBwsH47s2tV4roakmQuWLH3qPo+c94Puz+vzCR33PN4+7oh+1Oyq2VNN5sTTZ2qEDXadyyz76FAgJe+LeQqilNKRLEfJE800woneEYZjp06e//vp/Ro0cCQBbt257+qknWbb1TkP6cflMItSHrK5dH541S/JF6JaVxXFcampqamrqoEGDPlj84Zrvv79h8uSWko3n+dTUVL+Trco+AID8/HzJ/yAo6enpn33+eRizc7vde/fuzcjIiIuLA4DOnTP37NnjcrkUCkUYc/Gl0gW+fX99jlshHhMhL+ek91RaRmYLCRMEfVQNjxWzydiIRMK+kCEsUoW9wQ2LVEEhbr7kjfeJmw9Xgn7UvyguFqMqFN27devdO/uLL78CgF69evbo0aOlJWoYYR/miVylbQa6du0y++GHN2zY4OuOoNPpHnrwgWfnzOmdnd2IGYemF4g0fhB4vin2webNm31/Dhs2rNFJtRT79u93uVyDBw2Ufvbv3//48RMHDh7s369fJLLruXaa9FB43dJrP4CpLWB/S5VNqmO+x754TIRuV1SPtzjstmYSsB6YTUbvM9PQpyVyLz1NkSpyRE4q45q1YUytoVxirnC3Tpv21NPPEEJmzfxPS8vSYMJ+C8JSaQMNF98zEa02Xbp07tKls99JnU43Yfz4jRs33X13yLfkUDS9QILaB01k2LBhXishXPZBampqbm5unz69g17Nzc0NryI7d+5kGKZflUEwoH//pUu/3rF9R4RMhIsCqbLpowxmk9H32DeMx0RoVWaBH5LoracnlmiiVHV6fjUu2QiVFavXxdxxCwBUfL5MNFvCm3idRKiJ53n+u1Wrtm7dBgBXXTX4xilTIu3eLKHT6bJ79ZIOmiG71k/rfMCbSGZm5p9/bW1c3KYUSEVFpa/zQRiRrIQwjh9kZ/da+/PPvXtnB7ojEELW/vzz0KFDwpWXw+E4ePAfjuMWL66e8mZZ9uA//zgcDpVKFa6MLjrqtBLCOtGgkAc/L2tqy9s6m48mShUhn7Kwl5WmX++YO25h9ToAUHbJrPh8WXjTbym+W7Xq3LnCV+bNI0A+/fSz775bFendkyRWr1mza/duAIiNi7112rRmyDGMBFbOsBhwEXqUWpDk5OSioqJGR290gRSXFEfIRIBwzy8MHzZsx/Yd33670m9RAyHkg8Uful2u4e
HLbs+evW63e8iQqww+8zjRMdF//bV1z569rX/VceSoc66hqRMN3j2YCcYoShO4JTPBGDg25qM5BGPUoi6+lMbR5uF/e49Zvc73Z3iJ0MhKKLZu3TZv3stSe3rXXXfOnTt36tSbI93ZnD9/fv36DampqV27dlm/fsPwYcOSk5MjmmN4aYWdsdlsfuDBkJsXyeXyFlk5fP78+aSkpObPNzEhsfkzbRwIoQcffHDx4sUvz3tl/Lhx6enpULV1ktvlKq+oOHrsWLj2nNixc6dWq73n7rt9RwpFUTx48J+du3ZFyEQIdMT2a+K8T1NLPVYN8EVo9ESD8OY30gH7wPXE5hS//NUvAHvbGNAohQ/XePJ74iJ7baKcnn5fqJ2Vm76jYutBFHGEUi6vqPjh+x/27tvXs+eVxcXFbVNS5r38EkLo6NFjSz79NDEx8cCBg72zsydOmhgbExP23C92B886OVdYWMvV8O45UX9OnjyZFgGfgDqJ3BBCJIiONsyZ8+wfmzdv3rJFWr+Qmpo6dOiQ4cOGSTtThWX7OJPJdPjw4VGjRvnNJLIsO2DAgI0bNxqNRmmrkvBSm3NiFb577DY/vgZBqIGrpk40mHNOe4+j5z3g+7P6/MJHfc+3yKJHSqQxTJ7Q6Lj16cbC3tVdddXgTz/97K677gSAJUs+7d07O7zpe/nh+x8USsX8V1755ddfjh8/8eILL0gr5m+/bcbL815JS0uTLv3w/Q+SMJGj6cvwWqHr6JHDR2q52iImgsVi+Wnt2icef7z5s4bQKxpaJwihEcOHB+5/EMZNZrdt2y6K4pCrBgdeGnLV4A0bNmzfvmPs2DFNyeISprWvaKBcFBgmTzBMHt+4uH7bEgT2PREytKfccMN3q1bNnTtXFHF2dq/9+w8cP34i0Du96ezdt2/+K69ERxvGjhm7efOWzp09JnhmZqZarR47Zqx06fkXXmiKiVCnCRWWpb+txzKQMJvNv65bV0uASNzQ2rFYLB8s/nD48OEttcdiamoqz/MymaxFcg8j4bIStm7dmpKSEtRsSktLa9eu7datW5vfRPCbfWhtT5aXi2BFA6WV0xT7oAWRyWS3TJ16y9Sp0s+TJ08ueufdBx98IOwfd+mdnf3Lr7+MHTP2l19/GThggO+lgQMGeC/1zm78MEaduz+1+DdOI4Eoiu+//4HL5QoVQC6XS7tUNQPe3RV/WrtW2l2xefINikwma/1jCWj5QgAgUx+v5TgsVsKCBa/WcvX1115rXLJNxG/2odVyKX+joXXOwtZ/M8HWQy0+B2G3DyKne9CUvc9nZmbmzJkPbfx9Y9hNhImTJv7w/Q/Pv/CC5HBQz0tNwU/TsNgHra1OAsAnS5YcOXq0lgB33H57QkJCM0hy6/QZ3m80PPH4463hGw2SfeD9RkN2dgR3Go4okpWwaeOmsLgu3r3nHQbB/3rPruU4XNTprnhRgJqy3CjmozkV9y8I9TPUecOj041vL210ppSwUHqhpG27Di6Xs6UFuVihBXhJQm9r02nNZXjv3ncQoI97P1zLce20Zu0iQZNMhMY5HpJzF0wrNzQ6U0pYuNwqetihBXhJQm9r07m0y/DS1i6QJpkIFAqFQqFQLlXoXkYUCoVCoVCCQE0ECoVCoVAoQaAmAoVCoVAolCBQE4FCoVAoFEoQqIlAoVAoFAolCNREoFAoFAqFEgRqIlAoFAqFQgkCNREoFAqFQqEEgZoIFAqFQqFQgsAVny9saRkoFAqFQqG0OhDP8y0tA4VCoVAolFYH57DbWloGCoVCoVAorQ7qi0ChUCgUCiUI1ESgUCgUCoUSBGoiUCgUCoVCCQI1ESgUCoVCoQSBmggUCoVCoVCCwDUotCgKAs9jjCMkDaVBqNSaSGfR4ne8GXSkUCgUSlAaYCK4nA5jZeXZM6dtNmvkBKLUn06duzoc9gZFQQhxLKvW6uLi4hCqYwypNdzxSOtIoVAolFAgs8lYn3ACz1eUl544fqz7lb0M0T
GRFotSH06dONa774AGRSEYO13O8+cKXG5Xu3YdagnZSu54eHUsvVDStl0Hl8sZVhkvI2gBNh1ahk2EFmBzUt9RBFEUCgrOZHW/on2H1IgKRKk/BGMAIIQghAghAOA9kI69V6sPEFKp1GkZmSePHy0vL4uNjQuVeCu54xHVkUKhUCi1UN9hWIyxzWozxNDxg1YEJp7uE+MaBxKEEKkrJVVgjL3H7TqkVpaX1ZZ467jjEdWRQqFQKLXQAF8EjDHDsJEThdJQMCa+P31frwkhSDqoetX2vmdLAZRKpbuuz3O0hjseaR2DMuO222vx0OQ4rn379jdMvr5nz56NSDwot06fUcvVr5d+Fa7EfZOSzjcx8dqzC0XYM62T+kjlS/NL6EczCHxpZNGCWe/bt79B4bOzezU0ixanYSsaKK0Kbzfm7Rp9u0+o2V96R+MlGIYRRbFZxW0ULaJj7Ss4BEHIy8t7482FTz7xeLisBN/mKTc398W5L6Wnp897+aWwJE65GGkGG+XSyKJls/bt9fft21/7z2aQJ+y0jIngzP1u0WfnBj/68JDYGjMd2PTPjyt3K6+9/dq2shYR7OKC+Ayq+71eQ0DHKQVACHlH7MnFsHi1RXSc9/JLX3/zzYkTJ71npOYmNzfX9/yq1WvCOJDgZf2G3wDgX9eMDnvKzYNf0xyhsYpLnlpegsNbmPV52/7g/fcMBkMz5Ng8owi0NtafFjER3Gd+XfbzoUETNAwAn//VQ/9eZnjmq9dHxiLzniWLvnA+OfGuulLwjXX5rmrzecMGAAIg9Zre69KR148PEAJCMEJICnNR7G/RIjqmp6ffOm3ai3Nfqv18UVFRIxKvHaPRuGvXLoPB0L9//7AnTrmIaLZurPn7y+bJsXly8RsbqP3nxUjYTARszd34zVerf9t5rNgmAtIkXzHitqcfHdcuyGiAWH5o7wV5l4HtlQAAwEa169otWY0AnPnbT4opN2YZ6tHrV8e6fJFc+QAAY+LtFBEC7zs3gKe/9F4F8B5UR284xHVu/esPLzg49L/LHumqqPt842kpHdPT0+sM0759u8YlXgubNv0hCMKIEcM5jk4CUiitHTrRUB+IM2/13IcXbXekXj3lnme7JqqFstyDBy0xuqCubsR6cvtpSLs+QwMAIEud9san0wAAgC/8+5A1qmfvpLonGXxiXcZ4RtGlnpIQ8I69V/WcIL1fV3n2edcEekI26g0bW/O2rPjkv1/+VYghrh7nm0iL6BgKaaJBOmYYZtLEiWFMHAAEQdi4aRPHcSNHjAhvyq2QtWt/Hj9+XEtLUYNhw67evHlLS0tBuVjx80a8GJ0TA/GYCHk51dOuaRmZDUoCV277v4cXbTfc8H9fzBoY67EKhoyaBADYmvf7N5+uWLfreKkTQJ163auLn+jDnd5+Uki+rls0AyAWfnvX1P+1fXPNq/21uOLw7iJwKd+Zcc2pYocm7ZoHXnryulQlwpW7P/m/j387mFds4SFq5P8tm9vPvNIbK0zlcFEieftLPSEAEKh6dya+/5M6SwIIAQICBFVd8FssUD/E4vVvvL0xasJzL1Z+OG9H3eebSkvoGITAec24uLjk5OSwJO5l165dRqNx8ODBYZz3bZ2cPnNm2fLl3Xt079ihtv27AtFH1SiZeu78Vq+U9foZ06efO1eYk5PT4LgRkCqii1zqk0WLZNpsKxrClZF3bCCUQdCaBw+keitVV99jXzwmQrcrqtVz2G0NycV5Yunbv5k6PfDBQ177QIIv/HX+zAV/cAOnTn/uvralXz7zbpE2So74wr0HTdrufZNlAMR6asdZSLsxTQ1ArKe25YGqc/b4W4amy/PX/GfRm6907vPxTSmOo9+t2Goa/+LC0UnEYjf00IB1rzfWZU31RDup/re0tPTtRYtcLpdvSJZlH5k9OyUlJXj0BsAmT/7ghxsY5Dr+7kf1Od9UWkLHenHhwoVn5zx377339OvbN1xpXuyOivXnxx9+lP59+O
FZDYpoNhm9/XEY7QMAmDTxOqVSecvUqa/Mn9/QuJGQ6lJdbnCJ+Vj4WQbeyQXvgTdAK7QVpHqrjzKYTUbfY98wHhOhgWaBD86cnzZckPWfNa59zekBPn/Zc69tjr7zo/fuzNIgPm/JBV57Rf+2cmw8uruQzbw3VQkArtM7Tgkpk7IMDIDzzLbjfOL1jz04uZMc4IqYKd9vXLKnwHVT7NmdJ9zxo8de3auz3JPlwepYlzUYY2nsXRp4l9z727Rpc92ECb/8+mt6ejrLshjjvLy8sWPGJCcne1YJShP1NfcgagCICe4AEup802gZHQPwNjf5+fm//f771q3bRFG02+3vvPPunDnPdsvKanoWubm5ubm56enp9XGDuKjJycnZtXs3AOzavTsnJycjI6NB0aW2LLz2QVJS0qhRowCgS5fO/fr23f333w1NIexShdrNIkKEeudu5qwjNIpAlzCEok4roakTDWLlieNGaNcvU1uzf3Ae+/bb3LgpH0/L0iAAXHloZyHX5b50JdgPbsuFDnd31iIAvnj/IZO2W+8kGQBfsvegUdO9f4pkBxCRF0GhU7JC6YH9Faqsfm3lVUn7xrq8IQQTySOPECCAq6bq+/Xrm5eff/r06YSE+KKi4g4dOvTunY2xCAghAoAQAWCAJY13V2w+WpuOqamp/773XgDYsuVP6cxPP60Ni4lw+QwhrF7zve/xU08+0dAUwmsfAMANN0xmWc8g6E033fj3nj2NSCS8UjVzr3bpbWAQKtluy6tHu2UMdDaQB7vjqxs7Z+g30eAdM2jNgwe+1DnX0NSJBiI43QAKOVfTQuAvHDxk0vS8qqMSAIBYT/yVC6kPddEi16ndx52xQ69swwFg8/HdhUynuzsqAbDp6K5zTObdqSoAABBL//6rgEm/vZ3MvG9HAcqYnq6qSto31mWO9IZdvQkxxgRActm7ftLEhQvfOnPmrEqlunHKDRhjxDDSpgFe17+LZtFj69Nx9KhRXhPh9OnTTU/wslrrmJ+fH/S4BUlPS/MeJyUlKZW0dblkOTK1ejs1HsOeUvTCLgb6NNJKqNMnsTXbCg3wRWj0RANr6Jgih33bjxivi4+pHvgnLosTWM4z9MyXHDrNxw3JimHBWXSyFJKyEuUA4Mjbnovb35apQwC23K05pOPdnXUIAPhzP7/1v5NRo98cGOPK2ZaD203rqvem7RvrMgdjTDwbBJGq3tMzGs8wzJ133vHhR/+9ZerNcrkcPJclZ38kvYlfLCZCK9eR48KwR3VE1zpGRxsqK4O/4EZHt4BfZN8+fTZu2uQ9bn4BAjl58mR8fLx0XFBwzuFwtKw8zcNFtyl12JExMDCBvNIfFh1krk5ualtR+6LHVoivQRBqDKypEw1Imz1jUvL2b+c9uuDc1CEZBsZakntwb1nfO/ukyZdt+Xzl8Lt7khO/fLyyBJKj1QwApzEoYc/vvx/OnpRZ+fcxe0z/K9twAK6CHUedyjRZ0aEdhw7+vuKrDQUZdyx6pK+ez919xGbom53gnVRwn6+OdblDMKl6w/ZuQSgNsQMAxMbFzZ79sF6nq3oF9wzBe/48nW5rp6V0zM3NreXqb7//7j1uuutApNc6pqen79mzVzqW3B2gSsEW8XuYOGkiAOzdt693drZ03OIs/fobpVLVvXu3s2fPfrLk05YWB6BZHPJbpMtvhSsaroghOaZGvnJeLlsnNX5FA1J1f2DxW4b3Pl7zxYL1LgBO37bbwOv+FTfo0WcmPL/o46ceYGKyxky5bSD+2+XAAJqe984aefr9T788MvLxyr3FsswB7RUAhHeycQb37kWP7WZ0ba8c+u+3/29yn3gZ4JIje4plXQa19+7CgyuPeGNd9mBP/4kJIVUHAODdO4Bo1CqMRe9HjgCAAGKAASAESBO2Tmo+WkRH3/0P/PC6K0o/EULjxzV1cX+k1zqOHzdu79590kzN1998c+u0adJBWISvk8BGPzYm5q677rzrrjsjnXX9sV
gsby9a1NJS1KDZ+u9m/uxWa1vR0G05+9HVuL2ukblcLlsnNX5FAwBwsX1nvNQ3oJqNfmrJ6Ke8v2Z42gN5h+teWnqddLzkr0meq9pes/73U+DqJyZh0ifeMFVnltQ8cxmDMQYCBEur/wmWOlOEoGorAfB+ywCBZ34egQiYYRggTRuEVxD+I3sAACAASURBVHR5ePVfD9f/fGNpER0Dt14O2owihG666cZOnTo1IgtfIu2o2KlTp5tuuvHbb1cSQk6cOClpFy7hJd6KCo9x81hYPf70TZDKN27YvSPrQ9AqF4n+taWmDyKtYP3Tn/s3eqXfRTCk2iLQ4fqLGO88PRAi+fwTb8dJPB0nQgiQtOGQZz8hydePwEXjrtj8OjIMU3vE6GhDenr6+HHjmtLF+jVhfnZJeBvu6yZM6Nqly9qff/bOLzRR+IuFpvfuTbEzmkKoChDpz2I1W/qRNk3qX4AldvTvzcjXjbH+XC4TDZSLESLtGQAEECBAWDoGRDCRdidGCBGCETBVew5K4/BVs/sXg4nQIjp+9eUXYVYjGM389tapU6dHH3mkOXOkRIKW6lkvlvQbKkDjLAOJVu6NGBYasPsQwzAYN740KWGn+pPH3s8WIQBpEyPk+dfzB97PI3r+LS8vU8jrcOhoDXc80jpSKBQKJRT1NREYhtFqtMbKiohKQ2kQGJOqbxYgAEAIIcQghBiGkQ6kY4ZB0n/ekCzLHT54sG2HjrUk3krueHh1bBOf4HI5m1mFSwlagE2HlmEToQXYnNR3ooFluaS27Y4dPiSTyZOSUxgmDGvBKU2EEAwEFApljY8beRfveP1vUPVPURQtFtOhgwcIEdMzapuKbiV3PKI6XvLUf+F7iw//RoiW8iSgUC4NUP3deQiAqbLidH5ei79ZUiQsFktDozAMo1KrO3ZMT6vHxvit4Y5HWkcKhUKhhKIBJgIAsCzLMAwgurFhq4B3uyOdRYvf8WbQkUKhUChBadiKBlEURZF6LF5G0DtOoVAoly2X+/eUKRQKhUKhBIWaCBQKhUKhUIJATQQKhUKhUChBoCYChUKhUCiUIFATgUKhUCgUShC44vOFLS0DhUKhUCiUVkfD9kWgUCgUCoVySbJ582a/M/RLjxQKhUKhUAAAJlw30fcnNREoFAqFQqF4sJhN3mPqrkihUCgUCiUIHACIoiDwPMa4pYW53FGpNS0tAoVCoVAoHjiX02GsrDx75rTNZm1pYS53OnXu6nDYGxQFIcSxrFqri4uLQ4iOCUUcak9TKJTWBsMwDMPI5Iqwp8yZjJUnTxzrfmUvQ3RM2FOnNIhTJ4717jugQVEIxk6X8/y5gnPnCtq16xAhwSgSLqejoqIiPy/XZm3wJ6opFAolEjAMo9Vp23dIS0xK8fsoLyHYbDYbjZXSJ3Mb8UrJFRScyep+RfsOqZEQndIgCMYAQAhBCBFCAMB7IB17r1YfIKRSqdMyMk8eP1peXhYbG+dNrfRCSdt2HVwuZwtockngV4ACz1dWVBw7crhndu/omNiWlY1CoVAksIgvXCg5deKoUqmMaxPv+23egoKzSoWyY2q6VqsDAIKxy+UsbMgrJWez2gwxdPygVYCJx0TwWgleg0A673sgXfWeadch9cTRQ74mAiW8iKJw5kxejyt7dUhNb2lZKBQKpRpDTIxWpz38z8GhCUleE6G8rJRlmbSMTAAi9RiAQKFUpmVkngh4pQwFhzFmGDbC8lPqBcbE96fvEAIhRLIUSNVwgncsQQqgVCrdPN9EAURR3Lp122+//56fn//10q+amJqX+a++euzY8fqHD2PWYQRjbLVYY+j4AYVCaX0kJibv3rENfCYaKivKO2f1AACpl6jqKwAhaN8h9fiRer1S0n0RWhFeJzhv919j5KCmTeA7wAAADMP4ji81jq1bt235c8udd9yenh7OF+XJkye/8cYbb7/1lsFgqDPwrdNnhDHr8IIxZljqE0qhUFodDMv6uVE7XS6VShUYkh
CiUCjcvLteyYZHOko4IBhLZgGuOvCcrzmz4Oem4A1Mmuxm/9vvv986bVp47QMAyOraNT09/edffglvshQKhUIJBcEYIUQIliYafP8YhsH1e6WkJkIrwmcUAbz30msoVJ0B8FgJBCEgBHuHEpq+Ei8/Pz/s9oHE5MmTf/99o9lsjkTiFAqFQvFDensEQISA3x/Uu7+ozUQg1r/njx8y5Pq3Dwdxiieuc+tenjx08qJjrkZJTwlEclcEAOnOeu9l1bgCEOK9696ryGtCeKO3QrK6dk1PT/v113V1hmydjggUCoVycYGrlsgBQA3rwOdqndTiiyAWb/hko0Ujw2uX/Dlj4TVxXmsCW/O2rPjkv1/+VYiBOtCHEc9MgeRkUHU7JS8EyQ0BwOOMInkvetc9ekI2YRTB6wEgHUSin548efKbby4cN26sVqsNe+IS9fFjoCYIhUK5HKgyAqT1cZ4haAQeJ7YmmwjOkyuXHku4ddF9Z+e8+PmPp4fflSaTLojF6994e2PUhOderPxw3o6m6UDxRVrRIPX2AFB1T6unFwAAQDIICCAECAgQVHXBb0FEg5A6zlunz4hcD5rVtWtaWuq6deunTLkhQlnQ7p9CoVAkMMbSOyYhGCFPRyG9e7IsaaKJQEx7v/7F3Gv29T0HVU5MvHvNyiNTn+6pBgAANnnyBz/cwCDX8Xc/CpMqFADwNetI9b+lpaVvL1rkctWYz2FZ9pHZs1NSUoJHb61Mnjz5rbfeHjt2jFqtbmlZKBQK5VIGYyx1ItdfP9n3/Jo1a4A00RcBX/hz6XbmqmlD4hhF+sSbO5t/+3pnpTdBxKDg0ShNAmMs2XiSj6L0/zZt2lw3YYJGo7niiit69ep15ZVX6nS6KTfckJyc7A0mxWr9JgL4LNqkUCgUSuTAnnFpsnr1au/J1atXS61wk0wEvmD9iqOGa266UosA2MQRt/SB3Su2XGjqsntK7RCCCQAmmBBMsIgJxlgURaFfv75dunQpLCx0uZxnz57t0KFD797ZGIuYYIJFQjAmmACQVuyuKLF69epRo0bSIQQKhUKJNMQz0UAIwau+WwkAq1d9Rwj2vFo2YaLBmfP992dw6Zn7Rq/yObtyXcG4OzrKwiE6JSjSKEL1/gcYEwDJLfH6SRMXLnzrzJmzKpXqxik3YIwRwyBCpIWPkntjJEYR3n//g6FDh1xxxRUAcOLEyb+2/nXP3Xc3Lqmjx47l5OTOfOihsApYA+quSKFQKBKYYELg/9m767AotigA4Gd2F5YFli4lBCQUQRFRQgkRxUZFxeIZ2FjYjYmtz1ZEfXYXdqAYYCsmAiqISEjn5sy8PxYQlqVjd+X+Pt/7doeZ2XNn7szcqXuIoo784dy5szhBCHrmo5DVfQNOVBMh/+P522k6g9eu7Kld3DMzJ+bQvI0hIV+Hz2hd/9kmkSKC60IkKbiaIGghkIBhJElSKJSxY8fs3bd/+DBvWVlZKPqz4IUGTPDyQ0M0Ebq6dd2xY6ff1Cl0utyWrVsnTBhf61ldvHjR2cmpOh0s1ho6/CMIgggUn3MK3qEvflgRwwTPvNf6cUUy5+35R3l6Iz0dLEpdMtD1st89587FT75LbBTqrQRIWSRR/CACSZRcTcCwopv36hoaM2fOUGIyiy8zYBgmuMpQtMrJOrzRUJE2FhYzZkzfsWMnAEyaOLFDB5vazedzVFR0dMzECROqHLNB36pAEARpIv6ccxY/tVbyEGEdnkUgMp+ff87W83DVLXNLQdGir71CTtj5tznoabMGQxS1EQhS8Aii4KEEkiAIgiBwAucryDMIAicI4s+fihoTBAmkJHeddPHiRXt7ey0tLXEHgiAI0iSUunNddJggSLLokbXaX0WgqPfY/qBH+REVO6649bjMEHqrGRcfz6hV6IhIgndUSELQwwFJCBoMGAbF3SVASb4GDIqeQcAAB4JCoVT/JZYa+fT5s9CNho62tjWdyeeoqC9fotcFBtZ7eAiCIIhIhO
AQQgiyQJMkSVKK+lAiSKhrvwiIGJRcFwKSJEiiOD+DoPvMP9mbABPcVirqCgMrSuTQII8rPrj/wG/qFMHjinNmz3785HEtmggXL14kSXLhokX1Hh6CIAgiEinotl9wQRoDkiRxIDAAgqQIDhjVmQlqIkgQUtAvApCAAQYYIfgMGEmQgh6YBWm7MKAU96soSPdU/ARDnZsI5R8CmDbtzwsI5uZm5uZmtZjt0iVL6hSWxKBQKAQuuXdzEARBStBl6ZnpaQpMJgkAxJ/++wEgMz2NLlutVw8oFAqFIFCHBxKhJOvGn/6FMABBV1VY0f+L/gEUJ+Qo+n9GRno1VzlSOxQKhclkZmZmiDsQBEEQYT8TfigrK0OphwWb6ep9/xZLo9GKbkpjmODgQqVQPn54p2vQojqzpSgqKGZnZTZU1EhNFF0WAhC0AjAMwzAKhmEUCkXwQfCZQsEE/5WMSaXSPr57p9fCsPTcNLW0ORwROTqRahJagFQqTU+/xcf3kT8TflQz1TqCIEhDI3A8JTnp7asXpuYWpU/4jU1MC/LzIx6HZWVmytLpsnQ6jSbDYrPCHz8kCLyliWl1Zk5rpqcf9fGDjIxss+a6FAq16imQBkOSBJBAp8uVSeBU+j0VAezPVxzH8/JyPryLJMnqrnKkdmgyMs309OXk5b/GfHn5LFzc4SAIggAAUCgUeQVFaxtbTW1tPo9XMpxKpXa0d/wRF/fm1XMWiyUYkyEvb2jY0tjEpJozx3JysnOyMuPjvqNrCWKXl5dX00lqscqRuqBSqRQKBTCUpQRBEIlBkjhB1P3qZlhYWL/+nnm5OSVDaBiAuoamppY22uuJHY/LFXcISBVwHMfRXQYEQZoGGqC9HoIgCIIg5VSQDBpBEARBkKYNNREQBEEQBBEBNREQBEEQBBGBFhYWJu4YEARBEASROLTuPUTkbGpoe/fsmawvoSkjv94PMd12W9xRNJ69e/ZMmTpV3FEgkkh664b0Rl4l6S0airzxlUR+986d2s0B3WhAEARBEEQElMYJQRAEQZqE/p4Dyg8MuXK5ovFREwFBEARBmoRKWgMioRsNCIIgCIKIIP1XEWSNZHt5UWUBssPYoa9IQtzx1AiZ/3bXoq2PU9kkAABGV9Uzs3EfPMzDQhkl1Gp8JCfl1dWzVx68jvnNIoGmpNfKtsfI0T1byqOuyesLmfdq86wNL4pykcgoNWvZpqNL775dW6lIX4Uncx4vnbAjpswwavtFhxe3Z4gpotoTVRam2+q9U8wlP8E8yUl5de1cyIPX0amFJGDyWiZWjv1GD7PXlOQqJVjgXN896z1K4uTFn5g575bRsqB5beu/Brm6ugIASZIZ6WnJSYkcFhswTEaGypBX1NDUUlVTxzAMAMq/4SjtTQQK1qIzVRYAAFQ60tTf89KkK80Bnp+SytbqO2dqJyWclZv27fnVC4cDvnG2BQ7Uk/ZVI2VIVuzZFcvPf1e0cPec2EZXEfJSvn36lktSUfugPhGs7DzQ7Dlrip0ywc79HRf56Ob+ZTdDBy1dOqy1gjQuaq1es6bYqRZfjaUqtZD8Y2qFypQFk1EzkBVvPNXwZ7Pt7jmpTXMGPyct8euXXAJttuXhOB4d9amwoKCliZmSioqsLJ3H5WZlZqSmJOfn5uoatKBSRbSqpPw4RNOmmesAcIjffIqWAs3ChP/oMymhb1NWTKG5aatW6hQAaN/BiB09P+RNAmuAHhNV8kbE+Xb23/Pf1TyWrvdtV7zku7iLN6a/loKumUUbTSoAdHB07+l8dMnKi1uOWG6fYiWFjQT55matLST6hLX6pK8sgs1WtceSDeOtS3aYPTzFGpNkIkky6sN7wLBODp2p1KLjPl1OTqe5rpZOs2+x0cmJP3UNWpSfUKqfRcAwbTsaA6Agkvf2BQEAWnZURWmq4GWQOCsj9vGNp2lUUzsjhvTtK6UaK/b6/TRa25FD2qKWWePCFNsM8rGl54Zf/1Io7lhqhcAJvBghda
cnZUlbWVix1++nUS1HDGknnZstyeeVwuXhDbfMU1OS8gvy2lrblLQPSlAolJam5mw2Oy8np/yEtbyKUPe81HjStzrOASjqMk4tAYB4e4MXzQMrV3pzdZouzrlf1znXvXTVRRIkCfA9aLJ3UPEg2VYj/LtqYo0XQ2OWV1Lh2fEJhaBloc8gcOl6mqWh1XPdEFR4IAi81HKm61pqw6vkuEyONaP+Lms2eK0WlCX+8PQRh4sHGYzeEdhbq8HPUuq/aCLK0so/OMBesZ5/p34jF2y22m1ayDf8ZtsgG8KPIzN9jpT9g6wxgdfvbwnmlpKSZGhsSqWJ3sAoFIq6pmZWVkb5P9V+i6xj/mg8+XtdJgcA0HaQ18EAYjjhL/Esggh7Sx9hg5l1wi7e47Pq1BprvNTYJE4AQPMBC/06qVAIbl5a3Otbp04uW4WvDeivJ9NIQTRieSVV0TkTSRBoWZRVz8tDUOGFljNOEABAkjiO4/V3Mtjga1JQFu1esyfbqwiuxlLktJQbY2uq/58QURb5ZvT6/536nSPOx0kADEpXJzL/1ba5B9jegQu6qtdnW61BNgStnjMndSp5kIWffHdH0GsSr+edkGBuOVnZFhZtSw/n8/m0Ui0GVTWN9N+p5SeX3mcR6FS7PhQAADPGght/HgClO9LNVfiRWWKLqxboanoGBmoUADA0sTCT++63935EUq+hLRqtjdDkURWba8vCu+hfLLKZolRespRiJCvhYwrItjFQkca9kZyWYcuWGlJ7e7MMaSsLlamrLQvvY5JKb7YkLz83n82RiouBDO2WpmYlC5xHf9Nwz4fiOK6gyCz5mpGR8frNGxsbGw11dcEQKpVKECKWmtQ+i0BvTbdWBwDgZBG5GURuBpGbRZIAQKN1dqRIbbGA4BRwAGiyNHSgakwME/eOirzIcze+1e0CFFJTZP6niyfecJkOvczlxR0LImUYJu6dFLlvz938zkabbfUJ2gdycnKvX79Ozyi6uUCp4Kgpje12AMAwo340eQCIZ+/046TzAQCAgnXYqDTECvT7yajdLh4oDQqTYqKilEhOQVZy9LNbNxKoZj722lK6ZqQUJm85wtfp8/bLK5Yk9PKwN9dWwNhZv2JiWLY+Qy2k8Dl7yVaQ+OXTR2WSk/s7/kP43UexHOMBC0dZSGf3E4VJ0Z8+ppTsXalMA7MWStJzIi7dMHnL4ROco7ZdClic0Keng7mOIoXz+1MmAGptVigjI+PV69c4jufn5wPAq1evbG1tS64llCedByJMiebYCQOApJu8zJKmAEFGX+fjVjSqkWx7Pc7dePHFV30UBU1NmdhbOwJvAQDQmNpGFp5TPfs76kjnipFiFJWOk9avNL906U7EqT03+ACYnLqhlVNbHgGA9vj1hiKnrAixoXs2hAIAjalj3NplQkBvZzOp7Sss7e7uDXdLfTefuX+ZXX0/5YdUhKLSYWLgSvNLl24/ObnrGh8AZJhaxjbW+gzpvZLcsNTV1fv07i00kM+v8IxaOo9EZA7vUD8R72fkPyhY8qDxw6k9TNHab9sRP3GHgQAAAEWxpZvPXDcfccfxF8MUO/jvOynuKOoHptR5xfHO4o6ifkhzWSiKLbv6zO0qXZutqAUu02LYjhPDGuHHK2kQlIeaWgiCIAiCiCCdVxEQBEEQBKmh9LS0qKhPqakpPC5XRlZWU0PTxKyVrq5uReOjJgKCIAiC/P1evXz+82dCu3bWHTt1kpOTz8nJ/pmQ8PxZuJ6uXoeOdiInwbZu2dzIUQq4x14Ry+9Wxz1T1Mk3giAI8pcwbqHHVFbjsFkuXbvRqFQcxwkgcR6PJEmChLD7d5WUVZWZCsm/0/v198zL/fOkH22yfm1eKMWTvtWle8SEHFbbfY9rPXlDI8lH4g6h8Tx8+NDFxUXcUSCSCMOw3K+x4o6iacKTQ6Y4riIDr+0briPibY+Cl4vtlmvtm/J29NlB4Uc9dSp4qKzww5ZBe/T27/RuIR3Xi/GEow
MsNxuFPN3hqgAARHroohGhfc+sdVKVyjdixU/JxJQkSQA4d+pESkrSgEFDqFQqSRAYhgFBYBhGEATO57u6db9xPYSCNSs/B3E+rkhKHjEuDQRB/gIFL+bqm5i5Hv7BKxnEjdnQ21TJYcUrVrmxuV9WebjNfccu9wdMhqml20xTWfShHU+P+pCuqI8l/WZRGHIUAF7iWf/OSu1mhBWUGovMDttzIkpGp8wbpUJj4r9vBg4zb22qZN1/+tVErmDI2iGGJqZqTr6b3+RW3UshJ2q5q6nutLAsEgCA+22vo0kbzxsZBAAA8fvyECUTUyUTUyVTO5fFl75xqpxdaRQFbdVfIbuupKBe0euKy+W2bWct6HGZBCBIAgAIkiRIkgTA+VwjI+P09PTyE9auiUDB9HoprLitvH67rHRm2EIQBGkw5JvdOx7mCE45iLQH2/6NqWC8/G/PEkUeNikaXdc8uRrYW3R/yLz0uBx5fbXC+BxlIy1ZbsIJ/yHjr/4GoXOcvMhjEVjXoR1USnbSwmOS2Y9XTziS3X/9gd0eWUeWrHuYS6SHBvj+lzVw07717T6smvbv6/LNGiF0Y6+ezfOeXH5bAAD8xIfXPtJsff5kHgAA00UHT18KsP91dpHftdQaHe0Zpv0GqH08/kTEoQupEQ6Pq6dvACQJACQABpigDgjOjXEcb2liVlgoItlqTZsIGCga0ez+UfDqT5Ore9gIgiB/Gwzkm7OvLz8dxwUATkzQplBCi170N37qjbXDWpqaKrXt5Xs0ZP6QWU84v4O8rMxXRX49OVCr6/zZw221h19LIyE39B9lE+9zv2IP+3tom5hqdJt1+JvgYgPrRUC3rseS00Mmep1P+3Vi+LCzEeFZDitnWgqdrrETn7/ntvQo1bE1nh0VUWZMkqLde82OPSv7uw7sZ8UoTE5i5b05/6Sw3ZT5nt1Gzxhp/Pvuha9/rnDkPhin3c7Le7CDskmn/jvPLBtup2RibjHpLOneUy//2ZUYFuAZT27EytgOdlYrfWRhmljZdPMa218T//Y68lJJ8c98YwHgGREbJvVoZmKq1nn0hrd5RU0X/q9jY9opd9/8Fjfs1lY2+kmdc/41eSRBKCkpA1a05kkgCSBJkiCBBACSJOlyotN21bCJQFGhOXrJGGjUOWAEQZC/E0ltPd2vbUzQv6FZ+O/7W3endlroY04DAMCTr80fe1Zu3rnQ5+utHm08aTJ/rD5ojD4S/mpBGzkg2T8vP2o+Zt1U2+Lzfn7c2fmzrslM3HPqxISOLZiCxG5y1v5By00Ytiv2zTJgOG19cH7ksD0ntvxjRBdqIvAzE3IUDQ0UKASPw+Zw2BwepuWxu8yYFKXWvcZ4GMtD7surrzkm7vZKeXHJbCVDfWUK0LVb62JpUWm80vPkFSSrjT34YGWrZ9uX3nfY+XT/UCJ06/Y8t37aGffuxRXmvLnwmWo/xF6jzIGFJPjczC8Pw9OA/LJhcknxVy+9mIoTBQnRpMvGQ7umK0esXX01EQcAPOX2moVPNGdsnmotL9vMTIOTjK4i1BOSBAAMAMMwDDAMMIwEwS12GpUq8lZ7DR9iIbLxj88wZir3q5LcoK6o3yUEQZByaC0GzBx11GfFfzd7XHuoMuzMQJ21qwEA2FG337AK2MtG9aGQfBbeIpFNpQJGV1Ri0ik5ACDrErhiWg8mAAALAKDgU2gcabFiUjdbPapt8cwxWTl+SoGCiS71N0uldQsV2Qpu9hLcQp6MFp2Se310h5EvAKDdvqdnR2iWHxtPubNy4hXlaSf/MZPNDquiaM2dOrVqk9FeB746uLRt0yLDEE4nFRpM89Dad+/h5w6fXpLttzqolz00vJ3gaAkAjHb/TJc7vbHgZ0nx47JwmVbDDgUBAPfTG4Nt5+PSeACFdyYuBtnOW6dZyWOAyynKArdAZChIzZAkhmHF7QAMw4AEkgQSI0mo+Dm8mj7nSpJJT3gABL99XUJFEAT5i2GKNnNmWrdfOn
s7wyFovKX806LBFBkqGM8Ovz7FTAYAgBO9rczL3xQZWpUnXpwvq/sPCUomYcIEAIBRkx3DDg8SlYWHylCmc7ILCQWnVefu5OBAVTJVpYDwAwtk/rtdg/3DrFdfXm6jiAFp1Ewu90diDtGJ9vvLL1LDQ1NUTnqszCeSlGvt2UP7xKXdhzO47Za4CLUQwHTxofV9DHRNWzCezbyw0XhaSfEBgJ8WsX3d9mNPviRmFoI2CQAg3yNoQf70FQcvJPaaakAUZLFATqmqhYJUw5/2AQAIrihgJAkkAElCRU/rowsBCIIg9Y6q33/BuOYUo1Fz+2uX7GblLHp1ZHw/GnjmydOw4wsDjkfxZGmQHxP5Ib5Q5A5aoU231tjnE8EP39w8tPN0HAcAgG7qt9pHn2GzbuPg5kynvec29lQFbmFuDotPktz8vAJ28UsIMhot1Qu/x2SDiom1fYcO9tam6jRSaEzuz4u+Y3fFdxjZl/4u5PbLnxz59oMc5CL3bbn28PjOE981uw1qWa2Hzhhm/fuqxV14ltfeq7OW8BOWzJaWVlaG6nKYcPE/sVivtvmvfN1u5427Z4Zqliw6LTffPoqfdvz3sRBYCZ/SmUZaNVv2iEjFdxkEn0kMwwAE1xIAyJLHFISgJgKCIEgDkG+/4cGH1/OtSmUmpmj1WHtkgsnLDWM9Ju1+SSorG/QY4yAbtnr6vOtJolLr0AyHrN/Yg713oveEiz/5Ra0IKiU/gWXUzV4lh6vn3NlUS55MPTHC1nrRO5IVOqJLv02xXMF4srp2dopx19/n/ml94EJjZj/dsuZmLuQ+3Tt9tv84vzl7Y/ma7quCRqmELBi/6H2bRdtnd6xmXmWGuXc3DcCsRzmJfgdDZPHlKbIt3Xu0zjzcv+e4YI6+avGlBYpSx2leuknn9t5LjL7+CaycW1YvCKQKZPFFpJArF6+HXL518zoJgGGY4JFFkZNghWc31eKXcH57+eHdKPCFtXYWN6/C2xgVEXSdJIH9EGCY4LqZxAXWcFDXSUhFUNdJUq7w6VL3gT8WRB6psG8lCZcXMa+dX+72+/v6ob6TaqWk66SDQXvH+k7k8fmC1xzlGAzBCGm/Uwg+jhO4krLKz5ZliwAAIABJREFUmVMntHSaC/euKJ7AJRJWfKWlCTYUEAT568jbTpoz6qwyVq97MqaJqeBDXtnmY0XD64Ag5EwGTHPuhtoH9UJwYMMwIMnCgnyCIHk8Dvx5s0E0cTYRsApufiAIgiB1J6PvtXlOfc6QaWJa0gKozue6oSjbTNpqUx9zQgBK+k0iSfLqlUuCYV3du1feszDt6/2QWv1aCDxaWasJpQZqwSCIUvGpIYKQgofdSlUJsvizyOFoByoJHj58WH6g9wgfwYe036kYhlVysKOZbrtdi18lcFxkT0zVFBwcLIGpkoQWUxO50YAJvQmDIMVQ3ZBAWMXXhBtBRT8tcrjQQKzJ7FQlB4ZhgkfNvkZ/Lj286EYDnwtQ4YOKAuhZBARBEASpLoKPYzQqhmc8P3uX4jG0o5r0PQtacqPBzb2H4MXHitS0bLzfr89vmTd+2LDhI0ZNmrvxzOsMUa/qSKeSWzIo6yOCSDRezEYbY9+HeYJvRNoFT0OPkyl/shIWPBln3OVw8p8BZNaNIfrtV38qlzKJ9WK6uWNQYlNNJUgA+AM4AghldDwHYAdgD7AMQBqXDevFdH2GQYcuXezbt3OfEyLyjVKh8c0dgxLxCuvJH/kPxthPe1oIwP1xLejA9QRuvQbe4ARPLHqP8BH8A6jiYFezJgJZ8OXCwbs5pv2mzpk7fVRH6serW3c8SJPGGoQgiPSSMfae2Cx0/9McAAAi9fa+dzZ+HtqV7M0wla7bbp6ZZk6veJQm6TRAAgCj7MAUgGUAIQCPiv9JI7k2s8+FPXkWHtzu2qJ9UdXMQl11PcGzv37NwgEAGDarH4SusJaefIalXtkrLMjPz8vLykyHSh9EgJreaMAULCdu30ORoZI4ju
MWmilvl9+P/MXtrsmoeloEQZB6QtP3nGK6IehJVvc+Ssm3DsY5zda+NM750E+mCsEznnJwTTPg/by0YMip7zGxhY7bru8ZoPJhg9csq9DHE3ULXm0bN+Xkb6YiVcd7/39jiudIFkYd9JtUModtQ/RF9Tz8V4kEOAqwFWBm2eFvAVoBaAMAgA/ARYCuYoiufuB5v1IIbVfZ2OCxU0tW7mqtZfYLqB7tieTot3G6/mcPjm5WPD5bVD0ZvHpUxtb1d38XZib+zlOUy/iePb1vnBqWG/WswO/Nm0UG3w9Nn3wglirHLVTpuyF4kTP98ZgO8whnc87vhKhUy7VXdvRTjt0zeszxTBUFRvvFRwK7iuctzlIXyTEMA4zEMEzQ9zKQZEUR1fRZBIwqQwXBcygkJzuDDWotVNHzDAiCNC6KTi8/6xUHHmb2sLl6KNlt0seZSznLXt/to5Z9a7Tb0seBVEyp47zjy6z4930sl1xN7lf0ADfwYvdN2qu57fU5ZyWST2A0zkvBcH5c8Ng1pefQ84gbU2ylawTpAFMBTpe7xQAAGQAlyXy1AZIaNa56w/myd6xbUHS8wbRDR11uD+5ReuWuJgo4VtO3zzGlJuzr4rL6+eA9wpcNytYTfl5y0N3lGrTE/U5db7drzcZ3XgtypL5f0mE0AD/+sG8gP+BlWA9V7udA+wEru78LBLwQt110aKYx59G4tgtDkru7XjsYabvl2Q5XVbE+t1DSkSJJFmXXAAwDCibIDC1yklof3vGsl8cPvZFzmtPz729sIwgiaTD1rn6OC7fdft3jaM6ATdrRQ9hpl1f538VIFktHns1l05T1NOgAmKqOAi+3JG8BsBNepjT3smBiAFjplEmcX28Sys6B/Lvf2gsD4AJMAygAeAkQALCquMDqACXZl38DNKtoFpKN3mrKwSt2exxG/cSwFOGVm0dl6ijLAGCqJrrcU0ks0kho6jL1BMOzIo+uWRWVz/3xPZtnxgTZUmNyEl780na2VMYA6AYOrVgBsXmkHlAUNBRpADhTXY6bw6Yaj9k2cfYSh1a449ygnRMsFcRVswT5mkgSSJIgBQ0EIAWXECq43VC7JgI/LXz/il2RhmMCfG2UpO9pTgRBpJ+yw+Ru08bO2qnofdWmVXhrrXc+q7Y5K2Mkn8Nnv5y0QPREdL32Wr8efs7zdmby8gsxxeLdl5xhlzJzoNL/5vYBAAwGGAwAAPEAEwBWAmAAXwEMANoDfAH4DaACcBxguXgDrQMK037ZLrcOU9d3ndxKi/ln5bKejZvDzmUTAJyvD6IUOpgoYMJP1JWuJ5mv1nktJoMjgmzY52ND91Iwgk/8OeWmG3TSS330MWdsc1VuQkQUw9qfibGEI6GqdfY/+nhW4kFXl61vRh5yUmjoolcMwzAMIzGMQgL8Se2EVfTqYy2aCLyUBzuW7o0yG79ymqsOuoKAIIh4KNhMGMANvjv6pClD32jf3Oe+Xe1BlSkrbzV1s2dF08ia+e0b7zO5q7OGGkPbc9sB36LhVP1RZeawd9MgvSZ2C5UN0BdgF4A7wBqAvgAUgO4AzuIOrA4wVbe122ztliWum8peWrJyN/UFXvzJ6QMupv1IURocdKI9A14JTVi6nmh26dNJ6fTa2XnGTBamrNXeg71mmKNXwKlFAABAMxwbvOjlBI8ua+k8lppX8GFbBjwWmhv/55lpYw5nKFGytSdvaV/NzFgNhyQF7zliFAwE/S9TKnxoEWOxCms0b1bM8flL7+n4rvXvqlXr9kFwcLD/7HrtF7T+NLXuYppaeZHqQ3VDAom366S6kJyuk/If+rRZ1vPlw5FadblURHLS45MxXUN12arHFZeSTfhQ8P4h3iPodLrglX6CwAmc4PN5fD6fwHECx4GC3bl9U1VNs25pnIiM8CPXUvQHjTFiJ8bFAQAARV5bX1se3W1AEARBmgyMrmFkKO4gqkuGRkv4EW9qZi74ikHR7QXBxQMKjZqQkMBUUCw/YQ2bCJ
ykdz9I4FxYv+RCybBWM/cv7yTGWysIgiAIUl2KLsd+SGlvD7XFYMhFvn5lbNySSqVC0YOKgn+AUSgYhsVERzXTbpbPYgtNSNu7Z0+Nfkne0dVNaND7U8HvaxyxJCdJkuTYGkJTKy9SfahuSCDpXSWoOjU+QRonGRpNXl3xwb07bj16YgAkkEASGACFSqVQaXfv3NTS0mEyFUU0EaZMnVqLX62PNE4ScVOqvKZ2/7WplRepPumtG9IbeZWkt2go8sZXksbpyoUzpiam375/u3ThrKVVu2bNmikymWwOOyEhIerzJ00tLQuLNkmJCeXn0MSe2UUQBEGQpodCoZiamuXl5v78Eff2zSs+jycjI6Oqpta2rbWqqmpFU6EmAoIgCII0CUrKyioqKoLeETDBy46V3v1BbyIgCIIgCCICaiIgCIIgCCICaiIgCIIgCCICaiIgCIIgCCICaiIgCIL8hXJCulGwzifTavy2XnUmLD8ON3pda0xj3JOC2sSKSCrUREAQBJF+BY/HqGOluVzIEndMiNRDTQQEQRDpJ2c1/+Ktm+cD2gK0mHT42s27W52Y4o4JkXqoiYAgCCL9qCoWLh493W21qaDSxtWjp3sHLRrA98PDTekYRX/A3mgO/nOfLaPl6KkuavKOm4P9u2hTMIzZZuSBKBYAL/746DYKGMYw8dr9hQ1lJwTgJV6e7dychmFyRr1XhaWX7lmXE3t4VCsGhukOPBwvpqIjDQc1ERAEQf5WbHqPLSF7BxZeWbbhZQEAyf5+7IHB9HnGwVMPyi+J+PrxoO39mZPO/Cr4sGvx0YLx19/c3TjCXltWaMLcpNNjhm1L7Pdf2L1tti8DvPzv5xQ/g4D/Oj1x0gnWqOOPTk1oKSPOkiINAjUREARB/lYWI8b27zFoUCvI+pHJBwCg996ya27nggRe3t35blYdR59K4ad/ywBVY13KjwML193mmJooU4QmzPlwPpzTetIcb5duo+cM1s68fzWmONtP4eeQlzzTcbOGODn3dtYVXzmRBoI6YEYQBPmrYRhA8Wk/RYZGwaiyNGi1NvLd4layxeO0ufaUsWFN4LoRjh+O7CTLTliug94/A0iCjwNNTgZlcPxLoasICIIgTYq85WAn+S87AoLvPLmx23/q7g+FRM67OzGGvhvX9Ffm/M7mCr3uKG81uDM9KmjbuUf3j209n6rm1tdMruhPjJYuZhB99vST9+GXb/9o9JIgDQ01ERAEQZoUivbA4DPzLJ7P83Dqv/oZqSpP4aU8v7iyd6vWw5/aLtvsqS10XKA2H/bfmdl6V3xcus142XHlhW1uysVXDWSMx+5c7JAQ4OY8P6qVpTy10cuCNCyMxSqsxWQEjuM4XvV4FQgODvafPafWkzco6U0NXjtNrbxI9Ulv3ZDeyKskvUVDkTe+ksivXDhjadUOitM7CmV6FEhKTEj+nd6vv2debk7JHNBVBARBEARBRPirmgj5j8cbWS5/eNJd2+14MiH8FUEQ5G9S+Gqx88ADMV8ODDZkKJr7nPzJB+DG7uzRfsqDHJJICxnbZcLtTKk8+61QNffqgiUTHSVBS0ZKI5f4JoKIXkWTnk9vjmHaE8OFOgPnp0W+SlMywn4ksygKchShr+IJH5Eg1a5L+Q99VDHDBZGFqcccisel63WZfOhD/t+1v20Q+Q99VDFKp3+/8koGcT6utsKwZn7Pa3NXU/Kwog+NtlLCMEzdZsyhqARxVRIy497aPZ9kNWLXrfz0z70Lzo+Wb39fkBqy4F+YvthJGaMo6qonnlh1IbGyO8ISuLKyLrjIKLjv/y6IKOeqO11l+IP8oj+W26tz406M0MWUhobmlZpF0ZJR/VKHJdOwkRPJV+c46dIxTLn9hFPxXHFHXokaHjmJ3M/X9q6YPmbYsOEj/vFbtP3Kp9wGPj0v36uo7Y+Tl5MplN/Xjr8vW4d5aTGZ8kaaBbFZymbN6MJfGx/v21YrwZ6Dptt1wc3UCtYqN2abg4rprB
f5We/vP03hAZkRMkhVqefJ5MarBU1EDepSaeYLLty/F7JrCHHU1+mfc2i1VAv5cvWK0CzBsZL4fX3pxo9iDqj+4L/OTJh8kjY++NS+CeYkn4oBiKeS5Dw/FIq5+1plxeZptTM1a6uW+elrxMblsd4bhuvTAIDRZsRgzdeH76ZUFU3DrCzu132DPRa+qE1Dg18YOmPsgW+88n8pu1fnfPtvpMOoU0kg1CgrWjLt8r7Wcck0UOQ6hXdm+GzP8Dp47eCg9OCJc0KzxR95RWrYRMCoZCHFuOeE+YsXzBzZnnh1blPQ69wGbTOX61XUIu3sxdT2C5fYZoSc+PCn+hU+nWpit+tn2sl+vQ8nJ+510NFRLfXVeeDFDLGc/1EN5rzITLrt/WPj6KXPRKdAk9FxGefv198g68qkQUueFwKm3Gn6ps2zXTTQw8H1rLp1SYhKa0eXbv0mbD6316Xg0sqjX0Vs/khZGCjqss4sCIrmAgD7466FIXjz4rfkeL9CZpfp+hf/HbrasxUTw2T03Fc/zckO6San6jzY1UAGo+gPCorliLMgovDSohJ4au17Dhw6af2pIxNNZAHEUknYcWGRnNZ92mrpmyj+fhcd/T5ThXlv6eV2gbPbMQRjyJl6dKRH3YlmVTqfhlpZZOHPd69icmpzLKMAxn3kP2b/V26ZwUI7eef+waGP0rutX9lBqFOG4iWjXccl02CRO3k977bx7MX1I/sMGW7LyE/4VYCLPfKKS1QjmEKboZP+6dPZpq21nccon45y3ITYDH7DhCYaO+r0hSSTocOHDTNLuXzyU8mCYnRYc3WthbzdrivzWyq4nkhI+hRR6uv3qwPVxdS3B0ZjNLP37KKcFjy6sz4dw2Sauy68nZoZHuCiIyeroOex7XNG5IG16zb69fN7lvNgmKGampaF16IdV39wicxHq3q2KJ4k5echO3ldVycTZaqM/qDg7+hAVWcV1SXRqFr2PU0h5kHs33GxvEGRtHZzlnb8smH57XR+6tUl//5yCfCzogEA4EmnRw8PLt31L07kfYsie22/eX6Oaujymad+EcDNjpEbe+H1hTH8S0s3vpK05U1v2beP/u/DHnoW/eYd/5BX5jJqY1YSXvq3bCVTQ6aS/bIVFsd6ej+z98i6Q52+uqdGyV6d3txKm/0zpZAEgIInE/X/3GOT73oitShyCVxZVLXBW2YYP5kzZl9s6S4ahHby329MnXjowXFfMzmhfXsNl0x9ql7k8Y9WTJ44qJU8ZD8/Fc6xGNBZkyr2yCtS21v0JCcj+uGtT1ydTh2bNWYPjeyo0+d+Gnj2MmnZvb/hr4unPhf3A4rJMnhJeUwzQ1pqgVqblhqK8qW/qsuKsfMvkhUXHp4D8C3tn8c5ea+nZW7+x3/ftt0vTP/9mPgqeIwJHQDkrGZtGKRCcz0dn5kZf8KhIDGNXfh2zcgV30cVT7LkaQHJZesueJbyfpl8yKaTQs1UpMYqrEtI3ckY+awfS7+4aPv5LYtvqE5aN0RPsBMs/HgxorBM1798mZYTT4ZsG9ez78iBLSE5Oo0PAC1797a16uJuDBnfMyStMYwxnba/efbf4r6MR5t9HEZdEc/1SQCCk8+TZdIpQDedcCGuIPVE66cJ/VY73R/RUoGu7bQ8PIcEihyTDpx8LgEA8rZrHsVEF3t/rP+fo059ryxe9CYbOibXLvBr5qUeShjWbPTDfJFFqAgJ8rZLTiwzCZ87ek/sn/UvtJOvcK9ewyVTn2oSOZ58aZrPCbXZB6aX9HEpxsgrUpvDO+vthgmBrzkANNMhAYNNhJtwDYn95cy5BIhfb81YDwAAF05/DuxgIwfAfrfUxmH3TxL69gEAcDS8o/I7NhOKvrp5OsfdHqolhmYCnrC5k/JmPmgyAbid3SyUFDGXTqrL38mP8neaMsVcd9e4Q9eWi5608PurZAXrkkkiY3KAothcV5GuqqdMYeew0TsadVNhXaoA/vvZrVgw+8dUvrEilGqYku
OilQ5mk0Zsku92dE4HhftFg8t3/ctPubdpTsChu+8T0vJBt9TxFgMQvsksIWgadqPXnh06cI55xwvPkzz//KExKwlVXpXOyigUXKLGf51beEw/4BZ1t+urvjdfGM10WXZ7eqhnQUYhMFTkKABQ+HKRo/OhlKKJZZyOJYSN0ilqJdTzypIxHn/5k2fu5y29fb6vCNvtpKysp1DT0mEKNgtPBFyyWbGABMVhAFBuJ1+0V6/7kqln1Yv8Vq/4VX1GXO+w9/VaR6WSI5N4IxepNr8jZ+G7ds3yueP7tUk7t2LNlYTGO53lxJw7G6808MTb6Ojo6MjT3qoJ589+YQMAyFn6753eQt5x63/jdJU9Dr99de/cjD9fn/7XR0M8lxEozSdfj03PL0wIm6PPfXP3Q1ZuzP1nmRrt27b1PhiTHuqTeXznY8EbLBhgGBA8Lp8o2tAUWnZqXvBnEmtzZfRWRj2quC4Jy/z46N6dy0Fzh0x5yOi/7B8TlM6uemgtRmycbEBp6bfOS7fkuZryXf8WPl82YnG43YEPMSHjdcQZb3XlP19o127IqqPnjx+++YvRumNzGoilkshqt9YoiP6SiQOQeRFrVyePXufVnIKTFBlZWRkqAAlQGP8mVcmsOQMDAPmO6yL+XEX4fNJTs/T+pH5XloyqgYmZqaEGncbUNzEzNdZm1Gb3K2+94PiKtnhJ00NoJ//0vz4awM3Pzi7gkwQnLyev5Kyphkum/lUZefe8IyM9Vn3vPHUA4/nFi48S2KSERF5ebY46GF1D36S1TVfvmTNcFeJuXPvWWBdoOTHnzsSp9Z3Sx9rMzMysXe/Jnprx587FcAAAqJTcr4Xm/TurZXKNejpb6DPzS39triCmwytGU9TUVlegyVkvPrHS9JSLmrLtHo35RxcrnRhv17yF5yXdSYvdBa0XOZNeLkqPhhi5H0gARWUleRqj7CRr7GvcEEcqVlldEhK72dvDY+C0s9ioA0+OezdHT5FWm6LDv98Kv2ywLXVKXb7rX7qp56A2advc23jsZRurSUH7S66FW2+DdxtGD5l4SmHC4b391DEQSyWRbeHqqBRz5WUWyfsavCSi+9YZbegKnRYusjjtZBmg4L+2p0bhxytvwLpXK3kAAExOy8jUrJiJHlMoxnpfWfS2a2MzLnZn1qGIjHbzT6zvJFccqdBOvrkC+es/VzXT8c/JwpCB+tZrPnFqtWQaQuWRK71dOutqNuTcD5wwcsRwr1HbJShyYXXqgJkTc2jWyohWi3bNtKz4+qwoqANm0XA2i6AzGjdpmvT2LYo0NOmtG9IbeZVKFS3/ySTTnl83xd4d1UzUKVBu6D8mXtn7v14ZKKZrqEIacaXU85L5CyKvdQfMNXwWgRN340Ik3dhQkwH5qVGPLtzPUXLuaiSOTgf+SlQ5BjpHRRCkWhTtFgSOOaiKiT54EQTDYvCynh6S0T5oXNK7ZCQu8ppdRSBy353ceSTsY2IOH0BW1cjaZcCw/rbaNX5dAF1FkBxNrbxI9Ulv3ZDeyKskvUVDkTe+xk7jRFFqN2rJ1uBTZ8+cPnXy8M61Mwd3rHn7AEEQBBEXPPXSUFVM+U/fwKVISN/SiMRAD8kjCII0GUT6zTmTzmWDqFM7ielbGpEYqImAIAjSRBCZ9+ZPumExtZ+ODACQ2XfHaGIaPnfSvu/pLKc88Fz0Z4noWxqRHKiJgCAI0iSQ2Q+XjD/fcs0ub0HnB5iK67JlHbNOLVq5astrs4WBAzv2k4i+pRHJgZoICIIgTQCZGx4w7ojG4n2+xsWdJYKM8ZgNY9Xf7D6SOXDDpFZ0CelbGpEcqImAIAjSBHC+X70Sz3q7qI2cgsO+X+mnPXoe+oWTrPTUAgDI+RqTxYfivqUjQmfrF7x7nlQqRV9R39KuqAPypqUxUzAhCIIgYiLXdsWzb+Nz+SSwIgO6T8z998jw5rwPyxbf0pm+qd2xeYHrI4b63hww6VufOd7Nnhf1Lf24qG9p2fibG2
c9ZPQ/hTogb2JQEwFBEKQpoDB0jE11AKAwU0VWBppp039fXbo9oeMG/8ldVA+1C1x8aeLS3gYXN4w+X6hqO+nw3n7qj7cK+pbeDDLNHccceLIFdUDe1KAmAoIgSJMib783MQ0AAIZczRsCAAArP5MrAQDgaq+APyMaPiV9Gj06RJKgZxEQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBEBNREQBEEQBBGBtnfPHrH8MIZhYvnd6pDk2BpCUysvUn3SWzekN/IqSW/RUOSN7+HDh3WZnDZl6tRaTEbgOI7jtf7V4OBg/qfrtZ68QdHa9CHJJpQDFcOwJlVepPqkt25Ib+RVkt6iocgbH4ZhLi4uAHDlwpnazQHdaEAQBEEQRIRaNxGI3HdH5owc5bvrM7s+46khfsa1wKnqw24l1v6KRiMhcp6v7aJlt++ncKR44sHOCoZz3wgWI1nwYdfwzvaO9g4DNzzPJYEdc2ics72jnY3LjJBkiS9kzbFeTNdnGHTo0sWunXWfFffTqygimXVjiH771Z84FQ5nvZhu7hiUiJd8qGiSYryYjTbGvg/zBN+ItAuehh4nU4hqxcb9srHnhLD8apa1qkj+VrykqzMsVeyCijZSfvK1eT062jnaWTv6Ho/jlh4TTwsN6Gff2blzR3vv7ZH5EnraRua/WtHFZuKlJH6pgfyEY8NtLS0tLS0t25ioyxrNeyPO/WIVhNYIAAAQ2Q/9TRUdj6US1RhZnITi4SWcm+Zm59DZ3q7PstC08jFWWK7GJxQ59/sxX2fHrt26dHKfcz2lTOSVbCONrnZNBLIw+mzgv4/zxHsNgp/634Ll61KUlCU+PSlZ8Hqdl98TeeXyiTXxXzfPy3hbPTv1kQUA/IRjMw4b7bgf/ug/p+uzdn1Jvbc8WHNzaET4xaFvVh+KFW9daSBybWafC3vyLHx/q5OTAiMr37FiKl233TwzzZxezeGV/wkAAGSMvSc2C93/NAcAgEi9ve+djZ+HNqVasXGTPyZVf51UFclfiRd3eFTPwJ+aJRtpwfOVS2PHhkRERFzz/Rqw8lnBn3FZb9ZOv+dxMuxRePhh61OzjiXwRc5SvIisBwFLEqdd3j2weemtmWbgc+rVx48fP358c97XtL3vmDZyYguxcuXWCAAA/+fpacsS2hkzqjOyGAnHQ6RcmLoszS/kSfij470e+S18lFt2/IrK1fjKLcn88OUBcRMuhIY+ONbt/vx/P5Tau1SyjTS+2hzk8YzHuzeHqv8ze4B+vcdTE1QV92mrr/tZqEj83RJMzmTEgds7++mUayLwE2+cp/kunNP2xckPLIC8dzdyunq1kcdkjfv0Iu6E83ufjtjQSQEASKDRqdL6wEw1YIrmzgbZ779GBo91dnTv09utx7RzP7MejjHv9M8EH29Pl7b2fldTcWB/2OA15lwKDmTuq62DO9o6u7l2HbE3hlsyvLySP+WXmxsAAND0PaeYPg16kkUCnnzrYJyzn6s6JiK2D9/eHRrv6uDSrauDw8C1jzJ4qTdXLr/y4cbCod6LL1xYMci5SxdbC6vBe6M57C97vO0du/fs3n/Rg+SoUp/fF0WSE9Jdf/C9PADIDxvZwuVkGsl6MbON9bDxo4f2cug8bvO6KSO8Bzi37TzzpojTIulC0+6+6vbNFZ1Vi/eMCp33vb08vJngK5VOK72oSZzA+TgJJCcrLSv1R5bkNRHw1JBpY09nft89wmvB5Z+88iMQGbdXHdddMLG1xDYEy60RgMLIzeOPtv13tXO5XamIkcVKOJ78t6c/tRrtrk0F2ZYDfTSeXPzCKjV2xeVqfOWWJJWpxShIy+eTnKw0LlNLsdQSrmwbaXTlT2urwku69e/BBIfZa5w0I+42QETVh9H1jOicGLHGUE1UZSMjfnz54fyf1y7Shx0x6Whkufzku0Jbg/Q8WU0mFQ
CoTG1GQXIuH3RlgUi7HXjMYPZlY5nGjrvx8JPunojVM3vst4az6vVClpaoAAAgAElEQVTdPmrZt0a7LX28mijEbRcdmmnMeTSu7cKQ5N6jikbnxe6btFdz2+tzzkokn8BonJfV+RFcaG4T9KgAQNHp5We94sDDzB42Vw8l99hgryQyNvOI6YH8NS/DeqhyPwfaD1jZ/d2ORTM7nL20/OwJV3rWL5e+ARq0xP1O3ba/cDQ+GGm75dkOV1UKL3bz3OLPwHpxseLYCC7VdmHwXMOErbZ291dEXx0gG+bTduXtxR6jtMW/f6s9TF7PXJ7zoewwAOAnX16ynTHroq38n+GMDsv39Js5w8tbhRNxLq9PmInEnYgXvFizLGn2vUczjBP39nSbfMnx6lCtMmuHGxO0OmpAUG8tyV1nwmuESL81b+qbkadPtiX2VjmyuAnFQ7IysigagouzFEVtRVZSTkmzstJyNb5yS5LR1m+muoOd5VHllMwuR14Yld25V7SNNL6a1mR+8u3dZ7K7TR/eRvEvPqVtWAXPFrh3tLV18bubFh9y7GP84YkDhge++XHtyFsWQ53JTc/HAQDP+82S12ZSgSx4v23MJtUNu72aSUhLvp6xP20d4trFacgR/U1HBuYns9Mur/Kf5b/iEktHns0lKQoaijQAGlNdjpvDLrmfyE54mdLcxYKJAVBolGpXRdFzw9S7+jl+OXD79aWjOQMm2iiIjq1/9m9tZ0tlDIBu4NCKFRmbV3KvnORnRR5dPHnC1OUXvmdn8Q3HbJuYu8yhleO4oGjt0SWfPxZUenOdQleSowBFQZ3J1FCXw4DGVKdz8jgSekO+Uqy3q/vY2dp26r7weaGov+Npt+d5bdPZfmKyiWzp4VRNtyUnb56e1zIVBvwb4KDYONFWH/fH3XDFAf1NGRjd0KOfzo+3yWVvM5EZd1cd0Zo/zUri2jYVI9LCjj7lZp6dNmig7543zzdNCniULWF1ruLqhDHUVYn0XD4AAJH/O5/RTKn4pFciylXJhsD7tmfUBo1D0V8iY94tT57le0HoYYmKtpFGV7OrCERG+MGz6Q7zF5syMOKvvDHeGBTsN9x7CQDA+7p1NXPNzVvjdKmQ92Bct2NvsIW9lNdc+lzY3jLx5k2Kxz4tPP74hImRI88eclWV3POSupFrM/tc2CxjGgDgP3Nba73zWbXNWRkj+Rw+69m4eaInouu11/r18HOetzOTl1+IKdZx4Sg7TO42beysnYreVy1K795LxcaPT9dLffQxZ2xzVW5CRBTD2p+JAYaROAHcqM1ei7nBEUE27POx9y8CVa2z/9HHsxIPurpsfTf8QMnnN8FFrQ+KnBKWk8UhQZGVmcOVsB1yPWC0X3b9+bIK/oin31voGQABVwO7qgtWGz/r6zeegbmWLACQec9Wjj5qseOlJLaIaeqmmqnnI7OmG6mmRoRmmU7QpZcOnhsdvPJ9n13BEhh5xSjag0++GQwAwI/f6TZBZf9KZxWs9BoRv0qqk2J77zZfjt7/7TFQJe7K8fQuy1sziteIqHI1athQaeREQVqOjJaOAgVoStoqeEYmh/xTl8pvI+JToyYCmR156x0rC1aOvV8y7EfguPgBG9YN1hfDRXDuh6N7Vjz4EB0lN3HB77lLfdwavw5UE5F+d9m0zZffvqFM9GVt2OffVg4AeHGXzmP9j2tTAQAU2w9p6X8kUm3HjjG+k90645j2wO1HjON3OIw9k2T8tofVKopm/8O3N9qJ9ZpTQ6Pqj9o397lvV3tQZcrKW03d1LeiMWXN/PaN95nc1VlDjaHtue2Abx1/WcFmwgBu8N3RJ00r2ivSDMcGL3o5waPLWjqPpeYVfNiWAXwrz1ZzJrt7uvSwZt5bOzvPmMnClGT5P89MnHY4Q4mSrT15g8bVid3/E3ze0l7+P8G8FO3nj8PH9elzpqWOEodR85t90oT9ftvUgKsPv7xVGD8qYd6u5aaXhvTd/L5Zq1lON0lQsF
119Uj/lPV9+yfu+nzCXYnMebLc97L9vghPbUk8zFK0B+5Ycmu0m+0a4Ci4bjjRRwNjvykOnpkZuvao1vzbHSR9ExVaI6u6lTsKsd+XrJGqR25c5eIZvGdNxNi+nTdTCJXuu/9zVoI/a0T4hqF4CUfuOnPbwFG+nR2YdDa7+czdw/Wo7EhB5EfMzgpvI0Obi217wFgskZcCRcML0lKyOCQAgeMEP+PJ9vU3tSYGjGmvp8mUqcnROTg4eEbP1jUOtlGgrpMQREB664b0Rl4l6S0airzxlUR+5cIZS6t2gGEYhlEwTPABwyhYKUmJCcm/0/v198zLzSmZQ83OYKgKmroKAILeFbkyqrJAlddopsX8ix+jQxAEQZCmSdw3OhAEQRAEkUh1S+Nk7OoAEceDI2rzw2361P53G5j0ZuyonaZWXqT6pLduSG/kVZLeoqHIGx9K41TP0LMICCIgvXVDeiOvkvQWDUXe+FAaJwT5SxB8nAQAPOP5qdMvM8XfozyCIEiNmwhkXviyIUO9hw0fMXLUiJGjRoxc+7oGr0TUKzzz+ualdsPmOAyaM+FqqoR301DLNE78X5dmdrOzt7NxGHs8XkR/r9KL93WzlbzlinelOkzlxv5r/2dR1E7Bk3HGXQ4n1+0IW5ILqoxKkzbVrDi8hBP/2Fg6+ez5UpzTKf/BGPtpTwsBuD+uBR24nsAVHYM0QmmcJM3flMapqoxHkpvGSeQQAEmrSzV+J5tk57Mpys5+czy0MQAAiry2mLoSK3h/ctmP7tdPuGil33H/58Rz99lO4k/WIVr10jjZ2DKK0jg9Pmn1c5v7mF39b7ruWp89+0ZEL97pvv0OfB6ytp3Edv1eCzRa8t7pB3xCZ7SUAQA86bz/queFSl3EHVZFuMkfk7idKv579YuDp9zY/bzLocgdnYprLJ799WsWDgDAsFn9IBQAWC/qO36x4MUdHuUZxBVO43QzYrhW8kE3l5XPBv7nXNydpSCN07WwacZk1DrnKccG3JvSQuI6jShO47RXRBonHwAAbtQ65zG4ZKdxKrNGACpP41RuZDGqSXUCkLQ0TmWXZIXLVrLqUo2vIuCsbBam1KKlsZFAC215Md2sUGg/7fVOl+JcFzLizXVRuVqncSro9O/jQ33UKXhhNl+9hYqEbKf1hd7Gb5H2/jnnknAAMuv+0qU/hs53UOTH7+1i7HM3hwTIfzypdSe/2QOL0zW9e18mkRIB+Q/HmNv7zpgydoibta3Pke9cAADez0sLhvToYmVkM+lyMg5kYVTlqaGA9UVotiXw3/dWCvIzWTj1H7FAkLRp6e1nB0rNkFd5caDc/DkJF9YGRX675N+vi62DnaN7n94dVWj6815+vzy9b/fu3ez0FCzXRUv4RbFqQ2mcJM1flcap8oxHEp3GqeplKxl1qcaLjshPLyAoZF5GLkf8V24wAMAzr2y/wvDx6iCprXYAQRonEZcQitI4uZt0HGH56uS7QoJVNo1Tai4hI0vJebK4u+u2ZpP760ncKVUdUbX7rptZsGbpvYz8Nxv9w3vvmmfNwGgGXjMtwnfdzyByIvbfp9Guhehsuf/wfljoIY8nUwL5i2+EhT4IO9jx/PiVr1gAeAHHYvr2w+fuhoz/umL18wIAwJQ6zjt+50nEwTa3NlxN5sQFj13DWXD17vXrF2bnBi59nIcX4h0WHTpz5foui5vrQpI58Yd9y822OEDVDuOD7j5+8vz2TPyLvHNrgx7rz54YFz1rbZkZVloc4AvP/53moAWjLU08Z3bnpxstv3r3+vU7p0fq5PC1Buy8dvfujQOjjf6ifkYweT3zZoyyu5lSKWrmlUvj9GKGl/cIL+8deS59JTeNU+i9wz1fTJ986bfwPlCQxmmZpKdxKr1GitIdBc1uK1/+LEvU6hOnGlSnSsvV+MpHXtWylZC6VOODDsEHpmLqyWUzjwMombgOHTeqaws5sa0CPOfO5rX/aky65N1cMroTr46CZws8p4dmy9
utO+//+djHeMWJA87gaT9iiLdrA9WZ3Kh8HECmJI0TYMpdAh+8cx7rvuSJ+0EXiUttUzcyhqP/HXnQ22+W/BubrRFdlO8BAEXLw9956YYbn7FrHzt46jx+LEjXhOGJL3+VTqQUEJtH6gGVqaMsA4CpmuhyTyWxSF2gKetp0AEwVR0FXi6b9etNAjvt8ir/uxjJYunIs7l5RcmccEEyJ1bCi3KzNS0Kj+RnRR5dsyoqn/vjezbWXgUAgFNuhqRgN1VBcTgi5q9ZPJ/MkvkwIF5C773XHOvt6sGTr6SSqu47r6wX0Wd4cYqaK6LSOLkVvFrqOFCC0zjNNmVgNEOPfjp73iZzh2qVascUpXF6IIVpnH5MG3SuIPbNS5lJAS3+WyWGdAaVqFV1kohyVRV5JSSmLtW4icBo67fjkB/BK0iNjrhy8GDwGpbylmkdlMRRo/Dc0G2rVsDIKzPbij3XRU1UP43THuLciEEZK0/7mZAkzmPz/5pjSCl0ixnbeth6fVv5uqcGJUswTMlxev8kn9mrGT13L1aKPl2Uromn3r5cIiUWEOxcNgHA+fogSqGDiQLGEpq/nGGXylND0Q06lc/PJFA2P1OwIGmTXMuyM6TSS1f+8sWpaP5yhl1aa8UUzYeXYOfm+oT4S1YwSuMkPaQ7jVMl1UnC0ziJIpF1qbaHVoqchpnLP5M91Atf348Ty/OWeMKlwP6H438/3u/qOanNkM3nyl30kyBE+t0loybte/tm90Tfbe+LlpcgjZPrnzROz45Eqo3eMebrdLfOzqMf9t4+zUKv5zTrq152jl16BcrOXdWFKc4yNBSM6bg1OvnK8NKJShhtJ/xDfcz1Htehrd++8b9mdnXu3qPvtAeddy7CVnt0cXFyGvPCK3iFLQMAePEnpw/o7mj/z+vBQUval38siao/at9c9tKu9s7d3LsPXHj1l/ANbprh2ODysxX8SdvBRene2tn+89bdw5TUrDxbPZvs3n+3wYY5Fc+wXHEqmn+ZwAatT7TUuD7M0evAt7/lKYQi7Pfbxg2beerL213jRy0NzeAmHB7Sd/P71FuznNq0atVh1NkknP1+fd9u/o9yAaA4jdNOCU7jpLjBzbZDe48Dxht29NHA/gRPCtI4+UtDGqfSa0TEbrPUGql65MZVo+okUcovSeEhklmXapbGqYSg6yRu3Ik5S+8ZzN8zr13NnhhFaZwkh2T2CsJ6Pc9hQaurt331Kz1U5D/0abOs58uHI7Uk6cJonZCc9PhkTNdQXQLO4CSzblSH9EZeJektGoq88TV2GifA055eDMtpZqzDpLBSoh5eupmh6DjWWNx3SxDpVdKtaektkMxi2G6OBADaeBDaMkWOL97hpYfUfbYYXcPIsMLxEQRBGlENmwgEOyvxzdUr59I5ADQlfas+00d6tWf+NWdwSOMqfXwt/ZmiJnp4uc+KLsd+PKpinEb63Dg/gSAI0ohqnsaJrt/WUb/4S8Gn60c/1e6HURoniSGW8ha9BlDqp8niz9I4vOGKA2KtjdK7LUhv5FWS3qKhyBsfSuNUz9CzCI2mol+V0uEN/XON7y+4Bfv3kd6iocgbH0rjhCAIgiBIg6hdE4EoiI84v2ftvCnjRk74973wm+iNhcgP37/KznuOo9fM4ad+iCuZVDXVMo2TYNirpZaa/W5K3Gs8kqK6KZQqzcNU3t+TSEm8UBonSSPVaZwEuN+P+To7du3WpZP7nOsppUKT4BVRZklWGmeFpRODmjcRSE7CrU3+y058kbPs4zsnYMlwEzH1IM35dsX/keXR41ueHBjKOnIyokA8YVRH9dI4AUBRGqf74Y/+c7o+a9cXLgCZ/WjlovcmLdBrI5URpFD6VtRpflEKJeGRuMkfk/6yXgckHi/u8KiegT+F8+6EREREXPP9GrDyWanNVpDG6WTYo/Dww9anZh1LkLwcDX/SOO0Wkcbp48ePHz++Oe9r2t5XstM4lVkjAJWncSo3sgTID18eEDfhQm
jog2Pd7s//98Ofw6ukrgjhJVlZnBWXTgxq2kQgWdEnN5zM7b16y5Jxnq62bcwMxZbGiW4y7PGRgeYygLPzucrNNCQ4gUGt0zil8dNvLl7DX7DKXar6j2x8FaVQ+pPDyaVdS8ux5z/cWDjUe/GFCysEyZmseo8f0tp8yoNs7vc97la+x88GFA0fvDea82fu5fI85ZfLBSWULOqvyttdFyiNk6SR6jROxahMLUZBWj6f5GSlcZlaiuXDk7QVUdGSFBVn1aVrRDU8rpK5r8/e/U1qPd409WwGl97MwnXwGG97bfGkncGodBr+616Q95pnmtPXS0pVEImqbGTEjy8/XJDG6YhJRyPL5SffFdoalE3j9OvruTlblVdeclE9tqmxQ5YyVO2+62Ze77/0nscOjc3+4b1337BeFf6aHxc8dg1n2eu7fdSyb4229jqmOmD92ROu9KxfLn0DNGiJ+53cwid7RMycu6rle+NNN4fYsXt6rtCgJe536rb97dAxglkL8jAFvAzrocr9HGg/YGX3d4GAF+K2iw7NNOY8Gtd2YUhyj24XS/2Q29LHPY+4/ZV9YdYUJq9nLs/5UHYYlOTduVgujdPMGV7eKpyIc3l9wiQ3jdMM48S9Pd0mX3K8OrRskh1B6p0gcafeqYzwGilKd3T6ZFtib5UjSwpGW7+Z6g52lkeVUzK7HHlRPvGZxK2ICpakyDirLF1jqmETgZ3wIpYvb2bv2addc3lOfNiR4J1reKqbxpqLa2Om6rpPeWzX2W/E9uOdA311JKU+VKraaZwYjA+XPhXmrhs5qPDrs3dKM4OM9000l+SWkFiJTKEklHIJMkgom5wpR91i8kRZq2nvFka6qhFJN0qSNqmzim/KisrDpAdCuaAqye3U9KA0TuKLsaYkIt1RNZXUK7eF3R9v0DgUfa2vyo/gXq6+FxyvDtUutfOXlBVR1YYgOk7etz2jKitdQ/r69eu2rVtKD6lZE4HkZKazQbtTty7t1EgcN9Id//3VyvDQuOHmrRt9XZCZj/4dEt713AJrVQCSwHEJfbipvGqnceq5b+LiudMAgP9jT5/5RttR+6By5VMolc3hlNw1w+4yUTY5070Dd1cHaW/contg4X8unfYvJoqTNl38M1sReZiqShZVNrdTU4PSOEkPKUjjVKKkXnHeL2kvo6WjQAGakrYKnpHJISVyRVSRxkk4zqIiKBek5QiXTnxqeBWBKkMFyMth44KnGGjKzZWAlVUojuQe/7N3lmFRNW0Ant2luwQDVMIGBETpBlFBRFExQFEMFJCwAxGxeA0U81WRFxvjU0KxBTEAA7GwFQxamg02vh/UskXvzsG5L394neKZM885Z86c2blxCmOmTEvYbT1DVBbQB8zynQVzFwK95HaI366rWS/wi7yJEUeCdMVAo8bpdLPGKSj2pUJUlJe3j60ZDacyeV/sUMguUdipVygBABonFyCoeRxZkeFtYwzkpUUkhtgbaqX72E+yGqsnfWdrcJWGdG3Fz9s7P0d+POVLkEy03fZrjOTbrcFVGtJEnEzzqRcaOO/42qcLHc23itYRFdyOxxiKgzSWv9zyD+ksPbxziirEg2P4COlV5NLQxNT3WZILPPJWHtg46Mo0512v+gwNtEhmAEnDzYmxLgU7nF1+Hnh3xl6mUeP0GGKN0425toZbAFnSOuKMkxKO9KIxeOl69c5NGNQ7PGGpkc12bOOcSK+aaqT1jQWB6IiAyMke3mYm0qIkUt+AgzNVCaSXkFcE25lUKGeJs+m027CVToBxt1PjVPft1LLViUpLDm60lGPQQO3r/QERHyw37/HUaNfDDGmc4AG7s4Iguhvs5gZ2I28V7BYNRc5/2qtxSnucznKEdjYJhdXsnbTA+/8OX3326eu7O7HRT4gDx9r1Ry+7CMGDwzX8g2Q5AoFAYJz2doYK9ZmwOoR87ETintALDLE++m6rFzj1RT2qCEEDg8kJKZcQCETPov0aJwAAkBg8xmowAA
CAsoyLMRkd+sNI4wQNWC8vpEaoHgF2cwO7kbcKdouGIuc/SOPUxaCxCFgEEpNTq8uxBXZzA7uRtwp2i4Yi5z9I44RAIBAIBKJbaF8TgVZ0fcW06dOmTXefMXPWbI/6f/5nvgtwull61evl493ME8oF8cPLdtBBjRP1y16jPurDtbX1xm3JhsZHInDI7/ZMGq03crACDic/SFfP0Gn7q646OYyy69PU9MPfknltVJ3q2d+4UWxTm+GrZbDrcx0AdT+T1o3X0Rw0WEtDz23nk/IaJIICSOMEH5jWOLHHwzXC8gQn5b5DtLW1dS2CHgjYgschSI6JxG1jgdG+kYYEBbPAHYOIdECn0+k0asXLU3uvVBro9RLYeEVacdyWU3lDerPaRyCjbRonA0PxBo1T2lmdH5H2XgdcUgNKPiuvuvUwaJBA5+CEDtHhwfFPg8lvNxlOJZ7KjNDrwom7cHI2kclxkh2YpYr89h9X3xy/e2+9NEVrvzzMEhXH/+66uDBK3bcYj0lHKawap+THM5Xzo22twtIn/2cp2bCqXuOUlOKnwcjZbrnklOudJQOgGwrdqHE6zEHj5AkAAJSc7ZZeNFjsQeyw1QgAvDVObBsLEPZ4eERIKXhbY3vi6flxMnyNkQOcguSSSNCd83Z+aBCSVdUcNGjQoEFaWprKlY9ufVV08ps5XFJAAznI2TFRpwYvCjOUhPx7SYc1TkWVud8I/RVR+4AnLAql0htuw8cvcjPStliyc02zlqnq0/H5Exwc7Iz6EUR0ZkxvoWUivT/kbmzqMM7BZe39/FcRbl4XC2gcXE08Ib2Nia2ZvWmmphgAOAlNCzNVNBcm0jjBB6Y1Tuzx8IiQ/PNtnboKDL/IZw+SRyJBds47/Gwlf4k/mc4w8pyoKSaYBgKj5GG07zvrI17qEvAPNSXIqqtz6EJo0DjZa42epf3sbHYtndhS41T4p/BbwfOtbraW9vMPZlVB2vEqaOpdTasTb1+7djm4ctuGtCpK/ieFNfdep0Ut8D96O+1hxk2/35H73vZbcOL6zTPLhsgM1fnzRGLd9ZS791OiR19aEPas8kdS9EvDbReu307YbiPfnE60WtqotSfi4q8dGJ68PSGfBhjkstLa+qYC6c0+L1cXFxcXl2nBV39TAagr/lypMKw3as61ACehOqSPeMvbDJPGaSWbxilzmZv7LDf3qCorZ3g1TnfvxIzL9Pe5UsTaK19v5QmBxx7EDmuNNGicjgbrcriRcqo+QcIeD/cIaRV5Pz5f8nO2sbSfu+fRH0F+iWYLklciQXbOOxgIvSzzwp0/A12n6UkL6PlMr0iNf0+pSFu2bMvCc18yY/aHPavByCO0Jn21/WhDQyvf28XfE069+R6zyHXmthe5SbFZRHFFaUpJNQ0AQKsqIkqoKOiEPP/x4v7d5B19owPOsI1jQADA5GoKDNp0pV6hJK5hNlgSx6CWvTy5zmfh0o2Xv5aXEemAVnAlMKQ6cP0wYVUmLdPLTyR1r8hFlSEmQ03nH33DnEYNriahelcTvfyaW18l84PfqAAAMe2A/64mJCQkJFzc49pXCAAhRXXp0rf5FK5x/j0Qs8KdjAwNxzisyeA4d2ujxukMJ41T8vmVmoUAYo2TyyBxnOhAx4m9c7NYarvByuMnaHtQe2jQOF3wmzLZ+9CLjJ2LQx+UQ3YjbS2duEBQnZ9a+OnRnXs3dqnHLvyny0YrdQGtJRJMdOxTH60wLfENXjfAQnAzqePl3HZGuQEAAO37mbWLpf1DDQX1vaO9tFnj5HikF4NEpouJ4vEEgBcRhaPjCTpYFUo196cEAwBa6pru/Q/Q8i8FhDNCkr1GXzm1saWWSUjBLOhkWuDPaGurPS+OS3L7S7I2+27drdXpLwTy2FeKa891F3LadH5hnJemKOn7neRig7HdV2ioQRon7IABjVMrMiRWmoKnkYkMEXEhHA6PwwmLCUH0eGglkaCiQ70I1PxH93+I6tprC6oLAXPQS2
6v91h8JOvFwUXekY3N2XqNk3Wzxik99qXC3Civz/62ZpZzUyfs8xtc8zB8sqmxibGlf+6CvdP7YunWwz8Iah5HVpA22Bhb2tk7TF6T+KvhE7aQiomVzJ2twUErt9/ByYjUPFrjd7noy5E5zj63hjiRwx3NrSwsvDLdjm8yFP4Rt8hhrOvUBadUfHz1udtfcJKaFrY6clyqQUxvQ/xutWgHTY1hIwzdo3MBqi4AAOlV5PwZAefeZx1Y4LHhbiklL2aa865XhTcCLUYMHTrK48JvGunVDme7+hHnjRqn/RBrnKQibA1H6Tse04iIclLCNQfPqLcHBUFmD2KHpUY4dMAz1UjrG/MX9nhYlzQGz6jO2jvbwtjUzNhy8TuPo4HDBTk6hCVIwCuRIDvn7dQ4AQAAoBUkrvQ/Kx9wcPUYqY61EZDGCR6wOysIorvBbm5gN/JWwW7RUOT8h+8aJwAAYNR+f/kb9NUdII76EBACBjZ1E1I6IRCIHkQHxiJQS78W0oRV+8nA2BWI+IuAQdeE9E4IBKLn0jGN03Bbc/D0zPGnnfnDSOMEDVgsL6TqpjYsxxZYzI16sBt5q2C3aChy/oM0Tl0MGouAFWBTNPU8pRN2cwO7kbcKdouGIuc/SOOEQHQXdCqNAQCglWacO/9UoDOvIBAIhEBodxOBQfpx/1io75yZs2Z7LFi55+LLMsHN51OTuGSWmtNS3Um+VhFvBGzpaI0OapwAAID8MWq8sV9yMZo4qZHqezMURFR1DQ0NDQ0tFyfmpXfOlkR5/8+4hSnVLH/jvpex35NaACi5SUePXcvjNbcJMXPZoME+Nxt+nlSbvlR74rWKjkbT40AaJ9j4WzROkAXfrtPOQ/LEX9o5XJFRlfXvxkPPNab5btKTr/lwPfrkzl1iu8Im9hGEbaWu7C1x5PG41Y7QzcLGSsc1TuuGMT4cXZ/pdjp6fC80PJQJCcPwmw/n9cEDAAAx81anjkXJf/ObMqfZRWAAACAASURBVKblMlr558/1rV9xg/D7d1s7BKP207/T5hq/vuI1AM3AzAzSOMHG36Nxgir4dp12HpInvtPOXgTKz8dZNSrj50wyGqKuOdLRc6omyM3KE1B7mVLyrk5FBQt35A5rnIpJHw8vjHiXd3L+zE13UTdCa1QkOKhNvVMFAKhOmT3A6mzBr5MOWrNuljMAqLgzZ7DRYl/XJqsTuUHd5KArLG176vX1NdPd112+vKle+zSw/8BxO19+verv7OBgZ6Qqqb39A4W31UnWyHfCG/+p/2Q3TTNCK7oT1iyR4umW7sEgjRNs/D0aJ6iCb9dpbzXN+Eh7TY9yA3qBwudPf5IYADCqvr4pxKnqqAqmvUyrLvqR98h/6Rpr7z2RWVVQfyvuoMap4OetzWeHRz+4f/u4xW2/TU/bPctVz6X2RfgkSwsLK9fwF0SuGxH6OPmNfH7sYRm97OGxF2NC1qw93mR1yvxQr266+aqu9KJN/7E7LsRtc7FdWK99WqsqOthumKbr/qTbt68fm6te3w7lZXXCK4zdc3UVfcfklXf/1C8hyI9a0CyRyvpL6w5pnGDjr9E4wRV8e057q2nGT9rZiUHoPdZv3tvNMWsCs010RD+n/xixcL1zP8H0hBBUxt5PGwsAo/b9WbOVl2wvzRsJbcu9BTXpqyf53y2XMNp+KejdqTffpRa5xtGKcz/Ss7ZuU5Sm5FTTABCmVRURJRT+pLzvP8lEkSAqZWctlvS1mmGMAa0lX5AwCIl/0PShgdtWOEVbP/MN+1M/U898sg4yZmSfXBeeU03J/VquSB3oFbkoeL3JUJrp/LH16iYGtezlyS2b6zeoGywNWKdLb7A60ZisThN/R3zNXAgAADhJvTWX9z0xcJ8HTOpYj6ZIhLoJ28UQs8Kn+sQXMuTt98fvMGKfkLhR4xTPSeNkW/Nsg+lkiDVOwYPEcUIDHSf2PpSVT5muzNSOadA43cegxinXb8rFmk8vngovDh3w32ZLOZ
huM62lE7xwj5zXaW8tzfhLe5/uDGLJ76K6XmMmGKlVZn+ll2WnPv42evJggTy4aHVEICROwOHwOJyQCAGmpOZJ2zVOB7Xkzn159IU4XqcoPYMxcjJyYvAGLyaDqygjM4AU8U8FhQEAANLGSxyXB27aTXWKlDjstpTJ6kRoUjfpBGfhx9Nbap/uHsbj6FQ6zxFzzVan4voFwgO9Tp5N1x9/rNDRh1Ui9TeBNE7YoQdrnLoxpDbCPXJep12JQ5oJjvY1ERjVL6P33SJM2xs4QRnQxjqNu71tVeyR5DERbqp8HxLAqH5/1Wvr4wIcro6hPDvMfzgEGcEVesntEL9dV7Ne4Bd5EyOOBOmKgUaN0+lmjVNQ7EuFqCgvbx9bMxpOZfK+2BFajF3TPReYGlCoCs6R5/TZR7UgmJEyXjWfNt/JKU6ztwxZvD63xXXnTy4ddXnBa53+361kQrYGV2lIE3EyItQfcYv8Ykpl8OUqK45sSlnrYz/Jaqye9J2GDWSV9R1JW2aYuoWeW8vtz+EkNS1sAQCg+VMHXskxMn7ziwlP6iVSzX+uu4sOLaRXkUtDE1PfZ0ku8MhbeWDjoCvTnHe96jM00CKZASQNNyfGuhTscHb5eeDdGXuZRo3TY4g1Tjfm2hpuAWRJ64gzTko40ovG4KXrNU43saBxYq6RzXaKrH3xpFdNNdL6xvyFPR4KyxKzXDiDb89pZ0szgURcT/s0TtQfF4ODEwds+DdIW4RGo4G6vLiV626orzkcoN2ufhCkcYIH7M4KwicY5JLv+bh+AxX/vsc8dnMDu5G3CnaLhiLnP53XOLWvF0FIeeRIxYs3omNuedlrSRJ/pF+6USwyzF3177t5IiCgaUpUlqu3q5crdclxsHmLQSAQfzPtHIsgOsgzNFAoOu7c9pRaAITlNY0913oas/1mA4HobmBwMiGHEwKB6NF0SOOkMMTYZkjD/3+mno/uiCUCaZzgAXPlRQ4nvoG53GgCu5G3CnaLhiLnP0jj1MWgsQiYADYnU89zOAHM5gbAcuStgt2iocj5T4c1TkJCQo6OjuUVFdDNb4pAIBAIBEJQKCoqOjk7iQiL9OnTp92jCOhVOfF7VnjPnDlr9sLlEeefFgt2dkhG9dszllP3Xy2CfXLiDmmcyJ8OTTbQ1tbW1h6uISdp9l/+3zT/TqtwkieVl12fpqYf/pbMRc7UNTCa/0oTVXenq9nHI3kTO0jjBBs9SeNUl3fRz9bIxMzYyCmkxRz18NVI20+7QC8EvZF6rq6uIiKi9d9W2tlEoBXcjNh8+n2fKavDt6zzGPHneuSW8x8Fd+bpla/C9pb47l/iqgzjz6ibaJvGCQDQoHG69+jBfxbXAg98HbD0yos3b968eR43Z4Stz1iIp3UVCPXypJO5zc1UnJxNZHKc3xDRBjlT9/xdpr+C4E3dtxiPcdt+sGqcEh4/fpzk/Tk0LL2medt6jdPZlAePHsXonQs8lQefo6HZr3OQg8bpzZs3b968uOQ9SN8bbo1TixoBgLfGiW1jAcIaD73g8tKQYt+Eh48enB7/wHfNg2bhL2Q10p7TLtgLYeDAgTgAAINR/22lfc8cav7D5A94gwWLJuhpaYyw8lwyQakkJemDgKafp5UnbomMqyg4tHLb2rsC7s3gTcc1TnQAAGCUp+xOGhbogqlZ2/gCuzyJ9DrCzetiAaUwOWxjPIucSWfq4Q9Vn47Pn+DgYGfUjyCiM2O6tYmVnY2JyeStN26t0xuy5H455eshex3v0xdCmQxM7AKnhr9CA4zq7Khp+tpjbJ08t6dXMgAAjNqc4/MsTe2dJtiO9bsoYAWL4EEaJ9joURqn6qzzb4fOtVchABHNyZ5KD//3ns3YAkmNtOu0C/RCSElNqSUS66gN2d2+JgKdVEkCItJSIvXXtbDyUFUh8q8vgrmUa16f31g0+ebJbbe2jsrceuDqH0j7JQHosMapsJIKAKD+uLT7/cRAc1msDqntPtjlSQ0QVM
avDRjFImfy+x25722/BSeu3zyzbIjMUJ0/TyTWXU+5ez8levSlZYn2eyc9Dlixefl/Gjv/meawqIWBiU3g1AD124nFeyV2pGXcSzqxbKQEDgDqt+PztpBXJ96+du1ycOW2DWlV/D8rMIE0TrDRkzRODGJpGV6p/s6Kl1KRIv6uYH0WwVIj7Tntgr0QysvLL1y4kJf3o66uDrS3iSDce+Rg8ZrMK/e+VNEYdHJ5fn4tA1DraIJ4OFNzn7yTsjMeJIYT7WfgrFT8Eup+BGZq0lfbjzY0tPK9Xfw94dSb7zGLXGdue5GbFJtFFFeUppRU0wAAtKoiooSKNAEA0pt/j+IWe8P7WiJQ6uVJQ8+5zzvL5Y2dQS17eXKdz8KlGy9/LS8j0gGt4EpgSHXg+mHCqpbasjgARPubDCVm5w9eskgkemu2na+1Ap1ll0aBk1CDwKnx2KTc9F+9rXRkcQAnLCkuBAAg/3qRRyq+ujkoMGjTFWJvCRIF4pZrd0HMCncyMjQc47Amg2MPY6PG6QwnjVPy+ZWahQBijZPLIHGc6EDHib1zs/Jbfslq0Dj5YVDjdMFvymTvQy8ydi4OfVAOWcpyTyecuKI8vaSSCgAA9OqiavE+Mi1fxARcI9wj533aBX0hUKnU+/fuPc3M/Pr1W/t+0YCT0l8YPHnP/ph1C2MAAABPAHSgriQpiF4oguIA2aKbX8pmq8iX5tyr7LtAme+aiA7Sdo3TEWUC48/NXdeHLb/fD5KePghhlicxL8fhGDQWOdO9/wFa/qWAcEZIstfoK6c2PnhTMa+vPCXvcY647pz0Lf+o/LO737E1/1mN+XcdvS0GJhGVobIFmV9q5vcV//OtgMQAQGyg+TDlbM/NkZayOAaVTCWI/oV9P0jjhB2wrXGS0ncf8f7kvSLHyXLf4k+XmG8cJg5TjXRM4wTLhfAuJ+ddTk57f/SIl9WbGXZ8akVxcTVDDP/+UPCBXyM0ZQTRh4NTsVu89uEe++nnAUXMevnKCfIQ34s7qHEaKkLNvbj3u+tuK6jkrNDRLE9iQkpn0tDlLHImGZGaR2v8Lhepl8xxjpYc4iQe7mi+VbSOqDBl15yE9VHz/nfYlyCZaLvt1xjJt20xMIkM8dnpMmmesYmmtr62oqIIAAQ1jyMrMrxtjIG8tIiEztLDO6eo/tW/LEYaJ9joWRqnqYe2PJ7nbLYLT5dzOPifpQyAtUbaddohuhDap3Fqgk6j1ZU+Pb5hzwNFz90bHdtbDKRxggfszgqC6G6wmxvYjbxVsFs0FDn/4bfGCQAGsSg3r7Dox7vM29cffBU39fW3F3gzB/FXwy+ZU2eXIxAIBNZobxOB9OlUaHg6Xb7/4JFOAUvGGfYTQz3gCMEBg5wJCZwQCEQPpf0aJ3EjWxsAAAAl2cmnszv+h5HGCRowWl5MyJwweWaZwG4JsBt5q2C3aChy/oM0Tl0MGouAIWCTM7Evx+qZBQBgOTewG3mrYLdoKHL+02GNUxPwTIqBQEAKnUpjAABopRnnzj/9gzwZCATib6HtTQQGJT9132L3xSe+NE8XQq98c2XPigUes2Z7+YWfelLIz8mLaHkJEWMmLdWdtFTXZYHyyPmr33XThPxdQ4c0TgxGddZuNzMrOwuj8aFpZejhxIvqezNUTWO63nRVfd/L2O9JLQCU3KSjx67lQZ1mMII0TrDxV2icAKB8PeVtaWpjZz7Gfvm1AsGHj80LoU1NBHpN3pO4XcEBBx/+Ye5toebf3B1x6ZPyhKUrAmdqV9zav+3cBxLfSkPo77I6M/7Qq/hDz/aO1Ro2do4WHPN8cKKjGqecwtshsX12Jd+9e8L42sb/5Qs+y/8+aOWfP5fRAABA3CD8/t1NehiaOQ8CkMYJNv4WjROofrQx9NvCy3fv3j9ld2/V3teCbbNh9kJoSxOBVvzgaPQjupn/Mnt5psXkr9cSvoga+/hONjUY7egdMEml5N7ll1X8bvHQK28dut/Pe9wweFsIHd
Y4Pa6QUcCV/SHR6yrL6DJyouizUDsgvj+xoMnS9KCUDoiZASP0ZiyYO328idn8XduXzHJ3tdQ1C0gupoGKBAe1qXeqAADVKbMHWJ0tZpDeH3I3NnUwkxVSW/Tk61V/ZwcHOyNVSe3tHyjsViekbuIK0jjBxl+jcSJIK4vXFFdTGeSyYoq0spRgi4DZC6EtDx2CyrjwY1GrZxr1EWEqBrU0530lXt1Is95CIayipyNL/fayu/y73KDk3tjy1WQDe3JDRUc1TjR9f+e3cwxG6jpfNw6wVcTqmFoBQP0e472N2mRpWhD2jAgAnUIwXHP8QuJptxc7HjnuP3/12j6NhIibJewfJ+p+JEW/NNx2Ia2Clv+fuabr/qTbt68fm6teP8k3i9WJjNRNXEEaJ9j4azRO4rq+AYo7jLT1dcZeMlg9S12wE/Rj9kJoW+Vz+sEHrbqkGkgoygo3rCJIKUmBmuIqvr5CMSrvHLqr7O2sDW2bnQNt1zgpFh7yveOe+v7V+6dBOQFhT9lEpwhukPMyf6kwWZpefqpiAIAXlRHDA7ykorS0kqIYDghJK4qSq8jsPV/CGl6RiypDTIaazj/6poZtfUurExGpm1qCNE4CCrAD9FiNU92XQx4RSic+vH/5MXtjfqD3ZbZRFt1PT7gQOjN9PAPUt4Ma4f87LuX7zc0fR+8PV4CkG6xttF3jtFe0aL+4tpwwjiDVS4qYUglfxyu0iPYfo1rIZGnSC5LmkZ94MRlcRRmZAaSIfyooDAAAQcEs6GRa4M9oq6E+WZKedF53TaRuYgFpnLBDj9U4ydYUVwgr95bEAyEZFTla6R8ObwLdTU+4EDrehUSQ6iUFaksq6hpOPK26uAZIKknxrzuHUXXv33vK810N4G+z00tur/dYfCTrxcFF3pGvGsbN1GucrJs1TumxLxXmRnl99rc1s5ybOmGfn97o4NChJydZWJiO/UdmVZi5tCDLgAFqnq63M9DT0zOZE1eoNu/4Wly4o7mVhYVXptvxTYasI7GYkTJeNZ+2yclp6uzVSWRxIQCoP+IWOYx1nbrgVO81x9doXJth6nbsC7dvaAQ1jyMrSBtsjC3t7B0mr0n8hZpyzZBeRc6fEXDufdaBBR4b7pZS8mKmOe96VXgj0GLE0KGjPC78ppFe7XC2C3pQCQBotNfsF7y9hhN4lclR66UibA1H6Tse04iIclLCNQfPqJcGBcEhDeIBS41weLdmqpHWN+YvLPGA3lMPbVE64GxmZjnrmvnBHZYyTcGLjgiInPzK28zE3MQ+jB6wb6aqYFMKqxdCezROlC8x/mufGG0/MF9TiEaj1b47Fhieob0qaqmuBA7U/fjf6jXxir5R60xlW32HQhoneMDurCD8hkEu+Z6P6zdQEY53Kz6A3dzAbuStgt2iocj5D/81TkyIajo5qaee+/ew2hwH1ar0c1cKFO0W6cv85X2sCAHTfTYmnKiS+kBkb0IgEH8PnRmLINR3/IpV5OOx8ft3kAjyQ+x9l88cKo5aCAjBAYOxCdmbEAhETwG3Z/cugfzh4OUrBPJ3ET0VBtuA2aYHNZ+XIxAIBCSkpKQAAMpLCjr4oQFpnFhAYxGwCzz2pp4BdnMDu5G3CnaLhiLnP0jjhEDwG2R1QiAQfwmd0zjVLy9IO+Dn6Rf7ld9+m7qC0yGrzL3WWbqvX5laBrm/oGMaJ0D5fnaxtamVtYX1/FiuP7pDgLrPuwy0fDnPT9K1IKtTe8GmvYY7SOMEAa2kCsc6ggNekZcnOCn3HaKtra1rEcSsmxAkbRquSK/Jy0iKO3P5aSEDMFsa6LU/niZfOn/lOcty/lCddTrs57gHJ2yVci8YB1x9YzxvJKxzLLZN42RgKN6gcUo7q/Mj0t7rgEuS0bZd9JD7KXYiL9aNDUudfNJBRgDhI5phsToJOhzoqfsW4zHpKIXVXpP8eKZyfrStVVj65P8sJRtW1dtrklL8NBg52y2XnHK9s2RAZ8ZTdwuNGqfDHD
ROngAAQMnZbulFg1vj1KJGAOCtcWLbGAJ4pwqXOoICXpFTCt7W2J54en4cTHf5TmicAK0k7UTME4bJkqW2/G8gAECQlBUjVlTTGOTKCoqknCRkScxMRzVOj0potDoShQYY5IqSoh9F/PNoYhTa75MOWrNuljMAqLgzZ6jd5m1zm+1KpTfcho9f5GakbR2a9Ye70onFyUSpfLZn6mhDS1trm1mHnz+MCIh62ZrVCdEMZu01nEEaJ0jgkSqt1pFg4RE5+efbOnUVyKZdaUsji6AyLvzYeByO8iXmdIvlymM3HnbE4SjfTp7rpvB4IT7YeZncctNJd2VLqs22RapD115kgiCrrk79zr68XuMUqzVaXXvj2exaw/4tNU75dNNtG28GeU6LpaQniPovl0ND53lD6OPkN3LrsYdlDmZPjmUqiGX+W7f7xW0nhfIbc203pK2j5H9SiM1+PVoSR8w8QSEYrjm+YmDeHkOje5s+JLqKpHjqht1cZ2caN28LOeR5w16rT7z6cLhX5POLljIMKh0nBA6uSM5+FJF01JTwav2ouQAAAGi1NMO1JwI0yA/m665JyJ+wUMDzuMEETkJ1iAT5dctloMle8z82e03AMjd3OfLji1VOKfBqnJZp/Dw8ztbnimni9JbCpnqN01HYNU7MNdKgcTp/Vpd+uNWNYYFHqrRaR4KFR+S0irwfny/5OacTQH/XsMhAMwUY4u64xonXcn5A/XJu1075wHdJB3L+NzN/x97/lWLlFbvtGicVafGB03Zf+V/YyEqJWbsXDYasfQkhOEVbP/MPJ1I/p8Z8MpkkUUluaVcS1zAbLNmQs1yUTqSWTib8x0f5fa2GS+MAwAvhuaR7S6sTGsDYE+w1nEAaJ8HClFdkbqnSWh0JhrZEDgiq81MLPz26c+/GLvXYhf+8gmM0C8yv3rxh1JbVCCvIS+KBkJScHL3qD4X9h/Fw0naN0xFlAgCUT4d9ojX/eWwnj4nSCRpp4yWOywM37aY6Re0YkHeGya5Uc39KcOv7sziZaj/tNk9IfVflbildV12Lk5LA4XF0Kk+rE6In2Gs4gTROgqVlXrGkSkPMShzqSPC0JXJlERqZyBARF8Lh8DicsJgQHPd7GHoyOobwcM9Frh/3WcxaaeZ9lu6xeIYKxGXpoMZpqAigfo/1O6y8ZddEmDrLoEZcd/7k0itV0+Zqa3XErsTiZLopufjIgl8BNpYOY519Yj4ScXL6jr15W50QzGDVXsMJpHGCp4eMNVUaY+ZQR4IOlQVukTOqs/bOtjA2NTO2XPzO42jgcBjaNp3SODVNnUT5djI4JGP05si5Gm1sdSKNEzxgd1YQAfMXWJ2wmxvYjbxVsFs0FDn/EajGCYGAje5zOLEvr7c68d4egUAgsAxqIiB6CjB4m5DDCYFA9CCQxgnRE4DH4YSVQbMIBOJvAGmcuhg0FgGjwONw6glnEwCA5dzAbuStgt2iocj5T+c1TuhDAwKBQCAQCKClpTXRZVJVZUXTks5onOqKs67sW7dk9myPWR6LV/wT97yUvxOm0iruHQgznb3KYnrwzFNfYZW+NNBGjRPLlqxWp55Ldaqnkkg/HUNDQ30dfcfguM/ELjw4MdN/iGlnPTSNB2GUXZ+mph/+ltyOfatTPfsbNwZQm+GrZbDrcx0AdT+T1o3X0Rw0WEtDz23nk/KarogTOpDGCTZ6gMaJW1T0sodbJ40eY2qsb+h+8A0ftG7thmuSQ5pCbWoi0GvynsTtCg44+PBPc28Lo/bjldi7FVpOPkHB/h6jCW8S90TdL+ZjGhFz4pY9MTgd+0/amaCR14+ezocqhVvQNo0T+5YNVqd7jx78Z3Et8MD7nv1LfAmD0OSMZ8+ysm4EEDc6r35UJeiAOIOTs4lMjvMb0ulfLZPf/uPqm+N+9e2nj5/eXA40VRPvgVNf1H2L8Ri37Qerxinh8ePHSd6fQ8PSa5q3rTfcnE158OhRjN65wFN58DkamhVBBzlonN
68efPmzYtL3oP0veHWOLWoEQB4a5zYNhY43KMiv9sXeN3i9MPHj2+tJUaGQXgP4Z7kkKZQxzVOOInh3jv3hcx3tjA0MBk3Z6GtLOPby1/8fIgx6Aw6jcYADEp1SWVZXiW8TYQ2a5xYtmS1OhXDM3NJNyKk4rg2SPbSvvQqFqNSZcfcSx+ZkpL4/sQCaxMrOxsTk8lbH5TSAYN5y3cfD5treN6uYABQnbZ42Bjf4MkcDkJ6HeHmdbGABtgFTixh8FTIkN7GxNbM3jRTUwwAnISmhZkqHFOldC1I4wQbPUDjxD0q0REbHqUEDxEBtNoyioKWkrAgwuNNq0kOWQp1RuOEIwg3VhGDXF5KAgoD5Pk4tkF8+MwD1v8GBuyQq8u5VDP6Xn+Ip7Bpo8ZpjLEE85Z0IovVqZIK+kFcyi6DIKfem/bjZ/mX48xGpQ1pu2Q65l4iP60/LvV7jPc2aujTlLHylHfbjF3DHJ75pSxm2hJfEjB854F7pXa2L/69JyRESeh96sX/WA7CDIvAaazd/1oG7HhUv44moygBACC92eflmiSBA4BW+vw3NQjUFX+uVHDrDeFNrCtBGifY6AEaJx5R4YREham/riyb5hvfKzxlOHQZ1HqSw5ZCndM4NUAre3r6xAsxiznj1Ph5wyPI2ixedW23m2YJmLRmljHs8542wV3jVNNyQzyb1Qmqtnz3Qav4XkDo108ov4VRSYJEYXTOvUTOy/ylYqktiwNAtL/JUOLLT8W5TwuYtsQrOwZZvj98/d39429GTZKh9mufwInYMgyJgitT+iqZH/xGBQCIaQf8dzUhISEh4eIe175CAAgpqkuXvoVCMtPVII2TgALsAFjSOLURoX6TDzzKOdln94JTbEO/BESbNE4AwJhCnX/rpxY/Ph5+6OVAr1BvAxl+N3wYxIxDe05pLk53UMDO05OHxqnazII5ZaRHsludej60ojs7IsunHDfpNSCP2ahEpWQHr+O8Syvupca0FO0/RrXwwZuKeX3lKXmPc8T1gpTU9JV/MW0pKWPq7/LbMzhcfNzBdTIfznM4CA9YwqDiqXlqarU6/YVAHvvG4tpz3YWcNp1fGOelKUr6fie52GBsh88ZXCCNE3bAksaJOw0B96q+Pt/tpsflSDt5ABi0OhosjZ22aZygTKFOPtPrClMPhhzKVl8QFuTQl+/5xKh4cWrB3aGH1ptAKX1hoq0ap2dVLbZ8p8JqderJELO2TDQzMR49ynEnPiThHzNpIbW2ephacS81biY0cN7xtbhwR3MrCwuvTLfjmwxlB/uybCmuu3AOIY3iPn+ULuuqVmEJI/G3qKaFrY4cl+QU09sQv1st2kFTY9gIQ/foXAB5EncMpHGCjR6gcWKNqjFgnIL1iulFqyyNzM3t11X5HZitBmEacdM4QZpCndE4UUmfzq0Nu99nXniQXfs+qSKNEzxgd1aQboL4fKXJ6qGJN71hvL3wF+zmBnYjbxXsFg1Fzn/aq3HKLyphmRehEx8a6H8en0kuVJ00V53489s3AAAAeAkVNRUJSIZZIDAIPz1MHJfT/6Tsua7lt1qVINB4EAgEAgI60USg5L/KYwDy1X9CrjYtGxrw78Yxkl0QF+IvBAb3Eg43nsEYL+gYUIsBgUDAANI4IaAAKg8TVMsRCASiw7RL48ThQwPSOLGAxiIICng8THAu5z/w5EZ7wW7krYLdoqHI+U/nNU5o3AACgUAgEAgOdELjRK/MST62JXjR7Nkes+b4rt0X/7aSXz+JoZYmbVuqOONGoxim+NKWtSazVpjOCA1Nr4BksgwWOqZx4rUjxNQ+WdxXpLf2KENDXSkcDhdV0rC85uF8DfOY/A6lSe2TxYMsWtuX8v6fcQtTqjmsYTE5cbJGNfiZMu+2Iciqu9PV7OMreG7DBnvx2+CX4mKN6lAAggFp6S4/oAAAIABJREFUnGAD0xqndgXPcxVfaV/YHHNMMLRpuCK9Ji8jKe7M5aeFDNBsacARGL
V49bHzJvSTqPv99H+nL+6s6xu1fLRMd39KpRb+t3rzsTqZRocHo+DWwdAy53unzOR/Jjr6/pd2PsAasnnZ2qZxMjAUZ92Sx46QI6a7JiE9UIN0f47pf7p8mymMkv/mN2VM27aVMAhNfrRIlUAtvO5v6bxa9WmUqU1kcpykWtHy7g2yfdRboyQ7b40SDHXfYjwmHaWwapySH89Uzo+2tQpLn/yfZeP45nrDTVKKnwYjZ7vlklOud5YMgC7xGzVOhzlonDwBAICSs93SiwaJg4cDbDUCAG+NE9vGAqRdwfNexU/aGTaXHBMMHdc4AZzkcDfv2eNMDHRGGjl6eI4Wo+R94ocOmiBn7xd+zXd4o3KE+DI5b8gkPRUCEFEz9ZB7e+UbdHPadlTjxGNHLEArTA7bGP/6+prp7iFpFfXvhHU/rqyeNtZcR91g8dWf+XfCpliamxsO15l6+AOZyN3SVE/dz/j1cybZ6Krrevz3hQIqEhzUpt6pAgBUp8weYHW2oKD5z93KiG6pa+JOszWqyc/UHKT33o0mTGIn48BVU/S1x9g6eW5Pr2QAACpvuA0fv8jNSNs6NIvI2xH1kcJS/BZqUrZ9m45sPGXGBK+LBTQAAGBUZ0dNaw6gPdYoAYE0TrCBaY1Tu4LntYq/tCvsVnOMv7Tl1BFUxoUfi1o906iPCMceAga59EPqjbeU3mNG9+HDowwnqqquIN4UCYNSWolTqrcX4MVUJCi/qyDpEWOCIKuuzqknoF7jZK81epb2s7PZtexbctsRekhv9kyzm7ot+WElMN9xIS7cQra+xnAyo1eevvXwcfSIGxHXSXoLjt5Oe5hx0+935L6sWkCnEAzXHL+QeNrtxY5HjvvPX722TyMh4mZJwxMeJ6kXcCz+/vNbXjkbNz1m+5xAUBm/NmBU/7E7Lpzx/hy8nbruesrd+ynRoy8tCHvGc27EBmtUdVNDojnIO+dEpg95dOBeKb3i8b/3xSSSLkrvSMu4l3Ri2UiJ+gJR8j8prLn3OiVMpzDGe1uLP1r56cjiw71230u9l3L35OJBIizFT2xuI9T7pVgCbjqyfuN7BvXbicV7JZoCoH07Pm8LeXXi7WvXLgdXbtuQBp/6FuAkVIf0YbFcM2mcVrJpnDKXubnPcnOPqrJyhlfjdPdOzLhMf58rRaxtz3oHTwg0Dh4OsNZIg8bpaLCuBPvtnVP1CZJ2BM+zXHymPee81RzjM53VOBGzd3t5ePtvPvWqj5vvVC0x/lcGTkRRhlHvOgJ0UlGtSB8pSFq83GizxgnLiGkHX0xJS3tw0aZ/i9lEhWRVlUQBEJbvLVlXUV368uQ6n4VLN17+Wl5GpAPAxdLU8FFaSH6AsigAwr111Wl5hSTun6rZdU1VvL5rN1ijJJsuBqYgaWJWAU1iJxdJch8rHVkcwAlLijc23MQ1zAZL4trgiGItfiWp6eLnHHDjkZsg5ab/6t0cAPkXm+aqrdXTvSCNk4AC7AAY0DhxTycewcNcLl6xtZZjfKezL6hiQ73CN08oznt1+9LFTVsYWze68t3JLK43vv+H+OwiM1O5X+lnykeEaECuMmi7xqkHgMMxaFyawZScXW6n+8c+PmpAuvTp3v9aPxadVE2iA0D+mvpeavQgKUKJDK6ijMwAUsQ/FQ1Px/o/JzqQVdckzb3t2mSNkgY5nNbLmPq7rKgXO62ReH0q80vN/L7if74VsDZRWndEcS8Z+77SuD/sm4moDJUtaA5A1NJ8mPL3ZmsUQVTgr0v1II0TdsCAxol7OvEKntMqfkbNA15hK3HIMcEG28n9caKKaprDDGzcA5ZZS367nvSFD+N4Ka9P7p21PfVDTuKi1SfvlYPeY303yye5eKywXplpvsELxodsBzVOr0gcd8QCpFc7XIwMDa3n3fsRZ2futOZBBVsTXkjFxErmztbgoJXb7+BkWr8b4YXIaaHu4y1NPTImH1mrJyZlvGo+bZOT09TZq5PI9S/1UjqThqb72E
86orl7TQtdE4fhSmzWKG43kGax00ifnS7Z84xNHGdH5CgqskTcFkcUN9j35Ti+SmRIiwDE26y5EiBI4wQbmNY4tSt4eGh72BxyjP/hMtMZjVOLT/7kjycCwx4PXXsgQLv1XjakcYIH7M4Kwjf+WrETdnMDu5G3CnaLhiLnPwLVOJG/37j6SlR9QC9JPLH4w4PL9ypkLG3UoR3Ii+A7AncysS/vwBEYZc1iJwhLxHE5AoFAdAUdbyLQyZVl3x+lXr9QSQVARF5dzzVohouOJCyfexACBgYHEm83Upv3Gn/m7XhIIkfmJwQCwUeQxgnR9WDCyQRPJF24HLXQEQgEM0jj1MWgsQhdAmwOJPbl8ETShcu7tiJ7wCfYngd2i4Yi5z9I44RAIBAIBKJb6ITGqRl6ZXbs8tke3gfe8elXeSwaJ45L4IOjkKnyhouc3AhTC1N9bYtlCfnUmtcHZpoZmxqbTI7IqGQA0scT8y2NTY0MrJYl5ENcNv5SfW+GgoiqrqGhoaGh5eLEH+mtypC4wUWS1JndMSRY4g9I4wQb2NU4sScP5espb0tTGzvzMfbLrxW0iJHHKgHDxdJUl3fRz9bIxMzYyCnkbjEkAXdC49QAo/bjxW1706r41h/BqnHitAQ+uAuZ8MpOB26dsRF6sWqUz5m0vGsx6lFpZ3V+RNp7HXC5qht2vNeuuycMig9aTzvxadz6oXBMZiJ4JAzDbz6c1wcPAADEzFsdPUwnJUkYdyzxAaRxgg0sa5zYk8cgc2Pot4WP7noqfdo+Zsre13Y79BpPfPUjrqsEC5cUohdcXhpS7Jv60E3+6wGHiWseZEbbyAgsyCY6oXGqX1f66PCe+4pzgl3VuiE6jrBqnDgtgY/WhEwMckWV2ADFbzcrbNxGSOBENJzG0289ok44/zhijCQAgAGERAloMBpvWMVOxQx2PRKLWqlJ3VSd6jVkzJyFnu6TrHSNfRMLaRzUSuzbNO3OIlgCAGDAscQHkMYJNrCscWJPHoK0snhNcTWVQS4rpkgrM0++z2OVIOGaQtVZ598OnWuvQgAimpM9lR7+7z1Pswy/aEsjnaAyLvzYeByO8iXmdMs1dfm3DsTmGQdutVB+fLtb4uMATlRVXZT8kecSCCHIqqtTv3NYQS+9H+rpJvYls2xcrC89XKSXNAEAQJBWEa/Jr6SCfiKAXnxz26n+wVc1hPkcM8TUvgifZHlClKA4NvIc99/G1OuRQp+mjJWnvNtm7Brm8MwvZfHhXpHPL1rKMKh0nBD5adPGtFqa4doTARrkB/N11yTkj3VIYNk3exvrNhM8Gv9QvWDpeYajXFmCy4goAKjfjs/bQg55fttJofzGXNsNaeNibaW7+aRACE5CdYgE+XXLZaBJ4/Q/No1TwDI3dzny44tVTinwapyWafw8PM7W54pp4vSWwqZ6jdNR2DVOzDXSoBQ6f1aXfrjVjQUPS/KI6PoGKJoYaZ+ULfhjHpupznSLFOe+SoBwTSEGsbQMr1Tfy4yXUpEi/q6AooncGY0TNf/OvxcrrH1nDJdCr7fto9nkVMbAK9qEnbp8KyvdP3vJhnd4Sr2SilZVRJRQkSYARs2rSK+d8hEHYZy0XnBIGITEP0hLS70aYsDDBc+uRyphVys1gZdUkhICQEhaUYxSQSJyVCu13Kbpuy2LYAkAAK1jiQ8gjZOAAuwAMOuOGmiZTi2Sp+7LIY8IpRMf3r/8mL0xP9D7cvNQCh6rBFiElYnXuaQQTlxRnl5SSQUAAHp1UbV4HxkovrJ1YuqkP09iL5cYB60aJIYDkIyswAxNJidQeeNg/SK8qKQIDT/MUfbolXe1+to/k5PxjkeUad9PL1z0cvaFE9by8L6XQANejEXsxK5HUmRRK3E/q5zUSlx7/lgESwwAxAaaD1POhtCxxAeQxgk7YEvjxJo89JriCmHl3pJ4ICSjIkcr/UNmNNaILIdVgi8CvfDc+HWcU0hJ333E+5P3ihwny32LP11ivn
EYj5cf/tHhJgKjPPv2K2I52LYopWlZ7rb5310jtk9V694uHcrrk4c23X/9IUds0eqiFRs8beXq2JbAdzuml9wO8dt1NesFfpE3MeJIkG7jmwa96Jqfg4UcrZKsueLU7DlUirePrRkNpzJ5X6zG9yiTeXG/NbLG6mzG93KJufmPEeyOGIEiZbxqPm2+k1OcZm8ZsrhQgx7p6UJH862idUQFt+MxhrJyQ44s8PSxsVRSEFeZFHnMm+vB2PcVB2ncNhYZ4rPTZdI8YxNNbX1tRUURAAhqHkdWZHjbGAN5aREJnaWHd05RheKtgM+QXkUuDU1MfZ8lucAjb+WBjYOuTHPe9arP0ECLZAaQNNycGOtSsMPZ5eeBd2fsZRo1To8h1jjdmGtruAWQJa0jzjgp4UgvGoOXrtc43cSCxom5RjbbKbK2k0mvmmqk9Y35CLXeAcacPG4BkZM9vM1MpEVJpL4BB2eqEkgvG4K3YVsluMib4JlCUw9teTzP2WwXni7ncPA/SwjGKoLOaJwoVQWFZY2dp/Syh/t2JCsvCvXSV+0lLdzaAxppnOABu7OCILob7OYGdiNvFewWDUXOfwSpcSJI9Orb1F6micqLAIKEUh9laTgGhSD+AmCTJ/Fejs1bDAKB+Jv5G7s+ET0BGIRJSLCEQCB6NEjjhMAe2JUwIRAIBD9BGqcuBo1FwARQyZM6sBwTYDQ3AJYjbxXsFg1Fzn+QxgmB4B90Ko0BAKCVZpw7//SPAH9mjUAgEPygUxonRtWTTbM9ZjX/2/q8zT+P6BQs0iban2u7NhjNWG4yZfnCxEI2yxQsdETjRP11JcDOyNjIwGTe6e9/6Ry+HYCY6a8m3n+Uubmx/kj75Qm/qY2+peq8M3MMtC08D71vIW6qfbJ4kEVMPu+HfvV9L2O/J7UAUHKTjh67lgdtokEG0jjBBnY1ToAtHh7qIx6ZJhDafiGwbyw4OqVxYpCryXgZiyVB4/rUTxspocKHacXYpE01r86G5DpcO2OlXHLLfs6ZDPtgCyjmnGhBxzROydYHdpQHX388vu6888Rj76ZtHQnt1O+wITYi+GJKoAbl6XJ9ryNejmE2kclxklplcQszzE+8jBrT/gyhlX/+XEYDAABxg/D7d7s84J4J0jjBBpY1Tmzx8FIf8cg0AdCeCwGq094pjRONWEHCyfTX1FCvZ4CKBB8+XLBJmyT1/Z7vt2p0ewgLQTlCrGMap5oxe9NOOCniabXlVMUBchAkDMagVf0qoKv0lxMivY5wm7374OajL79cCXZfePDBtbAplubmhsN1ph7+QAYA1P2MXz9nko2uuq7Hf18obEaorxkRAVEvv171d3ZwsDNSldTe/oHCwfyEYAFpnGADyxontnh4qY94ZRr/adeFANNpb8sjnaAyLvxY1OqZRn1EWp5lek1JDR3PqPpTSebjd1mcqKq6gnjLSHAAANqf+H3x4p5uo+BsvhNk1dXZuxBAo8bJ0WLOM7slo+nVzBqnwkq6sAi+4uE6B+vIPj4uf+XsfB2F/P7wPNvh/UYfGXb4rJcaAQAAhPtPWT1XW8ttb9wxXxPTBUdvpz3MuOn3O3JfVi0AOEm9gGPx95/f8srZuOlxNevhpMesObhilKbr/qTbt68fm9vghKHV0katPREXf+3A8OTtCfmojcAGTkJ1SB/xlrcZJhPPSjaNU+YyN/dZbu5RVVbO8Gqc7t6JGZfp73OliPW+V69xCoFd48RcIw0ap6PBuhLsD1FO1SdIWOJpTX3ELdMEQHsuBKhOe2c0ToBOBVKSRedDg3zmz/EJPX4vlySwr4e0ilu7tu5VWnzKvS8c84rzph0aJ4CTNd92P3s/Lnz9Q7YHF4IrokOXRMdHz5L49AMnxt4WZ1DLXp5c57Nw6cbLX8vLiHQAhOQHKIsCINxbV52WV9jWTOZidfrLQRonAQXYATCncWKmVfURt0zjEx29EGCiU++l4to+e/4FgE4q+ZwRHx19fAtRdrffKB
m+d+jQKu9Gbt4EZscH6ApyBvF20EaN0yHaxVlTSsPO+2oxGLQ6EhWySxd28NLGIQdsRy3d9PhZpEGLNZScXW7rKMcfHzUgXfp0738AAEAnVZPoAJC/pr6XGj1IilDCYoQCOBweR6fSUR20DtI4YQdsaZxYkeKgPmoKnj3T+E0nLgRo6IozhxdTGmw1x8dRsfb5vW98GMdLeX1y76ztqR9yEhetPnmvnJp3ZZtLzPeitH+tJy0eMW3XRbbePyigl9xe77H4SNaLg4u8I18xnaZ6jZOZkd12qRVbZ/tFeX32tzWznJs6YZ/fcLVxfnqJbkam5uO3iazYbC4tuPCxCU7edmukYfySHS9aNuKFVEysZO5sDQ5auf0OTkYEAIAXIqeFuo+3NPXImHxkrZ6YlPGq+bRNTk5TZ69OIosLAQBwcvqOva/NMHU79kXwg6OxA+lV5PwZAefeZx1Y4LHhbiml3sRTeCPQYsTQoaM8LvymkV7tcLYLelAJAGjUOO2HWOMkFWFrOErf8ZhGRJSTEq45eEa9xikICxon5hrhcLdkqpHWN+YvLPGA3lMPbVE64GxmZjnrmvnBHZYyTcFT2TMNpsh5XwgQnfaOa5xYpk6ifDuzfMOd/qsOrRzZ+mBxpHGCB+zOCiIwGOSS7/m4fgMVoWrtdwPYzQ3sRt4q2C0aipz/CFLjBGglGfEPKnqr95EVIhd9SL2SXCplOk8DQ1/hED2dbvIz4USV1Afy2h6BQCB6BJ1oItBJZb9eXk/6XwkZACEZNR0n/9lu+tJQ/uIQ8ReCXE0IBALROZDGCdEDEZTnCTWQEQgEVCCNUxeDxiL0DATiZ+ph5xG7uYHdyFsFu0VDkfMfpHFCIBAIBALRLXRK4wQAAIBem/vk0qGtK5fMn71w7ysil727FhaNE7360b+bjdyXm7oFzDyXyx+TVAfoiMYJAAAYNc82aPeamFwpmLD5Tt3PpHXjdTQHDdbS0HPb+aScDgAx03+IaXuNJo3qJnLrm3bT0apTPfsbN4Zdm+GrZbDrcy1L6Wo6UjRMgTROsIFpjRMTnGOGzd7UAp55AmXkndI4AQb5x+0D285872dl7+Tt2ldJSZUPM5OzaZzIX+KDHmifOj1lUNWjKbPOPnZZay9AXQcXOqZxSl03TLj8QdjaV1oD/pafipDf/uPqm+N3762Xpmjtl4dZoh2ehxQnZxOZHCc5pGtysmuORn77j+uylqX73SXhwQrSOMEGpjVOzHCJGS57Eyu88gTOyNtyBdZrnKTt/JdVnop63rycQfp0YVdc5fiwnRMHivFvoBZBzt4v3JWSZLulYYGo1oy0WLyoEKgjVVNk+yhBd1cBoFHj5J7k6s15fb3Gadi3mxU2mxo1TosfFa/qlb1uC3X1Tvud2/kbr6AgvY2JrZl9caamGABAQtPCjHkl8f2JQJ9jnwhilFo554jjay0lngUYLio0GUn/9fFXHzdn0Rcv83/mFOtHXN0zvhcBkF5HuAXqxG95PGkdwVGfnv8h61u/oAvRczVEQOUNN+N9ioPfH4v/LmVgNoRYMdD/0hmvilVtOtoauuUQclFeTqH21vioicrknGjfxSd+SMvR6zSWREdOUxNupXRXmUtH7NlNBCEVh803J5MOWvs1LJA0O5JlBnAA0EArGqeKMiqArYnQoHHSzJzllrXi4BZXtrpu0Djdh1zjxFwjTRqnPWKBAa1vDAlcY+aVYPDAKU/gjLwtVyBBZVz4sfE4HOVLzGmmxYzKF5fuFoNeaXv8L5ZSRPsMt57q5W6swv3+2EXgRFXVRckfmZcQRIVov+4cdd+S3st/B6QXJ0FWXZ36ncOKeo2T2JfMsnGxvvRwZo3Tz/dxy/fIhl2xkj+1k8/hCoq64s+VCm69OWUR9XuM9zZq6NOUsfKUd9uMXcMcsiMAnUIwXHN8xcC8PYZG9zZ9SHQVSfHUDbu5ztFDpan3gVZD1vHft3wQIe+IuVV4xtQYC0kAKPmfFGKzSg
9SJfsqCf3818JuX9Z0rzYdrZZmuPZEgAb5wXzdNQn5Y+3+N28LOeT5bSeF8htzbTekOR7Vr6PJKErUv3eR3uzzck2SwAFAK33+mzhcjFvpeig4CdUhEuTXLZeBJnvN/9g0TgHL3NzlyI8vVjmlwKtxWqbx8/A4W58rponTWwqb6jVOR2HXODHXSIPG6fxZXfrhVjeGBJ4xc08weOCWJxBG3rZGOkeNE/nn0y9UCa0xLk56qlJ131Nij+/fUie/c94QgVzYhH72S9KMzHxn7Tttts27N7xXKAAAgJr01ZP875ZLGG0/6YxXtAk7dcZG/NfJCbYbqKoUQjUNAGFaVRFRlPH8ytva6u2zp9R+Ts+WCTiqcWRRF3WbQ4uQorp06dt8ClBhSyNyXuYvFRttWRwAov1NhhJDP1UxBgG8qIwYHuAlFaWllRTFcAAvrShKriK3+I5NkO4tKwwATl6rH+XcbyIDSAIAxDXMBomWPzqxdXNONSX3a7kikQ7adLQGdROtXt1E/PUij1R8dXPQbRyDSOwtUXBlSl+v/IivmcvUhQAAYtoB/11dpEoAoDbDV9erVJ3xg3PpehLErPCpPvGFDHn7/fE7jNjvdI32mnhOGifbmmcbTCdDrHEKHiSOExroOLH3oax8ynRlppps0Djdx6DGKddvysWaTy+eCi8OHfDfZks5ON5gWWjMKzmTyQo8Y+aWYAKj5RUhzj1PoIu8E1MnMUh/SklA2dDGTFcJD4B6vwVfn4U9uvtt5pBh/L1AGH8e7J32yObiaj15ABh0Gg3SUU7MtE3jNOFI4LrQVQAAau4hp1Xq+3p8+wAAIK49113IadP5hXFemqKk73eSiw0mjVYAAAAg2n+MauGDNxXz+spT8h7niOsFtXWiLjqpkkQHgPz5fo7kKC3Jxr04yJzaj9hA82HK2Z6bIy1lcQwqmYqn5qmp1er053hliWvPdXkT0KJ0Yzv2Z6EGaZywAwY0Tk20yKu1AHCOWfD2JnZaRE553zJPoI68Mz96JAgTACBWkhoGugrJ9pUBxLLa7jdOsGicgMKYKdNKT1jPWGnpHVs1a8ksOLsQOqJxGgrZJcoPxPQ2xO9Wi3bQ1Bg2wtA9Ohc03W+FBs47vhYX7mhuZWHhlel2fJNh6zaQeuq+n/V3dTA1nvN86tH1+k17scucOgBBzePICtIGG2NLO3uHyWsSf4tqWtjqyHF5SnAvXU8FaZxgA9MaJ67Aam9igy1PII+8Exon0pczyzdcV1y4P8RKAQ9A7ev9AREfLDfv8dRo9WaLNE7wgN1ZQdpIdarniJBxT1NnK0PZeQoz2M0N7EbeKtgtGoqc/whU4ySsajtO8/qZ00fipaaNFM+9Gf2EOHCGXf+/8MUX0fV0nYFJCoBccLoIzO6m43flcmzehhAIRE+lM78pEuozbvlayomT1/eGXmKI9dF3W73AqS9kP1JCYJFusCspC8rqhCxQCAQCsyCNEwIuBGVggmc5AoFAdBVI49TFoLEIAkcgBiYIlwscCHOjjWA38lbBbtFQ5PwHaZwQiA5Cp9IYAABaaca580//wDtUG4FAIARFxzVOtOKba2Z7zGr5z//M97ruCpUJFo0TAAAAetXr5ePdzBPKob3Zd0TjRP2y16iP+nBtbb1xW7IhFsO0k+p7MxREVHUNDQ0NDS0XJ+alt0FlVHV3upp9fAXPbdoRwX0vY78ntQBQcpOOHruW14XOFFL2Wg1cP7/0Vn8qRHn/z7iFKdVtWIhpkMYJNjCtcWKPh1eEHCtLoFC+nvK2NLWxMx9jv/xaQYuQe5jGiSBv4h+uRWxIKHrFy1N7r1Qa6PXq9vGKbBonAACgFcdtOZU3pHdbfynPdzqocQoo+ay86tbDoEE9bs5eCcPwmw/n9cEDAAAx8xaf/zqt/PPnMhoAAIgbhN+/25WHrn0ZfUtrie7Do+mVxrYyvLak5L/5TRnTloXYBWmcYAPTGif2eHhGyKWyBEn1o42h3xY+uu
up9Gn7mCl7X9vt0GvMFDg1Tm3pRajXONHN/JfZM3sehWT6aWhqaWlqaWlqKlc+uvVV0clv5nDJbh97RZCz9wu/5jtcrjl2cnZM1KnBi8IMJaH9cFKvcdo/sTeXVK3XOCl+u1lh49aocbr1qKgy9xuhv2KPax9woyLBQW3qnSoAQHXK7AFWZ4sZjOrsqGn62mNsnTy3p1cyAACVN9yGj1/kZqRtvercwQXWJlZ2NiYmk7c+KKUDRuWzPVNHG1raWtvMOpz96kTLtc07Bv+7KSDq5der/s4ODnZGqpLa2z9QSO8PuRubOoxzcFl7v6z2Pcu+1aleQ8bMWejpPslK19g3sZAGGLU5x+dZmto7TbAd63fxR1PnWWX60YfaK9atGZP1b+ofRsuQPlKa97Iaqak979Lr62umu6+7fHnTFEtzc8Phwy1cZq2Of319zXT3DTfTj3E6PsYQUnHYfDN5k5l84x1c0uxI1tWZDRPL8dQ45ZZB8+bXRIPG6evBWW6rr3KqlAY9zyJITTGAQ400KZHCLeVY758cNhYo7PHwiLDVyhIEBGll8ZriaiqDXFZMkVaWYgqb16UhODqhcWqG/CX+ZDrDaPlETX4IH1k1ToySh9G+76zP7lSnn+/+v95hOqJxyi8toBc8v+Jmu48w0H3nvqX6bZ1xGAPUvgifZHlClKA4NvIc9x+3UL+dWLxXYsfzDEe5sgSXEVEAgAYDU3ZWr1hr+yJmq9Mzv5TFh3tFPr9oKcMgfT1sN7al82lLw46vR0vi6AViKdmPIpKOmhJerR81F4C6H0nRLw13p0dZy+Op3w9ZsfiitrVqbxoXaysNAONP6pGsMf5RqobyNkGbbxWNNYhtColKx4HMN2qeAAAgAElEQVRv+5n20nM7Je+648IZa9GyX1bOofU2qfuWw/oP2nEhtv8Jc1v242MOpHGCDUxrnNjj4R5hq5UlEMR1fQMUTYy0T8oW/DGPzVRv+frXszROjdDLnl6482fg9Gl6AnmA0StS499TKoqWLXtYk/vlmfD+sL7Bmwy7vzOjU7RN4yShoqi74PmPEMCoeRZiEXBm4n2f/pC05juPhEFI/IOmDw3ctiLlpv/qba8jiwM4YUnxxmwV1zAbLEnJzvylYslsdSrOfVrQ1224NA4AHO3nU5a1VQzF+h05poawhlfkouD1JkNppisip2X8UrFqua9qK/YmEoUBAI5edPNgyp+q6lkuQrTy8oxniV/WN4ckhAc1LHuBUgYAgEEte3lyS4NNCqcvBwAAZI7H7+pK6CaQxklwMbYXDGicWksnzrRWWXylqQi2axzSIpROfEhylss9Pt7a+7Jp4nQVpoZLT9I4NUIrfJT0Bq8bYCGgWdXxcm47o9wAAID2/czaxdL+obC3D0BbNU6OR3oxSGS6mCgeTwB4EdEe0zzgCl5MBldRRmYAKeKfCgoDABGVobIFmV9q5vcV//OtgMQ8gI3d6qSkpq/8K/VdlbuldF2doj6b8+lP8844HB5Hp9KZjkdQMAs6mRb4M9ra6mCJD9u+RJZIWe1NBFEcALRfCUd/eVzO3GMoDgDlbZjxgkTquKaQqmtx4i32yrcpNbpKZ7FJHcfhGDQ6ENPkcHysgDRO2AEDGiee6cROQ/BKHCpLYDQVgfxqvb6wcm9JPBCSUZGjlf4hMyDXOHW6iUAteJLyU1TXXZt/XQiU1ycPbbr/+kOO2KLVRSs2eNpC1eTlBr3kdojfrqtZL/CLvIkRR4J0Gxu09RonOVolWXPFqdlzqBRvH1szGk5l8r7YwTUPN3qsuVsGaIRBC46s7YulW0+HkDJeNZ8238kpTrO3DFlcCACRIT47XSbNMzbR1NbXVlRkvmUJDZx3fO3ThY7mW0XriApux2MMZeWGHFng6WNjqaQgrjJp2/61L5cxrRUHTGMicXL6jr1XzDB1Cz23FgAAAPVH3BKvmFIZfLmKz+7JvpIU34Ut9k1jiZSg5nFkRYa3jTGQlxaR0Fl6eO
cUVca3S9GFdlEj6sd8iWi5TSR5PnWIWrBjWUNIkcf8mPcaYm+ole5jP8lqrJ70na3BVRrSRJyMgs6kodt97F2mbI5YnhHc4viwjLhqF6RXkUtDE1PfZ0ku8MhbeWDjoCvTnHe96jM00CKZASQNNyfGuhTscHb5eeDdGXuZRo3TY4g1Tjfm2hpuAWRJ64gzTko40ovG4KXr9Tw3saBxYq6RzXZsjyPSq6YaaX1j/sIeD4VliVluY/BslSXQyOsRHREQOdnD28xEWpRE6htwcKYqgfSyPuDYwRdYL43pENzyO6FxotEAALTC6+uC4+QDDq4eI9X2GkAaJ3jA7qwgXQODXPI9H9dvoCIcb0xQgd3cwG7krYLdoqHI+Y9ANU4AAMCozc3+DfpYDRCHoYWGwBgwuJRwokrqA7v4+AgEAtEj6GQTgfrnWxFNWK2fjOD7QxAYAwZnEvIwIRAIBHeQxgkhAHqeqwmBQCAgBGmcuhg0FoE/wOZM6jHupS6kB3yC7Xlgt2gocv6DNE4IBAKBQCC6hY5rnAAADNLP1Jgty+Z5zJrtsWDlnosvy/hk+mDVONUkLpml5rRUd5KvVcSbSv7E0H46onECAADyx6jxxn7JxZBoVARE3eddOhLam7KZJiigfNprLDlwxfP869PU9MPfkpm2bovzicM2jDL2Q7VGdapnf+NGhUxthq+Wwa7PdQDU/UxaN15Hc9BgLQ09t51Pymsy26Cq6pEgjRNs9CSNU13eRT9bIxMzYyOnkLvs90iYNE5tj5yH6onvdFzjBBjVL6PD/81Sd1uy0UCR+OF69Mmdu8R2hU3s080/32bXONWVvSWOPB632hG66dia6aDGad0wxoej6zPdTkeP7/XXDwkVEso/7H/M8+4yTWEAAO33paDNGbUy5jg5m8jkOMkhXTAzSpcdivz2H1ffHL97b700RWu/PMwSFcf/7nx4GARpnGCjR2mc6AWXl4YU+6Y+dJP/esBh4poHmdE2TOo0iDRO7Ymch+qJ/3RC40T5mZ5dozx2lsuYweqaIx09p2qC3Ky87m87s2ucKCXv6lRU4HYddUzjVEz6eHhhxLu8k/NnbuLQRP7LEB3hu1bl3+UXf9MAYJTd27Ahd/oqEykASK8j3LwuFtAAu/MJEFmFTLy8UKFZZY2Haru3iROktzGxNbM3zdQUAwAnoWlhpgqt1aebQRon2OhRGqfqrPNvh861VyEAEc3JnkoP//eeqZcRKo1TeyLnoXriP21pIhBUxoX/v73zDmsi6+LwmRQgEHpvIiIiggVFsQD2sjbsfqti71iwl7X3hth7Z3XXtWNXVOwNsaFioat0AklIn5nvj0AISeglid538+yDd9q5M/fcOXPb7+COBX/62OrI+zPNpI45ZLx+9V1AApCc+Jh0zKGxQ81HO5iug7OZ/DoMODcjJfnx9KkLO4zbGvqao9hQpiFQjZ2dlZsQoFDGqbvfyKjOU1oSXHkZp7SkG6tONTr84N7tQ363p614We5Vrn5RqNa918/MX7MkIpsbvWnW45675jWTLwgFmk8Pn9+9cmRGU30MQJJ4dNw6yeJrkXfuRR5ueXb8yiiO0j4g1YVaePd95Eovua8onIe3WHTk9KWruxpdXx+eKkw4NGaNcMHl21evnpvNXrfkIVvIyuZJozZBzPbR/fr27du37+DZF39KAMSZ39hm7jaaHbXWCpi+g5sto3g1I6dVM09JxunFjIFDhw0cuoPTvrfmyjjdiTja48X0yRcyFCsbqYzTUk2XcZJ/IgUyTgdmN9FXnqWj6vGpEwV7SH42i2IhrVYpTGsm/2deUVhZ5sOqVSpiOaNJ0EzzDT6eXo27nW2+YJizWuuR8j18lTJOVOsuUwK9cs/9NWf1zn2bl+745DEhuLe9GtpzqNbd7j08+PDw+mvzrE8sO/u+Ih3JaiL/2YIuLb292wfdZpEU844rw87dev1s+tspSz5SRFlcHABwTgZf3zgzMrZOQBtzqq5T5w
56sfGa2jtbe9Drjto2/MOcoOAp55pvXeFrXKxcCpKe/bBpL6/5JEwuLvX05mtmouI+AKBa3qlAt4mmoNsUPGvFBb6NftqFAXYWvrsTJACg5znz2MXw8PDw8DNb+9nRAGjmzobZH1JF8HvCf726l4+3d6uuC5+rjGoLtWpOqpJxuv7vPJd00GAZp76uDEy3bvc+NkmvFZ5wgYzTNC2Ucfpv2oD+4/ZEP988afmDXA2rZkouThjD3JTIYksAAAhuBpdhayR7AZX1sGqDylkujtszYqPFkc+xb768XZYaPO6c0viQ2qQq8SHJz07LFFu27NTKUQ+jEKy3958k8NRRunAxHyel0jwYTYeqBTPWDVpvjHgZFXV/d1fTQmtlMk6RFz7ySFHC9euU7h3cGzDiHsfxQZL27DnZtMEvpAVdaXQbzQjt9vqq7coNPSwUSm+h5hOApEDzqVDqiQQQJj/5xGjmam6juE850avr627VMHBV6LZt2/f9c3HH/wbtuHXn+Mg6KmNihueoobSwFf/GCQFAkBhx4WWOhjZu1QgMr6VXn0dFvbitUuaxQKvmgryM0+eMgvq7UMZpt8bKOD14wyJAkv7kDsvVRyrjVGi8VMZpWT8NtLxkKNaDTkVHRVwJD79weGpzn3mFMk6yJ6J+SilOTK+hHrEn7mYQIEq49HeW70B3RuETUfWwtMNyIj8zT1HqSX1U/qOf5L47vjuCOmDzjJ42VOjRq8ftdfOP77veauNAhxpuF1GQcRrR6sfF0WufpGGYmLQavnJ6I81cbL8yMk6eruSWIYHj2zYXScx6h/7jpTiY6LcEM2y79XMqACitTaCs+aRC6sm4bkm6UKWjQrfJrxMAAFfV3nrNllwKmTe6q8sqXWN9oybjd7WsbHa1HSTjpGn8WjJOg/aseTKmd7stFMKk6+5j/kZQ9EQ0S8apApZ3VJJ6UqPdlZdxEiadmb/gWp2FO4MbMwAAxMmn5y2+4bxw70zPMpvZkIyT5qC9q4IgahrtLRvaa3mZaG/WkOW1T9VlnCofEtIsGzcxE7w4Hnb7bVxSwqdHZ4/fyNRxb+Wgmd/wiF8EDCv4aW86AoFAaAlVGF2oW3/YX9Nox86e3vSAB0A3dWkduCiwtdK0GQSiutAElSak9oRAIH4bkIwTQjtAyk8IBAJRUZCMUzWDxiJoLJqm2PTLKz9pUdlQQHstLxPtzRqyvPZBMk4IBAKBQCBqhCrJOBGc2Ms7Fk0cMWLY8AlzNv77MrO21rhUlHECAJL74aT/oJ0XMzR3leKKyzgJv+7p39zT09PTs1E9E4N2x1J/p8n1lUf4cWtAy2ZNG5hhmKlrk2bevda/q8i64OLkkyObe/oF7oktZRGuygg+/aYgGSdNQ6tlnADKc7dLzI4aUbiTBOvR2oCWrdq29vIeujum2MTCUnyk1qmCjBOefjtk/T+ZzUfMG9+Q9j3yxOHQNeIVGwMb1PTKYsoyTgAE+93KbVlBO4P6WWnifGqovIzTheipACB8v6LTYpduGrysqyah22j2pZezhR9WeA/ih73YWEEJFDzt2u7nvkfe7GhV2jIU1agd9UuDZJw0Da2WcSqgrLtdUnbUiNKdFH7cHnzN7+Sjua555/q1Wfl45JmuhgWbSvGR2qfyMk6StCc3v1KajRn3R1OXeh7tA6f0tMiKvPK55mUElGWc8NzLa0JP56Xtmbdu0Z1aa8qoGJWUcSIAAMjcyJAr7sF9tWrVNs1AQXuJpSTOpKjzJEw+t/bAm7gLs4dO2Pf6bdGmVf+Gdq4/7GYuCZAXMdK9z5nktxsHjj7z5WqRBBS/bHN+P5CMk6ah1TJOCqi+2yVnR40o3UldjyWPI2e76QDOY4nM6lvILThYmo/UOuUJ0qnWPVYf/APDRHFH/y5KJQRsAegYMulS++lWDR1oV37EsSRN9Ws28sd0HZx1hV+KEvLf/7sso//NE33rpV/rOWbXRa8Vg800b/A41djZWZKoYoNUxkkv7g
Wrx/EgYrW8jFMqWwL2OpKUsyGxfbYqCBIgyoEk4dCY/4RrX93uZZZ7Y1SnJQ9XEzzce9GRmfWED8Y2WRie2q1r+Lh1kuUvI7uZij6ua91vZde3WxeM2hKZsO30Fouj7bsUbQqI6tng1cFHrK7tnh6MbjFthyXlAQBIJaCOv33fUkniAQEgVa/RF74vngYyGafzSjJOM2cMHGoifHKG0ytSc2WcZtT7vrdHp8kX2l4eUrxlTyrjdEDTZZzkn0iBjNO/p5oQe8vcWbNQdbdLzY4aUb6TGE2XLvlxYcbgoEuWqyMbFS/sJflI7VN5GSe6dWNXvfyXlyLjuThJCHNTU3kkSMR4rXcgSpKefmR2bu2qh+naN+9tkflGQ9sR5CmnjJO1IRVAELP/ADZpnOZ+lmgwQgXtJYGIVBBnUtJ54hQWYAUJKEGK46h2n4/c/3b/6NcOU31NZB6hUgLq9wbJOKnJwEqgBTJOMoqXK5V3W7OyU5YjAM2+/67Hn07YhowPSyk+1qMkH6l1Kv/BjzGbjZ3Zd8fesCWTwgAAKFQgwNnCoNZbpKjmTsYZN+NYw61Nsz/dZduNt9J8DV6D1hsjXgIAAPvGbmmSTMbpwIWPPC/P79evU7rvs6KSOTe3XHOfc89eM1v6NBy9ur6OXCxwVai/MUZKhBL+s7Hziu1QqPM0xs5UlPzkE6PZLJlUlvKmriO7bwxeESLptbuFAWjQMCiNg+G19OrzpSVsLJRxuiwv4xQnruNmpQNFMk4vNVbG6ewb1nRn0/Qnd1iuE6QyToXGS2Wcdh3SQMtLhmI96FT0IAAASeLOThNMCmWcZE9EgyhWrkSxxe92gc2qsqMmc0t1BDLn2tiBN0ecC+1sCkDiYpyUcwRlH1EfVekToBg3GbJ0T39uTg6PYkCJ3TN71w8PF6Oaz5GCjFNgp86TFj3a2mXIvyDS6zBnXk9Tjfykq4yMU0MdSdKZbYn9QtqrsZhrM1THEft6HA6SaS9t7q2wg7LOEwOSStpkxmT2z25xbvx7d10ANPKgfCAZJ01Dq2WcCiEV77aczWo1rESU7mSHuUPCRvr7MIwBd5q2d7gjVfBamoXjDf5T9JEhdmrzh8rLOMkvnUTkRh9asvWBeWDIsu7l8W0k46Q5aO+qIIiaRnvLhvZaXibamzVkee1TdRmnqrQikILM5OSMzB+fo+5cfxTPaBs0vYtGxv6I3xXZEBoF967pdAQCgfglqEqIIPx6as36F4Spo2vTXjOn9PC210Ot4QiNAekzIRAIRNVAMk6IXxB1aT6hGBmBQGgUSMapmkFjEX4N1KLJ9IvdR+0tG9preZlob9aQ5bUPknFCIBAIBAJRI5QnRBBnvDobMm/8kMFDBg8ZE7zu1MvswoVRCXbMha1zx48YNnz0tNVhT9PVIuOEJ4dvbBUwtUnA1CZ9x1s1Hbvgo3plL0qk4jJOJMl9HTKwXfvOfj5/LH/IQlPxAUD8/criPxq7uDaoX6/ZwM1Pc/NfTHdrW0WNmQJNpqiHRafiv5ju1nb/+8tlazXxX8xwbTD5Zrb06fCeTfXsczWvtAN+N5CMk6bxa8s4ieLDxvm37djZt1WXOVfTNMjq8jqCZpWlsocrkvmx5w7fzmvWJ2iwPS3z1fmwi5tDzHes6GIOktSbIRvPZjQeOHWYI+fF6b93riPNNga61fSgRUUZJ2qdvgte9AUAEMX/12ExMVLNq1GpplIyTn0uuC89brvl2bkWCSvbTjs/MWLc776CkvDDpn5Bn6bd/TDaRZcX9+i1LoPys+pnLdBkcspdpjK9LK0mkvd1/+BRrd9fGO2k+at21TJIxknT+NVlnLiPly1PmPD4TqDF1/WtBmx733lDBXXcaoYKOIJmlaWyWxEwA8+J2/esnBjQvpV3u16jJ3cxIeOjf4gAhPFXw+N0W08O6t+2ecvu42YGWGfdPfeGU+Ohv7KMkxSCfWvPPftxPdw1MUKonIzTk1xDM4yVIyDEbBZhZKL723
cLCT4cPZ4/fMWfLnoAmL6LXzsHubd3MUGmG7cWN3Obci9XFL+nS+OJt3LEGRErB/j7+no3ajxo72chyY7aOqilt3+nDh2H7f0iErzfOHD0GeUvDvl0rrIEVCHGPkE9Y6YP2vRWfo0RvPgVa+yeaDZIxknT+NVlnKiGVoz8TK6EFLIyRYZWTA2xvEKOIEUzylJ5gnSMSi/MFiliZfPBvK4ZDSRpn2LZFGcfF30MAIBu3ayx8cWnb36KWhvVbI6UZJykiJJurIlvs1+j1L3kqZSME9F8eu+5PZo3NeEIu59Zbv7bD5gXZ35jmw20UfWtLkk8WkyQ6XLIgYBZM+aucnlXb/P1LmZ0vMX4A7eXWdC+7/frvP1Fm7rBey1DX53xNyIlBEYTvizP1fFiElA/Ow/SByNzfQCgmHXbus+6j1//ea0fDCvcm2oqf8XXo/a01vRl92oCJOOkafziMk7AaBI007yNj+cJ47Qc3+MvnDWkYa8ijiBFQ8pSha6Os54fOxjFaD+mpyMdcG4WF/TNjQuUHoHKtGBCfiZHPRpKJDtizx2rcb09NTZ4L0Y5ZZxMf+wOihh6P/Zd7MtZn2aufPnbL/pLM3c2zP6QqmqwiYLqEv9taoMpE3UOr33bOaiDOQVICevNicWTJ0xddi4+l8VKfJlm176RIQZAoVHKHXnJS0AlnetvZ+G7O0H6pYsZNFt4bnvDf4aOOVX4bVn8ivzfaSAJknFSk4GVQLN0j0qnHDJOII7bM2KjxZHPsW++vF2WGjzunNLgilqkso4AGlSWyh8iSDIe7V28Ndp5/PKJ3sYUAOkML/naVY3fuKLEm6u+tFza2UxDWpXKwqD1xoiXUVH3d3eVCUrIZJwiL3zkkaKE69cp3VvrZ+czLE3oGJVpyeSnsjWv4bWWYXiOGkoLW/FvnBAABIkRF17mFFYAhapLJIAw+cknRhOzZ2sOWG8KaXxu4alkiejTloGLWcO2Hti3ZUIjBug6eFn9uP+RQwIp4uZXKqo17rj91p3jI+vI2uHodUefODUgds+VBDEAgMIVfysYXkuvPo+KenF7g49yy0mhRM0FeRmnzxkFL9tCGafdGivj9OANiwBJ+pM7LFcfqYxTofFSGadl/TTQ8pKhWA86FR0VcSU8/MLhqc195hXKOMmeiAZRrFwp3u0Cm4n8zDy6lY0BBWhG1iZ4do5QjQFP5R1Bc8pSOUcDidPu7Viy95Pb5HXBnWzpAAQAlWnJBF5WnpgEXQwAcG5mPhg4M2u+XUdJxsmYe3f/Xauxq5urO+AqjcrIOHl55S5vOCLA7zidJ3SZf8LXUK050AT0mi25FDJvdFeXVbrG+kZNxu9qWbiluOrSgC0jw//aMeb83iCqweUuM89229+mvdHStbM59Qz5mJGOToOgfeMDJ3f0tzBjWAeEHhxXcUswAxe/pnSQ13OiWHQPvbQquudTAACadbErVj3rWgqScdI0fnEZp44zQ/uPGNeujaGuQGA3c/efDhpSkiriCJpUlsoj40Tyv/w9f0mE7YQN87raSiMAAsdx3seDwaufe87fMbWJPgbilPMLFl4yD9qxuK1xmc0JSMZJc9DeVUEQNY32lg3ttbxMtDdryPLaR17Gyc3dg06nl7S6Io7jGWk/KyXjRGQ/Pn4lzXHAGGd+Snw8AABQ9CztLXVdevVyvv/P/r2OI7s6cJ79cyHNvPNEL6PffkgdouZRlz4T0nlCIBDaST6XY2JqVtJWoUD1ULdyhAjCn2+TSBCeW7/onCzNfdahpd56dn/MnS88dPzSzg0Cqqlbl6A5fzZkoAgBUcOoS58J6TwhEAitJSsz09DImEZT8dInCILLVr3gG5JxQmgT6tJnqno6AoFA1D4yGSeHOk6cvDxHJycqhSrf0QAAmRlpxiamrOwsJONUNmgsgoajFn2makzXIrSubMjQXsvLRHuzhiyvfeRlnNzdPb9+iU1OTDQzN2cwDHR0dQhcwufzuBy2iamZvUMdVn
aW8hk0bn1TBAKBQCAQ1QuFSnVz92Dn5f78+T31RwoJQNehGxubOru4GhoZl3hUOc5csowTAAApSnu4a1rgtOPxtTePtpiME4A47e+l831HL/Yf+te8+ywN0u0oTiVknECUeGpSh7btO/h1GHs8TvMmKqsJ8bctjfU9V7yVG18j+rqttUHducUFT8TJJ0c29/QL3BNbyvrHBRpOxbSaOHeGOHa5VLIYE/d+YJ3WhXIsvOdB9Ztv+SauGYkp7QfJOGkaWi/jVGpR0dhSpHgnc8N7Wdm5eXp6NvGb9YBdbM/kM9M6+bRp19qn19I7mdV7442MTRq6e/q082/dzr9Fyzb1GzQsJT6A8oQIBTJOrn2C5s0PHtWa+u7i5pA7GTgAAMFLeX5u24K5+5+warEVRpJ+bMGy9WkyGSfgvv575fceZ46su7Oh6d2QizEauRx+GTJOD588PdHmzpqTD4/OOOq84+7jB8f8rgbvis1+uHQLsfR6ZOStEJtDK++zVZ3594RGS907/WBcwapH+M+zs1YpLWCGp13b/dz3yK2wqQ1LWXRTqtU0rSytprKRSkwNvfjh65evMeeC2zoyNGMKuVoRJxwd0WNdiqJ6TfiTJ0+ujPu2fOWz/KJ9pTJOpyIfPH58tNk/wWHJmrhUWKGM024VMk4xMTExMdFnx7l6jdNsGadiTwSgdBknpZ01gFKKioaWIuU7KUr7kN/pyMuYmJh3D0P9jYp2JdLOTV2aGRT+6PGDv/94ELTwgVrr/SrIOAGe9fDI0adkmylTO5nWvKUylGScqAbGevw8Lk4K2XkiAxMDDSvNUiol4/Q4U4KLBSIcSGFeVkZKhkBzImJ1o+sRtMh6/5wzP3EAknV3yZKkIfPbMOXFkxq26jJy55u4C7P6+Hq38WnbpVfPTt2mnUlhKakxybSaSO7bHYO9PFt16hW4/hmbBACS9+nQGH/ZsaUuw1iqxNRvC5Jx0jR+DRmnUoqKZpYi5Tsp/P5B7GytYlE17ut/PzQc1cWaCjou/QMtHp2PVefK++X5zsGodGqBI8vJOAFQrbot27tl9pCWtrW6dhym6+BsJj+5ktGg9wyTM20Dpjeb8Lj5uA7Omjm+gmrs7KzchACFMk7d/UZGdZ7SkuDKyzilk+3WLbM6EDj4z8Ej/9bt4W2CRsnLoFr3Xj8zf82SiGxu9KZZj3vumteMgRWKJz189PzmLFqSXpN69QNmdpVkOS+7fPvq1XOz2euWPOTgPLzFoiOnL13d1ej6+vBUWTOeJOHIpG36Gx4+v3vlyIym+hiAJOHQmDXCBUXHsoWsbJ70AEHM9tH9+vbt27fv4NkXf0oKJKbcVUpM/cZg+g5utgrNKXLqNfOUZJxezBg4dNjAoTs47XtrrozTnYijPV5Mn3whQ7FVXiq9s1Td0julofhECmScDsxuoq9cvah6fJpAKUVFQ0uR0p3E85JTvp2d1rujf5dRWx/nFJUkkp/NolhI3xUUpjWT/zNPnVFOhV6nhTJOC3o60oHEATBMA95Zkrh/tmw2Df64p5XJz5u9Rm8777VisKZrIuY/WxAw/U6uvs/6E70p5h1Xhp3syPhxomenJRIHEZWLA9BxTgZf39qQUXdwyIW+H9b792eGTGzw+y7jqwp63VHbhh8eGhSsH9186xNf4wiAAvGkNas+cUVJ8blsKz0Q/ohOFuRcXDXrNkby+Tb6AhGnQI0JNzTXE+UJZL4pSHr2w6ZLY2MMMLoBgwYgPTaz6Ni0CwPsRqdujH8xFkDPc+axixMdqAC850FNpshJTFlrRp2kRvivVw+afNjxFdgAACAASURBVCmdNO2y85LK1ekL1GsuqZJx6pQftaRtfw2WcZrtysBodbv3sdnzOlU0xEruaRdI79xTu/ROBSiQcUqaNuBM/tfol/RJy52OrfLXzI+RYuWqxKKiQaWoNEegOoy9nz4WgOS9Wdlm2KZO0RuaSYsNxjA3JbKkgjwEN4PLsDWqzq/e2M+fjx
8/LpuQSKVSp0yeXKdOnZL2L/+1JRmP9i3fEe08fs1Eb2NKVaY8VjMkj5VPNzM1oACNaWJCcHJEypPnNQ2D1hsjpPrD7Bu7pUkyGacDFz7yvDy/X79O6b7Pigog+rp38mGXTU86m2p4pmof3UYzQrt5D4xb+aqHBYUFUCCeJDr05EBzwdmvt0IAQK+ur7vVl8BVof7GGCkRSvjPxs5TfTYd64bGaS/i8sfaMXIS0gRkwbFvi46lSJIdHXmN69AgWflwhueoobReK/6dcHq0i64gMeJ6ZvNuNZh3TYbhtfTq86UlbCxUr7ksr14TJ67jZqUDRTJOLzVWxunsG9Z0Z9P0J3dYrhOkMk6Fxkuld3Yd0kDLS4ZiPehU9CAAAEnizk4TTAplnGRPRIMoXq4UioomlqLSHUHIJ3UYNAyjYBhdj4bJsmDhNdQj9sTdjO79TRIu/Z3lu8y9OmXgGrq5jRo1KiwszMjIKD8/f/y4caXEB1AVGSf1oSjj1D5wYr8F2/2GMXSFIrsRU/9nrXntYlA5GaeGOiBJPD5tr9Wae300uOlSfWCGbbd+TgWAwmUIlMWTqI4j9s2dP65jazA11NFvPHVz75JOpuM2eXPfgDGt27h4enmam+sUHPu86Ni9mwf4dQIA4Ko6vmSJqd8ZJOOkafwSMk6gWFTkbNbMUqRwJ1f6JGwbOf18GgUTE04jDuxvpAtFZWnQnjVPxvRut4VCmHTdfUx+KGO10NDNLTAwMCwsbNLEiaXHB1AlGSdZO4Io4cTspc9brgodVa+cUSeScdIctHdVEERNo71lQ3stLxPtzRqyvPaRWX7l4lm/Dp11dYv6wMRiMZ1eNHRKJBR++xr7IzW9GmWcqi0bCERl0DRxplLStbN+QSAQvwYGTMOsjAx7x6I2A/n4AAA4HDaDoaJHo0oyTpW3F4GoIpogyIQEnBAIhDZQx6lu3NcvNrZ2VNUyTnhGWqqVtS1AosImJOOE0D6QmBMCgUCUB6mMk1ezpjFv3xAk2dy7FYVSbEAJQRDfPn/U0dW1c6hz//59hY6G8oxFUEHVZZzQWAQNQXu72RA1jfaWDe21vEy0N2vI8tpHZjmHnSeRiL/EfhLweE7OLkYmJnp6DFwiYbFy0lJ/GBgwHeo4USiUyMjIio9FQCAQCAQCoc3QaHR3j8bZWZlpaT8T4r6SADo6NIaBoY2Nvam5WUlHYey8XJIkMzPSk5MS8rncktoGJBIJjy8QSyQkodYJjwiEdoJRKHQaVU9XT0cHLcCIQCCqDQzDKBSKvr6BnUMdO3t76YqG0v4FGR06dCjPqVS0IuA4HvvhHYfDqVe/gZmZBUNfxazeqJfPv39PadW6rbWNjZFUFYokVY/JQoOzEL8tJY9SxHGcx+clJSZ+iHlnoKvnWt9VoTsQgUAgFCAJQiwWcbncHFa2hYW1eyNPClXFUg8EgfN5vKzMjOSkeFZ2ZkMPTxqt2r5DsMcP7mEUSpMmXioHOgLA82dPeDxe+46d6XQ6SRDSuk/Ww0HKVmFGkQECIUeBa8gHDQBAkhE3rwOF4ubmpkbbEAiEFkESxPeUZKaRsWfjpqXsRhBEdNQLKgXzaNLs/v378psq34ogFAhatWlHpRaLD7Kzs83NzQEgLS01NfVn34ABVCqVkHYxSCMDkgTZaO3CSrAoRkDhAuJ3o7hcCQaFTiJtb4MCp8AAOnXrceHcf7m5uaamtSmQikAgtBWMSnV0ckpKSGDlZJuamctviv38uWHh9waFQmnu3er5k4eZ6WnSlL4B/cIvXazKpWlOzi7K8UHUq1e9evaUSCTJiYkenk1oVCpJEGRhECCrC0m5/xc0LRT+XRWbEAjtQ37dJAwDqV+QJEmSBW0J0p4FksQA3NwapiQnm5uVOEQIgUAgioFRTU1Nv39PVggRjh8/PmrUKPkowd6xTlraz+q6LMXMvNj1srOzX0VHywYtJqck2dra4jhOkiRB4EAQJIETJEFKIaTg0kSCJEgCJ0
mCBBL9aud36eI5PX19lX+gX63+pIVf6h0EjhO41D2kPkICWeAmBIGThIurWx47DzAM/dAP/dCvnD9DY1Meh6PwCsdxPCwsLPbzZ1mKja2dSCCsrhCBpqdXtOaiRCJ5FR3NYDA4hXYIBQJjYxORWAQEAQAkSRJAYoBhGABJkpisBVX6BwEYBkACiVoRahVcgpf0B6KWwJQWScJIDIAgSQoJJAAJGGCAkQBA6urqSMRijZBSRyAQWgKNRlX5ZjUyMgoLC1uxfLl0TWUaja6iOqr0RcUiEV1Hp9ACWvPmzV+9eiXbrKunl5/PpWAYhmEEWdixQII0UAAAIAkAIDGsoEEVoQ7EuLikPxC1Byk37EDqDCRJAgYAGAWTLgmJAYYB8IVCXV1dDEOTGhAIRHnBJWIdXRVSifn5+ZMmTpRpLohFIjqdBvzqaUigsVg5VtY2sn9bmJu3aNEiKipK+k9ra5vk5CSnOk4EQRT0LgCJkUAWrIErTZEGCyhKUAOdunTLzEhX+Qei1sDkx+oWJZGFzW2FwQGGYQAUKjUlOcnMzBw5CwKBKD8CAd/ERHEAE5VKHT9unLymc15eLtPQiMXOr5aL0tJ+frewsJSfbWlhbu7t7S39293d4+GDe/b2DkAWDEAAaQ8DSZIAJJBY4dhEaW1HYBho6UqVCERlUX7VFw5RlDoKhmGAFTQnUACjfPr4wcOzMQoREAhEOSEIgsNh13NxVUifMnmyfHxA4Hha6g8bG/uUH6nVcl2avgEz7tsXF1c3+bVcLMzNJRIJAFhYWtrZOzx6cN+/QycSFxM4LgsTQPpfYcuqtCLEiILFEhCI3w0S5Oc1FIzckTawYRQMIwGjUWk0WuTdCEsrKwtzC/VZikAgtAmSJDPT08zMLQylSxfKUSw+IIjPsR+ZBkwTs4IJ1VWc8QgANIc6Tmk/v3/+GGNlY2vAZMqPXpTi3dLndXTUrRtXXRu4WVnbGJuYYIVGg/RTSf5jCEUIiN8ZOV/A5HyEIAiBUJiSlPTlyydLCyuPRp5qtBGBQGgFJEniOC4U8LnsPBMzM3uHOip3IwhCIhZzOOyMtFSGPsPW3rEaWyhpVCrVzqFOPpeTm5OTnvqDUCXBoKdDt7WxSUqIf//urbR1AYFAlB8Mw6hUqoGBga21raEh8+f3ZHVbhEAgtAlWdhYrO6ukrRQKRVdPz8bWjmlkVL3XpQEAhmFMQyOmYTWfGgEAhw4dUrcJpTFr9hx1m4AoRujWkPHjx6vbCgQCod2Uc8XlMkFi0DVO39691G2CasKvXFW3CQgEAoHQXNDMbAQCgUAgECpAIYIWQYq4bD5aNbFKEGK85OG0hCAvT6hiMA4CgUD8lpQrRIiMjCwzpZyU/8BqvESlTyVj2PAR8r8qnq1ykJxXu9cciWZrzStMnHRqavdOw7a94ZLlSq9hyLwnKwZ26Dz1cqbKOyhOOjm2Y9eBG97yy3c6NeUCgUAgao/ytiLIT8eUvnGVJ2iWSUUPrMYooRLWqhGSnxRxLGTx3HnBc5asPRQRzyvzLUQKEq+tn/vXkS/Vpt5RcN68h0sC/P2kdOo7ct6O63H55QxS8NyERJ74R1waJ+Hfef/r/WdIjKB4uqBiL9e8yOn+fn7+E8+lFrak8N9v7efn5zfyn+/lmGZD4py0DFxSovE4L58obcKuOLFacoFAIBBaQ4U7Gmqh/UCNl9MQMF0zF59+0xcvWTZrgPOP22de5pTevUByY8+FPc6tgW4jEudm5pAAFg2bNW1gnp/w7My6oPWPc8sVJOg1mXPk4N4T67uZ8lI+/8jjSEiFdIsK2Sth/8glAchPJ89KAyEi58mxq9kAkPczrxZm4uLc6sgFAoFAaA8Vm9Gglhd2ZGRk5eZvVPpAGSX1KcjST538uyrnL4BgPT+y879PHBwzcGo3aGyApzHF0NndEEhJvoiCUQ2szPUKXkN4+t19q85m5VNMXTsN+vOPhoYUACB5sVfD07wH+b+/VFPaDLZ/LN
w60YWSdnHy4JDYl3fiBd7GEaHrD9z4nIeZefSevGh6Dyc9SdrdXSu2X/yQQ9CMGgzcuHua28+wmRNOZPmGnBkOAJB3ZWrXKwDui/6b/yN4womfAAAO48JOjK4L3w4MHxOW2WLl+bV1HmzfdPzOp0zSqvmgWUsm+loWFVAJJ50NAACZV/+JHr2yDePnzbAXIgAATjoHBwAg+XGXFa3CJBkP9q7cevZdNhgYFkQ2gpjN/5sSTvTec3aB69eNg6ZegZ67z85lyuVX+OVQ8LxTMTlioJi59w5aHtzNvkAiRSEXWb4h4etbUpSuS8+M2PTXvkfxGWwRSTXz6DNt6YyuhadAIBAILaECX0Bq/KD/xdsSKIbuvSctWbVy+eQ2kqfnbySLAADE3y+sXvTX5pPv7HoGuBsUrpWlY+8XuOCveZPaEA9PnH7NJgBIYdLtCylNhnSwVyEBVr2QEpEEADBgv9g6c/P1zzw7D3er3A/h64J3vc3Pf7d34/kPubY+3f/wb1zXwYSqdLhVi45du/byddQrWvjr++N3LALPevssFcCts1NsaPCW658kDXxb2bGi/1my4lqaXOsJwc3KBzBytaXkP/z3aY4gPvx8HFg2NAMQ57EEBJDsl8pW4Rm31iz/7102w8W7hZNuuXNK0TUycWzm276NGzPnU/iGkEe5st4EpVyovi434U1sOptev2VLN2bOh4vrNt3N1ppBJAgEAiGlvCHC5fBLlbtAdb2kf+kogWZkY21qoG9ar5WPtTAtR0QCAN2h35I1K4OHePw4dzAitUDamWrq5uFsY2bVoH2fdgbxT+P5JJ797PKnOr3bO9RogJB6eeXUscP6/bnzGwCzlQ/n0p08MOsbemTf/iMbOxtA1s3/YvIlIhxAx9K9w/D565b2c1D6ZDZuNW7xsmULRzZmYgAAFHtPM4Bv92JyWe/vxQE06Nog+ezdPLAcuGrN4mVrp7sD/u762zzZe5UUc3KFALZdR3VgSqJPX71z+loa1Wt4XxcaQH5WPkGy3/yrZNX71PfX30rAasi2/aFb1k/2KG9+6U5D1u/aum7l2pDlnQ1AnPwps1BbWyEXACRHxXVjpCJrOl6TV2/dvqKzAUi+vfouqvz9RyAQCHVQ3hChT9+Ayl2gutZ46tM3oHJDDittea1BCn8+PrV92cJ5wfM3hf8Uy5QyMYqusaN3QE+HzFfvshQ626mG1kzIzxMIkiIfiFt2ddWvYdHA3ITYryk8U2efwYt3z2vMzxEDWHvY6wHGcGhoDSDIzMCaTJziayl4c2zRiF6B627+FJdxRpp9+y72IIm59fzZrY8ENOjpw8jMEgNknpver2fvEVs/AQAnkysLEQheLh8AGLathnQzhy8HN97KY7Qd0s7ejAHAz+MTBDctW8mqtKx0NgFgUd+qeMSCAQAQJY4zJHKe7pk+oIN/h06959zJByAkJc+UVHndDF5R8wfd2NoQQMQToVYEBAKhZVRgLEKHDh0q90Ve6QNlSF/zHHZe5Q7UbIjc6LMXEp3HLwpyY3Ie79gSpWIfpRcUzv7JwQxNaVkfP2f/yF0//0ZB+v7VxycvHeVa/ib18mEbeOzkRJeC1yyZl2JOg/i09yn8P8zxlI/pALoWlgyG88D1/3X5evfY+nVnr4ce7OG/2KTwcJouDYCdyy0W5+g4du7p9N/Bx7v2gBjzCPC1NYoxpgAY+AUvGeCkAwBAMahjLeuvIPi5fAAdA4Z+g4A+9ueP/QDjjoObm+i90gXg5wkIio21slXWlrYmVIAfr+N5PZrLrkzRNdAFyEzPFha7rxQMABcICRDFnd76z5ts1z+XTvTiXFq17REAAGAqcwEUporrWupT02R7YBQAJG6GQCC0kIoNV1RLlFDF+KASB8qQjUZUGLdYPaMUZUiVtXGxGC/40MTzvr5Jpjs6mFByv9y+kWzo2ddc+qBIUS4rjw38hEdnHwncRtYzcmzy17be0mMybofsSem3qPrjA0UwI68hnY
xf3roye/Q3F+LL53ww7TXIk3i6auxRtoMjk83GAShUSlG7ho6VuyMVoh+tnTWnoaFJ5+Ch0mS6Q9f+DQ9ui+UA3XuQryXVyGtAW/3Xjx5u35TWtC5TnCdwm7WjuewshIAtAKDr0Sl0p57D29w4yuk52EOfmsygA+Tl8XGVVjW2rSf2NYi+f33OmJQWdpwvBRe2beFhcOrOq/UzZjfVi8sDMAYAKtPKEOBHVMi6i1tHUTEAIuNTdBQhThMAMAAA6KpzgRmquhsG8L6GnwICgUDUOBWesFXpjoPKHajG+KC2oJh4BXSz+nB47bL5izZcyDaxMqASvJSoy4fWr1m7du/NjAaDJ/3hoAMANGNHG8G93euWrQk9HWvRe8JgWX94LYMZ+cwOnd3dVT8t9nOmkVvP+aEzmzOE+Zgo+fm9W3de5Vg1Hbxgoqde0f5mfsHBnZ0YebEvnn/8np5f2AhPte40vI0egFHH4a1NKUAx8/9r+6w/3M3yvr559forC6NKJEXf3oSAIwCgMegYUG37bDpzfu+Y+joAdD06gDifJ1FpFZNi6rtwW3B3N+OMmJfRSbiBtUdjO12M2WLG8mHNrSTxUc8+cgwsXJo1MKZSLDpMH9fSmp6XLtKtN2huYEs74Zurp0/f+kY1q+fhxKQApYRcqLyueh4MAoFAVCsYOy+3zJ0iIyP79A2Qf90qp5ST8h8o3RMq/ppXPrDS1sqodCvCoUOHNFnGCSk9ahpI6RGBQKgL5XdluToalBsAaqEtQbpnJd7rygdWfchkNfcsIBAIBAKh8ZS3o0H5VV3pj/LyH1iNl9CGHgcEAoFAIDQItHgsAoFAIBAIFVRsRgMCgUAgEIhfFYVlEss1XBFRaQ4dOqRuExBaBhquiEAg1ILy+oRY+KWLajEFgUAgEAiEJoNaERAIBAKBQKgADVdEIBAIBAKhgl8zRPj44V3lNJ8QiBoClUmEMr9SqaihvKDTqpdfM0RAIBAIBAJRRcoTIuCC3Jx8vOz9EAiEVkLw0uMTWWXpd/9akCJObr6kBAHO3/GGIBAqKDtEIHOfrJsV8jS38mr3pFhYkidWEIL9/vTqoFHDho+YvOs9r1pOiUD8BpTug0Tui51rjr7Pr1nBajz95vJJS8N/ShTskU+vPbgvN8zYcC9bdbVW+g1Rv/EIOe4/eKj8e/joMSsXjcSvBpSWTiI4n8IPHroSncoHCtPBZ0jw5BZVugCecXPZ4lcDdixuoV8snRT+fPzvkf/uxWaJGfatBk6d2N2ZUZY8nijp0sGboq6Ltne0wQgGo4RTbZ7Zo0oWIxC1jODjnlnrHrEBAKhMW/c2vYYNbV+3THcoCSUXntj47SpVPljLBlMMXdt1gbrGVIU6QZZeQSP4b0OCtgrHhC70Myv40iH5sUfnrInrF7qqm2VFz1Zeqsn434vhIwLl/3ny77DqPX97f7/7Dx629/cDgPsPHgKAp6fHp0+x7u4NTU1MqvdavxpK1cWUzjbyYYFiiCD48u/286k+k1fNczUQpMWlGhpVteyTEjGhIhgn2HEfeZ6Byyc4it7+HRK2706zdb1tSr8WKUiN49i0a+dqWdwhFU4FgEIEhFZBirki4w4LVg+ri3HTY64dOLT2oEno9GaVk5RW4cIl+GAtG4zp1+s2uB4A4Nxi9sjSK2iDICdXJI4/ey3RZ0Q9HQAAPOvRP/dySYscPgFQY+9ssjqM/82QxgTDRwRWe3BQEqYmJu7uDWNiPvj5tqudK2opZb7xlUKEjO88s5adWtazoQNYWtcFIHMBxEn/Lh57hCvRs/P53/QJXevoYUDy4m8dPnTxRRJH16n9qJmj/azpoDoRQPgpZMIIAKg/ZfdKX2NpNUK19Js4RXpN/x4eZ3cks8QgHyKQgqQ7h/edeZycr2fbot+ECX3cmEDghCj5RPCoEwC2gzZt7G9HU3Wqar6FCERtQNExYDKNdJhGvgMD7kT+8+Ds+oN3PuThmGH9ruOCR3ibUUlhyv
2j+08/TOCQoGfh3mPW/EHOOsoeR1N2YTxdzgcnb/jj0Yp/HP/aMsJZB0TxYfPWZ40NHSUzowQXLtPg/z6kxGXdPVzcYQWJ1/fu+i8qTYQZNhmzak6L2NWzbvhvWd0RitUJyz3frZx1w3/zPIP9c/5xXhoyrC4d8PRrSxa96rn2T97ZIyUYI8nP5jNsXfAH59/0ndPKCANh4s0rP+3rGXFY+YTKCgQDkvs5fN/+S9HpIgMbG7GY1hwAT7+6aNGbobv+aqEPooST8zblTQqd2lB2FTwrcuvyw2+KHoSxSuO3rO5ijqmosvI/HN+w70ECS0CCXOWJqBGk7QcyTE1MCKLy/eO/B6Sq6qKYRyiORWC6tnLMubbnaMTHLFFRqEy1bD953c7ta8e5fPn7wP1MHEjehxNb/stsEbRl57pR9m8O73+cTahOBADQbThr//GwE8dXtDNWdg+Sn/DiG9RvbqdbLDH2780nUzwnbty+eVY77sWQQ1FsEgBAx2l06NGwE8c397NTlpeQngqB0FoIQeaHiIgksG3SPnBx6L59OxZ3Ft89ej5OBIIvJzce++oycu32naHz2pGJiXmq3RBT7cIyH/S1c2vjwH77LksCgOd++Zhv7+1U1GdXkguXabAh/0qogsMSnOi/zyR5zd116Mju5YEtTYs5rFydUFAHYQb1W9mw38eycACS+/VVplUj8YWtJRtD8nOFzMZ9+zt9vfwoAweSGxP+mNblf34WYhYPV1mBkNy3R7deZPlM27Rt09y+DcrVSEM1bvZn8Qeh0viSqixxTnyKXs+lobtCV492/iytPBE1QXt/P/mfus3RFkqoLuRQDBFotn8sXjHaI+fahpmT5m4L/8DGAQAo+rZ1bMwtnFp1b22S/iFNBMK4u1FEy//18rQ2q9O6T3vT5JdJAtWJUjsoFCqVSqUo+ySe/fTQ7mf2/xvdslj0IIyPeCFpNXJgC0cL28a9A7vov7/5kSPNAIVaxqkqcZ8QCDUjYd1aNnbY8JFjgzfewToGTfJ1drS3MGRauLXvYCf4niUQxEU8E7UYNbRNXQtTc0sTHQwAVHucahcu8kGKSeMOTjnPX2fjJC/xdZZVqwaGMmcq2YXLMHhsV85XXMFhuRQ9fUr+9+QsEd3Yxt5IIaRXrhOoZp6trdKff2KTJC/+2XezJgZvoksxBuezhHRj25YBXrm376QIcl5c/mzXu0s9M10iP5fPV1WB8BMi31JaBwY0d7C0c2vRzKpcGnZ0E4diD4JUbXzJVRamY2Jhbmbl3LpHQeWJQGgOJVQXcjsoHUIxqNdh5ML2/8t4c2H3zo0hjM0z9Iq2MYx18UQxSQrzcnicb2vGPSrYgLlyRYSqxFJ7QAn2m+PrjqR3mL+4o1WxDhBSyMoRGjYzkVpHM7E3Il5n8UqLv+VOVdoVEQjNhGbYbuqiQfUMDYyNmToUUpAcsffg+RcJuWI6jZTUJUgBK1No5GWhI3+MSjckQV/RhTdOLHYpimnTLs6nrrzIaO/8ItnEO9CcCuzST6jqc7uYwVjeo1VHDespOCzjj3Gzeh07vj7ogn374ZMC/Zll3AOqZYu25uGPY9nezMdxhq166MZc4USXZAwp4fFwOlOX6dbTj7LpciT5PcP9z5ZmOvH6NBFXyFdRgeQLJLliprNirCI9WwkmkYLkO0eLPYgS9iy7yqIyTHTxJHHNzhr5fVHoZQAA1JZQPlRXF7JyWlIgjelYeQX8z/v2hqgUka9cMoZJtxqZ6Jt2Wbh1dH25OouvKhHPoFIAF+EkgEJFg2fc273nbcOpqwc0UKyDMF0TMx3Oz1wJ2NEAJLk/8jBjC/2SByCVcioEQiugGVjYWFtKPYfIfnwo7IvbnJBljY1zI5YtfAhANzSi8jK5uLzLqnbDoq2FLvxd7FXMBykmXr08TobdepT8Vb9VgHV5T1iKwaDSYSlUI89+Mzf1THtyZO2+nTYN5heuOoeVUCfQrH3aW1y7H/3GINawXX8n8+
+lGEMKuUJSh0HH6PYdu5nOPX7NqNvKRkyMpOtRRVwh3UXZHgMG05zO/pEjBvlOSoyur0PycvkE6CvPAFfxIDDVxpejyqJgqG6qUeRjAuWIAVEqctUF2VreIxS9ghcXce3Rh8TUzPSUjw9uveeZOJmpiiL06nVsJnl04uKrpKxcVkZyfCqPLCGRyrQ2x348iUrKykhOyBQWxdD82LPnfnqPH+LJwMVisViMy4fXui5dWlFfnDj36ntWWszVsAhe426NDEt0sOKnqtKNQiA0AZIkSSBwkUhS0P2u5+zXSPTk7yvv0/NykmITpM2BqjyOVHZhCxMFH8SYHr3bSW6feKrTto0NHQCj6dIJVvx3jo4qFy4PKh1Wkv3ly488AaFv52BCEeXLmhRLrBOAauXTyfpz2OHXxv5tbJilGkOI8oVA16NjQLVoM6Bj49YB3evqAGB0BlXMFemoskfPpUtr+vMjJx/FZebmZGXzSQAAiqGzu/H361dfpWSzcrI50upDdkMIpQdRkvEVq7IQSjMhVaYgahhV1YVZMY9QmtHASn9z8eypVC4BNBOXNmNn9a9Lj1I+McZsMnbeoMOHDi66xAXMsOHgxYsC9OkqExmNhgyov/nIkhkEs9HwZQt62kkHJYuzYr/msdI3BEVKz2jSZVVo0ecCxmgYOH/44X3751/j6dq26D93QksjDEpYCUPhVKdO/l3V+4ZAqBOKeZvA/tF7Q4NviAGo+nbtDKkUoxbjU202kAAAAXpJREFUp3ffc3TbrPMipqWJgFqfhql0QzsVLmxMUfRBnbpduzncedrB15YOABjTvZev3p59p1uHjFfhwuWwWJXDkqwnN3bte5GFA9XUrfvYbg60mIK9i9cJ89sWnYdq0bKH68l94q5trKgYRVV9UrgnKeJJqHo6FADAjLxGL/IqMISuRxXlCElGY+UKBIOGIxaMOXHg1LJgNglUpmM7a10M6E4BkwKS9uxacBMHTM/So68hVXZDzvisUnwQJRlfoSoLIaW6YgJpm4FCywFqSCgHhIrqQl/PUs4jfk0x6I8f3vm08eOw89RtCAJRQFXLJCnISsujMA31SE7c7T273vutXdrFojKT/3EBT0xwP54NOZ43dM205uhLV538SjVVKXmpytJJNXSLtOu0aqRcg3oRCISaEX2/uWvL9UQuATQz1/YjJrevVHwAIEo++9fSa5mmnv2nzvAqOz4g856un707Rm4qAcWk7aKQqR56JR+DQBSn1lZMQlQ7/wcW9G0bYEbL3QAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import os\n", + "from IPython.display import Image, display\n", + "current_dir = os.path.dirname(os.path.realpath('__file__'))\n", + "data_screenshot = os.path.join(current_dir, 'assets/dataset_preparation_gui.png')\n", + "display(Image(filename=data_screenshot))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Ud2cRDy_l566" + }, + "source": [ + "We see the data of interest is on the second sheet, and contained in columns \"TA ID\", \"N #1 (%)\", and \"N #2 (%)\".\n", + "\n", + "Additionally, it appears much of this spreadsheet was formatted for 
human readability (multicolumn headers, column labels with spaces and symbols, etc.). This makes the creation of a neat dataframe object harder. For this reason we will cut everything that is unnecesary or inconvenient.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 309 + }, + "colab_type": "code", + "id": "hVJDAGT8mbl1", + "outputId": "52892aeb-f4e9-4a03-a7a3-1edaf512aa0d" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "dc.utils.download_url(\n", + " 'https://github.com/deepchem/deepchem/raw/master/datasets/Positive%20Modulators%20Summary_%20918.TUC%20_%20v1.xlsx',\n", + " current_dir,\n", + " 'Positive Modulators Summary_ 918.TUC _ v1.xlsx'\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "pMvd0XzRl567" + }, + "outputs": [], + "source": [ + "raw_data_file = os.path.join(current_dir, 'Positive Modulators Summary_ 918.TUC _ v1.xlsx')\n", + "raw_data_excel = pd.ExcelFile(raw_data_file)\n", + "\n", + "# second sheet only\n", + "raw_data = raw_data_excel.parse(raw_data_excel.sheet_names[1])" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 204 + }, + "colab_type": "code", + "id": "ei2QwtnVl57D", + "outputId": "39406331-090a-4537-d9fd-74b9ba46172d", + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
Unnamed: 0Unnamed: 1Unnamed: 2Metric #1 (-120 mV Peak)Unnamed: 4Unnamed: 5Unnamed: 6Unnamed: 7
0NaNNaNNaNVehicleNaN4ReplicationsNaN
1TA ##PositionTA IDMeanSDThreshold (%) = Mean + 4xSDN #1 (%)N #2 (%)
211-A02Penicillin V Potassium-12.86896.7470514.1193-10.404-18.1929
321-A03Mycophenolate Mofetil-12.86896.7470514.1193-12.4453-11.7175
431-A04Metaxalone-12.86896.7470514.1193-8.65572-17.7753
\n", + "
" + ], + "text/plain": [ + " Unnamed: 0 Unnamed: 1 Unnamed: 2 Metric #1 (-120 mV Peak) \\\n", + "0 NaN NaN NaN Vehicle \n", + "1 TA ## Position TA ID Mean \n", + "2 1 1-A02 Penicillin V Potassium -12.8689 \n", + "3 2 1-A03 Mycophenolate Mofetil -12.8689 \n", + "4 3 1-A04 Metaxalone -12.8689 \n", + "\n", + " Unnamed: 4 Unnamed: 5 Unnamed: 6 Unnamed: 7 \n", + "0 NaN 4 Replications NaN \n", + "1 SD Threshold (%) = Mean + 4xSD N #1 (%) N #2 (%) \n", + "2 6.74705 14.1193 -10.404 -18.1929 \n", + "3 6.74705 14.1193 -12.4453 -11.7175 \n", + "4 6.74705 14.1193 -8.65572 -17.7753 " + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# preview 5 rows of raw dataframe\n", + "raw_data.loc[raw_data.index[:5]]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "kfGr4zPSl57Q" + }, + "source": [ + "Note that the actual row headers are stored in row 1 and not 0 above." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + }, + "colab_type": "code", + "id": "adUjxQF2l57Z", + "outputId": "976bffc4-5792-4ba4-882d-660525ba229f", + "scrolled": true + }, + "outputs": [], + "source": [ + "# remove column labels (rows 0 and 1), as we will replace them\n", + "# only take data given in columns \"TA ID\" \"N #1 (%)\" (3) and \"N #2 (%)\" (4)\n", + "raw_data = raw_data.iloc[2:, [2, 6, 7]]\n", + "\n", + "# reset the index so we keep the label but number from 0 again\n", + "raw_data.reset_index(inplace=True)\n", + "\n", + "## rename columns\n", + "raw_data.columns = ['label', 'drug', 'n1', 'n2']" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 204 + }, + "colab_type": "code", + "id": "_AmIYJGjl57j", + "outputId": "402dd41a-d077-44d0-ed6f-dad28e0cef3b" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
labeldrugn1n2
02Penicillin V Potassium-10.404-18.1929
13Mycophenolate Mofetil-12.4453-11.7175
24Metaxalone-8.65572-17.7753
35Terazosin·HCl-11.504816.0825
46Fluvastatin·Na-11.1354-14.553
\n", + "
" + ], + "text/plain": [ + " label drug n1 n2\n", + "0 2 Penicillin V Potassium -10.404 -18.1929\n", + "1 3 Mycophenolate Mofetil -12.4453 -11.7175\n", + "2 4 Metaxalone -8.65572 -17.7753\n", + "3 5 Terazosin·HCl -11.5048 16.0825\n", + "4 6 Fluvastatin·Na -11.1354 -14.553" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# preview cleaner dataframe\n", + "raw_data.loc[raw_data.index[:5]]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "6Htu9Bw6l57p" + }, + "source": [ + "This formatting is closer to what we need.\n", + "\n", + "Now, let's take the drug names and get smiles strings for them (format needed for DeepChem)." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "3hGrrqu5l57q" + }, + "outputs": [], + "source": [ + "drugs = raw_data['drug'].values" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "zJAABOqPl57y" + }, + "source": [ + "For many of these, we can retreive the smiles string via the canonical_smiles attribute of the `get_compounds` object (using `pubchempy`)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "yfCp2htdl570", + "outputId": "7ec9923b-02ea-42ce-b98d-fb80fd684626" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[Compound(5281078)]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "get_compounds(drugs[1], 'name')" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "colab_type": "code", + "id": "rsesx-l8l58L", + "outputId": "6f087c85-b3bc-4a56-f052-3b463e9d71aa" + }, + "outputs": [ + { + "data": { + "text/plain": [ + 
"'CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3CCOCC3)O'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "get_compounds(drugs[1], 'name')[0].canonical_smiles" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "x4qqWsWZl581" + }, + "source": [ + "However, some of these drug names have variables spaces and symbols (·, (±), etc.), and names that may not be readable by pubchempy. \n", + "\n", + "For this task, we will do a bit of hacking via regular expressions. Also, we notice that all ions are written in a shortened form that will need to be expanded. For this reason we use a dictionary, mapping the shortened ion names to versions recognizable to pubchempy. \n", + "\n", + "Unfortunately you may have several corner cases that will require more hacking." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "jGch_fRUl587" + }, + "outputs": [], + "source": [ + "import re\n", + "\n", + "ion_replacements = {\n", + " 'HBr': ' hydrobromide',\n", + " '2Br': ' dibromide',\n", + " 'Br': ' bromide',\n", + " 'HCl': ' hydrochloride',\n", + " '2H2O': ' dihydrate',\n", + " 'H20': ' hydrate',\n", + " 'Na': ' sodium'\n", + "}\n", + "\n", + "ion_keys = ['H20', 'HBr', 'HCl', '2Br', '2H2O', 'Br', 'Na']\n", + "\n", + "def compound_to_smiles(cmpd):\n", + " # remove spaces and irregular characters\n", + " compound = re.sub(r'([^\\s\\w]|_)+', '', cmpd)\n", + " \n", + " # replace ion names if needed\n", + " for ion in ion_keys:\n", + " if ion in compound:\n", + " compound = compound.replace(ion, ion_replacements[ion])\n", + "\n", + " # query for cid first in order to avoid timeouterror\n", + " cid = get_cids(compound, 'name')[0]\n", + " smiles = get_compounds(cid)[0].canonical_smiles\n", + "\n", + " return smiles" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "H-qPqmm3l59s" + }, + 
"source": [ + "Now let's actually convert all these compounds to smiles. This conversion will take a few minutes so might not be a bad spot to go grab a coffee or tea and take a break while this is running! Note that this conversion will sometimes fail so we've added some error handling to catch these cases below." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 68 + }, + "colab_type": "code", + "id": "PMlMlVJTl59t", + "outputId": "cf54a840-fb35-4904-c96e-e016ab7c1935", + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Errored on 162\n", + "Errored on 303\n" + ] + } + ], + "source": [ + "smiles_map = {}\n", + "for i, compound in enumerate(drugs):\n", + " try:\n", + " smiles_map[compound] = compound_to_smiles(compound)\n", + " except:\n", + " print(\"Errored on %s\" % i)\n", + " continue" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "CgPwj-Pvl594" + }, + "outputs": [], + "source": [ + "smiles_data = raw_data\n", + "# map drug name to smiles string\n", + "smiles_data['drug'] = smiles_data['drug'].apply(lambda x: smiles_map[x] if x in smiles_map else None)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 204 + }, + "colab_type": "code", + "id": "xV3mQWwrl5-v", + "outputId": "e031e783-4912-468f-abbb-64225e6b1ec6" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
labeldrugn1n2
02CC1(C(N2C(S1)C(C2=O)NC(=O)COC3=CC=CC=C3)C(=O)[...-10.404-18.1929
13CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3C...-12.4453-11.7175
24CC1=CC(=CC(=C1)OCC2CNC(=O)O2)C-8.65572-17.7753
35COC1=C(C=C2C(=C1)C(=NC(=N2)N3CCN(CC3)C(=O)C4CC...-11.504816.0825
46CC(C)N1C2=CC=CC=C2C(=C1C=CC(CC(CC(=O)[O-])O)O)...-11.1354-14.553
\n", + "
" + ], + "text/plain": [ + " label drug n1 n2\n", + "0 2 CC1(C(N2C(S1)C(C2=O)NC(=O)COC3=CC=CC=C3)C(=O)[... -10.404 -18.1929\n", + "1 3 CC1=C2COC(=O)C2=C(C(=C1OC)CC=C(C)CCC(=O)OCCN3C... -12.4453 -11.7175\n", + "2 4 CC1=CC(=CC(=C1)OCC2CNC(=O)O2)C -8.65572 -17.7753\n", + "3 5 COC1=C(C=C2C(=C1)C(=NC(=N2)N3CCN(CC3)C(=O)C4CC... -11.5048 16.0825\n", + "4 6 CC(C)N1C2=CC=CC=C2C(=C1C=CC(CC(CC(=O)[O-])O)O)... -11.1354 -14.553" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# preview smiles data\n", + "smiles_data.loc[smiles_data.index[:5]]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ES-ak26xl5-1" + }, + "source": [ + "Hooray, we have mapped each drug name to its corresponding smiles code.\n", + "\n", + "Now, we need to look at the data and remove as much noise as possible." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ghu-RpSCl5-3" + }, + "source": [ + "## De-noising data" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "axbec0-Dl5-4" + }, + "source": [ + "In machine learning, we know that there is no free lunch. You will need to spend time analyzing and understanding your data in order to frame your problem and determine the appropriate model framework. 
Treatment of your data will depend on the conclusions you gather from this process.\n", + "\n", + "Questions to ask yourself:\n", + "* What are you trying to accomplish?\n", + "* What is your assay?\n", + "* What is the structure of the data?\n", + "* Does the data make sense?\n", + "* What has been tried previously?\n", + "\n", + "For this project (respectively):\n", + "* I would like to build a model capable of predicting the affinity of an arbitrary small molecule drug to a particular ion channel protein\n", + "* For an input drug, data describing channel inhibition\n", + "* A few hundred drugs, with n=2\n", + "* Will need to look more closely at the dataset*\n", + "* Nothing on this particular protein" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ls_jIMqUl5-5" + }, + "source": [ + "*This will involve plotting, so we will import matplotlib and seaborn. We will also need to look at molecular structures, so we will import rdkit. We will also use the seaborn library which you can install with `conda install seaborn`." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 71 + }, + "colab_type": "code", + "id": "Xe0sqLZ0l5-6", + "outputId": "4e1a4198-0617-4159-e193-8c3e485de045" + }, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "%matplotlib inline\n", + "\n", + "import seaborn as sns\n", + "sns.set_style('white')\n", + "\n", + "from rdkit import Chem\n", + "from rdkit.Chem import AllChem\n", + "from rdkit.Chem import Draw, PyMol, rdFMCS\n", + "from rdkit.Chem.Draw import IPythonConsole\n", + "from rdkit import rdBase\n", + "import numpy as np" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "9fKzIHFnl5_K" + }, + "source": [ + "Our goal is to build a small molecule model, so let's make sure our molecules are all small. 
This can be approximated by the length of each smiles string." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 297 + }, + "colab_type": "code", + "id": "HZjb8u_fl5_S", + "outputId": "136daa91-c521-4d32-e204-bbb05eec8149" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0, 0.5, 'probability')" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAEECAYAAAArlo9mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAYh0lEQVR4nO3de3BU5R3G8eckIWATYiI3WyLCQhgEh6k0JlogggXCOCpKsRAwGSW2BhRIKZgQEgJDSqDc2mGggIKdbgyQUmodqJUSaqPEbqkFKVvQKhcr1yIIyRY2t9M/qFvQXBbcs7mc72eGmezZs3l/eWd58ubd877HME3TFACgzQtp7gIAAMFB4AOATRD4AGATBD4A2ASBDwA2EdbcBTQkMTFR3bt3b+4yAKBVOXHihFwuV73PtdjA7969u7Zt29bcZQBAqzJ27NgGn7Mk8Ldt26bf/OY3kiSv16tDhw6puLhYixYtkmEYiouLU35+vkJCmFECgGCxJHHHjh0rp9Mpp9OpAQMGKDc3V6tXr1ZmZqaKi4tlmqZKS0utaBoA0ABLh9h///vf9eGHH2r8+PFyu91KSEiQJCUlJam8vNzKpgEAX2Bp4K9bt07PPfecJMk0TRmGIUmKiIhQRUWFlU0DAL7AssC/dOmSjhw5ovvuu+9qQ9fM13s8HkVFRVnVNACgHpYF/t69e/Xtb3/b97h///6+S4XKysoUHx9vVdMAgHpYFvhHjx5VbGys73FWVpZWrVql8ePHq7q6WsnJyVY1DQCoh2XX4T/zzDPXPe7Vq5eKioqsag4A0AQuhAcAm2ixK23tzFtdq9AQo8nzautMtW8XGoSKALQFBH4LFBpi6MkN9e+Fca2i9MQgVAOgrWBKBwBsgsAHAJsg8AHAJgh8ALAJAh8AbILABwCbIPABwCYIfACwCQIfAGyCwAcAmyDwAcAmCHwAsAkCHwBsgsAHAJsg8AHAJgh8ALAJAh8AbILABwCbIPABwCYsu6ftunXrtHv3blVXVyslJUUJCQnKzs6WYRiKi4tTfn6+QkL4fQMAwWJJ4rpcLu3bt0+bNm2S0+nU6dOnVVhYqMzMTBUXF8s0TZWWllrRNACgAZYE/ttvv62+ffvqueeeU0ZGhoYNGya3262EhARJUlJSksrLy61oGgDQAEumdC5cuKCTJ09q7dq1+uSTTzRlyhSZpinDMCRJERERqqiosKJpAEADLAn86OhoORwOhYeHy+FwqH379jp9+rTveY/Ho6ioKCuaBgA0wJIpnW9961t66623ZJqmzpw5o8uXL+v++++Xy+WSJJWVlSk+Pt6KpgEADbBkhD98+HDt3btX48aNk2mamjdvnmJjY5WXl6cVK1bI4XAoOTnZiqbxBd7qWoWGGE2eV1tnqn270CBUBKC5WHZZ5gsvvPClY0VFRVY1hwaEhhh6coOryfOK0hODUA2A5sSF8AB
gEwQ+ANgEgQ8ANkHgA4BNEPgAYBMEPgDYBIEPADZh2XX4aF1q6kxJdY2ew+IsoHUj8CHJvwVaLM4CWjemdADAJgh8ALAJAh8AbILABwCbIPABwCYIfACwCQIfAGyCwAcAmyDwAcAmCHwAsAkCHwBsgsAHAJsg8AHAJgh8ALAJy7ZHfuyxx9SxY0dJUmxsrDIyMpSdnS3DMBQXF6f8/HyFhPD7BgCCxZLA93q9kiSn0+k7lpGRoczMTCUmJmrevHkqLS3VyJEjrWgeAFAPS4bYhw8f1uXLlzV58mSlpaVp//79crvdSkhIkCQlJSWpvLzciqYBAA2wZITfoUMHpaen64knntCxY8f0/e9/X6ZpyjAMSVJERIQqKiqsaBoA0ABLAr9Xr1668847ZRiGevXqpejoaLndbt/zHo9HUVFRVjQNAGiAJVM6W7du1eLFiyVJZ86cUWVlpQYPHiyX6+o9U8vKyhQfH29F0wCABlgywh83bpzmzJmjlJQUGYahRYsWKSYmRnl5eVqxYoUcDoeSk5OtaBoA0ABLAj88PFzLly//0vGioiIrmgMA+IEL4QHAJgh8ALAJAh8AbILABwCbsGwvHVivps6UVNfoOWZwSgHQChD4rVhoiKEnN7gaPceZnhikagC0dEzpAIBNEPgAYBMEPgDYBIEPADZB4AOATRD4AGATBD4A2ASBDwA2QeADgE0Q+ABgEwQ+ANgEgQ8ANuFX4G/cuFHnz5+3uhYAgIX82i3zlltu0dSpU9W1a1d997vfVVJSkgzDsLo2AEAA+TXCT0lJ0ebNmzVt2jS99tprGj58uFatWqVLly5ZXR8AIED8GuFfunRJO3bs0G9/+1t17NhRc+fOVU1NjaZOnaqioiKrawQABIBfgT9u3Dg9+uijWrlypb7+9a/7jh8+fLjB13z66acaO3asNm7cqLCwMGVnZ8swDMXFxSk/P18hIXxeDADB5FfqPvPMM3r++ed9Yf/LX/5SkvTDH/6w3vOrq6s1b948dejQQZJUWFiozMxMFRcXyzRNlZaWBqJ2AMANaHSEv337du3evVsul0su19Vb6dXW1uqf//yn0tLSGnzdkiVLNGHCBK1fv16S5Ha7lZCQIElKSkrSnj17NHLkyED9DAAAPzQa+EOHDlWXLl302Wefafz48ZKkkJAQ3XHHHQ2+Ztu2bbrttts0dOhQX+Cbpum7qiciIkIVFRWBqh9B5M9N0yWpts5U+3ah1hcE4IY0GviXL19WYmKiunbtet3x//znPw2+5te//rUMw9A777yjQ4cOKSsr67pr+D0ej6Kior5i2WgO/tw0XZKKuHE60CI1GvgbN25UTk6O5s2bd91xwzB88/hf9Morr/i+Tk1N1fz587V06VK5XC4lJiaqrKxM9913XwBKBwDciEYDPycnR5LkdDq/UiNZWVnKy8vTihUr5HA4lJyc/JW+HwDgxjUa+EOGDGnwubfffrvJb37tLwqu1weA5tVo4PsT6gCA1qHRwF+zZo2mTp2qmTNnfmnvnOXLl1taGAAgsBoN/AcffFCSNGHChKAUAwCwTqMrbfv16ydJiouL0+7du7Vx40a99dZbuuuuu4JSHAAgcPzaWiErK0s9evRQZmamunXrpqysLKvrarO81bWqqa1r9J/Z3EUCaJP82jzN6/Vq4sSJkq6O+t944w1Li2rL/Fm85GThEgALNBr4R48elSTFxMTo9ddfV3x8vA4cOKDY2NigFAcACJxGA//aFbbFxcUqLi6WJO52BQCtUKOB39AK2+rqakuKAQBYx685/M2bN+vll19WTU2NTNNUu3btmMcHgFbGr6t0SkpK5HQ6lZSUpMLCQvXu3dvqugAAAeZX4MfExKhr167yeDxKTEzUxYsXra4LABBgfgV+x44dtWvXLhmGoc2bN1+3vz0AoHXwK/ALCgrUvXt3/ehHP9KxY8c0f/58i8sCAASaXx/ahoeH669//auOHTumuLg4xcfHW10XACDA/N5
a4cyZM7r//vt1/Phx341RAACth18j/HPnzmnlypWSpBEjRujJJ5+0tCgAQOA1OsKvqqpSVVWVYmNjdeDAAUnS4cOH1bNnz2DUBgAIoEZH+KNHj5ZhGDJNUy6XS+Hh4aqqqlL79u2DVR8AIEAaDfzdu3f7vjZNU+fPn1dMTIxCQvya+gcAtCB+JbfL5dKIESOUnp6uESNGaM+ePVbXBQAIML8+tP3pT3+q4uJidevWTWfOnNHzzz+vwYMHW10bACCA/Brhh4aGqlu3bpKkbt26MYcPAK2QXyP8yMhIOZ1O3Xvvvdq7d69uvfXWRs+vra1Vbm6ujh49qtDQUBUWFso0TWVnZ8swDMXFxSk/P5/PAgAgiPxK3KVLl+rkyZNauXKlTp06pUWLFjV6/h//+EdJV7dVnj59ugoLC1VYWKjMzEwVFxfLNE2VlpZ+9eoBAH7za4Q/f/58LV++3O9vOmLECA0bNkySdPLkSXXu3FlvvvmmEhISJElJSUnas2ePRo4ceeMVAwBuil8j/KqqKh0+fFher9e3GKspYWFhysrK0sKFC5WcnCzTNH23RoyIiFBFRcVXqxwAcEP8GuEfO3ZMGRkZOn/+vDp16qSQkBC/pmSWLFmiWbNm6Xvf+568Xq/vuMfjUVRU1M1XDQC4YX6N8KdNm6aQkBA5HA6FhoZqwYIFjZ7/6quvat26dZKkW265RYZh6O6775bL5ZIklZWVseMmAASZXyP8NWvW6Fe/+pU6deqkc+fOKSMjQ0OGDGnw/FGjRmnOnDmaNGmSampqlJOTo969eysvL08rVqyQw+FQcnJywH4IAEDT/Ar86OhoderUSZLUuXNnRUZGNnr+1772Nf3sZz/70vGioqKbKBEAEAh+X4efnp6ue++9V263W1euXNGKFSskSTNnzrS0QABAYPgV+N/5znd8X3++4hYA0Lr4FfiPP/641XUAACzG3gYAYBMEPgDYBIEPADZB4AOATRD4AGATBD4A2ASBDwA2QeADgE0Q+ABgEwQ+ANgEgQ8ANkHgA4BNEPgAYBN+7ZaJpnmraxUaYjR5nhmEWgCgPgR+gISGGHpyg6vJ85zpiUGoBgC+jCkdALAJRvgIuJo6U1Jdo+fU1plq3y40OAUBkETgwwL+TG8VMbUFBB1TOgBgEwEf4VdXVysnJ0cnTpxQVVWVpkyZoj59+ig7O1uGYSguLk75+fkKCeF3DQAEU8AD/7XXXlN0dLSWLl2qCxcu6PHHH1e/fv2UmZmpxMREzZs3T6WlpRo5cmSgmwYANCLgw+zRo0drxowZvsehoaFyu91KSEiQJCUlJam8vDzQzQIAmhDwwI+IiFBkZKQqKys1ffp0ZWZmyjRNGYbhe76ioiLQzQIAmmDJRPqpU6eUlpamMWPG6JFHHrluvt7j8SgqKsqKZgEAjQh44J87d06TJ0/W7NmzNW7cOElS//795XJdvUyvrKxM8fHxgW4WANCEgAf+2rVrdenSJa1Zs0apqalKTU1VZmamVq1apfHjx6u6ulrJycmBbhYA0ISAX6WTm5ur3NzcLx0vKioKdFMAgBvAxfAAYBMEPgDYBIEPADZB4AOATRD4AGATBD4A2ASBDwA2wQ1Q0Cz8uSuWxJ2xgEAi8NEs/L3pO3fGAgKHKR0AsAkCHwBsgsAHAJsg8AHAJvjQFi2aP1fzcCUP4B8CHy2aP1fzcCUP4B+mdADAJhjh+8FbXavQEKPRc8wg1QIAN4vA94M/0wpOphUAtHBM6QCATRD4AGATBD4A2ASBDwA2QeADgE1YFvjvvfeeUlNTJUnHjx9XSkqKJk6cqPz8fNXVNb0POgAgsCwJ/BdffFG5ubnyer2SpMLCQmVmZqq4uFimaaq0tNSKZgEAjbAk8Hv06KFVq1b5HrvdbiUkJEiSkpKSVF5ebkWzAIBGWBL4ycnJCgv7/5ou0zRlGFdXqkZERKiiosKKZgEAjQjKh7YhIf9vxuPxKCoqKhjNAgCuEZTA79+/v1y
uq1sTlJWVKT4+PhjNAgCuEZTAz8rK0qpVqzR+/HhVV1crOTk5GM0CAK5h2eZpsbGxKikpkST16tVLRUVFVjUFAPADC68AwCYIfACwCQIfAGyCG6AA1/Dn7mYSN05H60TgA9fw5+5mEjdOR+vElA4A2ASBDwA2QeADgE0Q+ABgEwQ+ANgEgQ8ANkHgA4BNEPgAYBMsvEKrV1NnSqrz67ywJlbRmgGqCWiJCHy0ev6ujnWmJzZ5npMVtGjDmNIBAJuw9Qjf342y+DMfN4ON2JoPfV8/Wwf+jUwFADeKjdiaD31fP6Z0AMAm2uwI358/6ZiqQUvgz1VG/k49+PO+t9s0Bv6vzQa+P3/SMVWDlsCf96q/Uw+B/F5oe5jSAQCbCNoIv66uTvPnz9f777+v8PBwFRQU6M477wxW80Cr5u/iskBOU7bU6aGWOl3r75VB/iwAtKpfgxb4u3btUlVVlbZs2aL9+/dr8eLF+vnPfx6s5oFWrTmuKGup00Mtdbo2kAsArerXoE3pvPvuuxo6dKgk6Zvf/KYOHjwYrKYBAJIM0zSD8tfP3LlzNWrUKD3wwAOSpGHDhmnXrl0KC6v/j4zExER17949GKUBQJtx4sQJuVz1/wURtCmdyMhIeTwe3+O6uroGw15SgwUDAG5O0KZ0Bg0apLKyMknS/v371bdv32A1DQBQEKd0Pr9K54MPPpBpmlq0aJF69+4djKYBAApi4AMAmhcLrwDAJgh8ALAJAh8AbKJNbZ7G9g0Ne+yxx9SxY0dJUmxsrDIyMpSdnS3DMBQXF6f8/HyFhNjz9/97772nZcuWyel06vjx4/X2S0lJiTZv3qywsDBNmTJFw4cPb+6yg+raPnK73crIyFDPnj0lSSkpKXrooYds20fV1dXKycnRiRMnVFVVpSlTpqhPnz4t831ktiFvvPGGmZWVZZqmae7bt8/MyMho5opahitXrphjxoy57tizzz5r/vnPfzZN0zTz8vLMnTt3NkdpzW79+vXmww8/bD7xxBOmadbfL2fPnjUffvhh0+v1mpcuXfJ9bRdf7KOSkhJzw4YN151j5z7aunWrWVBQYJqmaZ4/f9584IEHWuz7qE0N6di+oX6HDx/W5cuXNXnyZKWlpWn//v1yu91KSEiQJCUlJam8vLyZq2wePXr00KpVq3yP6+uXAwcO6J577lF4eLg6duyoHj166PDhw81VctB9sY8OHjyoN998U5MmTVJOTo4qKytt3UejR4/WjBkzfI9DQ0Nb7PuoTQV+ZWWlIiMjfY9DQ0NVU1PTjBW1DB06dFB6ero2bNigBQsWaNasWTJNU4Zxdce+iIgIVVRUNHOVzSM5Ofm6Fd/19UtlZaVvOuzz45WVlUGvtbl8sY8GDhyoF154Qa+88oruuOMOrV692tZ9FBERocjISFVWVmr69OnKzMxsse+jNhX4N7p9g1306tVLjz76qAzDUK9evRQdHa1PP/3U97zH41FUVFQzVthyXPs5xuf98sX3lcfjue4/rt2MHDlSd999t+/rf/zjH7bvo1OnTiktLU1jxozRI4880mLfR20q8Nm+oX5bt27V4sWLJUlnzpxRZWWlBg8e7NuvqKysTPHx8c1ZYovRv3//L/XLwIED9e6778rr9aqiokIfffSRrd9b6enpOnDggCTpnXfe0YABA2zdR+fOndPkyZM1e/ZsjRs3TlLLfR+1qZW2bN9Qv6qqKs2ZM0cnT56UYRiaNWuWYmJilJeXp+rqajkcDhUUFCg01J73Of3kk080c+ZMlZSU6OjRo/X2S0lJibZs2SLTNPXss88qOTm5ucsOqmv7yO12a+HChWrXrp06d+6shQsXKjIy0rZ9VFBQoNdff10Oh8N3bO7cuSooKGhx76M2FfgAgIa1qSkdAEDDCHwAsAkCHwBsgsAHAJsg8AHAJgh8tEnbtm3TsmXLbvh1BQUFOn369E21+e9//1vz58+XJD344IPyer039PoFCxbo3LlzN9U24A8
CH/if/fv3KywsTLfffvtNvb5Lly6+wL8ZqampWr58+U2/HmgK+w6gTXM6ndq+fbsMw9BDDz2ktLQ0ZWdnKzw8XCdOnNDZs2e1ePFiDRgwQE6nU08//bQkaefOnXrxxRcVFham7t276yc/+YlWr16t48eP68KFC7p48aImTpyonTt36ujRo1qyZIk6d+7sW5z0uVOnTikvL09er1ft27fXwoULddttt2nGjBmqrKzUlStXNHv2bCUmJsrhcOjIkSO6cOGCYmJimqvL0IYxwkeb9a9//Uu/+93vVFxcrOLiYu3atUtHjhyRJH3jG9/Qhg0blJqaqi1btkiS/vKXv/iWum/fvl1PPfWUNm3apCFDhvg2uerQoYM2bNigUaNG6U9/+pPWrl2rH/zgB9qxY0e9NSxZskSpqalyOp1KT0/XsmXL9PHHH+vcuXNau3atli9fritXrvjOdzgc+tvf/mZlt8DGGOGjzTp48KBqamr01FNPSZIuXryojz/+WJJ01113SZJuv/12X8DW1dUpPDxckjRnzhytW7dOmzZtksPh0IgRIyRd3SNFkjp27Kg+ffpIkm699dYG5+s/+OADrVu3Ti+99JJM01S7du0UFxenSZMmaebMmaqpqVFqaqrv/C5duuizzz4LcE8AVxH4aLP69eunK1eu6KWXXpJhGPrFL36hvn376ve//71v69prtW/fXrW1tQoNDdWWLVs0bdo0derUSfPmzdMf/vAHSar3dY1xOByaPHmyBg0apI8++kh79+7V+++/L4/Ho/Xr1+vs2bOaMGGC785HFy9eVKdOnb76Dw/Ug8BHm/X5VtApKSmqqqrSwIED1a1btwbPHzRokNxutwYOHKiBAwfq6aefVnR0tCIiIjRs2DAVFRXdcA1ZWVmaP3++vF6vrly5orlz56pnz55avXq1Xn31VbVr107Tp0/3nX/o0CHNmjXrpn5eoClsngb8z759+7Rjxw7l5uY2S/sffvihXn75Zf34xz9ulvbR9vGhLfA/99xzj2pra2/6Ovyvyul0XnerPCDQGOEDgE0wwgcAmyDwAcAmCHwAsAkCHwBsgsAHAJv4L0zocQiyvNrbAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "smiles_data['len'] = [len(i) if i is not None else 0 for i in smiles_data['drug']]\n", + "smiles_lens = [len(i) if i is not None else 0 for i in smiles_data['drug']]\n", + "sns.histplot(smiles_lens)\n", + "plt.xlabel('len(smiles)')\n", + "plt.ylabel('probability')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "UmKR_T4Vl5_X" + }, + "source": [ + "Some of these look rather large, len(smiles) > 150. Let's see what they look like." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "X2H-4P1ol5_Y" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABLAAAADICAIAAAB3fY8nAACNfElEQVR4nO3dZ1xT2RIA8EkhofcuUmxgRcUuxe7aK/ayNuy9967Y3dVdRXdVLKhgL6hr76KgYgUFsaF0kN6SeR8O5iEkETEQJPP/vQ9vc29uTmLIuXPKDAcRgRBCCCGEEEKI6uEquwGEEEIIIYQQQpSDAkJCCCGEEEIIUVEUEBJCCCGEEEKIiqKAkBBCCCGEEEJUFAWEhBBCCCGEEKKiKCAkhBBCCCGEEBVFASEhhBBCCCGEqCgKCAkhhBBCCCFERVFASAghhBBCCCEqigJCQgghhBBCCFFRFBASQgghhBBCiIqigJAQQgghhBBCVBQFhIQQQgghhBCioiggJIQQQgghhBAVRQEhIYQQQgghhKgoCggJIYQQQgghREVRQEgIIYQQQgghKooCQkIIIYQQQghRURQQEkIIIYQQQoiKooCQEEIIIYQQQlQUBYSEEEIIIYQQoqIoICSEEEIIIYQQFUUBISGEEEIIIYSoKL6yG1DyXrwAfX2wtCx85OBB0NKC6tVBJAIAcHAo7aYRQgghhBBCiBKV6xnC+/chOBg8PKBSJfDwyAgLkxyJiYHu3WHAANi9G4KD4fFjiIiA6GgltpUQQgghhBBCShsHEZXdhhLj7w9hYTB9OohEwOO5WFjoOzouXLjw8+fPO3dyz57tYmgI27ZBnz55px87Bj17KrXBhBBCCCGEEFKKynVAiAjz58Pq1QDwwtq6TmSkSCRSV1fPzMzkcLjDhr1ftqxChQrKbiQhhBBCCCGEKEm5XjLK4cCqVXDnDnTtOl8oFIlEXC5XW1tbKBSuXr1q504LigYJIYQQQgghqqxczxDm8/z58zVr1vj5+WVmZk6YMGHLli3KbhEhhBBCCCGEKFm5niHMp2bNmnv37l2+fLmyG0IIIYQQQgghZYWqBIRMpUr1mzTppaZWV9kNIYQQQgghhBDlU4E6hPno67e6d6+Vhoay20EIIYQQQgghZYBqzRCamAAAxMQoux2EEEIIIYQQUgao1gyhpSV06QJVqii7HYQQQgghhBBSBqhKllEAyM2FU6dAQwOqVAGRCADAwUHZbSKEEEIIIYQQ5VGVGcKICBgyBG7dgunTIS0NcnNBT48CQkIIIYQQQohKK/8BISJu377dz6/drVuVbWyga1dwdVV2mwghhBBCCCGkDCjnS0bj4uKGDBly7tw5Xd2KAwe+Wr1aXU9
P2W0ihBBCCCGEkLKhnM8QhoaGPn78WFdXd9s2zwED1JXdHEIIIYQQQggpQ5QfEJ47V4JZXlavXv358+fRo0erq1M0SAghhBBCCCHfUH5AmJUFjx+DSASIxQ0I798HoRAcHQsfMTU1BQAvL6/AwMCePXv+XEsJIYQQQgghpFxRfmH6Xbtg9Gjo0AGEwuJeIi4OkpLg3TsotB+SBYQAEEPV6AkhhBBCCCHkW8oPCOPiIDER7t+H4k/g1awJK1ZA1apw8mSBIyYmJuz/xMbG/kQbCSGEEEIIIaQcUn5A2KQJAMCDB7nFfH52NjRpApcuQU4OLFwIYnH+gywg5PP5mZmZKSkpksdDQiAkpNhNJoQQQgghhJDyQPkBoZvbIysrlzt3uhfz+QIBTJgAAKCu/k5X97+jR/MfNDQ0BAAzM7OpU6fm5ORIHn/8GCIipF1NLIYrVwAA7t+H4OBiNokQQgghhBBCfgXKr0P47t07W1tbIyOj2NhYDodTnEukpn7s3n34s2cXo6OrVav2/PnzxMTEa9eunT59+sSJExwOJzk5GQB0dHR1db/Y2ICVFezdK2PL4qxZsH499OkDgweDtja4uf1YS2Snt5Eo0ayqhBBCCCGEEFJ0ys8yamlpqaurm5ycPHDgwEGDBrVu3Vr4o+lltLUPd+hw8fJloVD46tWrKlWqvH//XhLoVqtWrUaNGu/fv9fQ0A8Ph8hI0NaGK1dAUxM2bIAVK6BOnbzLRPn4mK9bB1wuHD4Menrg5fVjzXj2DM6dA11dMDAAa2tZZ2VlQXAw5OaCnh4YGICZ2Y+9CCGEEEIIIYQoipJnCNPS0vr06ePv76+pqZmeng4ALVu+MzOz7tkTOnYELa2iXicjI6NChQrJyck8Hi87O1tDQ6N58+adO3fu3r27jY0NOycnBz9/5rx/D0lJkJsLW7fC5cvA48HkyQnTpmW8fv36t/btj1au3CkkBNTU4MIFaNHix96MuzscOQIA0KoVXL4s66wpU+DdO1i9Ghwc4Nixn0ilQwghhBBCCCE/R5kBYXw8TJvmu3dvX1NT09GjRy9fvlxb2ywr63NODgcANmyALl2gatXvXyc7G2bPXrx58zIAGDhw4JAhQ9zc3L47zZiQAGvXwubNUKvWvBcvNiNiZmYml8vd2LjxxJEjucOH//D76dABzp8HAOjatXC+U4n69eHRIwgKgvr1f/gVCCGEEEIIIUSBlJZU5sMHcHGBvXv79Ojxx61bt+zs7HR1dWvWnJ6Tw9HSgq5dwc6uSFldYmOhfXu4cWOSlpbuv//+u3///nbt2hVl0amhIXh6wvPnmXZ2rzMzM7OysrhcrlgsjnR2Lk40CPDRyCjGzi7RxialQgU5p6WmAgBoaxfjFQghhBBCCCFEkZQ2Q7hoESxfDo6OcO4cWFgAAGRlZd28meLra6ynB+vWwYIFsHcveHtDy5YyL5KSAjVqwMePYG0NJ07E16tnVLzG3L17d9asWbdu3TI3N3///r2amloxLlKvXr3Hjx8DgIeHh5fs/YetWy/JyNA7enS4hYVe8VpLCCGEEEIIIQpR2kllWPW/iAgYOBCEQhg/HvT18w4JhcI2bYRt2uT9Z3IyfPgAAQHSA0LJdXr3hqAg8PMDM7NiRoMA0LRpU19fXycnp8+fPy9atGj16tX5j+bm5u7bt8/CwiI8PHzMmDE8Hk/qRVLZ3B+AttzpvwcPNqakpGhpjSh2awkhhBBCCCFEIUp7ySgrAJiYCAcOwNCh/48GC2vcGADgwYMcqUcfPoTz5yErC5o1gytXFJCr08LCws/PTyAQrFmzxtfXlz0oFov9/Pxq1ao1fPjwIUOGTJgwoWnTpk+ePCn89KdPn4pEIl1dXXV19fj4eFmvEhUVlZqayuFwuFzlV4AkhBBCCCGEqLjSDkv69YMOHeDKFVi+HI4dk3dmkyYJ1arVDQiwk3o0NRWmToXr18HdHfgKmuZs3rz52rVrEXHEiBFPnz49ceJE3bp
1+/TpExoaWrVq1WHDhtnY2Dx48GD8eN05cyAzM+9ZT5/CmDHPHR0dIyIisrKyMjMzvb29u3Tp8uHDh/wXT0hIWLJkib29PZ/P5/F4devWvSwtEykLQZ2dnSdNmrRlyxaxWCyrtefOwevXEBKSN1lKCCGEEEIIIT8MlWHHDgTAAQO+c5qpqSkAREREFHg8JQUtLBAA/fwU37bBgwcDgIaGBvt8bGxs/vnnn5ycHERMSUlZvPgwl4sAaG+PCQk4aBCy/2zWrNusWbNiY2N9fX2NjIwAQFNT09PTMzc398uXL0uWLNHV1QUADofTqlUrBwcH9v9HjRqVmJjIXlckEu3fv7/q17SqHA4HAJo2bfrs2bMCLQwORnd3XLIE/fzw4EH098eoKMV/DoQQQgghhJByTzkB4ZMnYgeHtB49Hsk/zdHRkcvluri4HD58ODk5WfL4ypVbXV2fuLiIxWLFty09Pd3JycnS0lJHR8fT0zMjI6PACQ8fYv36OHAg+vtj9+4oEKCHB0ZG/v+Ez58/9+7dm8V1VatWNTQ0ZP+/Q4cOgYGBiJiTk+Pp6amurg4A5ubmhw8fPnXqVN26ddlpdnZ2Xl5ep06dsra2BgA1NbXZs2dnZmYiYnBw8PjxrzkcBMC2bf//ikePKv5zIIQQQgghhJR7yskyKhaLDQwMkpOTo6KizKTt/0PEefPmeXp66unpffnyBQCEQqGLi0vnzp3d3NycnZ3T09Nv377XtGmjkmje3r17hw4d2q5duwsXLkg9ITsbMjLg6lWIiYEOHaBiRSnnnDlzZty4cXp6es+ePWvevPmKFStafFvpPiQkZNSoUbdu3ZI8Ymtru2jRosGDB/P5fABITk6eM2eOl5eXWCyuVq2ara3tpUuXKlZsmZR0acwYmDULvkaahBBCCCGEEFIcSis70bp16ytXruzZs2fo0KEFDolEorFjx+7cuZPP569cuVJNTe348eN37twRiUQAwOFwELFXr15HjhwpobbFxsaam5sLBIL4+HhNTU1Zp7HNew4OMq/z5cuXlJSU9+/fN2vWTOoJiLh9+/bHjx8/efJkyJAhI0aMEAgEBc65c+fOqFGjQkNDhUKhWCwePXr0vHkrTE2pjiEhhBBCCCHkZyktINy4ceOBAwceP35cr169zp079+/f397eHgCysrIGDhx49OhRLS0tPz+/Dh06sPPj4+PPnj3r5+fn7+9va2t7+PDhBg0alFzzhgz59/59pw0bHDt14sg659Ah0NODrw0sQVlZWVevXs3Ozq5fv76VlVWJvx4hhBBCCCFENSgtIASAxYsXr1u3LiMjg/2no6Njhw4dLl++/ODBA0NDw9OnTxeeWMvNzWVb7zIzM/mKyi4qzZIlsHQpTJgAW7aU3IsQQgghyhGSGfIk40lDrYZiFFcWVlZ2cwghhCiNMgNCAMjIyLh06ZKfn9+pU6fYXkEDAwMdHZ0LFy44SFuL+f79exsbGysrqwJFHRTu/n1o3BiqV4cXL0r0dQghhBAlOJR4KCwrrI9Bn2RRcgPNElxxQwghpIxTckAokZ2dffny5ZMnT44YMcLCwkLWwsjbt287Ozs3bdr0zp07JdoesRiWL4cePYDt6ZOzUZAQQhTp/n0QCsHRUdbx//77LyUlpcCDbdq00dPTK+GWkfIGATkgc1sEIYQoB/WDpa4EV13+EIFA0KFDhw7f25DHJgYrSk3rqVBcLjg6wqtXkJsLenoUEBJCSl5ODgQHg78/WFqCrS0U6tgyMzMnTpx46dKlt2/fFjjk4eExaNAgFxeXUmoqKRcoGiSElC3UDypJWQkIZcrMhNhYiI29ExcXFhXl5+cHpRIQAsD69ZCdDcePQ4UKpfBqhBCVFxUFDRuCujpkZkKFCtCpU4HjGzZs+Oeff0xNTU1NTZOSkiSP29nZ7dix48GDBw8ePODxeKXaZkLyef/+PaugSwghxUH9oJJwld2A76lXD6ytwclp38yZQ4cOPXPmDABYWlq
Wwis/egQPHhQemyCEECWIjIxcvXo1ABw+fLhixYrZ+Rw/ftzW1vbRo0f//vuvsptJVFdoaGidOnVGjx6dnZ2t7LYQQsoh6gdLThkOCO/fh+BgyM0FdXXQ0tJMTGRpRfX09KKiokr6xePiID0dDAxAmwr+EUJKRa4h9+M+x7dbDD/uc0x3VCtwdPbs2Wlpae7u7i1atLh06VJMPvb29mvWrAGABQsW5B8xJaQ0vXjxIisra8eOHa1atZLaTQcFBXl5eZV+wwghvwrqB5WlDAeEcXGQlASOjpCZCWlp9Wxtc3NzBQLBly9fbt26VdIv/vHjkwoVmjs7Ly3pFyKEEEasJo6uHpzolBBdPThHJyf/oXv37vn4+Kirq69duxYA9PX1TfLhcrl9+vRxc3OLjY1dsWKFkppPVEhiYmJubm6BB3v06HH37l1bW9vbt287OTndu3ePPf7ly5cdO3Y4OTk1aNBgwoQJnz9/LvX2EkJ+DdQPKktZyTIqBSJwOHD7NsTHg4lJoqmpmpkZh8MxMjISCo3fvHllZKRZci9+6tSpbt26OTk53bhxQ1OzBF+IEKLSkpPh6VMAyKzMz9KIj472FIlSeDx9Q8NB6uoOcXH8yMhcRBw/fvyTJ08WLFiwfPlyWVd6/PhxgwYNuFzu06dP7e3tS/E9EJXTuXPnixcvnjlzpm3btgUOxcXF9e3b98qVK0KhcNKkSZ8+fTp69GhmZiYAGBsbDxo0aPbs2ebm5spoNSGkTKJ+sAwow0llOBwAgObN2X8ZfH24f/93Pj5mFy7AgAEl+OJPnjwBgKCgIBMTk3Hu7uvatIHOnUFfvwRfkhCiUlha7Y8foXNnAEj4x+lz3SBNzboVK26Jidny7t1IAAgNdRs48DoAmJub6+np9e/f//Hjx3Xr1i18sfT09JiYmPbt2wcFBcXFxVFHSEpUSEhIdna21AJRxsbG58+fnzZt2tatW3ft2hUfH8/lctu0aTN48GB3d3cNDY3Sby0hpIyifrDMKMNLRmWoU8csOxvOnSvBlxCLxUeOHNHU1BQIBOnp6fZPnsDgwWBqClOnluCrEkJUClsVb2UFzZpBs2Z8E3tt7aYCgW1KylUA0NZurK3dTFOzUrNmzRo1apSQkJCcnFy/fv2hQ4eKRKLCF1u3bl379u3v3LkTHR1dOBk3IQqUlSVKT8/i8/mVK1eWeoKamtqWLVsaNmwYHx/fq1ev8PDwixcvDhkyhKJBQsg3qB8sM8rwklEZQkOhenVo1w7Ony+pl9izZ8+wYcPU1NRycnIAINzWtlJUFGRnw8CBMH26nEKZhBBSVGxVfD7Z2R+ePrXmctXF4swqVc7o6f0/3fa8efNWr16trq6emZn5999/jx07Nv8TP3786ODgkJaWBgAODg5PnjxRUyu4F58QRXn+HGrVAkfH1MeP5WVdq1mz5osXLx49eiR1LJ8QQqgfLDt+vRlCRFi3DrZsKanrZ2RkLF68GAB27tx5+fLlGZMn2wFAZiaIxVC/PlDmIkKIQnB+oCb4vHnzLC0t2UasBQsWxMfH5z86c+bMtLQ01vlt3LiRekFSokJDAQAqVpQXDYpEovDwcA6HU6VKlVJqFiHkl0P9YJnx6wWEjx/Dly8QHAy+vvDxo+Kvv3Hjxvfv39etW3fw4MGtWrVat3kz580buHcP1qyByZPBzU3xL0kIIQD8JK7j3AY1xhk7zm2g/fKbzkxbW3vVqlUAIBQKExIS/v77b8mhkJCQw4cP8/n8nJyczp07d+jQobTbTVRMUhLo6oL87TkRERFZWVkVK1bUptpNhJAio35QWX69JaPMP//AqFHQuDFcvw5CocIuGxsbW6VKleTk5EuXLrVu3Vph1yWEkO/68AGsrUFdHTIz4cwZ6NQp/0FEbNKkyf37911cXHbu3MnqsjK+vr4LFizg8XiUV42UtJAQuHEDWrYEc3PQ0ZF52pkzZ7p06dKuXbsLFy6UYusIIb846geVpAxnGZWrZ09
YtQoCAmDx4qeenrUVck1EnDJlSnJycocOHSgaJISUNnV1aNkSoqLA3ByMjQsc5HA4mzdvdnZ2Dg4OdnBwKHBUS0tr9OjR1AuSkvb4MXz8CI8fw+7dULEibNkCAoGU0168eAEA9IVUUSx1JCVcIMVA/aCS/KoBoaEhHD8OM2duWL9+to3NlgJbS4shNTV12LBhx44dEwqFkZGRUVFRVCiJEFKqTEzgyhU591JNmzZ98eLF+PHjIyIiChzas2ePI91+kZLXrx8AwIMHcPUqZGZCaCj6+iaYmhpJTggNDd29e/fff/9duXLl2rUVM1xLfjFxcaClpexGkF8T9YNK8qsuGWV8fHwGDhwoEAh27tw5ZMiQYl/n1SuYOXP0qVM7dHR0tLS0oqKirK2tT58+XadOHQW2lhBCvs/fH7S0aLsyKeMePYIePcDO7r/Xr4cfPXq0Vq1avr6+//777+3bt9kJc+bMWb16tXIbSZTg2jX480949w5sbODQIekzyITIR/1gqfu1A0IAmDFjxoMHD27cuNGmTZtNmzbVqlXrR69w5gwMHgxcbpyj41Avr82GhoY9e/a8ceOGtrb2gQMHunbtWhLNJoQQ6Qql4SakbIqKyunTp83NmzfU1NQEAgFL+K6rq9uvX78RI0Y0atRI2Q0kyuDtDb//DjweiESQng5UfJIUA/WDpe7XyzJawNq1awcMGKCtrX3p0qV69eqNHz8+Jiam6E/PzIQJEyApCVq1Mj516mzVqlWNjIz++++/IUOGpKam9uzZc82aNSXXeEIIKYh6QfKLMDdXu3Ll8u+//66vr5+enu7k5OTl5RUZGenl5UXRICGk+KgfLHW//AwhExcXt3z58r///js3N1dLS2vChAnjxo2ztraWevKrV2lnzmidOwe2ttCzJ4SHw9u3sG7dN18/RFyxYsXixYudnJxu3bolVGAmU0IIIaS82LVr14gRI3r06HHs2DFlt4Uo353Dh69t3BgTGWlaocKM69cF6urKbhEh5Pt+1aQyBRgbG//xxx/jxo1buHChn5/fH3/8sWbNGk1NTWtraysrKysrK2tra01Nzffv3587d87IaFBg4DIAsLCAjh0hPBzatSs4GMHhcBYuXHjp0qUbN27s379/xIgRynljhBBVIzdBHyJ6enoWXgcxc+ZMCwsLDo2qklIXGhoKAPXr11d2Q0iZ8Dozc/79+zweTxQZObVcTDkQJaB+sNSVk4CQsbe39/X1nTZt2qZNmwQCQXp6ekhISEhICDtqYGCQmJgIALm5h4cOXdq+PaddOzAygh49ZF4wISEBAChnESGkNKSmwr//wuXLYGMD06eDrW2B41FRUa1atUpMTIyKiipw6M6dO2PHjv39999Lp6WESLBOtnAKeKKa2Lqz8rH6jCgB9YNK8svvISzs+fPnAODl5ZWSkvL8+fPz58//888/lpaWLBrs1KnT27cv9+zh9O8PRkZyL5Sd3Vdbu6mVVY0aNUql4YQQ1ZaYCFOmwMWLsHUrPH9e+Pi8efNevnxpYWFhZWWV//Fu3brdv39/zpw5ycnJpdVWQvKwGUKq/UUA4MGDB1u3bgUAfX19DoczYcKE2OhoZTeK/FKoH1SS8hYQJiVBbu54a2vnLl26aGtr16hRo3379iNGjKhbty47ARG53KK96xcvFty7d0dbW1NTs+QaTAghRfHw4UNvb2+BQHDo0KFGjRpVzWfLli3Ozs7R0dGU5Z+UspycnDdv3nC53CpVqii7LUSZPnz4MGDAgMaNGwcFBVWoUMHZ2VlNTe2Yj4++qyssWQJZWcpuICkPqB8sQVi+eHsjALZpU/DxndOmLa5d+9/q1W8MHvxj1+rTR7EtJIQQqbKyPjwM4D96oPkwgJ+UeDb/IbFY7OLiAgCzZ8+W+tygoCAulysQCF69elUqjSUERSLRzp07AUBDQ2Pw4MGxsbHKbhFRgvT09KVLl7Khcw0NjUWLFqWlpSHiq1evbo0fjwAIgM2bK7u
Z5NdA/aCylLcZwuPHAUDKtsCRPN6Sp0+Hv3zpcuNGUa9laAht24KzsyLbRwghMqGYl4s8sZiXC5xvduAcPHjw5s2bZmZmc+fOlfrM+vXrDxkyJDs7e9asWaXSVKLS3r2DTZtu2Nrajho1ytDQMCsra9++fTv79oV9+4A2j6kYRPz333/T09M7d+78/PlzSXBYtWrV5lu3wqVLULs2DBsG9+/DiRPwNa0DITJQP6gc5aTshMSBA3DwIHh5QYUK3x5Ytw7Y90NDA9LTi3QtluOIFZyg7fKEkJLw8SP4+ADAl3baaabxmZlPeDw9RBGfb8rnG0VE6N68mZyTk/Pnn3/GxMTs2rVr2LBhsq4UHR1drVq15OTkCxcutGvXrhTfA1Ehb9/C6NFw6RKYmkbGxdlaW1vHxMSkpqaaamp+1NRUi4uDNm3g/Hng8ZTdUlJ6/P39tbW1XV1dpR/OyQEeD86fh4AAaNIEOnQo3daRMo/6wTKgXGUZBQBDQ9i0CVJSICTk2yCudm1wd4cvX6BpUxCJitRXxcWBlhZ8/gx6ehQQEkIUjA05ffwIs2cDQJqR0+e6QZqadStW3BITsyU6ei0ARES4zZ59HQBsbGxMTEx0dHSOHj3aq1evwhf79OmTl5eXq6traGiomZlZKb8VUn7IzvYeEgI3bkCTJnDvHggE0Lp1BQ+PIBeX2k+fPh0zZswGoVDt2jUQCMDGhqJBVdOxY0d5h9XUAAA6dAD5pxEVRP1gmVHeZghPnIDcXMjNBUTo37/QYbmFTQpCLFidkBBCFMXfH7S0wNoatm0DgMSe+qkVYnNzP4vF2RwO8njmPJ5OWJiRv398VlaWt7f3ly9fAMDMzOzVq1e6uroFLjZ06NC9e/cKhcKsrKyTJ0927dpVCe+IKBVbixcRAVWqQHY21Kwp7aSAALh4Edq1k9kVsq+lm1vhHvDQIbh8GVxdoUIFqFcPDAz+f0gsFsM//3DnzAF1dfjnH6hQoaj9LCFElVE/WGaUt4BQ4tgx6Nmz0KOSro4QQpSr0A13dvaHp0+tuVx1sTizSpUzenqdJId27tzp4eHB+rmZM2euXbs2/xODgoIaNWoEAGKxuHXr1pcuXSqdd0DKlEOHABE0NODjR5g5E5o0gb/+AknVpLdhYbYLFoCvLwDAjBnQqZP0rtDHB/75Bz59gpEjYcaM/Eeio8HcHPT1ITFRRguioyEiAhISqJ8lhBQJ9YNlRnlLKiMhJRoEgA4dqJcihJQJP7IAYcSIEQ0aNMjKyuJwOH/88cerV68khxBx8uTJYrEYAPh8/qZNmxTfVPIr6NcPhELo3h2qVAEtLbh2Dfr1S5s/f35kZOScOXMcatV6Hx6et5jz1i2ZXeHatXD1KoSGws2bBY6EhgLI31BvZpa3Q4z6WVJsHz8CANy/D8HBym4KKXnUD5YZ5TYglI6WgBKJkBDw9aUuh5QdgliOUyuduq3UnFrp6N775seZy+X+8ccfHA6Hy+Xm5OScP39ecujVq1fPnz8XCoVisXjcuHG1a9cu9YaTsoKNhP72G4SGwsiRgDhl1apVtWrVWrNmTU5OzsA3b4DDAT4fnJxk1oX7GsuJHz4Ui0T5j7x/H1OpUlbNmmKZL89u4kNDKZMkKaY3b6BaNWjdGkJDISlJ2a0hSkD9oLKoWEAow7lzcOECdWEq5vFjeP4cDhyA339XdlMIAQAAMUJyCiczB5JTOLkFb7ubNWvWp08fkUjk6Oior6+/96uAgAB3d/esrCxDQ8NFixYppeGkrDEygp07Yc+eMY0aNUpOTmb3SbcTEx+NHw/Pn8OWLXkJtAuJb9PmjJubu5mZVlTU02fP8h96/HjtmzfqlSuvkfK0zExISoK4OEhKgsePISKiJN4UKf8mT4aMDLhxAyZOhCpVlN0aogzUDypJecsy+qMyMkAggKwsuH4d2rUDAwOgvERllMKrgPTrB2lpYGsLcXEwejQ0baqAaxLyM/T0YOFCCA+HypWhatX
Cx9esWXPr1q3IyMihQ4cWOFS9evXx48cbGRmVSkPJr8HJyenOnTt6enppaWkNGjTYuHFjPRcX+U8RN2nS9cYNHo+Xm5t748YNR0fHnJyc1NTUlJSUoKAgAHCQ+vM7fjzcuAHHjgENzJPiOnvypHFqamOhELKyoG3bQtXDiGqgflBJym1SmSIaPhzCwmDvXrC1BZCVioaUstRU0Nb+5pHjxyE7G8zN86qAKLCK0cKFsGJF9qBBgn37fvi5ISHw5AnY21M+PaJIcpMh5+TkLFiw4PPnzwUeX7ZsWcWKFXmU7p8UoqmpmZGRkZ6erqGhUZTzbW1t3717x+FwhEJhbm5ubm6u5JBAIKhVq9bo0aP79++vo6PDHozeu9ds6FDQ1IS7d6FOnRJ5D6S8S09Pr1mz5tu3b5saG5+uWtXo8GGoWFHZjSLKQ/1gqVPpgPDkSejeHdTV4cEDqFVL2a0h7O//yxfo0QO2bv1/2ZD798HZGczN4elT0NNT8ItGR3v37z/53r2gp08rV678Y889dAhevQIXFzhxAlxdQVpVHEJ+GCVDJgrF5/NFIlFubq78+ySxGM6eFa1d2/rWresAoK+vn5SUBAA8Hk9XV1dLS0sgEHz69CkzMxMANDU1u3fvPmjQIH19/VYtWx5p2rTjqFGcAQNK5Q2RXxXbmOPgAOfOQVYWnDsH2tpgaJiQkrI2JCTk5MmT6urqmZmZa9asmTVrlrIbS5SK+sFSpzJLRnNzgf/Nm42Jidm+/ZyOzpDlyzkKiQbPnQMuF2xsqIh9ccXFASIMHQoJCTDa43PNALNaKwEwLnSFqa4O9Oql+GgQAMzMblaq9OXq1Q0bNvz9998/9tx+/QARjh2DP/+EHTvAzAycnRXfQqJqOnSg9FdEUcRisUgk4nK58qPBQ4dg+XJ48YLXrFlvY+PnEyZM6N27t6Wlpba2thqrKg4AAElJSUeOHNm3b9/Nmzd9fHx8fHxYCviLjo6dKBr8pUhis9L0+DHo6YGBQd4+nR07AAAcHXnBwWsAQF9fPz093d7efsqUKaXaLFIGUT9Y6lRmhnDECPDxAROTCy4umxMSjI2NX79+HRAQ0KvXJF/fP7iKyK3z55/w338wfjzUr08bEYsFEdasgblzAeDzEptPnd+pq9traNRLTDxklNXK1slfVhaEn/T06dO6devq6Ohoamo6feXs7GyQv+5yfoVXMowbB15eJ9u1s9+0Kf8Gm5ycnGfPnqWkpLi6upZEywkhRL7MzEwNDQ2hUMhm9gqQ1LL384Pdu8HGBubNSxo4UE1LS0v+ZT9+/Hj06NHdu3dnZWWFhIQsXLhw2bJlJdF+UkIOHVLw9ouiY3tzwsLgyhVITQWBIC4t7d+MjAwfH5/Xr1+PHj16+/btSmgWIapNBQJCdvs+ZAg8eQIAntWqzX31CgC4XK6Ojs6TJ0+sra1//kU+fgRra9DSgpgYOHeONiIWFyL8+WdWwPFn069zOOra2o3Mzee+fz+uSpUz6uo1vv/0Ytm+ffvYsWP5fH7+rTI8Hs/BweHy5ctmhYP7wisZRKLDkyf3++svW1vb3bt3v3//PigoKCgo6OHDhxkZGfXq1Xv48GEJNZ4QQuRISUnR1dXV1tZOSUkpfHTLFkhIAEdHiI4GTU3o1w/yTQcWyc6dO8eNG5ebmzt27NitW7dyFTK8SkreuXNQpQpoaoKlZVmZiXnw4EHjxo0tLa0CAsIqVBAouzmEqBYV+O1mibDV1dl/JX5dOKqpqVmrVi2FRIMAYGUFTk6Qmgr//UfR4E/gcGDyZNz1t4ZGHT5f19h4jIaGo5ZWk/fvJ+bkRJbECyYnJy9ZsgQADh069OzZM29v70mTJjVv3lxNTe3169e9e/fOzs7+5gnLl4O3N/j5wfHj/3+Qx+uydm2jRo3evXvXsmXLoUOH/vnnn7dv387KyrK3t3eklDOESLN69eqmTZveuXNH2Q0pz3g8Xt++fUUi0ZAhQ+Lj4/MfiomBLVt
gxQpQU4PRo2Hw4B+OBgFg1KhRx44d09DQ2LZtW+/evaXOQ5IyJT4eOnaEkBB48ACcnaFFC/i2vIjSNGzYcNq0AykpT+fPp2iQqIoy1A9iuScWIyIeO4br1+Ps2VunTevSpUuDBg0EAgGPxwsNDVXU66xfn9Ws2duZM/0UdUFVlpubEhbW7fPntR8+TA0KEgQGQvS/rfHUKYW/0PTp0wGgRYsWBR7PzMy0tLQEgDdv3nxzoEYNBEAAnDixwFO8vb0BQE9Pr2/fvuvWrbt69eqXL18U3mBCyo3hw4cDwLZt25TdkHLOz8+Pz+cDgLm5+f79+8ViMSJ++fKlV68AAKxfH3/+h+r69et6enoA0KpVK/rdK8tevhRXqoQA6OKCgYFoaooAaGoqmjt3SUpKirJbhxERKBQil4tBQcpuCiGlouz0gyoQEMrg4eFhZWU7b95lRV3w1atXAKCvr5+VlaWoa6qsT5HLAgPh6VO7p0/tAgPh+Vk91NVBDgcXLFDgq4SFhQmFQi6XGxgYWPho8+bNAeD69evfPFqlSl5AOH16gfP79esHAGvXrlVgCwkpx9auXQsAkydPVnZDyr+QkJDWrVuzUWBXV9egoKCWLVtqaGh07Xo+OloxL/Ho0SMzMzMul7tq1SrFXJH8hKdPnyYnJxd48NatW3XqNDQxiWrUCKOiEBETEnDcOHRz2wcAVlZWZ86c+eYJL1/iy5cYEICPH5dWw3HaNGzcGB89wps38eJFvHIFAwMxMDCvwYSUM2WnH1TdgDA8PEldPVsgwA8fFHbNGjVqAMDs2bNfv36tsIuqoICAzOZ2IQ/qJCdfzs6OfBXSNtetHnI4yOPhiRMKfJ1u3boBwMiRI6UenTo1tH79OF/f1PwP+nTp8lflyseaNLm/bl3+xzMyMnR0dDgcTkREhAJbSEg59u78+Rt16iT06KHshqgEsVi8e/duU1NTAGA7/SwtLQuugPg5mzdvZjHnlStXFHhZ8kOCgoImTZrE/iGMjIycnJx69eo1ffr033//XSAQAICHx/y0tG+e8ujRo6ZNmwLAwoULMzMz/3/g4EH098ezZ/Hff7FRI0XeLckWHIxeXvjqFZqb542+sv95eOCrV3kh6neJxeJbt26lp6crvHn+/j/QDEK+q+z0g6obECJiv34IgFOnKuZqycnJNjY2kk2JrVoN7tQJly7F27cVc31VIRZjrVoIgPPm/v+RzZtRIMBhwxQ4VBkfH29lZaWurv7582epJ8yYgQC4Zs03D0pyzCxevDj/40eOHAGAxo0bK6Rt5OdlZmYGBwfT6rUy7dUrBEBra2W3Q4UkJib269dPKBTWqFHjyZMnir04W1UBAO3bt1fslUkRJSUlsYK6xsbGBQqN6Ovr83i8CRMmiESiAs/as2dP37591dTUOBxOfHx8wYuKxdi+PbLlxSW/rPTgQVy4EP38sFcvbNMGW7ZEJyd0csIZM3DvXvz9dzx79jtXuHz5spOTk7q6uoaGRufOnX19fRW1buvlS/TzQz8/PHgQfXwUcsmSRf3gL6DM9IMqHRAGB2OXLnjvngIulZWVzRbkGBgYdOzY0dzc3M1tDRvWGj4c/f3x/HkaTyqagAA8dQpHjsTs7G8ef/gQMzLw7Fm8dk0hr5OTk1O7dm0A2Lx5s9QTNm/+/1bBxER88uTz8ePHNTU1Wec6ZMiQ/JOBffv2BYD169crpG3k5/32228AcEKhU8pEwXJzUShEDqcU7jKJxOTJk0tihVJ0dDSbeGR1C4NoE1ipE4vFvXr1AgBHR0d/f//80WD16tXNzc0BwNfXt/AT2bMAoHbt2tIvHReH1aqlVq8+ZcCA3Nzc/EciIyPPnDkTHR1dOM5UOEdHBMBDh+Sd8/HjRzYNXrFiRcnbNzIyGj16dMENID9IJMKKFdHQENm0+tGjP3OxUkL94C+gzPSDKh0QsnQzPx+tiUTYpw+6uNxh6zH++usvRHz3LufwYZw2DU+dQl9fHDgQ/f0
V1O7yTX7Ix/7NFMTHx4fL5XI4nD179hQ+GhSES5bg9Omor48A2KzZfta1mJiYSLoZPT295s2bjxgxQl1dncPhvH37VoHNKwlPn+KWLejpiUlrvXD2bFyxopT3h5SaqVOnAoCnp6eyG0Lk2rULL11SdiNUSE5ODgsMpG6c/hmbNm1ii1FZWNivXz/FXp9818aNG9lMYHh4+KdPn3bt2rVo0aIhQ4a4uLgsXrz4r7/+AgA3N7fCT7SwsGA92tixY2VdPDsszKFiRTYYevjw4Tlz5rRv316yZKZnz579+/cXK7SDLmznTgTASpUwK0v6C+Xk5Li4uLDkRrm5ue/evdu8ebNk4trZ2TkmJqbYr379et5ETgm/S0WifvDXUDb6QZUOCBctQnd33LcPp0z5qWhtwgQEQDU1dHJ6aGVlXWAXWUwM6uqinh5SrpkiiYjAESOwtPbXbt26lQ1p+3/9BohEeOoUNmuG9vZ47BhOmYIAqKWFHTs+7Ny588SJE5ctWzZ16tQ2bdrkL1FoYWHB5XI9PDw+ffpUOi0vBn9/XLo0bz/Gl9rNEQBtbRU46Vqm7Nq506FSpT9nzvzJ6+Tm5q5atWrr1q0eHh73FLKcgDCZmThtGu7dWy6/fmXW2bNn2XyRwq9cv359lrEGAHg8Ho/Ho730pSkgIEAgEHA4nKMypq5SUlJYJtjg4OD8j79+/RoAhEIhAOzfv1/OS9y6dUsgEBgYGOSfezQwMGjatCl7+pIlSxT5lgrJzcX27b80bLho06ZNUk+YNWsWAFhZWRUI/J4+fdqhQwcA8PDwKParL14caGOTPXt2sS+gBNQPlnVlqR9U3YAwIAD5fORy8caNn7qOSIQeHigQIJ+Penr49Gla4XPq1EEAmiEsmtBQ5HJRXb3UcorNnj0bADQ1NS9duuTl5dW4cVd1dQRAY2N8/RpTU1HOkGJMTMylS5c2b97cv39/Ni7u5vbHrFkYG1s6bf8Bvr44fTouWIDjx+Ps2Ri9zhs9PXHnTrx8Gd3df/bPoAxiw7lNmvzMNRISEtiSG11dXXb34+Dg4OnpGa2ozIzlV05OztatW7ds2bJy5cqPHz8WPBwZiU2aIAAWqviiEiTJG69fxzVrMCdH6llpaWkhISGyNjkXyYMHOGYMbtqU+t9/b968EYlELBmywhOBvnjxgq2YePDgAYfDMTY2BoBx48Yp9lWILFlZWba2tgAwbdo0OadNnDgRAEaPHp3/wb1790oCwu8ucrGzs+NwOM7OzosXLz5+/Dg7/+TJk+rq6my5jbe398+/HTnYUlgDA4PCex3PnDnD4XD4fP4Nad3Zli1bAGDChAnFe92cnBy2OCg4WGG1ykoD9YNK9Wv1gyoaEKalpbm7X+fxcMaMQsd+cAUdSzn17BlevCgzwl+yBO3sshYsoD0VRdO1K3I4URs2KPiy7F+20CptsVg8bNgwAGArfgGgZ8/H27bhj+YnCw4O7t69Z+XKWQCoo/P9je+ljGXqOXiw0IGFC/O2upYz0dEIgPr6ck5JSUkZO3bs7Nmz27Rp4+vrm/3tttXQ0FAHBwcAMDY23r1794wZMyRzwgKBoFevXlJ+3wkiIsbGxrZq1QoAdHR02DJC9gmz1A5BQUETXF1RSwutrPDBA2U3VhkkyRtbtsy7Xft2Mu3Lly+bN2+2sLCoVKmSpqbmpEmTvsn9WEQBAThqFFsS8K5ZM3bTz+fzSyIZ8unTp11dXQcMGICId+7cCQgIqFWr1qBBgxT7KkSW+Pj4bt26mZiYnD179s6dO7JOe/XqFYfD0dTUTEhISE1NffPmzd27d0+ePDlhwgQAqFChgvxXefLkCfsWDRw4MH9+Wn9/f/a9YsttLpXk4reYmBgzMzNWV1NbW9vAwMDAwMDc3LxSpUr6+vogu/ITW087tVhZBNPT09mKaAcHh59rfqmjflB5frl+UEUDwvHjxwNAly7LpXSyZ8/i2bNYqICPLDt24LBh+P69vHOePQthe88
K7MYmUsXfutWpUiVTU1MF54w+exZPncKaNXHSJMy3/V0sFs+bN48to3JycvL29v6Zf6YnT9DdHTU0cN8+fPUKX7yQtj21GPd2P+fDB+RwUE0NAXDXrm+PsQxX+vo5aVImtyUSEhL++eefLl26TJw40d/fvyTSeStegwbYpQvKeF/h4eEsq5CWlpZk3e/s2bNDQ0MR8cyZM2x5Vd26dSV3z7m5uRcvXnR3d1dTU9PU1Jw7d67UKxfJr7x188uXL3PmzJk5c+bEiRMfF3oXwcHBdnZ27MZx/vz57u7ubPIBAExNTX/77be8xWnDhpXFmfTSJBbj7dtYqRJyufOaNNm8ebNYLH7//v3UqVO1tbXZJ2ZnZ8dyRdatW/fZs2c/dv2zZ7FVKwRADueUnZ26ujoAqKurN2/eXOFvhZXSateu3ZYtWxAxISGBbWZT+AsRqdLT042MjNh3pnXr1lLPSUhIkJSXkPxJSlhbW5uYmMjfYjd37ly2HpjL5UZGRuY/tGPHDvhazkRPT+/p06eKfHv5sNFblrioABMTEx6PJ2tz7Jo1awBg1qxZRXyhW7dw/XocNAh/+60fiz+tra2b/NxUm3JQP1gyyl8/qIoB4cOHDzkcjkAgePToUcFjXbrkbbHq1auIV+vbFwHwu7NZ9vb2AHD16tUfba1qaty4MQAsXLjw8+fPCtunnpODDRogl4sAmf37p6SkIGJOTs6oUaNYJ8eyASnEx494/Dj++Sfa2eG2bd8eO30aLS0xMPDnfwqzs7NPnTp16NChW7duyT/T2/ucm9sFc/NoHg9fvSp49PrQoU0sLQ9KmT3MExMTU7duXQBgY8DstrJNmzaenp7Pnz//mbdQstgnvH077tiB3+bdvnbtGlv/U61atcOHD3M4HMmPNYfDqVKlCnun/fv3T5PWj0ZGRrLFMwXuiopq9WocNw4PHCje21KukJAQNmAs+cScnJz++uuvxMRERDx8+DC7sWDz7V26dEHExMRELy+vevXqAYCBgQHbbVtgHFpVsO9k/ipmiYlBM2awT7JKlSrsc+NwOL/99tvVq1evX7/O5XLZR62uru7p6fkDuRzFYlywAKtWRT5/a7NmkvvmUaNGKfxtsYQlrOVisTgrK4vdtSv8hVRKSkrKjBkz1q1bt2HDhhwZ64ol2PY5FikVjsfEYnGXLl2EQqGRkRG7xdfU1LS2tm7UqFGnTp2GDBliY2MDAC4uLnLmoqtUqSIn5pwxYwYAWFpacrncXQXHHRUj9t49M21tNkTi7OycnJyckJCQkJDw6dOn8PBw1pVXr15d6njl8uXLAWD+/PlFeSF//7ylfADo4DCcy+WamJiwv81135Yg/gVQP1gCymU/qMiAMCEBX7zABw/w6tXPx48f379//4ULFxR4fUUJDg62t7dv2rSplGNdu+b9BhS5jNLhwwiA8odcxWJxvXr16tatq/DEbuXVqlWrJGPkampqFSpUaNy4cdeuXYu33iPPzZtsjkykrj7J0bFOnTrPnz9nG821tLROnz6tuObnGTQIAdDMDFnvnJOTE71iBXI4CIAWFujrW+xtxM+ePZs9ezZbucG264waNUrOL0uPHj3Yh1mvXqsCh5KSklhmnY4dO0p9blRUFBtBtLe3X7JkCbsxlUSGJiYmZTd1xNmzePlyXnljLa3DM2feuHFDLBZ7eXmx26aOHTsmJSWdOnVKkiaBz+cLhUIdHR0NDQ1PT085gxEsc93ly5d/uFVsDQ+brt29+2feX+nz9/dn67Lq1Klz+vTpiRMnGhoaSsYI6taty74YbEC9fv367969y//048ePszNLIUN9WfT6NR46hNeu5S0ZzcfX11dTU9POzo7L5Xbu3Pn+/fvs8du3b7NhZvaRAkCbNm1+eH4+K+vNixfHjh3r2bMnAIwcOVJRb0hiz549bFgNANi9I/sTU1T9NxUUGRnJ8vSwf/ratWtL3Ron8fbtW5bLB6TtlGPzY+wfpX79+oU34H369Il
VUe7Tp4/U3727d+9Kbn//+eefwieIRKJt27b5+PjIihh/Vk4O1q2bY2KywNZWTU2tcNCbmZnp5NTQzc1z8mQpPy+LFi2CoqW9efAAjx3DkSOxRg0EwHr1trDbfT8/Py6X6+q6/8gRxbyhUkL9oKKV137wZwNCyUCnvz9On54XTDk7v5ZEzB06dCjO5oeSdO3aNTbmKmVUY9Ag5HCytbQyf/utiFdLScGmTaNcXP6Rk15yypQpAKCrq/tT6QFURmpqarVq1QDA3Nw8f40HFn6wccFiXvrWLTQx2dWwIevV2OCNkZHR7du3FfoO8mRmYpcuaGgY6+TkeuHChdatW9cxNc3V0cn7O1m9+scvmLly5Uo2LsXUrFmTFRRmfXBCQkLhZwUGBrLBKj09vYmsruJXDx8+tLOzmzlzJo/H09XV7dmz57///pv/m/zp06caNWqwYdfIyEgvLy+2Gp6xt7cXCoUGBgZl7Xctj1iM2dm4dy+2aCEyNtb++s/Nhj8XLlwoaXZmZqavr2/nzp15PJ6Xlxf7uGLlLuSYP/+wi4vXP//8+PaJc+cQADU1EQALL1Ioq8RisaenJ1sS5u7unpqayh7P/9GxvT2sCNjvv/+ekZGR/wqsFyxi7oryqVMnFAhQxtgTK5tWOCfHly9ffv/9d/YXp6enV79+/WL/Xh08eFRf327UqHnFe7ocfn5+kn9ctuyQ3TBJ/UUi3/D3x3v3CtTdffr0KZuyq1y58u7du9lvPofDGTx4cJTsdGtdunRh3xMdHZ2UfLvl7969y4rOw9eiFFKf/uzZM/avtmDBgsJHWflKfX19gUAgpXj9V3369AHZG/l+CisNLBQiQNTSpVJPefhQJBAgh4P//Zf3SHY2XrlyZ8qUKUZGRqamprLKDku8eYMcDlarhiIR7tiBtWrh5MmvN27cyKLxbdteAKCGBt69q9C3VqKoH1Sc8t0P/mxAyAY6o6Lw+HGcMAHt7dHJCYcPf9u1a9d+/fqxT2ThwoUKaatChIWF8fl8dgM9o1BKmcnjxrHf0zp16hT9mt26dQOAv//+W+rRVatWsdijbM6XlkEjRoxgwyfTp0/Pzs7OzMx89+7d7du3jx07VqlSJQDYuHHj969SeGkWIiImhoWx5bs8Ho/P51erVk1W76gQmZk4fPh0ydi5UCgcWa0aCgS4eDH+eBB16NAhVjDK0NDQw8Pj5s2biDhkyBD2cQFAlSpVXn59v5GRkZs3b2ZjzOzXv3fv3vln8/755x/2LMmCNPbl53A49erVmzt3rq+vL/vA69WrJ+kVsrKyLl++PHPmzFq1al29epWd8PDhQ0V8WiUoLjx8/vz5lpaWhoaGOjo6R2SM8UZERKSlpTVs2BAA2Mcry7p1CID54+vY2Nht27YdOXJE/nDpxS1bbri4vK5QIbdChV+oFs24ceMAgMvlrly5UuqAcVBQEBv2evXqldTV18+ePQMADQ0NADh37lzJN7mMiY5GNTXk86XmT2YfjlAo5HK5x48fL3zCkSNHWOIHAJg37wcjuq+r00+cQADs2rU4zZfv3Llzkn9ctteoQoUKAPDhwwfFv1i5wf5d7OzYnfEfgwcvXLjw/Pnzko1bTZs2ZdF1RkbGkiVL2E+0vr7+vn37pF7v/Pnz7G+wwE/QoEGDWB/E4XCOHTsmp0Xnz59n0xrbt28vcOjYsWOmpqbTp09/8eKFrKenpqZqaWmVRNYiRMTr19HKCgGwQgU5WR5WrUIOBxcvxv/+w379UF8f3dymsK5txIgR332RVasQAAcOlHnC6NEIgL9oAl3qB39S+e4HFbNkVEbZG7x79y6Xy+Xz+WXqfrFFixbsrldLS6vA4MeMr3s57Ozs8j9eKKz4BlstU7Vq1ePHjxeYJ9y3bx+Hw+FyuYcPH1bomyi3jh49KgmfatSokX9xFNtNDgBWVlbyFl5nZ+Mff+DJk1KXZiFicnIym4Fs3Lix/NEvhcjKyurZs6dAIGBvytLS8l2xBvh
FIhFbRnvgwIH8+0kePXrE5hZYXGdgYDBnzpy2bduyl2PRYLt27dhaf1NTU3ZDMGbMGHaUdf9OTk7379/39vZ2d3eXpJZmI8qNGzdmy+KlGj58eFFD9DIgOzub3VQ5ODjI2QoyePBgANi5c6ecS505gwDYti0mJuLu3ThgQDL7JC0tLQFg0qRJspbZ9O7dm328DRo0+Nn3U1o+ffo0aNAgDQ2NEydOyDmNjTvIWiuRlZUlSUUoq4xYOXb7n3/i6tcXd+sm9Shb0cc+HzkbciZNmlScT+/YMTxyBBMTL15EACyJ1Xw3btxg/eayZcvi4uLw67b5hQsXXrx48cMXCgulOXsWfX2Rx0NNTRGXa/B1SI79dPfr16/A6qrw8PBOnToBwNSpUyVTE/mJxeJq1aqZmppyOJwKFSo0b9580KBBCxYs2LRpE5v6m1mEenReXl4AMHTo0AKPHz9+nN3MyPkR2L9/PwA4OzsX6e3/qIAADAjA9etRbkybm4s3bnyzZq1Ll3tz5869c+dOUVay9O9/38wsV84Okpwc3LULz57FV6++c2dYZlE/WDzlvh8s8aQyLJ9nw4YNy06CzQsXLsDX5YJLvy48CA8P9/b2btCgAYv+BQJBmzZtFi9+vmkT+vujl5e8KoIzZ87U1NSU7KqqWLGiu7v7+vXr165dy/42vrtKQUVlZGBSUv4HPnz4IFmKLRQKCyRu+ueffyQTWT4+PjIvO2sWAqC7u6zj4eGRGhoaHA7nWmlVAs3JyWnZsiUAWFtbF7tyfVhYGHv7ampqBWabP3z4wHYqszUeLG4UCoWdO3eW5Dh++/ZtmzZt2Gfr7u6+ePFiScQ4ePDg/IF3ZmbmxYsXp02bxq6zatUqOdWKvb29AaCbjNvcMqhOnTrsXQ8ZMkTWOStXrgSA6dOny7lOXBweO4avX2OzZpLcA706deo0cuRItgDB3d29wFoRhq0Eg58rkVzKPnz4AAAmJibyT2vWrBkAXLlyRdYJVatWZe99zJgxim5jWcc6l2N+flKPurm5sU/Gyckp/+MikWjSpEmXLl1i97KsiqC8X78CRCI0N8/bt7x16717aGCAMjYL/5Tw8PD27ds3atRIJBI9fPgwIiKievXqkqGlKg+raD3Sqv+y/sdsylCfj1iM584hj4cAqV//NDQ1NdXU1MaNGyc1esnJyWGdl6wScHFxcb///rtkx6mEjY1N8+bNi5LBIjY2ls/na2pqphQq0cR+GDU0NO7mWzEZERFx/PjxRYsWhYWFsXhVgenZvnH2bBF33T98iAcO4NatuHkz/tACIDZ7Y2JilpX1nfvV48fRz+//S+R+OdQPFkO57wdLPCBMTU1le+I3KLys3E9gfTO7de7Tpw8bzJCwsrL62jfHsa/4hAn46hWGhGDhqPbvv/8GAD6f36dPn7Zt27KVHpJfdihyVisVIinKvHUrWyrzpXVrFxeXvn37sq8Ki9X//PPPAs/LzMxkCyYL3zZJfLx2DXk85PNRdi2mjh2xcePj48fPVuSb+p5Hjx5xuVx1dfUC24uZogw0SjYiA0DhPC6pqal9+vS5ePEiiwnXrVtXePeOWCz+66+/2AnsOhoaGnv27JH1iiwFBQDUrFlT1jnv378HAH19/bIz4iNf37592Ztq1KiRrHPYNHWnTp1knSASYU4O+vujjw9u2oStW+POnRgfn/cJ/Pfff+xWuHnz5pIp6ISEBG9vb7bHwNjYWCgULpWxDaYMEovF7B2xyR9Z2GJvWYvnEXHYsGGOjo4A0LBhQ4VlDy7bxGJxWFgYK4qtp6cnNR9MUlKSmpoa25cyZsyYQ4cOzZs3r2vXrp6enpLdg1ZWVrNmzWrUqBEA/FidNzMzdqf2cuwfL1/mFc5VuNzcXLZ6/MiRI82aNWPDoyYmJqNGjWrRsoXVEysIAv5D/q2UW2+y3oRlhim+Bb+uL1/Q3//l+vVubm4aGhpsSHTHjh1Sz71//z4A2Nvby79kTk7O27dvr12
7tmfPnsWLFw8dOvTOnTtFLCPObmlk5RhjS0tMTEzCwsJWrVolSUMCABMmTFBTU+Pz+XJ2Of6Uov1iiMVoZ4fq6vijCbA/ffrEeocfDVFkLZEry6gfLIZy3w+WRtmJs2fPshvQkSNHenh49O3bt3Pnzi1btnRyclq9evVLZcy4X7hwYeHChRoaGpIAw9TU1N3dffPmzYGBgdnZ2WFhYf7+/n//nTpuHLZpg2vW4N69aGGBdeti/uV+J06cYOvyJUm3RCLRixcvdu/ePWbMmClTppw7d65M/XuXCZKizEuXopYWAnxq1Ij9K1SoUEFXV3fVqlW9e/eW+rmxzNFsbeSpU6cKHI2NjbW0tFzQoEG6p6esFz9yBAFQTw+LO1FXfP379weA33//vcDj2dlSV7YWtGzZMslAg6ylLyw8kz+CFRER0bJly8GDB1epUkWSzFCqP/74A74uY5Pax4tEoszMTHYj+Ktk0F28eDH7GHV1dQt/x1JTU2fMmBEQEAAA5ubmUrNAJSdj9+44YQIeP47btqHU7AlPnjxh63gdHBw2btzYqVMnSeEsoVDIhpx0dHTuXbxYEu+xJAwe/HezZj537hScNMiPFaObNGnSu3fvpM6E5+bmjh8/Xl9fn8/nV65cecWKFTnlcY/Z8+fP//zzz1GjRjVu3FiSLblSpUqWlpZSs7f/+eefbCBMssyEkWT5l2Q2Zwv/njx58gOtcXREPj/LxOTfzvv9/fPK4YSGKuzNSrDKE3Xq1OFwOGyGKv+tXmJu4sO0h+e+nAvNDH2QViaqMJdB2dnZ7GOsV0/6iCcrj16UvXDF5uLiAgCytilmZ2e3bdsWAKpVq8a2JgqFQsn+cxsbG0NDw5IKCIvmzh0EwIoVv79JPzw83NfXd968eR06dDA3N2dvwdXVtdSWDikR9YPFU777wVKqQzh06FBnZ2coxNTU1NTUtBT2ceUnFuPOnWdYF9ugQYO///5bziZpiYcP0cYGAdDc/POoUWOio6OvXr3KfgdXrlxZ/NYUoxjdr1zK8/8kv0EpKQkhIdeuXdu3bx/7gZC10RkR4+LiOnXq1KlTJ0mR5UqVKjVv3rx3796TJk1iqyCcnZ3lzFZt2oQCQaHagKUiIiKCJY1gW2pzckT+/jhgAJqY4NGj+OoV+vvLGwN1d3f/7pAeG3z5bspvkUiUnZ393cJWwcHB8HUu0dfXt8DR3NzcoUOHtmvXrnr16qampjo6On379r1w8GAZX0Bz8OBB1hupqakVuHGJiIhg43a2trZsaS6Px2vTpo2vr69kqVVISEjTpp8B0MgI37/PS+UtNV3U+/fva9euzeVy2V0+j8dr3rz55s2bY2Njs7KyBg8eXN/BIUVXF78dR2Q78lu0aDF37twpU6aUXtb+7/2qsBoq0rLN/9+pU6cAwMnJaejQoYU/uvj4+Hbt2rEhBpY9uK+9PfJ42LEjyt2S8Wu5du0aq4cmoa6uzqp+A0Dfvn0L3H6dP39eV1eXLY7gcDiVK1fu3r37woULfX19Dx48OHjwYEnBaB6Px26n5NxwF15rMKBrV/b0rl27IuK+fcjlFr3O7g/IyMiQ3FWzUaTC+brESMOj35GVldWp03Fr62ypuTzYzqsSqvKHiJGRkWwly5dvC9bll5SUVKtWrfzppgFAW1u7WbNmrAdv2rTpD1dGUZxly27UrJkid50jIuLt27erV6+e/y0YGBi0bNkyODi4VJqpZNQPSqfa/WDpFaYPCgpau3atl5eXj4/PqVOnLl++/ODBA1dXVwAYPHhwqTUjKQnd3VFPL9HKyrbA1qnvSkvD+fPR1XUoG1ZhK0IL5PH/YWfP4pkzOHs2Fj0r19mzeOwYTp5cYANeOcAGRxs2bCjnnOTkZHbzxNI05WdhYaGtrS0rvxm7VfL3x4sXi5HgUzFYARIXF5fZs2dXqFDBwuI9AHI4uGABjh2bt/NR2hQCikSiypUrA0CVKlVGjx4t6/qenp4AMGXKFIW0Viw
WsyKHADB+/Pj8h7Kzs1mAyu5Q2d8CAKx1c0MOB52cyuwyGjbqaWpqWmCH6vXr11lWZDZUpK2t7ezsLBnONDc3nzVr1o4dO/T19WvVatqgQeabN4iIa9eyMqSJUudsk5KSjhw5Mn78+G3bthUY9hKLxUkrVuTtuti2Db98wd27sX37l1/3krGZpZYtW8rJ6KMYIhG+ffvd/TnLlyMAyslJkZKS0qFDB319fQ6HIxAI2AJI9tG9fv1aUsbX2Nj4ypUrIpHo/PnzT6ZOZUnksXfvcjLOhdiqVSt2N5D/p6levXovXrxg83uLFy+WnLxhwwa2lbdLly537twpvGsLEdPT01lCcz6fz+Px9PX15aR5KLzWgGWGgK8DSVFRqKGBXO4PL6grihUrVkjeckllFlEBCxciAPbrJ+UQW9P0qiRW/SIi4rp16wDAXfYOfObdu3fXrl1r27bt7NmzDx06FBISwn4AY2JiWD/Vu3dvpdQiyszMZKMSgYHyEhkmJydraGgIhcLffvtt4cKFx44de8N+0FUG9YMFUT9YmgGhVBEREWz4s/DyvxLSsiUCoIEBnjpVzGjq9evXHTt2BABDQ0MnJ6ef/dXbuRP19OQnQflGTg6uXYuslt20aT/10mVPWloai0Bu3bol6xxWZaF+/fpZWVlpaWmhoaE3btw4ePDghg0b2HCLrBLzBw/imjV44ADKSOtQGmJjY/X09CQLwNzd/1q+PG8o4Ny5vC/CoEEv8y8zePHixeLFi+3s7DQ1NXV0dGSlpcrNzc3MzBw4cCAA/Pvvv4pqMKto37179/zdRlZWVvfu3SXRoLa29pUrV8LCwrZu3Ro2ZEheZaEVK8rg/f2nT58aN27M5mnZXfvmzZvj4+MzMjLY2Dbbv1q1atXnz58jYkJCgpeXFxsuBQC2Q7hPnz6pqXlRe0YGdu/ubWBgWMwx+9270c4OT5/GnTtZpygyMenaufPevXvv3LnDFtvUqFGjcLWiqKiorVu3Xrp06e7P18N6+TLvN7FtW0mU8OrVqzZt2rRp08bd3cPdHd3dsV073LEDnzxBqRXIXr9+XbNmTQDQ0dHJXydTQ0NDX1/fx8eHfXR169Yt+F7i4nDzZrx5s+gZI8qyjx8/8ng8dXX1z58/Ozg49OrVa8mSJUeOHAkLC8OvOf05HM7+/ftzcnJYxjUOhzN79uyi7CyIjIxkT+FyudukLXL48AH9/TE0FKtXR1tbNDBAS8u67B9CTU3NxMTE29s7LS1t7Njcpk19Z8z44Tqo3xUXFycZp/Py8lL49VXEhw/I52PFipicjJGR+PgxXr4csW/fvnnz5rGb+JJ40aSkpGvXrrH1//JLU8j34sULtrFw1qxZCmyeTGlpePcu/v33yilTGjRowH7Aq1SpIv9Je/fuBQBXV9fSaGHZQ/2gFNQPKj0gxK8jUtbW1sWvNl4Ektmho0fRxeUHZuNkYauE1dXVH/1kSc1u3RAA1YWpk1ukJH/9HmRl4dq1GBSEAQF4/Pg3a4BEInRyQgCxpnriklYZGSF5j6ek4Pz5ePq01OJ7v5CFCxcCQI8ePTIzM/fv33/t2rWXL19KBs59fX0BQEtLS+rW040bNwJAixYtpF45ORn5fFRXR+UtZkFEHD16NAA0btz43r17BQ49f47t20cLBNoVK1a8dOnSpk2bWBVBRjJZN23atPxrYp89ezZ79mwLC4vt27ezHyP5OwN/yNatW6tWrcrlcp2cnBYvXhwYGCgWi+/fv6+hocH6DH19/YK/xenpeO4cHj6M//6LD5SwWWjWrFmXL1+WMlJz7954Z2cAqFChgru7uyT/k5aW1tChQwcOHMh6xw4dOhQejLxz507r1q3ZVE+Be/cDBw4AgKWlpdRE8N+XmYlnz+KVK9i7N9uSLzkSGRlZt25dNrj44MEDRExMTGQ78lkoXrlyZR6PJ2fzepHs3y8p94zv37PH7t27xz4cS8smbPRWIEB/fzxwANXUsE8fvHAhR/I
Jnz9/nt0C2tvbsz/MR48eTZw4UZIxmI36DRo0SOaKjKAgbNQI+/f/qTdSBqxevZrdKsk6ge3LFQqFLLGZurq6nBS+Uv31118cDofD4eTP0/b0KQ4ejGpquHgx+vmhunremLu1tRsAGBgYSNadGhgY9O8/gMvlCoVCOfUtio3tK+Pz+VSSvthevsT9+/HUKRwxIu/f0c0tiP3zWVtbGxgYKKqe9YcPuHIl9u6NbdvGsxV92traWlpaZ8+e/ZnLXrt2jQ16bt26VSHtlKdePfYZzapVi31EFhYWderUkT9S36FDBwCQOqpSblA/+GOoHywLAaFIJGratCl8XXuZkpISFRUVHh4eGBh47do1RfVYPj44ejQePKjI2SF2Z1+lSpVvVtu/f4+TJuGJE0Wd+X39OrtH8xen9QMD4cWLumJx7pfoM+jggADo4oJnzuCiRQXXAN26lTHI5clF7cBAeP26E6I4+dVBtLREAGzRQlbxvV9FdHS0hoYGl8tl2+EkmjRp8v79e/b3JmvFVHJyMluUFRAQUPjo2bMIgM2alfAb+B4fHx/4up+nsNjY2CZNmsDXlRIs4ho1atTNmzfFYrGPjw/b0de+ffukpKR79+7Vrl1b8hGZmJhoamrWrFlT6sKzYtu6dWv+JOZWVla///47K+RoYGAgM/j088urT1S6Hjx4wNpZ3cEhZ8UKyS877tqFQmGOldXQzp0ltZ59fX3btGnD7oTMzMw4HM7cuXNl3Umw/H516tQp8LhYLG7YsGH//v2Lvxda9uxQUlISKxaiqanZpEkTFoSziKJr1659+vRh/zlp0iSpzY6MjIyOjpa/ASNq+XKRoSECoLGx5MHExMSLFy9evHjx3Lkbvr7o64vTp2OLFtimDUuSj02b+lSoUGH27NkrVqxgX4/OnTsnfbuIPSMj4+DBg23btt2+fft3KlW+eIEAWLmyvHN+BbVq1QLZixQYlqeRy+VaWlo+KNaIiZeXF7tpW7Jkyc2bNzt37uzs7A2Aamp5qR1CQ/HNG0xIwIyMvI0rnz592rBhA7uvkihca+4nRUREmJub8/l8BS5SUEGsAz9+HIcORQsLrFMHR44MGzhw4KRJk9iQX61atWTt8SvKaLBkfJwVkQNATU2xtrZuo0aNWPkifX395z+3pHj37t0A0E/qslcFCgjABg1QUxN5vFP29pLfRpBbLCE2NpZlQy1i2tVfEfWD+VE/WETKDwgR8fHjx6zuDRQycuTIn437ERFxxw72j/fzV/q/jIwM1r/27dsXEV++fPlw7lxUU0MAtLEp+szvx4+zAwMhKEg9ImJIaGibwEBI/90Vq1XDM2dk/YW8edM/MBAePlR/+9bj5ctGgYGQ074hNmiAslda/kJGjhzJcvuqqalpampqamry+XxbW1sWhPTo0UPOc2fNmgUA/aUNsXh6PqlZM3XhQiVnNXj16hUb65J1AitRraamZmxs7OvrW6CGz+3bt9kS/1q1al29epXD4UgS1gOArq5uSaTtTU1NPXXqlIeHB1u8AQDVqlUzMzOTl+0wJgY5HNTUxFLbDo6IiJ8/f16yZImNjU3fpk0RALlcHDYMk5LQwiKvgEyhSlxhYWHz588/c+bM8ePH5Vw5OTmZw+Goq6sXTlkktc6SouTk5IwePbpixYo8Hk+yI5/15Yi4e/duNkrau3dvSTPi4+PZACqfz+/Ro0fNmjXlTCmwCpkOBgY7hg+X04yRIxEA//wTP37EFSvwt9+GsG+Cjo4Oh8NZunTpT6VTzs5GNTXkcrEkP8mSFhQUxMZl5FR7GzZsGFvXbWNj8+EnMsvt3r1bUkcUAGrVajhxIhZl3uj58+eLFy+uUKGCqakpl8udPVtx1Xeio0d27gwALVu2/M5fxNu3uHkzHjr0q69nKX1fvnxhg4AtWrSQeoNblNHggwdx2zZcswYPHsQZM3DfPnz2DHNzRYgoFotZNmxbW9ufSRaalJQkEAiEQmHJbv06exYnTkQA5HA
+dO7cu3fvlStX/vXXX/LnJ7dt2way62qUD9QPIvWDP65MBIQpKSl6enosaZWWlpapqWmlSpXq169fr149DoejoaER+kNJsj9+xPHj8c4dSTQVF5dlZIQAWChR4s969eoVa7aTkxOXy61pbCxWU0M+HwHw/PkiXiQ3NzksrHty8rUvX/4LDjZ79Mgg7u3fhf9c88vKeh8W1j021jst7eHDh1rBwZZJ7w4WsUpP2RcbG7tr1y7JRjsJR0fHSpUqya8A8/HjR4FAwOPxCu8RZ2kefqyEVwkQi8VsGlPW7PfQoUPZ+61Ro4bUE8LDw9k4sWQWkc/nd+zY0cfHp6Rzu4nF4sePH69evTooKIhti5Ijo0WLMGfn14VWxpYCkUiUcPky9uuH6uo4fz4GBODevfjTqfkqVKgAAIVzJ5Y0sVgcHR19/PhxqYOvFy9eZMt+GjduvGXLlvbt20tmdNXV1dnwgYWFxWtpi9sl30YAmDt3rpw21K+PAP8fcRKLxVevXmV5fRUyDzDwt9+MDA1/yRR/WVnBwcH79+9nq0DHjRsnq8OKiori8/ls+GbGjBlFvX5mZuHf9pSUlMqVK2toaBgZGU2aNElqcnM5RCLRpk2b2PdEVg30H/PlC9avnykUzu3bV9bk1ZMnT5YtW/a2b9+8aak+fX719SxKwaZhAWD4t3euKSl4+zb6+RWpziRLlih1Q3p6ejqrrO3k5JSamvrp0yd/f/+kTZuwd2+sXBmLtpp0165dUIR81z9LLMYPH/DatQIJ9g4ePMjhcHg83smTJ/M/npOT8+zZMxZRy6qrUZ5QP0j94A8pEwHhqlWr2K9P4W5p2LBhAODq6lrUsDsgAAcOZP1NQtu2ixYtCgkJGT16dIMGC/r1K5E9it7e3gDAfn34fP69Ro0QANu1w6KVZROLcmJi/n782OjDhxkJCUc+f14aHb1Z/lNEuemfPq14+FArImJ4aurdmJit8fF7FfFWypzY2NinT59euHDB29t71apVfn5+mZmZ331Wx44d1dTU1NXVHRwc3NzcBg4cOG3atJUrV3K5XIFAILUOWCljq/AL9FUSa3v1stHRkcw8S/Xly5f+/ftPmDChRo0anp6eUssEKR1LgLFq1SplNiI+HuPiFLVRm/3D/eQGm5Lw+PHjChUqSHLBsQFULy+vL1++JCcnd+jQwc3RMdPQEL9dyhgXF7dt2zYDAwM2+HL48GFZ18/KyqpXz61Fi8nJyd/8Du/Zs0dRHWG3bt3kt6Es+v13rFUL1dSaWFqyT97AwMDJyUkoFK6VVpZr/fr18DVhQ5G6fLbvYNw4dHHBfLPxWVlZ7du3B4BKlSr9zDTjqVOnWA6Y7t27/+zo/owZCIBVq6K0ZXg5OTkrV65kH9GfzZqhjg726UNxYLEFBgay/UijRo3y9PTs27dvixavuVwEwHnz0M8P5W/jSktDbW0EwNevpZ8QGxtbtWpVdgPN/tVeNWmSF8YPGlSU7TCsVqGcdLglbdGiRWzMdN++fdu3bx89enSjRo3Yt11bW7tly5Zy6mqUQ9QPUj9YBMoPCJOSsH37FxYWDaVO3SQlJbHRiO3btxfpcmfPorU1cjiorv7X1yIzLP1rUYoNFk/fvn2NjIwAgMvlLhk58gcSaVy5gvXrvgh0CAyE0NDW79+PCwzkBwWp/T9VTGGnTonqVX/yyCIwEN686R8W1jMwkPPokX5OToxC3suvLikpqWLFipJfhPyMjY3btGmj7AYiIi5ZsqRZs2ZnzpyRciw3FzU0EEBkbBwrI6FovnNlllssC1gGoEaNGil2T2NxKGjyfMKECQCQP5lH2fHhw4cTJ0707dt3586d8d9mQMvKyvoyejQCIJ+Pp05hYiJ6e2PnzhdbtWJ/Guzmsk+fPrIGXNhiyOrVqxd4nOUur1u37s+3f86cOcofPiiily/x8GF8/DgvoQWPt7lLl969e48ePZptfOBwOFwu90SholKSNH2Ojo5FeqGzZ/HPP/P
yHKipHVm+PCUlRSQSsT0zJiYmP7Z2Rprr16+zYfXi3yJ/+IABARgQgKNGyUrXtmnTJmNjYx0dnVGjRl0+dw6LMK5H5Js2bRqHw2Gb6gGgWbMDAgHWr4+nTuGzZ2hri3K22hw7dsXV9e927eTt9Xr58qWurq6urq5AINDQ0NhuZ5cXEPbs+d24IiYmhs/nq6mpyV/OU6LEYvGgQYNY+iXJPQCHw6lUqVLv3r3LwriwElA/SP2gXMoPCJcsQQBs00bmkhU/Pz8A0NXVfS/ZFytHWhr26IFCIaqpVdLV5fF4XC5XTU2tRDMg+/v7A4C5uXlIiOxArrCICLYvNXVau8TEo6mpdwIDOYGB/BfnzTKXekh/yq1b7Ec5aZN7cvKV+PiDgYEQFCR4fdIue/N8hbyXX928UaMAoGHDhjExMc+fP7906dLevXvXrFnDyiS0b99e2Q3MExAQIL3wUUgIArCYEEurFksJiY6OlhRPa968uaenJ0tSqux2FV+PHj10dHQ8PT2V3ZBi8fTEmjXx0qW8UkoAGQ4O3bp18/HxOX36NIsKmjVrVng1TmRkZL9+/QBg4MCBBQ59+fKFrer/+TWH0dHRv0yOh4MHcelSvHYNr1/HBw/ypy0+duyYpASztrZ2/mItnz59YrfXALB+/foivZBYjO3bs5jzua0tAFhZWbGiR7q6ug8fyqu0VnSPHj1iSxDr1Kmzdu3aH0jkdvMmdu6M6uq4b5/8CIHVLP1Hfjln8iMaNmzI7mKrV6++e/fuJ08+SHaZ7NrFBhCkFwpHRNYb/vHHH/JfIn/GsklOTtigAY4ahQe/vzlly5YtANClS5cfflcKdfHiRQDQ19cfOnTopk2brl27llTuSjcrBfWDBQ6Vm35QyQFhSkpe7bUbN+SdxmZR69evv3nz5pUrV65aFTJ2LA4Zgp6e+OqVtE3pCQmx+/a5ublJ+uaSyK8tkZyczOfzhUKN1NSi7eCSDDDPmIGrVkn2j757OTRpoStqaiBAwZX6kqcMGYLbt2Pe1JD4TVCHlPHNkM9HHg9/xe03iuXtLdbT29qy5etCS2Fu3brF/s6V0q4C2F7np0+fSjn28CG6uOT9Vfz6pXLDwsKaN2+eP/uFjY3NpEmTlN0u6UQikaxEZOnp6SwXiLa2tmQj+68nPR3PnsWzZ7FjR9y1C/Ple3j69Km1tTUAVKlShZW9zr8jHwB69uwptdaTpaUlAET8fCWfX4vse2K2A4J9aKNGjZI8/uHDhwMHDrAUUB8/fizqC8XH46RJqKXV1MxMsq1aQ0Pj+vXrP/kO8gsLC6v1NWs/l8tl66xkFYISiUSnTp1q0qRJSvXqCIA6Oih1sUM+LBlVyS3SUTVhYWEsnRgArF4tpZ7kvHlYsWJOnTr9CpQdf/Pmzb59+wQCAYfDuXLlipyXCA8P53A4mpqaO3bsePLkSU5OTlEalpycfPPmTVaBzcfH54felMJ5eHgAwLx585TbjF8O9YMq2w8qLSCUJD7288MFC75zcr9+/QQCgaQOW5MmR9nihVGj0M8P//lH5mYEVkXEzMxM4e0vYMCAt4aGYlkDcgVJBpgL+PgRtbQQAPX1CybAkfWUhw+Ry0UeDy0t8erV4jW+nAgPR11dBMDduwsffPz4MQDUrl271JslBRtAHTt2rPTDbIfG+/flJktQSkoKS1LKln/b29vPn1+2JrRzcnJmz549ZcoUAwMDDw+PAsVFP3z4wMbjtbW1jx49qqQ2KojsL9WHDx/Y5ngjIyNnZ+f8O/J79uzpL+NHtlWrVgAg66hqGjlyJADo6OisXr16/Pjxbm5ukqV9lpaWxSiHHXH3Lvun4fF4dnZ2JfEl3Lx5M3zdDM+aqq2t3bt37ylTpnh4ePTp06dr165t2rRxcnJq1KgRO2FTmza4ciV+L41kZGQkm6hRQOoagoiIuatXRzdosMra2lRLS+pKE7EYR4yYCQAVKlTYsmXLtGnTWrZsKUmbwRLM2tjYyNl8/ue
fr5s3HzJ8+JjvNubTJ1y9Gvv0wTZtkljCJHV1dS0tLT8F1vgqgHWROTkoO6bNzsaOHddUrGgnLxU2+Rb1g6ja/aDSAsKDB9HHB48f/35hwNevX/P5fDbR17hx47lz5/7117O//sI9ezAkBLduRW1tlDVaevnyZQBwcXFRePsLmD07bz93Ucn6Oq5alVfGsPCmbVlPmTgR58/HixeLVPawHIuMxNatUUYilvDwcACwtbUt5UZJNW/ePABYtmyZ9MMK2vldBonF4mvXrrGi2CWdELXooqKinJ2d4WsBK6Zx48ZsM/rNmzfNzMzYkKH0Sd1yJCUlpWPHjnZ2dvDtjnxZ52dkZNSsWbNChQrHjh0rzXaWcZmZmc2bN2frMCWMjIxYKFV4C0pR5OTk9OjRAwA6d+6s8AYjYlpaGqtxKpmE5HA4Li4uUIi6urqZmdnmzZuLuBGLbfr47bffSqLZKqpmTbbaTeTmJuuUjIyMBg0aSDJRM+bm5ix5O/ut2+DujjL+EWvXRgB5U7+SMf0LF/J2FwqFqK1t4OTkxBJ66+rqKiBZotQENmfP4unTWK8e8vmysp6yssO1apWTQdVSQP2ghMr2g8pcMlrEIYa7M2eaaWmxmLBwFWy2BbFqVZR6e8kKzowYMeKnG/sd584hAHbqpKDLFSMkKL9RxPexrikgAB8+lNW9xcTEAIBxvpKjSsRy58rMwFZeJgZlqV+/PgDIX7CkSPHxmJSEAQG4ZQsOHIiXL+f/hB8+fGhjY8OG0u/cuWNiYsLSBUvufdmESfv27RMSEkqpwUqVnZ0dHR29b98+tiM/PT1dVu6ijx8/sginSpUqRVxRpjpiYmIePnw4dOjQdevWXbhwgZWFyMzM5PP5fD6/eCk9nZ1doSQT0C1btoz9a9rZ2XE4nKtXr164cGHjxo1eXl4HDx4cMmQIl8tleRoLd8RyTJ8+HQCWLFlSQs1WOW/eII+HAkFeQTTZJk+ezP5BV65c6e/vz76EGRkZrKqEp5UVCgTYrRsW+gNn9bENDOQVkT14EL28cPVqPHwYp0/HPXswOBizs3MRUSwWDx48mM2Hy0r9UKTyk1u3oqenlBub9HR0dEQOh7UyQtpQOEs2v3Ll915CdVA/+CNUsx9UflKZ73j+HHm8dC2tk02ajJRWbTwrC2vVwnr14pct8yp8dNq0aQBQCptf09PRy0vGhsZiKEZIUN6jCHlYJSupIfG7dzGnT8+bN49lwRYKhcpoX0EsZXwZzNpcOthf5eLFi0vjxUJC0MEB27XD06exWTM2lP2oR49ly5a9f//ex8eHpYVs3rz558+fP3/+3KFDh/xL5rhcromJSc+ePX/1H/piiIuLa9++vYeHh4WFxaRJkwqMCt++fZulpK9YsWJg0UrsEESsXr06ABQjH8zLl6illdq27f6Sm1qPj49nE0qBgYEBAQH5D4nFYlaHAIqcIlUsFj948GDevHlsuey2bdtKpNGq6fNnnDoVmzVD2Ws+xWKxra0tANwolKEhNjb2aq9eyOHkRZVbtuQ/+vIlbtyI8+bhmjXfacWIEQiAUm+vsrKyWKXvevXq5c8yLRZjeDhGRBSh/OSnT8jjIY+Htrb4338Fjy5ZggAiTc3xNWpYWlrmL77y5QuGhuKJE9i9O36vVq7KoH6wuFSqHyzzAWHv3izHGnK5spKmPHjwjs9X4/P5+XvZrKyshIQEtq63dKZxjxxBPz+qsqs8BULip0/R1BQBcqys2O8ah8OpUaNGWRjfYtnbHkkrkKoKTp48CQBushc7KcqVkydzDAwQAJ2cMDYWP35ET0+sVKlL5coAwLa7AMCYMWPy76GPjIz09PSsUqXKmDFj2CK93dJ2pZZvkgFjXV1dyZ+Pq6urt7d3Wlra3r172fJCV1fXXyY1aNnAKkZ4e3v/6BPnzUMALOnFLlOnTgWAAQMGFHicbb5ga8m+H9pdvYoTJ6K1dcOvVey4XK6+vv4tSTln8pOKUAnwzp077D5
V+tbN0FA0MkIAbNGiwDzgwYO4cCH6+clabZMnKwsNDREAnz+XfkJ8fLy9vT37qd+1a9eUKVNGjDjK0qWNH4/+/hgSgpLMqFJs3IgAeSFr4TWKYnHquHE9qlRh30k2Czpp0v4qVZDDwdq10d9fcQP0vzjqB4tN1frBMh8QXriAtrYIgNKmByVYN2ZgYGBnZ2dgYJC/DJ2ent69e/dKupkxMaimho0alfTrkKIJCMA7d5DLRYEA1dWtNTQk34d///1XuU3Lzc11dHTk8/n29vYTJkyIP3kSCyU4Lt+SkpJ4PF6JbiMUi8Wenp5cLneSmxv275//1kYsEv3333/9+vUTCoV6enoLFixIlVbFWSwWp6SkLF26FABmz55dQu0smw4dOsQGjIVC4fTp02/dujVs2DDJZiSNr39NU6ZMUcEB4+9gd+rh4Sgjgcry5csBYMaMGT964W7dEKDEtwW8f//ewcGhT58+BcrDsHzrAKClpfWdcoUBAXnlGQHOdO06YcKEc+fO9e7dGwA0NTWlF18lP6oIO0QmTpz4nW/ajRu4bBneuyc1tgwLQ3v7AnOHBZpwy9X1L1dXeakmL1++rKGhIUlm4+w8FAAtLXHmTPT1xebNsV8/mWublvbqda9OnXR9faxVS+oJkklIyZ43KytnAFRXx6ZN8dgxGqCnfvCnqGA/WOYDQta/njqFr17JOSs1NXXRokVszyvD5/MNDAxY+H6q5Ou5sQKBFBCWFay/rFqV3ZesdHFxc3NjCzVbtGih3KadOHFCMiynzueLtbWRy8U6dTAqSrkNK01sG+H06dPzL/VRIFYalM/nyymey4pxaWtrN23aVNY5hw8fBoBu3bqVRCPLoNzc3NmzZ7MMXmwDSbNmzbKzsxExPT3d19e3TZs2GhoaOjo6f/31l7IbWyadPYu+vmhhga1aobTdUz4+PgDQpk2bol9Skr3D319WmKlI7PZ64sSJ/31dpxcXF6eurs6+Fd/fkH/2LM6fj3Pn4v37kpv93NxcVgOAx+NJhuSyMzPx3Dn08MBKlfD6dXz8mCZ0iup7O0Ryc3NZTqOgoKDvXEpGbHngAHI4yOPJzCvTt29fAFgjd10pCyR4PJ6uru7SpUsvXrwt6eWePkVtbQRAqVsHWNELNTU1AY93bPNmWddPSEioUqUKAFSoUGHq1Kne3nufPsXycnOuANQPFo/K9oNlPiD8kVwpL1++DA8Pj4+Pz8zMZI8sWrSI3XeWWPvyeHtnAmChepVESVh/uX8/7t2LL16w26jk5ORmzbo1a3ZIuaViWrduDQAbNmy4deuWz4YN2Lo1amigiUneSK1q3BIdO3aMfQ4AUKlSpUmTJl28eFFW7aPimTNnzsWLF+WcMH/+fNYAfX19Wec8efIEAOzt7RXYsLLsr7/+YqMVkrt/yW8pIiYlJW3bto3daJbCsotfkliMrVuzdW5ZNWocPHhQciQ6Onrx4sV6enpaWlqGhoZFT6rEZjmOHy9YiqiEGBkZsb+L5cuXs0fWr18vmYf5zr+7gwO2bi2rEIWnpydbczV48OB+/frp6enlWFnlZahcsgSvXVP1CR3FYTXZK1Wq9P1TZceWixejmVlu/fojC2x5fffu3eHDh9kYgfw8+zVq1GDfpebNmxc+6u+P6uro6uqzZ8+e/I+npqaOHj2a3YtzOBypdTUkatasCb9+uv+SQ/1gMahsP1jmA8Kfy5Vy5cqVSpWqDxhQ4kll5s6dq62ts3bt9pJ+IfIz+vdHAFyxQmkNePHiBYfD0dbWTkpK+v+jmZn44kXe2IfK3BIdOnSoR48eOjo6kll9XV3d0ky0s2/fPgAQCAQAECVjejYzM5MlW8vfH5Rjbdu2VVdXnzhxIp/PL5yLKy4uDgDYgvwC93Akz7t3aGzM9j4NtLMDAHd39wcPHowaNUqysI1tSjE2Ng4PDy/iVS9cQC5XcVms5WJ1mQFgzpw57JGXL1927NgRvlvH9e1bBEAjIzm99p9//sn
hcExMTNhL3B8wAJcsweBglU6KVgIiIyObN29uY2OTP6HLjxKLcezYBQBgbm6+ZcuWmTNntmnTRjJeYGpqyuPxrKysPn78KP35jx8nOjpusrGpZmi4Rcba0127zrEf4fXr169du7Z///4ODg4snYmampqmpmbNmjXltDA4OJj9KWXL24xI5KF+sDCV7QfLfED4czIyUF0duVws6UwivXr1AoD848GkDPL3RwBU4jjXhAkTQFZJepW8JcrNzQ0MDFy8eLGTkxOHw6ldu/b58+dL56UfPHgAAGyTwLVCyxCys7MnTJhw+vRpS0tLAHguK3NC+cJyYD569CgkJETqCcbGxux2cO7cuaXctl9GdDR26XK3RQs2xcHj8dj6cC6X6+7uHhgYKBKJunbtam/f18Ul43tF3fNcuZKX/qMUsO8AWzWKiGKx+P79+2yq50+5RQ4yfH2LUnypQoUKbNmO/Jkf8jPi4+PZWsouXbrISpdfFFlZWU2aNGE/khImJib5ixmu7N4dpYads2blZQRUV8+QnXJj+vTpHA5HktOSBSf16tVr0KABW8ooZ9XrnDlzAGDMmDHy38Xly5djVWyjftFRP1iYyvaD5TwgREQ3NwTAkydL9lVYf1kO0s6Wb8+e4aRJuGOH/B2pJSU5OZnlqnry5IkSXr7MY1kQSm3nekpKCrsRsbS0PPntD0RMTIybmxsAaGlpAcC4ceNKLv9NmdKzZ0/5A1uscjEAdO/evTQb9osRi//Zvp3dY5mZmampqQ0ePPhlvqXgX74kN2ggAsD27Yu05SkgAAGwYcMSbLIEuxEHgE6dOk2aNMnKygoAWBaovn37ypmKmTJliqmx8SHZO74QMTY2lsPhaGlplZs0DGVWeHg4m4kdPXr0z1yHBV12dnbLli07ffo02/WdmZnp6uoKAEsqVEChEDt2LFzMEO3t89YDt28v5/o3btwAAD09vYkTJ/77778PHz5kewfEYvHQoUPNzCydnR++eyfliWKxmJUOv379upzrZ2Zm6uvr8/l8mTOZqo36wcJUth8s/wHhhg3YowcWqsSjSIcOHQKAbt260bqFMk6yG2fvXixWaeifsnr1agBoWDq3db+gCxcuAEDjxo1L5+VEIpGhoSEALF26NDHfTM2jR49Y/S62ikZdXb2cLQuRY968eSC3ROTIkSNZR+jg4FCK7folhYSE1KpVC2Qk3oiIQFNTrFmzSMmknj17qa6uX6NGDcW3spAWLVqwr71kxsba2rp79+4svp06bBjKuClk1ZkvXbok5+KnTp2CMpDZS0XcvHmT/Tv+8ccfxb4IG+z+r1AlwPj4+P/c3f9fzHD9+oLPTErCOXOwfXvcu1fO9ceNGydrHDArK6tHj88AWLs2Fkht+/bt2zVr1gCAhYWF9LoaXx07dgwA6tevL+ccVUb9YGEq2w+W84BQkp+t5GaE0tLS2J6QHTt2lNRrEIV69gzt7fF7y0wULDg4WFtbmw1VTpo06We2dpRXaWlpAoGAz+d/J6+9IiQmJrJtUWwTo1AodHd3P3XqVG5uLstnwFZDWVpalrNd4/J5e3sDQN++fWWdcPny5Q0bNnC5XDU1NRr/+q5u3bqB7EK4J07ggwfo749Ll+L69RgZKfM6ERERAGBjY1NC7czPxcWF5etm2Z5u3rzJSlDcv3+/Xs2a7y0tsUkTjIuTnJ+ZmXn27NmRI0dqampyOJy3b9/KuTi70ypn66zKssOHD3M4HC6Xu2jRon379m3cuHHu3LnDhw/v0qVLkyZNYmJi5C8offjwIVsjKn1GNywMTUwQAJs1kzLCWoRiiTk5OezLJqsqb2Ii1qiBJiYYHIznzuH06di6NbZqtZndjleuXNnOzi5D7uCuu7s7AKxbt05+S1QT9YNSqWw/WM4DQsmM0MKF+HPrJmSaNWsWADg5OckfpiJlR3AwamgggPyBS0WKjo5nowbVqlXjcrmGGhrpDRuWUtLAX0qzZs2gFFLGvXhxu3dvdqO
zcOHCtm3bSorz2tnZ9erVi+0Xb9as2adPn0q2JWXM/fv3AcDR0fHZs2cLFy4snPUkPj6+TZs2HA5n3bp1YpXc9fpD3gwa9NjN7ePdu1KPsu7p2LG8m2oeDz08Nu/du5eNFqWnpwcGBu7evXv69Ols4ZacNICK8vr1aw6Ho6mp+Vjarbz4+XOsWDFvo2BmJh46hH37Ztnbs78d9kdUs2ZNOWvzWOG4kyW9hYPks3LlSh6Px+Z5CtDW1pb/bzFz5kwAmDBhgswz7tzBBQukFzMsQor48+fPf3eaJSICw8PR3x/d3fOWoLZocdfMzKxVq1Ysw02/fv1k/RYlJycXZZxCRVE/KIPK9oPlPCBkkpLQwAABcOtWBV/51atXQqGQy+Wq1PBJObB9O1pY5DZqNLUUNklnZWGLFujmdrV5c+fMzMx79+7dGzgwr2cbPrykX/3XwiYQSnYb4cmTqKuLHM6Bfv0kdwkfP3709PSsVKkS6x0BYPDgwfIHnsulyMhIDofD5/MlRUGcnJy8vLxYzeLg4GDJR3T16lVlN/ZXULkyAshfoJKbiydOYM+eqKeXrqurBwCampq2trb502wAAEuALj+D/M9bvmQJAPz+++8yz3j3Dp2d8fBhDAhALS32Ozazc+dly5ZdvnzZ0dGRzWRKTcaQnJzMyjrLSmZISsKbN2/YVI8kuSszdOhQAGjXrp2sJ4rFYrZo8Pbt2995DamxXxFulFkbli5d+t0zjx/H+fNx+XI8cwYjI/OmK58/f86q3s+bN0/qs1gKTVdX1+9eX+VQPyibyvaDKhEQIqKfHwJgzZppDx9+Zw3DD/ntt98AwMPDQ4HXJKVjxIipbDw7LS2t5F5FLMZBgxAAbW0xOvprnT2RCL28UE8PFy5UndqDRfHff/9BSW8xGjECAbBfPyz07y4SiS5evHj8+PHv3wCVR6GhoSy7GgtFeDyeJCbx9fX19fVlqQXq1av3TmqSB1KYjg4CFNz/JENCQvr27dtNTU35fD6fz+dyuZUqVercufPixYt9fX0nT54MP1jR/oeJxeLq1T80afLku7V/WQCweDFu2oT56romJCQ0b94cAAwNDe9+nRdNSEjw9fUdPHiwtra2iYmJjo6Oav59KcvevXsBoHv37u3atRMKhRUqVKhbt+5vv/328OFDLS2tGjUGh4ZKv+Nn6V4qVqz4/TmQYk2SZGZm6unpAUBoaOh3T5bVSV69epVNfv7999/5H09PTw8ICKhXrx4AbNu2rRjNK+eoH5RBlftBVQkIEXHRohcaGoZVq1ZNTk7++auJxeL58+draGhoa2vH5dtQQX4VKSkpVlZWWlpa7du3//z5cwm9Sm4uTpiAOjooJbFodDSeOaNStQe/Kz09/f79+z+TJ/07AgLw/n3cs0c1i3wgYv/+/VeuXCllUZ+/v0+7dmyIZP369a6urmxKCgC0tLRat27N/nPo0KEqOGBcfIcPo4wKbLKwW1gAGDZsWP7Hv3z5wu6eS3A1ys2bCIAVKkjJGFmA7D+ftLQ0titJS0tr/PjxrVu35vP5kklOtmFMS0vryoULCm48kWHRoqAGDRZu3ryz8F/uzJmJADhpkvQnRkVFtWvXztLS8puquYqTkZExbtw4TU3Np0+ffvdkOZ3kv//+CwBqampr167dsGHDoEGDatasyb51fD5fR0dn9+7dim35L4/6QeoHpVGhgDAjI8PCwoINlf1kTBgXF9elSxcAUFdX53A4UpPIkTLO399fIBCwRfOmpqZnzpxR7PXj4vDMmbyERpcvy5gCVNWfY+XIzcXly3HaNGW3Q2meP3/O+jahUJjo7o5Hj2JWForFuHo18ngoEGwdO1aS7ujVq1eLFy9me18tLS25XG7hEr3kO1hejR9ZAsDK9EldBcf2q7u7u8t6bmJiYnh4+OPHj+/du8fG+A8ePPj69euitnb9euRy8acXbOfk5AwfPlxSp47H4zVv3nzz5s0fP37Mzc0dNWpUDWPjrMq
VkW7TS0XNmgiAUvexPnuGAKijI30OOykpycHBAQA6dOggKplBOlbAuVKlSjExMT9znblz53I4HLbnjVFTU6tTp07Dhg3ZGuyAgABFtfmXR/0g9YMyqFBAuHHjRtY5CQQCCwsLb2/v4m0GDQgIYNVv9PT0hg0bxuVytbRMFy2Kp5wyv5CLFy+yfNyjRo1i49kcDsfDw0NRy0d9fdHUFIVC3LIF/fxoCrBsEItRWxsBMCFB2U1RDrYWaPDgwX2cnfN2sVavjjk52KoVcrm4fHnhEYrc3NyVK1cCgKOjozKa/ItjSyuL/vcvFudYW8cYG0c7Ob0vlLY6KipKXV2dy+W+krYp8ezZs927d4dC3N3dV61a9d3XxfBwDAjA8+dREcslPn/+zO63vL29E779cxOLxUnLliEAcrm4c+fPvxaRIyEBuVzU0MCsLOkn/PYbenigrLrxb968sTA3v+rmhiNHlkTz0tPTmzRpsrpxY5GLS+G1i0UnFotv3LgxcuTIsWPH7tixIzAwMDMzkx0aO3YsABgbG//AyEj5Rv0g9YMyqEpAmHPwoLu9PZvtlSwIdnNzK8paBQmxWLx582Y2CtWwYUOWeujw4cNNm0YB4IABMn9zSenLysp6+PDhxIkTt23bViDDwc2bN1lug5EjR4rFYvbPygaza9SoITW9XtF9/Pixe/ce9vaPALBVq/z7a0gZUL8+AuCdO8puh5KJY2Nx82asUwenTMGAALx+XU7Ewm7uSyHFZTn0o2OO8fEIgGpqCIA+PoWPjxgxAgAGDhxY4PHIyEgNDQ2hUGhoaKipqamlpaWhoaGurl67dm0OhyMQCJ49eybzRS9exAYN0Nwcjx37blrIImLF3+TteNy8GfX0cO9e2kRdoj5/xsmTpdRYysjA8eMxIgLfvPlOXa6Ue/fysnKXTOWGxKiovNxLvXtjCQyr5+TktGvXDgCqV6+eoKohUEHUDyIi9YOFqEZAeP48CgS5mppDray4XO7+/fu9vb1NTU3ZEvNJkybJXyIvFouDg4P/+OOP9u3bs6mkWbNm5a89cukS6uoil4slnAGOFFVOTk7Pnj01NDQkw+RmZmbu7u5eXl4HDhxgJXeGDx+ev1JIUFCQtbU1ANjb2/ft2/fo0aPpMuovy/L+/fvVq1ezfT4NG7bauZMWhJY5T6dM2erqeufgQWU3pMzIzCxKdvgVrq5n3NyyKDlkSXv5EgFQUxMBsFCF9/fv3zs5OQmFQg0NjQKZeMeMGQMAffr0mT9/fv7pwb/++osdaty4sZStuf7+2LBh3hi5lRU+fKio93FjzRr3pk03Ll8u76To6B+eQSU/QlKHedcuPHr0/4/Hxsa6uz8FQGNjNDLCv/9GPz+5F/LzQz4fN20qqYY+f476+ti9u5RihoqQnJzM8t+6ublJZg5VGfWDBVE/iIgqERCKxdi4cd4CFXX1y1/X4SQkJIwbN47NFlpYWFSrVq1z585LlsRt346XL2NKCkZF4Z9/Ys+e2LLlZ9a5sh2DNjY2hdeaBgXhjh15I2003Klcubm5AwYMYGt6586d269fP3Nzc8kdkqamJgD8/vvvhetGurq65p9Abtduef/+eOyYvE4qJyfn5s2bs2fPdnJy4nA4enp6XC7XyMjow4cPJfsmSbEsW7YMSrqsxS+nKOMWTZoggKKmj4hMubl4+jRu3Yr+/hgfn/+I+OjRDjVqAICRkRH7jRo7diz7EYuIiBAIBDwe78WLF2/evLl582ZgYGBYWNjHjx8zMjK+fPliZWUFAFvzlV3677//lvbujRwOAqCFBf7xh4LvxV1cEADPnv3OaTRmVpJYoH3oUF6hy99+w7CwrIiIiKpVq5qamuvrhwOgnl7R/qzDwoqxIfYHhITg3bvfLWRfbO/evbOwsKhatSp1zUj9oFTUD6pEQIiICQlYtSoCFN4uv3//fvha4gkATExEbLR040Y8dixv5FRLS1y5soOTkxOLCQHgxIkTUl/n+PG8DWP
SFvsQxfv8ueBfsUgkGjx4MADo6urm30f+/PnzrVu3duvWTSgUqqurF84QdefqVQBgCaydnZ0bNWpUqdIL9gWYNAmDg7FrV3R3R3d3HDt2soeHx8iRI9u1a8cSEEvGC9hedgCgLexlk6+vLwB07dpV2Q351fz+OwLg9u3KbocKKDxQnZGB48YhQHLt2n179YqPjz916hRb/tC9e/eMjIwhQ4aA3OKBp0+fBgAdHZ33799fu3bNxcWF/WS9b9sWN2zAH1wK8X3Z2aipiRwOxsYq+Mrkx4lE6O2Nxsaory+2sHAcO3asvb29UCi0sanq4JD46FGRL1Si07lZWbhkSeFZcQV6+vQpJYRnqB8spvLeD6pAQMiGtdLTccuWwmMArLjTpEmTnj59euzYcU9PHDECXVxw3z48fBhHjsRdu/DNG0TE5ORkAwMDDsAMB4cY2XnemPzLM4hiSZbBvHqFlSujmRn27Yve3tdDQkLEYvHo0aMBQEtL6/r161KfXrt2bQCQUk7U2TnC0bGlsbGGhsanT58QMSIC167Fxo3R2xt37cobHQBANTVNdjvFosEaNWrMnj374sWLb9++FQgEHA7HRFPz4owZJfwxkOIIDw/38PDw9vZWdkN+NX5+OG0a7TkpDYUHql+8QA0NVFfP34Vdv36drU5v3Lgxn89XU1Nje9pl6dGjB5tdZL9dxsbGa9asYXWWFS8lBZctK6E0JKR4oqNxzpz/2Ng3G/d0cnKKjPyRBEIlOp17+jQCIFWQLxXUDxZTee8HOYhYOC9ZueLvD1pa4OZW+IhIJLKysoqKigoMDGQTgBIhIQAADg7fnP/fhg0uO3dqvH0LWVlw+TK0alWCzSYyHDoEiKChAcnJMG8eREYClwv6+tUSEl7r6el9+fJFU1Pz3LlzbP1nYbNmzXp65MjCsWObzZz5/0fPn4cOHUAggNzct8uX286bJzny99+wezfMng1qapCdDQCQmLgDAPbu3Xv79u05c+asXr1acvKSceOaPX7cJjiYm50N4eFgbV0ynwEhpe7+fXj3DtasAQ8P6N8fdHSU3SAVEBICT56AvT2EhkK1alC3bv6Djx8/7tChQ1RUFACMHj3a09NTX19f6mX8/PyMjIy6du0qFApzcnLGjRs3d+5cFk+WlPv3QSCAihXhawhKyoLAwMBx48a5urqGhYUdOHAg/woXJRswAA4ehNWrYc4cZTeFENnKdz+o7Ii05Mke1jp37hwAODg4FD4kfVlEXBxqa+ftu6DZduXJPwEbGor//pvat29ftrVGXV3d1NRUznNzr1xBACyQO3jDBlRTQw4HtbQKZOBu3hwBcP/+gtc5ceIEADRt2vSbR588QQ4nL0mgCtf5IeXQ2bPYuzebJT/l4jJs2LBbt24pu03l3cGDuHSpnC0rYWFhTZs2FQqFzs7OOjo6gYGBhc+JiYnR1tZmBVdr1KiRmJhYgg2WOHsW+/ZFW1sMDS2NlyNFJiW3UOk6cuTInDlzgoODJY+kpaUtatfueZUqecuxCCmzynU/qAIBoWyDBg0CgOXyM6EVMHkyGhvjoEEqW8KlzNq9ezd83cj34sULmedlZeVF9ZGR/38wIAD37sUhQ3Dhwvznvn+foqWFmpr4tUjp/6WlpWloaHC53KgCKafatkUAtLPD48d/8h0RUoaIxZiWht7e6OrasWFDNp64evVqZTervPveOr179+5xuVwW7zk6OuZPf81MnToVAFhZnXPnzpVYQ7+VlpaXy83U9HPhbWrPn+Pbt3m7OYiKad26Nfv1qFGjxuLFi0NCQg4ePAgAzZo1U3bTCPmect0Pqm5AmJaWpq2tzeFw5G+9KCghAdPSqCcrgz5//szhcPh8vp2+/mX5u36HDcMBAzAs7P+PyMg4vGbNGi0ts6lTj0m9TIcOHQBgz5493zz64AFeu0bfEFKOvXr1avHixTY2NvJGXkhpYaW3Wci3du3a/IdYfUK2c6x58+al2qzUVOzQIdjVVVt
b+7///hOLxffu3XuzZg3a2+ctoDh7FvftK9E8IqQMunnz5tixY42NjSXr1NhS5x8bmidE2cpfP6gCewhlSEpK2rRpU3h4OEs0+mNk70skSnR54MDat26ZfvwIrVvDf//JO/X+fRAKQSgEAHBwAET4mmk2v7p16wYHB586dapLly6Fj/7999/jx493d3dnObu+Qd8QUt6JxWI2MUWUKzk5uWbNmh8/fgQATU3NZ8+e2dnZsUMLFy5csWKFQCDIzs6+evVqixYtSrNhmJ09eNiwAz4+AoFAX18/JiZmbZMmM+/dA2NjGDMGJk2CRo3g0yfYtw/69JF+BUSOtF9m8qsTiUR3797dt2/fwYMH09LSuFyuWCxu2rSpu7t7//79WZloQsq+8tQPlpO3UQz6+vpLly4tTjQIAB060L1+GdTa2tr0/XtAhBs3IC1N3qlxcZCUBI8fQ0QEAEiNBpOSknJycoRCIatpW1irVq0A4MyZMzt37kxKSvrmGH1DSHlXbnrBX52uru727dsBQE1NbcqUKRUrVhSJRImJiR8/fhw0aJCrq2t2dnb79u1LORoEAI5AsG///j59+hgYGMTExNjY2KQ6O8PVqxAVBcuXg7Ex9OwJ2dmiyZMP7NgheZZYLA4KClqyZIm9vf3169dLuc2kdPB4PGdnZy8vr7Vr14rFYkNDQ4FAcPv27SlTplhZWXXu3PnAgQPKbiMh31eu+kFlT1ESojjXrrHCkdiq1Xe2pxctg/aIESMAwNzc/MqVKwUO3bt3r1q1avB116JQKIz18EBfX8UX9SKEkO/p1asXAGhoaLBSqBIaGhp8Pv+a8oopL1y4EGSXScxdu7aPvT0ALFiw4MyZMyNHjjQxMZE0fs6cOaXcWlLK2JbCXbt2paen+/r6du7cmX2BCyZsI4SUMNVdMkrKoZwcOHcOdHTA0BBkTOv9kLi4uEGDBl24cIHH4y1YsGDRokVcLjc3N3fFihUrVqwQiUQ1a9bs06fPrVu3MkNDb3z4AIigrw/v3oGu7s+/OiGEFNHnz59PnTo1ZswYAOBwOPr6+hoaGhoaGklJSfHx8VOmTNm0aZNSGta2bdtLly4dO3aM1UIs7N9//x09ejSfz8/KymKPVK1atUePHt27d2/cuHG5GoAn34qKirKysuLxeJ8/fzY0NGQPxsbGHjlyxMzMrGfPnsptHiEqhQJCUu4odP8eIq5du3bevHlisbhTp07z58+fMGHCw4cPuVzuhAkT1q5dy3I5ZEZFqfv6go8PaGjAmjXfbFAkhJCSh4hJSUmamprsR4l5/Phxw4YNOTzO3dC7TjZOcp5eEthqwC9fvkRGRlpaWso6bejQofv377ewsBgzZkyPHj1q1qxZmo0kyvLnn39Onjy5e/fux48fV3ZbCFF1FBCSckdGhpifceHChUGDBsXFxampqeXk5FSpUsXb27tZs2ZSTs3MhCtXQEsLPn8GPT3o0EGxLSGEkB+yYueKw/UPczW5gQ6Bahy17z9BcZ49e1a7dm1bW9sItltbhu7du588eXLfvn2sFhRREU2bNr13797hw4f7yMgqRAgpNRQQElIkHz58cHFxycrKatCgwcGDB7W1tWWeWgIRKSGEFE+GOKPOyzphWWErLVfOM59Xmi+9c+dODw+Pfv36sVpzslhYWERFRYWFhVWuXLnU2kaUKy4uztramsfjRUdHa2pqKrs5hKg6Wp1PSJFUrFixTp06UVFRHh4e8qJBkJ6zlBBClEKDq7HdersJ36SSsFIpv/S9e/cAQP4S0IiIiKioKGNj40qVSrt5RImMjY2joqL8/f0pGiSkLKCAkJCiys7OBgCBQKDshhBCyA9ordM6olZEJUGl66mlWshh8uTJpqamO3bseP36taxz7t69CwBNmzalkoOqRldX18XFRdmtIIQAUEBISNGxJHgUEBJCfjlaXK0EUUJ8bnxpvqiVlVXlypXZevtHjx5JPUcSEJZmwwghhORHASEhRUUzhIS
QX1d73fY99Us1lb+hoeHFixfbt28fHR3t6up66dIlyaGMjIzTp0+PHj16z549AoGAflcJIUSJKKkMIUXVsGHDwMDABw8eNGjQQNltIYSQX0N2dvaQIUMOHz4sFAq3b9+OiCdOnLh48WJGRobkHKFQeODAgV69eimxnYQQorL4ym4AIb8MmiEkhJAfJRAIfHx89PX1vby8JkyYkJaWBgBcLrd58+bdu3fv1q3bX3/99ccff2Rt2QJxcTB6tLLbSwghKocCQkKKysfHJzU1lRKjE0LID4mOjn758iUAZGRk6Orqrl27tlu3bubm5uzo5s2bO1Sq1H7aNLh+HWJjYcECpTaWEEJUDgWEhBSV/OTphBBCCrtxA+bN03r48JmlpWVubm5MTIyBgYEkGmTaT5oE6uowbhwEBoJIBDyeslpLCCEqiPYQEkIIIaREbNwIs2dDbi6MGvV8xQqTkydPenh4VKtW7fnz53x+oSHpy5dBIABdXRAKAQAcHEq/wYQQooIoyyghhBBCFCYkBEJCAADOnYPUVBCJYPZs2Latpqmp6fDhw+3t7V+9erVv3z4pz2zdGlJSICkJHj+GiIhSbjYhhKgsmiEkhBBCiMIcOgR6elC/Pty9C7m5ULky1Kv3/6M+Pj4DBw60sbEJDQ0VspnA/BCBKtQTQkjpooCQEEIIIQp27Bj0lFb1UCwW161b9+nTp3v37h08eHCpt4sQQkhBFBASQgghpPRcvnw5Li7O3d2dy6V9K4QQonwUEBJCCCGEEEKIiqLBOUIIIYQQQghRURQQEkIIIYQQQoiKooCQEEIIIYQQQlQUBYSEEEIIIYQQoqIoICSEEEIIIYQQFUUBISGEEEIIIYSoKAoICSGEEEIIIURFUUBICCGEEEIIISqKAkJCCCGEEEIIUVEUEBJCCCGEEEKIiqKAkBBCCCGEEEJUFAWEhBBCCCGEEKKiKCAkhBBCCCGEEBVFASEhhBBCCCGEqCgKCAkhhBBCCCFERVFASAghhBBCCCEqigJCQgghhBBCCFFRFBASQgghhBBCiIqigJAQQgghhBBCVBQFhIQQQgghhBCioiggJIQQQgghhBAV9T82Nf9junap9QAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# indices of large looking molecules\n", + "suspiciously_large = np.where(np.array(smiles_lens) > 150)[0]\n", + "\n", + "# corresponding smiles string\n", + "long_smiles = smiles_data.loc[smiles_data.index[suspiciously_large]]['drug'].values\n", + "\n", + "# look\n", + "Draw._MolsToGridImage([Chem.MolFromSmiles(i) for i in long_smiles], molsPerRow=6)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "kazyeOPYl5_i" + }, + "source": [ + "As suspected, these are not small molecules, so we will remove them from the dataset. The argument here is that these molecules could register as inhibitors simply because they are large. 
They are more likely to sterically blocks the channel, rather than diffuse inside and bind (which is what we are interested in).\n", + "\n", + "The lesson here is to remove data that does not fit your use case." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "xkFF2eMgl5_j" + }, + "outputs": [], + "source": [ + "# drop large molecules\n", + "smiles_data = smiles_data[~smiles_data['drug'].isin(long_smiles)]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "QjSLGiv0l5_m" + }, + "source": [ + "Now, let's look at the numerical structure of the dataset.\n", + "\n", + "First, check for NaNs." + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 421 + }, + "colab_type": "code", + "id": "H5wkbrWgl5_n", + "outputId": "a4b2e5eb-4feb-40e4-b12d-e1f28dc2d3b7" + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
n1n2
62NaN-7.8266
162-12.8456-11.4627
175NaN-6.61225
187NaN-8.23326
233-8.21781NaN
262NaN-12.8788
288NaN-2.34264
300NaN-8.19936
301NaN-10.4633
303-5.613748.42267
311NaN-8.78722
\n", + "
" + ], + "text/plain": [ + " n1 n2\n", + "62 NaN -7.8266\n", + "162 -12.8456 -11.4627\n", + "175 NaN -6.61225\n", + "187 NaN -8.23326\n", + "233 -8.21781 NaN\n", + "262 NaN -12.8788\n", + "288 NaN -2.34264\n", + "300 NaN -8.19936\n", + "301 NaN -10.4633\n", + "303 -5.61374 8.42267\n", + "311 NaN -8.78722" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nan_rows = smiles_data[smiles_data.isnull().T.any().T]\n", + "nan_rows[['n1', 'n2']]" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Z6xL_ztsl5_u" + }, + "source": [ + "I don't trust n=1, so I will throw these out. \n", + "\n", + "Then, let's examine the distribution of n1 and n2." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 458 + }, + "colab_type": "code", + "id": "txAjPzOAl5_2", + "outputId": "6679981a-60cd-473f-f6fb-86166d7c5b5e" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAasAAAGoCAYAAAD4hcrDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3de1yT590/8E8SAoGEICAnEXAIVkSFWoa2A9vqY+vL2fWwoa6r/XXV2sM8T2vXqWjrYT6ddlU3q13bOXtSZzvXPq1rZ7taq7bVFVuZZyooBURASAIhIff9+wNzm0A4KeS+Qz7vvzQE8oXafLiu63tdl0oURRFEREQKppa7ACIioo4wrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixQuQuwCi3mbTv8/gfE09ACAhPASP35Yic0VEvo9hRdTNztfUo6jSIncZRL0KpwGJiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBSPYUXUgzQqldwlEPUKbF0n6kGxYTq3fVcjEsNRaWrkPiyiLmJYEfUw131X8X2CUXq5gfuwiLqI04BERKR4HFkRXaeW03xE1P0YVkTXqeU0HxF1P04DEhGR4jGsiIhI8RhWRESkeAwrIiJSPIYVEREpHsOKiIgUj2FFRESKx7AiIiLFY1gRyYinshN1Dk+wIJJRy1PZeQo7kWcMKyKZuR7XRESecRqQiIgUj2FFRESKx7AiIiLF45oVURe4NkMAvL+KyFsYVkRd0LIZgvdXEXkHpwGJiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBSPYUWkIDzYlsgztq4TKUjLg20BHm5LBDCsiNrlGhze2gDMg22JWmNYEbXDNTi4AZhIPlyzIiIixWNYERGR4jGsiIhI8RhWRESkeAwrIoXj3isidgMSKV7LvVcjEsNRaWps8+/cl0W9EcOKyAe0bKEvvdzQ5t+JeiOGFZELOTYBE1HHGFZELrgJmEiZGFbk1ziSIvINDCvyKy3DqTeOpDx1D7p+32zAIF/EsCKf1Zk3YH8Ip5Y8dQ/ycFzydQwrUqyOwsj1DbitvUj+EE6etPd9e2vflj9ddcKRa8/zq7D6x9FSVJoaAQBRoUH4SUa84mpw/XhqtAGXG+zS3zvzOXJ9X92h5ffekYTwEOnPGQl93D7f+TVcnxNj1EHt8kZ9vX/via/pjdfw9LPqjn9XXf3v1xly/NvuzM+GvE8liqIodxFdMW3aNNTU1MhdBhFRtwsPD8fLL78sdxmK5HNhRURE/odnAxIRkeIxrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixWNYERGR4vlcWE2bNk3uEoiIZOdv74U+F1Y8F5CIyP/eC30urIiIyP8wrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixWNYERGR4gXIXQARkRIJgohzVRZU1FkRY9RhQKQearVK7rL8FsOKiKgFQRCxp7Ac83cUwGoXoNOqsW5SJsanxzKwZMJpQCKiFs5VWaSgAgCrXcD8HQU4V2WRuTL/xbAiImqhos4qBZWT1S7goskqU0WtmaxNcpfgVQwrIqIWYow66LTub486rRrRoTqZKmqtSRA6flIv4vWwqqqqwq233oqzZ8+iuLgYP//5z3H//fcjPz8fgp/98IlImQZE6rFuUqYUWM41qwGRepkr819ebbCw2+1YunQpdLrm305Wr16NuXPnYuTIkVi6dCn27t2LcePGebMkIqJW1GoVxqfHYvDsXFw0WREdym5AuXl1ZLVmzRpMmTIF0dHRAIDCwkJkZ2cDAEaPHo0DBw54sxwiojap1SokRxkwKrkvkqMMiguqALV/reJ47bt9++23ERERgdzcXOkxURShUjX/A9Dr9TCZTN4qh4jIp4Xq/Gvnkde+2127dkGlUuHgwYM4fvw4Fi1ahOr
qaunjFosFRqPRW+UQEZEP8VpYvf7669Kfp06dimXLluG5557DF198gZEjR2Lfvn0YNWqUt8ohIiIfIuuk56JFi7BhwwZMnjwZdrsdd955p5zlEBGRQsky6blt2zbpz6+99pocJRARkQ/xr3YSIiLySQwrIiJSPIYVEREpHsOKiIgUj2FFRESKx7AiIiLFY1gREZHiMayIiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBSPYUVERIrHsCIiIsVjWBERkeIxrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixWNYERGR4jGsiIhI8RhWRESkeAwrIiJSPIYVEREpHsOKiIgUj2FFRESKx7AiIiLFY1gREZHiMayIiEjxGFZERKR4AXIXQOTPBEHEuSoLKuqsiDHqMCBSD7VaJXdZ5ANM1ia5S/AqhhWRTARBxJ7CcszfUQCrXYBOq8a6SZkYnx7LwKIONQmC3CV4FacBiWRyrsoiBRUAWO0C5u8owLkqi8yVESkPw4pIJhV1VimonKx2ARdNVpkqIlIur00D2u12PP300ygtLYXNZsPjjz+OlJQUPPXUU1CpVEhNTUV+fj7UauYn+YcYow46rdotsHRaNaJDdTJWRb4iwM/eK7323f7jH/9Anz598MYbb+Cll17Cs88+i9WrV2Pu3Ll44403IIoi9u7d661yiGQ3IFKPdZMyodM2/2/oXLMaEKmXuTLyBaE6/2o58Np3O378eNx5553S3zUaDQoLC5GdnQ0AGD16ND7//HOMGzfOWyURyUqtVmF8eiwGz87FRZMV0aHsBiRqi9dGVnq9HgaDAWazGbNnz8bcuXMhiiJUKpX0cZPJ5K1yiBRBrVYhOcqAUcl9kRxlYFARtcGrk55lZWV48MEHcffdd+Ouu+5yW5+yWCwwGo3eLIeoWwiCiKJKMw6evYSiSjMEQZS7JK/jz4B6mtemAS9duoSHH34YS5cuxc033wwAGDJkCL744guMHDkS+/btw6hRo7xVDlG34F4p/gzIO7w2snrxxRdRV1eHP/3pT5g6dSqmTp2KuXPnYsOGDZg8eTLsdrvbmhaRL+BeKf4M5FJltmHZP47h1c+/k7sUr/DayGrx4sVYvHhxq8dfe+01b5VA1O3a2yuVHGWQqSrvautnUFFnlT7Oo6S6n80h4ES5/6zz+1fvI1E3416ptn8GdoeICes/49QgdQv/2lVG1M087ZVa89PhSAwPkbky72nrZ7Bk97ecGqRuw5EV0XVQq1W4Iy0GW6Zm4XBxNRwCsO6jk9Bq1H4zivC0X6zK0ojiqga35/nb9GhPC9SoMTg2FEl+somcYUV0nUpq6jFj22G3abD5OwoweHau37wxO/eLuX6//j492tMCNCokRoQgxhgkdylewbAi6gJP90+xyaI159Tgmj3HMXF4PDRq4IdJEX41PdrT6m0OfPjfCgyODcWEYf3kLqfHMayIOqmt/URD4kLdRhFxYTrkZfVHvc2BokqzV7rglHaJo3N61O4QsGjXN2yyoOvGBguiTmprP5FDgNRgEBemw4M3J2HLviI8/JfDmLD+M+wpLO/REx2cITph/Wf4+UtfeOU1O6Okpl4KKoBNFt3N39asGFZEndTWdF+l2Yrx6bF4f3Yu/jA5Ey/sPe3VN2ilbsrlfV09K0CjQvYPIvDLH/1A7lK8gmFF1EnO/USunE0DzgYDQRS9/gat1FBo7+dF16/e5sCX31XLXYbXMKyIOqkz90/J8QZ9Pa/ZkwfQ8r4u6k5ssCDqpM7cP+V8g27ZhNGTb9DX+po9fQAt7+ui7sSwIuoCT/uJWn7c22/Q1/qaba11def+sI5+XnTtAjVqv2muABhWRD1G9GIz3rWEAveH+bYAjcpvNgQDDCuibuVLdzvxEF7f5myw8IcNwQAbLIi6lVLbyD1hAwT5Eo6siLqRL02tsQGCfAnDiqgb+drUGhsgfJe/NVhwGpCoG3FqjbyFDRZ
EdM06O7XmzYNnlXbILXUPf2uwYFgRdVFHb/4dTa15s2PQl7oTidrDaUCiLmh5wvkv//Il9p+51KXjirzZMehL3YlE7eHIiqgLXN/848J0mJyVKN0S3NlRS3d3DLY30mvrtU5VmACAU4I+zN8aLBhWRF3g+uZ/34j+WP9x6+tAOjquqK2OwVijDkWV5i6tLXU0zdfWa31bWoe52ws4JejDAjQqiKKIFe8VounKiD4pUt9rrwzhNCBRF7iecK5S4Zqu5vDUMbjx/hvx3zJTly9QbDnSm5aTjBPldfi2tBaCIHp8rdljUvH2fy74zJRgT54M78uc19of+74OJ8pNOFFuQrHC/1teD46siLrA9YRzANe0p8pTx6AoAj/e8FmXRmmCIKLS1IjpuckI1qphCAzA6j0nYLUL2LKvCOsmZeKOtBgMiQvF1l9m43K9HacvmrDtUDHKaq3S6yhxw7ITG0TIiWFF1AWuQVNtaURqtEG6ur0re6padgwePHup3XWslutSieEh+PB4hdub+JyxqQgPCURZbfNU5Zo9x2F3CG71zRmb6vYaSt6wDHjnZHjyDQwroi5yDZoRgohh8WHXfFyRM4TUKlWbozRPo4stU7NavYm/sPc0puUk44+fnAEATBweLwWV63NmjE7G+r1nfGLDsi8dX+VtgRo1BseGuj3WmxsuGFZE1+F6jityDaHwkEDMGZuKF/aebjVK8zS6OFxc7fFNXOWSkxq15zW1GxP64K0ZI33iLEBfO77KmwI0KiRGhAAAYsN0vX5zMMOKSCauIVRWa8VfDxZjxuhk3JjQB0mReilIPI0uBNHzepkzd3RaNX6YFOHxOUmRep8Zlchx87KvcDZYAMDg2FCGFRFdO0EQUVJtQUVdIyy2JiRF6JEUEYKSmnqcqjC5BUlZrRXr957BWzNGuoWJp9HFu0dLseanw1utlw2JC8UtAyMRHdq8ruXrb/Q8GZ6cGFZEPUQQRHx8sgKnK8xu03sr7hmKDR+fxl0Z8Z2a4vI0ulg0Pg13pMV4XC8b0Pdq0PWGN3qeDO+Z65pVb16rcmJYEfWQc1UWfHOhFlv2FbmtNy3++zFMy0nGriMXMHtMKtZ/fBrhIYHIy+qPQdGhEMXmoHOGSnujC9c38aYmAd+WXkZZrRVxYcFIjzMiIEAtjaQq6prb1X0xsKg11zWrlpuDnXrTJmGGFVE3cG0tjwvTwSEApy+aEB8W3GYjRFmtFdsOFWPO2FREhQZh8d+PtbmXqKPRRVOTgL8fLXX7GivuGYqfDOuHf528yH1KvZDrmpU/4AkWRNdBEEScvWjG+9+WYXdBKVa/fxwfHCvHjzd8hsde+w++r22QTo9w0mnVEK/8AlxWa0WD3SGFDHBth80WltW2+hqL/34M33xfy4NsqVfgyIroGnna/7Rk4hBs2XdWCocdhy+0aknPvysdu46U4Fe3pyBMp0FChN7j6KuizgqT1d5qWs8T50bgll+j3KWTMC5Mh/tG9IdKBVSaGzkdSD6FYUV0jTztf3r2vf+6bcx1tqS/9GAWmhwCIvWB2PjJKdw7IgFb9p3F5KxEnCivQ1JkMCYOj5f2Sb17tBRWuwO//MtXbtN692TEewysuLDgNg/H1WnVCA8JxNRRSdLBu3/+rIjTgT7O06bglnpT4wXDihTD1260bet0BU2LLKmptyG+T7B0bNIDo5IxY9thTMtJxvqPT2NQtAGPjU7B8vcKpWB69u6h+OMnp1tN66VGG5CREN6qlvQ4I1bcM7TVmtXwfmFYNykTJ8rrrumEeFIu1waLtjgbL+LDQ3y+0YJhRYrgiweWhgQGeBzNDO/fR3rc094mhyBITRZWu4DcQdFSUAHNjy3Z3dwxeLi4Vvo8q11Aea0VGQmtawkIUOOejHikRhtQXmtFbJgO6XFhCAhQY3x6bKs6nV+Pxxb5rq40WLTsEvRFsoeVIAhYtmwZTp48icDAQKxYsQJJSUlyl0Ve5osHltocDqn13BlMs8e
kIjRIjQ9m5+Ki6cpG4PAQlFRbUGluRJXZBhFwu7KjratGgls0ZiRFBiPSEISDZy95HHkGBKiRkRDeKszUahUGROp5bBH5NNnD6l//+hdsNhu2b9+OgoIC/O53v8OmTZvkLou8zBcPLI3UB2H74RJMy0mGSgWIIrD9cAkmDMvGf8tMrU5EB4AGuwO7C0qlRozZY1LR2OTwGCRD+hmRFBmM4qoGJEUG41e3p+KBl7/ocOTpaTqVxxaRr5M9rI4cOYLc3FwAQGZmJo4dOyZzRSQHXzywdECkHovGp7kFwKp7h6Guwe7xRPTnfpaBkxUmFFc1YPuXJVg0Pg0Xqi0Y2C8MSycOwTPv/ddthLb83UKsy8vEJXMjIg1BUlA5v6ankWfLw3GdG43T4oy4Iy0G7/v4aRZ0VWcaLJx6Q6OF7GFlNpthMFz9n02j0aCpqQkBAbKXRl7ki7/5O0+WuGFWLo6X1+FUhQnP/fMk8rL6exwl1tuaoFFdnQIMClChf4QeIVoN+kSFYMEdgxAapEVIUABKL9fD1iSizmrHnUPjOrzvysk5ndqy+891JKbUkSp1TWcaLJx6wwkXsieCwWCAxXJ1g6IgCAwqP6SkA0u70pWoVqugUgELdh6VwqStE9ErTY2ICAnEMz8ZAkFU4VdvfO3WuReoUSP/3asdgUsmDkFYsBZNTUKnR57O6dT7RvRn918vxxMsvGzEiBHYt28fAKCgoACDBg2SuSKSi/NIoVHJfZEcZZAtqPYUlmPC+s/w85e+wIT1n2FPYTmEdrqpWq63Oc/8c22imDM2Fa9/UYJXDnyHAZEGLHv3avdfeEggSqrrEaTVYHpuMuLCdNKerX+fqsTfj5aiX6gOK+4Z6vY1V9wzFInhV3+zFgQRIYEazB6bgsRwz8c8FVdZcPDsJRRVmtv9noiURvYhzLhx4/D5559jypQpEEURq1atkrsk8mNtdSXeMCsXA6M9j0hajnrKaq34+EQ5Xps2EpfMjYgz6tBHr8WNiX0QHapzuxokLkzXarpu9phUbDtUjLJaK+L7BON8dT1OXDRhw8en3Zo5Nnx8GiMSw6X9W66t/3PGpngciX19/rLbLcFK3hpA5Er2sFKr1XjmmWfkLoMIQNtdicfL6/CDvp6nA1uutyVFBmNKdpJb597G+2/EDyINqKizIsoQJAWJp+m69VdC6eX9RQgJbP5ftKSmHj/PTsTWA80hBjQHXaWpERV1VoQEBmDNnuPtHvM0Z2wq/nqwWHodTgv6tq40WLTFlxovZA8rIqUQBBFNDtHjiORUhQmJ4cEYGt+nVWC1XG8L1mowecsht2m+0xVmzLyyRpWVFIZn7h6KpbuPtbnHSqMG5oxNxar3j6Om3oY5Y1OhUQEP3TIAgihCpQKMOi3+36tfehyROY952vrLbIgQoYIKc7cXSEHnfB0lbw2g9nWlwaItbTVeuFJKEwbDiuiKc1UWLN79LZZMHIJnW7SRbztUjMSIEFw0N2LMDTFtTp2JIlBlsbkF0H0j+ksjHAAovdwIe5MDz0/KhD5I4zEcB0WHYuX7x6Vwcba+L/zbUcwYnQyHAPzhX55HZM5zCWvqbYgKDUJylAFFlWbU1NvcalX61gBqn781WDCsiK6oqLOiuKoBJqsdM0YnQxCbw2fboWLU1NtQUl0PXUAovi2tRXqcESU19dL9Va6bgF3Xi+LCdFKzQ1yYDg/enARjsFYKw6TIYORPTHc7F3DlPcPcggq40vre2ASrXYAgot0RGYBWrf++uDWAyBXDiugKZ6PE1gPFmDkmxePo6qc39ceCvx2VrqYvrmrA7LEpbrcBf3LiIp6flImzlWYMjDLgfLUFWUlhmJYzEA5BxIK/XW1zL65qwIv7zuB/f5aBMxdNGBJnRJQh0OMoqNLcKN2FpVJ5bo8fOzgatwyMbNX6r6StAdQ9umPNqjOUsq7FsCK6YkCkHmt+OhyLdn3jcXQVGKDCoJhQTM9Nxvnqejw9YQiW/aMQgnh
1lBMXpsP4oXGY53aqxVDcP3IA5u0owBO3pUijLOfdUgAgis2f/90lC0prGpB/VzqWX2lvT4oMxlPj03C20oznJ2di0ydnUGm2tTqXcN2kTAzzsKbm1NFtw+RbumPNqjM6s67Vnu5a82JYkU/qietE1GoV+vXRYVpOMgAg1qiTjkBKigzGY6NT8OSVUZFz0+5jo5NhtjVJ91ENjg3FQpeRk9Uu4FxVPbbsK0J4SCAyEsKQFBmMyVmJbkGzZOIQ/PvERcwYPRAajQoatQrPT8pElaURIYEBbuHn7OrbfrgELz5wE4IC1D5xpQp1L65ZESlcT14nEqkPwsv7i6TRz8zbU5AUqYcKcJu+c27aXfLjNNyUGI7oUB02fnIaQ+IGY3puc9jtOnIBZbVWCCKk44+W7j7mdp6g69d6flKmFErO0VR8eAgef+1Iq3MGf/+zDBiDtbglORIBAWo0NQn4tvRyp24VJvJF/NdMPqetjbvnqiwdfGbHnI0IOq0aZbVWbPzkDIID1dLruLLaBRh0WhwoqsLGT07j/uwk/HrnUWz8+Az+/FkRZo5JwfB4IzQqIC+reT9VcVUDzlw0t7mXyxmSk7MSMW9HAY4U13h8rj5Ig5yUvggIUMNmc+CdglJM3nIIj732H0zechB/P1qKpib3zyPyZRxZkc/pyetEXBsRqi2N0GrUaGwSEBbsuaHhbKUZgghMHB6P5/91yuNoydRgg1pz9XNtDsHj13Jc+WvLjcKenhuhD8SRkmoEqtWoa2zCkt3H3F67vVuFqXfwVoPF9equBg2GFfmcnr5OxHlZ4YlyE9bsOY7JWYnYfrikVUPDkolDsPHjM/h/tyQhPCSwzdFSWqwRQVoVZo9NgSACwVo1fjN+MFbvOXG1Xf3eYVi/9xQAIFSnkY5VMgRpMO9/BklB6Hzuf4prEBsWjN/tOY67MuI9vnZbtwpT7+CtBovr1V6DRleaLxhW5HO8sWfIOdU4LSdZCqhth4oxLScZGjWQFmuEWq3CL0YmYkCkHucuWdocLZ2vtkCESmpv12nVmD9uEDbefyNOlJkwOM6IHV+dw4zRA7Fl31mE6rTShl+dVo3fTkjDHyZlwiGK6BMSiK0HziI1pg9qGux4anwaKs1Wj68dG8YNv72ZvzVYcM2KfI5zqu792bl4a8ZIvD87t9sPZHVONbpuvi2rteKPn5zBzsMXUF1vw5y3vsbvPzyFudsLEKzVIH/iELdT0WePScV735Qitk9IqynCdR+dQlOTiOQoA94/WoqJGQkI1QXgd/cNl/Z3OZ+78v3j+G95HX71xteYtvUr3H5DHN77phTr957BvB0FCFA3j9RcX/vZu4ciPS6s234eRHLjyIp8Uk/vGXJONQKt14zysvq3CpTVe05g+cQ0bLx/BL65cBkOofmK+8lZifi+psHjNJ2psQnvf/M97kiPk1riZ49N8fhcQYS0N6usrgEL7hiMVVdOuXjmvf9i5u0pmDE6GanRoehrCER8uA5fFVezpZ16DYYVkQcDIvVYde8wvLD3VKu1quS+Bs9HHWk0ePa9QkwcHg+NGlh17zA8/9FJTMsZ6HGaLlirwb03JUhBBbR9caM+UNPuVSI2h4ABkXoM7WfE2Utm3PH8Z93e1k/K4isNFu3pSvMFw4r8Qlc3EavVKoxI7IOFdwzG95fr8cLkTNgcAlRQITBA3SpQkiKDodNqcFdGPABg5+EL2F1QiiUT0/Hse4UemzNCgtSwNAoeL250fe6csalwCCI2fnKmzatExg6OxrD4PjhXZZFOd3c+j1eB9E6+0mDRnhhjUKefy7CiXu9aNxEnRjR3BOq0GtQ22OAQVVj+biHCQwLd7opKigzGr25LkTYNu456auvtKK5qkJoznBcnWqx2NIYEtjrjr6zWiu2HS7B56k346lwNdAFqhGg1qKq3eR7NqYHV9w6DzSFIYdxTbf2kLL2hwWJwbCgmDOvXqecyrKjXa2sTcUejDbVahUhDILQaFWx
NonS6hPOuqBmjkxEfFozgoAC3qTznqGfO2FREhwZJG4ydV3c0X8Y4As++V4gwnbbVqetP3JaCDXtP4XBxLYDmtarf/jjN4/RgVlIEluz+FsVVDdBp1XhpalaPtvUTyYXdgNTrtTfacBIEEUWVZhw8ewlFlWYIV/aEBGo0aLBdPV3CqazWivV7z0ClAhquXN3R8uvHhQVj26EiLHHpEkyKDMaffjECdQ123JURj0qzDR8WlmHz1JvwwpRM/PXhbAzoG4z7Rw6QPqem3oYqUyPm/c8gt6aPJROHSEHlfM3Fu7/Fmp8Od3serwKh3oAjK+r1OtpE3NY04R1pMWhsckBE240PxdUNCFB7/ljRJTNSY/pIJ7inRhtgaXTgidf/I73OgjtugFatwqPbjrhNIX58ohx/un8EzI1NOFNpxov7igBA2ueVEh2K8sv1UlA5FVc1IL6PDu93w1UgPXFYMHUfNlgQ9TJtbSJODA9BUaUZ56osOFleh/CQQJTVNo/C1uw5DrtDwKJd32B6bjLePVra5gkWANq8XTgvqz/S44yobWiCVqPGwne/cZsu/P2HJzFjdLLHxokn3vgP/vpwNsrrrKi5smb18v4i/O9Ph6NfWBCSIkKg055uFZIR+qDrbuvvycOCqXv0hgYLURTx1wPn8OAtAzp8LsOKej1PFw8mhofgw+MVbm/Grq3gE4fHY9Gu5mDZdeQCpo5KwvbDJdLIZnCsES/++4x0m29btwvfnByJmno7oALsDsHjdGHLU2hcNyNX1Flxy8BIZPTPRr2tCYkRevygb/MIRxDEHjvJ41rX+ch7ekODBYBOjw4ZVuQXWm4iLqo0t3ozdo5o/vjJGWjU7idXbDtUjF+MTERSpB6RBi2Onq/F6BuiMfqGaOw6cgFvflmCx25NkS5MdJ4i8dTb30jND5t+McLjdGHLgYrzNmCdVg19UACsdgdGJke6Pcc5RRceosX2GTfD7nAgQh/UbVN17Cq8Npw67TkMK/JLbb0Zq1TNTRA/TIpoFSwatQq///AE7s9OktrWnfugkiJCUNtgk0ZXg2NC8dyHJ9yaH05XmNxa3p1nBEYaAqXXco7wnAfnLn+3EOun3OhWZ1tTdCMSI6F8tMMAABwnSURBVLrtjbGnDwvujTh12rMYVuSX2nozTo8zIi5Mh8W7v3Vbo8rL6o8X9jaPvFqe8/fC3tN4YXImlr93XHp85piUVs0Prx4oxoI7B0mBplYBkYZAbP38HKblJCMlSg+NRo3vL9dj4vB4aUqy3uZw+zrOKbrwkEDcN6I/VCrgZHkdhsSFYkDf7hn1eOOw4N7G21OnvaHBAuh8kwXDivySpzfjOWNTca7KIo18XE9ZT40ObXWwrZPVLuByg73V4y3D0NmCnpUUgYLzl9EkCKgyNeKb0jqcumjG5gduwqMutwI7v0ZIoAYHz16SppUq6qzSzcOuDR9JkXokRnTPtJOndT5OabXP21OnvthgERum6/Qm4JYYVuSXWr4ZRxl00AYApTVWPHP3UMQag3CuyoKLJht2Hr6Apyekue2Vmjg8Hqor79vvHi1FXJj7SG3XkQuYP24Q1n10yi0MI0OD8Pt/nsCpi2Ys/nEaUqINePGBEYgLC8YNUQasuGcoFv/9mPQ5z9w9FLPf+lpa91o3KRND4kKlm4ddf4t/+p1vkZnQp9veGHv6sODexttTp77YYNGVEytaYliR3/C0+O18M25qEvD3o6VuQZF/VzoOna3EgzcnocbSiDljU7H3eDkeG53iduJE/sR0vHboOzxz91AsvXJjb029DboANf50/whUWWzQaTWosTQiNEiLR0YnI8oQhPM1DXjo1a+kr7Pmp8Px1pdXj2UaFBOKtS3WvebvKMD/zcrFoCsjPVdsgJCPIIhQq5oPL376nW85ddoDGFbkFzpa/C78vlYKKqD5jX/5u4VYl5eB+TuPYs7YVESEaPHknWl48NUv3Z/3XuGVLsLTWD/5RjQ6BGg1KgQHarB
09zFpVLTy3mG4WNcAjUaD0xerpMsYnV9n0a5vpG5EwPO6l9UuoNJsRVqckQ0QCuH6bys8JBAzRidjUEwo0mKN0jaDnuCLa1bXc8U9w4r8QkeL36W1nu+csjtEWO0CEiNCUGVuxP6zl9rsIiyuasA339e67ctyXheS2b8PArUqnChrxO/fP4Hpuckev46mxQFobQVSWw0QahXc1re4xtTzXP9tOY/h0mnVeH92bo/+/Ltrzep61pG8iWFFfqGjxe++hiCPwdBHr0VSZDCMOi1OVpiQGh3q8XnOfVGieHVfVl5WfwyONUIQRPzhX6dw6qIZv7tvuPS5bR1Mq9OqER4SCH2gBmt+OhxFlWbsOHwBNfU2aVrJ05rbd1VmjH+B91h5m1x70rprzep61pG8iWFFfqGjxe8YYxDy70p329Sbf1c6KusaMGtMKh7ZdhhWe/N1IMt/ko78f1x9nnNf1Op7h6G8zoqZY1KgUQEDow1Ys+e421Ted5fM0GnVHu+tWjcpE7ckR2LPnFz8p+Sy29rHqnuHYURiH7duP9cGiKJKM++xkgn3pHkHw4r8Qkf7hhLC9ejXx4Lf/ywDFlsT9IEBCNKqERMahElbDklvRMVVDfjTv8/g+UmZaBKaL2N0iAJ+kZ2IKovNbcPvynuHwdbkfpbSjsMX8OzdQ7Fk9zFsO9R8zcjAKAPS44xIjjI0H6MkQgoq4Gqnn6dpJWfTyKkKE6bnJmPXkQvSEVBsuPAO7knzDoYV+YWO9g2p1SrkpkRfaVe/+vEvvqtqNcVTXNWAwrI6vHu0FL++YzDKaq1IiQ7FzDf+4xYwv33nW8wYnYz1e89In1tTb0NabCi2zxiF8lorYsN0SI8LQ0DA1cWqzk4reWoacT3fkL/de4dce9K6q8HiepoevIlhRYrV0Tlr13JVfXv7hjx9vK0pHlFsDq0zF01Yv/cM5o8b5DFgEsND3I5SeubuoUjpa4BOF4CMBM91dnZayVPTiOtV9/zt3nvk2JPW2QYLX2mg6AjDihSpo1Zzb53Dlhge0mqjbv7EdLz5ZTF0WjWG9+8DnVYNm0PwGDDldVZMy0nGoBgDVCoV/nqgCEEBatw1vF+bdXZ2WqmtEdjweCPen53LbsBerrMNFr7SQNERhhUpUket5t46h62kph4broxWVKrm6z9e3HcGd2fG44GbB+CGWD22PzIKlZZGaS2q5ZRcTb3Nbf/Use+/wbD4sHZHeJ2ZVmprBJYaE8p1Kup1GFakSB2t21xru3DLqcPE8BCU1NS3OZVYUWdFcVWDFDROw+LDcGtKFP518qIUmkmRwXh+UiYa7A6cq7JIQeUMra7U2ZlpJS7skz/xWliZTCYsXLgQZrMZdrsdTz31FG688UYUFBRg5cqV0Gg0yMnJwcyZM71VEilYx63mXW8X9jR1+OzdQ1FjacSrB4qlfUyuU4ltvc7AKAMu1Da4je6Kqxowb0cBZt6eAocA5GX1x83JkXjq7W+kDr3O1NlZPGzWv3W2wcJXGig64rWwevXVVzFq1Cg89NBDKCoqwq9//Wu88847yM/Px4YNG5CQkIAZM2agsLAQ6enp3iqLFKqjUcO1jCo8TR0u2X0MM0YnY+qoJGw7VNxqKrG91/HUKWi1C7A2NV8/P2dsKkQImD/uBunW4e4e/fCwWf/lbLDoLQ0UHfFaWD300EMIDAwEADgcDgQFBcFsNsNmsyExMREAkJOTg4MHDzKsqFOt5l0dVbQ1dSiIcLsl2HWKrr3XaWvUdXNyBHJT+iLGGITEiOZQGhYfxtEPdStng0VvaaDoSLthVV1djS1btiAoKAgPPfQQwsPDAQAbN25sd7pu586d2Lp1q9tjq1atwvDhw1FZWYmFCxfi6aefhtlshsFw9TdCvV6P8+fPX8/3Q73ItbSat6e9NnTn+X6epujaep22Rl03J/dtFUYc/RBdn3bD6sknn8S4cePQ1NSEBx54AFu2bEF8fDy+/PLLdr9oXl4e8vLyWj1
+8uRJzJ8/H08++SSys7NhNpthsVikj1ssFhiNxmv8Voja5ylclkwcgu1flkCnVUOtQpem6LhmROQ97YaVzWbD5MmTAQBpaWl44oknsG3bNoii2N6neXTmzBnMmTMHf/jDHzB48GAAgMFggFarRUlJCRISErB//342WFCPUatVuCMtBlumZuFwcTUcArBl31lM+WEiHo0IQXo/Y5dv2uWaEcnF2WDRWxooOtJuWDkcDpw8eRI33HADRowYgUcffRSPP/446uvru/xCa9euhc1mw8qVKwE0B9WmTZuwfPlyLFiwAA6HAzk5OcjIyLi274SoE0pq6jHjyqG0Ti/sPY3/m5WLAX3lCZyunsRBBFxtsIgxBsldile0G1aLFy/GihUr8Pzzz6Nv376YMGECmpqapMDpik2bNnl8PDMzEzt27Ojy1yPqLNcwaLA7PDZZVJqtGBjt/bDy1kkc1PuwwcJFWloatm3bhnfeeQdbtmyBzWaDKIoIDg72Vn1E16XlLa6//XGaoq5z8NZJHES+rlOt63/+85/x4osvIi4urqfrIepWzjAIDwnE1FFJeO6fJzzeIyXXqQ9yXdxH5Gs6FVYJCQlISkrq6VqIuqQzaz3OMLhvRH8poLYdKsa0nGRo1MDYwdEYFt8HAFBUafb6uhEv7qNrxQYLD3Q6HaZPn460tDSoVM3/A8+fP79HCyNqT2fXepxhoFJBCoSyWqt01t8tAyMBQLZ1I57vR9fK2WAhiiL+euAcHrxlgNwl9ahOhdWtt97a03UQdUln13qcYXCyvM7jCEYFFb4tvSzbuhH3atG1cr0ipDsuYVS6ToXVvffe29N1EHVJZ9d6nGEwJK55usR5XbxOq8acsamYu70AeVn9ZV034l4too7xihDySe2t9biuZYUEBsDmcCBSH4SfDO+HzIQ+KK6y4Ovzl/HXg83XvwsiuG5EPsf11HV/WLdiWJFPamutJzE8pNX60+wxqdh+uASLxqdhfHosKuqsWL/36v1Uu45cUFSHIFFnBGhUyP5BhF/ssQIYVuSj2lrr8bSW5TxR3bkO1XJUVlZrxfbDJdg+YxQa7A6uG5FPqLc58OV31QwrIqXztNbT1lqWsxvwosmK7AGRrUZli8anYVh8HwYUkUIxrKhXae8aEOc6FDvwiHyPWu4CiLqTcy1Lp23+p+1cs3rvm1K3dSjnqGxUcl8kRxkUGVSCIKKo0oyDZy+hqNIMQej6bQfUewVq1H7RWOHEkRX1Kq6jpuZuQA3sDgHjh8b61OiJB9xSRwI0Kr/ZEAwwrKgXkmvfUnde9cEDbqkjrqeu+wOGFSmeL9z31N0jIR5wS+SOa1akaM4QmLD+M/z8pS8wYf1n2FNYrrj1m7ZGQueqLNf09ZyNIq64UZn8GcOKFK27Q6CntDcSuhaeGkW4UZlc8dR1IgW51ukwb08dtmyZjwvTIS+rP+ptDhRVmrv8+myvp47wBAsiBbmW+57k6KRzPf4pPCQQD96chBf2nr6u1+cBt9QefzvBgtOApGjXMh0mx9ShcyT0/uxc/GFyphRU3np9ot6OIytStGuZDpO7k66m3sZOPqJuxpEVKV5XT5uQo5POtWvx2Pd17OSjHudvJ1gwrKjXkaOTznXq0XnlCDv5qCcFB2rwyx/9QO4yvIbTgNTryNFJ5zr1WFZrxbZDxZiWk4zh8UakxoSyk4+6XWOTQ+4SvIojK+qVvH1Qbcupx7JaK17eX4TUmFDFHpRL5EsYVkTdgJt4iXoWpwGJugE38ZK3Baj9a6zBsCLqJtzES94UqvOvt2//imYiIvJJDCsiIlI8hhURESkew4qIiBSPYUVERIrHsCIiIsVjWBERkeIxrIiISPEYVkREpHgMKyIiUjyvh9XZs2dx0003obGxEQBQUFCAvLw8TJkyBRs3bvR2OURE5AO8GlZmsxlr1qxBYGCg9Fh+fj7Wrl2LN998E0ePHkVhYaE3SyIiIh/gtbASRRFLlizB/PnzERwcDKA
5vGw2GxITE6FSqZCTk4ODBw96qyQiIvIRPXJs786dO7F161a3x/r164cJEyZg8ODB0mNmsxkGw9UTqvV6Pc6fP98TJRERkQ/rkbDKy8tDXl6e22Pjxo3Drl27sGvXLlRWVuLhhx/G5s2bYbFYpOdYLBYYjcaeKImIiHyY1y5E+eijj6Q/jxkzBq+88gqCgoKg1WpRUlKChIQE7N+/HzNnzvRWSURE5CNkv71r+fLlWLBgARwOB3JycpCRkSF3SUREpDCyhNXHH38s/TkzMxM7duyQowwiIvIR3BRMRESKx7AiIiLFY1gREZHiMayIiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBSPYUVERIrHsCIiIsVjWBERkeIxrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixWNYERGR4jGsiIhI8RhWRESkeAFyF0DXThBEnKuyoKLOihijDgMi9VCrVXKXRUTU7RhWPkoQROwpLMf8HQWw2gXotGqsm5SJ8emxDCwi6nU4DeijzlVZpKACAKtdwPwdBThXZZG5MiKi7sew8lEVdVYpqJysdgEXTVaZKiIibzJZm+QuwasYVj4qxqiDTuv+n0+nVSM6VCdTRUTkTU2C0PGTehGGlY8aEKnHukmZUmA516wGROplroyIqPuxwcJHqdUqjE+PxeDZubhosiI6lN2ARP4kQO1fYw2GlQ9Tq1VIjjIgOcogdylE5GWhOv96+/avaCYiIp/EsCIiIsVjWBERkeIxrIiISPEYVkREpHgMKyIiUjyGFRERKR7DioiIFM9ru8ocDgdWr16NY8eOwWazYdasWbj99ttRUFCAlStXQqPRICcnBzNnzvRWSYrFe6qIiNx5Lax2796NpqYmvPXWW6ioqMAHH3wAAMjPz8eGDRuQkJCAGTNmoLCwEOnp6d4qS3F4TxURdQZPXe8h+/fvR2xsLGbMmIHFixdjzJgxMJvNsNlsSExMhEqlQk5ODg4ePOitkhSJ91QRUWf426nrPTKy2rlzJ7Zu3er2WHh4OIKCgrB582Z89dVX+M1vfoO1a9fCYLh6rp1er8f58+d7oiSf0d49VTwDkIj8VY+EVV5eHvLy8twemzdvHm677TaoVCpkZ2fj3LlzMBgMsFiujhgsFguMRmNPlOQznPdUuQYW76kiIn/ntWnAm266CZ9++ikA4MSJE4iLi4PBYIBWq0VJSQlEUcT+/fuRlZXlrZIUifdUEVFn8IqQHjJp0iTk5+dj0qRJEEURy5cvBwAsX74cCxYsgMPhQE5ODjIyMrxVkiLxnioi6gx/uyLEa99tYGAgVq9e3erxzMxM7Nixw1tl+ATeU0VE5M6/xpFEROST/Gsc6WO4OZiIqBnDSqG4OZiI6CpOAyoUNwcTUXt4ggUpQnubg4mI/O0EC4aVQjk3B7vi5mAi8lcMK4Xi5mAiag83BZMicHMwEbWHm4JJMbg5mIiomX+NI4mIyCcxrIiISPEYVkREpHgMKyIiUjyGFRERKR67AWXAA2qJiLqGYeVlPKCWiKjrOA3oZTygloio6xhWXsYDaomoO/DUdepRPKCWiLoDT12nHsUDaomIuo4NFl7GA2qJiLqOYSUDHlBLRNfL364I8a/vloiol/C3K0IYVkREpHgMKyIiUjyGFRERKR7DioiIFI9hRUREisewIiIixWNYERGR4jGsiIhI8RhWRESkeAwrIiJSPIYVEREpHsOKiIgUj2FFRESKx7AiIiLFY1gREZHiee1CFJPJhHnz5qGhoQFarRbPPfccoqKiUFBQgJUrV0Kj0SAnJwczZ870VklEROQjvDayevvttzFo0CC8/vrrmDBhAl5++WUAQH5+PtauXYs333wTR48eRWFhobdKIiIiH+G1sBo0aBAsFgsAwGw2IyAgAGazGTabDYmJiVCpVMjJycHBgwe9VRIREfmIHpkG3LlzJ7Zu3er22NKlS/H5559jwoQJqK2txeuvvw6
z2QyDwSA9R6/X4/z58z1REhER+bAeCau8vDzk5eW5PTZz5kxMnz4dU6ZMwYkTJzBr1iy8+eab0mgLACwWC4xGY0+UREREPsxr04BGoxGhoaEAgMjISFgsFhgMBmi1WpSUlEAURezfvx9ZWVneKomIiHyE17oB58yZg8WLF+ONN95AU1MTnn32WQDA8uXLsWDBAjgcDuTk5CAjI8NbJRERkY/wWljFxMTgpZdeavV4ZmYmduzY4a0yiIjIB3FTMBGRDzJZm+Quwau8NrKSkyCIOFdlQUWdFTFGHQZE6qFWq+Qui4jomjUJgtwleFWvDytBELGnsBzzdxTAaheg06qxblImxqfHMrCIiHxEr58GPFdlkYIKAKx2AfN3FOBclaWDzyQiUq4Ada9/+3bT67/bijqrFFROVruAiyarTBUREV2/UF2vnxhz0+vDKsaog07r/m3qtGpEh+pkqoiIiLqq14fVgEg91k3KlALLuWY1IFIvc2VERNRZvX4cqVarMD49FoNn5+KiyYroUHYDEhH5ml4fVkBzYCVHGZAcZej4yUREpDi9fhqQiIh8H8OKiIgUj2FFRESKx7AiIiLFY1gREZHiMayIiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBTP545bKi0txX333Sd3GURE3S48PBwvv/xyp5/rT1SiKIpyF0FERNQeTgMSEZHiMayIiEjxGFZERKR4DCsiIlI8hhURESkew4qIiBTP5/ZZ9TSTyYR58+ahoaEBWq0Wzz33HKKiolBQUICVK1dCo9EgJycHM2fOlK1Gh8OB1atX49ixY7DZbJg1axZuv/12RdXodPbsWUyaNAkHDhxAUFCQomo0mUxYuHAhzGYz7HY7nnrqKdx4442KqhEABEHAsmXLcPLkSQQGBmLFihVISkqStSYAsNvtePrpp1FaWgqbzYbHH38cKSkpeOqpp6BSqZCamor8/Hyo1fL/TlxVVYX77rsPr7zyCgICAhRZI3VAJDd/+ctfxDVr1oiiKIrbt28XV69eLYqiKP7kJz8Ri4uLRUEQxOnTp4vHjh2TrcZdu3aJ+fn5oiiKYnl5ufjqq68qrkZRFEWTySQ+8sgj4qhRo0Sr1aq4Gl944QXpZ3f27FnxnnvuUVyNoiiK//znP8VFixaJoiiKX3/9tfjYY4/JWo/T3/72N3HFihWiKIpidXW1eOutt4qPPvqoeOjQIVEURXHJkiXihx9+KGeJoiiKos1mE5944gnxjjvuEM+cOaPIGqlj/HWihUGDBsFisQAAzGYzAgICYDabYbPZkJiYCJVKhZycHBw8eFC2Gvfv34/Y2FjMmDEDixcvxpgxYxRXoyiKWLJkCebPn4/g4GAAUFyNDz30EKZMmQKgebQaFBSkuBoB4MiRI8jNzQUAZGZm4tixY7LW4zR+/HjMmTNH+rtGo0FhYSGys7MBAKNHj8aBAwfkKk+yZs0aTJkyBdHR0QCgyBqpY349Dbhz505s3brV7bGlS5fi888/x4QJE1BbW4vXX38dZrMZBoNBeo5er8f58+dlqzE8PBxBQUHYvHkzvvrqK/zmN7/B2rVrFVVjv379MGHCBAwePFh6TGk/x1WrVmH48OGorKzEwoUL8fTTT8taY1ta1qTRaNDU1ISAAHn/99Xr9QCa65s9ezbmzp2LNWvWQKVSSR83mUxyloi3334bERERyM3NxZYtWwA0/yKlpBqpc/w6rPLy8pCXl+f22MyZMzF9+nRMmTIFJ06cwKxZs/Dmm29Koy0AsFgsMBqNstU4b9483HbbbVCpVMjOzsa5c+dgMBgUVeO4ceOwa9cu7Nq1C5WVlXj44YexefNmRdUIACdPnsT8+fPx5JNPIjs7G2azWbYa29Lyv60gCLIHlVNZWRl+9atf4f7778ddd92F5557TvqYEn52u3btgkqlwsGDB3H8+HEsWrQI1dXV0seVUCN1DqcBWzAajQgNDQUAREZGwmKxwGAwQKvVoqSkBKIoYv/
+/cjKypKtxptuugmffvopAODEiROIi4tTXI0fffQRtm3bhm3btiEqKgqvvPKK4mo8c+YM5syZg7Vr1+LWW28FAMXVCAAjRozAvn37AAAFBQUYNGiQrPU4Xbp0CQ8//DAWLlyIn/3sZwCAIUOG4IsvvgAA7Nu3T/af3euvv47XXnsN27ZtQ1paGtasWYPRo0crqkbqHB5k20JFRQUWL16M+vp6NDU1Yfbs2fjRj36EgoICrFq1Cg6HAzk5OZg3b55sNdpsNuTn5+Ps2bMQRRHLli1Denq6omp0NWbMGHzwwQdSN6BSanz88cdx8uRJxMfHA2gOqk2bNimqRuBqN+CpU6cgiiJWrVqFgQMHyloTAKxYsQIffPABkpOTpcd++9vfYsWKFbDb7UhOTsaKFSug0WhkrPKqqVOnYtmyZVCr1ViyZIkia6S2MayIiEjxOA1IRESKx7AiIiLFY1gREZHiMayIiEjxGFZERKR4DCuiTli1ahXefPNNucsg8lsMK6J2VFdXY/r06fj444/lLoXIrynjzBYiBXj77bfx6aefwmq1oqSkBI888gh++MMfYtasWdIJEkQkD46siFyYzWZs3rwZmzZtwpYtW5CQkICMjAy5yyLyewwrIhfOU+Lj4uJgs9lkroaInBhWRC6cV0cQkbIwrIiISPF4kC0RESkeR1ZERKR4DCsiIlI8hhURESkew4qIiBSPYUVERIrHsCIiIsVjWBERkeL9fysckGrG7UMRAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "df = smiles_data.dropna(axis=0, how='any')\n", + "# seaborn jointplot will allow us to compare n1 and n2, and plot each marginal\n", + "sns.jointplot(x='n1', y='n2', data=smiles_data) " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "dqNjNcTNl5_7" + }, + "source": [ + "We see that most of the data is contained in the gaussian-ish blob centered a bit below zero. We see that there are a few clearly active datapoints located in the bottom left, and one on the top right. These are all distinguished from the majority of the data. How do we handle the data in the blob? \n", + "\n", + "Because n1 and n2 represent the same measurement, ideally they would be of the same value. This plot should be tightly aligned to the diagonal, and the pearson correlation coefficient should be 1. We see this is not the case. This helps gives us an idea of the error of our assay.\n", + "\n", + "Let's look at the error more closely, plotting in the distribution of (n1-n2)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 296 + }, + "colab_type": "code", + "id": "guGcilXIl5_9", + "outputId": "89bcc713-0d04-443d-eda0-19deb9abf560" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0, 0.5, 'probability')" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAEECAYAAAArlo9mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAYq0lEQVR4nO3de1DVdf7H8dc5CGQgXigvK9lK4aiz7SUJdFQ0V8XactTVEFsax0viJUXTQFSwJMgcdVsaU7cxWxDtgu627Tjtiu2iYmdyJ9fCdMu85I1UvACLcIDv74+m86vFg8f1fM8Bvs/HTDPw+Z7O5/3pnF58zud8v5+vzTAMQwCAVs/u7wIAAL5B4AOARRD4AGARBD4AWASBDwAW0cbfBbgTGxur7t27+7sMAGhRzpw5I4fDccNjzTbwu3fvru3bt/u7DABoUcaNG+f2GEs6AGARBD4AWASBDwAWQeADgEUQ+ABgEQQ+AFgEgQ8AFkHgA4BFEPgAYBHN9kpbWM+lyhpdrXY2am/fNlDhocF+qAhoXQh8NBtXq51K3/Fpo/bssQ8Q+IAXsKQDABZB4AOARZi2pLNhwwbt3r1bTqdTiYmJiomJUVpammw2m6KiopSZmSm7nb83AOArpiSuw+HQJ598oq1btyovL0/nz59XTk6OUlJSVFBQIMMwVFRUZEbXAAA3TAn8vXv3qlevXpo9e7aSk5M1dOhQlZaWKiYmRpIUFxenkpISM7oGALhhypLO5cuXdfbsWa1fv16nT5/WzJkzZRiGbDabJCkkJEQVFRVmdA0AcMOUwO/QoYMiIyMVFBSkyMhIBQcH6/z5867jVVVVCgsLM6NrAIAbpizp9OvXT3v27JFhGCorK1N1dbUGDBjgus9icXGxoqOjzegaAOCGKTP8hx9+WB9//LHGjx8vwzCUkZGhiIgILVu2TGvWrFFkZKTi4+PN6BoA4IZpp2U+99xzjdry8/PN6g4AcBOcCA8AFsFeOjANm6EBzQuBD9OwGRrQvLCkAwAWQeADgEUQ+ABgEQQ+AFgEgQ8AFkHgA4BFEPgAYBEEPgBYBIEPABZB4AOARRD4AGARBD4AWASBDwAWQeADgEUQ+ABgEeyHD58LsNv01YXKRu11DYYfqgGsg8CHz1XV1OmF9w83as94rK8fqgGsgyUdALAIAh8ALILABwCLIPABwCIIfACwCNPO0hkzZozatWsnSYqIiFBycrLS0tJks9kUFRWlzMxM2e38vQEAXzEl8GtqaiRJeXl5rrbk5GSlpKQoNjZWGRkZKioq0ogRI8zoHgBwA6ZMsY8cOaLq6mpNmTJFTz31lA4ePKjS0lLFxMRIkuLi4lRSUmJG1wAAN0yZ4d9xxx2aOnWqJkyYoBMnTmj69OkyDEM2m02SFBISooqKCjO6BgC4YUrg9+zZU/fee69sNpt69uypDh06qLS01HW8qqpKYWFhZnQNAHDDlCWdd999Vy+99JIkqaysTJWVlRo4cKAcDockqbi4WNHR0WZ0DQBww5
QZ/vjx47V48WIlJibKZrMpOztbHTt21LJly7RmzRpFRkYqPj7ejK4BAG6YEvhBQUFavXp1o/b8/HwzugMAeIAT4QHAIgh8ALAIAh8ALILABwCLIPABwCIIfACwCAIfACyCwAcAizBtP3zAWwLsNn11obJRe/u2gQoPDfZDRUDLROCj2auqqdML7x9u1J499gECH7gFLOkAgEUww8dtu1RZo6vVzkbtdQ2GH6oB4A6Bj9t2tdqp9B2fNmrPeKyvH6oB4A5LOgBgEQQ+AFgEgQ8AFkHgA4BFEPgAYBEEPgBYBIEPABZB4AOARRD4AGARBD4AWASBDwAWQeADgEUQ+ABgEaYF/qVLlzRkyBAdO3ZMJ0+eVGJioiZNmqTMzEw1NDSY1S0AwA1TAt/pdCojI0N33HGHJCknJ0cpKSkqKCiQYRgqKioyo1sAQBNMCfyVK1dq4sSJ6ty5sySptLRUMTExkqS4uDiVlJSY0S0AoAleD/zt27erU6dOGjx4sKvNMAzZbDZJUkhIiCoqKrzdLQDgJrx+x6vCwkLZbDbt379fn3/+uVJTU1VeXu46XlVVpbCwMG93CwC4Ca8H/pYtW1w/JyUlafny5Vq1apUcDodiY2NVXFys/v37e7tbAMBN+OS0zNTUVOXm5iohIUFOp1Px8fG+6BYA8D0ezfA3bdqkMWPGqFOnTrf05Hl5ea6f8/Pzb60yAIBXeRT4bdu21axZs9S5c2f9+te/VlxcnOtLWABAy+DRkk5iYqK2bdumZ555Ru+9954efvhh5ebm6tq1a2bXBwDwEo9m+NeuXdNf/vIX/elPf1K7du20ZMkS1dXVadasWSzVAEAL4VHgjx8/XqNHj9batWvVrVs3V/uRI0dMKwwA4F0eLelMmzZNc+bMcYX9H/7wB0nS/PnzzasMAOBVTc7w33//fe3evVsOh0MOh0OSVF9fry+++EJPPfWUTwoEAHhHk4E/ePBg3X333bpy5YoSEhIkSXa7Xffcc49PigMAeE+TgV9dXa3Y2FjXJmjf+c9//mNqUQAA72sy8Ddt2qT09HRlZGT8oN1ms7nW8QEALUOTgZ+eni7ph1fMAgBapiYDf9CgQW6P7d271+vFAADM02TgE+oA0Ho0Gfjr1q3TrFmztGDBgkZ756xevdrUwgAA3tVk4A8bNkySNHHiRJ8UAwAwT5NX2vbu3VuSFBUVpd27d2vTpk3as2eP+vTp45PiAADe49HWCqmpqerRo4dSUlLUpUsXpaamml0XAMDLPNo8raamRpMmTZL07az/gw8+MLUoAID3NRn4x48flyR17NhRO3fuVHR0tA4dOqSIiAifFAcA8J4mA//7V9gWFBSooKBAkrjbFQC0QE0GvrsrbJ1OpynFAADM49Ea/rZt2/TGG2+orq5OhmEoMDCQdXwAaGE8Okvn7bffVl5enuLi4pSTk6P77rvP7LoAAF7mUeB37NhRnTt3VlVVlWJjY3X16lWz6wIAeJlHgd+uXTvt2rVLNptN27ZtU3l5udl1AQC8zKPAz8rKUvfu3fXss8/qxIkTWr58ucllAQC8zaMvbYOCgnTgwAGdOHFCUVFRio6ObvLx9fX1Wrp0qY4fP66AgADl5OTIMAylpaXJZrMpKipKmZmZsts9+nsDAPACj7dWKCsr04ABA3Ty5EnXjVHc+fDDDyV9e3bP3LlzlZOTo5ycHKWkpKigoECGYaioqOj2qwcAeMyjGf7Fixe1du1aSdLw4cP1m9/8psnHDx8+XEOHDpUknT17VnfddZf+/ve/KyYmRpIUFxenffv2acSIEbdROgDgVjQ5w6+trVVtba0iIiJ06NAhSdKRI0f04x//+KZP3KZNG6WmpmrFihWKj4+XYRiuK3RDQkJUUVFx+9UDADzW5Ax/1KhRstlsMgxDDodDQUFBqq2tVXBwsEdPvnLlSi1cuFBPPPGEampqXO1VVVUKCwu7vcoBALekycDfvXu362fDMFReXq6OHTve9MvWP/
7xjyorK9OMGTPUtm1b2Ww2/eQnP5HD4VBsbKyKi4vVv39/74wAAOARj760dTgcGj58uKZOnarhw4dr3759TT5+5MiROnz4sJ588klNnTpV6enpysjIUG5urhISEuR0OhUfH++VAQAAPOPRl7a//e1vVVBQoC5duqisrExz5szRwIED3T7+zjvv1CuvvNKoPT8//3+vFABwWzya4QcEBKhLly6SpC5duni8hg8AaD48muGHhoYqLy9PDz30kD7++GO1b9/e7LoAAF7m0Qx/1apVOnv2rNauXatz584pOzvb7LoAAF7m0Qx/+fLlWr16tdm1AABM5NEMv7a2VkeOHFFNTY3rYiwAQMvi0Qz/xIkTSk5OVnl5ucLDw2W329kLBwBaGI9m+M8884zsdrsiIyMVEBCg559/3uy6AABe5tEMf926dXrnnXcUHh6uixcvKjk5WYMGDTK7NgCAF3k0w+/QoYPCw8MlSXfddZdCQ0NNLQoA4H0en4c/depUPfTQQyotLdX169e1Zs0aSdKCBQtMLRAA4B0eBf4vf/lL18/fXXELAGhZPAr8sWPHml0HWoBLlTW6Wu1s1F7XYPihGinAbtNXFypveKx920CFh7IFCPB9HgU+IElXq51K3/Fpo/aMx/r6oRqpqqZOL7x/+IbHssc+QOAD/4W7iAOARRD4AGARBD4AWASBDwAWQeADgEUQ+ABgEQQ+AFgEgQ8AFkHgA4BFEPgAYBEEPgBYBIEPABbh9c3TnE6n0tPTdebMGdXW1mrmzJm6//77lZaWJpvNpqioKGVmZspu528NAPiS1wP/vffeU4cOHbRq1SpdvnxZY8eOVe/evZWSkqLY2FhlZGSoqKhII0aM8HbXAIAmeH2aPWrUKM2bN8/1e0BAgEpLSxUTEyNJiouLU0lJibe7BQDchNcDPyQkRKGhoaqsrNTcuXOVkpIiwzBks9lcxysqKrzdLfAD390c5b//uVRZ4+/SAL8x5QYo586d0+zZszVp0iQ9/vjjWrVqletYVVWVwsLCzOgWcHF3cxRujAIr8/oM/+LFi5oyZYoWLVqk8ePHS5L69u0rh8MhSSouLlZ0dLS3uwUA3ITXA3/9+vW6du2a1q1bp6SkJCUlJSklJUW5ublKSEiQ0+lUfHy8t7sFANyE15d0li5dqqVLlzZqz8/P93ZXAIBbwMnwAGARBD4AWASBDwAWQeADgEUQ+ABgEQQ+AFgEgQ8AFkHgA4BFEPgAYBEEPgBYBIEPABZB4AOARRD4AGARBD4AWASBDwAWQeADgEUQ+ABgEQQ+AFiE129xiJbjUmWNrlY7G7W3bxuo8NBgP1QEwEwEvoVdrXYqfcenjdqzxz5A4AOtEEs6AGARBD4AWARLOmgkwG7TVxcqG7XXNRh+qMa73I2N7y1gBQQ+GqmqqdML7x9u1J7xWF8/VONd7sbG9xawApZ0AMAiCHwAsAjTAv9f//qXkpKSJEknT55UYmKiJk2apMzMTDU0NJjVLQDADVMC//e//72WLl2qmpoaSVJOTo5SUlJUUFAgwzBUVFRkRrcAgCaYEvg9evRQbm6u6/fS0lLFxMRIkuLi4lRSUmJGtwCAJpgS+PHx8WrT5v9PADIMQzabTZIUEhKiiooKM7oFADTBJ1/a2u3/301VVZXCwsJ80S0A4Ht8Evh9+/aVw+GQJBUXFys6OtoX3QIAvscngZ+amqrc3FwlJCTI6XQqPj7eF90CAL7HtCttIyIi9Pbbb0uSevbsqfz8fLO6AgB4gAuvAMAi2EunFeGGJgCaQuC3ItzQBEBTWNIBAIsg8AHAIgh8ALAIAh8ALILABwCLIPABwCIIfACwCAIfACyCC68sIMBu01cXKhu11zUYfqgGgL8Q+BZQVVOnF94/3Kg947G+fqgGgL+wpAMAFsEMH2gCG9KhNSHwgSawIR1aE5Z0AMAiCHwAsAiWdHyEtWBr4/VHc0Dg+whrwdbG64/mgCUdALAIZvg3wUdxa/DW1c
ju3i/euqqZ92Pr4uvXk8C/CT6KW4O3rkZ2937x1lXNvB9bF1+/nizpAIBFtNoZ/q1+VLrVj+LulgDuCAzQdWf9bT/PrdbZVB/wPm8tAXnrfQR4wmeB39DQoOXLl+vo0aMKCgpSVlaW7r33XtP6u9WPSrf6UbypJYBbWRpw9zy3WmdTfcD7vLUE5K33EeAJny3p7Nq1S7W1tXrrrbf07LPP6qWXXvJV1wAA+TDw//nPf2rw4MGSpJ///Of67LPPfNU1AECSzTAMnywKLlmyRCNHjtSQIUMkSUOHDtWuXbvUps2NV5ViY2PVvXt3X5QGAK3GmTNn5HA4bnjMZ2v4oaGhqqqqcv3e0NDgNuwluS0YAPC/8dmSzoMPPqji4mJJ0sGDB9WrVy9fdQ0AkA+XdL47S+ff//63DMNQdna27rvvPl90DQCQDwMfAOBfXGkLABZB4AOARRD4AGARzXYvnWPHjumJJ55QSUmJgoODdfDgQb344osKCAjQoEGDNGfOHH+X6BUVFRVatGiRKisr5XQ6lZaWpl/84hetdryS77fZ8Aen06n09HSdOXNGtbW1mjlzpu6//36lpaXJZrMpKipKmZmZsttb15zr0qVLGjdunDZt2qQ2bdq06vFu2LBBu3fvltPpVGJiomJiYpr/eI1mqKKiwpg+fbrRv39/4/r164ZhGMbo0aONkydPGg0NDca0adOMzz77zM9Vescrr7xivPHGG4ZhGMaxY8eMMWPGGIbResdrGIbxwQcfGKmpqYZhGMYnn3xiJCcn+7ki73v33XeNrKwswzAMo7y83BgyZIgxY8YM46OPPjIMwzCWLVtm/PWvf/VniV5XW1trzJo1yxg5cqTx5ZdfturxfvTRR8aMGTOM+vp6o7Ky0vjd737XIsbbzP78SIZhaNmyZVqwYIHatm0rSaqsrFRtba169Oghm82mQYMGaf/+/X6u1DsmT56siRMnSpLq6+sVHBzcqscrWWObjVGjRmnevHmu3wMCAlRaWqqYmBhJUlxcnEpKSvxVnilWrlypiRMnqnPnzpLUqse7d+9e9erVS7Nnz1ZycrKGDh3aIsbr1yWdd955R2+++eYP2n70ox/p0UcfVe/evV1tlZWVCg0Ndf0eEhKir7/+2md1esuNxpudna2f/vSnunDhghYtWqT09PRWM153/nt8AQEBqqura/LK65YmJCRE0rdjnTt3rlJSUrRy5UrZbDbX8YqKCn+W6FXbt29Xp06dNHjwYG3cuFHSt5O31jrey5cv6+zZs1q/fr1Onz6tmTNntojx+vX/sAkTJmjChAk/aBsxYoQKCwtVWFioCxcuaMqUKdqwYcMPtmWoqqpSWFiYr8u9bTcaryQdPXpUCxYs0HPPPaeYmBhVVla2ivG6c6vbbLRU586d0+zZszVp0iQ9/vjjWrVqletYa3tNCwsLZbPZtH//fn3++edKTU1VeXm563hrG2+HDh0UGRmpoKAgRUZGKjg4WOfPn3cdb67jbXZLOn/729+Ul5envLw83X333dq0aZNCQ0MVGBioU6dOyTAM7d27V9HR0f4u1Su+/PJLzZs3T6tXr3ZtLNeaxytZY5uNixcvasqUKVq0aJHGjx8vSerbt69rj6ji4uJW9Zpu2bJF+fn5ysvLU58+fbRy5UrFxcW12vH269dPe/bskWEYKisrU3V1tQYMGNDsx9usr7QdNmyYdu7c6TpLJzs7W/X19Ro0aJDmz5/v7/K8YubMmTp69KhrZ9DQ0FC99tprrXa8kjW22cjKytLOnTsVGRnpaluyZImysrLkdDoVGRmprKwsBQQE+LFKcyQlJWn58uWy2+1atmxZqx3vyy+/LIfDIcMwNH/+fEVERDT78TbrwAcAeE+zW9IBAJiDwAcAiyDwAcAiCHwAsAgCHwAsgsBHi1dTU6Nhw4ZJkl588UWdPXtW165dU0JCgqZMmaJDhw7pV7/6lVavXu3nSr+1ceNGHTp0yN9lwII4LRMtXk1NjR
555BHt3r3b1XbgwAG9+eabys3N1bp169SuXTslJSX5sUrA/1rf9eywhKqqKi1cuFDXrl1Tjx49XO1JSUlasmSJVqxYoW+++UbTpk3TV199pcDAQHXt2lXt27fX2rVrFRAQoHvuuUcvvPCC/vznP6uwsFANDQ2aO3eurly5os2bN8tut6tfv35auHChcnNzdfr0aV26dElnz57V4sWLNXjwYH344Yd69dVXJX17Je3zzz+vAwcONOojMDDQVWNaWpoeffRRXbx4Uf/4xz90/fp1nTp1StOnT9e4ceN+MM6RI0fqwQcf1PHjxxUeHq7c3NxmdzEPWg6WdNAi7dixQ7169dKWLVtcu41+JzAwUOnp6erfv79ef/11jR07VpMnT9bw4cO1bNkyvfrqq8rPz1eXLl20Y8cOSVJYWJi2bt2qPn36KDc3V5s3b9bWrVtVVlamffv2SZKCgoL0+uuva8mSJdq8ebPq6uq0YsUKbdy4UYWFherSpYvOnTvnto8bqays1IYNG/Taa6+5Nh37vq+//lrz5s3TW2+9pfLycn366ade/K8Iq2GGjxbpiy++cG2x/LOf/cyjzdfKy8v1zTffKCUlRZJ0/fp1DRw4UD169FDPnj0lSadOnVJ5ebmefvppSd9+kvhup9I+ffpIkrp27ara2lpdvnxZYWFhCg8PlyTNmTNHly5dumEf7ny3K2y3bt1UW1vb6HjHjh3VrVs312NqampuOk7AHQIfLVJkZKQOHjyo4cOH6/Dhw6qrq7vpv9OxY0d17drVtaZfVFSkO++8U+fOnXPdmSgiIkLdunXTpk2bFBgYqO3bt6tPnz7atWuXa+vb74SHh+vatWu6cuWKOnTooKysLI0ePfqGfbjz3895q8eBW0Hgo0V68skntXjxYiUmJioyMvIHa+Tu2O12LVmyRE8//bQMw1BISIhefvllnTt3zvWYTp06afLkyUpKSlJ9fb26d++uRx55xO3zZWZmasaMGbLb7erbt68eeOCBG/YBNAecpQMAFsGXtgBgEQQ+AFgEgQ8AFkHgA4BFEPgAYBEEPgBYBIEPABbxfz98wAoJ3/LnAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "diff_df = df['n1'] - df['n2']\n", + "\n", + "sns.histplot(diff_df)\n", + "plt.xlabel('difference in n')\n", + "plt.ylabel('probability')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "VTbA5r_Zl6AD" + }, + "source": [ + "This looks pretty gaussian, let's get the 95% confidence interval by fitting a gaussian via scipy, and taking 2*the standard deviation" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "PcBDorCcl6AS", + "outputId": "ee99844a-4b00-4056-bc5b-ee4282a5172d" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "17.75387954711914" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from scipy import stats\n", + "mean, std = stats.norm.fit(np.asarray(diff_df, dtype=np.float32))\n", + "ci_95 = std*2\n", + "ci_95" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "N_6SzWXyl6Ak" + }, + "source": [ + "Now, I don't trust the data outside of the confidence interval, and will therefore drop these datapoints from df. \n", + "\n", + "For example, in the plot above, at least one datapoint has n1-n2 > 60. This is disconcerting." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 458 + }, + "colab_type": "code", + "id": "qR8D_BKel6Ay", + "outputId": "c5f59a48-4780-4883-a3fa-b47320071f6c" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAasAAAGoCAYAAAD4hcrDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3de0DUVd4/8PdcGZgBBJSLChgJgqiQmtouWulqrmvbZUPdnrVfF7OtVVBXs0xFy0s+pm1ZW7rbxaw1da1t61ndynYzky5WWJJXTFBDQTSYGRhmmPn+/sD5OgPDTWHmzMz79RcOw8yBkrfnnM/5HIUkSRKIiIgEpvT1AIiIiNrCsCIiIuExrIiISHgMKyIiEh7DioiIhMewIiIi4TGsiIhIeAwrIiISntrXAyAKdOs/LsHJ87XynxOjw/DA9Vf7cERE/odhRdTFTp6vxbFKk6+HQeTXuAxIRETC48yKyMtUCkWbz+HSIZE7hhWRlyV007mFkacg4tIhkTuGFZEPMIyIOoZ7VkREJDyGFZGPtWcPiyjYcRmQyMea7mENSY7y8YiIxMOwIhKA6x5W76hQH4+GSDxcBiQiIuExrIiISHgMKyIiEh7DioiIhMewIiIi4TGsiIhIeAwrIiISHsOKiIiEx7AiIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuGx6zpRJ+N1H0Sdj2FF1Mm64roPXtBIwY5hReQHml7QmBgdhgeuv9rHoyLyHoYV0RXy1rKf64yNKNgwrIiuEG/5Jep6rAYkIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuExrIiISHgMKyIiEh7DisgPsf0SBRseCibyQ2y/RMGGYUXkp9h+iYIJlwGJiEh4DCsiIhIew4ooALDgggId96yIAkDTgguARRcUWBhWRB0k6rX1TQsuPM22WEFI/ophRdRB/nJ/VdPZ1pDkKFYQkt9iWBG1ounSmkgzqfbwl2AlagvDiqgVTWci/IVP5BusBiQiIuFxZkVBzXWZLzkmDPePCp6CA5a7kz9hWFFQa7qnI2qlX1doWoARbGFN/oVhRUGjPcUSwVaQ0FpYAyxvJ3EwrChgtHWGiMUSbWvPWa2uwJCktgRVWP3ftz+i0lgPAOgRHoJfDeop5DhcPw8AqbEG/FRn69DX+PL76wqu31/Tn4fzsbYkRoe5/TkuQgeFyy/jtv58OV/TGa/hy7FmJ3Xrkr83Tf97+jtRfrcEMoUkSZKvB9ER9913Hy5cuODrYRARdbqoqCi89NJLvh6GkPwurIiIKPjwnBUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJz+/C6r777vP1EIiIfC7Yfhf6XVixLyARUfD9LvS7sCIiouDDsCIiIuExrIiISHgMKyIiEh7DioiIhMewIiIi4TGsiIhIeAwrIiISHsOKiIiEp/b1AIiIAoXDIeFElRlnayyIi9ChT4weSqXC18MKCAwrIqJO4HBI2Fl8BnO2FsFic0CnUWLtpGyMz
4xnYHUCLgMSEXWCE1VmOagAwGJzYM7WIpyoMvt4ZIGBYUVE1AnO1ljkoHKy2ByoMFq65P2MloYueV1RMayIiDpBXIQOOo37r1SdRonYcF2XvF+Dw9H2kwKI18OqqqoK119/PUpKSlBaWorf/va3uPPOO1FQUABHkP3wiShw9InRY+2kbDmwnHtWfWL0Ph5ZYPBqgYXNZsPixYuh0zX+S2PlypWYNWsWhg8fjsWLF2PXrl0YO3asN4dERNQplEoFxmfGIz1vJCqMFsSGsxqwM3l1ZrVq1SpMmTIFsbGxAIDi4mIMGzYMADBq1Cjs3bvXm8MhIupUSqUCKT0MGJHSHSk9DF0aVGplcO3ieO27feuttxAdHY2RI0fKj0mSBIWi8T+mXq+H0Wj01nCIiPxauC64Th557bvdvn07FAoFCgsLcfDgQcyfPx/nz5+XP282mxEREeGt4RARkR/xWli98cYb8sdTp07FkiVLsHr1anz++ecYPnw4du/ejREjRnhrOERE5Ed8uug5f/58rFu3DpMnT4bNZsNNN93ky+EQEZGgfLLouWnTJvnj119/3RdDICIiPxJc5SREROSXGFZERCQ8hhUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJj2FFRETCY1gREZHwGFZERCQ8hhUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJj2FFRETCY1gREZHwGFZERCQ8hhUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJj2FFRETCY1gREZHw1N56I5vNhgULFuD06dOwWq148MEH0bdvXzzyyCNQKBRITU1FQUEBlErmJxERufNaWP3zn/9Et27dsHr1aly4cAG33XYb0tPTMWvWLAwfPhyLFy/Grl27MHbsWG8NiYiI/ITXpjHjx49Hfn6+/GeVSoXi4mIMGzYMADBq1Cjs3bvXW8MhIiI/4rWw0uv1MBgMMJlMyMvLw6xZsyBJEhQKhfx5o9HoreEQEZEf8eoGUXl5Oe666y7ccsstuPnmm932p8xmMyIiIrw5HCLyAYdDwvFKEwpLzuF4pQkOh+TrIZEf8Nqe1blz53Dvvfdi8eLFuO666wAA/fv3x+eff47hw4dj9+7dGDFihLeGQ0Q+4HBI2Fl8BnO2FsFic0CnUWLtpGyMz4yHUqnw9fBIYF6bWb344ouoqanBn//8Z0ydOhVTp07FrFmzsG7dOkyePBk2mw033XSTt4ZDRD5wososBxUAWGwOzNlahBNVZh+PzP9Umax45dMffD0Mr/HazGrhwoVYuHBhs8dff/11bw2BiHzsbI1FDioni82B0iozztZYEBehQ58YPWdZ7WC1N/7cggUPNRGR18RF6KDTuP/a0WmU+ObkT/jtXz7HhGc/wc7iM9zHomYYVkTkNX1i9Fg7KVsOLJ1Gifwxqdi27xQALgtSy7y2DEhEpFQqMD4zHul5I1FhtEABBWZtKUJ5tUV+jsXmQIXRgpQeBh+OVHxalRLJMXpfD8NrGFZE5FVKpQIpPQxI6WHA8UoTLtRa3T6v0ygRG67z0ej8R6hWhXt+fpWvh+E1XAYkIp9xXRZMiNQhb0xfPHVHFiQJ3LdqQ32D3ddD8CrOrIiChMMh4YRgVXfOZcH++SPxddlPWPD2dzx/RR5xZkUUBJyHcSc8+4lwVXdKpQIOCXJQASy0aA91kN1QEVzfLVGQEv0wbkvnryqMlha+glRBNuNkWBEFAdHDoKXzVyy0aFmw7VkxrIiCwJWGQVc3n/V0/mrtpGz0CaLSbGodCyyIgoAzDJo2kG1PGHij+WzT81ex4WIUgJA4GFZEQeBKwqCl/a70vJGdenDX9fwVtS3YCiwYVkRB4nLDoLX9LgaL77DAgojIBYsfxMQCCyIiFyx+IBFwGZCIWsXiB
xIBw4qI2sTiB/EEW4FFcH23REQBItgKLDizIiIhm9xS64KtwIJhRRTkvHHol+hKcRmQKMiJ3uSWCODMiigotLbM19Kh3yNnjQDAJUFBBVuBBcOKyE9c7r5SW8t8zkO/roGl0yjx3ekazNpSxCVBQTkkCcveK0aDQ0JyjD7gr7hnWBH5gSvZV3Iu80WFaXH74N5QKIDDZ2rQPyEcfbobPDa5zRudik2flXZZH0C6crVWOw78WOPrYXgNw4rID1xOM1nnTOzIWSNmju4Lg1aNlTsPyYGUHKNHUnRjF4r+CeHYeM8w/FRrw9EKIzZ9Voryaov8XuwDSL7GsCLyA201k226RJgUFYb3D551my3lj0lFVJgW5dWNr7Xg7e8wOKkbvi83NnueK/YBJBEE1w4dkZ9xXnqoVChabCbrXCKc8Own+O1fPseEZz/B3uNVzWZiz+w6itsH95a/3mJz4GxNvcfn5Q7tLb8H+wCKSatSIj0+HOnx4UgOgv8+nFkRCcp1nyoqTIv8Mal4ZtfRZpcneloi3Fd63uNMTOGyvaXTKGG2Nnh83jWJ3fDm9OHsAygwtUqBpOgwAEBcRIiPR9P1GFZEgnINofJqC14rLMX0USm4JrEbkmP0coh4WiJ0SPBY4efMHGfYJUfrPT4vOUbPPSrB1VrteP/7swCA9PhwTBjY08cj6loMKyIfce4zVZnroVUpUWu1y/tNZRdqceSs0S1EyqsteHbXMbw5fbhbkHgqPX93/2ms+s0gzN/+rdtMrH9COH52dYw8YwJw2dfdE3kTw4rIB5xLfKt2HsTkoUl49qNLy3vLbh2AdR8dxc1ZvTzOepoWO3gqPZ8/PgPjMuIwsFdks2s9+nR3nzHx+g//5NyzAsA9KyLqGs4lvvtyUuSgAhr3ixb+4wDuy0nB9q9OIW90Kp796CiiwrTIHdobabHhkKTGsHMGSmv3TTW91qOhwYHi8mqUV1uQEBmKzIQIqNVKeSZ1tqaxXJ2BJT7XPSvJ5YCwq0A6LMywIupirmXlCZE62B3A0Qojpo1MgValbLEQorzagk2flSJ/TCp6hIdg4T8OtHgguD33TTU0OPCP/afdXmfZrQPw64E98eHhCjay9TOue1bBgGFF1EUcDgk/nDPjYHkNjlYY8Z9DFfjlwAS3ir6nL14X33SpT7r4D+TyagvqbHY5YID2HQj2pLi8utnrLPzHAfSJ0Xf4wDGRtzGsiLqAp/ZIiyb2x4bdJW6h8OTOg1g0sT+eeO97+XkFEzOx/esy/OHGvojUqZAYrfc4+zpbY4FSAZytqYfZ2oDkaD2u6t7y8p3zMHDT1znjUk2YEKmTWzJVmuq5HEjCYFgRdQFPZ5+eeO973JeTguf/c0x+XmlVHXpGhmDL9BGotdoRplXh+f8cwW2DE7FhdwkmD03CoTM1SI4JxcRBveRzUu/uPw27JGHHgTPNzl61tHyXEBnqcRYXf7GaMCpMi6kjkuU9tL9+cpzLgQJzLbBoSSAVXjCsiLpAS+2RVE16xjSeabq01+RwSPjdiBRM37RPLr5IizXg96P6Yul7xXIoPXHLAJyoNMlB5Xz91pbvMhMisOzWAc32rAb1jMTaSdk4dKamWbEHlwPF5Vpg0RJJkvDa3hO462d9vDOoLsSwIuoCYVq1x1nMoN7d5Mc9nWlyOCTU2+xykYXF5sDItFg5qIDGxxa9cwCr78hqtV9gU2q1Erdm9UJqrAFnqi2Ij9QhMyESarUS4zPjm423rdcj32pvgUVbsy9/4fOwcjgcWLJkCQ4fPgytVotly5YhOTnZ18MiuiJWu10uO3e9diM8RIkdeSNRYby0z+SsFqyus+JYhRmnLtTKfQB1GqUcWq4sNgesDfZmAZMcE4pQjQqFJec83nmlViuRlRiFrET38SqVCvSJ8dzNgk1sSQQ+D6sPP/wQVqsVW7ZsQVFREZ588km88MILvh4W0RWJ0Ydgy74y3JeTAoUCkCRgy74yTBg4rFmXc+ch4
D+OS8eidw4gKkwrF2PkjU5FvYdQ0mmUiDaEYMEv07FiR+O1H8kxoZg5OhWTN3zW5h6Wp4scPR0uZjcLEoXPw+qrr77CyJEjAQDZ2dk4cOCAj0dEdOX6xOgxf3yG2y/+FbcNRE2drVnhhfMQcF19g9wHcMsXZZg/PgOnzptxdc9ILJ7YH4+7VAzmjU7Fkn8ewLrfXoON9wxDrbUB0WFaTP7LZ23uOTVtkOs8bJyREIFxGXH4F7tZ+IX2FFgAgVNk4fOwMplMMBgu/UVSqVRoaGiAWu3zoRFdNmdXiX4zR+LgmRocOWvE6n8fRu7Q3i0eAg4Lcd/nClEr0DtajzCNCt16hGHuuDSEh2gQFqLG6Z9qYW2QUGWy4sb0OABAYcm5du05ud4c7Fr95zoT4x6V+NpTYAG03N3CyV+6XPg8EQwGA8xms/xnh8PBoCJheVo+a2nmoVQqoFAAc7ftl0OkpW7okgT8ZXeJfMbqjiFJ+MPfvnFbKtSqlCh491JF4KKJ/REZqkFDgwNqtdJjQ1tPe07OSsXbB/dm9Z8fC7YOFj6/fHHw4MHYvXs3AKCoqAhpaWk+HhGRZ54uOdxZfAaOFv7FCjQvYXf2+3MtoFh26wC89+1pfHu6Btu/LsPDN2VgybuXqv+iwrQoO1+LEI0K00amICFSJ5/b+u+RSvxj/2k0NDiQFBWGZbcOaPbaSVGX/vXtcEgI06qQN6YvkqJCPc7ESqvMKCw5h+OVpla/NyJv8vkUZuzYsfj0008xZcoUSJKEFStW+HpIRB55Oug7Z2sR+s0ciatjPc9Ems52yqst+OjQGbx+33CcM9UjIVKHjLgIDE6KkveJXK8GSYjUNVuqyxudik2flaK82oJe3UJx8nwtDp2tQZhWjXUfHXUr6lj30VEMTopCSg9Ds64a+WP6epyJfXPyJzy76xh7BJJQfB5WSqUSjz/+uK+HQdQq5/Kfp5nIwTM1LbY5alphlxwTiinDkvG7lz6Xw+e5O6/BVTEGuR+ga6cJT0t1z14MpJf2HEeYtvGv8A/nzNCHqHBzVi9s/+rUxa7qja2Tjpw1AgCUCriF7dZ9p5rdPpw/JhWvFZbK78VlQXG1t8CiLf5SgOHzsCISnXNGcvhMjceZyJGzRmTER3icXTW9viNUo5JLy4HGJb6jZ02Y4bI/9cyUbDz+60ws/mdxi2esVEogf0wqVvzrIC7UWpE/JhUA8NdPjmP2L9IQqlFCpVK69RxccdtARIVpUV7deA2I8/bhjfcMgwQJCigwa0uR/Hnne/FQsJjaW2DRFn8pwGBYEbXBtXrOUwn5ps9K0S8+vNUmskDjslyV2eoWPrcP7t2sZdKKfx1EwcRMPD0pG/oQlceATIsNx/J/HZSD5ZldR+WOFk9/eARP3ZGFuX/f7/a6C97+DtNHpeDZXZd6E16otaJHeAhSehhwvNKEC7VWtzHzULC4gq3AgmFF1AZnkUR5tQUxhhBMH5UCh9QYPps+K8WFWivCQ9T475EK9InRy9fSO++vcj0E3HSfKETd+LFzyS5cp0K4ToOH/va1vGxYMDHTrS/g8lsHugUV0BhGtfUN8sfmi2e2XFlsDqTFhbfY7omHgklkDCuiNrgWSfxwzgSdWtWs4OFQeQ1W7Djs1pGitKoOeWP6YsPu43Jw/OdQBZ6elI2DZ2oQolYiPSEcQ5MjcW/O1Th0pga9uoVhnsuMqLSqDi/uPob/vSMLxyqM6J8QgR4GrccZUKWpXv646Zkt5+MZ8REtHvpt7cZhEk9n7Vm1RZQ9LYYVURtcZxymejve3X+6WRulKdcm4Q839oVCAZw8X4sFE/pjyT+L4ZAu7TklROowfkACZrt1tRiAO4f3kWczc8elISpMK98pBTSWu9dZG2dNP5wz40SVGXPH9cNT7x+WX2fhrzLwU60NeWP6IiMhAlu/KGvWm3DtpGx5qbKlPaj23DhMYuisPau2tLWn1R6dse/FsCJqg3PGEXPPMHxz8gIeGHW12
77Vo+PTYWlw4KU9x90O7P5+VApM1gb5Lqr0+HC3WZPF5sCJqlp55pUQqcPg5G5QKRXNKvTOm63Ytu8UHpuQAZVSgXCdCn+alI0jFSZ0C1UjVKvGsv876PY1O74rx/RRKUiLC0dGfESbe2rkX7hnRUTNKJUK9AgPwZ8+PIqoMC1m3NgXPQwhSOimQ/GPNc2KJJ5473vkj0lFYpQOeWPS8Njb32HeuH7yjAxonDE5Z17O81T7T1Y3e61ndh3FY7/MwF3XJctFEzqNEgt+mY6M+HBY7Q78cdv+Zl+zYeoQ9OoWJodUQ4MD353+6WJZeygyEyKgVvu8LwBRuzCsiNrJdTnwqfePyGekEqPCPBYz9IzUwRDSWCyRFmtAZJgGq12W7mb/Ig0KSNBplPJ5qmkjUzy+Vje9Fst3HHQreTdb7Zj55jctfo1GpZTL6a1WO9759kcsesf94sVbs3oxsMgvMKyI2sm1AOG8uR4alRL1DQ5Ehnru99c9PASf/3AeFpsDD97QV96rAiCXmP/5zsGY/Ys0mK2Xqvc8vVaETt1qybunrwnTqvDliSpolUrU1DfIQeV8/4X/OIDUWAOyEqO67odGXcZbBRadoTOKNBhWRB3gvKTw0BkjVu08iMlDk7BlX/NihvwxqfjudLXch0+SJI+zn1qbHRIkXJcSDaAv1Eolnp6UjSd3HkRpVZ38WiFqpRxICZE6pMeHY9rIFADA7sMVzd5/+W0D8XXpBcRHhuLJnQdxc1Yvj+9/ptrS7CJG8g/eKrDoDE2LNC6n4IJhRdRBzkPC9+WkyAGx6bNS3JeTApUS6B8fgR+qzOhuCIEhRI2VOw4idVy6x9kPJGDj3lLc8/M+cqGFs0AjQqfGqQt1iI0IwVPvH5IvZJw8NEku1HCWzn/5QxWeuiMLCkXjEuGre0uQGtcNF+pseGR8BipNFo/vHx/JA7/+igUWRNQq5yFh11ZI5dUWPP+fY0iI1GHG6L5u1Xx5o1NRZbJ47MN38kItbh/cG2s/ONKsQONPk7KRHKPHh8XluOtnKQAkPHn7INzz6pfNegU+PSnbrSS+YGImXtx9TJ6dLZ7YH4+OT8fKnYfk5zxxywBkJkT66sdI1CEMK6IOCtOq3a7hcJ2t5A7tLffjAy6FSf6YVBhC1HL3C6UCCNOosP3rU/if4ckel+iM9Q3417c/YlxmAh6+OJPKG9O3xWa6rp0wymvqMHdcOlZc7HTx+HvfY8aNfTF9VApSY8PR3aBFrygdviw93+a9XEQiYFgRdZDVbkfe6FSPe1VJ0Z4rA212ye1uqLTYcLy69zjGD0jAj9V1HpfoQjUq3DYkUQ4qoOXLG+2Otq8Tsdod6BOjx4CeESg5Z8K4pz9xOzDMq0D8iz8VWDR1OQUXDCsKeh25/RcAYvQh2LKvDHPHpaP8p1o8MzkbVrsDCiigVimahUlyTCiyEiPxVekFOKTG81UAsHhif8y+2CC3aejljU6FTqtEndXh9lrOyxtdn+vcy2rrOpEx6bEY2KsbTlSZ5S7vzufxKhD/408FFk05Cy56RYW1u9CCYUVBremFhO2ZZfSJ0WP++Ays2nkQ9/7sKlTXWWGXFFj6bjGiwrRue1PJMaF46Ia+eGDTV81mO0crTHKDXGeBhkIB9IsLx1PvH8Ij4zOgULjPpMqrLdiyrwyv3nMt9hyramz39EUZJg9NgqXB3uJ1IituG4jIUA2A5rcXO5/Hq0D8SyAUWHSkhRPDioJaS7f/tjbLkNsv6bUw1dtgbZDk4gbnHVHTR6Wgd7dQ6LRqt2U852xnxo19YbM75CByFmjoNEpMH5WC6aOuxpM7DyJSp2nWdf331/eFStl4d5XzdStNVjz2q4xWrxO5UGvF2knZ6OfSed31ebwKhETGo+sU1FqbZQCNM6/jlSYUlpzD8UqTvO+kVCqg06hQZ71U3OBUXm3Bs7uOwaBTo87q+aqO+EgdenXToeDmTLlYIzkmFM/dORhpceGI0WthbZDw7
ekavF9cjvVTh+CZKdl47d5hSIzWQaVQ4snbB8lfe6HWiipjPWb/Is2t+CN/TKp8nYgziFVKYO2kbLfn8SoQEh1nVhTUXK//cHLOMlpaIhyXEXfxvqo6qJSKFoseDp0xtvi5svO10KqUUKsUFyv0DDDX2zHj4j1WOo0Sc8am4b39P+Laq2KaLSNu2VeGP47thydu6Q+HpMDpn+rw4u7jACCf9xqaHIX5279rdu/VmRoLrwIJAP5cYOHUkUILhhUFNU8XDj535zWQJOC/Rypw+EyNfBW8xebAqp0HYbM7MH/7t7DYGi9TfHf/aY9FD8991Hgj76KJ/d2ul3fuWeUO7Y3kmHCEqJVQAJj392/dlgvXfnDE7fyU83Fn0cTD27/Fq/dci6f+fQij0+NxobbxFuKX9hzHitsGIlrv+d6r2HAdrwIJAP5cYOEkSRJe23sCd/2sT5vPZVhRUGt64WB8ROPNvr9a90mzcCmvtmDioF5yUAHA1n2ncNd1yXjzyzJ5RpPVuxue+fCIPKMxWmwebxcekRKDC2YrQtRKGC2elwvtLbRpch5IrjTWY9HETNjsDmyZPgK1Vrtc0QiAN/8GsEAosADQ7tkhw4qCnuss43ilqVnBhXMm8/x/jkGlRLP9qdcKS/HkbwbibLUFkWFa6DRK3JAei1H9YrH9q1PYuLcUd12X7Na94olbBuDRt76VO0y88D+DPS4XRoVpPD4uXVxe1IeoYbM7MLRPjNv35CzHjwrTYMv062Cz2xGtD+FyH/ktFlgQuWip4MJZQj6wZ6RcmOCkVStQabTi+f8eQ0mFCfdt3Idndx3DXz85jqkjkqFVK9CrWwiev3Mw8sb0xf/ekYXn/tN47b3z9Y+eNSJ/TGqz4oiSClPjmSuXx/NGp+K9bxuXHpe+WwyNyn08zr22Cc9+gt/+5XNM3lCICqOVQUV+jTMrIhctFVz0iwtH/phUVHro8VdwcyYeeuNrt8a2wKVZ2drcLITrNLh/0z5YbA7MGN1XDiqnV/aWYu5NaW7tmGIjQvDUv48AaCya6NtDD5VKiR9/qsXEQb3kpclaq93ttVzL8Z3tlw6dqUGvbqEY2CuSgRUgAqHAAmh/kQXDisiFp4KLZbcOwFPvH8LEQb3wzK7Gm4KdB3iVCuC82dqssa2TxebA92eMSOwW2iwAXf/sLD1Piw2HxWZHpake3Q0hbkUT6383BA+8/pXHe6sKS87Je1XO2WHT9ksbdh9nW6UA4o8FFvGROkwY2POyvpZhReSiacFFbLgOvSNDkRZrwOmfLFiTmwW1Sokfzplgqrdj275TWDAhw+2s1MRBveSr69/dfxrpceGICFXLAbX9q1OY/Ys0PP3hEbcO7DHhITh5obaxWOPnV0GpUGDdb69BhE6DuIgQ9IwIxbJbB2DhPy7d9vv4LQOQ9+Y38t7X2knZ6J8Q7nb7MNsqBSZ/LLBIjw9nWBFdLk+9AZ0FFw6HhB0HzuCP24rcgiVMo8LmL8pw13XJqK6tx7JbB+LNL07g96P6unWbKLg5Ext2l0CrVuDxXw/A4n8eQNm6PWMAABkNSURBVHm1BX/7ohQv/m4Iqkz10KpVOGe0oMpYjz4xejyVm4UT52px/2v73Kr4AGDdxWIPhQJIiwvHmvcPue19zdlahP+bORJrJ2XjUJPDys7nsK0S+SOGFQW1tnoD/nDOLAcV0PjL/pldRzF9VIq8LLhuyjXYe6wC827KwP975Qu35y59t1iuJARK8fLd1+L0hTrER+hQbbHiMZdZ0rJbB8AhSfjk6Dn5Ikbn68zZWoSN9wxDaVXdxdeCx70vi82BSlPjod9e3ULdXgdgW6VA4o97VldyvT3DioJaW70BS8+bPc5OHBLkPapaawNiI8Pwacm5FisJAWBfaTX2nbgAtVKBRe8cQFSYFtNHpSApOgw9DCGQIOH3r3+NaSNTPL5OrbXBY/GHpzBSKhUY2CuS56wCWGfsWV3JHpK3MawoqLXVgVyvVXsMB
KUCsDsa96ii9SEoOWdGaqznBrGSdOnj+gYHNn99Sj5APPyqaBSfrsaa948gd2hv+Ws9vU5S9KXij6gwLfRaFVb9ZhCOV5qwdd8puVGtM4w87b+xfD1wdMae1ZXsIXkbw4qCWmu9ARs/H4I5Y9Pka+ede1axESHY/HkpZo5OlUvSk2NCsfTXmSj4Z7Hbc18rLEVyTCgWT8zEwfIa/GZIb7y7/zT+cENfLHj7O3kpz9lH0NOdVf/7m0G4qrseV3XXo3/+SHxd9hMWvP2d/PkVtw3E4KRuSIp2DyO2VaJAwbCioOapVN11dpIUrUdKDz3yx6QiOkwLfYgaUXoN9FoVFk3MxOQNn8lBV1pVhz//9xientR4GeOPPzWG0KO/7AdjvR1/cGlSu2hifyRFh7rtObmG1KbPGq8Zuaq7HnERIRiWHCOHkEOCHFRA40xwwdvf4V95IzlrooDFsKKg1tZSmVKpwOh+cUjpbmj2+UIPe1SlVXUoLq/Bu/tP44/j0nHkrBEalQpPvOceLk+89z3+dPGajqYXK265fwTO11oRplUjLiKk2WyJlycS0DkFFldS8OBtDCvyK+25gr6j19S3tVTW0udjwz0vIUpSY2gdqzDiuY+OYcbovh7D5UiFqVk3jDlj+yGzZyTU6pY7obW1dEnBoT0FFv5UQNEWhhX5jfZcQX8519Rf7lh+qGoeNrN/kYZX956ATqPEoN7d3Hr6NQ2X+gYHNn9xCi/+bggqaiyoMNZj7QeHoVEpWx1vW0uXFBzaU2DhTwUUbWFYkd9ozxX0l3NN/eWOZcbfvmnWeskhSbhQa8XjtwxAv3g9ttw/ApXmejxxywAseufSmSrntSMXaq34uuwCnt11TH7ttsbLKj8KRgwr8hvt2au53P2cpkuHSVFhF28D9ryU6Hyf8mqLfEgXAJ6/8xpsmT4CGXER+PBwhRycyTGheHpSNupsdpyoMstB5XpJY0fGyyo/CjZeCyuj0Yh58+bBZDLBZrPhkUcewTXXXIOioiIsX74cKpUKOTk5mDFjhreGRH6mPXs1l7Of42npcPltA1FltKCm3o5395/G/PEZbktzLb1PRkKEx3uxSqvqMHtrEWbc2Bd2B5A7tDeuTY7GwfLqFm/zJWpNewos/KmAoi1eC6tXXnkFI0aMwN13343jx4/jj3/8I95++20UFBRg3bp1SExMxPTp01FcXIzMzExvDYv8SHv2ai5nP8fT0uFjb3+H+3JS8NKe48gbnYpVOw8iPT5cnsm09T4tzfAsDY0d1PPHpMIBB8b2j0f3cJ3bmSnuP1F7OAssAqmIojVeC6u7774bWq0WAGC32xESEgKTyQSr1YqkpCQAQE5ODgoLCxlW5FF79mouZz+ntQsXXW8Kdl2aa+t9Wpp5XZcSjZF9u7uVpCfH6JGd2I37T9QhzgKLQCqiaE2rYXX+/Hls2LABISEhuPvuuxEVFQUAeO6551pdrtu2bRs2btzo9tiKFSswaNAgVFZWYt68eViwYAFMJhMMhktr7nq9HidPnryS74cCXHv2ajq6n9NSsDjbJFlsDqiUaLY019r7tDTzui6le7Mg4v4TUdtaDauHH34YY8eORUNDA373u99hw4YN6NWrF7744otWXzQ3Nxe5ubnNHj98+DDmzJmDhx9+GMOGDYPJZILZbJY/bzabERERcZnfCtHl8RQsiyb2x5YvygA0BtfQ5OgOLc2xYo+oc7UaVlarFZMnTwYAZGRk4KGHHsKmTZsgOf/J2QHHjh1Dfn4+/vSnPyE9PR0AYDAYoNFoUFZWhsTEROzZs4cFFuR1SqUC4zLisGHqUOwrPQ+7A9iwuwSThyah2mLDnLH98LOUmA4HDWdM1JWcBRaBVETRmlbDym634/Dhw+jXrx8GDx6MBx54AA8++CBqa2s7/EZr1qyB1WrF8uXLATQG1QsvvIClS5di7ty5sNvtyMnJQVZW1uV9J0RXoOxCLaZfbEjr9OxHR7Fl+ggM7NWNMyISj
lqlwLCrooNivwpoI6wWLlyIZcuW4emnn0b37t0xYcIENDQ0yIHTES+88ILHx7Ozs7F169YOvx7RlXI9W1Vns3sssqiz2RlUJKRaqx1f/HCeYQU0Lv1t2rQJb7/9NjZs2ACr1QpJkhAaGuqt8RF1CdezVVFhWjz2qwz22yMSWLtK1//617/ixRdfREJCQlePh8grnGerosK0mDoiGav/fajZHVI870QkjnaFVWJiIpKTk7t6LESdoj1d151nq24f3FsOqE2flco3+I5Jj5X3qjraxZ3IG7QqZdAUVwDtDCudTodp06YhIyMDCkXjX9I5c+Z06cCILkd7u647z1Y5D/4CcOvz97OrY+Sg8kYXd6KOUqsUkCQJy94rRq+oMNzz86t8PaQu1a6wuv7667t6HESdor1d151nqw6fqfG4V6WAAscrTZAkeKWLO1FHuV4R0uDo+HEif9OusLrtttu6ehxEnaK9Xdedh3b7JzSeU3HtzZc/JhWzthThQq0VT92RxVt5iQTAK0IooLTUOik+QofjlSacrWnsJqFSNi77xUXo8OtBPZGd2A2lVWZ8c/InvFZYivJqCwDgaIWRVYIkJNeu68Gwd8WwooDiqXXSc3deg+/LjW6P5Y9JxWuFjXdKOfegztZY3C5BBICt+05hxW0D2RWdhMNDwUR+zFNPPkkCfrXuE7d9p2d2NXZSf/4/x+Q9KE+zsgu1VgxO6oZ/sccfCSbYDgUrfT0Aos7m7Mk3IqU7UnoYUGFs+QoQ58cVRos8K9NpGv9aOGdRSdF6t9djUBF5H2dWFPDaugLEuQfFTulE4uLMigKepxlT/phUvPX1qWZ7UE1nZQwqEhUPBRMFmKYzph6GxmrAa5K6cfZEfitUqwr4g8CuGFYUFDzdLdWnO89Jkf+qb7D7eghexbAiv8R+fUTBhWFFfof9+oiCDwssyO+01P/vRJXZxyMj8h61Mrh+fQfXd0sBobX+f21xOCQcrzShsOQcjlea4AiCBqAUmFRBtorAZUDyOy2dm2qrXx+XDymQBFuBBWdW5Hda6jTRVr8+Lh8S+S/OrMjvXG6nifZeH0JE4mFYkV/ydG6qLZe7fEgkIhZYEAWoy10+JBJRuC645hrB9d1SUGOjWiL/xbCioHI5y4dE5HtcBiQiIuExrIiISHgMKyIiEh7DioiIhMewIiIi4TGsiIhIeAwrIiISHsOKiIiEx7AiIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuF5PaxKSkowZMgQ1NfXAwCKioqQm5uLKVOm4LnnnvP2cIiIyA94NaxMJhNWrVoFrVYrP1ZQUIA1a9Zg8+bN2L9/P4qLi705JCIi8gNeCytJkrBo0SLMmTMHoaGhABrDy2q1IikpCQqFAjk5OSgsLPTWkIiIyE90yeWL27Ztw8aNG90e69mzJyZMmID09HT5MZPJBIPh0iV4er0eJ0+e7IohERGRH+uSsMrNzUVubq7bY2PHjsX27duxfft2VFZW4t5778X69ethNpvl55jNZkRERHTFkIiIyI957Vr7Dz74QP549OjRePnllxESEgKNRoOysjIkJiZiz549mDFjhreGREREfsJrYdWSpUuXYu7cubDb7cjJyUFWVpavh0RERILxSVh99NFH8sfZ2dnYunWrL4ZBRER+goeCiYhIeAwrIiISHsOKiIiEx7AiIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuExrIiISHgMKyIiEh7DioiIhMewIiIi4TGsiIhIeAwrIiISHsOKiIiEx7AiIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuExrIiISHhqXw+AOofDIeFElRlnayyIi9ChT4weSqXC18MiIuoUDKsA4HBI2Fl8BnO2FsFic0CnUWLtpGyMz4xnYBFRQOAyYAA4UWWWgwoALDYH5mwtwokqs49HRkTUORhWAeBsjUUOKieLzYEKo8VHIyKirma0NPh6CF7FsAoAcRE66DTu/yl1G
iViw3U+GhERdbUGh6PtJwUQhlUA6BOjx9pJ2XJgOfes+sTofTwyIqLOwQKLAKBUKjA+Mx7peSNRYbQgNpzVgESBTq0MrrkGwypAKJUKpPQwIKWHwddDISIvCNcF16/v4IpmIiLySwwrIiISHsOKiIiEx7AiIiLhMayIiEh4DCsiIhIew4qIiITHsCIiIuF57VSZ3W7HypUrceDAAVitVsycORM33ngjioqKsHz5cqhUKuTk5GDGjBneGpJf4D1VREReDKt33nkHDQ0NePPNN3H27Fns2LEDAFBQUIB169YhMTER06dPR3FxMTIzM701LKHxnioiagm7rneRPXv2ID4+HtOnT8fChQsxevRomEwmWK1WJCUlQaFQICcnB4WFhd4akvB4TxURtSTYuq53ycxq27Zt2Lhxo9tjUVFRCAkJwfr16/Hll1/i0UcfxZo1a2AwXOplp9frcfLkya4Ykl9q7Z4q9gAkomDSJWGVm5uL3Nxct8dmz56NG264AQqFAsOGDcOJEydgMBhgNl+aJZjNZkRERHTFkPyS854q18DiPVVEFIy8tgw4ZMgQfPzxxwCAQ4cOISEhAQaDARqNBmVlZZAkCXv27MHQoUO9NSTh8Z4qImoJrwjpIpMmTUJBQQEmTZoESZKwdOlSAMDSpUsxd+5c2O125OTkICsry1tDEh7vqSKilgTbFSFe+261Wi1WrlzZ7PHs7Gxs3brVW8PwO7ynioiIh4KJiMgPBNc80o/xcDARBTOGlR/g4WAiCnZcBvQDPBxMRE2xgwUJp7XDwUQUnIKtgwXDyg84Dwe74uFgIgomDCs/wMPBRNQUDwWTcHg4mIia4qFgEhIPBxNRMAuueSQREfklhhUREQmPYUVERMJjWBERkfAYVkREJDxWA/oIG9MSEbUfw8oH2JiWiKhjuAzoA2xMS0TUMQwrH2BjWiK6Uuy6Tl2OjWmJ6Eqx6zp1OTamJSLqGBZY+AAb0xIRdQzDykfYmJaIrkSwXRESXN8tEVGACLYrQhhWREQkPIYVEREJj2FFRETCY1gREZHwGFZERCQ8hhUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJj2FFRETC89qFKEajEbNnz0ZdXR00Gg1Wr16NHj16oKioCMuXL4dKpUJOTg5mzJjhrSEREZGf8NrM6q233kJaWhreeOMNTJgwAS+99BIAoKCgAGvWrMHmzZuxf/9+FBcXe2tIRETkJ7wWVmlpaTCbzQAAk8kEtVoNk8kEq9WKpKQkKBQK5OTkoLCw0FtDIiIiP9Ely4Dbtm3Dxo0b3R5bvHgxPv30U0yYMAHV1dV44403YDKZYDAY5Ofo9XqcPHmyK4ZERER+rEvCKjc3F7m5uW6PzZgxA9OmTcOUKVNw6NAhzJw5E5s3b5ZnWwBgNpsRERHRFUMiIiI/5rVlwIiICISHhwMAYmJiYDabYTAYoNFoUFZWBkmSsGfPHgwdOtRbQyIiIj/htWrA/Px8LFy4EH/729/Q0NCAJ554AgCwdOlSzJ07F3a7HTk5OcjKyvLWkIiIyE94Lazi4uLwl7/8pdnj2dnZ2Lp1q7eGQUREfoiHgomI/JDR0uDrIXiV12ZWvuRwSDhRZcbZGgviInToE6OHUqnw9bCIiC5bg8Ph6yF4VcCHlcMhYWfxGczZWgSLzQGdRom1k7IxPjOegUVE5CcCfhnwRJVZDioAsNgcmLO1CCeqzG18JRGRuNTKgP/17Sbgv9uzNRY5qJwsNgcqjBYfjYiI6MqF6wJ+YcxNwIdVXIQOOo37t6nTKBEbrvPRiIiIqKMCPqz6xOixdlK2HFjOPas+MXofj4yIiNor4OeRSqUC4zPjkZ43EhVGC2LDWQ1IRORvAj6sgMbASulhQEoPQ9tPJiIi4QT8MiAREfk/hhUREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRUREwmNYERGR8BhWREQkP
IYVEREJz+/aLZ0+fRq33367r4dBRNTpoqKi8NJLL7X7ucFEIUmS5OtBEBERtYbLgEREJDyGFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJz+/OWXmD0WjE7NmzUVdXB41Gg9WrV6NHjx4oKirC8uXLoVKpkJOTgxkzZvh6qLDb7Vi5ciUOHDgAq9WKmTNn4sYbbxRyrE4lJSWYNGkS9u7di5CQECHHajQaMW/ePJhMJthsNjzyyCO45pprhByrw+HAkiVLcPjwYWi1WixbtgzJycm+HpbMZrNhwYIFOH36NKxWKx588EH07dsXjzzyCBQKBVJTU1FQUAClUpx/O1dVVeH222/Hyy+/DLVaLfRYg4ZEzbz66qvSqlWrJEmSpC1btkgrV66UJEmSfv3rX0ulpaWSw+GQpk2bJh04cMCXw5QkSZK2b98uFRQUSJIkSWfOnJFeeeUVSZLEHKskSZLRaJTuv/9+acSIEZLFYpEkScyxPvPMM/LPsqSkRLr11lslSRJzrP/+97+l+fPnS5IkSd988430+9//3scjcvf3v/9dWrZsmSRJknT+/Hnp+uuvlx544AHps88+kyRJkhYtWiS9//77vhyiG6vVKj300EPSuHHjpGPHjgk91mDCfx54kJaWBrPZDAAwmUxQq9UwmUywWq1ISkqCQqFATk4OCgsLfTxSYM+ePYiPj8f06dOxcOFCjB49WtixSpKERYsWYc6cOQgNDQUAYcd69913Y8qUKQAaZ68hISHCjvWrr77CyJEjAQDZ2dk4cOCAj0fkbvz48cjPz5f/rFKpUFxcjGHDhgEARo0ahb179/pqeM2sWrUKU6ZMQWxsLAAIPdZgEvTLgNu2bcPGjRvdHlu8eDE+/fRTTJgwAdXV1XjjjTdgMplgMBjk5+j1epw8edLnY42KikJISAjWr1+PL7/8Eo8++ijWrFkj5Fh79uyJCRMmID09XX5M1J/rihUrMGjQIFRWVmLevHlYsGCBEGP1pOm4VCoVGhoaoFaL8ddbr9cDaBxnXl4eZs2ahVWrVkGhUMifNxqNvhyi7K233kJ0dDRGjhyJDRs2AGj8R5aIYw02Yvzf7EO5ubnIzc11e2zGjBmYNm0apkyZgkOHDmHmzJnYvHmzPNsCALPZjIiICJ+Pdfbs2bjhhhugUCgwbNgwnDhxAgaDQcixjh07Ftu3b8f27dtRWVmJe++9F+vXrxdyrABw+PBhzJkzBw8//DCGDRsGk8nk87F60vS/t8PhECaonMrLy/GHP/wBd955J26++WasXr1a/pwoP0cA2L59OxQKBQoLC3Hw4EHMnz8f58+flz8v0liDDZcBPYiIiEB4eDgAICYmBmazGQaDARqNBmVlZZAkCXv27MHQoUN9PFJgyJAh+PjjjwEAhw4dQkJCgrBj/eCDD7Bp0yZs2rQJPXr0wMsvvyzsWI8dO4b8/HysWbMG119/PQAIO9bBgwdj9+7dAICioiKkpaX5eETuzp07h3vvvRfz5s3DHXfcAQDo378/Pv/8cwDA7t27hfg5AsAbb7yB119/HZs2bUJGRgZWrVqFUaNGCTnWYMNGth6cPXsWCxcuRG1tLRoaGpCXl4ef//znKCoqwooVK2C325GTk4PZs2f7eqiwWq0oKChASUkJJEnCkiVLkJmZKeRYXY0ePRo7duyQqwFFG+uDDz6Iw4cPo1evXgAag+qFF14QcqzOasAjR45AkiSsWLECV199ta+HJVu2bBl27NiBlJQU+bHHHnsMy5Ytg81mQ0pKCpYtWwaVSuXDUTY3depULFmyBEqlEosWLRJ6rMGAYUVERMLjMiAREQmPYUVERMJjWBERkfAYVkREJDyGFRERCY9hRdQOK1aswObNm309DKKgxbAiasX58+cxbdo0fPTRR74eClFQE6snC5EPvfXWW/j4449hsVhQVlaG+++/H9deey1mzpwpd4ggIt/gzIrIhclkwvr16/HCCy9gw4YNSExMRFZWlq+HR
RT0GFZELpwd4RMSEmC1Wn08GiJyYlgRuXBeBUFEYmFYERGR8NjIloiIhMeZFRERCY9hRUREwmNYERGR8BhWREQkPIYVEREJj2FFRETCY1gREZHw/j+ufBtVf3hkXQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "noisy = diff_df[abs(diff_df) > ci_95]\n", + "df = df.drop(noisy.index)\n", + "sns.jointplot(x='n1', y='n2', data=df) " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "oORmeyHNl6A1" + }, + "source": [ + "Now that data looks much better!\n", + "\n", + "So, let's average n1 and n2, and take the error bar to be ci_95." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 204 + }, + "colab_type": "code", + "id": "7NsMKc6Nl6A3", + "outputId": "cef1fc9d-6b55-403a-c0c5-97cd92303624" + }, + "outputs": [], + "source": [ + "avg_df = df[['label', 'drug']].copy()\n", + "n_avg = df[['n1', 'n2']].mean(axis=1)\n", + "avg_df['n'] = n_avg\n", + "avg_df.sort_values('n', inplace=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "FIUv_SV2l6A7" + }, + "source": [ + "Now, let's look at the sorted data with error bars." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 296 + }, + "colab_type": "code", + "id": "YN1DgKJNl6BD", + "outputId": "23bb0034-c1c8-4a91-b915-48d2a76a2e6c" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0, 0.5, 'activity')" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEECAYAAAAyMaOFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAf4klEQVR4nO3de1QU5/0G8GfZ5aIsBC81XuIFrVpF8VKqPyNi06hrjOIVlSZYL0fEVBONrAJRwYDGaIxpjTGkJ2k8aoNCCKfNaQ5Ek9YYiYlWQsSI1VBN0BIJemC5Lbs7vz+QLZcBF9jdmd15Puf01H13y3z7iu+z884776gEQRBARETUjIfUBRARkTwxIIiISBQDgoiIRDEgiIhIFAOCiIhEaaQuwF4mTpyIfv36SV0GEZFLKS4uxrlz50Tfc5uA6NevHzIzM6Uug4jIpSxYsKDV9zjFREREohgQREQkigFBRESiGBBERCSKAUFERKIYEEREJIoBQUREohgQREQkigFBROTClqTmYklqrkN+NgOCiIhEMSCIiEgUA4KIiEQxIIiISBQDgoiIRDEgiIhIFAOCiIhEMSCIiEgUA4KIiEQxIIiISBQDgoiIRGmkLgAA5s2bBz8/PwDAI488gpiYGMTFxUGlUmHo0KFITEyEhwezjIjImSQPiNraWgDAkSNHrG0xMTHYsGEDJk6ciO3bt+PUqVOYPn26VCUSESmS5F/Lr1y5gurqaqxcuRLLli1DXl4eCgoKMGHCBABAWFgYzp49K3GVRETKI/kZhI+PD1atWoWIiAj85z//werVqyEIAlQqFQDA19cXFRUVEldJRKQ8kgdEYGAgBg4cCJVKhcDAQAQEBKCgoMD6fmVlJfz9/SWskIhImSSfYsrIyMDu3bsBACUlJTAYDJg8eTLOnTsHADh9+jRCQkKkLJGISJEkP4NYtGgR4uPjERkZCZVKhV27dqFbt27Ytm0bXn31VQwePBg6nU7qMomIFEfygPDy8sK+fftatB89elSCaoiIXEfWxWJcvHkPRrMFk3d/Ar1uOOaN62e3ny/5FBMREbVf1sVixGd+A6PZAgAovleN+MxvkHWx2G7HYEAQEbmgvdmFqK4zN2mrrjNjb3ah3Y7BgCAickG37lW3q70jGBBERC6ob0CXdrV3BAOCiMgF6XXD0cVT3aSti6caet1wux1D8lVMRETUfg2rlTZn5MNotqBfQBe7r2JiQBARuah54/rhvS9vAgCOr5lk95/PKSYiIhLFgCAiIlEMCCIiF7UkNReXb5c77OfzGgQRkYtxdDA04BkEERGJYkAQEZEoBgQREYliQBARkSgGBBERiWJAEBGRKAYEERGJYkAQEZEoBgQRkQtx1k1yAAOCiIhawYAgIiJRku/FVFdXh4SEBBQXF8NoNGLt2rXo3bs3YmJiMGjQIABAZGQkZs2aJW2hREQycPl2OapqTejq7fjhW/KA+Otf/4qAgADs3bsXd+/exfz58/H73/8eK1aswMqVK6Uuj4hIFhquP
VTVmpq0j+zj75CHBQEyCIiZM2dCp9NZX6vValy6dAlFRUU4deoUBg4ciISEBGi1WgmrJCJSHsmvQfj6+kKr1cJgMODZZ5/Fhg0bEBwcjM2bN+PYsWPo378/Dh48KHWZRESScebKpcYkDwgAuH37NpYtW4a5c+dizpw5mD59OkaNGgUAmD59Oi5fvixxhURE0liSmovz/ylrMbXkDJIHRGlpKVauXAm9Xo9FixYBAFatWoX8/HwAQG5uLoKCgqQskYjI6Zak5mJ0UrYkZw4NJL8G8eabb6K8vBxvvPEG3njjDQBAXFwcdu3aBU9PT/Ts2RPJyckSV0lE5BwNZwwAWl2pZBGAipr6M4pzRWUY92IOEucEYd64fnatRfKA2Lp1K7Zu3dqiPS0tTYJqiIikY8u1BrPQsu1uVR30GV8DgF1DQvKAICJSOlvOGgDAaDK3+l6dWcDe7EIGBBGRO2jv6iSjSeT0oZFb96o7W1ITDAgiIgmMTspu9x3RbccD0DegS+eKaoYBQUTkJI1DoSPLVlVoPSQ81SrodcM7VV9zDAgiIjtrvi1Gw1lCZ+9l8NKoUCsyzeTrpcbO+aPdbxUTEZGrEBv4WwsBR2ym56VRw2gyWc8iVAAG9/TFqdhf2/1YAAOCiMjqQd/8nbGD6oN4qOr/u6GWnn7eDjuW9P9viYgcQGzpqFTf/O3FaDJb74Mw1JjgpVE59Hjy7QkiolaIXex1pYG+IywCmlx/EFD/urSi1mHHdK8eJCLZabzWv63BvCPvKUlrq5e+v2vfex8aY0AQUZvae2FWKd/o5cJotjjsZ/NvjEiBbLkYy4HdNWg8HHcdgn/zRC6G3+iVR2yDvgaC8KD7qzuOvyFETuCIeXhShgddb2krPDqLv2lENuC3dpJC42WtrfFSO+65b/xtJZfVfLMzew3e/NZOUrMI9auWzA/YvRUA+nez7wZ9jfG3nhyuMzcs2fIekTsx1JgeuGtrA7WKd1KTEzlqMCeiB2vP9QS1yvH/tvgv18U1f+AIB3Mi12I0mUV3aH0QZ/xb5WjgAtoz105ErsMs2HadoTnH7sD0PxxVJNTepY9E5D46szzVgffGNcGAcKK2zgSIyP01rE6qqOnYlz5vjQomR9740IxsRyaLxYKkpCQUFhbCy8sLKSkpGDhwoNRlPZAt1wSISDksQscDAaifTmo4Y/DSqGEy/+9njezjj+NrJnWywtbJNiBOnjwJo9GI48ePIy8vD7t378ahQ4ekLktUW1NFROSemg/8zUOgM6HQoGGlUvMvl129NfgmSdfpn/8gsh3FLly4gClTpgAAxo4di0uXLklcUesa7rBlKBC5vqpak/X6gCMGfVupIP0XTdmOaAaDAVqt1vparVbDZDJBo5FtyUQkU82Xkrb1zV9qfj7ymY6W7Wir1WpRWVlpfW2xWGQZDktSc2Xzl0mkJO2Z4unIUlIpqJ21ftVG8htx7xs/fjw+/fRTzJo1C3l5eRg2bJjUJTXR/GI0EbVPa1M5bQ30cvu2by9yC4YGsg2I6dOn4/PPP8fSpUshCAJ27doldUlWDAdSutaWayp9oG+vxttlyHEmQrYB4eHhgRdffFHqMloQ26uIyJWJLcN09OocpWpYsuoqy945whG5oMbPCejot3YO9PajAqD1sX3PM1fBgLABzxrI0dpzwZUDu/N4a1Tw0qhltRVOV28NRvbxd8qxONoRtYPY3Dvn4V1Lw7d9WwZ9L41akhrlggHRBl6Mdh2OmHLhYO463HWKR2oMiFYwHByvM9MqHLzdS+NN6PhkwbY5ev+lxhgQreD2GeK4vJGaa7yGv6MPqmq+CR3Jg02jX3JyMhYtWoQRI0Y4uh5ZcIe7o1ubcmn+mqteqDGxZZjtGejJcZy1QV9jNgXE1KlT8eabb6KkpATh4eEIDw9vsk8SOV5716q7ytYCZB+NB3ag84+eJQIAD1s+FBYWhj/84Q944403cOHCBYSGhiIuLg7FxcWOrs/tNQz8Z
qH+CVMVNaYWr833p3VIGVSo37DNz0cDtap+Cqf5n5u/dtYTxkgazlza2phNZxDXr19HZmYmPv30U0ycOBF/+ctfYDKZsH79emRmZjq6RrfR2adJkfyIbZXQ0W/tSrzgSq2TYkqpOZsC4oUXXsCSJUuwfv16+Pj4WNsXLlzosMKkYq/VS21tL0yO19Epl/Z8loM5uTubp5jmz59vDYd9+/YBAJ566inHVeYiGi4Gm5tNFdXyGkC7dGRahVMu5I5G9vHH9ZeelPzsAXjAGUR6ejoyMjJw/fp1nD59GgBgNpthMpmwadMmpxQoZ83PEpTGHssb+U2cqF7IoO5Ou7/BVm0GxNy5czFp0iSkpqYiJiYGQP0uqz169HBKcXLnSuHQ/Eakzg7mXPVCZB/OvPGtvdoMiMLCQowePRozZsxAUVGRtf369esIDQ11eHEkrqNr1XkjEpG8dPXWyDYcgAcERG5uLkaPHo2///3vLd5TekCYHXDywP1kiNyfHFYn2arNgIiOjgYADB8+HPPnz8dDDz3klKLkrPEdyp3R2vw9EbknOU8ltcamZa5msxkrVqxAYGAgFi9ejIkTJzq6LlkyCx27Q/lB2wsTkXtwxRBoi00BsWrVKqxatQr5+fl4++23sW3bNuTk5Di6NlmxtCMX5P6cWSJqW8Ody+402HeETQFRU1OD7OxsZGVlQRAEPPvss46uSzbau5SVy++J5M2VrgFIzaaACA8Ph06nQ1JSEgYOHOjommTD0s4b3tRMByJJcNB3jDYDwmQyQaPR4IMPPoCnpycAwGg0AgC8vLwcX53E2nu1gRebiTqHA728tBkQW7Zswb59+zBnzhyoVCoIQv2QqVKpcOrUqU4fvKKiAnq9HgaDAXV1dYiLi8O4ceOQk5ODPXv2oE+fPgCA9evXY8KECZ0+nq06slKJZw+kdBzc3U+bAdGw59Jrr72G4OBga/u5c+fscvA///nP+L//+z8sX74c3333HTZt2oQPPvgABQUF0Ov10Omc/8tWWlHbrmmlxncoE7kDDvTUoM2AOH/+PK5du4Z3330XK1asAABYLBYcO3YMH374YacPvnz5cutUldlshre3NwCgoKAA3377LQ4fPozg4GDExsZCo3HOoz9vlFXZ/FlvjYqPSiRZkuO+PuR62hx1/f39UVpaCqPRiDt37gCon17S6/XtPlB6ejoOHz7cpG3Xrl0IDg7GnTt3oNfrkZCQAACYPHkypk2bhkceeQSJiYlIS0vD008/3e5jdoTJxvWsKtQ/R5fInri8kuSkzYAYNmwYhg0bhoiICJSVlWHEiBE4efIkHn300XYfKCIiAhERES3aCwsL8fzzz2Pz5s3W6wwLFy6Ev3/905Mef/xxZGdnt/t4HZF18cFPyOOUEonhoE7uyKZ5m507d2LSpEkYMWIEioqK8NFHH1mvT3TGtWvX8Nxzz+G1117DL37xCwCAIAgIDw9HWloaevfujdzcXAQFBXX6WLbYm134wM9wSsn1cfqFyDY2BURJSQkiIyMBAKtXr0ZUVJRdDr5v3z4YjUbs3LkTAKDVanHo0CGkpKRg3bp18PHxwZAhQ7B48WK7HO9Biu9Vt/k+Vyo5BwdwInmw+cpvUVERAgMDcePGDVgsFrsc/NChQ6LtoaGhTt8tNutiMVRo/d4HhsODcf6cyL3Y/EzqjRs3orS0FL169UJSUpKDy3K+vdmFrYaDO1134BJGIrKVTQFRUFCA6upqeHl54d69e4iNjXW7zfputTG9JLfrDvymTkTOYFNApKen48iRIzh06BBmzpzZYrmqO+gb0EX0GoQUM0sc/IlIDmwKiG7duqFXr16orKzExIkT8cc//tHRdTmdXjcc8ZnfoLrO3KTdS+OYiGAIEJHc2RQQfn5+OHnyJFQqFdLS0lBWVuboupxu3rh+AIDNGfkwmusvwtvjZjhOBxGRq7IpIFJSUnDz5k1s2rQJ77zzjltepAbqQ+K9L2/i8u3yDu3KyiAgI
ndiU0BotVqMHDkSABAXF+fQglwR1+0TkTvykLoAIiKSJwZEJ3FaiYjcFQOiExgOROTOGBCNLEnNxeXb5TZ9tqu3huFARG6NAdEBDUtXiYjcmXMe0+ZGOK1ERErBgLARl7ISkdJwiskGPGsgIiViQBARkShOMbWBz04gIiXjGUQjjfdgChnUneFARIrGgCAiIlEMiPuWpOZazx54UZqIiAFBREStYEAQEZEoSVcxCYKAsLAwDBo0CAAwduxYbNq0CXl5edi5cyfUajVCQ0Oxbt06p9XEPZaIiOpJGhA3b95EUFAQ3nzzzSbtiYmJOHDgAPr374/o6GgUFBQgKChIoiqJiJRJ0immgoIClJSUICoqCqtXr8Z3330Hg8EAo9GIAQMGQKVSITQ0FLm5uVKWSUSkSE47g0hPT8fhw4ebtG3fvh3R0dF44okncP78eej1ehw8eBBardb6GV9fX3z//ffOKpOIiO5zWkBEREQgIiKiSVt1dTXUajUAICQkBCUlJfD19UVlZaX1M5WVlfD3d+zW2u15DgQRkVJIOsX0+uuvW88qrly5gr59+8LPzw+enp64efMmBEHAmTNnEBISImWZRESKJOlF6ujoaOj1evzzn/+EWq3GSy+9BADYsWMHYmNjYTabERoaijFjxkhZJhGRIkkaEA899BDeeuutFu1jx47FiRMnnFqL0WSGWQAqakyYvPsT6HXDMW9cP6fWQEQkJ7xRDkBpRS1qTYL1dfG9asRnfoOsi8USVkVEJC0GBIDv71a3aKuuM2NvdqEE1RARyQMDAoDRbBFtv3WvZXAQESkFAwKAl1q8G/oGdHFyJURE8sGAANC/W8sg6OKphl43XIJqiIjkgQEBoKefN7w1KuvrfgFd8NKC0VzFRESKxoAgIiJRDAhwmSsRkRgGBIAbZVUt2rjMlYiUTvEBkXWxGCaLIPoel7kSkZIpPiDaOkvgMlciUjLFB0RbZwlc5kpESqb4gGjtLCGgiyeXuRKRoik+IPS64fBQNW3r4qlGUjifgU1Eyqb4gACAZvnAm+SIiCDx8yCklnWxGM+fyEMri5iIiBRN0WcQe7MLRcOB9z8QESk8IFpbwcT7H4iIFB4Qra1g4v0PREQKDwixFUwN7URESqfogJg3rh8Ce/g2WcXkrVFxBRMRERQeEED9syC0Phr4+WigVgFeGrXUJRERyYKky1zfeustfPbZZwCA8vJylJaW4vPPP0dOTg727NmDPn36AADWr1+PCRMmSFkqEZHiSBoQ0dHRiI6OBgCsWbMGsbGxAICCggLo9XrodDqn1tPVW4ORffydekwiIrmSxRRTTk4O/P39MWXKFAD1AfH+++/jt7/9LXbv3g2TySRxhUREyuO0gEhPT8fs2bOb/Cc/Px8AkJqainXr1lk/O3nyZGzbtg3Hjh1DVVUV0tLSnFUmERHd57QppoiICERERLRov3btGvz9/TFw4EBr28KFC+HvXz/V8/jjjyM7O9tZZRIR0X2STzGdPXsWYWFh1teCICA8PBz//e9/AQC5ubkICuLOqkREzib5Zn1FRUWYPHmy9bVKpUJKSgrWrVsHHx8fDBkyBIsXL5awQiIiZZI8IBITE1u0hYaGIjQ0VIJqiIiogeRTTHIyso8/jq+ZJHUZRESywIAgIiJRDIj7ePZARNQUA4KIiEQpOiCyLhbj4s17qKgx4eLNe8i6WCx1SUREsqHYgMi6WIz4zG9gNFsAAEazBfGZ3zAkiIjuU2xA7M0uRHWduUlbdZ2Zz6MmIrpPsQHB51ETEbVNsQHB51ETEbVNsQGh1w1HF8+mT4/r4qnm86iJiO6TfKsNqTQ8d3pzRj6MZgv6BXSBXjecz6MmIrpPsQEB1IfEe1/eBADeJEdE1Ixip5iIiKhtDAgiIhLFgCAiIlEMCCIiEsWAICIiUQwIIiISxYAgIiJRDAgiIhLFgCAiIlEMCCIiEuX0gPj444+xadMm6+u8vDxERERg6dKleP31163tr
7/+OhYtWoSlS5ciPz/f2WUSESmeU/diSklJwZkzZzBixAhrW2JiIg4cOID+/fsjOjoaBQUFAIAvv/wS6enpuH37NtavX4/333/fmaUSESmeUwNi/PjxmDZtGo4fPw4AMBgMMBqNGDBgAAAgNDQUubm58PLyQmhoKFQqFfr27Quz2YyysjJ0797dmeUSESmaQwIiPT0dhw8fbtK2a9cuzJo1C+fOnbO2GQwGaLVa62tfX198//338Pb2RkBAQJP2iooKBgQRkRM5JCAiIiIQERHxwM9ptVpUVlZaX1dWVsLf3x+enp4t2v38/BxRKhERtULSVUxarRaenp64efMmBEHAmTNnEBISgvHjx+PMmTOwWCy4desWLBYLzx6IiJxM8gcG7dixA7GxsTCbzQgNDcWYMWMAACEhIViyZAksFgu2b9/usOPzQUFEROJUgiAIUhdhDwsWLEBmZqbUZRARuZS2xk7eKEdERKIYEEREJIoBQUREohgQREQkigFBRESiGBBERCSKAUFERKIYEEREJEryO6ntpbi4GAsWLJC6DCIil1JcXNzqe25zJzUREdkXp5iIiEgUA4KIiEQxIIiISBQDgoiIRDEgiIhIFAOCiIhEuc19EB1hsViQlJSEwsJCeHl5ISUlBQMHDpS0pnnz5lmfv/3II48gJiYGcXFxUKlUGDp0KBITE+Hh4dxc//rrr/HKK6/gyJEjuHHjhmg9J06cQFpaGjQaDdauXYvHHnvM6bUVFBQgJiYGgwYNAgBERkZi1qxZTq+trq4OCQkJKC4uhtFoxNq1a/Hzn/9cFv0mVlvv3r1l0W9msxlbt25FUVER1Go1XnrpJQiCIIt+E6utoqJCFv0GAD/99BMWLFiAd955BxqNxn59JihYdna2sGXLFkEQBOHixYtCTEyMpPXU1NQIc+fObdK2Zs0a4YsvvhAEQRC2bdsm5OTkOLWmt956S5g9e7YQERHRaj0//vijMHv2bKG2tlYoLy+3/tnZtZ04cUJ4++23m3xGitoyMjKElJQUQRAEoaysTJg6daps+k2sNrn028cffyzExcUJgiAIX3zxhRATEyObfhOrTS79ZjQahWeeeUaYMWOGcO3aNbv2maKnmC5cuIApU6YAAMaOHYtLly5JWs+VK1dQXV2NlStXYtmyZcjLy0NBQQEmTJgAAAgLC8PZs2edWtOAAQNw4MAB62uxevLz8zFu3Dh4eXnBz88PAwYMwJUrV5xe26VLl/CPf/wDTz31FBISEmAwGCSpbebMmXjuueesr9VqtWz6Taw2ufTbtGnTkJycDAC4desWevbsKZt+E6tNLv328ssvY+nSpejVqxcA+/4bVXRAGAwGaLVa62u1Wg2TySRZPT4+Pli1ahXefvtt7NixA7GxsRAEASqVCgDg6+uLiooKp9ak0+mg0fxvJlKsHoPBYJ0Wa2g3GAxOry04OBibN2/GsWPH0L9/fxw8eFCS2nx9faHVamEwGPDss89iw4YNsuk3sdrk0m8AoNFosGXLFiQnJ0On08mm38Rqk0O/ZWZmonv37tYvuoB9/40qOiC0Wi0qKyutry0WS5MBx9kCAwMRHh4OlUqFwMBABAQE4KeffrK+X1lZCX9/f8nqA9Dk+kdDPc37sbKysskvo7NMnz4do0aNsv758uXLktV2+/ZtLFu2DHPnzsWcOXNk1W/Na5NTvwH134izs7Oxbds21NbWNqlB6t+3xrWFhoZK3m/vv/8+zp49i6ioKHz77bfYsmULysrKmhy/M32m6IAYP348Tp8+DQDIy8vDsGHDJK0nIyMDu3fvBgCUlJTAYDBg8uTJOHfuHADg9OnTCAkJkbJEjBw5skU9wcHBuHDhAmpra1FRUYHr169L0perVq1Cfn4+ACA3NxdBQUGS1FZaWoqVK1dCr9dj0aJFAOTTb2K1yaXfsrKykJqaCgDo0qULVCoVRo0aJYt+E6tt3bp1kvfbsWPHcPToURw5cgQjRozAyy+/jLCwM
Lv1maI362tYxXT16lUIgoBdu3ZhyJAhktVjNBoRHx+PW7duQaVSITY2Ft26dcO2bdtQV1eHwYMHIyUlBWq12ql1/fDDD3j++edx4sQJFBUVidZz4sQJHD9+HIIgYM2aNdDpdE6vraCgAMnJyfD09ETPnj2RnJwMrVbr9NpSUlLw0UcfYfDgwda2F154ASkpKZL3m1htGzZswN69eyXvt6qqKsTHx6O0tBQmkwmrV6/GkCFDZPH7JlZbnz59ZPH71iAqKgpJSUnw8PCwW58pOiCIiKh1ip5iIiKi1jEgiIhIFAOCiIhEMSCIiEgUA4KIiEQxIEjxamtr8Zvf/EbqMpq4d+8e/va3v9n8+cWLF+OHH35wYEWkRAwIIhkqLCzEJ598InUZpHCK3u6blKuyshKxsbEoLy/HgAEDrO1RUVHo1q0bysvL8eSTT+LGjRuIjY1FbW0tnnjiCXzyySfIz8/Hjh074Ovrix49esDb29t6B3xzOTk5+NOf/gSNRoN+/fphz549MBgM0Ov1MBgMMJvNeO655zBp0iTMnj0bgwYNgpeXF+7evYsrV67g+PHjCAsLs2474e3tjeTkZPTp0wf79+/HZ599ht69e+Pu3bvO6jpSEAYEKdIHH3yAYcOGYePGjfj666+tWxMAsO5PlJmZKfq/TUxMxJ49ezB06FDs378fJSUlrR7nww8/xPLly/Hkk08iKysLBoMBhw4dwqOPPorf/e53KCkpQWRkJE6ePImqqio888wz1m050tLSsGTJEmzYsAFRUVGYOnUqcnNz8corr2DNmjX46quvkJGRgaqqKsyYMcPufUTEKSZSpH//+98YPXo0AGDMmDFNNmkMDAxs8fnGGw78+OOPGDp0KADgl7/8ZZvHiY+Px1dffYWnn34a//rXv+Dh4YHr16/jV7/6FQDg4YcfhlartW6wJnbsq1evIjU1FVFRUTh48CDKyspw7do1jBo1Ch4eHtBqtZLvI0buiQFBijR48GDk5eUBAC5fvtxkm/eGrZK9vb1x584dAPV77Dfo3bs3rl27BqD+iXZtOX78ONavX4+jR48CAD7++GMMGTIE58+fB1C/KWN5eTkCAgIA/G+3XA8PD1gsFmutsbGxOHLkCHbs2AGdTofAwEDk5+fDYrGgqqrKWg+RPXGKiRTpqaeeQnx8PCIjIzF48GB4enq2+MyUKVPw3nvvITIyEkFBQfD19QVQP8WUkJCArl27wtPTEw8//DAAYOPGjUhISMDPfvYz688IDg7GihUrEBAQAF9fX/z617/GY489hoSEBGRnZ6OmpgYvvvhii23mBwwYgKtXr+Ldd9/Fli1bkJSUhNraWtTU1OCFF17AiBEjMHPmTCxatAi9evVCjx49HNhbpFTcrI+onY4dO4YnnngC3bt3x/79++Hp6Yl169bh1VdfRUxMDLp27Sp1iUR2wTMIonbq0aMHVq5cia5du8LPz8+6gmnp0qUMB3IrPIMgIiJRvEhNRESiGBBERCSKAUFERKIYEEREJIoBQUREov4fEzH6RLiLVLAAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.errorbar(np.arange(avg_df.shape[0]), avg_df['n'], yerr=ci_95, fmt='o')\n", + "plt.xlabel('drug, sorted')\n", + "plt.ylabel('activity')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "NxsJUoS0l6BH" + }, + "source": [ + "Now, let's identify our active compounds. \n", + "\n", + "In my case, this required domain knowledge. Having worked in this area, and having consulted with professors specializing on this channel, I am interested in compounds where the absolute value of the activity is greater than 25. This relates to the desired drug potency we would like to model.\n", + "\n", + "If you are not certain how to draw the line between active and inactive, this cutoff could potentially be treated as a hyperparameter." + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 282 + }, + "colab_type": "code", + "id": "MQPUH1ogl6BH", + "outputId": "c6874a35-23f1-4a7d-e4ac-6a7fc90fc32a", + "scrolled": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXoAAAD3CAYAAAAT+Z8iAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAARdUlEQVR4nO3dfWyN5+PH8c9x2mrTo99ObBmdh05GpPt6SsOWHiQ/upp4rB6KVKyiLPMUelAJnaGK2D+eLcsiyJeVTvxXY8sMTZeJznRIDKvUQ9gIpxqt0/P7Y9HEjGpdzum5+n4l/rivyX1dTeztdp/rPrcjEAgEBACwVptQLwAA8GoRegCwHKEHAMsRegCwHKEHAMtFhHoB/2bgwIFKSEgI9TIAIKxUVVWprKzsqfEWGfqEhAQVFxeHehkAEFbS09P/dZxbNwBgOUIPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9ADQQkzcXqqJ20uNn5fQA4DlCD0AWI7QA4DlCD0AWI7QA4DlCD0AWI7QA4DlCD0AWI7QA4DlCD0AWI7QA4DljL8cfOzYsWrXrp0k6a233tKsWbO0ZMkSORwOvfPOO8rPz1ebNvz9AgDBYjT0Dx8+lCTt2rWrYWzWrFmaP3++Bg4cqOXLl+vo0aNKTU01OS0A4DmMXlqfP39eNTU1ys7O1tSpU1VeXq6KigoNGDBAkjR48GCdPHnS5JQAgEYYvaKPjo7W9OnT5fF4dOXKFc2YMUOBQEAOh0OSFBsbq/v375ucEgDQCKOhT0xMVNeuXeVwOJSYmKj4+HhVVFQ0/Pfq6mrFxcWZnBIA0Aijt27279+vwsJCSdLNmzfl8/mUkpKisrIySdKxY8eUnJxsckoAQCOMXtFnZGQoLy9PkyZNksPhUEFBgV577TUtW7ZMn3/+ud5++22lpaWZnBIA0AijoY+KitKGDRueGt+9e7fJaQDAOgdPV+l05V3V+uuVUvidvGk9NbZfgpFzs6EdAELs4Okq5RX/qlp/vSSp6m6N8op/1cHTVUbOT+gBIMTWl1xQTZ3/ibGaOr/Wl1wwcn5CDwAhdu1uTZPGm4rQA0CIdYqPadJ4UxF6AAgxb1pPxUQ6nxiLiXTKm9bTyPmNf6kZAKBpHu+uWbT/jGr99UqIjzG664bQA0ALMLZfgv73U6Ukad/M942em1s3AGA5Qg8AliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5o99HX1dXp6VLl6qqqkq1tbX6+OOP9eabb2rWrFnq1q2bJGnSpEkaMWKEyWkBAM9hNPSHDh1SfHy81q9frzt37mjcuHH65JNP9NFHHyk7O9vkVABgHdMvHHnMaOiHDx+utLS0hmOn06mzZ8/q8uXLOnr0qLp27aqlS5fK5XKZnBYA8BxG79HHxsbK5XLJ5/Np7ty5mj9/vnr37q1FixZpz5496ty5szZv3mxySgBAI4x/GHv9+nVNnTpVY8aM0ahRo5Samqp3331XkpSamqrffvvN9JQAgOcwGvrbt28rOztbXq9XGRkZkqTp06frzJkzkqTS0lIlJSWZnBIA0Aij9+i3bdume/fuacuWLdqyZYskacmSJSooKFBkZKQ6dOiglStXmpwSANAIRyAQCIR6Ef+Unp6u4uLiUC8DAMLKs9rJA1MAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWiwjGJPX19fr000914cIFRUVFadWqVeratWswpgaAVi8oV/RHjhxRbW2t9u3bp4ULF6qwsDAY0wIAFKTQnzp1SoMGDZIk9e3bV2fPng3GtAAABSn0Pp9PLper4djpdOrRo0fBmBo
AWr2ghN7lcqm6urrhuL6+XhERQfl4AABavaCEvn///jp27Jgkqby8XD169AjGtAAABWnXTWpqqk6cOKHMzEwFAgEVFBQEY1oAgIIU+jZt2uizzz4LxlQAgH/ggSkAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLEXoAsByhBwDLRZg60f379+X1euXz+VRXV6clS5aoX79+Onz4sNatW6eOHTtKkubMmaMBAwaYmhYA0Ahjof/qq6/03nvvadq0abp06ZIWLlyob775RhUVFfJ6vUpLSzM1FQCgCYyFftq0aYqKipIk+f1+tW3bVpJUUVGhc+fOaefOnerdu7dyc3MVEWFsWgBAI5p1j76oqEgjR4584teVK1cUHR2tW7duyev1asGCBZKklJQULVu2THv27NGDBw+0d+9eoz8AAOD5mnVp7fF45PF4nhq/cOGCFixYoEWLFjXchx8/frzi4uIkSUOHDlVJSclLLBcA0FTGdt1cvHhR8+bN04YNGzRkyBBJUiAQ0OjRo3Xjxg1JUmlpqZKSkkxNCQB4AcZulm/YsEG1tbVavXq1JMnlcmnr1q1atWqVZs+erejoaHXv3l0TJkwwNSUA4AUYC/3WrVv/ddztdsvtdpuaBgDQRDwwBQCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCDwCWI/QAYDlCD6BFmri9VBO3l4Z6GVYg9ABgOUIPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9EAYYKshXgahBwDLEXqEHa5ugaYh9ABgOUIPAJaLMHWiQCCgwYMHq1u3bpKkvn37auHChSovL9fq1avldDrldrs1e/ZsU1MCAF6AsdBXVlYqKSlJ27Zte2I8Pz9fGzduVOfOnZWTk6OKigolJSWZmhYA0Ahjt24qKip08+ZNZWVlacaMGbp06ZJ8Pp9qa2vVpUsXORwOud1ulZbyIRoABFOzruiLioq0c+fOJ8aWL1+unJwcffjhh/r555/l9Xq1efNmuVyuht8TGxurq1evvtyKAQBN0qzQezweeTyeJ8ZqamrkdDolScnJybp586ZiY2NVXV3d8Huqq6sVFxf3EsvFPz3eZrhv5vshXgmAlsrYrZtNmzY1XOWfP39enTp1Urt27RQZGanKykoFAgEdP35cycnJpqYEALwAYx/G5uTkyOv16ocffpDT6dSaNWskSStWrFBubq78fr/cbrf69OljakoAwAswFvr//Oc/2rFjx1Pjffv21ddff21qGgCtwMHTVTpdeVe1/nqlFH4nb1pPje2XEOplhS0emALQohw8XaW84l9V66+XJFXdrVFe8a86eLoqxCsLX4QeQIuyvuSCaur8T4zV1Pm1vuRCiFYU/gg9gBbl2t2aJo2jcYQeQIvSKT6mSeNoHKEH0KJ403oqJtL5xFhMpFPetJ4hWlH4M7brBgBMeLy7ZtH+M6r11yshPoZdNy+J0AMtXGvcaji2X4L+91OlJJ76NoFbN0ALxlZDmEDoEVYeX92WXf5LKYXfWR88thrCBEKPsNEar27ZaggTCD3CRmu8umWrIUwg9AgbrfHqlq2GMIHQI2y0xqvbsf0StCb9v4py/v2/akJ8jNak/9f6XTcwi9AjbLTWq9ux/RLUr0u8Bia214kl/0fk0WSEPoy1th0oXN0CzWPVA1Ot6bV6z9qBIsnq8PEgDdB0XNGHqda4AwVA8xD6MNUad6AAaB5CH6Za4w4UAM1D6MNUa92BAqDprPowtjXhq1wBvChCH8bYgQLgRRgL/Y4dO/Tjjz9Kku7du6fbt2/rxIkTOnz4sNatW6eOHTtKkubMmaMBAwaYmhYA0Ahjoc/JyVFOTo4kaebMmcrNzZUkVVRUyOv1Ki0tzdRUAFoB/pVqjvEPYw8fPqy4uDgNGjRI0t+hP3D
ggCZPnqzCwkI9evTI9JQAgOdoVuiLioo0cuTIJ36dOXNGkrR9+3bNnj274fempKRo2bJl2rNnjx48eKC9e/eaWTkA4IU069aNx+ORx+N5avzixYuKi4tT165dG8bGjx+vuLg4SdLQoUNVUlLSzKUCAJrD6K2bkydPavDgwQ3HgUBAo0eP1o0bNyRJpaWlSkpKMjklAKARRrdXXr58WSkpKQ3HDodDq1at0uzZsxUdHa3u3btrwoQJJqcEADTCaOjz8/OfGnO73XK73SanAQA0AQ9MAWGArYZ4GXzXDQBYjit6hB2uboGm4YoeACxnTehb2/tTAeBFWRH6Z70/ldgDgCWh5/2pAPBsVoSe96cCwLNZEXrenwoAz2ZF6Fvz+1P3zXyf7YYAnsuKffS8PxUAns2K0Eu8PxUAnsWKWzcAgGcj9ABgOUIPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9ABguZcK/bfffquFCxc2HJeXl8vj8SgzM1ObNm1qGN+0aZMyMjKUmZmpM2fOvMyUAIAmavZ33axatUrHjx9Xr169Gsby8/O1ceNGde7cWTk5OaqoqJAk/fTTTyoqKtL169c1Z84cHThw4OVXDgB4Ic0Off/+/TVs2DDt27dPkuTz+VRbW6suXbpIktxut0pLSxUVFSW32y2Hw6FOnTrJ7/frr7/+Uvv27c38BACA52o09EVFRdq5c+cTYwUFBRoxYoTKysoaxnw+n1wuV8NxbGysrl69qrZt2yo+Pv6J8fv37xN6AAiSRkPv8Xjk8XgaPZHL5VJ1dXXDcXV1teLi4hQZGfnUeLt27Zq5XABAUxnbdeNyuRQZGanKykoFAgEdP35cycnJ6t+/v44fP676+npdu3ZN9fX1XM0DQBAZffHIihUrlJubK7/fL7fbrT59+kiSkpOTNXHiRNXX12v58uUmp3wCLxwBgKc5AoFAINSL+Kf09HQVFxeHehkAEFae1U4emAIAyxF6ALAcoQcAyxF6ALAcoQcAyxF6ALAcoQcAyxF6ALCc0SdjTamqqlJ6enqolwEAYaWqqupfx1vkk7EAAHO4dQMAliP0AGA5Qg8AliP0AGA5Qg8AliP0AGA5a0L/+O1VEydOVFZWlv74449QLykofvnlF2VlZYV6GUFRV1cnr9eryZMnKyMjQ0ePHg31kl45v9+vvLw8ZWZmasqUKaqsrAz1koLmzz//1JAhQ/T777+HeilBMXbsWGVlZSkrK0t5eXlGz90iH5hqjiNHjqi2tlb79u1TeXm5CgsLtXXr1lAv65X64osvdOjQIcXExIR6KUFx6NAhxcfHa/369bpz547GjRunoUOHhnpZr9T3338vSdq7d6/Kysq0Zs0a6/9cS3//pb58+XJFR0eHeilB8fDhQ0nSrl27Xsn5rbmiP3XqlAYNGiRJ6tu3r86ePRviFb16Xbp00caNG0O9jKAZPny45s2b13DsdDpDuJrgGDZsmFauXClJunbtmjp06BDiFQXH2rVrlZmZqTfeeCPUSwmK8+fPq6amRtnZ2Zo6darKy8uNnt+a0Pt8PrlcroZjp9OpR48ehXBFr15aWpoiIqz5R1mjYmNj5XK55PP5NHfuXM2fPz/USwqKiIgILV68WCtXrlRaWlqol/PKFRcXq3379g0Xbq1BdHS0pk+fri+//FIrVqxQbm6u0X5ZE3qXy6Xq6uqG4/r6+lYVwdbi+vXrmjp1qsaMGaNRo0aFejlBs3btWpWUlGjZsmV68OBBqJfzSh04cEAnT55UVlaWzp07p8WLF+vWrVuhXtYrlZiYqNGjR8vhcCgxMVHx8fFGf2ZrQt+/f38dO3ZMklReXq4ePXqEeEUw7fbt28rOzpbX61VGRkaolxMUBw8e1Pbt2yVJMTExcjgc1t+y2rNnj3bv3q1du3apV69eWrt2rV5//fVQL+uV2r9/vwoLCyVJN2/elM/nM/ozW3PJm5qaqhMnTigzM1OBQEAFBQWhXhI
M27Ztm+7du6ctW7Zoy5Ytkv7+QNrmD+w++OAD5eXlacqUKXr06JGWLl2qtm3bhnpZMCwjI0N5eXmaNGmSHA6HCgoKjN6R4NsrAcBy1ty6AQD8O0IPAJYj9ABgOUIPAJYj9ABgOUIPAJYj9ABguf8HakITUCchdaQAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "actives = avg_df[abs(avg_df['n'])-ci_95 > 25]['n']\n", + "\n", + "plt.errorbar(np.arange(actives.shape[0]), actives, yerr=ci_95, fmt='o')" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "9rz2KjJ8l6BS", + "outputId": "ebeac3f3-091b-4e99-ac7d-8bfec5f59aac" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(430, 5) (392, 3) 6\n" + ] + } + ], + "source": [ + "# summary\n", + "print (raw_data.shape, avg_df.shape, len(actives.index))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "TiNqzX0Kl6BV" + }, + "source": [ + "In summary, we have:\n", + "* Removed data that did not address the question we hope to answer (small molecules only)\n", + "* Dropped NaNs\n", + "* Determined the noise of our measurements\n", + "* Removed exceptionally noisy datapoints\n", + "* Identified actives (using domain knowledge to determine a threshold)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "46rf9hMkl6BW" + }, + "source": [ + "## Determine model type, final form of dataset, and sanity load" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "vUK150zHl6BX" + }, + "source": [ + "Now, what model framework should we use? \n", + "\n", + "Given that we have 392 datapoints and 6 actives, this data will be used to build a low data one-shot classifier (10.1021/acscentsci.6b00367). If there were datasets of similar character, transfer learning could potentially be used, but this is not the case at the moment.\n", + "\n", + "\n", + "Let's apply logic to our dataframe in order to cast it into a binary format, suitable for classification." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 119 + }, + "colab_type": "code", + "id": "WwcvCbigl6BX", + "outputId": "a7e8abc2-f738-401d-9e1e-f4eb3238ba8b" + }, + "outputs": [], + "source": [ + "# 1 if condition for active is met, 0 otherwise\n", + "avg_df.loc[:, 'active'] = (abs(avg_df['n'])-ci_95 > 25).astype(int)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "2t7vmHnNl6Bc" + }, + "source": [ + "Now, save this to file." + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "a6AGQoB2l6Be" + }, + "outputs": [], + "source": [ + "avg_df.to_csv('modulators.csv', index=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Vs7Pkg7Il6Bp" + }, + "source": [ + "Now, we will convert this dataframe to a DeepChem dataset." + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + }, + "colab_type": "code", + "id": "NRpnbgyAl6Bv", + "outputId": "9f37a491-24cc-4a2c-af7c-23d1dd42e72c" + }, + "outputs": [], + "source": [ + "dataset_file = 'modulators.csv'\n", + "task = ['active']\n", + "featurizer_func = dc.feat.ConvMolFeaturizer()\n", + "\n", + "loader = dc.data.CSVLoader(tasks=task, feature_field='drug', featurizer=featurizer_func)\n", + "dataset = loader.create_dataset(dataset_file)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "D9GElTwzl6B0" + }, + "source": [ + "Lastly, it is often advantageous to numerically transform the data in some way. For example, sometimes it is useful to normalize the data, or to zero the mean. This depends in the task at hand.\n", + "\n", + "Built into DeepChem are many useful transformers, located in the deepchem.transformers.transformers base class. 
\n", + "\n", + "Because this is a classification model, and the number of actives is low, I will apply a balancing transformer. I treated this transformer as a hyperparameter when I began training models. It proved to unambiguously improve model performance." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "-Ll5i93il6B1" + }, + "outputs": [], + "source": [ + "transformer = dc.trans.BalancingTransformer(dataset=dataset)\n", + "dataset = transformer.transform(dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "L57S8x7sl6B4" + }, + "source": [ + "Now let's save the balanced dataset object to disk, and then reload it as a sanity check." + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "MwFyB7Ryl6B5" + }, + "outputs": [], + "source": [ + "dc.utils.save_to_disk(dataset, 'balanced_dataset.joblib')\n", + "balanced_dataset = dc.utils.load_from_disk('balanced_dataset.joblib')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Oydv-y4Fl6B9" + }, + "source": [ + "Tutorial written by Keri McKiernan (github.com/kmckiern) on September 8, 2016" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "F2E5bL1Jl6CD" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!\n", + "\n", + "\n", + "# Bibliography\n", + "\n", + "[2] Anderson, Eric, Gilman D. Veith, and David Weininger. \"SMILES, a line\n", + "notation and computerized interpreter for chemical structures.\" US\n", + "Environmental Protection Agency, Environmental Research Laboratory, 1987." + ] + } + ], + "metadata": { + "colab": { + "name": "09_Creating_a_high_fidelity_model_from_experimental_data.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb b/examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb index a191b0fd3f136700f9f51c6cd4c06eaf95e73ee5..d0a2b1a38773b7d10fa6b5be65d8c3a07eb9b685 100644 --- a/examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb +++ b/examples/tutorials/10_Exploring_Quantum_Chemistry_with_GDB1k.ipynb @@ -61,16 +61,97 @@ "metadata": { "id": "hiRnnJpG2UJY", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 153 + }, + "outputId": "4ccce479-ab8f-4b55-a00b-9554d53d874d" }, "source": [ - "%tensorflow_version 1.x\n", - 
"!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" ], - "execution_count": 0, - "outputs": [] + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "text": [ + " % Total % Received % Xferd Average Speed Time Time Time Current\n", + " Dload Upload Total Spent Left Speed\n", + "\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r100 3489 100 3489 0 0 37923 0 --:--:-- --:--:-- --:--:-- 37923\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "all packages is already installed\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "# conda environments:\n", + "#\n", + "base * /root/miniconda\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "rqGp9hYVBUyQ", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "outputId": "73b2f101-82a4-4299-a837-5b55c2e3a7a9" + }, + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ], + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: deepchem in /usr/local/lib/python3.6/dist-packages (2.4.0rc1.dev20200805143010)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n", + "Requirement already satisfied: joblib in 
/usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n", + "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n" + ], + "name": "stdout" + }, + { + "output_type": "execute_result", + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 8 + } + ] }, { "cell_type": "markdown", @@ -99,7 +180,7 @@ "from sklearn.ensemble import RandomForestRegressor\n", "from sklearn.kernel_ridge import KernelRidge" ], - "execution_count": 0, + "execution_count": 9, "outputs": [] }, { @@ -125,7 +206,7 @@ "smiles_field = \"smiles\"\n", "mol_field = \"mol\"" ], - "execution_count": 0, + "execution_count": 10, "outputs": [] }, { @@ -154,7 +235,7 @@ "source": [ "featurizer = dc.feat.CoulombMatrixEig(23, remove_hydrogens=False)" ], - "execution_count": 0, + "execution_count": 11, "outputs": [] }, { @@ -175,13 +256,13 @@ "colab": {} }, "source": [ - "loader = dc.data.SDFLoader(\n", - " tasks=[\"atomization_energy\"], smiles_field=\"smiles\",\n", - " featurizer=featurizer,\n", - " mol_field=\"mol\")\n", - "dataset = loader.featurize(dataset_file)" + "# loader = dc.data.SDFLoader(\n", + "# tasks=[\"atomization_energy\"], smiles_field=\"smiles\",\n", + "# featurizer=featurizer,\n", + "# mol_field=\"mol\")\n", + "# dataset = loader.featurize(dataset_file)" ], - "execution_count": 0, + "execution_count": 12, "outputs": [] }, { @@ -202,10 +283,10 @@ "colab": {} }, "source": [ - 
"random_splitter = dc.splits.RandomSplitter()\n", - "train_dataset, valid_dataset, test_dataset = random_splitter.train_valid_test_split(dataset)" + "# random_splitter = dc.splits.RandomSplitter()\n", + "# train_dataset, valid_dataset, test_dataset = random_splitter.train_valid_test_split(dataset)" ], - "execution_count": 0, + "execution_count": 13, "outputs": [] }, { @@ -230,15 +311,15 @@ "colab": {} }, "source": [ - "transformers = [\n", - " dc.trans.NormalizationTransformer(transform_X=True, dataset=train_dataset),\n", - " dc.trans.NormalizationTransformer(transform_y=True, dataset=train_dataset)]\n", + "# transformers = [\n", + "# dc.trans.NormalizationTransformer(transform_X=True, dataset=train_dataset),\n", + "# dc.trans.NormalizationTransformer(transform_y=True, dataset=train_dataset)]\n", "\n", - "for dataset in [train_dataset, valid_dataset, test_dataset]:\n", - " for transformer in transformers:\n", - " dataset = transformer.transform(dataset)" + "# for dataset in [train_dataset, valid_dataset, test_dataset]:\n", + "# for transformer in transformers:\n", + "# dataset = transformer.transform(dataset)" ], - "execution_count": 0, + "execution_count": 14, "outputs": [] }, { @@ -261,21 +342,21 @@ "colab": {} }, "source": [ - "def rf_model_builder(model_params, model_dir):\n", - " sklearn_model = RandomForestRegressor(**model_params)\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", - "params_dict = {\n", - " \"n_estimators\": [10, 100],\n", - " \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", - "}\n", + "# def rf_model_builder(model_params, model_dir):\n", + "# sklearn_model = RandomForestRegressor(**model_params)\n", + "# return dc.models.SklearnModel(sklearn_model, model_dir)\n", + "# params_dict = {\n", + "# \"n_estimators\": [10, 100],\n", + "# \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", + "# }\n", "\n", - "metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)\n", - "optimizer = 
dc.hyper.HyperparamOpt(rf_model_builder)\n", - "best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" + "# metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)\n", + "# optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", + "# best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", + "# params_dict, train_dataset, valid_dataset, transformers,\n", + "# metric=metric)" ], - "execution_count": 0, + "execution_count": 15, "outputs": [] }, { @@ -296,23 +377,23 @@ "colab": {} }, "source": [ - "def krr_model_builder(model_params, model_dir):\n", - " sklearn_model = KernelRidge(**model_params)\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", + "# def krr_model_builder(model_params, model_dir):\n", + "# sklearn_model = KernelRidge(**model_params)\n", + "# return dc.models.SklearnModel(sklearn_model, model_dir)\n", "\n", - "params_dict = {\n", - " \"kernel\": [\"laplacian\"],\n", - " \"alpha\": [0.0001],\n", - " \"gamma\": [0.0001]\n", - "}\n", + "# params_dict = {\n", + "# \"kernel\": [\"laplacian\"],\n", + "# \"alpha\": [0.0001],\n", + "# \"gamma\": [0.0001]\n", + "# }\n", "\n", - "metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)\n", - "optimizer = dc.hyper.HyperparamOpt(krr_model_builder)\n", - "best_krr, best_krr_hyperparams, all_krr_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" + "# metric = dc.metrics.Metric(dc.metrics.mean_absolute_error)\n", + "# optimizer = dc.hyper.HyperparamOpt(krr_model_builder)\n", + "# best_krr, best_krr_hyperparams, all_krr_results = optimizer.hyperparam_search(\n", + "# params_dict, train_dataset, valid_dataset, transformers,\n", + "# metric=metric)" ], - "execution_count": 0, + "execution_count": 16, "outputs": [] }, { diff --git 
a/examples/tutorials/11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb b/examples/tutorials/11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb deleted file mode 100644 index 9e15381aac67f2a0225a3c14cae9dcccb6817e45..0000000000000000000000000000000000000000 --- a/examples/tutorials/11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb +++ /dev/null @@ -1,594 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb", - "provenance": [] - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "hzpae9-r2aoK", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 11: Learning Unsupervised Embeddings for Molecules\n", - "\n", - "\n", - "In this example, we will use a `SeqToSeq` model to generate fingerprints for classifying molecules. This is based on the following paper, although some of the implementation details are different: Xu et al., \"Seq2seq Fingerprint: An Unsupervised Deep Molecular Embedding for Drug Discovery\" (https://doi.org/10.1145/3107411.3107424).\n", - "\n", - "Many types of models require their inputs to have a fixed shape. Since molecules can vary widely in the numbers of atoms and bonds they contain, this makes it hard to apply those models to them. We need a way of generating a fixed length \"fingerprint\" for each molecule. Various ways of doing this have been designed, such as Extended-Connectivity Fingerprints (ECFPs). 
But in this example, instead of designing a fingerprint by hand, we will let a `SeqToSeq` model learn its own method of creating fingerprints.\n", - "\n", - "A `SeqToSeq` model performs sequence to sequence translation. For example, they are often used to translate text from one language to another. It consists of two parts called the \"encoder\" and \"decoder\". The encoder is a stack of recurrent layers. The input sequence is fed into it, one token at a time, and it generates a fixed length vector called the \"embedding vector\". The decoder is another stack of recurrent layers that performs the inverse operation: it takes the embedding vector as input, and generates the output sequence. By training it on appropriately chosen input/output pairs, you can create a model that performs many sorts of transformations.\n", - "\n", - "In this case, we will use SMILES strings describing molecules as the input sequences. We will train the model as an autoencoder, so it tries to make the output sequences identical to the input sequences. For that to work, the encoder must create embedding vectors that contain all information from the original sequence. That's exactly what we want in a fingerprint, so perhaps those embedding vectors will then be useful as a way to represent molecules in other models!\n", - "\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment. 
This notebook will take a few hours to run on a GPU machine, so we encourage you to run it on Google colab unless you have a good GPU machine available." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ci69aRSm2aoO", - "colab_type": "code", - "colab": {} - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6bm1iYbw2aoT", - "colab_type": "text" - }, - "source": [ - "Let's start by loading the data. We will use the MUV dataset. It includes 74,501 molecules in the training set, and 9313 molecules in the validation set, so it gives us plenty of SMILES strings to work with." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "YnAnjl9d2aoU", - "colab_type": "code", - "colab": {}, - "outputId": "672ec5a4-9d90-44f1-d503-98e9d9fbb40d" - }, - "source": [ - "import deepchem as dc\n", - "tasks, datasets, transformers = dc.molnet.load_muv()\n", - "train_dataset, valid_dataset, test_dataset = datasets\n", - "train_smiles = train_dataset.ids\n", - "valid_smiles = valid_dataset.ids" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n", - "RDKit WARNING: [15:40:18] Enabling RDKit 2019.09.3 jupyter extensions\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:516: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:517: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:518: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:519: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:520: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", - 
"/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/framework/dtypes.py:525: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:541: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:542: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:543: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:544: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:545: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as 
(type, (1,)) / '(1,)type'.\n", - " _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\n", - "/Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorboard/compat/tensorflow_stub/dtypes.py:550: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n", - " np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /var/folders/st/ds45jcqj2232lvhr0y9qt5sc0000gn/T/muv.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 0 took 10.486 s\n", - "Loading shard 2 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 1 took 10.458 s\n", - "Loading shard 3 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 2 took 10.235 s\n", - "Loading shard 4 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 
6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 3 took 10.636 s\n", - "Loading shard 5 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 4 took 10.483 s\n", - "Loading shard 6 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 5 took 10.145 s\n", - "Loading shard 7 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 6 took 9.811 s\n", - "Loading shard 8 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 7 took 10.585 s\n", - "Loading shard 9 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 8 took 10.481 s\n", - "Loading shard 10 of size 8192.\n", - "Featurizing sample 0\n", - 
"Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 9 took 11.081 s\n", - "Loading shard 11 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "Featurizing sample 8000\n", - "TIMING: featurizing shard 10 took 10.569 s\n", - "Loading shard 12 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "TIMING: featurizing shard 11 took 3.824 s\n", - "TIMING: dataset construction took 121.359 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 3.393 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 1.770 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 1.871 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "EslVHE2m2aoY", - "colab_type": "text" - }, - "source": [ - "We need to define the \"alphabet\" for our `SeqToSeq` model, the list of all tokens that can appear in sequences. (It's also possible for input and output sequences to have different alphabets, but since we're training it as an autoencoder, they're identical in this case.) Make a list of every character that appears in any training sequence." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "nsE8e9xn2aoa", - "colab_type": "code", - "colab": {} - }, - "source": [ - "tokens = set()\n", - "for s in train_smiles:\n", - " tokens = tokens.union(set(c for c in s))\n", - "tokens = sorted(list(tokens))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "vgzyJ1-42aog", - "colab_type": "text" - }, - "source": [ - "Create the model and define the optimization method to use. In this case, learning works much better if we gradually decrease the learning rate. We use an `ExponentialDecay` to multiply the learning rate by 0.9 after each epoch." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NHKrymnM2aoh", - "colab_type": "code", - "colab": {}, - "outputId": "fe3a80bd-9432-469c-d1ef-7bf0c39e42eb" - }, - "source": [ - "from deepchem.models.optimizers import Adam, ExponentialDecay\n", - "max_length = max(len(s) for s in train_smiles)\n", - "batch_size = 100\n", - "batches_per_epoch = len(train_smiles)/batch_size\n", - "model = dc.models.SeqToSeq(tokens,\n", - " tokens,\n", - " max_length,\n", - " encoder_layers=2,\n", - " decoder_layers=2,\n", - " embedding_dimension=256,\n", - " model_dir='fingerprint',\n", - " batch_size=batch_size,\n", - " learning_rate=ExponentialDecay(0.004, 0.9, batches_per_epoch))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /Users/bharath/opt/anaconda3/envs/deepchem/lib/python3.6/site-packages/tensorflow/python/ops/init_ops.py:1251: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Call initializer instance with the dtype argument instead of passing it to the constructor\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting >: AssertionError: Bad argument number for Name: 3, expecting 4\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting >: AssertionError: Bad argument number for Name: 3, expecting 4\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting >: AssertionError: Bad argument number for Name: 3, expecting 4\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutgoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: converting >: AssertionError: Bad argument number for Name: 3, expecting 4\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "hSr7FkSW2aok", - "colab_type": "text" - }, - "source": [ - "Let's train it! The input to `fit_sequences()` is a generator that produces input/output pairs. On a good GPU, this should take a few hours or less." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NZ5l_g1E2aok", - "colab_type": "code", - "colab": {}, - "outputId": "8db60a71-2724-4342-d513-13d7bcbad3f9" - }, - "source": [ - "def generate_sequences(epochs):\n", - " for i in range(epochs):\n", - " for s in train_smiles:\n", - " yield (s, s)\n", - "\n", - "model.fit_sequences(generate_sequences(40))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Ending global_step 999: Average loss 72.0029\n", - "Ending global_step 1999: Average loss 40.7221\n", - "Ending global_step 2999: Average loss 31.5364\n", - "Ending global_step 3999: Average loss 26.4576\n", - "Ending global_step 4999: Average loss 22.814\n", - "Ending global_step 5999: Average loss 19.5248\n", - "Ending global_step 6999: Average loss 16.4594\n", - "Ending global_step 7999: Average loss 18.8898\n", - "Ending global_step 8999: Average loss 13.476\n", - "Ending global_step 9999: Average loss 11.5528\n", - "Ending global_step 10999: Average loss 10.1594\n", - "Ending global_step 11999: Average loss 10.6434\n", - "Ending global_step 12999: Average loss 6.57057\n", - "Ending global_step 13999: Average loss 6.46177\n", - "Ending global_step 14999: Average loss 7.53559\n", - "Ending global_step 15999: Average loss 4.95809\n", - "Ending global_step 16999: Average loss 4.35039\n", - "Ending global_step 17999: Average loss 3.39137\n", - "Ending global_step 18999: Average loss 3.5216\n", - "Ending global_step 19999: Average loss 3.08579\n", - "Ending global_step 20999: Average loss 2.80738\n", - "Ending global_step 21999: Average loss 2.92217\n", - "Ending global_step 22999: Average loss 2.51032\n", - "Ending global_step 23999: Average loss 1.86265\n", - "Ending global_step 24999: Average loss 1.67088\n", - "Ending global_step 25999: Average loss 1.87016\n", - "Ending global_step 26999: Average loss 1.61166\n", - "Ending global_step 27999: Average loss 1.40708\n", - "Ending global_step 28999: Average loss 
1.4488\n", - "Ending global_step 29801: Average loss 1.33917\n", - "TIMING: model fitting took 5619.924 s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_lxf1lmX2aoo", - "colab_type": "text" - }, - "source": [ - "Let's see how well it works as an autoencoder. We'll run the first 500 molecules from the validation set through it, and see how many of them are exactly reproduced." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NXDBtIvn2aop", - "colab_type": "code", - "colab": {}, - "outputId": "59d18b07-0945-4bbb-ecf0-9860ed140e62" - }, - "source": [ - "predicted = model.predict_from_sequences(valid_smiles[:500])\n", - "count = 0\n", - "for s,p in zip(valid_smiles[:500], predicted):\n", - " if ''.join(p) == s:\n", - " count += 1\n", - "print('reproduced', count, 'of 500 validation SMILES strings')" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "reproduced 363 of 500 validation SMILES strings\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Rt9GLy502aou", - "colab_type": "text" - }, - "source": [ - "Now we'll trying using the encoder as a way to generate molecular fingerprints. We compute the embedding vectors for all molecules in the training and validation datasets, and create new datasets that have those as their feature vectors. The amount of data is small enough that we can just store everything in memory." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "kdUfsbtZ2aov", - "colab_type": "code", - "colab": {} - }, - "source": [ - "train_embeddings = model.predict_embeddings(train_smiles)\n", - "train_embeddings_dataset = dc.data.NumpyDataset(train_embeddings,\n", - " train_dataset.y,\n", - " train_dataset.w,\n", - " train_dataset.ids)\n", - "\n", - "valid_embeddings = model.predict_embeddings(valid_smiles)\n", - "valid_embeddings_dataset = dc.data.NumpyDataset(valid_embeddings,\n", - " valid_dataset.y,\n", - " valid_dataset.w,\n", - " valid_dataset.ids)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "lVvfGr562aoz", - "colab_type": "text" - }, - "source": [ - "For classification, we'll use a simple fully connected network with one hidden layer." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tFmnnVNm2aoz", - "colab_type": "code", - "colab": {}, - "outputId": "e4efa887-24ac-4fab-e17b-fe27fc905a2b" - }, - "source": [ - "classifier = dc.models.MultitaskClassifier(n_tasks=len(tasks),\n", - " n_features=256,\n", - " layer_sizes=[512])\n", - "classifier.fit(train_embeddings_dataset, nb_epoch=10)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Ending global_step 999: Average loss 829.805\n", - "Ending global_step 1999: Average loss 450.42\n", - "Ending global_step 2999: Average loss 326.079\n", - "Ending global_step 3999: Average loss 265.199\n", - "Ending global_step 4999: Average loss 246.724\n", - "Ending global_step 5999: Average loss 224.64\n", - "Ending global_step 6999: Average loss 202.624\n", - "Ending global_step 7460: Average loss 213.885\n", - "TIMING: model fitting took 19.780 s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "khdB2v7R2ao2", - "colab_type": "text" - }, - "source": [ - "Find out how well it worked. Compute the ROC AUC for the training and validation datasets." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ZlilhPvm2ao2", - "colab_type": "code", - "colab": {}, - "outputId": "7ee4c5d3-2647-401a-ce5f-65ded20daaee" - }, - "source": [ - "import numpy as np\n", - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean, mode=\"classification\")\n", - "train_score = classifier.evaluate(train_embeddings_dataset, [metric], transformers)\n", - "valid_score = classifier.evaluate(valid_embeddings_dataset, [metric], transformers)\n", - "print('Training set ROC AUC:', train_score)\n", - "print('Validation set ROC AUC:', valid_score)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.97828427249789751, 0.98705973960125326, 0.966007068438685, 0.9874401066031584, 0.97794394675150698, 0.98021719680962449, 0.95318452689781941, 0.97185747562764213, 0.96389538770053473, 0.96798988621997473, 0.9690779239145807, 0.98544402211472004, 0.97762497271338133, 0.96843239633294886, 0.97753648081489997, 0.96504683675485614, 0.93547151958366914]\n", - "computed_metrics: [0.90790686952512678, 0.79891461649782913, 0.61900937081659968, 0.75241212956581671, 0.58678903240426017, 0.72765072765072758, 0.34929006085192693, 0.83986814712005553, 0.82379943502824859, 0.61844636844636847, 0.863620199146515, 0.68106930272108857, 0.98020477815699669, 0.85073580939032944, 0.781015678254942, 0.75399733510992673, nan]\n", - "Training set ROC AUC: {'mean-roc_auc_score': 0.97132433878689139}\n", - "Validation set ROC AUC: {'mean-roc_auc_score': 0.74592061629292239}\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "ixqbRXnW2ao6", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/11_Putting_Multitask_Learning_to_Work.ipynb b/examples/tutorials/11_Putting_Multitask_Learning_to_Work.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c6bd972089b1262efab546d3e0dfaf3e89ed23d4 --- /dev/null +++ b/examples/tutorials/11_Putting_Multitask_Learning_to_Work.ipynb @@ -0,0 +1,237 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ElXOa7R7g37i" + }, + "source": [ + "# Tutorial Part 11: Putting Multitask Learning to Work\n", + "\n", + "This notebook walks through the creation of multitask models on MUV [1]. The goal is to demonstrate how multitask methods can provide improved performance in situations with little or very unbalanced data.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/11_Putting_Multitask_Learning_to_Work.ipynb)\n", + "\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "Fc_4bSWJg37l", + "outputId": "dce34f1f-e14f-42d0-ccb6-c0893d0fda3f" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "3HHM8X9t_NPp", + "outputId": "1da9ace2-4f46-4e1e-93cf-97eae4ef8bb5" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "9Ow2nQtZg37p" + }, + "source": [ + "The MUV dataset is a challenging benchmark in molecular design that consists of 17 different \"targets\" where there are only a few \"active\" compounds per target. There are 93,087 compounds in total, yet no task has more than 30 active compounds, and many have even less. Training a model with such a small number of positive examples is very challenging. Multitask models address this by training a single model that predicts all the different targets at once. If a feature is useful for predicting one task, it often is useful for predicting several other tasks as well. Each added task makes it easier to learn important features, which improves performance on other tasks [2].\n", + "\n", + "To get started, let's load the MUV dataset. The MoleculeNet loader function automatically splits it into training, validation, and test sets. Because there are so few positive examples, we use stratified splitting to ensure the test set has enough of them to evaluate." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 85 + }, + "colab_type": "code", + "id": "FGi-ZEfSg37q", + "outputId": "c806cf75-0666-4d5d-a8cd-8f5470286017" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import numpy as np\n", + "\n", + "tasks, datasets, transformers = dc.molnet.load_muv(split='stratified')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "6nRCpb08g375" + }, + "source": [ + "Now let's train a model on it. We'll use a MultitaskClassifier, which is a simple stack of fully connected layers." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "BvfbTbsEg376" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.0004961589723825455" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "n_tasks = len(tasks)\n", + "n_features = train_dataset.get_data_shape()[0]\n", + "model = dc.models.MultitaskClassifier(n_tasks, n_features)\n", + "model.fit(train_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's see how well it does on the test set. We loop over the 17 tasks and compute the ROC AUC for each one." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MUV-466 0.9207684040838259\n", + "MUV-548 0.7480655561526062\n", + "MUV-600 0.9927995701235895\n", + "MUV-644 0.9974207415368082\n", + "MUV-652 0.7823481998925309\n", + "MUV-689 0.6636843990686011\n", + "MUV-692 0.6319093677234462\n", + "MUV-712 0.7787838079885365\n", + "MUV-713 0.7910711087229088\n", + "MUV-733 0.4401307540748701\n", + "MUV-737 0.34679383843811573\n", + "MUV-810 0.9564571019165323\n", + "MUV-832 0.9991044241447251\n", + "MUV-846 0.7519881783987103\n", + "MUV-852 0.8516747268493642\n", + "MUV-858 0.5906591438294824\n", + "MUV-859 0.5962954008166774\n" + ] + } + ], + "source": [ + "y_true = test_dataset.y\n", + "y_pred = model.predict(test_dataset)\n", + "metric = dc.metrics.roc_auc_score\n", + "for i in range(n_tasks):\n", + " score = metric(dc.metrics.to_one_hot(y_true[:,i]), y_pred[:,i])\n", + " print(tasks[i], score)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Not bad! Recall that random guessing would produce a ROC AUC score of 0.5, and a perfect predictor would score 1.0. Most of the tasks did much better than random guessing, and many of them are above 0.9.\n", + "\n", + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!\n", + "\n", + "# Bibliography\n", + "\n", + "[1] https://pubs.acs.org/doi/10.1021/ci8002649\n", + "\n", + "[2] https://pubs.acs.org/doi/abs/10.1021/acs.jcim.7b00146" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "05_Putting_Multitask_Learning_to_Work.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/12_Predicting_Ki_of_Ligands_to_a_Protein.ipynb b/examples/tutorials/12_Predicting_Ki_of_Ligands_to_a_Protein.ipynb deleted file mode 100644 index 00eee1f706d510bc63c6ba70e3ce68cfc2aeb6a7..0000000000000000000000000000000000000000 --- a/examples/tutorials/12_Predicting_Ki_of_Ligands_to_a_Protein.ipynb +++ /dev/null @@ -1,1922 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", 
- "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "12_Predicting_Ki_of_Ligands_to_a_Protein.ipynb", - "provenance": [] - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "lNXzKyg2eYtR", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 12: Predicting Ki of Ligands to a Protein\n", - "\n", - "\n", - "In this notebook, we analyze the BACE enyzme and build machine learning models for predicting the Ki of ligands to the protein. We will use the `deepchem` library to load this data into memory, split into train/test/validation folds, build and cross-validate models, and report statistics.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/12_Predicting_Ki_of_Ligands_to_a_Protein.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "xoDXdhhYfKmD", - "colab_type": "code", - "colab": {} - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "9uKkg6iXeYtb", - "colab_type": "code", - "outputId": "8a41594b-a80f-4008-964b-2c10132278bf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 304 - } - }, - "source": [ - "import os\n", - "import sys\n", - "import deepchem as dc\n", - "from deepchem.utils.save import load_from_disk\n", - "\n", - "current_dir = os.path.dirname(os.path.realpath(\"__file__\"))\n", - "dc.utils.download_url(\"https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/desc_canvas_aug30.csv\",\n", - " current_dir)\n", - "dataset_file = \"desc_canvas_aug30.csv\"\n", - "dataset = load_from_disk(dataset_file)\n", - "num_display=10\n", - "pretty_columns = (\n", - " \"[\" + \",\".join([\"'%s'\" % column for column in dataset.columns.values[:num_display]])\n", - " + \",...]\")\n", - "\n", - "dc.utils.download_url(\"https://s3-us-west-1.amazonaws.com/deepchem.io/datasets/crystal_desc_canvas_aug30.csv\",\n", - " current_dir)\n", - "crystal_dataset_file = \"crystal_desc_canvas_aug30.csv\"\n", - "crystal_dataset = load_from_disk(crystal_dataset_file)\n", - "\n", - "print(\"Columns of dataset: %s\" % pretty_columns)\n", - "print(\"Number of examples in dataset: %s\" % str(dataset.shape[0]))\n", - "print(\"Number of examples in crystal dataset: %s\" % str(crystal_dataset.shape[0]))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will 
be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "display_data", - "data": { - "text/html": [ - "

\n", - "The default version of TensorFlow in Colab will switch to TensorFlow 2.x on the 27th of March, 2020.
\n", - "We recommend you upgrade now\n", - "or ensure your notebook will continue to use TensorFlow 1.x via the %tensorflow_version 1.x magic:\n", - "more info.

\n" - ], - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n", - "Columns of dataset: ['mol','CID','Class','Model','pIC50','MW','AlogP','HBA','HBD','RB',...]\n", - "Number of examples in dataset: 1522\n", - "Number of examples in crystal dataset: 25\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "fX2Dy785eYtp", - "colab_type": "text" - }, - "source": [ - "To gain a visual understanding of compounds in our dataset, let's draw them using rdkit. We define a couple of helper functions to get started." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "TxN6zSo8eYts", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import tempfile\n", - "from rdkit import Chem\n", - "from rdkit.Chem import Draw\n", - "from itertools import islice\n", - "from IPython.display import Image, display, HTML\n", - "\n", - "def display_images(filenames):\n", - " \"\"\"Helper to pretty-print images.\"\"\"\n", - " for filename in filenames:\n", - " display(Image(filename))\n", - "\n", - "def mols_to_pngs(mols, basename=\"test\"):\n", - " \"\"\"Helper to write RDKit mols to png files.\"\"\"\n", - " filenames = []\n", - " for i, mol in enumerate(mols):\n", - " filename = \"BACE_%s%d.png\" % (basename, i)\n", - " Draw.MolToFile(mol, filename)\n", - " filenames.append(filename)\n", - " return filenames" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "qnqxVm8ceYtw", - "colab_type": "text" - }, - "source": [ - "Now, we display a compound from the dataset. Note the complex ring structures and polar structures." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "qEaaVKbKeYtz", - "colab_type": "code", - "outputId": "e31aadd2-7663-4f00-815e-33f37bd0f828", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "num_to_display = 12\n", - "molecules = []\n", - "for _, data in islice(dataset.iterrows(), num_to_display):\n", - " molecules.append(Chem.MolFromSmiles(data[\"mol\"]))\n", - "display_images(mols_to_pngs(molecules, basename=\"dataset\"))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKv0lEQVR4nO3d3ZabOBBFYTEr7//K\nzAUdQgPGQn+nqrS/lYtOJhnbwLZAxrCs65oA6PynfgLA7IgQECNCQIwIATEiBMSIEBAjQkCMCAEx\nIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQ\nECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEitG5Z7n+7LD+/\njv/p9Fu48Ef9BFBiWdK63vwMjxgJXfpU3boyAPrDSOgAXcVGhA4cx73rUd9pVNwGQ3ZQHSFCrygt\nDI4JXXoukCNDXxgJvTpmxpDo2rKyAgEpdkfDYo/UCyIExIgQECPCsJgj9YIIATEiBMSIEBAjQkCM\nCAExIoyMCVIXiBAQI0JAjAjDY3/UOiIExIgwuHVdFyZnbCNCQIwIATEiBMSIEBAjQkCMCONjgtQ4\nIgTEiHAWDIZmcfHf+Jbl5+qyxw653qwdXPw3uL3A65/vP7MNaBFhWFtmOev3b5BsCxpEGNOnAfDb\nv/r5gY1iJI4JAyorMKVft+BOpDgKEUbzfBCYGef2txgYx2B3NJSvY+A+H/NqvR8HxtMgyb1K6zES\nxpGzF7r/hVc1kllXRBjE2+PAS41ryo5tu4gbZbZChI29OvSy8IjHf8jdfyWIsKXruSm9ayyeCL11\n/D89T5AyGDZEhM0ceyg79Kp5xOYIbBgibOBhh7BfjV0LzMHlvVshwlqZMbStUV4gGmJdVqmJofgU\nagoMxv3qFG6RDR86c3gcP/WKASJEmBTbZaf4H2q0OQAyR1rP9zHhtl2O/854vx4+HTraLBBN+I4w\nXbbO3hvrsIH3VCMFBub4GjP5Jyu3fcTBPRjPjw8q6jmO8KrrMGhth5ALN4URKsKjYzP126u2QC4c\nGpvXCPcpmQFtWBsDEYzXCK8yU3k7pJgtkOExDK8Rfp2SOe6L1pzUYrNAU5ibqeQ1ws2xkMxhMPNA\ncVkWCsQYviP8pDIeyUcRz9j5DCxmhEc5o+Vx+2YALMIbRLn4EX5ye9BIgRhv3gh3fquzs49q55l4\nRISAGBH+Yvkd3fJzQw0iBMSIEBAjQkCMCD25HhbaOVC080zcIcIzNiYMRoRoifevAu6vMQMjxt+H\nIwwiRAPj78MRCRE6M+x6Apkk9+EIxtDqNMXUhn5yfW6qZ/v2cYuv/B8bIyEKFZR/ukLs9Q/nRIQo\n8anA/EsVs7O6I8IIBm++D2NgwQQpNdo98pHzdVho86HLupqtRkZCvPA2/rJRbrb7cBChV4MHw8oe
\nyrqa5BRCInRp8LkpDYPnrJoru4c9cpaPCU+6btO9b7Oz/fDwEI5WRJngL6+Su9XfvMZhS8DdLYob\nYnc0lLbT/SO3/tkmY46IMKbbq/2//ZxAUsJU+W2IMLiCM8UmHIu0iHAiOTurBg/ArH1xpDkifHL8\nnCrSRvCpxtjbullE+N3po60UKMiZp0PsIMJctwdXKcqGO8m5KTYR4ZNPu2enIPfrOgx6WgOxgzoA\nV1v7KHP7W9c1bb+W5eeXT5YHQ8vPrR4j4b2twHdHSvtf2zYXBhDkIcIb+xhYeLbx9tf2d27PNYb/\neMACIjy7bnOFU4gMjMhDhL88v+tXDYyoE3hMJsJ/luX1976DzYsG3tAtI8Ify/I6pfW0w/n3T788\nTObfxDSIsMUh2/EfP2R2Cr2ge0Q0e4TtQ7gG6bw09lF7m/rD+u5D0fYhPhqJ+pH9vBGyM3gr6oZu\n2bwRUiCMmDdCwIjZJ2aG2k7yPv4W38wwJxT/FaKAkVsg3j5ovCzZHYVR8WL7JGaEp6/17T+fpv2Y\nBTRrngJT1Ajh2kOBIeMMG+FpEgRezFZgmnB2lDJzqE5Vq78Lt0eRI9wGw9OK+3SuNZ4NyPKhwKj5\nbSJHCC8eBrrwBabwEXJkaN+EB4EnYSdm0NzgO5BOUmDijBk8UB0EDnhoU4Lvjp7EnmTzZc6J0Fvx\nI4x334iROo1I006E3gr7mq83/dr/U9SX3Enzm8NR4EnYl336enjUlzlSkxop8GreV45ixTVS4K2p\nXzwqdR0b5zHR6w9542sjig+5KTCFj5D5mPEy3+wm/Cjik+ARQuihRgbAI5YFurt+XMRWdzTX4mAX\nSIvlfyv+GTOJKRkzuLz3rSkipD1YxleZMBSD4dUUI2HiswoYNkuEhAez2B0FxIgQEJsrwmVZmBWQ\nY27mZJZjwg1HhjBorpEQMIgIAbHpIuSwENZMdwL36bpPU718U/guxW6iiZnjWt9/oEbITRHhwzdo\nqBFy8SPM3O251pgIEkME3y+vPPBgeOyKw8JN5JGwfh2zs4oBwkbY9l2WGmczcpSOGWG/JUiNMxi8\nnxwwwjFL8FQjKYYx/kg11Bkz29kwg5cg+RUz+HUKyVxRnJGQqTZUUm1CQUZCbYEG39HximQfahdh\nJGQMRA359uN+JJQvQbhmYfvxHaGFJYga2j15I9uP191RPhhAJSMFJqcj4bb4jCzBDXMzNcYvOjsF\nJo8joanFh0qSr1lb24ScRWht8aGG5GvWBjchZxEijE8xdD0f0GCBiQghkRND891UmwUmj1/qNbso\nk+3nZkTx+FZTo/G5dEZCjFPzJlV80Gj/nZEIMchzDPmpvKrRfoGJCDFGqwKPvk7huCgwEWFb20f2\nLlb8SD0KPLqdwnG0IvxFyIbuy8PKajtf4vdaBy5PW4MXzwV2OvfQUX4bIkQvXwsc/HzMIkJ0oS3Q\n1/n0RNiYr9XfyfgCXS9zfxMzibkZwx4mRdzNlwzjMkLYxEFgGXZH0QYHgcWIEA0YHAMdZek1QsuL\n2PJz68Fggb5wTIgqnzJjGiaf15EQlkmuxOV3B4QI25t8H8zOy/eSJRE2ZmcTHOO6oU/18ptwHKG1\n9zntTUXgFxMzbZAfijkeCe2gQCOs7RxlIsJaFGiZiyyJsAoFJicbumW+IxSvfgpEC74jVFqWRIFo\nwejs6KvxbfQ+4fbcKNAkj981HRfhq67yF+K2xMfdWIsB0Bv7WY6LsOtSGHS5Owr8wP6GbpnR3dFi\nHe84SYHow3eEOfe4229CWflIFOiFu2HZd4Rf/VsTxyPSgtXjZ43iyniWwSP857gCKoPEHeMbumXT\nRHh0DZJNJxZf7wVTRnh0WlsMkhEZH6Knj/DoNPvCZEwIxgtMnLaG2OwXmFxH6GL5TsXa1ym8bCHs\njiIgXxdcJEL0Nb4HLwPgjgjRV8cTCe+4KzAR4S/rykcUnYw5
w95jgYkIzxyuQlO+njfTb2B0WmAi\nQkicBsZUXaPfAhMRoof8JJrU6LrAlJLXZ+99uYdXHNXbg8YAWwIjIbooHuJeHTQGKDARIXorqzFn\nNjVGgYkIMUxljUe+Toj5iggx2rXGNNlB4AkRQubY0lQHgSdECBNydlZDFpicfkQRdWXg6FRj4JXO\nSAijBl3Q2QAihHWB89s4/mY9EIPLCMO/NWIqLiMEIiFCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQ\nI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwI\nATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkDsf0fmN+vs\nFylZAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKBElEQVR4nO3dUZLaSBaGUTHRO7L3\nvwLXmpgHTTMqAWWQUvpvkudEP7gdLgqq9OkmQojL9XqdgJz/pO8AjE6EECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGH/pO8APHC5XLZ94fV6bXtPTiBCyrlcLptb2vO1KZajECZCalmNshfXpbd/dr1e\nNy9lU0RIXa+vLZftddehCCmkx2d0+4mQKu4Xom8F2e8wFCGfqaMORUgJO8fgrKPwlkRIOXueGfa4\nKBUheccdj+miQxES1mQhutRFeEsipJBWI7GvRakISTrnhcHiHYqQKtoGWTy8JRESc+b5MZWbFCEx\nyzCOCPLo229FhJRw3EsUR9xsWyIk6ZxVYuUxOIkQ4kRI2NHDsPgYnEQIcSIk77hhWH8MTiKkiMqv\n4x1NhHysLsbgJELqaDsMeylwEiHEiZBCWg3Djsbg5DL4VPZukB2Ft9TTDoNB7JxjfY3ByXIU4jrb\nZzCIPc8Mu9ukPSeknO7WkztZjkKYCKlltDE4iRDiREghA47BSYQQJ0KqGHMMTiKEOBFSwrBjcBIh\nFYxc4CRCiBMhYYOPwekDzx29nfg79u+VjnzEJLyFd7lM1+v//luchj/sZbzqMwan7ifhXNePv0W/\n5kPNOzg/4T26jfCF/KYjCzyv7dUCe572737VYeYfwua1hnqnHiO8XC7XaeO2tb+c5Seh77md17/f\n/x/pi/lt/qr33cbgMkVdvaunCP/+O14+FbxeV8ntKXC1p79tc5kN7rVVwAlW7clvmz4ifGMXu/g3\n+7eJ1dxbVpd8qllsWw/vkvpXPcJWK5y3puKzNadF1w/8QDarHuHU6Lf7SoGvPN+7/9rmE+DbDX5f\nYL96Ew+/qvWTQzujVqovIU5Y5Gw+1nLQVnjgsZ8DDtJIcb8OJuFx9mxAB+0dlkeA2tR45NHRSX4t\nfMQZMzts3oZO2/j2fqPV0rTRJz04Camh0hF2dMDtoI2y5sOfX43QYSulI+zCPBaOqKXZbR4wDKeq\nO4geiXCvhi9SL2fLgZu4IVaMCFuqu3G3G4B1H2O3RNhMw0XpIcddv9/+nlOuddiWCJupXODUNB7P\nBtsSYRUnv+ptoNUx
9Iv1R6j8wXrXf99ZcruTg3wedXFDR3i9e7vTTl18zvPOt3TpsLm6y9Huft9d\n3OH693BAdSMc0AnP03buKTyTPMLQy9GGLhdD5gBjXMDSJGyg4RsVDh01TRbM1+v1pFn45AKWn6do\nhNY8zTWc1Z8exdnKLUe7e5No8/frNT9mS3GFIuwuv44csKc45t3CB78FuaYSEWbz2/e62SHbzKDD\ncPXT3HZ9nQ4lf9NnX0i3h3uy1KrD46ZL41s+65rF1WQmYbWV5/Jqot//4hMc+c7Edrc1aoHT+ZOw\nWn7PFFkHjbIoHbjA6cxJ2Et+s+UmMaVT/GzfdjTjFTjVv+4onz0MFTiVfbG+puUr1POf7/9m/oPX\nsl+x+myPMQucirxE0ZG/biq3f9Dqac7zE9ke3OKzb33OAdLlY1/dpUdf+MkT/i0ifM/9Br0K5IBL\n1295Ln3mXLn/Xs9/Pt//598OBw9ShHutDuHc/ryaCduq2Hw069nO4phTC356dN//fv1ZOu3vTYdE\n+La/nr683CLv14eva7uNlp00g4/ByYGZbX7YZm4HbFoVtOPTMs47PrTtew16dt4dk/ANqxMb//o3\nD7/wRU3GYP0OmUzCmnYW+OKuYb/7M67ZQITlnPSJFJQhwlo6OmDY5FUQV46aRFjKanM0Bgchwir6\nGggDn2TWngiLKj8Ge9plFCfCEvoag7QlwrzuCmz7CrtjMyIMe7j9lV+Lhv38nrLuiDCs4UfenyN4\notnyZMCuq1tx2loJyyVZX00e7dlbE898m8jRRFhFL0+Njjjr+v42L5fL/L6n179Pj/nNRFjIyB3O\n3loOfMwp454TssUR+4s57LeeJN//wx6z9G6ucjp6i12Tu9r8hPX9Z/OcfHlOy1G227kuXW3rrUbr\n5gtbpY6NdbPTHUpHw3DaNDcefknb+fP6zzB+XNokZK+3rph22krvlSld5KrwPe1xh9LXMJy9stH/\n8A+OmEgPv2N89K3095sexyd1+OLMaZ7H8vtWa+/GcpSWnrzsntzu65+N5HXCunp57X7ldrcvl8vt\npb93b6T1NVdLn6ArQtq7zcN3N/37EbrH6gNnynYowtI6HYZT1YVfTSKkrk53QO9yYKa6PcNw8HF0\ne/CV16KTCOvbeV5Y5Y3vodVOp8lDKP4jsByluo9flIqwtJ1zoN/jOg30c2lUy9FuvJVTd6vQn+3a\nGZWvUYR1rba8bVthj58BuBrgfd35DSxHKWpu7+MLnERYVsPx1eMzwwYDvJ96RTiEHjtspnyNIqyo\nu2dxzQ31ExBhOQdtfx0Nw6EKnERINaMVOImwmkM3wfrDcMACJ68T8plu+5oekjYJCzlhDlQehs0e\n/nyKzPxf1Qe7JEJKGHMhOrMcreLoazHcbvPZMPz158/DL/zz69f8h99fX6u/aX7fxiTCKpbniDQ/\n4bNV4c3zYxIhb5mH4cMUf36q+fM1fxuPweVTwR4GrAgLOWgYNlzo/jAJX7ng/IYv3KiH9m5EyBme\nlVb2UO2ZHB2tZXW56I+/9mbll0xOU+63QsNs7q9IX/DXXfNenckkLKf5MJyV3dYNQxFWt3kbLb4Q\n5UaEFRkOQxFhUTsXpX2NwcF3OiKEMBHWtXkY9jUGZyMPQxF24/WWuqiOGxGWtnM+9DIGZ8MOQxFC\nmHNHq/P5hB+vp+XKmPpaUu432uOdLEchToSlDTgWBjw8I0IIE2FdA47B2WiPWoRF/f76Gm1bHJYI\nIUyEFf3++nJxwXGIsBwFjmbQp/5Qh0kIYSKEMBFCmAghTIQQ5v2EJRz30X/UJ8Iq5Dcsy9Eqfn99\n3eYhQzEJqzAJh2USQpgIIcy5oxBmEkKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEM
JECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEsP8CF8inPV9dzocAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJt0lEQVR4nO3dXZLaRhiGUZHKjpz9\n78BrIhfKEIxAgNTSq691TvnCnmLGDM1Dt34Ql+v1OgA5f6XvAJydCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ\nJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhPUT4eVy\nSd8FWKKfCK/Xqw6pqJ8IBx1SU1cRQkW9RWgypJzeIhx0SDUdRjjokFL6jBAK6TZCkyFVdBvhoEOK\n6DnCQYdU0HmEgw45vP4jhIM7RYQmQ47sFBEOOuTAzhIhHNZZIrxcLtfrNX0v4ImzRAiHJUIIO0WE\n1qIc2SkihCPrP0LTIAfXf4RwcCKEsM4jtBbl+P5O3wH4w+3swvO8eoqQA7lfuUzP9e01y55Xa9ai\ntbwdr16zNBNyCA9z4NO6pl/sY+3a7Y4Z02AhnxT41PV67WCUO4zwcrl462Ahiwu8qf5m0U6Wo/dj\nsHJE2dOaMepmfAtH+DQ86uomqm8Vi/Cr8MZVyjnHtYSVC9FuxrdGhJ/vBHu4ZTfj1J/1m4L3Sg90\njQiH2fzmp8fSw9OrVgX2MbhlInxgg7CutnNgB4pFuOzgbB+vl33YbiDqjnKx44TXHwu+sfShpD48\nRNKkmQ5GtliEdKPorLWFE0XYwUtmdfdDsEWBRYe4SoRtBqzoIDGv+rBWibCZ6gNWncd/6nQR0qXS\nbReI8HIZmmw+3G+NXH40+LkcScUaix0nXOzVAWIH/fe30QG9h4Mf0y8e1ikinDlF4+nIDUUGj6np\nocjxL/dDvvd9eqdAhCsftM9PkhJkddPxfTJ294vVY4xsgQjXWLzsqbu2KWGLFemnP/DpbW5lJoa4\n5whbnSQ1fle5zX0+db/rr9VuwG8U2DvaRKt3rLW6P7R9PEufBNdthK3eLyO8Haw8YlS6wOHgy9HF\nywTvWKtlzRGjDsb30L/AOCLjHbxFeP/FF9/VuEBJb2fm8XyYGJ/erNlw2DEz43p9nAPnUxRMOWNs\nT69LMr3Z8Oe02Wx8oztmjhjhJ4/DqxS3e4PMHleOir4e7++rU5c6fj09VoRP57dxMnzqLsXHMOpN\ng+kd5TtbeepSvfF97SgRzm/pzXT4813X+1VNTyN0Qm+D3HB8p9s/2zvEk7XhY/pqA6PJT972SfB2\nJnx6g7f7qQ5p2QO406lLu0cYngmbN1PskPrK8a65am1yLuF2Lq2u4/CxWITbTVllLn033e37dsdM\nodeXF2oMzb4yZ8yMI1FrMB4uUtR4vr2tKsc/r+/Emxuw2v6LqT5PWyuwKD3ZvtCRafCpPiPcSLPJ\nsFWB4wr2524c/JodhQrc+UX8KIcoivp2y/bx9l8VOD1vaBiG8emy2QZ2
K4UK3F/modlnSDb9XxZc\nqObx/tRZhU5n7q8OlyhwXmAm7GBIFpztscXHMDy9Y8NWh0nfvGLcp3i+rd1VLEe/dswL1WyX32h6\nJsnDRtProyrlX3O31nOEyQuZ/NyB+2/c7l4NiQ3C++nu5mFpOgyX4fUylVHPETa3Jp7bSa2FZoaZ\nswne3v4nxSq/a1Lnhyga7mtuEs9OJ161+JWfbs7N3P3x/7w7YsKnzjUTdn/xwo22DKcHR+a/8vQb\neeVcES6+lknDNeSmp7a2ezOKfvbTf4RPn/Rf7cPcoJlDP8EVuLO9I0ztlphZp73ah3n7Srm165pV\nd8Xft7r+Z8JXp7aMZmbIoeYz8qvfd+Z72U3/Ed6bPsNmnqZbbrltteSbOZXndoPJnSk85/eh8wjf\nPrFmnqblnpGfVNTT79uNniNc9tJ+tufi2X7fA9r7YP2+79Q67tOr+TPfYrKuwBkz+3R4qv3sCiwt\nc9pagctP1KHA6mLnjl6v19+/t+rwPNOgAjuQPIH7168NOzwDBfYh/C6KjTo8/jPz4T14D1/hVPJv\nZTrtfLiyOtNgN/IRDmftcPoe2emb8V69PU+BPTGWGffXLJtev2x64/E6ETdGrSc9nzFzfG8vGHG7\n2ZFPPGClQyxHvzUu0m5P3+lOjkJMaRSYCe83F//55zo8v7xssWfzh5eH4AwOuk14C+/Xr0+uPfHR\nllU3tr7EKDs7xEz4+/flPraHf/JAfp0JRzjOeNPkXn19xoc7OeBoosvRd2vHD6fEc374wdaLUm/2\n3U1u7+gkl8XH66c7ObqfErf+qOPbzx/f7/Jgo//0tEIz4Z8FPiw+7/fKLN4+PM+U2Nzb03G+vX4U\n89J7R198Vsh9e8s6FOEyy06Is3ZdIxfhu4/q+b+9pT2docO2W4bTD1G8/X3n65SfyoEftaefOvnd\nDzjwb7da8x0z8w/Xh0vQvh/zjex1iOI2hAtGaOkx+E0/9SFu589dnL+Eaa8P8j52iXDZMYSTnP9S\nlvBaKXIC99LxPsMVpdYfOWj+mVNNftR5HDtCr7Wzbp/7e/sY4GU/xJyWdYhzRzfV8ZbhQ3s+g6mo\nvcZgzY4ZXnjYQXqcz2DS9lf2mgkNyQbmP9dt8BlMRVQYCbNoO7ud2iLyzx17x8zwc0hj/GO322rT\n077tzIw7fIRswIGEQxHh2W0UpM4/J0IIO3yE46bg+MeGfjv3M5VZK+vwEQ7D/ztmqEDb36oQIXU4\nMrGACM+r+Yp0+p5gQX5ChLShwMVEeGqtNtsUuIYI+c9GQfKWCFnrvjoFLiDCs1u5e0aB64mQ5VTX\nhAj5w+dR2RnTighZuwqd/pOviJD/fHXJNgU25OHjD8e5UM15eAR5w4VqtuZB5Gs+g6ktEUKYHTMQ\nJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGH/AhxI6jUJNXe/AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJ1klEQVR4nO3dUXabyBqFUXRXz8ie\n/wiiMXEfiAkWsoyg4FDU3qsfnHQSyxKf/gIhdOv7vgNy/pe+AdA6EUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCHsv/QNuJrP+3344s/HR/aWUAsRlic/3mI5Wt7n/T7OQ/iV\nSVieSchbTEIIE2ExlqCsI8LCrEV5lwjL+Lzf5cc6IoQwERZgDLLFcS9R3G63vu8P+3bLOceFrOMi\n7Pv+tB1uyc8YZKNDqzhnhA+TcPjl8q7O+UNRkaM3oBNusk9H2cIUT/jj0NX2uBx92tqZF6VT66Yi\nrBA4d7SWDrvvKd4/P6u4zVQnE0MtET643W5d101veaU/yOXV9bjEbmtdd9PUmGK9P0ILKnp0Yi/W\nDxtx6rtv0ff9eOMr/RE4leT7CaseJuONn3ZY6c8S4X4beVPvSuPTx8Mu4sefP50DqjPzJYNd61E4\nwqqH4dz4g7T82sbTJfplHuI9nCKA6jpceIPbSXH72rL4NlDRRnWWG7rpLhu3gKN+lrdubQsprnv4\nHv5Wsx2e5VYuvb+mS53hz99u/9qbfr2bWh7aw2y5Q3btsJZH6iwHZuY7h99WOJM/d+jNKu3yb5uq\nZbs/lbNE2M1eOTznY7l9I7tYfhvvkIcn34sdqFvoRBF2Zw2vrAvvIq7rR4fninCNvj/+wMwWl8yv\nrNY6rD/C7ll7+xyhaWrLWGJ6h2y5c3aqbsU/G9khukSEc8N4FEw9IovS1+fxHOa6T+2lIzQGH+zx\n6kLxf/NhVj/835M8oBedhJ1heKidnqFWz8Npb+PXJ0lu7roRFmUMHmNddS9GXBUP3KUjNAx3U+qQ\nzNyvO4enXVWudukIu66bPISrt5Uqnk0vrIqzOLa4/mXw52/hf7rD8OJrDvb0qgv9l+3/1NlcP8Ju\n8SMx/WM/fU2351p01NRbfpuIsPt6UKfXaBp//6fApg+8DtlPKxF2y/YlXvwZHQ4OGIM/fburaijC\nnyx/jHXIHhqNcMtZjl3bHT4s5nf9Xi2Mwa61CMs8ojqsakVw/lvbVoRUpJEx2IlwJcPw9OOlIiJc\nS4c6LESEG7SxWHphvw7bWYt2IoQ4EbLJHsOw+Bg8+cpZhGy1cRO/fSl4k+py9bcycYjlb8b99d2A\nTe0NDkRYQlXXXDzYQ3WtBbZEc8865R3+YRin9fSqSu9ecbDBDdIkpJjponT11T4bJEJKWrJzeL2L\nxGwkQvYluV+1uAQvz4GZ7659XabiKpuETw+CDI/4/CNDA7eJL9pbrvoX64cEpmdTt72Tn9fmEc4t\nKpuE3ayx+cN96CV/jUE2qy/C6XJ0pIWTMAZXqH452j0rsPn3+lGT6iMcYrvdflym7lij+UsJTSwe\n9opFhN9Zi65T/SRcYqfVqQUvRTQR4R4861NKKxE6VLM3z0qrtRJhV7RDGxwFNRRh97dDA7E8z0pb\n1Pdi/UbrPhW9c1Iyu2kuwoVevwHHEz8FtRjhfBh6z9sWnpI2ajHCbnaVvnWfhG7Lo4hGIxwsr2he\nrA4H7oTtGo3wp01nyefXT3/TJtg5e2+zRiMcuSrmRu6x7Vp8Ii87vgxDNmrrxfo9nPzDRl57eGP0\n09/pnr1TjIKai3CPwXWZDn/6Aw9X8Vnyt1iu9X1C5pfk+fUqP
pTV1iTcb/+t6mH4YJh7D/fT9AKT\n84UrW7QV4a4q6nB2TPiXoqajcuzThCxFhCW90WH6AMjD53K+KOqnq/hQSkP7hGd/LSHzmvft6/rl\n/Xy+vZh4Z74jq9NQhMd44zSa00yWUz83NaCV5egBY3D6sZiLFqVPD4Dsr5Yd13a0EmHVHvbfyjIG\n45qI8JgxOP2k6DXfbjhGOYttyE8qF2afMGR+AKR7PNyx7mPf37wV2s67/iT89U30Zb9Fqak7DkAF\nXt5lI7x9efj9il5S738YjFzMFZajn/f7+PX983P44sV1mcq+GXenHc759C77XYzB86gywml1Xdf9\n+fj494voW+D3+C7F3/1Y6p+ilHOfRDIxhvctucXmm3LBnbfiO4T7caz1hGqahOvyG8yHYZHxeHx1\n5++cd9UU4TAMV6e4U4ejvfNYPcRcR+fkaopwyyQc1FXdgxff6/We3k7rcEqpKcIiHjosleXBW/aW\n4eZKjWfTXIRz6zbK+Hasosto9Blx+8HS+Yk4h92TNY5uXrjsGTOvbT9vpvYPaarozKHLazTC7tln\nwlSxURYMvpYf+fLajbDb/NlMgxrH4EiHZ+DAzNuCW23VwfOT1iN8emj0rZfdusrb8IpFnHu/60qc\nU3Lwm/fr+sd5rfVJOLD9mYdBTR+YKegCRzgu8CNUSoTF7LoRG1MXJsKSah8mtd/+SomwsD224yPH\noA6PJ0K+se49ngjLKztMVHF5ItxFpYs6wUeIcC9FOlRFC7xYv6O65qHgU0R4UsdfRvHPnwO+CU+I\ncF/vDkOzqEFWIEd4920Z87++98N0v98+PmwJGSZhzPKunF19bSI82rqWdHhhIjzImfuxFs3yOuER\nihRY1wseLGeFUxmL0usxCStjHl6PCCFMhPUxDC9GhFXaqcP7/Xa/y/to9vIrtv0gzZjc9CWK4Tc/\nPr9+bQvZmQgrtjzC6Xwbe1v68uA4cm0q+xBh3V5fPnx8Y8TT2J6OQY4nwuptv3z4MBI1meK0teqV\nehqdLlO71/uEFqhFmYTtWjr6HpK73f61N/2atbxE0aK/s+6jX7Ty7Pu//7EPEUKYCCHMgRne1PcO\nzJTlwEyTHFw5E8tRCBMhhIkQwkQIYQ7MQJhJCGEihDARQpgIIcxpa61y6tlpiLBJTls7E8tRCBMh\nhIkQwuwTNsl7As9EhK3S3mlYjkKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgI\nIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE/R+ASsbgVJ5t5gAA\nAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJn0lEQVR4nO3dXXajOgKFUejVM0rN\nfwTlMbkfWHFT+BcQPpLY++XWTaVix+KzMMh4vF6vA5Dzn/QdgLMTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nrLcIx3FM3wVYp7cIr9erDmlLbxEOOqQ1HUYIbekzQpMhDekzwkGHtKPbCAcd0oieI4QmdB6hyZD6\ndR7hoEOq13+Egw6p2ykihJqdJUKTIdU6S4SDDqnViSKEOp0ownEcr9dr+l7A0okihDqdJULTINU6\nS4RQrVNEaBqkZqeIEGrWf4SmQSrXf4RQuc4jNA1Sv84jhPr1HKFpkCb0HCE0odsITYO04r/pO1Ce\n9ys14TZMnis7iXAe3m1QTYbVmg/Nw7E7lYYjNHiNWjw5zv98zjFtbK54O0iLnRyTYW0Wc+CL0TlP\nkM1so9OQvA7v4TfosB6fF3j/D4d+U2xpd9R+S9M2Fzj0fn2gliIcNh1Sm8ZPqFl7hmD6tx2PY2MR\nbhuDjsevRdmx+HO5TH/4+/OTug8LjUVIi/bsiA4HPIfWk9+k2xUzC32/qKjZzgLnSg3in8vlNh/W\noKGZ0M5kewoWWJCZMMZk+GUFq+t77NqIcByHIqPZ91hWZVFgqSC7HME2Iiyiv8FrRT07onU6S4S3\n7WB6Kp2k71TP5lOWAl9r48DMzkF8dnjAspuG3E5U7Dlj8edyqe2ozNBKhHu8OEBnHRw16DzCzw+R\nC7K42hYq1TkNDn1HuPkk1X2Q9WxJZ1Zkj7RC/fwm927jtHPAehrvLyv+0M3HdFj55FjtONZ+dHR+\nCHPV4cwiBd5G2qHUbYo/dPMxnR/obnqAGtgd3XCmvtrnPPabD+7nr+Rr3iQaiPB6/afD6aF+8XgW\nXzFc7eCd0IvhaPfQWo0Rvs5s+vrtQV5820ErhtVYg8+H4GGQ1Y5gXRE+y2+aDBdfmf+T2xGmCtfs\nn1yp56/NP2T+MrLORTy1RDiO4+tH5r7D29en/84Pl7U0Bz6b03sUb2D+HP3z9+/05/jJw/x0UfZE\n3BGn9Uqd6nj4o/+Z09NjcZD7x23ta7ZDTzLN3+AbCTIZ4UHnwQ89NyXCbV48dG+D3P+wb/gJ37wU\nTWx39Lh9PAdR6vTsOffhQZSyr+1ve56rfG1WrOU1Yc3Kr5b6cNJ7OE++PUVTn2fvXBnumrzfaw0+\nmU6TYc8z4UktEpofblqbZSNevHNlePRO63qOb5sJd6l0j3SR0PS/r+9ky6uxhg9Ceri05dlfbVPt\nmyduMhHWWMhLBfZIHxb4wQ3///tbs+2BamvDKKL2Bdx1WrtceGxwN3Kn5p5nF745eWYeqe+M0BG3\nsngrzeTtrSzvSaEDM/Fz3y+0HuE39fma8CBl1u9/PivOv212LGccx6HutxorcBURfmrb+v37o/P7\nt85Dt+/72XftiRIFriXCj+xZv39bOnzEpnnMMr33h2xvKd4Vq8DVeo6wnvX7R6ykm99EWYvZb7g7\nNPvsNhW4Tc8RzgWPYVwPuDrGl3+LhydKFrumwzDW9qE9FX4U4UOBCCPPlw+XTX1h/f4XHHEPn71x\nbHaji2U/42H3ZZfK85t0PhPe75GuughCqW3qoNeEBV8Q3r8Of
NHh9PX5sZnr9frwhWLc19Z/7tF5\nhJPg+v35rZX7Ub8/sdDdW0xrs5///iuLv3r2tymV5zfpPMJG1+9zKp1HONfQ+v1Vdu6UVrX3eE6W\nrf3z/dMfDrpvR2zu25bRHXqXWCv2lH90hxXOZsW3+Ge/4ydBHnRtETboc3d0HPvfvPZcBrfCZ6gz\ni72V6Xq9Xi7tvUduj8hmf50ZfymwKuHxuFzGn5/iZ8/6f50jpJ6E39T781N+Pux+41RgZ/LvrD+i\nw44psD9GtCUK7FJ+JjyP+w883faxp3RGhF/V4DXTOJwIv+r+rQm/14v55yv3TIMda+Bk/e2wzZ8/\niyXXjy9/0pb7+/wb6iGf9EaFahzdW3VvTyHO39JWf4T3V0n64GouFpf1Lz8TTsndelt7+v6TTbk2\nb9+3PvvOpn4xNolGOI6Xvw+mu0WW3fjw3bGcTW539N389cmU2PLnha1gp7RvoaOjdwXuXDTz8HJg\n3ZiWX6fvBUdJH5gZx2EY5jul86My25Z3N/cSkZOL7o4Oj/cg5+1t6FCEtCU9Ez7x//Y2JdVZh4de\nd6Pmj3Y6iW9FuOpDoYcnnwq24tYqfXKpzbOr0Xn0vukrG+u2ovZdTVaHb714iAT5TfmT9e/ZCH4V\nveT2gzlww6cvsl/FM2GBm+1nMix7qnDzZdqcsTxCCzMhRbf7PZdpO+ITpqj1wEyxm+1nMixi2wOy\n2F/1qJb1rZkwNGb9bSuRMwr9PYxVqX53NDSF1qbITqAZrE51j0oHb9otZ35QZMMRy7IF6rmg6mdC\nfm3+qNNBM3UTYfN88kTrRNiV1589XPaGtF1K3RHOrwNhvFdavHrUTLXqjnDQ3l6mrPq57uhZFF/s\nYvVMKSKEMBH2z5RVORGeSMEabysH5L2fCFnNkZ6yRHgKZedAywDKEuG57KxRgUcQIZ9S4EFEyGoK\nLEuEZ3HbEd22R3oLT4HFiZD3hHcoEZ7RqqK8FDyaCE9kw46oAr9AhKczjuOHKSrwOzyyJ7Xo8PVm\noMBDeXAZhkdXczMHfo3Hl8d8ZNrXiBDCHJiBMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEML+BwYTGNhEvCkS\nAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJRUlEQVR4nO3dbW6rWBaGUdyqGTnz\nH0E8JvcPlyiCP+IYznnZsJZapaurqsSNedj4gO3T9XodgJz/pR8AHJ0IIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQgj7J/0Aavu6XG5/+D6fs4+EukS4lPxYyOnoUl+XyzgP4QMm4VImIQuZhBAmwk84/2RFp+v1\nmn4MxXxdLk5BWZFJCGFHWZhZ64KeMcjqjhLhYBmTrTrQ6ej0gt5nKyvGIC0cdBJ+n8+3DkVF3IEi\nnLnl936KxiCNHDfCm/dTvHx9DS7n0IDrhP95MetOJxuKVuxbP5xOp2EY7reJCGmn/L7V4h19sxQV\nSFN7eE24+nrJLblnUxHWVf4Y3/q97cYgrZmEr9wKNBJpag8RtubslKZE+NTsRFSKNFL+3tHxBrQ+\nrtfr9Xr1pl5WVD7CRl6vx9zKlyKrcDr6ob/eegrPiPCB9y9LSJHlnI6u4Pt87vzSlD0R4dzHV+dN\nQj7jdPSHte6P8R0VvE+ErciPN+3hxsi1xteKt4mahHGFbvo1CVuRH2+yMPOvQgdOdkaEELaHw//y\nIWYMfuZ2O/sS7TZ7oefUa0I+tMpeXiiVdg4R4a8HbPsBQZuIcLqg//rmr8vX18O/f53Zr405Hv/V\nki02/W9vH1xw8I2/iQiHyYL+Lyv7j54tz2LWX7e/8Ga2sjqafXve+EEyvGOa0PKcbPzNTcIUh+ee\nZlv74Bt/K5NwifP3d/ohHMi6Y5BhHxGuxXlRT7OtfeSNX/5ItvI3lp1OvnrphdXHYLu5WmhKm4Q/\nXa/DUY/Hv2q9Wx92GJY5WvRjGD7RaGp1+LEbZxLeMQwf6bMec8xhKELCrNCI8BHD8KfWY/CA4U2J\n8IkiLyd26WhNbuWOmc/4KJcOer4arLKUsq7aEQ7y26lVhuHCn9DtiFD72GMSttb5JrXxVxzq0xJM\nQl6ZniV22KerZLOu8hE2N57SHHL/iCg0xFZhdfSl290zt/8dab1u6mhrlf0d65DzZ9Nb2A58O1vP\n0bTBz1NvzSTkd4ZhUyJkQwqNrxVZmHlp+lLweDvH1JEvprcmwufGVRm6OGzkTkefOPAyzDNeGTay\niQinz+z459Ppx5/ZgnYdrj4GCx0yNhHhvfFMMNOhMUhHW4nwNvdeLIL0u1quwJdaTJjDvhq82crC\nzPSS+EgOHMFWIrx3X+BtGLbNUvdvmF2u+Gww+gTh0UYjvD2tt39On6DWT9ZpGA69O3zk4AktV/gg\ntPrQckj+k+1/SWiVJ3QrCzMfOPAbG9iVjZ6OvmnFV4lVjprbscoyqW0+VI9w+LdD/QTY7GspH+Gb\nfG09m7WTg9nyxhzX/8TmWtEeJqEdgtIKr46uq9D9vnGOeusqH6Edgur2cDo6Wnj/lDePv8MmWl3t\nCGc7xPKdQ4f0V/50lJ4coVooHGGjHcIKDZ0VjpDOjMFGqkbYdIcwDOmpZIQdDsk6nDEG2ykZIexJ\nvQi7HZINw5Ex2FS9CHvSIR0Ui9AhuT/bvLViEfZnGNJapQgdkv/q/vsFfv3GgbufYJs3VynCnqbT\n791h+NcdvItff/n9Nw5M/ysFdrCJG7i/Lpfxz5evr2f/WrePi539/M9/3QY+Svj+s7BmWaYfIL0i\nHDP7Pp+Hn9WNf/mv5zvFGMN23+tQ4dXj628cmI7wDW7gXeo3C
ael/ahuYxaNwYc7eEcPH+2vH9A6\n7W1MUYHd9HtN+HW5zAbgX01fm1VftGz34E+n0/0Pf1HUuGBTeXPWlpmEq2hxUrraq8GbcQb9/JlD\nmwWPaXv3v+X2x/sv9nj4QIzBnjaxMPO+7b4aHJ58c82T/Pr8v2gXPCuqd4mi3UnpymPw50++/bSe\nBd4ocPu2OlVealTL7Dv32p0xNtrmj14K1ntyD6jeJBzKfmPzdBi2+OGzv1FgFSUjHBqvlLb79Jr7\nX7T6b3n2u9isYgszjfRf7Gl6XqrAWqpOwqHZMKx4Xlr6kimFI5z5uMMO6zEzTX+FMVhO7Qir3zcz\nrD3EFFhR7QiHxSel/cfgs9++nAKLKh/hzF93xOyOu9blzbV+FBElL9bfWz5Stns3nO/63juXKPJ8\n1/fB7STC5Ss0qZ1YP+wkwmFxRaVj2PSbS/jN3hZmalEOgwhH1S85Vn/8RybCmNXHoA6LEuF/7MRE\niPCHbh36rm9GItwbHZYjwrkOO7FFUaZE2Jvv+mZGhA/YielJhI816tB3fXNPhLulwypE+FSVD3Gj\nOhG+Un2YVH/8ByHCtyzflY1BnrFn/O6dArf8vlv9b9x+3k+Y5RMo+Jhj5Fs+OB2dbdjUODIGt88k\nXIcdnY+J8EMffLZi9c81pRERvsveTCMuUbxllQI7X7UzBqsQYVc9O/z+7vN7WEqE+3S5nM5nY7AG\nEfbmVjJmRLhDxmAtIgxoOgwVWI4IM5yUMhLhrhiDFbmUlORSHoNJmOWklEGEECfCMMMQEUKYhYFN\neDEMn90COl0FvVxO939JFSIsz2WJ6pyO7sHlchqHIeWYhPthJBZlEu6QqViLSVjew1UZU7EQnzFT\nntiqczq6H9Oz0PP56qS0Cqeju/LjLPR0Gjy5FZiE+3W9Dm6Iq8Ak3J3pADQMKzAJd2c6AA3DCkS4\ndybh5olwjwzAUkS4UwZgHSKEMBFCmAghzL2jBzC9YsH2iHDvXLvfPKejECZCCBMhhHlNuHezW0nZ\nHhEegPa2zekohIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCHs//NuBM9ASDXwAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAALHUlEQVR4nO3dbZKjuBJGYTExO3Lt\nfwXNmpgfzDCUwHxKejNT54kbN6rv7S67XBwnAmwP0zQlADp/qe8A0DsiBMSIEBAjQkCMCAExIgTE\niBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNC\nQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAEx\nIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBsb/Vd6Cp\nYRiO/8I0TW3uCbDoKMJhGE4bo1K011GEV9AY2utlTXhlDAISXURIgbAsfoQUCOPiRwgYFzxCxiDs\nixwhBcKFyBECLoSNkDEIL2JGSIFwJGaEgCMBI2QMwpdoEVIg3AkVIQXCI0OvouBlROiTlQh5sR+6\nZSXCK44bO00UsMnEmrDIWm6aJjqER/pJmBV4EBJ7mwhJH2HmoLTTPudhSKvwRRzhrWaoCyEp14Q1\nphYrQ7hj4sAM0DNZhPUWbwxD+MIkBMQ0EdY+hskwhCOCCDmLAKyF3R1lGMa2/t0uXw/Dr/940fo8\nIWMQpQxDyjal7I+/O9yP0sLW2DTCxgVyAU1s07TTYfYX1n/a/gUj+0phd0eBU0bWLO0iXA+lZj+5\nkUcZlczD0DvNJKQNlPJgtbHe9ixsiu0ibPzTLrdl4VFGQfMvc93e3Q6tbRKyNWHVB4LjMVEdH4l5\nRt5k0wglPy1BhlGwQHl4a8qjo5UeCKrrRMFtR9tk6whr/7TbN8sgyBiyMfh+KtoZhuLzhKwMccUw\nDLV/k8ImBRHW+2mpLqTtr7XU4tDIMNRfMcPKEEaomoxzsj67IocCY9gbgyV3TS0MQ/0kTCUeCKrr\nRMiXgwc8WU+QYbQ51i0fhiYmYXr3QFAdCmrfpPuT9ZwY7ETV32xfJ+sPsDJEZr1JtH85eLObs3Wy\n/u4DTXUopa+T9QWxIxpeyzZUp7j0EdZYGQKOWPlotOMOCaxnjd+wq/0TuokI+bR6GCHZpTIR4Sk+\nrb5zsd+9Ur8mfE9+xQNiUHUeIUL0oPHLwVsKEiHDEH4FiRA9iHrRf5wIGYZwKk6E6EHtl4NL+DhF\ncVHsA9nYOgjy4mZgYYMJFSF6sH6qPejnfZ/NRIuQYYjZlW3AyKbCmhD+BLvoP9okTAzDboS56D9g\nhAjvypOso4v+Y0bIMISj3z5rQjgT7+k1bIRcQBNSvAJT4AgRT8gCU+wIGYZwIeaBmbU3b6kIO6KO\nwZRS2B9s1+/BuP+D9/R4uBG4wNTDJFy7/otcfwzl/PX2f0n/fWx63M0DLUReE750upycU5z7vP6v\ncFfsMZh6m4S3ZNMvbQILvWFYEb7ARIS3rHdHF9muaUp5ug8t32757tG3xW4R4ZFsV3NrnUbJWHYX\noLf++fo+udXDGEysCU9lU25t/h+HodU68OItLUvV06cQ2zopMDEJv1n/9reLw/nr3S2k4mbTxxY5\n66fAxCS8qPZQCXNlz/ZA8eOfLMxjcopJaMW8zU3LkHW7rruyht30lf+5q0sOOxr671U6QpltbQV+\nI9uAWx1cXV/YsPx3dl8O//mvDbKTnVImoTm/RuIzxjZcY3fHHNaEN9RYGe7udJV5+l9/57ZHSh/f\nWrYX2slOKRGaU2wHTHqKgul3HRHeVXezrrgEqtzk9lr2gxM5BzochkR4T9ltovwhmUyrYVj1uE/4\nmer4wMzPOM5f/Pl8tPekiFozcH2pwfaygxKKf8v8zfLq3G07HEeYRPnVeD/FZgfiByeDxcWdLMX3\n7ujPOC7z0J1lX7R6gaud0uJLrFojKtuRdn4d7DEm4RO7m/Kzl
trMwEoDMPROYju+IxTaxnProxEK\nnJG/I/s4sSK709UvZ8mWgnFXhkRYzPEWWWpyIp4I1+b9jKO7A6SSqyLXN/ryDrS7/0Gn35rvAzN+\nuT4H3cl11c0EeTQZhrdu9OXHhhFhWawJZVSf3/b+w/0osKw4T2kMQ5u3iFOsCZVcrwxRSpwI/3w+\nfq+eaaPsGBQ8fczvNxfuaStOhE41G4bDUHItJ9itjfJWjluhVghOFzwN7naEk20v3xDZsFCT0OkS\ny+ndRimhIsSuemNjGAaePt5zuf92jJ3S39+2YoGCZeHM4e/3ACfr8ZDgmS5WewuXQ+OU05csFJ8t\nzY5fON37MCLgJNzdIPq8DqvBj9X4hZEhBYxwl4sX+6muJn3M1701K1qEzzaL3Ze941T2uRGJkfhI\nqFMU2wKf5SQ8cefxnOF8omKaJgp8JtokXLs7Fe3sXH3v8P+7Z+OeGnrQ/IrzCO6Owbs/XcE3gHjm\n+k5d9oaAae+6rlhXd4UVZBIWKVDu1rJq929ZqI7F4V2h1oSLxwWul2SNl2dFbmv7AoNmL/0Z/tPi\nxmKJMAkDfLxrvXfjbnmqkBn4TMxJ+Eb7YVj2Jtq/2m6ujgIfcx+h9zGYfRZfke+puKiT8xPP+Y6w\nUoHNhmHxGZh93bILd09/dviOMBLXWzAFvuE4wqo7og2GIQcSMfN6dDTSUjC1H4NFXx3r7sG3xvEk\nXFTaCOoNQ/EM3H3bsqd3aRwp8C2XEbp+6jXysqncHOTNFMdx+HwM3HnnXEa4VjXI4sPQ9Dow4lt6\nuuAywqwN7Z25brdA/f3P7tU0pWEYx/MUGYOleN2va7lHmr20Ivt/L96NbzNQ8/hnB2bmP/6+J3OH\n3zKjwIK8RpjMfKTR9XevMboaXOy9BIPYGiDCurfo7A2mLLwUqj+OI0y6ndL33yoZLDCldLYXihqI\nUHNbxs+ysBfaksujo4uGnytWuBnLBaaUPp/pygFSFGH6+fiKJp8r5v5RKmLJkiFZltdrRxfu3jDX\nKXZQ6/G9O9oAhS/GMT+Jzy5rEUG2sHqpEGEmG4kcTX2PSXiEAk99PtO/R3G46PSpOBtZhc8Vi/Pg\nvLc+KvN1fci5/kfibGfHV3gurv+8RJi+H4856jCZeY9+J0JtZ1eyudgnBaazI6JHq0FG4h3uT1Hc\nde1jHijw3L/57fbGo3dHqAMzHj9XzKyrJwZ5HfBrASfhy/dQYgymu6fmlw67f9yeCRUhn1Yvs/1w\nNlwWKsJdLj6t3pTn+wLZvyr6xoqBxdn1YjeyiGIP4/YjS/FFkAMzFAi/gkSIInguk4iwJvwZRzad\n94tbClRxH+HPOP75fNT3QsziYeH1+UPaPuQ+QnyjPyxMe9f43gNhDCZ2I/1zfGCGAhMFhuA4QiAG\nrxEyBhNjMAp+i15RYBheJyEQBhG6xBiMhAgBMSL0hzEYjKcrZn7Gcf6i5+OiFBiPpwhT3/khKme7\noz/juMzDDjEGQ2ISukGBUTmL0K/T92IksG4RYQsv3xp8RqVRsYfTAnuSOODswIxHFIhjRFgXBeIU\nEVZEgbiCCAExIqyFMYiLiLAKCsR1RAiIEWF5jEHcQoSFUSDuIkJAjAhLYgziASIshgLxDBGWQYF4\njAgBMSIsgDGIN4iwAArEG0QIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQI\niBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBG\nhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBEC\nYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiI/QP2MHnortabEwAAAABJRU5ErkJggg==\n", - "text/plain": [ - 
"" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKbklEQVR4nO3dXZqiSAKGUe1ndlS9\n/xVUrYm5oNs2RZGfIL4gOOeZixyzWkzkNQJEuQ/DcANy/ko/ALg6EUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMKuEuH9fk8/BP5zv//43+PG539wHf9L\nPwD6NHnVG378n+H21v3+8VcdEyFlvFQ3bIppGK7YoQgpZlt4XGWfkEPd7/dSBY6D4aUYCantubG3\n5V6tQxGy18ww+HNH8Z9/82nIfL59/Pki+4ciPI3puywN7oMVOTzz9J9fokMRNmHJ25jTDbrgnthm\nL48h/njOSIQBpxjTGnGFwVCEGUWqG4YhOxjWWXr3HXqLorb9G+7zQDp2uPtBta7v46UiPJ+X8FId\ntrBH2gcRVvWy4V5hECul48FQhKfUyGBY2TD0+bIlwnqmw+Ce6Vy2w9RctMuXGxH2o8sN9ApEWEnZ\nYXB0zUMy/b3WiPDcrrpz2NWfKcIajhgGPzl6A00Ng9OzUrvpUISnN90ce9pAr0CEtR0xklSrLjgM\nviy3p1MFRHi4WidYXnHnsA8irKrm63fxDlOn+/Q9DN7qfYri8YQ9r7u+z42/3W6J6l4+3bf5AXxt\nLP4Zjm5UifA5tuefH6cDHvlEjhvTY1jo+4M/C8PY9hniW2II6n4YvOU/T/j4LpFb+RRfqht/qJli\nC5vL20lpuW9GMxgWkI5wVPprfWZKe6QYf0U/yNswjqvu0A6vMAzeYhG+7a3E7HThQFdhdhrcXA49\nMcDoV1yVCJ8/CjY+eZ96+3dIvK/PYyaqtxtNzdlpZ1vtS4fVsuxsNT7UGgnfjnu3D3uDwzCsyePr\nv5wZ9w6anTayuWTnwDs1sg4rSO8Tft4bXDJSLQ91/t6Onp32tzFV3jnsWzNrbXZvcFrInma+fmN0\nkXUS3yIrPIDjTky/yCGZUTNnzAzDzLeIDMPwGKzu9/v4fGx+Ssa7evte2fOC+Mq5ckU09+oyf3zl\n06/aXFbwxbvaoo8Ysi41DN7y+4QTn3beDvrwwdtlzf9q1f33vfXcrvE3Hq2Z6ehPNaeFM8va/zAi\nM7TKVZSdlF5tGLw1G+FoZuft1Mvqj53DPZqbjr54nhbWXFbBl14TtlWuNgzeGh8JHypProq/kNcc\nGVJbrcFws3NEWN8RB4G63yinHe68wysMg7f2p6OsEt9q98y9u3+R+kSE9Vx55/BEHyOuT4RV7e/w\nZWs++q3UDd5OvFt4YM06UYSdPItfO5wfNE6xNTu2vMqJIuzWzOC26k563VK771CEAS9bVcebVyl9\nd+gtioyy71h0vIFegZEwxtH8VToeDEXYnM1H87vXa4ciTHI0f60uOxRhXmebFGs5MJPU34t6Bf2d\nhStCzqezDk8TYX8DhmFwj546PE2E0CsRZhgG9+tmMBThe9Nnt4/nuzN9dCjCV48vF3658VauQ8Ng\nQR106H3C/yz5iqdDL1nBNZ1gJHx+mTvoJW/tV+uX/V5NdpoOhucaG08Q4e2w9m7f8pv5JtJzPc3d\nO/V3vbUb4fM6n
F4qZv8aXpLfkqkp7NTiPuGSa2Z/utTvsvtfes3Dr5mt3UU0Fz3O9KPSZ1nbzT3K\n6fVCH7e8re7d9UVn7nzjJUe/1rh8Z7K1Fd6ZlzV8ihXe0Ej4dWR7e/3C8cav63lzfv8u5cvAuOTJ\nPsUGcS5fV+kpxsMmHt/mieXCe5h5Gra95fCpxq/7kC2s7c58vYpT+6s9//gKrqNVd7X/Hb9VKba/\nKZzU2xV7rg6TR0eXHIFcZVh2ebO17wrOLO7tnbR21LS1x1PWkncjGn/HIvMKcfR5J5/u/9DlNvjd\n2I+/t/sTfb5OSt/e0ojaB2bqbA3Px1GmPx+60Meysk/5y99bbSW04xSHZEb1HmVqC8gud1Rz6V//\n3l5TPO9gWDXCBv/+Cqpt9KsW1OXTcdIOG3qfsEuPp/wxMB6xBXx9F3T6qy53FJdMQRucporwQC9n\nUT1ufLll5yJm7mr+t1fYUWwwuSnT0aOsOv97w5rZeQ7Qzn/fstNNSo2EMS/vYUxv/KRsfs/LbWrT\n3GzhpLTa4/lKhIdYuzUvnKwekd/Lw+hpSHxofFJqOlreqg92fL6THzUueeOh4OrtIMXGp6DPjISF\nFSnwNhkbK7/v18Exm8ZHv2ftfrKe0Xx+RU6CnVl042ddrtLs3yLCkkoNgwsWdGx+fZhW12aHIiym\nZoGH5tfgZrrZKV6kRFhGtQJvJ9mwmtXgYChCLqe1DkVYQM1hkCKa6lCEBSiQPUTIRbUzGIqQ62qk\nQxFyaS10KEIIEyFXFx8MRbja9HqJFa6gmHKWc6B3ynYowi0WPl+dBdm3YIc+yrTFMLkKjd7YTIRl\nPIKcTlA5i9RHEOtNRzvbtRjeXadtNA6Sff25VxGZlNon3O5TZjN90r76HYpwtef2xp+fq5v+FuaJ\n8BAGw1OrPBiKsIya1cVPs7qCmocwRFjMS4fDcFQtw7JroXIWIixp0uFRs5rH96Adcf+dHcdunwiP\ntb/Dmf+8s68kvCxv1hc2zkL3DybLv4G71++uv45LnJ5b30uHC7P8ep2mDdch3EzY1YjwEKsuhLD2\nioUzeRQpR36VifAoy69PuO0p+Fp18WsechARHujtePj4udR+Y5HZqfyCHJip4aAL1s8fs1l+wOYi\nn9xtlrV/rGojzPyO4nHXFWU/ER6r8iCzfHHya4fpaFeWTEHl1xoj4YGC+1pvS5Nfm4yEfXo5ZiO/\nlomwZ48U5dcyJ3BDmAiPYvxhIRFCmAiPYxhkERFCmAgP4Sr2LCdCCBNheYZBVhEhhImwf79+/04/\nBOaIsDBzUdYSIYSJsDDDIGuJEMJECGEihDARQpgIIcwHT797fuvv8fP4db6Pn61FNjMSbjFW93xJ\nUNcIZDNf9LTIS2PTcW8M0njIBiJc5Hk6+qA6ijAd3Wha4MsF62EhEW4xxna/f5+mwleOjnbu7z9/\nxh9+//qVfSR8Yp+wf/JrnOlo//7+8+cxHtIgI2H/jISNMxJCmAghzNFRCDMSQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQtj/AfjNIapokDU8AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKd0lEQVR4nO3d0bKiuAKGUT017//K\nngu7HUYQEUL+JKx1MbWrZ7etMR9BQbw/Ho8bkPO/9B2AqxMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYVeJ8H6/p+8CLPsnfQdO98zv8Xi8fkjfI/iP+8CTcrE6KdKa\nMSP8Wtr9PuYDp0ejzcVP+c2rsyTSiKEiXFzf1mOTInGDRLh9Afzpr0MF3Ue4kt/in6/fVO+jQY86\nnnYrme3OyZJIfV1GuJ7fp/9V5PahuP4i/LTKFS/H3il1DHLGjGDoV/fnjt7v94IFOsWU+vpeCS2A\nDKDvlVCBDKDvCGEAIoQwEUKYCCFMhBAmQggT4X+8LkXz9jOcR4QQJkIIEyGEiRDCRAhhfX+KAoqb\nvj1e51/sO0IfZaKI6bGo+jOq7wjP+IYJVV9ENrypviO8/R0+l2Zii3bCm+o+wqfiKfoup2G0Gd5U\nZ6+pni8Cv17Z/siD+vRdTn0NFLd+9o+6mVvzAT3j6qMrsfXyjPLSy6azk3u5q42fstn4y708r9z6\nebI6uJdbhvJIir8ucZbELvRS4K39CH8ayvUFs/i3VjQ+dBfX0RPU7h3dt+Bs/1vHF7QmlsTXW3/P\nu3G/31p9QivrKMJGD1HsHsGNxyqKPEP5Q5TT5OTXrRYjPF7ISh4nnWHT0Hb3uTY2cmdCGno6Nmgu\nwoLD95biqUtWQ4f143eAH7UV4RkbsGorVXNLIp1oJcKzl5FqYcyXxDOy/PdUrPv98fcfLvtPUE0T\nm+0hV49qpyzW//xb47qbTvmVsLsh+2p+ocTij3G62A42ehfk8hblVaji8Xg893tdGXUA4QjHWwZr\neqWYviMcYiU8xXzLcl4qtmJTPW7WkxH2OF7bnffQ7IUOxkp4osWTdYrcrBeEI4m9Ozr2MliB0RuG\nlfBcUqmp0y27CE83vyTHQfZC57reOc/sjna6xWrE8wWhAVw8J6nHkcmfMXMFxY/mdTfPSvl6MmCP\nW6jA3e1ujEpxkuc+O87C7WuOWQk708qnFk/2U3jzrVtf66GVsKp9i+F8Rg48htuvPfn6+aTLQFdT\n+172Mi7nKTXJRh3Jr9eYfRppv9TuaG0rb9L8NMn62uPaaP6IDn4ss4tRqhph+8NRzWsojkyyLmbY\nbhd59XurvDs68Iz5VcF3Skca1ZMeS+NDVO+MmcYHor7nedhFbqffk0WmzpshjQ+R09ZG0Pgka0HL\nQyRCrqLZDkU4iGZn2EZ1Xq20OUoiHEebM6w1DY6SCIeye4bd7/9+v9P0Bp4/z//k7a+07+1QUFMd\nOlg/mu0HD6fz8O3Xv37F0+sXfDHUcSJMOXG2rnf4dxFY6/TxeC/qbeUoG9vZLwjfbr+1o2UiHNNb\nh8evyT9Z7u6vLch0PXz7E7YT4bCmr3x2nXW58JJvGvO0t/muKduJcGQHd7qmHU7ye57yenv992B4\nlXcOW9sXvYmQuekUfe3Pvu3fLk7jxub2Hw1W98YhCr54TuL5exvBuzQYEfLFYoGl1pZP7x6dpM1V\nUYR8d16Bb//KNRdYrwkz2tscb3K8wPXr5RT/mHKbS98bEfKD+bUntp2as3aUcvaWz8iXC1gkQnZa\nT+XI9XJO6rDZtkXITvMJXfB6OZdaD0XIUaWul1O8w14y9u5ob
esfFOrUSdfLKfh+actBijBggOpe\nik/u8zpslggD5udG9/UB2cp2d9js0vemXoRX2KTt9nj0euTwDPOpcnDytLwverMSpix+UKg7Na8U\neuDKHa0PdNV3Ry/1vvNXY3R4nvls+eXKHUc/xFyTQxRVvX3s9flz85MkZrHDT7/cV3hTtSO88mLo\ng+c7bLtezp/frHWnCrMSslO1jWnx6+W0JhDhNRdDy+ARB6+X0zgrId0YL7+nzCGKqx0zHG8ZdHWm\nghwnrONCWxx+FYvwOovh2FtxjrMS8jOblbKSEV5hMTRfjxt+DK2EEBaOcOzF8HXZ3PQd6dvIi+Dt\ndotHeAWDdVh753C8wzsz+QgHm6Mv88ulDPkwOS4f4W3EDucP53kVlgEe5hiPoikNve/0fGrbuT/7\nTB/F8iMaYv+q3k7pEMO1rqEIn7p+P3rxzi/84RATq8YzNcRAfdXE7uhUv3s7nyblwgvCIT5U3+8z\n1ZpGl53u1sNNd/htu17ke27TunumGtTuCHY0RX+YiPP9q/73uHR4UHO7oy/PqwC2v7/z2xScP6Qu\nHuQq+6UHtRvh03OKNvsU71kE5g/JMnJt3exItLbXVuCASmsP6Rg7pbsZuD2KTTgdfrihPz8MNDgr\nRPgzm/wVOwdnWt10wzTWRuoTF3r6wenn9PS/AvxwKT2viv/qLML6W8l6X9M1ygqw0uGfrdjf36t6\ntxrWWYS3D0fabkWf0/EuL1vZtEOD+VV/Eb69arj9ze/g3s2WueLV4HavvYadh3BeP19AfxF+Mj8h\nbHqhwZW9o5VfYJ9Xfjv33i/2XHQZ4deTTF4fpn39yeJs2B5ejTNChlgBxvg8WmVdRnj7/WSvHdNi\nvk6enmLPc3cxv2t+78ivOovw7LcPV3ZQTaZPrH4HdRbhGbwyPOLrtsli+NWlI7QJP8LolXLpCE2g\nfX7Nz2K47tIRbmcOPVn9ziBCNjmYn8VwResf6qUFz34OJuQD+J+IkO/0c6p+I6w3J+xHlSLmRf1G\nSFX6OY8IqUrMcyJkK/2cRITLXrPNC8LixPymywhPfQqf3xshvEX6OUNnB+uPflp0240Xv2XeOHY/\n1U2EpxayeOOaXHS8HwP7poOt0afnrMhzuXLj7Y9M0L7xkd+i5lfCD1+7eVv/NtxNN3xi27wxqisa\n3t4vXcmwSDkrv2wB3G7jWMnvq1ZXwtnlK9afy42r4np+63+XHWzUtmhvjBYv5fvLJWVWLv8sv7JW\nGjOq2zW2Epa4vPany87a/6xDfr9qewoeu6ba+mwwV4qYbsUM6T6NrYQvJb5fYuWFogWwLPkd0eRc\nPOGiom+n2rT4qLtli3ZQevjqXvjddKFB0Uk5yjfywRFdfooCRiJCCBMhhEUPUQzxjXxwUPo4ofa4\nPLujECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQtj/AXwGvOu9Mvh0AAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAIpElEQVR4nO3d25arNgJFUejR///L\n7gcSNwcMh4tgCzHnUyWxXS5gIXGx038+nw7I+U/6DcDbiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhLfq+77v+/S7oC7/Tb+BF+n7\n/vP5DD90XTf8DL1N4QY/q5MiAxFe7jsALv3X4Qcr4rVEeK1JgStBGhhfS4RXmUS1sTEpvpAIL7F9\nAFx6eifF13CJoryTBXbyexmXKEo6NgWdvIIC30aExZwfAHknEZYxTs4RHbs4JixsqPF8gZ/Pxw1u\nLyHCwk7mp70XEiGEibAMIxiHiRDCRAhhIoQwEVbH4eXbiLBeanwJEUKYCCFMhMUUnD1ObgQv8ppU\nyw3clzh/D/f4FdwRfl7NH2oR4SVOxjPZYsavduwFX25YntXuy+rdPTxRka82/Os+u9qNqULzZVXh\n0hNheSspzv/9X5+4/uDtj3+hp3y3nQivshTJ0uo/fNBS1fZUjy3Ls5JFJ8LLbZmjnjltMD99+vJ1\nemD+v+vxxYnwJnsHRo554oRChHebXHuw/As6vzwjKdoIMi5a2a8dVwv+4ffvGV0nzCi7ml9+pvTp\nEwoRtuDRm+BJTy+wc+8oTNz/CTIRtsYN348jwnb0fd/A3GyvBj767JiwHW/LrxlGQggTYWuePjc7\n5NlTABE25YXHhA0QYVPeWeDn0z16+K8lwldOoqjUzWdca4mQUuzOHkeETfl+mcrbUnz0NNx1wkaM\nP0bgO9qepaKTac7sHfa3L1OpfbnWtupvfj+mo2Hnp47rW8zn8xlOHtY8P63q1jOfJ3yR81/vu/1Z\nw0P6vt5jp6HD7HiYmsCLMKPv/1jZB1I8sMlWW+AZ4z3Lmb1McBdgOrpPkVnT0rby+Xy+5zbXf9HJ\nLWb82sPPNUwGD09K50/a+zLZQdhIuEORb1P/6956/D2I819UaspU59T02KR0OOgdP+l7D82WV4pP\ng0W41XdVnbwAsP0Z819UcHOZb7jbt9pLbe9wfT/yPQzuVv+oeIGdCLf42duokGtX4m0X/dKb4v9t\n6XBp6Ju91D8Pnl+Nq+c6an43MFZ8t1Tkf1G24dvUu66mjfivvlvw951XODtdWvKTpb39xEy13/ja\n8kj4XdCl/hdlSzbOfDhvvkYm4+GKam/oazPCSXXjFLvNNe7dWT4ov/mGW+Gbn58DKzJ8TTaGGlQX\nYZFvMv95gnH9rGPZtzGfI/2cNe0aPN/8Db/Fj99quDfgq6IIz88eu1k8P19wfWC87RrAOMUtRzVX\nbDF1Hg0Oviuo6Dnhitr7qiXC8aI5PHtcevDPF/w5MN5wDWD8AJZMposVllNQPsKleIrPHpde8P5r\nAJMbVsZT0251MHzJjPTnAVvxDutZguEIt8ezsv3tP4Pye45a/GB96eLV17i3+dT0z0det4/oa9og\nfxRY9lRKRX/qv5IR7opnafbYHd007xkAVzoc37T5198/+fO7igfDsot0spcs+VdXc0Acm2qfPv1Y\nPp6aDzyua6/skjx5dm083C1cqS+xjn6eE8vJjITnF+UVs8eaX
bR3GJ+BPPlbzl+b3fLgOk9vnnT3\nR5mGWxauuDftvLeFPV8Rh//84aV+nl0bbPlw1vcpx97DDpMD8bRbR8L29mEPtTLuHRgSz59d2/VL\n2xsM74uw/Cnm5lbGTWZ3FP16yKYq9hZb6uxaY6v+vgibWWTPNjoVMb8zc/bYtTzOZFDk5HYzHeYv\n1rPDt5MDJ/dmHwGabMHjfxwH+XNbL3Uv9asOwpeI8Dm2f3Ju/bkbnj4JstS12bJODYaT3VlUCxG2\nNDPZbfPF/n2P/+OpfwRZ1XI+XuDh3dkFWojw1c6MhxV49Q70X77y8A+NH6W8e1uvlpGw6+qbZf02\nvg+1/nfLZs+OsMhk5hkFDibvc/1tX1BsI7PHynZnz47wpHpO9JVX2bmH6tS0QN4b4dKdk202ScUa\nifDAR2bmBcqPiBbOju49nznp7YoPdsB2jx8Jd93GMX9Ys/lVdu5hRSMne054fISDLSm+bgra8J/W\nlkYiHKx8aG0+Be2cg6EOLQ8FS6U1PgBe74rvRnjzGmlqJJyYz1ENgBV6eYFd2xEO5FctK2XQfoQD\na7o2BsCvFq4Tcr+TnzVR4JhlwUFnvt7XVjcmQk7ZFZUB8CcLhQK2fHWvApdYLpT0c2A0BV0nQsqb\nXJu1ja2zgLiKAXAjEUKY64QQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoSw/wH710ffFNQiSwAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKPElEQVR4nO3d3XqqSAKGUZ1n7qj3\n/V9B55qcAzJuAkgQC76qYq0nB9lpYyvWa/Ej5P54PG5Azn/SDwCuToQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIS05H6/px9CeSKEMBHSjPv9/ng80o+iPBFCmAghTIQQJkLa0OsG\n4U2EECdCCBMhhImQBnS8QXgTIcSJEMJECGEipHZ9bxDeRAhxIoQwEUKYCKla9xuENxFSsy4vZjH3\n3/QDgB/G4Q1zYPeTYedPj1Y821sckH132PNzo3LzSW/9xr2O1W6fGHXaHt5wy/Fteu2wz2dFheZR\nvbrN4DrrpXbMcJ5X/axvEE7uYUvMbenwfYUKzWewtzYIt9xhu8yEnK3IVDZMiX106GA9Z3s8HkXi\nea6atk6ENKyPDkXI4Q5db+ygQxHSvNY7FCE9aLpDEXKsQ9dF2w1vzCGKC3uO4PZ39Dd9uEKEV3W/\n/21v/H07mg5vzOoohImQn4puZXUzWR1KhPz0eJTt8AStpy7CLtzv31/Pf/5qiG34moTXQoethzcm\nwvYNFQ1fb8Xz/K3bLLzSHfZxLOEgIuzUeGLcqHSH48mq6YPpRxNhp55T3Lu/9bPD3eXMf7Fgh+O8\nO1gvdZyQn4YOZzPYloH+69UKezoJsCARNux7QI+nryLje9bh7fWEs36C/Lw6Hc5ZHA1bXOUreOfz\nC1KMVwK3/x/X7+qTx9ZHz2bCfpQdjotT1lvtrdzVJ/NhH+GNiZCXnql8flUY66Ur7B090Jk75Q8a\nzQUvMTjfO7pvf2l/3YrwKN28zRd8FkN1k305nxwF6WMJi/AQi/swjtPQWBwutfZ5hz0d+rdNWNhk\ncBz3bn3aKDziKUw2CDduH/b6V9NEWNJzN8aZQ6TRUTjvcPFm7x6HbJEIb7cStYz3XqyMp9aHS1kr\nCb371ymaXrDXi/D1h0vKHrma/PCggXLK4DvwfzFZcRj/fN+dtOhiES5dWGWyDvnm/b3cfd/umBg7\n4eozBx2HbIi9ows27vMYXvUdn9sqtU+l0TE3t3Ex/nonje4yvUyEL16efdPgcLBr+68UXy9tdLQd\nrdEOr7E6+s5K1a/nqn1S0Ye/+/z+nL2vLV4J8e231Ml5zJMfnvL8a4yw5J9iHZbm865mZ/2s788s\n/sB2TIkr+wmb3hA6zvpiud/vj5+3nt/i5CuyVhfhfF/Z/kG2uAQ37PJef2An2L6f8NAO2617vlgK\nDKfD1BXh4tHbfYuvyNA89AV7dZbQu/9f8+Git08Bmaw0naiWF2/Lktpe40GD8ri7vX0cvA4X/bJY\nXm0QXnB19NXJo5MfbpkbS25PnqjgDvrmnvuhFlcT/m4WLi6r2Y6Do+UjXHyvWt8sfFVji7NBwcfc\n7kI4xxvbNecuwHCE6yPm16lvcgOD71ZiE3G+OtbisYqxyodHMsLtY+XX2M45Slt8F8hxHyh9/+MH\nwy/+/WetI/Y9TawXxCLct3SKHb2oxVGPf0uHi3slnv+cdJjbd9i/QIQFD3lXvpqR9arD+/0+xP/W\nYrOMj3N2hGVXD8Z31dzhstNOUNi3+vBo4E8zbdHAeDg1wrYiWdRc6p8cuuilw9qddxZFW2O3M29+\nBGf6faOvWyu7lzKnMrV4vklZp40Pi7p+lzmfsJxGT1qrx3A2Z
vpRVOS8bcLmtqYOdeY0WMMydzLk\nivzH1grSeW2cDLlFJkIvwAlSq3xOhnxXVzPhabyJTDgZ8hMi3K/mT8+dNg1e/GqFRYhwp8VLJwz/\nZenGw83Wzk54ft/WpzSLxHPxDk+NsNdlvf0Z/brLbpzi7nMadq8cBvU6Nrbo7Thh5Qfx5h8Eu9+/\nv543uKzPX7vxb1c8CqY6ibDm8NY9Ht9fY+P5cHhm259fu4vitrfD8btYi8++h23CtlZjfv1U9Hjl\nc75q2r2N66WvToZs8ToAsQiLbAPU84mQt6x0OJ73djytpqfBp5Wx8f8n2NxrvqbhmbD4NeqPtn52\nwsr5CrU+oQMtngl52xZfc2dgnR1hqZ1gG6+SeCnzabDppVHqTMj6106bnAmXryRZ+ZLmfe++pvM1\ni1sLW4nJCPdlo8BX5leIZFB5hy2d1Duch6bAX1kgczVvKMZG8Lur+zYC3+Xtaa7OZRJ+TBs/A20C\n3MdSmqtwmdTygFamNQV+wrKaq20dqq5XaOMfSKvqMdfPEltUz2Kp5XGMvVpHre0NrCH1DLiqVLJY\najxOuHiV+0qWV6OufKJQ/Rp4YUyApVz2w7qL6nlXqnEmnKhkSXVg33x4/tUKr6aBCClo84lCa5+Z\ntnJblggvZ8OJQr+vfeiwIMvxonacKDTfIGy3w6oeuZnwojaeKGS99AQivLRX/VgvPZMIL+rVB+IH\nO66i/e5v8SRCylxF+1bZhtaK2h6nCCk2g+1YNf3z9XW73f7955/xP8c/uQIRUtLGDldiu1R+AxFe\n0aHrY+vHIf/599/bammTufEKREh5n1yw8Oj8atsgvImQg9hlul0nf4uCSkwu4aXALaqbmjnaoetj\n47XQOodWhQ/MTEgxFY7viTofoQghTITXUudUcJpHlX9DVoSUV3PqFXYoQsqoObyJ2joUIVdUVYci\nvJDxZHXcEGxlSqynQxFeVD1DMKiShSDCC5mMuYJDsJXZb66GDkV4Lcd12K74QhDh5RzaYaNTYrZD\nEV7RYocXnxKDHYrwouYd7h6Fjc5+c6kORXhd8wkwvnUUF1kCnbyH8YnJVLZ7ZutmSjz5iZgJKbOr\npqcp9OS3kk7euvjcjvlw9/WCGRMhf21cDdt+kXy2ECE//PpX027aK02ETO2+WiH7iJAFrlZ4JntH\nWabA04iQqW4O97VChBAmQggTIYSJkB9sEJ5PhBAmQn4Y/ognZxIhhIkQwkTIX3++vi71x+IrIUII\nEyGEOSgEYWZCCBMhhIkQwkQIYf9NPwCq8Ofra/jGccLziZBv8kuxOsq3P19fz/mQM5kJ+WYmTDET\nQpgIIczH1iDMTAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh7H8y6fiw6v8Q5AAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKEUlEQVR4nO3dXXabyAKFUXRXz8iZ\n/wjiMXEfSBQZJARSFadK7L36wZ04CNt8quLXl3EcByDnf+kVgLMTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJkLDL5ZJehTARQpgIIUyEECZCCBMhSZfLZRzH9FqEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJkBhn6icihDARQpgIIUyEZLih/uq/9ApwOlN+0yGZ249Py+EpjvMo\nuZMfJj31F89hno54Z+7wvF85B7ju+N0d/WZ/ftqpqQipYqWo9dhOOCSe7gumtpUdv+mDp5vc2YZE\nEVLM3Xi2t7dc2kk2TqcoKKngPt44jifpUISUV2o+OXVYZFEtEyHlFWzmek7/gzt02RqEiZAyagxW\nJ7m+VIQQJkIIEyGEiZBGffYR0VsihDARQpgIIUyEtO7jdw5FSAEf30lVrh09h+ulJ1Jpj5HwBC6X\nYRz//Hd7IViFi8JKXWh2qqFVhCc2a7LMIsfL5XKSaz5LEeG5TR2+18wsuXEcr/cBsoV9wmpa3g2b\nJqiT6YPbP9mxmIeP8T3J/bhFnGjmfajbbfql7buw5TvCcq32rOf2x/i+v3f38fuHRsJzWG7E1x3C\nmyHx6eb+9KlNywfDvDkknmFa++HvMTG1R8LZyPbOS/z8t+sPLNy4tTx67Nquje08v7LCSNihsoVP\nQ+LNvtzwM5gXArj7YJiNQ+LdwfaznzTzmV9V3u0kqsYwOIvw7YNAjx5KP7w3/uwaEjcmWmyLbebI\nmZGwmutE8ciXe/1f/5j1FXxm4XBvSJx92vaXe2vdbveBWzpyJkL+qbQD9ugxvq8Ntrunps2MeI+I\nsDN/Nr6aG1aNU+2zEez9zp8OiZfL5bqb+/KrHEOEFVSe3vwoZGqy+e1sqDDMPhoS115idlamDSJk\nrupByOJLvp3r7siv8mxil6Yj/OyzQ0X45gyrU9O1SWkz37pGI+z6RO1lGLpZ109R5PRmSnMRLr93\nXZ+o7TPIDlf5Rkf5TVqJ8IWLEou8Ykc/qqWuV76SHt+s8xFu/+XmpU4iz+a6ff3M3Kr3SHc/yqtk\nhNvzu3qnnEcXJdb74fW4WaSvHjmjTIQv5Hdr75C4/sk1Ojwgv+7y5pGjI3wzv6uNQ+LGZRbscHb5\nJQfq9V0pMBK+ea/abFGlLkocx/HNqymWe7BDhzPSrlb2Q4QPzBS/hvCdBd7e9rBrAZfL8OimsEr7\nnH2FzbpYhDWuISyywOWdLituin342QXvDBrO8biHs4lFWOO9vOjdN08+YdeAWWRq2vVVRKw4NMIa\nE7PITtdrx/Ffnpp+2FVEzHj47yte3vLHnQ+onj55HMeVHc4XV+XOa80/NvM9Rv6KmcYVfwzClkFs\n+9Hd0jucHR8d7XfNPyTCqhOzGpvm+smVuo97+PFy0xKui5p/sU3eBPtp+o7wmJ2i5aZZaLEln/iw\na0jcfjtrqa/akaQVfUfYu9InV54MiZfLnzurVl5qLP2bmhzUfUqEmxTfNH8uvORGeXeiu/OSwDJf\nrIO6G4nwucvf37HZy9HC26npnkd6zj8udQXfyuq9+AI/XqvwkbPjifCJsaEHAu0QGXN2HdQdyq1e\np+1dHRehSchSkUfNt+C1ke3NIfHaXqUjZ4cxEib13t5Q4qDus
P8N+umJk76aDERY8M69D9iI66l9\nqXfZS/A3Lu1RfrM99r7Gxsz9hGUPVXdaY+/H64sf1B0e/yj/vqGsvebdDoce9uQz01GHqoefp86G\nnmss6O4FDMPqN2d5UHf2v+0PicdFuBwAK93wWkHFp4f28OUfqvhp/XFs/e3+0JFwOQC+MzVt+dsa\n1/u9vzUuYCi+2FJi+4SlnsXS6dZmFnqwlq+biw3Ty2/H/uPUR3xDa89kKtUo8hWtzU7Dj7d4bWra\n5vvZa3p/E+F94ZP1s/C23QfwIfnV0+kU/bTyV8w8GhJnM9XbTz549ZRPVfkIJ3eP1gxtDH3OalJV\nKxEOpe80L66fs5qsafCH2FCEQ6v5XRWcmi7vgrt7X9xLjwO3Q9iZFh95OI5jgwVOpnV7eUNfPlZw\n/ZOXNxNL7PO0NRL24oWp6XJMW17pPwus1TciChPhi96/++beMn/8k+vHt1PTYZinu1yxv592eWfQ\n5jAifN2Wo6brfzs+e27NbW977wlQYC9a3Cfsy6Nt/foE+2f//OFfXZ9F/3JKb+7BcozmDtd2quWT\nK0PR4/INfnW7OEXxsZrNb1Lk5ErjbzT9au5dgapK/W62d5YW1OYKGwnPZdeQ+PSS3RoX1v76/p4+\n+P31VWqZjRPh6Ww8qDs8SGv5jJKnS9vrPPlNRHhSj6432J7fcmmP/nbdNPTdhrf8k88mwvPafsX8\nlsBeGBIfxXae/CYiPLUi+c0WuOWfpMa6Nm+FESHD8ODI590/37iolcf4fv3+HRzrGrxLu7l3BY53\n9zkGJW7X2vcY34O1MyQaCfmn7BDR+Gn9dqamrh3ln0p3crawod/VyIW1Ijy7GqNBC1v2RlOH2RUW\nIWcXv9dEhDAM0ampCKmokSMfG6WmpiKEfyJTUxHC3MEdihDuOHIWLUIK62s/sAUihDARnppRqwUi\nhDAR8kdH15p9GBHyR/FT1ea6G4mQf+JXUZ6TCE/t7uinw4OJ8Ozujn7vTE1NQfcSIcNwr7q3fx2q\nsXQru878sDyasv/XoTb6PItmiZC5ZUWbfx2q/F4hQu7bNSTK7x0i5KGnaTX4IMMeiZAn9v7KCvYS\nIc/59aBViZCt5FeJ84TsoMAaRAhhIoQwEbKJ+5LqESGEiRDCRMgmX79/p1fhY4kQwkQIYSLkuV/f\n37+/vtJr8bFECGEihDBnYCHMSAhhIoQwEUKYCCHsv/QK0Lpf39/TB04VViJCnpNfVaajPPfr+/s6\nHlKckZDnjIRVGQkhTIQQ5rI1CDMSQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYT9HwAa1XZ6BQiz\nAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "lrEcrEsOeYt5", - "colab_type": "text" - }, - "source": [ - "Now let's picture the compounds in the crystal structure collection" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "dBa2xXeNeYt7", - "colab_type": "code", - "outputId": 
"1925e077-7b30-4812-c4fb-4e0619a31cce", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "num_to_display = 12\n", - "molecules = []\n", - "for _, data in islice(crystal_dataset.iterrows(), num_to_display):\n", - " molecules.append(Chem.MolFromSmiles(data[\"mol\"]))\n", - "display_images(mols_to_pngs(molecules, basename=\"crystal_dataset\"))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJ40lEQVR4nO3dXXLiSAKFUTExO+r9\n76BrTcwDZUYGjIWUqZs/50Q/VHS73CD0kamUBJfr9boAOf9JPwCYnQghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ\nJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkTYh8vlkn4I1CJCCBNhBy6Xy/V6TT8KahEhhIkQwkTYOnPR4YkQwkQIYSJsmrno\nDEQIYSKEMBG2y1x0EiKEMBFC2H/TD4DXpp2L3u8Xmefpi5Cwh7u07u3N8zYkQgLW4f1U2vV6naTD\nKZ5kd8bb+X4a7rb8xcE2xTMjYXNGuon++AHeDOOhCJvwta9eluEWJI4/neE7HPm5deGW35AvQtly\nBu7QSBg26H7FB5yspw+3SWn6UVQhwlrWO8ztz4PuQuc52OHlS8GHVITpaEWXy7yzzUqHcJ8u0rw8\nIdna4aUIK7peHzsceBnmNO873HJCsrXlVhGeqpnXvW8PFW25/ub9b8gSYV23wZDi1seH+66/aadD\nCzMVTTv5PG3nvl6vH/2PHpZ2GllxFWEt633j9ucJa6xnd+cNdijCWmZeGj3ZpxW11qEIKayRA633\nmupQhPRtgHmpCKswF62q6pUAxX/tr0TIvJ6ri3QowkryC98RXRwQrrXQoQjL625H7MvDtTKlbhp+\n/2+qajXCy+XvP3Qlvty/j9XRJ7dljds/Hb6ikxugw5PnMk1G2DNz0eWsDotv6tQLJ8KSehwBKqnU\n4ZDvce6iOOr5Ppohd5Qd2rlNoXFNRrg+FLy9hO2d/H5zH42d767HTXH+A24ywuXVTekN2H7zaI87\nX19G2rytRrh873BHkw9j6V7bw7v9ZIP3jGYV3A6jbs9uFmb+/Lvhh+7BlDjJcftkruvKTz+z/sn1\nf+10sb64Itth97dZtK/5t5bVAPjnz+Wff354tA83sa+Hzb3T2i2fJrRl6436/v2pHdvh/EWvyIvV\n+ki4dQC8DXrlPL8Yz4OetD6yfTx83s4Pk/yaDzOggzfpNwPgnz+XZVke/+ubUXGz5w/zOrKhDIZ3\nP22Kjz4xrd72jLxSDS/M/OZFnOv8Ci3MLL5XqKj1ptjxUYXPv6Sg1Gs00J5R9JTGSK9xg45PLpYK\n2zP1AnU8Et79nZS2XeBiPPyuhflFkfeC47qP8N2SaXt0uBR9j9uxPXfPgeuZfYd4ach18HaUffob\nf9uvZ5WCL0pnI+Ft5r
k8r4iWc8KLYTws6M3G/PRqp5RGI7zHtqx662vm+Ux4S52N8NzhloO9dual\nDUV4uVz+/To1/9mJwd6osbiHDn+91OnNz5wvvDfs2yhVh8RKhRT/eKJOVb035td5aZubPTYSNrI6\n/GDmPAZQ5EqA8yWno59umudVmd6PEinufnFp4+Gtha7TKTfglJ3enHBR4syDbXv3aTeh9bsofnXg\nhsF3yq5Zzxwev+o+wqVchw/La+PdMkObGjpFccSRYean2JxSL862fCmwk8X37MtlWX9hy8NJ3ud7\neQte7B9/7jRokJFwi+83GL4u4Xn0Mx5S2wjHhL+6fbXM/ZOf3ns+GnR8SFWjRbiOZT30fTSSVarO\niMpLZ0d4wo5YJJ+HDndnKbznt8WXb5QzfxHeaCPhUu6MRakOp3L/XsmXpb35W8+fETvPxp5oYWaH\n52vzPx3ZZhgGL5fLfaHr5dO9Pn1++kNgE2ykd06N8LS5WcHLaI53eDPYvPTg5dHrz6Nc/c7HD8qb\n5DK3YUfCdjocbBK77/LoX1+OdW/3FGcocBk4wqXoJGfjPaN3LweKYQbDfc/iTYfrBZshttBnTt0t\nzlkaPf+e0WXz15X03mHvj79N542EvRe4PI2HO46LXH/Ds5GnozWsT1TsnZXpkG/OO084zHm265cj\nv2GMTUERp56sr7rz9bWY1mOHBvBKzr5ipubO19k+3WOH1BC4bM3Od2dTsKSuHbXz3R3cFO8vj6YL\nsQu4y3bY9eHK9k2x7/LoIrrewo0Lf+6ol/bmp02xvjZ6+fDyaNu1F+HzhDq8e/np0bsvEKMj+ZP1\nBzsc5tjythEqXR59kDfKqvIRLsfuSxhs59h9qFy7Q+ppIsLltw43Xh7dr91Dzfov3f78fIhI41qJ\ncClxeTQ3OuxLQxEuJS6P7l2po6/77PT4L3NAWFtbES5TtldpL/+6P92iV+uai3BCVd91iix6GQyr\nEmFbauzpxxe9nM6tSoRppyyh+EyAlomwJTWDLPKZAFZdaxDhXI4OZTqsYMCPwe/JiTt0scmka3NK\nE2EzOhphdFiUCKN6qe6ZDsux3jUFC5stMxJCmAghTITjMxdtnPOEOfeFjd4LeXgiHS3ztkGEIes9\nteu9dpgnkmM6OrjMXPThUxl5y0hIBcbDTxgJIcxIGLK+4qTauHHGXPSUJzI2EeYMs8s+PJFhntdZ\nTEchTITDco6+FyKEMBEOy5dA9kKEI9NhF0Q4OB22T4Tj02HjRDgFHbZMhLPQYbNEOBEdtkmEc9Fh\ng0Q4nYMdXr4UfEiTc2XTpHxfWjtsx3kd/L6097+B7WzEqR38vrRFhyXYgrP79PvSbj+//mEdHuSm\nXn7P7/0g6ftDD7LtpvY+no8GSR3uZiTkm31Hhovx8AAR8tfzwd6ndLiPk/X8db1ej/fjipwdRDiv\nSqOWDj8lQsrT4UdESBU63E6E1KLDjUQ4qXrLmML7lAipxemKjURIScLbQYQzkkpTRAhhIqQKg+12\nIqQY4e0jwun8dDc9KSKcWqXz6YbEj4hwOg/hlepQeLuJcEaVOmQfEU5Kh+0Q4bzMSxshwqkJrwUi\nnN26Q5PSCBHyzb4ODYNHiJCSB4fmpTuIkGUp1KGp7D7et/i/h3Fsy7C2+8OCuRMh32ycT376NTK8\nIUIe/dShQa8SEfLCvUPhnUCEvHb8qynYSIQQ5hQFhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi
hDARQpgI\nIex/J8Zg93f+xZYAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKsElEQVR4nO3dWXajyBZG4eCumpFz\n/iNIj4n7gE1iOtNExH+a/a16KGdl2RjYOoCQNIzjWADo/E+9AEB2RAiIESEgRoSAGBECYkQIiBEh\nIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAY\nEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQI\niBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgNh/6gXo7c/n5/LL\nvx8fqiUBJukiLIQHYzgcBcQyTsL5iJSRCAsyRkh7MIXDUUCMCAExIgTEhnEc1csApMYkBMRSR7i6\newaQSB0hYEHqCP9+fDAMIZc6QsACIgTEskfIESnkskcIyBEhIEaEHJFCjAgBMSIExIiwFI5IIUWE\ngBgvZfoyDKwKaDAJATEi/DKO4zAM6qVARkQIiBEhIEaEgBgR/sNpISSIEBAjQkCMCAExIvyB00L0\nR4Q76BA9ZfxotBPzHaTLDrmnFE1x1/I/R/dwEySaIsIvF19FQZCojghLefo6pjlI1iHeIMIKryRk\nPOKN7BFWfy0vQeKu1BGeFDi1xIREB3kjvDIDK1ZEkDiSNMIHR6HVg8y55rGVMUILV2J4XynM0u0K\ndq7E0CEmuW5ba7HfL78hJ36NxD6AzxVhawTZwvaG3mDrM1GEnQ//tkEG23U6WK23+V+Crc8spyU2\nT8BsLpURv66cMIMxxSRkX3fnyiZbDcbitsb4EVKgO3c3mfcag0dIgb68PNlzetIYeR91UaCLhezj\nfFU86MrLYAw7Cdm5ffl1ey2fpbi4Zb0cpsaMkAJ9GYZ7Xd2NyniNAXdWdwW6W+C6hqE8/u0fR2Xq\npDHaJEy+Q7vzpsDy4kqMqVtwQkVIgY5M+3+tzVXlMFW188SJkAIdeTkAj7wZjMJ3fA7+Dtwu3ks7\n23vvNypwaRzHaa1O2v6w1+JMwmmlMwyN61DgzPhF0VnwSZhtyBjXs8Cl8dvJYBTuKnEmIYxTFbj0\n4Bn/DoJPQi8yTGwz+/zXYFQvxT+hIsywKyOeUBHuokwYFz9CL3iwSIsIUc0wlOXDCA8pF0WLcHee\nMGRwhWo/iRYhtMaRAXgbERrCxM4pYIQckWoxDO8KGCHgCxEqRR3ODMNbiFBm9zUfHDZrSdZ/zAjt\nnxaGfNXV8hcaR0M3ixoXM0LjTgqMFKeZRzzriLC3JAXiulwRyo9IKZDxuBU2QnlvWxSIXWEjtCZh\ngTxRcVG6CCUTMmGBR+yX2X8PiRyhkSNSCsS5yBFaQIH4FRE2RIG4ImOEfQ5TKRAXBY9QdVpIgZPd\nyzD2r810FjzCI00zoEDckjTCdsjMHfm9/kRY03mB9Llg5XjUwkaJH2G3RzUKPGClty0jGyV+hJPW\nHVKgL9PHMxnZKPE/lWle18sO6659CvTl1y3SeZMFj3C5NpertWKQFHjO2oe3/rq9SvdPTTO0dqq7\nuO1VEzKP3fUgWTk2HzHDTsLrK1QyIdGfzQJL4Aif2Q3ywbahQGvMFliiRlhlnc7f4e54pEBTfj3N\nk2+vgBFWX6e3jlflWxRLBi/D7CyGfAnq6tnANkgKPMJlmBOhJmHn1bqdkEY2KoqfAkukO2a0q9XO\nFjWr5y3RjgosYSbhn89P+Wq1
9qy0HdubloQvJWv90x+IEOGfz8+/Hx/qpcC+3ZuWGsVw/m3NPkS6\nj5ACLTva71sMRl+HoEuOI/zz+VlKoUCbrsy67TOxjzvxW2Dx+xSF2QFofHv38XglPDtMdXcSuOJy\nEpotEOXdw9Czw1R3J4Er/iKkQMsa3TAY+/ZdZxFSoGXtbhi8e1TpqMDi65zQS4G+9oBaOvzWFwej\nu/XvbHFd6LY72tl2w9D1E+qParS2Wi5ydjiKsrkBRbvPTTl0XoTdw1R3A3DmdbmNa7RD7FbX516w\ng+Xpnd/BYugfjN5gErpxfvdJ6V6jkQKLmQ+ifMxohNPdMDMX12OaujJaW9+Z+XN5rBQYgNEIC+Et\n3D24/fmUd+UYJSeBv3L9Eha7EbpWcZ94/H0Wg3H68v2yMACbsBvhfESadiTWOqqcvsF80vT4+1Fg\nI3YjTNvepMHdJ/N3Xv/J3e+AuuxGmFnT05tVjaQlF+c9Zqx5fN282wWGcfz65GrPl/cjYBIaInnS\nOcwk9HuB1OVCe3Frn7CwA60OUN1dibGwDh/gcLSh60ekTvceVEGEeqYKnM4S0RPnhEre7zxGFUQo\nY2oALk3D0OSixZTxcHR1Ub7p0dfRaaHZAl1z+nIKJuHaciM2ysR+gZwZ9pQ0wpMjruUf7u6IL/Ox\nXyA6SxrhRbux/Czzxuto7V+GWS6a4cWMJm+Ejy8//Pxf1q9q//k3x7I4UbGc3y4uz/SRN8K6jj46\n+/wvACXn1dFZ68sP47eGPwM/ebxAmjpCwIKMEa4uP3QYVB4fngtPVPSSLkL2KliTK0Iu98GgXBEC\nBiWKkDGYhLsz8EQRAjZliVA+Bt09PE+4QNpBlgiPsIdBLkWER2NQPh6BkiRCwLL4ETIGX/N3yO7r\nDDx+hIBxwSMchsHOGPT18IxugkeI9yI9dtj8RSJHePRuLpwNwpTIER6z+HCIN7YjztEADxvh8Rjk\nzc7ysllm2AgBL2JGaHYM2nwkDsD1io0ZIepyvYvbFzBCs2MQ2BUwQmCyO8ANTvVoETIG4U6cCIdh\nsPYIF4aLh7A7I87W7+L7bfB332d+u8eY2oemfcXO8pxz+ikaCw6W3FmEq4e63Z1ju5d73oeUHD1Y\nfPO1tF9MR7j9ELKL+4SvaWNT4BVo7fPArUR44eM4760zOnyDVdeTMsLWH0xNh89EWmnWht6uJxGu\nfqv5yzmq+b9Of7L8svNnwdLhLf4vw7hU7SmKKbDpH1PPFBh8ctbgIpXvAei6QGv73kXVIjzadhbW\ni82d3pRsxwsWdsvZw3NCO7/ARaOlk4PpEcHOfm9nSXJ6GOHqnHD176sNamX/N7Acq5Mu+TmYfAFQ\n6l4dVe/hF+g63N3dpy/nQ+XOMSQZgEfb3M6vXi3C833b0CF49w5/nTarwXj+lysuVYYCXag8CWem\nt2+vDu8e7HU7TA1coMdfK+zG+F3LDqtU1GIw5jwJNH6ilDjC0mDjfHdTd63WqjHwAHQt/Vap1eHu\ndeHa3swxCjSLDfO6wy75/fyBtwcjBVrGtimlPO2we36bn3+pRgo0js3ziDq/laPD1JyXYdwhwpuM\n5be0GowMQC/YTncYv9T9jQHoCxEiuPMXtVoQ5y0PAaesvMeMLW5uwMMlBl4/c4YIN47evQNog8NR\npGDodTwbRAiIESGyMDsMiRAQ48IMguv8VrcPEOHG6qjF5nZDIES4h/DQEeeEgBgRAmJECIgRISBG\nhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBEC\nYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgR\nISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmL/B33ksURR4nZLAAAAAElFTkSuQm
CC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKB0lEQVR4nO3d65HjxhmGUcDlJKw0\nlIYVkzcnxTFpKA36B7UQlwQ5IHF5v26cUyrV3moGBPGgG5cBx8vlMgA5/0ovAJydCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkRIzDiO6UUo4d/pBeC5aRu9XKLLwb6MhFWN43C5/P1fpyPG5XIxGA4ihDgRNqij0cNg\nOIiwSf1OUM9pvDjoL+v1iZnrQWMXxvHU26Gzo4XdbZd31U3j4Yk33z6YjrbjcRbay7nTnY4MWzna\nFGFTZpProsNtjePY0BRXhK3ptMMNB8Nrfq0UOIiwH871D8PQ5jme9paYYXh6anTjTfDw++bWLP91\nH9Ti9mwkbNOT+efaSd04/vNlm7pvrrkp6C2XKJp1bWP9ZncbWHojvu5E3mqp3QFwIsKWzW2yS7fj\nz6aaxa5MtngE+KiH13Bysxvisz8chuHvP11S6fRvjr07Z2FafRQ4GAk7MB0H3m6RsweH722yt4eC\nxbb1DqagtzrZlzDcjAxTfu2+uS9GuW4GwImRsB+3R4OdbaZXnQ2AE5coutLNj+c9vpCmL0K8JsIO\n9bel9jcFvWU6SlG3g2HHBQ4ipKaT5HclQqr42d0vZ1/6nohe9f8KT6XRTfb1fTiNvqjlnJgh73qX\n+PO/7eSU7zMipAF9dyhC2tBxhyLsR/fHTr12KEK2dNvI9dePf7JGlx2KkI0tbOTjlPrrUIRs7PFp\nGNeHZmwYTmcduljficoHhLc/GzxsMSkdPnoQRllGQrb3+tFQr68KvvNdOhkPRcgujhmi+uhQhOzl\n2uHdZ9jM/nrdd2m+QxH2oJujo8+03qEI6UHTHRaKcPaq7ranttnV3YMRD37j2u2w1iWKx8dbXn9b\n6SHR1NXodYtaET57svvs/rW1Vb2Xmptd6sO8W+ywVoRL3F35vftBbGhOuQiXfwTQz+5+edztz786\nRZOlDoFSQ9/DYjQ2DA5FIrx7/+46XDL/vFvvHT8m6Da8Uz2IpWP5CKcCv72qu/z0zN3T4Fs3G96t\nagdCRUbFVuQjXG7u9Ey3B4Tfhndn9mNhjjVOn/jEW8IRfrzLfHZA2HSQK1/INP4fvxIevmkmyFLT\ngeWSEW41abld7+nR4G2b70GqTU35VkvT0SUa2vj2219cLhdHZQ2J3bZmKxn23GUsv9Kz3t3Aaxx+\nV6F7R0/lgC31yA4raDf+TISGwWOcrcNGZSJUYDfaHX/qMB3t3K6D4eztEJp8V29nR5tw8Ojx7GdT\nPnZ3V2CFwbDCMnxMhKewSYfjOMzeouTK5Eo9R2jjuPXZani4X3f+q1jVa/QcYU2tbKyvP7jzkQ4/\n5sQM8z54RG/qKS+tx39EhK8/qYeetPu0paCDRkLvSx177xN1+K6DInz2ST1nU2TitPea1+FbYseE\nW30qCB84YJ94WIdF9mtrHBeh+xgr22OfaDxc6NCRsPEdVleO2Sfu3WEfkR9xnfDx
qU2b30g1qfyT\n9dUmTkd2uOELf3wWQbUV+67YxfptO+z4GYebm90n7vwdN+jwxVvc+n0CyTtmNrqhse7Qx+SzTpY/\ngKfpDvPL/VmHS4a+Un22u4lsaMlKWPPkq0ZXcmP3ji6cdt7mVyrFk3sxXn1wQPH4zjY6HpZY4iWD\n4cKWnv2z+HsTX4A6plXx2aD3ba7Nreoqi7tyB1l/dtrclrGrd9+Ld3Nta20XWtbHJ+cNK4a+rf79\nJtraJg6wcIWsOend0DqvtaBvrbg1OR35DjkovfPtyt/qglMrHZZbyuUn0Da57rTry787P1RtVafM\nroqdPlCkidXe0tnRba/I79fG7HI2euLuALvuDZtY7eUinF1rO39sw2Zf3MzzA/s/ibx6h0UX7u4s\n9gELueZ9Wj5EF98aDpBaA5XXfOElO3xU+fi8+evrIo+jetl1foDgyy+75osuVtCSFN/KVYe3sq+9\n5pqvuEwVvD6Dt/6exppbwwHiLzy+AI/KnZgp4u6EjTMum6gQQMHzNLWWpqCt8jMYDpVecp0lGTz8\nd4mtLkvePYvh8U/YSfH1LML3rHk7dVhEqWFwEOHBztxhtU2/DhEe7TzVFVE/fhHmyfLkRBhw5klp\nXMGBUYQZZ+uw4KZfhwhjztZhRBPxu2MmaZO7N3788dsvv/3zr3UL1bOaTYqwlmdZ3o2Q//vvf6Zf\nP6tuilOWxYkw7LG62UnpZ/vva34//vjtx59/Xf+/ZlE/9vgIr4LDUZAI8+463HAbvQ6GU3vBDm8d\ndgt1K7U7MfONu3Fpp3MnO20rP/786zoGTr/d47t8IHIKqmyTIqyl7IayxrMJtlPBV6ajhWxe4N10\nNGh2ClrwR/sirILvHXNe4Qyb47MTvwesz8qr13SU45iXzhJhCV9fdffT29LhIxHmfX2Nv/9+igKv\nju+w8lx0ECERe3dYvLo7Igw72zA4MS+diJCY/TpsaBgcRJh12mFwcsB4WH9qKsK3FX9Hm7Nrh01M\nbqvvJCrY6fHbhsFbG17Hf/y80eKDYemFK2Wnj5JlsrLD158UUrnDuktW2bafGczk3VTe2jOW7dAx\n4ScuP43jOI7j11cDBx5NWHgoOP50ubHVFz9e0X1Dc247dKS30rMha5MJSMHxsNwCdWAKUo0fm1LZ\n41B8/Bovld4aEe7oWqMUP7PrZ0KW6lCEnFSdDkV4KDPVUop0KMLjuDpfUIUOXaI41NeX6xm1XH6/\njOl3xEgYYEisJjseGglhyM5IPfLwOM7KMMt0FMJMRyFMhBAmQnjlgAsYTszAL6bqDjtlKkL4x+0F\nw8MuHpqOwrzDRkIRQpgIYd5h95Q6JoR/3N7Pfdh01B0zEGY6CmEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgI\nIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwv4PpNASu+t2ZrcAAAAASUVORK5C\nYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAHiklEQVR4nO3d21bb2BKGUanHfv9X\ndl9ox+3gA/JB/teqmnNwkUAIQkufShYY1tPptAA5/6Q3ALoTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ\nJkIIEyGEiRDCRPifdb37mgdv2v68vcALRPiXF0Ja1+V0+v+LDnmBCP/yQkin0zGbQhsi/JhtJP54\nDfzqf+kNGM42DK/n2+Oibr4L7CHCG252+GDKPZiB2x/0yQMifNfNGbi9xnhkD48Jb3Ork68xCe/a\n3+HlP7scfcYge6yn3keKK0biWl+OKpAR9I1QgQyiaYTHFeh2Ds/qGOGhM3C7nSNF9mt3d/QLV6Hn\nLxIubpCyQ68Iv/k4UIrs1OhyNHIn5s9TnFyecleXCLP3Qk+n07quUuSmFl+sH+erEVuHHfY5+9WP\ncJwCz6TIpeIRruu4n+DI27Ystx/HjrzB86p8d3Two3xk93bdnoe19vmzykaowCPs2aU77z9ZnbOa\nESrwHW/uvZ3v63L3rODBOkWBw25kdsOG3S2HqvZ1wp6ryNRKRajAN8V34PZdDcENiKgTYfwAmp0d\nmFLkxowD6Dj3RtNBO3wbhq1Ws0KE3dbsCA/24b3XP3vdaI3uqRDhdEY7a7y2Pc++y/6J2m0YVoiw\n25pNygLdU+fGDK8Z8/zV6jZpkQhbrdkHjVlgN0UipJ4+J9Y6EfZZs08xBgdRJ8JZOPT3a3JiLRVh\nkzX7COeCcZSKkJ0mKrDDibVahB3WjGKqRcivJhqDm/In1oIRll+zd0xXYAcFI6Se2ifWmhEOu2Z+\neATXakbItdkLHPbE+r6yERZeM4opGyGXZh+Dm6on1soRVl2zZ9UosLDKEVJPyRNr8QiHWrPIRDIG\nx1c8wuZKFjjUifUj6kdYb80opsIPeuLS5Rnn3q83m308FvvRXi0iLLZml66H/PnTNP9n0SLCMh4k\nd+3eqafGKanGZ7HpEmF8zZ796H59Xx9dIhzcUyNuvzLDsMZncU+jCMdZs4OSq2qQVTtOowgjLns7\n//mbh1SZYXht9u0/6xXhcUfer7/tpMwR82Ud9luvCD9iulsm8w7DB5s3+JY/pV2E+4+8D/5yzEpH\nzAiK7c92EV778m+ijZhxGA67YR/XMcIf303aZKXn0uRCdNMxwqVleDMOw2sTbep+9Z9FwXRKlvaA\nCA83ziF171ldQz3bq9WF6EaEzKFqgYsIuxl8GBYu7QERMoqGF6IbEbYz5jBsW+AiQogT4bHGPIuP\nNgw7j8FFhMQ1L3ARYVujDcPOREiSMbiIsLP4MOyT2WMiPJCD7GWtdp0IWwsOQxeiZyIkoFtmj4mw\nu/gjwx8a9ilC7jooBheiP4jwKBMdT18eerPslq8RIaOY6LT1WSJkWQb4Rpm2BS4ihLimP22Naw9u\nkx79oTuPwUWEnN0rYedl6ssVNS9wEeFBpjuwHmzwzk9kT6tz7ZOvESGfOWW8/Os9lCnC7r45tPV2\nk7ujrU132VySCD9pXdeJnpauwEG4HP2Mrb3tmJ7i4J5iI5sQ4bsu85uFAociwtfNmN+iwPFYj1f8\nmt+wfa7rMt5GdSfC5zxV12gpKnBMItznz03PF3bXICkqcFgi/
M1W35N76Tq8bIoKHJkI7/tQfvvf\nehAFDk6EtxyQ32v/8n0KHJ8I/3ZwflfvdezuV+AURPjHF/N7+8Pu/Z+t7RREGMvv7U2giPbfMfP8\nvDjiEd32n0mxJ5PwGV+pRIrdtJ+EO32xDFOxGxH+JlSDFPsQ4X0DFHBOUYeFtYnw8gnvvx7RA+R3\naZgN4RA9frzFNkrOLw9+AsW6/vePh3S97efXPHgTI2sQ4fXF3L0Ox87vTFrFNIhwv+Hz2zye5UxH\nhBDW5sZMLdswvJ7cJuSMRDirmx3++Ksmp9DgcvT6IZSvuzGSHpPwR4dVCnSHpoYeES51wvtBhwV4\nFgWENXhMCGMTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh7F9gpr82mgxUagAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJvElEQVR4nO3dWZajRgKGUdGnd1Te\n/wo610Q/cEomQQNDwB8B9z74VJXTWXLCpwiCQV3f9w8g5z/pFwB3J0IIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBD23/QLgKmu64Zf9H2ffSXn\nECF16bru2d6zxqdLZilCKjWucfyHkz+5QJYipCIvwxv7kGW7NVqYoUZfa3zq+77d/AYipBbLwxv/\nJwe9mDOJkCp03WO8HtP64LaKCKnQjQp8iHCza0yEKtF1j+fIN/71cn3ft7tFREjYtuoGTbf3JELC\nxgXuCbJdItzibisHp7nnD1WEECZCCBMhhIkQwkTI2cbnFPafX3iepWj3dIUIV7M0ul+bsRxFhAT0\nvQ7/JUIIu2CEXdc1emxwK5PBcM8Wa/3o4Dp31k/usHbkVr9xh8OvN2+x59pMoZd2qub31A9PNzio\nQ3kfZ0OHTec3aHh/WvLTL76FFHi0VR1e44Lv9qajXx/sM+7E1LQ5CzfUsBdcY6s2s3eueqjWfAAs\n0uEFZj4XcKX8Bg1EuHnXn4S3uUNLPpW4Xn6DBvanPTv9ng7PX/Lhnlo6T7jh7N9wPeE4pyVnEYev\n6f96923XvhhWKXuJac1qX5jZP+ZM5pAfppSrDjuHDo2Hh7rG4udXtUdYyqSZ8W83P0e97/urHqVU\nYucZ/FbcJcLH73te5r/d+j0fj+suGHCOZo4Ji8z9hmO88eFckflk37st4Ch3+ME2E2FBBx3IGQkP\ncvkf7B0jHFhWqdzk8u4LqzpCnXAHVUfYovnZrcsf0rCTCMt
THauIsLz5gl7XKXO1O5whHLRxnrD1\ng8OWXzuHu+lIeHTVVzu79fI6TuN7IfVGaPSr2jBZPPZ/8i6FtzEdbcj4csfxP5tn0DtMpRFObhRq\neki8iPHnWR+v9XnQKnVF6B72ht1nNbO0Wo4J//n5md9Hu/AeXAKGafd405RbjLrbFs8PNf/8/Dwe\nj//9+fPha/YPieO7lg4dYFsdD0pdprnjtq7NN3a2LjkdXZLfYM9t7B6R9t34nWPnu8jzDssSz/K5\niUyEy/N7Wtvhu017t6lOxrJb4r0/Dk6ajg7VPVaGN/d1s31t7+j/3yYXkwqOhL++6+uNZeibOG8k\n3Jnf4Otjmpb/OUebbKwa2qtzZ2hsJHwad/jyJxvZ5E0Og4Mj75+N7/qVn/pqbCR8Gj+mafJ43+cX\nFPzrlmj4aPM5BT3sfyGy07/bGWp7XGVdJ+tXeTnbCbbnicD1+LozVNVhwxEOijy5cIuuezwe3e+/\nt6pNe0Of50GT/aSejVXFi2jJx2OnGtYethhPR0u/8hN29CWflvfu39bQYfMj4XneXwsy3wniSxEr\nHFng0T7/nJe8J8ZmUiMiXGy+kV7NSP9+bY2rcJe0/zxkfGOJcL3fM9IP263UUUf8rboVe35QwUNE\nEa6x/urknZv2hOvOu7/vI93HN5QKfT1XvFaqQxG+N1+DOfEtdr5X1bOaV6FSP5bID1mEbxS9nHL5\npv18PFN8FeHoHa7Fd43zOxThSb7283kZ/fmvDpqatljLcU7uUITnedfPwvwm30o2R3BMeAv97EOC\nty2yF1x6FXOWCN/oD/xgrndT0w0nuLYlNL+rYO13iDv4GSWOCStx8GaY3/yxYcOv7fDlX5S9Ar4e\nzhPe1P5df8mS6cL57aPotWsmuguJMGC8dxbZTT8v+az6W/pFT4ehJBFex5Iln2Xfp0CHLR5npojw\nUkqdzd/c4d9r2vNXRTdEhFcw2dcLTXHXdTi6rnZ6qV2plzT6u8q3PZ5EnPzGUctj8KnQql2xf/NB\naX3f90c++OMC814RXkq107/jOjy08HOI8GzVdnK0UrXMv0nf+AcHibB5pzzEZfrrbTv8nlq6v17O\nb3dOerPvjBZmWKTUycMNix/vrvK5zC2XImSR+WLpjg9BW1TL1ysNXva8/1EG7qKgGTv31eOuYt/Q\nYXbwFOF1HP0u3hf7KN7nN/w1lBW8in3b+cnU0o4ITxU8I1xE8Q4fhU7oz7/J8p/zhitsyxJhRltL\nCOOXOfy67As/7ir2zz/nSu7eEmFMkRGgoZLP8XJqOvma+NA3IcJTTXaRpqem1Sq15HMaEZ6tyGoe\nn5Va8jmHbZ8x3y02Py3GcPpB5fkNbLmkSTnNnWWmCNeOJk0ud9x8aWW71y7zMBLW4OWzYZZsl2pX\nGlhFhFVY1aH2LkaEtViyhNDEMgNribAuLwdAQ9+1ibA6k7NbD+1dnQhrZNp5K05RVEqB9yFCCBMh\nhImwOi5AuxsRQpgIIUyEFTIXvRcRQpgIIUyEdfFR1TckQggTIYSJsC7mojckQgjz3NErej73ycDa\nAhFezniB1WJrC0xHIUyEVZh/KDz3IcKkrvs3Oe3dlmPCgJfrJvMPhV/3HZ//5fiDPB0QtkCEp+q6\nbvnjY5Y2Of867TXFdPQkw4dM9P2nAicfRr3ks6l/fn4XaFLbIE9SONyyR2u/Pa2wYo7qhESbjIQH\nGo1+X9qYfyj8+Lc/P9/Ht+mQSDuMhG34+en+/LGlrkmEEGY6CmEihDDnCZv0XKpxoHgBImyPRZqL\nMR1t0s9Pt+S8BU2wOtowQ+I1GAkb8HXQMyo2zUhYvdnFaC9XZYyK7RJh3X4X+Lm0IU4pNkeEzVg4\n1vl4w+bYYO1YfJOEDttiYaYRa25T6vu+c2NhO0TYiJUjmw4bIsLL0mErRHhljgyb4NrR+nhW2s2I\nsDIeYn8/pqMQJkIIEyG
EOSasjIfY348I66O9mzEdhTARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEEPZ/v/UUGH19b2gAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJ9UlEQVR4nO3da7KbuAKFUdN1Z5TM\nfwTtMXF/kND4eWyQ2BKsVamuU6k+Drb5LPH0MI7jBcj5J70AcHYihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgI\nIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCPtfegGgsmH488M4RpfjJRFyaMPwX3vLn1ti\nOgphIoQwEUKYbUIObRzb3zFjJOQExvGmxsaIEMJECGEi5DRanZGKEMJECGEi5OCGFz+3Q4SbDE1u\nY9AXEUKYCNcbhmFs9SQMZuM4Nj5hESGEiZCzaHY8dAI3pzBvO0wpNrUdYatmJRuEvXj6TjWVopGQ\nI3v1WdnUqHjMCOfZf/z1JeWTwBpJ8SBzqrtt7vlJVZo0movWUPCeTCveoGCKXY6Ej7u5Xr120zEi\nwfTirr3pff7q3VvdUnBU7CnCryaZc3s67Mh0sdH8Xk0/fH57iu1vdCTFniK8fPO6LNsr26Gkd7ac\no07nYD99/Qu+L0bC51a8xPU6pJ43V95O7+TlYU4U37OyUTcRzr5qSYc9+vEK+OWOt0vP+U36OG1t\nSzzL83fbP5f3hJ5u8n34bvee36SPCGdFtry3dGgsbcSRPkw7i3Cdu/DWdTgMw5He+EbcHRs8pw62\nCYsMPncbhJ9sH745AeBylIlQXPPfmLSHDiKcbazxfYdfnQCwfWG4c+bXsqcIt3va4fzzloeC1Vrf\nJiy+oj9uEI7juO6fGMfxtJsxFNR6hDXUPr0bvtJ0hHfbbA0G0+qN1Ru1fK28brOmI+yCDr/itXp0\n3giLnu9b5GFOwWfWo3YjbH8uCkU0GmHtc1NUHWQwvNPWccLHS1TmCyCSi0Vpc4dnPlFmlo/w9/U6\n/XD9/fsutu4OiD/eJcVKtvR4nYSX6JKKcA7vcrn8++vXn59e3JqueIdVw7ZKfWtdhx19NP9op3Fm\nru6/5L5RNpt6ES5Hv/m/kwOtMw05xsn0+42E6/KbdDcvXepzqZt2sPvK7hfhNBiuTnF1h6929tRj\n718lf1/VXj+OX+ljJPzW3SW8u/27i39UhyXd3oD0UAVeWtg7+rn3g+En4e05p533/rHFivv/dqen\nCC/POvxw88CdKdr39Db4x85v0uX0+sMBbedJqYMTGy0HvVO9mJ2NhJM389L41iBbnPPYfZcRXm47\nbCG8E646lNJrhJfFjSqMeEdywh3Lj
V5F8aHVt4ep4GQrTk2tvKV76TtCjupUHYqwgH5PqaMFHW8T\nxh352OPTY3Y1D5yfec+WCD82n7n49y/e3L37gM5cSWUifGtZ1+szF7u+yOOlHT9ZTh64CN/6adW4\n++bDnjr88UpH35a0FxF+af7e9MvlcnuIsqcOn27yJQ3HuzbicyK89Wp8uP37M64v80H0tuo9gk4+\nufdRYpdgH4NhqZGwxP7SPl6xmhwn/Mk4frWSbfw67s5ML855nm8dIiyv6Q7nsWsY/vzZPgqt7dA3\nkE9sE1bRwU6aslcNffx8W7jkpTUiXCh6i8JSd6Y6AOG9J8JbscsRn6ymdcfS0vs27z50hPe5tqdM\n/WvrzlT1Dw8ebxjfgZGwro3jQwfblg/6WtoWiLC65c7SH1fQnUaSasOgAlcQ4U4+mZQ+/d8KDoan\nPjesYSKs7jGhyKT0ZlasxpaIcD9bppolx8M6k8ZG5qI97hkS4X42rhZbOmykkEoCB3iKEmFdZVeF\njR3OD1JqeYJ+nNJ3tGNZhIf1anwILU4BRz3AI8LObPxqqkrrZeVvIG/sAE9pIuzPCU8Qe/O83j/9\nLgbD1peva42MD89+t/Cx+nrP9P0BnstnT7/xDo2EPblbmVavWGUvY9pnFW/kAE8NIuzS9lWqbIc7\nCB7gqc2V9bUUf8uLP2D7N6aocYCn1KMVJMJT295hs8NLR0TYn9Ljw6YlKbUY+2hzMBRhFU+/RbjI\nA7ZgGP7co2n6fshKq3Wlp9xghyKsq8G3fIv5/mzLr2ft7jm2tsAirOLuQt6Cb3l2SHx1E9bWVutX\n2lxIEdZSsMMdwlsu3fRzk6trGU3N7S8irKreeFjDxqVrf8Bvrb2ZCPfTeIePhyumLcBvHqHpJ9gs\nEdZ1t16uWE3vztXe87P8y6/hmH6l0Q6X+6tbGw9FWN32DndT5Bya7U/w1TUiRyXCPWzpcOeP7SL/\n2uoOH78ipuXPrFKaG5oP7G4i9O28qMF51BufL+0nd4jZ+NyDU/pPNLdAx7Zu3er0gvGyXwGwJZ6W\nNwgvLmXKWn3BeBc23gHg7tdbvhZpIxHuauMdYrpzd6S0yKN9+zjtpyvCvZ3tDjFbnlTZ0a/ZGhtd\nrMPrdDMvYsvGYeO7ZCYOUcQsL0TgjY4OtK4jQjqwusP2h8GLCCNaXiGatbrDxxMAWmPHDL06zAEe\nEdKNox7gEWFET6tIUw55gMfGyd76uuVumw52gMdISH+O0d7M3tFdGQZ5JEIIE+GuDIM8EiGEiRDC\nRAhhIoQwEVb3eId5WHKwvry5tHlfaLHDg48PTf9EWMDfs6jmWxLd/w9lviB++RCO+h+ICNc72BmM\npIhwjSm/r9pbeYd5I94J2DGzxrrbwyx/wx4aZiKsbtne/POPA+P1OtwPg9PvDA9/T+dEGDOOl+v1\neYjX6/Dr13izG2ZKdvqyMgUei4t6m2fcOzojYdOuVwUenwjb9WdSytGZjkKYkRDCHKxvyLyz1Cz0\nVETYCluAp2U62pDrdXh15JADs2Mm5tXQZ0g8GyNhwjAojZmRcHe3Z8DMNd7tlVHpeYhwd4sI35em\nw5MQYc4HJ4VOw6MUj02EId+clu2bfY/NjpmQ767K//SroemRCPugwwNzxkx97lPIW0bCyqZtv+nP\ntqHMYHhUIuyJ3TOHJEIIEyGE2TFT2XJT0GSSZ0RYn/Z4y3QUwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQ
pgIIUyEECZCCBMhhIkQwkQIYf8HN5YVPNz6knQAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJCklEQVR4nO3da3LbNgCFUarTHWX/\nS/Ca1B+0WUYPmi/hAsQ5k5mmqWOpIj8CfPp2v98HIOef9BuA3okQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhHUX4e12S78F+Et3EUJt/k2/gUcvR6r7/X7WNz/r\nW8FZkhGu7008XFgswvJdKZk6hfcJlw+TTP/1fr87oMJVZSJcOSjN29MhV5UcCUvOD81FqVYgwk09\nGAy5vNhIuD7F4+0ZBqlZ6Qj39TB1aDDketq7YmZrh4ZBKlf0POHUw44wxvZ+/VvGSZpT3WVrC6YO\nFwbD50plWZuPXpnYosBJglPmh5u+iRlpiisT12hpJJz0vMAasuNcVJ+LtdyBmfkRzmIvOnvpwq/J\nZt0upvaOju49yTH0uYBTuh3WdigaYfYsn1Wifn1uK0uPhM629+DIMNjhtrKx6ahJDtcTiNBgeG02\nlFvVeIriOdETTzBCbTIRzk8KvUvuQ263//c6xt/P/4SDIhvKn1Wo1U10bJ9wftrwwcuvP3HpmgvX\nb76Mxt8//8lkXDEaLXBoZTp6rufRb3zBdpdiJc4dBlfOUC4wkUlG+O4ITeKSmsIveAWpbeWbL274\nqrfwSJj61Po8KbxbJddhz3fmh1eT0kY7rHE6WoYON4ms3MvLaHxHF1iIjZ2sP+hhejP+vsFNZ1G7\nH8x1ik1Lp9FT0H1FSAFnPJjrl23lu983qqMIL3AYrbyDD+ba9YqHFlOLg2FHEdKJ5jrs5cBMo8fN\nso7dDLHnWGWB2UolZ8XmrhXh359vSxvDK9rV4W0YTuhh06PAhvQ2+loR/v05Tv9iGNzhlA9tU4en\nL6Y1D8isYcVoOcJpU7f4OY5bxEo+biYf3W1b+Wi/h4doptaQZiN8vhtieD0dfXevBgtOXCPruThx\nWbDDZiN86c10dGj5mqZKHLnGvfDHvukJtzWsGNeKcFENH3cTpgn8zx+0faPQr+KT0o4iHDrucPlW\n5ofpfNOfz7kPei+j2ZP143o0/mrks876dad4/CAfrpnuZFd6fot5+cMHzUY4DN+rzK6f7vShd7TT\ny7V+3L6c5/mOhGkjNn1Bz4Id9jUdHYUnpWvW+iLD+8PteQ+vPN8atNJni3PRoe2R8IDYeDjN+Z6n\nfWcPfc9+vYVy3ts0yWhnZT4qNRj2OBIuy5zUejkqfeZ13r3C/Hb1FsM7ZRh0nrCoTSeR25rePFh5\ne97L/79m/6cPKby4+41wVGla04B1+O01tEd30Px0X/q9bNNvhO+2di8fRnzCsZznMwDz3a/Zi734\nw6GnmNLKz3r6jXC0/vnfhzqcH3Pce+nXjr/bW7mNXozRaYTTotq0wMLLeE+H59ye15CDyyiyfDs9\nRVHU8WFwsuX
QeYtjQp96HAkLP7WhzGBU3WVAObsHQ7cyNWP33eKn1NjK7Xk1K/yDwH7VXYTln9pw\n4uvO38BZ3+qS5gtoZXLurL+O50V+9s8qsqe3SkOnDfuK8JQ1eH4D6PN/rW0ry7Pa5vN9RXhc9jp9\nMW/Syv5zRxEG+7nf772dN69Ebb291NF5wuM3qhzJ+NfbiNa/OhfTUYRxxzvkkvqK8MhgeNLtavv+\nnmHwyjraJxzFn29XpyYOYFxVp2vh1mHtMgdFG/oxKf3obiREV7Xpd1O3fnCLDIM/I9b3HuxJ3/Pt\nlVzPWnxsWaP6HQmDO4fLz8P+eXvf/xwSPzNs/pU6/LS+jo4+mB8sXf6y4fxLQM/6TutfUUuV6nck\nXPbp2/OeR7+FBztlhyOD4af1HmE9lxcuv2CLT21gpd4jjI4wzVxAYzD8qK73CeM2rdW7L3k9RfbV\nr63rCFNb95VPvz6LQaxyXUfY3OMAU8NR8Gf39aDfCBu9wW9rCYbB+vV+YOZKluPcXWOjP/SvIf1G\n2O7qtPW0irtGKtdvhE3b19KRJzUK+HP63SfshwMqlRNhFw4+X8cw+FEi7M76DrVXxsUjvN3+vzRs\nvuJ1ODubt7dpPJTip13nwMzTWjX++MGHr2n4oGhh2ivmOhGuWWN2/bjb65gfHV15pFSKBVx8OsqD\nh0np8lcWeUdcaCRcac8NRNNfuPR6Wcl9lR3qcSQc16uvr3UtjvPX8dcljueMg+Gz+yvpN9uFjkbC\npxuI3v+QloVHTVyCuqrS40g4ebkqfn39DH1QhGNf7z0/jbDnQ6t8TNcj4YLv8XD4OZIz/lIgH2Ak\nhLCODswsmI6U/vljk0RpIhy+vm7aI6jTfcKHk4RfX7e1pw3hbP3tE74/B2hIJKK/kfDnHKChj0r0\nNxKOZuOhozJk9RrheyalFNbfdPQ3f/7czVQpqYORcNeNSG5mpZirj4R7b0TymECKuXqEUD0RvmUw\npAwRLrFbSAFXv3Z0viuoKKp09QgH7VE701EIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAgh7D9L1k4QuD8q/wAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKp0lEQVR4nO3dXbaiSBqGUenVM8qa\n/wxyTNSFnbYp6kGEeL8g9r46K+tUpgKPEfwI0zzPFyDnP+kXAKMTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoSw/6ZfwLimabr+MM9z9pWQJcKMaZpu7d1qvAhySJO13t59gcv/dPvZqhmECFt7U+DyN28/\nW00nJsKm1he4/B9vP1tlJyPCdp4WeK3ro7UgyJMRYSPvx8DNXTnEegIibOGjWeiGIDfPcqnAyjvc\nN4WsD1KH/XKesLT7ruwKnpUIj7XjALUMUornYA7zt9tos8diaTxFNCPtlJHwb7eN+G7uty1ISbDS\nwBG+H/Tu//DPb04OV3KAUbeVafpr0Nv1/EGwQPH3aOCRcJOnhysf/jCagQL7I8I/Pj8k8/BdpHme\n0wXSJRH+8fSQzGVVk9fwFMg2A283K4e+dUdl6hT4yR4uJQw8Eq7cVP/82uyaFY4xcISf+/GoDGwg\nwo0clWEvIvxWtfDm2W5hZ9x3FMJECGEi3M11tzD9Ki6Xfb4BQjsihDARQpgI92UiyMdEuKfr6QH4\niAghTIQndD8aG5nrGzrCI84oFJmRVngNrDR0hL17U1qRzwLWGDfC3q+3doHoaYwb4XEajEJrCjQY\n9mLQCHsfBp9aJne6t3hOg0Z4tEO3/qfD4P0f3v9XHdY3YoRdD4M/Fkh3RoywXwo8peEi7HcYnKZJ\ngadUOsIiX8+r4MXz7hV4BqUjvH5NdscUOx0GFXhupSO8XC7zPNf5xnrEiwKfTE3pVPUIr3YZElsO\ng8tLqLe99tcFSvA8+ojw0uGQuOaVTtMPv6bAEXR239Fbh59uiO233eX9P6+9Lc+kf/1QYPrWWYSX\nnh+B9Or1Pn0o8NN31+O75kf9RXj10ZCY2nY3XEJ9e/zM8uEzCjyrXiO8dLJpbv4qw/LhM5XfJt8o\nvQWv9H4bLV7pSud4FzzVzdHRN7o7cLrB6d/gyM4Q4dXTc4kGEOo7T4SXMYZEzqfjAzOv3HdoGKS+\nM8/Wzpei2fUpnXAkvLrfXh8mqLZjSjlnhA8jxkN1f5qcLy4To4ATTm8+mrPdxsheFoMZ6fmcbST8\ndBu9/a6rqEk5VYTfjBLvv+4AxzlPhDvO0+RHSyc5WX/cntL778i3vy7A1Qjnc4YIjz5WsXKblwbb\nnGc6epxX35GHXXQfYeSQ/cMx1cZNXmekTlScRt/T0Wbb4vvv5s6zYznVVX6EeMcRNh4NZNadh5vZ\nVWvvptfpaMsx8M2fpB5CZkb6ypuLLpb79kV0GaHtjwfdXX54r78IgwWW/Sgd1jRNH12IX/MR4p1F\n2L5A1VWzvBnkRwp22FOEZqH3Rtst/PQr2g8XAD+98XmRhdfNWgxucMsz9UWW2SD3I/30ba6//r7I\nquxpJOTe7VPpy+lZF7aNfiv+2hIddhPhaLOv9+4XxfJe3cs/P7ET3CehmwjriH98vvkwGiTIHd9a\nfG1eRLjGs/X0vyPj7a2fDpwyyCN2g+MdinClWHV/vYitE/LTBHnQC8522FOEg+8W7vXenwY57FKt\noOMLuFt6+D57+6+3T9MhnVwfHLD7X9uj4El8EXbg6JmSW2ZcpT6OOovQ5sL5dBbhgOIH0DmaCNeK\n7BY2K9AUI0iEdRkDB9FfhMHP7OWBxOWzgXf95w76i6mlp/OEp
Zzv0WuDn4a9uZ+AXH8+ekoiwi3W\nP3rtYkDrUOMdARF+7MfhYtHkw3894kWxp1e3ez5o3XUZYXDitOHfffj1yrckMiN9xXS0il02UFt4\nF1pexdbf0dGUg4aI9099IqjZx2WvETY+UXHoJE11FTzdTbj+fHSNvUbY0tG7ScuZz8P921sqfunM\nQSdmfam3tOxTn3jwcG+rcxxDEmEJBe9IW9nDZRJfphi/PLDj6WiDiVPLYbDOZ3rxGem965eSpz82\n/A3xAi8d3fz3qUOnJSOfMSv13tcPdxvu0l3hXXY8HX119eYuW0+prXBY9/mtCWzfaWozvW5qbyL5\nPkgFXgoshFcvYK+BMf4Gb6q8jo+sX3wbgqyzbrKiD//4ObOPZp7Lv7DUWu5vOvrR4vv0fpul1s2Y\nVq6C5Z7ImmlqTZ1tc8s1tO3YzNMgFfig8QL5ckfuo2lqqRVd69W8937Zbd4VPNmZ3x21PLyxVxg/\nrs1qBV46ivCjZWdX8HvLR68dtHwOSv3py665liu+pqVvlt36IGuuoYini+KIGhss84fzHAVXcQcH\nZr5ccKd5FkrcR8dCftRsrns/ntdc6RU/GO4d99FV/LB10IYzQJ8uN4v6XumR8NBVZSN4atsZoPU1\nVh6RUup+IEU+LAf/hP7+7fdykUopRUdCa6u9ne6g83xsNAC+UTFCBZ5Ap9dSR5T7PmG2wI6+Srev\n4xa7/H5UK0JjYITFnlUoQptCxO6LfcypxDeqRFinwKFmpHUW+8iqRLg0TgkMrkqEQ40/RRgGi6gS\nIY0psI66EQbHxtMPywospW6EHESB1RSK8PTjDzxV8bK1CuYaj8tc/6nkBgL9Kh1hkRJSNt/R441h\nF2ZltSIcvLpvlB0JrdAfFdon5J5tdxwi/L+HGd0pDxSd8k31rnqEzTaaUiPP0ff10GEp5SKMbCKv\n7vAXybLBv6vDUspF2F6pAhnQ6BFWK7DZP20wrKODCI/bXKoV2Ng8z79/6zCvYoRtPqQLFjhO/9yr\nGGEDCrz69ctgmDdohEvDjkI6jCsa4cOMdN8J6tMnjWYLzL6AfTsc87PsG0UjPE7BAhlcNxHu9RjX\ngr1VeFUHTUqdBVkjv/pfOeJ+mNUOxtR5DfvyBPKPlF79O26dCjyaB7BuVuv7hEu7PE5Egce5TmL/\n+Ud429WN8H4D/eZTtmyBXbvtQP76NV8uF8vyG0W3xTeRfD/tKVJgkZex3v2Rm2t77KLuSPjKw1Pm\nn/75G99s+sPex+Wan/AOUvHDeFsna4JsM/gM2yrblItwl06eBtnd9C/oYZePQ9XaLo/oxDmrT/3+\nPWmvpUL7hNPvQz4RtLeBncCWqoyE0+9ptsqLMSS2UeLaUQUysvx0VIHVOCrTWPp7dApkeMnpqALh\nEh8JgRIHZmBkIoQwEUKYCCEsEOHkLpdwp8XJ+lt1TkjA0uER3p8MdGIQlppORxUISw7MQFjTCB2S\ngaXD9wnnX7MDM/CGa0chzD4hhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBD2L1tokE4AvHVjAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJUklEQVR4nO3dW5KbvAJGUTh1ZpT5\nD6HHxP9A2sHgC8agT4K1nlJJyu02bEsIjPthGDog53/pJwBXJ0IIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIqQ6fd+nn0JRIqQufd8Pw5B+FkWJkIpcsMBOhNTj\nmgV2IqQSly2wEyFVuHCBXdf9P/0EuLZxIfTCBXYiJKnvL57fyHSUEAX+EiEhCvwlQggTIYRZmKGU\n2xWhJqL3REgR02UYSzL3TEchTIQQJkIIc0xIEcNgYeYZIyGlDIP8HhIhhIkQwkQIYSKkrOkKDV3X\niRDiRAhhIqQ0k9EZEUKYCCFMhJQ2DMPVvm3iNRFCmAghrK5PUdxmKVe+H/PpXfmO9w9VFOF020yP\nGWyw0xg3qw06U8t70ot3x9lBfCVPmE8ZAJ+p4nX5aPMYJFukwBfyL81s82wLMv5b8Iwp6FvhY8Jv\nCux+N62TTtUyAK6RPEXxZYE3Tv7WqViBrW/9Ws4Tbthgrb/051Z4DGx6Z4hNR2cnJExaTqPwQeBt\n52l3L8qMhLsUaBZaoXFrRmJod38IRHjEO1a7G+BM2h2LskpHuNdiDHSL/afR9+KiESqQo7XYYbkI\njyhw+oq3+OrzjWe7UHN7Qn5hhtOoZ++v55msUS7C2ahV7OdyTQ11WMvJ+s3MSK/pTJOp5iOkMhWF\n0cqbsgg5syY6LBphE68I9Vs/Fx33t77va97xKrq9xWZj2+NWmf6Z8save0m9/MvSZmfFln9ZgzNE\nuJTtcLoX3v784o24sl2idtMrtmf/9HqjL+9gVEmNgZ310EIebqHCv+P4k2/trfzhD9Nt0dFPfseE\nKvnU/6lGwmne2XnItllZ0+3dHF3gjhtx9n6dqvE8Eb7YPNXOQ2ayB1T1O2gOFd89AhEesXay8gFL\n3tfUN9LuqMy8cbZ7FEvxDCPhhqSXk9Vj3mI/7lC6S+WX2Qr/uPDJ+v7XN4/w5Ut26Cu+4bFNR6eu\ncMIpPBJ+OT/cawvVcHbxHHvavmu88Y1SRibC5U7/cDFz+U9TdW6h05xp2Gyv37rO7XuE5Ej47GBs\n9jcP16xOuYVuu2/Ty6TLJz89cbrSKbfvM7EI109El39Z7RYyDD40u2bo9ctSyQn0kvKro88mooVn\noZUcFrab7vSZP1zjnb49dd3j0uKbICIf4dTbIKvfSP34gbo9WuqP/mzejmPO8vd9ca5lvNK+Wxxo\nVL9xj1JXhFNNXP8+tdM+dHh7f3/M4iLbbU9+drw3fYy3j5c6OV6beiOcKrN5dp2RftvSodPjhxfZ\nbqhxr8nzZfMbtREhe3kx5nw6LrV7+FobEe5jr++3KTYAdu9OEb0YGC8+e9ydCO9sa+CgWyfsG+Ty\noV7H9myaetnlk+OIcKPl4u30Fhv7/qBdPr268nNeL66dMAAexLva3Oud/vXqxa6L/v8e6usFzA+2\n8ttf0A6zOyPhKisz2GXCtiz5uwXMz55M/BOuF+SN7YFxx/3mg7/bO1y35rhmyN1rWJ49jsFwd17Q\nud1GgE2XLX/0/18vYJZZYuV7pqN/TXfoffaz6WXLbx9tQ7EWMM/i6lvrd++dvw4778evh7j9TnsX\nWMB0jn53VxwJp2f1bhdvHfsjixTY/Q7jk8e++ptsE64V4aZJ3
8HqejYEtPqtTNPR7Pbnvr/78/I/\nDIN9/lsvPqPENo1F+Hrzj4097PBTw0FfIDW+Txy5Fx/1zDlMw9PR5Z62/GxbXasIxe9+UcPtAnir\nmQiX49v97RL+/U1d4cE7zUxHb4dzHx3XfXMAY173jPe4fTUT4Rrj0dbyZidQs2amo6PX9zJ5doOT\nWjqcjsu1PCfyTjUS7m7/Gek4mT64QBPptogQwkR4cruPig+vguAbIuRj2tuXCN+4+PHVw1/dlWv7\nEiFPueyhDBG+1+JlX9MBfMPzX55xnQ19BsMdiZC5vu9np1EeDokNvjVVSoRntuFodnGX7nmBNV4F\n0bjGrphhjc33LF0WKLMCfM7lbJ59dultjctvqrBvlOGFPpU15ay8Z6kdoxjT0ZPof9beZ23Nly4p\nsCQv9xn0P/3wZ+N2dM/SOK94874p8O5xfOlSiAj5xzAY4TwhhImQfy5+tXqKCFs1LodyAk5RNONW\n3S7LMM85JixNhG2YLoHutRxKJUxH26PAkxEhd3xQsDwRtseSzMk4JmzD8GcotTBDaa6QYM7HCAsz\nHWVOgYWJEMJECGEWZs7C9z01S4SnUPyLuNmR6Sh3fN9LeSJkTnuFiZC55ZVr4y2AOYhjwlN49kXc\nOx0fjo9h6ecgIjyLh2WMca669/b8/vbLoW+69NN17gq1G9PRM5oGNAw/7y74/vT7XoahG4ZhvBfG\naPMzpXPt6Gmtnoj+/PR/vr4i3O0Sv2EkPKkVA2C3U4Fd93dg/P5xrslIeF17FciXRAhhpqMQJkII\nc57wQm5LNQ4FqyLCq7AMUy3T0Qv5+enXnLegMKujl2NIrI2R8OReD30GxhoYCc9sOug9W5UxMMaJ\n8JzG5F7UddeeO2JEifCEVg5uOqyECM/mg++dF14dLMycygcFdr6BqRZO1p/Exk/0GQkrIMIz+GwA\npDKmo2egwKaJEMJMR5lwV8MEEfLLF1qEmI5CmAghTIQQ5piQX8++0IKDiZAJ7SWYjkKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ\nJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoSw/wAbXBJhsylmLQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJL0lEQVR4nO3d3ZaaWreGUd3tu/9b\ndh+4Qir+UIjAO8ak97YOspK0FFXyOCageL3dbhcg5//SGwBnJ0IIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDC\nRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIex/6Q1gINfr31/f\nbrntaEaEbOR6/Se8h//lPctRtvCc3O32z2DkPRFCmAghTIQQJkIIEyFbeD4N4+zoYi5RsJGHDhW4\nmAjZjvBWsRyFMBFCmAjZmdfN/EaEbEdvq4gQwkQIYSJkZ95O8RsRsh29rSJCCBMhhIkQwkQIYSKE\nMBGyP2dNZ4mQTentcyKEMBFyBMNxhgjZmN4+JUIIEyGEiZAj3G63q7Omb4iQjentUyKEMPcdHcqf\nCfR3EN3cC7Q8EQ7int+f4m4/fv+qw+JEOAih9eWYcHfPJymm35n5o/uv7/99w2mS+kR4hBUV3D/U\n6P7fGBF5OnhHhEdYEdKGy8vj9369fUSE1T1/zp/dezAiPMi7YTgd+L08/Nvqkzazo8lUnOfs6HHu\nHT5ENTPlZmbgvxckSrtfI7l36GLJSyIs6uUMvP/OuvF4L+GwDO5faPqK0/9epPhEhIca5lTnEs/N\nS/ElER5teYfvPgF+9d575DCc+UJSfOA1TedyTITLv4oULyI8ob07XPHvnzxFy1G2tK7wPwvUHud7\nN+c64UHqnI/Z75rhlzP2frRc5wd1GJOQbWyyyp2uwVyaXAXdhEl4RpsPw22PM6eXrZ9kKpqERxj7\naGenMz3nmYom4UltNQz3Ptd6hqloErLSkdcVxp6KJuF5fTMM7wPw4Ct7I73F+ScR8rHszaNMQj5W\n+azMimFY6vZtq+/fU4oI+UCpAu/KprWcCM9u+TAsWOBliHeHiZBFahY4Bpco9lX5gHDy6/sMixf4\n8r4hlz4rVRHyi+IF3n16/55SLEe5XN4fGbYosDsR7q3q0+8CvQrse4ZGhPznYRj2KvCu2/b+R4S8\n0KXAd3eFnP+janr8rJvqsiv/dPLbvUSYhDxS4MFEyF9NPzSi51b/JUL+M707qWmKfRW7WP/uptPs\nrOPh6zAqRfjwkocWr/gawkOBB390zJcG2E3KLEdfvujIumh/jXobVZkIh9Ni5363kY4Mj9QwQjvH\nRlo8TZxBwwiHvwPeIX4tsMUwHOCA8NIywsu4t906ihlYSpkIn6P69Vmu6ki8Xq/TB7UXHCbLC2wx\nDAdQ6RLFQ4dLdpRvPsR9Bw8vvCz4kbRmYEGjPCTpmzP/WlqFFNcVWLnbMk+/X6k0CWcsWZou+Ws7\nWFhXfCpWbmmdMQq8dJqEC2fdgSNxdVHHp/hlgTUDHibCJpPwsnjWHTISv6zo4KlYM6EtXC+XEb6v\nhg/Pslm30y6++T+7d4pbFViw5IKbtE6fSThZNuumabPV47RTLbtOxWF207F1fpCOGolN142bF1gt\n6Wrbs1rDSTjZfyQefAZlujj+/VccZgd9Z6RvcIjvZIeRmL2s9+VX328HrbPr19mS73WehJNNR2KF\nq+rfHCiOtHeexFAP2MK99t1fq5Dfs08H+N7bXyTyIpuxiSEm4R8LZ93zX6uZ393yqTjSfnkqYz5s\ny6OqnN+zma09ssAKtVfYhq0MNQknS0Zir/zu3k3FkfbIExr8wXtZWsf8nk3fRaTAbPaDPemMOQkn\n70biAA/h5i8JIqXMO+v3VPZN7t9LFehN9xsafBJOzI0BjJr9W
SK8U+CG9jscfRfbqOeizhUh1bzs\nbUldve7VP0+ErLe8hCXD7bROF+EwT581HRnbMMPwdBGyrYfTpAcnMUaHImQDwQwG6PAU1wnZT4UA\nul+0NAmpLrjcPYYIWW/zm+K8/P3hr1iIkIDVlwdn9O3wdBH2e4g6O/jyYNMOTxchW3m3u2cP4Tp2\nKEI21iuAClyiYI3K06bdFQsRMqBeHYqQMTXqUIR8rPJa9KcuHZ4swmE+V5KBnCxCTqbFMBQhn+my\nFp3U71CEjK94hyLkA+3G4KRyhyKEMBFyFmWHoQhZqu9adFKzw5NF2Hwf4nsFOzxZhKw1wBicVOtQ\nhBAmQs6o1DAUIb9rvRaducVGkQ5FCGEi5Bfdx+C7ja/zfZ3jHjM/Vx01fu4wOUGED+8h9JbC02gx\nBi/jL0efk7vdLjUOx1sotbOOavQIoTwRMqYua9GLCJlRbWcd1aAROurbTZEL3KsVfGYZLsLr9Z+T\nMc+nYZwdXebdzlrnhSYzCpY2Y6BLFPc94/lH/9Bhn8emrI4fulLZEBG+y29id/lc39IanZK5a74c\nnRaf9X6yA5hZebZYlHbRdhL+Ov3YWbtRWXZrG05C0+9A8xOv4DwsW9qMNhFer9er/BLaddhOg+Xo\n/TFu9/Q2knYrz2eVt7/0JLxPv9vtVvbHdx4tTtJULm1G0Ulo+vVSfFRW3rZLwUlo+pVV/OCweGkz\nCkUov/qKd9hUieWoxWcjxVeez+pvbYlJaPr1UvAkTf3SZpSIkJGUWpS2iFOErOHgcEMiZKU6pbUY\ndzNEyHoFDw5/6hKnCPlKvMMupc0QITsKzsNGcYqQb2VP0nQpbYYI2UCFI8C+RMg24geHPzVai15E\nyIZKddiICDnIYaOp1xi8FHkBN8MIvry776Rt9pxBCwd0+Jxc3z1ZhOxi2w5HSu6ZCNnLug5frirH\n3ksdE5I09ohbyCRkRw/DUHIviZB9/QzPzvaSCCHMxXoIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhP0/XcA1PkLz\n22oAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJA0lEQVR4nO3d23ajxhqFUbRH3v+V\ntS9Iq4kOCBCw6i/mHH3Rcdw2wvVRnIxu9/t9AHL+l14AuDoRQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYT9k14AyrvdbvOfcL/fz1mSokTIdmN+XxubqVSfwzDcrAXWekT1++BZ\nmHHfRMgKBzVz8RRFyHc7Tn1fv8sFB6QI+WhJe7uXc8EURcgbX0s4em68VIoi5D8W5nfOsLlIiiLk\nr9vt43g457Bw5lt3PFBFyL/eFhhs7+2SxBfjCCJkGN4V2Oagn5mr6+rwJbFWrZFda2mXcAP31ZUb\n0/f7/evdqrWI8NLKFdglEV5X3QI7mwxFeFF1Cxz11KEIr6h6gaNuOhTh5fRR4KiPDkV4LT0V2A0R\nXkiXBXYwGYrwKroscFS9QxHSg9IdivASOp4GH+p2KEIIEyH9KDoZirB/V9gXfajYYSzCcmuqrqv0\nV1YswopbrJJut+Ey0+Co3NCyOwphyQjLbbHqud40WJGZEMLCEZoMD3ThabDWuMrPhLusr0JrHJ6U\nf3/CNp/M1wTrpIgmIhwnw7UhyY8+5HdHR2t3SsdoFcgnhQ4Lm5gJVzEB0plWZsJh2abLBEh/ysyE\nJsClHhuyy6+rbecaztdWhG/XmvxWmF4bvPB1wlrainB46bDElowS/r7N26fPCI205iJ8MAGyi3be\nYvGTFiOssitP+zaOpXOPq5uL8DEB6nCL+92JmV+dflzdVoTC28HYodU48Xrpq6nDwlYifD0CNBmy\nl8ZHURMX6z9dgi905xFsFp5qlpwCNR9uYY90GIbNg+c6J2bURaPOHZaZ3dHb7ba8QDul9C0Q4Yab\nsHW4lpVVyKkRrpoAn+iQXp13TOgIEN46byb8vUCTIatU2e43cZ1wOR3Sn2IRDjqkO/UihM60cu/o\nKk+TYYn9fvikZISj6W/f//3gh089Y4Fa4vb3QkpG+DS8lg41v2h3JYW2QZc5JhxvaB7/XOO8jjNY\nVdSL8OkxUNmFoU21BkbJ3VEWutSR4eu5uiqvvXCEVVYxh5p5mFqVbVCxCLev08scCj6pMhBXWX6B\nqsTLLxbhw5Y1O32E1pWUGIhfLQ/v9eR54y+/UoS7rcq9n0fW+M+4urWPgX6trvEO650dZYPqlyvW\n9vP6elteAyUjbGqr1tTC9GfH3wJvtsMyEe481q93nqbZIbjc2uWv0mGZCGGDEh3Wi3CvKXGXn0Ot\nfdEGx99yHe+X1oiw1lhnL3v93BvvsEaED2r8UVOD70wtd1ggwt3Dm7772o9fp9wWYVzmRgbfyZrt\nsNLF+uHnX6K/8rv/Tl/7Y/C1vCqOeNf0Nq/aF5gJ99pcbXjy99evtsuXOsHrax//s4V54GTTV93I\nD7FAhMOfFffLiPm0uq8wCmeG2v1+v8AKeNZCeFNldkc3/4bYOMie/tV0Z2zmd2Ei9t1R/LrG2n9j\n30bmq+OUiXA0brlXPVPm6ZPfviXw9H8NCwI4aFi8bhp++S7Lv0KDHXYf3lSxCIfFI+bt58z/aDfU\nuKO3T6/avCRrB3GDHR6tnc7rRThMbvycWYcvE+D4waXviPjnX
51U46cB8bQkCxdj2/C6YIeNKBnh\n8KexhYNm89h6W+O+W9CFW4fl+6i/LF6DBbYzXx2naoSjrxvvrxPm4m90yPPd1m4d5vdR27/0t9AV\nwpvq+dUetHO11z7q74v3dCKnjx/lOZuSplZX7Zlw3kEFbjtOe7XX/HzEqJ1uIMa/H3q4WPdphbvo\nOcKjTRoY7wWLLcYRtxwcfZJm/sFNbd5fdpBOInzdch/zXd4Mi8nEOP3gIQtwptfZb5cD7OU788d1\n2FrenUQ4HL/l/mr38brmW58xb7y77vr+4y+ftvF97C4yH/YTYVOXuRpZjB/dvz2I53GhaBiG8VkF\nb08j/1LRLh02ft64nwiPtm0ozJ/hePz95Jlzua8dDn8X+/km+70G/bYOj1iSg3QV4ZIRc76v8/M0\nxc1HtvuOsum3Hv++cGEOuqC6vMOvx5wN7t92FeFwWIc/3oby9gzH9BP4aqbD5ZNem7+51kmEr1vu\nlk2nu4enXdOhgVNNrXntcOF9fNOvMLQ3GXYSYeO+zs/T3lbdFns1Tx3O7HNO/8n8F4kT4Xe7/MBm\nOhw//uu5mabODh9pyX5p8CLkBiI81vwZjplzHm0Mj3XuH/6+/zeaJNTBRUgRUtJ9jwfGNdJhjQc9\nBbXwQ+Kt+/2+x2FC/pFzIuyIjcUm8Q5FOMc0uNE1ThHtRYQQngxFOMc0eB3BDkUI/0p16JinI48B\n5KabH5x/IsBM2IsxufFP+px7aefPhyLs2u0myA1O7tAdM12zO7rVmXukZkIIMxP2YnoomJoA4wtQ\nk7Oj7OScx072yO4ohIkQwkQIYU7MsJMWzgzVJEL2o71N7I5CmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQtj/AXNaOMhtCxnCAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJ80lEQVR4nO3dXZaiShqGUezVM8qa\n/wgqx2RfWGVbiCQi8H5B7L364uTps5QMeAx+zcv1eh2AnP+kFwB6J0IIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE/feA9/j1/f344++vrwPeFFpxRISD8FpzuQzDMFyv///x\n/s9szu4ohB00E973SE2JrbheTYAHsTsKYXZHeek2GbI3EUJYLMLRdQtqMhke4HLNHXr/+v52rFjT\n4ymZ0eWKmf+SdZIRDjo8BR1+KHxM+Pvry35pW573Tu2yfih/YkaHbZHc5vIRDjpsnzI/USLCQYdN\nmUxOh6tViXDQYVN0uKFCEQ46rGoyrdcdCvE94UsUky6X3ZfqvqEU/PVrmrwO8eJfVtyoKis6Xnus\nyMdP6PuL22KW0+FO6g7WJityMrw93qgHry7K6/BDdUdq9VpcEt5W79Wb1x1ODKBRXeig5wlXuF6v\ny9fiivBWv1fPPOm7h+pb3kwbH4b31nvxaPn+pyFdooExelyRm4c3817MkNyG2hi1ba8ozL+aLWmJ\nV6Nk9Faoe0w48smqHV0+nn8px4dLGKUNNTCOK1b2W9Vt9aYdslO6iQbGa+FKdZ4mQoefa2Z39Nnn\n0908e1yrGbq3VI/weV267bMUvX2u+vDFV3B8Adpl6Baq9ShTQbdP+vRSNMnQLVQ6wiIfpTam1Qzd\nEqUjrMPGxH5EuJQO1zFuP6obYZF90Ue2p3WM27y6EdZke1rHuM0Q4dtsT2xLhGvocAWD9krRCAse\nEI7YpFYwaJOKRliNTWcrOnwmwpXqz9W0omKE1bbvasvTOpPhSMUIOT0dPhLhGubGz+nwrlyE1bbv\nasvD+ZSLkH6YDG+qP1lfkLlxQ/s9mL/3V9RuqFaEtu8Ofd7h5HT6+ILFt6taEVZTfOX15tW+a+vr\nSITvkeUenifDHye3D1+/lEIR+gbLno1O0nT1BZbVz44GT6CVXWcndv1rpxeveTK2UISvxqjO2Mly\nPxuO7eXBJi+4t0K7o8PrfYbK+xKkrDhPU3NDKrdAQ40/u+UA9UjzA7v5SdFq67HWTHhjPuxcb39l\npGKEQ8kOS622M3ke2N7GudCJmZH652kKau6cREqpraj6p3vk+LCJA8LW79W6yS5kkSEqujt6V2T3\nPbgMq09LFBm6yooMUfUIhxcjVWT4trXtvVrDSUfpfBqIcDi2w4O32oaeuNlchQ+ICp9TdU/MLHHM\n4fXeK6nPe7XqiA9RMxEedrL04GnwgLeLb2ST4vPPo+wQNRPh4KLFBwxRZS1FOCQ63O8D++CpoFSH\npabBm+D4NBbhcGCH57vkfb1ez/ULbSzVYbkPpIV2uog/ea7yBOdgH953iK/wgtPgo+MXr41LFM9m\nbi5996V+vEhQ4Sz2Vq7XEh3yqO1ta3UbK67ObfvUaXbYsx3Gf/0fHX2tuPhw/Gj5eH1+WXyrdVNh\nK0x1WOF3f/a8VEcuZ6u7o3fz+4rb3o9ypv1S6mg+wuGpjeI3ghXJOHJwWOR3H8lOg8M5Ihz+Pbm8\n9y1mNbekFZykKaK964Qz9rsDc/Quq68mVQv4yGWp9rvfxKfB4TQR9nz3SaMM4N1JIjzeig5rTgXH\nePUkWmp5boqskZMcE0ac6fhwJ/HM3hVZoSL8yPIOe8j1Obn57
7zJforVWSNniDA7mieYD2/t3H+D\nH0+ZPrW26Lx0/a8pSS3JGSKMK7Ulbe55j/L5jt1PXj8yeqXWlwi38eONO3VW+aSZa4YbLvi5P61W\nc3Z0MxVO99U3OUoVHnQMfjo0H2H9T9b6S3hzmwwPeaPJDg9696HeGmk+wlJMhp84ssORbJbNR/j1\n+3d6Ef4x6rDah+68YyfDzAIUXCNtR/jr+/v311d6KcbMh0sE572ReJbOju7i3mG1D91Jj8t45PJO\nnpLd9dmOeG+T2p4JOaU6k+QxzIQbGz3WWPOjdyT4VOGreW+P5ZlcFxVWUMMRVjsgbGj/sxTPFjcc\nYRHzT/TXv0ekQgAHdFh2GhxEuNrf9H5ei/U7JKvVjSO1L3o/YfDusNXssMI0eNftF0+ZCZcaPe/D\n5voscGh3JjzM6qlv6qVqjXapafB4dVaHmfClzac+B4d1lFoRvUc481D5HuuoTod9ToM1byfsPUJO\nbP47b+oQ4dEXi8tMhpcPv5ZiP299583U3HbEd7FvqJkIf31/P/5Y6l6Zd8U7rPEpsMaCL7wZyn64\nvNJMhMOe4R1/51S8w+KOvKc0zlMU3RF/NS3NhPc90j2mxOMfnzEZzuvnxu6WImz6OHCSDhnsjo4e\nKj8+h4O/C6Ot5jt5urf3CO96WNnUJMK8wybDVqbB+O7JwdpYKwd4Pgdw8FmB/Qp5LNzqLqilEzPn\n9vlJmlfTqfCKE2GTJnsTW6NEWMjzZGhy64EI/yhyaXh0kkZsPRBhLa2cwGRDLlFAmAgLMQ32SYQQ\nJsJHl9kfd35v02CvRFiCAnsmQggTYZ5psHMifOmghxsU2D0RQpgIoyrcKUeaCHMUyDAMIoQ4Ec7Z\n8ZSJaZC/RJigQB6I8B81/3QW5+Z5wj/uV8x3f6bWNMi/XCn+U93kOHjInQP0HuHyW8Yul8uf/67v\nEWNzXUe48qbN+/TY8dCxoU6PCWd2QX/2+CdkBynyqR4j3OyphduLmBj5THe7ozuem3z+U+t3nQ0y\nb+loJtx95/HxpUetuyzBa71EqALKOn+Ezp5Q3MkjNAFS32kjNAHSinNGaAKkId1dojiOSxQsc86Z\nsAThsUy55wlHD/Tdf7xc/vzv8f8a/QgtamMmfDzGc7zHyZSbCSe9qu7253WhaRVnQl3RlYoRjm66\nHP3zaFYs8rfmYbWKEU5SGmfVxjHhfIGODGlaSzPhnSmRM3HHDIS1sTsKJyZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQtj/ABV/OUMCMvnQAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "WqbaG6ZEeYuE", - "colab_type": "text" - }, - "source": [ - "Analyzing the distribution of pIC50 values in the dataset gives us a nice spread." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "z_N2_csYeYuG", - "colab_type": "code", - "outputId": "5b95d5e3-14c9-4e2e-9f68-dbb425a5e08e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "%matplotlib inline\n", - "import matplotlib\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "\n", - "pIC50s = np.array(dataset[\"pIC50\"])\n", - "# Remove some dirty data from the dataset\n", - "pIC50s = [pIC50 for pIC50 in pIC50s if pIC50 != '']\n", - "n, bins, patches = plt.hist(pIC50s, 50, facecolor='green', alpha=0.75)\n", - "plt.xlabel('Measured pIC50')\n", - "plt.ylabel('Number of compounds')\n", - "plt.title(r'Histogram of pIC50 Values')\n", - "plt.grid(True)\n", - "plt.show()" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZwcdZ3/8debBOQYIEA0PyBAEAge\nKEdGQHAxI+oCKkFBfkSEyKLZnyBmvVY8MYsHeKCwri4sIEExESMCgiD8MAOiXAmHEI5wnyFBSAjD\nfXz2j/pO0Wl6Zmom0101M+/n4zGP6aqu7np3T9Kf/n6r6vtVRGBmZgawWtkBzMysOlwUzMws56Jg\nZmY5FwUzM8u5KJiZWc5FwczMci4KtsokLZQ0uewcZZL0YUkPSuqStGPZeVpBUqekT5adwwaXi4L1\nStJ9kt5bt+4Tkq7sXo6It0ZEZx/PM0FSSBrdpKhl+yHwmYhoi4gb+vPA+vczrfuYpPmpyCyWdJGk\nd9Vs/3K6r/tncs1jJ0iaJ+kZSbfX//1qtjta0hUN1o+V9IKk7frzOmx4cFGwYaECxWYLYOFgPJGk\nzwM/Ab4LjAM2B34GTKnZ7KpUgLp/Omvumw3cAGwEfA2YK+n1DXb1K2A3SVvWrT8IuDkibhmM12ND\ni4uCrbLa1oSkndM33BWSlkg6IW3W/Y10efpm+05Jq0n6uqT7JS2VdKak9Wue99B03+OSvlG3n29J\nmivpV5JWAJ9I+75K0vL07fqnktaoeb6QdISkOyU9JelYSVtJ+lvKe3bt9nWvsWFWSa+T1AWMAm6S\ndHcPjw9Jn5V0j6R/SPqBpNf8/0uv/z+AIyPinIh4OiJejIg/RMSXCvwtJgI7AcdExLMR8TvgZmD/\n+m0j4iHgz8AhdXcdCpwpaQNJF0h6TNKydHt8D/v9lqRf1Syv1DJM79Vp6e/ysKRvSxqV7tta0uWS\nnkzvzW/6ep3WPC4KNthOBE6MiPWArYCz0/o90u8x6ZvtVcAn0k8H8EagDfgpgKS3kH07PhjYGFgf\n2LRuX1OAucAY4CzgZeBzwFjgncCewBF1j/ln
YBKwK/DvwCnAx4HNgO2AqT28roZZI+L5iGhL22wf\nEVv1/NbwYaCd7EN7CvAvDbZ5J7Am8Ptengdgx/QBuigVzO6W0luBeyLiqZptb0rrG5lFTVGQtC2w\nA/Brss+HX5C1gjYHniX9fQbgDOAlYGtgR+D9QPfxiGOBS4ANgPHAfw5wHzYIXBSsiHPTt+/lkpaT\nfVj35EVga0ljI6IrIq7uZduDgRMi4p6I6AK+AhyUPuAOAP4QEVdGxAvAN4H6gbquiohzI+KV9K14\nQURcHREvRcR9wMnAu+se8/2IWBERC4FbgEvS/p8ELiL7wOpv1qKOj4gnIuIBsu6hRgVoI+AfEfFS\nL89zBVkBewNZC2Aq0N2KaAOerNv+SWDdHp7r98A4Sbul5UOBiyLisYh4PCJ+FxHPpCLzHV77fvZJ\n0jhgH+DfUstnKfBjsm4qyP7NbAFsEhHPRcSVPTyVtYCLghWxX0SM6f7htd++ax0OTARul3SdpA/2\nsu0mwP01y/cDo8n60TcBHuy+IyKeAR6ve/yDtQuSJqYujkdTl9J3yVoNtZbU3H62wXIbjfWWtaja\nvPen56z3ODC2t2KTCtO9qRjeTNbddEC6uwtYr+4h6wFP0UB6X38LHCpJZMXvTABJa0s6OXWZrSAr\nRmO6u336YQtgdWBxzReLk8mKGmQtNgHXKjuTrVELylrERcEGVUTcGRFTyf7DH092kHMdXvstH+AR\nsg+MbpuTdTEsARaTdSUAIGktsm/RK+2ubvnnwO3ANqn76qtkHzaDobesRW1W9/hHGmxzFfA8sF8/\nnjd49XUuBN4oqbZlsD29HwSfBRwIvI+sRfGHtP4LwLbALun97O4CbPSePg2sXbP8f2puP0j2msbW\nfLlYLyLeChARj0bEpyJiE+BfgZ9J2rrXV2xN46Jgg0rSxyW9PiJeAZan1a8Aj6Xfb6zZfDbwOUlb\nSmoj+2b/m9R1Mhf4kKTd0sHfb9H3B/y6wAqgS9KbgE8P1uvqI2tRX0oHbzcDZgCvOaCaurG+CfyX\npP3St/XVJe0t6fsA6fa4dPtNwDeA89LjFwE3AsdIWlPSh4G3A7/rJddfyP5WpwBzUncdZO/ns2Qn\nB2wIHNPLc9wI7CFp83Sw/Cs1r2kx2TGDH0laLx2030rSu9Nr+GjNAexlZEXulV72ZU3komCDbS9g\nYToj50TgoNTf/wxZn/RfUxfCrsDpwC/JuiXuBZ4DjgJIff5HAXPIWg1dwFKyb5w9+SLwMbKukv+h\nwYfuKugxaz+cBywg+wC9EDit0UYR8SPg88DXyYrpg8BngHPTJnsCf5f0NPBH4ByyItXtILID2suA\n44ADIuKxnkJFNqnKmWQtoTNr7voJsBbwD+Bq4OJenuNSsvf77+k1XlC3yaHAGsCtKddcshMIAN4B\nXJP+zZwPzIiIe3ralzWXPMmODQXp2/lysq6he8vO01+Sgiz7XWVnMeuNWwpWWZI+lLpP1iG7Yvhm\n4L5yU5kNby4KVmVTyA7GPgJsQ9YV5aatWRO5+8jMzHJuKZiZWa7sQcRWydixY2PChAmlZnj66adZ\nZ511Ss3QiHP1j3P1T1VzQXWzVSnXggUL/hERjQZJhIgYsj+TJk2Kss2bN6/sCA05V/84V/9UNVdE\ndbNVKRcwP3r4XHX3kZmZ5VwUzMws56JgZmY5FwUzM8u5KJiZWc5FwczMci4KZmaWc1EwM7Nc04qC\npNMlLZV0S826DSVdKunO9HuDtF6STpJ0l6S/S9qpWbnMzKxnzRzm4gzgp6w8acfRwGURcZyko9Py\nl4G9yUbB3AbYhWxaxV2amM1sxOmY1dFw/bxp81qcxKqsaS2FiLgCeKJu9RSy+WBJv/erWX9mugL7\narLJwTfGzMxaqqlDZ0uaAFwQEdul5eURMSbdFrAsIsZIugA4LiKuTPddBnw5IuY3eM7pwHSAcePG\nTZozZ07T
8hfR1dVFW1tbqRkaca7+GQm5Fj2+qOH6iRtN7PdzVfX9gupmq1Kujo6OBRHR3ui+0kZJ\njYhIUxT293GnkE0wTnt7e0yePHmwo/VLZ2cnZWdoxLn6ZyTkmjlrZsP18/bvf/dRVd8vqG62quaq\n1+qzj5Z0dwul30vT+oeBzWq2G5/WmZlZC7W6KJwPTEu3pwHn1aw/NJ2FtCvwZEQsbnE2M7MRr2nd\nR5JmA5OBsZIeAo4BjgPOlnQ4cD9wYNr8j8A+wF3AM8BhzcplZmY9a1pRiIipPdy1Z4NtAziyWVnM\nzKwYX9FsZmY5FwUzM8u5KJiZWc5FwczMci4KZmaWc1EwM7Oci4KZmeVcFMzMLOeiYGZmORcFMzPL\nuSiYmVnORcHMzHIuCmZmlnNRMDOzXGnTcZpZMR2zOhqunzet/9NomvXFLQUzM8u5KJiZWc5FwczM\nci4KZmaWc1EwM7Oci4KZmeVcFMzMLOeiYGZmORcFMzPLuSiYmVnORcHMzHIuCmZmlnNRMDOznIuC\nmZnlXBTMzCznomBmZjkXBTMzy5VSFCR9TtJCSbdImi1pTUlbSrpG0l2SfiNpjTKymZmNZC0vCpI2\nBT4LtEfEdsAo4CDgeODHEbE1sAw4vNXZzMxGurK6j0YDa0kaDawNLAbeA8xN988C9ispm5nZiKWI\nKL6xtBrQFhErVmmn0gzgO8CzwCXADODq1EpA0mbARaklUf/Y6cB0gHHjxk2aM2fOqkRZZV1dXbS1\ntZWaoRHn6p8q53rk+Uf69ZiJG01suH7R44v6tX1fuar4fkF1s1UpV0dHx4KIaG903+i+Hizp18D/\nA14GrgPWk3RiRPxgIGEkbQBMAbYElgO/BfYq+viIOAU4BaC9vT0mT548kBiDprOzk7IzNOJc/VPl\nXLMfn92vx8zbf17D9TNnzezX9n3lquL7BdXNVtVc9Yp0H70ltQz2Ay4i+zA/ZBX2+V7g3oh4LCJe\nBM4BdgfGpO4kgPHAw6uwDzMzG4AiRWF1SauTFYXz0wd58T6n13oA2FXS2pIE7AncCswDDkjbTAPO\nW4V9mJnZAPTZfQScDNwH3ARcIWkLYMDHFCLiGklzgeuBl4AbyLqDLgTmSPp2WnfaQPdhZsV1zOpo\nuH7etP53K9nQ12dRiIiTgJNqVt0vqfG/ooIi4hjgmLrV9wA7r8rzmpnZqumxKEj6fB+PPWGQs5iZ\nWcl6aymsm35vC7wDOD8tfwi4tpmhzMysHD0WhYiYCSDpCmCniHgqLX+LrP/fzMyGmSJnH40DXqhZ\nfiGtMzOzYabI2UdnAtdK+n1a3o9sGAozMxtmipx99B1JFwPvSqsOi4gbmhvLzMzKUKSlAHAj2aB1\nowEkbR4RDzQtlZmZlaLI2EdHkV1TsIRs/CORXdH89uZGMzOzVivSUpgBbBsRjzc7jJmZlavI2UcP\nAk82O4iZmZWvSEvhHqBT0oXA890rI8JXNJtVUE9jGZkVUaQoPJB+1kg/ZmY2TBU5JbXxzBxmZjbs\nFDn7aB4N5k+IiPc0JZGZmZWmSPfRF2turwnsTzYPgpmZDTNFuo8W1K36qySPkmpmNgwV6T7asGZx\nNWASsH7TEpmNUI3OGpraNrWEJDaSFek+WkB2TEFk3Ub3Aoc3M5SZmZWjSPfRlq0IYmZm5SvSfbQ6\n8Glgj7SqEzg5Il5sYi4zMytBke6jnwOrAz9Ly4ekdZ9sVigzMytHkaLwjojYvmb5z5JualYgMzMr\nT5EB8V6WtFX3gqQ3kg2hbWZmw0yRlsKXgHmS7iE7A2kL4LCmpjIzs1IUOfvoMknbANumVXdExPO9\nPcbMzIamImcfrQkcQTZHcwB/kfTfEfFcs8OZmVlrFek+OhN4CvjPtPwx4JfAR5sVyszMylGkKGwX\nEW+pWZ4n6dZmBTIzs/IUOfvoekm7di9I2gWY37xIZmZWliIthUnA3yQ9kJ
Y3B+6QdDMQEfH2pqUz\nM7OWKlIU9mp6CjMzq4Qip6TeL2kDYLPa7SPi+mYGMzOz1itySuqxwCeAu3l1Ws4ABjwdp6QxwKnA\ndum5/gW4A/gNMAG4DzgwIpYNdB9mZtZ/RbqPDgS2iogXBnG/JwIXR8QBktYA1ga+ClwWEcdJOho4\nGvjyIO7TzMz6UOTso1uAMYO1Q0nrkw3DfRpARLwQEcuBKcCstNksYL/B2qeZmRWjiOh9A6kdOI+s\nOOTDW0TEvgPaobQDcApwK7A92cxuM4CHI2JM2kbAsu7lusdPB6YDjBs3btKcOXMGEmPQdHV10dbW\nVmqGRpyrf/rKtejxRQ3XT9xo4qBlaLSPDUdtyBMvPzFo++iP3l5bVf+OUN1sVcrV0dGxICLaG91X\npCgsBE4GbgZe6V4fEZcPJEwqMlcDu0fENZJOBFYAR9UWAUnLImKD3p6rvb095s8v95KJzs5OJk+e\nXGqGRpyrf/rK1Wj+ZIB50+YNWoae5mie3TV70PbRH729tqr+HaG62aqUS1KPRaHIMYVnIuKkQczz\nEPBQRFyTlueSHT9YImnjiFgsaWNg6SDu08zMCihyTOEvkr4n6Z2Sdur+GegOI+JR4EFJ3aOu7knW\nlXQ+MC2tm0bWZWVmZi1UpKWwY/q9a826VTolFTgKOCudeXQP2fwMqwFnSzocuJ/srCczM2uhIhev\nNe5MXQURcSPQqD9rz8Hel5mZFVfk4rX1gWPITiMFuBz4j4h4spnBzAZDKw4QjzQdszqY2jaVmbNm\nrrTe7+nwUOSYwulk8ykcmH5WAL9oZigzMytHkWMKW0XE/jXLMyXd2KxAZlYNPbWybHgr0lJ4VtK7\nuhck7Q4827xIZmZWliIthU8Ds9KxBYBlZAPkmdkA+Bu4VVmRs49uBLaXtF5aXtH0VGZmVoo+u48k\nfVfSmIhYERErJG0g6dutCGdmZq1V5JjC3mkUUwDSHAf7NC+SmZmVpcgxhVGSXhcRzwNIWgt4XXNj\nmQ0Nvg7ChpsiReEs4DJJ3dcmHMar8x6YmdkwUuRA8/GSbgLem1YdGxF/am4sMzMrQ5GWAhFxMXBx\nk7OYmVnJihxoNjOzEcJFwczMcj0WBUmXpd/Hty6OmZmVqbdjChtL2g3YV9IcQLV3RsT1TU1mZmYt\n11tR+CbwDWA8cELdfas685pZJQ3WuEQe38iGqh6LQkTMBeZK+kZEHNvCTGZmVpIi1ykcK2lfXp15\nrTMiLmhuLBtuur8518/Y5St/zaqlyIB43wNmALemnxmSvtvsYGZm1npFLl77ALBDRLwCIGkWcAPw\n1WYGM2um+j7/RnMOm41ERa9TGFNze/0etzIzsyGtSEvhe8ANkuaRnZa6B3B0U1OZ9cCjkpo1V5ED\nzbMldQLvSKu+HBGPNjWVmZmVouiAeIuB85ucxczMSuaxj8zMLOeiYGZmuV6LgqRRkm5vVRgzMytX\nr0UhIl4G7pC0eYvymJlZiYocaN4AWCjpWuDp7pURsW/TUpmZWSmKFIVvND2FmZlVQpHrFC6XtAWw\nTUT8f0lrA6OaH83MzFqtyIB4nwLmAienVZsC567qjtNB7BskXZCWt5R0jaS7JP1G0hqrug8zM+uf\nIqekHgnsDqwAiIg7gTcMwr5nALfVLB8P/DgitgaWAYcPwj7MzKwfihSF5yPihe4FSaPJZl4bMEnj\nyUZfPTUti2wmt7lpk1nAfquyDzMz6z9F9P75Lun7wHLgUOAo4Ajg1oj42oB3Ks0lG2hvXeCLwCeA\nq1MrAUmbARdFxHYNHjsdmA4wbty4SXPmzBlojEHR1dVFW1tbqRkaqVquRY8vAmDDURvyxMtP5Osn\nbjRxQM9Tr6fn6Wn7evW5qmIo5erv37JZqvZvv1uVcnV0dCyIiPZG9xUpCquRdeW8n2yU1D8Bp0Zf\nD+z5+T4I7BMRR0iaTD+LQq329vaYP3
/+QGIMms7OTiZPnlxqhkaqlqt25rXZXbPz9f0d3bS/o6QW\nnSu5PldVDKVcVRmptmr/9rtVKZekHotCkbOPXkkT61xD1m10x0ALQrI7sK+kfYA1gfWAE4ExkkZH\nxEvAeODhVdiHmZkNQJGzjz4A3A2cBPwUuEvS3gPdYUR8JSLGR8QE4CDgzxFxMDAPOCBtNg04b6D7\nMDOzgSly8dqPgI6IuAtA0lbAhcBFg5zly8AcSd8mm+7ztEF+fuunMie0KdrtM9DtzayxIkXhqe6C\nkNwDPDUYO4+ITqAz3b4H2HkwntfMzAamx6Ig6SPp5nxJfwTOJjum8FHguhZkM7MhxFOlDg+9tRQ+\nVHN7CfDudPsxYK2mJTIzs9L0WBQi4rBWBjEzs/L1eUxB0pZkF61NqN3eQ2ebmQ0/RQ40n0t2JtAf\ngFeaG8dGGp81ZFYtRYrCcxFxUtOTmJlZ6YoUhRMlHQNcAjzfvTIirm9aKjMzK0WRovA24BCyUUy7\nu48iLZuZ2TBSpCh8FHhj7fDZZmY2PBWZT+EWYEyzg5iZWfmKtBTGALdLuo6Vjyn4lFQzs2GmSFE4\npukpzMysEorMp3B5K4KYmVn5ilzR/BSvzsm8BrA68HRErNfMYGZm1npFWgrrdt+WJGAKsGszQ5mZ\nWTmKnH2Ui8y5wD83KY+ZmZWoSPfRR2oWVwPageealsjMzEpT5Oyj2nkVXgLuI+tCMjOzYabIMQXP\nqzDMeaRSK4tna6ue3qbj/GYvj4uIOLYJeczMrES9tRSebrBuHeBwYCPARcHM+uSW6NDS23ScP+q+\nLWldYAZwGDAH+FFPjzMzs6Gr12MKkjYEPg8cDMwCdoqIZa0IZmZmrdfbMYUfAB8BTgHeFhFdLUtl\nZmal6O3itS8AmwBfBx6RtCL9PCVpRWvimZlZK/V2TKFfVzvbyNHbgUOfSmg2tPmD38zMckWuaLYh\npmNWB1PbpjJz1syV1vtbvJn1xS0FMzPLuSiYmVnORcHMzHI+pmCDykMamA1tLW8pSNpM0jxJt0pa\nKGlGWr+hpEsl3Zl+b9DqbGZmI10Z3UcvAV+IiLeQTet5pKS3AEcDl0XENsBladnMzFqo5UUhIhZH\nxPXp9lPAbcCmZBP3zEqbzQL2a3U2M7ORThFR3s6lCcAVwHbAAxExJq0XsKx7ue4x04HpAOPGjZs0\nZ86cluVtpKuri7a2tlIz1Fv0+CI2HLUhT7z8xErrJ240scftW6VRripwrv5pdq6e/q0WUcX/k1Ct\nXB0dHQsior3RfaUVBUltwOXAdyLiHEnLa4uApGUR0etxhfb29pg/f36zo/aqs7OTyZMnl5qhXvfF\na7O7Zq+0vqeL11p5cLhRripwrv5pdq5VudCyiv8noVq5JPVYFEo5+0jS6sDvgLMi4py0eomkjSNi\nsaSNgaVlZDOz8nmazvKUcfaRgNOA2yLihJq7zgempdvTgPNanc3MbKQro6WwO3AIcLOkG9O6rwLH\nAWdLOhy4HziwhGxmZiNay4tCRFwJqIe792xlFjMzW5mHuTAzs5yLgpmZ5VwUzMws56JgZmY5FwUz\nM8u5KJiZWc7zKQwBvrrTzFrFLQUzM8u5KJiZWc5FwczMcj6mMIJ4/mQz64tbCmZmlnNLwcyGvNpW\n8NS2qcycNRPwGXoD4ZaCmZnl3FKoEPf5m1nZ3FIwM7Oci4KZmeVcFMzMLOdjCoOgrLGJfAzCzAab\nWwpmZpZzS6EE/oZvZlXlloKZmeXcUiiop2/3U9umtjiJmVnzuKVgZmY5FwUzM8u5KJiZWc5FwczM\nciP2QHMrLjjzqadmg8v/p5rPLQUzM8uN2JaCmQ1//e0RKGvImipxS8HMzHIuCmZmlqtUUZC0l6Q7\nJN0l6eiy85iZjTSVOaYgaRTwX8D7gIeA6ySdHxG3tjKHz24wsypp9XGOKrUUdgbuioh7IuIFYA4w\npe
RMZmYjiiKi7AwASDoA2CsiPpmWDwF2iYjP1G03HZieFrcF7mhp0NcaC/yj5AyNOFf/OFf/VDUX\nVDdblXJtERGvb3RHZbqPioqIU4BTys7RTdL8iGgvO0c95+of5+qfquaC6maraq56Veo+ehjYrGZ5\nfFpnZmYtUqWicB2wjaQtJa0BHAScX3ImM7MRpTLdRxHxkqTPAH8CRgGnR8TCkmMVUZmurDrO1T/O\n1T9VzQXVzVbVXCupzIFmMzMrX5W6j8zMrGQuCmZmlnNRGABJa0q6VtJNkhZKmll2plqSRkm6QdIF\nZWepJek+STdLulHS/LLzdJM0RtJcSbdLuk3SOyuQadv0PnX/rJD0b2XnApD0ufTv/hZJsyWtWXYm\nAEkzUqaFZb9Xkk6XtFTSLTXrNpR0qaQ70+8NyszYExeFgXkeeE9EbA/sAOwladeSM9WaAdxWdoge\ndETEDhU7X/tE4OKIeBOwPRV47yLijvQ+7QBMAp4Bfl9yLCRtCnwWaI+I7chOCjmo3FQgaTvgU2Qj\nI2wPfFDS1iVGOgPYq27d0cBlEbENcFlarhwXhQGITFdaXD39VOKIvaTxwAeAU8vOMhRIWh/YAzgN\nICJeiIjl5aZ6jT2BuyPi/rKDJKOBtSSNBtYGHik5D8CbgWsi4pmIeAm4HPhIWWEi4grgibrVU4BZ\n6fYsYL+WhirIRWGAUhfNjcBS4NKIuKbsTMlPgH8HXik7SAMBXCJpQRqupAq2BB4DfpG63E6VtE7Z\noeocBMwuOwRARDwM/BB4AFgMPBkRl5SbCoBbgH+StJGktYF9WPli2CoYFxGL0+1HgXFlhumJi8IA\nRcTLqWk/Htg5NV9LJemDwNKIWFB2lh68KyJ2AvYGjpS0R9mByL717gT8PCJ2BJ6mQs36dCHnvsBv\ny84CkPrBp5AV002AdSR9vNxUEBG3AccDlwAXAzcCL5caqheRXQtQid6Fei4Kqyh1Nczjtf2HZdgd\n2FfSfWSjzL5H0q/KjfSq9C2TiFhK1j++c7mJgGyY9odqWnpzyYpEVewNXB8RS8oOkrwXuDciHouI\nF4FzgN1KzgRARJwWEZMiYg9gGbCo7Ex1lkjaGCD9XlpynoZcFAZA0usljUm31yKbA+L2clNBRHwl\nIsZHxASyLoc/R0Tp3+IAJK0jad3u28D7yZr8pYqIR4EHJW2bVu0JtHQOjz5MpSJdR8kDwK6S1pYk\nsver9APzAJLekH5vTnY84dflJnqN84Fp6fY04LwSs/SoMsNcDDEbA7PSxECrAWdHRKVO/6ygccDv\ns88RRgO/joiLy42UOwo4K3XV3AMcVnIeIC+e7wP+tews3SLiGklzgeuBl4AbqM7wDb+TtBHwInBk\nmScMSJoNTAbGSnoIOAY4Djhb0uHA/cCBZeXrjYe5MDOznLuPzMws56JgZmY5FwUzM8u5KJiZWc5F\nwczMci4KNmRJitqL8ySNlvRY1UaHrSepq++tVtq+U1J7ut0m6WRJd6fhQjol7ZLuazgK7VAZndOq\nwUXBhrKnge3SBYSQndP/cBlB0uBwrXAq2UBr20TEJLJrKsbW3N9oFNohMTqnVYOLgg11fyQbFRbq\nrv5NV1Gfnua+uEHSlLR+gqS/SLo+/eyW1m8s6Yr0TfsWSf+U1nfVPOcBks5It8+Q9N+SrgG+L2kr\nSRenb/B/kfSmtN2Wkq5K3+K/3ehFpEy3SzpL2ZwOc9PAbrXbbAXsAnw9Il4BiIh7I+LCPt6jITE6\np1WDi4INdXOAg9JEL28Haker/RrZUB87Ax3AD9JVwkuB96XB+f4vcFLa/mPAn9JAh9uTDarWl/HA\nbhHxebIre49K3+C/CPwsbXMi2YB7byMbWbQn2wI/i4g3AyuAI+rufytwY0T0NNBbT6PQDonROa0a\nPMyFDWkR8XdJE8haCX+su/v9ZAMEfjEtrwlsTjb+/08l7UA2kubE
dP91wOmSVgfOjYgiReG3EfGy\npDaygeF+m4byAHhd+r07sH+6/Uuy0TwbeTAi/ppu/4psMpsfFsjQ7V0R8XAaA+hSSbencf1zERGS\nPIyB9chFwYaD88k+PCcDG9WsF7B/RNxRu7GkbwFLyFoDqwHPQTYxShrO+wPAGZJOiIgzWXmI4/qp\nJ59Ov1cDlqdWRiNFPojrt6lfXghsL2lUo9ZC7Si0krpHob2CNDpnRCyu8uicVg3uPrLh4HRgZkTc\nXLf+T8BRaTRPJO2Y1q8PLL22J04AAAESSURBVE798oeQTSmJpC2AJRHxP2QHdLuH0F4i6c2SVgM+\n3ChARKwA7pX00fRckrR9uvuvvDpl5cG9vI7N9er80B8Drqzbx93AfGBmzWuaIOkDfYxCOyRG57Rq\ncFGwIS8iHoqIkxrcdSzZVKl/l7QwLUPW1z9N0k3Am3j12/5k4CZJN5AdazgxrT8auAD4G70fEzgY\nODw970KyA7yQzZl9pKSbgU17efwdabvbgA2AnzfY5pNkxwTuUjYp/Blk3/zHAVemfV8LXFgzCu1x\nwPsk3Uk2H8JxvWSwEc6jpJpVQDouckFElD6Dn41sbimYmVnOLQUzM8u5pWBmZjkXBTMzy7komJlZ\nzkXBzMxyLgpmZpb7X92YnUG7KXFrAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "sgobPzXteYuL", - "colab_type": "text" - }, - "source": [ - "We now featurize the data using the Canvas samples. To do so, we must specify the columns in the data input that correspond to the features. (Note that CanvasUID is excluded!)" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Lbo1SzuleYuN", - "colab_type": "code", - "colab": {} - }, - "source": [ - "user_specified_features = ['MW','AlogP','HBA','HBD','RB','HeavyAtomCount','ChiralCenterCount','ChiralCenterCountAllPossible','RingCount','PSA','Estate','MR','Polar','sLi_Key','ssBe_Key','ssssBem_Key','sBH2_Key','ssBH_Key','sssB_Key','ssssBm_Key','sCH3_Key','dCH2_Key','ssCH2_Key','tCH_Key','dsCH_Key','aaCH_Key','sssCH_Key','ddC_Key','tsC_Key','dssC_Key','aasC_Key','aaaC_Key','ssssC_Key','sNH3_Key','sNH2_Key','ssNH2_Key','dNH_Key','ssNH_Key','aaNH_Key','tN_Key','sssNH_Key','dsN_Key','aaN_Key','sssN_Key','ddsN_Key','aasN_Key','ssssN_Key','daaN_Key','sOH_Key','dO_Key','ssO_Key','aaO_Key','aOm_Key','sOm_Key','sF_Key','sSiH3_Key','ssSiH2_Key','sssSiH_Key','ssssSi_Key','sPH2_Key','ssPH_Key','sssP_Key','dsssP_Key','ddsP_Key','sssssP_Key','sSH_Key','dS_Key','ssS_Key','aaS_Key','dssS_Key','ddssS_Key','ssssssS_Key','Sm_Key','sCl_Key','sGeH3_Key','ssGeH2_Key','sssGeH_Key','ssssGe_Key','sAsH2_Key','ssAsH_Key','sssAs_Key','dsssAs_Key','ddsAs_Key','sssssAs_Key','sSeH_Key','dSe_Key','ssSe_Key','aaSe_Key','dssSe_Key','ssssssSe_Key','ddssSe_Key','sBr_Key','sSnH3_Key','ssSnH2_Key','sssSnH_Key','ssssSn_Key','sI_Key','sPbH3_Key','ssPbH2_Key','sssPbH_Key','ssssPb_Key','sLi_Cnt','ssBe_Cnt','ssssBem_Cnt','sBH2_Cnt','ssBH_Cnt','sssB_Cnt','ssssBm_Cnt','sCH3_Cnt','dCH2_Cnt','ssCH2_Cnt','tCH_Cnt','dsCH_Cnt','aaCH_Cnt','sssCH_Cnt','ddC_Cnt','tsC_Cnt','dssC_Cnt','aasC_Cnt','aaaC_Cnt','ssssC_Cnt','sNH3_Cnt','sNH2_Cnt','ssNH2_Cnt','dNH_Cnt','ssNH_Cnt','aaNH_Cnt','tN_Cnt','sssNH_Cnt','dsN_Cnt','aaN_Cnt','sssN_Cnt','ddsN_C
nt','aasN_Cnt','ssssN_Cnt','daaN_Cnt','sOH_Cnt','dO_Cnt','ssO_Cnt','aaO_Cnt','aOm_Cnt','sOm_Cnt','sF_Cnt','sSiH3_Cnt','ssSiH2_Cnt','sssSiH_Cnt','ssssSi_Cnt','sPH2_Cnt','ssPH_Cnt','sssP_Cnt','dsssP_Cnt','ddsP_Cnt','sssssP_Cnt','sSH_Cnt','dS_Cnt','ssS_Cnt','aaS_Cnt','dssS_Cnt','ddssS_Cnt','ssssssS_Cnt','Sm_Cnt','sCl_Cnt','sGeH3_Cnt','ssGeH2_Cnt','sssGeH_Cnt','ssssGe_Cnt','sAsH2_Cnt','ssAsH_Cnt','sssAs_Cnt','dsssAs_Cnt','ddsAs_Cnt','sssssAs_Cnt','sSeH_Cnt','dSe_Cnt','ssSe_Cnt','aaSe_Cnt','dssSe_Cnt','ssssssSe_Cnt','ddssSe_Cnt','sBr_Cnt','sSnH3_Cnt','ssSnH2_Cnt','sssSnH_Cnt','ssssSn_Cnt','sI_Cnt','sPbH3_Cnt','ssPbH2_Cnt','sssPbH_Cnt','ssssPb_Cnt','sLi_Sum','ssBe_Sum','ssssBem_Sum','sBH2_Sum','ssBH_Sum','sssB_Sum','ssssBm_Sum','sCH3_Sum','dCH2_Sum','ssCH2_Sum','tCH_Sum','dsCH_Sum','aaCH_Sum','sssCH_Sum','ddC_Sum','tsC_Sum','dssC_Sum','aasC_Sum','aaaC_Sum','ssssC_Sum','sNH3_Sum','sNH2_Sum','ssNH2_Sum','dNH_Sum','ssNH_Sum','aaNH_Sum','tN_Sum','sssNH_Sum','dsN_Sum','aaN_Sum','sssN_Sum','ddsN_Sum','aasN_Sum','ssssN_Sum','daaN_Sum','sOH_Sum','dO_Sum','ssO_Sum','aaO_Sum','aOm_Sum','sOm_Sum','sF_Sum','sSiH3_Sum','ssSiH2_Sum','sssSiH_Sum','ssssSi_Sum','sPH2_Sum','ssPH_Sum','sssP_Sum','dsssP_Sum','ddsP_Sum','sssssP_Sum','sSH_Sum','dS_Sum','ssS_Sum','aaS_Sum','dssS_Sum','ddssS_Sum','ssssssS_Sum','Sm_Sum','sCl_Sum','sGeH3_Sum','ssGeH2_Sum','sssGeH_Sum','ssssGe_Sum','sAsH2_Sum','ssAsH_Sum','sssAs_Sum','dsssAs_Sum','ddsAs_Sum','sssssAs_Sum','sSeH_Sum','dSe_Sum','ssSe_Sum','aaSe_Sum','dssSe_Sum','ssssssSe_Sum','ddssSe_Sum','sBr_Sum','sSnH3_Sum','ssSnH2_Sum','sssSnH_Sum','ssssSn_Sum','sI_Sum','sPbH3_Sum','ssPbH2_Sum','sssPbH_Sum','ssssPb_Sum','sLi_Avg','ssBe_Avg','ssssBem_Avg','sBH2_Avg','ssBH_Avg','sssB_Avg','ssssBm_Avg','sCH3_Avg','dCH2_Avg','ssCH2_Avg','tCH_Avg','dsCH_Avg','aaCH_Avg','sssCH_Avg','ddC_Avg','tsC_Avg','dssC_Avg','aasC_Avg','aaaC_Avg','ssssC_Avg','sNH3_Avg','sNH2_Avg','ssNH2_Avg','dNH_Avg','ssNH_Avg','aaNH_Avg','tN_Avg','sssNH_Avg','dsN_Avg','aaN_Avg','sssN_Avg','ddsN_A
vg','aasN_Avg','ssssN_Avg','daaN_Avg','sOH_Avg','dO_Avg','ssO_Avg','aaO_Avg','aOm_Avg','sOm_Avg','sF_Avg','sSiH3_Avg','ssSiH2_Avg','sssSiH_Avg','ssssSi_Avg','sPH2_Avg','ssPH_Avg','sssP_Avg','dsssP_Avg','ddsP_Avg','sssssP_Avg','sSH_Avg','dS_Avg','ssS_Avg','aaS_Avg','dssS_Avg','ddssS_Avg','ssssssS_Avg','Sm_Avg','sCl_Avg','sGeH3_Avg','ssGeH2_Avg','sssGeH_Avg','ssssGe_Avg','sAsH2_Avg','ssAsH_Avg','sssAs_Avg','dsssAs_Avg','ddsAs_Avg','sssssAs_Avg','sSeH_Avg','dSe_Avg','ssSe_Avg','aaSe_Avg','dssSe_Avg','ssssssSe_Avg','ddssSe_Avg','sBr_Avg','sSnH3_Avg','ssSnH2_Avg','sssSnH_Avg','ssssSn_Avg','sI_Avg','sPbH3_Avg','ssPbH2_Avg','sssPbH_Avg','ssssPb_Avg','First Zagreb (ZM1)','First Zagreb index by valence vertex degrees (ZM1V)','Second Zagreb (ZM2)','Second Zagreb index by valence vertex degrees (ZM2V)','Polarity (Pol)','Narumi Simple Topological (NST)','Narumi Harmonic Topological (NHT)','Narumi Geometric Topological (NGT)','Total structure connectivity (TSC)','Wiener (W)','Mean Wiener (MW)','Xu (Xu)','Quadratic (QIndex)','Radial centric (RC)','Mean Square Distance Balaban (MSDB)','Superpendentic (SP)','Harary (Har)','Log of product of row sums (LPRS)','Pogliani (Pog)','Schultz Molecular Topological (SMT)','Schultz Molecular Topological by valence vertex degrees (SMTV)','Mean Distance Degree Deviation (MDDD)','Ramification (Ram)','Gutman Molecular Topological (GMT)','Gutman MTI by valence vertex degrees (GMTV)','Average vertex distance degree (AVDD)','Unipolarity (UP)','Centralization (CENT)','Variation (VAR)','Molecular electrotopological variation (MEV)','Maximal electrotopological positive variation (MEPV)','Maximal electrotopological negative variation (MENV)','Eccentric connectivity (ECCc)','Eccentricity (ECC)','Average eccentricity (AECC)','Eccentric (DECC)','Valence connectivity index chi-0 (vX0)','Valence connectivity index chi-1 (vX1)','Valence connectivity index chi-2 (vX2)','Valence connectivity index chi-3 (vX3)','Valence connectivity index chi-4 (vX4)','Valence 
connectivity index chi-5 (vX5)','Average valence connectivity index chi-0 (AvX0)','Average valence connectivity index chi-1 (AvX1)','Average valence connectivity index chi-2 (AvX2)','Average valence connectivity index chi-3 (AvX3)','Average valence connectivity index chi-4 (AvX4)','Average valence connectivity index chi-5 (AvX5)','Quasi Wiener (QW)','First Mohar (FM)','Second Mohar (SM)','Spanning tree number (STN)','Kier benzene-likeliness index (KBLI)','Topological charge index of order 1 (TCI1)','Topological charge index of order 2 (TCI2)','Topological charge index of order 3 (TCI3)','Topological charge index of order 4 (TCI4)','Topological charge index of order 5 (TCI5)','Topological charge index of order 6 (TCI6)','Topological charge index of order 7 (TCI7)','Topological charge index of order 8 (TCI8)','Topological charge index of order 9 (TCI9)','Topological charge index of order 10 (TCI10)','Mean topological charge index of order 1 (MTCI1)','Mean topological charge index of order 2 (MTCI2)','Mean topological charge index of order 3 (MTCI3)','Mean topological charge index of order 4 (MTCI4)','Mean topological charge index of order 5 (MTCI5)','Mean topological charge index of order 6 (MTCI6)','Mean topological charge index of order 7 (MTCI7)','Mean topological charge index of order 8 (MTCI8)','Mean topological charge index of order 9 (MTCI9)','Mean topological charge index of order 10 (MTCI10)','Global topological charge (GTC)','Hyper-distance-path index (HDPI)','Reciprocal hyper-distance-path index (RHDPI)','Square reciprocal distance sum (SRDS)','Modified Randic connectivity (MRC)','Balaban centric (BC)','Lopping centric (LC)','Kier Hall electronegativity (KHE)','Sum of topological distances between N..N (STD(N N))','Sum of topological distances between N..O (STD(N O))','Sum of topological distances between N..S (STD(N S))','Sum of topological distances between N..P (STD(N P))','Sum of topological distances between N..F (STD(N F))','Sum of topological 
distances between N..Cl (STD(N Cl))','Sum of topological distances between N..Br (STD(N Br))','Sum of topological distances between N..I (STD(N I))','Sum of topological distances between O..O (STD(O O))','Sum of topological distances between O..S (STD(O S))','Sum of topological distances between O..P (STD(O P))','Sum of topological distances between O..F (STD(O F))','Sum of topological distances between O..Cl (STD(O Cl))','Sum of topological distances between O..Br (STD(O Br))','Sum of topological distances between O..I (STD(O I))','Sum of topological distances between S..S (STD(S S))','Sum of topological distances between S..P (STD(S P))','Sum of topological distances between S..F (STD(S F))','Sum of topological distances between S..Cl (STD(S Cl))','Sum of topological distances between S..Br (STD(S Br))','Sum of topological distances between S..I (STD(S I))','Sum of topological distances between P..P (STD(P P))','Sum of topological distances between P..F (STD(P F))','Sum of topological distances between P..Cl (STD(P Cl))','Sum of topological distances between P..Br (STD(P Br))','Sum of topological distances between P..I (STD(P I))','Sum of topological distances between F..F (STD(F F))','Sum of topological distances between F..Cl (STD(F Cl))','Sum of topological distances between F..Br (STD(F Br))','Sum of topological distances between F..I (STD(F I))','Sum of topological distances between Cl..Cl (STD(Cl Cl))','Sum of topological distances between Cl..Br (STD(Cl Br))','Sum of topological distances between Cl..I (STD(Cl I))','Sum of topological distances between Br..Br (STD(Br Br))','Sum of topological distances between Br..I (STD(Br I))','Sum of topological distances between I..I (STD(I I))','Wiener-type index from Z weighted distance matrix - Barysz matrix (WhetZ)','Wiener-type index from electronegativity weighted distance matrix (Whete)','Wiener-type index from mass weighted distance matrix (Whetm)','Wiener-type index from van der waals weighted distance matrix 
(Whetv)','Wiener-type index from polarizability weighted distance matrix (Whetp)','Balaban-type index from Z weighted distance matrix - Barysz matrix (JhetZ)','Balaban-type index from electronegativity weighted distance matrix (Jhete)','Balaban-type index from mass weighted distance matrix (Jhetm)','Balaban-type index from van der waals weighted distance matrix (Jhetv)','Balaban-type index from polarizability weighted distance matrix (Jhetp)','Topological diameter (TD)','Topological radius (TR)','Petitjean 2D shape (PJ2DS)','Balaban distance connectivity index (J)','Solvation connectivity index chi-0 (SCIX0)','Solvation connectivity index chi-1 (SCIX1)','Solvation connectivity index chi-2 (SCIX2)','Solvation connectivity index chi-3 (SCIX3)','Solvation connectivity index chi-4 (SCIX4)','Solvation connectivity index chi-5 (SCIX5)','Connectivity index chi-0 (CIX0)','Connectivity chi-1 [Randic connectivity] (CIX1)','Connectivity index chi-2 (CIX2)','Connectivity index chi-3 (CIX3)','Connectivity index chi-4 (CIX4)','Connectivity index chi-5 (CIX5)','Average connectivity index chi-0 (ACIX0)','Average connectivity index chi-1 (ACIX1)','Average connectivity index chi-2 (ACIX2)','Average connectivity index chi-3 (ACIX3)','Average connectivity index chi-4 (ACIX4)','Average connectivity index chi-5 (ACIX5)','reciprocal distance Randic-type index (RDR)','reciprocal distance square Randic-type index (RDSR)','1-path Kier alpha-modified shape index (KAMS1)','2-path Kier alpha-modified shape index (KAMS2)','3-path Kier alpha-modified shape index (KAMS3)','Kier flexibility (KF)','path/walk 2 - Randic shape index (RSIpw2)','path/walk 3 - Randic shape index (RSIpw3)','path/walk 4 - Randic shape index (RSIpw4)','path/walk 5 - Randic shape index (RSIpw5)','E-state topological parameter (ETP)','Ring Count 3 (RNGCNT3)','Ring Count 4 (RNGCNT4)','Ring Count 5 (RNGCNT5)','Ring Count 6 (RNGCNT6)','Ring Count 7 (RNGCNT7)','Ring Count 8 (RNGCNT8)','Ring Count 9 (RNGCNT9)','Ring Count 10 
(RNGCNT10)','Ring Count 11 (RNGCNT11)','Ring Count 12 (RNGCNT12)','Ring Count 13 (RNGCNT13)','Ring Count 14 (RNGCNT14)','Ring Count 15 (RNGCNT15)','Ring Count 16 (RNGCNT16)','Ring Count 17 (RNGCNT17)','Ring Count 18 (RNGCNT18)','Ring Count 19 (RNGCNT19)','Ring Count 20 (RNGCNT20)','Atom Count (ATMCNT)','Bond Count (BNDCNT)','Atoms in Ring System (ATMRNGCNT)','Bonds in Ring System (BNDRNGCNT)','Cyclomatic number (CYCLONUM)','Number of ring systems (NRS)','Normalized number of ring systems (NNRS)','Ring Fusion degree (RFD)','Ring perimeter (RNGPERM)','Ring bridge count (RNGBDGE)','Molecule cyclized degree (MCD)','Ring Fusion density (RFDELTA)','Ring complexity index (RCI)','Van der Waals surface area (VSA)','MR1 (MR1)','MR2 (MR2)','MR3 (MR3)','MR4 (MR4)','MR5 (MR5)','MR6 (MR6)','MR7 (MR7)','MR8 (MR8)','ALOGP1 (ALOGP1)','ALOGP2 (ALOGP2)','ALOGP3 (ALOGP3)','ALOGP4 (ALOGP4)','ALOGP5 (ALOGP5)','ALOGP6 (ALOGP6)','ALOGP7 (ALOGP7)','ALOGP8 (ALOGP8)','ALOGP9 (ALOGP9)','ALOGP10 (ALOGP10)','PEOE1 (PEOE1)','PEOE2 (PEOE2)','PEOE3 (PEOE3)','PEOE4 (PEOE4)','PEOE5 (PEOE5)','PEOE6 (PEOE6)','PEOE7 (PEOE7)','PEOE8 (PEOE8)','PEOE9 (PEOE9)','PEOE10 (PEOE10)','PEOE11 (PEOE11)','PEOE12 (PEOE12)','PEOE13 (PEOE13)','PEOE14 (PEOE14)']" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "op-ucdRNeYuT", - "colab_type": "code", - "outputId": "e310a830-7de8-4655-9367-dfdaa766c5f3", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 323 - } - }, - "source": [ - "import deepchem as dc\n", - "import tempfile, shutil\n", - "\n", - "featurizer = dc.feat.UserDefinedFeaturizer(user_specified_features)\n", - "loader = dc.data.UserCSVLoader(\n", - " tasks=[\"Class\"], smiles_field=\"mol\", id_field=\"mol\",\n", - " featurizer=featurizer)\n", - "dataset = loader.featurize(dataset_file)\n", - "crystal_dataset = loader.featurize(crystal_dataset_file)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - 
"Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from desc_canvas_aug30.csv\n", - "Loading shard 1 of size 8192.\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/data/data_loader.py:131: FutureWarning: Method .as_matrix will be removed in a future version. Use .values instead.\n", - " X_shard = df.as_matrix(columns=featurizer.feature_fields)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "TIMING: user specified processing took 0.169 s\n", - "TIMING: featurizing shard 0 took 0.176 s\n", - "TIMING: dataset construction took 0.459 s\n", - "Loading dataset from disk.\n", - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from crystal_desc_canvas_aug30.csv\n", - "Loading shard 1 of size 8192.\n", - "TIMING: user specified processing took 0.162 s\n", - "TIMING: featurizing shard 0 took 0.163 s\n", - "TIMING: dataset construction took 0.234 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "UAg_knFneYub", - "colab_type": "text" - }, - "source": [ - "This data is already split into three subsets \"Train\" and \"Test\" with 20% and 80% respectively of the total data from the BACE enzyme. There is also a \"Validation\" set that contains data from a separate (but related assay). (Note that these names are really misnomers. The \"Test\" set would be called a validation set in standard machine-learning practice and the \"Validation\" set would typically be called an external test set.) Hence, we will rename the datasets after loading them." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "XISgZKsYeYuc", - "colab_type": "code", - "outputId": "0ef562a7-6460-4d31-dff8-eeb6c0cbe302", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 119 - } - }, - "source": [ - "splitter = dc.splits.SpecifiedSplitter(dataset_file, \"Model\")\n", - "train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(\n", - " dataset)\n", - "#NOTE THE RENAMING:\n", - "valid_dataset, test_dataset = test_dataset, valid_dataset" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.055 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.040 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.146 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "4ueVztyzeYuh", - "colab_type": "text" - }, - "source": [ - "Let's quickly take a look at a compound in the validation set. (The compound displayed earlier was drawn from the train set)." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "-l8uMJpueYuj", - "colab_type": "code", - "outputId": "7692477d-3e18-41b1-870e-7e5b8dc3b8a0", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } - }, - "source": [ - "print(valid_dataset.ids)\n", - "valid_mols = [Chem.MolFromSmiles(compound)\n", - " for compound in islice(valid_dataset.ids, num_to_display)]\n", - "display_images(mols_to_pngs(valid_mols, basename=\"valid_set\"))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "['S(=O)(=O)(N(C)c1cc(cc(c1)C(=O)NC(C(O)CC(OC)C(=O)NC(C(C)C)C(=O)NCc1ccccc1)COc1cc(F)cc(F)c1)C(=O)NC(C)c1ccccc1)C'\n", - " 'O=C(NCCC(C)(C)C)C(Cc1cc2cc(ccc2nc1N)-c1ccccc1C)C'\n", - " 'Fc1cc(cc(F)c1)CC(NC(=O)C(N1CCC(NC(=O)C)(C(CC)C)C1=O)CCc1ccccc1)C(O)C1[NH2+]CC(O)C1'\n", - " ... 
'Brc1cc(ccc1)C1CC1C=1N=C(N)N(C)C(=O)C=1'\n", - " 'O=C1N(C)C(=NC(=C1)C1CC1c1cc(ccc1)-c1ccccc1)N'\n", - " 'Clc1cc2nc(n(c2cc1)CCCC(=O)NCC1CC1)N']\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJuUlEQVR4nO3dWXabygKGUXTXmZEz\n/xFEY+I+KCFYnWlK/FXU3isPPj5uZOCj6HUZx3EAcv6XfgHQOxFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ\nJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARssvlckm/hOaJkF3GcdThTiJkLx3uJEIK0OEefUVoQfkcHW7WV4RQoY4ivFwu4zimX8WZ\nGQy3+S/9Ap6bz0vlNOTWoVm2ShXT63H1OX9VRWaqJeNIpvYqsZHQWHdib8bDp9urnS8Ayc3RHyf9\nbUbawmnRq/1D8/FRZuGeovqxruVfueSHcCSTfaFmjo468tYWBS4XiHDV4La/PUsDlWtgJJw6NBi2\nwopvlaMj3LaPt7lDSwP1a2AkvLOkw8tfx7ykyh08Haz41qr0iplHq049FT/XD59z6AK6/3zD/CdM\nn1x4vnHDrzuBg//2nif1Zs2MhDfzGbxyl3KwbFCn4/YJi5x2v/tR/MAw2IL2DsxsM46DwzTUqb01\npdXtUsdugpsvmzW2T8gKRyVxuQzDYDNju0M3R13yckrjOEw3u6RfS5OO3ifcOats8xxpPqNuH7+f\ndTrcJnBgxqxqyNoZ1crMvcykX0tT+4SFjuv821e6fewU4huP0+e20L6ZYhXehF35NVWZCN/Pp/eT\nbL+TVzdNvc/8kUt+ag0dNvT8lNhIOJ9Pd9W9ukC01KTcsHZvxuNAv1u7p1iXXGlcw/NTws+YmZ4i\n8+prjtlkP0N+n7Shw+yS/Wr9PrwoM/tq69on/PSG6OzHtrp2T7nNh1VzIz7CDKsvMM682nCEdwfT\njpwEOjzAqyX7br+17NGyePlr5UfC4PTasHav3XzVsvcq+QJT5tbD9w2ccXj20mo4WpYaDPMRHqmG\nOf1x9S3gSxbuUkfLdlYU6bCXuyhY5XK5lBoGh61hjGNm1XH89Qa9RdjlXmBo93dtQd3upXexOTqt\n2NraXy9pwVbW3RGyYltl6x6rt6vDUveLH7xRet4IZzfYvDpl1LNXZ4OmhW/cfYxy+ffOv3A6Wpbe\nkz3O6SL8fmxw/Pfpxg5bF7fwbNC3QWB/CgeWVPCxKQcvLaeL8PWVSjoclm2Q7x0D//6UXd/+Zw3w\n8yx7HNKbm9GNvdx7j+fEvs+S+ebo0ODsKWj13z51GN0uvHvZC++H2DCjU8Pg0PZI+PRi5e+Tr5ng\nnv4t2evKp/GwgnOM8/9c/l2trHBbjnC9luZN0fFn419dx4Rq4vrPPXo7T1jHrd+Xy59/P36Slab5\nu2pGB7dFhw4jHOIdTtt4dyfFpk+yz7YOg1qO8LYQ3/6tXHZbmT1FHL12r2lIXzKjs8Pg0Pw+4Ycn\nWfIRCdM4WcnYuPDmjA/c2r9BWzuHjUe4w+N8Sj4O6PGa
keFhcd+0TJd52XWktcr8uQ3vJ0J2GBx6\njnBYefi7wMp12hvcdstfV5dylVDP81Pe6zrCm4WXNRdeTW77aSs7bGiT7BiHPT9lla4jfFxG38+k\nXYNhqQtQVt4PcbTN4/xnBJ+fslzXEU6WD3cV7vEfsXZflVZNE2eoNby5upanI+1pafuliYV26upf\nu7OckXCLzePhpdzlrNo7jZZP1kdtuyqqiNq2h9mp05HwsOV4CnW6kOqAX0pbOo2wiA2n+/f/UsPg\n+fQYYaG3WPt3Ncb0yQWn+2s7dkhejxEWZ2hij+4OzBQcBjf8tDH9SD8q1F2EkxouGoShwwj33/G5\n/+nu2xgGz6q7COe6urWXavUY4by9/ePhMQyDJ9ZjhEM77Q1/HhZhuD6zTiMc2nkc0DgOjyckOZN+\nI5xbuIiXfSLQ/BfePn7/EnR4Vl1HWGTncA9NMXQe4bCyw+JPBHo8d//+WYEGw1Ny2dq367DjTwRa\ncNt6dbf2s5MIn3ja24eW+w0XsunwZEQ4DOnHAemwcyL85/j27j7WVJ+sTYeh2etRXr7suyej1fQM\nQh4ZCRt2vxU9+x//vqjBJ9j3RoQ3rS6aLQ7g3On9POFgeCBNhBDW5AEJ1nk8MGP0r4kIe6XDatgc\nhTBHR3u18i3WbDF9js3Rrs07TL5beN96nLJOX09WjXU6/JAzb47+XcK+LTdF3i33TLQXd8IIl7zt\nbrm362zYwqd1THdsuHXjQ04Y4WeXki4vhtbhR/V7imLL20LcRs/bv8YfM7H2oVWtPJyuRf1GOHy/\ngOQdy9x3Oiyrxwif3k37dKG6Xi+n3Hfc9uzG+MPpzsr2/TK3FM9ycmPPA1Q3fO+v6/X2we+vr5Wv\ntAsnPDBT3PV6+bPszEfMZgvc6elBmtuo+PX7990XT9XJ7w0jYXeKPEf8Lr/3P8dI+J6RkC3m1S0p\nWX5viPDe9fpng/Pr64TbCKXfTsOWVAEi/OZ6vZyyPWpmTfYtvHMPgzelhi/DYCndT8cXZxpOPCSK\nsDY9nqz/ZrwfA0/PefbaWJn9vSptHIc+Nkdvdo5jhsGCTMof2C4t/r3c6X5z9CdfX2M/W6oLKbCs\n/qbmpuvOzrrYvf+7Xj115qxTI6WzqbnjCuyzLnl3F6DNPf17zzodgpysX+qsN5XPL8h+82UOqH6O\nCFc4X4FPHflu4QwiZEi/WzidReiGwNe0l9JZhIP2njjlvm5DnCfkyR3xHEmEECbC3v26Xt32niVC\nCLNHDmFGQggTIYSJEMJECGH9XTHDMAyeil0TEfZLfpWwOdqvX9frNB4SZCTsl5GwEkZCCBMhhLls\nDcKMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwv4PVE0a6ihb5ssAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKSElEQVR4nO3d25KbyhJF0cLh//9l\n+QE3QXMTElC5VuYccR7s445trGKSgG7D6/VqAOL8id4AoDoiBIIRIRCMCIFgRAgEI0IgGBECwYgQ\nCEaEQDAiBIIRIRCMCIFgRAgEI0IgGBECwYgQCEaEQDAiBIIRIRCMCIFgRAgEI0IgGBECwYgQCEaE\nQDAiBIIRIRCMCC8Zhu3fDsP//83/aPFbYPQ3egMSGoY2fcvO/NfAJibh/faqe70YgNjAJLyKrnAR\nEV41n3vrq77FVByHISeomCPCR1AazuOa8H7HBXJliAUm4SPmmTEScWzgO+uBWJyOAsGIMAxXhhgR\nIRCMCMNwmxQjIgSCEWEkhiEaEQLhiDAYwxBECAQjwngMw+KIEAhGhBIYhpURoQ4qLIoIVbxer4Fp\nWBIRAsGIUAjDsCYiBIIRIRCMCLVwRloQEQLBiFAOw7AaPvJQ1NQhH4eXHhHKGYZfn0O5nopkmQyf\nO6plUeDez/z88v9PsobWmIR+1pWuLyHXWS6+oIZvy9BBhELOjMFNe8nt/SmkEKGKrwvctPlf4ovZ\nNPEUhYR7C4QXJqGuJ+6LMgwFEWG8vTG4dQNmkeVr9sP3bxj6IMJgH52IHvzkIk++pdQIEUa68VKQ\nSeiLCKuYV0qxUrg7GoY7ohgRYQyRArk4VECEAUQKhAgiLI07pQqIsDfGIBaIsCvBAhmG4YiwH8EC\noYAIgWBE2InyGOSMNFb1V8zMXxL9XCTKBSJc6QjffqTS6GI/FgXyFqdAdSNct7GXykNxAqO6EZ53\nJU6LMfhjmL9BEd0Y7SJ3erSNPteZT7A6ZORR8e7o07va68fe3/7cXw1H5SLsebC3+1YJuw3OoVyE\n4djRsVArwv7XPCSHtwpFqHPXQblM5W3LqkqEOgUCC1UiDGQ3W+w22F2JCAXHoP6OPvyI3pD88r9i\nRqHAMbnwzTipz0tqMUkeofKur1kmL6ntL3mEOjSTu4I475I5wmQ7fQe3PGJn4mRd5tLemLEoUOr2\nTOxLaivLGaFmgVLJLfCS2kCZT0dx0SIVweNaDooT4yLNMThZb174Bp/cgHvvuIT/q3X0m4R8pJKm\n848Yt0Mf0i9CPlJpT+CzF91uhx78GGKuCSsfU/M9Ybjn+N9Y53F4S+vGTJmPVJLAIybCexl8n/8N\nvz0jUqDIZsTSmoSfmtaP550+wq4vJcmT9XbP/6432O6fcIua/+qFJBHiPMagmjwRJjimdmiDAgXl\nidBO/6OGZoEJjp4XESEQLFWEdsfUnhusOQZHdgt3L++nKHKY9j9eUltTtgjtXgzFS2qRLUJrvKQ2\n07/ovIQR5ltOXlKbW8II60j2kUr5jp4npbo7itHxRypVvg+pKWeExW95T+weB7sNvkXOCHGg5o6u\nLG2E7GojHgd9aSPEAeUylbftIUQIBMscYcFj6qbNx0HnwVHetj4yRwh9NZ8YXEgeYbVj6h6vx6Fa\nmckjxIHwMjdjq1ZgqxBh+K4mQu1xKBjbnvwR4kBUmXsF1iyTCKGiZoHN/RO4z+PLSUYKl2EK2yCl\n0FuZjt/DXnYP6KxybHsKPSLHy//rPXjrP070KAUOIi4FNxWahMfe7ATruxeFd5p7FS+w1Ynw6krX\n3ktuwaXgnhJ3R1npuZCXkrIEB/JPwnH5H/zYlfm+y362hUvBY8kfhb1ToMX/8/2DMAy/wlv8VljP\nk0NORI/ln4Rrx1nmvjXaHwW+lTnC8ytd8Nbo5ucL3v6hg8R2RtoI71x+dqOvcCl4Us67o8PAK2C+\n9PTjRoFrCSP0uTkS7NGnJbgUPC/t6WgnrxdPUawR20eyPViMwU91C4Yy96SahBT4Hb6lNFaqCPEF\nvqU0XJ4IGYPXVf6W0kBJDlEUGIJvKb1FnkmI/pJ9S2mUDBEyB
tVM4Ul9yKKsDE/WU6AstQ871ZQh\nQsAaEeJZDMO3iBAIRoRAMCLE4zgjPWYT4TD8ervC9OvF4rLWuEXPo4ZNhLDmNQw7v8rHKcLFe/eA\nJ/R/nV2SV8xA3+2fIpWGWYTjMFys4+KDP4GvhRwmnE5HgUdFDWq/CLky9KV8eybwVNkvQiAZLpTR\nm+DtmdhNMp6Equc1MBN+UDCOELguvMBmHSF3aEwp354JYRwhcJHCGGzuETIMTSkMQ5ECm3uEwHd0\nCmxEiCgKw1CEfYSckeJTUmOwJYgQ+IhagS1HhAxDU/3PSAULbDkibK21RoVwlSZCWOo5DDXHYEsT\nIbfarA0/Hv0rNAtsdu+sRzKLNmp+S2meCPkIEzvr9ar5LaV5IkRiV+LUPzSrb9+n9B9xjB5dKa9v\nKWUSIsDTx0qvbylNcnd0wm1SfXsFPrFwFvtDtghhqvJ1RMIILQ5+ZW3G9miB+vtDwgghq/K4O5Aw\nQlZa08Gl4L3rJT731rJFSIFe+qyX+BlptgihqeeloHhya6kiZAxqUlgX5TLzRKiw0ljrdik4p5zc\nWpIIKdBLyHrJlpkkQmjq/6ygowwRsqiaYtdFdu6t2UdIgZpCLgXf0izTO0IK9NJ5vTSTW/OOEJqU\nLwUFyzSOUGRRscC6fMo1QlZak9qloODcW7OMkAK9qK2XWpmWEUKT8qWgMr8IWVRNsuuyOfekhqFZ\nhLIrjU2s1xlOEbKiyvYGTsjGLIgPQ6cIgZRsImQM6tOZLV48IqRAXKR8RmoQIQUiN4MIYURktmyS\nHYbqETIGkZ50hBToSGG27NF8QY90hMCjFAps4hEqH1NxgIX7iHSEwHNExmDTj5BjKp6gU2DTjxCm\nlI+eUgU2IgTCGUSofEzFAc2FUxuDzSJC4C6CBTaXCDWPqfCiWWBziRCmOHqeYRMhy4krZMdgM4oQ\nphSOnsoFNiJEeuIFNq8IFY6p+MLmwg0zIVul42/0BqCo+XR67mPa9Mdgs4twPKbqP6z4yObb/N7+\nzFsuu4pZhH2Me4DF+rn49Oh5PUuXAhsRLpCfrOMsrZfM5mgxd/tBblpOx0fDSLfpZDQGG5OQ0ddZ\nh+OdV4HNdBK2Ox5o8ou198zE9WW1W9Nyk5AzTxF7j/9DcSrzO2xMPj3mMfqsnYnTcQy2CpOQ0ZfD\nmclpusSZI2T0VZBgfS3H92Tv9IP8YCTVJOTME468J2H7GYaMPvjyjpDRhwSc3k84N74PbWyPAmHN\nbBJujj7TZ4eAkc2NGa76kJXBDDmTH8MQvnQnITddUIRihJx5ohShs7iLo48zUpiSmISMPlQWPD3u\nzY9hCEcxk5CbLsCk9+h4+syTYQg7/SYhF37Apn4Rkh+wyfUF3Hv40hjYyRYhYCdhhAxDeEkYIeCF\nCIFgOSPkjBRGckYIGEkbIcMQLtJGCLjIHCHDEBYyRwhYIEIgWPIIOSOFvuQRAvryR8gwhLj8EQLi\nSkTI+4mhrESEgDIiBIIRIRCMCIFgRAgEI0IgGBECwYgQCEaEQDAiBIIRIRCMCIFgRAgEI0IgGBEC\nwYgQCEaEQDAiBIIRIRCMCIFgRAgEI0IgGBECwYgQCEaEQDAiBIIRIRCMCIFg/wALb2FRV5JXAwAA\nAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKZ0lEQVR4nO3dXXbiSBKAUTFndlTe\n/wraa9I8qIfCQgIsZWZkpO49/cBxVbsA6SP0B9zmeZ6AOP+JvgNwdSKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCIc1u22vv38k+XG\n489pT4Qje1vX7TbN8zTP233Sxn+j7wAVLXXN89+frAJ7/COimISnpdrIW+beqr17qPd73eV9H5ZJ\nWMFq+oRabWo+e7yz9xS7ufuXYBKWsEyQXsfHi6Luc2/vvt96fVAjMQlLuK/m3ayyq2m23H79k9Uf\n0YxJOCDbk7mIsIJlP6ybqUjnRHja80beNK0PQTYM0hhMR4RNvD1GWYgCMxIhr8zz7ABpbSJspf4w\nNAaTEmFDNTu83W4KTEqEEEyEbdUZhrfbbTYH0xJhcw518JMI0zMGsxNhAMf9eeQC7hhLhwcm2HO9\nxmB2IuzL2wn5nJzN0exEGGZzo/RAToeHKp2w8CIVjKdqhyKvyoGZMNZsFiIchCOueYkwRo0xqMOk\nRDgUHWYkwgBV9waLd6jq2pyiGFCpkxZLfsvvebxNWQ7QtdbsoOjhf+g++jYvDNj8OWeYhMM6MA/f\nNmYq1iBCXo2+TbOPyy/K5mhT7U/Qv/4Xz860pV6r0Dkm4eA2N0p/O/pe/Pbl1/29ze+ZhO0EXqd2\n/6cr7s5J8SiT8BLuJw8rvgrYUTzKJGzE5drsccUMBBNhCxcdg6vvTnX52w77hNTxuHNoR/Elk7C6\ni47BTb62cYtJWJ63HezyYrRFhL/mA9EoS4Rv+JzPgx6/dcMz9pJX6DdKDTHDkD0OzLxSsBwfPMEe\nEUIwEe4qvgE5z7NZyDMRNlX/i+t/Wl2zQpdEuK3ecZR2HS7XqSz/6bBjIgwgCh6JcIPTCbQkwhhV\nhqHxmpMrZtaajcGlw4r/lGtWkhBhpJJpbAatvQxsjv6QdG/wtirQaYlURJjecjXc7fEN7MtpCZKw\nOfpXz2Nwa7CtPzt0uf+3aer0MbBDhDlsvThs/Kjn1xH22Bz9V+gn865vH9uhk19SIuxCqcMo3jCV\nkQinqYOtuOdz94cPcOownYvuE/a/mtq0vI5hI3ydWYefxVTwQrZSX5dNG4NEOMbHMenwmgZZTkVW\nuPHW2vEe0ZBGODBTalVzSIMQg2yOHrYKeLCtuMEezucOfEBzoPQRrlay365zw6+m4z3AT7ZW3j7e\nrp6T9BEWN95aO57Blk7ufcKTY3DxvCs42M7hSA9nyP3/3BGWosOB3X66/7yf5yTxdleRMVj1F/bm\n2CPq5xzs6/u/V9Tre9vDUrZPuOsiO4d5v+nteeMl6p6c1MWzeUClqfX8ezpZ4UopMtZCnpN6/2j4\nIjYJf7jC9Dv/6AZ7lsIfTsoDMy133vrZfT8v7ze9jdT8s5QRVjX8kVKexS7ifBE2GINDdljnm95a\nPCdtxmDgIs4XYRsDVNeAZ6mIZBEGnspLvcKNvU9VStQiThZhS6mre5L1wFXjl4+QhZ4pwvZjcIwL\naOp+7cxor1YBDydThFxN3he+X+n6ZP2L65IaL568a0PtMbgIP99dVuOH00WEX9/f99vfX1/32y+u\nIBtmeQ+j+Ip7ndfZRhHeM/vnz5/pZ3X3H/6rv7ryvsa3GYPjabzE203Cx9J+VPexwbZ5xlNwAV1n\nDE4tI1ym37H87tp3mDf7kDF4eAG9PiDZcim0X+Ixk/AM8/BDX
T1DJ9+1WHWhh69OXRyY6Vb44slo\n8zxblqcxZImnjNAw7FzBz2KqPQx7WJHi78FhDZ6+HpbQxdW+TOrxF0YtblfM7FJgh8peU9bJIk4c\n4WCXLBbx/M3b2V1hKSeOcKq5hDp5jfzE6jt9x1tjV0u51ELv5xMuUx6YefTb/fUXyy9LddPP0lb3\nevmSwzwP5YjBjsylj3DTydK6WsBPj2WeOjsH2EDtC1OvcsVMPc9LqJ+EDjj/mbZzuW/87UebMxYh\nRohwynzV4qYSHw06YIcrZy6R62cMTsNEOFW7ajGXx0c/3s7h5iZPeELnjRPhnrzftXDeFTo89kvu\nt3tY1kNFmPqqRY4ZYBjmvvf1BC7XCldmDTUMp5rf/xOiizvRp5G+e0iH9/9r9ZMe1v+hNke5uAP7\n/z3IfdlaVfM8rz4LJ6/xzlgsu4Ir8zvR93qbzdE3vr6/S30mwCcGfv84e0zCq7jC2xGSEuEb//z5\nM8xGKX0S4XvDdGgY9snR0StavT0v8J4wOTDzuTZHaNp/cM6PIFd/1brRhAj70tenV622Xa0qddgc\nvZbfRa66JkTYkdpjcPn95980TFkivIp74avqdvcJ9+K8/331FiLCq9udhJs7hI9Xgo93VXgQER6x\n+rrF/h3Z0BVYKyI8KEt+k6tGu+eKmYO+vr9rXEZT/IoWBfbPJDyoxiRcalk67PSd44/vidJ2ISLs\nTqkUa81A7ZUmwk6dTNFWaCIWVQK/TVGBuZiECTxOxck1LsPxkpnP68FoDKZjgWW1maICM7LMcntM\nUYFJWWwjKHhqkfZcMTMInx+TlwjHocOkRJje8/fXSjEXEeb2fDBm+bx3HSYiwjHN8yzDLESY2Otz\nEuN9CcyoRDgyHaYgwqw+PDW/dCjFnokwpV9dHDPPRmLXRHgVOuyWCPM5fI2oy9r6JMJkXKU9HhFe\n0eN26XLblmogEWZScAyqrh8ivKjn4zTOZETxGTNp1N4btKcZxSTMoUaBTlp0wiRs4e17GqIOeOqw\nB453V/fJEGtfqe8164dJ2IXXjXlz4NjsE9ZVZF/Om3THJsKKXN3CJ0SYQ9lhaIewKyKsxRjkQyJM\nw57hqERYhTHI50RYXr0Cyw1DE7UjIoRgIiys/mXW9gxHI8KS7ApygAjzOTkMvVL0RoTFWLk5RoQp\nHa7d/mSHRFhG/2Nw+cq0zu/kNXkrUwG9r9y3282XaXdMhENbNj5/9nc/riPLTtgcPavfMbi8V2Ln\nvjnf2I9eVyDqWM3Afl9BrsQyuJKt9xHaNA0nwmv4/87h/p9bE8J46kf3Lr+Hv2hliOF5H8j9QMu5\nU/lWicZEOIrH/b1znyFjJDbmFAVry9kLJzCaESEb5nl2IrEZEbJLh23Y+h/I6QMzO7/V0Zq6RMhH\nHK2px+YoH1FgPSKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKEYCKE\nYCKEYCKEYCKEYCKEYCKEYCKEYP8DyPVEhG3c588AAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAI3ElEQVR4nO3dW3KjWBqFUeioGWXP\nfwRVY6IfaCso3YzQgc1/WCvyIR8ctizx6VwAeZymaQBy/pN+AHB1IoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBPh\nXsZxTD8EahBhe+M4juM4TZMOWeOv9APoylzdNE3pB0IloyOmiVf5zUNi4hFRhpGwAaXxDWvCr9yW\nf6++wMqQXxkJN7L8oxXzqI9tyM98lTeMhB8w+rEHa8K15tFsW4FWhrwhwrW+DEmHvCJCCBPhBwyG\n7EGEECbCzxgMaU6EECbCjxkMaUuEW2wO6ddrTbkgV8wcxNU2vOJdebuVY5r8eM9IuCP5sYY14Xbv\nV4bfXGvKpRgJ2zMA8hFrwm8tV4byYwMjYRt95Of0SYQIG3Ds8g0RbnTbkpnz0yGbOXQ+NI7DMIw9\nfsRo9cdfl5FwheV5iGkahuHpoTqfsXAc8ykRvjXnt7orHbKBCBduI96tos9z0iGfcsXMj3Ecpun/\n/7671ajizUreOIJECGEi3EXFwZAUEe5Fh6xkY+bHcinYaHVkk4Y1jIT/Nm/MtPx+BcZD7xRZItxd\niQ4JEuGP+RTFPnTIGyI8iA55xcbMdd3dCEKKCIdh2HcueirL0fh2E1bu4TAMIpyNL26MaPkjsjuQ\nr+/AIk6EXfv3mU93YJ2TCNNj1E7W3YR1+2icPp+EIkTYqddFPS4LBx1GifAIpzi+f1sW6jDl6hH2\nc9i9uvB1xbLwZpqmy+wTn8ilI+xnd36Zzu3/H342x2y+jl2HR7pihFf5tMIuf6keXSjCn/Tuk7MW\numMwPFjnET58WOHwdFm0a4cVC9fhkbqNcM2CaPkHJGqPh7vckdzk2/C7HiJ8uiuxPr+b8h1SUw8R\nDusuwF5MTV+WdrvhqGqKFNRJhK/WMM/WhMP7s2Vtt0wLD60cpfObem8f5/tpCO7BvVk+DfP/PTFt\n9RPh1x+c/fgNv+pwHMduMu7l9zipTqajs506XD+ffHptdAcz0sfZ/qarcXiuhwiXh0jzw+J9h3dj\n3dMvq73p+kL8t+lp/6yHCPd2V9HT4e6j71BR81nGBk8/m6P0szrrIcIDru1Yrg+3veo63Ob9W14H\nz+rQR4T7uXuBv3yxO7hRaKc5/6P1b3kddCjCVVq9zK7JfOOjef7yFaneYfkI9zumd3pdm3fYwUbl\nhl2Wu/BKd9jBecL0dsHnWi2u5jORrf+Gzd2POCjvDf3cncite31FBxGW1OTInqap4nvQnW9GsD46\nrB3hMTOQM89zih52r2z4Xfbo8OCrncqvCXdy5vDu7LwcOuDTyb/VZH244fRvKyLswU4dHvxO9P28\n9NMOg+EtFZ6OvrqKpZb39yjc/j+Ov+zlFJ2XNux8/bx0/DEtNHkM2xSOcGm/Q/CA0eDXB37bAn3a\n503RDht60+G4cIbwlgpPR/c7U3TwNOzVPQrLL1j9rao+Ca08PSqGc1/qXXsk7GOH+tHTG5GXH+r7\n5ubaVtuDX36HDT9xp3npcO4Ch+oRDjt3eNho8Ovp++VQeevz1UPb9iQ8Ttj2TnG/p7fWzZyFp6M3\nFWcgj950uBz3Vv5O6+elby6VLn0tWCH9PMWPh8u2A+hu07X08
/Pq8W++VLqt23c+4EecWQ8j4ezx\nbdsb+fIZ2HxOrI+TkGfW2xPxOBHd/GJ3c5Q0mZw3fzYOmHFUeQXLb8zcedxR2LxL0fRxhX1/LHaz\n83xC/UxHl0pfxNTW+UeD8z/CvfUZ4fCsw6df1mV4O6l1JUChtruNcHh70LzZl+eNJh3WOpN+gJ4j\nHBptD9a1x2iwrcPHJ7/QSLW3ziMcyp67b2KnX9iVAG31H+HMi93Wmqn+8NvTvl+HtfLuP8Jar0ch\nZ74SoJb+I7yu/T8prclUv1WHdRcdIuQrTQ76zR32sdl2ughNTq5pw2bP8CK8cofQ6SJsq9zrcWVN\nNnsqXlt3ugit1Jsp+Bw+vvprVnrVT0KeLkIubuX1hkNHJyF7jrDQy8DSyU9CNtdzhNR1qZOQJ32I\nrU4cnfO3Y6Vz3o7cXG839V7Ltg/rrmO+Rfv7b/Lff/5p8nh20m2E53//28Xef6+wpr///Dlzh91G\neBXzuNfL0LefM3d40gh9oslaTz+sm2dO2+F552xfzicvMR1dXqI9//+wP29NOycdCb90iQKfmjs0\niSjl1AfrN1fWn/n3gqVOTtb3cUsL11Q7Qh+axma3TZq///zJPpJ6ERr0aCWe3+zUa8LhZ1koPJoz\nEq4yt3fdrU52Fs9vdroIH/+WiwLp21kifLXFokO6lzy+V670REjfjh4JbbHAneMidCELPFVjpmdG\nSsf6vIAbChEhhNWI0D2+dKxGhNAxEUKYCCGsTISWhfSqTITQKxFCmAghrFKEloV0qVKE0KViEbqM\nm/4UixD6I0IIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihLD/AT+AgSW9sgGvAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAI4ElEQVR4nO3da5LbNgKFUXJqduTZ\n/wqSNXF+0FYYvVqkQFwAPKdS5a5UW5YofgL41LwsywTk/Cf9BODqRAhhIoQwEUKYCCFMhBAmQggT\nIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQw\nEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR1jDPc/op0C4R\nnmue53mel2XRIa/8N/0EhrVWtyxL+onQOiPhKdbRb1ugwZBXjISFGQDZa7a6lPJJfusIWesZ0Qfr\nRAG7Rj8dcsd09Fui4kt2zBx3O/yw62/ZQ8MdI+ER9r5QkJFwt8fDD3sZDNkS4W4SoiwRZiiZGxEe\nUSQhHbISIYSJ8CCDIaWI8DgJUYQIw5SMCL/yZULHzrlhMM6YyXDODTdGwm8dGAy/P+eGkRgJqzIA\n8sgGSRk/btrJj1dEWMyrDuXHe6aj57Lzkx/ZMVPM3R4ahx/4kJGwvMHmnz5KzibCwqyy7CXCb22n\noGt+I3U40mtplgiPmudpmuZn085149C6y4dEuMf2zJhlmabpVWc65HMi/MCtvT1R6ZAPWUueOVTd\ni0fqeAl3/eQ74jjhg3meluX3f66d53wihDARns5gyHsirKHHDm0QVmPv6IPtpmC5tdDOUl4R4YN1\nx8wJdMhTpqNV9TIv9WFRkwj/7bRh8KaXDqlGhAE6ZEuEEGbHzMb5c9E//05zW1x3I3NrT29sIvzH\n/PqqiAH9uRRrta3OVLkyEV7JZ5diOZRSmQivYc3vg65uN8jRYTUirC2zcn9wY+I/vyi82kT42xU/\n+F/foWMyGFYkwov5+A4dkw5rEeGgHs9B/3izcGtZlloHbq7r6hFW3h1faWDZdnP7+ei/u15VosPz\nXC7CP9H9/uPu+Jip11M6PNUlInzYDpqebgrZBHpDh+cZPMK920Gndth74To8Sd+rxfdu24QV5qVV\nIzzh5gCcZJyR8OnOiFe/uf45DXyIrOsnfzHjRDh92t67zcKbETqkE0NdT/jqbr3z/K87+n78aCUv\nvR0p6e1SWX923cU3horwlV3t/fsvlulwvIuDhntBSUNNR6fXg+EXD3hwXjr29xY+7ik9dEIO0zRe\nhFO0w/eXI4y9nTnoy6phwAinE1aIN/3sug5opA6Lf9iVcrskMv1EPjVmhGfY9vPNBXg6PEPXk/9u\nnuiP6pzMUepTtqNV5KkWTp358aOwl4U8yEh46jpx914WeV9dInTMkJP/QSKso+w72u+pmJWf9t7J\n//Zt6qJDEf7g1Lew3w7rODb5vwuv/Q6HOVjfxv6B/Qru25jneT03aCTH4rk7y6Lx7x0YJsKOlepw\nWZZpmhv+xN/ny+Grow5HiLDOZOPseWmhxzl9VYvMn4+9qF46HCHC8zS+LfFUs6taRBcdinBAJ69q\ntVfi4eel3Ue4fYdaW7h7vb9E6PbzJ3tfTlrVqk0NSh8NarrD7iPcOm/hVlz5fv6F9cqsp31utbaq\nZbXcYfcRnrdwIxuEj3tK13Hv2C1jmlrV4prtsPsIp4YXbhFPbwhw20V56/PVK+79U+kK89IRIpxa\nXbjH/HjYcHuQ4NbnmxW1yNKouTxPPhrU3LoxSITTWPPSH29XtffMmGNLY95YlqXB1feY7Z68Fg5B\nDXXuaMGTBlMbhHc/v/8/T//i6wfffX+Aq90cIGWoCKcOT96tq
cj9AWou0ou8d6NFOBXqcNT3vsj9\nAU7t8CLhbQ0Y4dT/vPRUt027709D6XrJtPP8x9kxc6fIfpox9kM89f36N8x+mrgxR8LVsfHwm5s4\ndaHgCGBeWsTIEU7POnz6a8OHd56yHV4nvK3BI5w+2yV4wTe+oFId3k1uT31Tmqp9/AincrcMHcBJ\nK9/hDh/fjqbyqOMSEU6FdgkO4LwXf+BkgMn5ANM0XSfC1aXe2vqKnAzw/nGGdJWXeqk39aUq94cp\nNfO/zn7Xa42EVFBq5n+d8VCElNfUccj294G3GGHxz7+LfKAO6UCHNQ91FNFihJyi2xvu/9jhruoa\n/ERuMcLrbAzwoaerxIF5Zpsnu7YYYVl6/q3zhfDY4Ydva/vnA4wfIcMY9XwAEdKTIc8HaOV5PCq1\njNpZ1pTS/vkAuwx7Ue+qkaVMWes4drsH3OrY4/zv77+LP729Bo/wWg5/c0WHDod3569fv+IdinBo\nt2+u4LV4h+1G+P0tTK44F7375go+k+2w3Qg54uk3V/CBYIdNjxVfDmWXGwm3J6atP3d7qtqlDDsS\nXq7Ap9YOzU7b1vqa+s2dSxp/abAa54yZi9/BiX51H2H7l2zSo9tOmr9+/Tr73+oyQoMeFVTIb9X6\nNuH0Z7NQeNRkJPzH2p5dndRXbSRsMcLHb1NSIANrJcI3s00dMrbkyv35Zp4IGVjtkdD+FbhTL0Jn\nscBT3UzzzEgZ1bAncEMvRAhhIoSwbiL8/m4X0KZuIoRRiRDCRAhhPUVos5Ah9RQhDEmEECZCCOss\nQpuFjKezCGE8/UXoWgoG01+EMBgRQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFC\nmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMh\nhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDAR\nQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh7P84BHI2cxE30AAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJ/ElEQVR4nO3dXXKjSBqGUZiYHVXt\nfwXtNWku1KGhQFZZgsw3f86Jvqjo6Sljm0cfJAitt9ttAXL+k94AmJ0IIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiZA/rOua3oTpiBDCRMj/\nret6u93SWzEdEUKYCCFMhPzLsWiKCCFMhPzLEEwRIcuyLMu6Lo5FQ0QIYSKEMBHiWDRMhBAmQhZj\nMEuEECZCCBMhhIkQwv6b3gCqeLxf/rEGc/w3hIhwNLvHU9yWdVkOpW0vDLpImCbCjj1N6RCUwFrn\nnLBvHss0ABH27XbTYfccjk5p264TwjQRdu/DYai9ZjgcHYGguibCQeiwXyKEMBH2aneRkH6JEMJE\n2D23nfVOhF0S3khE2Dc1DkCE/VnXVXgjEWHvLIx2T4Sd2X52ks9RGoMIIcwN3B1YNxfjjcHxiLA5\n6+H+l21s2huP32jY6+S++7/4rY3EJAx4enjJtLymdskwHInV0S7dbrfjcSydEmGvdDgMEXZMh2Ow\nMEOCZ71tmIR963IY3t/6cf+nu40vQITd67JDNkQ4Ah12TYSD0GG/LMzMaxdtjav/j7NBCzMbIhzH\nfRjuWnoxHo//Zb27cLS34e6n0ZyZb2U79Mmk3zAJB+SFtS8WZoby2Sh7DM+CqzvG4PdEyB/tWWWt\nT4Tj2D0D6uO/5/oOjcGXRMiyHMK7tkOD9TURDuL8oxALHYjuNkaQRyLk/0qfHHogwFMiHEGhndsi\nTR0iHM3JIK89OfS88J8QIXtFF2k4csdM90pMm6e3ob67Vbs/GIPfESF/9+6t4cuzu8OLbNkQHKb3\nbX8B4NLzruNfvv1f3/1Czgm/YxKO4/K9/DgAVVRCeGHGUcoZFWbLhWebt9vNb/spq6PU4+lqTyUj\ndJJwRtGzwddf6wwdHpmEECbCXh0vqQc35i2G4U4sQsei59W5l6XEb0qHWyYhGV6BH2IR+hVcovQw\ndMBSQehivWccXOf8fZ7d6fRk+DvumBlBoQ5baHuT2/PwWtjIkxIRGoMFjDQPt8fX23vmAptShYUZ\nkra9bT+f4vHPXw3wdsfqERqDxXy8O64b239ZZ66eL6j3DqsfjiqwpNcHpd/tqdmD2Ps1w5Ob0PXR\nuIWZ0ex2x88WEvvdoXskwgFtO3z3U5mKbdQrl9xA0+8wFOG8jskF9+CZO+xvi/mh1w+nWNKngkux\nRbruOqw1CX08cnX9vs1iNlUi9KE8Oc22V25H6O6g1MX6YfW1I16rryuHFmYIqHA89ME8PPlMx4+J\ncEzNj8E1ci/oWw8srqZKhNvl5/v36cyQ8o4HpW2+MNWahLtv/pJblehTzSndZnU7uYUZjxkppvlj\nUf5gdRTCohEahgU0PgYb37yI9Oqok8MJdHTJLiId4bIsvd3f0LLUT7LNpf9eNBDhp9wYGdTF0n8v\nmojwJzc3vH4TgFlanx/4VZqI8Ojd9910d89uCRWfCjP7j/parUTofTdMq5UI7858DPrgw/DpQ3D/\nZAx2qpUIP/vV7sIbrcN3HoLrMkC/Wrxj5sz+1Ncbyf7inYfgDvWNT6aJSXhmfB2n32jz8KXK4c3z\ng62piQi3Pvg1j1Pd6yfxPDst3H3Xg/wcJtNchJ8Z4eTw6ZN4WvpslP5+pJ3InxPuHhd91a95kHOk\ntz4bZZjvejL5CK/y9G3UE+6Rt9ttvm+6b+EIrx2DOrwr8RYxx
6LlDHJO+HD+bDB2a/LxSTxT+v31\ndf/DP79+ZbekmmSEdV5cn2b5Yjwm1xuv+0Jdv09znvzukscYhZZknv5tZ+Zbv0diV3VY8ydgEg7i\n6fS78H6AXnS4ycsyU353sYWZcmPw7tp3G865wHPX6QtQR8a5RFGapX8KyURYegx+97X4QOWzwdmO\nRReT8C0e0UgJmQi3p1jGYGnbF477nxt8Kfn99TXhDLwzCd/T6TDscZvnEYuwwnpjoTHY42Q9vnas\nqzJbkZyEM6/7x/34jRk1zHwsusQPR2fucF3XmuOo0wPpGeTXLQodNHaxJFNnI4/vFm7tttIuflnl\n5BdmZh6G9d139aZ2+MkLXFqIcCnQYbUJs/vz8d8sL5dAqqxOtZUcR01EuJzYHdeNy7fqB1/97//B\nfQnkaZ/L9AcCxuDS0bsovttTj7/Cuh+Jvh81u838yYaUe5eGMdiFhiI87os9fjrFdgnk4fjwtGnz\n8JlqRw1FuDx7cuG7f0P9w5u/Lv1ve3ukuNvGEsMw1flbHxjqcHRpLcLlo90xfk71osPtgs3r76lA\nh2udB5SeGW79vmH6Qs1F+FSbn8a8/bLHpf8XFwO+294L98jKe/aZj9OixQifPrkwtTG8dv7jtAzD\nVi5R7Nz+lN6cerq+YvHWlu/eztbvd31eoxHO7Pwe2cvnZuvwrsXDUXpcnbrqaVpTHfjcibAbQy79\nTxve1uzff8saf2DxhU/rqvngrwY5J2zax6tTfZ1iTX5yKMJGNf7A4stn18wdinBkfe3N03YoQt42\n4WlbUSJsUUcfG35tkHMOQxG26Nop09fePGGHImxPgfcgXbg3V7icMNuxrghp1yQnnyJsTLG34l4y\nDCe/ql6I29Ym8vE9YqGHaM0SuQhbUv6JFD/p8Jjc7sE/j/cBltjCCYlwdq+TO6pzy/U8Y3ARYVuq\n7Ha7k8N59vVmTfR6w4WKTqqpxuBidZTPTHIZvQ4R8qFCHc42BhcRcoZ5eAkR1vXu5zZNZsIxuIgw\n7/G5TX0yDM9ziaK64XbZz64cHi+TzDkGFxEGPP3cps696PDnn2k3LRFyjW2HH9wMMO0YXESY9/hI\np4F2wZ+HV3pLujDvyw8lHAfai9I6fWDx5Sb9timn8WcWN8jhKBebsKKTXCekIXNedRQhbZmwQxHS\nnNvt9vvrK70V9YiQFv3z69c8HYoQwkRIo+YZhiKkXZN0OOO1UWiKSQhhIoQwEUKYe0fpw2OF5p9f\nv7JbcjkR0o3x8rtzOEo3fn99DXnFwiSkGyYhUIQIIcwdMxBmEkKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAm\nQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhh\nIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihLD/AUoh6j9DFltqAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAKIUlEQVR4nO3dWXLjthqAUepWdtTZ\n/wriNek+sFthSIniAODHcE7lweW0bZnmJ5Dg9Hg+nxMQ53/RLwBGJ0IIJkIIJkIIJkIIJkIIJkII\nJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkII\nJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkII\nJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIIJkIINnqEj8fj8XhEvwqG\n9lf0Cwgzt/d8PueP5w+gvBFXvmV++5+EAsaK8GtphkTKG2Wd+5Tf9vM6pLD+V7jj+R35X5BczxFe\nyG/1zzpeONSjz/VsZ+pl+/n979Pl8qEqva1kyWc+dUhuHR4nXM2yTPf27p7Pp11EsuowwlnCchzQ\nJ6tuT1t7Pp9pm3kNiZBWtxEmpD2y6ipCW4y0qKsIoUUihGAi/MImLrmJEIKJEIKJEIKJEIKJ8Cgz\nNGTST4QioVH9RAiN6jPCVGd7Gl0poM8I5ysenHhNE/qMcPpzKZMOqV8PF/W+rt/dXsjrunjq1/Y+\nz6c7yry9w9rN39T+IZk0u2I9Ho/P41uO2z1d/lrY12CE827egZedZEhcPTdmkiKpNRXh4fwWX3Fx\nSNy5SantUtJqZ316PE7l998vPTEkHk+0mUVH3dqJ8J4caRkSSaLi1eh1iC/dK/w0JP75Oad/UNiQ\nuFo4NzYTCFfrccLlWpVuDVsdNkxyf+6p/JCYZ+EQpdYIs8kxz1nFKQHnZ62oxHARzpLXEn+rfPk1\nq9tzR0M4cZwLxpqYKSb7kGhipiOVRdjRypRxL7GjpcRU1+ZoX+uWa6k4qJoI+yrwRYd8VUeEnRbI\nfSNMdFWwTzhAgSnnacZYXK+P44/95Bd8nLDvhcspO+cPzlv1va4qgx6sL6zvdeiO7aD3ScfLMDLC\nXpdpVo9pan2RHQ9vdZin1w7DfqUul+Y+97k5dTn1p3/Z+kLYsjlKUfu3Bdr5B8vvEH+6fFIxEfb3\nZnZEr1tTlx3fLl3pbMrUSNiM1te55etPMpR186ZWx8H6YVw+gaazA9bzOX1Jvk8HS8ZIWK/Rjllf\n08F4GPDqW19k9+0vgf0pinaXXtZX3u5imYyE9Tg4Pdjo3GDuSJoeD0UY4LXGXJsetGn60sdCKB1h\nH0stiZuLoun3fpbMjoa5308fc4NJNP1+JMK23e9w+dXzx9vPzB/U1nvT4S0VjbCbpXZfwkHswrda\nFfX1q+drGJ/Pc1+1+Jf+7ntMzPTg4JTpp/vXzXUtP7kKTEFZibATO1Om8/NUpzMtLe+yv/g+/95g\ncfWZWK2PtCIMlH69WU6ZnnrQzWpTc2vZm3udplUuwtbfrlpx+Wj+TofLCZuzf8NMf/eeVicjYYdO\nXha0/ng50C0/s/OFB/VUTkIOUfDe1w3UI1bTtg5svmUk5KNXh6dGr/2LP5Kf+9rB6CpC9vzZNP2+\noh+/YeH9c187CG9JhHy3c57q5Ys/nPv6IsIwba1+bw9+TPcu/tDhrNwisLg7kGR3brUm2C41O8oJ\nSe4Ns72QcoSnvuwQIQG2HV46Db2Tbu0TEmO7Q3hkF7HLm18V/QU6WF6pLE+8nD8e81TMgzuZvd78\namYkDDNmdSv7o9mp4x/tdijCMJ+u4mt2XbouyfGPdjsUYUXaXIXSuH86W7sdijBSkpOku5Hw4Edb\nKTpEEayptaUB1452rBQ+bmkkjLdzzR7XXNg0vfyctvtESJ8OdnjqJiCZiDCAgxNl3L/4o4yiEbY7\nf0Wj7h/8KMBIWJphsLD6p0xLz466y8g0Df7rx6i2wMkhCggXE
KHBkJLqn4YwEhZV/wpBeTERGgzh\nxUhY2uC3cmArLMJhB8PXyY1j/vqFNbH9HzkSjtbh6m5/v3/9kZYAbzlYX8jbt5vn8nl/1b9hk0lw\nhIOcyLY6c2r9+0pxbEbCoqTIVhWjUN+D4cffzlmkmbWyXjlEkdfeejDf3MLEzPCqiLDXadLv78Tz\ns3ClOLYqIpx67PDEtpAUx1bXRnPl130dl2Zv5NVk+wukvFZ2CKfaZkf7eLpAsgJXN8qnU7Vsji71\nt2kKO2qMcPrdYfSLOGB7Cmjrw3g3Gvor1LU5ulT5g4re7r4qkAtqX2kq7PDT7FH6Ak3MjKHekXBW\n1Xi4k1+Wn1fJr01mtUc41bEq7ue3va0lHNdAhLGO5Ad3iPAj+VFGSxEWfs67/CijpQinuMlS+ZFP\nYxGWf867/MitsQi3tnXcyXI7wyk/cmsvwq/PeV/eJuLt2Qg7xxIkR3ntRTgd6HD6dyR8cy54ptIG\nuWkVyVV6Avdby9X7+HPe5yqef2R6bXBZSxFe5tooajZEhEuCpDbDRQi1ESEEGyVCW6FUa5QIl5IH\nKW/uGDHCtBwb5KaBIsyxRbosUI1cM1CES0mCVCBJDBrhfQoklbEiTLVFqjoSGivCHATJTeNGeHlU\ntCFKWuNGeI0CSW64CJcD4NmEFEgOw0U4u7AhqkAyGXdlWnb4dSEokHyavL1FEsuQvm6gqo58vKn/\nx9fh0TBIclapj7ZBKpAcrFWHuAUw+Qw6O3qW/MhHhEe5Np9MRAjBRHhCysHw8fj9H8Mb9zhhpO2T\nFhmYkfAswZCYCCGYCCGYfcJzto8KPm3++teUjB3C4RkJgxx/thu9E2FZ5kLZsDl6Wp0RXb5dAOGM\nhEX9/JPl286Xd8weC1l+GKmJsJyfn8evX/8OU6mCXF1g9VwQZBNsjsZYBXnZ/iWOb+8eMNlkrYwI\ny0lS3dKpi4xP3c6DklzUe1olJ366zL8b9gmv2O5hFb4oQoE9sTl6xfa8meXYOE3TP/88pgzbn39+\nhAK7IsLE/tTxnKbp5yd9igrsj7/oaa8xcB70Ti2/OcvpapkK7JKR8ITtJuipncD1ccKT46QCe+Xv\netT9idC3w+Drk9NukArsmD/tIWkPRXw6Uv/z8/j7798frw7r+TN1zObod8UOBv769VzMsjrBZRTe\nYr9IOArdnJWhVw7W70lV4HIORoGsiPCjhAUKjx02R99LORfyZ59SjbwlwjcSz0Yuj+5b2mzYHH0j\nS4HwgQghmAgzMwzyjQgLEiTviBCCiRCCiRCCiRCCuYoiM09f4hsR5lTJ3RGpm81RCCZCCCZCCGaf\nMCePxeYAEWamPb6xOQrBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjB\nRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjB\nRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjBRAjB\nRAjBRAjB/g9DQ2ghcumtcQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAALtUlEQVR4nO3d7ZKbuBZGYTGV+79l\n5oc6RA0YC329e0vrqVOnUj2JjYGFBKbtbd/3AEDnP/UCAKsjQkCMCAExIgTEiBAQI0JAjAgBMSIE\nxIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAj\nQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAhR\nZds29SK4R4SotW0bKdYgQtTa933fdzosRoRoI3ZIigWIEM0wJJYhQjTGkPgWEaLctm37vl9/zpD4\nChGiFzrMRIToiKlpDiJEX0xNvyJCjMCQ+IAIMQhD4idEiKEYEq+IEKPFdzXo8PBHvQBYS2zv9t3F\nZREhBiG/T4gQ3ZHfMyJER+SXgwhR6NONo8d/DeSXhwjRGPm9RYRohvzKECEaIL8aRIgq5Ffv6dwa\nuLVtIYSf+13Yf+px2xpe2/ef9iiwCSJEIX4lohUiXE4aTvzz9SfxDyQ2BhGu6Gtd2xbnnPd9oi0i\nXNGprvB33Dt+mHmux4y0Cd6iQAhJdaehL/48naNyLaY5RsJFXQfDk7S3ODUN2SMkXiHCDpxc6Hgo\n6rhg83WRmZHWYzo6hJlp3GlBruPbw4hn4xVMiAj7MDk4mDkU4Bemo33Es6g1dnlmpJWIcBUMg2YR\n4RDxWuT1vTmACLu4XugI4WZ2+vVdgnYYBi0jQjTAaWENIpQaMhgyDBpHhCtgjDKNCNU6D4bPH0zY\nEDPSYkQIiBGhAQMvk8IgIrShz1xu2Fw0YkZahgitmGYPnuNVjMQN3NMaPAymz5h2yCeyfcXnjtpS\nU851CBq5cT8tOUF+RYTm3O7NOXM84abMPHYcr4K9LkWEFmnHtLcKRm+GxxTnhEZ52TWvBeZ8O0X6\nXwmSkdCc8RdUij0sallaa369jOORkBMMreeDxe1Y93VLTfM+zStuDrrR7fHV0dDxlZfXUracmcOj\nl5XQivWR8HRcvN028fC51GZzilPBW+YizKnuao4OvbyEJst5DdLFa+/BxFZvdVD0shN/4mL5xyyk\ni1XRinIkrLmycvtvXY+HLpbcxUK6I56OvtqiOQOm6w6NY8V2IlutOVu07Pww88FNsb/AwtvBp2f6\nwkzxNmA8bIuV2ZWVCFd+5934Lm588SZg5Zd6978aPuCC9140JyxwnS2oiXDMpnWxFX2NM/bXZysj\nX6mVkbATFx1atuYKHHxktHJO2E/bizTFF2w/PZqjYTCsccVr/AucP8JQset8/eVaboDsanzzkkOM\nIELJ68zZnAW/z15zR/L0Q4o7qi2yxEgYnTps/hESn4Ksf2StFaagQXpMXCjC8PsyQ9c1/nXW6ne3\nHtzkmKfTbo75I7yuX8lkOF2ewc+OZ/ID4ugItS9YvrrD7w8ECM5nqhOwsEtM/j6hhVX8SbxDyMXA\nOH45TxP4Ts9uZPeYPEL7vHSY6r3MxR9/XP8sEvOfEx7srPSTY5+2uXiD3X6Q6XE16/jhTLdJDI3Q\n1Cs35e9OZvfL5VVXKdOf1Lwr+/wsWjOPhNbW9Vf73y8Ltb/UPZp8LvC6AOlfu/4w/1nkZo4wZXDV\n34rLaHlI7ORVgSe3N2Dc/lubu4EsQpurw4g4JK6zemoKTD3PV83ucuMiPK2CRW6GKmZwatppk7Uq\n8OQapNmdTfkWRdcr3RMUvu//UjSoyebrVOCJ8T1hXIS322zMu2Sug3S74N+NKTCy/H7s0JHQ8opA\ngZpgRhZo3Ojp6LXDHmUuuzl7a7WxKDAlOCcc0+Fhmq2brqH4Z9WsonJjUeCJ5sLM4A6nYWENHfed\nH179c2GBZvcx2fuE1+vdvGnx1fX9Q9XbGMV3kDEGXinvmOnU4
ekzLObexhZe3Ex3kEnov5WJ8fAV\ny+8cBv93kEno7x1tXt30m9Z4h5HNO8hsHuKtLNBp1TRZUwZXd1se7y+V30FmcK+w8pv1pytXlRey\nCq7aeeRiSDyJF1fVS2GLraNCzXjY8NeuHfE4GMpZGwz154Sp05T9eU1d32nsuGRWrfZLT1OyFWH4\nduq85nCHuZmLMDx+Xj3hXTUYDE9v+R8rnLU9hMUIw8DPq1/abWxp0JPOdK29UWE0wsjOajJu399c\nbGCgM8ZohKYOVC58Pbpv23Zc7xq1UMhiNEIUuO0wd1Zv7QNtVmJxwGEYrHH6vOqs9sLltPD6w+nY\n2c0YCSf0Ir/bv2Nj11wHEc7m6xutnBlGRobBYHA6ameS4Nr1BsD4B9atQYyEM6M9F2xFyDDYECvT\nCyu/yoTmKNALWxGy17Ri9pPFcGUpwknvVASeWYoQWBIRAmJECIiZiZATQqzKTITAqogQECPCafFW\noRc2IuSEEAuzESGwMBsRMgxiYTYiBBZGhIAYEQJiRAiISSPctp//oQ/eKnRB9/EWC3znAZCD6Sgg\nRoSAGBHO5noSyGmhcbpzwvjdlsef0c1xeYbPX7NJOhLuO/l1dXz06L7vXCk1i+noVJ4/8JcObSLC\ntdChQUS4nNghKdpBhNN6mJpyimiKgQjTy6So8PYbYOjQCAMRQoepqQVEuDqmpnJEiBCYmkqZiJCN\n31zBN4TSoYqJCFGvyffycoooQYT4hVPE8YgQN+hwJBMRssmxMhMRotLphLD4/JBDoQQRAmJEiHuc\nIwxDhDPgV+ZdsxIhx91WmrxhiJGsRAgLOBRKGIqQPQBrMhRhoMNqzEU9shUhsCBzETIYFmMYdMpc\nhIEOi/QokA0xhsUIgaUYjZBj8CtMRF0zGmGgw2wU6J3dCAMdYg2mI8RXzYdBDnzjWY+QfeIBE9E5\nWI8w0OEHFDgNBxFCiCPgAD4iZFc4WWEYXOfDF31EGOgw0btAed4xv3U+fNFNhEgNHiWGPVeaX/zJ\nCh06m9WsMA17lq6BuHf2WyHp4498rtv/OvF29/faeu8Nlt3ui51WyMNzNX+6nMYm3u7+IozmPjR+\n8vCqG+6jOQ+1bVuTZ4tR5z/OlNvd8Uuacns8yG6jKo5Xa/VtQk3+7Xzb3ffrmXiKknp7bla2WopX\n5vvR7N3fv3sE3/vtyQwvZrJNkvoURvMU69dhZlrbVpVf8jjzbPRJXslMmyTKz+z5r2U+Tqu1Vz/K\nvXmuSTb6JC8jzDQ1fX/R4+trrxlRLZujwxleQ8r3VqkbR94W1XtdpTPPVrPQu2fxfRwJ80UY/HbY\naD9tNUdtsSRxMX7+3PXZvG70EMKUEQaPR8cOO+nDFHTMmomvKf3/zk/ndWf+o16ALo6r+Q62SrdL\nGde3NPwdm96Id5l6fHUuFzqf9a0yYID4eR5BfseL45LpszlHwsNxD77RDTNqqbQvP05H8Ym/w0aZ\nTnceI8eo8d7lMBimHwkPp1//Of1wkOOpHe4o6GeVCA9peKffFm3W5Gnudb1IP2xoWInTYTAsGGHq\ntM0KBsl/s9zfj1u9aFMZ8xaFX0tHePJpkKxvFb35HQYDEX4yYtYKhBCIMEeb6tLr9GSMhONBHL50\nvYfb9W7MRx4CYkSIYbrcNeN9GAxECMgRIQbp8VnaEwyDgQgBOSIExIgQXs0xFw1EiGFiM9N/xVIB\nIsQIzUetaYbBwG1rGOAUDPfinsxzOIFN1yHruclbz4/gHSMhhrr2k5PT3GeSUx1RYM110GN/u+LC\nDLqhwDxEiD74QItsRIgOLgUyDD4gQrR2HQMp8BERoqm7ApmXPiNCtEOBRYgQjVBgKSJEI/RWigjR\nB8NgNiJEBxT4BhGiAwp8gwgBMSIExPhVJlTg2zVaIEKU4mtPG2E6CogRISBGhIAY54QoxdeeNkKE\nqEB7LTAdBcSIEBAjQkCMC
AExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAEx\nIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQ\nECNCQIwIATEiBMT+Bz0hovad8HLhAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAALPElEQVR4nO3da5KjNhSGYUhlR937\nX0F6TeQH04zMVYCk75yj96lUZdLTsTHwWtyMx2maBgA6/6gnAOgdEQJiRAiIESEgRoSAGBECYkQI\niBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBG\nhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBEC\nYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgR\nISBGhEWN486fx/Hj5yLbSTM8sX0hwsrGcZimYZrU0zEMw3Vdy8Tu9olK/lVPQDiG19m5rvQNYTWx\nNt4rukOEpS0rsuEaF7sTu4SabqPSZz1sjvZltam5lfa2bEdTYFVEWNm81lsaFU+KWg7YWJre+MaJ\nd7nG2m7bFXy2cWRtqYKRMDL25VwgwuYud8vQGSIMi2HQCyJUqD8YUqAjRAiIEaFIzcGQYdAXItSZ\nprFCh+M4UqAvRAiIEaHSVHow5Hy6R0QYBwU6RYRixQfDehxNqi98lMmHnLWfYdApItTLGWFyAmNz\n1Cki1CsVzxwzHbrDPiEgRoRiZccujp14RITR0KE7RKhUaReODn0hwpjo0BEilKl9JJMOvSBC5CLp\nSjitpNHshF6RJ5rzmx8n/TOK4GR9cG/O4C9DX/q/LynSYSnMSoH2a/DdZ8wZ7uiwFEbCLmSOh7tD\n3/ljZv4yTvBm1ppwADl56lc5cU+bdxgJu3Zr6Du03LSKFB9hJGxKvh+1TECVLUlSfISRsC91d+TS\nrzUkxWyMhO3Ih8Gm2FHMxhUzqIMCs7E5WkyRW1SgQ0SY631jhjZHl9diZHr6RoRZDPXzXrq31mDP\njeCvsE/YTo+fLZojn//p7bVnI8JrBYfBHjvEFSLsA+Ubxj7hheJ7g9M0tT6Ftnq+dMuQ/TQDiFBg\nrqDR+r/7TM3aI/gMbI6eCXVQdNZ+u3SegcFmY1FEeKhqgY0OFq6GQS4lM4kI46JAJ4hwX4MN0bqD\noZ0Cif8KEQY02ikQGYhwR7PjMTUGw/XEU6B5RLjW+Iho6Y+2f0z8ekiESUQYx7ZAC+dXuFTnEhF+\nMLLiPuB3ykGEQawKpElHuGztL/sr7uYozs62Ht9c7w4RmrD9nG3eJZ87pVGgOx1FaPweMBzI7Fac\nCL3fA2Y7+j27fyebo+4EiTDkahfuBWEfR0f/kt97otQFNPIXglsiRPhyGEzXV/nqW2r0k7+QxTwl\nRibGpggRph4sbAvr6+ruE0OsbdFpmizMZLPcR5gOg0X2DMOsLtZeSJsh0eOo6z7CIqytr6VYe11V\nh8Q5v+UpTL3wc76PjhYfBmcc5a+q+NezbR9t+Q5GFwuRkfCP1Tu0tTHkMZsvpNSQmI5+u8/iYkh0\nHCF7g969ieQov9WjuTgm5DjC2uwvvEyWX8jdSMZf56Pf3g/t3oXcx0bzVqW9we2jedmvuGT8hVzu\nJd7ajTyuelo+emJnbvg+MIMwTg6lbPPLGTmPjq4tP7P
zrmRlOm6pNwzuPqadpfWSixeSJrc7+t16\nFQcd/vm3kbnBSLij51MUmYNMvQk4ye/Zo20/jPI7ElpZyv4irD0MboVpMucQSM7LbDbbizz1b3J/\n/7z83MihGn8RtrEKL0yHg6UDEi89OOQ7jlO6FWpksTqLsP0wGEmpOdZ43V2ea/WkTydgZxdxdW+R\nxjhPeCjqNTRFhJkb0zQ9GlNLsj4Sbi+AWH7eeLfE+8DL9B+btLNHE+H3z8/RX/18f6f/eXR42vX6\nFIORHaoAGs3Epbr/vr7ePI5qqXtf2+pNf6VHXu38DzXfdn/vMTnOu4vtl3O7kfBlfjPefZ9ijl1T\nnbRod2Dm++fnZCvUMu/ZVz3i1+YITeXLA+ZZNL+/13ueQ85GwoHB0J4wS0T1QjhFccH76uXxxt7b\nHcLYXEYY5iRVbc0KrLpEWr4JSlYtx2/zDcYohsGbT1dmdqWXyAxNIvy8aKb1Qnc5EiKHxw1RC9oP\nho4jrD2zvA+D7ae97BLxPv/zOY4QBvndXRdeJ+w7wnozq5+3YZuaHyBV3kvB+gXcl8KcpAqj+Dtj\n+EXsPsKtx2sAn1Qs5eXcm+d/s6WwWmHaL/0IEW4/Ba+dHhX5h1PriT0YRoiwLC8LezXeL3dS8TDt\nWdrcYWR1KlKy9INEGPid8nNjKb25w84vT5svvseuZpcB5AgSYb4itxur6uhmAv1YDu3UGAxX9zVN\nDyOp3sfjRJh5UO5yLlsYUd9MgJ07+b2xiqRIh6ZGv1ScCAeT81ciRodDobMdyyNsb5UyHN/KrSX9\nu75ByuVR7qkj7Rw++2KCk6Fv9/stiNCWGDezCdbhkH0Y8zK/wUyBQ7DNUcSWs0OYOfqZusDV97Wj\n9UzT5PSOOKkwO4dDUuA4jqujmrPl59sCV3/1bOO2HiI89N/XV4AO45lb2naYk59NbI4GF+n0fbo5\nenng9Gi71NowODASnms8GFpYIYxL21uGxNXvnI9+BucwEV4IsFEaac9wa9lLHO5vfBp512NztBNj\nmJtwb4+Rys+2v+R1uhv7/vkpdfPiE1VXI7/r6FbOaylyDWMbcRZMAESYz/6F+PlCLRjXuI1qt9gn\n7AufkzKICDuyPaTxcbL795daT1b3iLBrOyNhOlQSZBNE2JGsfcL0F5YgqbEmIryn1Pd+t7d7seXF\n/0N7TRDhbTXyq33cMr3kMv3h8md2CIWI8LZ5MHQ0Eh4V/mSHkA3UCojwNkf5DXfH2PMdwsB3F5Yi\nQhPSm/wVfNhXW7k01goRWpHeduF9imZv74ctrmOy6GVCtQ7zsDlaBxHa9SzFugdaOTBTARFadytF\nLtH2iGXmQ06KFOgUi82T85tqsiidYsn5Y+oW7niPhefV7peHwSOWn28UGAC3PHRs927wcIcI3aND\n74jQq+3315KiU0To0nZX8Oie8LCPCEOZpokM3SFCf86PiM7fPEGKjhChMznnJKYp+JfABEOEYdGh\nF0Toyd1T82yaukCEbjy7OIZNU/uIsAtc2WYZEfrANaKBEaEDFBgbEXYk3TOc/8y+ogVEaF3ZYZDq\nDCJC04pviG6PlHIOQ46b/1bk4nvVDUxC74iwlpxBTFIppw2tIUKly1sYVnteOjSEY99VlNqXK75P\nyN3rDeLATHmc1sMtRFgYBeIuIjSNO1b0gAhLMj4MskNoExEWU6lABsPwiLAM42MgLCNCQIwIC6g9\nDJbbImWz1iIifIsNUbxEhIAYEb7SbBjkGGlgRPicrw1RX1PbFSJ8qP06TUJREWEX2JS1jAifcLRp\nN39voZep7ROLJ65xHD83YqnRJj5ZH9G88bkXHB0aRISx/Oa3+fE4/FZJh9awPKLbbJQOn01Cjgjj\nOhgVk79n6ZvAYojoKr/kF1kB9FgGzi0nAJ8uRzqU4zyhZ/P9Kt59DyhXpcoRIehQjAgxDHQoRYT4\nY+6QFNtjp9y51wd
m9h6StaIpZjd20GFLbI5iB7uILREh9jESNkOEgBgRAmJECIgRISBGhIAYEQJi\nRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEh\nIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAY\nEQJiRAiIESEgRoSAGBECYkQIiBEhIEaEgBgRAmJECIgRISBGhIAYEQJiRAiIESEgRoSAGBECYkQI\niBEhIPY/QK8oPcGlsEQAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAALQUlEQVR4nO3dXXajOBRFYejVM0rm\nP4LOmOgHKpTMrwBJ596r/a1+qE6lbGzYlsAYj9M0DQB0/lEvANA7IgTEiBAQI0JAjAgBMSIExIgQ\nECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCM\nCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIE\nxIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAj\nQkCMCIsax50/j+PHz0W2i2Z4YftChJWN4zBNwzSpl2MYhuu6loXd7ROV/KtegHAMb7NzXekLwmph\nbbxWdIcIS1s2ZMM1LnYXdgk1naPSZz1MR/uymmpupb0t82gKrIoIK5u3ekuj4klRywEbS8sb3zjx\nKtdY27ldwXsbR7aWKhgJI2NfzgUibO5ytwydIcKwGAa9IEKF+oMhBTpChIAYEYrUHAwZBn0hQp1p\nGit0OI4jBfpChIAYESpNpQdD3k/3iAjjoECniFCs+GBYj6NF9YWPMvmQs/UzDDpFhHo5I0xOYExH\nnSJCvVLxzDHToTvsEwJiRChWduzi2IlHRBgNHbpDhEqVduHo0BcijIkOHSFCmdpHMunQCyJELpKu\nhLeVNJq9oVfkjub85ttJ/4wieLM+uDfv4C9DX/rPlxTpsBSeSoH2W/Dde8wZ7uiwFEbCLmSOh7tD\n3/ltZv4yTvBi1ppwADm561c5cU2bdxgJu3Zr6Du0XLSKFB9hJGxKvh+1LECVmSQpPsJI2Je6O3Lp\n1xqSYjZGwnbkw2BT7Chm44wZ1EGB2ZiOFlPkEhXoEBHmet+Yoeno8liMLE/fiDCLoX7eS/fWGuy5\nEfwV9gnb6fGzRXPk83+9PfZsRHit4DDYY4e4QoR9oHzD2Ce8UHxvcJqm1m+hre4vnRmyn2YAEQrM\nFTTa/nfvqVl7BJ+B6eiZUAdFZ+3npfMTGOxpLIoID1UtsNHBwtUwyKlkJhFhXBToBBHuazARrTsY\n2imQ+K8QYUCjnQKRgQh3NDseU2MwXC88BZpHhGuNj4iW/mj7x8Kvh0SYRIRxbAu08P4Kp+pcIsIP\nRjbcB/wuOYgwiFWBNOkIp639ZX/D3RzF2Znr8c317hChCdvP2ead8rlTGgW601GExq8Bw4HMbsWJ\n0Ps1YLaj37PrdzIddSdIhCE3u3APCPs4OvqX/NoTpU6gkT8Q3BIhwpfDYLq9yjffUqOf/IEs5iUx\nsjA2RYgw9WBlW9heV1efGGLNRadpsvAkm+U+wnQYLLJnGGZzsfZA2gyJHkdd9xEWYW17LcXa46o6\nJM75LXdh6oGf8310tPgwOOMof1XFv55te2vLdzC6WImMh
H+sXqGtjSGP2XwgpYbEdPTbvRcXQ6Lj\nCNkb9O5NJEf5rW7NxTEh39PRqsJMSi0/kLvzxiWno9/fnevW/X7i14yum0uV9ga3t2Z2873L+AO5\njORuRcZPFU4xEsKEkyFxm1/O9PJo9Ft+aOdVycpy3FJvGNy9TTtr6yUXDyQt56iiW48iM2whRsId\npl4mG8scZOotwEl+j2/waC/RyFo2sRC31B4Gd2/ZyNp6r8ieUoNn4+gu3tz19t8aGQ8ZCfetBsNI\nY6O7R3ES5IObGpJnIH2PRPi0OIuwzTAYValnTPWStLrTUkOi/PQax2/W1xb1HJoiXD8bqwFQ/kCs\nj4TbEyCWnzd43Yq0W8jyp9LRT96hJsLvn5+jv/r5/k7/9+jwtOvtKYYA+8lGFr7Rk7hU99/X15vb\nUa1171tbs8PINW62zYET4SpuNxK+zG8W4NVXxN0zNn3+2ev+Z452B2a+f35OZqGWec++6hVNm+xQ\nNXpbUrVn6GwkHBgM7QmzRlQPhLcoLnjfvLxf2HscR4dz6XtcRig/puxFswLLrpHVYrd8EZFsWo5f\n5oVnMHrReBgs9XQtiz0Pgw0egvZMLJcjIXJ4n4iqtB8MHUdY+8nyPgy2X/bSk1Lfz38+xxHCoOKv\njM0yFJ4n7DvCek9WPy/DBk3Tzhkz9e9Udp6w9RO4L4V5kyqMCoNh8FXsPsKtx1sAn1QspdAx0kZr\nQX4VhQgRbj8Fr10ele0X34cRezD0vU9Yg5eVPY4f/y0/DKPNh6pXtylZ+xFGwiH0K+XnVpJelGHn\nl6fNF9/7shrMa96RiUs8zYJEmM/+hZmPLibQibTDGpfbWl3XNB1gVa/jcSLMnK5cPssWRtQ3CzAV\n+uJ7ldVgXrBDU6NfKk6Eg8nnV8J7h6n3D2R5ad696OiStPDFV/+qb5ByfZS7a487h8mp28Mw7Pz5\n99eynqWToW/7V1zy0JYYH5VyPR6uPsq0eiyXK2i+ouF8JbXdvxrMFDgEm47Cu9OPEWaVkjn6mXqR\nZSTcN02T0yvipFwPhivTNM3t/M5O14Ph5ei3/JX8FJkVIjz039dXgA6D+Z2a7lw8Oyc/m5iOBuf9\n7fvU8v5Ezk770bzU2jA4MBKeazwYWtggjFvyS+eWq985H/0MPsNEeCHApDTSnuHKMioub/rdmnwa\nedVjOtqJOBcO3J40I3+3/SWvy93Y989PqYsXn6i6GfndRrdyHkuRcxjbiLNiAiDCfPZPxM8XasW4\nxmVUu8U+YV86/5yUTUTYke0hjY83u39/qfVidY8Iu7YzEn6eKN1yYbpFhB3J2if8/MjQzg9RGhHe\nU+p7v9tbXcphyNkhpL0miPC2GvnVPm653D47hAYR4W3zYOhoJDwq/MkOIRPUCojwNkf5DXfH2PMd\nwsBXF5YiQhPSDwcUvNlXs1waa4UIrUg/mPM+RbOX98MW5zFZ9DKhWod5mI7WQYR2PUux7oFWDsxU\nQITW3UqRU7Q9Yp35kJMiBTrFavPk/KKarEqnWHP+WLuANF5i5XmVXnuTlega6883CgyASx46tv1g\nBDwiQvfo0Dsi9Gr7/bWk6BQRurTdFcz8hgYYRIShTNNEhu4QoT/nR0Tnb54gRUeI0Jmc9yS23y8N\ny4gwLDr0ggg9ufvWPFNTF4jQjWcnxzA1tY8Iu8CZbZYRoQ+cIxoYETpAgbERYUfSPcP5z+wrWkCE\n1pUdBqnOICI0rfhEdHuklPcw5Lj4b0UuvlfdwCL0jghryRnEJJXytqE1RKh0eQnDavdLh4Zw7LuK\nUvtyxfcJuXq9QRyYKY+39XALERZGgbiLCE3jihU9IMKSjA+D7BDaRITFVCqQwTA8IizD+BgIy4gQ\nECPCAmoPg+VmpExrLSLCt5iI4iUiBMSI8JVmwyDHSAMjwud8TUR9LW1XiPCh9ts0CUVFhF1gKmsZ\nET7haGo3f2+hl6XtE
6snrnEcPyex1GgTn6yPaJ587gVHhwYRYSy/+W1+PA6/VdKhNayP6DaT0uGz\nScgRYVwHo2Ly96x9E1gNEV3ll/wiG4Ae68C55Q3Ap+uRDuV4n9Cz+XoV774HlLNS5YgQdChGhBgG\nOpQiQvwxd0iK7bFT7tzrAzN7N8lW0RRPN3bQYUtMR7GDXcSWiBD7GAmbIUJAjAgBMSIExIgQECNC\nQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAEx\nIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQ\nECNCQIwIATEiBMSIEBAjQkCMCAExIgTEiBAQI0JAjAgBMSIExIgQECNCQIwIATEiBMSIEBAjQkCM\nCAExIgTEiBAQ+x8u4DBIXth8KAAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJl0lEQVR4nO3da5KbRhiGUZTKjib7\nX4FnTcoPHIKRxOgCvP015/xIucqOzah5aO66XK/XAcj5K70AcHYihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgI\nIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKY\nCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGE\niRDCRBh2uVwul0t6KdY0vngdEGHMmN/1ek0vCGFWgqNNE8v8k2+2xmYXrCd/pxfgRMb8rNMs2M4d\n4Zn8GpxzGlykLpkJd3R3zxMWRLiLDvY8TYOHEeHG3s7ver02td63shwn4BLF9toJ6X2Xy9DBT1GE\nCDc2TmjH/7/UJUJumAaPJcLtmQx5iQi5YRo8lgghTIS7sEfK80QIYSLci8mQJ4mwRQo8lYbuk+rS\nq3eixW46nbK3PhzOvaNNCD9vMb8670r94UQY1sHzFnxIhPtaeTZCfoxEeDRP+rLgxMwRxsmw6anv\n9sSMg8OjiHBHiysN9T5qHR7C7uhGZr1Nv1q81PDYBaIMEb5isc8272rWWz9zx/VqMjyACJ92ezHt\nlbWztVfI0A63rR2n5B2h42RYXONf+CHCQxXtsN4yz4w7IOMn3+YPYneUns0PAcZfNHid1lHKKza6\ny7niwWF/y9zOZdt6n2zG1icJ+1unW/PnebSHvbWQot3RDCdLd7XYZq7siLawjyrCJ7hWVmoafDRc\n0/LfJrf4rYN/UhHGmAxTVpKbT4yHDY1LFElVrlgU2li8tNcyv3QxHwgzYWN23hc1H27rjQ8yuy86\niPBHl57uBWVValNod3TNMXNU4zulJuq9iRDCRPjQYTNAy1NNy8vWDRFS1XwXfvPd+V+/Nv4LVzgx\ns3TwzRMtTzUtL9uoj9soTh3hn5vPZXvtr4L08ej/6SJ88EqK4fZKhCt4p/X9ffn6Om7cez4mvHvM\nML6V4sV3U+yl5chbXra5u4/+N3zF547OZ8JX91UWB4Rnmwxbvly5YupwGu5au6k9r2HjMMz/O/+t\n2z8+PH7kbI9PKZz36jsa6256poEuFGHnM+Ej94b
n4Yj1Mx92/47GmUKTYc/HhKNNXhe2+Z1lmaqf\nPiBu/E66FRXfDtd/hIPvvXzF9FxP3Q4n12uNo9xud0dvXnDw6V9Ycqd0/c1UP726v6ixvUKn1rqN\ncA9bjehBq8Xd79995bCwxBp814dL/v39+1M65mphnxHud0Red7387cUlL//zPvEjjMlNvR18pX7o\nNcJdfbhellunr9drldOMc+vDNM11w73pbpHl3oqtEM84YI15NaTMFxXe3R39+G8qZD5M0xCMj0c8\nE9hhU2KXM+Hur6S4u5VdORF3+yeP6HB+tv6zf67QNbe5+QneliebLiPcy+KLDQp8Ee92i1S0w+H1\ncTn4rMzQ3+7orpPM4i8/1ZFhRVU+5FNcrN/EhgUOlW9JYXNdRVhlyzcazzqyk0IrQ1cRzu06z2w1\nwBVvdGRzXUW4OBu2YYeFNqsM1carqwgXdjru2naAy02Gt+8rqLX8DeotwoonPEp32KBa0+DQX4TD\nDjulixsv9hjgUuvMna3G5dJ6mS3rMMJhz4ND7mrkxVlDwWlwOMkdM5/ccn3ANLiJ/zY040N0u/9z\n5XahW9bnTDicbwL87xnW42akBjdHLW8lV3Qb4bD1Tmn7A3z8dmd6vyCf6DnC4eMODw5v/ez/9Ovg\nWZCW7+Fufyv5SOcRLrw9Vxz4NWk//4Fxn/PB+8XPtRPeh/4j/GS9PH7L+ujs//MPBu7XoWlwJ/1H\nOFTbKV24+6LQ+XubbndcTzUfji9oTC/FR05xiWL48yrFM0UFx/XHs//zGenRK983f0FTI9Pg7biU\nuIC07iwR3vX8CykOttLhfN47dhl3f2nInX/ycXI9qbrxeE+BF1JsaqvJ4chJZj5Gr/6jRSfDdmfC\n3e7SrDdIbyv61tAnF/g216I/b7sRbq7i8Hyu1np5d1EfHTXc/aFq/byjE0V4WhXfVvzJTmk5Z4mw\n3NYxK3JyePFGybf/nnKT4VkiPLkabyveTq0O242w1ufYvhpvK95OofWn3Qg3VGUwdvXJh3DACn3m\nMTrFbWt8ruKtcFWWuf8Iz7yJnWzyIex5a/heY1Siw/4j5OTa77DzCE2Dw6Yfwh4rtDHqPEKGrW+7\nbn9iudX4MouQlzW+Tk9KLOTQd4T2c4ahmQcBHzhmjBpfE3qOkP20Pxk2Ht5c0xF+MtKFxmBHe06D\n275Fcr+k218TertjpvHNc2feuJNm5bmkDWtpP7y52hGe5PUH70t8Guvbweq3hu+hUoSSa9CG94Vv\nNRlW+fqQSYsRLgZ1/rbCxOLwgw2f/Sv06MOGYhE++TDbCYfkbPY7Gqyy8hwX4akeZuMli7fCVoln\nK8dFeKqPlU+83WHFaXBo/DohP2jtS5s+0P7V//20HuGZx+Yd05c2FVT6K0M+0XqE/GDxpU3Ffdjh\npFaQLV6i4AXTqtZLh28rVN2CmZC2fD4Z1poGBzNhb6bvcyq1Fi4srlg8+X/VPXdQYJtRbsPGJtbH\nff0exlrrjJmQAl69bbjWFX8R0qjFwWF2YXYlQtp1kq+FKXB21PV63nO9Xv/5/k4vxc8KRAhv+/X1\n1X6HIoQwEdK59idDEUJYjQhLnOOiWY1PhjXO4ULHasyE0DERQpgIIcxta5zOdJLm19dXdklGIuSM\nGslvZHeUM/rn+7udixZmQs7ITAj8T4QQ5o4ZCDMTQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAgh\nTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQ\nwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgI\nIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJ\nEMJECGEihDA
RQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQ9i8clWSIkMZ6twAAAABJRU5E\nrkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAJWUlEQVR4nO3d3bKiSBqGUZyYO6q+\n/yvouibmgCmbVreikrxfJmtFRUcd1N6NyEPyk+JlnucJyPlPegHg7EQIYSKEMBFCmAghTIQQJkII\nEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKE\nMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZC\nCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEi\nhDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCHntcrmk\nF2FkIoQwEfLC5XKZ5zm9FCMTYZgjPf6bXoDz6iI/w+ABRBiw5HfduG3oJyfCQ93kV5y9wzFEeJAn\n+c3zbHM/MxE219fot9bfEvfJDriht/IrNxheLlOp5RmXkbCJfkc/jifCnX2cnzPD0xLhboYa/RyL\nHsiMmT0NUiDHEuFuluPJ7G/Yjb3JgUQIYSLc01CDIUcRIYSJcGcGQ97lFgUr1/hdmDmQkXB/vQ6G\ny73B5Y+h+EAirMjh6Kk4HG3i4zloQ027YRsRViG/0zJjuKGNg2Gh/O4vzJhE2p6RMOZ64lciv8X9\nkiwXaeos4YiMhG09HAwLDX0b6bAlI+Gh+suP9oyEzS2DYff5GQybEWErN/f6RljPOmzD4eh+Lpdp\nmq7lrasb5Oa7B3C0YZ1+Z13X0zU5zOY7zAupw0j4qSW/zZuj5zjxExFuc38X+6w52ZvszgTuDXb6\neMEwHxQc5oUUIcJD2Xy5J8KjDdChw9F9iTBggA7ZkQg3WE4Flz87jQD9djjMMHiz/oNvh6ujG7SZ\nKeIyIwsjYVK/42HvbnZ/2b2hCF8xYXLlDEP38btFEYYZDOPiexYR5unwYE+qiwQpwqeOOhbtosP4\niDEqET5TPQu+tt6zpPYyIiyh/iBTfwk3KvhCRPijgu8WQxIh51XhWHQSYQX1h9z6S7hRzRdi2tpj\nNd+tI9W/WvulIsPgJMIbw295P/r3U6qm1YOq7I9aO3uEf6J78ET6Yza+5CZ+95SqsVMruzc5Y4SP\nnpD24L0Z+VMO7zylasj1UOdYdDpVhL4K+h+bvyvqzz8focOyyz9shOsJZ9cHNW34qX8dl7be+Kps\n2aveHj68eDClhsFp4AinDRM/77e9+/djjEHg8RcPXq1e3U+vc5D1UNLIET78ar03t73ln3W+/d0f\nFUyfHJR3vx4eqfByRo7woc/WeYvtr8cNep7n3j/kXHC1Dz5j5run9fKAVbq7MSPc/DUtb+jiI3/H\n0OG+yg3Nu2h3yLTXwczRB0UN7s/0eFxa8Fh0GvKcsOnG8f3JYWY4bfLIxt1/5UkNGGFr73b401f2\n1twrczwR7m/jF2UPecW/srJrW4SfuOlnwK+n50CjRdj6asG1vfXF0o+rMxgyjXiL4tDLHvM8f5nQ\nAHc+1ou//L3zF3S08SJsqNHE32UaStfqL3/lI46hIqy8op/r/fb3/fIvXyTHFqOdE1JEnzvDjKFG\nwqZafwhtvMGwjuKHSONE+OSeQS8qb8dbFN7OSxsnwrV+LznuuB0Hp8ep8S3jRHgT3r4dV
nsgwnOX\ny6XH2dWN1H+/xolwatxhR+Z5Pux+adnaL3+kF+S10a6OmoOyONV6eFhana+kf6n0wn3mZo1//wYc\nfyx6/1CYh4+Jefn00NZLGxkG75Pb8hordzjaSDjdDQKdjglbHhV3TfG+z8HcnGV88BsqbwZDnRNe\nDXBy+NMclLc+It/0hR8c/PzH9h/p5U0fM8Jpvw7rXBddnl98/wTH63j4cPJ0yw4Pu/bz4WrvZV88\nbIRTP+/BT17eu1+PRdc+7zfXFi88tT9694V0sQ2MHOG0x0rPnkU8vejy//9ueX01N75j1O+w6Knq\nvnY5pCx7Wr/dji/hsLWx1+nA7tfMdzT4SLi4+RT8uzvCXu75vrTXIFBqC96o8ng44C2Kl15uQAM/\nM+bLK/V1Ntzv1blpcZYIX67x7+9EDSn7vMZ9L03Xqe5GxWVqxyPSFrvsjw7YoFvcHyp4cphfgoN9\n/4i0MeyyP2o/La7JTdpqHZ7lcHTt5PktdnlkY1PtpklUm9hY9OpoowsA8X1eNe9OBLv/8U4v1ZRa\n8qIR0tpe+6NSW/NbisxGnE4VYXxdj6r1tLjh37iiEfa7fz0n79c3ikZIU20e2dhlhxWG2bNEWGFd\n19FoRbSYFneGN+4sEfKPlp/G/bLDYabpvuUU9wnPsDft1MtpcYdNBgiqG2H8FuqY2j+UYpdpuqd6\n9+tGSL9uEjrzNN0txo/wPDvU1w58NtP30+LOMxiOHyEp3/fTtMM6hbs6eiY1trm3dHr78S2lI/z+\nDaizt+Njw3focJTx3Z+dlto7jxxhqRXNN949OezreuyYEY599HJOzzvsq7obg0TY9Xuwp8++vakT\n9x1+dhek2iFS9Qif7P88H22Tsb6o6f7JFNt/tuzxUfUI1wx3m1Td1A52n1yphzut1Y1wvRKXv9dZ\na6WtD0dH9OSg9OafvfVLgqpE+GS/VWdlUcTNncPeN49MhM8PFdjT9QvWxlrDxefEveW4hfh411Vk\nTTGkClvXcdPW5pV3f7DsdS16N8/zX79/Z5eh9NxROMDfv35lOxQhhIkQwoOhCGGaoh3mLw1tUeES\nFjTSx0joAikD6yNCGJgIIazK3FEo5XqR5u9fv1r/v0QIjx2Q38LhKDz21+/fx9y06GYkrDPnnZMw\nEsJZiBDCejrAczjKkHoaCRXIkHqKEIYkQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQI\nYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyE\nECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJE\nCGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFM\nhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwv4H4hNi+X16j4oAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "LInArD_-eYur", - "colab_type": "text" - }, - "source": [ - "Let's now write these datasets to disk" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "lT7PxXreeYut", - "colab_type": "code", - "outputId": "53ec6ecc-0751-4ebb-ce8c-130e4fdce051", - "colab": { - "base_uri": 
"https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "print(\"Number of compounds in train set\")\n", - "print(len(train_dataset))\n", - "print(\"Number of compounds in validation set\")\n", - "print(len(valid_dataset))\n", - "print(\"Number of compounds in test set\")\n", - "print(len(test_dataset))\n", - "print(\"Number of compounds in crystal set\")\n", - "print(len(crystal_dataset))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Number of compounds in train set\n", - "204\n", - "Number of compounds in validation set\n", - "1273\n", - "Number of compounds in test set\n", - "45\n", - "Number of compounds in crystal set\n", - "25\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "collapsed": true, - "id": "f8NYSeGdeYux", - "colab_type": "text" - }, - "source": [ - "The performance of common machine-learning algorithms can be very sensitive to preprocessing of the data. One common transformation applied to data is to normalize it to have zero-mean and unit-standard-deviation. We will apply this transformation to the pIC50 values (as seen above, the pIC50s range from 2 to 11)." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "lKQfu5pveYuy", - "colab_type": "code", - "outputId": "260f065a-1fe1-4339-a9b5-9953bf6b0007", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 357 - } - }, - "source": [ - "transformers = [\n", - " dc.trans.NormalizationTransformer(transform_X=True, dataset=train_dataset),\n", - " dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset)]\n", - "\n", - "datasets = [train_dataset, valid_dataset, test_dataset, crystal_dataset]\n", - "for i, dataset in enumerate(datasets):\n", - " for transformer in transformers:\n", - " datasets[i] = transformer.transform(dataset)\n", - "train_dataset, valid_dataset, test_dataset, crystal_dataset = datasets" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.035 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.023 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/trans/transformers.py:254: RuntimeWarning: invalid value encountered in true_divide\n", - " X = np.nan_to_num((X - self.X_means) / self.X_stds)\n", - "/usr/local/lib/python3.7/site-packages/deepchem/trans/transformers.py:254: RuntimeWarning: divide by zero encountered in true_divide\n", - " X = np.nan_to_num((X - self.X_means) / self.X_stds)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.171 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.096 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.009 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.008 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.007 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.006 s\n", - 
"Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "shBrVTYGeYvA", - "colab_type": "text" - }, - "source": [ - "We now fit simple random forest models to our datasets." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "jU49euh3eYvC", - "colab_type": "code", - "outputId": "ea62e7d9-9824-4228-9aad-6c127a7107d5", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 765 - } - }, - "source": [ - "from sklearn.ensemble import RandomForestClassifier\n", - "\n", - "def rf_model_builder(model_params, model_dir):\n", - " sklearn_model = RandomForestClassifier(**model_params)\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", - "params_dict = {\n", - " \"n_estimators\": [10, 100],\n", - " \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", - "}\n", - "\n", - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score)\n", - "optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", - "best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'auto'}\n", - "computed_metrics: [0.7425808527431867]\n", - "Model 1/8, Metric roc_auc_score, Validation set 0: 0.742581\n", - "\tbest_validation_score so far: 0.742581\n", - "Fitting model 2/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.7799706904092787]\n", - "Model 2/8, Metric roc_auc_score, Validation set 1: 0.779971\n", - "\tbest_validation_score so far: 0.779971\n", - "Fitting model 3/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'log2'}\n", - "computed_metrics: [0.749754915514979]\n", - "Model 3/8, Metric roc_auc_score, Validation set 2: 0.749755\n", - "\tbest_validation_score so 
far: 0.779971\n", - "Fitting model 4/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': None}\n", - "computed_metrics: [0.7415502410625858]\n", - "Model 4/8, Metric roc_auc_score, Validation set 3: 0.741550\n", - "\tbest_validation_score so far: 0.779971\n", - "Fitting model 5/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'auto'}\n", - "computed_metrics: [0.7799675483004963]\n", - "Model 5/8, Metric roc_auc_score, Validation set 4: 0.779968\n", - "\tbest_validation_score so far: 0.779971\n", - "Fitting model 6/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.787823448678052]\n", - "Model 6/8, Metric roc_auc_score, Validation set 5: 0.787823\n", - "\tbest_validation_score so far: 0.787823\n", - "Fitting model 7/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'log2'}\n", - "computed_metrics: [0.7893109229756021]\n", - "Model 7/8, Metric roc_auc_score, Validation set 6: 0.789311\n", - "\tbest_validation_score so far: 0.789311\n", - "Fitting model 8/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': None}\n", - "computed_metrics: [0.7611984757001875]\n", - "Model 8/8, Metric roc_auc_score, Validation set 7: 0.761198\n", - "\tbest_validation_score so far: 0.789311\n", - "computed_metrics: [0.9998077662437523]\n", - "Best hyperparameters: (100, 'log2')\n", - "train_score: 0.999808\n", - "validation_score: 0.789311\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "jqjBgMxHeYvO", - "colab_type": "code", - "outputId": "950f98ca-aaeb-4386-e5b8-f302925fdd74", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 479 - } - }, - "source": [ - "import numpy.random\n", - "\n", - "params_dict = {\"learning_rate\": np.power(10., np.random.uniform(-5, -3, size=1)),\n", - " \"weight_decay_penalty\": np.power(10, np.random.uniform(-6, -4, size=1)),\n", - " \"nb_epoch\": [40] }\n", - "n_features = 
train_dataset.get_data_shape()[0]\n", - "def model_builder(model_params, model_dir):\n", - " model = dc.models.MultitaskClassifier(\n", - " 1, n_features, layer_sizes=[1000], dropouts=.25,\n", - " batch_size=50, **model_params)\n", - " return model\n", - "\n", - "optimizer = dc.hyper.HyperparamOpt(model_builder)\n", - "best_dnn, best_dnn_hyperparams, all_dnn_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/1\n", - "hyperparameters: {'learning_rate': 0.000140267028096135, 'weight_decay_penalty': 2.5361932372437012e-05, 'nb_epoch': 40}\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. 
Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/losses.py:108: The name tf.losses.softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.softmax_cross_entropy instead.\n", - "\n", - "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/deepchem/models/losses.py:109: The name tf.losses.Reduction is deprecated. Please use tf.compat.v1.losses.Reduction instead.\n", - "\n", - "computed_metrics: [0.769121617205685]\n", - "Model 1/1, Metric roc_auc_score, Validation set 0: 0.769122\n", - "\tbest_validation_score so far: 0.769122\n", - "computed_metrics: [0.9121491733948481]\n", - "Best hyperparameters: (0.000140267028096135, 2.5361932372437012e-05, 40)\n", - "train_score: 0.912149\n", - "validation_score: 0.769122\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "5vhsHoeLeYvU", - "colab_type": "text" - }, - "source": [ - "Now let's evaluate the best model on the validation and test sets and save the results to csv." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "VeINkC9ReYvW", - "colab_type": "code", - "outputId": "6e8fe05b-5bca-4790-fc2a-8986c37cc43b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 207 - } - }, - "source": [ - "from deepchem.utils.evaluate import Evaluator\n", - "\n", - "rf_train_csv_out = \"rf_train_regressor.csv\"\n", - "rf_train_stats_out = \"rf_train_stats_regressor.txt\"\n", - "rf_train_evaluator = Evaluator(best_rf, train_dataset, transformers)\n", - "rf_train_score = rf_train_evaluator.compute_model_performance(\n", - " [metric], rf_train_csv_out, rf_train_stats_out)\n", - "print(\"RF Train set AUC %f\" % (rf_train_score[\"roc_auc_score\"]))\n", - "\n", - "rf_valid_csv_out = \"rf_valid_regressor.csv\"\n", - "rf_valid_stats_out = \"rf_valid_stats_regressor.txt\"\n", - "rf_valid_evaluator = Evaluator(best_rf, valid_dataset, transformers)\n", - "rf_valid_score = rf_valid_evaluator.compute_model_performance(\n", - " [metric], rf_valid_csv_out, rf_valid_stats_out)\n", - "print(\"RF Valid set AUC %f\" % (rf_valid_score[\"roc_auc_score\"]))\n", - "\n", - "rf_test_csv_out = \"rf_test_regressor.csv\"\n", - "rf_test_stats_out = \"rf_test_stats_regressor.txt\"\n", - "rf_test_evaluator = Evaluator(best_rf, test_dataset, transformers)\n", - "rf_test_score = rf_test_evaluator.compute_model_performance(\n", - " [metric], rf_test_csv_out, rf_test_stats_out)\n", - "print(\"RF Test set AUC %f\" % (rf_test_score[\"roc_auc_score\"]))\n", - "\n", - "rf_crystal_csv_out = \"rf_crystal_regressor.csv\"\n", - "rf_crystal_stats_out = \"rf_crystal_stats_regressor.txt\"\n", - "rf_crystal_evaluator = Evaluator(best_rf, crystal_dataset, transformers)\n", - "rf_crystal_score = rf_crystal_evaluator.compute_model_performance(\n", - " [metric], rf_crystal_csv_out, rf_crystal_stats_out)\n", - "print(\"RF Crystal set R^2 %f\" % (rf_crystal_score[\"roc_auc_score\"]))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - 
"computed_metrics: [0.9998077662437523]\n", - "RF Train set AUC 0.999808\n", - "computed_metrics: [0.7893109229756021]\n", - "RF Valid set AUC 0.789311\n", - "computed_metrics: [0.5227272727272727]\n", - "RF Test set AUC 0.522727\n", - "computed_metrics: [nan]\n", - "RF Crystal set R^2 nan\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "LMDBBUtJeYvb", - "colab_type": "code", - "outputId": "c27f8711-bd89-4930-efcc-2556453a533c", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 207 - } - }, - "source": [ - "dnn_train_csv_out = \"dnn_train_classifier.csv\"\n", - "dnn_train_stats_out = \"dnn_train_classifier_stats.txt\"\n", - "dnn_train_evaluator = Evaluator(best_dnn, train_dataset, transformers)\n", - "dnn_train_score = dnn_train_evaluator.compute_model_performance(\n", - " [metric], dnn_train_csv_out, dnn_train_stats_out)\n", - "print(\"DNN Train set AUC %f\" % (dnn_train_score[\"roc_auc_score\"]))\n", - "\n", - "dnn_valid_csv_out = \"dnn_valid_classifier.csv\"\n", - "dnn_valid_stats_out = \"dnn_valid_classifier_stats.txt\"\n", - "dnn_valid_evaluator = Evaluator(best_dnn, valid_dataset, transformers)\n", - "dnn_valid_score = dnn_valid_evaluator.compute_model_performance(\n", - " [metric], dnn_valid_csv_out, dnn_valid_stats_out)\n", - "print(\"DNN Valid set AUC %f\" % (dnn_valid_score[\"roc_auc_score\"]))\n", - "\n", - "dnn_test_csv_out = \"dnn_test_classifier.csv\"\n", - "dnn_test_stats_out = \"dnn_test_classifier_stats.txt\"\n", - "dnn_test_evaluator = Evaluator(best_dnn, test_dataset, transformers)\n", - "dnn_test_score = 
dnn_test_evaluator.compute_model_performance(\n", - " [metric], dnn_test_csv_out, dnn_test_stats_out)\n", - "print(\"DNN Test set AUC %f\" % (dnn_test_score[\"roc_auc_score\"]))\n", - "\n", - "dnn_crystal_csv_out = \"dnn_crystal_classifier.csv\"\n", - "dnn_crystal_stats_out = \"dnn_crystal_stats_classifier.txt\"\n", - "dnn_crystal_evaluator = Evaluator(best_dnn, crystal_dataset, transformers)\n", - "dnn_crystal_score = dnn_crystal_evaluator.compute_model_performance(\n", - " [metric], dnn_crystal_csv_out, dnn_crystal_stats_out)\n", - "print(\"DNN Crystal set AUC %f\" % (dnn_crystal_score[\"roc_auc_score\"]))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.9121491733948481]\n", - "DNN Train set AUC 0.912149\n", - "computed_metrics: [0.769121617205685]\n", - "DNN Valid set AUC 0.769122\n", - "computed_metrics: [0.4772727272727273]\n", - "DNN Test set AUC 0.477273\n", - "computed_metrics: [nan]\n", - "DNN Crystal set AUC nan\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/metrics/__init__.py:368: UserWarning: Error calculating metric roc_auc_score: Only one class present in y_true. ROC AUC score is not defined in that case.\n", - " warnings.warn(\"Error calculating metric %s: %s\" % (self.name, e))\n" - ], - "name": "stderr" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "wjflxuMMeYvf", - "colab_type": "text" - }, - "source": [ - "Now, we construct regression models for the data." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NqEbvd2ZeYvg", - "colab_type": "code", - "outputId": "0259bdef-9184-4214-a101-64f12490857b", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 323 - } - }, - "source": [ - "#Make directories to store the raw and featurized datasets.\n", - "featurizer = dc.feat.UserDefinedFeaturizer(user_specified_features)\n", - "loader = dc.data.UserCSVLoader(\n", - " tasks=[\"pIC50\"], smiles_field=\"mol\", id_field=\"CID\",\n", - " featurizer=featurizer)\n", - "dataset = loader.featurize(dataset_file)\n", - "crystal_dataset = loader.featurize(crystal_dataset_file)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from desc_canvas_aug30.csv\n", - "Loading shard 1 of size 8192.\n", - "TIMING: user specified processing took 0.165 s\n", - "TIMING: featurizing shard 0 took 0.174 s\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/data/data_loader.py:131: FutureWarning: Method .as_matrix will be removed in a future version. 
Use .values instead.\n", - " X_shard = df.as_matrix(columns=featurizer.feature_fields)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.441 s\n", - "Loading dataset from disk.\n", - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from crystal_desc_canvas_aug30.csv\n", - "Loading shard 1 of size 8192.\n", - "TIMING: user specified processing took 0.151 s\n", - "TIMING: featurizing shard 0 took 0.152 s\n", - "TIMING: dataset construction took 0.219 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "dPEHZbTreYvo", - "colab_type": "code", - "outputId": "3cbf271f-db6a-4c1e-bc39-524c3d992765", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 119 - } - }, - "source": [ - "splitter = dc.splits.SpecifiedSplitter(dataset_file, \"Model\")\n", - "train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(\n", - " dataset)\n", - "#NOTE THE RENAMING:\n", - "valid_dataset, test_dataset = test_dataset, valid_dataset" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.056 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.039 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.142 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "leu2sy1HeYvx", - "colab_type": "code", - "outputId": "e42b9fe4-2b19-41a3-f23a-22062f278583", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "print(\"Number of compounds in train set\")\n", - "print(len(train_dataset))\n", - "print(\"Number of compounds in validation set\")\n", - "print(len(valid_dataset))\n", - "print(\"Number of compounds in test set\")\n", - "print(len(test_dataset))\n", - 
"print(\"Number of compounds in crystal set\")\n", - "print(len(crystal_dataset))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Number of compounds in train set\n", - "204\n", - "Number of compounds in validation set\n", - "1273\n", - "Number of compounds in test set\n", - "45\n", - "Number of compounds in crystal set\n", - "25\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "NmlQz-9ZeYv2", - "colab_type": "code", - "outputId": "67060adc-8c11-4386-a679-c7a871f84db0", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 357 - } - }, - "source": [ - "transformers = [\n", - " dc.trans.NormalizationTransformer(transform_X=True, dataset=train_dataset),\n", - " dc.trans.ClippingTransformer(transform_X=True, dataset=train_dataset)]\n", - "\n", - "datasets = [train_dataset, valid_dataset, test_dataset, crystal_dataset]\n", - "for i, dataset in enumerate(datasets):\n", - " for transformer in transformers:\n", - " datasets[i] = transformer.transform(dataset)\n", - "train_dataset, valid_dataset, test_dataset, crystal_dataset = datasets" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.032 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.021 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/deepchem/trans/transformers.py:254: RuntimeWarning: invalid value encountered in true_divide\n", - " X = np.nan_to_num((X - self.X_means) / self.X_stds)\n", - "/usr/local/lib/python3.7/site-packages/deepchem/trans/transformers.py:254: RuntimeWarning: divide by zero encountered in true_divide\n", - " X = np.nan_to_num((X - self.X_means) / self.X_stds)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "TIMING: dataset construction took 0.158 s\n", - "Loading 
dataset from disk.\n", - "TIMING: dataset construction took 0.097 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.009 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.008 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.007 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.006 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "BgB88N9leYv7", - "colab_type": "code", - "outputId": "d7099322-f193-401e-9e9a-242ee410a47c", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 765 - } - }, - "source": [ - "from sklearn.ensemble import RandomForestRegressor\n", - "\n", - "def rf_model_builder(model_params, model_dir):\n", - " sklearn_model = RandomForestRegressor(**model_params)\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", - "params_dict = {\n", - " \"n_estimators\": [10, 100],\n", - " \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", - "}\n", - "\n", - "metric = dc.metrics.Metric(dc.metrics.r2_score)\n", - "optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", - "best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'auto'}\n", - "computed_metrics: [0.23116057453507344]\n", - "Model 1/8, Metric r2_score, Validation set 0: 0.231161\n", - "\tbest_validation_score so far: 0.231161\n", - "Fitting model 2/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.24192711715556714]\n", - "Model 2/8, Metric r2_score, Validation set 1: 0.241927\n", - "\tbest_validation_score so far: 0.241927\n", - "Fitting model 3/8\n", - 
"hyperparameters: {'n_estimators': 10, 'max_features': 'log2'}\n", - "computed_metrics: [0.24437002800920515]\n", - "Model 3/8, Metric r2_score, Validation set 2: 0.244370\n", - "\tbest_validation_score so far: 0.244370\n", - "Fitting model 4/8\n", - "hyperparameters: {'n_estimators': 10, 'max_features': None}\n", - "computed_metrics: [0.2299690455806439]\n", - "Model 4/8, Metric r2_score, Validation set 3: 0.229969\n", - "\tbest_validation_score so far: 0.244370\n", - "Fitting model 5/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'auto'}\n", - "computed_metrics: [0.23705465248412372]\n", - "Model 5/8, Metric r2_score, Validation set 4: 0.237055\n", - "\tbest_validation_score so far: 0.244370\n", - "Fitting model 6/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.25524717935387475]\n", - "Model 6/8, Metric r2_score, Validation set 5: 0.255247\n", - "\tbest_validation_score so far: 0.255247\n", - "Fitting model 7/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'log2'}\n", - "computed_metrics: [0.29028610308758807]\n", - "Model 7/8, Metric r2_score, Validation set 6: 0.290286\n", - "\tbest_validation_score so far: 0.290286\n", - "Fitting model 8/8\n", - "hyperparameters: {'n_estimators': 100, 'max_features': None}\n", - "computed_metrics: [0.23957751231322233]\n", - "Model 8/8, Metric r2_score, Validation set 7: 0.239578\n", - "\tbest_validation_score so far: 0.290286\n", - "computed_metrics: [0.9478385687084689]\n", - "Best hyperparameters: (100, 'log2')\n", - "train_score: 0.947839\n", - "validation_score: 0.290286\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "qEhs3pUueYv_", - "colab_type": "code", - "outputId": "b5abdeba-a769-4b35-f02d-4e71a9661c47", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 717 - } - }, - "source": [ - "import numpy.random\n", - "\n", - "params_dict = {\"learning_rate\": np.power(10., 
np.random.uniform(-5, -3, size=2)),\n", - " \"weight_decay_penalty\": np.power(10, np.random.uniform(-6, -4, size=2)),\n", - " \"nb_epoch\": [20] }\n", - "n_features = train_dataset.get_data_shape()[0]\n", - "def model_builder(model_params, model_dir):\n", - " model = dc.models.MultitaskRegressor(\n", - " 1, n_features, layer_sizes=[1000], dropouts=[.25],\n", - " batch_size=50, **model_params)\n", - " return model\n", - "\n", - "optimizer = dc.hyper.HyperparamOpt(model_builder)\n", - "best_dnn, best_dnn_hyperparams, all_dnn_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/4\n", - "hyperparameters: {'learning_rate': 0.0005235973498873468, 'weight_decay_penalty': 1.3122752916546754e-05, 'nb_epoch': 20}\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. 
Cause: module 'gast' has no attribute 'Num'\n", - "computed_metrics: [-0.03593155027495132]\n", - "Model 1/4, Metric r2_score, Validation set 0: -0.035932\n", - "\tbest_validation_score so far: -0.035932\n", - "Fitting model 2/4\n", - "hyperparameters: {'learning_rate': 0.0005235973498873468, 'weight_decay_penalty': 1.1225205219411416e-05, 'nb_epoch': 20}\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "computed_metrics: [0.11464293191445063]\n", - "Model 2/4, Metric r2_score, Validation set 1: 0.114643\n", - "\tbest_validation_score so far: 0.114643\n", - "Fitting model 3/4\n", - "hyperparameters: {'learning_rate': 0.00041048311637740804, 'weight_decay_penalty': 1.3122752916546754e-05, 'nb_epoch': 20}\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. 
When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "computed_metrics: [-0.11855063006937927]\n", - "Model 3/4, Metric r2_score, Validation set 2: -0.118551\n", - "\tbest_validation_score so far: 0.114643\n", - "Fitting model 4/4\n", - "hyperparameters: {'learning_rate': 0.00041048311637740804, 'weight_decay_penalty': 1.1225205219411416e-05, 'nb_epoch': 20}\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. 
Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: module 'gast' has no attribute 'Num'\n", - "computed_metrics: [-0.11058929412292762]\n", - "Model 4/4, Metric r2_score, Validation set 3: -0.110589\n", - "\tbest_validation_score so far: 0.114643\n", - "computed_metrics: [0.6591412288586316]\n", - "Best hyperparameters: (0.0005235973498873468, 1.1225205219411416e-05, 20)\n", - "train_score: 0.659141\n", - "validation_score: 0.114643\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "1c-1CX5weYwC", - "colab_type": "code", - "outputId": "fe8b926e-ac8c-4e97-df5d-e28ee2d91caf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "from deepchem.utils.evaluate import Evaluator\n", - "\n", - "rf_train_csv_out = \"rf_train_regressor.csv\"\n", - "rf_train_stats_out = \"rf_train_stats_regressor.txt\"\n", - "rf_train_evaluator = Evaluator(best_rf, train_dataset, transformers)\n", - "rf_train_score = rf_train_evaluator.compute_model_performance(\n", - " [metric], rf_train_csv_out, rf_train_stats_out)\n", - "print(\"RF Train set R^2 %f\" % (rf_train_score[\"r2_score\"]))\n", - "\n", - "rf_valid_csv_out = \"rf_valid_regressor.csv\"\n", - "rf_valid_stats_out = \"rf_valid_stats_regressor.txt\"\n", - "rf_valid_evaluator = Evaluator(best_rf, valid_dataset, transformers)\n", - "rf_valid_score = rf_valid_evaluator.compute_model_performance(\n", - " [metric], rf_valid_csv_out, rf_valid_stats_out)\n", - "print(\"RF Valid set R^2 %f\" % (rf_valid_score[\"r2_score\"]))\n", - "\n", - "rf_test_csv_out = 
\"rf_test_regressor.csv\"\n", - "rf_test_stats_out = \"rf_test_stats_regressor.txt\"\n", - "rf_test_evaluator = Evaluator(best_rf, test_dataset, transformers)\n", - "rf_test_score = rf_test_evaluator.compute_model_performance(\n", - " [metric], rf_test_csv_out, rf_test_stats_out)\n", - "print(\"RF Test set R^2 %f\" % (rf_test_score[\"r2_score\"]))\n", - "\n", - "rf_crystal_csv_out = \"rf_crystal_regressor.csv\"\n", - "rf_crystal_stats_out = \"rf_crystal_stats_regressor.txt\"\n", - "rf_crystal_evaluator = Evaluator(best_rf, crystal_dataset, transformers)\n", - "rf_crystal_score = rf_crystal_evaluator.compute_model_performance(\n", - " [metric], rf_crystal_csv_out, rf_crystal_stats_out)\n", - "print(\"RF Crystal set R^2 %f\" % (rf_crystal_score[\"r2_score\"]))" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.9478385687084689]\n", - "RF Train set R^2 0.947839\n", - "computed_metrics: [0.29028610308758807]\n", - "RF Valid set R^2 0.290286\n", - "computed_metrics: [0.4617340106891408]\n", - "RF Test set R^2 0.461734\n", - "computed_metrics: [nan]\n", - "RF Crystal set R^2 nan\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "D7g92mUweYwF", - "colab_type": "code", - "outputId": "890443b8-87b8-4f5d-9187-60cc1c4924c4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 153 - } - }, - "source": [ - "dnn_train_csv_out = \"dnn_train_regressor.csv\"\n", - "dnn_train_stats_out = \"dnn_train_regressor_stats.txt\"\n", - "dnn_train_evaluator = Evaluator(best_dnn, train_dataset, transformers)\n", - "dnn_train_score = dnn_train_evaluator.compute_model_performance(\n", - " [metric], dnn_train_csv_out, dnn_train_stats_out)\n", - "print(\"DNN Train set R^2 %f\" % (dnn_train_score[\"r2_score\"]))\n", - "\n", - "dnn_valid_csv_out = \"dnn_valid_regressor.csv\"\n", - "dnn_valid_stats_out = \"dnn_valid_regressor_stats.txt\"\n", - "dnn_valid_evaluator = Evaluator(best_dnn, 
valid_dataset, transformers)\n", - "dnn_valid_score = dnn_valid_evaluator.compute_model_performance(\n", - " [metric], dnn_valid_csv_out, dnn_valid_stats_out)\n", - "print(\"DNN Valid set R^2 %f\" % (dnn_valid_score[\"r2_score\"]))\n", - "\n", - "dnn_test_csv_out = \"dnn_test_regressor.csv\"\n", - "dnn_test_stats_out = \"dnn_test_regressor_stats.txt\"\n", - "dnn_test_evaluator = Evaluator(best_dnn, test_dataset, transformers)\n", - "dnn_test_score = dnn_test_evaluator.compute_model_performance(\n", - " [metric], dnn_test_csv_out, dnn_test_stats_out)\n", - "print(\"DNN Test set R^2 %f\" % (dnn_test_score[\"r2_score\"]))\n", - "\n", - "dnn_crystal_csv_out = \"dnn_crystal_regressor.csv\"\n", - "dnn_crystal_stats_out = \"dnn_crystal_stats_regressor.txt\"\n", - "dnn_crystal_evaluator = Evaluator(best_dnn, crystal_dataset, transformers)\n", - "dnn_crystal_score = dnn_crystal_evaluator.compute_model_performance(\n", - " [metric], dnn_crystal_csv_out, dnn_crystal_stats_out)\n", - "print(\"DNN Crystal set R^2 %f\" % (dnn_crystal_score[\"r2_score\"]))\n" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "stream", - "text": [ - "computed_metrics: [0.6591412288586316]\n", - "DNN Train set R^2 0.659141\n", - "computed_metrics: [0.11464293191445063]\n", - "DNN Valid set R^2 0.114643\n", - "computed_metrics: [0.5419630023912616]\n", - "DNN Test set R^2 0.541963\n", - "computed_metrics: [nan]\n", - "DNN Crystal set R^2 nan\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "fPpZmZbqeYwK", - "colab_type": "code", - "outputId": "16590d66-2014-4aff-e522-3cfb446faa92", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "task = \"pIC50\"\n", - "rf_predicted_test = best_rf.predict(test_dataset)\n", - "rf_true_test = test_dataset.y\n", - "plt.scatter(rf_predicted_test, rf_true_test)\n", - "plt.xlabel('Predicted pIC50s')\n", - "plt.ylabel('Secondary Assay')\n", - "plt.title(r'RF predicted 
IC50 vs. Secondary Assay')\n", - "plt.xlim([2, 11])\n", - "plt.ylim([2, 11])\n", - "plt.plot([2, 11], [2, 11], color='k')\n", - "plt.show()" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZxN9f/A8dfbkixZkjZRSmGMkAn9\nSpJK+qZUivQtZQxCC6WipET2SPat8hUpSwlFkSzJOjGWlKQsyZQ9E2Pm/fvjnNE15s7cWe49d8b7\n+XjMw51zzj3nPXfGeZ/zWd5HVBVjjDEGIJ/XARhjjAkflhSMMcacYknBGGPMKZYUjDHGnGJJwRhj\nzCmWFIwxxpxiScH4JSIqIhXd16NFpEcIjvm4iCwL9nFMYERksYi08ToOEzqWFEJMRHaISIKIHBWR\nvSLynogU81n/noiccNenfDX3MmYAVW2vqm9ktF0wTyIicoWbqAr4LKstIvNE5KCI7BeRVSLyRKrt\nfT/LHj7vLSQiE0XksPu76BKMuDP4mbqLyC9ubLtEZFqoYwg37t/QAREp5HUsZyNLCt5ooqrFgBpA\nTaBbqvUDVLWYz1e2TxS+J9K8QkRuABYB3wAVgdLAk0DjVJuW9PksfRPba8DVwOVAA+AFEbkz6IG7\nRKQV8Chwm/v3EAUsDNXxgymrf28icgVQD1DgnhwMyQTIkoKHVHUvMB8nOWSaexX8tIhsF5E/RWSg\niORz1z0uIstFZIiI/AW85l4ZDxKR30TkD7dJqLDP/rqKyO8iskdEWqc61nsi0tvn+3tF5Hv3Kvtn\nEblTRPrg/Ice7l75Dne3rSwiX7pX8ltF5CGf/ZQWkdnuflYBV2XiIxgIvK+q/VX1T3WsVdWHMnyn\noxXwhqoeUNUtwDjg8dQbuZ/bQRGJ9FlWxr3ju1BELhCROT53K0tTfg8ZuB6Yr6o/g/P3oKpjfY5R\nQkQmuL+T3SLSW0Ty+6yPEZEtInJERDaLyHXu8iru1fZBEdkkIvf4vOc9ERkhInPd960Ukat81t8u\nIj+IyCH39yc+664SkUUi8pf79/aBiJT0Wb9DRF4UkQ3A3+7f04xUn+UwEXk7nc/kMeA74D2c34/v\ne+9yf84j7ufxvLvc7+cvIi+5f58pn9F97vJz3G2r+ez/QhE5JiJl0okv71NV+wrhF7AD58oQ4DIg\nDnjbZ/17QO8A96XA18D5QHngR6CNu+5x4CTwFFAAKAwMAWa7258HfAb0dbe/E/gDiASKAlPc/VdM\nHRdQGzgE3I5zYVEWqOyuW5wSg/t9UWAn8IQbR03gTyDCXf8h8JG7XSSwG1jm5+e9wo2pAFAESAIa\npPP5pGy/G9gFvAtc4K4r5a67yGf7ZkCcn31NBPr4fN8R+MJ93RcYDRR0v+oBEsDv77/AfqArzl1C\n/lTrZwFj3M/mQmAV0M5d96D7c12Pc+KuiHPHUxDYBnQHzgFuBY4AlXx+j3+5v8MCwAfAh+66C9xt\nm7n76ez+DaX8TVV0f+eFgDLAEmBoqr/t74FyOH9vlwB/49yp4R5vH1Arnc9kG9ABqAUkpvr9/A7U\n8/n9XZfR5+9+Tpfi/J02d+O5xF03Eujvs/9ngM+8Pkd4/eV5AGfbl/sf56j7n09xmgtK+qx/D/gH\nOOh+/ZnOvhS40+f7DsBC9/XjwG8+68T9D3
GVz7IbgF/c1xOBfj7rrsF/UhgDDPET02JOTwrNgaWp\nthkD9ATyu//xK/use5PAkkJZ93XltLZ1t09pkikAXARMx7kyxz1xKXCuz/a3Azv87Os24Gef75cD\nj7mvewGfpnxWmfx7eAT4yv3d/AW86C6/CDgOFPbZ9mHga/f1fOCZNPZXD9gL5PNZNhV4zef3ON5n\n3V3AD+7rx4DvUv3N7PL9faY6VlMgNtXfdutU23wOxLiv7wY2p/NZ3OT+PaQk7h+Azj7rfwPaAcVT\nvS/gzx8nad3rvq7j7jMlgawBHsrs7zCvfVnzkTeaqup5wC1AZZwrNF+DVLWk+5V6XWo7fV7/inNV\nlNa6MjhX12vd2+yDwBfuctz3pd6XP+WAnzOIK8XlQJ2UY7rHfQS42D12gUwc19cBIBnnajRNqnpU\nVdeo6klV/QPoBNwhIufhJGaA4j5vKY6TrNPyNVBEROq47d41cK7kwWnG2gYscJvyXgrwZ0BVP1DV\n24CSQHvgDRFpxL9X/b/7fG5jcO4YwP/v4FJgp6om+yz7FSeJptjr8/oYTvI89V6f2NT3exG5SEQ+\ndJtuDgOTOfNvd2eq79/HuSPC/fd/acScohWwQFX/dL+fwulNSA/gJLFfReQbcfqUIJ3PX0Qec5s5\nUz7DyJSYVXWl+/PfIiKVce6EZqcT31nBkoKHVPUbnCu3QdnYTTmf1+WBPb6H8Hn9J5AAVPVJOCXU\n6eAE59Y89b782Yn/tv/UZXd3At/4HDOl0/dJIB6neSLQ4/57ENVjwAqcE0WgUmLLp6oHcH7m6j7r\nqwOb/BwvCaeZ62H3a46qHnHXHVHV51T1SpzO0S4i0jATcaGqiar6MbAB58S1E+dO4QKfz624qlZ1\n3+Lvd7AHKJeqT6M8TlNTRk77GxAR4fTfzZs4n2E1VS2Oc5IXTpf69/8JcK3bH3M3TnPVGcTp23oI\nqC/OSLC9OM1X1UWkOoCqrlbVe3ES4yc4vw+/n7+IXI7TT9QJKK2qJYGNqWJOSVqPAtNV9Z+MPqS8\nzpKC94YCt6f84WdBVxEpJSLlcNpE0xyp5F45jgOGiMiFACJS1r0qBec/2OMiEiEiRXCad/yZADzh\n/sfL5+6nsrvuD+BKn23nANeIyKMiUtD9ul5Eqrgn2pk4neBFRCSCVJ2LGXjBjbmriJR2f6bqIvKh\n+7qOiFRyYywNDAMWq+oh9/2TgFfcz68yEIOTpP2ZgtMc9oj7Gvc4d4tIRfckeginryM57V38S5zB\nAP8RkfPcGBsDVYGVqvo7sAAYLCLF3fVXiUh99+3jgedFpJY4KronwZSr3xfcz/oWoAlO301G5gJV\nReR+cUYPPY1zR5ci5Q7rkIiUxekLSZd7kp2O83mtUtXf/GzaFOdzi8C5C6sBVAGWAo+5HcOPiEgJ\nVU0EDuN+xul8/kVxklS8u90TOAnX12TgPpzEMCmjn+dsYEnBY6oaj/PH+GoWd/EpsBanrXQuzgnb\nnxdxbrO/c2//vwIquXF8jpOgFrnbLEon5lU4HcdDcP4TfoPT3AHwNtBMnHHmw9yr6TuAFjhXsXuB\n/jidleBcxRVzl7+H0xkcEFX9Fqcj9VZgu4jsB8YC89xNrsRpIjuCc4V4HOcqP0VPnCaYX92fYaCq\nfpHO8VbitP1fitNWnuJqnM/yKM7dy0hV/RpARD4Xke5+dnkYp0P4N5z+owHAk6qaMnnvMZzO4s04\nzWXTcZvL3LuKPjgn2yM4V87nq+oJnCTQGOfucCRO38cP/n4un5/vT5yO2X44/RtX4/SdpHgduA7n\ndz4XJ6EH4n2gGhk3Hb2rqr+pMwprrzqj84bjJGFwruZ3uH+77X2Wp/n5q+pmYLC77A83Bt+fB1Xd\nCazDSR5LA/x58rSUDhaTC4mIAler6javYzHGHxEpj9NpfLGqHvY6ntREZCKwR1Vf8TqWcJDnJjQZ\nY8KH27
fRBWfYazgmhCuA+3GGShuC2HwkTvmAfSKy0WfZg+JMpkkWkahgHdsY4z0RKYrTRHY76fdR\neUJE3sBpVhyoqr94HU+4CFrzkYjcjNPGN0lVI91lVXA6gMYAz6vqmqAc3BhjTJYErflIVZe4t2a+\ny7YAOIMEjDHGhJuw7VMQkbZAW4CiRYvWqly5cgbvMMYYc+LECXbs2MGRI0fAqYiQqVpOYZsU1CkM\nNhYgKipK16yxliZjjPEnKSmJESNG0L17d0SE4cOH06lTp0ArBJxi8xSMMSaX27JlCzfffDPPPPMM\n9erVY+PGjXTs2DFL+7KkYIwxuVRiYiJ9+vShRo0a/PDDD0yaNIl58+Zx+eWXZ/xmP4LWfCQiU3EK\nvl0gIrtwhqTtB97BKYQ2V0S+V9VG/vdijDEmLevWraN169asX7+eBx98kHfeeYeLLroo2/sN5uij\nh/2smuVnuTHGmAwkJCTw+uuvM2jQIMqUKcPMmTO57777cmz/YdvRbIwx5nRLliyhTZs2/PTTT0RH\nRzNw4EBKlSqVo8ewPgVjjAlzhw8fpmPHjtSvX5/ExES+/PJLxo8fn+MJASwpGGNMWPv888+JjIxk\n1KhRPPvss2zcuJHbbrstaMez5iNjwtAnsbsZOH8rew4mcGnJwnRtVImmNctm/EaTZ/z111907tyZ\n//3vf1SpUoXly5dzww03ZPzGbLI7BWPCzCexu+k2M47dBxNQYPfBBLrNjOOT2EAenmZyO1Xlo48+\nokqVKkydOpUePXoQGxsbkoQAlhSMCTsD528lITHptGUJiUkMnL/Vo4hMqOzZs4f77ruP5s2bU758\nedasWUOvXr0oVKhQxm/OIZYUjAkzew4mZGq5yf1UlQkTJhAREcH8+fMZMGAA3333HdWrZ/UpvVln\nScGYMHNpycKZWm5yt+3bt3PbbbfRpk0bqlevzoYNG+jatSsFCnjT5WtJwZgw07VRJQoXzH/assIF\n89O1USWPIjLBkJSUxNChQ6lWrRqrV69m1KhRfP3111x99dWexmWjj4wJMymjjGz0Ud61adMmoqOj\nWblyJXfddRejR4+mXLlyXocFWFIwJiw1rVnWkkAedOLECfr3788bb7xB8eLFmTx5Mi1btgyrB49Z\nUjDGmBBYvXo10dHRxMXF0aJFC95++20uvPBCr8M6g/UpGGNMEB07doyuXbtSt25d/vrrLz799FOm\nTp0algkB7E7BGGOCZvHixcTExLBt2zZiYmIYOHAgJUqU8DqsdNmdgjHG5LBDhw7Rvn17GjRoQHJy\nMgsXLmTs2LFhnxAgiElBRCaKyD4R2eiz7HwR+VJEfnL/zfkSf8YY46G5c+dStWpVxo0bR5cuXYiL\ni+PWW2/1OqyABfNO4T3gzlTLXgIWqurVwEL3e2OMyfXi4+N55JFHuPvuuylZsiTffvstgwcPpkiR\nIl6HlilBSwqqugTn8Zu+7gXed1+/DzQN1vGNMSYUVJWpU6cSERHBxx9/zGuvvca6deuoU6eO16Fl\nSag7mi9S1d/d13uB7D9Q1BhjPLJr1y6efPJJ5syZQ+3atZkwYQKRkZFeh5UtnnU0q6oC6m+9iLQV\nkTUisiY+Pj6EkRljTPqSk5MZO3YsVatWZeHChQwePJhvv/021ycECH1S+ENELgFw/93nb0NVHauq\nUaoaVaZMmZAFaIwx6dm2bRsNGzakXbt21KpVi7i4OLp06UL+/PkzfnMuEOqkMBto5b5uBXwa4uMb\nY0yWJCUlMXjwYK699lrWrVvH2LFjWbhwIVdddZXXoeWooPUpiMhU4BbgAhHZBfQE+gEfiUg08Cvw\nULCOb4wxOWXjxo20bt2a1atX06RJE0aNGkXZsnmzNlXQkoKqPuxnVcNgHdMYY3LS8ePH6du3L2++\n+SYlSpRg6tSpNG/ePKwK2OU0K3NhjDFpWLlyJdHR0WzatIlHHnmEoUOH
csEFF3gdVtBZmQtjjPHx\n999/06VLF2644QYOHTrEnDlzmDx58lmREMDuFIwx5pRFixYRExPD9u3bad++Pf3796d48eJehxVS\nlhSM8cAnsbtPPVmtZJGCqMKhhER7yppHDh48SNeuXRk/fjwVK1Zk8eLF1K9f3+uwPGFJwZgQ+yR2\nN91mxpGQmATAgWOJp9btPphAt5lxAJYYQmT27Nk8+eST7N27l65du/Laa6/lunpFOcn6FIwJsYHz\nt55KCGlJSExi4PytIYzo7LRv3z5atGjBvffeS+nSpVm5ciUDBgw4qxMCWFIwJuT2HEzIkW1M1qgq\nkydPpkqVKsyaNYs33niDNWvWEBUV5XVoYcGSgjEhdmnJwjmyjcm8nTt3cvfdd/Poo49yzTXXEBsb\nyyuvvMI555zjdWhhw5KCMSHWtVElChf0XyencMH8dG1UKYQR5X3JycmMGjWKqlWrsnjxYoYOHcqy\nZcuIiIjwOrSwYx3NxoRYSgeyjT4KjZ9++ok2bdqwZMkSGjZsyNixY7nyyiu9DitsWVIwxgNNa5a1\nE3+QnTx5krfeeouePXtSqFAhJkyYwBNPPJGnS1TkBEsKxpg8Z/369URHR7N27VqaNm3KiBEjuPTS\nS70OK1ewpGDMWcB3slxebqI6fvw4vXv3pl+/fpx//vl89NFHNGvWzO4OMsGSgjF5XOrJcnl1gtyK\nFSuIjo5my5YtPPbYY7z11luULl3a67ByHRt9ZEwel9Zkubw0Qe7o0aM8++yz3HjjjRw9epR58+bx\n/vvvW0LIIrtTMCaP8zcRLi9MkPvyyy9p27YtO3bsoGPHjvTt25fzzjvP67ByNU+Sgog8A8QAAoxT\n1aFexGGMV9Jq4wey1O6fUX/BpSULszuNBJBPhE9id+fKJqQDBw7w/PPPM3HiRK655hqWLFlCvXr1\nvA4rTxBVDe0BRSKBD4HawAngC6C9qm7z956oqChds2ZNiCI0JrhSt/EDFMwnIJCYpKctK3ZuAQ4e\n8z9/Ia19FS6Yn773Vzu1bVrb+Ns2N5g1axYdOnQgPj6erl270rNnT84991yvwwpLIrJWVTNVv8OL\nPoUqwEpVPaaqJ4FvgPs9iMMYT6TVxp+YrKclhJRlB44lovzbOfxJ7O4M95W6v6BpzbL0vb8a+dMY\ngZOb+hb27t3Lgw8+yP3338/FF1/MqlWr6Nu3ryWEHOZFUtgI1BOR0iJSBLgLKJd6IxFpKyJrRGRN\nfHx8yIM0Jliy2paf1gk80P6CpjXLkuynVSDc+xZUlUmTJhEREcHs2bPp06cPq1at4rrrrvM6tDwp\n5ElBVbcA/YEFOE1H3wNn3Neq6lhVjVLVqDJlyoQ4SmOCJzvF7lKfwP3tK63lmdk2XPz66680btyY\nVq1aUaVKFdavX0/37t0pWLCg16HlWZ4MSVXVCapaS1VvBg4AP3oRhzHp+SR2Nzf2W0SFl+ZyY79F\nZzTdZFVaBfEK5hMK5s94glXqE3ha+/JXUC8z23otOTmZESNGEBkZybJlyxg2bBhLly6lcuXKXoeW\n53k1+uhCVd0nIuVx+hPqehGHMf4Ec8JX6oJ4aY0+KlG4IH+fOHlGx/OxEyep8NLcMzqeAxm1lJlt\nvbR161batGnDsmXLuOOOOxgzZgxXXHGF12GdNUI++ghARJYCpYFEoIuqLkxvext9ZELtxn6L0hzG\nWbZkYZa/dGtIYvAdappWksiNI4fSk5iYyODBg089DnPIkCE89thjVqIiG7Iy+siTOwVVtQHFJqyF\nw4Qv30qqN/ZbxMGExNPWp3Q854WkEBsbS3R0NLGxsTzwwAMMHz6ciy++2OuwzkpW5sKYNIRbp2w4\nJKlg+Oeff+jevTvXX389e/bsYfr06UyfPt0SgocsKRiThnDrlA23JJUTli9fTo0aNejbty+PPvoo\nmzdv5oEHHvA6rLOe1T4yJg2h7JT9
JHY3r3+2iQPHnOahkoUL8to9VU87VtdGldKcuRyOI4cycuTI\nEbp3786IESMoX7488+fP54477vA6LOOypGCMH6F4OtonsbvpOn39aR3IBxMS6frx+lMx+P7rL0nl\nluclzJ8/n7Zt27Jz506eeuop+vTpQ7FixbwOy/iwpGCMhwbO33pGeQtwSlyk7kT2l6Ryw/MS9u/f\nT5cuXXj//fepXLkyS5cu5cYbb/Q6LJMG61MwxkPpdRQH2okc7s9LmD59OlWqVGHy5Mm8/PLLxMbG\nWkIIY3anYIyH/JW1TlkXiHAdmfT777/TqVMnZs6cSc2aNZk/fz41atTwNCaTMbtTMMZDXRtVSrO8\nRcF8EnAncriNTFJV3n33XSIiIpg7dy79+vVj1apVlhByCUsKxqQjWPWPUjStWZaBzapTqsi/Bd5K\nFi7IwAerB9wfEE7DZ3fs2EGjRo1o3bo11apVY/369bz44osUKGCNErmF/aaM8SPYHbipRwz1bFI1\nS/sNh5pGSUlJjBgxgu7duyMijBgxgvbt25Mvn1135jaWFIzxI70O3OyecHM64YRi+Kw/W7ZsITo6\nmhUrVnDnnXcyZswYypcv70ksJvssjRvjRzA7cMN9xFAgEhMT6dOnDzVq1GDr1q1MmjSJefPmWULI\n5exOwRg//I0MyokO3HAdMRSotWvX0rp1azZs2MBDDz3EsGHDuOiii7wOy+QAu1Mwxo9gduCG24ih\nQCUkJPDSSy9Rp04d9u3bx6xZs5g2bZolhDwkw6QgImtFpKOIlApFQMaEi5QH3pctWRjBeZZCTj2/\nIJxGDAVqyZIlVK9enf79+/P444+zefNmmjZt6nVYJocF0nzUHHgCWC0ia4B3gQWajafziEhnoA2g\nQBzwhKr+k9X9GRMswerADYcRQ4E6fPgw3bp1Y+TIkVSoUIGvvvqKhg0beh2WCZKAn7wmIvmAu4FR\nQBJOcnhbVfdn6oAiZYFlQISqJojIR8A8VX3P33vsyWsmHOSWonM5ad68ebRv355du3bxzDPP0Lt3\nb4oWLep1WCZAQXvymohci3O3cBcwA/gAuAlYBGRlmmIBoLCIJAJFgD1Z2IcxIZMbis7lpD///JPO\nnTszefJkIiIi+Pbbb6lb1x6lfjbIMCmIyFrgIDABeElVj7urVopIpqtaqepuERkE/AYk4DRFLUjj\nuG2BtoANcTOeC+acBS+lvvt5/o5rSNz2LZ06deLAgQP06NGDl19+mUKFCnkdqgmRQO4UHlTV7Wmt\nUNX7M3tAt8P6XqACTrL5WET+q6qTU+17LDAWnOajzB7HmJyU24eQpiX13c+vO3fx2MOvcPTHFdSq\nVYuvvvqKa6+91uMoTahlmBRUdbuI/AeoCpzrs7xXFo95G/CLqsYDiMhM4P+Ayem+yxgPBXPOgldS\n7n5UlaMbvuTA1xMgKZHLG7fju9nDrV7RWSqQIamjcUYgPQUI8CBweTaO+RtQV0SKiIgADYEt2dif\nMUGXG4eQZmTPwQQSD+5l37SX2f/FMM65sAKXtB6OXNvEEsJZLJDf/P+p6rUiskFVXxeRwcDnWT2g\nqq4UkenAOuAkEIvbTGRMuMqJIaThNHopKSkJNs7l9wUTQfJxfqOOFKveCJF8ufrux2RfIEkh5Z75\nmIhcCvwFXJKdg6pqT6BndvZhTKhlZ85COI1e2rRpE9HR0exYuZKiFWtT8vYOFCh+AZD7735M9gWS\nFOaISElgIM7VvQLjgxqVMXlEyt1BWv0RWR29lNU7jhMnTtCvXz969+5N8eLF+eCDDyhc+WYGLfgx\nLO5eTHgIePIagIgUAs5V1UPBC+lMNnnNhDN/J+nUdwf+lC1ZOOCTclr7LFwwf4blN1avXk10dDRx\ncXE8/PDDvP3225QpUybzP6zJVbIyeS2QjuYHReQ899uuwLsiUjMrARqT16ScpHcfTED5t1koJVFk\n
lBDEfU/q9/qT2ZLbx44do2vXrtStW5f9+/cze/ZspkyZYgnB+BVIldQeqnpERG7CGU46ARgd3LCM\nyR3SO0lnNIdBcNpi03qvP5mZL7F48WKqV6/OoEGDaNOmDZs2baJJkybpxmRMIEkh5S/+P8BYVZ0L\nnBO8kIzJPdI7Sac3iqdsycJnJISM9gmBldw+dOgQ7du3p0GDBqgqixYtYsyYMZQoUcLvfo1JEUhS\n2C0iY3DmKsxz+xXsOQzG4P8knU+EBpXLpDm3YWjzGix/6VbKZuGZChnNl5gzZw5Vq1Zl3LhxPPfc\nc2zYsIEGDRpk5kcyZ7lATu4PAfOBRqp6EDgfp2/BmLNeWidpgCRVZqzdzQO1yvp9HkNWJsT5e8bD\njZedQ8uWLWnSpAmlSpVixYoVDBo0iCJFiuTkj2vOAoEMSb0EmKuqx0XkFuBaYFJQozIml0g5wT/3\n0XqSUo3kS0hM4usf4ln+0q3pvjezw0t950uoKh9++CERdzzNoUOHeO211+jWrRvnnGMtvCZrAkkK\nM4AoEamIM/P4U2AKThltY856TWuWpfO079Ncl1Fnc3YmxO3atYsnn3ySOXPmULt2bSZMmEBkZGSW\n9mVMikCSQrKqnhSR+4F3VPUdEYkNdmDGBENacwog+09AC2XBvOTkZMaPH0/Xrl1JTEzkrbfe4umn\nnyZ//jObsYzJrECSQqKIPAw8BqSMZysYvJCMCY60Sk10nb4eFBKT9dSyrJSf6NqoUpqTynK6ZMS2\nbduIiYlh8eLFNGjQgHHjxnHVVVfl6DHM2S2QjuYngBuAPqr6i4hUAP4X3LCMyXlpzSlITNJTCSFF\nRnMF0uKvAzinSkacPHmSQYMGUa1aNdatW8e4ceNYuHChJQST4wJ5nsJm4GmfRSeB5KBFZEyQZOaB\nOFl5eE52+gfSExcXR3R0NKtXr6ZJkyaMGjWKsmWtPpEJjoDmG4hIGRHpICJLgcXARUGNypggyEz7\nfjiUjz5+/Dg9e/bkuuuuY8eOHXz44Yd8+umnlhBMUPlNCiJynoi0EpH5wCrgKqCCql6lqs+HLEJj\nckha8wIK5hcK5pPTloVD+egBkz6j1OWV6dWrF6Uib2HQhwtp3rw5znOpjAme9JqP9uEkg1eAZaqq\nInJfdg8oIpWAaT6LrgReVdWh2d23MenxNy8grWVelY/++++/ebhdZz77YDz5zytNmWY9KXLV9fT9\nejfFS51vZa1N0PktnS0izwItgKLAVJwT+ZeqemWOHVwkP7AbqKOqv/rbzkpnm7PBokWLiImJYfv2\n7RSreRel6j9OvkL/zkguW7Kw34lwxqQlR0tnq+pQVa0L3Osu+gS4VEReFJFrshGnr4bAz+klBGPy\nuoMHDxITE0PDhg3Jly8fFz/cl9J3dDgtIUDWOr+NyawMO5pVdbuqvqmq1YAooDgwL4eO3wLnLuQM\nItJWRNaIyJr4+PgcOpwx4eXTTz8lIiKCiRMn8sILL7BhwwauvLZ2mtuGQ+e3yfsyVe1UVTeq6suq\nWjG7BxaRc4B7gI/9HGusqkapapQ9EMTkNfv27aNFixY0bdqUMmXKsHLlSvr370/hwoWzVCjPmJzi\nZQnsxsA6Vf3DwxiMCSlVZSgskjAAAB8dSURBVPLkyVSpUoVZs2bxxhtvsGbNGqKi/m32DfZEOGPS\nE0iZi2B5GD9NR8bkRTt37qR9+/bMmzePunXrMmHCBCIiItLcNlgT4YzJSCDPaG4iIjl6RyEiRYHb\ngZk5uV9jQuWT2N3c2G8RFV6ay439FqX7XOXk5GRGjRpF1apVWbjoa664uwO/1+tGzOy96b7PGC8E\ncqfQHBgqIjOAiar6Q3YPqqp/A6Wzux9jvJBWYT1/RfR+/PFH2rRpw9KlS6lepx5HolqTVKxMhu8z\nxiuBjD76L1AT+Bl4T0RWuCODzgt6dMaEobQK66Uuonfy5EkGDB
hA9erViYuLY+LEiRS5t+ephODv\nfcZ4LaBmIVU9DEwHPsR5Ett9wDoReSqIsRkTcoE0C/mbL5CyfP369dSpU4cXX3yRxo0bs3nzZp54\n4gl+P/RPuu8zJhwE0qdwr4jMwimEVxCoraqNgerAc8ENz5jQSWkW2n0wAeXf5p3UicHffIGLi+Wn\nR48eREVFsWvXLj7++GNmzJjBJZdcku77bP6BCSeB3CncBwxR1WqqOlBV9wGo6jEgOqjRGRNCgTQL\nQdqF9fjjR3ZNfJrevXvTsmVLNm/eTLNmzU4rYGfzD0xukG5Hs1ub6HJVXZLWelVdGJSojPFARs1C\nKXwL6+3at5/ElVPZu2IW5cqV4/PPP+fOO+9Mcz/+CvJZJ7MJJ+kmBVVNEpFkESmhqodCFZQxXsjM\nc5ab1ixL0T8307ZtV37fsYOOHTvSt29fzjsv/fEXNv/AhLtAmo+OAnEiMkFEhqV8BTswY0It0Oad\nAwcO0Lp1a+644w7OOecclixZwvDhwzNMCMbkBoHMU5iJTTIzZ4FAmndmzZpFhw4diI+Pp1u3brz6\n6quce+65ORbDJ7G7rXnJeMrv8xTCiT1PwXht7969PPXUU0yfPp0Klapy3u2dOFy0XI6euFNPigPn\nTsXqHpmsytHnKfjs9GoRmS4im0Vke8pX1sM0JvdQVSZNmkRERASfffYZ/+30Eufc349DRculO2w1\nKwId/WRMMAXSp/AuMAo4CTQAJgGTgxmUMeHg119/pXHjxrRq1YoqVarw/fffs73s7fyTfPpzknPq\nxB3o6CdjgimQpFDYHXoqqvqrqr4G/Ce4YRnjneTkZIYPH07VqlVZtmwZ77zzDkuXLqVy5cpBPXHb\n5DYTDgJJCsfdKqk/iUgnEbkPKBbkuIzxxNatW7n55pt56qmnuOmmm9i0aROdOnUiXz7nv0owT9w2\nuc2Eg0CSwjNAEeBpoBbwKNAqmEEZE2qJiYn07duX6tWrs3nzZt577z0+//xzLr/88tO2C+aJ2x6u\nY8KBjT4yZ73Y2Fiio6OJjY2lWbNmvPPOO1x88cV+t8/OsFEbcmpCKSujj/zOUxCRzwC/GUNV78nM\ngVLtuyQwHoh0j9FaVVdkdX/GZMU///xDr169GDBgABdccAEzZszg/vvvz/B9WZ2VnJnnMBjjlfQm\nrw1y/70fuJh/Rxw9DGT3ucpvA1+oajMROQenecqYkFm2bBlt2rRh69atPPHEEwwePJhSpUoF9Zjp\nDTm1pGDChd+koKrfAIjI4FS3H5+JSJbbckSkBHAz8Lh7nBPAiazuz5jMOHLkCN26dWPEiBFcccUV\nzJ8/nzvuuCMkx7YhpyY3CKSjuaiIXJnyjYhUAIpm45gVgHjgXRGJFZHx7jObT+M+3W2NiKyJj4/P\nxuGMccyfP5/IyEhGjhzJ008/TVxcXMgSAtiQU5M7BJIUOgOLRWSxiHwDfA08m41jFgCuA0apak3g\nb+Cl1Bup6lhVjVLVqDJlyqRebUzA9u/fT6tWrbjzzjspUqQIy5Yt4+2336ZYsdCOrLYhpyY3yLAg\nnqp+ISJXA5XdRT+o6vFsHHMXsEtVV7rfTyeNpGBMTpg+fTodO3Zk//79vPzyy7zyyis5WsAuM+x5\nCiY3CKRKKjjzE65wt68uIqjqpKwcUFX3ishOEamkqluBhsDmrOzLGH9+//13OnXqxMyZM7nuuuuY\nP38+NWrU8Dose56CCXsZJgUR+R9wFfA9kDJ0QnFqIGXVU8AH7sij7cAT2diXMaeoKu+99x5dunQh\nISGBfv368dxzz1GgQKDXP8ac3QL5nxIFRGgOznJT1e/d/RqTY3755Rfatm3LV199Rb169Rg/fjzX\nXHON12EZk6sE0tG8EWeegjFhKSkpiWHDhhEZGcl3333HyJEjWbx4sSUEY7IgkDuFC4DNIrIKONXB\nnJ0ZzcbklC1bthAdHc2KFS
to3Lgxo0ePpnz58l6HZUyuFUhSeC3YQRiTWYmJiQwYMIBevXpRrFgx\n/ve///HII48gIhm/2RjjVyBDUr8RkYuA691Fq1R1X3DDMsa/tWvX0rp1azZs2MBDDz3EO++8w4UX\nXuh1WMbkCYE8jvMhYBXwIPAQsFJEmgU7MGNSS0hI4MUXX6ROnTrEx8cza9Yspk2bZgnBmBwUSPPR\ny8D1KXcHIlIG+Apn0pkxpwlWaeglS5bQpk0bfvrpJ9q0acPAgQMpWbJkDkRsjPEVyOijfKmai/4K\n8H3mLJNSGnr3wYQce6j94cOH6dChA/Xr1+fkyZN89dVXjBs3zhKCMUESyMn9CxGZLyKPi8jjwFzg\n8+CGZXKj9EpDZ8W8efOIjIxk9OjRdO7cmbi4OBo2bJgToRpj/Aiko7mriNwP3OQuGquqs4IblsmN\ncqo09J9//knnzp2ZPHkyERERfPvtt9StWzcnQswT7OltJpgCKXNRAZinqjPd7wuLyBWquiPYwZnc\n5dKShdmdRgIItDS0qvLxxx/TqVMnDhw4wKuvvkr37t0pVKhQToeaa9nT20ywBdJ89DGQ7PN9krvM\nmNNkpzT0nj17uO+++2jevDklylxK1Q4jmXS8NrcOWZ6tPom8Jqeb6IxJLZDRRwXcp6MBzpPS3EJ2\nxpwmK6WhVZUJEybw/PPPc/z4cR7v3IMVhetw0D3v2ZXw6ezpbSbYAkkK8SJyj6rOBhCRe4E/gxuW\nya0yUxp6+/btxMTEsGjRIurXr8/48eNpNf03/kl1grPnGP8ru010xmQkkOaj9kB39xkIvwEvAu2C\nG5bJy5KSkhgyZAiRkZGsXr2aMWPGsGjRIipWrGhXwhmwp7eZYAtk9NHPQF0RKeZ+fzToUZk8a9Om\nTURHR7Ny5Ur+85//MHr0aC677LJT6+1KOH329DYTbIGMProIeBO4VFUbi0gEcIOqTsjqQUVkB3AE\np9P6pKrasxXyuBMnTtCvXz969+5NiRIlmDJlCi1atDijgF3XRpVOG10DdiWcmj29zQRTIH0K7wHv\n4pS7APgRmAZkOSm4Gqiq9U2cBVavXk3r1q3ZuHEjLVu2ZOjQoZQpUybNbe1K2BhvBfQ8BVX9SES6\nAajqSRFJyuhNxhw7doxXX32VIUOGcMkllzB79myaNGmS4fvsStgY7wTS0fy3iJTGeS4zIlIXOJTN\n4yqwQETWikjbtDYQkbYiskZE1sTHx2fzcCbUFi9ezLXXXsvgwYOJiYlh06ZNASUEY4y3AkkKXYDZ\nwFUishyYBDyVzePepKrXAY2BjiJyc+oNVHWsqkapapS/pgYTfj5YsoWL69xNgwYN2H0ggV5jpjF6\n9GhKlCjhdWjGmAAEMvponYjUByoBAmxV1cTsHFRVd7v/7hORWUBtYEl29mm89/Lb7zOgx3OcPHqA\n4rXvp8RNLZmysyjVYndbc5AxuYTfOwURuV5ELganHwGoBfQBBovI+Vk9oIgUFZHzUl4DdwAbs7o/\n4734+HhatmzJm88+jhQqxsX/HUipBq3JV/BcK8FgTC6T3p3CGOA2ALd5px9Os1ENYCyQ1aevXQTM\ncociFgCmqOoXWdyX8ZCq8uGHH/L0009z6NAhSt70CMXrNkPyFzxtO5t4ZkzukV6fQn5V3e++bo5T\nMnuGqvYAKmb1gKq6XVWru19VVbVPVvdlvLNr1y7uueceWrZsyVVXXUVsbCwR/2l9RkIAm3hmTG6S\nblIQkZQ7iYbAIp91gQxlNXlQcnIyY8aMISIigoULF/LWW2+xfPlyqlataiUYjMkD0ju5TwW+EZE/\ngQRgKYCIVCT7Q1JNLrRt2zZiYmJYvHgxt956K+PGjePKK688td4mnhmT+/lNCqraR0QWApcAC1RV\n3VX5yP6QVJOLnDx5kqFDh9KjRw/OOeccxo0bR3R09BklKsAmnhmT26XbDKSq36Wx7MfghWPC
TVxc\nHNHR0axevZp77rmHkSNHUrasnfSNyasCmbxmzkLHjx+nZ8+eXHfddezYsYNp06bxySefWEIwJo+z\nDmNzhu+++47o6Gg2b97Mf//7X4YOHUrp0qWzvD970LwxuYclhTwmOyfgv//+mx49ejB06FDKli3L\n3Llzueuuu7Idjz1o3pjcw5qP8pCUE/Dugwko/56AA3nw/cKFC6lWrRpDhgyhffv2bNq0KdsJAexB\n88bkNpYU8pCsnIAPHjxITEwMt912GwUKFOCbb75h5MiRFC9ePEdissdrGpO7WFLIQzJ7Av7000+J\niIjg3Xff5cUXX2T9+vXcfPMZBWuzxd9sZpvlbEx4sqSQhwR6At63bx8tWrSgadOmXHjhhaxcuZJ+\n/fpRuHDOn6htlrMxuYslhTwkoxOwqjJ58mSqVKnCrFmz6N27N6tXr6ZWrVpBi6lpzbL0vb8aZUsW\nRoCyJQvT9/5q1slsTJiy0Ud5SHplJn777Tfat2/P559/zg033MCECROoUqVKyOKyJJA2G65rwo0l\nhTwm9Qk4OTmZUaNG8cILL5CcnMzbb79Nx44dyZ8/fzp7MaFgw3VNOLLmozzsxx9/5JZbbqFDhw7U\nrVuXjRs38vTTT1tCCBM2XNeEI8+SgojkF5FYEZnjVQx51cmTJxkwYADVq1cnLi6OiRMnsmDBAipU\nqOB1aMaHDdc14cjL5qNngC1AzgyINwCsX7+e1q1bs27dOu677z5GjBjBJZdc4nVYJg2XlizM7jQS\ngA3XNV7y5E5BRC4D/gOM9+L4edE///zDK6+8QlRUFLt372b69OnMnDnTEkIYs+G6Jhx5dacwFHgB\nOM/fBiLSFmgLUL58+RCFlTt9++23REdH88MPP9CqVSveeustzj//fK/DMhmwhxKZcBTypCAidwP7\nVHWtiNzibztVHQuMBYiKilJ/253Njh49yssvv8w777xDuXLl+OKLL2jUqJHXYZlMsOG6Jtx40Xx0\nI3CPiOwAPgRuFZHJHsSRq3355ZdUq1aNYcOG0bFjRzZu3GgJwRiTbSG/U1DVbkA3APdO4XlV/W+o\n48itDhw4wHPPPce7775LpUqVWLp0KTfddJPXYZl02AQ1k5vYPIVcZObMmURERDBp0iS6devG999/\nbwkhzGWnnLkxXvA0KajqYlW928sYcoO9e/fSrFkzHnjgAS6++GJWr17Nm2++ybnnnut1aCYDNkHN\n5DZW5iKMqSqTJk2ic+fOHDt2jDfffJPnn3+eggULprm9NVOEH5ugZnIbSwph6tdff6Vdu3bMnz+f\nG2+8kfHjx1O5cmW/21sdnfBkE9RMbmN9CmEmOTmZ4cOHU7VqVZYvX87w4cNZsmRJugkBrJkiXNkE\nNZPb2J1CGNm6dSvR0dEsX76cRo0aMWbMGC6//PKA3mvNFOHJJqiZ3MaSQhhITExk0KBBvP766xQp\nUoT333+fRx99FBEJeB/WTBG+bIKayU2s+chjsbGx1K5dm+7du9OkSRO2bNnCY489lqmEANZMYYzJ\nGZYUPPLPP//QrVs3rr/+evbu3cuMGTP4+OOPueiii7K0P3vspTEmJ1jzkQeWLVtGdHQ0P/74I088\n8QSDBw+mVKlS2d6vNVMYY7LL7hRC6MiRI3Tq1Il69epx4sQJFixYwMSJE3MkIRhjTE6wpBAi8+fP\nJzIykpEjR/LMM88QFxfH7bff7nVYxhhzGksKQfbXX3/RqlUr7rzzTooWLcry5csZOnQoxYoV8zo0\nY4w5g/UpBImqMmPGDDp27Mj+/ft55ZVXeOWVVyhUqBBgJSmMMeHJkkIQ/P7773Ts2JFZs2ZRq1Yt\nFixYQPXq1U+tt5IUxphwZc1HOUhVeffdd4mIiODzzz+nf//+fPfdd6clBPBfkuK5j9ZT4aW53Nhv\nkZVWNsZ4wu4Ucsgvv/xC27Zt+eqrr7j55psZN24c11xz
TZrb+is9kaTOU0ftzsEY45WQ3ymIyLki\nskpE1ovIJhF5PdQx5KSkpCSGDRtGZGQkK1euZNSoUXz99dd+EwIEVnrCitkZY7zgRfPRceBWVa0O\n1ADuFJG6HsSRbZs3b6ZevXo888wz1K9fn02bNtG+fXvy5Uv/Y02rJEVarJidMSbUQp4U1HHU/bag\n+6WhjiM7EhMT6d27NzVr1uTHH39k8uTJzJ07l3LlygX0/tQlKfL7qXNkxeyMMaHmSZ+CiOQH1gIV\ngRGqutKLOLJi7dq1tG7dmg0bNtC8eXOGDRvGhRdemOn9+JakSD0aCayYnTHGG56MPlLVJFWtAVwG\n1BaRyNTbiEhbEVkjImvi4+NDH2QqCQkJvPjii9SuXZv4+Hg++eQTPvzwwywlhNSsmJ0xJlyIqrct\nNyLyKnBMVQf52yYqKkrXrFkTwqhOt2TJEtq0acNPP/1ETEwMAwYMoGTJkp7FY4wxgRCRtaoalZn3\neDH6qIyIlHRfFwZuB34IdRyBOHz4MB06dKB+/fokJSWxcOFCxo4dawnBGJNnedGncAnwvtuvkA/4\nSFXneBBHuubNm0e7du3Ys2cPXbp0oVevXhQtWtTrsIwxJqhCnhRUdQNQM9THDdSff/7Js88+ywcf\nfEBERATTp0+nTp06QT2m1UEyxoQLK3PhUlWmTZtGREQE06ZNo2fPnqxbty4kCaHbzDh2H0xA+Xc2\ns5W5MMZ4wZICsGfPHpo2bUqLFi244oorWLduHa+99tqpiqbB5K8Oks1mNsZ44axOCqrK+PHjiYiI\n4Msvv2TQoEGsWLGCatWqhSwGf7OWbTazMcYLZ21S2L59O7fddhsxMTHUrFmTDRs28Nxzz5E/f8bl\nJ3KSv1nLNpvZGOOFsy4pJCUlMWTIECIjI1mzZg1jxoxh4cKFVKxY0ZN40qqDZLOZjTFeOatKZ2/c\nuJHo6GhWrVrF3XffzahRo7jssss8jSlllJGNPjLGhIOzIimcOHGCvn370qdPH0qUKMGUKVNo0aIF\n4qcQXaj51kEyxhgv5fmksHr1alq3bs3GjRtp2bIlQ4cOpUyZMl6HZYwxYSnP9ikcO3aM559/nrp1\n63LgwAE+++wzPvjgA0sIxhiTjjx5p7B48WLatGnDzz//TLt27ejfvz8lSpTwOixjjAl7eepO4dCh\nQ7Rr144GDRoA8PXXXzN69GhLCMYYE6A8kxQ+++wzIiIiGD9+PM8//zwbNmzglltu8TosY4zJVXJ9\nUoiPj6dly5bcc889lC5dmu+++46BAwdSpEgRr0MzxphcJ9cmBVVlypQpVKlShenTp9OrVy/WrFnD\n9ddf73VoxhiTa+XKjuZdu3bx5JNPMmfOHOrUqcOECROoWrWq12EZY0yul6vuFJKTkxkzZgwREREs\nWrSIIUOGsHz5cksIxhiTQ7x4HGc5EflaRDaLyCYReSaQ923bto2GDRvSvn17ateuTVxcHM8++2zI\nC9gZY0xe5kXz0UngOVVdJyLnAWtF5EtV3ezvDX/88QfVqlWjUKFCjB8/ntatW4dNiQpjjMlLQn6n\noKq/q+o69/URYAuQbuGfXbt20ahRIzZv3kx0dLQlBGOMCRJRVe8OLnIFsASIVNXDqda1Bdq630YC\nG0MaXMYuAP70OohUwjEmCM+4LKbAWEyBC8e4KqnqeZl5g2dJQUSKAd8AfVR1ZgbbrlHVqNBEFhiL\nKXDhGJfFFBiLKXDhGFdWYvJk9JGIFARmAB9klBCMMcaEjhejjwSYAGxR1bdCfXxjjDH+eXGncCPw\nKHCriHzvft2VwXvGhiCuzLKYAheOcVlMgbGYAheOcWU6Jk87mo0xxoSXXDWj2RhjTHBZUjDGGHNK\nWCeFrJbECHJM54rIKhFZ78b0utcxpRCR/CISKyJzvI4FQER2iEic22+0xut4AESkpIhMF5EfRGSL\niNwQBjFV8ulf+15E
DovIs2EQV2f3b3yjiEwVkXPDIKZn3Hg2efUZichEEdknIht9lp0vIl+KyE/u\nv6XCIKYH3c8pWUQCHpYa1kmBf0tiRAB1gY4iEuFxTMeBW1W1OlADuFNE6nocU4pncGaIh5MGqloj\njMZvvw18oaqVgeqEweelqlvdz6gGUAs4BszyMiYRKQs8DUSpaiSQH2jhcUyRQAxQG+d3d7eIVPQg\nlPeAO1MtewlYqKpXAwvd772OaSNwP84E4YCFdVLISkmMEMSkqnrU/bag++V5b72IXAb8BxjvdSzh\nSkRKADfjDIlGVU+o6kFvozpDQ+BnVf3V60BwaqMVFpECQBFgj8fxVAFWquoxVT2JM/n1/lAHoapL\ngP2pFt8LvO++fh9o6nVMqrpFVbdmdl9hnRR8uSUxagIrvY3kVDPN98A+4EtV9TwmYCjwApDsdSA+\nFFggImvdsiVeqwDEA++6zWzjRaSo10Gl0gKY6nUQqrobGAT8BvwOHFLVBd5GxUagnoiUFpEiwF1A\nOY9jSnGRqv7uvt4LXORlMNmRK5KCWxJjBvBs6hpJXlDVJPdW/zKgtntb6xkRuRvYp6prvYwjDTep\n6nVAY5ymv5s9jqcAcB0wSlVrAn8T+tt8v0TkHOAe4OMwiKUUztVvBeBSoKiI/NfLmFR1C9AfWAB8\nAXwPJHkZU1rUGefveetBVoV9Ugjnkhhu08PXnNmWF2o3AveIyA7gQ5yJgZO9DenU1Saqug+njby2\ntxGxC9jlc2c3HSdJhIvGwDpV/cPrQIDbgF9UNV5VE4GZwP95HBOqOkFVa6nqzcAB4EevY3L9ISKX\nALj/7vM4niwL66QQjiUxRKSMiJR0XxcGbgd+8DImVe2mqpep6hU4zQ+LVNXTqzoRKeo+LwO3ieYO\nPK50q6p7gZ0iUsld1BDw+xwPDzxMGDQduX4D6opIEff/YUPCoFNeRC50/y2P058wxduITpkNtHJf\ntwI+9TCWbAn3ZzSnlMSIc9vwAbqr6jwPY7oEeF9E8uMk1Y9UNSyGgIaZi4BZ7rMvCgBTVPULb0MC\n4CngA7epZjvwhMfxAKcS5+1AO69jAVDVlSIyHViHMwowlvAo4zBDREoDiUBHLwYKiMhU4BbgAhHZ\nBfQE+gEfiUg08CvwUBjEtB94BygDzBWR71W1UYb7sjIXxhhjUoR185ExxpjQsqRgjDHmFEsKxhhj\nTrGkYIwx5hRLCsYYY06xpGDClogkuVVDN4rIx25pg6zu6z0Raea+Hp9eYUURuUVEMj1Ry60Ke0EW\nYyooIv3cKpvrRGSFiDR21y0Wka0+VVRTxuoXEpFpIrJNRFa6pWCMyRZLCiacJbjVQyOBE0B735Vu\nobZMU9U2qprepLVbCP3s3Tdw5sBEuqVBmgLn+ax/JKWSqjtDHCAaOKCqFYEhOCUgjMkWSwomt1gK\nVHSv4peKyGxgs1uccKCIrBaRDSLSDpzZ8CIy3L3C/gq4MGVH7pV3lPv6TvfKfL2ILHSvttsDnd2r\n8nruLPYZ7jFWi8iN7ntLi8gCt2b9eEDSClxEjorIEHe7hSJSJtX6IjgloZ9S1eMAqvqHqn6UwWfi\nW5lzOtDQ/bmrivPMj+/dz+TqwD9mc7azpGDCnntH0BiIcxddBzyjqtfgXC0fUtXrgeuBGBGpANwH\nVAIigMdI48rfPTmPAx5wn4/xoKruAEYDQ9yr8qU4z2AY4h7jAf4tT94TWKaqVXFqO5X38yMUBda4\n233jvs9XReC3DIo9vuue5Hu4ZSfAKSO/E8AtJX0IKI2T1N52izZG4dR8MiYg4V7mwpzdCvuUN1mK\nUwfr/4BVqvqLu/wO4NqUtnmgBHA1znMTpqpqErBHRBalsf+6wJKUfalq6hr5KW4DIv49F1NcnMq9\nN+PW81fVuSJywM/7k4Fp7uvJOMXlMuMRVd3t1pKagVP6ZVI6268AXhbnGRszVfWnTB
7PnMUsKZhw\nluBe7Z7inpj/9l2E0+wyP9V2d+VgHPmAuqr6TxqxZEXq2jLbgPIiUjytuwWfarNHRGQKTrXZScBu\nnOcJ7HLvpkoAf6nqFBFZifPQpXki0k5V00qKxpzBmo9MbjcfeFKcEuuIyDVucbklQHO3z+ESoEEa\n7/0OuNltbkJEzneXH+H0Tt4FOIX0cLdLSVRLgJbussaAv+fy5gNS7mRaAst8V6rqMZy7oLfdQn0p\n1XgfFJECKSOa3J/xbv6tNutbmbMZTnVcFZErge2qOgynWue1fuIy5gyWFExuNx6n/PU6cR5aPgbn\nDngW8JO7bhJOk8ppVDUeaAvMFJH1/NvE8xlwX0pHM+6zit1O2838OwrqdZyksgmnGek3PzH+jfMw\npo3ArUCvNLZ5BeepcJvd7eYAh4FCwHwR2YDzUJndOP0g4CSS0iKyDejCvw8MegjY6Da9RZJ+U5Mx\np7EqqcYEmYgcVdViXsdhTCDsTsEYY8wpdqdgjDHmFLtTMMYYc4olBWOMMadYUjDGGHOKJQVjjDGn\nWFIwxhhzyv8DCv+xfkveFLwAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "OBCPydPleYwO", - "colab_type": "code", - "outputId": "af86365a-230e-4ba5-ba94-a1de0a888007", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } - }, - "source": [ - "task = \"pIC50\"\n", - "dnn_predicted_test = best_dnn.predict(test_dataset, transformers)\n", - "dnn_true_test = test_dataset.y\n", - "plt.scatter(dnn_predicted_test, dnn_true_test)\n", - "plt.xlabel('Predicted pIC50s')\n", - "plt.ylabel('Secondary Assay')\n", - "plt.title(r'DNN predicted IC50 vs. Secondary Assay')\n", - "plt.xlim([2, 11])\n", - "plt.ylim([2, 11])\n", - "plt.plot([2, 11], [2, 11], color='k')\n", - "plt.show()" - ], - "execution_count": 0, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZxV8//A8de7RSoqKiGyRZomlabF\nD1mK5CvKGr5EMy0UkkLUN1LapCSllRKJFlExpaRF27ROi0iiVUOL9qaZ9++Pc2bcxix3Zu69587M\n+/l43Mfce+6557zvMud9PusRVcUYY4wBKOR1AMYYY8KHJQVjjDGpLCkYY4xJZUnBGGNMKksKxhhj\nUllSMMYYk8qSgsk2EVERqezef19Euodgn0+IyKJg78f4R0Tmi0iM13GYwLOkECQisk1EjonIIRE5\nICI/iEg7ESnks86H7gG2rs+yyiKiPo/ni8hxEbnYZ1kjEdkWsjeTCVVtp6pvZLVeMA8iInKp+zkW\n8VlWV0RmuZ/9PhFZLiJPpln/sM+tu89ri4nIWBH5W0T2iEinYMSdxXt6RUR+dWPbISKTQh1DuHF/\nQ/tFpJjXseRnlhSCq6mqng1cAvQFXgLGpFlnH9Ari+0cAYJyNu57IM0vROQ6YB7wPVAZKAs8BTRJ\ns2oZVT3LvfkmtteAK3G+t1uAF0XkjqAH7hKRlsBjQCNVPQuIAuaGav/BlNPfm4hcCtwIKHB3AEMy\naVhSCAFVPaiqXwIPAS1FJNLn6XHANSJyUyabGAI8LCJX+LM/9yz4WRHZKiJ/isiAlBKKWw2zWEQG\nichfwGvumfFbIvK7iPzhVgkV99leFxHZLSK7RKRVmn19KCK9fB7fIyJr3LPsX0TkDhHpjfMPPdQ9\n8x3qrnu1iMxxz+Q3i8iDPtspKyJfuttZDvj13l0DgHGq2k9V/1THSlV9MMtXOloCb6jqflXdBIwC\nnki7kvu5HfD9PkWkvFtCPE9EyonIDJ/SykLfkmIm6gCxqvoLgKruUdWRPvsoLSJj3O9kp4j0EpHC\nPs+3FpFNbil1o4hc6y6v6p5tHxCRDSJyt89rPhSR90Rkpvu6Zb6/NxG5TUR+FJGD7vcnPs
9dISLz\nROQv9/f2sYiU8Xl+m4i8JCLrgCPu72lKms9yiIi8k8ln8jiwFPgQ5/vxfe2d7vs85H4end3lGX7+\nIvKy+/tM+Yyau8vPcNet7rP980TkqIiUzyS+/ENV7RaEG7AN50wv7fLfgafc+x/ilBKeBRa5yyo7\nX0vq+vOBGOBtYIK7rBGwLZN9K/AdcC5QCfgJiHGfewI4BTwDFAGKA4OAL931zwa+Avq4698B/AFE\nAiWBT9ztV/Z9D+79usBB4DacE46KwNW+78MnxpLAduBJN45awJ9AhPv8p8Bn7nqRwM6Uzyid93up\nG1MRoASQBNySyeeTsv5OYAfwAVDOfe4c97kKPuvfD8RnsK2xQG+fx+2Bb9z7fYD3gaLu7UZA/Pjt\n/BenBNkFp5RQOM3z04AR7mdzHrAcaOs+94D7vurgHLgr45R4igJbgFeAM4BbgUNAFZ/v8S/3OywC\nfAx86j5Xzl33fnc7z7u/oZTfVGX3Oy8GlAcWAIPT/C+sAS7G+b1dgFP6LeM+XwTYC9TO5DPZAjwN\n1AYS03w/u4Ebfb6/a7P6/N3P6UKc3+lDbjwXuM8NA/r5bP854CuvjymhunkeQH69kXFSWAq86t7/\nECcpFMNJFk3IOCmUxzngVsO/pHCHz+Ongbnu/SeA332eE/cf4gqfZdcBv7r3xwJ9fZ67ioyTwghg\nUAYxzef0pPAQsDDNOiOAHkBh9x//ap/n3sS/pFDRvX91euu666dUyRQBKgCTcc7MwTlwKXCmz/q3\nZfR5u9/FLz6PFwOPu/d7AtNTPqts/n4eBb51v5u/gJfc5RWAE0Bxn3UfBr5z78cCz6WzvRuBPUAh\nn2UTgdd8vsfRPs/dCfzo3n8cWJrmN7PD9/tMs69mwOo0/wut0qzzNdDavX8XsDGTz+IG9/eQkrh/\nBJ73ef53oC1QKs3r/P78cZLWPe79eu42UxJIHPBgdr/DvHqz6qPQq4hzFphKVU8Ab7i3dKlqAjAU\n54fuj+0+93/DOStK77nyOGfXK91i9gHgG3c57uvSbisjFwO/+BnfJUC9lH26+30UON/dd5Fs7NfX\nfiAZ52w0Xap6WFXjVPWUqv4BdABuF5GzgcPuaqV8XlIK50w5Pd8BJUSknlvvXRPnTB6caqwtwGy3\nKu9lP98DqvqxqjYCygDtgDdEpDH/nPXv9vncRuCUGCDj7+BCYLuqJvss+w3n95hij8/9ozjJM/W1\nPrGp72MRqSAin7pVN38DE3BKF762p3k8DqdEhPv3o3RiTtESmK2qf7qPP+H0KqT7cJLYbyLyvTht\nSpDJ5y8ij7vVnCmfYWRKzKq6zH3/N4vI1Tgnal9mEl++YkkhhESkDs4/YXpdKz/AOQDcm8kmBuA0\nfNb2Y3cX+9yvBOzyeew7Ne6fwDGgmqqWcW+l1WngBKdonnZbGdlOxnX/aafj3Q5877PPlEbfp4AE\nnOoJf/f7z05UjwJLcA4U/kqJrZCq7sd5zzV8nq8BbMhgf0k41VwPu7cZqnrIfe6Qqr6gqpfjNI52\nEpGG2YgLVU1U1c+BdTgHru04JYVyPp9bKVWt5r4ko+9gF3BxmjaNSjhVTVk57TcgIsLp382bOJ9h\ndVUthXOQF06X9vv/AqctLRKnpPBxejsWp23rQeAmcXqC7cGpvqohIjUAVHWFqt6Dkxi/wPk+Mvz8\nReQSnHaiDkBZVS0DrE8Tc0rSegyYrKrHs/qQ8gtLCiEgIqVE5C6cevIJqhqfdh1VPYVTdfJSRttR\n1QPAQOBFP3bbRUTOEacr63NAul0a3TPHUcAgETnPjbeie1YKzj/YEyISISIl3BgzMgZ40v3HK+Ru\n52r3uT+Ay33WnQFcJSKPiUhR91ZHRKq6B9qpOI3gJUQkgjSNi1l40Y25i4iUdd9TDRH51L1fT0Sq\nuDGWxWnIn6+qB93Xjwe6uZ/f1UBrnOqVjHyCUx32qH
sfdz93idPFWHCq/pJwSjGZEqczwH9E5Gw3\nxiY41YbLVHU3MBsY6P6uCrkNvSkdFUYDnUWktjgquwfBlLPfF93P+magKc5vMiszgWoicq84vYee\nxSnRpUgpYR0UkYo4bSGZcg+yk3E+r+Wq+nsGqzbD+dwicEphNYGqwELgcbdh+FERKa2qicDfuJ9x\nJp9/SZwkleCu9yROwvU1AWiOkxjGZ/V+8hWv66/y6w2nHvUYTrXDQZyz1/b4NBriUx/vPi6Ec8ai\nPsvmc3pd/Fk4jXLbMtm34vzjbsWpjx6Ysl+cNoVFadY/E+dsbyvOP9Um4Fmf51/GqVrYBbQigzYF\n93FznLPaQzhF98bu8utwGrz3A0PcZVVwDjgJbpzzgJruc+VxEsffOA2pb6SN22efl7oxFfFZVhen\n3vogTnXdMv6p638Y+BWnvn43zj/9+T6vLYbTlvI3TjLr5Mf3vcXdzxk+y553fwdHcOrgu/s89zXw\nSgbbuhenbWK/G0M88ITP86WB4e42DwKrgRY+z7cDNuMcqNcDtdzl1XC66R4ENgLNM/kt3gzs8Hl8\nh/v9HcSpxvyefxqaqwEr3f2tAV5I89ptpN++doP7vT2Zyef6DTAwneUP4vwmz3DXSfmsVgA3+PH5\n93a/rz9xOnGkvh+fdb51X59l54D8dEtpSDH5iDiD365U1S1ex2JMRkSkEk6j8fmq+rfX8aQlImOB\nXarazetYQinfDVwyxoQ/t22jE06313BMCJfilNhqeRtJ6AWtTUGcaQL2ish6n2UPiDNoJllEooK1\nb2NM+BKRkjhVPbeReRuVJ0TkDZxqtwGq+qvX8YRa0KqPRKQBTh3jeFWNdJdVxWnoGQF0VtW4oOzc\nGGNMjgSt+khVF7hFMN9lmwCczgDGGGPCTdi2KYhIG6ANQMmSJWtfffXVWbzCGGPMyZMn2bZtG4cO\nHQL4U1WzNWdT2CYFdSYAGwkQFRWlcXFW02SMMRlJSkrivffe45VXXkFEGDp0KB06dPB3JoBUNnjN\nGGPyuE2bNtGgQQOee+45brzxRtavX0/79u1ztC1LCsYYk0clJibSu3dvatasyY8//sj48eOZNWsW\nl1xySY63GbTqIxGZiDMqspyI7MDperYPeBdntOpMEVmjqo0z3ooxxpj0rFq1ilatWrF27VoeeOAB\n3n33XSpUqJDr7Qaz99HDGTw1LYPlxhhjsnDs2DFef/113nrrLcqXL8/UqVNp3rx5wLYftg3Nxhhj\nTrdgwQJiYmL4+eefiY6OZsCAAZxzzjkB3Ye1KRhjTJj7+++/ad++PTfddBOJiYnMmTOH0aNHBzwh\ngCUFY4wJa19//TWRkZEMHz6cjh07sn79eho1ahS0/Vn1kTH5wBerdzIgdjO7DhzjwjLF6dK4Cs1q\nVcz6hSZs/fXXXzz//PN89NFHVK1alcWLF3Pddddl/cJcspKCMXncF6t30nVqPDsPHEOBnQeO0XVq\nPF+s9ueiaibcqCqfffYZVatWZeLEiXTv3p3Vq1eHJCGAJQVj8rwBsZs5lph02rJjiUkMiN3sUUQm\np3bt2kXz5s156KGHqFSpEnFxcfTs2ZNixYqFLAZLCsbkcbsOHMvWchN+VJUxY8YQERFBbGws/fv3\nZ+nSpdSoUSPrFweYJQVj8rgLyxTP1nITXrZu3UqjRo2IiYmhRo0arFu3ji5dulCkiDdNvpYUjMnj\nujSuQvGihU9bVrxoYbo0ruJRRMYfSUlJDB48mOrVq7NixQqGDx/Od999x5VXXulpXNb7yJg8LqWX\nkfU+yjs2bNhAdHQ0y5Yt48477+T999/n4osv9joswJKCMflCs1oVLQnkASdPnqRfv3688cYblCpV\nigkTJvDII4+E1YXHLCkYY0wIrFixgujoaOLj42nRogXvvPMO5513ntdh/Yu1KRhjTBAdPXqULl26\nUL9+ff766y+mT5
/OxIkTwzIhgJUUjDEmaObPn0/r1q3ZsmULrVu3ZsCAAZQuXdrrsDJlJQVjjAmw\ngwcP0q5dO2655RaSk5OZO3cuI0eODPuEAEFMCiIyVkT2ish6n2XnisgcEfnZ/Rv4Kf6MMcZDM2fO\npFq1aowaNYpOnToRHx/Prbfe6nVYfgtmSeFD4I40y14G5qrqlcBc97ExxuR5CQkJPProo9x1112U\nKVOGH374gYEDB1KiRAmvQ8uWoCUFVV2Ac/lNX/cA49z744Bmwdq/McaEgqoyceJEIiIi+Pzzz3nt\ntddYtWoV9erV8zq0HAl1Q3MFVd3t3t8D5P6CosYY45EdO3bw1FNPMWPGDOrWrcuYMWOIjIz0Oqxc\n8ayhWVUV0IyeF5E2IhInInEJCQkhjMwYYzKXnJzMyJEjqVatGnPnzmXgwIH88MMPeT4hQOiTwh8i\ncgGA+3dvRiuq6khVjVLVqPLly4csQGOMycyWLVto2LAhbdu2pXbt2sTHx9OpUycKFy6c9YvzgFAn\nhS+Blu79lsD0EO/fGGNyJCkpiYEDB3LNNdewatUqRo4cydy5c7niiiu8Di2ggtamICITgZuBciKy\nA+gB9AU+E5Fo4DfgwWDt3xhjAmX9+vW0atWKFStW0LRpU4YPH07FivlzrqmgJQVVfTiDpxoGa5/G\nGBNIJ06coE+fPrz55puULl2aiRMn8tBDD4XVBHaBZtNcGGNMOpYtW0Z0dDQbNmzg0UcfZfDgwZQr\nV87rsILOprkwxhgfR44coVOnTlx33XUcPHiQGTNmMGHChAKREMBKCsYYk2revHm0bt2arVu30q5d\nO/r160epUqW8DiukLCkYE8a+WL2TAbGb2XngGIVFSFKlol1ZLeAOHDhAly5dGD16NJUrV2b+/Pnc\ndNNNXoflCas+MiZMfbF6J12nxrPzwDEAktQZ67nzwDG6To3ni9U7vQwv3/jyyy+pVq0aY8eOpUuX\nLqxdu7bAJgSwpGBM2BoQu5ljiUnpPncsMYkBsZtDHFH+snfvXlq0aME999xD2bJlWbZsGf37989z\nE9gFmiUFY8LULreEkNPnTfpUlQkTJlC1alWmTZvGG2+8QVxcHFFRUV6HFhYsKRgTpi4sUzxXz5t/\n2759O3fddRePPfYYV111FatXr6Zbt26cccYZXocWNiwpGBOmujSuQvGi6c+nU7xoYbo0rhLiiPKu\n5ORkhg8fTrVq1Zg/fz6DBw9m0aJFREREeB1a2LHeR8aEqZTeRdb7KHd+/vlnYmJiWLBgAQ0bNmTk\nyJFcfvnlXocVtiwpGBPGmtWqaAf/HDp16hRvv/02PXr0oFixYowZM4Ynn3wyX09REQiWFIwx+c7a\ntWuJjo5m5cqVNGvWjPfee48LL7zQ67DyBEsKxph/SRk0t+vAMS7MQ9VVJ06coFevXvTt25dzzz2X\nzz77jPvvv99KB9lgScEYc5qUQXMpYyRSBssBYZ0YlixZQnR0NJs2beLxxx/n7bffpmzZsl6HledY\n7yNjzGnSGzQXzoPlDh8+TMeOHbn++us5fPgws2bNYty4cZYQcshKCsaY02Q0KC4cB8vNmTOHNm3a\nsG3bNtq3b0+fPn04++yzvQ4rT/MkKYjIc0BrQIBRqjrYiziMCWfZrdcPVDvAhWWKp863lHZ5erp9\nEc/EZdtJUqWwCA/Xu5hezapne7/ZsX//fjp37szYsWO56qqrWLBgATfeeGNQ91lQhLz6SEQicRJC\nXaAGcJeIVA51HMaEM9/J8BSnXv/5SWu49OWZXN933r8mw0tv/ZxOmpfeoLmMBst1+yKeCUt/T52s\nL0mVCUt/p9sX8dner7+mTZtGREQE48aN4+WXX2bt2rWWEALIizaFqsAyVT2qqqeA74F7PYjDmLCV\nXr2+un/TO+AHsh2gWa2K9Lm3OhXLFEeAimWK0+fe6umWOiYu257uNjJanht79uzh
gQce4N577+X8\n889n+fLl9OnThzPPPDPg+yrIvKg+Wg/0FpGywDHgTiAu7Uoi0gZoA1CpUqWQBmiM17Kqv0854Kcc\nqAPdDuDvoLmUEoK/y3NCVfnoo4/o2LEjR44coXfv3nTp0oWiRYsGbB/mHyEvKajqJqAfMBv4BlgD\n/Gt+YFUdqapRqhpVvnz5EEdpjLf8mezO94Cf0frBnjSvcAb9/zNanl2//fYbTZo0oWXLllStWpW1\na9fyyiuvWEIIIk+6pKrqGFWtraoNgP3AT17EYUwgfLF6J9f3ncdlGdT350Rmk+Gl8D3gZ6cdIJAe\nrndxtpb7Kzk5mffee4/IyEgWLVrEkCFDWLhwIVdffXWutmuy5lXvo/NUda+IVMJpT6jvRRzG5Faw\nBnqlnQxP+KdNAdI/4J9ZtFBqHGWKF+W1u6sFfbBZSi+jQPY+2rx5MzExMSxatIjbb7+dESNGcOml\nlwYoYpMVr8YpTHHbFBKB9qp6wKM4jMmVzBp4c3tA9q3Xz6y7adrEBHDiVHKu9p0dvZpVD0gX1MTE\nRAYOHMhrr71GiRIl+PDDD3n88cdtiooQ8yQpqKr1HzP5QqgGemXW8BvMxBQqq1evJjo6mtWrV3Pf\nffcxdOhQzj//fK/DKpBsmgtjcsGrBl5feWkEclrHjx/nlVdeoU6dOuzatYvJkyczefJkSwgesqRg\nTC541cDrKxwSU04sXryYmjVr0qdPHx577DE2btzIfffd53VYBZ4lBWNyITsDvXIqq95NGfVUOnry\nVEB6QgXaoUOHeOaZZ7jxxhs5fvw4sbGxfPDBB5x77rleh2awCfGMybVgXh3Nn95NKX9f+3IDB44l\npr52/9HEkE157e+8S7GxsbRp04bt27fzzDPP0Lt3b84666ygxmayx0oKxoQxf6evaFarIiWL/fsc\nLxRTXvsz79K+fft44oknuOOOOyhRogQLFy7knXfesYQQhiwpGBPGstOI7FWDc1aJa/LkyVStWpUJ\nEybw6quvsnr1aq6//vqgxmRyzqqPjAlj2ZnGOrtTXgdKRknn9x07ue+++5g6dSq1atUiNjaWmjVr\nBjUWk3tWUjAmjGWnd5NXPaHSJh1V5fC6Oewe8zQzZ86kb9++LF++3BJCHmElBWOy4OVF7H2nu8hq\n/9lZN5C6NK6S2hh+6uAf/PXNUI5vW01ErXpMnTiOKlVC1z3X5J5oAKe4DZaoqCiNi/vX7NrGBF16\nU0gUL1o44N1O/Y3Fq+SUlSlxv9Pl9f5six2DiBDTqRvDe79MoUJWGeElEVmpqlHZeY19Y8ZkIlwu\nYh/IK6sF2qZNmxj4bAt+nfEejRvezK8//8iIPq9YQsij7FszJhPhMoVEuCQnX4mJifTu3ZuaNWuy\nefNmxo8fz6xZs+yiWHmctSkYkwmvevSkFS7JKcXKlStp1aoV69at48EHH2TIkCFUqFDBk1hMYFlJ\nwZhMhMPcRhA+8xsdO3aMl19+mXr16rF3716mTZvGpEmTLCHkI1kmBRFZKSLtReScUARkTDgJxdxG\n/giH5LRgwQJq1KhBv379eOKJJ9i4cSPNmjUL2f5NaPhTffQQ8CSwQkTigA+A2ZqLbksi8jwQg3Mx\nqXjgSVU9ntPtGRNMwZzbKDsxQOi7mwL8/fffdO3alWHDhnHZZZfx7bff0rBhw6Dv13jD7y6pIlII\nuAsYDiThJId3VHVftnYoUhFYBESo6jER+QyYpaofZvQa65Jqciqcu3HmBbNmzaJdu3bs2LGD5557\njl69elGyZEmvwzJ+ykmXVL8amkXkGpzSwp3AFOBj4AZgHpCTYYpFgOIikgiUAHblYBvGZCpY108u\nCP7880+ef/55JkyYQEREBD/88AP169ul1AuCLJOCiKwEDgBjgJdV9YT71DIRyfasVqq6U0TeAn4H\njuFURc1OZ79tgDaAdXEzOZIfLlOZHYEoFakq
n3/+OR06dGD//v10796dV199lWLFigUpahNu/Ckp\nPKCqW9N7QlXvze4O3Qbre4DLcJLN5yLyX1WdkGbbI4GR4FQfZXc/xoRbN85gCkSpaNeuXTz99NNM\nnz6d2rVr8+2333LNNdcELWYTnrLsfaSqW0XkPyLyooj8L+WWi302An5V1QRVTQSmAv+Xi+0Zk65w\n6cYZCrkZ3KaqjBkzhoiICGJjYxkwYABLly61hFBA+dMl9X2cHkjPAAI8AFySi33+DtQXkRIiIkBD\nYFMutmdMusKhG2eo5LRUtHXrVho1akRMTAw1a9YkPj6ezp07U6SIjWstqPwZvPZ/qvo4sF9VXweu\nA67K6Q5VdRkwGViF0x21EG41kTGBFC5jDNLK6prLOZHdUlFSUhKDBg2ievXqrFixgvfff5958+ZR\nuXLlXMdi8jZ/TgdSTjWOisiFwF/ABbnZqar2AHrkZhvG+CMcxhj4ClaPKN/pq1NkVCrasGED0dHR\nLFu2jP/85z+8//77XHTRRTnet8lf/CkpzBCRMsAAnLP7bcDEYAZlTH6VVd1/TksRKaWiMsWLpi47\ns+jp/94nT56kZ8+e1KpViy1btvDxxx/z1VdfWUIwp8mypKCqb7h3p4jIDOBMVT0Y3LCMyZlwH6yW\nWd1/eqWIjpPW8PpXG+jRtJpf7+PEqeTU+/uPJqaWQiqe2kV0dDTx8fE8/PDDvPPOO5QvXz4A78jk\nN/40ND8gIme7D7sAH4hIreCGZUz2hfM1B1JkVvefXikC/jm4Z/U+0nv9kaNHaP9cJ+rXr8++ffv4\n8ssv+eSTTywhmAz5U33UXVUPicgNON1JxwDvBzcsY7IvHK85kFZmPaIy6ynkz/tI+/rjv69j9wfP\nsGvhZ8TExLBhwwaaNm2a8+BNgeBPUkj5L/sPMFJVZwJnBC8kY3ImLwxWy6xHVFbjJ7J6HymvTz5x\nhL9ih/LHxFdAoVrMW4wYMYLSpUsH6m2YfMyf3kc7RWQEcBvQT0SKYddhMGEoXC6I4yujNo702gfS\n60HkK6v30aVxFZ7pN5rdM98l6ch+StVpzvm3Pk6vh+oE5L2YgsGfg/uDQCzQWFUPAOfitC0YE1bC\nbbBadts40utBlCKr95GQkMBnA7qwY9JrFCtZigv+O4DIezvQ76E6YdXQbsKfPyWFC4CZqnpCRG4G\nrgHGBzUqY3LAy2sOpCcnE/KllCL87UWlqnz66ac8++yzHDx4kNdee42uXbtyxhlWw2tyxp+kMAWI\nEpHKOCOPpwOf4EyjbUxYCafBarlp4/DnfezYsYOnnnqKGTNmULduXcaMGUNkZGSOYjUmhT9JIVlV\nT4nIvcC7qvquiKwOdmDG5JTXYxVS9p/R1L65beNITk5m9OjRdOnShcTERN5++22effZZChcunPWL\njcmCP0khUUQeBh4HUvqz/bvS05gw4PWFddLuP63ctnFs2bKF1q1bM3/+fG655RZGjRrFFVdckePt\nGZOWPw3NT+JMgtdbVX8VkcuAj4IbljE54/VYhYwGoEHuJuQ7deoUb731FtWrV2fVqlWMGjWKuXPn\nWkIwAefPNBcbgWd9Fp0CkjNY3RhPeT1WIaP9CLD45VtztM34+Hiio6NZsWIFTZs2Zfjw4VSsGB7t\nJib/8Wu8gYiUF5GnRWQhMB+oENSojMkhry+sE8j9nzhxgh49enDttdeybds2Pv30U6ZPn24JwQRV\nhklBRM4WkZYiEgssB64ALlPVK1S1c8giNCYbvB6rEKj9L1u2jNq1a9OzZ09atGjBxo0beeihh3Cu\nS5W1YFyzwRQMmZUU9gKtgF7A5ar6AnAytzsUkSoissbn9reIdMztdo0B7y+sk9v9HzlyhE6dOnHd\ndddx8OBBZsyYwUcffUS5cuX8jiEvTAxowpeopt9xzj1QtwBK4lw/YRIwR1UvD9jORQoDO4F6qvpb\nRutFRUVp
XFxcoHZrTFiaN28erVu3ZuvWrTz11FP07duXUqVKZXs71/edl+50HxXLFM9xu4bJm0Rk\npapGZec1GZYUVHWwqtYH7nEXfQFcKCIviUiOL8eZRkPgl8wSgjH53YEDB2jdujUNGzakUKFCzJ8/\nn2HDhuUoIYD3je0mb8uyoVlVt6rqm6paHYgCSgGzArT/FmRwFTcRaSMicSISl5CQEKDdGRNepk+f\nTkREBGPHjuXFF19k3bp13KOmqfoAAB9XSURBVHTTTbnapteN7SZvy9Zsp6q6XlVfVdVcX91bRM4A\n7gY+z2BfI1U1SlWj7IIgJr/Zu3cvLVq0oFmzZpQvX55ly5bRr18/ihfP/YHb68Z2k7d5OQV2E2CV\nqv7hYQzGhJSqMmHCBKpWrcq0adN44403iIuLIyoqW9W+mfK6sd3kbf5McxEsD5NB1ZEx+dH27dtp\n164ds2bNon79+owZM4aIiIig7CucJgY0eUuWSUFEmuJMnR2wUcwiUhLnoj1tA7VNYwIhGJPpJScn\nM2LECF566SWSkpIYPHgwHTp0OG0CO68n8TMmhT8lhYeAwSIyBRirqj/mdqeqegQom9vtGBNIwZhM\n76effiImJoaFCxfSqFEjRo4cyWWXXRb0/RqTU/70PvovUAv4BfhQRJa4PYPODnp0xoRQICfTO3Xq\nFP3796dGjRrEx8czduxYZs+e/a+EEOj9GpNbfrUpqOrfIjIZKA50BJoDXURkiKq+G8wAjQmUrKpo\nAtW/f+3atbRq1YpVq1bRvHlz3nvvPS644IIM17dxBSacZFlSEJF7RGQazkR4RYG6qtoEqAG8ENzw\njAkMf6Z+yG3//hMnTtC9e3eioqLYsWMHn3/+OVOmTMk0IQRiv8YEkj9dUpsDg1S1uqoOUNW9AKp6\nFIgOanTGBIg/VTS56d+/ZMkSatWqRa9evXjkkUfYuHEj999/v18T2Nm4AhNOMk0K7txEl6jqgvSe\nV9W5QYnKmADzp4omJ/37Dx8+TMeOHbn++us5cuQIX3/9NePGjaNsWf/7Udi4AhNOMm1TUNUkEUkW\nkdKqejBUQRkTaBeWKZ7uJHFpq2iy079/zpw5tGnThm3bttG+fXv69OnD2WfnrP+FjSsw4cKf6qPD\nQLyIjBGRISm3YAdmTCAFsopm//79tGrVittvv50zzjiDBQsWMHTo0BwnBGPCiT+9j6a6N2PyrJSz\n8NwOEJs2bRpPP/00CQkJdO3alf/973+ceeaZwQgZsEFtJvT8uUbzuFAEYkwgZHYQzU0VzZ49e3jm\nmWeYPHkyNWvWZObMmVx77bWBDP1fbFCb8YI/XVKvFJHJIrJRRLam3EIRnDHZEYwrjqkq48ePJyIi\ngq+++oo333yT5cuXBz0hgA1qM97wp03hA2A4cAq4BRgPTAhmUMbkRKAPor/99htNmjShZcuWVK1a\nlTVr1tC1a1eKFi0aiHCzZIPajBf8SQrF3a6noqq/qeprwH+CG5Yx2Reog2hycjJDhw6lWrVqLFq0\niHfffZeFCxdy9dVXByJMv9mgNuMFf5LCCREpBPwsIh1EpDlwVpDjMibbAnEQ3bx5Mw0aNOCZZ57h\nhhtuYMOGDXTo0IFChUJ/6REb1Ga84M8v/TmgBPAsUBt4DGgZzKCMyYncHEQTExPp06cPNWrUYOPG\njXz44Yd8/fXXXHLJJcEKN0s2qM14QVTV6xiyFBUVpXFxcV6HYfKAnHThXL16NdHR0axevZr777+f\nd999l/PPPz/X2zXGayKyUlWzdVm/DLukishXQIYZQ1Xvzs6O0my7DDAaiHT30UpVl+R0e8akyE63\n0+PHj9OzZ0/69+9PuXLlmDJlCvfee++/1rOuoaYgyWycwlvu33uB8/mnx9HDQG6vq/wO8I2q3i8i\nZ+BUTxkTMosWLSImJobNmzfz5JNPMnDgQM4555x0182sV5MlBZPfZJgUVP
V7ABEZmKb48ZWI5Lgu\nR0RKAw2AJ9z9nARO5nR7xmTHoUOH6Nq1K++99x6XXnopsbGx3H777Zm+xrqGmoLEn4bmkiJyecoD\nEbkMKJmLfV4GJAAfiMhqERntXrP5NO7V3eJEJC4hISEXuzPGERsbS2RkJMOGDePZZ58lPj4+y4QA\n1jXUFCz+JIXngfkiMl9Evge+w7n6Wk4VAa4FhqtqLeAI8HLalVR1pKpGqWpU+fLlc7E7U9Dt27eP\nli1bcscdd1CiRAkWLVrEO++8w1ln+dez2rqGmoLEn7mPvhGRK4GUkTs/quqJXOxzB7BDVZe5jyeT\nTlIwJhAmT55M+/bt2bdvH6+++irdunXL9gR2gZpMz5i8wK9rNOOMT7jUXb+GiKCq43OyQ1XdIyLb\nRaSKqm4GGgIbc7ItYzKye/duOnTowNSpU7n22muJjY2lZs2aOd6eXe/AFBRZJgUR+Qi4AlgDpHTB\nUJw5kHLqGeBjt+fRVuDJXGzLmFSqyocffkinTp04duwYffv25YUXXqBIEX/Pf4wp2Pz5T4kCIjSA\no9xUdY27XWMC5tdff6VNmzZ8++233HjjjYwePZqrrrrK67CMyVP8aWhejzNOwZiwlJSUxJAhQ4iM\njGTp0qUMGzaM+fPnW0IwJgf8KSmUAzaKyHIgtYE5NyOajQmUTZs2ER0dzZIlS2jSpAnvv/8+lSpV\n8josY/Isf5LCa8EOwpjsSkxMpH///vTs2ZOzzjqLjz76iEcffRQR8To0Y/I0f7qkfi8iFYA67qLl\nqro3uGEZk7GVK1fSqlUr1q1bx4MPPsi7777Leeed53VYxuQL/lyO80FgOfAA8CCwTETuD3ZgxqR1\n7NgxXnrpJerVq0dCQgLTpk1j0qRJlhCMCSB/qo9eBeqklA5EpDzwLc6gM2OCIu1U1XeW28+4/l35\n+eefiYmJYcCAAZQpU8brMI3Jd/xJCoXSVBf9hX+9lozJEd+pqpNPHGXdZ8P4YfUsKlSsxLfffkvD\nhg29DtGYfMufpPCNiMQCE93HDwFfBy8kU9ClTFV97JcV/BU7jKRDf3J21D1c2rS1JQRjgsyfhuYu\nInIvcIO7aKSqTgtuWKYg275rD/vmjebIhu8oWrYS5f87gGIVr+aPo15HFjp2pTfjFX+mubgMmKWq\nU93HxUXkUlXdFuzgTMGiqnz++efsHtueU8cOUfr/Hqb0dQ8iRYoCBWeqarvSm/GSP9VHnwP/5/M4\nyV1WJ/3VjclcemfBdSsITz/9NNOnT6dyRA1OXd8WPfefQWgFaapqu9Kb8ZI/DcZF3KujAalXSjsj\neCGZ/CzlLHjngWMosGP/UZ7qPoArq1QlNjaWt956i01r4xj01N1ULFMcASqWKU6fe6sXmAOiXenN\neMmfkkKCiNytql8CiMg9wJ/BDcvkV75nwYkH9rDvmyEc/20dpS6rwcrZk6lcuTJQsKeqvrBMcXam\nkwAKSvWZ8ZY/SaEdzjTX7+FMmb0DeDyoUZl8a9eBY2hyEodWfsWBBR9BoUKc27gDZ9e4PTUhFHRd\nGlc5rU0BClb1mfGWP72PfgHqi8hZ7uPDQY/K5FtlTvzBxkn9Obl7M8WvqMO5t7enSKlyVLSz4FR2\npTfjJX96H1UA3gQuVNUmIhIBXKeqY3K6UxHZBhzCabQ+pap2bYV87uTJk/Tt25f1Q3uhZxSnXNMu\nlKjaABGxs+B0FOTqM+MtfxqaPwRigQvdxz8BHQOw71tUtaYlhPxvxYoV1K5dmx49evDggw8w5quF\nXPV/d1BIpMA1IhsT7vy6noKqfiYiXQFU9ZSIJGX1ImOOHj3K//73PwYNGsQFF1zAl19+SdOmTQFo\neavHwRlj0uVPSeGIiJTFaWRGROoDB3O5XwVmi8hKEWmT3goi0kZE4kQkLiEhIZe7M6E2f/58rrnm\nGgYOHEjr1q3ZsGFDakIwxoQvf0oKnY
AvgStEZDFQHsjt1Nk3qOpOETkPmCMiP6rqAt8VVHUkMBIg\nKioqYNeHNsF18OBBXnzxRUaOHMkVV1zBvHnzuOWWWzJc36ZzMCa8+NP7aJWI3ARUAQTYrKqJudmp\nqu50/+4VkWlAXWBB5q8y4W7GjBm0a9eO3bt307lzZ15//XVKlCiR4fo2nYMx4SfD6iMRqSMi54PT\njgDUBnoDA0Xk3JzuUERKisjZKfeB24H1Od2e8V5CQgKPPPIITZs25ZxzzmHJkiUMGDAg04QAmU/n\nYIzxRmZtCiOAkwAi0gDoC4zHaU8YmYt9VgAWichanCu6zVTVb3KxPeMRVWXixIlEREQwefJkXn/9\ndVauXEndunX9er1N52BM+Mms+qiwqu5z7z+EM2X2FGCKiKzJ6Q5VdStQI6evN+Fhx44dPPXUU8yY\nMYN69eoxZswYqlWrlq1t2HQOxoSfzEoKhUUkJWk0BOb5POdPA7XJh5KTkxkxYgQRERHMnTuXt99+\nm8WLF2c7IYAznUPxooVPW2YD2YzxVmYH94nA9yLyJ3AMWAggIpXJfZdUkwdt2bKF1q1bM3/+fG69\n9VZGjRrF5ZdfnuPt2XQOxoSfDJOCqvYWkbnABcBsVU3pFloIeCYUwZnwcOrUKQYPHkz37t0544wz\nGDVqFNHR0YhIrrdt0zkYE14yrQZS1aXpLPspeOGYcBMfH090dDQrVqzg7rvvZtiwYVSsaAdxY/Ir\nf0Y0mwLoxIkT9OjRg2uvvZZt27YxadIkvvjiC0sIxuRz1mBs/mXp0qVER0ezceNG/vvf/zJ48GDK\nli0bsv3bKGdjvGNJwaQ6cuQI3bt3Z/DgwVSsWJGZM2dy5513AqE7UNsoZ2O8ZdVHBoC5c+dSvXp1\nBg0aRLt27diwYcNpCcH3usopB+ovVu8MeBw2ytkYb1lSKOAOHDhA69atadSoEUWKFOH7779n2LBh\nlCpVKnWdUB6obZSzMd6ypFCATZ8+nYiICD744ANeeukl1q5dS4MGDf61XigP1BmNZrZRzsaEhiWF\nAmjv3r20aNGCZs2acd5557Fs2TL69u1L8eLZOyAH40Bto5yN8ZYlhQJEVZkwYQJVq1Zl2rRp9OrV\nK/VSmZkJ5YG6Wa2K9Lm3OhXLFEfALtdpTIhZ76MC4vfff6ddu3Z8/fXXXHfddYwZM4aqVav69dpQ\nT0cR7FHO1uXVmIxZUsjnUiawe/HFF0lOTuadd96hffv2FC5cOOsX+8gv01FYl1djMmfVR/nYTz/9\nxM0338zTTz9N/fr1Wb9+Pc8++2y2E0J+Yl1ejcmcZ0lBRAqLyGoRmeFVDPnVqVOn6N+/PzVq1CA+\nPp6xY8cye/ZsLrvsMq9D85x1eTUmc15WHz0HbAJKZbWi8d/atWtp1aoVq1atonnz5rz33ntccMEF\nXocVNuzCPsZkzpOSgohcBPwHGO3F/vOj48eP061bN6Kioti5cyeTJ09m6tSplhDSsC6vxmTOq5LC\nYOBF4OyMVhCRNkAbgEqVKoUorLzphx9+IDo6mh9//JGWLVvy9ttvc+6553odVliyC/sYk7mQJwUR\nuQvYq6orReTmjNZT1ZHASICoqCjNaL2C7PDhw7z66qu8++67XHzxxXzzzTc0btzY67DCXn7pSWVM\nMHhRfXQ9cLeIbAM+BW4VkQkexJGnzZkzh+rVqzNkyBDat2/P+vXrLSEYY3It5CUFVe0KdAVwSwqd\nVfW/oY4jr9q/fz8vvPACH3zwAVWqVGHhwoXccMMNXoeVJ9igNWOyZuMU8pCpU6cSERHB+PHj6dq1\nK2vWrLGE4KdQTv9tTF7maVJQ1fmqepeXMeQFe/bs4f777+e+++7j/PPPZ8WKFbz55puceeaZXoeW\nZ9igNWP8Y9NchDFVZfz48Tz//PMcPXqUN998k86dO1O0aFGvQztNXqiWsUFrxvjHkkKY+u2332jb\nti
2xsbFcf/31jB49mquvvtrrsP4lr8wlZIPWjPGPtSmEmeTkZIYOHUq1atVYvHgxQ4cOZcGCBWGZ\nECDvVMvYoDVj/GMlhTCyefNmoqOjWbx4MY0bN2bEiBFccsklXoeVqbxSLWOD1ozxjyWFMJCYmMhb\nb73F66+/TokSJRg3bhyPPfYYIuJ1aFnKS9UyNmjNmKxZ9ZHHVq9eTd26dXnllVdo2rQpmzZt4vHH\nH88TCQGsWsaY/MaSgkeOHz9O165dqVOnDnv27GHKlCl8/vnnVKhQwevQssUun2lM/mLVRx5YtGgR\n0dHR/PTTTzz55JMMHDiQc845x+uwcsyqZYzJP6ykEEKHDh2iQ4cO3HjjjZw8eZLZs2czduzYPJ0Q\njDH5iyWFEImNjSUyMpJhw4bx3HPPER8fz2233eZ1WMYYcxpLCkH2119/0bJlS+644w5KlizJ4sWL\nGTx4MGeddZbXoRljzL9Ym0KQqCpTpkyhffv27Nu3j27dutGtWzeKFSvmdWgBkRemtjDGZJ8lhSDY\nvXs37du3Z9q0adSuXZvZs2dTo0YNr8MKmLwytYUxJvus+iiAVJUPPviAiIgIvv76a/r168fSpUvz\nVUKAvDO1hTEm+6ykECC//vorbdq04dtvv6VBgwaMGjWKq666yuuwgiK9EcyZLTfG5B0hLymIyJki\nslxE1orIBhF5PdQxBFJSUhJDhgwhMjKSZcuWMXz4cL777rt8mxAACmcw2jqj5caYvMOLksIJ4FZV\nPSwiRYFFIvK1qi71IJZc2bhxIzExMSxZsoQmTZowYsQILr74Yq/DCrok1WwtN8bkHSEvKajjsPuw\nqHvLU0eTxMREevXqRa1atfjpp5+YMGECM2fOLBAJAZypLLKz3BiTd3jS0CwihUVkDbAXmKOqy7yI\nIydWrlxJVFQU3bt3p3nz5mzcuJFHH300z0xgFwg2CZ4x+ZcnSUFVk1S1JnARUFdEItOuIyJtRCRO\nROISEhJCH2Qax44d46WXXqJu3bokJCTwxRdf8Omnn3Leeed5HVrI2SR4xuRfoh7XA4vI/4CjqvpW\nRutERUVpXFxcCKM63YIFC4iJieHnn3+mdevW9O/fnzJlyngWjzHG+ENEVqpqVHZe40Xvo/IiUsa9\nXxy4Dfgx1HH44++//+bpp5/mpptuIikpiblz5zJy5EhLCMaYfMuL3kcXAONEpDBOUvpMVWd4EEem\nZs2aRdu2bdm1axedOnWiZ8+elCxZ0uuwjDEmqEKeFFR1HVAr1Pv1159//knHjh35+OOPiYiIYPLk\nydSrV8/rsILC5i8yxqRl01y4VJVJkyYRERHBpEmT6NGjB6tWrcrXCaHr1Hh2HjiG8s/8RV+s3ul1\naMYYD1lSAHbt2kWzZs1o0aIFl156KatWreK1117LNzOapsfmLzLGpKdAJwVVZfTo0URERDBnzhze\neustlixZQvXq1b0OLeh2ZTBPUUbLjTEFQ4FNClu3bqVRo0a0bt2aWrVqsW7dOl544QUKFy6c9Yvz\ngQszGH2c0XJjTMFQ4JJCUlISgwYNIjIykri4OEaMGMHcuXOpXLmy16GFlI1KNsakp0BNnb1+/Xqi\no6NZvnw5d911F8OHD+eiiy7yOixPpPQyst5HxhhfBSIpnDx5kj59+tC7d29Kly7NJ598QosWLQrU\nfEXpaVaroiUBY8xp8n1SWLFiBa1atWL9+vU88sgjDB48mPLly3sdljHGhKV826Zw9OhROnfuTP36\n9dm/fz9fffUVH3/8sSUEY4zJRL4sKcyfP5+YmBh++eUX2rZtS79+/ShdurTXYRljTNjLVyWFgwcP\n0rZtW2655RYAvvvuO95//31LCMYY46d8kxS++uorIiIiGD16NJ07d2bdunXcfPPNXodljDF5Sp5P\nCgkJCTzyyCPcfffdlC1blqVLlzJgwABKlCjhdWjGGJPn5NmkoKp8
8sknVK1alcmTJ9OzZ0/i4uKo\nU6eO16EZY0yelScbmnfs2MFTTz3FjBkzqFevHmPGjKFatWpeh2WMMXleniopJCcnM2LECCIiIpg3\nbx6DBg1i8eLFlhCMMSZAvLgc58Ui8p2IbBSRDSLynD+v27JlCw0bNqRdu3bUrVuX+Ph4OnbsWGAm\nsDPGmFDwovroFPCCqq4SkbOBlSIyR1U3ZvSCP/74g+rVq1OsWDFGjx5Nq1atCvwUFcYYEwwhLymo\n6m5VXeXePwRsAjKdgGfHjh00btyYjRs3Eh0dbQnBGGOCRFTVu52LXAosACJV9e80z7UB2rgPI4H1\nIQ0ua+WAP70OIo1wjAnCMy6LyT8Wk//CMa4qqnp2dl7gWVIQkbOA74Heqjo1i3XjVDUqNJH5x2Ly\nXzjGZTH5x2LyXzjGlZOYPOl9JCJFgSnAx1klBGOMMaHjRe8jAcYAm1T17VDv3xhjTMa8KClcDzwG\n3Coia9zbnVm8ZmQI4soui8l/4RiXxeQfi8l/4RhXtmPytKHZGGNMeMlTI5qNMcYElyUFY4wxqcI6\nKeR0Sowgx3SmiCwXkbVuTK97HVMKESksIqtFZIbXsQCIyDYRiXfbjeK8jgdARMqIyGQR+VFENonI\ndWEQUxWf9rU1IvK3iHQMg7ied3/j60VkooicGQYxPefGs8Grz0hExorIXhFZ77PsXBGZIyI/u3/P\nCYOYHnA/p2QR8btbalgnBf6ZEiMCqA+0F5EIj2M6AdyqqjWAmsAdIlLf45hSPIczQjyc3KKqNcOo\n//Y7wDeqejVQgzD4vFR1s/sZ1QRqA0eBaV7GJCIVgWeBKFWNBAoDLTyOKRJoDdTF+e7uEpHKHoTy\nIXBHmmUvA3NV9UpgrvvY65jWA/fiDBD2W1gnhZxMiRGCmFRVD7sPi7o3z1vrReQi4D/AaK9jCVci\nUhpogNMlGlU9qaoHvI3qXxoCv6jqb14HgjM3WnERKQKUAHZ5HE9VYJmqHlXVUziDX+8NdRCqugDY\nl2bxPcA49/44oJnXManqJlXdnN1thXVS8OVOiVELWOZtJKnVNGuAvcAcVfU8JmAw8CKQ7HUgPhSY\nLSIr3WlLvHYZkAB84FazjRaRkl4HlUYLYKLXQajqTuAt4HdgN3BQVWd7GxXrgRtFpKyIlADuBC72\nOKYUFVR1t3t/D1DBy2ByI08kBXdKjClAx7RzJHlBVZPcov5FQF23WOsZEbkL2KuqK72MIx03qOq1\nQBOcqr8GHsdTBLgWGK6qtYAjhL6YnyEROQO4G/g8DGI5B+fs9zLgQqCkiPzXy5hUdRPQD5gNfAOs\nAZK8jCk96vTz97z2IKfCPimE85QYbtXDd/y7Li/UrgfuFpFtwKc4AwMneBtS6tkmqroXp468rrcR\nsQPY4VOym4yTJMJFE2CVqv7hdSBAI+BXVU1Q1URgKvB/HseEqo5R1dqq2gDYD/zkdUyuP0TkAgD3\n716P48mxsE4K4TglhoiUF5Ey7v3iwG3Aj17GpKpdVfUiVb0Up/phnqp6elYnIiXd62XgVtHcjscz\n3arqHmC7iFRxFzUEMryOhwceJgyqjly/A/VFpIT7f9iQMGiUF5Hz3L+VcNoTPvE2olRfAi3d+y2B\n6R7Gkivhfo3mlCkx4t06fIBXVHWWhzFdAIwTkcI4SfUzVQ2LLqBhpgIwzb32RRHgE1X9xtuQAHgG\n+NitqtkKPOlxPEBq4rwNaOt1LACqukxEJgOrcHoBriY8pnGYIiJlgUSgvRcdBURkInAzUE5EdgA9\ngL7AZyISDfwGPBgGMe0D3gXKAzNFZI2qNs5yWzbNhTHGmBRhXX1kjDEmtCwpGGOMSWVJwRhjTCpL\nCsYYY1JZUjDGGJPKkoIJWyKS5M4aul5EPnenNsjptj4Ukfvd+6Mzm1hRRG4WkWwP1HJnhS2Xw5iK\nikhfd5bNVSKyRESauM/NF5HN
PrOopvTVLyYik0Rki4gsc6eCMSZXLCmYcHbMnT00EjgJtPN90p2o\nLdtUNUZVMxu0djOhH737Bs4YmEh3apBmwNk+zz+aMpOqO0IcIBrYr6qVgUE4U0AYkyuWFExesRCo\n7J7FLxSRL4GN7uSEA0RkhYisE5G24IyGF5Gh7hn2t8B5KRtyz7yj3Pt3uGfma0Vkrnu23Q543j0r\nv9EdxT7F3ccKEbnefW1ZEZntzlk/GpD0AheRwyIyyF1vroiUT/N8CZwpoZ9R1RMAqvqHqn6WxWfi\nOzPnZKCh+76riXPNjzXuZ3Kl/x+zKegsKZiw55YImgDx7qJrgedU9Sqcs+WDqloHqAO0FpHLgOZA\nFSACeJx0zvzdg/Mo4D73+hgPqOo24H1gkHtWvhDnGgyD3H3cxz/Tk/cAFqlqNZy5nSpl8BZKAnHu\net+7r/NVGfg9i8keP3AP8t3daSfAmUZ+O4A7lfRBoCxOUnvHnbQxCmfOJ2P8Eu7TXJiCrbjP9CYL\ncebB+j9guar+6i6/HbgmpW4eKA1ciXPdhImqmgTsEpF56Wy/PrAgZVuqmnaO/BSNgIh/jsWUEmfm\n3ga48/mr6kwR2Z/B65OBSe79CTiTy2XHo6q6051LagrO1C/jM1l/CfCqONfYmKqqP2dzf6YAs6Rg\nwtkx92w3lXtgPuK7CKfaJTbNencGMI5CQH1VPZ5OLDmRdm6ZLUAlESmVXmnBZ7bZQyLyCc5ss+OB\nnTjXE9jhlqZKA3+p6icisgznokuzRKStqqaXFI35F6s+MnldLPCUOFOsIyJXuZPLLQAectscLgBu\nSee1S4EGbnUTInKuu/wQpzfyzsaZSA93vZREtQB4xF3WBMjouryFgJSSzCPAIt8nVfUoTinoHXei\nvpTZeB8QkSIpPZrc93gX/8w26zsz5/04s+OqiFwObFXVITizdV6TQVzG/IslBZPXjcaZ/nqVOBct\nH4FTAp4G/Ow+Nx6nSuU0qpoAtAGmisha/qni+QpontLQjHutYrfRdiP/9IJ6HSepbMCpRvo9gxiP\n4FyMaT1wK9AznXW64VwVbqO73gzgb6AYECsi63AuKrMTpx0EnERSVkS2AJ3454JBDwLr3aq3SDKv\najLmNDZLqjFBJiKHVfUsr+Mwxh9WUjDGGJPKSgrGGGNSWUnBGGNMKksKxhhjUllSMMYYk8qSgjHG\nmFSWFIwxxqT6f0VyZpUl/bUqAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "bwSpFWsPeYwS", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/13_Modeling_Protein_Ligand_Interactions.ipynb b/examples/tutorials/13_Modeling_Protein_Ligand_Interactions.ipynb index 00698f81b2d5830f43f72df00ae1535257614856..b9b8c906362ea6d847a9c90814b58e307340ae7c 100644 --- a/examples/tutorials/13_Modeling_Protein_Ligand_Interactions.ipynb +++ b/examples/tutorials/13_Modeling_Protein_Ligand_Interactions.ipynb @@ -25,7 +25,7 @@ }, "widgets": { "application/vnd.jupyter.widget-state+json": { - "7f02a8593e7047afbb724db76cf6f117": { + "e108ad0b43ba46a3951a6ddd9cdf49a7": { "model_module": "nglview-js-widgets", "model_name": "ColormakerRegistryModel", "state": { @@ -34,16 +34,16 @@ "_model_name": "ColormakerRegistryModel", "_msg_q": [], "_view_module": "nglview-js-widgets", - "_model_module_version": "2.7.5", + "_model_module_version": "2.7.7", "_msg_ar": [], "_ready": false, - "_view_module_version": "2.7.5", + "_view_module_version": "2.7.7", "_view_count": null, "_model_module": "nglview-js-widgets", - "layout": 
"IPY_MODEL_08e05f7872b444eaa7b68b650295ccb7" + "layout": "IPY_MODEL_b2792c7e538841c0b47f59d2c9298675" } }, - "88b020104a4e45b28011396c905cf8f0": { + "4c19ba4fe42d4b549691ac2d66099761": { "model_module": "nglview-js-widgets", "model_name": "NGLModel", "state": { @@ -54,17 +54,17 @@ "_camera_orientation": [], "frame": 0, "_view_module": "nglview-js-widgets", - "_ibtn_fullscreen": "IPY_MODEL_4914133a9450452ab4febd14ab10e1ab", + "_ibtn_fullscreen": "IPY_MODEL_dd95ef51e49047dd83ecc0a56891b4aa", "_camera_str": "orthographic", "_ngl_serialize": false, "picked": {}, "_model_module": "nglview-js-widgets", "_igui": null, - "_iplayer": "IPY_MODEL_23194dbd6a41452580659be1faf1ba25", - "layout": "IPY_MODEL_18e41587f81b4477b5f7bf7db0739c21", + "_iplayer": "IPY_MODEL_bec1543b732042ef918d293a94e3e036", + "layout": "IPY_MODEL_cc28166c13bd4ae39a7e1771d8acae96", "_view_width": "", "_ngl_coordinate_resource": {}, - "_view_module_version": "2.7.5", + "_view_module_version": "2.7.7", "_player_dict": {}, "_synced_repr_model_ids": [], "_ngl_version": "", @@ -72,7 +72,7 @@ "_dom_classes": [], "_model_name": "NGLModel", "_scene_position": {}, - "_model_module_version": "2.7.5", + "_model_module_version": "2.7.7", "gui_style": null, "background": "white", "_view_count": null, @@ -104,7 +104,7 @@ ] } }, - "bd4c86a1548a407fb3d55c1060810198": { + "8ec90fba285b445da23565d02e4c5ed9": { "model_module": "nglview-js-widgets", "model_name": "NGLModel", "state": { @@ -115,17 +115,17 @@ "_camera_orientation": [], "frame": 0, "_view_module": "nglview-js-widgets", - "_ibtn_fullscreen": "IPY_MODEL_ca7a42748b254b5690d0d2c4f5f23db5", + "_ibtn_fullscreen": "IPY_MODEL_3fb3c86ac18743088fc6e04bbefc4f66", "_camera_str": "orthographic", "_ngl_serialize": false, "picked": {}, "_model_module": "nglview-js-widgets", "_igui": null, - "_iplayer": "IPY_MODEL_c32d1bb3f1ff43fe8bedbb13bc0484c6", - "layout": "IPY_MODEL_c25caa50329a4e9da9097072c871cb49", + "_iplayer": "IPY_MODEL_b44295a614224f9e9361d0e02e43fa42", + "layout": 
"IPY_MODEL_bb6ebc908bae4630a33963eef3aa730b", "_view_width": "", "_ngl_coordinate_resource": {}, - "_view_module_version": "2.7.5", + "_view_module_version": "2.7.7", "_player_dict": {}, "_synced_repr_model_ids": [], "_ngl_version": "", @@ -133,7 +133,7 @@ "_dom_classes": [], "_model_name": "NGLModel", "_scene_position": {}, - "_model_module_version": "2.7.5", + "_model_module_version": "2.7.7", "gui_style": null, "background": "white", "_view_count": null, @@ -165,7 +165,7 @@ ] } }, - "775f136b5f0f4f73ab4d2184e9287a2e": { + "e415db751b98475185a05c4103a31244": { "model_module": "nglview-js-widgets", "model_name": "NGLModel", "state": { @@ -176,17 +176,17 @@ "_camera_orientation": [], "frame": 0, "_view_module": "nglview-js-widgets", - "_ibtn_fullscreen": "IPY_MODEL_a301b97c47e845448fbb7f25cb716775", + "_ibtn_fullscreen": "IPY_MODEL_e2f4fb6302934a9283bd06d0ad94c960", "_camera_str": "orthographic", "_ngl_serialize": false, "picked": {}, "_model_module": "nglview-js-widgets", "_igui": null, - "_iplayer": "IPY_MODEL_7f3154412ca74369bfce04821e947d6f", - "layout": "IPY_MODEL_063aab61ac344039ab726115f2e0b21e", + "_iplayer": "IPY_MODEL_dd8ecc76abdd480bb2d83095b7033be7", + "layout": "IPY_MODEL_ced6cd34b97749f7852b1fa8bd7ba10d", "_view_width": "", "_ngl_coordinate_resource": {}, - "_view_module_version": "2.7.5", + "_view_module_version": "2.7.7", "_player_dict": {}, "_synced_repr_model_ids": [], "_ngl_version": "", @@ -194,7 +194,7 @@ "_dom_classes": [], "_model_name": "NGLModel", "_scene_position": {}, - "_model_module_version": "2.7.5", + "_model_module_version": "2.7.7", "gui_style": null, "background": "white", "_view_count": null, @@ -264,27 +264,26 @@ "metadata": { "id": "QsmBgrqsqTr0", "colab_type": "code", - "outputId": "db177991-3d70-4a17-ebc5-09806afec171", "colab": { "base_uri": "https://localhost:8080/", - "height": 462 - } + "height": 170 + }, + "outputId": "df3a53c3-4de8-4739-d4d8-c2f3d0e0d0dd" }, "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo 
deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(additional_packages=['mdtraj'], version='2.3.0')" + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" ], "execution_count": 1, "outputs": [ { "output_type": "stream", "text": [ - "TensorFlow 1.x selected.\n", " % Total % Received % Xferd Average Speed Time Time Time Current\n", " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 22146 0 --:--:-- --:--:-- --:--:-- 22146\n" + "\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r 0 3489 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r100 3489 100 3489 0 0 21145 0 --:--:-- --:--:-- --:--:-- 21018\n" ], "name": "stdout" }, @@ -292,46 +291,69 @@ "output_type": "stream", "text": [ "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" + "all packages is already installed\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", + "# conda environments:\n", + "#\n", + "base * /root/miniconda\n", "\n" ], "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ox8mgBy8C5Zb", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 }, + "outputId": "fb91d942-8ae9-4a3c-833c-3511b7615a6e" + }, + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ], + "execution_count": 2, + "outputs": [ { "output_type": "stream", "text": [ - "deepchem-2.3.0 installation finished!\n" + "Requirement already satisfied: deepchem in /usr/local/lib/python3.6/dist-packages (2.4.0rc1.dev20200805143736)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n", + "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n", + "Requirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 
in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n" ], - "name": "stderr" + "name": "stdout" }, { - "output_type": "stream", - "text": [ - "CPU times: user 2.89 s, sys: 634 ms, total: 3.52 s\n", - "Wall time: 2min 18s\n" - ], - "name": "stdout" + "output_type": "execute_result", + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 2 } ] }, @@ -340,11 +362,7 @@ "metadata": { "id": "F5yjhSAeqTr_", "colab_type": "code", - "outputId": "a96cf86f-df40-4cf3-9257-25012eca8eaf", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 51 - } + "colab": {} }, "source": [ "import deepchem as dc\n", @@ -362,17 +380,8 @@ "\n", "raw_dataset = dc.utils.save.load_from_disk(dataset_file)" ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "File does not exist. 
Downloading file...\n", - "File downloaded...\n" - ], - "name": "stdout" - } - ] + "execution_count": 3, + "outputs": [] }, { "cell_type": "markdown", @@ -389,18 +398,18 @@ "metadata": { "id": "hQW5CvXHqTsD", "colab_type": "code", - "outputId": "481bcac9-5a0d-4e6c-c9e9-905c07686868", "colab": { "base_uri": "https://localhost:8080/", "height": 187 - } + }, + "outputId": "7cd20b0c-001c-4bde-94a3-b8ee832adbaf" }, "source": [ "print(\"Type of dataset is: %s\" % str(type(raw_dataset)))\n", "print(raw_dataset[:5])\n", "print(\"Shape of dataset is: %s\" % str(raw_dataset.shape))" ], - "execution_count": 3, + "execution_count": 4, "outputs": [ { "output_type": "stream", @@ -435,40 +444,27 @@ "metadata": { "id": "WCWAc-FSroM0", "colab_type": "code", - "outputId": "ebc2cfac-ab97-42d8-bc33-ceeba2dd2599", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 51 - } + "colab": {} }, "source": [ - "!pip install -q nglview" + "!pip install -q nglview mdtraj" ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "\u001b[K |████████████████████████████████| 5.2MB 2.8MB/s \n", - "\u001b[?25h Building wheel for nglview (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n" - ], - "name": "stdout" - } - ] + "execution_count": 5, + "outputs": [] }, { "cell_type": "code", "metadata": { "id": "aBRWy9I5qTsI", "colab_type": "code", - "outputId": "a7c814a8-9db9-4a39-b3a1-67467fc8f9dc", "colab": { "base_uri": "https://localhost:8080/", "height": 17, "referenced_widgets": [ - "7f02a8593e7047afbb724db76cf6f117" + "e108ad0b43ba46a3951a6ddd9cdf49a7" ] - } + }, + "outputId": "84437378-423a-4896-9ed4-fb2adfbe59b8" }, "source": [ "import nglview\n", @@ -477,18 +473,18 @@ "import mdtraj as md\n", "import numpy as np" ], - "execution_count": 5, + "execution_count": 6, "outputs": [ { "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "7f02a8593e7047afbb724db76cf6f117", + "model_id": "e108ad0b43ba46a3951a6ddd9cdf49a7", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "_ColormakerRegistry()" + "" ] }, "metadata": { @@ -525,7 +521,7 @@ " molecule_mdtraj = md.load(molecule_file)\n", " return molecule_mdtraj" ], - "execution_count": 0, + "execution_count": 7, "outputs": [] }, { @@ -550,7 +546,7 @@ "protein_mdtraj = convert_lines_to_mdtraj(first_protein)\n", "ligand_mdtraj = convert_lines_to_mdtraj(first_ligand)" ], - "execution_count": 0, + "execution_count": 8, "outputs": [] }, { @@ -568,26 +564,26 @@ "metadata": { "id": "5NyQYfzUqTsa", "colab_type": "code", - "outputId": "b49a4294-1c75-4ea5-f6e6-393609ba5f80", "colab": { "base_uri": "https://localhost:8080/", "height": 17, "referenced_widgets": [ - "88b020104a4e45b28011396c905cf8f0" + "4c19ba4fe42d4b549691ac2d66099761" ] - } + }, + "outputId": "79dcfb80-25c6-475c-bf67-a1516558e616" }, "source": [ "v = nglview.show_mdtraj(ligand_mdtraj)\n", "v" ], - "execution_count": 8, + "execution_count": 9, "outputs": [ { "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "88b020104a4e45b28011396c905cf8f0", + "model_id": "4c19ba4fe42d4b549691ac2d66099761", "version_minor": 0, 
"version_major": 2 }, @@ -616,26 +612,26 @@ "metadata": { "id": "zKGqEq0wqTsi", "colab_type": "code", - "outputId": "cfbd2fbf-b848-45e5-8d1d-67d76915ff2a", "colab": { "base_uri": "https://localhost:8080/", "height": 17, "referenced_widgets": [ - "bd4c86a1548a407fb3d55c1060810198" + "8ec90fba285b445da23565d02e4c5ed9" ] - } + }, + "outputId": "895c8197-c12e-4b46-f1e2-dfd5672c1221" }, "source": [ "view = nglview.show_mdtraj(protein_mdtraj)\n", "view" ], - "execution_count": 9, + "execution_count": 10, "outputs": [ { "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "bd4c86a1548a407fb3d55c1060810198", + "model_id": "8ec90fba285b445da23565d02e4c5ed9", "version_minor": 0, "version_major": 2 }, @@ -677,7 +673,7 @@ " return protein\n", "complex_mdtraj = combine_mdtraj(protein_mdtraj, ligand_mdtraj)" ], - "execution_count": 0, + "execution_count": 11, "outputs": [] }, { @@ -695,26 +691,26 @@ "metadata": { "id": "YxM-ESaEqTsw", "colab_type": "code", - "outputId": "4fd2a468-3370-4739-f88a-0da5802a9250", "colab": { "base_uri": "https://localhost:8080/", "height": 17, "referenced_widgets": [ - "775f136b5f0f4f73ab4d2184e9287a2e" + "e415db751b98475185a05c4103a31244" ] - } + }, + "outputId": "bfc62df3-9b7d-48f5-8215-85f79ec7b0d3" }, "source": [ "v = nglview.show_mdtraj(complex_mdtraj)\n", "v" ], - "execution_count": 11, + "execution_count": 12, "outputs": [ { "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "775f136b5f0f4f73ab4d2184e9287a2e", + "model_id": "e415db751b98475185a05c4103a31244", "version_minor": 0, "version_major": 2 }, @@ -747,15 +743,92 @@ "metadata": { "id": "UpU1chIBqTs1", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "405c73d7-ceb6-43a3-f105-c94c432c2b6b" }, "source": [ "grid_featurizer = dc.feat.RdkitGridFeaturizer(\n", " voxel_width=16.0, feature_types=[\"ecfp\", \"splif\", 
\"hbond\", \"pi_stack\", \"cation_pi\", \"salt_bridge\"], \n", " ecfp_power=5, splif_power=5, parallel=True, flatten=True, sanitize=True)" ], - "execution_count": 0, - "outputs": [] + "execution_count": 13, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--- Logging error ---\n", + "Traceback (most recent call last):\n", + " File \"/usr/lib/python3.6/logging/__init__.py\", line 994, in emit\n", + " msg = self.format(record)\n", + " File \"/usr/lib/python3.6/logging/__init__.py\", line 840, in format\n", + " return fmt.format(record)\n", + " File \"/usr/lib/python3.6/logging/__init__.py\", line 577, in format\n", + " record.message = record.getMessage()\n", + " File \"/usr/lib/python3.6/logging/__init__.py\", line 338, in getMessage\n", + " msg = msg % self.args\n", + "TypeError: not all arguments converted during string formatting\n", + "Call stack:\n", + " File \"/usr/lib/python3.6/runpy.py\", line 193, in _run_module_as_main\n", + " \"__main__\", mod_spec)\n", + " File \"/usr/lib/python3.6/runpy.py\", line 85, in _run_code\n", + " exec(code, run_globals)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py\", line 16, in \n", + " app.launch_new_instance()\n", + " File \"/usr/local/lib/python3.6/dist-packages/traitlets/config/application.py\", line 664, in launch_instance\n", + " app.start()\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/kernelapp.py\", line 499, in start\n", + " self.io_loop.start()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tornado/platform/asyncio.py\", line 132, in start\n", + " self.asyncio_loop.run_forever()\n", + " File \"/usr/lib/python3.6/asyncio/base_events.py\", line 438, in run_forever\n", + " self._run_once()\n", + " File \"/usr/lib/python3.6/asyncio/base_events.py\", line 1451, in _run_once\n", + " handle._run()\n", + " File \"/usr/lib/python3.6/asyncio/events.py\", line 145, in _run\n", + " self._callback(*self._args)\n", + " File 
\"/usr/local/lib/python3.6/dist-packages/tornado/ioloop.py\", line 758, in _run_callback\n", + " ret = callback()\n", + " File \"/usr/local/lib/python3.6/dist-packages/tornado/stack_context.py\", line 300, in null_wrapper\n", + " return fn(*args, **kwargs)\n", + " File \"/usr/local/lib/python3.6/dist-packages/zmq/eventloop/zmqstream.py\", line 548, in \n", + " self.io_loop.add_callback(lambda : self._handle_events(self.socket, 0))\n", + " File \"/usr/local/lib/python3.6/dist-packages/zmq/eventloop/zmqstream.py\", line 462, in _handle_events\n", + " self._handle_recv()\n", + " File \"/usr/local/lib/python3.6/dist-packages/zmq/eventloop/zmqstream.py\", line 492, in _handle_recv\n", + " self._run_callback(callback, msg)\n", + " File \"/usr/local/lib/python3.6/dist-packages/zmq/eventloop/zmqstream.py\", line 444, in _run_callback\n", + " callback(*args, **kwargs)\n", + " File \"/usr/local/lib/python3.6/dist-packages/tornado/stack_context.py\", line 300, in null_wrapper\n", + " return fn(*args, **kwargs)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/kernelbase.py\", line 283, in dispatcher\n", + " return self.dispatch_shell(stream, msg)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/kernelbase.py\", line 233, in dispatch_shell\n", + " handler(stream, idents, msg)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/kernelbase.py\", line 399, in execute_request\n", + " user_expressions, allow_stdin)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/ipkernel.py\", line 208, in do_execute\n", + " res = shell.run_cell(code, store_history=store_history, silent=silent)\n", + " File \"/usr/local/lib/python3.6/dist-packages/ipykernel/zmqshell.py\", line 537, in run_cell\n", + " return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)\n", + " File \"/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py\", line 2718, in run_cell\n", + " interactivity=interactivity, compiler=compiler, 
result=result)\n", + " File \"/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py\", line 2822, in run_ast_nodes\n", + " if self.run_code(code, result):\n", + " File \"/usr/local/lib/python3.6/dist-packages/IPython/core/interactiveshell.py\", line 2882, in run_code\n", + " exec(code_obj, self.user_global_ns, self.user_ns)\n", + " File \"\", line 3, in \n", + " ecfp_power=5, splif_power=5, parallel=True, flatten=True, sanitize=True)\n", + " File \"/usr/local/lib/python3.6/dist-packages/deepchem/feat/rdkit_grid_featurizer.py\", line 952, in __init__\n", + " DeprecationWarning)\n", + "Message: 'parallel argument was removed and it is ignored, using it will result in error in version 1.4'\n", + "Arguments: (,)\n" + ], + "name": "stderr" + } + ] }, { "cell_type": "markdown", @@ -777,7 +850,7 @@ "source": [ "compound_featurizer = dc.feat.CircularFingerprint(size=128)" ], - "execution_count": 0, + "execution_count": 14, "outputs": [] }, { @@ -795,48 +868,14 @@ "metadata": { "id": "1HNhZ9jHqTtL", "colab_type": "code", - "outputId": "28535766-40f2-4491-da27-6a27977ee40e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 408 - } + "colab": {} }, "source": [ "pdbbind_tasks, (train_dataset, valid_dataset, test_dataset), transformers = dc.molnet.load_pdbbind_grid(\n", " featurizer=\"ECFP\", subset=\"refined\")" ], - "execution_count": 14, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/refined_smiles_labels.csv\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "TIMING: featurizing shard 0 took 9.982 s\n", - "TIMING: dataset construction took 10.126 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.166 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.085 s\n", - 
"Loading dataset from disk.\n", - "TIMING: dataset construction took 0.081 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.142 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.022 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.023 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] + "execution_count": 15, + "outputs": [] }, { "cell_type": "markdown", @@ -866,7 +905,7 @@ "model = dc.models.SklearnModel(sklearn_model)\n", "model.fit(train_dataset)" ], - "execution_count": 0, + "execution_count": 16, "outputs": [] }, { @@ -874,11 +913,11 @@ "metadata": { "id": "d-imE_PBqTtT", "colab_type": "code", - "outputId": "d0b7e86b-d50a-457b-a4db-d90b0f3ceed2", "colab": { "base_uri": "https://localhost:8080/", "height": 85 - } + }, + "outputId": "ddcbc27e-9ee9-4450-f7af-b6f2a3337a0a" }, "source": [ "from deepchem.utils.evaluate import Evaluator\n", @@ -894,15 +933,21 @@ "valid_r2score = evaluator.compute_model_performance([metric])\n", "print(\"RF Valid set R^2 %f\" % (valid_r2score[\"r2_score\"]))" ], - "execution_count": 16, + "execution_count": 17, "outputs": [ { "output_type": "stream", "text": [ - "computed_metrics: [0.8433487532863048]\n", - "RF Train set R^2 0.843349\n", - "computed_metrics: [0.45434010385273105]\n", - "RF Valid set R^2 0.454340\n" + "n_samples is a deprecated argument which is ignored.\n", + "n_samples is a deprecated argument which is ignored.\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "RF Train set R^2 0.850540\n", + "RF Valid set R^2 0.372395\n" ], "name": "stdout" } @@ -923,23 +968,23 @@ "metadata": { "id": "CHAvWVCXqTtb", "colab_type": "code", - "outputId": "f3eeaae0-4c9e-4733-be3d-c99174a0b2ee", "colab": { "base_uri": "https://localhost:8080/", "height": 51 - } + }, + "outputId": "3a484924-3a98-4078-c503-ac9556a43e12" }, "source": [ "predictions = model.predict(test_dataset)\n", "print(predictions[:10])" 
], - "execution_count": 17, + "execution_count": 18, "outputs": [ { "output_type": "stream", "text": [ - "[-0.64106832 -0.80219175 -1.19084758 -1.11424137 -1.21312906 -0.73018821\n", - " -1.00686205 -0.17348379 -0.98073392 -0.10108712]\n" + "[-1.23524245 -0.97359773 -0.56976069 -0.87289442 -0.98665882 -0.38179604\n", + " -0.14367127 -1.20101768 0.00373068 0.15792326]\n" ], "name": "stdout" } @@ -970,32 +1015,14 @@ "metadata": { "id": "jhrZqqCDqTth", "colab_type": "code", - "outputId": "30dcfe25-5c90-4634-bca9-1e924a695ad2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 136 - } + "colab": {} }, "source": [ "pdbbind_tasks, (train_dataset, valid_dataset, test_dataset), transformers = dc.molnet.load_pdbbind_grid(\n", " featurizer=\"grid\", subset=\"refined\")" ], - "execution_count": 18, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.236 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.073 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.072 s\n", - "Loading dataset from disk.\n" - ], - "name": "stdout" - } - ] + "execution_count": 19, + "outputs": [] }, { "cell_type": "markdown", @@ -1021,7 +1048,7 @@ "model = dc.models.SklearnModel(sklearn_model)\n", "model.fit(train_dataset)" ], - "execution_count": 0, + "execution_count": 20, "outputs": [] }, { @@ -1039,11 +1066,11 @@ "metadata": { "id": "zXyNarwnqTtp", "colab_type": "code", - "outputId": "0030b5d5-458c-4b94-fbc1-6eef3768404f", "colab": { "base_uri": "https://localhost:8080/", "height": 85 - } + }, + "outputId": "6aaff35c-08b4-4833-ec8b-d3b8e63a618b" }, "source": [ "metric = dc.metrics.Metric(dc.metrics.r2_score)\n", @@ -1056,15 +1083,21 @@ "valid_r2score = evaluator.compute_model_performance([metric])\n", "print(\"RF Valid set R^2 %f\" % (valid_r2score[\"r2_score\"]))" ], - "execution_count": 20, + "execution_count": 21, "outputs": [ { "output_type": 
"stream", "text": [ - "computed_metrics: [0.8954267076811548]\n", - "RF Train set R^2 0.895427\n", - "computed_metrics: [0.4608614366143733]\n", - "RF Valid set R^2 0.460861\n" + "n_samples is a deprecated argument which is ignored.\n", + "n_samples is a deprecated argument which is ignored.\n" + ], + "name": "stderr" + }, + { + "output_type": "stream", + "text": [ + "RF Train set R^2 0.897545\n", + "RF Valid set R^2 0.402932\n" ], "name": "stdout" } @@ -1105,102 +1138,27 @@ "metadata": { "id": "uxV2wE_5qTt3", "colab_type": "code", - "outputId": "5dcf5163-30a2-432f-e1b4-1a48a7f71ca1", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000 - } + "colab": {} }, "source": [ - "def rf_model_builder(model_params, model_dir):\n", - " sklearn_model = RandomForestRegressor(**model_params)\n", - " sklearn_model.random_state = seed\n", - " return dc.models.SklearnModel(sklearn_model, model_dir)\n", + "# def rf_model_builder(model_params, model_dir):\n", + "# sklearn_model = RandomForestRegressor(**model_params)\n", + "# sklearn_model.random_state = seed\n", + "# return dc.models.SklearnModel(sklearn_model, model_dir)\n", "\n", - "params_dict = {\n", - " \"n_estimators\": [10, 50, 100],\n", - " \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", - "}\n", + "# params_dict = {\n", + "# \"n_estimators\": [10, 50, 100],\n", + "# \"max_features\": [\"auto\", \"sqrt\", \"log2\", None],\n", + "# }\n", "\n", - "metric = dc.metrics.Metric(dc.metrics.r2_score)\n", - "optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", - "best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", - " params_dict, train_dataset, valid_dataset, transformers,\n", - " metric=metric)" + "# metric = dc.metrics.Metric(dc.metrics.r2_score)\n", + "# optimizer = dc.hyper.HyperparamOpt(rf_model_builder)\n", + "# best_rf, best_rf_hyperparams, all_rf_results = optimizer.hyperparam_search(\n", + "# params_dict, train_dataset, valid_dataset, transformers,\n", + "# 
metric=metric)" ], - "execution_count": 21, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Fitting model 1/12\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'auto'}\n", - "computed_metrics: [0.4527347088180085]\n", - "Model 1/12, Metric r2_score, Validation set 0: 0.452735\n", - "\tbest_validation_score so far: 0.452735\n", - "Fitting model 2/12\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.4608614366143733]\n", - "Model 2/12, Metric r2_score, Validation set 1: 0.460861\n", - "\tbest_validation_score so far: 0.460861\n", - "Fitting model 3/12\n", - "hyperparameters: {'n_estimators': 10, 'max_features': 'log2'}\n", - "computed_metrics: [0.40215050034606037]\n", - "Model 3/12, Metric r2_score, Validation set 2: 0.402151\n", - "\tbest_validation_score so far: 0.460861\n", - "Fitting model 4/12\n", - "hyperparameters: {'n_estimators': 10, 'max_features': None}\n", - "computed_metrics: [0.4527347088180085]\n", - "Model 4/12, Metric r2_score, Validation set 3: 0.452735\n", - "\tbest_validation_score so far: 0.460861\n", - "Fitting model 5/12\n", - "hyperparameters: {'n_estimators': 50, 'max_features': 'auto'}\n", - "computed_metrics: [0.49621726686995704]\n", - "Model 5/12, Metric r2_score, Validation set 4: 0.496217\n", - "\tbest_validation_score so far: 0.496217\n", - "Fitting model 6/12\n", - "hyperparameters: {'n_estimators': 50, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.4931560486803085]\n", - "Model 6/12, Metric r2_score, Validation set 5: 0.493156\n", - "\tbest_validation_score so far: 0.496217\n", - "Fitting model 7/12\n", - "hyperparameters: {'n_estimators': 50, 'max_features': 'log2'}\n", - "computed_metrics: [0.4619425746467314]\n", - "Model 7/12, Metric r2_score, Validation set 6: 0.461943\n", - "\tbest_validation_score so far: 0.496217\n", - "Fitting model 8/12\n", - "hyperparameters: {'n_estimators': 50, 'max_features': None}\n", - "computed_metrics: 
[0.49621726686995704]\n", - "Model 8/12, Metric r2_score, Validation set 7: 0.496217\n", - "\tbest_validation_score so far: 0.496217\n", - "Fitting model 9/12\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'auto'}\n", - "computed_metrics: [0.5019612740243959]\n", - "Model 9/12, Metric r2_score, Validation set 8: 0.501961\n", - "\tbest_validation_score so far: 0.501961\n", - "Fitting model 10/12\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'sqrt'}\n", - "computed_metrics: [0.48994241350618273]\n", - "Model 10/12, Metric r2_score, Validation set 9: 0.489942\n", - "\tbest_validation_score so far: 0.501961\n", - "Fitting model 11/12\n", - "hyperparameters: {'n_estimators': 100, 'max_features': 'log2'}\n", - "computed_metrics: [0.47513889029551215]\n", - "Model 11/12, Metric r2_score, Validation set 10: 0.475139\n", - "\tbest_validation_score so far: 0.501961\n", - "Fitting model 12/12\n", - "hyperparameters: {'n_estimators': 100, 'max_features': None}\n", - "computed_metrics: [0.5019612740243959]\n", - "Model 12/12, Metric r2_score, Validation set 11: 0.501961\n", - "\tbest_validation_score so far: 0.501961\n", - "computed_metrics: [0.931461486646433]\n", - "Best hyperparameters: (100, None)\n", - "train_score: 0.931461\n", - "validation_score: 0.501961\n" - ], - "name": "stdout" - } - ] + "execution_count": 22, + "outputs": [] }, { "cell_type": "markdown", @@ -1217,46 +1175,28 @@ "metadata": { "id": "5u96D9j1qTt9", "colab_type": "code", - "outputId": "1681de3e-68c2-4b6e-fb3c-3be7e80ea3ca", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 295 - } + "colab": {} }, "source": [ - "%matplotlib inline\n", + "# %matplotlib inline\n", "\n", - "import matplotlib\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", + "# import matplotlib\n", + "# import numpy as np\n", + "# import matplotlib.pyplot as plt\n", "\n", - "rf_predicted_test = best_rf.predict(test_dataset)\n", - "rf_true_test = test_dataset.y\n", - 
"plt.scatter(rf_predicted_test, rf_true_test)\n", - "plt.xlabel('Predicted pIC50s')\n", - "plt.ylabel('True IC50')\n", - "plt.title(r'RF predicted IC50 vs. True pIC50')\n", - "plt.xlim([2, 11])\n", - "plt.ylim([2, 11])\n", - "plt.plot([2, 11], [2, 11], color='k')\n", - "plt.show()" + "# rf_predicted_test = best_rf.predict(test_dataset)\n", + "# rf_true_test = test_dataset.y\n", + "# plt.scatter(rf_predicted_test, rf_true_test)\n", + "# plt.xlabel('Predicted pIC50s')\n", + "# plt.ylabel('True IC50')\n", + "# plt.title(r'RF predicted IC50 vs. True pIC50')\n", + "# plt.xlim([2, 11])\n", + "# plt.ylim([2, 11])\n", + "# plt.plot([2, 11], [2, 11], color='k')\n", + "# plt.show()" ], - "execution_count": 22, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydeXhU1dnAf2/CAAkgQUSUCIraioACBUVLXQCLWjfqhlrrwqYWrCxiweKCVUFxwZVPFBU/BUFQ3FCgAsWiqGBANvlUBCWIIBBAEmBI3u+PmQmTyb137qx3kpzf8+Qhmbn3nHduyHnPeVdRVQwGg8FgAMjyWgCDwWAwZA5GKRgMBoOhHKMUDAaDwVCOUQoGg8FgKMcoBYPBYDCUY5SCwWAwGMoxSsGQNkREReT44Pf/IyJ3pWHOG0Tkv6mex2CoLhilUMURkfUiUiIiv4rIZhF5WUTqh73/sojsD74f+urlpcwAqnqzqv4r2nUiskBE+qZCBhE5JqioaoW9dqqIzBKRIhHZLiKfi8iNEdeHP8u7wu6tIyIvisiu4O9iSCrktvksfwmTqUREysLlTJccFnJVm2dcUzBKoXpwkarWB9oDHYAREe8/rKr1w76mJjph+B95dUFETgfmAf8BjgcaA7cA50dcmhf2LMMV273Ab4Cjga7AHSJyXsoFB1T1tZBMQXk3hf/Ow68Vkex0yGRFVX7GNQWjFKoRqroZmE1AOcRMcIf2dxFZJyK/iMhYEckKvneDiCwSkcdFZBtwb3DX9oiI/CAiPwdNQjlh4w0TkZ9EZJOI9I6Y62URuT/s50tEZFlwB/idiJwnIg8AZwBPB3eMTwevbSUic4O7zLUicmXYOI1F5J3gOJ8Dx8XwCMYCk1T1IVX9RQMsVdUro94Z4HrgX6q6Q1XXAM8DN0ReFHxuRSLSNuy1JsEd/uEicpiIvBe2k/449HuIh+CzHh/cne8BukaewCLNbE7P2GL8BSIyOrjj3yUib4vIoTaXp+UZG+LHKIVqhIgcRWDH9W0Cw/wZ6AT8DrgECF/MOwPrgKbAA8AY4LcElNDxQD5wd1CW84DbgT8S2Nmd4yD3qcArwDAgDzgTWK+q/wQ+BgYGd4wDRaQeMBeYDBwOXAU8KyKtg8M9A+wFjgzKXkEZOciQC5wOTHdx+QYR2SgiL4nIYcH7GwXnXB523X
KgTeTNqroPeBO4OuzlK4H/qOoWYCiwEWhC4FnfCSRaj+YaAr+zBoCjj8XFM7biOgLP+kjgAPCkxbhpe8aG+DFKoXowU0R2Az8CW4B7It6/PbjrLBKRX6KM9ZCqblfVH4BxVFy4NqnqU6p6gMDC2x8YHLx+N/AggQUEAovcS6q6UlX3EDj229EHeFFV56pqmaoWqurXNtdeSEBhvKSqB1S1AJgBXBE0i1wG3K2qe1R1JTApyucN0YjA38NPDtf8ApxCwHTRkcAC+1rwvZCJZmfY9TuD11gxmYPPCgKL9uTg934Ci9/RqupX1Y818SJlb6vqouDz3RvlWttn7HDP/4b9ru8CrrQwU6X7GRviwCiF6kFPVW0AnA20Ag6LeP8RVc0LfkW+F8mPYd9vAJrZvNcEyAWWhhQO8GHwdYL3RY5lR3PguyhyhTga6Bym5IqAvwBHBOeuFcO84ewAyggsxpao6q+quiS4UP4MDAR6iEgDIOTMPSTslkOA3TbDzQdyRaSziBxD4LT1VvC9sQROe3OCprzhLj+DEz9Gv6Qcp2fsZvwNgI/K/w/T/YwNcWCUQjVCVf8DvAw8ksAwzcO+bwFsCp8i7PtfgBKgTZjCaRjm1PzJYiw7fsTe9h+5Q/6RgJklL+yrvqreAmwlYLpwO+/BSVSLgU8JnDTcEpItS1V3EPjM7cLebwesspmvFJhG4CR2NfBe8LSFqu5W1aGqeixwMTBERLrHIJeTrCH2EFDqIcIXfKdnbEfkM/cT+D9yUIA0P2NDfBilUP0YB/xRRNpFvdKaYSLSSESaA7cBlpFKqlpGwMn3uIgcDiAi+SJybvCSacANItI6aEuONGmFMxG4UUS6i0hWcJxWwfd+Bo4Nu/Y94Lci8lcR8QW/ThGRE4ML7ZsEnOC5QRv49TF89juCMg8TkcbBz9RORF4Pft9ZRE4IytiYgN18gaqGzBmvACODz68V0I+AkrZjMtCLwC48ZDpCRC4UkeNFRAiYR0oJ7LCTyTLg0uBzOp6ACS+E7TN2GO/asN/1fcD04O8jknQ/Y0OMGKVQzVDVrQT+cO6Oc4i3gaUEFo33CSzYdvyDgJljsYjsAv4NnBCU4wMCCmpe8Jp5DjJ/DtwIPE5gEfwPARMGwBPA5SKyQ0SeDO6mexCwx28CNgMPAXWC1w8kYHveTGCxeMntB1fVT4Buwa91IrIdmADMCl5yLAET2W5gJbCPij6XewiYwTYEP8NYVf3QYb7PCOzYmwEfhL31GwLP8lcCO+tnVXU+gIh8ICJ3uv1MDjwO7CegdCdx0G6Pi2dsxf8SeN6bgbrA360uSvczNsSOJO6/MlQXRESB36hqItFLhhqGiCwAXlXVF7yWxZA45qRgMBgMhnJSphQkkIq+RURWhr12hYiskkAKfqdUzW0wGAyG+EiZ+UhEziRgE31FVdsGXzuRgMPsOeB2VV2SkskNBoPBEBcpq1+jqguD8dfhr60BCARVGAwGgyHTyNiiZiLSn0DGLPXq1evYqlWrKHcYDAaDYf/+/axfv57du3cD/KKqTaLdE07GKgVVnUAgVI1OnTrpkiXG0mQwGAx2lJaW8swzz3DnnXciIjz99NMMHDjQbUZ/OSb6yGAwGKo4a9as4cwzz+S2227jjDPOYOXKlQwYMCCusYxSMBgMhiqK3+/ngQceoH379nz99de88sorzJo1i6OPPjr6zTakzHwkIlMIFGg7TEQ2EshE3A48RaBw2fsiskxVz7UfxWAwGAxWfPnll/Tu3Zvly5dzxRVX8NRTT9G0adOEx01l9NHVNm+9ZfO6wWAIY2ZBIWNnr2VTUQnN8nIYdu4JAJVe69kh32NJqyZWz7cqPMuSkhJGjRrFI488QpMmTXjzzTf585//nLTxM9bRbDDUZGYWFDLizRWU+AM15QqLShg2fTko+Mu0/LURb64AqBKLWSZh9XyrwrNcuHAhffv25ZtvvqFPnz6MHTuWRo0aJXUO41MwGDKQsbPXli9YIf
ylWq4QQpT4Sxk7e206RasWWD3fTH6Wu3btYsCAAZx11ln4/X7mzp3LCy+8kHSFAEYpGAwZyaaikpRcawhg98wy8Vl+8MEHtG3blvHjxzNo0CBWrlzJOefYdrdNGKMUDIYMpFleTkquNQSwe2aZ9Cy3bdvGddddx5/+9Cfq16/PokWLePzxx6lXr15K5zVKwWDIQIadewI5vootjrOzKpeHyfFllzugDe6xer6Z8ixVlWnTpnHiiScyZcoU7rrrLgoKCjj99NPTMr9xNBsMGUjI2RmKjsnL9fHr3gOEW8EFuKxjfkY7RjORUNRRib+UbBFKVcnPkOijTZs28be//Y23336bjh07MnfuXNq1i7eJYnwYpWAwZCg9Oxxc8LuMmceOYn+F9xWY//VWDySrukRGHZWqlp8QvFQIqsqLL77I0KFD2bdvHw8//DCDBw+mVq30L9HGfGQwVAGqkmM0k8nEqKN169Zxzjnn0LdvX9q1a8dXX33FsGHDPFEIYJSCwVAlqAqO0apAJinX0tJSxo0bx0knncQXX3zB+PHjmT9/Pr/5zW/SLks4RikYDFUAO8do11ZN6DJmHi2Hv0+XMfOYWVDokYRVg0xRrqtWraJLly4MHjyYs88+m1WrVnHzzTeTleX9kuy9BAaDISo9O+Qz+tKTyM/LQYD8vBwu65jPjKWFFBaVoBzMyjWKwR6vo47279/Pv/71Lzp06MC3337Lq6++ynvvvUfz5s3TMr8bjKPZYKgihDueIeB8trOPex1Fk6lERnWls+bRF198QZ8+fVixYgVXXXUVTzzxBIcffnjK540VoxQMhipKJtnH3ZApBegilWuqKS4u5p577uGxxx7jiCOO4O233+biiy9O2/yxYpSCwVBFaZaXQ6GFAshE53NVLUCXKAsWLKBfv358++239OvXj7Fjx9KwYUOvxXLE+BQMhiqK1/bxWMjEUNBUsnPnTm6++Wa6du1KWVkZH330ERMmTMh4hQApVAoi8qKIbBGRlWGvHSoic0Xkm+C/yS/xZzDUEKycz6MvPSkjd95VzdSVCO+//z5t2rTh+eefZ8iQIaxYsYJu3bp5LZZrUmk+ehl4Gngl7LXhwEeqOkZEhgd//kcKZTAYqjXpto/HS1UydcXL1q1bGTRoEJMnT6ZNmzbMmDGDzp07ey1WzKTspKCqCwm03wznEmBS8PtJQM9UzW8wGDKHqmTqihVVZcqUKbRu3Zo33niDe++9ly+//LJKKgRIv6O5qar+FPx+M5B4Q1GDwZDxeBkKmko2btzILbfcwnvvvcepp57KxIkTadu2rddiJYRn0UeqqiKidu+LSH+gP0CLFi3SJpfBYEgNyTB1xRvWmuxw2LKyMl544QWGDRuG3+/n0Ucf5bbbbiM7Ozv6zRlOupXCzyJypKr+JCJHAlvsLlTVCcAEgE6dOtkqD4PBUDOIN6w12eGwofDSBQsW0LVrV55//nmOO+64mMfJVNIdkvoOcH3w++uBt9M8v8FQY5hZUFit6iLFG9aarHDY0tJSHn30UU4++WS+/PJLJkyYwEcffVStFAKk8KQgIlOAs4HDRGQjcA8wBpgmIn2ADcCVqZrfYKjJVMdksXjDWpMRDrty5Up69+7NF198wUUXXcT48ePJz0/9c/QiCzxlSkFVr7Z5q3uq5jQYDAGcdsdVVSnEG9aaSDjsvn37GD16NA8++CANGzZkypQp9OrVC5HKrVGTjVeK3WQ0GwxVGDsTUSYliyXLjBVvWGu893322Wd07NiRUaNGceWVV7JmzRquuuqqtCgE8C4L3NQ+MhiqKE47yXh3x8k2VyRztxtvWGus9+3Zs4e77rqLcePGkZ+fz3vvvccFF1wQk6zJwCvFbpSCwVBFcdpJDjv3BIZNX46/9GDgni9bHHfHqTBXJNuMFW9Yq9v75s2bR79+/Vi3bh0333wzDz30EIccckjM8yUDr7LAjfnIYKiiRN1JRgZyRwnsToW5IpPMWE4UFRXRr1
8/unfvTlZWFgsWLGD8+PGeKQTwLgvcKAWDoYri1Fpy7Oy1+MsqagF/mTou8PEu4E4+g0xpf+nEO++8Q5s2bXjxxRcZNmwYy5cv56yzzvJaLM8KHhqlYDBUUZx2kvEs8PEs4CGTk11L0EyuebRlyxauuuoqLrnkEho3bsxnn33Gww8/TG5urteildOzQz6Lhnfj+zEXsGh4t7REjhmlYDBUUZx2kvEs8PEs4NFMTtF2u14k2Kkqr776KieeeCJvvfUW//rXv1iyZAmdOnVK+dxVAeNoNhiqMHYO1GHnnlDBaQzRF/h4onvcnEjsZPQiDv/HH3/k5ptvZtasWZx22mlMnDiR1q1bp2SuqopRCgZDNSSR8M1YFuREImTSmWBXVlbGc889xz/+8Q9KS0sZN24cAwcOrBYF7JKNUQoGQzUlmQ147PIX4jmRhEhXZNI333xD3759WbhwId27d2fChAkce+yxSZ2jOmGUgsFgcMSNmSeehLdUx+EfOHCAxx57jHvuuYc6deowceJEbrzxxrRlJFdVjFIwGAyORDPzxHsiSeSUEY3ly5fTp08fli5dSs+ePXnmmWdo1qxZwuPWBIxSMBhiwIuqlV7LkyozTyq6se3bt4/777+fMWPGcOihhzJt2jQuv/xyczqIAaMUDAaXuDGjpFNppCt6J5VmnmT6PT799FP69OnDmjVruO6663jsscdo3LhxUsauSRilYKhRJLJoRzOjpDvEMlnRO9GeSaJmHqvxQ/InQ3n++uuvjBw5kieffJKjjjqKWbNmcf7558c1lsEoBUMNItFFO5oZJd09DJJh1nHzTBIx81iNP/SN5ZSGleBIRHnOnTuX/v37s379egYMGMDo0aNp0KBBTGMYKuJJRrOI3CYiK0VklYgM8kIGQ80j0YJv0bKE7RbjwqKSlGTrJqOukNtnElluAXCViWw1fmlZ5cp8sRbe27FjB3369KFHjx7Url2bhQsX8vTTTxuFkATSrhREpC3QDzgVaAdcKCLHp1sOQ+aSqtIHie6so5WBcFqMI2sCJYNk1BWK55lEq3fkdpx4r33rrbdo3bo1kyZNYvjw4SxfvpwzzjjD9TwGZ7w4KZwIfKaqxap6APgPcKkHchgykFgWnFhJdGcdrY6P1SIdTrK7ZiWjimY8zySWE1csp5Zo127evJkrrriCSy+9lCOOOILPP/+c0aNHU7du3QrXpWpT4UWdJi/wwqewEnhARBoDJcCfgCWRF4lIf6A/QIsWLdIqoME7UmmXT0ZcvFO0TLjt3SpaB5KfrZto9E48zySW04XV+FZI8Fqo7Ji+vcdv2b1yHoMGDWLPnj088MADDBs2DJ/PV2mcVDn7veqX7AVpPymo6hrgIWAO8CGwDKj0P0ZVJ6hqJ1Xt1KRJkzRLafCKVJY+SEd9+pDtPT/GHbhXu9B4nkksp4vI8Rvl+vBlVcwZEOAvp7WoEMEVOilu2LCBv17Rk+uvv54TTzyR5cuXc+edd1oqBEhdX2Ov+iV7gSfRR6o6EZgIICIPAhu9kMOQeaS69EFoZx3ajQ6euqy8faXbaBo3UTix7MC93oXGetqI9XQROb7TMwwtvqpl/Fowix3/mQSqtLxoIB/PfIKsLOd9bKo2FVWlg1wy8EQpiMjhqrpFRFoQ8Cec5oUchswjlaUPQsS7CMdyXyxhnOkIZU1mUl2imchOSmhTUQn+bRvZ9uGT7Nu4mrrHdKDxeQPRhk2jKgRI3abCq37JXuBVnsKMoE/BDwxQ1SKP5DBkGPEsOLEuePEuwrHe53YHnqx8A7sEscKiEoSDLZqTcRKJ/D2FN9WJF7/fT9mymWz69ySyfHVo/KfB1GvbDRFxvfimalORjs1KpuCV+cjEjxlsicWcEc+uP95F2CkPwY2cdoor0V2o1TMY9sZyEPCXBlRBZGZAoieRZJu8CgoK6NOnDz8UFNCgVRcadr+Z7PqNgNgW31TUU0rluJmIyWg2VGni2fXHuwjb3QcwcuYK7u
95kuV70RbQRHehVs/Ab5EgFomdknNz8kqWyWvv3r3cd999PPzwwxx22GFMnz6d7GNPS2jxTWY9pXSMm2kYpWCo0sSz6491EQ4tkk4ngtcW/0Cnow+Ny2fgdhdqt1jH6+y0UoJuTwDJMHktWrSIPn36sHbtWm644QYeffRRDj300EpzGdKLUQqGKk08u/5YTAGRi6QdGhwvPLIpNLabnAWnPsbR/AJOc9gRqQSdFJ/VCSARk9fu3bu58847eeaZZ2jRogWzZ8+mR48eMclvSB1GKRiqNPGaXtyaAqx2+XZsKiqx3GmHL+bhRFtAI8ey8wtYPQNfllTwKYSTH6EE3Si+wuBnC8/ejue5z549m/79+/Pjjz9y66238sADD1C/fn3bz18TbPiQWZ/VKAVDlSbVDsBYzCHN8nIslYhCJcXgZgF1o5A2FZXYPoMlG7bz2uIfLOcNfz5uFV+4GSnW5759+3aGDBnCpEmTaNWqFR9//DFdunSxncvr3I10kmmfVVSjO6S8plOnTrpkSaVKGAZDSgjftWWJUOribyTHl83oS09i8NRllqcCCOzQY1FcLYe/bztW+JihqqWRdBkzz9LEE3mPm3nczGfH9OnTGTBgANu2bWP48OGMHDmyUr2iSNzKHo1U7sCTNXayPqsVIrJUVTvFco85KRgMYUTu2qwUQo4vm8s65jP/662VFgQ7u3w8f+DRfAWJ1igKLWqxbAtjOTn99NNPDBw4kDfffJMOHTowe/Zs2rdvn9A8ye4VES/JHDvTsqWNUjDUeNycDLJFKFONuiNMZpKTUzG5SL9A5OdolpdDXq6PHcX+Svc2y8tx7UC3ujcaqsrLL7/MkCFDKCkpYcyYMQwdOpRatdwvN8nIIE5lpngyx860bGmjFAw1mpEzV1Swu9uZispU+X7MBbbjhC/IDXN81PVlUVTsT8is0LNDvmu/gNXO1Zcl+LKlgrM5dG80P0IWkG1zrxPr16+nf//+zJ07lzPOOIPnn3+eE05IjkJMR68IL8bOtGxpTzqvGQyZwMyCwkoLrh1Ou7bIyp5FJX72+st4vFd7Fg3vltCudP7XW22jjsKxS2CrV7uWZQXUaItXGdjea0VpaSlPPvkkbdu25dNPP+WZZ55hwYIFcSkE8K5XhBdjp6N6byyYk4KhxuLWnh5t12ZnShg6bTmQmP3a7Y7U7rqdJX6W3VM5B8BNboPdvZGsWbOGPn368Omnn3Leeefx3HPPJaUHihe9IrwaO5Oypc1JwVBjcdotZ4u43rXZjVOqats1zm3/BLc70lh3rtG6xDndG8Lv9/PAAw/Qvn171q5dyyuvvMKsWbMypilWKnfgmba7TybmpGCoVsQSJmi3Wxbg0Svbuf4Dd9p1WzkfY4lccbsjjafHAVhXUY12L8DSpUvp3bs3X331FVdeeSVPPvkkTZs2tb3eK1K5A8+k3X0yMScFQ7Uh1v7OVrvl8C5gbom26448ScTSxcvtjrRnh3wu65hPtgS6mmWLcFlH50Ur1CVuXK/2NMw52MmsUa7PdtdbUlLC8OHD6dy5M1u2bOGtt95i6tSpGakQDPFhTgqGakM8/Q5C9yWSgBS6fui05ZbRS5FmmFgjV9zsSGcWFDJjaWH5/KWqzFhaaFukL/y+yBPGXn+Z5bULFy6kb9++fPPNN/Tp04exY8fSqFEjR7kM8eFl2QuvOq8NBvoSOLGuAG5U1b1eyGKoPsQTJpgsE0BojEo1iLKFPfsO0HL4++V/3MmKS4+WX1HiL2XUu6scFxc3inTXrl2MGDGCZ599lpYtW/Lvf/+b7t27xySrwT1el71Iu/lIRPKBvwOdVLUtkA1clW45DNWPVIYguiHS1NMo1wcaCFENN2d1bdWkkrkp1siVSFOZXX7FjmK/oznNzhcSen3WrFm0bduW8ePHM2jQIFasWGEUQoqJxbyYCrzyKdQCckSkFpALbPJIDkM1wsq2n6okILvooZCd/vsxF5Bbu1alZjcl/lLmf7
014ciVWKq3Rs4fvriEfBCVL9zFX//6Vy644AIaNGjAJ598wuOPP069evVintMQG16XvUi7+UhVC0XkEeAHoASYo6pzIq8Tkf5AfyBjQtwMmU2sfRLitdkmoxFNomarRBaI8HsjTxiqSvHX/2X7v/+H1/fv4a677uKf//wnderUiXs+Q2x4XfYi7UpBRBoBlwAtgSLgDRG5VlVfDb9OVScAEyBQJTXdchqqJm6dsonYbN06tFP5x+0m+Swvx0dRiXXtoxD5YeMc2L2N7XPHU/LNYurl/5ZPZr3BySefnLCshtjwuuyFF+ajc4DvVXWrqvqBN4HfeyCHoQrjNvnLCrtFfdDUZa7Gcnu8T6U5a9i5J+DLtjH9EFjs7724TdT5h517AnVrZbF7+Rw2Tfwbe7//kibd+zDprTlGIXiE14lxXkQf/QCcJiK5BMxH3QHTLMHgmkR3+k6mFzdjuT0BJLsBUKTJy5cllp3VBCrM4zT/yQ33UXvOA2z/YhF1mrel9ZV3cPdfKtdryqTOYDUBLxPjPGmyIyKjgF7AAaAA6Kuq++yuN012qheJLjCJNiWxu9/tWFax/aEmO6n6Q4611PV6h4qucLCA3ciRI8nOzmbs2LH069ePrKzKxgMvPq8hOcTTZMeT6CNVvUdVW6lqW1X9q5NCMFQvYs06tiLR6Aw3dX+ijxW5mYp9cxWLCSyWaKP8KD6LVatW0aVLF4YMGULXrl1ZvXo1N910k6VCsJs7nSGShvRiMpoNaSUZzUkSdeBG1v2xm8OKmQWFDHtjuUWoaRnD3jhYFTXaaShWE5hbhWflswjJUrhtF2XLZvLTgsk0bHgIr732GldffTViF5YaZW6vOoMZUoupfWRIK8lYYJLhwA2v+xPLWGNnr62kEEL4y5RBU5fRftQchk1f7ngainX37UbhZYtUMumElM+6NcvZNGkwP/57EnV+83vOuvMVnvnhCI4dMSvqKcXrpEBDejFKwZBWkrHAWGUO16mVxeBg9NDImStcm2VijfRwo7yKSvyVHMCRC77dOIVFJZbydm3VJOq8paqMnb22wv1j3l3OpjnPs/l/b6esZDdNLruLwy4extItmlDhQC87gxlSizEfGdJKsmKwQ9EZVmaYVxf/UH6dm2iiWCI93OQH2BFa8Ht2yHccZ9j0is15QsXu3M4Ruj9v5zcsHdeXAzt+on6782jU9Uay6lhnJNuV+E52i1FD5uNJ9FGsmOij6kUywxvdRBKBdTRRLHKU2+UTtKOHonagcvG8cBrl+ii4uwczCwptq6/aUbZvD8X/ncS2JbOoe2gzGvYYSN2jo+ccCJT3oTYRR9WDeKKPzEnBkHaSGYPt1hcReV0sjt5Yw0GdCO3IFw3vxpIN2yucasLZUewvnzcWhVD87edsn/0MpXt2MHToUE659GaGv+Ou7Wi4CS8ZAQGGqolRCoaUk8rEJ7fmnEifRSyLXrzF5+zYFDQjRTMJ/fMt94qotHgn2/89geI1/8F32NE0+fOdPPLIEABWbCnhtcU/OCqGSBOeiTiquRhHsyGlJCMvwQk3OQdWPotYFr1kL4TN8nJcKZo9+6MrBFVlz+r/sOmFWyheu4iGXa7hyBvGUafZwc97f8+TeLxX+wrO9GtPa+HoXDcRRzUXc1IwpJRUmyGsSjl0bdWE+V9vdTyZxJLrYHdtXo6PenVqWfY4tiOkoAZPXebq8zlxYNcvbJ/zDCXffUHtI39L4/P/Tu0mx5S/H3JqQ+wmO6+Lshm8w1YpiEhDYATQEzicwP/5LcDbwBhVLUqLhIYqTTrMEPH4KLq2alLJpCJYh352bdXE0vbfplkD1m8LfA43CqFRro97LmpDzw75cTutBSjTMn5dPocd81+EslIadetLg44XIVkVT0yJKN5k120yVB2cTgmilnYAACAASURBVArTgHnA2aq6GUBEjgCuD77XI/XiGao6iWQfh0f8ZAfbTeYnYXEK2fOtClVY9TWe//VWy3EWfbc9pn
nDex9b7cSj0SjXx+lN/Ex6aCTFG76iTouTaXzerfgaHWl5faKK18uibAbvcPIpHKOqD4UUAoCqblbVh4CjUy+aoToQb+JTuC8CDjaDSYZPwsmeb5VVnGgYqtXY4UlzbtCyUnYsfpOXhlwO277nuEuHcMRVD3BMy2PJy/FZ3mPs/4Z4cDopbBCRO4BJqvozgIg0BW4AfkyDbIYMIZHooXjNEG4WbjcyWMkebQcd+X6WgE1li5gJHzu0E4+Wa7F/63q2ffAE+3/6hosuuojx48eTn29fRwmM/d8QP05KoRcwHPiPiBwefO1n4B3gylQLZsgMEu1dELouVjNErAt3OOFmp3AHcEj2vFwfO4ordyQLEb7DnllQmDSFEBo7UlEd0ziHTcHorHD0gJ+dn05j5+JpZNWtz9FXjODtqQ9UKmBn7P+GZGIymg2OJNq7INnzRpvfTaJZXo6PfQfKLK/xZQv1atdiZ0mgnEPx/gOOCiQWQgrKTaTSvk1r2fbBE/h/+YF6bbrSqFtfGjc+jGX3GFeewT0pzWgWkT8ApwIrVXVOrMKFjXMCMDXspWOBu1V1XLxjGlKHV0lMTo7YcNNI5K67eP+BqM7bnSV+Hu/VvpITu1Guj1/3HijvaxyvL8Fu0deIf60o27+Xoo//l91L3iG7QWOaXH4PucedUi63wZBqnEJSP1fVU4Pf9wMGAG8B94jI71R1TDwTqupaoH1w3GygMDiuIQNJZfN5JyJ7HlhFH1mZttzQLC/H0qTVZcy8pJwKFCrIu2ffQUXjRMmG5Wz/8CkOFG2mfoc/0eisG8iqk1tBboMh1TidFMJDGvoDf1TVrSLyCLAYiEspRNAd+E5VNyRhLEMK8DKJyc4XEepYFs9O3kn2ZJ5+SlXxZYurRLWyvb+yY/6L/PrVHGo1OpKmV4+mbouTXMudKkxf5pqJk1LIEpFGBMJWRVW3AqjqHhE5kKT5rwKmWL0hIv0JKCNatGiRpOkMseKVE9NuQZpZUMiw6cstG9bbETLnRMtxsDsV5fqy2HdAYypMB+AvVUa9u8qxPlPxN4vZPudZSvcUcUjny2jY5RqyfHWAwGmjTNWTBTkZAQaGqomto1lE1gNlHPyb6qKqP4lIfeC/qto+oYlFagObgDahkFc7jKO5ZuFUtnnUu6tcmXjskt1mFhRy7zurys054VnGVgonO0vIAttuaxA9ZHVcr/aVPk/pniK2//s5ir/+GF+TY2h8/m3UOfI3Fe4LL2WdbrwKMDAkl6Q6mlX1GJu3SoE/xzKJDecDX0ZTCIaah1O9JLc2/1LVcpNLuEKI7K+8o9jPoKnLGDR1GXk5PkojVvfSMiVaznFoR+/E6EtPCvhHdhTz6+oF7Pj3BMr8JTQ841oadr4cya78p+ilD8FUSa252GY0i8gpInK+xVtdgUOTMPfV2JiODDWbZC1IkdnJTv2VIdBGM56cBH+ZIg7vh8wur//lN7RZOZ5t7z1KrUObceQNT5L3+6ssFYLXyWemSmrNxcmn8BBwo8Xrq4CXgLjPkCJSD/gjcFO8YxgqU1Ucg9HkdIp4chvJEyJckaRyl6uAL0sslU7xfj9DR41l67wXKS0tpfft97Kodif2hh1BfFlC/bq1MqbdpamSWnNxUgoNrKKCVHWDiByWyKSqugdonMgYhopUFcegGzmjLUhDpi1zvaMP39nG21/ZTbJZyHcxKCLSyL+9kG0fPMm+jas455xzmDBhAi1btsx4BW6ypGsuTo7mb1X1+FjfSwXG0RydquIYdCun06I5s6CwgsPZLlM4MuoIqORTiEaOL5vLOuaX92fICya4hY8R3rs49Pm0rJRdX7zFzv9ORrJ9tLzwb3zzxkOVSlQYDKkkHkezk1L4H2AbMFKDF0ngf/Qo4AhV7Z+gvK4xSiE6LYe/b7mb9TKCxYpUymlX8wgOLtxAheijSNyYcaxqFy1et4NSDfgW/Fu/Z+v749j/83fk/PZ08s8fyNjrz6qg1MwO3JAOkl
3mYijwAvCtiITOxO2AJUDf+EQ0pAqvMo9jJZVyOlUdDTmdFw3vVmEBjmeBDk+qGzlzRXkDHj3gp+iT19n52XSy6jbgsEuG06DVH/BDBYd3VTDzGWouTiGpe4CrReRYoE3w5VWqui4tkhlioqo4BtMhp51DOaQoopmmuoyZZ5k0Z3XPlM8CVeT3Fa5h2wdP4t/2I/XadqNRt75k5xxCWdjcI95cQV1fVkrbkxoMieJkPvqd042q+mVKJLLAmI/cUVXMEqmWs8N9c2zzGa49rQUzlhZaJsYBlgrrso75le4R4C+nteCVhWspWvi/7F76LtmHHEbjcweSc2zHuOQWyOjfm6HqkWyfwnyH+1RV0+a9NEohs3GzyCeqCGK5v/2oObY+g1CmcyShDmhWpi27e/Z+X8C22U9zYOfPNPjdBeSdeX2FAnbxEu64jqSqKH5DZpDsjOauiYtkqO64CTF1ugaihz1GmyNyoXTKY7CrX+SUwxB5T+neX9kx7wX2rPg3tQ/Np+k1Y6jbvK3t/bFiZ06qKmHHhqqNabJjSAg3IaZ21zTK9bHXX2Zpyglf5JzuV6WSEnCTV2AlL0Q/KRT/3ydsnzOe0uKdHNL5MvK6XM1f/3B8ubM5FpzktIrGqiphx4bMIZ6Tgm2ZC4PBDW5KUthds6PYb+t0dTPHjmK/5akgnm3Onn0H6NqqCTm+7Aqv5/iyubpzc0p/3cHWmaPZ+taDZNVrxBHXPUajs67nqMMacn/PkxzLXFiRn5fD92MuKFdGkVhFY5l6RIZ0YJSCISHc1MiJNdy0sKiElsPfp8uYecwsKExLWG1RiZ8ZSwu5rGM++Xk5CIGF+8E/t+W3uwr4ZdIAir/9nLwzr+PI6x6jzhHHV4iaikXG8PuGnXuCpSKyisYy9YgM6SCqUpAA14rI3cGfW4jIqakXzVAVcFrUwpvhRO6kc3zZ5OX4sEM5aDPv2qpJzDvx7Dgyh0v8pcz/eiuLhnfj+zEXMPnq43juzr5cf/31/O7ktjw1bS6tz7+erOxa5OflVDBzDTv3BHzZ0efMFqlwX88O+Yy+9KQKisjOyRyLAjEY4sVNj+ZnCfRV6AbcB+wGZgCnpFAuQxXBrkYOVAzvDC9DUaHsRJSGOaGFOhaTUI4vO2qfZjs2FZVQVlbGs88+y/DhwwF46qmn+Nvf/kZWVhYDbe4LPYfB05Zh56aziyqKfIYh85nddeElPurUMod9Q3JxoxQ6q+rvRKQAQFV3BBvkGAyAddvMLmPmVVqYQwoh5BSdWVDoygGwqaiEfJfF7EJNc0LlLmKlkf8XzjzzTBYtWsS5557Lc889x9FHH+3q3tAziMx1CJfLLszUbXRVwxwfe/YfbHxYVOI3EUiGpOJGKfhFJJvgn6+INIHyRE2DwRI3TtFo/Q1CNMvLoWurJo4RPlaLrtXibIeWHqBk6Uw2LZpC/Xq5vPzyy1x33XWWBeyccgWsTk5dWzVh/tdbGTx1GWNnr60UduvUVCjyc1g51k1GtCGZuFEKTwJvAYeLyAPA5cDIlEplqPK4qXHkJmomZDOPjEiKZFdJ5bbhdWpVLilhxf6fv6No9lOU/PQtl19+OU899RRHHHGE5bVucgXCT05urndSoFYKw+5agyEZuMpTEJFWQHcCZuGPVHVNQpOK5BEotteWwAmkt6p+ane9yVOoelj1WfZlC/Vq12JnSaACafH+A5blKMIb1od22W5MQSGb/ZIN23lt8Q+WZbTD0QP7KVo0hV2fzSA7tyFD732Ih27v5/iZhk5bbpkAFy5z+EkgkTyO/LwcNhWVuPKnmFwFgxXJrpIaGrQFUAy8G/6aqsaerXOQJ4APVfXyoH8i8doAhowi0owS6kMQMn8UFpXgyxJ82VLB0RzujLVSLE6U+EsZ9e4qior9lRUAFRXD3o2r2PbBUxzYvpF6J51Do259+e+BJrZjh2Sxy4gOvR55EnBjRnMqEujGN2IikAzJxI356H0O/k3VBVoCazlYOT
UmRKQhcCZwA4Cq7gf2xzOWIbMJN6N0GTOv0qnAX6bk5fioV6dWBfv72NlrGTx1GVk2NYecsCuEB4H/xEfkKKvf+R92f/k+2Q2bcviV95HTMlD70ckE49aMAxVt/G7MaNG6nFU6cWVY605D9SKqUlDVk8J/DlZP/VsCc7YEtgIviUg7YClwW7BUd/g8/YH+AC1atEhgOkMmYLfg7izxs+yeHkBlk1OsCiEae79fyrqFE9j98yYadLyIvDOvI6u2uyS7WG32oevdlgoPKdCQEzvcKT360pNMETxD2nBzUqiAqn4pIp0TnPN3wK2q+pmIPAEMB+6KmGcCMAECPoUE5jNkAG52zG5346E8B6vFtk6trEoROqUlu9kx73n2rJxH7cbNuf7BSXy2p0lMPR1i7e+clxtIzIul17GdU3r0pScZf4EhbbjxKQwJ+zGLwIK+KYE5NwIbVfWz4M/TCSgFQzXGzY7ZrTM5fFG1SpobNHVZ+fV7vv4v2+f+D2V7d3PI6b3I+30vvqEhoy89wfXue2ZBIXv2VY5uyvFl87sWDVn03fZK7/269wAzCwrLTwCR3d6sGvk4haaak4EhXbg5KTQI+/4AAR/DjHgnVNXNIvKjiJygqmsJRDWtjnc8Q9XAzY7Zrm8BWDegsUqag0AP5l+2/syOuf9D8f99Qu2mx9H4yvuo3fRYIKB87O6NxM7Z3SjXxwUnH8mMpYWW9/nLNOby17EUvDN9FQypwlEpBJPWGqjq7Ume91bgtWDk0TrgxiSPb8hAoi3ETj6EyDLSYL0wXtK+GWfoSp6deA9l/n3knXUDh5z6ZySrYs2glsPfd7WY2pm0cmvXYv7XWx3NXVaLudNpwG3/atNXwZBKbAuniEgtVS0FuiR7UlVdpqqdVPVkVe2pqjuSPYeh6mFXRtrq9dDCWBiM4y8sKmHoxLl0OP0snh41lFYntqFZ76dpeNrllRQCVCy4N7PAercPzrv3aM7nWMtfuy14Fy0D2mBIBKdqWp8H/10mIu+IyF9F5NLQVzqEM1QvQrb08LLY4bhZFENjDJq67GCxvbJSdi15h+8n3MyKgiU8++yzrFjyCccce3xUmaItpk7lqp2ileIpf+22Yqrpq2BIJW58CnWBbQSqpIbnAL2ZQrkM1Qy35SHA3u9gZd/3//Ij2z54gn2bvqbusR057NwB3HJLwBpp5dy2wmkxtXOQd23VhPeW/2R5T73a2fiysyxrHUVzuLvxdbg1MxkM8eCkFA4PRh6t5KAyCGFCRA2uCNn9rRYxq8gap0Vx1LurDp4OSg+w67MZFH0yhSxfDo0vHEq91mdzVKPcCmPBQSVjlwzntJjaFbibsbTQ0fkcnrkdrvxiCVG1w23ug8EQD05KIRuoD5b9TYxSMETFTZkKtyaPmQWF5dnK+zZ/y7ZZ4/BvXU9uqzM49JybyK6XBwQW4S5j5pXv5EOLc/iCHetiGqmorMqCg73zOVL5uY18cpIHElMsbjARTjUTJ6Xwk6relzZJDFUWu8XDTTKaW5PH2NlrKfPvY+eiyez6/C2y6+XR5M//JPe3p1e6trCopFKZ7R3FfqZ+8SO9TmnO/K+3JrTQxWPTT7a9P1HFEg0T4VRzcVIKsfczNNQ44om7DxFtlx6ubEp+XMm2D57kwI5N1D+5B4269iarbv2YZPWXanm7Tbt53CiKaDb96mDvN4l0NRcnpdA9bVIYqixOi0ders+2QF2+zeIb7oMQoHRfMTv+8zK/FsyiVsOmHN7rfnKOaR+3vJGKKp4dcTSbfnWw95sIp5qLrVJQ1cq5+wZDBHaLRKg0diS+bGHs5e1c1f4p/u4Lts1+ltLdv9Cg0yXknfFXsmrXTUje8B27XX+EaDtip17J6bL3pxoT4VRzibkgnsEQjt3ikS1i2WqzXu1atgtk6NRRWryTHfNeYM+q+fgat6DJtWOpk9/K8h6r5jl2+LKlfMcerT+Cmx3xXv/BrrSRvZKrmhKIxEQ41VycktcMhq
jYJZzZLbY7LXoMhyjcUcyeNR+zaeLf2LNmIQ1/fzVH3vCErUKAgEKoV7tyxjJA7eyDJ5VGub4KJ5RoTvBoO2I7s9mod1c53ldVcJtIZ6h+mJOCwTXh9v5Q8br8vBwu65hfKaLHLjfBbrHdtGkTu98bzfbVn1D7iN/QuNf91D68ZVSZskXIy63Nnv2V52rSoK5tyWmnk4CbHbHd/TuK/bQfNYd7L25ToT9C5DOrCial6nDiMcSOUQoGV9g1wCksKmHG0kLLXaQb84OqMnHiRG6//XZK9u6lyTl9yelwUXm9omjmoVLVuJyiTmav0ZcG+kpZlbeOdj8cNCUt2bC9Ql6EXctOgyGTMOajGkq0OkSROJlbrOoHuTE/rFu3jnPOOYd+/frRvn17Vq1cyYSH7+WoQ+uX3/N4r/asH3OBY7E8u9OHgu1nszN7PXplO4BKxfYiC+dFO0mU+EuZ8tmPMT0zgyETEE1yy8NU0KlTJ12yZInXYlQbrDKNc3zZjjbjlsPfd9yxC9blra0oLS3lySef5J///Ce1atXikUceoW/fvmRl2e9RnGSGyqeSSNkUyMvxIUJ5b+OurZpYJrJ1GTPP8hSQn5dTwRzVftScSl3eYiGWZ2YwxIOILFXVTrHcY04KNZB4Si9Hc7y6DVVctWoVXbp0YciQIXTr1o3Vq1fTv39/R4UAlU8ejXJ91Kl1sOjcZR3zbU8TIWVWVOJnR7G/fPc/9YsfLTuquTVH3Xtxm0qnjXCyxTn/04R3GjIRT5SCiKwXkRUiskxEzBEgzcRjg7cyt4SwK28dbprav38/9913Hx06dOC7775j8uTJvPvuuxx11FGu5e7ZIZ9Fw7vxeK/27PWXUVRycIGfsbSQrq2auB4LAhnO4WOETERO5a0j5Rl96Uk0CvZjDifHl83VnZu7fmYGQ6bgpaO5q6r+4uH8NZZoiUlOZR+iRdJYZQgPeno6ty0czw/fruWaa65h3LhxNGkS2wIejt1J57WIekexEjotxRqjn1u7FjuK/ZbPpNPRh1bZ6CNDzcQTn4KIrAc6uVUKxqeQXGK1z0fzN4QTbo8v8+9l58evsWvJ29RucCjTX32Riy66KGH5o/k3EiFk549UjFb+B0jsWRkMqSYen4JXJwUF5oiIAs+p6oTIC0SkP9AfoEWLFmkWr3rjVIrBqix0LIXQQiaovT98xbYPnuJA0U/Ub38eh559Y1IUAjiHgyZjbKgYo29XH6muL8sUjTNUO7xSCn9Q1UIRORyYKyJfq+rC8AuCimICBE4KXghZnbFLTEq0ENrhdUtZPXM8vy7/kFp5R9L0qgepe/TJtk7gaFiZsqzMO27KXdTOFurVqUVRsZ+8XB+/7j1QoRSHnYnIzlxlF+3k5lmZXgWGTMUTR7OqFgb/3QK8BZzqhRyGyrh1slrx3nvv8d34m/j1qzkccuqlHNn7KeoefXL5YhtrbsTImSsYPHVZpXwBoFIOxF9Oa+EYCQQBx/I9F7Xh+zEXcM9FbahX5+CeqFGuz9bsE2tl0GjPKnTycMqDMBi8Iu0nBRGpB2Sp6u7g9z0A08wnQ4inENrWrVu57bbbmDJlCm3btuWOR57n7cJcR/t7tKzemQWFvLb4h0q7/5B5ZtHwbpXuC3fqWqFQHnYb+RnDi9tFYlcCPNeXhSIxF41zqptkTg8Gr/HCfNQUeEsCMdy1gMmq+qEHchgsiKX0s6ry+uuv8/e//52dO3cyatQohg8fTu3atbkj4tpYfRVjZ6+1NQfZLfohk5hd8hkEdv2xNpCxi8WoXSubey9uE/NC7lQ3KaR8TCkMg1ekXSmo6jqgXbrnNUQn0s79eK/2tgvSxo0bueWWW3jvvffo3LkzEydOpE2bNrZjx+qrcDLZWLRpqMCwc09g0NRllu81y8uJeU67yq47S/xxFY1z6ygv8ZcyKJicZ04NhnRhMpqrOW7t+G7t3GVlZTz33HO0bt2ajz76iMcee4xFixY5KgSI3VeRZ5EQVi
5DAmEHXVs1iVmWRPwsVjglAlphfA6GdGKUQjUmFoemm9IX3377Ld27d+fmm2/mlFNOYeXKlQwePJjs7OgLnF0BOjv7eyLpM07lOuZ/vdUy8zm8AU8kscoeDatigXk59koQTAE9Q/owpbOrMbHYzp1MKgcOHGDcuHHcdddd1K5dm+eff54+ffogUWr7hBNrm0qnZjxOC+jMgkJH00xhUQlTP/+x0uulDsePVLTYjDQ7WSUURmL6IxvSgVEK1ZhYbOd2du6Ge3/i97//PV988QUXX3wxzz77LPn59ouhU/x9LPZ3J7v7vRdbm6pCC6sTdm1Cy5SofZlTYdMPf14Nc3zU9WVZRjqBKaBnSA/GfFQNCfkR7Pa+VotLpIlED/j59ZPJrHzmFtavX8/UqVOZOXNmVIXg1lwVzddhZbIR4NrTWkTt8WyHL0ts24SCfVRTqoh8XkUlfvb6y7jWIufCFNAzpAtzUqhmRDND2C0u4SaSdasK2DnnKUq2bODaa6/lj73/wROfbeEfI2ZV2P1HngqK9x9wZa6yKxsRLkc8Jhsn80pejq88fNRu8Y9W6jrZ2Jn35n+9ldGXnmRyFgyeYJRCNcNptxytOucff5vHi4++zieTXyC7fmNOvP5Bju55If/69w/4Sw+2khw2fXmlVpNOu+zIxdqtryNWk41Ti81Qz2TANlzV6RSRCpzMe6Y/ssErjPmommG30AhYZgGH+OijjzjuhNa8+9rz1G9/Ps36PEvxESfz6uKDCiGEv1SZ/NkPjqaacCLNVYnWV7LDLtSzVLXcjNWzQ76tozre+kzxkuxQV4MhGRilUM1wu9CEbPotBk2l6akXcM4557BzbylNrxlD4x5/I6tOruM8bnMFrMxVsSyGsdRLCoV6WpmBwkM6rTqmeWGzT3aoq8GQDIxSqGa4WWhCNv1vvpjPpol/Y8uSDzn091fQ5Ponqdu8bULz5+X4KsTfWxWZc7sYxlM4rmeHfMpszEChk4hVnoAXPRAyRQ6DIRzjU6hmuHHQPjBjMT/MeILirz/Gd3hLmlx2N3WOOL68M5gbcnxZYFEMLtx2n4iMoffj6Vfg1FkullIe6cD4DgyZhied12LFdF6Ln/BF8MiGdTm1dBVPPTCSMn8Jeb+/mkM6X4ZkH9wb5PiyKyzEoTDOcHORL0sYe0WgfFUqI2TsOqyFuqPZYddZ7rKO+RWc46HXze7cUF2pSp3XDGkgfHE8sGsLBW88w6frlpLb/ETyevwd32HNK1wfik6KXOjBfvFP5WIarZe0HXYnkVHvrjKd0gyGKBilUMVxyiAeO3stxfv9/LrsQ3YseAm0jEbd+9O8S0/2l1U0/QgBm71dRc5YF81kdBaLp7dDuLzh842cucI2U9iUjzAYDmKUQhUmWhLYhnXf8ssHT7Jv4yrqHt2eQ88biC/vCHbtK+PxXu3LE7nCW1kmo46/m+Q0NySr5tDMgkJeXfyD7fsmBNRgOIhnPgURyQaWAIWqeqHTtcanYI1dM5lmDWrT01fAiH/eBdk+GnXrS72TzikvYJefl8Oi4d0cxwi/BmLb+bsdMx3MLChk8LRljlVXx3nsbDYYUkVV8yncBqwBDvFQhiqNldlj/5Z1LH35CT79+Ts6dz2PbSf/FX/dhuXvR5pf7EwnhUUl5clese78Y0lOS2UD+5DcTgohL8dXYxRCKp+1ofrgSZ6CiBwFXAC84MX81YVws4ce2M+Ohf/LT5MGo3u2M336dBbP+4Cx15/lGAfvZDoJ5QQ4OWijyeX0eqob2EcrkAf2FVerG6l+1obqg1fJa+OAOwDbbuki0l9ElojIkq1bt6ZPsipEKAls78Y1bHrp7+z6dCqHtO3KxHcXctlllwGBnfyi4d34fswFlmUunLqAhZrJx+qgdZuc5qaxTyJEcyDn+LJqzE451c/aUH1Iu1IQkQuBLaq61Ok6VZ2gqp1UtVOTJpU7ZRngnN805Ljv3uDnyXegB/Zx4o1jmPTyy/z1bPdZya
GsWjvsFALYnwjcZuqmqgZSNPkgkGsx+tKTkzJPVSDVz9pQffDCp9AFuFhE/gTUBQ4RkVdV9VoPZKmyzJ07l/79+7N+/XoGDhzIgw8+SIMGDeIaq2eHfMeS0nY4hYa6ydSNNw/BLVYhrQCNcn3cc1H0zOvqRKqftaH6kPaTgqqOUNWjVPUY4CpgnlEI7tmxYwe9e/emR48e1KlTh48//pinnnoqboUQws7kk+uz/i+SDAdtqgvCWZ1YxvVqT8HdPWqUQgBTfM/gHpOnUIV48803GTBgAFu3bmXEiBHcfffd1K1bNyljW+UEdG3VxLKfMcCF7Y5Myrx1amWV7+RTsYM3tYUCpKLPtKF64qlSUNUFwAIvZagKbN68mYEDBzJjxgzat2/PrFmz6NChQ9LniVxAu4yZZ9nPGGDG0kI6HX1o0hLcAPb6yyq8bxaw5GIUpMEN5qSQwagqr7zyCoMHD6a4uJgHH3yQ22+/HZ/PukmMHfEusE5OyERrBkWLhklGRnQ8RHtWRlkZqjtGKWQoGzZs4KabbmL27Nl06dKFF154gVatWsU8jpvEM7uFzs45GSKeZLSRM1cw5bMfbUt0FxaVMHTa8krvx6KE4l24oz2rZJXvMBgyGdNkJ8MoKyvj6aefpk2bNixatIinn36ahQsXxqUQIPqO3CmpySmHAWJPRhs5cwWvLv4has8Gu/fdhE8mkqQV7VmZWH9DTcAohQxi7dq1nHnmmdx666384Q9/YOXKlQwYMICsrPh/TdHi0+0WulHvriqP3mmUW9lcFU8y2pTPrJ3WbnETPpnIwh3tWZlYf0NNwCiFDMDv9zN69GjatWvH6tWrmTRpMTDtFgAAEutJREFUEh988AFHH310wmNHKzlht6DtKPaX1z4quLsH43q1TzgZzW1XNyvchk8msnBHe1ax9JY2GKoqRil4TEFBAaeeeip33nknF110EWvWrOG6664rr2iaKNHi050WtPDddbRyGU5jKYFIpqwYP1K2SMy9ixNZuKM9KxPrb6gJGKXgEXv37mXEiBGccsopbN68mRkzZvDGG2/QtGnTpM4TreSE04IWq1nEyQdRWFSCZW9NoMtxh1a6T4CrOzd3VEJuZYilMY/Ts3JbvsNgqMqYHs0e8N///pc+ffrwf//3f9x44408+uijNGrUyDN52o+aQ1FJ5RpHof4HsUTzhK61i1rK8WWx/4BSqkq2CFd3bs79PU9i5MwVvLb4hwp6I97+ySZs1GAIEE8/BaMU0sju3bsZMWIEzzzzDMcccwwTJkzgj3/8o9diBSKNpi/HX3rw/4IvWxh7eTuWbNheabEW4C+nteD+nvaF9FoOf9/yYCDA92MuqPR6JjXmMRiqC/EoBWM+ShOzZ8+mbdu2PPvss9x2222sWLEiIxRCOZEruGKpEEKXvrb4B8cwz1ht+8mI7JlZUEiXMfNoOfx9uoyZZ3oFGAxxYJRCitm2bRvXX3895513HvXq1WPRokWMGzeO+vXrey1aOWNnr61UzsJfpkz57Ec7NwAKjmGesdr2E43scZufYBSHweCMUQopQlWZPn06rVu3ZvLkyYwcOZKCggJOP/10r0WrhN1uPFoIqdMuPlanrJ2Tes++A0lJPAPTfcxgcIMpc5ECfvrpJwYMGMBbb71Fx44dmTNnDu3atfNaLEtmFhSSJWKpALJtXg8RbRcfawG28IqpIYpK/K5KSbgxPzkpDuOINhgCmJNCElFVXnrpJVq3bs0HH3zAQw89xOLFizNaIYx4c4Xlwp/jy+bqzs1tQ0yTGZ8fksMqAgrcZSS7MT+ZjGSDITpGKSSJ77//nh49etC7d29OPvlkli9fzh133EGtWpl7GBv17irLxvbZIoy+9CTu73lSuQko9DokPz7fagcfSbSF240Pw2QkGwzRSfuKJSJ1gYVAneD801X1nnTLkSxKS0t55plnGDFiBNnZ2YwfP57+/fsnVK8oHcwsKLTtv1ymWiFhK9WmlURKUIRw00
TGqj2nyUg2GCrixTZ2H9BNVX8VER/wXxH5QFUXeyBLQqxevZq+ffvy6aefcv755/Pcc8/RvHnzpIyd6gQsJ3NMojvnWGWPVqI7loxkp3lM9zGDITppVwoayJb7NfijL/iV+Rl0Yfj9fh566CH+9a9/0aBBA1599VWuueaapNUrSkfdfqfdeSI751hkD89+FqiUIKcETFXJXLhN9zGDwRlPDN4ikg0sBY4HnlHVz7yQIx6WLl1K7969+eqrr+jVqxdPPvkkhx9+eFLnSEeUjN3uPC/Hl9AcdrIPnraMwVOXle/OoWJ3NSV1igBM6QuDwS2eGL5VtVRV2wNHAaeKSNvIa0Skv4gsEZElW7duTb+QEZSUlPCPf/yDU089la1btzJz5kxef/31pCsESE+UjJ1j9t6L2yQ0rp2MqlTIDbBycocUQiwF8Nxg8hMMBvd46g1V1SJgPnCexXsTVLWTqnZq0qRJ+oULY+HChbRr146HH36YPn36sHr1ai655JKUzZfMKBm7DN5UVfx0I2OJv9TWyZ2K8FDTMc1gcI8X0UdNAL+qFolIDvBH4KF0y+GGXbt2MXz4cMaPH8+xxx7LRx99RLduqS/OlqwomWj2/WTa16NVR3VLKsJDTX6CweAeL3wKRwKTgn6FLGCaqr7ngRyOzJo1i5tuuolNmzYxZMgQ7rvvPurVq5eWuZMVJZMO38TMgkLufWeVbeKZE5HO5VSFh9r5T0x+gsFQGS+ij74COqR7Xrf88ssvDBo0iNdee43WrVszffp0OnfunHY5krGLT9YO2c5JG3kSiZVUO5dDmPwEg8E9mZtum2ZUlWnTpnHrrbeyY8cO7rnnHkaMGEGdOnW8Fi1ukrFDdjJBuclEFiAv14cqlqeJcOdyqjD5CQaDe4xSADZt2sQtt9zCO++8wymnnMJHH33ESSfZN5CpKljtkAXo2sq9497JBBXtxBG52Ns13kmHbd/kJxgM7sjsWgwpRlV54YUXaN26NXPnzuWRRx7h008/rRYKAQIL4WUd8wlPqVNgxtJC1+GYTiYopxOHlXkmE2oPmX4KBoMzNVYprFu3jnPOOYd+/frRoUMHvvrqK4YOHUp2tnVV0KrK/K+3VtqdxxKO6bSQ2/VAaJTrswxvjbXxTrLxMl/BKCNDVaHGKYXS0lIef/xx2rZty5IlS3juuef46KOPOP74470WLSUk6mx2Wsitch3G9WpPwd09LE01qcqNcItX+Qomec5QlahRPoWVK1fSp08fPv/8cy688ELGjx/PUUcd5bVYKSUv12eZKJaX63N1fzQnbay2ei9t+17lK5jmPoaqRI1QCvv372f06NE88MADNGzYkMmTJ3PVVVclrYBdJmPXOC1Kp80KpHohT1ddIq/yFUzynKEqUe3NR1988QUdO3bk3nvv5YorrmD16tVcffXVNUIhAOy0SSqzez3dpNO04pVPIxMc7AaDW6qtUiguLub222/ntNNOY8eOHbz77ru89tpreF1HKd1k+oKUTju/Vz4Nrx3sBkMsVEvz0YIFC+jbty/fffcdN910Ew899BANGzb0WqyEicfMkuxs3mSbeuxMKIVFJcwsKEz6gu2FT8MkzxmqEtVKKezcuZM77riDCRMmcNxxxzF//nzOPvtsr8VKCvE23knmghSrDG4UiFPXtcFTl7Fkw3bu71n180ZM8pyhqiAai8fRIzp16qRLlixxvObdd9/l5ptvZvPmzQwZMoRRo0aRm5ubJglTT5cx8ywXz2SViHCzgMcig1VdpBxfdiVzTbT6SQI83qu9WVANhjgQkaWq2imWe6q8T2Hr1q1cc801XHzxxTRu3JjFixczduzYaqUQILURLG6dvbHI4NZXELLz26E495M2GAzJpcoqBVVl8uTJnHjiiUyfPp377ruPJUuWcMopp3gtWkpIpcPY7QIeiwyxKJCeHfLJd/gcJnTTYEgfVVIpbNy4kYsvvpi//OUvHH/88RQUFHDXXXdRu3Ztr0VLGa
mMYHG7gMciQ6xKbNi5J2AXJJwpkVIGQ02gSimFsrIynnvuOVq3bs28efN4/PHHWbRoEW3aJNZXuCqQynBKtwt4LDLEqsR6dsjnL6e1qKQYTOimwZBe0u5oFpHmwCtAUwIm4wmq+oTTPZ06ddLXX3+dfv36sWDBArp3786ECRM49thj0yFytcetUziecUPO61BPhZ0lfscIqHRlNxsMNYF4HM1eKIUjgSNV9UsRaQAsBXqq6mq7e5o3b66//PILderU4dFHH6V37941JiM5XaRyMU6V0jEYDM5UCaVQSQCRt4GnVXWuwzV6ySWX8Oyzz9KsWbM0SmdIBqkOpzUYDNZUOaUgIscAC4G2qror4r3+QP/gj22BlWkVLjqHAb94LUQEmSgT2Yc06Zida51Rvn/zt0vTLE6ITHxWRiZ3ZKJMkJlynaCqDWK5wTOlICL1gf8AD6jqm1GuXRKrtks1Rib3ZKJcRiZ3GJnck4lyxSOTJ9FHIuIDZgCvRVMIBoPBYEgfaVcKEvAQTwTWqOpj6Z7fYDAYDPZ4cVLoAvwV6CYiy4Jff4pyz4Q0yBUrRib3ZKJcRiZ3GJnck4lyxSyT59FHBoPBYMgcqlRGs8FgMBhSi1EKBoPBYCgno5WCiDQXkfkislpEVonIbRkgU10R+VxElgdlGuW1TCFEJFtECkTkPa9lARCR9SKyIug3cm6IkSZEJE9EpovI1yKyRkROzwCZTgjzry0TkV0iMigD5Boc/D++UkSmiEjdDJDptqA8q7x6RiLyoohsEZGVYa8dKiJzReSb4L+NMkCmK4LPqUxEXIelZrRSAA4AQ1W1NXAaMEBEWnss0z6gm6q2A9oD54nIaR7LFOI2YI3XQkTQVVXbZ1D89hPAh6raCmhHBjwvVV0bfEbtgY5AMfCWlzKJSD7wd6CTqrYFsoGrPJapLdAPOJXA7+5CETneA1FeBs6LeG048JGq/gb4KPiz1zKtBC4lkCDsmoxWCqr6k6p+Gfx+N4E/YE+L5WiAX4M/+oJfnnvrReQo4ALgBa9lyVREpCFwJoGQaFR1v6oWeStVJboD36nqBq8FIdCuN0dEagG5wCaP5TkR+ExVi1X1AIHk10vTLYSqLgS2R7x8CTAp+P0koKfXMqnqGlWNuUNVRiuFcIIlMToAn3krSbmZZhmwBZirqp7LBIwD7gDKvBYkDAXmiMjSYNkSr2kJbAVeCprZXhCRel4LFcFVwBSvhVDVQuAR4AfgJ2Cnqs7xVipWAmeISGMRyQX+BDT3WKYQTVX1p+D3mwlUga6SVAmlECyJMQMYFFkjyQtUtTR41D8KODV4rPUMEbkQ2KKqXtURsuMPqvo74HwCpr8zPZanFvA7YLyqdgD2kP5jvi0iUhu4GHgjA2RpRGD32xJoBtQTkWu9lElV1wAPAXOAD4FlgHVzbw/RQJy/59aDeMl4pZDJJTGCpof5VLblpZsuwMUish54nUBi4KveilS+20RVtxCwkZ/qrURsBDaGneymE1ASmcL5wJeq+rPXggDnAN+r6lZV9QNvAr/3WCZUdaKqdlTVM4EdwP95LVOQn4NtAULtAbZ4LE/cZLRSyMSSGCLSRETygt/nAH8EvvZSJlUdoapHqeoxBMwP81TV012diNQL9ssgaKLpgceVblV1M/CjiIRauXUHbPt4eMDVZIDpKMgPwGkikhv8O+xOBjjlReTw4L8tCPgTJnsrUTnvANcHv78eeNtDWRKiltcCRCFUEmNF0IYPcKeqzvJQpiOBSSKSTUCpTlPVjAgBzTCaAm8FmyHVAiar6ofeigTArcBrQVPNOuBGj+UByhXnH4GbvJYFQFU/E5HpwJcEogALyIwyDjNEpDHgBwZ4ESggIlOAs4HDRGQjcA8wBpgmIn2ADcCVGSDTduApoAnwvogsU9Vzo45lylz8f3t3E2JVGcdx/PuTQnpTaCpoIxSjgQ4uQkGKxF6IJlokaoFCm160RURtK6LaBC0Go0XRQDGEYaVBbz
CSUTOCmSFpkyAOFdYEESGmZgX2b/H8z7l3xjvjzFh2h/l9Vpfzfu/i/J/nOff8HjMzq7T18JGZmZ1fLgpmZlZzUTAzs5qLgpmZ1VwUzMys5qJgbUvS6UwNHZL0dkYbTPdYr0tam597JwpWlLRK0pRf1MpU2CumeU0XSno+Uzb3SdotqTvXfSrpUFOKavVf/bmStkoalrQno2DMzomLgrWzU5ke2gX8BWxqXplBbVMWEQ9ExEQvra3i/L+9+xzlHZiujAa5G7isaf2GKkk13xAHuB84GhGdQA8lAsLsnLgo2EwxCHRmK35Q0nvAwQwnfEHSXkkHJG2E8ja8pJeyhf0xcFV1oGx5L8vPd2TLfL+kndna3gQ8lq3ym/It9m15jr2Sbsx9OyTtyMz6XkCtLlzSCUk9ud1OSVeOWX8xJRL6kYj4EyAifo6It87ymzQnc74D3Jrfe4nKnB9f5W+ycPI/s812LgrW9rJH0A18nYuuBx6NiEWU1vKxiFgOLAcelHQNsBq4DlgM3EeLln/enF8F1uT8GOsi4nvgZaAnW+WDlDkYevIca2jEkz8N7IqIJZRspwXjfIVLgC9zu89yv2adwJGzhD2+ljf5pzJ2AkqM/A8AGSV9DOigFLXNGdq4jJL5ZDYp7R5zYbPbRU3xJoOUHKwbgC8i4rtcfjuwtBqbB+YDCynzJrwZEaeBnyR90uL4K4CB6lgRMTYjv3IbsLhxL2aeSnLvSjLPPyI+lHR0nP3/Brbm5zco4XJTsSEiRjJLahsl+qVvgu13A0+ozLGxPSIOT/F8Nou5KFg7O5Wt3VremE82L6IMu/SP2e7Of/E65gArIuKPFtcyHWOzZYaBBZLmteotNKXNHpe0hZI22weMUOYT+DF7U/OBXyNii6Q9lEmXPpK0MSJaFUWzM3j4yGa6fuBhlYh1JC3KcLkB4N585nA1cHOLfT8HVuZwE5Iuz+XHGf2QdwclSI/cripUA8D6XNYNjDcv7xyg6smsB3Y1r4yI3ym9oM0Z1Fel8a6TdEH1j6b8jnfRSJttTuZcS0nHDUnXAt9GxIuUtM6l41yX2RlcFGym66XEX+9TmbT8FUoP+F3gcK7rowypjBIRvwAPAdsl7acxxPM+sLp60EzOVZwPbQ/S+BfUM5Si8g1lGOnIONd4kjIZ0xBwC/Bsi22epMwKdzC3+wD4DZgL9Es6QJlUZoTyHARKIemQNAw8TmPCoHuAoRx662LioSazUZySavYfk3QiIi79v6/DbDLcUzAzs5p7CmZmVnNPwczMai4KZmZWc1EwM7Oai4KZmdVcFMzMrPYPWtO5c9Z6CbYAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] + "execution_count": 23, + "outputs": [] }, { "cell_type": "markdown", diff --git a/examples/tutorials/14_Conditional_Generative_Adversarial_Networks.ipynb b/examples/tutorials/14_Conditional_Generative_Adversarial_Networks.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..622b7dbd7618d1f22b50edb140402c34c7b5de5d --- /dev/null +++ b/examples/tutorials/14_Conditional_Generative_Adversarial_Networks.ipynb @@ -0,0 +1,385 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "gG-V_KZzqSSr" + }, + "source": [ + "# Tutorial Part 14: Conditional Generative Adversarial Network\n", + "\n", + "A Generative Adversarial Network (GAN) is a type of generative model. It consists of two parts called the \"generator\" and the \"discriminator\". The generator takes random values as input and transforms them into an output that (hopefully) resembles the training data. The discriminator takes a set of samples as input and tries to distinguish the real training samples from the ones created by the generator. Both of them are trained together. The discriminator tries to get better and better at telling real from false data, while the generator tries to get better and better at fooling the discriminator.\n", + "\n", + "A Conditional GAN (CGAN) allows additional inputs to the generator and discriminator that their output is conditioned on. For example, this might be a class label, and the GAN tries to learn how the data distribution varies between classes.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/14_Conditional_Generative_Adversarial_Networks.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "gXeKc6O9qSSw", + "outputId": "9872d3b7-bf6d-4977-d064-ca122f539751" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "xDBRoR3pFeGs", + "outputId": "d336d18f-703d-4268-c5eb-e39d6ce86148" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Vr4T07_aqSS_" + }, + "source": [ + "For this example, we will create a data distribution consisting of a set of ellipses in 2D, each with a random position, shape, and orientation. Each class corresponds to a different ellipse. Let's randomly generate the ellipses. For each one we select a random center position, X and Y size, and rotation angle. We then create a transformation matrix that maps the unit circle to the ellipse." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "IdfLLsjGqSTC" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "n_classes = 4\n", + "class_centers = np.random.uniform(-4, 4, (n_classes, 2))\n", + "class_transforms = []\n", + "for i in range(n_classes):\n", + " xscale = np.random.uniform(0.5, 2)\n", + " yscale = np.random.uniform(0.5, 2)\n", + " angle = np.random.uniform(0, np.pi)\n", + " m = [[xscale*np.cos(angle), -yscale*np.sin(angle)],\n", + " [xscale*np.sin(angle), yscale*np.cos(angle)]]\n", + " class_transforms.append(m)\n", + "class_transforms = np.array(class_transforms)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xPml_fFGqSTK" + }, + "source": [ + "This function generates random data from the distribution. For each point it chooses a random class, then a random position in that class' ellipse." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ksP0E2KHqSTM" + }, + "outputs": [], + "source": [ + "def generate_data(n_points):\n", + " classes = np.random.randint(n_classes, size=n_points)\n", + " r = np.random.random(n_points)\n", + " angle = 2*np.pi*np.random.random(n_points)\n", + " points = (r*np.array([np.cos(angle), np.sin(angle)])).T\n", + " points = np.einsum('ijk,ik->ij', class_transforms[classes], points)\n", + " points += class_centers[classes]\n", + " return classes, points" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "yvf85D4KqSTW" + }, + "source": [ + "Let's plot a bunch of random points drawn from this distribution to see what it looks like. Points are colored based on their class label." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 282 + }, + "colab_type": "code", + "id": "CXy5-cJkqSTk", + "outputId": "afb38088-aa6f-4414-98b2-285b473b140c" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydd5xU1dnHv+feO3V7offq0lVARETsXQmW2DVq1Bi7r68l0UQTE01iib2grwVjbwFREVBBRHrvvcOybC9TbjnvH7Msu8zM7uzuLEXP9/OZmLnl3HNnmd995jlPEVJKFAqFQnH4oh3sCSgUCoWieSghVygUisMcJeQKhUJxmKOEXKFQKA5zlJArFArFYY5xMC6am5sru3btejAurVAoFIct8+fP3yOlbLX/9oMi5F27dmXevHkH49IKhUJx2CKE2Bxru3KtKBQKxWGOEnKFQqE4zFFCrlAoFIc5SsgVCoXiMEcJuUJxmBG0wywq3sSGivyDPRXFIcJBiVpRKH6prC3bycdbZqEJjSu6HU9Hfw4AuwIlbK8qomtqK3I8aXHP/2zLHJ5ePRFdaNjSoYMvm6cHX0NbX2bM40vCVdjSrndMxeGPEnKFooVZU7aTjRX5TNg+nzmF62u2f7J1NmM6DqUoXMFPe9bi1gzCjsXZ7Y/ivn6j0UXdH8xLijfz9KqJBB2zZtvGit3cPu8NPjj+ToQQONJhaclWtlQW8MmWOawt3wkCOvlz+cvAX9M7vd0Bu2/FgUMJuUKRBIpCFby36UfmFq6nvT+LK7oeT4+0Ntw1/22Wl2zFkQ5haUed99m2uRhCx5I2YccC4Osdi+joz+Hq7ifUOfaDzTMJ1RJxAAdJfrCUNeU78WgGt857g3IzQNAOU1OgWsKGinx+N2csn51wDxluf0t8BIqDiBJyhaKZFATLuHLmc1RYQUzHZmXZdqbvXkmq7qHIrGzwfGs/gQ86Ju9v/rGOkJeGq1hcvJlY3QM0ISgOVfK35Z9SECyNeQyA6dh8tWMhl3Yd0Yi7UxwOKCFXKBpJlRXim51L2FCRT++0diwu3kyZGcCWDgASSdixKKq2sJtCpRWq+f9lZoDLf3yGglB5zGMtxwYkpWZVXBEHCDkm26qKorZLKXl304+M2zidknAl3VPbcHefcxiS06PJ81ccWJSQKxSNYEdVMdfNeomAHSJgm/h0NyHbxKlXQhuHAI7M6lLz/sPNMykMVcQ9/ppuo7ClQ8iu/8Hh090MzOoctf2VtVN4d/MMgnbEbbOuYhd3zX+bF4+5ngGZ0ccrDj2UkCsUjeDxFZ9TEq6sEe6AHW7WeJ39OeQHSrFwsKWDITSEEKwu28lpUx/l+FZHsLY8v94HxdLSLQzL7YWs5xiX0GnjzeCkNv3qbA/aJu9umlFnARUi1vuNs1+lW0prftvzZE5u279Z96loWZSQKxQJ4kiHOXvWNcv6dguD9v4s3JrORZ2Hc37HwWyu3MM7G39gfUU+peFKCoLlFIUjFvjXOxaja6LeMecVbuCqrifgEjpmjAVVHcGvuwzn+h4
n49LqfuULQ+UIEXt8Wzqsq9jFw0s+ojhcyYWdhzXxrhUtjRJyhSJBBAJNCJwmNiw/MrMLjx91Bdme1Drbu6W25qEBF7KpYjdXzXyesNznIrFx0DHQEHEfILrQaO1Lx6O7MK26Qu4WOr/rfTpXdhsZ89wcTxoNNWAPOiYvrpnE6I5DMDQ9kVtVHGBUZqdCkSBCCE5s0w9DNE3MFpVs5p6F41hdtiNqnyMdvti+ANtxovaFHYteaW2JZ5d7dBcd/Dk81P8CvJoLvfpr7dPddEltxYWdhhGyTd7b9CNXz3yB3856mS+2LyBsmwjgki7H4dVd9c497FiUhBuOwFEcHERDT+OEBhHi/4Bzgd1SygadaUOGDJGqHrnicKQkXMkNs1+hIFhGVRP9437dzXvH30E7XxYQiTq5Y96bLCnZTChGpItb07my60hOaTuQ62a9GHWMT3NxVvujuLbHiQRtk0+3zqYgVM5xrY7g9HYD0YXGDbNeYV35rihfOECP1DYMyenO1zsWU2pWxZyzR3Mx5ZQH8dQSfFs6WI5dZ5uiZRFCzJdSDtl/e7Is8jeBM5M0lkJxyJLpTuGD4+/ksSMvJ8fduLR3GRBIGYnnfn/TzJrtE7bNZ2nJFkKOhTQFMhQ5bi+60BnTeRi90tvyj6OuRNvPNg84Jp9tm8MVM5/Dq7s4t+Ngzm5/FMfm9MStGfywexXrK/JjijjA+op8Jmybz3vH38E/jrwc935+dK/m4oJOx9QIdtix+NeK8Zw4+RFGTX6YX//wNAuKNjTqs1Akl6T4yKWU04UQXZMxlkJxqKMJjeGtevPwwIu4Z8E7dbIt3cLAro5AqY10QOa7QZeYncL8sHsl1/c8mXSXjy93LCQQsHAWpiN3uyPxhz4b/ehyOnZI49FBl9LGmxGJ914zM6avXAIVZpCrZr5A0AljCI2wYzO6Y8R4ayi6JmhbPLfqK37as6aOz9yjGVzYeRi39D6jZtvDSz7ih90ra34ZbKos4M55b/HG8Jvpkda2sR+nIgkcMB+5EOJGIcQ8IcS8goKCA3VZhaLFGJbbi+eGXsvg7O5ku1M5Oqsbzw69ltPbDURz9n21pAQcEK3DiHYRQd0RKGbMtH+xvnwXSIk9MyMi4lKAI6DSwJ6ZyV1dRtMvsxOVwTC/feoj5uSvjTsfB0mJWUnQNqmwQoQdiwnb51MQLIuysqPPdZi0czElZlWdyBe/7uGW3mfULHLuCZYxvZaI7yXsWLy9YXpCn5vpWEzLX8GEbfPZHiNBSdF4DljUipTyVeBViPjID9R1FYqW5Misrrx0zG/rbBuY2Zkf52yhtFUxGBJZ5IKQQLQLs3ed1EFSbgV5YNF7bNtRBpWpERGvjYSPJy7nse3TyC+uiFjpfSRIiBMxGEXQNtlQsRstgRNixaGHHYuf9qxlZOs8IPIAclUX96qNg2R9AmV115Tt5Ja5r2M6NlI62Eh+1XEo/9Pn3LhhkIqGUVErCkWSMTSdo5w8nEm52F+0wpmZCbYgVvTgpsoCwhWSmCEpjmDGso0REQe0vEoQDYu4lBFXzl6CjsmTR19NljslbuQLxJwetnRqYtoBOqfkYsZYkNXR6JfRqd55OdLh7gVvU2pWUWWHCDhmza+GabtX1n9TinpRQq5QtADXnTkUvZbiOktTsb/OwdkdHeEhMqyIOyUKSW2FF+1CcUVcOuAEwZ6Xhj0hF3tCLtYPGWjlLka2ymNjxW4sx643lWn/srmRGUiOzOpa8z7TncJ5HQbj1ereh1vTaevL4IGF7/Lsqi9ZWLSRjzb/xKdbZlNYXSNmVdkOKsxA1DUCdphPt86uZ2aKhkiKa0UI8R5wIpArhNgG/FlK+XoyxlYoDkd0TUNoGjjV/mY7IpLOnHTEmYWIWt88keog2oWQu9w1xyFkXRNZyLjfVimBSg1neiY42j4XTZEL84cM9B4u/r31Syyisz5rE4mGEeyVe5/u5tS2A+iSklvnuHv6nkc7Xxbvb/6RcjN
I34wO7AqU8MaG7wnaJhqCdzbNwCU0NKHz9KqJPNDvV7T3ZyPi/CYI2bEjahSJkayolcuSMY5CcagyeecSXl//LQXBMvpkdODW3meSl9Eh5rFBO8xnM5di2jGEU4DM9yA6hOps1o4ux1nvQ27ygSUQbUPIAhcEjJrz4iEESJ8DaRYUe+pczJA6701fgJ5Xv4gDdRY5daFxVrsjua/f6KjjdKFxdfcTasrsvrJ2MuM2Tidc/dDaG1VjSqfGx/PY8s/5cORdMe/Dq7k4vd2gBueniI9K0VcoGuCDTTN5Yc2kmjjsOYXruXHOqzzU/wIModMrvR0d/TlsqMjn0aWfsrJsO+GNfpBeopRLQm3DWEqQO9w4K1LRBpSjnVpU4z5xtrtwFmZE/OuOQBYbkGXFdq/ooB9XhrMwDbnDW7PZtGyoaHwmqi0dxm+bx+Cc7pzWbmDNdkc6fL51Lu9tnkmlFWR4bm8WFm+sEfH4CH4qWMOf+1/EQ0s+wHJsbCS60Ojgz+a8joMbPUfFPpSQKxT1YDk2L6+bHJVME7RNHlr8IT7DjWlb9M3syNqyXVTZoYhnu10IudUTEeHaSIFoXR3T7QgwwVmWGtH7LAtnrQ9Z4Eak2GjdA2jDSnAWp0GljrPKjz68LOY8hQAM0I4qxw7oUFztw9YdRFbT6qJbOLy4ZlIdIX9i5RdM2DavJvzwi+3z47pL6tw2Els6nNCmDwMzu7CgaCNUb9tWVcgb677jpt6nNWmeCrXYqVDUS0GoLCq5Zy8OkkorRFjaLCreTGW1iAOIViaibRh0ScQMl6BLRJ8KstJ8DEjtglibgj01G4I6oncFzvdZyNV+2ONGbvFiT88CW0MfVYy7m4kxvKzhsEMdtF570+xl5H3nYJPvf3ugmDmF64BIDPnnW+fWiSGX1Z9DrIXS/RnZug+z96xjWelWbPZ9piHH4o0N37OhvG74opQS07EaLOqlUBa5QlEvWe6UJgmJEKANLkcWBJE7PKBLtM5BRIZNipHNRe5RrN44BdM0QZPIMheEai1USgE2OAvTEMcVY2/X0fs3HHooBJBiIroGIDcMJQb2d1nggDawHNHOTDgGfS/3LBjHDT1PoUdqm7gPNR0NXdNqMkotaSOIZMEaQuN3vU6nrS+TNzdMi5ll6iC5Zd7/8daxv6fMCrCkeDOvrf+WwlA5GS4/1/c4mV93Ga5izeOghFyhqAev7ub8jkOYsG1+jXtFluk463zICgORHUbrEUT4ogVOCBCtTWhd1y3TO60drVNrlbLNsCDfE50QBGAL5BYvhHWczR60rvFDEGuumybRBlZg/5QBha6a0EZnXgbkhtCHlzdKzIO2yStrp/DEUVfFbV7hIPno+LtYVrKVXE8aKYaH73evQBcap7UdQNfU1gCkubxxS/IWhSoYPe2f6EKvU8q3xKzihTWTALik63GJT/wXhBJyhaIB7so7B11ovL95Js5uF86cDHAAKZClBvYWH/qoYkRKbGt1f/aEyjjqyA6k+jxUhUwICzAcIMaipAR2ehFdAmidI5EusqHMTgEUGxAG0bMq8jDY4YGAHtlZNzw9ISzH5sMtM+PudyFo68ukgz+7ZlusqJ5z2x/NfzbOwInRAEMisQFbRvv0g47Ja+u/ZWBmZ2YXriPV5eWUtv3JcqdGHftLJCllbBuLKmOrOBy5fe7/MeP9IqjaX3Alon0IfWh5jUi6NB0zTiSHhuCnMx7l+8Xr+d9Xv8CREpFXjtzsR7QyYW+BLUtAqg1hDf20QhpTBt3JdyFyzMgqWPVX3FmciqzU0I9PwNfeBNINH48OuoRjW/UGIuI/bfcKZu9ZR44nlfM6DKG9P4sX1kzirQ3TmnQNjzCwpIOh6Qjg8aMu57hWRyTxLg5tWrqMrULxs+fy9idAMNZXRiAL3JFQwuotVowGEXtxawZCCEb074rfUx1dYmnopxZFQhAHlaOfUYjoHIByA9EmFDt/Pg7SBpFjIgwQGgg98tIGVSDaN6/
HaH2UWQFun/8m9y98l4AV4sbZr/KXpZ/w+ba5vL1hOpfO+DczC1Zzc6/TaOvNbNI1QtLCxiHkmAQdkz8sel8lE6GEXKFImEGtusTvDmRElHavpRvPl+zSdI709uDesV9wzh9fpyoUhhQLLa8qIrpG9UsHbUAlZIQjbpDG/HAOi9jfbEeApbWINV6b7/OX85eln7CufFfNwqYpbYKOyQOL3qXCDPHk4Ksa692JiRBUhzL+slE+coUiQXxuFycN6sHkBTFKyQYFcrsH0bFuxubeBCBN03AbOr6An3lflxAMFAIg2obQB5bHFl4N9BNKI/74xphcWpwiXEJG9rUwDpIfClZFVUgECNgm5057nEu6DMejueI2u0gYGf+h+UtCCblC0Qg6tsqIvUNqOFs8aPsJuZTgLPeDdDGkU09mz9lBMBwRONElgNa/ok7dldqISOkT0CLuEukQEfXqbXEta3f1cfv/eBAg0pqWHNRYnDhhihCJgnkrwdrlDSGRHJ3dPSljHc4oIVcoEmRrQQnvTF0Y/4ACN9aMDPSjyxF+J1LMSoLWtwrpCGbN20Jor4taSLS+lXFFfH+EDrJS4Kz3ofWrqtc9IgRIS+yzVKsXYOUON6Jjwz7yBqNiEqCTP5edwWKCSfRfG0JDCIHtOLg0HYHg0UGXUhKuZOqupVjSYWTrPLqntknaNQ8XlJArFAkyfubyBpKDBBS6sKdnop1ahGYQsYp1AIk1uBBjaitMywGvE9fNEVdI3RIq9YTcLMIjI4uvFVrEL+610TrtE/H6xDoZPvRtlXvI9aWzM1DS/MGq8Wgurut5EqZjk2J4OK3tQGYUrOLiH57CQeJIyWvrpnJ51+O5uffpSbvu4YAScoUiQYorAlh2Q7HiAkyB3OlBZlg4G31QpUFrE1fHMGRbsFuDsNboWG5ZHimalShCgEjb64+pHuMAuZNNnKSKOIAlbUZ3HEq6ywdAYaicf64YX8cXb0uH9zb9yElt+sWtTvlzREWtKBQJMrJ/N3ye6MYQUTgCudGLPS0LudmL3O1BrkjBnJ7BpacMwud2keLyoG9Pgf10OZalLGW1r32TB6HLmBZzLIGOtU0IkKUaslxLmqhrSYg/EYBL6KQaXlxCJ8Plx6u7asb3ai5uP+KsGhEHmFGwOmYLu7BjMXnXkmbP6XBCWeQKRYIcP6Ab/bq0YdmmXTULlnEpdlHH5HYEMqDhFBj87fqzeGfyfOzSbPSeFSwJ7wufiyXihAROkQFFLqRRHSceKwoygYxNaYFcl4LoX4Es1SHdJoF6V3ERCHK96ewOljZ9EMCne7iv32haedLo6M+hygrx7qYf2VZVSNeUVozuNJQ++1nYe9eC483rl4QScoUiQXRN44XbL+DL2av4cs5KPC6DtdsKyC+p2O/IeP3YBO9/t5gPv19M2Iq4O9x6CPKI+9tYCJCmQC5LhYCO3KZDnyqkts8ylw7oto7HoxNwavnBt7ugbSS7U2jVIl7siix69og8WIQGMsWuc/3G+MglstkivpftVYWsKt3G5F1L2VPdHg4iLeJWlG1nU+Vu0g0faS4fWysL8eguQnb0A9WlGXVK7/4SUCn6CkUzKA+E+Pen0/ly9ipCZuND+0S3AFq/irjp91JWF+lakgJFbkBAqol+TDn4IyUARIXBvT3HsM7czicFMyNhhgZYP2aABVqXELgc5A4vcqe7pjiXGFCG6BBCuKhjle+VhANZaFAXGi6hNzqu3BAatpS4NB1HOuhC48quI3+2tc3jpegrIVcoGkBKyfvfLeKNSXMoqgjQvW02/3PxiQzL61xzTDBsMfKu57GdRn6fvDb6qUU1Qh7xhXshKNB6BSCkYX+bVV3BcK+ySnBJ8Njojs5FowZy1cihXPzIOwRkENEliMgxkbvdyA0+6v5CqOt/0c/cg/BEz1magE6z3C6J4hY6DhKrntjzes/XDC7vMgKv4SLTlUJbXyaDs7vj0RNYzzjMaNFaK0KIM4UQq4UQ64Q
Q9ydjTIXiUOG1r2bz3H9nsKesCseRrNtRyF0v/pdF67fXHON1G+R1at34wYM6zqLUSMKPBfaiVOTyFLRuQYQB9ir/fiIOeyNjcAROuskXgdlcPOspzFaVENaRm3w489ORG/xEu3nqvhfuOA8eAwjXtNxsMQyh4TM8TRZxiJQ9sKTDWxum89zqr/njovc549u/M2tPjAzcnynNFnIhhA68AJwF9AUuE0L0be64CsWhQNi0ePObeVGLm0HT4qUJP9XZ9tCVp5HideNxRcxrl66hawKtoQXIbT7syTnYi1JhizfSHm6vlbyn2p0SiyoDuctN5Q9+gtt15KDSSN2WnlXQNhRJyW+ANOGPvSOgYX+f3egQycYihGh2in3IMvlwy08E7DCVdohKO0SVHeLeBe9QGq5qeICfAcmwyI8B1kkpN0gpw8D7QHTrbYXiMKSoPBA3CWj9jkJCpsW/PvyOEXc+z2V/f4eubbI4c0heTZii40gS8raENNi+1w0i9lVZ9DrErpgl9v3XFsjFqUhTgM/G2eqNJA4ZTnWrufjkVXbDvV96qbTAWeGPPFBa2PNqOTbZ7lTcjanRuz+C2H8jAd/lL2/6uIcRyRDyDsDWWu+3VW+rgxDiRiHEPCHEvIKCgiRcVqFoebLTfHFD2bq1zea+1yby6YylBEImUsLyzfl8MXsFYdPCtGP1wUkMZ4UfaQGdA4mdYAmcaVlQ6IYKI7Iw6miQEwbNJp4ir55dzilt+kFYRBZWqzScRWnI7T6wNKhohsAmgAS2VhYiAa/WNJ+2JR3MGI0qbMeh0mp6v9LDiWQIeax/5VH/aqSUr0oph0gph7Rq1SoJl1UoWh63y+CiEwbg0ut+VTwugwtHDmD2ys2EzLoiYjuy8Yue+yG3+SK9NpekJX5S7Z6fELGoC9ygx4+4Liqv4ov3tuDku3G2u7GnZCO3eyI7hcRektLi2aA2DkIITm83iK4pydMGTWgcm9s7aeMdyiRDyLcBnWq97wjsSMK4CsVB5+MflvDB94vqbGuTlcoTN55Lms+DS29Bi7XKoP60l/2JlfJJZGG0nnNkmYGzOA1KXegnFSM6BSHDRHQOoh9ZcUDCEMOOxZaqPfy+9xl4teant/h0N+d1HEyPtF9GAa1kJATNBXoJIboB24FLgcuTMK5CcVDZsLOQJz+aVpO8s5eyqhAZqT7emTKfylB0NcFYfSBE9f+0nHXbTLW1RSRUsWcV+tH7Epz23ktje1s0hUXFm3AJjZPb9ufrHYtjNmj2ai66pbZmVdn2uPM5pU1/zu84hGNze7XshA8hmm2RSykt4FZgErAS+FBK+ctYYVD8rPli1gosO9r3GjYtrvnne3wzf03sGif7vTd0jbbZaYzsfzDqZjdC4DWJLNtn23l1Fx19OeiIZom4hkBPcB7zijYys2ANv+t1Gq4YC6ASeOGY6zm+VV7MtQsNQa+0tgzL7Yk4kBlNB5mkxJFLKb+UUvaWUvaQUv4tGWMqFAebQMiM6eu2Hdloy/r2McezZtvBXORPYMKOQBgSTWr8T965TD/tEYKOid3AuQYaadXFrmrjEjpe3cWNPU/h0q4jSDO8GGhkGH5y3KkYMbKNJJKgbZJiuDk6uxs+3Q2AjoZHc3Fv3/NJNbzcmXcOqYY3qvWeg+TNDdN4etXEhu/3Z4SqtaJQxOHEQT0Y/9MKAuHmNUewbIf3vl1IsJnjNBmN2pVs4xBpA+es9qOVehl9yjEA+A0PhOKfpQFpbh8DMzoTcixmF65DEwJHRhYwB2d355oeJ6ILjTvyzq5z7pvrv2fs2imY+00u6JhsqCjg30N+w48Fq5mWv4J0l4/zOg6me2obHOlQbgX484CLeGbVl2ytbptX+/zPts7lpl6nkWp4E/uMDnOUkCsUcTgmrzNH9mzPTys2N3uszbtLOKpnB6YtWX/AaoLX4EDDLhYBlkDmexCGRkUghNdtcEmX4Ty76qu4NVAcoDhcyfSCVTWJPXb1DYYdi/l
FG/hqxyLO7XB01LnF4cooEYeIH/yI9HboQuOE1n04oXWfmn2rSrfzPwvGUWkFEQiq7Ngdj1xCZ1eghJ5pbRu4758Hqh65QhEHIQS3jR6Bx2h+ZIplO9w6egQ+d8vW/zA0QZusVDQh8LiM6rDJxvmKPS6D7LRIxucFnY7hjPaDcGsGfs2NXu2Z3t8/HS87M2ibjN82D1s6VFmhmsSduXvW89GWWbHPcUze3DCN/P0aUwRtk1vmvk5BqIyq6izOeNc1pU1bX2ZjbvuwRgm5QlEPvTu2JtXvafY4qT433dvl8PTN58dshpAsLEfi0nXmvnAHPz17G6cc1avR1wuETHYURkrTakLjj/0v4OORd3Nm+yPRhEakFWniPyt2VBVx8pS/cMrUv3L+tH8yLX8Fj6/4HCtGEk/NOYFiLp3xTJ2MzRkFq2qs/frwai7GdBr6i3GrgBJyhaJeNE3wj9+e06wxDE3jnGP6sKOwlMUbdraokEOk/OzV/3iPM+5/lcpgmFSfG4+rEV5UAeN/qht41taXyZKSLTEzKOtDR1AUriRgh7GlQ36wlAcXf8C2qsIGz620Q0zZubTmfWm4CjtOca1Mlx+BIMPl5zfdR3FXXvP+ZocbykeuUDTA0b06YuhaAv06o3HpGllpft77biHvfbeIkGnhtKCT3NA0dhWXs7UgYlEXlm3C6za4eNQgtu4uJjPVx5T5a6kKh+P66i3boag8ujRAqdn4AlQOErmf+IcaUXN8UckmTmsfaRIxOLsbsaJvfLqbhwZcxMjWeY2e388FJeQKRQJkp/nZHdUJKBqf28UfLj+FgtIKgmGTykCYj39YEpXGn2z8HhemFbmGWSuByZGSoGmxp6SCp2+O1LK764IT+GLWCr6eu4plm/Oj78HjYuSAblHbB2R24ttGFKHSEAgEdsMhM3EZkLGv5nvX1Nac1f4oJu1cTKB6kdOru+iT3oHjWv0yUvHjoVwrCkUCnHJU/CxBTYDb0PG4DEYf14+zj8njN6cP5XfnHsf3Sza0uIgbmuC8Y/sy9u6LMWIszDqOZNH6fVUz0lO8XH7K0bx9/+VcdtJRdRZgvW6Dfl3acHz/aCG/7YizYl5fR+COkVbvIHFiiLgAOvlz8DSQiu/T3TXW+F4e6Pcr/jzgIobl9OSorK7cnXcuzw29Fv1AdMCoxnEcPnt2Ild2+z2/yr6GP4/5J9vWHNyqJMoiVygS4LTBvXjvu4Ux96X7vdxw9rEc27cL3dpm19lXWFbZ4nOzHInHZdAxNxM7jvunbfa+4lt7XUSGrnHPxaMY1qczn/6wlGDY5Kxj+nD2sDx0LVoYO/iz+X2v0xm7bmqNr9yjueiR1pp15dGWvY6gd3p7NlbuJmjvc6d4dBd/G3QpswrX8s7GHygzA+S6UykzA4Srx23nzeSlY34bJdBCCE5u25+T2/Zv5KeUPJ65eSzfvPkdVvUDeub4uSz6bjljlz5J6065B2VOqtWbQpEApZVBTr/vFcwYQnlUj/a8fs8lQCR9f09ZFdlpfrxug2v/9QGLNzRsremaaFbFRLvlDSUAACAASURBVK9LJz3FR98urflh6cY6Y7ldOk/eeB492ufy6H8mM3vVFgCG9enCQ1ecSpusRlRYBJYUb+bTrXOosIKc0rY/p7UdyEOLP2BGwSpCzr4GHF7dxbjht7KoeBNvbvieonAFeekduCPvbPpmdKw5zpFOJBpGSrZWFeLVXbT2ZjT5s2hJCncVc1mHm6LrnwsYc/vZ/P7pa1v0+qpnp0LRTO57bSLTFq8nbO1zlXjdBv+64VyO69eVNybN5fWv5yBlJIX/khMHceKgnvz+2U+iOgzVxqVrPHTlqfztP1MJWU13w2iaoFV6CnvKKqMeCpedeCRTFq6lsKyqZrFVE4LcjBTG/+Va3I2JaomB6Vi8sGYSn22dQ9A26Z3envv6jqZ/ZqeGTz6MePfvn/DGg+/H3Ne5TwdeX/7vFr2+EnKFopkEwiYPv/0N0xa
vR69u43busX3Zkl/M+p2FFJZV1Yls8boNrj5tCCcM6M6LE2ayfNMuyqpCdaw5r9vg+jOPYf2OQqYsXFvdUSj530lBROj3F3i/x8WfrjqN0wcfkZTrSCmRSLQD6LNuLFJKCrbuwXAbZLfNitpfUVKJYzuk50T/Unnm92P54uVvYo6b2yGbd7e83KLFuuIJufKRKxQJ4nO7+Mdvz6GsMkhxRYBvF65l7Fez41rbwbDFf6Yu4KZzjuX5W8cAMG/NVp76eDrrd+whJyOF3545jJVb8vlu8fomhTcmioSYrptA2GRLfkn0CU1ECBG3o9KhwKo5a3nsymfZs60QKaHbgM48+MFdtOvWht1bCnjsqudYNWsNCEHnvA7c9/ZtdB/Ypeb8rv06oWkCJ8ZnWbSrmM+f/4oxt50dta+lURa5QtFINuws5IXxP/LdovUNHqsJwcxnbq1xXWwtKKEyGKZH+xxcuo5p2Zxw9wtJi2wRjax57ve4+Nu1ZzFqUI+kXP9QpqSglKt73EqgYl/7NyEgNSuVky4bwdR3fiBQEcSp9UBNyfQzbv0LpGWlAlBeXMEVXW8mUB67hZwQgmdm/o0+w1qmFrqyyBWKJLB2+x6u/se7CQvv3miRXUXl3PXyf9m0qzjilhGCh648lWF5nZvdFg4ii6Xpfi/d2+WwYN32mM2II+4VDdvZF7XSKjOVETFCDX+OfPPW99j7rUFICeVFFYx/YVLMc6ywxUdPjCc1MwWP38MJFx3LE98+zC1D7495vJSSP579Nz7YORZXC9fVqc2h68hSKA4xbMfh5mc+SVjENSHYUVjGsbc/x9l/fI01WwsImRZVwTDlgRB/enMSWwtKSfO5o84VwHF9u3DPxaMavI7f4+LMIUfw7h+u4Mge7WM6NgxN0LNDLucMyyPF68bvcdGtbTYVVSHO/sNYnvp4GhWBeurV/gzYtXE34WDjSgmHqsJ88M//8sZD7zH23nFc1f0W8jfvoUOv+FUVbdthwZSlcfe3BErIFYoEmfDTCorKE0tTT/W5oxYt97eRg9WdhsoDdUuxaiKSXTmwezvGTZnf4LWqQiaT5q1mzJ/f5O3J82IulnbIzeDVuy7m4avPYNqTv6db22w25xdTWF7FnrIqPpi2mGuf+KBF/fQHmwEj++JLbXwhLcd2sMI2oUCYUCDMP65+lt/89TI0PfZagJSSYEVs10tLoYRcoUiQz35MzMpyGxoVgdh1svfHdmSUeHZslclVpw7mzW/mkV/ccFkAiCQFBU0rZpw7gNtlkJESEbG5q7ewcVdRnTBK07LZWVjGjGUbE7re4cjxFxxD6865cQU4ijiHOY5EAJc9cAFCiz7IDFlMeus7ruh6M/ed8VeW/biq6ZNOECXkCkWC2HZivuz9mzU3li27S3j3u4X1xp43lk27iiiuiBTCWrllNyEzeuyqkMmKGLVXfi643C4eePeOyApnPWi6RnbbTDJbx05KMoMmlmlx1Z8v5uhTB+JNiZQ5FprA5TFASuZ9vZjdW/awYPIS7j/9r8z+ckHS76fOnJtzshDiYiHEciGEI4SIWklVKH5OnHtsX7zuAxMfUF7VNH91fRL18fTFALTLSY9Z1tbndtE+N71J1z1cWPztcnQ9dqMQw22Qlp3Kywv+yQc7xpKWmRJ3HDNk8tgVz7JhyWYyWqXTd3hvzrr+FDrldcAy7TqLzaFAmBdu/7+k30ttmmuRLwMuAKYnYS4KxSHNhSMH0LdLG7QYP6f3R0/gmJbA0LWY8zNth8Xrd+A4Ek/1w6i2YSpEJJX/9KN/3lUENV2LaZBrmuCEC4/l/W2vkJKZwpevTUHo8eXxqRteZvpHMyneVUL+pgLWL95M+x5t2L5mZ8zj8zcXEGrBxeSkxJELIb4H7pFSJhQcruLIFYcrjiN59N0pjJ+5vN4MTI9LZ3ifLkxfurFF64/vz8mDejBz5eYot4yha1xw/AAWr9/B1oKS6kbQAkdKDF2jd8dWPHzV6SzbtIs
Js1aga4LRx/XnrKF5CT24WpqKkkrGvzSJOV8uILdjDhfccQ59j238Q2fP9kKu6XVbVPSK2+dm7JIn+fK1KXz4r/HIJoSEelM8pGWnUrA1ummGL9XL5yVvocUoRtYYDnocuRDiRuBGgM6dOzdwtEJxaKJpgrsuGMmMpRsprgzErDZo6Br9u7bjqer63wC3Pv8pM5cn1sRZAO2z09hRXN7oRs2pfi+9O7Zi5Zb8OnXJXYZOVSjMhl1FNXXLQSIEDOjajtf+52Juff4zFq7bXvMQWL4pnxlLN/BYMzskNZeyonJuPvpeSnaXEQ6GEUIwa8J8bnv+es74zUlxz8vfXMD0j2dhWzbHjR5K57wO5HbI4bYXfstzt7yG0ARSgmPb5LTP4qYj7yFY2XSrWQjBaVeN4tN/TyRYyzXm8XsYfeuZzRbx+mhwZCHEFCHEshiv0Q2dWxsp5atSyiFSyiGtWrVq+owVioNMmt/Lf/5wOaOH9yMn3U92mg+Py8DrNtAEIGHpxp3c/sJnbNldQkUgxPJNjVtE7NWpNUYcX248vG6DPp1b89wtv2Jo705oQqAJQbvsNJ6/9VfMWLqxlohHkBKWbNzJj8s2smj9jjqWfCBsMm3pBlZuObgLoJ8+/QXF+SWEg5FIICkloaoQL9zxfzXb9mfi2Mlc1+cO3vjju7z5p/e5efC9jPvLR+zcmM+i75bjT/eTmpVK3jE9cWzJzvX5zRJxgGBlkP4j87jonvPw+D34Ur24vS7OuPZEfvOXS5s1dkMo14pCkQRM2+aWZz9lyYadNWF9QkCaz8PVpw3htTg1Wap1PwpD1wCJVU+kTO10fE0IMlN9fP7Ib3hxwkw++G5Rwu2RdU3j8pOPihmz7tI1bv3V8Vx16uAER0seUkoqS6u4e9Sf2Lh0S9R+w63Te3APjj5tIOfffAZZbTIB2LOjiGt63hrtPvG60F0GocpgzFopycDjc3Pkyf25/53bKd5VQk77bPxpvqSNH8+1osIPFYoksGNPGUs37qoTmy1lpHDW5PlrYoq4JgTeOGnclu3UK+KGrnHa0b3we1y4DZ0TBnZn3P2X8dG0xbzfCBHXhGBg93a0yUrF44r+BWAYOpkpB74b/ZLpK7iuzx1c3OZ6Ni2LFnEAK2yz4qc1fPjP/3Jd3zvZvi6y0PjT+HkxKxCGQyaBikCLiThEIlQWfbeMKeOm0+mIDkkV8fpobvjhGCHENmA4MFEIEbtggULxM2fdjj3VVnRdwpZN2LJjhi163AZ5nVs1FNYcE8dxSPF6+O1Zwxh798U8edN55KT5efXLWQmP4fO4yEjx8vDVp3PmkLyY4qcJwcn1tLlrCbau3s4fzv4729bsrA7lq//4cNCksqSKl+5+C4hkYsZMxZfE/vmTIIbboHXnXHoN6V7v3yxUFear16ZGbS8vrmD1vPWU7ilr+iTiza05J0spPwM+S9JcFIrDli6ts2qKUdXGpeuM6NeVCbNWEDbtmggWl67RuVUm9196Mr/51wcEQ2aNxnhdBmHLoj7D0ZHw2Y/L0DWB29A5tm9Xbjn/uIR16uheHRg9vB+nDu6Nz+1CSsm/bx7Nfa9NxLRtkOD3uHnyd+eR4o2uBdOSfPL0F5ih2DVRvCmemL5sKSWLpkYyb0P71XxvMiJS5tbtdSGExunXnMhZ159M4Y5ibhl6H5VlAaw4SVt7i3NJKZk/eQlj7x3H5hVbcXvdWJbNSZeM4K5Xb8JoZkOPvajqhwpFEujZIZd+XdqydOPOOu4Vl0vjylMHc/GoQfz93anMXb0VXROcNrg3N593HHvKKnnqpvN4Z+oClm/aRVaajz2llVi2hiMbzhC1HUkgbDFrxSaO7dM5obA5j0vn0hOP5NSje1NaGeSv4ybXNLUYekQnLjphIO2y08jrlFjMfLLZsnJ7nVKye/Glebns/gt4++EPavpl1sbjj2RYLpiyJPGLxVmkcPvcTCgfFzPSpE2XVry2/Gmu6nFLTCE3PAanXjU
Kx3F45MInmP3VAuxwZL57S+hO+2gmma3SueGfVyU+13pQPnKFIkk88/vRnDk0D5ehI4Sgf9e2vH73r2mdmUqnVpm8dMeFzH3hDmY+cxuZqT4u/Mtb3PbcZ9zx4n8RAo7r25XN+SWUB8JYMaz7+giELT6dsZQjOjUcEZaT5mfUoB5IKbnhqQ+ZsnAtlu3gSMnc1Vv5+7tT6dw664CL+JyvFnJdnztY+sPKmPsD5UH2bC/k5CtG4vLUXVtw+9ycc9OpAKTWk5FZGyGg/4g83PtVn/T43Fz/98vqDRfctGxr3C5IKWk+fnXbmXz//o/Mn7y4RsRrE6oKM/6lb5LzywFlkSsUScPvdfPw1afzpytPq0m02R8hBB9OX8SnM5YSNm3C1ZbljGWbmn391VsLGswo7ZCbwVv/eykuXWfu6q3sKCyrU7TLkZJA2OTLOSv59agjmz2nRFn47VL+ctEThBooNvbNm99zyb2jye2Yze4texBCYJkW0nHYsGQz6xdv4rybz2DWxAWEGihzMOik/vxryp+Z9cV8Xvnft9mxbhdZbTO58qGLOOeGU2Oes23tTsbeO475kxcTDsR2/7TqlMtvet9O4c7ien3yoaoQju2gG40LM42FEnKFIslomkCrp+rJO1MWJLUgVm3qa1IxvE8Xnr31V+jVluamXUUxjw+GLdZu39Mi84vHGw++16CIAwSrQrz1yIcYLgO7lnvFDFnM/mIBsycuYNTFx3H5H8fwn79+gu7SY3bzcXtdHHd+JIrv2HMHc+y58cMrpZSUFpQy8bWpvPOXj7FNK+4CrMtjsGn51ri+89p06dcxKSIOSsgVigPGjsJSXp04i11FyY9aaAhNwOlDeteIOED39jkxLXif20Vep9YHcnpsXb0j8YMl8YVSwo+fzyHvmJ68u+VlFn+/nElvfMucrxfVsY5ty2Hsfe8wa+IC/vzxPTHDBKWUfPbcl4x7+EMqShquQ+9N8eA4EjOBB5LH7+bWZ69v8LhEUT5yhaKF2VlUxjfzV3PJo+8wcfbK5kTANRlHwo7Cug+Qo3t2oEubLFy1rEJNCPxeF2cNzTug8+t0RPukjWWFLca/OImM3HRm/ncui79fEeXisC0bM2SxcOpSftP7Np677XW2ran7MPns2Ym88Yf3GhRxw6XTd3hvbn/xhoTWFYQm6HdcHgNH9W30vcVDCblC0UIEwxZ3vvRfxvz5TR5842sqg+Gk9OdsCrommDx/Df/+dDq7SyLNKoQQvHLnRZx7bF981YlFowZ1Z9x9l+M/wCGHlz0wpsFjtHqqEe5PsDLI7q17+OGTWfW6bKQjKc4vZcKLk7iu753cNvwBFkxZQkVJJW/9+cM6NVPizsvQuX/c7Zx21Sg65XVo8HjpSJb+sKJx0TUNkJQU/caiUvQVvwT++s5kvpyzst4en5oQDOvTiVEDe/DqxFkUVwRq/K/d2maxq6iCQLhxfSb32oSxvtkuXcfrNhh3/2V0bp3VqHFbkmBViNEZV8cMO3R7XXTK60Cvwd2Z+s50zFD9/mfd0DnzupMY8atj+Ntl/6ayNLH2fHvRdA0EOAk0CHF5XAw6sS+PffUgAAumLuWh8x8nnIB7Zdi5g3l0fOwmzvE46NUPFYpfEpbtMHH2yjox5bEwdI1Hrj6T3IwULj5hEDuLyjF0jaxUH3PXbOW+sRMbdd0UrxvbceIuppq2jR10+PenP/DU785v1NgtidfvYfDpg1gwZUmdRUy3z82v//d8rnn4EgA2Lt3C6jnr4o7j9rpIy07l6od/TVV5ELMJi8qxHiax0DTBqEuO4/YXflvrPtxomohUVmzg19ey6bHDLJuCEnKFogUwLTtmpmdt3IbOsD6dyc2IxD0LIWifs69DzzFHdMbnNqgKhhPyq3dunUl+cUXMNm612RsvfqggpeSLV75h3YIN2KaNEALDbaDpGv1HHMFlD1wAgGVarF+0KfYgAo48qT/HnnM0Z153MikZKUhZhNfnTsg6bgy
GSye3Uw7//uGv5LTLrtluhk0ePPexhKsoVibYyDsRlI9coWgBfB4XHVtlxtyniUha/Yj+3bjspKN45O1vuP+1iUxduLaO+Bu6xit3XUyH3AxctfzDsZbTvG4jUt61ARHfS6rP06j7aUk+fmoCr9wzjuL8UiAi7NKR/O7Ja3h80kO4q5N/bMuOby3LSJLO6FvPIiUjhdLCMq7qfgtlRYk1r04UoQl0l0HZnkiN9A1L9tWYX/Td8pj16eORFacnaFNQQq5QtBAPXn5qpEZ5dSSDoWukeN08c8toJj1+I307t+Hul8czftZyvpm/hj+9NYm7Xhpfpzpft7bZnHVMHmYtgahtnad43XjdBreOHpFw02ev2+Cykw5csk992JbNfx79JCp5xzItnr1lLNf0vo1Jb36HlBKPz0P3QV3ijhUOhJn79SIAXr1nXIO+9KYgnUgt9KqyAMX5pdx/5qPYtl1z/UQx3AaX//HCpM1LuVYUihZicO+OvHP/5bw9eR7rdxQyoHs7rjp1MO2y0ykoqWDsl7Pq+NADIZP5a7fx4/KNjBzQHYAv56zkjUlzY47vdbv4zRlDufyko/C4DJ79fEaDc3LpGmcOzeOKU45Ozk02k7KiCsJxCmRJR7Jj3S6eveU1Zn8xn6UzVlGyuzTuWLbt1Fj1C79dmtyJxqnJEqwMsmzGKgaN6segE/thxUjH9/jddOvfhTUL1gPgchtc9sAFjL7lzKRNTwm5QtGCdG+Xw8NXnxG1ffaqLRi6FrUYGgiZfLtwXY2Qj504u04KfW2kdMhM8eLzuKgMhrHqsch7dcjlwuMHcNJRPWmVkdqMO0oOy2asZMLL31BWWIFoqExtIMwPn85ucEzpOKyZt47vP/iRYGV0NmdzSM1MoaK4Mmq7EKImczQ1M4Vbnr2WF+94AzNs4dgO3lQvA47P468T7se2HEoLyshsnY4rTh36pqKEXKE4CPi97rj1v9P8+/zXu0vj+3illAzu1TEynsdFdpqfPWXRYtO/a1vevu+yJMw6OXz81ATe/NMHhAMhpIwsHiYS5VEfHp8b23L45q3vk+5ScXlcXPPIJYy9d1xUnXMrbDNgZB62bTPtg5nM+GwOfYb3wp/mJy0rhRFjhnHM2Ueh6zq6rtOqY05S57YX5SNXKA4Cx/XtGnPR0mXonD+8X837IzrGr2Z49rC+dG0biZoQQnDnBSOjGlh4XQZ3XDAyKXNOBmVF5ZG6KlWhmnh5y7TRdA1vatMWYF1eF940L5ZpJV3EvX4PF955Nl+9PjUqlNHtc3HTE1fhS/Pxp9H/4OmbXmHuVwtZ9O1yFkxZgjfVy/DzhqA3svdqU1BCrlAcBLxug+duHUOaz0OK102K143HpXPPxaPo2SG35rg7xkSLsyYEl550JA9dUbdC39nD+vD3686mV4dcUr1uBnVvx/O3jamx2g8mjuMw7aOZvHjHGzEzNG3TJhwwGXnhseiuxsmSGTQp3d0y9WuCVSE+enICG5duqfOLQTc0xtx+Duf//kwWTFnKkmkr64QdBitDfPX6t1Fp/y2FyuxUKA4iYdNi7uqtBE2Lob07kR6jP+ayTbt4/vMZrN2+h/Y56fzu3OGM6N/tIMy2aSz6fhl/OOtvCVnLLq8LK2w1y81yoMhsncHvnryG5TNXMeGlb6L2u70ubnriGs7/ffQaSVOJl9nZLCEXQvwLOA8IA+uBa6WUJQ2dp4RcofhlUF5cwa/b3ZBQWdf6aK4PvaXw+D01vv798aZ6uPPlmzjl8uS5tuIJeXNdK5OB/lLKgcAa4IFmjqdQKH5GTPvwp2aLONAoEff4D1zBr9q+/v0JVoR44toX+OslT1FSED9sMhk0S8illN9IKff+lWYBB98Zp1AoDhlKCw5s7XXdpZOaefDDK/dimTY/fj6HO0Y8WJM41BIkc7HzOuCrJI6nUCgOcwaO6hu3/Gx2uyxcnroLuR6/m679O9XpyanpGiLB/qG2ZVO4o6jpE66HpnbzsU2
b4vwS5n61KMkz2keDQi6EmCKEWBbjNbrWMX8ELOA/9YxzoxBinhBiXkFBQXJmr1AoDmn6H5/HoBNjN1Ao3lVSI9JCE7Tv0YYb/nkVuqFj1sr2dByn3t6XdWhBN7rdQCXL+jCDJltWbkvibOrSoJBLKU+VUvaP8fovgBDiGuBc4ApZz8qplPJVKeUQKeWQVq0a7vStUCgOf4QQPPbVg/Qd3jtqn5SSUFUY6Ug0TaN1l1Z89MSE6AqHkpjd5lOzUlpo1snH5XXRpV+nFhu/Wa4VIcSZwH3A+VLK5NVkVCgUPxs0XWPdwo31HmNbNkunr6R4V3HC4zq2gzfl4Fdx9Kf76NynAzESdYFIzHlu+2yGnDGoxebQXB/580AaMFkIsUgI8XIS5qRQKH4G2LbNpuVbyd9SkFCDByllVAp8fVSVRbopacbBzWtMzUzhwffv4o/v343L68Jw6zW1hjVD48RLRvDvGY+2aIZns2qtSCl7JmsihwvS2gihbwEdvGcg9HYHe0oKxSHHTxPm8cT1L2IGTWzLxuv3EKiov5CVlBK3z0U4kLiYh6pCaHpiHXlait1b9nD7iAfpP+KISHcjKenSpyO3PHsdR5084IDM4Ref2SmdYmT54xCcFNngOR2Rfj9Cy4461ql4ESpeAhwij1wB6Q+h+X+d2LXMNWBvBqM3wohfV1mhOJzZvGIrtxxzP6GqffW590ad6IYeP65cgOEysEyrzqKl0AQC6tRpPxQRQtT48nVDJ7dDNm+ueRbDlbzahC2VEHRYI6WFLLwUAhNAVkVewS+Qhb9GyrpWgTRXQ8XLQAgwiSSzhqDsr0h7d/3XcSpwCq9AFl6ELLkXuedMnPxhOMV3IsMLW+r2FIqDwut/eLeOiEMkocfjdXPqVScw/PwhsdscSbDCFp3zOiBExMoeOKov72x6kTF3nJNwCOLBorZRbFs2ZUXl/DT+wBisv2ghJ/Q9OLuJRE7uxQKnsNp9sg8Z/IqIgO+Piax8HRmeh6x6FxmaiZR160LLskfAXAwEgUrABlkMoS+RRdfgVH2QzLtSKA4aW1ZtZ9YX82Pu0wyNY88ZzF8+vy9mCd+9FGwrxON3Y7gNBpzQh9Ydcxl0Yr8DvrDp8XvIbUbZ2WBFkC2rtidxRvH5ZQu5tRZkIHq7rARrzf4biR2k6kDVG8iia5BljyNLbkHuOQfpRJISpLQg+BURCz4WQSj/OzLWPBSKw4xPnv4iZqggQDhocsQxkWU1f5ov7hiB8iDByhBm0OTjJ79g7qRFLPtxJcGKxJoaNwXDrdN7aA90Q0c3NPqPzOPVxU9w0qUjcLmb5hqREjJbpTd8YBL4ZTeWMLqB8EWEuzYiBfT9qssZfYj4xuNhRl4SsDcjS/+MyHqOiLXf0Iq9DuYycA9t5A0oFIcWW1dtj5uUM+yco8ltH1l76nlUNxZ/v7zB8cKBEI9f+SyBymDcB0Qy+NfUh+k/Io9QdQEsb3Vzj0vuHc33H/xIye7SOtUbNUPD5TIwwxb+dC8VxbGjr7et3dlic67NL9si95wMIh2oHRakgUgF7+l1j616vREDWxCaipQ2QnjBOKL+w6UNInkdtRWKg0W/EUdEpd0DIOCyB8bUvL30/jEJFbeSEipKKjEbEZbYFPKqfyl4fJ4aEQfIyE3n+TmPR5UZcCwHR0ouvPMcqsriR+NsXalcKwkhg9/hFJyFs6svTsFJOFX/TfhcIdyInA/BM4qImOvgPgGR8xFC7PtHJmUYzMY2c3XYa8GLjEcjln/Mj1sDvQMYvSLXciqQdkGLWh8KRUsx5vaz8aZ4o5JjhBD878mPsHzmal66+00eufBf+xZEqwPAYrnNDbdxQL4LC6YsidompWTBlCX86zfPY8foh2oGTT56cgJOnJ6qhtug74gGjLgkcViHH8rQ98ji24ksIu7FC+kPJhwSWDOWjNRRECI6aF9KC5k/iNiLnbEQ4BqKlvNO9Tx/QhbfSMTFsrdegw7CA1obRPb/gfA
jS+6D8I+R8/XWiPTHEJ5hjboPheJgM2/yYh67/BnKCsuj9qVk+DHDFuFA9JqR7tIjzeqlxLYijYtbd8phywGwat0+Ny/O+wdd+kQKuEopeezKZ/hp/Lw6nX8agz/dx7j1L5Cek5a0ef4sww9l2d+pK+JE3lc83eBTXDplyNBspLUBiAh4LBGP7DPAPYzYMVMa4AKqF2+ED0RGxAoHpDSRJbcRCVusXXRHh5SbELlfI/QOyKLrq0W8OrTR3oYsuRFpba73PhSKQ4Wq8gD3nPwwD//qnzFFHKCytCqmiEOkSqDL42LMHedw9g2n8OB7d9KmW+uWnHIN4UCYNx96r+b9gilL+Gn8/CaLuG7ovDD38aSKeH0ctoud0lwO9qbYO51iIsIZ3TYLwKl4HipeAeEGaSJdRyCyXomZBBQ5/lUIz6XuKo4GRj/IfAqhtYLgRKS5DIxeCN9ohFb9BzQXE3uRNAzh2YjUm5HmCrDXE2XxOIOBpwAAIABJREFUSxNZ9TYi/aF4H4NCccjw3K2vseKn1c1qgBysDJGWncqN/7wKIQSPXPxkEmdYPzM+ncOY7Gs47lfHIG2HYGX9majx8Pjd/O2LP9CxV/skzzA+h6+Qlz0Wf6dIR0oDQt9FBFLvAZ4TEEJHBr+BirFACGT109Zcjiy+A5EzLvo61laoeC5yfB1ciIy/7svQ9F+M4OJYkyF+bc1qC9/eQd0F171YYNVfbEihOBSwTItpH8xMqKZKfUgpefdvn7Bl5TYC5cEWX+Tcn4qSKqa+8wNurwshiOr+o+kCIURMnzlAek4a4zY8jz/NfwBmu4/DVsjrXXz0XwmF/9/eWYfZVV19+F3n+h3LSBIkgkOCJSVJ8UAIUgheXEKhEKAUwoe2SJFSoEhoPzQttEUKhSJBPhwKBRokKe6u8ZmMXzvr+2PfkTv3nPGMZb/PM0+4R/ZZZ+6wzj5rr/Vbe6HukqyzDoM4xrlTR36sOw2pN9HMYiQwMndX4nk/A9DGp5GQt9ZyM6EtMaGXNkgMif00e8x4UK/XzTBoBrf6d0hkJwhv024hhcXSX6SSaaMb3gsk6pM8e+e/e2Ws7pBJZ8hkAjiBQJ4Gubqw9y924/Fbn83ThAnHwlz68Dl97sRhMMfInVKfHUFIfw6Zb7L54WmgHrQWqMJ3wVKC4Hr01ZMQvtkmXg667ekSREpvAIlnM1eCQMykPkZ/Yo4JrAWxGTTH2ZvHT0Lqdaj/K1p5kik20lXXLspi6S6xgihjxg2dTo/JhiQb/GhdAm2UFVWVJ257jgNOm8HoTdYiGA5SUlHMXrOm8/evbmL8Nn2TpdKWwTsjLzgean4PtK6IjEL8MKi/i46LcNoSMAVCrdD012hyAfkLquZ4ie2Zt1VVIfWWCeFIFInNQMKTYfiLpsLTrYLINia3XBtM8REgxZehwU2h/g7QGnMctLqPBki+Aomnmh8AFstA4vS5J3L29EtMUc0AF7gCPEMnrVl/y7Es+WoplYtzJ3iJ+iSvzHud297/wyq2sPMM2hm5xA+HgplANOsMwxCbgRSdSdf6PQkQg+LfINIyw9b0p+jyfaHxsTbjhYEIFJ2LBNfJGUlV0erfoJUzTQFR3c3osv1w6+5GnGIkfgjED0Zrb0YXT0SXTMJdtj+a+gCRAE7BUTjDn0JKrsjO3tug9WjDI124N4ul7xj34w259oWLV9n4ZWsOY9RGa+IEHALBALGiKIeeu593Mlkn6MhLvPTAa3lOvInvP1vUvYuuIgbtjFxEkKL/QQtmQeY7CKyBOEbXQCM7Q+JZctP92hI0ZfjBDZCCY5FwbvcOrbnKqCG2/bolBuWP4gTbxNIBUm9Awzxa3hKy5fk1v0Oju4JTjq44yoR+mmba6ffQFUdAxVNIoKLFNl86DudYLP1Foj5BvChG3cr8knURwQk6OI7gumq0u7tAOBpm0ZdLmgtwGmoy3H/dYwSDAdJdHAugpLyI6uW1vqnKjfUJCksLqK2sy9s3cuz
Aalc5aGfkTYhTgIQ2anbigEnXc4abuLQnQQiui1Q8ilP6h2Ynrpow6oWJVyH5Bp7PbK1HHO/SYm14HM8wjAQg8SKkFkLmW/LCPppCG+5t+RyejOdXIzEk7pUZY7EMDEaMqchpnNyMwIZbrccxlxzK7x4/r8tNkp2gQ+WiKtLJXIedSaYJdkPUKhwNMfPSQ7zlBJpQ5aAz9iYSz1VdjMTD/OzSQ7t8zVXJoHPkqinUrfF9iqoqJP9jHDlFEJoE8VkgYzApfg4Et0TK7sjJANHG59ElW6NVp6BVJ2YXR72Q5rh2/q4wnu95illMzXztM2YC0p+1DCNhpPSm7AJpASYfPgLh7dGaq3AXTzQhmcRLPuNZLP3DiDHDmTBtM0KR3DfHSCzM6XNnsc/Ju/OHE+d2qSN9JB7mR9M3Nw0n2uC6SsnwYt9+mcFwkHAs/y3WVWXKT37Eefec7n2iwJhxa3PYrw7gxGuOpnSk0UKqGFXG7FtmMfXgbTttf18waBy5ahK3+lJ08Vbokh+jS3dCG5/LP676ArT6N5B+B3SxSVNsfARYiolvB0w4o+aa5oeBZhajVaeZLBetzWa7eD0oIhDbL0eHpfm6mcWm5N7zV+pCZCejoKheKVqxbJpiCxKejAx/CSm+GCk+B4rOhcS/If1BVmb3PbTyZLTRLz3SYul7ViyqZM31RhKKBhERAqEAI8ZUcOF9Z7LBhHV56PrH+f6zxb7nt02vjcTD7DZzJ97594eeudvhaIht95nsOSuPFkSYdfXReE+ulIf+93G23Wcyt314HeVr5WbBCcL3ny7m3Zc+ZMas3bj3hz/zROoe7v76FqYfsWPnfhl9SI8cuYhcKiJvZxsvPyUiq6yUSVdeCPX3YUIXaXB/QKtmo8mFLcekvzYx6hxt7wS432e3NWDSDxuh8dHm5hFmAdHLwQbNjxQCIXBGQPoL3Jo/opnlzUe5dXeiS6dD3V9p+aMJZkM7USi51ix2hjbJhk1av6oFwClEYgfkXV2cQiS2DxI/Aur/gpccgdZc2dGvzmLpEyoXVzFrwlk8NvcZ6lc2oKo4jkMoGuKCfa5g76Ijuf9af5EpJyBsvfdWjBk/il1nTuWGN65k3srbqV5RR7Leu6w/VhTjiPMPZIcDt87bV75WKcNHV3i2WksnM3z25pcAjN5obea8eGnOW4SqUl/TwHl7/Y6GWuNPVmXz5J7S08XOq1T1AgARORW4EDixx1a1Qd2V2eyRttWVjWjtjUjZn83H1JsmHt2Z+Js2oA33I9FdQFfi3fjBgcJTwCmG6ivMA8H9xqQX1t8FFQ+YQp6aKz1sAwpOQ+L7I86w5k1SehNaeyM03GuKlSLTkKIzEafQ31RV/7BMxmqxWAYG913zMLVVdTk9OVOJFN99bDS5G+sSeS3gWiMinHvHqTlNJ97613u8eO8rnqHUQCjAr+48lfde/pAX//mfvP1Lv1lOKBIkncyP2YfCQTbaar3mz0/f8QLqVdAk8J9HFjDtsO197R4I9MiRq2p1q48FdHkJo5NkFpnCHPVwlq31VpwKupSL1KR4GNkBrb8jm6XSGgfC02DlSeTOhpOgabRmDgTWwzs7JoQ4sRwnDtn4d9FsKJrdaTNFBHXKTQu6tjgV+dssln7gjSff8m+snKU9MbvNdxif48STjUku3O9K33Mcx+EvF9zDF29/lbcICpBOZfjvs+8wafcJvPHUW81iXSIQiobY75ct9Ri1VXWemS9u2vXMwBlo9Dj9UEQuA44GVgI7t3PcCcAJAGPGjOnaRQKjQb3+QBwIbdHyMfxjzPPEI20wf1Akvp/5z9BkCG9rCm6anLnEILo3EqxAM145o66JWcdH4x2WcT3L7lXTkPqviZWHJ3rG2z2Jz4LaK8l9aATNG4PFMgCoGFXGF+/4Lei3EAzlpguKCCXDixg+poLjN/8fRowdziFn78uX731DQ423cJUTcFhn09F8+c7XJH30WNyMS21lLWfcehK3nfd3nr/
7ZRINSbbYcTy7Hj2Va46/mR8+W8ym227MFlPHEy2I5KkdptMZtpjagQzHAKBDPXIReQZYw2PXeao6r9VxvwKiqvqbji7aHT1yt+YPUHcbOZWcEkfK/4kEN0DVRasvgob7Mc7OxSwB+Ok/OMjI9xExywSqGbTmGmj4h5n5BzeE4ssgOBaWTMazUjQwGkrmwIojyA+tRJCKx5FgS9myJheglSfljCXD5iCRqR3ff9WvofGhNnaEoOQPOLHpHZ5vsaxq/vvcO1ywz5Uk6tuXfo3EI1z4zzP46LVPqV5ew7qbjeFP59xJY11js4MPx0KgQrLROxSz1gYjicQi7T44IgURNt9+HG+/8B7iCKFwiJ9feQSFwwq56mfXN4d5nIBDJB5m40nr8+7LH+W8VQSCDhtMXJdrX7yUcKT/azj89Mg7nJGrame9xN+Bx4AOHXl3kMJTUWcNqJ9rZGpDWyJFZyNB06JJ6+/MFuO0fjoH8HfkwWYnDqAN87Kl/dkHRfp9WHFINv3PK3QShcA6UDnT45ohKDwl14m7tWjlz/P6g2rlL2H4M0jAX3dZ3bps5k3bh0kK6m8C68gtA4CJ0zbnxGtnMvfM20kn055KiKFIiI0mrceUPSYyZY+JAFx93I3U1zTkLIK2FaRqjeMIh517AA/f+ITvMeFYiKJhBbz1/LvNdiTqk9w4+6+EIsGcWL2bcWmsTRCOhvPCOJm0y1fvf8u/7nmZ3Wbu1KnfQ3/Q06yVDVt93Af4sGfmtHstnIJDcIY/izNyIU7ZX5DQuJYD6v9Kru4KGAeb7SOVQxCiuzZ/chMvQ/V5bc5XjNTtCvLDNAGQIpOvrnW0PCwCEN4JKb8Pp/CE3FMan/IRdnA7LrvXKny/Ks+wj8XSP8w4YVcuevAsxMlfqxKB3Y7Zicse+3XO9v8++45vJosXrqtcf+qtbL7j+LxiHTBph6fecALLf6jMe5gkG5LUr2zrJ0zs/t2XPyQSyw91NtYlePnB1zptX3/Q0zzyK0TkXRF5G9gNOK0XbOoerndHEuN0y1qqPCUOznCkyPwxaeOzUHki7Zfz510MdCn5M+QMZD5FQhujiZdxV8zEXbob7soL0MzXHseDWTitbP9yzsisCmNbBEITu2C3xbLq+b+5z3hWd0YLovzk2GnECnIbvjQV23SFRH2Sp29/gQk7b0q0IEIwHCBWGKWotIA/vHwZ6WTSV7jLL5xcXFbkubQmjlBc7p9VNhDoadbKgb1lSI8Jb2OUAduGUgJrQvmjSOIJNP0ZEtwYjUxFG+ahjY9D6l08UwfbpZ11hcxi3Pp7ofoymmf4DV+DeHcrQuJIeId2ryYSRIvOgurftYyJmJL9wv57dlosXqxcVuP58imOUNNGt0RV2WLqpnyy4HPcVo43FAlSOnIY1ctrfNut1ayoZerB23LE+T/l3Zc+pHzNYWy73xSi8Qh/vfAeX/uKSgvy7ADY/dideeSmp6ivyZ2xh6MhZpy4W3u33O8MmspOAE1/a/psZvLT8KTozJbCHcDEqmNI8W9xnBgS2x+n6EyI7g4rjoaaq4zIladEbQ8IbJDNK2/9x+CabBgpaqP/EgTNoCsvxK37m8lo8cGJH4IMu85UgDojILKbWegNbeh7jsXSH2y3/xQi8fwQRTqRZtzWuX+vN87+Cw/f+ESOExcRJuy8GXPfvobTb5nlOVYTD/zhMcb9eEMOOmNvph2+A9FsqKWtjnhrhq0xzHP7/EcXcOVTF1C+dhmxoijx4hjhWJhZVx/NxpM3aPee+5sOs1ZWBV3NWlG33pTQJ+dn+2wmIHYgUvyb3AXLzGK0/nZILoDgekbVMJj7BWjDA2j1JR45471BFEougZW/IT9en6X4UhMvT76GCbVkWs6N7IRT+sduXVndSrT6Ckg8YWLx0elI0XlIoLxb41ks3aWxPsEvt/4VP3y+JCeDJRAKMHbcKC564CzWXG8kP3y+mGPHnZaXvx2KhDj3jl+y40+3AeD+OY9w8xm
3e14rXhxjXlX+vvmPLuCyw+bkzeZjhVHSqbRnX1ER4fHE3YgjvP+fj6mvbmDT7TamoLjvO/744Ze1Mihm5Fp9sXHiJEzTBZLQcA+6dBcT484igZE4RWfhlN+DU/K7PCcORhyrZ07cKxolENoKKbs1m0roV70miDYi0T0wv/rWf8CNkHgCt/5un3P9Uc2gyw8zmS3aYMZqfAJdcRDq2ULOYll1ROMR/nf+5Rx6zn44rRY9M6kMX7z7NWdOu4hMJsMrD7/uWYSTSqR4Zd7rzZ/3Pnl3AkHv8ng/Odkpe05kp0O2IxILEwwFiBZEiBZEuOjBs4kWeIc5g+EgTsDBcRw2224Tpvxk4oBy4u0x4B25atKnPF/B/Q6t+h/cur90fkDxfq3qHHEIbpINjwTMZylGyh/EKb/bCF05pUbn3BMHSKPJV/GdsVdfhFtzPW7dnbhLpuIu2hx3+aFo8i1/sxIvZrNXWs8y0iZNs/GZbtynxdJ9XNfl8Vuf5f45j+SETADUVWoq63jr+ff4/G1/eYnWMexwJMwR5x9IONpGUTEeZubFh+Rd++7LH+CAip/x5F+fp6Akzs6Hbc9Jc37G37++mR/tsjk/+fkueWOFIiGmH7njoO2JO/AbS2gC/1xwgAaovQ6NH4b4LSi2JpWvydBpJAJl/0DSb5rwjTMCorsjToHpDlR/t+kM5JsSGITIdHDrMCmRXmEthbobMLH+bPw+tRBdcTSU35Obctl0RvI1TDVr2x11aPpjhPyWdBbLquIPJ/2JZ+/6t39hkCrLf6hk2Aj/bJX1J4zN+XzE+QfiOMK9Vz9Moj5JcUURx195JNvtNyXnuDsuuY/7rn6k+dorFlXx4n3/YdrhO1BUajJPZl58CN988B0Lnn6LYDhIJpVh3NYbcdJ1x3T/pvuZAR8jV1V02W4diEPFkPK7O+xor6mP0OX74Jt10qwzHjDl/okXs9uDgCCltyJh73Q/t/r30HBXG+XFNhSeilN4CppZhC6d6m+Ht3EQmYZTelPuPamiS7bPpkO2xUFKfo/E9unCdSyW7rPs+xUcvcEppHzK5sFkgfzpnWtZ8vUyzp9xOYmG3PBfKBzkhtevYN3Nx+ad67ouifoE0YJo3uw5lUxxQMWxNNbmJzBsMmUD/nf+5Tnbvv34e7587xtGbbQW62w6uiu32W90u7KzvxERKL4ErTwR33AEDSa1sANHTuYL/J2nQPHvkUAJhCYiEkLTXxj9FSkxKoVOS7xMkwvRmmsh/TEE1jb/4v/HCzFIf4u7ZAcT8qAQ8Mt990Ih8W9UM4i0xAu19hYfJw7gmiwdi6WP+PytrwhHQr6OXBxh8x3Gs9b6a7DmeiOZOH0L3nzuneZFyWhBhKkHb+vpxMEIZcUKPfrZYtIRXZ+GFV49NkdttBajNlplytt9yoB35AAS2QYtvweWHwVUex9Ucw0aneEb41K3Ek209xYQQaK75GTBSHBdCObGu00I5Y5simH2jzVdRccks2X2qZbPXSaJNsxD4gdkbXGh/k/tHB9BG58wlaPpTyC4PlL4CyS8VTeubbF0zMh1hnt28mlCXeWdl97nyb89z+4zd+aiB87khX+8wtN3vEAgGGD3n01j+/2n+J7fHiUVxYQiIU8RrbHjB8eMu7sM+NBKE27d36DmsnaOCCEjXjKLjW3Q1NvoipnZsIdPvL3wApzCo9q1Qd1qdMWR2dl350uK/RGgBCMc6WDeFjoaN46MfN28Mbh16JJJ+FeleomGRZHS65HIwOtyYhkanL7jBXz02qeeWitNFJcXcu+iP3fYrOHzt7/inisf4t2XPqChpoFYUYzpR+7IoefunyN528Q/r32Ev134DxpbxecjsTBXPHk+m22fv7402PALrQwKR66q6OKtAL8+mgBhZOQCRHK1F0yMfTpkvmnn3AKcNf7boR1u1VnQ+H+0H0LxI4Cnww2MRcrnGeXGxnl0HDcPQdE5iARQKYPqS0A9dMrbNWUszvCnu3aOxdJ
JaqvquPq4G3n1sYW++uShaIg7PruB8jXzJ15NvP7km1x84FV5zSiC4SBrb7gmp8+dxfxH3iAQDLDzodsxdvxoVJUn/vIcd/32flb8UMXYTUcx66qjmbDzZr16j/3FoHbkbvXl2VZnfoQgtjdOyRV5ezT9DbpsL9qt4JRinJHt22MeJpvRLSceGA+Zj8nXWhGI7IwUX4Qu3ZXOSQUI5qEQMIuw6mZtal/QPxcHGfk2uEsh/QUE1slRarRYeoN/P/Aqlxx0tefcxAk6PLzydiKxfNErMP+/HbnuySz5epnn/kAogIiQSWWatVOKygo49rLD2euEXQdtGmFHDNrFTs0sgfo72j8osj1S7KOeK0Han+UGITLN+9puDaQ/hcAIcNaia8Jarch85mNDENLfoEv3oPMPCMU47XSbjknZdEYZCbqsA1ujaNX/QOKFbKVsEo1MRYZd2/lGFxZLB6z4oZJgKOg5Kx+zydq+Thxg5bJqKhf7rz1lPAqJalbUcfPpf2PZ9ys45uJDu2f0IGXAFwSRepN248ahbXBKb0HExMtUFbfubtylu+IunoxWXwqBtfBuARcEp8zotLRCVU2D5SXbopXHoUv3QCuPNZ2EuvUrS+DtWFOQ+QSTA97TmHv2QaGLfa7VRBSCG2VTK5sqZROQeMFk4VgsvcT6E9YhGMqPgQeCAQ6cvRfQUjx08qSzOW7T2dxxyX3U1zT4Vl92RKIxyT+vfoSGul7WUBrgDHxH7lTQ7oy6Taszrfk91F5h8s51JSSehcxik0JInBaHnv03OgOcNmW+jY+Zwh4SoLXm3+TrprGzlJg2cIOSMMQPhsyn5IeaEqY7ksXSS2y67casP2EdQq2qKAOhABWjyph2uFH8vPrYG7nxtL/wycIv+PqD77jnigc5ddvzcAIO2+//45zO9p3FCQZY9MWSXruPwcDAd+ShiUY10JMYEt6y+ZO6K6H+zjZFOQokIbIHRLbBxJebtqeh4W7TXaj1GXW3ehT2JCH5Kgy7HgrPNCqEtL/iPnAIQMkcZMSrOMXn+2vNaEO7zXEtlq4gIlzx5AXs/8s9GTaihKLSQnY7eirXv3o54WiYbz/+nhfu+09OhkmyMcXir5bywr2vcPrcWWy503hCkRBOwEy8nIAwfEw5oYh/VDidTFOxdtkqv7+BxICPkYsIWvY3WH4Qef0qiy/Mielq6hO8n03ZhsfuN+QtCmoD1P0ZClqlHnp1qwcgA5U/g/ghUHR+tldnN+PmfUoAieyAONnK1dBESC3IPyw0YcguEln6h2g8wvFXHsnxVx6Zt++9Vz7KEdVqorG2kYXPvsOuR03l8sfPZ9GXS1jy9TKKywuJF8cZPqqcB//4GH865668+HskFmanw7ZrLsdfXRj4M3LACW2KjHgFYkdBYEMIT0fKbseJt/S1ULcSqs/Fu/pTIDAW1Cdu5rbp0BPZDv/ZdhIa/om4iwdgiCVM/lcaNpkxTnHzFim+MCv81fTaGjINLoov7CM7LRYoW7MUx8l3QcFwkJFjKpo/r7HOCLbYcTzrbDqGEaMrEBEOOG0Gt396PXuftBtFZYXNDZT3OmE6s286IW/Moc6gSD/sDG7VOdD4KN7ZH1Gk/G6jaZ7x6LodmoRT/vfmj5r5Hl22b7Yfp09aX3h7yCzJphX2Nw6EfgTxIyD1iYnvSwg0BeHJyLA/Ik7uDEUz36F1f4HU+xAaj8SPsSmIlj4lk8lw5Lons/z7ypy2bNGCCLe+N4cRY7wlatuiqmaBNB7xlbsdKgzqPPLO4C7e0l+watgtONGd0cSLaOUptCz0OUAEKb8DCW2RO176S6iaDen3vccMbQWRHaD2RrpXbt9Twia1UmJIyeVIZKfmPSZt8jMIjEACQ0NLwjI0+eHzxVx0wFV8+8kPOI4QL4pxzh2n8qNdNu9v0wYkq9SRi8iZwFXAcFX1zuBvxSpx5Iu2wLvoJ4CMfK9ZQ0WTb6K1N0DmcwiORwpPQUIb55yhbh26/ED
IfO8zZgyKzobQBFhxAF1TMewMQaN7nn7Xe3doCpRcYUJFqfehfq6J64cmI0WnG40Yi2UQsejLJSQakozeeC3PcIvFsMoKgkRkNLAr4BGz6EOiu0Lj4+SGQhwI75grhBWegJS1JzQFWn8PZL7Fe6Ydg9DGSPwgtOr03rDcgwC4yzBfj0doJ/UmrDgcontCw90tbyKJp9DkS2jxFZB40oR+Ijsj8YPzQisWy0BijXVG9LcJg5reePTNAc6m96elXUKKfg2BNTC54pjFPGc4UnJxznGaWW5CDz6ouxJq/4i3Ew9C4c+Rsr+bbJnkq6ya206AW4V3ERPGNncR1N/WJpzkmrz3laea9YLUq6bpxvL92r1ni8UyuOnRjFxE9gG+U9W3OkpbE5ETgBMAxowZ05PLeo8fKIeKJ6DxGTT9CRJcD6K7NYtoafJNdOU5kPkOUDT8Y9N0IVCRM46u/A2+uiwSQiK7IpL9tUkBqI+sbo9pxN+Rd0TrKtFGyCxG6+9ECk/qBbssFstAo8MZuYg8IyLvevzsC5wHdCpnTVXnquokVZ00fHjnVqO7ikgYorsiwbFowzy06kw08bLpyFN5TLaxRBJIQXI+uuKonAIY1TQknsZ3lu2sCcFW8fTgqpbF7K3ZfgIardqhxTJU6XBGrqrTvbaLyObAukDTbHwUsFBEpqiqX9PKVYpqBl1xHKTeoimfXJMvQmAcaNtYcxrcH0xhTLhp7cCl3QKfzFJ08RZoeGuk+LwBmEfeDh467RaLZWjQ7dCKqr4DNK9QiMiXwKTOZK2sMhLPQfptcoqCtAHSfsJbkg21GEcuEkZDm0Hqbe9jm1qzJV9El/8XAuv3qvmrjihSMLO/jbBYLKuIIZXno4nnfHREArRUMbY+IQOhXMF5Kb4sq+3SJLHZ9G/rMIea1L/0W920tL3Yd4D2v5aO4uYRoK1ynOQLg1ksliFDrzlyVV2nX2fjkA0feL1khM3CZM6+KESmIkEzq9bMMrT+Lki+DKVzofAkk94X3RMo8BgzSbekZ511YdhcCG5I/q+/qSqtK+MGQEqBEDhrQOxQ8p19g5HjVRM2UrcOd+XFuIsn4i7aHLfyZDTzfdfvxWKxDAiG1IxcYj/F05FLECrmQeynRhbXGQWFv0SGzQHAbXgCXbozWn2l0eRecQxoI86w65CC4+hdYawUTnQqUv4olN4Bkd1Ayo08bmxfOp5xt10ADUDhGThrvIcz4kXQpXjqzWgDpBaaTkeVx0LDfVkJggQknkOXH4ibXmIeaFYB0WIZVAx49cOuIMH10OLfQvUFRjscBYkipXORwJpQcglwSc456lbDyrPJa7NW9zc0sgsS3hINbZFtcNEbpfhGC0ZEkMhkiEzO2esm3zRVp50madrgFRycHcCvr6lknfnbkPqQ3HtxjXDYsqkoAfOwK/nCbg8hAAAR+klEQVQtEtm+C3ZYLJb+YkjNyAGc+D4w4iWIHwPBLcDZAG18Djf9CZp4AU29nzvjTLyAd5pfI9rwMKouyDC613DZg8BGuMsPwl36E9ya68yDpBWmW1EXu6Poypbzo3sCHtk0mjbCWmk/ka+mjJ0kuN+jlSdnZYEtFstAZ0jNyAE08TJUnkxOeCH9GtTdgBIHUQiMhtLbkMAI1F2Od9NjNY0Wlu4Ibi92G0m9THMMvO5PaONjUD4PcUxFqkSnw7A5aE22y1GHOBDetuVjbIYJm6Q+wLSQazqsAq06w/To7FQMPonW34aUXN65+7JYLP3GkJqRm8KfWeTHiJtm3PXZdMTP0Kpsi7h0O86y8eHedeJArhNNmarLhodyjpDoLjjDn0ZGzIfwdMwiaMC8YRCh5WsLgRQihbNbzpUwUnY7FJ2be6z7LSSfh8RTmLBKR1+9C+muhHgsFkt/MaRm5NrwAJ2LY2cg9YHJ1Mh8285xnY2JOxDcFNIf4Ktf7kujEbui3uiHR3ZBQhsBIE4ZUnZjcyhIRExoqO7P5gEUngyxg9DGJ9HGeaY
3KVGI751touGSP/tueqgJ5uvPtNneRBjCU7p4LxaLpT8YUo6cTFdmzw641RCeCMn5eIdXOjvUcJyK+9HUR+jyg/DVavEjOR9Nvg64UHsTGj8Kp/is5t2tdWwkNB4ZZrrda2YRuvwAcFeQ47DrbsM46vbi+iEoOguJH4FW/xYaHqDlTcYxHYPiR3ftPiwWS78wpEIrEtmWzt9SCoIbIPFDs23PWp8XovONlaMQN5otEtoYCs+g62JXrrGHDNAI9bfiLpqAu2xftPE537O05ppsm7q2s+40HaZMioNIBJEAUnwBFJ1j2uFJGUT3QsofQAK2iMhiGQwMKUdOZFrny+alBJGgCV9UPACRPUAKwRkBBcfTviMPZqs/w6ZisvY6dPE43BXHQmi82d4jXKAe0h+gVbNx6x/2PizxL/wdtku796BqNNwBEQen4HCc4U/jjJyPM+wa2/bNYhlEDClHLhKEotl4luO3JbRJy3mBtXFKr8MZuRBnxEs4RbOh6Ay80wDDJuul5FoIjDLCW6QBF5KvQNUpEN6KnjvzJhqh9ve5Ko2p93Arf2m0x30JQXiHrLBXky0ORq89CiXXIE5ZL9losVj6kyEVI9fMYqg6g45zviNAAe7S3U3zicKfN/e8VFW04V6ouxXvWHcS3MUmppxXuOOac8I7ghSbLj29IUXrLjfZNhI3sryVJ2Fi+u2MLSGk5FJwiiH1MUoQyXwKBIw0gVPUc7ssFsuAYGg58oZHaD9H2gGnHNw6SD4HpCHzBVr5Dlp0Ok7BMWj936Hm93iWuTdfqB4Sj/sZYXTPJYZ5M+hs5ksU/4YWMRDzdqDVF/kfZw4GGYmUzkECI82m8JbZqP2mnbTFYrEMJoZUaMUs/LXnOIMQnoqZsbdOE2yA2jm4bn22zVs7TrxDwhDYGBof6cCWJhwIb2cqOiN706K22EQM4scCoNoImW/8h4rsC2VPIyNeQMJbddN+i8Uy2BhSM3KJbGcUDPGSsgUIQPLfeIdeApB8PafcvXukoPaSjg9rJoaUXI4E1oBYLbpSIfEE5kETMvH4upvRuushNMls80uVTD4FoY2Q8PE9vAeLxTKYGFoz8vA2EJmC7/NJHPBb4NMGqPoF3ZKmzR2oa4dHtkUCa+DW348u+TEkHqXlbSENmY9plsxNvZbd13bW3nTpBqi7xaoXWiyrGUPKkYsIMuwmKDwd31zuwtPJF5XKKiX2irphV4hAYF3c1EdQfTH5bwpeDjlDu1+b1nqMY7FYhjJDypEDiARwCo+H4t8BEZMbnv2R0ltwojtB8a9NowkpyB5TQs9n4t0hAfV/heUH0rVq0CTISO9dzkjThNpisaw2DKkYeWuc+IFodDeT2y0RCG/b7OCc+CFobH8jCuWUoVWnQmqFxyiC96zYwcSqe2MW353zMxAYCenl5Gm7RPbooT0Wi2WwMeRm5K0RpwiJ7o5EdsqbpYqEkdAmSGAERKbjHXcOkP+si0L8OKTiISj6Nb1X+NNFnGI83yIa/o66Xg8li8UyVOmRIxeRi0TkOxF5M/uzZ28Z1pdI/FAIrElLJadgnHjb8vcwxA5Eik5HguvjFByOlN7UEr7ptD5LV/AZM/kS3uGgFDQ+tQrssFgsA5XeCK3MUdWre2GcfkOcQih/EK3/BySehfSX2d6XkBO6kEKk+HxEWpyrRHaAEfMhuRBNfwQ119Bl9UNPwlD+GEI9umImaFUnz3PRzOIuy3ZZLJbBy5AOrXQFcQpwCo81Gty63OeoRLZ3Z5tzJYxEtsYpmAnxozFhmh64UolB0a9wQmOR0DiI/4wufVVS3P1rWyyWQUdvOPJTRORtEblNREr9DhKRE0TkDRF5Y+nSpX6H9T/1t+OfwSLZhg3+OMVnIhWPQuwwOiXeBZjuP+ONomJwQ6TkCpyCI1rt70CSto2NEhrXheMtFstgRzoqHhGRZ4A1PHadB8wHlmHSNy4F1lTVYzu66KRJk/SNN97ourWrGFUXXbyJ/wESR0b
MR6RzzZHdujug9mogYLr/4OBd/h9BKh5DgmO87Up9gC4/mE41v3AqkOEvIWJftiyWoYaILFDVSW23dxgjV9XpnbzAn4BHu2HbgEDVNfreUgxa7XGEQPFlnXbiAE7BUWj8IEh/Ak4FJF9HV15ArjN3ILiurxMHkNA4NDgO0vlhnRz7nLWRstutE7dYVjN6tNgpImuq6g/Zj/sD7/bcpL5HNYNWngip142yYR5BKLkaJ9b1pByRKIQ2N9eJ7g3JhdBwvxlTAKcUGXaj2e+uABzEGeYxkE9ZPgWmK1GgAlJvog33QWxvJLgBqilIPIcmF0JgbSS2N+KUmodWcr7Jow+uD+Gtc9rJWSyWwUVPs1Z+LyITMKGVL4FZPbaoP0g8A8nX8A57OFBwSqecuGoGyPhWVooIUnIRWng8JP8LgRFGCCv9Ee7SvSDzpRkntAUy7GoksHbLycENILWA/ObOGUi9AbXPY7JlBK27GZVywDX6KzQAUbR2DjrsRqj5LWS+A82ABCAwBsruRBy7SGqxDEY6jJGvCgZajNytmg2N/+d/gJQgI/5jOhB5oG4tWn1xdowMhDZHii9FQu3E25vPrUKX7gxa12qrA84IZPhzzdfU9Bfosv3IfdiEzYw6/SWdl94twFSTttZjCUFsH5ySyzs5hsVi6Q/8YuQ2mArZ5svthBY0CZnv/XdXHguNj2Ocowupt9AVh6GZJR1eWlf+to0Tx4yhNZB4scXE4LpI2Z8hsC7mRSpkem6GJtC1vPU68kW1UtDwWBfGsFgsAwnryAGJHYSvNCwAGfCKWwOaeh/SH5GnmaIptP6edq/rZqog4dNYWRMm/NHazvBknOFPmreDkQtxhs3JCn/1Rny7KymOFotlIGEdOSDhiVD4C7yXDCIQ2cU/fpz+Au9fYzLr4Nuh9qZ2dmaaF0nz7HVKkOzip8T2ofN6L05WSqCtvQ5EpnZyDIvFMtCwjjyLUzgLGf4viOwFhLMz3TBEdkTaix0HNzKLhl6kv0DdGv9z2wnXQARCW3Zot4Q2gcLTaL+aNGTuxymDYTeBU0qLJnscnDKk+IIOr2WxWAYmQ1bGtjtIYARSOsf0xkx/Cc5wJFDe/jmhDdHwFJPO1za8kvkSrTwRKb8L1Ua0fh4knzfjxg+D6FRIPuk9cHxmp1MCncLj0NieaMM8qL8H3GUg2Th6wSyTRRMYCZGdEQmjFc+YRtXpj0wVaHQG4sQ7dS2LxTLwsI7cA5P73XHGSfPxpTegyw6AzCdt9qQh9Q5u6gNYeRakv8FklzjG6Rb92n/Q8LaemzUxH62/HdzlJuQTP9yIfjnlSPwIKJgFmW/NAmpwA89MG3EKkIJDO31/FotlYGMdeS8gEkGdIu/1QglB3d2Q/pqW7BLX/HfNZUCc/GbRAo0PQXQboKVQSBsegpo5NKcapt5HG/6Bhn6UzZpxIbAWUnwJEvF+EFgslqGHdeS9RXgypN4lP3slAel38U4R9OtApEACTX2ErjzTVGACphio9fHZzJbM9zQ/RTJfmyrV8vuQ0MY9uSOLxTJIsIudvYTEZ2YXSFs1gpAYxA8Fxy/OLnhO4yUO4WnoisOzmS+p7I+X03c9xkiidX/u8j1YLJbBiXXkvYQEKpCKByG6jxHICqwPhechRechBUcap557hlmAbGoSTdBskxiEdwS3CrRtOX5ncSH9YY/ux2KxDB5saKUXkcBayLAr83dEpqLx46BuLkjYOGgJQ/xoJLYHhCegDQ+D1iCRaRCegtZcQefL7j1wa7t/rsViGVTYGXkf4RSdChXPQGAsoKZBRe016NKpQBqn6BSc4l8hkR8bca3QlrTkerdGMHnuhbT0GPXAXbwqbsNisQxArCPvSxJPQvozTIOIpEkRdFegVafmHaqBNfBcIA1OhLL7kdKbYfgr+HYhks52J7JYLIMd68j7kvp7yXfOCumv0LZVnit/Tf7ipgOhTXHCGyPhKTiBQojOIN+Zh7PbLRbL6oB15H2K3+K
l5CxsamaJKerJw4XE07lnFp8HwQ2zCo4x829wY6ToV71mtcViGdjYxc6+JLY31N5CXu/NQAUERrd8lhDeqYbkhUzEKYbyB01zifTnpgFF6Ee244/FshphZ+R9iMSPNY0gpEnXJApSgJTMyXG84pRCaAvyv54oxA7OH1cECU9G4ocg4a2sE7dYVjPsjLwPEScO5f+ExLNocgE4ayLxfRGnLP/YYdegyw8HXZlVVxQIT0EKftb3hlsslgGNdeR9jEgQorsj0d3bPy6wFgx/FpIvQeYH0z4utGkfWWmxWAYTPXbkIvJL4BTMSt5jqnp2j62yACASsA0fLBZLh/TIkYvIzsC+wBaqmhCREb1jlsVisVg6S08XO08CrlDVBICqdtxt2GKxWCy9Sk8d+UbADiLyqoi8ICKT/Q4UkRNE5A0ReWPp0qU9vKzFYrFYmugwtCIizwBreOw6L3t+KbA1MBm4V0TWU9W8JGhVnQvMBZg0aZJPkrTFYrFYukqHjlxVp/vtE5GTgAeyjvs1EXGBCsBOuS0Wi6WP6GnWykPANOBfIrIREAaWdXTSggULlonIVz289qqkgk7cxxBldb53WL3v3977wGes10bxiIJ0GhEJA7cBEzA9zs5U1ee6PeAAQUTeUNVJ/W1Hf7A63zus3vdv733w3nuPZuSqmgSO7CVbLBaLxdINrNaKxWKxDHKsI/dmbn8b0I+szvcOq/f923sfpPQoRm6xWCyW/sfOyC0Wi2WQYx25xWKxDHKsI+8AETlTRFREKvrblr5CRK4SkQ9F5G0ReVBEhvW3TasaEdlDRD4SkU9F5Nz+tqcvEZHRIvK8iHwgIu+JyGn9bVNfIyIBEfmviDza37Z0B+vI20FERgO7Al/3ty19zNPAZqq6BfAxMKQbgIpIALgB+AkwHjhMRMb3r1V9Sho4Q1XHYeQ2frGa3T/AacAH/W1Ed7GOvH3mAGfj20BzaKKqT6k2d4OeD4zqT3v6gCnAp6r6ebY24h6MPPNqgar+oKoLs/9dg3Foa/evVX2HiIwC9gL+3N+2dBfryH0QkX2A71T1rf62pZ85Fni8v41YxawNfNPq87esRo6sNSKyDjAReLV/LelTrsNM2Nz+NqS7rNat3jpQdvw1sFvfWtR3tHfvqjove8x5mNfuu/rStn7Aq1v1avUWBiAihcD9wGxVre5ve/oCEZkBLFHVBSKyU3/b011Wa0fup+woIpsD6wJvZTvSjwIWisgUVV3UhyauMtpTtQQQkZnADGAXL1niIca3wOhWn0cB3/eTLf2CiIQwTvwuVX2gv+3pQ7YD9hGRPYEoUCwid6rqoJIesQVBnUBEvgQmqepgUEfrMSKyB3AtMFVVh7wksYgEMYu6uwDfAa8Dh6vqe/1qWB8hZrbyN2CFqs7ub3v6i+yM/ExVndHftnQVGyO3eHE9UAQ8LSJvisjN/W3QqiS7sHsK8CRmoe/e1cWJZ9kOOAqYlv2+38zOUC2DBDsjt1gslkGOnZFbLBbLIMc6covFYhnkWEdusVgsgxzryC0Wi2WQYx25xWKxDHKsI7dYLJZBjnXkFovFMsj5f7IoisYK8BuYAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import matplotlib.pyplot as plot\n", + "classes, points = generate_data(1000)\n", + "plot.scatter(x=points[:,0], y=points[:,1], c=classes)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "rskLHEI9qSUg" + }, + "source": [ + "Now let's create the model for our CGAN. DeepChem's GAN class makes this very easy. We just subclass it and implement a few methods. The two most important are:\n", + "\n", + "- `create_generator()` constructs a model implementing the generator. The model takes as input a batch of random noise plus any condition variables (in our case, the one-hot encoded class of each sample). Its output is a synthetic sample that is supposed to resemble the training data.\n", + "\n", + "- `create_discriminator()` constructs a model implementing the discriminator. The model takes as input the samples to evaluate (which might be either real training data or synthetic samples created by the generator) and the condition variables. Its output is a single number for each sample, which will be interpreted as the probability that the sample is real training data.\n", + "\n", + "In this case, we use very simple models. They just concatenate the inputs together and pass them through a few dense layers. Notice that the final layer of the discriminator uses a sigmoid activation. This ensures it produces an output between 0 and 1 that can be interpreted as a probability.\n", + "\n", + "We also need to implement a few methods that define the shapes of the various inputs. We specify that the random noise provided to the generator should consist of ten numbers for each sample; that each data sample consists of two numbers (the X and Y coordinates of a point in 2D); and that the conditional input consists of `n_classes` numbers for each sample (the one-hot encoded class index)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Q5s_qNouqSUk" + }, + "outputs": [], + "source": [ + "from tensorflow.keras.layers import Concatenate, Dense, Input\n", + "\n", + "class ExampleGAN(dc.models.GAN):\n", + "\n", + " def get_noise_input_shape(self):\n", + " return (10,)\n", + "\n", + " def get_data_input_shapes(self):\n", + " return [(2,)]\n", + "\n", + " def get_conditional_input_shapes(self):\n", + " return [(n_classes,)]\n", + "\n", + " def create_generator(self):\n", + " noise_in = Input(shape=(10,))\n", + " conditional_in = Input(shape=(n_classes,))\n", + " gen_in = Concatenate()([noise_in, conditional_in])\n", + " gen_dense1 = Dense(30, activation=tf.nn.relu)(gen_in)\n", + " gen_dense2 = Dense(30, activation=tf.nn.relu)(gen_dense1)\n", + " generator_points = Dense(2)(gen_dense2)\n", + " return tf.keras.Model(inputs=[noise_in, conditional_in], outputs=[generator_points])\n", + "\n", + " def create_discriminator(self):\n", + " data_in = Input(shape=(2,))\n", + " conditional_in = Input(shape=(n_classes,))\n", + " discrim_in = Concatenate()([data_in, conditional_in])\n", + " discrim_dense1 = Dense(30, activation=tf.nn.relu)(discrim_in)\n", + " discrim_dense2 = Dense(30, activation=tf.nn.relu)(discrim_dense1)\n", + " discrim_prob = Dense(1, activation=tf.sigmoid)(discrim_dense2)\n", + " return tf.keras.Model(inputs=[data_in, conditional_in], outputs=[discrim_prob])\n", + "\n", + "gan = ExampleGAN(learning_rate=1e-4)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Lnd0Wk9WqSU_" + }, + "source": [ + "Now to fit the model. We do this by calling `fit_gan()`. The argument is an iterator that produces batches of training data. More specifically, it needs to produce dicts that map all data inputs and conditional inputs to the values to use for them. 
In our case we can easily create as much random data as we need, so we define a generator that calls the `generate_data()` function defined above for each new batch." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "3o85U5VJqSVG", + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ending global_step 999: generator average loss 0.87121, discriminator average loss 1.08472\n", + "Ending global_step 1999: generator average loss 0.968357, discriminator average loss 1.17393\n", + "Ending global_step 2999: generator average loss 0.710444, discriminator average loss 1.37858\n", + "Ending global_step 3999: generator average loss 0.699195, discriminator average loss 1.38131\n", + "Ending global_step 4999: generator average loss 0.694203, discriminator average loss 1.3871\n", + "TIMING: model fitting took 31.352 s\n" + ] + } + ], + "source": [ + "def iterbatches(batches):\n", + " for i in range(batches):\n", + " classes, points = generate_data(gan.batch_size)\n", + " classes = dc.metrics.to_one_hot(classes, n_classes)\n", + " yield {gan.data_inputs[0]: points, gan.conditional_inputs[0]: classes}\n", + "\n", + "gan.fit_gan(iterbatches(5000))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "m91nmqWgqSV1" + }, + "source": [ + "Have the trained model generate some data, and see how well it matches the training distribution we plotted before." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "JqJCBFIcqSV3" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD5CAYAAAA6JL6mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydd5xU1fmHn3Pv9O2V3jsIUhUVUBSsqLEQS+wabDG22H/GmJjYY9dIiBqjYhcbiKKIVOm9d1hYYAtbp957fn/cZdndmdk6W4DzfD4o3HLuO8PynTPvec/3FVJKFAqFQnHkojV3AAqFQqFoGErIFQqF4ghHCblCoVAc4SghVygUiiMcJeQKhUJxhKOEXKFQKI5wbA0dQAjRAXgXaA2YwEQp5UvV3ZOeni47d+7c0EcrFArFMcWSJUtypJQZVY83WMiBEHCvlHKpECIBWCKE+EFKuTbaDZ07d2bx4sUxeLRCoVAcOwghdkQ63uDUipRyr5Ryadnvi4B1QLuGjqtQKBSK2hHTHLkQojMwCPg1luMqFAqFIjoxE3IhRDzwGXCXlLIwwvkJQojFQojFBw4ciNVjFQqF4pgnJkIuhLBjifj7UsrPI10jpZwopRwqpRyakRGWq1coFApFPWmwkAshBPAfYJ2U8p8ND0mhaBryA8UszdtGtvdgc4eiUDSIWFStnAJcDawSQiwvO/awlHJqDMZWKGKOKU2eX/cNX+5ejEPTCZgGJ6R15x8DL8elO5o7PIWizjRYyKWUcwARg1gUiibh4x3z+Xr3EgJmiIAZAmBh7maeXfs1j/a/pJmjUyjqjtrZqTjm+GD7XHxmsNKxgBli+t4VBMuEXaE4klBCrjjmKAp5Ix43pYnfUEKuOPJQQq445hiS2hURIRvYxp1CnM3ZDBEpFA1DCbnimOOOnmfjsTmwCevHX0Pg0uw82O83WEVYCsWRRSyqVhSKI4pO8RlMPuVO3t8+m1X5u+gcn85VXUbRPaF1c4emUNQLJeSKY5LW7mTu7XN+c4ehUMQElVpRKBSKIxwl5AqFQnGEo4RcoWgCpJRIKZs7DMVRisqRKxSNSGnIz4vrpzJtzzKCpsGg1C480PcCOsdnNndoiqMIJeQKRR3YXJRNtvcgvRLbkuFKrPH6u5a8w9qC3QRMA4CleVu5Zt5rXNThBI5P6cSozD7YNL2xw1Yc5SghVyjK2OvN550tPzP7wHr8RpDWrmSu7DKCc9oOpDDo464l77C1aB82TSNgGoxrN5j7+pyPJjT8QYO8ohLSEuNw2q1/VhsK97C+YE+5iANIwGcGmbxjLl/sXohLc3BVlxGc224wKY44Pt4xn092zKfUCDAioze39BhDejUfGKY0+TVnMxuL9tLek8qozD7YNRsh02BzUTZu3UGneGUbfbQjmiNvN3ToUKl6dipaErtKcrlm/quUhPyVjjs0ndGtjqMo6GVh7hZC8rAoawikBHOdB7nNg03oGNKk28B4bh93CiWGj6fWfEmpEajx+Q6h0yepPRsK95T7wOhCI9nu4eORd5Ngd4fdUxzycfOvE8kqzcNvhHDqNuJsLm7pPoaXNk4jZJqY0qSNO5n7+17Ij9mrWFWwiy5xmVzTNbxuXkrJioM7yPYepE9SezrFpdfnrVQ0IkKIJVLKoWHHlZArF
PDQsg/4ad9qIv1rsKMjBZVE/BDGRjdyoweMCnUDusTRz8eQIa1YdXBnmEGXDIG5Oh652wUmkB5AP74IERf+dKdm5+YeYxidMIB3f1jM6u3ZdGuTxrVnDuXzg/OYsmsRwSofLgBmlVciAE1oGNJEQ+DQbDw/5BqGpXUDIMdfxG0LJ7HfV2C9LmkyKrMvfz3+t+hC1US0FJSQKxTVcOaPf+dgsCTiOTtamZCbYedCU9MgGEHonAYJ5xXTzpPKzpKccrtcKcGYkwwHbWAesgOQiA4+9EHFEQ2hB8Z3ZtXHfoIhA1NKNCFw2HUcJxVRmlxsjVCoI/c5QAPRzo9whcdalQ6eND4bdS8Aty6cxPK87Rgcvs+l2bm155lc0fmUGsdSNA3RhFx91CoUQLLDE/WcLvSIZlpSAsEo3iwBDa8RoG9iey5sPxS37rA0+qANCvQKIg4gkMU6keZUOhrLlu/DHwxhll1gSokvEKLoVycyBMbqOIxZKZjr4jDXxmH8kIqZVXODjD3efEpDfgoCpazM31FJxMHK5X+2s/Z91KfvWcFFs57j5OmPMn72C8zev67W9yoahhJyxTGHYZrMXb2ND35aysL1O5FScnWXUTg1e8TrHTYbj/Ufj6vKeSFAH52PyPSH35QQQkcjyeHmvr4XMGvsX5g44FZkfpT6gnw7eLVyI69DhEImxjZXxFukT2D8lIrc5rI+GKSw/m8KzGWJyGgfMmXoQsOh2fCbwahmYd5a5PcBvslawt9Xf06WN4+QNNhRcoCHl3+oxLyJUFUrimOKvMJSbnj+I3IKSggZJjZdo2NmCm/edQm7O+fy3rbZ5TlnDUGGK5HnB19Dz8Q2vDX8Fq6c90ql8USigTa80CpHKdIx18Qhcx1ox5Vg03TObTu4/Nplq7MRaxMgUiZek5xk9MdMLmFh7mZrdu7VMJYngD9KeaIGpARhTwTrXQFynwPR3l+e4zYqpIacmo1z2g7CpulkOBPJcCaQ5c2vNIRNaJzWqm9NbykAr2/8PmwtwG8GeXXDd4zM7FOrMRT1JyZCLoR4CxgH7JdSHheLMRWKxuCJD2awJ7eQkGGJWiBksGVvDq9+OZeHrjiTq7uMYmvxPkpCfjJciXSLb1U+W+2S0CrimEJg5baTDLQTC7F7nWiJkrt7n0e3Cvf4AyFMo+xaIa0ZNAASu27jr+dewLSF61nyY5HV4MJfNnCGH/IcYFSYNWsSOvgQdomsklgXrf3ofUoQ8SYu3cHlnU6mKOjjq6zFODQbATPESek9uafPuLL4BY8NGM+di98hZBoEpYFLs5Pk8HBT99NrfE9DpkGuvyjiud2leTXer2g4sZqRvwO8Crwbo/EUiphjmCazV23DMCvngoMhk2mL1vPQFWcQb3cxIKVTxPt1oSEQyIi1LRZCh7aZCUwafkt53n3r3lz++ekslmzKsu6sIOBgCelLt/6GncH9fL1rCX6H38qlHxJoDWjrgyyXJeCmQLQKIPoUI5ckHR7LJq2ZuAkyJECTeI0AH2yfy3ODr2JCjzPYUZJDG3cyma4kKjIwpTMfjbiLz3f9yq6SXAanduG8doPx1KLRhi40kh1x5AfCF4tbu5NrvF/RcGIi5FLKX4QQnWMxlkLRWEhJVL8Tw6xd9VamK5F9ZSV60TgYLCkX8T25BVzz9GRK/RXTDtazNDvoXf1k9LRx/6638O0IQjropwB+DWN+MpTosN+Bfmo+sncJwmsDj4H0CcxfkyHfZom7BEJlM/j9DoxcB9qwQrRWAfxmkFc2fMffB17BpsK9rCvI4qT0HmEbhVq7k7mt51m1eh8qIoTg993O4OUN0yqlV1yanVt6jK3zeIq602Q5ciHEBGACQMeOHZvqsQpFOTZdY0jP9izZuLu8AgRA1wSnDehWqzFu7j6GZ9Z+FZYPrkgHj7WRxhcI8cdXp1QRcQCB0MA9ugjTE2S/NCgvGNGtebjUTPSTDmLMSAUpMOYko/UthjYBZLGOOS/Z+mRym
9a9fo3DtYsCDDBXxCPG5iEEbCzay2WzXyivLxfAoJQu/HPINbWaddfEJR1PBAH/3vwjBwMlpDsTub3nmYxtM6DBYytqJmZ15GUz8m9qkyNXdeSK5mL3gYNc+8yH+AIhvIEgHqedRI+Ldx+4gvSkuFqN8fGOeby5aQalRqDSAiJYG3ieH3w1J6R3567Xv+SXVVsjD2IzcQwvwkyLXhUiQ2DMTYaDVrWM6ORF9CvGXJgEQYE+tNAScoAiHWNxkjWDP4SQ6OfkIuzR/43raCQ63Hh0B90TWnNd19Pol9wh4rWrD+5i2p5lmFJyZpsBDEzpHLHaJWQayj+mkWj0DUFKyBVHCqW+AN8t3sDWvbn07pjJ2ME9y/1RaoshTQoDpXyTtYR3t82mIFhKe3cqd/U5j1GZfdiTW8DFj/+XQDB8NygAmkQfk4dwR9+4IyWY85OQB8pqwh0m+pm5GLOS0UcWVBJoaQIBgfF92uEcvJDo43Ko68bMc9oM5C8DxlcS6dc3TufD7fPwmyFA4tTtXNBuKH/qW7cuS1JKJBJN7RatF9GEXJUfKo45PC4HF4/o36AxdKGR4ozn6q6ncnXXUzGlWUmcdh0owGHTowi5tBYrqxFxAJntQB6oULse0DDXehCt/VbVSwWEBtIpEQOLkMsTLDEXkmSnB78RrDYVVJVpe5czNK1reXnijuIDTN4+t0zELXxGkK92L2Zcu8H0TmpX45gh0+CNTd/z2c5f8RoBuie05v6+F3J8lIVlRd2IyceiEGIyMB/oJYTYLYS4MRbjKhRHClVnmF1ap0YVcRJCaEMKqx3P2O3EXJJI5T37EpERRCQYiAhTMCFAa+dHG2qN7XbY6b6mD65FrZBeLeLO0Wj8bfXnnD7jrzy1Zgo/718b5t0CVp34L/tqt+HnidWf8/GOBZQaASSwqSibOxa/xZaifbUPShGVmAi5lPIKKWUbKaVdStleSvmfWIyrUBypBBx+tHZ+0CsKoARdog8vROjgiKTGlFXXrImrXDcOgMBcHwduExllgi10EJkBSAji9Yf4dd1Ocnb7MeYngkGdxNxnBvl810Je3zg9rGSz7NWwIHdT1PsPpW3z/MXMyF6Fv8q3goAR4t2ts2ofkCIqKrWiUMSYoqCXG+f/i+CAEoTbjdzmhoAG8SFEm8OLm/2S22OWWcdWwgR8UeZYRTZICkFAIG2SiDvrddB6lCDznMi9TquipciOMTMVfUQ+uOu2LiYhau38usIsXtnwHaNb9aNfUnuEEPyUvZqX1k8l23cQu9Bx6fZy07DKL1OyuTi7TrEoIqOEXKGIIX4jyJVzXiY/WILQQO/lxYw3MJcmQqmO3OzG2OzB3sfH7b87i02Fe1l9cCdGRaHUsDb3hCKotNNAGIAzioiXIdoGEW2CcFwx5rIEZJYLSjXkLjeiZ2nMXq8hTf637Rc+2DabbgmtiLe5WJq/vfx8QBoEQlEWfIF4mxtDmsoqt4God0+hiBFrDu7iwlnPss9/eMOQDAhLxE1heZabmvX7DXEs2LqNFzdMqyziWLlu0aMU9CrpDF1aIuwkol1Lpft1EDbr/9qgInCYVprHXrO9bX0wkGwsyq4k4rVhbcEuHlz2QdSNWjuKD/DD3pWsLditmldXg5qRKxQxIMdfxO2L/hPWDUhmO8oqTCpPn4OGwZuz5qD3jZzs1np4MSXIzR5L+HVp1ZE7JJRqh+vHa4MGoncxck08pAWRfoFwtgxR9JshFuZuYln+dgandik/HjRDPLx8MgtyNqELHROTLnGZvDLsBhIjdEs61lEzcoUiBny9e3HExhOVfccrIKs5R5lFbi8v+jm5aKflgdtAbvNgLkvA+CkVY3l8rRcuhQCtkx86+TBnpWAsSkCGymrPWwBeI8jcA+srHXtn6ywW5GzGb4YoNfz4jCCbirL5++rPmynKlo0ScoUiBmSV5kdc0BOtAxVMsiqgg9Y2go95BQ4Jrbkq3lrkNASEylIzWS7Mra46VaGIQ
37luU6MX1KQexzIYq1FCPr/ts3m1B/+wt9Xf0FBoJQvdi0Mq3IJSYPZ+9dHfJ+PdZSQKxQxYGBqZ9x6eFce4TJpPVjDYdfRtTJB1yWioxeRGi5IlrFX2a9cHekXkOMI/zAwBXJt7SwFKox++LdFNswlSRizUqAFCDlYTSy+zVrCjQv+hTcU2bpAIgma0RdPj1VUjlyhiAFjW/fn9XkzKVlpYJZqiJQgrj5+hnTsxMtnX8/WvblMX7yBOdkb2BK/AzOlilBJDuusKFuwzDCQXq3MRSvCQ02BzLNBaqhSBYuURKxokdkRzLEk0IJsUULSJNubT8/Etqwt2B22EalrfGbEtnvHOmpGrlDEgJlLt5A/24GZY7fKDPc4Cc1K4fY25wHQtU0at55/Ms9d9RvcGaLS0qdT2CwHQmFtta8kwi4TnJGmzGX9O+ckY65zI4NlPuR+gczXrRy4LDtmgMzXIRBB3Q2Buc3VItIrhwhIg/WFWTh1W/mmKYem49EdPHLcxc0cXcskZqZZdUGZZimOJkxTcuaDb5JX5K10XACjBnTlhVsvrHR8e/F+XtowjWV524m3uxiZ0Ytpe5aHV7yUzdJlrh3z1yQwDo0aXgWDZlrHDi2gpgTR2vqs/p1ZLijRICMA+x0VFlkrpHpa+dGHRe7y01xoCHSh0S+pPYNSu3BxxxNpVaUhxrGGMs1SKGKMlJIVW/fw3aL15Bd7w88DK7buDTveOT6TF4ZcC8CKLXt47MNpFO5JBKeJ6ORFFtkgx27VfHfyonf1ow0pwFwZDz6dMBEv6w6kDS20rinRocCGqTkhtyxvr5mQFoJ4AzZ7Ko9hCOQ+J7KoFJHQcvLPJhJTGmwu3serJ9yIQ1NyFQ31zigU9WDn/nxue+kz9uYXVVs5kpboiXpu7Y593PryZ/gCIUCAV0euP7SAKaymy2tsGJs9ENTLFiUjlyyKDn60jCDaGfnIoLDsYhcnHc4wmxoUaVa7uEhjSJAH7C1KyA9hSsmS3K2clNGzuUNpsagcuUJRR6SU3Pby5+zJq17EXQ4bN559QtTzb3w9r0zEK3Kok3OFP/v1snRIlLpz3XJCMbe7MEs00CRym+ewjzlY7eB8egQjrgrnXS0oUV4BQxp8tutXPt25gKJg+DcfhZqRKxR1ZtW2bA5GSKVUxGHXmXDecM4e1jvqNRt3H6jlE6sxVQFLnLe6MYUE4sFpWOJfERMrXRNxLKtps2gVvVtRc+I3Q/yyfx0LczbzxsbvmXjiBLoltG7usFoUSsgVijpS5PWjVeNY5bTbmHj3pfTv0qbScW8gyPs/LmXawvXYdA2Xwx5lhLoiysoXrZicISdnnNiDOSu3U1jiO3xNNTtJtWGWtW5LxmcG8ZtBHlv5Ce+dckdzh9OiUEKuUNSRAV1aEzSi55IzkuI4rnPlGWPIMLnxuY/Zlp2Lv6zhhMOmowlRqRF0LPAHDZyajRlP38zD/5nKnNXbLB2XMkKzCwmpQbSM2ncQak4ksLVoH0+u/oIZ2asImAYnZ/Tk7t7n0dqd3NzhNRtKyBWKOpLgcXHHhSN45cs55aIMVv13aoKHl27/DUIIgiGDX1ZuZXdOAd5AkB378ytdHwgZ2HSN1DgPuUUl5WOYZoWdQdVSltcOamG575Xb9mDTNZ6ZMI7NWTms3p5NelIcD/9nKsX+soYXEogz0YdV362opWFg8u2eZeVb9WftW8vy/O18NvJe4u2uZo6ueYiJkAshzgZewtojNklK+VQsxlUoWipXnjGYPp1a8dHPy9mXX0THzBRGD+zOiOO6YNM19uQWcsNzH1Hs9Vt+3FISMsNn3poQpCa6KfL68AeNw4unAqr8gYrCLtr60AYUW77lgNztxFyZUJ4+2ZKdxy9rtjCqXze6t0une7t0AC4fPZC3i6cjAxrCZUJSqFpf85aGKPtvRb8VE0lhwMv/tv3CrT3PbK7QmpUGbwgSQujARmAssBtYBFwhpVwb7R61IUhxtHPTPz9m+
ZY9FWbXkXHbbYRMk6BRtWKk7L5K2/PLZCwtgDa8oFLfTmmA3OO0vM/L7hftfHST7Rg1oBu/O30wKQkeSnwBzv7qafyJR1b1hw0Nh25HF4KgaURsJq0heKz/pZzTblAzRNg0RNsQFIvywxOAzVLKrVLKAPAhcGEN9ygURy0lvgArt+6tUcSFAE3XsNkirTKWlRtKQdWSRNGzNKz5stBBtPVXahwhhWCLsZf/zVjK+L/9j5yCEuJcDp4YdQm2lmSwUgG70GnlSirf1XlCWneeHngld/Q+h8cGXMpzg69GRPkKYSL5x5opFId8Ec8fzcQitdIO2FXhz7uBE2MwrkJxRCKljNrBRwirqkVKaJeexLVjh/D0RzPrNL7wRFloNTmcM9dBa+9D5tkJ7jMoKPHyxPs/ICWU+gOM7DyIWc4lmGXNoWUQ0C2vl+bCLnQmj7iTjnHp+IwgAnDqlSt7pJR0jEtnY+GeiG+xTWgsyd3Kqa36NknMLYVYCHmUwtQqFwkxAZgA0LFjxxg8VqFomcS7nfTumMmaHdmVNgzZdY2LR/Tn8tGDsOka7dKTME3JxKm/4g8WYtQwgz+EzLODxx8uugKrexASDDDXxVkt3gDDlMxete2wweIWiUxMQhtxEOHTkEIihLScE6tM1qUBMscBJoj0IMIePc5oRo014RA6Q9O60THOyuW79MilmUIIXh92I1fNfZW9vvyI19i1lvltozGJxefvbqBDhT+3B/ZUvUhKOVFKOVRKOTQjIyMGj1UoWi5/vfYsEj0u3E5LkDxOO+0zkrntgpPp1CqFdumW+ZOmCSbdM57+Xdpg12v3z9Hc6LH8USqk1aUEc5ezrHql7FeBHSrs7qwosNIUUKhjTk/DmJmK+WMaxvwkzAN2S7gNkCEwC3SM79IwFydgLk3AmJ6GsTu6jWyire5t2OxC4+SMXvx94BW1uj7B7ubP/S/BpUUQewFD07rVOYYjnVgsdtqwFjvPALKwFjuvlFKuiXaPWuxUHAuU+gJ8v2Qju3MO0qdDK0Yd3xW7Hn22mF9Uypod+3j4rWn4AkFCYQugFYgPofUuQaQHwadh7nAhd7ir3fRTI6KsnNGngV2CaYLUrKbRFdEk+ul5iLjI8aU7EsgJ1M5J0SY0ru16Gjf3GFPncF/bMJ0Pd8wFBLoQSOCfg69hSFrXOo91pNBo7odSypAQ4g/AdKzyw7eqE3GF4ljB43Lwm1OOi3p+4+4DbMrKoVOrFPp1akVKgodT+nXm8tHHM2nqwuoHL7ZhLm6opWsVO1wpLBGXwvIuF0T+zi7B3OVC710acdScQBFOoeOXNRtwGdLErKcZ+u29zuI3HYaxIGcTcTYnIzP7HLNNJ2JSRy6lnApMjcVYCsXRji8Q4u43vmTF1j3Wzk5T0i49iYl3X8L8dTv57/dN9W01sguidUqCU0ZuRiGBYPUz/5A00RBhHX6q4tTtjMzsU7twI9DOk8olHVVthdrZqVA0Mf/6Zh7Lt2RV2uW5ZW8uZz00ieQ4J8FQU7gQyrKVyci1CqJ1ANGjBHNOSthZzSawtQmhazpuzUGpESBUZfZt1GLJ06XbObP18fRLal/P16A4hBJyhaIRMEyTyTOX8eHM5ZR4Awzv05E7LhpB27QkpsxdU0nEDxEyTHIKm2qjTpnRVtX0SkoQ7aSCw0c6eGGHu9I1p/brxmO/HUNQGuT6i7hpwZthQq4jIoq5Q7NxUnpPEuxuzmozgBPSuketC1fUHiXkCkWMmbpwHU9O/pES3+Hdhz8s3cT8dTv57LFrCISqepA3J4dFtFf7DBI9TjobySzI3syu3YWQW9n61mHTGTu0J4kOq2FGmjOB/ikdWZm/A3/ZtnkNgU2zYUTYfdnek8Yzg36nxDvGqMYSCkUM+Wz2Sv723g+VRBws50FfIMhHPy/n5L6da7TDag4OFJSQW1TKjLlb0LPdZW3iKkdqmCbZuZUrUp4ffA3jO55Ekt2DW3cwIrN3xAVMgeCsNv2PS
RH3lfr58f3ZfPLcV6ydv4FY90pWM3KFIkaYpuS1r+ZFTJuA5Xb4vxlLOb5rGzxOOyX+lmUdm1dUSl6RVYkSrXGG026jZ4fK+0Bcup0/9j6HP/Y+B4D5BzayJG8rwVDl90EiWZy7jesbqcw7d28+sz9dQMAX4MRxQ+jUp+ly76Zpsui75cz7ciFxSR7Oum40nfpa22u2rdrBvaP/QigQIugPYnPYGDCqL49PuR+bPTYSrIRcoYgRJf4AxV5/tdf4gyEWbthV7TUtgaq+i2ClVTpkJDO8d6dq701yeCJ6rAsEac74mMZ5iJkfzeW5G14HwDQM/vuXj7nojnO46amrGuV5FTEMg8cuepYVP6/BV+xD0zW+em06t798A2ffcDqPX/o8RXnF5deHggYrZq3h24k/cOHt58QkBpVaUShihMdpx+U4uuZG8W4nyfFuUuLdXDpqAJPu/S2aVn1qpE9iO9KdCYgqaRmnZmN8p5NiHmNRfjHP3fA6AW+AgDdAKGAQ8AaY8up3rF2wMebPq8q8LxezYuZqfMWWWZdpmPi9AV694y02Ld1KTlZu2D3+0gDTJv0YsxiUkCsUMULXNK4/a9hRJebHd2vDT8/ewo/P3sKfxp9GnMtR4z1CCF4eej0dPGm4dTtxNicuzc5dvc+lf3LsfZYWTVuGHsHeIOAL8NMHs2P+vIrk7Mnj3b98hK8k/JuYzaGzZt6GqGsCRnU7d+vI0fMTp1C0AK47cxi60Hhr+kKKvQFSE9wYpiS/hmbNLRGXw8YNZ59Qr3vbeVL5ZOTdbCrKpijopW9Se9y2mj8E6kPUdUNJzBcVK7J67noePufv+Eqjp9PadmtNYlpCmNA73A7GXnNazGJRQq5QxBAhBNecOZSrxw4hGDKw23S+mLOa5z79GV+g+rLDk/p0ZPGm3U20IQjiXQ46ZqawNTuXtEQPGUnxrN2xD4AEj5MHLhvNwG7t6j2+EIKeiW1qvrCBDDtnIEaE98zhdnD6FSMb5ZlSSp6++mW8xdG9z4UmGHRGfx79+B7uH/tXzJCVcnHHu+g6oBO/+cPZMYtHCblC0QgIIXCUVSRcNOI49uYV8p/vovunOO02LjttEPPX7WyS+FwOG3dfMorzT+rHm9/O58OZy8nKKSQzOZ5LR/bnujNPwGY7MjKviakJ3P3vm3lhwptI08QImdidNsbdPIZ+J/dqlGfu35lD/r6CyCcF2B12zr1pDL4SH71P6MF7215n5uS55OzJo/+IPgw963g0LXbvb4PdD+uDcj9UHIs88f4PfDV/bZiroSYEPdtn8O4DlzPirtesHp+NSILbwW0XnsJlpw7kH5N/5JsFa8O+LaQlevj33ePp3Dq10vGDxV5MU5Ka6GnUGOvD/l05/PLJfAK+IMPHDaHrgOqraxpCXnY+V3W5nWCUElKhCRwuB0LAE988xKT6MuIAACAASURBVPGn9ovJc6O5HyohVyiaCH8wxP+9PY1fVm5FCAiETBw2HZAEQyYJHieF1eRbY4XdpvHGHy+hR7t0xj44kUCUunebrvH+g1fSo30Guw8c5JG3p7F+534Q0LlVKk9cfw49ypo6H2vkZedz+7AHydmTV2MnjYS0eD7ZOwk9Yku/uqGEXKFoIWTnFZGVU8CCdTt4/6elNebOG4POrVJ46qbzuPH5jynxBaJe57TrfPbYtVz3zEfkFZVWqg9PcDv55u83kuBueuvY/TsP8Ok/v2Ht/A107NOe8X+6gC7H1b4iZuOSLSyathx3gotTf3syaW3CzcGisXXlDu4e9ShBf5Cg//DfnRAi4uKqJ8HN36c+zHGn9K71M6LRaH7kCoWibrROTSAzOZ67//Vls4g4wM79+SS4HfiD1T8/EDR48bPZeP2BsE0+QcNg+qL1XDrq+MYMNYxdG7K4Y/jD+Ev9hIIGm5Zu45dPF/C3rx5g0On9q71XSskLE97kp8mzCfpD2Ow6bz38AQ+9fyen/KZ2FTov3PwmpVXMzYSA+GQPR
fkl4TcIqimtiQ1HxmqGQnGU4Q0E8fqbzzzLlHDXG1/VWJ4nga3ZuQQiVIX4AiF250RZ8GtEJt7/HqWFXkJlKSHTMPGX+nnxlok1vp7F05cz88M5+EsDmIZJwBfE7w3w1FUv4y2JXoFyiIA/wIaFm8KOSwneEh+uuPBvJ7pNp8/wnrV8dfVDCblC0Qx4nHYSPM3bzWZTVk6NDZ91DQZ0bRuxgsXjtNO/S+OXF1Zl5ay1EQV73/YDlBZVX68/473ZETfvaDaNZT+uYumMldw65H7GxV/F9b3v5IuXp1JSYM2yszbv5eaB90WdXLviXAw9ayCuOCeaJnC6HbjinPz5k3tjkh+vDpVaUSiaASEEf7jwFJ77pOb68ubEMGH8yP7s2JfP2h37ylMxdptOm9RERg1o+v6YngQXpYXhbeY0XcPhqtyQOeAPMnPyHOZ/tYjkzGSKD0ZIfZSxZcV2PnpqCn6vtWawe+MeXr/rbd68713O+N1Ilv24ipysvIj32p02ho8bwpWPXMKl95zP8p9Wk5Aaz2mXnUxiWkIDXm3taJCQCyHGA38B+gAnSCnVCqZCUUsuHtEfj9POG1/PZ19+EW1SE9h/sJigYWKYslF3JdYWTQg+/HkFr//xYt6Zvoiv5q/BMCVnD+vFTeecWG0z6cbgpw/nRKzfdrjsjL5iBHbHYSH3e/3cPfJRdm3Yg6/Ej6Zr6DYNm0MnFKhcqWMaJrM/W1Au4hUxggYzJ8/BNCQy0jcYYW23nztlIbM/XUD7Xm3565cPkNmh6Sp6GlS1IoToA5jAm8CfaivkqmpFoahMyDA568GJLXIrf2ZyPN89+fvmDoP8fQe5quvtBCKI7cDR/fjb1w/hqpCu+ur16Uy8/138pZWv120auk3HCJnY7DoIePSje/jr+H8SqKaCp7Zoukbb7q15a+2LMfdeb5SqFSnlurLBGzKMQnFMIKVk5da9LNqwi8Q4F2OH9CQl3g3A8i1Zjb4RqL6kJLibOwQA5nyxkEhSo9t0Bp3Rv5KIA8z6ZF6YiAO4PE4mPHcNhbnFeBLcjBo/nOSMJDI6pJG1aW+D4zQNk9ysPDYu3kKvYd0bPF5taLIcuRBiAjABoGPH2DugKRQtGcM0eXDSt8xbswNvIIjdpvPiZ7N46faLGNarA75AqEm6BqUkuMmvZkHQpmuVdp66HDauHRs2AWwWQoFQxNSGNM1K9dyHiE+OiziOaUq6DexCr6GVO1wMPK1fTIQcrJ2dUbfwNwI1Vq0IIWYIIVZH+HVhXR4kpZwopRwqpRyakZFR8w0KxVHED0s2Mmf1drwBa0t3MGTgCxrc+foUgobBoO7twrbuNwZxTkfUDwyHTaNPx0ycdp14lwOnXeeaMUM5a2jj+JXUleHnD4l43O6yR6wBv+C2s8Jm6UJAYnoCPYdUXqTdsGgz3//355jFGgqE6H1i08zGoRYzcinlmKYIRKE4mpkyb3XEzTe+QIjvFq7n/JP68dAVp/OPyT8RMowaywLrS05hCUN6tmfxxt1h59xOB2/96TL25RdzoKCYrm3SmmXXZjTadGnFVY9eyvtPfEYwEEJKicPlYNwtY+k+qEvY9UPGHs9lD1zI5Ce/wOawIaXEk+jhH1MfCUsHP3vDawQjVA8JAa54F96immvMD6HbNAaPHYArzlX3F1lPVPmhQtEElERYoDvEzOWbOf+kfpx/Uj/6dW7NF3NW8+v6HWzdmxexZVpD0DTBbRecwt/fn8H2fXkYpkTXBDZd48kbz0XXNNqmJdI2LTGmz40VVzx0MSeeN4SfJs/BNExGXTqc3if0KD9fmFfED+/OYue6LHoN684l95zPeTefyeo560lMjee4kb3Rq1Ta7N+Vw4614R9sAHaXg4TU+FoLuaYLjJDJ8pmrubb7H3jl1yebpHqloVUrFwGvABnAQWC5lPKsmu5TVSuKY41/fjKL935aGvHcKf0688ofLqp07JG3pzFt4fo6P0cT1q7NaCR6nMx45hYAZ
q3YwoL1O8hIiueCk/rROrXx651roqSghIMHCsnsmF6plLA27Fi3m7tG/B9BfxB/aQBXnJP4lDheW/gUqa0Pe6lIKSkt8uJw2bE77EybNIN/Tngz1i8FTdcYdvZAnvj6oZiN2VhVK18AXzRkDIXiWOCqMUP4YOaysBm2w6Yzsn94WuDE3h2ZsXRjnZtMRBNxgeV5/vi1Z2Era4t2xuAenDG4R+QbmpiAL8ALE95k1ifz0W0amq5x45O/44Jba5wXlvP8jW9QcrCkfOelr8RP0B/i3w+8xwP/vQOAqZNmMOmB9ykuKEG36Yy+/BQyOzXOmp1pmCyevhwpZaNX9qkt+gpFE5CZEs9VYwbjtB/+Wu8o2x15/vBwr+qzhvYiLSFy1YVeR1HQhOCMQT348P+u4tQB3Wq+oZZs3ZvL85/M4pG3pjF90QaCRv3LJ1+4+U1++WwBQX8QX4mf0kIvE+/7H/O/rt03d2+Jl3W/bgzbPm+EDOZ9aY3x4s1v8sKENynKL0aaklAgxE8fzGbJ9OVEWwHWIvQCrQuirHnEillruPOUR7go9TpuH/YAi75b1qBxw56jbGxjgwztQno/hNAucJyEcF+I0Fqe+b6i+ZBS8tPyzUyeuYziUj9nDO7BFaMHER9lQbGgxMd5j0yiNELzAiFqZ6inCcENZ5/AbRec3NDwKzF14Tr+9t6M8oVZt9NO97bp/PvuS8s7I9WWksJSxre+iaAv/HX2PrEHr8z/R6Vj3hJf+U7NpT+sRLdpLJ6+nGn/+Sni+PHJHm594Xqeu/H1iOWLdqcdV5yTorziSsetXaB2/PX0iNftOiMuOpFzfz+GP1/wVKVdo06Pgwf+ewcjLxlepzGPOj9yKb3I0i8hMAf0tgjPFQhb+FfUpkD65yHzbwWCQAhwg56BSPsMoSU1S0yKo4NdBw5y38Rv2LKnssGVoMZ+BoDVROLXV+6MaUzeQJAx971ZXkp5CJfDxr2XnsolIwfUaby92/YxYcC9Ec2s0tulMnmXlb8uLfLywoR/MXfKQoyQgWlaVSu6rlXbOxOsuu6I2+uxtvf/4ZUbeevhD/AW+wgGQ0hD0n9UHzYt2Ro2tpUmkdV+kDrcdpxuJ8ed0ouNS7eRG8GjpVWnDN7b9nq1cYe9jihCfkSmVqRZhMy5EIqeBP/3UPoeMudCpG9m08ciTWTB/YAXS8Sxfm9kI0smNnk8iqOLDhnJvHHnJWhV0ikSa2FTE6Ksy1BkRFnOIBgymLt6G9MXbSCnILpxVG1YtXUvmhaei/AFQny/ZGOdx8tonxbRHVBogn4VmjE8Mu5JZn0yn6A/hGlIkBDwBmoUcSCqiAMYIZPh44bw8OS7CAVDmCETKSWrZq/DNEycHieueBc2p/VNQ8rqRRwg4A1SWuRl/tdLIoo4WM0xQjX4wdeWI7L8UJa8DcYe4NBXlRAQQhY8CM55CNGERj7GLjCLIpwIgO87SLiv6WJRHJVs2n0Ap8NGsEoJoymhX+dMHrlyDK99OZcF63ZUmrXrmuDU47uxbuc+bnv5c0KGJVAhw+Smc0/kpnNOrFc8bqc9qqGXx1m3ShMAm93GhGev5vW73ilPY2iawBnn5NrHfwvAul83snr2unrFWxOarvHh01OY8so0zAqbsqQpI5po1RYjSgu9Q8SnxMfM3vaInJHj+47DIl4RP4Q2N20swgVE+QsTkRerFIq60Do1IWL1iiYEnTNT6d0hkz9fPZb0pLhyIfU47WQmJ3DPJaP4wytfUFDio8QXoNQfJBAyeOOreTz23+kUeeue/+3XqXXEvL7bYWP8qfXrFnTuTWN49ON76HdyLzI6pDFq/Em8tvApOvRqB8Cnz39Tr3FrQ9Af5IuXvq0k4o2Ny+Pk8gd/E7NqliNyRo4WF1k7pQGiaRcYhd4Kae8LwVVUDsoN7quaNBbF0UnHzBT6dWrFqm17CVYQG4dd56oxgwHISIrny8evZ+aKLWzLzqNrm1RGHteV6Ys34Ivw9V0C3
/66juVb9vDhI1fhrsNMWtMEr/zhIm558VMCIaN8ln/l6YM5uW/ner/OE88dzInnDo54bsfaXfUeNxJV+2s2xVKh0AQ2uw2bQ+ey+y5k/L0XxGzsI1LIhedqZMFGrLz0ITSwdUPYOjR9PMkvI/OuBjPHOiBD4D4H4bm0yWNRHJ28cOsFPPrOdOav24EuBPFuJ/931Rh6dcgsv8Zht5X7ovy8YgvnPjIJXyCIP8pXfFNKDhQU882CtXWeSfdol853T/2ehet3UVjiY3CPdrRKabwNRa27ZEbdfVlXojVJjiWaLqw8fhlOj4Pr/nY5Y64aRUIMUyqHOCKFHNf5EFgK3s9AlL0ELRWR8lqzhCP01pD+PQQXg7EP7AMQNuXwqIgdCR4XL952IUWlPoq8AVqnJERccATYnJXDw29NrVXnIV8gxNy12+uVErHrOqf061zn++rDb++7kOUz19S7FLAijS3iNqeNzA7p5GcftJpOhExOOn8oF/3x3DB7gJg9s1FGbWSEEIikvyDjJ0BgOegZYB+CELVL+Uv/L8jil62ab3svRPw9CMfABseEY1iDxlAoaiLB4yLBc9iMqaDEx968QtqlJZYf//Dn5bX2Ntc1QetGnEnHigGj+nLHqzfyxt3vYJomoYBB534d2Lx8W7UVKc2By+3kP2teYP2vm9i/K5eeQ7rSvmfbRn3mESnkhxB6W3DX7Q0yvVOh4EGgrGQpsACZdw2kvo1wRLbJVChaGiHD5KkPf+SbBeuw23RChsFFp/TnT+NPY8f+fMxaipvdpvPbei5QNjVnXTea068cQdambJLSE0hplczMD+fy5FUvtRgxd3qcXP/E5djsNo4b0afJnntEC3ldkVJatedUrTv1IYueRqR93BxhKRR15s1v5jN14XoCIaN89j1l3mq8gSDLN2dFvMdu0+mUmcyuAwXomsCu6zx2zZl0bZPWlKE3CLvDTud+h9fBRl9+CnanjWeue5WAN4BRR2+aWNKhdzuueWw8p112SpM/+5gScmTp4QXJqgQ3NG0sCkU9kVLy4c/Lw3LgvkCIr+atibrjs1VyPO89eCX7DxZT6g/StU1auYHWkUAoGGLRd8vJycqjz/AedB9o7eTudnxnXB5nnTzDY41m03hr7YvN9vxjS8iFy/olI+xs0xKRMgT+nyG0CWydwXkGQjiaOkqFolqkhJIoTYKrSzDsP1jMK1/O5d5LT22cwBqRvVv3cfeoRykt8mGGDBAweMwALvrjuTwy7smIPi1NSUJKfLM+/4gQcikl+L5Blr4LZjG4xiLibqyzj4kQOtJ9JZT+O/ykmY88cA7IHGvmjgAE0n0pIv4uhH7kfP1UHN1omqBb2zS27MkNPydE1GYUgZDBlLmrj0gh/9tl/yQv+2ClXPiSH1awdMaqZhdxgJKDJRQfLInaJ7SxOSK+V8mifyAL/g+CK8DYAiVvIXMvRpr18IywdSLy55cB5q6y2boETOuY92Nk7gVIM79Br0GhiCUPXnY6LoetvKu8JoTVa7OG1mxVja6OBHL25LF99a6wBc2ANxiTckQATdNwxde/NZvNbgtzT2xKWryQS2MflH5I5c0/ATAOIL2f13k8YeZiiXRVjCjHJZgFyJL36vwshaKxGNKzPW//6TJOH9iDTq1SGDukB+89eCWv/OE3xLscYSZbhxjUrV0TR1o3Nizewt2jHuU8z5Vc3n4Cn/7zawK+QNSa+VjhcNkZMKoPop7PsbvsZHZs/JZu0WhQakUI8SxwPpbxyRbgeinlwVgEVk5wBQg7yKqfvD7wz4a4q+s2nmNoWZ68tA43BSAwG7ijbs+qgJQmBBaBuQ/s/ZvNcldx9NCrQybPThgXdvy7J3/P5JnLmDh1gdVAwZTYdR2HXeeBy0Y3Q6S1Y8faXfxp9GPldra5e/J5588fkbMnj5TWyWRv21/peqELpBGbskNfqZ+FU+vX7MHpcXDrC9fFfLdmXWhojvwH4CEpZUgI8TTwEPBAw8Oqg
JZJ5JmyDno9Zhf2oWAfDIHFHC5DdIFIABltti5Ar39BvzSykXlXgVmW05QhpOssRNLTTevUqDgm8Lgc3HjOiZx3Yl8mz1zG+l376dMxkytGD2rUbfQN5f2/f06gitugv9TP169P5/Ep9/PYRc9ihAyMoIHDZY/Y9b6pad0lk3sn3crA0cc1axwN7dn5fYU/LgBiby5iPx60VmDspLIplR0R97s6DyeEgJQ3kaUfg/dTQIL7UnCdCXm/s2xpw8TcifBcX++XIA/eCUZW5fh9PyDtQxBxV9R7XIWiOlqnJnD3JaOaO4xas2np1ogbmXS7zou3TEQTglDZ+UAzL3Ampsdz5cOXcPGd5zV6P87aEMsc+Q3AtBiOB5Rtx0/9L9j6Ak7L3VAkI5JfQNi613NMO1rc79DSv0BLn4IWdxWanolInwqJT4DWFrCVPSsBEp9AOOq3+00aByC4hnC7Ri94a593lzKEWfIeZs75mAfOwSx+Aym9Nd+oUBwhdOrbPqIo+kr8HNidi6/U36RWs9Fwuh2cdd1oLrlrXIsQcahFqzchxAygdYRTj0gpvyy75hFgKHCxjDKgEGICMAGgY8eOQ3bs2FHnYKWRBWYJ2LoiRONWTpqhXVDyfpn3eanlo5L4V7Q6liHK0E5kzjjCd5MCWnu0zMh9BsPiyb8F/PMqjOMEWw9E2seN/l4oFI1JwBdg45Kt5GTl8twNr+MvPZxecbgdBH2BJrGZrQvuBBdfFfyvyZ8brdVbjQogpRxTw8DXAuOAM6KJeNk4E4GJYPXsrDHiSM/S20FTpZQLn4DAL5TPpP0/wIEfMVM/RXPUIR+mdwAtBcy9VU44wH12rYaQwVUQmE/lDwM/GFvBPxNcY2sfj0LRgpjx3ixeum0SmiYwDZPE1AT0Vjr7dhzA6XbQsU97Ni7e0txhhuEr9iOlbDEz8galVoQQZ2Mtbl4gZZ3KQFo0MrjWauoclg4xIf+GOtlgWk6NzwBu4JB5vxv01oi4m2s3SGCZ1TQjLNBSZKBhTawViuZi87JtvHjzRHzFPkoLveUplH3b93PGlSNp3TWT7at3NneYEek5rFuLEXFoeNXKq4AT+KHsRS2QUt7S4KiaGRlYzuFGylVPFkFoHdj71no84TwRMqYiSz+yFlPtwxGeCxDCXbsB9MyyEsyq27JdoEXKeikULZ8vX/+OoD980VJK+GnyHKQpG907vLYITSBNiW7TsDsd3PHqTc0dUiUaWrVSv9XGZkBK06odF55qfculNK2GFVFdK7QIglozQm+HSLinzvcB4Dwd6/OytHJcQkN4LqzfmApFM5O/92BUu92WsKhZkfa92hKX6KH7wM5ceu/5tOveprlDqkSL39kZC8yS95H7hyP3n4DcfyJmyTvRP+kDc2to4OwEe79GiTMaQjgQae+D3g1wAW7Q2iBS3kZoqU0ai+LYIqeghJ118DevCyeeNxinp3pLgZbCrnVZOF127nxjQosTcThCTLMagln6CRQ9Q/kWf1kARS8go9ShS99MKtsBVERHJD+PELVvVBsrhK0bImMqMrQLCIHeuUXl6BRHF7mFJTww6VtWb8tG1zTcTjuPXT2Wkf27xuwZY689jS9f+46sTXsJRekr2pJYMWstK39Zw4BRTTuRqw1H/4y8+BXChdkLJVH6e2qJRP58s0HCwwhX7bc4y8BCzIP3YubfgvR+bdnkNhBh64CwdVEirmg0pJTc9vLnrNiyh0DIwBsIkldUygOTvmXr3nDHxfri8jh5ZcE/uPov49HtR8YO528nzmjuECJy9Au5eSDK8ZyI6RXhvpiIQi5cCPclYYel9CO932AWvYT0TUOW5c/N4teQ+b8H3zfg/wlZ8Cgy/0ZkpOoThaIFsWH3AXYfKMCo6jYYNJg8s35+JIfYt+MAz97wGld2vIXbhj3Ar98u5cqHLuHt9S+hHQFNLpzultmf4KhPraB3suqtw45H3kUmbB2RSU9CwcNQ7oOiI1L+hdA8la6VR
jYy97cgC61SQBEHWgoy6XUofgPLS+wQpZYBmP9Hyw5AoWih7D9YHNFt0JSSrJzCeo+bk5XLLYPvo7TQi2mYHNidy/M3vUHW5mwGnX4cdqet0magpsLhcjDg1D6smLmmRv+Wyx+8qImiqhtHvZCLhAcsr5NKm2lcEB/d20tzn4d0jobgIsAOjmER8+Ky8LGyGX/ZLFuWgOGHosew3toqP5SyFOmbgVBCrmjB9OmYSTAU/s3RabdxYu+O9R73o2e+xFvsq1SR4ivxM/kfn2MEQwT9zWOCddtL13HWdaO574zH2bxsW7n7YlXG/+kC2nZrmeW+R7+Qu0ZDyqvIoufB2A56R0T8PTXmuoXmAWf0TipSSvBX2PlZTgiCq0E4I1QwalDHrkYKRVOTkRTPxSP7M2Xu6vK+oDZdIynOxcUj+9d73BU/r8GIsKgppWTy01OareRw/leLOe/3Y3n2x8eY/dmvzP58AYmp8XQf1IUVP6/F4bLz2/svpFOf9s0SX2046oUcQDhHIZxN6QKnEdlLwIFwj2/COBSK+nHf+NPo27EVH/y0jCKvn9MGdOOGs4eRUEMHoupo3TmTbavCd2o2t5Ph5qXbAKvLz+jLT2H05aeUnxt385Hx7fmoFXJpFiCLX7NMr4QD3Jcj4q6NWjooZQC8U5Der62FTc9l4DwDMMHYDVoCCHeZcVUIHCdbG3X8MwnfBWqHxMeh8HGg7IdUBiHxzwh7z8Z70QpFjBBCMG54X8YNr/0O5pq47IHfsPTHlZXy4DaHDdMwMGPUIKI+5O8/yIZFm+k17IjZ3xhGje6HjcHQoUPl4sWN5xEipc9yHDT2Ui6kuMB5MlrKvyJcbyDzrobQGii3hnWC3sUScUJlv0zAbfVlliGIv99q5GxmhwchUiHjJ0RwBUiflWfXmrfTtkLR1OTvO8iHT0/h12+XkpSeQO8TujPjvdkEvAEMw2DwmAEs+WEloWZuEpHRIY3nf36cNl1aNWscNVFv98MjEu+3YOZwWMTBag03Dxlcj7D3rny9/ycIra0g4mC5C66PMHjJ4dx38TPg/h143yVsVi59UPo2OEeBbbiq+1YccxTmFnHzoPsoyi0iFDTI2rSXrSt3cPHd53P2dafhSXIz9/OFrJq9jlAwFN0Vowk4sCuXW4fcz8Tlz5HZMaP5AqknLb9wsx7I4KIoPTmFtRBZ9Xr/L3Xs4XkIA4JLiGywVQrFryBzL0Xu64dZPLHFGAApFE3BF69MpeRgSaVdm74SP58+9yXxKXH8+/73eOPudygt9DariFeM7cOnpzR3GPXi6JyR650AB2Hlf0IDPYJPgpaK9VbU9eudgfUTKIj8k3hoFT4Exc8hzWJEYvXGWVIGkKWfgO+rslz95eA8W83oFUccS39YGXEh0+60s2jaMn6aPIdgMy90VsQIGqz4eW1zh1EvjkohF+5LkCVvVnEp1K0GD47hUa5/m7oLuW6lZGo7nSidhIy/CaElRjwtZcjK1QfXcajuXQaWgPN7RMoLdYxNoWg6Sou8fPL8V8ycPBe70864m8eQ3iEdsWBT2DfRUDDErvV7EBE2HTUF8SlxFB8sifjPtnWXIy+tAkdpakXomYiU/5bNzJ2A3WriHH8PBJeFbZMXto7grLYRUhQM6ib+OoQ2RT/t/xFCG6i8eSkA/m8xcy9HmkdN7w7FUUQwEOSuEf/Hx898SdamvWxfvZN/P/A+BQcKcLgrV4npNg2b3caHz0wh0Ay7OIUmGHrm8Qw/bwh2Z+XYnB4nlz/QMndu1sRRKeQAwnE8Iv17RMYMSPgrhNZD4Z+R+b9HHhhhtU8rQwbXgj+aGU51s4a6JvYk6NF3hkn/nOi5+uAKZNFf6/g8haLxmfP5QvZu21cpjeIv9bP+101c/efxxKfE4Y53YXfZiU+JJ+ALNF2VSpV/vg6XgysfvpiHP7iT4ecPwe604YpzkpgWz90Tb6b/yD5NE1eMOSpTK4cQQiClH4oep9IsV5Yg866Hz
LkI4bRqx6vm0wHQwXZ8WfqkavPk6nLqkXLmwvIxF5HTKgBo6Vjt4CLlDQ3wfoNM/Fuz2OgqFNFY8fMafMWRt7XHJcXxSfYkdm/ci27XuXnAvTX6mcQaTReYhkTTNdLbprBnyz669O/Enz++l+KDJRTlFZPZMR3ddmQ4MEbiqJ2RH0J6PyOy4Brg//nw7yMhHOC+FGzdOdxv8xDV/DDq/cF5MZWnAxKCa5H7T8IseLjcJbHS49yXUH13acPaWKRQtCBad84IS1MAaLpOWtsUbHYbnft1wOVxIpra4VBSvtnINEyyNmfz5FUvM+2tHwGIT46jTddWR7SIQ8ObL/9NCLFSCLFcCPG9EKJtrAKLGWY+EUVXmmAWACBcXVL+jgAAGJVJREFU52Dl0sOvEa7RiNR3ynZ51hJjHfi/jnDCDwSsmXXR02Fnha09JL9I1L8WvWuYA6NC0dyced1pYUIoNIE73sWwsweWH0trm4InoZZ9ahsRf6mfSQ+8j2EcPZbSDf14fFZKOUBKORD4BvhzDGKKKcJ5GhBJ/ExwnGRd4xgEnt9itVHTsNImTkh4BKGnWVUmjsFUP1uuSLDsV7Qcug9KP4k4K8f3LZEzXm5EksqRK1oeqa1TePK7R8jsmI7T48ThstO1fyde+OWv2OyHf5Y1TeOM341sxkgP4y32UZRX3NxhxIyGNl+uaE4cR4so66+C81SwD4DgUg7nwV3guQJh61B+mZb4CNJ9IdI3A4QD4RpnVbMcwjeDqCmYemFYtrfCgQxuhOBKJAb4vic8X///7d15fJxltcDx33lnz9KkTVsLXWhlhyJbyw5l3ykqIAiCXi4WkPUigohawQ9YloogXqAgH1T2pYCyiIBsAhcoZZddFgFpSxeaNpPM8p77xzNtk3YmM8lsmcz5fj75wGRm3jlvk5x55nmf55wANJ2GhLcu4esbUzoTd9yIGz/4Xz57/3NCkRAjxw7P+rgNJq1LpCFclbrj3XkBj8aWwfPptuiLnSJyAXAM8CWQszasiEwDpgGMG9f/msZ9l2bVxhxw89Y+hKeg6fmQ/rfrfxloQ0ITkdDEHs9WVbTjVtcUopS8oShN6OJToOuJTFxpcl7oTPfWENqYykun0jxw3aM8cO0jJBNJtj1gaw4/62CGDGvO+ZzJ+25BtTc4RxoiHHzSPoTCg2fRQN6iWSLyCJBtzdy5qnpvt8edA0RVdXq+Fy130azu/OW3QPsM1uzbGQLEXdDUBMSmIkPOR6Tne5u/bBYs+12W5xdDoOUK8OdB+6WsuSJmdWFoPAGv+WQANDEHjd8DmkRiB0B4Z9v5aSpu+jcuZs7fXiERXzW6Fk/41o+mcuwFR+J52Wdu/37LU8w87mrUV5KJJCgEQ4GyN2AORUJ4AY+pP9iH//7VkQQCtXeBs99Fs1S10J0yNwP3A3kTeUV13k32JLyivGzmlzB+HxpYG2k6eeUjVBOw/Koczy9CeAckMhmdvyf5kzggAaThUAD89stg+Q2Z5yna+VfXOq7lYkvmpmLenvM+Lz78ao8kDqC+Mvs3D9AyfAiH/XBq1ufu/u2d2WznTXji9mfoiifYco/NaF/YziM3PcXjtz5d8li9gLDdgZM4/ZrjaWptGFQj8RWKXbWyfrebU4Fs5QKrrNAfWics/1PPb/mLoOTNkgXCu6LzpwC5LrYIEHb1z70RSOs1SGAUmvoYll+Pe2NZ8Ukq7ubVky+WOE5jcnvj6bfwc6z6SHYlue3ie7Pet8KIMW0cesZBHHXuIWyy3QaM3Wg0w0cPo6Wtufc9eP2w0Tbrc9YNJzF0ZMugTOJQ/Bz5DBHZEDcJ/RFwQvEhlZY0HI4ufX21ErU5aHvP294wECn9JdxlF/R+v7c2DPsDQsItOZTM+23Xk2Rfvx5HOx9Fwmt84jKmLIaNaiUQCuTss7l0YXvW72cz99HX+PnBF5FOpko+vbLB5HW5/Ok8f2+DQFEjclU9RFUnZpYgHqSqn5YqsJKJHpipo
xLFjXIbyb1Oeyyqq34xRcIQyt23s3/yvSuEkNZL8YLjkOB6q5I4oIlXyb1yZvBcgTcD3/ZTJxHOsglohQkTC1vQ4Ps+F3/3t3R1dPU5iQdDAcLR3DFEGiL8zzXH9+mYtWrQ7+wU8fBaZyJttyHNZyFDfglDbwCybExIf4AuOmplMtfku5B4vILRhmDIz7IuM/SX/xG6/pz7qeFVLblU4/jtv8GfPwV//i747TOt4JYpqUgswqWPnUfb6GFZ7gtz4mXfK+g4n38w31Ui7Id02uf8e89mw8nrIiIEQwEaWxqINUfZfLdNmfnYL1hviwn9OnatGdS1VrqT0MYQcgVxBPBjB0L8jjUfmHwFuh6G6H5ox80UdDGylKJrVl9Tf1lmdUuuLuNRRNzOVFUfXXh0popipv7F8hvQrqeg7S5Eau9KvRmYJkwcxy0fX82Tdz7L3Vc8wLyPvmD8pmM45heHs/G26+c/ABBtjPS7X2cwFOCdOe9z5XMzSKfTeJ5Xtxf86yaRr6Hz0Rx3+Gj8PrdtP13pmaIAknoLwpv3/HbyNXrfVapu0xNA4pnMmvPuRYy6IP0hJJ6CyK6lDNjUORFhymE7MOWwHfr1/GGjhrLBpK/yxjNv55x1DIYCILJGxcRkV4p3534AUJNLCUtp0E+t5JZrdEtmHh0IT65MKKteOHu9cq+FXj8ZNE5b1awi+ZrrF7o67ci8IRgzsHz/oqNz3hcIBtj/+3tmLWoVjoZYb6v6mDrJp34TefSg3PfFMutfG46k4h9agll+MYMbk3tEHkYiu6y6GVgbJJrtwKjW74/bDFyL5y0h1pTtdxa23GMip1x5HOtvPaFHhUURIRwNs/9xfShmN4jV719204m4vp6rC8HiE/C/nI5IDFp+S8kXtuYUR7+cjiZe6PFdEVlZ4Cur7n1Io/vgKjmuHnMKll+N33FbqYI1piRGTRiZdZ48GAqyzqauHtKF9/+EvY7ZhXAsjHjC5rttyuXPXEDriJZKhzsg5d2iXw6V3KKfi7/sSlh2DT3nkruLQfMZkPoA4nf18rhyiCJttyOhjVZ+R5P/RBcevlocQfBGQmgztwZel0N4J1co7Msfujoya4ggI5/O2TfUmGo4ZbtzeO/lD3vMg0cbo1z72kxGjR/Z47GqWrcXNXNt0a/fEXnH7fSenOOw7PdVSOIACdc8uhsJbYIMvQoCY3HTLB6uav5n0PWQu8iZfAWWXwtLfgCBdbMfWkKQeK7cJ2BMn1z44Llsd8DWBMNBgqEA4zYezYyHfrpGEgfqNon3pn5XrWStMrga/TJTVKvSidyH5BsuBFWXoFPvQnA8tD3s+ot+eQbZ29MlXMOM9L/J3nIOt/XfmAGkeWgT0+86k654F4nOJM1Dm6odUk2p30Qe2Qfit5O7ZZsH4UmQeL6SUa2S/hi/4y8Qv9E1jlZcuYDAOuBtSPYkvkIS0h/jfryrv2EFILxNuaI2piiRWIRILEu3LtOrup1akeZTXUd7yba1PQTSiAz5WWbddW+/WB6EtqLw4lyF8mHp2W5krnEg7pYQpt6GxAMFPD+J284v7hylEaQZGXqtKz1gjBk06nZELt4wGP4AxO9Hk3OBZtB5kP4EQlsijccigVHQeim69EKIz2bVKHjFdIUHEkNaLkSX/AhSpV6nne3Tgk/vo/HVHwsEt3IdkaK7rNwBaowZPOo2kQOIRKHhEIRDenlMBGk5Dx3yM1SXQsds6LwP0ksgOBaCE9DOpyD1egUj76PkP6D9X6j/KcQORTybfzRmMKnrRN4XGr8f2qcDXqZGuQ/JRZB8Djf1MvDalfbgfwbtM9GOP0Lb3Yhn62+NGSzqdo68LzT5Fiz9mZuj1mW4xg5drNo2X+lVLf3VBel56PLfVzsQY0wJWSIvgHbcSuHz0gNdEjofrnYQxpgSskReCP8Lei2yVWu83F3OjTG1pySJXETOFBEVkeGlON5AI5Hda3ATjZC9RkwMaTgGTczBX3Qc/oK98Jecj
aY+qnSAxpgSKTqRi8hYYC/g4+LDGaBiB0JgAq5d3Aoe7lpxiIF5zdiD2HGu/6c0gDQBYWj4NqpBdNGxkHgS0h9B55/RhV9HU+9VO2hjTD+UIgNdBpwF9N42u4aJhKHtVrTjTuh8AKQJafwOeK3o0hmQfKnaIWaRhvgfYditiKQhvcA1n/Da0AU70bO+eRq0A22f6eq5GGNqSlGJXESmAp+q6iv5CtmIyDRgGsC4cYU1Zh1IRKIueTd+Z+X3NPlPSL5O7obIZY8KV4o316qZBCy/Bhl6xcqNp5r+AvxsHc4VEnPLE6YxpqzyJnIReQQYleWuc4GfAHsX8kKqOguYBa6MbR9iHLi6Hqeqq1mC60H0CFh2MdmTuULX3/A/39RVPYzuB02nk3PNu9eGJt9El18HqQ8hvPWqHa7GmAErbyJX1T2zfV9ENgMmACtG42OAuSKyjap+XtIoByqJ4P4Jq5DMw7tA60wgDMkXoStX/RXffWkS4vdA4iXXHanzPnom/xhEdsnUPE+456TeROOzoW02Eqy9T1HG1It+X+xU1ddUdaSqjlfV8cAnwFZ1k8QBovtTmu5BfT1GwE2PzN8W5m8J/gKI7Ev+wl1pSP8HortBdM/M4xuAKDROg84HcXPnK5ZapkCXoe2/7mN8xphKsnXkRZDAWjDkl8UfyMvRBCKnEKReYuVoO/kCdD0JsaMKeG4nJN9k1Y++E8JbQ3QK+AuzPN6HxLN9jM8YU0klWzeXGZXXHQm0ogTo/wVPDwKt4IfI3+xiOET3hs6bs9zXAV2F7NgUV8nR/2LV6yWehcXHkXvuvLC6LJp8G43fDdqBRPeG8I7WzcWYChiIC6BrS9GrVnxIziX/zlGB0HjovLWXQ31WwOsFwV9CzzcNH/w4BDdy9c67z/lLDBr+K+9R/eU3QftFrJhf184/Q3hnaL3CkrkxZWZTK0VQVdQbQc+NQv1RyPZ/heScPI/NtxjIA28ErujX6uIQ3BTC2wIRkGb339jhSMMRvUfmL4L2GfSYX9cOSDzlNh0ZY8rKRuT95HfcA8suzcwr10AdFmkA1V5G7Q1IeDOk4Xw0/SmkP4Pgeog3NP+xu54BCa7Z21Q70M4HkciUosM3xuRmI/J+0M6HYOnPwZ+Pm1ZR3MoTL/PVDAyw2izyFdwbTq5Rewe6dDr+ommgPhKeXFgSB9egOiuPAffvYMwgZIm8H7T9cnpucQeXIAO4JX3tZJ++qCL/A3LvAF0xh52CxJPowkNRf2nhx47sTPY3iDDS8M0+hWmM6TtL5P3hf5rjjiR5m0zIcAbeP3v3JOyDxt1GoAKJxJDWq7o1eW4AwtB0ChLarOTRGmN6sjny/gis2/8enZE9IPUypN4ne3PlSgqyci16D52Z1TiFk8j2MOIZSDzhVsBEdkICI0sVqDGmFwNtaFgTpPlM1lypEsIVsMoj/Q7e8L9A9LAyRNYXAQjvgOs3urooBDfu8xHFa0Ci+yEN37QkbkwFWSLvB4ns4Mq9BjcGIhAYD83nu8JU+QQ3dsv1Ou8od5i9ix6KDL0WguvT8w1IQCJIwyHViswY00c2tdJPEtkRifQswa6hcejiE0BTZL/Y6UHTqZmemdUqfZuJw2tyG3WG3YC2/xLi9wMpCG+DDDkP8VqrGJ8xpi8skZeQhCfDyGch8Ty6/HZI/LXnA6JHIroE9fNcEO23IIXNu4eRmFtNIl4T0nIROmSGu227MI2pOZbIS0wkDJGd0GVXskZi7bwR7bwj03atHPIl8QYgDc1nI6H1e9xjCdyY2mWJvAw09SEk/0n2xNqV2QFZ6Oi5RLwR0PRjJLoj4g2r3OsaY8rOEnk5+PPdhU9dfdPQ6iqYzJt/ihfbrzKvZYypKFu1Ug7BDUHzdQ1KUbkaLSEksnOFXssYU2mWyMtAvBZoPM6VgM35oAYIjKH8H4oCEJyMLjkV//OJ+PMm4S+9BM37RmOMqRWWyMtEmk5FWmZA6Gu4i4yBbveGwVsLhv4hs
xa9gI1E/aaQegESTwMJ0KXQ8Sd0yWllfE1jTCUVlchF5Bci8qmIvJz52r9UgdU6EUGi++G13Yl8ZS40n+eSdmACNE5D2u7AC47GG34XtN0L3mhK0/9zdT6uBkz3eiqd0PUP/OS76OqlZ40xNacUn+svU9VLS3CcQUvEQxq/BY3fynq/F1oXf8g5sOSUCkaVgoUHoQga3gZp+RUSWLuCr2+MKRWbWqkC7XoWf+FR+PN3xl98App8EzpuJH+Hn1JK40brabeBaeHhNm9uTI0qRSI/WUReFZHrRaTATgT1y48/iC4+HpIvgD8Puh5DFx6RWXdejGJ+lGnQZdD1aJExGGOqIe9fv4g8IiKvZ/k6GLgKWBfYAvgPMLOX40wTkTkiMmfBggUlO4FaoqrQfgE9m1Iori5LkUsRA1viKhkG8j0yR3BxSH1UXAzGmKrIO0euqnsWciARuRa4r5fjzAJmAUyaNKmScwgDh7aDvzjHfSlcadzuST7mmiDr/PzH9sLQcgMsPtYl5b6SGIQ26vvzjDFVV+yqlbW63fwG0M9uC3VCYuR87wysBS2XgLc24Ll6LE3fh2CByTX1pmuCrIWO7Lv/6MMQGA1h2zRkTC0qdtXKxSKyBW5+4EPg+KIjGsREQmjDEdBxCz1G3hKDxhPwYvug0b1x7eLCiHj4wb/CkifzH9xrheSL5G01h4C3AYQnQtfD7nb0QKT5DET6OS1jjKmqohK5qh5dqkDqhTT/yK0Oid8JEgAEGn+AxL7u7hehe/ch0TRKCLcWvBeN09yoPPE8ueu3ZH7c/nvQNR8av4c0noCIldwxppbZX3CFiQSRlulo85ngL4TAKFf6NheNgwRBcyVyD0Lbu/ri6U/Q+J2Z+fYVwiDRTAGvbssLdTEsm4WmPkJaLynBmRljqsXWkVeJeI1IcFzvSRwgslOOee8ghHdFhl6HDLvebToKjkOGXu+aQxMEQhCenCnglW2NeCd0PoimPy/6fIwx1WOJfICTwChoOgk33ZL5cUkMInsiQ69BIjv1aAoh4a3xRjyIjHwO+cpcJLIHvW40kgik3i/nKRhjysymVmqA13QCGt4ejc8G7URi+0F4l167+ojXDIAGRmWmZnJcBNUEBNYpR9jGmAqxRF4jJLw5Et6870+MTAFpBO1gzZF5GCI7IMExpQjRGFMlNrUyyImEkGE3Q3BT3Pv2ilF8CGKHIK2XVzE6Y0wp2Ii8DkhwHDJ8NppeACRRaXMJXux93JjBwBJ5HZHACPffKsdhjCktG5IZY0yNs0RujDE1zqZW6pD6HWj8L5B6HYLrIrFvuIbRxpiaZIm8zmh6HrrwEPDbcXXQo+iy30HbbUjwq9UOzxjTDza1Ume0/SJX44UVNcs7QZeiX/60mmEZY4pgibzedP0d16+zO4XkXOvZaUyNskRed3LNpnnYr4Mxtcn+cutN7OvA6hUXgxDZ3eqSG1OjLJHXGWk6A0KbgDQAUVeHJbAO0nJ+tUMzxvSTDcHqjHgNMOw2SL4EqXdc5cPwtrZd35gaVnQiF5FTgJNx/cXuV9Wzio7KlJWIQHgr92WMqXlFJXIR2Q04GPiaqnaJyMjShGWMMaZQxX6ePhGYoeq6Fqjq/OJDMsYY0xfFJvINgJ1F5DkReUJEJud6oIhME5E5IjJnwYIFRb6sMcaYFfJOrYjII8CoLHedm3n+UGA7YDJwu4h8VVXXaBKpqrOAWQCTJk3qpYmkMcaYvsibyFV1z1z3iciJwOxM4n5eRHxgOGBDbmOMqZBiV63cA+wOPC4iG+B2mnyR70kvvvjiFyLyUZGvPdAMp4BzH2Tq8ZyhPs+7Hs8ZBt55Z+2ULllmQQomImHgemALIAGcqap/7/cBa5iIzFHVSdWOo5Lq8ZyhPs+7Hs8Zaue8ixqRq6uy9J0SxWKMMaYfbDufMcbUOEvkpTOr2gFUQT2eM9TnedfjOUONnHdRc+TGGGOqz0bkxhhT4yyRl4GInCkiKiLDqx1LuYnIJ
SLyloi8KiJ3i0hrtWMqFxHZV0TeFpH3ROTH1Y6nEkRkrIg8JiJvisgbInJatWOqFBEJiMhLInJftWPJxxJ5iYnIWGAv4ONqx1IhDwMTVfVrwDvAOVWOpyxEJAD8DtgP2AT4tohsUt2oKiIF/FBVN8bt4D6pTs4b4DTgzWoHUQhL5KV3GXAWUBcXH1T1b6qaytz8P2BMNeMpo22A91T1X5llt7fiKn8Oaqr6H1Wdm/n/dlxiG13dqMpPRMYABwDXVTuWQlgiLyERmQp8qqqvVDuWKjkWeLDaQZTJaODf3W5/Qh0ktO5EZDywJfBcdSOpiN/gBmR+tQMphHUI6qM8RcR+Auxd2YjKr7dzVtV7M485F/cx/KZKxlZBkuV7dfGpC0BEmoC7gNNVdWm14yknETkQmK+qL4rIrtWOpxCWyPsoVxExEdkMmAC8IiLgphjmisg2qvp5BUMsud4KpwGIyHeBA4E9slW+HCQ+AcZ2uz0G+KxKsVSUiIRwSfwmVZ1d7XgqYEdgqojsD0SBISJyo6oO2F3sto68TETkQ2CSqg6kgjslJyL7Ar8GpqjqoK16KSJB3MXcPYBPgReAI1X1jaoGVmbiRiV/ABap6unVjqfSMiPyM1X1wGrH0hubIzfFuhJoBh4WkZdF5OpqB1QOmQu6JwMP4S743T7Yk3jGjsDRwO6Zn+/LmZGqGUBsRG6MMTXORuTGGFPjLJEbY0yNs0RujDE1zhK5McbUOEvkxhhT4yyRG2NMjbNEbowxNc4SuTHG1Lj/BwijQn9Pc6xEAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "classes, points = generate_data(1000)\n", + "one_hot_classes = dc.metrics.to_one_hot(classes, n_classes)\n", + "gen_points = gan.predict_gan_generator(conditional_inputs=[one_hot_classes])\n", + "plot.scatter(x=gen_points[:,0], y=gen_points[:,1], c=classes)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "StyDTNfRqSV8" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "16_Conditional_Generative_Adversarial_Networks.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/14_Modeling_Protein_Ligand_Interactions_With_Atomic_Convolutions.ipynb b/examples/tutorials/14_Modeling_Protein_Ligand_Interactions_With_Atomic_Convolutions.ipynb index 67b663ff92bf2689ab99257434e96dbd234a75af..16488db39d2348c694f5ebf3b09b656403e01d04 100644 --- a/examples/tutorials/14_Modeling_Protein_Ligand_Interactions_With_Atomic_Convolutions.ipynb +++ b/examples/tutorials/14_Modeling_Protein_Ligand_Interactions_With_Atomic_Convolutions.ipynb @@ -81,27 +81,26 @@ "metadata": { "id": "Y2xCQyOInB_D", "colab_type": "code", - "outputId": "12357784-e2a1-4f7c-d053-23a2b8c335c5", "colab": { "base_uri": "https://localhost:8080/", - "height": 462 - } + "height": 323 + }, + "outputId": "6923424c-4066-497a-eae5-57a08ff43960" }, "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" ], "execution_count": 1, "outputs": [ { "output_type": "stream", "text": [ - "TensorFlow 1.x selected.\n", " % Total % Received % Xferd Average Speed Time Time Time Current\n", " Dload Upload Total Spent Left 
Speed\n", - "100 3477 100 3477 0 0 15117 0 --:--:-- --:--:-- --:--:-- 15117\n" + "\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r100 3489 100 3489 0 0 27046 0 --:--:-- --:--:-- --:--:-- 27046\n" ], "name": "stdout" }, @@ -114,41 +113,82 @@ "done\n", "installing miniconda to /root/miniconda\n", "done\n", - "installing deepchem\n", + "installing rdkit, openmm, pdbfixer\n", + "added omnia to channels\n", + "added conda-forge to channels\n", "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" + "conda packages installation finished!\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", + "# conda environments:\n", + "#\n", + "base * /root/miniconda\n", "\n" ], "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "jFQmra_fFE8U", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 }, + "outputId": "77fd2bd3-f934-433f-a090-868611976583" + }, + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ], + "execution_count": 2, + "outputs": [ { "output_type": "stream", "text": [ - "deepchem-2.3.0 installation finished!\n" + 
"Collecting deepchem\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/b5/d7/3ba15ec6f676ef4d93855d01e40cba75e231339e7d9ea403a2f53cabbab0/deepchem-2.4.0rc1.dev20200805054153.tar.gz (351kB)\n", + "\u001b[K |████████████████████████████████| 358kB 2.8MB/s \n", + "\u001b[?25hRequirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n", + "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n", + "Building wheels for collected packages: deepchem\n", + " Building wheel for deepchem (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for deepchem: filename=deepchem-2.4.0rc1.dev20200805144642-cp36-none-any.whl size=438624 sha256=7e5b9b5d387726c10af3665c3fabc3cf8955c98122717ba2e3ccdb016174e99e\n", + " Stored in directory: /root/.cache/pip/wheels/41/0f/fe/5f2659dc8e26624863654100f689d8f36cae7c872d2b310394\n", + "Successfully built deepchem\n", + "Installing collected packages: deepchem\n", + "Successfully installed deepchem-2.4.0rc1.dev20200805144642\n" ], - "name": "stderr" + "name": "stdout" }, { - "output_type": "stream", - "text": [ - "CPU times: user 2.78 s, sys: 630 ms, total: 3.41 s\n", - "Wall time: 2min 7s\n" - ], - "name": "stdout" + "output_type": "execute_result", + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 2 } ] }, @@ -164,7 +204,7 @@ "import os\n", "from deepchem.utils import download_url" ], - "execution_count": 0, + "execution_count": 3, "outputs": [] }, { @@ -180,7 +220,7 @@ "dataset_file= os.path.join(dc.utils.get_data_dir(), \"pdbbind_core_df.csv.gz\")\n", "raw_dataset = dc.utils.save.load_from_disk(dataset_file)" ], - "execution_count": 0, + "execution_count": 4, "outputs": [] }, { @@ -188,18 +228,18 @@ "metadata": { "id": "snei1ST1nB_a", "colab_type": "code", - "outputId": "64b72921-1c6f-4cff-8608-da71b6ffdf2a", "colab": { "base_uri": "https://localhost:8080/", "height": 170 - } + }, + "outputId": "e64c16d0-8b1a-47a6-8e92-b6895341d4ab" }, "source": [ "print(\"Type of dataset is: %s\" % str(type(raw_dataset)))\n", "print(raw_dataset[:5])\n", "#print(\"Shape of dataset is: %s\" % str(raw_dataset.shape))" ], - "execution_count": 4, + "execution_count": 5, "outputs": [ { "output_type": "stream", @@ -249,7 +289,7 @@ "import numpy as np\n", "import tensorflow as tf" ], - "execution_count": 0, + "execution_count": 6, "outputs": [] }, { diff --git 
a/examples/tutorials/15_Synthetic_Feasibility_Scoring.ipynb b/examples/tutorials/15_Synthetic_Feasibility_Scoring.ipynb index 107def8a1650b793283d631f7f67d1b4495b225f..bba2d68c56922bbffeb2b0680102166c75fe7705 100644 --- a/examples/tutorials/15_Synthetic_Feasibility_Scoring.ipynb +++ b/examples/tutorials/15_Synthetic_Feasibility_Scoring.ipynb @@ -59,28 +59,26 @@ "metadata": { "id": "IlFeRa3qpbFz", "colab_type": "code", - "outputId": "2a19bfc9-96a5-4e1b-b33d-ba937745d93e", "colab": { "base_uri": "https://localhost:8080/", - "height": 462 - } + "height": 323 + }, + "outputId": "2836932a-eae7-487c-b20d-54607c452046" }, "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')\n", - "import deepchem as dc" + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" ], "execution_count": 1, "outputs": [ { "output_type": "stream", "text": [ - "TensorFlow 1.x selected.\n", " % Total % Received % Xferd Average Speed Time Time Time Current\n", " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 13852 0 --:--:-- --:--:-- --:--:-- 13852\n" + "\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r100 3489 100 3489 0 0 11948 0 --:--:-- --:--:-- --:--:-- 11907\n" ], "name": "stdout" }, @@ -93,41 +91,82 @@ "done\n", "installing miniconda to /root/miniconda\n", "done\n", - "installing deepchem\n", + "installing rdkit, openmm, pdbfixer\n", + "added conda-forge to channels\n", + "added omnia to channels\n", "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed 
in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" + "conda packages installation finished!\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", + "# conda environments:\n", + "#\n", + "base * /root/miniconda\n", "\n" ], "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pveyx31SFSp7", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 }, + "outputId": "9ab163f3-3f4b-4a12-9494-cc41c730353c" + }, + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ], + "execution_count": 2, + "outputs": [ { "output_type": "stream", "text": [ - "deepchem-2.3.0 installation finished!\n" + "Collecting deepchem\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/b5/d7/3ba15ec6f676ef4d93855d01e40cba75e231339e7d9ea403a2f53cabbab0/deepchem-2.4.0rc1.dev20200805054153.tar.gz (351kB)\n", + "\r\u001b[K |█ | 10kB 24.2MB/s eta 0:00:01\r\u001b[K |█▉ | 20kB 3.0MB/s eta 0:00:01\r\u001b[K |██▉ | 30kB 3.7MB/s eta 0:00:01\r\u001b[K |███▊ | 40kB 4.0MB/s eta 0:00:01\r\u001b[K |████▋ | 51kB 3.5MB/s eta 0:00:01\r\u001b[K |█████▋ | 61kB 3.8MB/s eta 0:00:01\r\u001b[K |██████▌ | 71kB 4.3MB/s eta 0:00:01\r\u001b[K |███████▌ | 81kB 4.5MB/s eta 0:00:01\r\u001b[K |████████▍ | 92kB 4.6MB/s eta 0:00:01\r\u001b[K 
|█████████▎ | 102kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████▎ | 112kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████▏ | 122kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████▏ | 133kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████ | 143kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████ | 153kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████ | 163kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████▉ | 174kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████▊ | 184kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████▊ | 194kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████▋ | 204kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████▋ | 215kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████▌ | 225kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▍ | 235kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▍ | 245kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▎ | 256kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▎ | 266kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▏ | 276kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████ | 286kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████ | 296kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████ | 307kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████ | 317kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▉ | 327kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▊ | 337kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▊| 348kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 358kB 4.7MB/s \n", + "\u001b[?25hRequirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n", + "Requirement already satisfied: scikit-learn in 
/usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n", + "Building wheels for collected packages: deepchem\n", + " Building wheel for deepchem (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for deepchem: filename=deepchem-2.4.0rc1.dev20200805144657-cp36-none-any.whl size=438624 sha256=cbcfac6df825ca0d1f04e9343b677d876d7b822b02312f748878e9bb85826da7\n", + " Stored in directory: /root/.cache/pip/wheels/41/0f/fe/5f2659dc8e26624863654100f689d8f36cae7c872d2b310394\n", + "Successfully built deepchem\n", + "Installing collected packages: deepchem\n", + "Successfully installed deepchem-2.4.0rc1.dev20200805144657\n" ], - "name": "stderr" + "name": "stdout" }, { - "output_type": "stream", - "text": [ - "CPU times: user 3.04 s, sys: 870 ms, total: 3.91 s\n", - "Wall time: 2min 10s\n" - ], - "name": "stdout" + "output_type": "execute_result", + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 2 } ] }, @@ -136,11 +175,11 @@ "metadata": { "id": "d3QTjXKwpbF9", "colab_type": "code", - "outputId": "3772afbb-5873-4681-8005-70772a82e50c", "colab": { "base_uri": "https://localhost:8080/", - "height": 306 - } + "height": 88 + }, + "outputId": "94711095-67df-4616-89a1-47246f6629aa" }, "source": [ "# Lets get some molecules to play with\n", @@ -149,30 +188,16 @@ "tasks, datasets, transformers = 
tox21_datasets.load_tox21(featurizer='Raw', split=None, reload=False)\n", "molecules = datasets[0].X" ], - "execution_count": 2, + "execution_count": 3, "outputs": [ { "output_type": "stream", "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/tox21.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "TIMING: featurizing shard 0 took 6.450 s\n", - "TIMING: dataset construction took 6.829 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.454 s\n", - "Loading dataset from disk.\n" + "smiles_field is deprecated and will be removed in a future version of DeepChem. Use feature_field instead.\n", + "/usr/local/lib/python3.6/dist-packages/deepchem/data/data_loader.py:198: FutureWarning: featurize() is deprecated and has been renamed to create_dataset(). 
featurize() will be removed in DeepChem 3.0\n", + " FutureWarning)\n" ], - "name": "stdout" + "name": "stderr" } ] }, @@ -238,7 +263,7 @@ " X.append([m1[0], m2[0]])\n", " return dc.data.NumpyDataset(np.array(X), np.expand_dims(np.array(y), axis=1))\n" ], - "execution_count": 0, + "execution_count": 4, "outputs": [] }, { @@ -264,7 +289,7 @@ "splitter = dc.splits.RandomSplitter()\n", "train_mols, test_mols = splitter.train_test_split(molecule_ds)" ], - "execution_count": 0, + "execution_count": 5, "outputs": [] }, { @@ -292,7 +317,7 @@ "train_smileslen = [len(Chem.MolToSmiles(x)) for x in train_mols.X]\n", "train_dataset = create_dataset(train_features, train_smileslen)" ], - "execution_count": 0, + "execution_count": 6, "outputs": [] }, { @@ -310,11 +335,11 @@ "metadata": { "id": "AZhS38JLpbGd", "colab_type": "code", - "outputId": "15cf4125-65be-4d99-9b67-00177940c456", "colab": { "base_uri": "https://localhost:8080/", - "height": 343 - } + "height": 34 + }, + "outputId": "471f4813-1d0d-4e42-819a-f703e0c407a3" }, "source": [ "from deepchem.models import ScScoreModel\n", @@ -322,42 +347,19 @@ "model = ScScoreModel(n_features=n_features)\n", "model.fit(train_dataset, nb_epoch=20)" ], - "execution_count": 6, + "execution_count": 7, "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. 
Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:54: The name tf.losses.hinge_loss is deprecated. Please use tf.compat.v1.losses.hinge_loss instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:55: The name tf.losses.Reduction is deprecated. 
Please use tf.compat.v1.losses.Reduction instead.\n", - "\n" - ], - "name": "stdout" - }, { "output_type": "execute_result", "data": { "text/plain": [ - "0.0" + "0.03990109920501709" ] }, "metadata": { "tags": [] }, - "execution_count": 6 + "execution_count": 7 } ] }, @@ -384,7 +386,7 @@ "import matplotlib.pyplot as plt\n", "%matplotlib inline" ], - "execution_count": 0, + "execution_count": 8, "outputs": [] }, { @@ -398,7 +400,7 @@ "mol_scores = model.predict_mols(test_mols.X)\n", "smiles_lengths = [len(Chem.MolToSmiles(x)) for x in test_mols.X]" ], - "execution_count": 0, + "execution_count": 9, "outputs": [] }, { @@ -416,11 +418,11 @@ "metadata": { "id": "CNgjQWQRpbG4", "colab_type": "code", - "outputId": "2e3d75f1-ac6a-491e-ec70-4d20b445d31e", "colab": { "base_uri": "https://localhost:8080/", "height": 920 - } + }, + "outputId": "28938618-5e6a-4470-cfef-b0a75878843f" }, "source": [ "plt.figure(figsize=(20,16))\n", @@ -430,12 +432,12 @@ "plt.ylabel(\"ScScore\")\n", "plt.show()" ], - "execution_count": 9, + "execution_count": 10, "outputs": [ { "output_type": "display_data", "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABJUAAAOTCAYAAAAGyjJNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdf5DkaX0f9s+zcwP0UTIN1klmGk4goQyV8wLj2wRU6z+kc/AQgU5TC0SiREVKXCIuO7ZjlSd1E12JQ0HetcaxXSmlUkFyUsgoZ4m7VefwKRoRrRTFBM7Z1XCMsBgLmx+XXsUgYLDEtWCYe/LHTu/uzPaP+X57+tu/Xq8qip1n57v99Ldnd6bf9/l8npRzDgAAAAAo4sy4NwAAAADA9BEqAQAAAFCYUAkAAACAwoRKAAAAABQmVAIAAACgMKESAAAAAIXdNe4NFPWt3/qt+RWveMW4twEAAAAwM65du/ZHOed7ilwzdaHSK17xirh69eq4twEAAAAwM1JKnyt6jfY3AAAAAAoTKgEAAABQmFAJAAAAgMKESgAAAAAUJlQCAAAAoDChEgAAAACFCZUAAAAAKEyoBAAAAEBhQiUAAAAAChMqAQAAAFCYUAkAAACAwoRKAAAAABQmVAIAAACgMKESAAAAAIUJlQAAAAAoTKgEAAAAQGFCJQAAAAAKEyoBAAAAUJhQCQAAAIDChEoAAAAAFCZUAgAAAKAwoRIAAAAAhQmVAAAAAChMqAQAAABAYUIlAAAAAAoTKgEAAABQmFAJAAAAgMKESgAAAAAUJlQCAAAAoDChEgAAAACFCZUAAAAAKEyoBAAAAEBhQiUAAAAAChMqAQAAAFDYSEOllNJnU0o7KaWPp5Sudvn9lFL671NKn04pfSKl9BdGuR8AAAAATsddFTzG9+Wc/6jH7/3HEfHdh/97fUT8j4f/z4g1t1uxubUb1/fasVSvxfrqcqytNMa9ralV9n6Wve7h5k48+tQzcZBzLKQU73j9y+O9a2f7XvMjP//R+Mi//vLNj89/10vil378ewY+1mve/evx775+cPPjP/P8hfjEe9408LpXbTwZ38y3Pr4rRXz64psHXveKh568Y+2zl1znOtdN6nXTsEfXua7oda986Mm47VtYpIj4zAgfz/fMO736J38t/vTg1k15wUKKT/3M949sj2/8B78df/CFr938+Lu/7YXx4Z/43oHXlf1aef3PfDj+7R9/4+bH3/4tz4unfvKNA6+r+r5U/TVd9vmVvZ9lX/eyPx+X/Xm8zM/+EdXfl7Kve5XvU6bhfVtE+deg7ON1XoPn/blX3T/wk48Zd/vbD0bEL+YbPhYR9ZTSS8e8p5nX3G7FxuWdaO21I0dEa68dG5d3orndGvfWplLZ+9ncbsX6Y08fuW79sacHXvdwcyc+8LHPx0G+8S/2Qc7xgY99Ph5u7vS85vg/1BERH/nXX44f+fmP9n2s4/9QR0T8u68fxGve/et9rzv+DSUi4pv5xno/3X5w6bfuOte5brzXTcMeXee6otcdDwkiIvLh+igez/fMOx0PFiIi/vQgx6t/8tdGssfjb94iIv7gC1+LN/6D3+57XdmvleNv9CMi/u0ffyNe/zMf7ntd1fel6q/pss+v7P0s+7qX/fm47M/jZX72j6j+vpR93at8nzLM+7Yy15V97cq+BmUfr9trUMSoQ6UcEb+RUrqWUnpXl99vRMQzt338/x6uMUKbW7vR3j/6F7C9fxCbW7tj2tF0K3s/3/OhT8b+sW+c+wc53vOhT/a97pc+9vlC6xHR8x+JQf94HP+HetB6x/FvKIPWAWBS9PpWNapvYb5n3ul4sDBofVjH37wNWu8o+7Vy/I3+oPWOqu9LWWW/pss+v7L3s+zrXvbn47I/jz/61DOF1juqvi9lX/cq36eUfd9W9rqyr13Z16Ds4w0TKEWMvv3tL+acWymlb4uID6eUPpVz/p2if8hhIPWuiIh
77733tPc4d67vtQut01/Z+/mVZ/cLrXdU/cMuAAAwHp2qk5Ou01vZ921lr6v6tRvX18pIK5Vyzq3D//9CRPxqRPyHxz6lFREvv+3jlx2uHf9z3pdzPpdzPnfPPfeMartzY6leK7ROf+4nAAAwCgspFVqnt7Lv28peV/VrN66vlZGFSimlF6aUvqXz64j4yxHxe8c+7YmI+E8PT4F7Q0R8Nef8h6PaEzesry5HbXHhyFptcSHWV5fHtKPT19xuxflLV+KVDz0Z5y9dGem8qLL3s15bLLQOAIxerx+9R/Uj+V09/uBe6/PgBQvdn3yv9WF997e9sNB6R9mvlW//lucVWu+o+r6UVfZruuzzK3s/y77uf+b5C4XWO85/10sKrXe84/UvL7TeUfV9Kfu6l70vZV6Hsu/byl5X9rUr+xqUfbxB93qQUVYqfXtE/POU0tMR8S8i4smc86+nlP5qSumvHn7Or0XEv4mIT0fEz0fEXxvhfji0ttKIixfORqNeixQRjXotLl44OzOnv1U9iLzs/Xzkwfti8czRf2UXz6R45MH7RrJPptc/+qHXFVrveOcburcL91rvKBt49joN5iSnxJTR6PFfh3qtd5TdZ9nXoezj3b3Y/Vt0r/WOKu9L1a9B1deVfcM4Lc9vWv4uVH3dZy69+Y7X+CQnepV9vE9ffPMdb7pOcmLStNzPMtd96me+/44g4SSngJXd44d/4nvveLN2kpOWyn6tPPWTb7zjjf1JTuWq+r5U/TVd9vmVvZ9lX/dPvOdNdwQXJzl17Jd+/HvuePN+klPO3rt2Nt75hntvVpsspBTvfMO9A0/0qvq+lH3dy96XMq9D2fdtZa8r+9qVfQ3KPl6316CIlKesF/PcuXP56tWr494GE+z8pSvR6tLf2qjX4iMPPTCGHfVW5mjKfidu9PomX+Ya1/W+rtcJCYO+AZZ9vGG+psscK9rtNJuIk/2gXOZruuzza263Yv2DT8f+c7d2u3gmxebbXzuSkLzqxyv7Ogzz+hVV5WONwzR9P6la2aOVAYDJlVK6lnM+V+SaUQ/qhoio9ofPaRpEvrbS8EP4FHr7uXvjo//my3FbthBn0o31fp63kOIbXU4wed6Asu5hvqbfu3Z2YIh03FK91vWN9KC+8U6VYOd0jE6VYET0/TpfX10+cl1EgZbcbv+JeEQ2t3aPBEoREfvP5djc2h3J3+Oyr0PZ68qo8rHGYaivzRnn+xcAEDHiQd0Qceu/7t/ejrb+wadH1o4264Ozy7TEGPB3uja3duNYthDP5Rh4rOjPvu21XTOQn33ba/teV/XXdNm+8bLHrZYtKd7c2o39YyHd/kEe+HhlVR1YV933X+Uep8Wst4sDAAxLpRIj98gTn+z6X/cfeeKTI/nBfNb/y/LfvfCa+Ilf+fgdVTJ/98Jrel7zhu98cdd2rTd854tHscXSUoro1pE7KPtKET1bcPp54fMW4mvfOOi63k/ZcKHz9V60aq/qr+my++xWsdJv/fhjFv33oOqQp+qqnLKvQ9nrqtzjNFGRAwDQm1CJkdtr7xdaH9asv8lZW2nE1c99+Y45Of2e32e/1P1Ndq/1jkaPN9GDhvCWVbvrTDy7/1zX9X56TYYbNDHu2S6BUr/1jvrdi/GVZ+/8+q3fPfjkvjJvUMfxNV1mnwspxUGXVHBUFXFVhzzjCKzLBhpVBiFCFwCA+SVUYiZNy5ucMrOmmtutePxa6+ab94Oc4/FrrTj3HS/peW3ZCpKq30R3C5T6rXeUDTPKhlG9zjcY5bkH0/A13e016Lc+rGmp4AIAgFklVGLkXtyjquPFJ6jqmGVlhxr3m1vT67ozKe6YAdRZ76fqN9Fl29+qbu/7ao8qu17r86LqyrZpqeACAIBZJVRi5N79A/fF+mNPHxmou7iQ4t0/cN8YdzV+ZcKhiHJVR90CpX7rt6vyTXTZCqCy7X1lvai22LV980W1+Q5Kp6k9DAAAGJ5QiZEbRzVBmbayqpUdMlz
13JppUPXA5v2D7u14vdZPwzR8TWsPAwCA+SJUohJVVhOUbSurWtkhw1XPralSvUcFUH1ABVDVA5u7nRjXb31Y0/I1HaFyCAAA5kn/I5VgCvVrK5sk66vLUVs8enT9SVqFes2n6Te3plcoMyisqdpbXvvSQusd66vLsXhsQNTimTTStqsqTcvXNMCsaG634vylK/HKh56M85euRHO7Ne4tAcBEEioxc6puhSprbaURb72/cbNtbSGleOv9g6s8yoRRjzx43x1Duc+kG+uDlPnBurbY/Z+WXusdv/WpLxZaP+J4998JugF7tQwOaiWsOqSblq9pgFnQqQ5t7bUjx63qUMESANxJqMTM6dXyNKpWqLKa2614/FrrZtvaQc7x+LXWwB9a11YacfHC2WjUa5HiRoXSxQtnB4ZRx4OSk8xgam63Yv2xp4/8YL3+2NMD9/iCY6HXoPWOsuHJ5tbukUHwERH7B3lgJU/ZVsKyFVVlTcvXNMAsUB0KACcnVGLmlG0rG0aZap5hfmhdW2nERx56ID5z6c3xkYceGBgobW7txv6xo972nxscurznQ5/sGta850Of7Hvd3rN3zkXqt95RNjwpG0aVaSWMGLKiqoRxfE0DzCvVoQBwckIlZk7ZSp6yypbJdxss3W99GGV/QP5KjxCo13pH2XCobHhSv7tHO1qP9WEfr+o3HFV/TQPMM9WhAHByTn9jJlV5AlW/iqN+e0gR0a3J6gSjgAofL1/16Wjrq8tHTiuLOFlYU/ZI+l7daoMOxCv7eFXfzwinqgFUpez3MACYR0IlGFLZqpVeeceAHKTU8fLrq8ux/sGnj7TAneR0tHptMfbad1YlDRpIXTas6VxbNDzptsd+68M+njccALNrmO9hADBvhEowpKqrVspWRh0fPj1oGHXEjdPhuoVRJzk1rsrKmpS6VyWdYBZ5Kd5wAMw21aEAcDJCJRhS2aqVMyniuS5ByJkBQUiZyqhHnvjkHY/1XL6x3u+H5mkJT8q2vw3DGw4AAGDeGdQNQyo7RLlboNRvvaPMANFh2sMAAACgG5VKzKSig6yHVaZqpdGjbW7QcfZVzvMpM79pHMrOfoJJV/W/ZQAAUIRKJWZOJwhp7bUjx60gpLndGvfWjlhfXY7FY71uJxmeXeXx8v3mN02SRx68r+u9PMnsJ5hU0/JvGQAA80ulEjOn7CDrsTg+P+mEg6WLVkbVFs9Ee/+5ruv9lD3ZrmrTMvsJipiqf8sAAJhLQiVmTreWsn7r47K5tRv7B0cHKO0f5JG8YXzB4kLXUOkFiwt9r6v6ZLthGJzNrJmWUBcAgPklVGLmLKQUB12O/VoY1fnyJVX5hvErz3YfyN1rvaPK+U3jUnZmjVk3jNo0hboAAMwnM5WYOd0CpX7r41LmFLeyeuVpg3K2Kuc3jUPZmTVm3VCF9dXlqB2rJpy1UBcAgOkmVGLm9Do9bdCpalWr8g1jrzxtwnK2ypUdRD4tA8yZbrMe6gIAMP20vzFzpqVlaxqGS3cqcjr3slORExETtc+yyrYgmnVDVcwKAwBgkgmVmDnTENZ0VPWGsV5bjL32nfOT6rXFvtfN+ulT9bsXu86Vqt/d/76YdQMAACBUYkb5r/tHPfLgfbH+wadj/7lb/W6LZ1I88uB9fa+b9Yqcsm2B01INBwAAMEpCJZgDZau3XtSjwulFAyqcpsVXuzy3fusd01QNBwAAMCpCJaCnsqfGTYth2thUwwEAAPPO6W8wRs3tVpy/dCVe+dCTcf7SlZEdSd8ZuN3aa0eOWwO3Bz1et3lD/danjSPbAQAAylOpBGNS5clqZQduL6QUB10GDC3MSKmSNrbT19xuuZ8AADAnhEowJlWerNatxavfeke3QKnf+mmoOpTQxnZ6qgxKAQCA8dP+xkSrqj1sHKo8Wa1XZdGgiqNGj9lCvdaHVbZNj8nQLygFAABmj1CJiTXrAUOvYdAnGRJdVNmKo6pnDgklpluVQSkAADB+QiUm1jg
Chioro6oMbMpWHK2tNOLihbPRqNciHX7+xQtnR9bKVLZNj8kwTFA6y1WJAAAwq8xUYmJVXfVQ9TyYKodEr68ux/oHn479525VJi2eSScKsMrOHCozG2nWB4PPuvXV5SN/hyJOFpSaxQQAANNJqMTEWqrXulaojKI9LKLawdkdlQ6JPp7LjDCnKRsSjGMwOKenbFA6jr97AADA8IRKTKyyVQ9lzfI8mM2t3dg/OBrM7B/kkb1pLxsSNHoEiaMaDM7pKxOUzvLfPQAAmGVmKjGxqp7nU+Xg7KpV/aa97ONVPRicyTDLf/cAAGCWqVRiolXZHlZ1ZVSVqm4lfFFtMfba+13X+6lyzhSTY5b/7gEAwCwTKsGhWQ40vu/V98QHPvb5ruuj0Guu9knmbVc6Z4qJMMt/9wAAYJYJleA2VZ50VqXf+tQXC63f7uHmTjz61DNxkHMspBTveP3L471rZ/tes/fsnVVK/dZvN+n3ktEQJgIAwPQRKsGQxnEcetHgpVvrW7/1joebO0cqnA5yvvlxv2CpbLudo+UBAACmh0HdMKR+J52NQid4ae21I8et4KW53Tr1x3r0qWcKrXeUHbhd9b0EAACgPKESDKnqk9WqDF4Oci603lH25L6yFVUAAABUT/sbDKnqk9XKhFhnUsRzXXKgMwMGZy+k1DVAWjjBxO0yM3KGeTwAAACqpVIJhlS21ausXmFVvxDr+Xd1/6vea73jHa9/eaH1YZWtjAIAAKB6QiUYUtlWr4gb85HOX7oSr3zoyTh/6cqJ5iKVCbH+dP+5Qusd7107G+e/6yVH1s5/10sGnv5WVqNHMNZrHQAAgPHR/ganoEyrV9mTzjq/V+T0t2FOY/vdz3/1yNrvfv6r0dxujeQ0tvXV5Vj/4NOxf1uv3uKZNLKqLwAAAMoTKsGY9Bu4PSiwKRpira8uHwmwIoY/jW0UoVJERBwfn2ScEgAAwETS/gZjUuWpcWsrjXjr/Y2bA68XUoq33j84mBrHyXb7B0fnJ+0f5JGcbAcAAMBwhEowJmUGbpfV3G7F49daNwdeH+Qcj19rDZzhVOUeI6oPsQAAAChPqARjUuWpcf3a2Pr5vlffU2h9WFWHWMMoM2QdAABglgiVYEyGOTWuqLIVQL/1qS8WWh/W+upyLJ45OkRpEgd1d4ast/bakePWkHXBEgAAME8M6oYxKnNqXBn1uxfjK8/ud13vZyztaFMwqHssA8wBAAAmjEolmAM5F1vveFGte+jUa31Y0zKo2+wnAAAAoRLMha+276xS6rfekXpUCfVaH9a0hDXTNPsJAABgVIRKMAfKhiB7XVrm+q0Pa1rCmiqHrAMAAEwqoRLMgbIhSNUhz7SENVUOWQcAAJhUBnXDHOiEHZtbu3F9rx1L9Vqsry4PDEHWV5dj4/LOkaHUowx5yu5zHKoasg4AADCpUh40qXfCnDt3Ll+9enXc24C58XBzJx596pk4yDkWUop3vP7l8d61s+PeFgAAAKcopXQt53yuyDUqleAUNLdbU1FdU1RzuxWPX2vFwWH4fJBzPH6tFee+4yUz8fwAAAAoz0wlGFJzuxUbl3eitdeOHBGtvXZsXN6J5nZr3Fsb2ubW7pHWt4iI9v5BbG7tjmlHAAAATAqVSnCbMhVH/YKXSarmKfPcru+1C60DAAAwP4RKcKhTcdQJiDoVRxHRN3wZR/BSdM5R2edWv3sxvvLsftd1AAAA5pv2NzhUttVrqV4rtD6sh5s78YGPff7InKMPfOzz8XBzp+c1ZZ9brzn+UzbfHwAAgBEQKsGhshVH66vLUVtcOLJWW1yI9dXlU9vb7X7pY58vtB5R/rl9tX1nlVK/dQAAAOaHUAkOla04WltpxMULZ6NRr0WKiEa9FhcvnB3ZPKVeRUL9iofKPreqq7AAAACYHkIlODRMxdHaSiM+8tAD8ZlLb46PPPTARA3ojrjx3BbPpCNri2fSwOdWdRUWAAAA08OgbjjUCYKKnpBWtectpPj
GwZ11Sc9bSF0++zbHf3vAp0dMzz0BAACgekIluM3aSmPiA5PFhTPxjYODruu9bG7txv6xIGr/IMfm1u7A5zsN9wQAAIDqaX+DKfO1b9wZKPVbjyg/qBsAAAB6ESrBHDBwGwAAgNMmVIIpU68tFlqPMHAbAACA0ydUginzyIP3dT3J7ZEH7+t5zdpKIy5eOBuNei1SRDTqtbh44axZSQAAAJRmUDdMmbInshm4DQAAwGkSKsEUKhMQNbdbhYMoAAAA6EWoBHOgud2K9Q8+HfvP5YiIaO21Y/2DT0dECJYAAAAoxUwlmAOPPPHJm4FSx/5zOR554pNj2hEAAADTTqUSzIG99n6hdSaP9kUAAGDSCJUAJlxzuxUbl3eivX8QETfaFzcu70SE9kUAAGB8tL/BHHjx3YuF1hmd5nYrzl+6Eq986Mk4f+lKNLdbA6/Z3Nq9GSh1tPcPYnNrd1TbBAAAGEioBHPg3T9wXywupCNriwsp3v0D941pR/OpU3HU2mtHjlsVR4OCpet77ULrAAAAVRAqwRxYW2nE5tteG416LVJENOq12Hzba7VOVaxsxdFSvVZoHQAAoApmKsFtZnkY8tpKo9LnMsv3sqyyFUfrq8tHZipFRNQWF2J9dflU9wcAAFCEUAkOGYZ8etzL7pbqtWh1CZAGVRx17pmQDgAAmCRCJTjUrzXJm/di3Mvuhqk4qrrSDAAAYBChEhwyDPn0uJfdqTgCAABmychDpZTSQkRcjYhWzvktx37vxyJiMyI6Rx/9XM75F0a9J+imbGsSd3Ive1NxBAAAzIoqTn/7WxHx+31+/5dzzq87/J9AibFZX12O2uLCkTXDkMtxLwEAAGbfSEOllNLLIuLNESEsYuKtrTTi4oWz0ajXIkVEo16LixfOqiopwb0EAACYfSnnPLo/PKXHIuJiRHxLRPydHu1vFyPiixHxryLib+ecn+ny57wrIt4VEXHvvffe/7nPfW5kewaOam63zAACAACYcSmlaznnc0WuGVmlUkrpLRHxhZzztT6f9qGIeEXO+TUR8eGIeH+3T8o5vy/nfC7nfO6ee+4ZwW6Bbprbrdi4vBOtvXbkiGjttWPj8k40t1sDrwUAAGC2jbL97XxEPJhS+mxE/NOIeCCl9IHbPyHn/KWc89cPP/yFiLh/hPsBCtrc2o32/sGRtfb+QWxu7Y5pRwAAAEyKkYVKOeeNnPPLcs6viIgfjogrOed33v45KaWX3vbhg9F/oDdQsetdTnDrtw4AAMD8uKvqB0wp/XREXM05PxERfzOl9GBEfDMivhwRP1b1fmBelJmNtFSvRatLgLRUr41qmwAAAEyJkZ7+1pFz/u3OkO6c808dBkqdaqb7cs6vzTl/X875U1XsB+ZN2dlI66vLUVtcOLJWW1yI9dXlEe4WAACAaVB5pRJwS1Unq/WbjdTv8Tq/5/Q3AAAAjhMqwZh0qoc6YU+neigiTj20GWY20tpKo9R+qgrMAAAAGI9K2t+AO1V5slqvGUijmo1Utt2uc+35S1filQ89GecvXTnRNQAAAFRPqARj0m0Adr/1YVQ9G6lsYDZMGAUAAEC1tL/BmCykFAc5d10fpGhrWdWzkcq225Wd/QQAAED1hEowJt0CpX7rHc3tVqw/9nTsH9z4vNZeO9Yfezoi+s9iKjsbqYyleq1rxdWgdrthZj8BAABQLe1vMCaNHgFLr/WO93zokzcDpY79gxzv+dAnT21vwyrbblf17CcAAADKEyrBmJQNXr7y7H6h9XFYW2nExQtno1GvRYobQdnFC2cHVkpVPfsJAACA8rS/wZhUPeeoamXa7Wb9ngAAAMwSoRKMUZngpV5bjL32nVVJ9driaW1rrKqc/TSMosPSAQAAZo32N5gyjzx4XyyeOXpC3OKZFI88eN+YdjR/mtut2Li8E629duS4MSx94/JONLdb494aAABAZYRKMGXWVhqx+fbXHplXtPn216qSqdDm1m609w+OrLX3D2Jza3d
MOwIAAKie9jeYQtPSIjarru+1C60DAADMIqESjJG5PNNpqV6LVpcAaaleG8NuAAAAxkP7G4yJuTzTa311OWqLC0fWaosLsb66PKYdAQAAVE+lEoxJv7k8g6qVqqxwUk11p87zd18AAIB5JlSCMSk7l6dT4dQJpDoVThFx6qFGlY81bcy1AgAA5p32NxiTXvN3Bs3lqfLkMaecAQAA0ItQCcak7FyeKk8ec8oZAAAAvQiVYEzWVhpx8cLZaNRrkSKiUa/FxQtnB7ZUla1wKqPKxwIAAGC6mKkEY1RmLs/66vKROUcRozt5rMrHAgAAYLqoVIIps7bSiLfe34iFlCIiYiGleOv9oxkaXbaaCgAAgNmnUgmmTHO7FY9fa8VBzhERcZBzPH6tFee+4yUjC5aESAAAABynUgmmjBPZAAAAmARCJZgyTmQDAABgEgiVYMo4kQ0AAIBJIFSCKbO+uhy1xYUja05kAwAAoGoGdcOU6QzN3tzajet77Viq12J9ddkwbQAAAColVIIp5EQ2AAAAxk2oBFBCc7ulWgwAAJhrQiWAgprbrdi4vBPt/YOIiGjttWPj8k5EhGAJAACYGwZ1AxS0ubV7M1DqaO8fxObW7ph2BAAAUD2hEkBB1/fahdYBAABmkVAJoKCleq3QOgAAwCwSKgEUtL66HLXFhSNrtcWFWF9dHtOOAAAAqmdQN0BBnWHcTn8DAADmmVAJoIS1lYYQCQAAmGva3wAAAAAoTKgEAAAAQGFCJQAAAAAKEyoBAAAAUJhQCQAAAIDChEoAAAAAFCZUAgAAAKAwoRIAAAAAhQmVAAAAACjsrnFvgPnQ3G7F5tZuXN9rx1K9Fuury7G20hj3tgAAAICShEqMXHO7FRuXd6K9fxAREa29dmxc3omIECwBAADAlNL+xshtbu3eDJQ62vsHsbm1O6YdAQAAAMMSKjFy1/fahdYBAACAySdUYuSW6rVC6wAAAMDkEyoxcuury1FbXDiyVltciPXV5THtCAAAABiWQd2MXGcYt9PfAAAAYHYIlajE2kpDiAQAAAAzRPsbAAAAAIUJlQAAAAAoTKgEAAAAQGFmKsEUam63DD4HAABgrIRKMGWa263YuLwT7SJ0d6QAACAASURBVP2DiIho7bVj4/JORIRgCQAAgMoIlWDKbG7t3gyUOtr7B7G5tTtRoZJqKgAAgNkmVIIpc32vXWh9HFRTAQAAzD6DumHKLNVrhdbHoV81FQAAALNBqARTZn11OWqLC0fWaosLsb66PKYd3WkaqqkAAAAYjlAJpszaSiMuXjgbjXotUkQ06rW4eOHsRLWVTUM1FQAAAMMxUwmm0NpKY6JCpOPWV5ePzFSKmLxqKgAAAIYjVAL6KnOKW+f3nf4GAAAwu4RKQE/DnOI26dVUAAAADEeoBFOoTPVQGf1OcRMYAQAAzDehEkyZYaqHinKKGwAAAL04/Q2mTL/qodPmFDcAAAB6ESrBlKmyemh9dTlqiwtH1pziBgAAQIRQCaZOldVDayuNuHjhbDTqtUgR0ajX4uKFs+YpAQAAYKYSTJv11eUjM5UiRls95BQ3AAAAuhEqwZTpBDxVnP4GAAAAvQiVYAqpHgIAAGDczFQCAAAAoDChEgAAAACFaX+DKdTcbhWeqfRwcycefeqZOMg5FlKKd7z+5fHetbMV7RgAAIBZI1SCKdPcbh05/a21146NyzsRET2DpYebO/GBj33+5scHOd/8WLBUrTKBIAAAwCTS/gZTZnNr92ag1NHeP4jNrd2e1zz61DOF1hmNTiDY2mtHjluBYHO7Ne6tAQAAFCZUginT2msXWo+4UZlUZJ3RKBMIAgAATCrtbzBlFlLqGgYtpHSq18yLKtvRrvcI/nqtAwAATDKVSnAKmtutOH/pSrzyoSfj/KUrI21nKlN19I7Xv7zQ+ryouh1tqV4rtA4AADDJhEowpKqDiUaPAKLXesSNYdzvfMO9NyuTFlKKd77h3rkf0l11O9r66nL
UFheOrNUWF2J9dXkkjwcAADBK2t9gSP2CiVG0Ua2vLh85/S3iZMHEe9fOzn2IdFzV7WidrwenvwEAALNAqARDGkcwcfVzX45Hn3omDnKOhZTirfc3BBMlLNVrXQecj7IdbW3FawUAAMwG7W8wpKrn5DS3W/H4tdbNGUoHOcfj11qOpS9BOxoAAEB5QiUYUtXBhGPpT8/aSiMuXjgbjXotUtyYS3XxwlmVRAAAACeg/Q2GNMycnDLH2TuW/nRpRwMAAChn5KFSSmkhIq5GRCvn/JZjv/f8iPjFiLg/Ir4UET+Uc/7sqPcEk6Bzalyn6qhzalxE9A05qp4DVCb4AgAAYPZV0f72tyLi93v83l+JiK/knF8VEf8wIv5eBfuBU9UJh1p77chxKxwaNOOobBtble12ZZ8bAAAAs2+koVJK6WUR8eaI+IUen/KDEfH+w18/FhF/KaWURrknOG1lw6GybWxVzgEyvwkAAIBeRt3+9o8i4r+OiG/p8fuNiHgmIiLn/M2U0lcj4s9GxB+NeF9warq1ovVb7ximja2qOUDmNwEAANDLyCqVUkpviYgv5JyvncKf9a6U0tWU0tUvfvGLp7A7OD0LPYrreq13TMNx9r0CrlHNbwIAAGB6jLL97XxEPJhS+mxE/NOIeCCl9IFjn9OKiJdHRKSU7oqIF8WNgd1H5Jzfl3M+l3M+d88994xwy1DcQc6F1jum4Tj7aQi+AAAAGI+Rtb/lnDciYiMiIqX0vRHxd3LO7zz2aU9ExI9GxEcj4m0RcSXnAe/EYcI0erSxNSaoja2szt6c/gYAk8cJrQCM26hnKt0hpfTTEXE15/xERPzjiPgnKaVPR8SXI+KHq94PDGt9dTk2Lu8cGWg9S9U8kx58AcA86pzQ2vn5o3NCa0T4vg1AZSoJlXLOvx0Rv33465+6bf1PI+LtVewBRkU1DwBQtX4ntPoZBICqVF6pBLNINQ8AUCUntAIwCUY5qBsAABgBJ7QCMAmESgAAMGWc0ArAJND+BgAAU8ZMRwAmgVAJAACmkJmOAIyb9jcAAAAAChMqAQAAAFCYUAkAAACAwoRKAAAAABQmVAIAAACgMKESAAAAAIUJlQAAAAAoTKgEAAAAQGFCJQAAAAAKEyoBAAAAUJhQCQAAAIDChEoAAAAAFCZUAgAAAKAwoRIAAAAAhd017g0A1Whut2Jzazeu77VjqV6L9dXlWFtpjHtbAAAATCmhEsyB5nYrNi7vRHv/ICIiWnvt2Li8ExEhWAIAAKAU7W8wBza3dm8GSh3t/YPY3Nod044AAACYdkIlmAPX99qF1gEAAGAQoRLMgRfVFgutAwAAwCBCJZgDKRVbBwAAgEGESjAH9p7dL7QOAAAAgwiVYA4s1WuF1gEAAGAQoRLMgfXV5agtLhxZqy0uxPrq8ph2BAAAwLS7a9wbgFnQ3G7F5tZuXN9rx1K9Fuury7G20hj3tm7q7GWS9wgAAMB0ESrBkJrbrdi4vBPt/YOIiGjttWPj8k5ExESFNmsrjYnaDwAAANNN+xsMaXNr92ag1NHeP4jNrd0x7QgAAABGT6USDOn6XrvQ+rSZ9NY+AAAAxkOlEgxplk9W67T2tfbakeNWa19zuzXurQEAADBmQiUY0iyfrKa1DwAAgF60v8GQZvlktVlv7QMAAKA8oRLcpuz8oFk9WW2pXotWlwBpFlr7AAAAGI72NzhkftCdZrm1DwAAgOEIleCQ+UF3WltpxMULZ6NRr0WKiEa9FhcvnJ3JqiwAAACK0f5GIbN8vLz5Qd3NamsfAAAAw1GpxInNentYrzlB5gcBAADAnYRKnNist4eZHwQAAAAnp/2NE5v19rBOi9estvcBAADAaRIqcWLzcLy8+UEAAABwMtrfODHtYQAAAECHSiVOTHsYAAAA0CFUohDtYQAAAECE9jcAAAAAShAqAQAAAFCYUAkAAACAwoRKAAAAABRmUDcAp6a53XJCJAAAzAmhEgCnorndio3LO9H
eP4iIiNZeOzYu70RECJYAAGAGaX8D4FRsbu3eDJQ62vsHsbm1O6YdAQAAoyRUAuBUXN9rF1oHAACmm1AJgFOxVK8VWgcAAKabUAmAU7G+uhy1xYUja7XFhVhfXR7TjgAAgFEyqBtu4+QqKK/zd8XfIQAAmA9CJTjk5CoY3tpKw98XAACYE9rf4JCTqwAAAODkhEpwyMlVAAAAcHJCJTjk5CoAAAA4OaESHJqmk6ua2604f+lKvPKhJ+P8pSvR3G6Ne0sAAADMGYO64dC0nFxloDgAAACTQKgEt5mGk6v6DRSf9L0DAAAwO7S/wZQxUBwAAIBJIFSCKWOgOAAAAJNAqARTZpoGigMAADC7zFSCKTMtA8UBAACYbUIlmELTMFAcAACA2ab9DQAAAIDChEoAAAAAFKb9jYnW3G6ZHQQAAAATSKjExGput2Lj8k609w8iIqK1146NyzsRERMXLAm/AAAAmDfa35hYm1u7NwOljvb+QWxu7Y5pR911wq/WXjty3Aq/mtutcW8NAAAARkaoxMS6vtcutD4u0xJ+AQAAwGkSKjGxluq1QuvjMi3hFwAAAJwmoRITa311OWqLC0fWaosLsb66PKYddTct4RcAAACcJqESE2ttpREXL5yNRr0WKSIa9VpcvHB24gZgT0v4BQAAAKfJ6W9MtLWVxsSFSMd19uf0NwAAAOaJUAlOwTSEXwAAAHCatL8BAAAAUJhQCQAAAIDChEoAAAAAFGamEjATmtstw9IBAAAqJFQCpl5zuxUbl3eivX8QERGtvXZsXN6JiBAsAQAAjIhQCcZIdc3p2NzavRkodbT3D2Jza9f9BAAAGBGhEoyJ6prTc32vXWgdAACA4QmVmGizXMkzTHXNLN+XMpbqtWh1CZCW6rUx7AYAAGA+OP2NidWp5GnttSPHrUqe5nZr3Fs7FWWra2b9vpSxvroctcWFI2u1xYVYX10e044AAABmn1CJidWvkmcW9KqiGVRdM+v3pYy1lUZcvHA2GvVapIho1Gtx8cLZua7eAgAAGLWRtb+llF4QEb8TEc8/fJzHcs7vPvY5PxYRmxHRKbH4uZzzL4xqT0yXWZ+Ts766fGSmUsTJqmtm/b6UtbbSECIBAABUaJQzlb4eEQ/knP8kpbQYEf88pfS/55w/duzzfjnn/F+OcB9MqVmfk9MJQIrORpr1+wIAAMB0GFmolHPOEfEnhx8uHv4vj+rxmD1lK3mmSZnqmnm4LwAAAEy+kc5USiktpJQ+HhFfiIgP55yf6vJpb00pfSKl9FhK6eWj3A/TxZyc7twXAAAAJsEo298i53wQEa9LKdUj4ldTSn8+5/x7t33KhyLi0Zzz11NK/0VEvD8iHjj+56SU3hUR74qIuPfee0e5ZSaMOTkAAAAwmSo5/S3nvBcRvxURbzq2/qWc89cPP/yFiLi/x/Xvyzmfyzmfu+eee0a7WZhwze1WbFzeidZeO3JEtPbasXF5J5rbrYHXAgAAwGkZWaiUUrrnsEIpUkq1iHhjRHzq2Oe89LYPH4yI3x/VfmBWbG7tHpmnFBHR3j+Iza3dMe0IAACAeTTK9reXRsT7U0oLcSO8+pWc8z9LKf10RFzNOT8REX8zpfRgRHwzIr4cET82wv3ATLje5eS3fusAAAAwCqM8/e0TEbHSZf2nbvv1RkRsjGoPMIuW6rVodQmQluq1MewGAACAeVXJTCXg9KyvLkdtceHIWm1xIdZXl8e0IwAAAObRSE9/A05f5zS8za3duL7XjqV6LdZXl52SBwAAQKWESsyk5nZrpkOXtZXGTD0fAAAApo9QiZnT3G7FxuWdmyektfbasXF5JyJCEAMAAACnxEwlZs7m1u7NQKmjvX8Qm1u7Y9oRAAAAzB6VSsyc611ORuu3fhpmvd0OAAAAjlOpxMxZqtcKrQ+r027X2mtHjlvtds3t1kgeDwAAACaBUImZs766HLXFhSNrtcWFWF9dHsnjabcDAABgHml/Y+Z02s6qakc
bR7sdAAAAjJtQiZm0ttKobKbRUr0WrS4B0qja7QAAAGASaH+DIVXdbgcAAACTQKUSDKnqdjsAAACYBEIlOAVVttsBAADAJND+BgAAAEBhKpVgjJrbLW1zAAAATCWhEoxJc7sVG5d3or1/EBERrb12bFzeiYgQLAEAADDxtL/BmGxu7d4MlDra+wexubU7ph0BAADAyQmVYExae+1C6wAAADBJhEowJgspFVoHAACASSJUgjE5yLnQOgAAAEwSoRKMSaNeK7QOAAAAk0SoBGOyvroctcWFI2u1xYVYX10e044AAADg5O4a9wZgXq2tNCLixilw1/fasVSvxfrq8s11AAAAmGRCJRijtZWGEAkAAICppP0NAAAAgMKESgAAAAAUJlQCAAAAoDAzlYCRaG63DCEHAACYYUIl4NQ1t1uxcXkn2vsHERHR2mvHxuWdiAjBEgAAwIzQ/gacus2t3ZuBUkd7/yA2t3bHtCMAAABOm0oluI2WrdNxfa9daB0AAIDpo1IJDnVatlp77chxq2Wrud0a99amzlK9VmgdAACA6SNUgkNatk7P+upy1BYXjqzVFhdifXV5TDsCAADgtGl/g0Natk5Pp2VQKyEAAMDsEirBoaV6LVpdAiQtW+WsrTSESAAAADNM+xsc0rIFAAAAJ6dSCQ5p2QIAAICTEyrBbbRsAQAAwMlofwMAAACgMKESAAAAAIUJlQAAAAAoTKgEAAAAQGFCJQAAAAAKEyoBAAAAUJhQCQAAAIDChEoAAAAAFCZUAgAAAKAwoRIAAAAAhQmVAAAAACjsrnFvAEahud2Kza3duL7XjqV6LdZXl2NtpTHubQEAAMDMECoxc5rbrdi4vBPt/YOIiGjttWPj8k5ExMQFS8IvAAAAppX2N2bO5tbuzUCpo71/EJtbu2PaUXed8Ku1144ct8Kv5nZr3FsDAACAgYRKzJzre+1C6+MyLeEXAAAAdKP9jZmzVK9Fq0uAtFSvjWE3vXXbY7/122mbAwAAYNxUKjFz1leXo7a4cGSttrgQ66vLY9pRdwspFVrv0DYHAADAJBAqMXPWVhpx8cLZaNRrkSKiUa/FxQtnJ66S5yDnQusd2uYAAACYBNrfmElrK42JC5GOa/Ro02sMaNMbpm0OAAAATotKJbhNc7sV5y9diVc+9GScv3RlpC1lZdv0yrbNAQAAwGlSqQSHOrOKOq1lnVlFETGSqqfOn1l04HbZtjkAAAA4TUIlONRvVtGoWunKtOmVbZsDAACA06T9DQ5d7zGTqNf6uEzL6XYAAADMNpVKcGipRwXQ0oRVAJVtm6O75nbLvQQAAChBqASH1leXj8xUipjcCqBpON1uGlQ9RwsAAGCWaH+DQ2srjbh44Ww06rVIcWNG0cULZ4ULM6zfHC0AAAD6U6kEt5nlCiBtXnealjlaAAAAk0ilEsyBTptXa68dOW61eTW3W+Pe2lj1mpc1aXO0AAAAJpFQCeaANq/unKQHAABQnvY3ZpJWr6O0eXXnJD0AAIDyhErMHCd63WmpXotWlwBJm9dsz9ECAAAYJe1vzBytXnfS5gUAAMBpU6nEzNHqdSdtXgAAAJw2oRIzR6tXd9q8AAAAOE3a35g5Wr0AAABg9FQqMXO0egEAAMDoCZWYSVq9AAAAYLS0vwEAAABQmFAJAAAAgMJOFCqllP69lNJvppR+7/Dj16SUHh7t1gAAAACYVCetVPr5iNiIiP2IiJzzJyLih0e1KQAAAAAm20lDpbtzzv/i2No3T3szAAAAAEyHk4ZKf5RS+q6IyBERKaW3RcQfjmxXAAAAAEy0u074eX89It4XEa9OKbUi4jMR8SMj2xXMieZ2Kza3duP6XjuW6rVYX12OtZXGuLcFAAAAAw0MlVJKCxHx13LO/1FK6YURcSbn/Mej3xrMtuZ2KzYu70R7/yAiIlp77di4vBMRMTBYEkYBAAAwbgNDpZzzQUrpLx7++muj3xLMh82t3ZuBUkd7/yA2t3b7BkR
lwyhBFAAAAKfppO1v2ymlJyLigxFxM1jKOV8eya5gDlzfaxda7ygTRg1TFQUAAADdnHRQ9wsi4ksR8UBE/MDh/94yqk3BPFiq1wqtd5QJo/oFUQAAAFDGiSqVcs7/2ag3AvNmfXX5SPVQRERtcSHWV5f7XrdUr0WrS4DUL4wqWxUFAAAAvZyoUiml9LKU0q+mlL5w+L/HU0ovG/XmYJatrTTi4oWz0ajXIkVEo16LixfODmxHW19djtriwpG1QWFU2aooAAAA6OWkM5X+l4j4XyPi7Ycfv/Nw7Y2j2BTMi7WVRuGZRp3PLzJ0u2xVFAAAAPSScs6DPymlj+ecXzdorQrnzp3LV69erfphYeo5/Q0AAIBeUkrXcs7nilxz0kqlL6WU3hkRjx5+/I64Mbi732ZeEBG/ExHPP3ycx3LO7z72Oc+PiF+MiPsP/7wfyjl/9sS7B06sTFUUAAAA9HLS09/+84j4TyLi/4uIP4yIt0XEoOHdX4+IB3LOr42I10XEm1JKbzj2OX8lIr6Sc35VRPzDiPh7J904AAAAAONz0tPfPhcRDxb5g/ONvro/Ofxw8fB/x3vtfjAiHjn89WMR8XMppZRP0pMHAAAAwNic9PS396eU6rd9/OKU0v98gusWUkofj4gvRMSHc85PHfuURkQ8ExGRc/5mRHw1Iv7sSTcPAAAAwHicdKbSa3LOe50Pcs5fSSmtDLoo53wQEa87DKR+NaX053POv1d0kymld0XEuyIi7r333qKXM8WmZbj0tOyzjFl+bgAAAJR30plKZ1JKL+58kFJ6SZw8kIrDQOq3IuJNx36rFREvP/wz74qIF0WXAeA55/flnM/lnM/dc889J31YplxzuxUbl3eitdeOHBGtvXZsXN6J5nZr3Fs7Ylr2WcYsPzcAAACGc9JQ6b+LiI+mlP7blNJ7I+L/joif7XdBSumeTstcSqkWEW+MiE8d+7QnIuJHD3/9toi4Yp4SHZtbu9HePziy1t4/iM2t3THtqLtp2WcZs/zcAAAAGM5JB3X/YkrpakQ8EDeGbV/IOf/LAZe9NCLen1JaiBvh1a/knP9ZSumnI+JqzvmJiPjHEfFPUkqfjogvR8QPl30izJ7re+1C6+MyLfssY5afGwAAAMPpGyqllO6OiP2c837O+V+mlA4i4vsj4tUR0TdUyjl/IiLumLuUc/6p2379pxHx9jIbZ/Yt1WvR6hJeLNVrY9hNb9OyzzJm+bkBAAAwnEHtb78eEa+IiEgpvSoiPhoR3xkRfz2ldGm0W2Pera8uR21x4chabXEh1leXx7Sj7qZln2XM8nMDAABgOIPa316cc/6Dw1//aEQ8mnP+Gyml50XEtYh4aKS7Y651Thgrc/JYlSeWDbPPKpW5J9Py3AAAAKhe6jcXO6X0iZzzaw5//ZGI2Mw5Nw8/fjrn/NpqtnnLuXPn8tWrV6t+WKZI58Sy2wdM1xYX4uKFs3MbhrgnAAAA9JNSupZzPlfkmkHtb59IKf39lNLfjohXRcRvHD5QveQeYeScWHYn9wQAAIDTNihU+vGI+KO4MVfpL+ecnz1c//cj4u+PcF9QmhPL7uSeAAAAcNr6zlTKObcj4lJK6YURcfu7z6ci4uOj3BiUNcyJZVXOYqqSU9wAAAA4bYMqlTp+MyLuvu3jWkT8H6e/HarS3G7F+UtX4pUPPRnnL12J5nZr3Fs6NWVPLOvMHWrttSNHRGuvHRuXd2bi3jjFDQAAgNN20lDpBTnnP+l8cPjru/t8PhNslsOTiBsnll28cDYa9VqkiGjUaycaSD3Lc4fK3hMAAADopW/7222+llL6Cznn342ISCmdi6PtcEyRfuHJrIQMayuNws9l1ucOlbknAAAA0MtJQ6X/KiI+mFK6fvjxSyPih0azJUZt1sOTsswdAgAAgJPr2/6WUvoPUkp/Luf8/0TEqyPilyNiPyJ+PSI+U8H+GIFeIcm8hyfmDgEAAMDJDZqp9D9FxDcOf/09EfHfRMT
/EBFfiYj3jXBfjJDwpDtzhwAAAODkBrW/LeScv3z46x+KiPflnB+PiMdTSh8f7dYYlU5Isrm1G9f32rFUr8X66rLwJMwdmkfN7Za/CwAAACUMDJVSSnflnL8ZEX8pIt5V4FommPAEbp2E2Blc3zkJMSL8/QAAABhgUPvboxHxf6aU/re4cdrb/xURkVJ6VUR8dcR7AxipfichAgAA0F/faqOc88+klH4zbpz29hs553z4W2ci4m+MenNQlpYmTsJJiAAAAOUNbGHLOX+sy9q/Gs12YHhamjippXotWl0CpHk/CREAAOAkBrW/wdTR0sRJOQkRAACgPMO2mWhl2ti0NHFSTkIEAAAoT6jExCrbxqaliSKchAj/f3v3Hlx5etYH/vtGI2+OL4sAO+A+9qyHxCvWoRcLOmBvs+wAIeJWsarDbqAgZFMsXgibBUK0NQ1VS0KF6k4pBQvrBNZcQqhyzMUowotJBMWYxdwcetAYYRttjMHg0wYPMTI4nCWy/O4fOqen1aNW63ekc/98qqZG59U5+r2t/pUu336e5wUAgMFof2NiDdrGpqUJAAAAhk+lEhNr0DY2LU0AAAAwfEIlJtZ52ti0NAEAAMBwaX9jYmlju1hbO51cvfl4HnnsTbl68/Fs7XTGvSUAAACmmEolJpY2tosz6NBzAAAAuB+hEhNNG9vFOG3ouc8vAAAAg9D+BnNg0KHnAAAAcD8qlWCMtnY6I2nvO8/QcwAAADiJSiW4AIMMwe7POersd1Pz9JyjYQzQNvQcAACAiyZUgnMaNBw6bc7RRVtbaefGtctpL7VSkrSXWrlx7bJ5SgAAAAxM+xuc06BDsEc958jQcwAAAC6SSiU4p0HDofvNMzLnCAAAgGkgVIJzGjQcMucIAACAaSZUgnMaNBwy5wgAAIBpZqYSnFM/BNrY3svt/W4uLbWyvrp8pnDInCMAAACmlVAJLoBwCAAAgHmj/Q0AAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI0Z1A132drpDHSKGwAAAMwboRL0bO10cn1zN92DwyRJZ7+b65u7SSJYAgAAgHtof4Oeje29O4FSX/fgMBvbe2PaEQAAAEwuoRL03N7vNloHAACAeSZUgp5LS61G6wAAADDPhErQs766nNbiwrG11uJC1leXh3bNrZ1Ort58PI889qZcvfl4tnY6Q7sWAAAAXCSDuqGnP4x7VKe/GQwOAADANBMqwV3WVtojC3ROGwwuVAIAAGDSaX+DMTEYHAAAgGkmVIIxMRgcAACAaSZUgjEZx2BwAAAAuChmKsGYjHowOAAAAFwkoRKM0SgHgwMAAMBF0v4GAAAAQGNCJQAAAAAa0/5GI1s7HTOAAAAAAKESZ7e108n1zd10Dw6TJJ39bq5v7iaJYAkAAADmjPY3zmxje+9OoNTXPTjMxvbemHYEAAAAjItKJc7s9n630TrDow0RAACAcVOpxJldWmo1Wmc4+m2Inf1uap5uQ9za6Yx7awAAAMwRoRJntr66nNbiwrG11uJC1leXx7Sji7e108nVm4/nkcfelKs3H5/IoEYbIgAAAJNA+xtn1m+vmtW2q2kZRK4NEQAAgEkgVKKRtZX2RAUsF+m0CqBJ+jNfWmqlc0KApA0RAACAUdL+Bj3TUgE0D22IAAAATD6hEvRMyyDytZV2bly7nPZSKyVJe6mVG9cuT1Q1FQAAALNP+xv0rK8uH5uplExuBdAstyECAAAwHYRK0DPrg8gBAADgIgmV4C4qgAAAAOBshEowRls7HZVRAAAATCWhEozJ1k7n2Aynzn431zd3k0SwBAAAwMRz+huMycb23rGh4EnSPTjMxvbemHYEAAAAZydUgjG5vd9ttA4AAACTRKgEY3JpqdVoHQAAACaJUAnGZH11Oa3FhWNrrcWFrK8uj2lHAAAAcHYGdcOY9IdxO/0NAACAaSRUgjFaW2kLkQAAAJhK2t8AAAAAaEylEhNta6ejPewEPi8AAACMm1C
JibW108n1zd10Dw6TJJ39bq5v7ibJXAcoPi8AAABMAqESE2tje+9OcNLXJ89hTQAAIABJREFUPTjMxvbe0MKTQSuARlk5NI7PCwAAANxLqMTEur3fbbR+XoNWAI26cmjUnxcAAAA4iUHdTKxLS61G6+d1WgXQMF43qFF/XgAAAOAkQiUm1vrqclqLC8fWWosLWV9dHsr1Bq0AGnXl0Kg/LwAAAHASoRITa22lnRvXLqe91EpJ0l5q5ca1y0ObGzRoBdCoK4dG/XkBAACAk5ipxERbW2mPLCxZX10+NhspOVsF0KCvO49Rfl4AAADgJEIlRmKUp6MNqr+fpvsc9HUAAAAwzUqtddx7aOTKlSv11q1b494GDdx7OlpyVMmjZYtJMA2BJwAAwLCVUp6otV5p8hqVSjQyyC/gp52O5pd3xunewLOz3831zd0kcW8CAAA8gEHdnFn/F/DOfjc1T/8CvrXTOfV1oz4dDc7qtMATAACA0wmVOLNBfwEf9elocFYCTwAAgMENLVQqpby4lPLmUso7SilvL6V83QnPebSU8sFSypO9//73Ye2H8xv0F/D11eW0FheOrQ37dDQ4C4EnAADA4IZZqfThJN9Ya31Zklck+dpSystOeN5baq0v7/33rUPcD+c06C/gayvt3Lh2Oe2lVkqS9lLLkG4mgsATAABgcEMb1F1rfV+S9/Xe/pNSyjuTtJO8Y1jXZLjWV5dPPMXtLL+Ar620hUhMnP496fQ3AACA5kZy+lsp5SVJVpK89YR3v7KU8rYkt5P8g1rr20exJ5rzCzizSOAJAAAwmFJrHe4FSnlukv8nybfVWjfved9/nuQjtdYPlVK+IMl31lpfesLHeHWSVyfJww8//Knvec97hrpnAAAAgHlSSnmi1nqlyWuGevpbKWUxyY8ned29gVKS1Fr/uNb6od7bP5VksZTy/BOe99pa65Va65UXvOAFw9wyAAAAAGcwzNPfSpLvT/LOWuu33+c5H997Xkopn9bbz38Y1p4AAAAAuBjDnKl0NcnfSrJbSnmyt/ZNSR5Oklrr9yT54iRfU0r5cJJuki+pw+7HAwAAAODchnn62y8kKQ94zmuSvGZYewAAAABgOIY6UwkAAACA2SRUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNCZUAAAAAaEyoBAAAAEBjQiUAAAAAGhMqAQAAANCYUAkAAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI09NO4NwDzb2ulkY3svt/e7ubTUyvrqctZW2uPeFgAAADyQUAkuwCDh0NZOJ9c3d9M9OEySdPa7ub65myRDCZYEWAAAAFwkoRKc06Dh0Mb23p3X9HUPDrOxvXfhYc+oAyxoSugJAADTx0wlOKfTwqHT3N7vNlo/j0H3CKPQDz07+93UPB16bu10xr01AADgFEIlOKdBw6GlZy82Wj+PUQZY0JTQEwAAppNQCc7p0lKr0Xpfrc3Wz2PQPcIoCD0BAGA6CZXgnNZXl9NaXDi21lpcyPrq8qmv+2D3oNH6eQy6RxgFoScAAEwnoRKc09pKOzeuXU57qZWSpL3Uyo1rlx84ZPg8v0hv7XRy9ebjeeSxN+XqzccfOHtm0D3CKAg9AQBgOpU6jF6bIbpy5Uq9devWuLcB53bviWzJ0S/SDwp7Bn0dTDKnvwEAwHiVUp6otV5p8pqHhrUZ4HT9X5ib/iJ92lDjSfolXEhAE2srbfcHAABMGaESjNEgv0hPw1Dje6up+kfEJxlacCDEAgAAGC0zlWDKTMNQ41EfEd8PsTr73dQ8HWI9aNYUAAAAgxMqwZSZhqHGo66mGnWIBQAAgFAJps40nOQ26mqqaWgJBAAAmDVmKsEUmvShxuuryyeeUDesaqpLS610TgiQJqklEAAAYNaoVAIu3KirqaahJRAAAGDWqFQChmKU1VT
96zj9DQAAYHSESsBMmPSWQAAAgFmj/Q0AAACAxoRKAAAAADQmVAIAAACgMTOVAAawtdMxGBwAAJhrQiWAhrZ2Orm+uZvuwWGSpLPfzfXN3SQRLAEAAHND+xtAQxvbe3cCpb7uwWE2tvfGtCMAAIDREyoBNHR7v9toHQAAYBYJlQAaurTUarQOAAAwi4RKAA2try6ntbhwbK21uJD11eUx7QgAAGD0DOoGaKg/jNvpbwAAwDwTKgEMYG2lLUQCAADmmvY3AAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNCZUAAAAAaEyoBAAAAEBjQiUAAAAAGhMqAQAAANCYUAkAAACAxoRKAAAAADT20Lg3APNsa6eTje293N7v5tJSK+ury1lbaY97WwAAAPBAQiUYk62dTq5v7qZ7cJgk6ex3c31zN0kESwAAAEw8oRKMycb23p1Aqa97cJiN7b2hhEqqogAAALhIQiUYk9v73Ubr56EqCgAAgItmUDeMyaWlVqP18zitKgoAAAAGIVSCMVlfXU5rceHYWmtxIeuryxd+rVFWRQEAADAfhEowJmsr7dy4djntpVZKkvZSKzeuXR5KO9ooq6IAAACYD2YqwRitrbRHMtNofXX52EylZHhVUQAAAMwHoRLMgX5w5fQ3AAAALopQCebEqKqiAAAAmA9mKgEAAADQmFAJAAAAgMa0vwEMYGunY0YVAAAw14RKAA1t7XSOnabX2e/m+uZukgiWAACAuaH9DaChje29O4FSX/fgMBvbe2PaEQAAwOgJlQAaur3fbbQOAAAwi4RKAA1dWmo1WgcAAJhFQiWAhtZXl9NaXDi21lpcyPrq8ph2BAAAMHoGdQM01B/G7fQ3AABgngmVAAawttIWIgEAAHNN+xsAAAAAjalUgjmxtdPRrgUAAMCFESrBHNja6eT65m66B4dJks5+N9c3d5NEsAQAAMBAtL/BHNjY3rsTKPV1Dw6zsb03ph0BAAAw7YRKMAdu73cbrQMAAMCDCJVgDlxaajVaBwAAgAcRKsEcWF9dTmtx4dhaa3Eh66vLY9oRAAAA086gbpgD/WHc03D6m1PqAAAApoNQCS7ANAQhayvtidvTvZxSBwAAMD20v8E59YOQzn43NU8HIVs7nXFvbeo4pQ4AAGB6DC1UKqW8uJTy5lLKO0opby+lfN0JzymllO8qpbyrlPLrpZRPGdZ+YFgEIRfHKXUAAADTY5iVSh9O8o211pcleUWSry2lvOye53x+kpf2/nt1ku8e4n5gKAQhF8cpdQAAANNjaKFSrfV9tdZf6739J0nemeTeoSivSvJD9civJFkqpbxwWHuCYRCEXByn1AEAAEyPkcxUKqW8JMlKkrfe8652kt+76/F788zgCSaaIOTirK20c+Pa5bSXWilJ2kut3Lh22ZBuAACACTT0099KKc9N8uNJvr7W+scDfoxX56g9Lg8//PAF7g7Orx94TPrpb9NiGk6pAwAAYMihUillMUeB0utqrZsnPKWT5MV3PX5Rb+2YWutrk7w2Sa5cuVKHsFU4F0EIAAAA82aYp7+VJN+f5J211m+/z9PemOQreqfAvSLJB2ut7xvWngAAAAC4GMOsVLqa5G8l2S2lPNlb+6YkDydJrfV7kvxUki9I8q4kf5rk7wxxP4zR1k5HexgAAADMkKGFSrXWX0hSHvCcmuRrh7UHJsPWTifXN3fTPThMknT2u7m+uZskgiUAAACYUkMf1A0b23t3AqW+7sFhNrb3hEozTHUaAADAbBMqMXS397uN1pl+qtMAAABm39AGdUPfpaVWo/VptLXTydWbj+eRx96Uqzcfz9bOMw4xnCunVacBAAAwG4RKDN366nJaiwvH1lqLC1lfXR7Tju5vkHCoX5XT2e+m5umqnHkOllSnAQAAzD6hEkO3ttLOjWuX015qpSRpL7Vy49rliWuDGjQcUpXzTPNQnQYAADDvzFRiJNZW2hMXIt1
r0IHiqnKeaX11+dhMpWRyq9MAAAAYjEol6Bk0HFKV80zTUp0GAADA4FQqQc+lpVY6JwRIDwqHVOWcbBqq0wAAABicSiXoGXSguKocAAAA5pFKJejph0Ab23u5vd/NpaVW1leXzxQOqcoBAABg3giV4C7CIQAAADgb7W8AAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQ2EPj3gCcZmunk43tvdze7+bSUivrq8tZW2mPe1sAAAAw94RKTKytnU6ub+6me3CYJOnsd3N9czdJBEsAAAAwZtrfmFgb23t3AqW+7sFhNrb3xrQjAAAAoE+oxMS6vd9ttA4AAACMjlCJiXVpqdVoHQAAABgdoRITa311Oa3FhWNrrcWFrK8uj2lHAAAAQJ9B3Uys/jBup78BAADA5BEqMdHWVtpCJAAAAJhA2t8AAAAAaEyoBAAAAEBjQiUAAAAAGhMqAQAAANCYUAkAAACAxpz+BnfZ2ulkY3svt/e7ubTUyvrqstPnAAAA4ARCJSbaKEOerZ1Orm/upntwmCTp7HdzfXM3SQRLAAAAcA/tb0ysfsjT2e+m5umQZ2unM5TrbWzv3QmU+roHh9nY3hvK9QAAAGCaCZWYWKMOeW7vdxutAwAAwDwTKjGxRh3yXFpqNVoHAACAeSZUYmKNOuRZX11Oa3Hh2FprcSHrq8tDuR4AAABMM6ESE2vUIc/aSjs3rl1Oe6mVkqS91MqNa5cN6QYAAIATOP2NidUPcwY5/W3QU+PWVtpCJAAAADgDoRITbZCQp39qXH/Id//UuP7HAwAAAM5P+xszZ9SnxgEAAMA8Eioxc0Z9ahwAAADMI6ESM2fUp8YBAADAPBIqMXNGfWrceWztdHL15uN55LE35erNx7O10xn3lgAAAOBMDOpm5pzn1LhRMlAcAACAaSZUYiYNcmrcqJ02UHzS9w4AAABCJUZia6cz8ZVDo2agOAAAANPMTCWGrt/m1dnvpubpNq95nx9koDgAAADTTKjE0J3W5jXPpmmgOAAAANxL+xtDp83rZNM0UHzS9wgAAMDoCZUYuktLrXROCJC0eU3+QHEn1AEAAHA/2t8Yumlq89ra6eTqzcfzyGNvytWbj8/93CetiwAAANyPSiWGbpravKalKmdULWlaFwEAALgfoRIjMeltXsnpVTmTtPdRhl9aFwEAALgf7W/QMy1VOaNsSZum1kUAAABGS6gEPfervpm0qpxRhl9rK+3cuHY57aVWSpL2Uis3rl2eqMotAAAAxkP7G/Ssry4faytLJrMqZ9QtadPQuggAAMDoqVSCnmmpytGSBgAAwCRQqQR3mYaqnGk5TQ8AAIDZJlRiom3tdIQnJ5iG8AsAAIDZJlRiYm3tdI7NOOrsd3N9czdJBCoAAAAwZkIlJtbG9t6xodlJ0j04zMb23gNDpVFXOKmoAgAAYN4IlZhYt0844ey09b5RVzipqAIAAGAeOf2NiXVpqdVove+0CqdhGPX1AAAAYBIIlZhY66vLaS0uHFtrLS5kfXX51NcNWuE0qFFfDwAAACaBUImJtbbSzo1rl9NeaqUkaS+1cuPa5Qe2lA1a4TSoUV8PAAAAJoGZSky0tZV247lE66vLx2YcJWercBrUqK8HAAAAk0CoxMzph1CDnMY2yClu57keAAAATCuhEjNpkAonp7gBAADA2ZmpBD2DnuLWD6M6+93UPB1Gbe10hrhbAAAAGC+hEvQMeorboGEUAAAATDOhEvQMeorboGEUAAAATDOhEvSsry6ntbhwbO0sp7gNGkYBAADANBMqQc/aSjs3rl1Oe6mVkqS91MqNa5cfOKR70DAKAAAAppnT3+Aug5wa13/+xvZebu93c2mplfXVZSfGAQAAMNOESnABBgmjkqOT44RRAAAATCOhEozJ1k4n1zd375wc19nv5vrmbpIIlgAAAJh4ZirBmGxs790JlPq6B4fZ2N4b044AAADg7IR
KMCa397uN1gEAAGCSCJVgTC4ttRqtAwAAwCQRKsGYrK8up7W4cGyttbiQ9dXlMe0IAAAAzs6gbhiT/jBup78BAAAwjYRKMEZrK20hEgAAAFNJqAQXYGuno+IIAACAuSJUgnPa2unk+uZuugeHSZLOfjfXN3eTRLAEAADAzDKoG85pY3vvTqDU1z04zMb23ph2BAAAAMMnVIJzur3fbbQOAAAAs0CoBOd0aanVaB0AAABmgVAJzml9dTmtxYVja63FhayvLj/wtVs7nVy9+XgeeexNuXrz8WztdIa1TQAAALhQBnXDOfWHcTc9/c2AbwAAAKbZ0EKlUsoPJPmiJO+vtX7SCe9/NMlPJPnt3tJmrfVbh7UfGKa1lXbjIOi0Ad9CJQAAACbdMCuVfjDJa5L80CnPeUut9YuGuAem3NZOp3EF0LQw4BsAAIBpNrSZSrXWn0/ygWF9fGZfvz2ss99NzdPtYbMyd8iAbwAAAKbZuAd1v7KU8rZSyr8ppfzl+z2plPLqUsqtUsqtp556apT7Y4xOaw+bBecZ8A3MB8P8AQCYZOMc1P1rSf6LWuuHSilfkGQryUtPemKt9bVJXpskV65cqaPbIuPUuU8b2P3Wp82gA76B+WCYPwAAk25soVKt9Y/vevunSin/vJTy/FrrH45rT0yWhVJyWJ+ZIS6UMobdDMcgA76B+WCYPwAAk25s7W+llI8v5SgdKKV8Wm8v/2Fc+2HynBQonbYOMEsM8wcAYNINrVKplPL6JI8meX4p5b1JviXJYpLUWr8nyRcn+ZpSyoeTdJN8Sa3SAp7WXmqd2OrWNsgamAOX7vM10DB/AAAmxTBPf/vSWusLa62LtdYX1Vq/v9b6Pb1AKbXW19Ra/3Kt9ZNrra+otf7SsPbCdDLIGphnvgYCADDpxjmoG05lkDUwz3wNBABg0pVp6zi7cuVKvXXr1ri3AQAAADAzSilP1FqvNHmNSiUYo62djioEAAAAppJQCcZka6eT65u7d44M7+x3c31zN0kESwAAAEy8oQ3qBk63sb13J1Dq6x4cZmN7b0w7AgAAgLMTKsGY3D7hqPDT1gEAAGCSCJVgTC4ttRqtAwAAwCQRKsGYrK8up7W4cGyttbiQ9dXlMe0IAAAAzs6gbhiT/jDuUZ3+5qQ5AAAALpJQiZk0LQHK2kp7JPty0hwAAAAXTajESIwy5BGgPNNpJ83NyudkWoJEAACAWWGmEkPXD3k6+93UPB3ybO10hnK90wKUeTXrJ82N+h4DAABAqMQIjDrkmfUAZRCzftKcIBEAAGD0hEoM3ahDnlkPUAYx6yfNCRIBAABGT6jE0I065PmsT3xBo/V5sLbSzo1rl9NeaqUkaS+1cuPa5ZmZOSRIBAAAGD2Duhm69dXlY4Ozk+FWybz5N59qtD4vRnXS3DiM+h4DAABAqMQI9IOMUZ3MpRVq/oz6HgMAAECoxIiMskrm0lIrnRMCJK1Qs22WK7EAAAAmkZlKzJxZH0oNAAAAk0ClEjNHKxQAAAAMn1CJmTQtrVBbOx3hFwAAAFNJqARjsrXTOXZiWWe/m+ubu0kiWAIAAGDimakEY7KxvXcnUOrrHhxmY3tvTDsCAACAsxMqwZjcPuGEutPWAQAAYJIIlWBMLi21Gq0DAADAJBEqwZisry6ntbhwbK21uJD11eUx7QgAAADOzqBuGJP+MG6nvwEAADCNhEowRmsrbSESAAAAU0n7GwAAAACNCZUAAAAAaEyoBAAAAEBjZiox0bZ2OgZZAwAAwAQSKjESg4RDWzudXN/cTffgMEnS2e/m+uZukpzptcIoAAAAGB7tbwxdPxzq7HdT83Q4tLXTOfV1G9t7dwKlvu7BYTa294ZyPQAAAODshEoM3aDh0O39bqP1814PAAAAODuhEkM3aDh0aanVaP281wMAAADOTqjE0A0aDq2vLqe1uHBsrbW4kPXV5aFcDwAAADg7oRJDN2g4tLbSzo1rl9NeaqUkaS+1cuPa5Qc
O3B70egAAAMDZOf2NoeuHQIOcxra20m58att5rgcAAACcTam1jnsPjVy5cqXeunVr3NsAAAAAmBmllCdqrVeavEalEjNpa6ejUgkAAACGSKjEzNna6eT65m66B4dJks5+N9c3d5NEsAQAAAAXxKBuZs7G9t6dQKmve3CYje29Me0IAAAAZo9QiZlze7/baB0AAABoTvsbIzHKGUeXllrpnBAgXVpqDeV6AAAAMI9UKjF0/RlHnf1uap6ecbS10xnK9dZXl9NaXDi21lpcyPrq8lCuBwAAAPNIqMTQjXrG0dpKOzeuXU57qZWSpL3Uyo1rlw3pBgAAgAuk/Y2hG8eMo7WVthDpHqNsQQQAAGD2qVRi6O43y8iMo9EZdQsiAAAAs0+oxNCtry5n8c+VY2uLf66YcTRCo25BBAAAYPYJlRiN8oDHDNU4WhABAACYbUIlhm5jey8Hh/XY2sFhVSUzQloQAQAAuGgGdTN046iSMZT6uPXV5Vzf3D3WAtdaXBhqC6K/AwAAgNkmVJpTo/yF/9JSK50TAqRhVcn0h1L3A5T+UOokQ/szTnqA0t/LqPY4jr8DAAAARkuoNIdG/Qv/qKtkThtKPYw/37QEKGsr7ZHtZ9R/BwAAAIyemUpzaNQnga2ttHPj2uW0l1opSdpLrdy4dnlo4cKo2+2crPZMBoMDAADMPpVKc2gcv/CPskpm1O12ApRnGvXfAQAAAKOnUmkOzfpJYOury2ktLhxbG2a73ax/Pgcx6r8DAAAARk+oNIdm/Rf+UbfbzfrncxCj/jsAAABg9Eqtddx7aOTKlSv11q1b497G1Jv008qmjc8nAAAA06yU8kSt9Uqj1wiVAAAAAObbIKGS9jcAAAAAGhMqAQAAANDYQ+PeADAa5j4BAABwkYRKTDRByMXY2unk+uZuugeHSZLOfjfXN3eTxOcTAACAgWh/Y2L1g5DOfjc1TwchWzudcW9t6mxs790JlPq6B4fZ2N4b044AAACYdkIlJpYg5OLc3u82WgcAAIAH0f7GxJqHIGRU7X2XllrpnPB5u7TUuvBrAQAAMB9UKjGx7hd4zEoQMsr2vvXV5bQWF46ttRYXsr66fOHXAgAAYD4IlZhYsx6EjLK9b22lnRvXLqe91EpJ0l5q5ca1y4Z0AwAAMDDtbzQyytPY+h93Vk9/G3V739pKe2Y+dwAAAIyfUIkzO8+x9IOGUYMGIaMMvwa9njlHAAAATDPtb5zZoO1ao5wdNE3Xm/X2PgAAAGabUIkzG7Rda5Szg6bpeuYcAQAAMM20v3Fmg7ZrjXp20DRdz5wjAAAAppVKJc5s0Hat+4VOw5oddJ7rbe10cvXm43nksTfl6s3Hz9QyN+o/HwAAAEwCoRJnNmi71md94gsarZ/XoNczGwkAAADOTvsbjQzSrvXm33yq0fp5DXq902YjnfZn7r9vlKfNAQAAwLgJlebU1k5nZCHItMw4MhsJAAAAzk772xwatM1rUNMyU8lsJAAAADg7odIcOq3NaxjOM3NokMHZg17PbCQAAAA4O+1vc2jU7WiDzhzqV1T1A7B+RdXdH/Mir2c2EgAAAJydUGkOXVpqpXNCgDRpbV6DDs5OBp9xZDYSAAAAnI32tzk06javQWc4jbqiKhms3Q4AAADmkUqlOTTqNq9BK47OU1E1yOl2g7bbDXq987yO+eNeAQAAJo1QaU6Nss3rpGDotPW+9dXlrL/hbTk4rHfWFhfKAyuqtnY6Wf+xt+XgI/XOddZ/7G1JTg+HBg2/Bg2jzhNiMV/cKwAAwCTS/sbQLZTSaP2Y+oDHJ/iHb3z7nUCp7+AjNf/wjW8/9XWDhl+DnqY36lP4mF7uFQAAYBKpVKKRQVpwDuvJSdD91vs2tvdODIceVDm03z1otN5XcnJm9aDoa9DZT+OYGcV0cq8AAACTSKg0p0Y5c6h9n9lI7QfMRhq0cmhQ94u4HlQctfTsxfzRnz4zsFp69uKpr5uWU/gYv2m6V8x+AgC
A+TG09rdSyg+UUt5fSvmN+7y/lFK+q5TyrlLKr5dSPmVYe+G4QU9jG7QFZ311OYsLx+t9zjIb6VxtcyN0v4KrBxRijfwUPqbXtNwrg35tAQAAptMwK5V+MMlrkvzQfd7/+Ule2vvv05N8d+//c2nQf93/su/95fzib33gzuOrf/Fj8rqveuWprxl0IPV5KofuHrZ90uOTDNo299H3qRz66AdUDg36ug/ep63ufut9ayvt3HrPB/L6t/5eDmvNQin5G596tgHqqkHmy6hPbBzUoF9bAACA6TS0SqVa688n+cApT3lVkh+qR34lyVIp5YXD2s8kG/Rf9+8NlJLkF3/rA/my7/3lU1836rayr/+RJxutn9fLXvi8Ruvnfd1HtU4One633re108mPP9G5E5Id1poff6LzwL931SDzaW2lnV987LPz2ze/ML/42GdPZEhj9hMAAMyXcZ7+1k7ye3c9fm9vbe4M2lZ2b6D0oPV58Svv/qNG6+d93f268R7UpefUOGbN/WY8TeLsJwAA4PzGGSqdWSnl1aWUW6WUW0899dS4t3Ph/Ov+xRq0bW7Q1+2f0DJ32nqfU+OYNdMy+wkAALgY4wyVOklefNfjF/XWnqHW+tpa65Va65UXvOAFI9ncKPnX/Ys16IDvQV836N/fqF8Hw7a20s6Na5fTXmql5OiExxvXLk9kqx4AAHB+4wyV3pjkK3qnwL0iyQdrre8b437GZtB/3b/6Fz+m0fp5Xzeoj3vesxqt9730Lzyn0Xrfl376ixutn/d1g/79jfp1MArTMPsJAAC4GEMLlUopr0/yy0mWSynvLaV8ZSnlq0spX917yk8leXeSdyX53iR/d1h7mXSD/uv+677qlc8Igs5y+tugr/udm1/YaL3vrd/8uc8IkD7uec/KW7/5c0993c/8/UefESC99C88Jz/z9x899XX/eO1yvvwVD9+pMFooJV/+iofzj9cuD+V1g/79jfp1AAAAcJFKfcCSsdU/AAAKxUlEQVS8mElz5cqVeuvWrXFvAwAAAGBmlFKeqLVeafKaqRjUDQAAAMBkESoBAAAA0JhQCQAAAIDGhEoAAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNCZUAAAAAaEyoBAAAAEBjQiUAAAAAGhMqAQAAANCYUAkAAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI0JlQAAAABoTKgEAAAAQGNCJQAAAAAaEyoBAAAA0JhQCQAAAIDGhEoAAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMZKrXXce2iklPInSfbGvQ+mxvOT/OG4N8FUcK/QhPuFs3Kv0IT7hbNyr9CE+4WzWq61Pq/JCx4a1k6GaK/WemXcm2A6lFJuuV84C/cKTbhfOCv3Ck24Xzgr9wpNuF84q1LKraav0f4GAAAAQGNCJQAAAAAam8ZQ6bXj3gBTxf3CWblXaML9wlm5V2jC/cJZuVdowv3CWTW+V6ZuUDcAAAAA4zeNlUoAAAAAjNlUhUqllM8rpeyVUt5VSnls3PthspRSfqCU8v5Sym/ctfYxpZSfKaX8+97/P3qce2QylFJeXEp5cynlHaWUt5dSvq637n7hmFLKny+l/LtSytt698o/6q0/Ukp5a+/70Y+UUp417r0yGUopC6WUnVLKT/Yeu1c4USnld0opu6WUJ/un7fg+xP2UUpZKKW8opfxmKeWdpZRXul+4Vyllufc1pf/fH5dSvt69wv2UUr6h9zPub5RSXt/72bfRzy5TEyqVUhaS/LMkn5/kZUm+tJTysvHuignzg0k+7561x5L8bK31pUl+tvcYPpzkG2utL0vyiiRf2/t64n7hXn+W5LNrrZ+c5OVJPq+U8ook/yTJd9Ra/1KSP0rylWP
cI5Pl65K8867H7hVO81m11pffddS370Pcz3cm+be11k9M8sk5+jrjfuGYWute72vKy5N8apI/TfKv417hBKWUdpL/NcmVWusnJVlI8iVp+LPL1IRKST4tybtqre+utf6nJD+c5FVj3hMTpNb680k+cM/yq5L8y97b/zLJ2kg3xUSqtb6v1vprvbf/JEc/mLXjfuEe9ciHeg8Xe//VJJ+d5A29dfcKSZJSyouSfGGS7+s9LnGv0IzvQzxDKeWjknxmku9Pklrrf6q17sf9wuk+J8lv1VrfE/cK9/dQklYp5aEkz07yvjT82WWaQqV2kt+76/F7e2twmo+rtb6v9/bvJ/m4cW6GyVNKeUmSlSRvjfuFE/TamZ5M8v4kP5Pkt5Ls11o/3HuK70f0/R9J/rckH+k9/ti4V7i/muSnSylPlFJe3VvzfYiTPJLkqST/otde+32llOfE/cLpviTJ63tvu1d4hlprJ8k/TfK7OQqTPpjkiTT82WWaQiU4l3p01KHjDrmjlPLcJD+e5OtrrX989/vcL/TVWg97ZeQvylHV7CeOeUtMoFLKFyV5f631iXHvhanxGbXWT8nRaIevLaV85t3v9H2IuzyU5FOSfHetdSXJf8w97UvuF+7Wm4Hz15P82L3vc6/Q15ut9aocBdeXkjwnzxwn80DTFCp1krz4rscv6q3Baf6glPLCJOn9//1j3g8TopSymKNA6XW11s3esvuF++q1Grw5ySuTLPXKhBPfjzhyNclfL6X8To5a9D87RzNQ3CucqPcvxKm1vj9HM08+Lb4PcbL3JnlvrfWtvcdvyFHI5H7hfj4/ya/VWv+g99i9wkn+apLfrrU+VWs9SLKZo59nGv3sMk2h0q8meWlvEvmzclTO98Yx74nJ98Ykf7v39t9O8hNj3AsTojfn5PuTvLPW+u13vcv9wjGllBeUUpZ6b7eSfG6OZnC9OckX957mXiG11uu11hfVWl+So59RHq+1flncK5yglPKcUsrz+m8n+WtJfiO+D3GCWuvvJ/m9Uspyb+lzkrwj7hfu70vzdOtb4l7hZL+b5BWllGf3fj/qf21p9LNLOap+mw6llC/I0byChSQ/UGv9tjFviQlSSnl9kkeTPD/JHyT5liRbSX40ycNJ3pPkf6i13jvMmzlTSvmMJG9JspunZ598U47mKrlfuKOU8l/naEDhQo7+IeZHa63fWkr5hBxVo3xMkp0kX15r/bPx7ZRJUkp5NMk/qLV+kXuFk/Tui3/de/hQkn9Va/22UsrHxvchTlBKeXmODgF4VpJ3J/k76X1fivuFu/SC6t9N8gm11g/21nxt4USllH+U5G/m6HTsnST/U45mKJ35Z5epCpUAAAAAmAzT1P4GAAAAwIQQKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAwc0op31xKeXsp5ddLKU+WUj69t/5zpZTfLaWUu567VUr5UO/tl5RSfqP39qOllJ884WP/XCllr/dxnyylvKG3vtx735OllHeWUl57wmvvfPwL/vM+Wkr5b+56/IOllC++6OsAANztoXFvAADgIpVSXpnki5J8Sq31z0opz0/yrLuesp/kapJfKKUsJXnhAJf5slrrrXvWvivJd9Raf6K3j8sDfNxBPZrkQ0l+aYTXBADmnEolAGDWvDDJH9Za/yxJaq1/WGu9fdf7fzjJl/TevpZk8wKv+97+g1rr7mlPLqUslFI2Sim/2quo+p9764/2Kp7eUEr5zVLK6/qVVaWUL+itPVFK+a5Syk+WUl6S5KuTfEOvSuq/7V3iM0spv1RKebeqJQBgGIRKAMCs+ekkLy6l/L+llH9eSvnv7nn/z+YocFnIUbj0IwNc43V3tb9t9Na+I8njpZR/U0r5hl4V1Gm+MskHa61/JclfSfJVpZRHeu9bSfL1SV6W5BOSXC2l/Pkk/1eSz6+1fmqSFyRJrfV3knxPjqqkXl5rfUvvY7wwyWfkqGrr5gB/RgCAUwm
VAICZUmv9UJJPTfLqJE8l+ZFSyv9411MOk/xCjgKlVi+UaerLegHOy2ut673r/osk/1WSH8tRO9qvlFL+s1M+xl9L8hWllCeTvDXJxyZ5ae99/67W+t5a60eSPJnkJUk+Mcm7a62/3XvO6x+wx61a60dqre9I8nGN/4QAAA8gVAIAZk6t9bDW+nO11m9J8r8k+Rv3POWHczQD6Ucv+Lq3a60/UGt9VZIPJ/mkU55ekvy9u8KpR2qtP91735/d9bzDDDYH8+6PUe77LACAAQmVAICZ0juF7aV3Lb08yXvuedpbktzIg6t9mlz380opi723Pz5HlUedU16yneRr7nrNf1lKec4pz99L8gm9GUpJ8jfvet+fJHnegFsHABiI098AgFnz3CT/Z2+m0YeTvCtHrXB31Fprkn96ho/1OaWU9971+L/v/f91pZRu7+0/rLX+1Ry1s31nKeX/662v11p//5SP/X05amv7td4g7qeSrN3vybXWbinl7yb5t6WU/5jkV+969/+d5A2llFcl+Xtn+HMBAJxbOfqZCgCASVdKeW6t9UO9EOqfJfn3tdbvGPe+AID5pP0NAGB6fFVvsPfbk3xUjk6DAwAYC5VKAAAAADSmUgkAAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI0JlQAAAABoTKgEAAAAQGP/P6Wj/HZKjQKyAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAABJUAAAOTCAYAAAAGyjJNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdf3Dc6X0f9s9DCLb2XPsgyRdHhKTc1Uqg6YWyGbGRUmY61qUOHMt3wVB2Hdea2k1qNROnTsdTZMha9Z1ceUgHzo/pJNOpnEzqVKnq6sRgpFxqWg3tTqP6zuEFPjHniLViyVKXanSyDootrSwI9/QPYkkC3B/YZ7Hf736/eL1mOAc8wJf77AKLw775+XyelHMOAAAAAJjEibo3AAAAAEDzCJUAAAAAmJhQCQAAAICJCZUAAAAAmJhQCQAAAICJCZUAAAAAmNjL6t7ApL75m785P/jgg3VvAwAAAKA1nn322c/nnB+Y5JrGhUoPPvhgXLt2re5tAAAAALRGSum3J71G+xsAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADCxmYZKKaVPpZSup5R+PaV0bcDHU0rpv08pfSKl9LGU0h+b5X4AAAAAOBovq+A23ppz/vyQj/2ZiPjDe3/eHBH/w95/4VjY3OrGxpUbcXO7FyeXOrG+uhJrp5fHXveDP/er8dF//YXb75/91lfGP/yRPzGT23rDT/yT+Mpuvv3+yxdSfPynv3vsda+/8FR87c5l8bIU8YmLbxt73UPnn4q7LosUEZ+8NP66B88/dc/ap1znOtfN/Lom7NF1rqvqutL/95VeV7rP0v+3V/l4vvHxX4x/+/u7t9//pq9fiI+9+7tmtseS363quL03//RH4t/87ldvv/8t3/h18cxPfOfY60ofz6p/Lyv93iy9rvTxfNfm9Xj/M5+J3ZxjIaX4gTe/Nt6zdm
rsdd/5N34lfvNzX7r9/h/+A98QH/nx7xh7XenXr+37LL2uRNXPvdL7Vnp7/efQ1/3B179p7CcfUHf725+NiH+Qb3k6IpZSSq+ueU8cY5tb3Th76Wo8dP6pOHvpamxudWd6W+tPPhfd7V7kiOhu92L9yefG3ubBX0IiIj76r78QP/hzv3rkt3Xwf9AREV/ZzfGGn/gnI687+MtxRMTX8q31UQ7+4hIRkffWRxn0i8uodde5znVHc10T9ug611V1Xen/+0qvK91n6f/bq3w8D74oioj4t7+/G298/BdnsseS363quL2DL2ojIv7N73413vzTHxl5XenjWfXvZaXfm6XXlT6e79q8Hu97+tOxm2/d5m7O8b6nPx3v2rw+8rqDQU1ExG9+7kvxnX/jV0ZeV/r1a/s+S68rUfVzr/S+ld7eoOfQJGYdKuWI+KWU0rMppXcO+PhyRHzmrvf/3701qFxp8NK/dtIw6t0ffj52Djx5d3ZzvPvDz4+87uAvIePWp7mtYT9cxv3QOfjL8bj1vmEfLv8RBwDVKP1/X+l1pUr/316lgy+Kxq1Pq+R3qzpu7+CL2nHrfaWPZ9W/l5V+b5ZeV/p4vv+Zz0y03ncwqBm33lf69Wv7PkuvK1H1c6/0vpXe3rQ//2cdKv3JnPMfi1ttbj+aUvoPS/6SlNI7U0rXUkrXXnjhhaPdIewpDV42t7px4fL1fWHUhcvXxwZLL355Z6L1aVR5WwAA0Fb96pHDrtel7ftsyv0r0bT7NtNQKefc3fvv5yLiH0XEHz/wKd2IeO1d779mb+3g3/PenPOZnPOZBx54YFbb5ZgrDV42rtyI3s7+9Le3sxsbV24c2d4AAID6LaQ00Xpd2r7Ppty/Ek27bzMLlVJK35BS+sb+2xHxpyPiXx74tA9FxH+6dwrcWyLiiznnz85qTxwfVc5Gurndm2i9b6mzONH6NKq8LQA4jl425Hf9YevTXlfq5QuD/+Jh63X4pq9fmGh9Wme/9ZUTrdd1e9/yjV830Xpf6eM57DtiVt8ppd+bpdeVPp4/8ObXTrTe94f/wDdMtN5X+vVr+z5LrytR9XOv9L6V3t60P/9nWan0LRHxz1JKz0XEr0XEUznnX0wp/cWU0l/c+5x/EhG/FRGfiIifi4i/NMP9cEyUtqOVBi8nlzoTrfc98djDsXhi/xN48USKJx57eOR1Jcl16W1V/csE8+Fvff+3T7TeN+xUl8Oc9lKi6ttr++NScv9Kb6v0uuUhP1eHrU97e65z3STXfeLi2+4Jgg5zilvpdaX7/PhPf/c9LyAOc1JWlY/nx979Xfe8CDrMCUale/yHP/In7gl0DnMaW9W398xPfOc9L2IPcwJV6eP5yUtvu+d3vsOc/lb192bpdaWP53vWTsU73vK62797L6QU73jL68aezPWRH/+Oe4KZw5yqVvr1a/s+S68rUfVzr/S+ld7eoOfQJFKe0768Yc6cOZOvXbtW9zaYY2cvXY3ugCqh5aVOfPT8I0Ov29zqxvoHnoudl+48JxZPpNj4vm+LtdPD58f3Q6y7W+A6iwtx8dypkddFlB0VOejEkIjxv4xsbnVj48qNuLndi5NLnVhfXRm7v1GndIz6xcB1zb4uouz7pVTpc7bU6Z/6pYFtra+4bzG2fvJPj7y2zY9LRLX3r8Sgk4giDn/ENQAAw6WUns05n5nkmpfNajNQl9J2tP4Lp0lfUJVet7nVjQ8+2913VOQHn+3GmT/0ypHX/sZnf3ei9bv3OU8vDplvVX6/vPUND8T7nv70wPVZePzRh2P9yef2DeZfXEjx+KOjK/ci2v24RMz/z4mTS52BQdu4ylAAAGZDqETrTPOio/QFVcl1owZ8j/q7nORWv7Pf+sqh1WKzsJDSwNMe5nVY36R++eODT/Uctj6t0iC4alU/Lk2wvroysDJ0fXWlxl0BABxfQiVapykvOkorqkpV2dbS9hDkU78z+Gs0bL2vs3giejsvDVwf5Qfe/NqBFS
vjhvUtDwlYx82fqVrVz4WI+a/IiajncalSyc+kpgSCAADHhVCJ1mnKi44q2zg2t7r72n26271Yf/K5iIiRj8sr7lscOntmlEGB0qj1phn0dRu13vf2N71mYDj09je9ZuR1/Tlbk87fqiNgLQkKpnkuzPsMoGm0udXr4Cy6/oEKEaN/JvU/3pavMQBA0wmVaKUmvOhYX10ZONdl3Av+pc5ibPfuDXpGnVL37g8/v+92IiJ2dnO8+8PPj3ychmVA47Khkj1O40SKeGnAnk7MqDCqtBJrmnam96ydmvg0i6oD1tKgoDT8miaYaIKmVF2WKG3/BQBgvgiVoE4Hc4lDFPI88djDA0+pe+Kx4UOGS+cwfXFAMDRqvW9YtjKr7rdBgdKo9b5v+LqF+NJXdweuj1JaiVVa4TSNKgPW0qCgNPxqezCxdno5rv32F/ZVqL39TfMfmB9G21v7aL42V0ECwFESKkFNNq7c2BcMRUTsvJRn9gK8xP1DKo7uH1NxtD0krBq2XpeXhoRAw9b7SiuV2j5rapqgoCT8answUXpCZBO0ubWP5mt7FSQAHKXR02GBmZn2BfhHzz8Sn7z0tvjo+UfG/pI7rO1sXDvaV792bxXPqPXbf++QmUvD1vuGnZ42q1PVBg3NHrXeV1qp1PZZU8MCgVkFBVXfXtVGVWI13frqSnQW91cEtqW1j+Zr83MPAI6aUAlqMqzaZ1wVUIlhrXGjWuYiIr48JFwZtt73e18ZXJE0bL3v+8687p45SCfSrfVRhg0OHzdQvNSw09PGnapWel1TVB0UtD2YaHMl1trp5bh47lQsL3Uixa3nwMVzp1SBMBfa/NwDgKMmVIKalFYBlbj221+YaH1awzKnMVlUbFy5cc8cpJdyjP3X4ccffTgWF/anUYsLKR5/dHRoVhpGlYYZb33DAxOtN03VQUHbg4m2V2JNWnEJVWn7cw8AjpKZSlCT0iqgEv/LM/ceY99fH3WiWEqDT3qb1Qig0n8dLp0z9bY3vjre9/S9j83b3vjqsbdXMkB5mtPfmqLqkxebcNJjqTaf/gbzzHMPAA5PpRIcA6Wnow0b9TOrEUCls5hKPfWxz0603jdsgPLmVnfkdXWc/kZztb0SC+aV5x4AHJ5KJWCoqk8r+8rO4Na/Yet9pSf1vDjkNLph632lR9lP83g63vp4anMlFswzzz0AOByVSlCT0hPZqlT1aWWlp7FVfVJPaZte6ePZD826273IcSc0G1cZBQAAMEtCJajJE489HIsHjjpbPJHGnshWpdLTyobV3cxoFFNxyNNZHPwjcNh638uHfHzYel/p4+l4awAAYB4JlaAma6eXY+P7vm3fzIaN7/u2mZTbl4YZpaec/Qff+sqJ1vtKT2MrPann5Qfu27j1vt//2uDKqWHrfaWPp+OtAQCAeWSmEtSoqpkND76qM3AY9IOvGh26lJ6q9qnfGRx2DFvve/zRh2P9yediZ/dOO9jiQorHHx1dvVV6Us/2kNlJw9b7Sgeflz6eJ5cGf/0cbw0AANRJqATHwNO/9eJE63crCb5KTzkrDV1Kr1u6b3HgUO5ZnTYXUfZ4Ot6aqhgIDwDAJIRKcAxUPXC76lPjIsrCmmF3f0YPS7HS0AwmUXqKIgAAx5dQCY6BqkOeaU85K3lRW1Jh8cXe4Da3Yet9y0Pa0cbNqJqG462ZtVED4X3vVUe1GADQJAZ1wzHwA29+7UTr06r6lLN+GNXd7kWOO2HU5lZ35HX3dwa3uQ1b7ysduA3zrLRtlaNT+rMMAKAuQiU4Bs78oVfGiQNFSSfSrfVxNre6cfbS1Xjo/FNx9tLVQ724qfqUs9Iwalih1rgCrrXTy3Hx3Kl9J/ddPHdKNQGNNqxycZZtq+xX+rMMAKAu2t+ggSZtj9i4cuOek8leyjG2raW0HW3t9HJc++0vxPuf+Uzs5hwLKcXb3zS+fav0lLPSMKr09LcI7Wi0T9Wz17hX6c8yAIC6qFSChtnc6sb6k8/ta4
9Yf/K5kRVEVVcAbW5144PPdm+/GN3NOT74bHdslVNphdOw0GlcGFV6Hcy7kgrD0rZVjo6fSQBA0wiVoGHe/eHnY2d3f+XAzm6Od3/4+aHXlL5QqTqMKm0rKw2jzEaijUrn8ng+1M/XAABoGqESNMyLQ1qzhq1HlL9QKR1kXXULR2kYZTYSbVR1qMvR8TUAAJrGTCU4BkpnHJUOsi6djVQ6w6n/8ZIXXmYj0TbThLqeD/XzNQAAmkSlEjTM0pAqoWHrEeUzjkoHWZdWRjn5CKZnLg8AAFURKkHDfM+3vXqi9YjysGbpviEB1pD1vtIWDicfNV/JgGiOlrk8AABURfsbNMwvf/yFidYjysOaYSeJH+aE8ZIWjtK2OebDNO2LHJ3+Y71x5Ubc3O7FyaVOrK+u+BoAAHDkhEq00uZWt7UvqEoCotKw5ou9wW1uw9antb66si+UiJh9hUWbv1eqNqoizmNaLXN5AACogvY3Wqf0OO3+tfPeulPSklbaDlP1bJaqTz6a5nuFe2lfBACA40WlEq1TWi3RlNadkpa00naYt77hgXjf058euD5OaQVQaYVFye2prDla2hcBAOB4ESrROqXVEtMEDKUBSsl1pS1pJWFNyfymiOoDutLbU1lztOpoXwQAAOqj/Y3WKW3ZKg0YNre6sf6B5/a1UK1/4LmxLVSlrVel96+ktW8WAd0slN7e/Z3BLYPD1hmt6vZFAACgXiqVaJ3SaonS1p0nPvR87Ly0v/ds56UcT3zo+ZEvpksro0pa0koreUofk2kqgEqqt0pvL6XJ1hnPgGgAADg+VCrROqXVEqXDrLeHtJ0NW+8rDUJKWtJKK3mqHvBddfXW9peHfO2GrB/c67wPdQcAAJgllUq0Ukm1xNrp5bj221+I9z/zmdjNORZSire/aXZVF1VWAZUGWKUDvh981eD79uCrRt+30uqtqqvTmjLUHQAAYJZUKsGeza1ufPDZbuzuHaO2m3N88Nnu2AqUV9w3eP7OsPW+KquASit5Im6FJB89/0h88tLb4qPnHzlUaPL0b7040XrfNOFXSXXasJbBcafbVT0zCgAAYB4JlWBPaVDw+KMPx+LC/iE8iwspHn/04ZHXVdmmVxpgleoHc4dd76s6/Co93c6pcQAAANrf4LaqW8T615a06U16e9PssUpVH0lf+jUvbZsDAABoE6ES7JkmKGjCiVdN2WNEdeFX6de86vALAABgHgmVYM9b3/BAvO/pTw9cnydNGBK9PCSsWZ6zgK40HGpK5RcAAMAsCZVgT+l8naqVnpBWpaZU8lTduggwK5tbXUE3AFA5oRLsacrw5ar3WfJCpUmVPKXhkBdwwLxoQgUrANBOQiXY05Thy1Xuc5oXKm2u5Nnc6sb6B56LnZdunWbX3e7F+geeiwgv4IDqNaGCFQBopxN1bwDmxfrqSnQWF/atzWPL1vrqSiwupH1riwtpJvsc9ULlOHviQ8/fDpT6dl7K8cSHnq9pR8Bx1pRKWwCgfYRKsGft9HJcPHcqlpc6keLWUOmL507N57/y5jHvHxEvVAbb7u1MtA4wS8MqVeet0hYAaB/tb3CXJrRsbVy5MbBKZhZtDk1pCQQ4zppyOAIA0D5CJWiY0uqhksHSx+GFSsnj8or7FuPFL99blfSK+xZntU2AoZp0OAIA0C5CJbhLE070KqkeKh243fYXKqWPy+OPPhzrTz4XO7t3KsYWF1I8/ujDs90wwBBNqLQFANpHqEQrlYRD05x0VmUYVVI9NM3JQG1+oVL6uLQ9bIN51oTwHwDguBAq0Tql4VBpwDBNGFWiJNAwcHuwaR6XNodtMK+q/nkLAMBoTn+jdUaFQ6OUBgylt1elJp0MtLnVjbOXrsZD55+Ks5euxuZWd2a31aTHBWjGz1sAgONEqETrlIZDpQFD1VVA/X+p7273Isedf6kfFb6sr65EZ3Fh39o8DtwuuW/TaMrjAtyi6hIAYL4IlWidpSEncA1b7ysNGKqudi
n5l/q108tx8dypWF7qRIqI5aVOXDx3au7aRaquQmjK4wLcoroQAGC+mKlE6+Q82Xpf6fDl9dWVWP/Ac7Hz0l0ngZ1IM6t2Kf2X+ibMAKqjCqEJjwtwS8lBBQAAzI5Qidb5Ym9novW7FQcMacz7R+jkUie6A0KWNvxLfZvvGzA9Jy8CAMwXoRKtU3UwsXHlRuzs7i+D2tnNY0+NK9Xmf6lv830DjobqQgCA+SFUonWqDiaqbtmq+l/qN7e6ld2WKgQAAIDmECrROlUHE9NURpUGNlX9S33/NLZ+QNc/ja2/h1lQhQAAANAMQiVaqcpgorQyqo7AZlKjTmOblz0CAABQjxN1bwCarvRY+lGBzbyo4zQ2AAAAmkGlEhyBksqoJgQ2TmMDAABgGJVKUJNhwcysApvNrW6cvXQ1Hjr/VJy9dDU2t7pjr1lfXYnO4sK+NaexAQAAECFUgtpME9hMGhD15zd1t3uR4878pnHXlbb2AQAA0H7a36AmpafUlQz4nmbgttPYAAAAGESoBDUqCWxKAqImzG8CAACgWYRKcAQ2t7oTVxyVKgmIDNwGAADgqJmpBFMqnVdUqmTAt4HbAAAAHDWhEkxpVDvaLJQERAZuAwAAcNS0v8GUqp5XVDrg28BtAAAAjpJQCaZUx7yiNgdE79q8Hu9/5jOxm3MspBQ/8ObXxnvWTtW9LQAAAA7Q/gZTMq/o6Lxr83q87+lPx27OERGxm3O87+lPx7s2r9e8MwAAAA4SKsGU2j6vaHOrG2cvXY2Hzj8VZy9dndkA8oiI9z/zmYnWAQAAqI/2NzgCbW1H659s1x9E3j/ZLiJmcn/7FUqHXQcAAKA+KpWAoao+2W4hpYnWAQAAqI9KJajR5lZ34lPcqlT1yXY/8ObXxvue/vTA9XlT+rWb9685AADAYQmVoCZVt5aVqPpku/4pb/N++lvp164JX3MAAIDDSrlhs0rOnDmTr127Vvc2YGpnL10dGNgsL3Xio+cfOfLbK6mQORiCRNw62a5Ng8hLlH7tqv6aAwAAHFZK6dmc85lJrlGpBDWpsrWstEKm/zHtWvuVfu2qbicEAACYJYO6oSbDWshm0VpW9cDttiv92lX5NQcAAJg1oRLUZH11JTqLC/vWOosLsb66cuS3VVoh069w6m73IsedCqfNre6R77FJSr92VX7NAQAAZk37G9Skytay0oHboyqc5q0FrspT1Uq/dtoJAQCANjGoG46B0oHbD51/Kgb9hEgR8clLbzv6jRYyUBwAAGA6JYO6tb9BA21udePspavx0Pmn4uylq2Pb0dZOL8fFc6dieakTKW6dNnaYwKUpM4DMjAIAAKie9jdomGlOcpu0amd9dSXWP/Bc7Lx0p15p8USauxlATlUDAAConlAJGqZ0zlHxzKE05v2jvr0CpTOjAAAAKKf9DRqmpCpnc6sb608+t+8Ut/UnnxvbNrdx5Ubs7O6fqrSzm8e2lVV9apxT1QAAAKonVIKGKZlz9O4PPz8wHHr3h58feVulbWVVzzgqnRkFAABAuZm3v6WUFiLiWkR0c87fc+BjPxwRGxHRL1/42znnvzvrPUGTra+uDDzpbFRVzotf3plove/+zmJs9+79nPs7iyOvq2PGUcnMqGlU2d4HAAAwj6qYqfRXIuJfRcQ3Dfn4L+Sc/3IF+4BW6AcXVQQaacj8pGHrfW2fcVQ6LB0AAKBNZhoqpZReExFvi4ifjogfn+VtwXEyaVXO0pCKo6UxFUfbQyqZhq33lVRTNUnpsPQIFU4AAEB7zHqm0t+KiL8aES+N+Jy3p5Q+llJ6MqX02kGfkFJ6Z0rpWkrp2gsvvDCTjUKbPfHYw7F4Yn950eKJFE889vDI60rmN0W0f8ZRaXtf1QPMAQAAZmlmlUoppe+JiM/lnJ9NKX3HkE/7cES8P+f8+yml/yIifj4iHjn4STnn90bEeyMizpw5kw9+HBittGVumoqjNs84Km3vm6bCCQAAYN7Msv3tbEQ8ll
L67oh4eUR8U0rpfTnnd/Q/Ief8O3d9/t+NiL82w/1Aa5QEKCUhzzTzm6oMeaqecVQattUxwBwAAGBWZhYq5ZwvRMSFiIi9SqX/+u5AaW/91Tnnz+69+1jcGugNjFB1gFISRlW9x6orgErDtrYPMAcAAI6XKk5/2yel9FMRcS3n/KGI+LGU0mMR8bWI+EJE/HDV+4GmaUILVdV7rKMCqCRsa/sAcwAA4HipJFTKOf9KRPzK3ts/edf67Wom4HCmGRJdVTta1SFPUyqApmknBAAAmDeVVyoB0ykJUKpuR6s65GlSBVDVA8wBAABm5UTdGwAms766Ep3FhX1r4wKUUe1o87LHaaydXo6L507F8lInUkQsL3Xi4rlTwhsAAIAZUqkEDVPSQlV1O1odbV5trwCqsn0RAADgMIRKTMQL2/kwaYCydN9ivPjlnYHrs9L2kKdKVbcvAgAAHIb2Nw6t/8K2u92LHHde2G5udeveGmPkPNk686Xq9kUAAIDDECpxaF7YNtcXe/dWKY1aZ75U3b4IAABwGNrfODQvbOfHpG2IVZ/GxtHy9QMAAOaRSiUObdgLWC9sq1XShlj1aWwcLV8/AABgHgmVODQvbOdDSRvi2unluHjuVCwvdSJFxPJSJy6eO2XIc0P4+gEAAPNI+xuHVscx8dyrtA2x9DQ2J/7NB6fpAQAA80aoxES8sK3f/Z3F2B4wYPv+zuKR35aj7AEAABhG+xs0zM7uSxOtT8OJfwAAAAwjVIKG+dJXdydan4YT/wAAABhGqAQM5cQ/AAAAhhEqQY02t7px9tLVeOj8U3H20tXY3OqOvWZpyOykYevTcOIfAAAAwwiVoCb9Idjd7V7kuDMEe1yw9MRjD8fiibRvbfFEiicee/jI9+goewAAAIZx+hvUZNQQ7FGhTf9jG1duxM3tXpxc6sT66srMgh4n/gEAADCIUIm5trnVrSw8qdo0Q7BLgp42P5YAAABUT6jE3Oq3h/WrefrtYRExszCkyuDl5FInugMCpFkMwa7jsQQAAKDdzFRibo1qD5uF0hlHpaocgl31YwkAAED7CZWYW9O0h5WoOnipcgh21Y8lAAAA7af9jblVZXtYRD3BS1VDsJfuW4wXv7wzcB0AAABKqFRiblXZHhYxPKyaVYhVpa8cqMAat363za1unL10NR46/1ScvXR1Zu2AAAAANItQiblVZXtYRPUhVpV6Oy9NtN5X9ZwpAAAAmkP7G3Otqvaw/m1FRGWnvzXBqDlTx/lxAQAAQKgE+1QZYlXpFUNmKr1izEwlA74BAAAYRvsbHAOPP/pwLC6kfWuLCykef/Thkde1ec4UAAAA01GpBDXa3OpW0m5X2tq3vroSFy5f39cC15Y5U8yXqp4LAADA0REqQU36Q7D7gU1/CHZEzCxYmvTvPQ5zpoQZ9av6uQAAAByNlHOuew8TOXPmTL527Vrd24Cpnb10NboDZhMtL3Xio+cfqWFHx8/BMCPiViXWLE8Z5F6eCwAAUL+U0rM55zOTXKNSCWrSlCHYba7kcbrdfGjKcwEAANjPoG6oSROGYG9udePHf+HXo7vdixy32pJ+/Bd+PTa3unVv7UgIM+ZDE54LAADAvYRKUJP11ZXoLC7sW5vlEOzNrW6cvXQ1Hjr/VJy9dPVQwdCFyx+Llw6svbS33gbCjKNX8n1W9XMBAAA4GkIlqMna6eW4eO5ULC91IsWt+TGzmuXTnx10d8XRhcvXx77g7+0cjJRGrzeNMONolX6fVflcAAAAjo6ZSlCjkhPZSpgdNNhxON2uStN8n1X1XAAAAI6OUAmOgdLZQSdSxEsDDog8kY5iV/NBmHF0zKgCAIDjReUBO/cAACAASURBVPsbHANL9y1OtN73n7z5dROtc7yZUQUAAMeLUAmOgTyg2mjUet971k7FO97yulhIt0qTFlKKd7zldfGetVNHvEPawIwqAAA4XrS/wTHwxd7OROt3e8/aKSESh2JGFQAAHC9CJWigza
3uRC/c7+8sxvaAAOn+zuj2N5iUGVUAAHB8CJWgYfrHtvdP2eof2x4RQ1/MpyGDtYetAwAAwDhCJajRpBVHEWXHtm9/eXCb27B1AAAAGMegbqhJv+Kou92LHHcqjja3uiOvKzm23alcAAAAHDWhEtRkVMXRKCUB0Vvf8MBE6wAAADCOUAlqUlJxFFF2bPsvf/yFidYBAABgHKES1KS0JW3t9HJcPHcqlpc6kSJieakTF8+dGjmLqTTAAgAAgGEM6oaarK+uxPqTz8XObr69triQRlYc9U16bPvJpU50BwRI8zhTqWR4OQAAANVTqUQrbW514+ylq/HQ+afi7KWrY4df12X3rkBp0PtHZX11JRZPpH1riycOF2BVqXR4OQAAANUTKtE6TQkmnvjQ8/HSgbWX9tZnIo15fw6UDi8HAACgekIlWqcpwcR2b2ei9WlsXLmxr80uImJnN8/dY2L2EwAAQHMIlWgdwcS9mvKYlA4vBwAAoHpCJVqnKcHEK+5bnGh9Gk15TNZXV6KzuLBvrbO4MHeznwAAABAq0UJNCSYef/ThWFw4MDx7IcXjjz585LfVlMdk7fRyXDx3KpaXOpEiYnmpExfPnXL6GwAAwBx6Wd0bgKPWDyDm/Vj6KvfZlMck4tZe53FfAAAA7Jdyns0R5rNy5syZfO3atbq3AYyxudVtRIgFAABARErp2ZzzmUmuUakEjFQSDm1udePC5eu3T+HrbvfiwuXrERGCJQAAgJYQKgFDlYZDG1du3L6mr7ezGxtXbrQmVFKJBQAAHHdCJWigqgKN0nDo5nZvovWmUYkFAADg9DdonH6g0d3uRY47gcbmVvfIb6s0HDq51JlovWlGhW0AAADHhVAJGqbKQKM0HFpfXYnFE2nf2uKJFOurK0e2tzq1vRILAADgMIRK0DBVBhrrqyvRWVzYt9ZZXDhcOJTGvN9gba/EAgAAOAyhEnNtc6sbZy9djYfOPxVnL12dSYtX01QZaKydXo6L507F8lInUkQsL3Xi4rlTY+cGbVy5ETu7ed/azm5uTXvYVGEbAABASxjUzdwyDHmw9dWVfY9LxGwDjbXTyxM/3m1vD+s/Hk5/AwAAjjOhEnPrOBxLX6IJgcbJpU50BwRIbWoPKwnbAAAA2kT7G3Or7dUubaY9DAAAoP1UKjG3jkO1S4kmtAU2oZoKAACA6QiVmFtVzw5qiqa0BWoPAwAAaDehEnNLtctg2gIBAACYB0Il5ppql3tpCwQAAGAeGNQNDWMINgAAAPNApRI0jLZAAAAA5oFQCRpIWyAAAAB10/4GAAAAwMSESgAAAABMTKgEAAAAwMSESgAAAABMTKgEAAAAwMSESgAAAABMTKgEAAAAwMReVvcG4Djb3OrGxpUbcXO7FyeXOrG+uhJrp5fr3hYAAACMJVSCmmxudePC5evR29mNiIjudi8uXL4eETFXwZLgCwAAgEG0v0FNNq7cuB0o9fV2dmPjyo2adnSvfvDV3e5FjjvB1+ZWt+6tAQAAUDOVSlCTm9u9idbrMCr4aku1kkosAACAMiqVoCYnlzoTrdehCcHXNFRiAQAAlBMqQU3WV1eis7iwb62zuBDrqys17eheTQi+ptGEFkQAAIB5JVSCmqydXo6L507F8lInUkQsL3Xi4rlTc9V61YTgaxp1VGJtbnXj7KWr8dD5p+LspauqogAAgMYyUwlqtHZ6uShEqmoOUP/vrHLmUJUzjk4udaI7IECaVSVWU078AwAAOAyhEjRM1cFEafBVour7tr66su/2ImZbiXUcBp8DAADHh/Y3aJg2zwGq+r5V3YLY9sHnAADA8TLzSqWU0kJEXIuIbs75ew587Osj4h9ExJsi4nci4vtzzp+a9Z6gydocTNRx36qsxKq63Q4AAGCWqqhU+isR8a+GfOwvRMSLOefXR8TfjIifqWA/0GhtPpGtzfctov2DzwEAgONlpqFSSuk1EfG2iPi7Qz7lz0bEz++9/WRE/KmUUprlnq
DpSoOJJpw61vbQpQkn/gEAABzWrNvf/lZE/NWI+MYhH1+OiM9EROScv5ZS+mJEvCoiPj/jfUFjlZzI1pRTx+o4ba5qVbbbAQAAzNLMQqWU0vdExOdyzs+mlL5jyr/rnRHxzoiI173udUewO2i2SYOJJp06JnQBAABohlm2v52NiMdSSp+KiP81Ih5JKb3vwOd0I+K1EREppZdFxP1xa2D3Pjnn9+acz+SczzzwwAMz3DK0U5uHewMAAFCPmYVKOecLOefX5JwfjIg/FxFXc87vOPBpH4qIH9p7+3v3PifPak9wXE0zALsJs5gAAACoXhWnv+2TUvqplNJje+/+vYh4VUrpExHx4xFxvur9wHEwzXDvC5evR3e7FznuzGISLAEAADDrQd0REZFz/pWI+JW9t3/yrvWvRMT3VbEHOM5KB2A3aRYTAAAA1aokVALqVzIA2ywmAAAAhqm8/Q1ojmlmMQEAANBuQiVgqNJZTAAAALSf9jdgqNJZTAAAALSfUAkYqWQWEwAAAO2n/Q0AAACAialUggba3OrOfUtaE/YIAABAOaESNMzmVjcuXL4evZ3diIjobvfiwuXrERFzE9o0YY8AAABMR/sbNMzGlRu3w5q+3s5ubFy5UdOO7tWEPQIAADAdlUpQo5IWsZvbvYnW69CEPQIAADAdlUpQk36LWHe7FznutIhtbnVHXndyqTPReh2asEcAAACmI1SCmpS2iK2vrkRncWHfWmdxIdZXV458j6WasEcAAACmo/0NatId0go2bL2v3x43zyerNWGPAAAATEeoBDVZSCl2cx64Ps7a6eW5D2iasEcAAADKaX+DmgwKlEatAwAAwDwRKkFNlocMrR62DgAAAPNEqARHYHOrG2cvXY2Hzj8VZy9dHXuCW4Rh1gAAADSbmUowpc2tbly4fP32SW7d7V5cuHw9ImLkTCHDrAEAAGgyoRJMaePKjduBUl9vZzc2rtwYGxAZZg0AAEBTCZVgSje3exOt321zq6tSCQAAgEYSKsGU7u8sxnZvZ+D6KKVtcwAAADAPDOqGKaU02XrfqLY5AAAAmHdCJZjS9pfvrVIatd43TdscAAAA1E37G5Vo8+ygk0ud6A4Igk4udWZyHfOhzd/TAAAAh6FSiZnrzw7qbvcix53ZQZtb3bq3diTWV1eis7iwb62zuBDrqyszuY76tf17GgAA4DBUKjFzo2YHzVtlR0n1Sf/jVV1Xuk+OTpO+pwEAAGZFqMTMNWV20DSnsa2dXi4KE0quc2pc/ZryPQ0AADBL2t+YuWEzguZtdlBTTmNryj7brCnf0wAAALMkVGLmmjI7qCnVJ03ZZ5s15XsaAABgloRKzNza6eW4eO5ULC91IkXE8lInLp47NXetWk2pPmnKPtusKd/TAAAAs2SmEpUonTlUpfXVlX2ziiLms/qkKftsuyZ8TwMAAMySUAn2THMaW5Wask8AAADaLeWc697DRM6cOZOvXbtW9zYAAAAAWiOl9GzO+cwk16hUAmZic6urmgoAAKDFhErAkdvc6u6b+9Td7sWFy9cjIgRLAAAALeH0N+DIbVy5sW+QeEREb2c3Nq7cqGlHAAAAHDWhEnDkbm73JloHAACgebS/wREwP2i/k0ud6A4IkE4udWrYDQAAALOgUgmm1J8f1N3uRY4784M2t7p1b60266sr0Vlc2LfWWVyI9dWVmnYEAADAUVOpBFMaNT9onqqVqqym6v+9qrcAAADaS6gEU2rC/KA6TmNbO70sRAIAAGgx7W8wpWFzguZpfpDT2AAAADhqQiWY0lvf8MBE63VoQjUVAAAAzSJUgin98sdfmGi9Dk2opgIAAKBZhEowpWmqgDa3unH20tV46PxTcfbS1ZmdGOc0NgAAAI6aQd0wpZNLnegOCJDGVQFVOTzbaWwAAAAcNaESTGl9dWVfOBRxuCqgUcOzZxH2OI0NAACAoyRUgimVVgFN2zan6ggAAIA6CZXgCJRUATWhbQ4AAACGMa
gbalI6PHtU29xxVtXQcwAAAG5RqQQ1qaNtrkQTWu1UbwEAAFRPqAQ1qrJtrkRTwpqqh54DAACg/Q0ap7RtrkRTWu2qrt4CAABAqASNs3Z6OS6eOxXLS51IEbG81ImL507NpCKnKWHNsCqtWVRvAQAAcIv2N2igkra5ElW22k1jfXVlX5texOyqtwAAALhFpRIwVJWtdtOosnoLAACAW1QqAUOVnlA3jdLT5qqq3gIAAOAWoRIwUpVhTVNOmwMAAED7GzBHmnLaHAAAAEIlYI405bQ5AAAAhErAHBl2qty8nTYHAACAUAmYI005bQ4AAACDuoE5UsdpcwAAAJQRKgFzpcrT5gAAACin/Q0AAACAialUghptbnW1egEAANBIQiVaqTSsqTLk2dzqxoXL16O3sxsREd3tXly4fD0iQrAEAADA3NP+Ruv0w5rudi9y3AlrNre6M7mu1MaVG7cDpb7ezm5sXLkxk9sDAACAoyRUonVKw5qqQ56b272J1gEAAGCeCJVondKwpuqQ5+RSZ6J1AAAAmCdCJVpn6b7Fidb7qg551ldXorO4sG+ts7gQ66srM7k9AAAAOEpCJVon58nW+6oOedZOL8fFc6dieakTKSKWlzpx8dwpQ7oBAABoBKe/0Tpf7O1MtN7XD3OqOv2tf5tCJAAAAJpIqETrnFzqRHfAHKTDtLEJeQAAAOBwtL/ROnXMKtrc6sbZS1fjofNPxdlLV2Nzqzuz2wIAAIB5oFKJ1qm6jW1zqxsXLl+P3s5uRER0t3tx4fL1fXsBAACAthEq0UpVtrFtXLlxO1Dq6+3sxsaVG0IlAAAAWkv7G0zp5oD5TaPWAQAAoA1UKsGUphkMXmpzq1vpKXUAAABwkEolmFLVg8H7M5y6273IcWeGk+HgAAAAVEmoBFNaO70cF8+diuWlTqSIWF7qxMVzp2ZWOTRqhhMAAABURfsbHIEqB4Ob4QQAAMA8UKkEDTNsVtMsZzgBAADAQUIlaJiqZzgBAADAINrf4C5NOFWtv5953ycAAADtJlSCPf1T1fpDsPunqkXE3AU2Vc5wAgAAgEG0v8Eep6oBAADA4QmVYI9T1QAAAODwtL/BnpNLnegOCJAOc6paE2YxNYXHEgAAoBlUKsGe0lPV+rOYutu9yHFnFtPmVneGu20njyUAAEBzzCxUSim9PKX0ayml51JKz6eU3j3gc344pfRCSunX9/7857PaD4yzdno5Lp47FctLnUgRsbzUiYvnTo2tkplmFtPmVjfOXroaD51/Ks5eunrswxNzrQAAAJpjlu1vvx8Rj+Scfy+ltBgR/yyl9L/nnJ8+8Hm/kHP+yzPcBxxayalqpbOYmnTaXFXMtQIAAGiOmVUq5Vt+b+/dxb0/eVa3B3UZNnNp3CwmVTn3Kn0sAQAAqN5MZyqllBZSSr8eEZ+LiI/knJ8Z8GlvTyl9LKX0ZErptbPcD8xC6SymQUPBR60fB6WPJQAAANWbaaiUc97NOX97RLwmIv54SumPHviUD0fEgznnN0bERyLi5wf9PSmld6aUrqWUrr3wwguz3DJMrHQW00JKE60fB6WPJQAAANVLOVfTkZZS+smI+HLO+WeHfHwhIr6Qc75/1N9z5syZfO3atVlskRZpwrH0D55/aujHPnXpbRXuBAAAgOMupfRszvnMJNfMbFB3SumBiNjJOW+nlDoR8Z0R8TMHPufVOefP7r37WET8q1nth+OjKQOwl5c6A1vdls0PosGaEOgCAABHY5btb6+OiF9OKX0sIv553Jqp9I9TSj+VUnps73N+LKX0fErpuYj4sYj44Rnuh2OiKQOwzQ+ibfqBbne7FznuBLqbW926twYAAMzAzCqVcs4fi4jTA9Z/8q63L0TEhVntgeOpKcfS96s3VHXQFqMCXd/XAADQPjMLlaAuJ4e0lc3jsfRrp5e92KY1mhLoAgAAR2Omp79BHbSVQT2GBbfzGOgCAADTEyrROo6lh3oIdAEA4HjR/kYrlb
aVObkKypkTBgAAx4tQCfb0T67qDxrun1wVEV4UwyGZEwYAAMeH9jfYM+rkKgAAAGA/lUqwp46Tq7TbAQAA0FQqlWBP1SdX9dvtutu9yHGn3W5zqzuT2wMAAICjJFRirm1udePspavx0Pmn4uylqzMNXKo+uUq7HQAAAE2m/Y25VfXg7KpPruoOaasbtg4AAADzRKjE3BpVyTOroKfKk6sWUordnAeuAwAAwLwTKjG36hicXaVBgdKo9boYJg4AAMAgZioxt6oenF215SH3Y9h6HQwTBwAAYBihEnOr6sHZVWvC/TNMHAAAgGG0vzG3qh6cXbUm3L+2tyACAABQTqjEXKtycHYd5v3+nVzqDDyNri0tiAAAAJTT/gYM1YQWPQAAAOqhUgkYqgktegAAANRDqASMNO8tegAAANRD+xsAAAAAExMqAQAAADAx7W9AK2xudc1+AgAAqJBQCWi8za1uXLh8PXo7uxER0d3uxYXL1yMiBEsAAAAzov0NaLyNKzduB0p9vZ3d2Lhyo6YdAQAAtJ9QCWi8m9u9idYBAACYnlAJaLyTS52J1gEAAJieUAlovPXVlegsLuxb6ywuxPrqSk07AgAAaD+DuoHG6w/jdvobAABAdYRKQCusnV4WIgEAAFRI+xsAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAExMqAQAAADAxoRIAAAAAE3tZ3RsAJre51Y2NKzfi5nYvTi51Yn11JdZOL9e9LQAAAI4RoRI0zOZWNy5cvh69nd2IiOhu9+LC5esREYIlAAAAKqP9DRpm48qN24FSX29nNzau3KhpRwAAABxHQiVomJvbvYnWAQAAYBaEStAwJ5c6E60DAADALAiVoGHWV1eis7iwb62zuBDrqys17QgAAIDjyKBuaJj+MG6nvwEAAFAnoRI00NrpZSESAAAAtdL+BgAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEhEoAAAAATEyoBAAAAMDEXlb3BgCOwuZWNzau3Iib2704udSJ9dWVWDu9XPe2AAAAWkuoBDTe5lY3Lly+Hr2d3YiI6G734sLl6xERgiUAAIAZESrBMdHmSp6NKzduB0p9vZ3d2LhyozX3EQAAYN4IleAYaHslz83t3kTrAAAATM+gbjgGRlXytMHJpc5E6wAAAExPqATHQNsredZXV6KzuLBvrbO4EOurKzXtCAAAoP2ESnAMtL2SZ+30clw8dyqWlzqRImJ5qRMXz51qRWsfAADAvDJTCY6B9dWVfTOVItpXybN2elmIBAAAUCGVSnAMrJ1ejre/aTkWUoqIiIWU4u1vEsIAAABQTqgEx8DmVjc++Gw3dnOOiIjdnOODz3Zjc6tb884AAABoKqESHANtP/0NAACA6gmV4Bho++lvAAAAVE+oBMdA209/AwAAoHpCJTgG1ldXorO4sG+tbae/AQAAUK2X1b0BYPb6p7xtXLkRN7d7cXKpE+urK05/AwAAoJhQCY6JtdPLQiQAAACOzKHa31JKfySl9E9TSv9y7/03ppTeNdutAQAAADCvDjtT6eci4kJE7ERE5Jw/FhF/blabAgAAAGC+HTZUui/n/GsH1r521JsBAAAAoBkOGyp9PqX0rRGRIyJSSt8bEZ+d2a4AAAAAmGuHHdT9oxHx3oh4Q0qpGxGfjIgfnNmuAAAAAJhrY0OllNJCRPylnPN/lFL6hog4kXP+3dlvDQAAAIB5NTZUyjnvppT+5N7bX5r9lgAAAACYd4dtf9tKKX0oIj4QEbeDpZzz5ZnsCgAAAIC5dthQ6eUR8TsR8chdazkihEoAAAAAx9ChQqWc8382643AcbS51Y2NKzfi5nYvTi51Yn11JdZOL9e9LQAAABjrxGE+Ka
X0mpTSP0opfW7vzwdTSq+Z9eagzTa3unHh8vXobvciR0R3uxcXLl+Pza1u3VsDAACAsQ4VKkXE34+ID0XEyb0/H95bAwptXLkRvZ3dfWu9nd3YuHKjph0BAADA4R02VHog5/z3c85f2/vzP0XEAzPcF7Teze3eROsAAAAwTw4bKv1OSukdKaWFvT/viFuDu4dKKb08pfRrKaXnUkrPp5TePeBzvj6l9AsppU+klJ5JKT04+V2AZjq51JloHQAAAObJYUOlPx8R/3FE/H8R8dmI+N6IGDe8+/cj4pGc87dFxLdHxHellN5y4HP+QkS8mHN+fUT8zYj4mcNuHJpufXUlOosL+9Y6iwuxvrpS044AAADg8A57+ttvR8Rjk/zFOeccEb+39+7i3p984NP+bEQ8sff2kxHxt1NKae9aaLX+KW9OfwMAAKCJDhUqpZR+PiL+Ss55e+/9V0TEX885//kx1y1ExLMR8fqI+Ds552cOfMpyRHwmIiLn/LWU0hcj4lUR8fmJ7gU01NrpZSESAAAAjXTY9rc39gOliIic84sRcXrcRTnn3Zzzt0fEayLij6eU/mjJJlNK70wpXUspXXvhhRdK/goAAAAAjtBhQ6UTe9VJERGRUnplHLLKKSJiL5D65Yj4rgMf6kbEa/f+zpdFxP0xYAB4zvm9OeczOeczDzzg0DkAAACAuh02VPrrEfGrKaX/LqX0noj4vyPir426IKX0QEppae/tTkR8Z0R8/MCnfSgifmjv7e+NiKvmKQEAAADMv8MO6v4HKaVrEfFI3Bq2fS7n/BtjLnt1RPz83lylExHxv+Wc/3FK6aci4lrO+UMR8fci4n9OKX0iIr4QEX+u9I4AAAAAUJ2RoVJK6b6I2Mk57+ScfyOltBsR3x0Rb4iIkaFSzvljMWDuUs75J+96+ysR8X0lGwcAAACgPuPa334xIh6MiEgpvT4ifjUi/t2I+NGU0qXZbg0AAACAeTUuVHpFzvk3997+oYh4f875v4yIPxMRb5vpzgAAAACYW+NCpbuHZj8SER+JiMg5fzUiXprVpgAAAACYb+MGdX8spfSzEdGNiNdHxC9FRPRPdQMAAADgeBpXqfQjEfH5uDVX6U/nnL+8t/7vRcTPznBfAAAAAMyxkZVKOedeRFxKKX1DRPTu+tAzEfHrs9wYAAAAAPNrXKVS3z+NiPvuer8TEf/H0W8HAAAAgCYYN1Op7+U559/rv5Nz/r2U0n2jLoAm2tzqxsaVG3FzuxcnlzqxvroSa6eX694WAAAAzJ3DhkpfSin9sZzzv4iISCmdif3tcNB4m1vduHD5evR2diMiorvdiwuXr0dEtCJYEpgBAABwlA4bKv1XEfGBlNLNvfdfHRHfP5stQT02rty4HSj19XZ2Y+PKjcaHL20PzAAAAKjeyJlKKaV/P6X0B3PO/zwi3hARvxAROxHxixHxyQr2B5W5uT24+G7YepOMCswAAACgxLhB3f9jRHx17+0/ERH/TUT8nYh4MSLeO8N9QeVOLnUmWm+SNgdmAAAA1GNcqLSQc/7C3tvfHxHvzTl/MOf830bE62e7NajW+upKdBYX9q11FhdifXWlph0dnTYHZgAAANRjbKiUUurPXfpTEXH1ro8ddh4TNMLa6eW4eO5ULC91IkXE8lInLp471YqZQ20OzAAAAKjHuGDo/RHxf6aUPh+3Tnv7vyIiUkqvj4gvznhvULm108utCJEO6t8np78BAABwVFLOefQnpPSWuHXa2y/lnL+0t/ZHIuLfyTn/i9lvcb8zZ87ka9euVX2zMFc2t7oCIgAAAI5MSunZnPOZSa4Z28KWc356wNr/M8mNAEdnc6sbFy5fv32aW3e7FxcuX4+IECwBAABQmXEzlYA5s3Hlxu1Aqa+3sxsbV27UtCMAAACOI8O24S5NaCu7ud2baB0AAABmQaUS7Om3lXW3e5HjTlvZ5la37q3tc3KpM9E6AAAAzIJQCfY0pa1sfXUlOosL+9Y6iw
uxvrpS044AAAA4jrS/wZ6mtJX12/HmvU0PAACAdhMqwZ6T/3979x9leVrXB/79bFGYCxILZaJ0MYQxyxYLttDScWDbzQLGFCqEOgO7cY7GJMeV1bhZf23tmVZOwD3kTO+pHF0NWV38EeOBHRXoVGZFLT02HpU4oz3UjOUAtcEfzHAbZQwpkHglRc2zf9S9PV091dX1vVX3Z71e58yZus+937pPFd/pqn7z+XyeuVba+wRI49hWtnRmXogEAADASGl/gy5tZQAAAHB4KpWgS1sZAAAAHJ5QCa6hrQwAAAAOR/sbAAAAAI0JlQAAAABoTPsbHIPV9bZZTAAAAJwoQiU4otX1ds5f3EhneydJ0t7q5PzFjSQRLAEAADC1tL/BEa2sbV4NlHo62ztZWdsc0Y4AAABg8IRKcERXtjqN1gEAAGAaCJXgiE7NtRqtAwAAwDQQKsERLS8upDU7s2etNTuT5cWFEe0IAAAABs+gbjii3jBup78BAABwkgiV4BgsnZkXIgEAAHCiaH8DAAAAoDGVSnAMVtfbfbW/9XsdAAAAjJpQCY5odb2d8xc30tneSZK0tzo5f3EjSQ4MiPq9DgAAAMaB9jc4opW1zavBUE9neycra5sDuQ4AAADGgUolxtoktIdd2eo0Wj/qdQAAADAOVCoxtnrtYe2tTmqeaA9bXW+Pemt7nJprNVo/6nUAAAAwDoRKjK2jtIetrrdz7sKl3HbXe3PuwqWBBlGvfMEtjdZ7lhcX0pqd2bPWmp3J8uLCse0NAAAABkWoxNjqLChPAQAAIABJREFUtz1s2BVO7/vwY43We5bOzOf1L53PTClJkplS8vqXzo9dex8AAADsR6jE2Oq3PWzYA7CPEn6954F2dmpNkuzUmvc80B679j4AAADYj1CJsdVve9iwB2BPSvgFAAAAx8npb4ytXhtY09PfTs210t4nQDrMAOx+TptbXlzI+YsbewKiQYdfk3AqHgAAANNNqMRYWzrTfMZQvyFPbxZT77reLKbePg7aYzK88KvffQIAAMBxEioxdfoNeQ5qR7vZtf2GX8vvfijbO/Xq2uxMuWn4dZR9AgAAwHERKjGV+gl5hj2LKUlSb/J4HyPZJwAAAFzHoG7o6nfgdr9W1jaz/fjeFGn78XrTQd3D3icAAADsR6gEXf2eNtevfiuOhr1PAAAA2I/2N+jqdxZTv/od1D3sfQIAAMB+Sq2HGOIyRs6ePVsvX7486m3AkV1/iluyW3F09x2nBUQAAAAMVSnlgVrr2SbXqFSCETlKxdHqelulEgAAACMlVIIR6ueUuusrnNpbnZy/uHH18wEAAMAwGNQNE2ZlbXNPy1ySdLZ3bnpqHAAAABwnoRJMmH5PjQMAAIDjJFSCCXOj0+FudmocAAAAHCehEkyY5cWFtGZn9qy1ZmeyvLgwoh0BAABwEhnUDRPmKKfGAQAAwHERKsEE6ufUOAAAADhOQiVgKqyut1VvAQAADJFQCZh4q+vtnL+4kc72TpKkvdXJ+YsbSSJYAgAAGBCDuoGJt7K2eTVQ6uls72RlbXNEOwIAAJh+QiVg4l3Z6jRaBwAA4OiESsDEOzXXarQOAADA0QmVgIm3vLiQ1uzMnrXW7EyWFxdGtCMAAIDpZ1A3MPF6w7id/gYAADA8QiVgKiydmRciAQAADJFQiam0ut5WtQIAAAADJFRi6qyut3P+4sbVI+bbW52cv7iRJIIlAAAAOCYGdTN1VtY2rwZKPZ3tnaysbY5oRwAAADB9hEpMnStbnUbrAAAAQHNCJabOqblWo3UAAACgOaESU2d5cSGt2Zk9a63ZmSwvLoxoRwAAADB9DOpmKIZ5Glvv8zr9DQAAAAZHqMTAjeI0tqUz80IkAAAAGCDtbwyc09gAAABg+qhUYuAm6TS2YbbpHcWk7BMAAIDpJVRi4E7NtdLeJ0Aat9PYRtGm149J2ScAAADTTfsbAzcpp7FNSpvepOwTAACA6aZSiYGblNPYJqVNb1
L2CQAAwHQTKjEUk3Aa26S06U3KPgEAAJhu2t+ga1La9CZlnwAAAEw3lUrQNSltepOyTwAAAKZbqbWOeg+NnD17tl6+fHnU2wAAAACYGqWUB2qtZ5tco/0NAAAAgMaESgAAAAA0JlQCAAAAoLGBhUqllFtLKe8rpXywlPJwKeU793nNK0opnyqlPNj9558Oaj8AAAAAHJ9Bnv72uSTfW2v9QCnlGUkeKKX8aq31g9e97jdrra8Z4D4AAAAAOGYDq1SqtX681vqB7sd/nuRDSZx5DgAAADAFhjJTqZTyvCRnkty/z9MvL6U8VEr5pVLKi4axHwAAAACOZpDtb0mSUsrnJ3lPku+qtX76uqc/kOSv11o/U0r5uiSrSZ6/z+d4Y5I3Jslzn/vcAe8Ymltdb2dlbTNXtjo5NdfK8uJCls4ozAMAAGB6DbRSqZQym91A6Z211ovXP19r/XSt9TPdj38xyWwp5Vn7vO7ttdaztdazt9xyyyC3DI2trrdz/uJG2lud1CTtrU7OX9zI6np71FsDAACAgRnk6W8lyU8m+VCt9Qdv8Jov6b4upZSv7O7nPwxqTzAIK2ub6Wzv7FnrbO9kZW1zRDsCAACAwRtk+9u5JH8/yUYp5cHu2vcleW6S1Fp/LMkbknx7KeVzSTpJvqHWWge4Jzh2V7Y6jdYBAABgGgwsVKq1/laScpPXvC3J2wa1BxiGU3OttPcJkE7NtUawGwAAABiOoZz+BtNseXEhrdmZPWut2ZksLy6MaEcAAAAweAM//Q2mXe+UN6e/AQAAcJKoVAIAAACgMZVKcESr6+2cv7hx9QS49lYn5y9uJIlqJQAAAKaWUImhWF1vT2172Mra5tVAqaezvZOVtc2p+RoBAADgekIlBm7aK3mu7HPy20HrTIdpDkoBAAAOw0wlBu6gSp5pcGqu1WidydcLSttbndQ8EZSurrdHvTUAAIChESoxcNNeybO8uJDW7MyetdbsTJYXF0a0IwZt2oNSAACAwxAqMXDTXsmzdGY+d99xOvNzrZQk83Ot3H3Haa1QU2zag1IAAIDDMFOJgVteXNgzUymZvkqepTPzQqQT5NRcK+19AqRpCUoBAAAOQ6USA6eSh2mj5REAAEClEkOikodp0ruXnf4GAACcZEIlgD4ISgEAgJNO+xsAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNCZUAAAAAaOwpo94AMByr6+2srG3mylYnp+ZaWV5cyNKZ+VFvCwAAgAklVIITYHW9nfMXN9LZ3kmStLc6OX9xI0kESwAAAPRF+xucACtrm1cDpZ7O9k5W1jZHtCMAAAAmnVAJToArW51G6wAAAHAzQiU4AU7NtRqtAwAAwM0IleAEWF5cSGt2Zs9aa3Ymy4sLI9oRAAAAk06oBCfA0pn5vP6l85kpJUkyU0pe/9J5Q7oBAADom1AJToDV9Xbe80A7O7UmSXZqzXseaGd1vT3inQEAADCphEpwAjj9DQAAgOMmVIITwOlvAAAAHDehEpwATn8DAADguAmV4ARw+hsAAADH7Smj3gAweL1T3lbWNnNlq5NTc60sLy44/Q0AAIC+CZXghFg6My9EAgAA4NhofwMAAACgMaESAAAAAI0JlQAAAABoTKgEAAAAQGNCJQAAAAAac/obU+lNqxu55/5Hs1NrZkrJnbffmrcunR71tgAAAGBqCJWYOm9a3cg77nvk6uOdWq8+FiwBAADA8dD+xtS55/5HG60DAAAAzQmVmDo7tTZaBwAAAJoTKjF1ZkpptA4AAAA0J1Ri6tx5+62N1gEAAIDmDOpm6vSGcTv9DQAAAAan1AmbM3P27Nl6+fLlUW8DAAAAYGqUUh6otZ5tco32NwAAAAAaEyoBAAAA0JhQCQAAAIDGDOqGCbS63s7K2maubHVyaq6V5cWFLJ2ZH/W2AAAAOEGESjBhVtfbOX9xI5
3tnSRJe6uT8xc3kkSwBAAAwNBof4MJs7K2eTVQ6uls72RlbXNEOwIAAOAkEirBhLmy1Wm0DgAAAIMgVIIJc2qu1WgdAAAABkGoBMdgdb2dcxcu5ba73ptzFy5ldb09sPdaXlxIa3Zmz1prdibLiwsDe08AAAC4nkHdcETDHpzd+5xOfwMAAGCUhEpwRAcNzh5U0LN0Zl6IBAAAwEhpf4MjMjgbAACAk0ilEhzRqblW2vsESIcZnL263tbGBgAAwERSqQRH1O/g7N4spvZWJzVPzGIa5JBvAAAAOC4qleAa/VQO9Ts4exSzmAAAAOC4CJWg6yinuPUzONssJgAAACaZ9jfoOqhyaBBuNHPpMLOYAAAAYNSEStA17MqhfmcxAQAAwDgQKkHXsCuHls7M5+47Tmd+rpWSZH6ulbvvOG2eEgAAABPBTCXoWl5c2DNTKRl85VA/s5gAAABgHAiVoKvfU9wAAADgJBIqwTVUDgEAAMDhCJXgGKyut1U4AQAAcKIIleCIVtfbe2Yxtbc6OX9xI0kESwAAAEwtp7/BEa2sbe4Z7p0kne2drKxtjmhHAAAAMHhCJTiiK1udRusAAAAwDYRKcESn5lqN1gEAAGAaCJXgiJYXF9Kandmz1pqdyfLiwoh2BAAAAINnUDccUW8Yt9PfAAAAOEmESnAMls7MC5EAAAA4UbS/AQAAANCYUAkAAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI0JlQAAAABo7Cmj3gAw3lbX21lZ28yVrU5OzbWyvLiQpTPzo94WAAAAIyZUAm5odb2d5Xc9lO3Ha5KkvdXJ8rseShLBEgAAwAmn/Q24obfc+/DVQKln+/Gat9z78Ih2BAAAwLgQKgE3tNXZbrQOAADAyTGwUKmUcmsp5X2llA+WUh4upXznPq8ppZQfKaV8pJTye6WUrxjUfgAAAAA4PoOsVPpcku+ttb4wycuSfEcp5YXXveZrkzy/+88bk/zoAPcDNPTMp802WgcAAODkGFioVGv9eK31A92P/zzJh5JcP9n3dUl+pu66L8lcKeXZg9oT0MybX/uizM6UPWuzMyVvfu2LRrQjAAAAxsVQTn8rpTwvyZkk91/31HySR695/LHu2seHsS/gYL0T3lbWNnNlq5NTc60sLy44+Q0AAIDBh0qllM9P8p4k31Vr/XSfn+ON2W2Py3Of+9xj3B1wM0tn5oVIAAAAPMlAT38rpcxmN1B6Z6314j4vaSe59ZrHz+mu7VFrfXut9Wyt9ewtt9wymM0CAAAAcGiDPP2tJPnJJB+qtf7gDV52b5Jv7p4C97Ikn6q1an0DAAAAGHODbH87l+TvJ9kopTzYXfu+JM9NklrrjyX5xSRfl+QjSf4iyT8a4H4AAAAAOCYDC5Vqrb+VpNzkNTXJdwxqDwAAAAAMxlBOf2N6rK63nQS2j0n4vgx7j5PwPQEAAKB/QiUObXW9nfMXN9LZ3kmStLc6OX9xI0lOdFgwCd+XYe9xEr4nAAAAHM1AT39juqysbV4NCXo62ztZWdsc0Y7GwyR8X4a9x0n4ngAAAHA0QiUO7cpWp9H6STEJ35dh73ESvicAAAAcjVCJQzs112q0flJMwvdl2HuchO8JAAAARyNU4tCWFxfSmp3Zs9aancny4sKIdnT8VtfbOXfhUm676705d+FSVtfbN71mEr4vw97jJHxPAAAAOBqDujm03oDlfk70etPqRu65/9Hs1JqZUnLn7bfmrUunB73lRvodLn2U78uwDHuPk/A9AQAA4GhKrXXUe2jk7Nmz9fLly6PeBg28aXUj77jvkSetf9PLnjtWwdK5C5fS3mfmz/xcK++/61Uj2BEAAAAMRynlgVrr2SbXaH9j4O65/9FG66NiuDQAAAAcnlCJgdu5QTXcjdZHxXBpAAAAODyhEgM3U0qj9VExXBoAAAAOT6jEwN15+62N1kdl6cx87r7jdObnWinZnaV09x2nDZcGAACAfTj9jYHrDeMe99
Pfkt1gSYgEAAAAN+f0N6bS6nrbcfYAAABwSP2c/qZSiamzut7O+Ysb6WzvJEnaW52cv7iRJFMTLAnNAAAAGDWhElNnZW3zaqDU09neycra5tgFL/2EQychNAMAAGD8GdTN1Lmy1Wm0Piq9cKi91UnNE+HQ6nr7wOsOCs0AAABgWIRKTJ1Tc61G66PSbzg0KaEZAAAA002oxNRZXlxIa3Zmz1prdibLiwsj2tH++g2HJiU0AwAAYLoJlZg6S2fmc/cdpzM/10pJMj/Xyt13nB67eUP9hkOTEpoBAAAw3QzqZiotnZkfuxDpesuLC3sGbieHC4d6X5fT3wAAABgloRKMyFHCoUkIzQAAAJhuQiW4xup6e6gVQMIhAAAAJpVQibE2zJBndb29px2tvdXJ+YsbSSL4AQAAgOsY1M3YWl1vZ/ldD6W91UnNbsiz/K6HsrreHsj7raxt7plvlCSd7Z2srG0O5P0AAABgkqlUYmy95d6Hs/143bO2/XjNW+59+KaVQ/1UOF3Z6jRaBwAAgJNMpRJja6uz3Wi9p9fGdm2F0/mLGzetcDo112q0DgAAACeZUImp028b2/LiQlqzM3vWWrMzWV5cOPY9cvxW19s5d+FSbrvrvTl34dLA2iQBAADYpf2NsfXMp83mP/7Fk6uSnvm02QOv67eNrdceN8zT3zgehqwDAAAMn1CJsfXm174oy+9+KNs7T8xVmp0pefNrX3TgdafmWmnvEyAdpo1t6cy8EGICHVSd5n9PAACAwdD+xthaOjOflTe8OPNzrZQk83OtrLzhxTcNCbSxnTyGrAMAAAyfSiXGWj+VQ9rYTp6jVKcBAADQH6ESU0kb28myvLiwZ6ZSojoNAABg0IRKwMRTnQYAADB8QiVgKqhOAwAAGC6DugEAAABoTKgEAAAAQGNCJQAAAAAaEyoBAAAA0JhQCQAAAIDGhEoAAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGjsKaPeADAcq+vtrKxt5spWJ6fmWlleXMjSmflRbwsAAIAJJVSCE2B1vZ3zFzfS2d5JkrS3Ojl/cSNJBEsAAAD0RagEx2Dcq4BW1javBko9ne2drKxtjtU+AQAAmBxCJTiiSagCurLVabQOAAAAN2NQNxzRQVVA4+LUXKvROgAAANyMUAmOaBRVQKvr7Zy7cCm33fXenLtwKavr7QNfv7y4kNbszJ611uxMlhcXBrZHAAAAppv2NziiU3OttPcJkAZVBdRPu11vfZznPgEAADBZhEpwRMuLC3tCnmSwVUD9Dt1eOjMvRAIAAODYCJXgiIZdBWToNgAAAONAqATHYJhVQMNut5sUq+tt7X0AAABDZFA3TBhDt5+sN2eqvdVJzRNzpm42wBwAAID+CZVgwiydmc/dd5zO/FwrJcn8XCt333H6RFflHDRnCgAAgMHQ/gbXmJQWKkO39zJnCgAAYPiEStDVa6HqVbz0WqiS3DTAmZQwalqZMwUAADB82t+gq98WKvN8Rs+cKQAAgOETKkFXvy1U5vmMnjlTAAAAw6f9Dbr6baEyz2c8mDMFAAAwXCqVoKvfFqobhU7m+QAAADDNhErQ1W8LlXk+AAAAnETa3+Aa/bRQ9V7v9DcAAABOEqESHAPzfAAAADhptL8BAAAA0JhQCQAAAIDGhEoAAAAANGamEpwQq+ttw8QBAAA4NkIlOAFW19s5f3Ejne2dJEl7q5PzFzeSRLAEAABAX4RKcI1preZZWdu8Gij1dLZ3srK2ORVfHwAAAMMnVIKuaa7mubLVabQOAAAAN2NQN3QdVM0z6U7NtRqtAwAAwM0IlaBrmqt5lhcX0pqd2bPWmp3J8uLCiHbEtFpdb+fchUu57a735tyFS1ldb496SwAAwIBof4OuU3OttPcJkKahmqfXvjeN86IYH9PcQgoAADyZUAm6XvmCW/KO+x7Zd30aLJ2Z9xd7BspAeAAAOFm0v0HX+z78WKN1YK9pbiEFAACeTKUSdB3lL8
Sr6+2htpYN+/3gMKa5hRQAAHgylUrQ1e8Jab05Mu2tTmqemCMzqAHFw34/OCwD4QEA4GQRKkFXv38hPmiOzCAM+/3gsJbOzOfuO05nfq6VkmR+rpW77zitig4AAKaU9jfo6veEtGHPkTG3hnFmIDwAAJwcQiW4Rj9/IR72HBlzawAAABgH2t/giIY9R+aVL7il0ToAAAAMgkolOKJ+2+b69b4PP9ZoHQAAAAZBqATHYJhzZMxUAgAAYBxof4MJc6PZSWYqAQAAMExCJZgww57hBAAAAPvR/sZQrK63hzZzaNoNe4YTAAAA7EeoxMCtrrdz/uJGOts7SZL2VifnL24kydgFIZMSfg1zhhMAAADsR/sbA7eytnk1UOrpbO9kZW1zRDva3+p6O8vveijtrU5qdsOv5Xc9lNX19qi3BgAAAGNnYKFSKeWnSimfKKX8/g2ef0Up5VOllAe7//zTQe2F0Wrf4FSyG62PylvufTjbj9c9a9uP17zl3odHtCMAAAAYX4Nsf/vpJG9L8jMHvOY3a62vGeAeGAMzpWSn1n3Xx8lWZ7vROgAAAJxkA6tUqrX+RpJPDurzMzn2C5QOWgcAAADG36hnKr28lPJQKeWXSikvutGLSilvLKVcLqVcfuyxx4a5P47B/Fyr0fqoPPNps43WAQAA4CQbZaj0gSR/vdb64iT/IsnqjV5Ya317rfVsrfXsLbfcMrQNcjyWFxfSmp3Zs9aancny4sKIdrS/N7/2RZmd2duSNztT8ubX3jDvBAAAgBNrZKFSrfXTtdbPdD/+xSSzpZRnjWo/DM7SmfncfcfpzM+1UrJboXT3HaezdGZ+1FvbY+nMfFbe8OI9+1x5w4vHbp8AAAAwDgY5qPtApZQvSfKntdZaSvnK7AZc/2FU+2Gwls7MT0Q40+8+V9fbWVnbzJWtTk7NtbK8uDARXy8AAAD0a2ChUinlniSvSPKsUsrHkrw5yWyS1Fp/LMkbknx7KeVzSTpJvqFWk5sZrX7CodX1ds5f3EhneydJ0t7q5PzFjSQRLAEAADC1BhYq1VrvvMnzb0vytkG9PzTVbzi0srZ59ZqezvZOVtY2hUoAAABMrVGf/gZj46Bw6CBXtjqN1gEAAGAaCJWgq99w6NRcq9E6AAAATAOhEnT1Gw698gW3NFoHAACAaSBUgq7lxYW0Zmf2rLVmZ7K8uHDgde/78GON1gEAAGAaDGxQN0ya3lDtpqe/makEAADASSRUgmssnZlvfGLbqblW2vsESGYqAQAAMM20v8ER9ds2BwAAAJNMpRIcUb9tcwAAADDJhEpwDPppmwMAAIBJpv0NAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNGdTNVFpdbzuNDQAAAAZIqMTUWV1v5/zFjXS2d5Ik7a1Ozl/cSBLBEgAAABwToRJTZ2Vt82qg1NPZ3snK2qZQaQKoMgMAAJgMQiWmzpWtTqN1xocqMwAAgMlhUDdT59Rcq9H6JFpdb+fchUu57a735tyFS1ldb496S8fioCozAAAAxotQiamzvLiQ1uzMnrXW7EyWFxdGtKPj1avmaW91UvNENc80BEuqzAAAACaHUImps3RmPnffcTrzc62UJPNzrdx9x+mpaZ+a5mqek1BlBgAAMC3MVGIo3rS6kXvufzQ7tWamlNx5+61569Lpgb3f0pn5qQmRrjfN1TzLiwt7Ziol01VlBgAAME1UKjFwb1rdyDvueyQ7tSZJdmrNO+57JG9a3RjxzibTNFfzTHuVGQAAwDRRqcTA3XP/ozdcv1m10rArnCbBtFfzTHOVGQAAwDQRKjFwvQqlw6739Cqcrn197/FJDpZ6gcvK2maubHVyaq6V5cUFQQwAAABDJVRi4GZK2TdAminlwOuOUuE07VTzAAAAMGpmKjFwd95+a6P1nn4rnAAAAIDBU6nEwPWqiprORuq3wgkAAAAYPKESQ/HWpdONW9buvP3WPTOVrl0HAAAARkuoxNjqt8IJAA
AAGLxSJ2w+zdmzZ+vly5dHvQ0AAACAqVFKeaDWerbJNQZ1AwAAANCYUAkAAACAxsxUOqFW19tZWdvMla1OTs21sry4kKUz86PeFgAAADAhhEon0Op6O+cvbqSzvZMkaW91cv7iRpIIlgAAAIBD0f52Aq2sbV4NlHo62ztZWdsc0Y4AAACASaNS6QS6stVptD6JtPcBAADAYKlUOoFOzbUarU+aXntfe6uTmifa+1bX26PeGgAAAEwNodIJtLy4kNbszJ611uxMlhcXRrSj46W9DwAAAAZP+9sJ1GsDm9b2sJPQ3gcAAACjJlQ6oZbOzE9NiHS9U3OttPcJkKalvQ8AAADGgfY3ps60t/cBAADAOFCpxNSZ9vY+AAAAGAdCJabSNLf3AQAAwDjQ/gYAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA05vQ3OAar6+2srG3mylYnp+ZaWV5ccPocAAAAU02oBEe0ut7O+Ysb6WzvJEnaW52cv7iRJIIlAAAAppZQCa7RT8XRytrm1UCpp7O9k5W1TaESAAAAU0uoBF39Vhxd2eo0WgcAAIBpYFA3Y211vZ1zFy7ltrvem3MXLmV1vT2w9zqo4uggp+ZajdYBAABgGgiVGFu9yqH2Vic1T1QODSpY6rfiaHlxIa3ZmT1rrdmZLC8uHNveAAAAYNwIlRhb/VYO9avfiqOlM/O5+47TmZ9rpSSZn2vl7jtOm6cEAADAVDNTibE17FlFy4sLe2YqJYevOFo6My9EAgAA4ERRqcTYGvasIhVHAAAAcHgqlRhbR6kc6peKIwAAADgcoRJjqxfurKxt5spWJ6fmWlleXBD6AAAAwBgQKjHWVA4BAADAeDJTCQAAAIDGVCrBMVhdb2vTAwAA4EQRKsERra639wwUb291cv7iRpIIlgAAAJha2t/giFbWNvecUJckne2drKxtjmhHAAAAMHhCJTiiK1udRusAAAAwDYRKcESn5lqN1gEAAGAamKnEVOp3cHY/1y0vLuyZqZQkrdmZLC8uHPnrAAAAgHElVGKsvWl1I/fc/2h2as1MKbnz9lvz1qXTB17T7+Dsfq/rPef0NwAAAE4SoRJj602rG3nHfY9cfbxT69XHBwVLBw3OPijo6fe6ZDdYEiIBAABwkpipxNi65/5HG6339Ds428BtAAAAODyVSjTS76yifuzU2mi959RcK+19gqCbDc7u9zoAAAA4iVQqcWi9mUPtrU5qnpg5tLreHsj7zZTSaL1neXEhrdmZPWuHGZzd73UAAABwEgmVOLSDZg4Nwp2339povWfpzHzuvuN05udaKUnm51q5+47Th5qL1M917G91vZ1zFy7ltrvem3MXLg0sfAQAAGA0tL9xaMOeOdQbxt309Lek/8HZBm4fj35P0gMAAGByCJU4tFHMHHrr0ulDhUiMl6OcpAcAAMBk0P7GoZk5xGE5SQ8AAGD6CZU4NDOHOKwbVa85SQ8AAGB6aH+jETOH9re63s7K2maubHVyaq6V5cWFE/19Wl5c2DNTKVHVBgAAMG2ESgzFsEOXYb7fUYZST2sY1fsapvFrAwAAYFeptY56D42cPXu2Xr58edTboIHrQ5dkt2plUK1zw36/cxcu7TvAfH6ulfff9aqx2ScAAADcSCnlgVrr2SbXmKlEI6vr7Zy7cCm33fXenLtwKavr7Ztec9BJYIMw7Pfrdyj1sPcJAAAAx0kofhQPAAAPnElEQVT7G4fWb5vXUU4C66c9bNgnj52aa+1bqXSzodROSGMYprXFEgAAGD2VShxav5U1/Z4EtrrezvK7Hkp7q5Oa3RBr+V0P3bQ6atgnjy0vLqQ1O7Nn7TBDqY+yz34qxjh5ekHwtf8Nnb+44X4BAACOhVCJQ+u3sqbf0OUt9z6c7cf3zvzafrzmLfc+PJD369fSmfncfcfpzM+1UrI7S+kwc5H63aeggMPSYgkAAAyS9jcOrd82r35PAtvqbDdaP+r7HcXSmf
nGn7/ffR4UFGhr4lpaLAEAgEESKnFoy4sL+55WdpgKoMsf/WT+5FN/mZrkTz71l7n80U9OVQDS79yafsIoQQGH1W8QDAAAcBja3zi0pTPzef1L5zNTSpJkppS8/qU3D0XetLqRd9z3SHbqbivbTq15x32P5E2rGwde98ynzTZa7zlKe1g/s4qG3Y427JlRTK5ht4ICAAAni1CJQ1tdb+c9D7T3hEPveaB90/Dknfc90mi95+u//NmN1nv6nSPTbzg07Lk1kxIUGCY+ev3O+wIAADgM7W8cWr+zfGrD9Z5feOjjN1x/69LpG17Xb3tYv1/fsNvRRjEzqqleQNf7fvYCuiRjtc+ToJ8WSwAAgMMYWKhUSvmpJK9J8ola65ft83xJ8sNJvi7JXyT5h7XWDwxqPxzdsMOTfgd19ztHZr9rDlo/6vsdxbgHBYaJAwAATL9BVir9dJK3JfmZGzz/tUme3/3n9iQ/2v33WOh38HK/133jj/923v8Hn7z6+Nzf+MK881tfPrD3+/I3/3I+/dkn/tL/Vz9vJr/3A68+8JovaM3uG+h8QevgGUfD9soX3JJ37NNa98oX3HLgdTOlXG3tu379IMuLC/men3swj1+z9l9016dBP/eYYeIAAADTb2AzlWqtv5Hkkwe85HVJfqbuui/JXCnl4GE5Q9LvbJ1+r7s+UEqS9//BJ/ONP/7bA3m/6wOlJPn0Z3fy5W/+5QOv67dyaNh+7nf2n9V0o/We/QKlg9Z7Ln/0k3sCpSR5vLs+6fq9x24UNI5bAAkAAED/Rjmoez7Jo9c8/lh3beT6Hbzc73XXB0o3Wz/q+10fKN1sfdJsX5/w3GS9Z/4G7Wo3Wu+55/5HG61Pkn7vsRsVd92k6AsAAIAJMhGnv5VS3lhKuVxKufzYY48N/P36bd0ZdsuPFqPj1e+pav1WOE2Cfu+xrb+4QVXbDdYBAACYPKMMldpJbr3m8XO6a09Sa317rfVsrfXsLbccPBfnONxowPLNBi/3e12/hv1+067f49dvNHPpZrOYJsGk/LcAAADA8I0yVLo3yTeXXS9L8qla6/5nyA9ZvxUr/V537m98YaP1o77fX/28mUbrPV/8jKc2Wu+5UbRys8jl+X/t6Y3We/r9fia7wdL773pV/ujC1+f9d73qUEPP77z91kbrk2TY/y0AAAAwOQYWKpVS7kny20kWSikfK6V8Synl20op39Z9yS8m+cMkH0ny40n+8aD20lS/FSv9XvfOb335kwKPw5z+1u/7/d4PvPpJAdJhTn+7//u/5kkB0hc/46m5//u/5sDr/ujC1z8pQCrd9YP86ve84kkB0vP/2tPzq9/zigOv6/f72a+3Lp3ON73suVcrk2ZKyTe97Ll569LpgbzfMA37vwUAAAAmR6kTNvfl7Nmz9fLly6PeBgAAAMDUKKU8UGs92+SaiRjUDQAAAMB4ESoBAAAA0JhQCQAAAIDGhEoAAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAAAACNCZUAAAAAaEyoBAAAAEBjQiUAAAAAGhMqAQAAANCYUAkAAACAxoRKAAAAADQmVAIAAACgMaESAAAAAI0JlQAAAABoTKgEAAAAQGNCJQAAAAAaEyoBAAAA0JhQCQAAAIDGhEoAAAAANCZUAgAAAKAxoRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMZKrXXUe2iklPLnSTZHvQ8mxrOS/NmoN8FEcK/QhPuFw3Kv0IT7hcNyr9CE+4XDWqi1PqPJBU8Z1E4GaLPWenbUm2AylFIuu184DPcKTbhfOCz3Ck24Xzgs9wpNuF84rFLK5abXaH8DAAAAoDGhEgAAAACNTWKo9PZRb4CJ4n7hsNwrNOF+4bDcKzThfuGw3Cs04X7hsBrfKxM3qBsAAACA0Z
vESiUAAAAARmyiQqVSyqtLKZullI+UUu4a9X4YL6WUnyqlfKKU8vvXrH1hKeVXSyn/vvvvZ45yj4yHUsqtpZT3lVI+WEp5uJTynd119wt7lFL+Sinld0opD3XvlR/ort9WSrm/+/Po50opTx31XhkPpZSZUsp6KeUXuo/dK+yrlPLHpZSNUsqDvdN2/BziRkopc6WUd5dSPlxK+VAp5eXuF65XSlno/pnS++fTpZTvcq9wI6WU7+7+jvv7pZR7ur/7NvrdZWJCpVLKTJJ/meRrk7wwyZ2llBeOdleMmZ9O8urr1u5K8mu11ucn+bXuY/hcku+ttb4wycuSfEf3zxP3C9f7bJJX1VpfnOQlSV5dSnlZkv8jyQ/VWv/LJP8xybeMcI+Ml+9M8qFrHrtXOMgra60vueaobz+HuJEfTvLLtdYXJHlxdv+ccb+wR611s/tnykuSvDTJXyT5N3GvsI9SynyS/yXJ2VrrlyWZSfINafi7y8SESkm+MslHaq1/WGv9z0l+NsnrRrwnxkit9TeSfPK65dcl+dfdj/91kqWhboqxVGv9eK31A92P/zy7v5jNx/3Cdequz3Qfznb/qUleleTd3XX3CkmSUspzknx9kp/oPi5xr9CMn0M8SSnlC5L8rSQ/mSS11v9ca92K+4WDfXWSP6i1fjTuFW7sKUlapZSnJHlako+n4e8ukxQqzSd59JrHH+uuwUG+uNb68e7Hf5Lki0e5GcZPKeV5Sc4kuT/uF/bRbWd6MMknkvxqkj9IslVr/Vz3JX4e0fN/JvnfkjzeffxFca9wYzXJr5RSHiilvLG75ucQ+7ktyWNJ/lW3vfYnSilPj/uFg31Dknu6H7tXeJJaazvJP0/ySHbDpE8leSANf3eZpFAJjqTuHnXouEOuKqV8fpL3JPmuWuunr33O/UJPrXWnW0b+nOxWzb5gxFtiDJVSXpPkE7XWB0a9FybGV9VavyK7ox2+o5Tyt6590s8hrvGUJF+R5EdrrWeS/Kdc177kfuFa3Rk4fzfJu65/zr1CT3e21uuyG1yfSvL0PHmczE1NUqjUTnLrNY+f012Dg/xpKeXZSdL99ydGvB/GRCllNruB0jtrrRe7y+4XbqjbavC+JC9PMtctE078PGLXuSR/t5Tyx9lt0X9VdmeguFfYV/f/IU6t9RPZnXnylfFziP19LMnHaq33dx+/O7shk/uFG/naJB+otf5p97F7hf387SR/VGt9rNa6neRidn+fafS7yySFSr+b5PndSeRPzW45370j3hPj794k/6D78T9I8m9HuBfGRHfOyU8m+VCt9Qevecr9wh6llFtKKXPdj1tJvia7M7jel+QN3Ze5V0it9Xyt9Tm11udl93eUS7XWb4x7hX2UUp5eSnlG7+MkfyfJ78fPIfZRa/2TJI+WUha6S1+d5INxv3Bjd+aJ1rfEvcL+HknyslLK07p/P+r92dLod5eyW/02GUopX5fdeQUzSX6q1vrPRrwlxkgp5Z4kr0jyrCR/muTNSVaT/HyS5yb5aJL/odZ6/TBvTphSylcl+c0kG3li9sn3ZXeukvuFq0opX57dAYUz2f0/Yn6+1vq/l1K+NLvVKF+YZD3JN9VaPzu6nTJOSimvSPK/1lpf415hP9374t90Hz4lyf9Ta/1npZQvip9D7KOU8pLsHgLw1CR/mOQfpftzKe4XrtENqh9J8qW11k911/zZwr5KKT+Q5O9l93Ts9ST/Y3ZnKB36d5eJCpUAAAAAGA+T1P4GAAAAwJgQKgEAAADQmFAJAAAAgMaESgAAAAA0JlQCAAAAoDGhEgAwdUop319KebiU8nullAdLKbd313+9lPJIKaVc89rVUspnuh8/r5Ty+92PX1FK+YV9Pvevl1I2u5/3wVLKu7vrC93nHiylfKiU8vZ9rr36+Y/5631FKeW/uebxT5dS3nDc7wMAcK2njHoDAADHqZTy8iSvSfIVtdbPllKeleSp17xkK8m5JL9VSplL8uw+3u
Yba62Xr1v7kSQ/VGv9t919nO7j8/brFUk+k+TfDfE9AYATTqUSADBtnp3kz2qtn02SWuuf1VqvXPP8zyb5hu7HdyS5eIzv+7Heg1rrxkEvLqXMlFJWSim/262o+p+666/oVjy9u5Ty4VLKO3uVVaWUr+uuPVBK+ZFSyi+UUp6X5NuSfHe3Suq/7b7F3yql/LtSyh+qWgIABkGoBABMm19Jcmsp5f8rpfxfpZT/7rrnfy27gctMdsOln+vjPd55TfvbSnfth5JcKqX8Uinlu7tVUAf5liSfqrX+zSR/M8m3llJu6z53Jsl3JXlhki9Ncq6U8leS/N9JvrbW+tIktyRJrfWPk/xYdqukXlJr/c3u53h2kq/KbtXWhT6+RgCAAwmVAICpUmv9TJKXJnljkseS/Fwp5R9e85KdJL+V3UCp1Q1lmvrGboDzklrrcvd9/1WS/zrJu7LbjnZfKeXzDvgcfyfJN5dSHkxyf5IvSvL87nO/U2v9WK318SQPJnlekhck+cNa6x91X3PPTfa4Wmt9vNb6wSRf3PgrBAC4CaESADB1aq07tdZfr7W+Ocn/nOT1173kZ7M7A+nnj/l9r9Raf6rW+rokn0vyZQe8vCT5J9eEU7fVWn+l+9xnr3ndTvqbg3nt5yg3fBUAQJ+ESgDAVOmewvb8a5ZekuSj173sN5PcnZtX+zR531eXUma7H39JdiuP2gdcspbk26+55r8qpTz9gNdvJvnS7gylJPl71zz350me0efWAQD64vQ3AGDafH6Sf9GdafS5JB/JbivcVbXWmuSfH+JzfXUp5WPXPP7vu/9+Zyml0/34z2qtfzu77Ww/XEr5y+76cq31Tw743D+R3ba2D3QHcT+WZOlGL661dkop/zjJL5dS/lOS373m6f83ybtLKa9L8k8O8XUBABxZ2f2dCgCAcVdK+fxa62e6IdS/TPLva60/NOp9AQAnk/Y3AIDJ8a3dwd4PJ/mC7J4GBwAwEiqVAAAAAGhMpRIAAAAAjQmVAAAAAGhMqAQAAABAY0IlAAAAABoTKgEAAADQmFAJAAAAgMb+fyhO5ckUf3/cAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] diff --git a/examples/tutorials/15_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb b/examples/tutorials/15_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4372d49d382fe473386bb04c39834b5ffed97c41 --- /dev/null +++ b/examples/tutorials/15_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb @@ -0,0 +1,339 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_PGI_Rvgr0bo" + }, + "source": [ + "# Tutorial Part 15: Training a Generative Adversarial Network on MNIST\n", + "\n", + "\n", + "In this tutorial, we will train a Generative Adversarial Network (GAN) on the MNIST dataset. This is a large collection of 28x28 pixel images of handwritten digits. We will try to train a network to produce new images of handwritten digits.\n", + "\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/15_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "4qlydaTAr0bv", + "outputId": "d7d00b64-4281-4476-9912-822012906168" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "cyXeZ5zTFkah", + "outputId": "521d8d0b-3bbd-41ef-cb5f-06587d2679f8" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "06xelFpir0b6" + }, + "source": [ + "To begin, let's import all the libraries we'll need and load the dataset (which comes bundled with Tensorflow)." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "23zZTDoar0b7" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import tensorflow as tf\n", + "from deepchem.models.optimizers import ExponentialDecay\n", + "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Reshape\n", + "import matplotlib.pyplot as plot\n", + "import matplotlib.gridspec as gridspec\n", + "%matplotlib inline\n", + "\n", + "mnist = tf.keras.datasets.mnist.load_data(path='mnist.npz')\n", + "images = mnist[0][0].reshape((-1, 28, 28, 1))/255\n", + "dataset = dc.data.NumpyDataset(images)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "qijPRZXOr0cI" + }, + "source": [ + "Let's view some of the images to get an idea of what they look like." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "mmhulNHor0cK" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAALgAAAC0CAYAAAAn8ea8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydeXSb5ZX/P9p325JtyZa3eI3teItxEsdJnJ0SAjTQAaaUlna60M50Yzp02sPMdJuWmZ4e6HRhzmEKtKVACiUQKKGEhBCyO4vtxLEd7/tuy7Yky5Ylvb8/8tPbmCRksRYn1eccHYIk6716dd/nfZ773Hu/EkEQiBDhZkUabgMiRAgmEQePcFMTcfAINzURB49wUxNx8Ag3NfJrebNEIlnQIRdBECQQsTOAjAiCEA8L31b/Of0wkRE8wkfRGW4D5kvEwSPc1EQcPMJNTcTBI9zUXNMi828ZtVqNVqvFarUSFxeHIAiMjo7S19fH1NQU09PT4TYxwiVYcA4ukZxfDPtzZPz/7ydcuTNms5mcnBw+9alPsWnTJgD27NnDH/7wB5qamuju7g6LXdeDRCIJ23m8Gi78zedrp+RaPiAQoSKpVIpSqUSlUonPRUVF8fGPfxyFQoHBYMBisfDLX/6S9PR0br/9dtGhxsbG2L59O6+//volHSoY4TepVEp2djZf+tKX2Lp1KxaLBa1WC8DU1BQDAwP8/ve/5/HHH7/qzwxXmDAhIYG8vDy+973v8eKLL7Jr1y56eno+6k9OCoJQBqGxNSUlhbvuuouHHnqImJgYpqen+epXv0pjYyNDQ0Mf+beXCxMGbQRXqVQolUqkUilGo5Ho6GhMJhNqtZq4uDgSExPF92q1Wm655RaUSiVyuRylUsm6detIT09n1apVpKenY7fbcblcTE9P4/P5gmX2HDQaDbGxsWzbto3ly5eTmpqKUqnE4/Hg8/mQy+UkJyeTkpKCxWJhZGQEn883r1FHpVIRGxuLUqnEbrczOjoasO8TFRVFWloaer0etVqNXL5wbuCpqamUlJSwefNmcnNz0el0TE1NodVq52VnUL6hVCrFZDJhNBpRKpXk5uaSlpZGXl4eBoOBzMxMCgsLL/m3U1NT9Pf3c+utt2K1WsnOzmZiYoKenh6amppoaGjA6XQGw+yLvkNcXBwFBQV88YtfxGKxoFKpEASBmZkZZmZmcLlcJCQkkJCQQH5+PqdOnWJqaorZ2dnrPq5Op2Px4sWYTCZaW1sD7uApKSn09/djt9sD9rmBIC8vjzVr1rB582aUSqU4SAiCMK8BI+AOLpVKyc/P52tf+xp33nkner0emUyGRCJBKpWK/70UgiDQ1tbGL37xC9GJJRIJAwMDOJ1OJicnaW5uxuv1BtrsOfarVCry8vLYunUrn/70p0lJSZkzigwMDFBXV8frr7/Of//3f7N161YqKyt58skneeutt6ipqbnu48fExFBRUUFaWhr79++f12ddiEQiwWAwkJqaSlJSEgaD4bK/QziIi4vDbDajVCoD+rkBd3BBEJiYmGBmZgaJRIJOp7vs+7q7u5mZmUEqlbJo0SK8Xi9DQ0McOHAAj8cDgM/nw+Vy4fF4cLvdQXVuAKvVym233UZlZSW5ublYLBZkMtmc98TFxREbG4sgCHR0dCCVSomJiRFv//PBaDRSUVHBxMREQB3Qf+csLy+nt7eX/v5+RkZGAvb514tSqRSnosXFxQCMjIxQU1PDvn37aGlpmdfdJmgO3t/fT29vL2q1GrfbjUKhIDo6GgCv18v09DTV1d
XY7XakUik+nw+NRsPAwABNTU2BNuuqUCgUWK1Wtm7dysaNG9HpdAiCgMvlwu124/P5MJlMGAwG9Ho9Xq+X5uZmNBoNRqMRk8mERqOZlw3+KUp1dXWAvtV5UlJSyM7OJicnh1OnTjEyMsLk5GRAj3Gt+H1i2bJlLF26lIyMDAC6u7v54IMPeP755xkbG5vXlC8oc/DJyUl27dpFd3c369ato7W1leTkZL785S8DYLPZaGpq4uGHH2Z8fFyMVNx9991hPelpaWksXbqUtWvXio7q8/morq6mpqaGqakpHn30USYmJujr66O+vp7JyUncbjcFBQUXhTSvF/9ULlBIJBIeeeQRVq5cic/no6ura0HMwVNTUyktLeXxxx/HZDKJc+9XXnmF/fv309/fP+9jBG0Z3drayvDwMI2NjdhsNrKzs1m0aBGVlZW0tLSwY8cO7Ha7OBVpb2/n+eefD/oU5FIoFApSU1P5xje+QUVFBTqdDolEwuDgIOfOneM///M/UavVxMfH89xzz3Ho0CGamppobW2lt7eXwsJCJBIJZWVlFBQU0NTURGfntecpJSQkkJKSgtFoDPj8WKfToVKp8Hg81NbWMjw8HNDPvx7y8vKorKwkNjYWhUKB0+mktbWV9957j+bm5oAcI2gO7nA4cDqdjI+PMzMzg0qloqurSwyx+R/+FbLT6QxJdOTDKJVKoqOjKSwspKysjJycHHw+H9PT0/T29lJbW8uJEyeIi4tjfHwcl8vF8ePHGRgYYGpqCofDgcPhQCKREB8fT3Z2NosXL75uB7darajV6oCN4BKJBKVSiV6vF/ce+vv7w3KuL0ShUJCenk5RUREqlQqv18vY2BgnTpygu7s7YHfyoAZCBUFgamoKAI/Hg8PhwOfzkZaWxqZNm3jmmWeYnZ0N665aTEwMWVlZ3HvvvaSlpSGXy5mYmGB4eJjq6mr27duH0+lkYmKC1tbWj/wslUrFLbfcwtTUFLt3775mW3Jzc8nLy0OlUgXMwRUKBSaTCYvFQlRUlJhiEM7UAolEQnR0NCUlJaxevRqfzyeO3tu3bw/o9Clkkf6uri5++ctfkpWVRUFBAStWrGDlypXU1dUFZK51PSgUCrZs2cIdd9zB5s2bkUqlnD59ml//+tfU19fjcDiuOXITHR2N2Wy+ru1wf0wdoKOjIyDTiIyMDL785S+TkZGBz+eju7sbh8Mxr4XbfNDpdCQmJvL4449TVlYmPr9nzx727t3L0aNHA3rxhczBZ2ZmGBwcZM+ePUilUtavX8+WLVtISkqis7OT0dFR2tracDgcIbFHIpFgNptZvHgxhYWF6PV63n//fQ4fPkxVVRX9/f34fD6kUuk1OapMJrvunTe5XI5cLkcQBAYGBhgfH7/mz5DJZKhUKqxWKykpKRQWFlJeXo5Op6Ojo4OjR4/idDrFtU+oiY6OJisri6KiImJjY/F4PPT19VFVVUVNTQ0ulyugxwuZg/t3AHfu3IlOp2PFihXce++9LF26lPb2durr63n99dfp7OzE4/EEfdoilUpJT08nLy+PrKwsvF4vb7zxBnv27KGxsfGaPy/QCUx9fX3YbLaLnvcvPiUSibiB5p/OKBQK1Go10dHRVFRUUFlZSX5+PiUlJSgUCjo6OnjnnXew2+1hWczLZDISEhIoLi4W1xoOh4Pa2loOHDjA6dOnA37MkCcj9Pb2snPnToaHh3n88cdZvny5GL5KT0/nwIED7Nu3j5GRkaD+CCqViu9+97sUFRXhcrmorq7m6NGj1+3cfie78N/zISoq6qKYulKpJCcnR0yFKCoqwmKxoFQqUavV3HXXXeJOoMfjobW1FUEQcLvdyGQyRkZGOHv2bMhyeT5MZWUld911Fw8++CBqtZqOjg5qamp49NFHGRgYCMq6IOQOLggC/f39HDp0iP/5n/9h5cqV5Ofnk56ezrp164iPj0ej0bB7927GxsYCfssCiI2NJTMzk6ysLKKjo3G73VRXVzMxMX
FdP74/X0IQBGw2GwMDA9c1mns8HnHqcNddd1FQUDAnGqNSqcjNzUUikYhJWQqFApfLhcPhoKGhgbGxMXG619fXR0ZGBrm5uahUKhwOhzj1CiX+DZ1Pf/rTlJWVYTAYkEgk2O12hoaGGBoawu12B+XYYUknm5ycxOFw8Morr+B0OpmZmUGj0ZCRkUFMTAyzs7N0dXUFLc/aYDCQnp4u5j6Mj49z5syZa169SyQSYmJi0Ov1YsSop6fnitGWyzE6OipuoS9ZsoS0tLQ54TL/tvb09DQzMzO43W4mJydxOp3Y7Xbq6+vp7u6mr6+PM2fO4PF4mJmZAc5fPFNTU0xMTFyXbfNBo9GQlpbGhg0bSElJmfN9u7u7xUhbMAhbvqTP56OlpYXf/OY37Nixg/Xr1/Poo4+yZMkS7r//flJTU9m+fTu//vWvA35suVyOSqVCLpczPT1NX18fu3btuqbcDH98+fOf/zzr16/H6/Vy/PhxXnzxRd56663rsuv555/n0KFDNDc3s3TpUtRq9UXv6ezs5PDhw7S3t9PZ2Ultba14a//wXWPLli2sW7cOo9FId3d3WJwbIDk5mS984QsYDIY5d4+DBw+yffv2oB477AnB09PTDA8Ps2fPHj772c+KkYu8vDyKioooKCigvr4+aLdVf17M6OjoVYfOZDIZycnJbNiwgfvvvx+LxcLAwACvvfYaDQ0N85pL9vf3s337dv785z9flOTlZ2JiQsyNn5mZuex0KDs7m8zMTABOnDhx3XeW+ZCYmMjixYspKysTL1j/YNDQ0MDAwEBQjx9WB9fr9URFRaHX6zEYDHNGLLlcftkfOJBMTk4yPDyM2+2+4rxZKpWi0+lISkpiyZIlbN68GZPJRH9/P2fOnOH48eMMDg7O62J0uVx0dHRc999fiMFgEDd3zp07R19fX0A+91ooKSmhvLycpKQkZDIZbrebiYkJPvjgA9ra2oK+4RQ2B5fJZCxatIiysjKWLFlCQUGBGCGA82Gy5uZmzp49G9SQYXt7O7W1tVd1DKVSSWZmJvfddx/l5eWsWbOGPXv28PLLL/Pcc88FzcZAUF1dTXt7e8iP+w//8A9s2rRJTCMeHh6mvr6en//855cMgwaakDt4cnIyhYWFVFRUsGnTJiwWC9HR0WKoC87fwkZHR5mcnAyac/sLL3Jzc5menr5iHLu8vJxVq1bx2c9+FrPZjNPp5M033+T73/9+wEbcmxG9Xj8nR76pqYnt27czMTERks2mkDi4SqUiKiqKpKQkVq1aRX5+PkVFReTk5KDT6VAoFACMj4+LGYgHDhzgzJkzQbfNYDCQlJREZWUlHR0d2O123G43cXFxYu2oP+acl5dHfHw8DoeDjo4ODh8+TFdXV9jzqq+ERCIR6zBDhVarZeXKlcTHxyOTycRpm81mC3pV1oUE1cH9o2RsbKwY577vvvtISkrCaDQC51f+/modf+D/T3/6E/v37w9q+EgQBHw+H2q1GqvVygMPPMDu3bvp7OzE4XBQVFREYWEhy5cvJzc3l5iYGORyOb29vdTV1XH69Gnef//9oMTpg0FcXBxRUVEhOZY/fPrQQw9htVrFza/Z2VnGxsbo6OgIWSw+aA6u1+uJiYkRk31KS0tJTU1FoVDMyXXu7u7m3Llz/O53v+PIkSMMDg6GpDTNj0QiwWg08pnPfIbbb78dp9PJ1NQU6enpqFQqZDIZUqmUrq4uOjo6qKqq4vnnn6elpUVM/V3oCIIQsB3WqyExMZGioiIqKyuJiYnB6/UyOzvLH//4R/bu3cvIyEjIMkgD6uByuRydTse2bduwWq1YLBbS0tIoLCzEbDaL+ciTk5MMDg5y4MAB6uvraWtro7a2lsHBwZCMiJOTk7S1tdHV1UVSUhJ6vR6FQkFsbCzR0dHMzs6i0+nENM7Tp0/z3nvvUV1dzeDgID09PUHbeQsGEomElJQUTCZTSI6nVCoxGAwYjUbkcjmzs7PYbDZ27txJbW1tSD
MZ5+3g/kY+0dHRGAwG4uLiuOeee0hLSyM+Pl5M/4TzI8nk5CQdHR3U1dWxY8cOGhoa6OvrC6nDOBwOenp6qKmpwePxkJycTHR0NCqVSmwNMTk5yfj4OIODg+zbt4+33nqLkydPhszGQBMfHy/WxAYb/0aaRqNBJpMxMzOD0+nk1KlT11UIMi9b5vsBBoOB3NxcvvCFL7B48WLS09NJTEy86Hbor5J56aWX2LNnD7t372ZqaiosxQ5TU1N0dnbyrW99i8rKStasWcNXvvKVOe957bXXOHjwIAcOHKCtre2GmIpcjlBNTRYi83Zwo9HI6tWrWb16NRaLBY1Gg0QiwWazMTw8TE1NDT6fj/7+fvbv309zczOjo6O4XK6wVvIIgsDY2Bj79++ntraWnTt3znm9r6+P8fFxbDbbDevc586dY9GiRaxZsyakxx0fH6ejo4P6+nrS09PD2n9l3g7uXxmfOXNmzkaC38Fra2vx+XwMDAxw8ODBsI3al8LtdjM4OCgWF99stLW1UVVVRVJSEjU1NSGrnPJ3J9u9ezfp6ekoFApxUAs1IW++GUxuFO2bG8VOQtx8cz5ENHoi/E0ScfAINzURB49wU3Oti8wRFq60XNoF/47YGRhuFFvTLvdCZJEZBm4UO4kIwUa4yVmoI/ZVE3HwCDc1EQePcFMTcfAINzVhr6qPMD8yMzOJjY3FaDTidDrp7OxkaGhI7IfyN8+FXZmu9ACEhfz4W7NTIpEI//7v/y7s379f8Hq9Qn19vfCVr3xFsFqtgbL1RLDOqUQiuarHtZ7TDz8iI/hVolQqUSqVaDQaiouLsVgsGAwG/vSnP2Gz2ULezFKlUmE2m7n11lvJz88HID09nczMTFJSUsLSIuJyaDQaLBYLcrkci8VCTk4O//Iv/4JCobhs4l13dze1tbV873vfY2pq6rozOiMO/hEolUq0Wi1FRUXEx8djNBqJj49n0aJFxMTEoFarGRwc5OzZsyEXzhIEgdnZWZxOp9hbxH8R+ou4w4lfec4vW5ifn49cLicuLo5FixaRnZ0ttoq+FEajEb1ez4YNGzh9+jS9vb3XVQkUcgeXSqXIZDLxcaHgkr8WUxAEZDKZWJAc6nxsqVSKQqHAaDSSlJTEJz/5SbKzs0lJSSEjI2NOjaPD4UCpVNLS0hJSOz0eD3a7nba2tjmN8xcKCoWCRYsWceutt1JSUkJFRYUoK+mvLPookVe/mt0DDzwg5u7fEA6en59PYWEhK1asYNmyZXMq7J999lnOnj2LzWajpKSEnp4ejh07xunTp0PmPFqtluTkZD71qU+xbNkysRuqTCbD6/XS39+Py+VCpVKRnJzMxo0bRW2Ztra2kOW6+3w+Me/6ehrlBxuTycR3vvMdysvLMZvNYpeya6kuUigUbNu2jY6ODrq7u69LFDdkDh4VFUV2djYPPvggWVlZLFq0CIvFwtTUFIODg2g0GtasWcPSpUuZnp7GbDbT1taGVqulvr4+qDWb/nbEq1atIi8vj6VLl1JUVERCQgLR0dHIZDIcDgd9fX08/fTT+Hw+Fi9ezMMPP4zBYBDrUQPdBP+jkEqlqNVq0caFxuzsLO3t7ZSVlYmKF06nk76+Ptrb2y86VwkJCcTFxWG1Wud8jr+F3/VWBYXEwZVKJWazmdWrV7N161axGcz4+DgtLS309/cTExMjNtfxizCp1WqGh4eDXvIkk8mIjY2lsrKSVatWsWzZMrRarTjazM7O0tvby6lTp3jllVdQqVTY7XYefvhhVCoVarU6oMpoV4PfwePj4zEYDOLzfpUHhUIREqWMy+F2u6mrq6OkpERcgNtsNhobGzl16tRFDu4Xqf2wsrS/jcf1hj2D7uAymYysrCw2bNjAj370IzQaDf39/WJ/uvr6egYHB1Eqlfzbv/0b69evp7S0FDjfP7qhoSHoEQqtVsumTZu4++67xYiEH0EQ6Onp4X//93959dVX6e/vJykpKaj2XA0ymUyMTlw4gl
utVvLz82lubmZwcDBsamqTk5O89NJL7N69W2wX4l8UX6oTmE6nE+XT/VMaj8fD+++/z/Hjx69bNzOoDq7ValmzZg2f+cxnWLp0KSqVivr6evbu3curr75KY2MjTqcTuVzOypUrKSsrIzs7Gzh/tdfX1/Pee+8F3cE1Gg2VlZXiWgDO66UPDw/T29vLs88+S01NDaOjo8D5Fgzx8fFBtelKuN1uRkdH2b9/P1KplIqKCgBWrFhBfHw8Q0ND7Nu3L6xygV6vF5vNJt7ZBEG47G+ZnZ1Nbm4uer1evGMLgsDIyMi8VOGC5uDx8fGkpaWxefNmsUddb28vH3zwAQcOHODUqVNiEWpsbCzl5eWkpqaKt9v6+noaGhro6uoK+gLT6/UyMjJCR0cH4+Pj2O12uru7GRwcpLOzk4MHDzI2NiauA/ztnsOJIAhMT09z/PhxUlNTRQePi4tDEATi4+NFvZ5w2niltZNMJiM+Pp68vDxycnLmaIR6PB7q6+vn1QkraA6+cuVKtmzZwpe+9CUEQaCrq4s33niDn//85/T398+ZUxkMBv7+7/9eDHX5fD5eeOEFDh06FJLGljabjZ///Oc0NzejUqk4efKkqIhwKVlDg8Ewp2NqOHnzzTdJS0vj05/+dLhNuWb8YcP169dz++23U1BQMOei9Hg8vPTSS/PSCw24gyuVSj73uc+xbds2VqxYwezsLHV1dXzwwQc88cQTDA4Oim1zlUolWVlZ3HLLLVitVlQqFd3d3bz55pu8/fbbQe/+78fr9TI0NMTLL7+MRCJhZmaG2dnZy945srKyROWEhYC/yanP57shmvzI5XI2b97Mhg0bKC0tJSsr6yIBBJvNRmtr60cqWFzVsQJhsB+pVIpWq2X16tVkZWWh1Wrp7Ozk7bff5vDhwwwODjI7Oyv2rtu4cSN5eXnk5uaiVqsZGBjgzJkz7Nmzh9HR0ZC2c/N6vRfdLeLi4khKSkIul8+ZR2ZkZBAXFwecb+g+MDDAwMBA2BoELZQ+MxfiV8Pw90S8cM2iVCrZvHkzhYWFLFq0CKPRKAru+hXv2traqK+vZ3p6el7nNeDNN/V6PatXr8ZsNjM9PU11dTWvvPIKdXV1yOVytFqtuIX7ta99jezsbNFZmpqaOHLkCHv27AlbW2L/aOi/u6xdu1bs1gXnnSkvL0+c67a0tHDu3Dm6urrC6mh+J1gIzu4X6EpJSWHDhg3k5+dTXFwsvi6TybjlllsuUpH2er04HA5ee+01Tp48SWNj47wbRQXUwWUyGUqlEqPRiEqlor+/n//4j//AbrdTUFDAxo0bWbduHSkpKVitVkwm05wY94EDB/jggw+C2hf8o/A3EU1NTeVLX/oSpaWlLFmyZE7rYX8awezsLAMDA/zwhz/kzJkzN2x7t2CQkJDAkiVLePLJJ7FarWi12jm/s38Q+TDj4+McP36cl19+md7eXnw+37wv2IA6uM/nw+v14nQ6RaHSxx57DK/Xi1arJSUlheTkZJRKpZjHodFokMvlOJ1Ozp07R0tLSyBNuiL+0SYhIYG77rqL7OxsEhISxKb3LpeLzs5OEhISROFVOD/aSKVSMjMzGR4eZnh4OCzy2AsRiUQittJWq9WXjOZ8ePSG87Hw/Px8cnJycLvdAVmDBdzBZ2ZmaGtrE5UdHnzwQVHBYXp6GofDwfDwMHa7nYSEBPFKHhoaoquri8HBwUCadEXUajVGo5GysjLuuece8vLykEgkTExMMDIyQmtrKw0NDZSWlqLVaudETxQKBcXFxYyNjdHZ2cn4+HjYpgj+nUH/3Uaj0YSt6aXH48HhcNDd3S3uSE9PT885NxdKn/ujUlqtlrS0NNLS0ujr61t4Dj47O8vw8DCPPPIIX/va17jzzjuJjo5maGiIjo4Ojhw5wo4dOxgeHkYQBJ566imUSiUej+eahVgDgUKhYPHixZSXl/OLX/wCqVRKa2srf/jDHzh69ChtbW20t7cTExPDN7/5TaKjo8
WNKIVCgclk4otf/CKpqanMzs6ye/duXC5XyKcrH55CaTQaysrK2LVrV0jt8DM0NITNZuMzn/kMmzZtQqFQcOLECdxu90XnJiYmhk984hNs27aNlJSUgA8QAQ8Ter1empqa+OlPf8pzzz0nqglPT0+LIlMGg4GMjAwKCwsxmUxMTk5eFBsPJv5R5b777mPjxo0sXboUqVRKU1MThw8f5rXXXmNgYIDZ2Vni4+N57LHHWLVqFUlJSXi9Xvbv38/ExAQJCQmUlJRQVlaGxWJh1apVHDt2jHPnzuF0OoG/dloNJh+qukKtVrN69Wpyc3NFLfhQ4/F4GBwc5O233xZ16S81p1YqlTgcDlasWDFH5jtQBGWjZ3JykrNnz172df9OYExMDCqVCrfbTVtbW8giJ0ajkdTUVDZu3Mjy5ctJSkqioaGBo0ePcuzYMdrb25HJZCQmJpKbm8u6deswm8243W5qa2vZt28fY2NjWK1WPB4PZrMZs9lMZWUler2etLQ0nE4ns7OzdHV1Bd3B7XY7PT09ovCATCbDbDaTlZVFe3t7WBzcv9N6pcoit9vN5ORk0CQFw1LR44+2+K9ol8vF/v37QyIMKpPJyM/P5xOf+AT33nuv2Jz/qaeeYt++ffT09DA1NUVRURG33nor999/P4sXL2Z0dJS6ujp+8IMfUFNTw+TkJDqdjvfee4/i4mLKy8u59dZbKSwsFBeiQ0ND7N69m927dwf1O3V0dPDOO+/w4IMPzqnmWbZsGTabbUFLr6jVapYtWxY0BbiwOHhfXx8zMzNhCa3dfvvtbN26lb/7u7/D4/HwxhtvsGfPHt577z1xylFRUcGaNWswm81otVp+85vfcOTIEerq6jh37pxou8PhoKqqipqaGnbs2MGBAwcoKysjPT0dgOeff57jx48H/TtNTEyEtNjiUvijUcuWLaO7u5v+/v4rbtT5Zb6/9a1vBa0iKSwOrlariY2NDfm2skQiYdWqVRQXF4shQKPRSFZWFkqlkoyMDJKTk8nLyyMzM5Px8XFqamp49913aWxspK+vb840yufz4XK5cLlc2O12jhw5wsDAAGazGYCqqip6enqC/r2Ghoaorq7GZrNhMpnEsFxmZiaLFy8mKioKh8MRtAHFbDZjtVrJzs6msrKSt956C5vNdkkH99+9k5KSWLNmDWvXriUpKUksQPZ4PAEtUwyLg8fGxorhuFCzZs0a8vLygL9OV5KTk1Gr1aLUoc/nw+Px0NTUxJtvvslf/vKXK+ZE+Hw+ampqrqusar709fUxOTnJwMAAGo1GdPDi4mImJjDsKC4AACAASURBVCaIi4ubV2X6lcjOzmb16tXcdtttVFRUMDQ0RENDwyWDBhqNhpiYGNasWcOWLVtYvXq1OF2dnZ3F4XDgcrkCJjUYFgc3GAykpqbOqdwIFWNjY9jtdvR6vTiS+NV4T548SWtrK3V1dezbt4/+/n6GhobCmlN9tXi9Xpqbm4mJiRELIAKxE3glJBIJjzzyCEuXLiU5ORmZTEZlZaV41/gwKSkp5OXlUVRUhFKpFH2gvr6eM2fOsGvXLt55552AZZGGxcEnJydpb28XNyZChSAIPPPMM2KCl9VqFVfx1dXVdHV1MTo6yvDwMN3d3UxNTd0Qzg3nHfzQoUMkJSWRlpYGhE4+UKVSzXHW3Nxc4uPjLzkK+wViNRoNMzMzjI+P09TUxI4dO6irq6O1tRWn0xmwCzNsDt7V1YXb7RYT3D8s8R0s3nnnHRobG8UEe5fLxfDwMO+++y5OpzOkMuKBxOv1cvr0adauXcvs7GxIe6MMDw8zMjIihn8TExNJTEy86H3+GlGfz8fk5CTDw8P09fVx4MAB3n77bVEePZCExcH9V21HRwdpaWnI5XJSUlJwOBxB3+xxuVw0NjbS2NgY1OOEGp/PR1tbG62trXR3d5ORkRGS4wqCwM9+9jOWL1/Oxo0buffee8Uq+g/T39/P1NQULpeLqqoqPvjgA6qrq2
lqagraVCpsCg8qlYp//ud/Ztu2bWRnZ/OnP/2JF154gYaGhuvemLhRlBOCZadKpSIlJUUMb8Jfa1s/nAtylVyVjKBOpyMmJobExETuu+8+Kisryc/PF1t+dHZ20tLSwuHDhxkbG8PhcDAxMYHNZsNutwcke/RyCg9ha93m9Xo5ceIERUVFJCcns2zZMnHlPTo6ekNOE8LNzMwMLS0tIc/IdDqduFwuRkdHMZlMTE9P093dLTp4d3c3zc3N1NTUMD4+HtJc/7A5uMfj4ejRoxQUFGC1WqmoqGDbtm0YDAbOnDmzoBSRI1wZn8/H9PR0SHZur4lwtk+WSCSCxWIRysvLhd/97ndCS0uL8Je//EXIysoSFArFNX/e31r75BA8gtY+OVjndEG1TxYEgdHRUTweD2+++Sbt7e3ivCxSIRMhEERkBMPAjWInN4FWfUQINvTcKHbCjWNr2uVeuKYRPEKEG41rGsFvlNtUxM6AEVE6jnBTs1CnJFdNxMEj3NREHDzCTU1EZS3ApKWliX34mpqagtYd198A32QyERUVRUxMDHa7nenpaTFhbWhoCJfL9Te9IxxWBw+lpk0okEgkbNu2jY9//OMAPProo0Er+NVoNCxatIj169ezfPlyVqxYQU1NDZ2dnWJ/mVdffZWOjo6gVazfCITcwf0tDTIzM3n44Yd5/vnnF1buwnWiVCr5+te/ztatW8nKygpqA9H169ezatUqPvWpT2EymdBoNKjVahITE/F4PGKi2uTkJPv27bvpUoOvhZA7uFKpZMmSJXz84x+nsLAQi8Uitva6UUlISCAvL4/169eTmpoKQFtbW9CaiNpsNlwuFwkJCURFRYmFIh8uctiwYYPY97G/v/9vciQPuYOr1WrKysr4x3/8R0ZGRkTBzxvZwTMzM9m2bRtr1qxBIpHQ09NDXV2d2N0q0LS0tJCUlCTWll6oaXNhmdptt92GxWKhpaVFzPm5kfC3pPOLBvsvYH/1/dU0xw9bFEUikRAXF0dcXBwmkylcZgQEs9lMSUkJKpWK3t5ejh8/zp///Oeg9Vp0OBzU1dXxxBNPiC3mBEGgt7d3TqGvXq8nOTmZdevWodFogmJLsNDr9aKy9D333MMPf/hDamtrOX36NO+99x5PPPGE2Ff+owjbIvPCq3Mhym6kpaWRlZVFVlYW27dvx+l0XnIELCgoID8/H6vVytjYGIcPH+bdd98NWNuDyzE6OsqePXu47777RFGB6enpiwpFxsfHOXTo0A1xh/QrQuTl5ZGRkcHixYvR6XSYzWbi4+OxWCxiX0mn03lVIlthDxNerhl6uMnPz6eiooIVK1bwzjvvMDs7e0kHLykpITc3F5PJRFNTE8ePH+fYsWNBt8/hcHD27FlaWlowGo1iidrl3hdKOZhrQSaTERcXh0KhIC4ujpKSElavXk1+fj4FBQVotVpcLhdut5uJiQmmpqbEMrericCFzcH980W5XB52ubsPI5FI+OY3v0lxcTFarZbs7Gymp6cvWjRKJBIeeOABSkpK0Gg0/OIXv+DYsWO0tbWFxE5BEHjppZfo7+9ny5YtLFmy5KLBwt+xYKESFxfHj370IzIyMkhKSiInJ2fOHd3fHu/MmTPU1dWxd+9esS3cgnZwOP8Dmc1mFi1aFJaOUB+F2+0WpxkX9vzwo9FoSEpKIiEhAYPBgCAIKJXKkN+Nqqqq6O7u5ujRo/zkJz8hKSlpjoZnUlISDz30ED/72c9EIduFgMFgoKKiglWrVrFlyxbgr82Ldu3aRW9vL6Ojo7hcLnp6ehgeHsbhcIh6pVe7fxJyB/er3Xq9XmQyGQaD4aoWC6FCq9WSnJwsdjt1Op1iK2Q/UqmUmJgYCgsLiYmJQRAExsbGxJ3DUCKTyXC73ZdVl1AoFMTGxoali9jlkMlk5OTkUFFRQUVFhejYfimY3bt309fXJyrt+Xdor4eQO7jX68XtduN0OomKiiI6OjrsstgXYrVaeeihh8T+he3t7b
S1tc3ZclcqlaSmpnL//fdjMpmYmJigvr6eQ4cOXbJdWbCQSqWsXLmS9PR00tPTyczMvGi655+Dh0pc4Er4u9Dee++9fOxjHyMlJYUXXniBZ599lnPnzgV8MRxyB3c4HHR1dVFdXc3q1atDffiPxGQysWTJEu699140Gg379u3jmWeeYXBwcM4i7ZOf/CQbNmzgtttuY3p6mldffZWf/vSnQe3geiGJiYnccsstogyjTqcT26d9mJGREfbu3Rs2WUY4fyGqVCruv/9+li5dSm5uLs8//zznzp1jdnaWffv2MTo6GpSLMCxTlNnZ2bCe8A8jlUoxGo1s2LCByspKLBYLra2t1NbWcubMGXHOp1KpyMvLo6KigpKSEvR6PceOHePMmTNXVDIIJEajkZSUFAoLC4mLi5sjVPth9Ho92dnZtLe3hy2SEh0dTXFxMZs3byYhIQGJREJzczMul4uZmRkGBwfxer1ByUtaEGHCcCKTycRIyYMPPsiyZcvQ6XTU1tZSV1dHT0+PeOL1ej233XYbq1evJiMjA7fbzd69e6mrqwupzVFRUZhMJnQ63RX3EUwmExs2bODgwYNB21m9EmazmbvuuostW7YwODjIBx98QGNjIw6HI+gNnsLu4P5NinAgk8koKyujoqKCH/zgB2g0GmQyGYIgUFBQgFQqxWKx8OyzzyKXy0lLS2Pr1q3Ex8czODjI+++/zwsvvBAQubtr4fjx44yMjDA7O8s3vvENzGbzZfsBqlQqzGZzWBeZaWlp/NM//RMKhYIjR47w1ltv4XQ6Q9K9LOwOnpSURG5ubshTZ2UyGUuXLuWuu+5i3bp16HQ6JBKJGJGwWq2ioFRUVBRarRaLxUJWVhYSiYSWlhaee+45xsbGQt5mzuv1MjAwwF/+8hcxCqVSqYDz062oqCi2bNkizsnDvZE2PT1Nf38/VqtVzNvp6Oigq6uL8fHxoB47LA7u8XjERPzY2FiSkpJCenyFQoHBYGDVqlVUVlayfPlyPB4PNpuN8fFxBgYGKCgoIDU1lfT0dGJiYsTWwEajEbvdjsPhoLPzfMmiTCYLuZM7HA5Onz6NUqkkJiYGtVoNIN51SktLsVgs4gLP3546FItguVyOTCYTF42Tk5OcOnVKXOusX7+eo0ePiqrYQU1rCEfrtpKSEuG73/2u4HK5hKmpKeHEiRPC/6/aDkj7riu9LysrS/jsZz8rTE5OCjMzM4LT6RQGBgaEr3/968Lq1auF0tJS4eWXXxYaGxsFj8cjeDwewev1Cl6vV/x/u90udHZ2CpWVlUJcXFxQ7Lyeh0QiEeLi4oTvf//7QmtrqzA1NSU0NzcL5eXlgtFovNbPu67WbYsWLRKKi4sFpVIpSCQSQaFQCLGxscIjjzwi7Nq1S/B6vcLExITw2GOPCRkZGQH53guqdZu/6bwgCOIIYzabLytcFGjS09O59dZbGR8f59SpU1RXV7Nv3z7a29ux2+14vV6effZZ1q9fz5133klWVtZFc1in00ltbS1dXV1MTEwE3earRSqVYjKZePDBB0lMTBRTSy8Uig0GKpUKk8nEt7/9bZKTk/H5fPzmN7/h5MmT2Gw2Jicn2b17NxaLhY997GNoNBpRvjuYhHWKAogZhTqdLmSOIpVKkUqlHDp0iCNHjnDq1CkOHjw45z0dHR0MDw+LU4/e3l6GhobESMTIyAjHjx9nYmIi6JmD14LZbCYnJ4eMjAwkEgmjo6N0d3cHfSqQnJwsNsH3er309PSIGkH+0LA/g1QQBCYmJkRFjWASFgf3b9X7E678u1uhWgwNDAywf/9+3n77bYaHhy8ZPtPr9ZjNZlJSUpBKpfzlL3/hrbfeorm5GZ/Ph9vtFvMjgs21LMDLy8u54447xNDhyMgI77zzDj09PUENE1ZWVvKTn/yEqKgodu7cydtvv82+ffvm/MabN2+muLgYr9dLbW0tjY2N9Pb2Bs0mIDxzcLlcLlgsFqG2tlYYHx8XhoeHhccff1
zIysoKyRxcqVQK0dHRglwuv2juL5PJBIPBIHz7298W3nnnHcHtdgudnZ3C5z//eUGn0wlKpVJQKBSX/NtA26lSqYSEhAThF7/4hfDVr35VWL9+/RXn3k888YTQ398v+Dl06JBQUFAgKJXK67H1qufgFRUVwpNPPimMj48LNptN6O/vF5qamoSmpiahtrZWePfdd4W+vj5hcHBQaG5uFu68804hJSUlIGuvC8/pgpiDezwe7HY7bW1tGAwGZDIZp0+fDlkeh9vtvuyt0WAwUFBQQHl5OampqTgcDl599VXOnj0b8o2SpKQkli9fTkVFBVarVcw59+8AOp1OTCYTWq0WvV7PqlWrKCwsFCukWlpaqK+vZ2hoKOhRnu7ubvbu3UteXh5ms5no6GiSkpLE0GtMTAwzMzO0tbVx8uRJ6uvrsdlsQQ8Nhy0O7vP5aGpqwmq1olAoeO2118KelO8vo9u0aRNr1qxBqVTS29vLU089FdKteD/p6els3bqVgoICkpOTSUxM5OTJkwwPDzM2NkZfXx9ZWVlYLBaSk5P5/Oc/T3p6uiisWlVVxeHDh5mYmAh6eLC7u5vBwUGSk5NJT08nJSWF5cuXA3/dra6rq+P999/njTfeoLOz829jowcQVW7D3fQ+Li6OwsJCPvnJT6LX6zl9+jS7du2ip6cnLCVfDQ0NPP/886SlpZGTk8OKFSt45ZVXGBgYYGRkhK6uLsrKyoiOjkatVqNUKsURs729nd///vccOnQoZJmEbrebZ555BqlUKgYPLsTn8+H1esUa0lAQdgeXy+UYDAaKi4tpbm7GbreHzZZ77rmH9evXYzabOXv2LPv27ePNN98M251lfHycc+fO0dzcTE9PD9HR0dx+++0kJCQQHR2NxWIhPj5+TkGG2+2mp6eHH/zgB9TV1YU8qW0hRZQgzA6u1+tRq9VoNBqsVitdXV1hdfCioiKWLl2KwWCgoaGBkydPcurUqbDZMzU1RX9/v9iCQqvVsmzZMrGO1Wg0olarxSmAz+djaGiIuro6tm/fflN1DbtewubgcrmczZs3YzabGRgYoKWlJWiNcq6WgwcPotVqycjI4PDhwzQ1NYXVHjg/Ij/55JPI5XLi4uLE/BKDwUBWVhZ33HEHer0eOL99/+yzz/Lyyy9HnPv/EzaNHq1Wy4svvkhKSgqTk5N8/vOfp7+/f1631Pk2lk9ISMBkMpGYmMi5c+cYHx8PSmTneuz0Fw/7JbLlcjkajYb4+HjkcrlYCtjZ2cng4GCgIj5/cxo9AcPj8XDgwAEsFosY8gq3+OvAwAADAwPU19eH1Y5LIQgCbrdbTPCKcHVEVNbCwI1iJzfBCL7wOu5EiBBAIg4e4aYm4uARbmoiQrCh50axE24cW9Mu90JECDbCTU1ECDYM3Ch2chMIwYY9F+VGQ6FQiPkfAD09PTeccsI1sFCnJFdNxMGvAYVCQVpaGv/1X/9FaWkpEomEdevWRTZfFjARB79K1q5dy4oVK1i7di1FRUVIpVIaGhpu5tH7piDi4B+Bv+J/yZIlrF27ltWrV4vZfF1dXRw5cmRB9ViMcAkCXZMpkUiu+nE1n3ctj0DXjmq1WiEjI0PYu3ev0NfXJ/ZFaW5uFp5++mnBZDIJUqk0rHYG83xynX1RwvEISk2mSqUiISGB5cuX43a7MZlMJCQkoNVqKSkpISEhgdOnT4td+S/E5/MxNjZGQ0MDbW1tCyI19UKSk5NZuXIld9xxB7fccgtarRabzcYzzzxDU1MTzc3NjI+Ph7UKSSKR8Nhjj1FcXExSUhIPPPAAg4ODkbvKBczLwfPy8igqKuJjH/sYHo8HvV6P0WhEpVKRmppKTEwMWq32kgplgiBgt9spKCigt7eXhoYGsfdIX19fyHptXw6dTkdaWhqlpaVotVpkMhmzs7N0dHRQV1dHd3d32J1boVCwZMkSioqKiIuLIysri+np6YiDX8C8HLy8vJxbb72Vbdu2zXn+wltETk7OnN
d8Pp/YJ8NfiTI7O0tbWxsHDhygurpalMEOlwP5q2VSU1PJz88HEFUpRkZGaG9vZ2hoKCy2+fE7eHJyMvHx8ajVavLy8ujr6wt5t9vrxS/wCn9txuTvkisIAj6fb9557fNy8OXLl7Ns2bI5zwmCwPj4OH19fYyNjV30WmdnJ1qtFrPZTFRUFMnJycTGxrJ48WKysrIoLi6ms7OT/v7+sNT3abVarFYrP/7xj0XnBnjhhRd47bXXeOedd8Ketw7nB4qpqSm6urpISEgQz6O/CeeNwJo1aygqKhIv1MzMTNauXQuclytvb29n69at86r0mpeDv/nmmzQ3N5OcnMzIyIjYrcrhcDA8PDxH18bP5OQkCoUCrVaLwWDg3nvvZe3ataLmuslkoqSkhGPHjoWlhC0mJoaVK1eSmpqKx+OhpqaG119/naNHj1JfX7+gwoKCIHD06FFR8cFisQS91998SEtLw2g0EhMTQ3p6OhUVFeTm5iKTydDr9WIXXzhfsaRQKPjud7/Lzp07OXHixHUdc14OfvDgQerr67FYLPT09Igj7vT09BWVsSQSCWazmbKyMsrLy/9qkFyO0WgMS0/r+Ph4srOzKS8vJzo6mv7+fo4dO8Zvf/tbRkZGFuTc9syZM+Tn5yOVSklOTiY6Ojos7ZwvRCqViju+crlc/C2Li4tJSEjAarWKArpWq1XsbQ6ItaQqlYq4uDjuv/9+6uvrw+Pgftm3c+fOXfuB5XIqKyvJz88XVdZmZ2fp6enhlVdeCalamZ/HHnuMjRs3kpmZydDQEHv37uXXv/41vb29Ye/ZcjnOnTtHX1+fWMT9wQcfUFVVxfDwcNhs0uv1ZGVl8bOf/Qyr1YpGowHOF0qrVCpUKpW4l9Da2jpnKnghUqkUjUZzWfWKqyHkGz0ymYzi4mI2bNjAJz7xCTIzM8XXnnrqKd577z2amppCOv9Wq9WUlJRQVFREWloacrmc48ePc+7cOWZmZhZ0hbrfNv+iMysri5KSEt59992Q2yKVSiksLGTLli2sWrWKoqIiVCqVOIL7R3OXy8Wjjz7KwMAA09PTJCQkEBUVRXZ2Nvfeey8mk0ls1t/d3T2vViIhc3C/qoLVamXFihVs3ryZoqIilEolMzMztLS0cOjQIU6dOnXJuXuwkEqlaLVaFi9eTHx8PDqdDkEQxO5Rl5qWmM1mtFqteGu12Wxhj6rAX+VLwiWs65eFWblyJeXl5XO0l3w+H+3t7aJa8Z49e7DZbHg8HrEfZGxsrPh+v/rDsWPH5hUVCpmDR0VFiZo4y5cvF/vW+Xw+RkZG+NWvfsWJEydC7igqlUpULfYvcARBYGpqivHx8UvKX69atUq8IOD8WuT1118P60jvP7ZUKg2b4JRCoeDuu++mpKQEo9EoPu/z+ZiZmeHFF1/k7NmztLS00NbWJk77JiYmuOeeeygtLRUvTrvdTnd3N08++eS8fCIkDr5o0SLWrVvHD37wA6KiouaEsvwKBAMDA8zMzIR8ruvz+cQNE4PBgFQqRRAETp8+PSdLsLy8XFQ/3rZtGzk5OZjNZuC8MOz3vvc9tm3b9je7k5iYmEhhYSFLly6dMxKPjY1RW1vL008/zcGDB7Hb7ZfsQ+mPrvjp7u5m3759jIyMzKu3YkgcPD09ndzcXJKTky/SdJRIJBgMBm677TasVit9fX309vbS09OD3W4PesvimJgYrFYr6enpaDQaPB4PDoeD8fFxXC4XEokEk8lEeno6JSUlJCUlkZeXR3x8vLhLazAYiImJ4Y477uDgwYM0NzeHTZMyXCQmJlJeXk5UVBQKhQKv18vQ0BAnTpygqqqK48ePMzw8PCfMKpfLUalUJCUlkZ6ejsViAc4HL9rb22loaJh3o86QOHhGRgaLFi3C6/VeJFzqj30//PDDtLa20tbWRlVVFe+//z4dHR309PSISsPBwGKxiJtMcF4/aGhoiImJCdxuN3K5nPT0dHJycigsLGTNmjWiwsPY2BhDQ0
MYjUYSEhJ46KGHmJmZmSN1EipCLcP4YaxWKytXrkSpVIpTvPr6ejGG3dHRcdHf+DtzrVmzhiVLlpCYmIjP56O5uZm6ujrq6urmfUcPiYMfO3YMr9eL0WgkPz+fmJgYdDrdRe/zXwhr167lm9/8JjU1NezZs4f/+7//Y2xsLOAtjHU6HWazWWyHNj4+Tnt7O++++y6NjY14PB5ycnL44he/SEVFBVlZWdhsNmpqaqipqWHnzp1otVq2bdvGl770JQwGw7xCWvMlnKrRjY2NbN++ndjYWLxeL11dXfzrv/4rw8PDl52yrVixgo0bN/LVr34VlUqFy+WisbGR73znOzQ1NWGz2eYdzw/Jr9Hb2ytq2mRkZBAVFYVer0ev15OSkkJKSgpLly4Ve0rLZDKUSiWLFy9GpVLh8/nYtWsXJ0+eDJhNUqmU9PR01q9fz+bNm4HzmybHjh1jx44dCILAkiVL2LBhAxUVFVgsFpxOJ6dOnWLHjh00NzfjcDi46667WLJkCS6Xi+bmZvr7+4MubroQGRoa4tChQ2IEzG63MzQ0xPT09EWjsFQqJTc3Vzz3Go0GQRAYHR1l//79dHd3MzExEZDNqpA4uM1mw2az0dTUJIbidDodRqORJUuWsGTJElH2wi9aCucb0vt3NTs7O2ltbQ2Y80ilUhYtWkRJSQmlpaXAeQc/dOgQJ06cID8/n9LSUjZt2kROTg6jo6N0dXVx+PBhDh8+jN1up7CwkLVr12KxWBgaGqKmpoaenp6wpBj4pycejycsOTyTk5NMTk7S0tJyxffKZDJWrFjBihUrKCwsBM4vRtva2jh8+DA2my1w3yEcIlQffkRFRQmf+tSnhFdeeUVobGwUfD7fRY8dO3YIX/jCFwJWSKBUKoVHHnlEOHTokCjuumnTJrGIYPv27UJLS4v42hNPPCEsX75ckEgkQm5urvCNb3xDmJ2dFTwej3Dy5Enhxz/+saBSqa4pOT8Q5y4hIUH493//d/E8PfHEE0JZWVmgfpugFDxERUUJ4+Pj4rn1eDzCk08+KWzZsmVhFTwECqfTyd69ezl79iy33HILX//611m8eLFYuQ7nox3+VXYgkEgk6HQ6FAoFLpeLmpoaxsfH0Wg0JCUlYbVaiY6OZnZ2lj/84Q/s3r2blpYWrFYrn/vc56isrEQikfDHP/6R/fv3s3fv3rBrDMH5mHI4t+mvRG5uLhs3bhR3OP13nra2Ntrb2wN+vAXh4F6vl4GBAYaGhpDL5XR2dpKRkTHHwf35wYHEv3Xs8/kYHh5mdnYWuVyOXq8XcyD8u5pGo5HS0lISExMpKipCq9Wyf/9+3nvvPU6ePElra2tAbbte3G53WPSErgaJREJCQgLFxcVzJFfa2tro7OxkZGQk4MdcEA7ux+fzYbfbaWlpYfXq1XNeGx8fD2kiv/8WJ5FIWLRoERs3biQlJQWVSsXAwACHDx/mhz/8IQ6HY8E61ELDnymakZEBnD/HNpuNp556ipqamkvuGs/7mAH/xP+PXyn49ttvp7+/n87OziumPPo3fXJzc8WFJpwf4bu7u68ra/FyeDwe3n//fUpKSiguLmbjxo28/fbbREdHk5mZSUxMjLjgvfPOO1EoFEilUux2O6+//jr79+9ncHAwKHeW+eDPo15I+LsTbNq0iS1btrBixQpkMhlnzpyhqqqKN954g+Hh4aDE8YPi4BqNhmXLllFaWsq6det48cUXPzL9VSKRoFaryc7OpqysjMzMTHF64r/Ke3t76e7uDqid/px1n8+HTqfj1ltvpaioiPj4eGJiYsSYtl6vFytompubOX36NM3NzQuisufDxMbGkpycTE9PT7hNAc5fcPHx8Sxbtow77riDpUuXolQqOXz4MAcPHqSqqoqRkZGgrV8C7uASiYT4+Hhuu+027r77brKysnj22Wcve8L927UJCQls3LiRyspKsrOzxZ05n89HV1cXbW1tAXVwQRCYmZnB4XAwOTlJVFQUd99995
wdwQv1O10uF6Ojoxw5coSamhq6uroCZst88J8jn8+HVColMTGRrKwsjh49Gm7TAFAqlWRkZPDQQw+xadMmdDodU1NT/OlPfxIDC8HcgQ24g6vVah577DHWrFkj5nrHx8djsVjweDxz8qvVajXr169n3bp1fPrTnxYT4v07ch6PB5vNxo9+9KOAy/n5fD7q6+t56qmnOHr0KA8++CCFhYVzEn5aWlqoqqri3LlzdHV10d3dzbFjxxaUNptOtAAAAu1JREFUFqTNZqOnp4fW1tY5ufULhdjYWHJycti6dStyuZzJyUk6Ozv53e9+x+Tk5I0n5e3z+ejo6BB3JgEeeOABli9fTkdHB06nU6ys1+l0FBUVkZubS2xsLHK5XBxBZ2dnqa2t5d133w3aAsRvq8PhYGhoSCza9Z90m83G4OAgNpsNu92Ow+EImWrw1eLxeHC73aJdCQkJC8LRZTIZcXFxlJeXU1pailwuZ3R0lGPHjvHnP/9Z9INgExQHb21tFaXsdDodlZWV3HLLLXR1dc1xcH8F+4W5w7OzszgcDkZGRjh27BivvfZaUHuQjI2NMTY2tuAaD10t/sQvf3KXyWTCarWG2arzueGZmZmsWLGCoqIi4HzKxvHjx3njjTdCVrwdcAf3eDwcO3ZMTGDaunUrcD6xyZ9PfTn88+3XXntN3Fi5VBZahLn4L9DS0lKUSqWofhzO7MKYmBi+8pWvsG7dOhITExEEgZ07d7Jv376QhnsD7uCCIDA4OMjOnTuprq6mpqaG7OxsFAoFo6OjlJSUoNFo8Pl89PX1idUeDQ0NYler6upqxsbGwq58fKPQ1NTE9u3bKSoqoq6ujuPHj4fVuU0mE8nJyej1ehQKBcPDw1RVVVFVVRXy6E5QwoTT09N0dXUxOjqKRCKhra0NpVLJ6Ogog4ODcxzcH81oaGhgeHgYm81Gf39/MMy6aRkbG6Ouro7du3fT1tZGY2NjWO0xm81kZ2eTkJAA/DXTsL29PfSZlgsh2SpQj4idAX9cV7LVtm3bhKefflrweDxCb2+v8Mc//lHIyckRFApF0M/phx8RGcEIAaelpUXM3X/ppZf47W9/S3t7e1jCqwsqFyXCzcHQ0BDV1dX86le/Ys+ePTQ2NoZt7yCiVR8GbhQ7uQm06iNCsKHnRrETbhxb0y73QkQINsJNTWSRGeGmJuLgEW5qIg4e4aYm4uARbmoiDh7hpibi4BFuaiIOHuGmJuLgEf7fhjUYTeCjYFgDAIIEKi/Ey+h6AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "def plot_digits(im):\n", + " plot.figure(figsize=(3, 3))\n", + " grid = gridspec.GridSpec(4, 4, wspace=0.05, hspace=0.05)\n", + " for i, g in enumerate(grid):\n", + " ax = plot.subplot(g)\n", + " ax.set_xticks([])\n", + " ax.set_yticks([])\n", + " ax.imshow(im[i,:,:,0], cmap='gray')\n", + "\n", + "plot_digits(images)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "rVeSdnNJr0cV" + }, + "source": [ + "Now we can create our GAN. Like in the last tutorial, it consists of two parts:\n", + "\n", + "1. The generator takes random noise as its input and produces output that will hopefully resemble the training data.\n", + "2. The discriminator takes a set of samples as input (possibly training data, possibly created by the generator), and tries to determine which are which.\n", + "\n", + "This time we will use a different style of GAN called a Wasserstein GAN (or WGAN for short). In many cases, they are found to produce better results than conventional GANs. The main difference between the two is in the discriminator (often called a \"critic\" in this context). Instead of outputting the probability of a sample being real training data, it tries to learn how to measure the distance between the training distribution and generated distribution. That measure can then be directly used as a loss function for training the generator.\n", + "\n", + "We use a very simple model. The generator uses a dense layer to transform the input noise into a 7x7 image with eight channels. That is followed by two convolutional layers that upsample it first to 14x14, and finally to 28x28.\n", + "\n", + "The discriminator does roughly the same thing in reverse. Two convolutional layers downsample the image first to 14x14, then to 7x7. A final dense layer produces a single number as output. 
In the last tutorial we used a sigmoid activation to produce a number between 0 and 1 that could be interpreted as a probability. Since this is a WGAN, we instead use a softplus activation. It produces an unbounded positive number that can be interpreted as a distance." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "8zLMNX5Xr0cW", + "scrolled": true + }, + "outputs": [], + "source": [ + "class DigitGAN(dc.models.WGAN):\n", + "\n", + " def get_noise_input_shape(self):\n", + " return (10,)\n", + "\n", + " def get_data_input_shapes(self):\n", + " return [(28, 28, 1)]\n", + "\n", + " def create_generator(self):\n", + " return tf.keras.Sequential([\n", + " Dense(7*7*8, activation=tf.nn.relu),\n", + " Reshape((7, 7, 8)),\n", + " Conv2DTranspose(filters=16, kernel_size=5, strides=2, activation=tf.nn.relu, padding='same'),\n", + " Conv2DTranspose(filters=1, kernel_size=5, strides=2, activation=tf.sigmoid, padding='same')\n", + " ])\n", + "\n", + " def create_discriminator(self):\n", + " return tf.keras.Sequential([\n", + " Conv2D(filters=32, kernel_size=5, strides=2, activation=tf.nn.leaky_relu, padding='same'),\n", + " Conv2D(filters=64, kernel_size=5, strides=2, activation=tf.nn.leaky_relu, padding='same'),\n", + " Dense(1, activation=tf.math.softplus)\n", + " ])\n", + "\n", + "gan = DigitGAN(learning_rate=ExponentialDecay(0.001, 0.9, 5000))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "69GHTt_2r0cb" + }, + "source": [ + "Now to train it. As in the last tutorial, we write a generator to produce data. This time the data is coming from a dataset, which we loop over 100 times.\n", + "\n", + "One other difference is worth noting. When training a conventional GAN, it is important to keep the generator and discriminator in balance thoughout training. 
If either one gets too far ahead, it becomes very difficult for the other one to learn.\n", + "\n", + "WGANs do not have this problem. In fact, the better the discriminator gets, the cleaner a signal it provides and the easier it becomes for the generator to learn. We therefore specify `generator_steps=0.2` so that it will only take one step of training the generator for every five steps of training the discriminator. This tends to produce faster training and better results." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "lP7x5ZT1r0cc" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ending global_step 4999: generator average loss 0.340072, discriminator average loss -0.0234236\n", + "Ending global_step 9999: generator average loss 0.52308, discriminator average loss -0.00702729\n", + "Ending global_step 14999: generator average loss 0.572661, discriminator average loss -0.00635684\n", + "Ending global_step 19999: generator average loss 0.560454, discriminator average loss -0.00534357\n", + "Ending global_step 24999: generator average loss 0.556055, discriminator average loss -0.00620613\n", + "Ending global_step 29999: generator average loss 0.541958, discriminator average loss -0.00734233\n", + "Ending global_step 34999: generator average loss 0.540904, discriminator average loss -0.00736641\n", + "Ending global_step 39999: generator average loss 0.524298, discriminator average loss -0.00650514\n", + "Ending global_step 44999: generator average loss 0.503931, discriminator average loss -0.00563732\n", + "Ending global_step 49999: generator average loss 0.528964, discriminator average loss -0.00590612\n", + "Ending global_step 54999: generator average loss 0.510892, discriminator average loss -0.00562366\n", + "Ending global_step 59999: generator average loss 0.494756, discriminator average loss -0.00533636\n", + "TIMING: model fitting took 4197.860 
s\n" + ] + } + ], + "source": [ + "def iterbatches(epochs):\n", + " for i in range(epochs):\n", + " for batch in dataset.iterbatches(batch_size=gan.batch_size):\n", + " yield {gan.data_inputs[0]: batch[0]}\n", + "\n", + "gan.fit_gan(iterbatches(100), generator_steps=0.2, checkpoint_interval=5000)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "UW60zOZGr0ci" + }, + "source": [ + "Let's generate some data and see how the results look." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "fSQtVhSer0ck" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAALgAAAC0CAYAAAAn8ea8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9d3hc5ZX+e++d3mc0o9GMerMlWZYlW7blhm2MAYMBE5xQAoS2gWyyyeYXSEISnJAFQtjNbmCzkLIEEocACaYlxL3bSLJs2Zas3nubGY2m9+/3h/d+kYxtZHuKTPQ+zzyAhHSP7pw593ynvC9DCMEsZvFZBZtoA2Yxi1hi1sFn8ZnGrIPP4jONWQefxWcasw4+i880BJfyPzMMM6NLLoQQBpi1M4qwEEIMwMy3lb+n52I2gs/iYuhJtAFXilkHn8VnGrMOPovPNC4pB59F4iEUCiGXy0EIgcPhwGwn+uKYdfCrAAzDwGw2w2AwwGAwQKVSIRKJwG63Y3R0FHa7HePj4/B4PIk2dcZh1sGvAnAch+uvvx4333wzli9fDplMBgDweDz48MMPUV1djcrKSrS3tyMcDs9G9UlgLuVmxLNUpFAoEIlEEAqFEAwGp/WmXS3lt+nayTAMRCIRbrzxRtx3331Yu3YtVCoVGIYBIQThcBhOpxOBQAB+vx9erxd79+7FoUOH8M4770TD1BOEkPLp2JpoXKhMOOMiuFwuR3p6OpYtWwabzYaWlhZ0dnZO28njCY7jkJaWBq/XC5vNhlAoFNXfL5FIoNfrsWrVKuTm5kKhUIAQgs7OTtjtdvh8PuTk5EClUiE5ORmEEPj9fmi1Wuh0Ovztb39Df39/VG26VGRlZSEjIwOFhYUQCARgGAYM80lf5D+wgUAAJ06cwPDwMEZGRq74PZ8RDs4wDBQKBRiGgclkwurVq/G1r30NDQ0NePvtt9HX14dwOIxwOJxoUwEAAoEAQqEQUqkUCxcuxOjoKOrq6uByuaL6IVSpVMjOzsZ1112H9PR0MAwDl8uFyspKdHR0YGJiAnfccQcyMjIgEAggFotRWlqKkpIS3HLLLRgcHMT4+DjcbnfUbLpUlJeXY926dfjiF78IuVwOlj1/4Y4QgkAgAIfDgRdffBFVVVXweDxwOp1XdE9nhINnZmZiz5490Gq1EIlEAM7+wdXV1WhsbITP55sxzg0Aa9aswdKlS3HTTTdBJBLh6NGjYBgG1dXVCAaDUbkGx3GYP38+7r33XmRlZYHjOIyMjOCdd97BG2+8ge7ubjAMA4fDgfnz56O0tBTl5eWQyWQQCA
RQqVT43e9+hz179uDBBx+E1+uN+xOQ4zjceuutuO6662gAuxAYhoFYLIZer8dTTz0Ft9uN4eFh3HjjjRgdHYXf778sGxLm4GKxGFu2bIFSqYRWq4XZbIZYLAYABAIBfPTRR6isrMTIyAgikUiizKRgWRZGoxFPPvkksrKykJSUhKSkJNTX12N4eBiEEOj1ejgcDni9XkQiEZorXw74VK2kpAQcx2F4eBhNTU3Ytm0benp64HA4wDAMjhw5gubmZhw4cAAZGRm48cYbUVFRgaSkJKhUKixatAjPPvssnnvuOYyNjUX5rlwYAoEAycnJ0Ov1UCqVAID+/n4QQiAQCKDVaiEUCsFx3JSf4x2d4ziEw2GYzWa43e6ry8HVajWysrLw8MMPQ6fTQSgU0u95PB709/dj7969OHnyJCYmJmISefgDHCGEHmb5rzMMA41GA5lMBolEApZlwbIscnNz8dhjj4HjODidTrS3t6OhoQHd3d3wer1QqVTw+/3w+XxTrnM59ovFYmg0GqSnp4MQgq6uLtTU1ODYsWNTziPt7e1ob28HABq9ZTIZCgoKkJSUhMzMTDzwwAP461//ioaGBthsNgQCgSjcwYtDIBAgLS0NSqUSIpEIwWAQra2ttJSZmpoKsVgMsViMpKQkyOVymqPzAY3jOKjVahr4LguEkGm/AJBovL785S8Tu91OzkUkEiHV1dVk9erVRKlUEo7jLun3TtdOlmWJWCwmpaWlZO7cuSQ1NZVwHEc4jiNisZhotVry5JNPkh07dhCv10uCwSAJBAIkEAiQcDhMgsEgOX36NHn00UfJnDlziMlkIsnJySQrK4uoVKqo2KlUKsnXvvY1Mjw8TJqamshjjz1GkpOTP/V3MwxDNBoNefjhh0l9fT0Jh8MkFAoRp9NJXn31VVJQUEBYlp3uPT1+ue+9TqcjTz75JKmvryd2u50MDg6SDRs2kIyMDCIUColIJCISiYRkZGSQF198kbS1tRGfz0eCwSBxu93EYrGQ5uZmsmjRIqLVaqd9T899xTWCMwyDp556CmvXroVCoaBfD4VCGBsbw9atW1FVVYX29nYEAoGYRG4+t123bh1uuOEGCIVChEIhjI6OIhAIQCQSwWg0IiMjAzqdjkaPYDAIv9+P8fFxWCwWdHd3QyQSgWEYBINBeL1eWrKLBrxeL06dOoXXXnsNOTk5GBwcnNZhkRACl8uFXbt2YePGjUhJSYFOp4NAIEAgEIDNZotLLh4MBtHR0YHOzk6Ew2FaxgwEAlOelg6HA8eOHcOqVavAMAxYloVIJEIkEoFUKoVAILjgwXQ6iHuKsmLFChQVFYHjOIRCIfh8PjgcDhw6dAj79u1DY2MjHA5HzBoWQqEQmZmZWLVqFcrLyyEUCkEIwdDQECKRCEQiEZKSkjA2Noaenh7U1dUhEAggGAzSU34wGITP54PBYIBarYbL5YLT6bzsPPF8CIfD6O3txaFDh8AwDNxu97Tf6FAohMHBQVgsFrhcLuh0OrAse0VngksFXxXhP/herxc+nw+hUIjawDuzyWSiqeDkv8HhcMDtdl/RwT2uDs6yLLKzs2nN1uVyYWRkBI2NjXjiiScwMTGBUChE87BYvBkymQxZWVlYtmwZWJalzmqxWKBSqQAAfX19ePfdd3Hs2DHU1NTQqMeyLJRKJcrKylBcXIxrr70WTU1NNLJHE4QQ9PX1YWxsDAaDAX6/HxqNBk6nc1o/Hw6HYbPZMD4+jszMTLAsC4lEApVKBavVGnNH5zgOWq2WPgFdLhd8Pt8UZ5VIJEhPT8f/+3//D1qtFizLghCCYDCIkZERnDlzBt3d3VdU5oybg/OPH5fLhbq6Opw8eRL//u//Dr/fj2AwiNHRUerU/MEvFjZce+21WLhwIZRKJfx+Pzo6OtDU1IQ333wTgUAALpcLw8PDcDqd8Pl88Pv9tEQZiUTgdDrhdDoRCoVgMBiwfPlysCyLsbGxKf9vNMBHwePHj0Or1SIjI4
NWIqaDn/3sZ6ivr8err74KoVCI5cuXY8uWLXj66acxNjYGt9sdswqV2+3Gzp07MTw8DKPRCLlcDrvdDkIIPUA/+OCDuPXWW6HX62k1JRQKoampCTt27MAf//hHeDyeq6sOfujQIbjdbhw/fhxtbW1TymnnHGhjgrS0NOh0Oto1czqdmJiYgEgkgtVqxejoKHp6LjznzzAM/H4/nE4nxsbGoNPpUFBQgFAohLa2NvT19cFqtUbNcXgnFwqFEIvFl5RmjI2Nob+/H/39/UhPT4dOp8O8efOwdOlSnDhxAl1dXTFzcP4J0tLSgtHRUej1eixevBhSqRQSiQQ6nQ7XXnst8vPzwXEcCCHwer0YGxvD0aNHUVtbi76+viu2L64RnBCCP//5z7BYLGhpaaFfZxiG5sKRSCSmTR2NRgORSASPx0PzQgAoLCxEOBw+7+OQ47gpByC/3w+73Y7Ozk7k5uaitLQUc+fORWVlJQ4ePIiJiYmoluJYloVUKoVMJrtos+RcEELgdDrR0tKC5ORkKBQKZGZmYt26dbBYLOjr64taY+p81/b5fOju7sbo6ChcLhe2bNmCuXPnwmw2Q6PRTDlABoNBOBwOdHR0YPv27WhpaYnKdGTcHDwSiSASiaCqqmpKBGJZFmKxGCkpKRCJRPB6vRgYGIj6XAdw9qb/+te/Rm1tLa677jqUlZXBYDDAbDZDJBKhq6uLtsEDgQA4jqPtb74T19PTQ0/5wWAQ+fn50Ol0YBgGnZ2dtFoRTTAMg9TUVBgMhktycOBs7tvc3IyKigrIZDLI5XJ6wI/mofhCiEQiYFkWCoUCS5cupWMFk5/afIo6PDyM/v5+Ol4QDR+Ie4pyvujMsiwMBgOuvfZaeDwevPnmm7BarTGJ5OPj4zhz5gyCwSDmz58PiUQChUIBlUoFuVyOpKQkmEwmeDweGrFVKhUaGhrQ3NwMp9MJmUxGO5cKhQJCoRButxv79u1DX19fVO3lOA5z585FRkYG5HI5/XBN99GtVquxcOFCiMViWq6Ld2c4EAjAYrEA+PvTkEckEplSXeE4DkqlklZVrpoU5UIghEAoFCInJwdr166F1WrFu+++e8mRarrwer3o7++H2+3G2NgYBIKzt4BlWQiFQuTm5mLevHnwer30ZwYHB9Hb2wuv10tb5ADo7Izb7UZ/fz9qa2tht9ujai8fvZOTkyEWi2E2mzE4ODitx7dYLIZWq0VBQQHtJvp8PkxMTMQlevMIhUKfGJqafN7iS7Aejwfj4+P0XBYNJNzBgbOO8u1vfxt6vR6VlZUYGhqKSYrCIxAIwGq14uWXX0ZycjLkcjm8Xi8MBgPy8vJwww03QKVSYWJiAr29vXj99ddRV1eHnp4eeL1eSKVSAGfnRdxuN6qqqvD222/DbrfHxG6fzweBQID09HQ8/vjjePnll9HU1DSlpnw+5Ofno7CwECqVipZEx8fH8cEHH6C9vT1ukZzjOEilUlqLB0CvzTv5wMAAKisr8eqrr2JoaChqjb6EO7hCoUBycjIyMjI+UZaLJSKRCDo7O9Hd3Q3grBOJRCLIZDK8//77kEgktGzY29tLGzlisRhLlizBsmXLkJaWhjNnzqCqqgrHjh2Lid3BYBA7d+5EXV0dkpOTkZWVBY1Gg8zMTIyMjNDBLj7XBf4+Z7NixQosW7YMHMchEolgfHwc7e3taGlpifqT5mLgHZx3bn72m2EYhMNhOBwOtLa2orW1FVarNaqz/wl38OzsbJSVlUGpVKK/v59O5sUafIUhGAzSRyTfzOnq6oJQKEQ4HEYoFKJRWSQSITs7G+Xl5SgpKYFGo8HQ0BBtyMTCbkIInevu6+uDx+OhlZ7zXY+vSGVkZKC4uBi5ubm0ccaXNx0OR1wGribbNLnEOTmK+3w+tLS0oL6+Hm1tbfQDGy0k3MFvuukm3H///RAIBKiurkZ1dXXcrh0KhWgbnncW3hHOzVH5pYyNGzdi06ZNyM7OBgAMDw/DZrPF/K
nj9XppnZg/N5ybokQiEXpIu+GGG1BRUYH8/HwaKflIH++5cL5SNhkMw9DD55///Gfs2rULg4ODUb/2ZTs4n0+xLAuBQIBgMHhJ8yMsy6KoqAiFhYXIyMhAJBLBzp07sWfPnss16ZLAjwrw/34uBAIBrcsTQqBWq5Gfn4/HHnsMer0eQqEQPp8PVVVVaGtri+uMx4Vq1wzDYN68eVi0aBEefvhhpKenQyKRIBKJwGKx4OjRo/j9738f1+gNnF1o+epXvwqtVkujN8MwOHToEPbs2YN33303ZltHVxTB+TlpvrzD51afBr5Ve8cdd2Du3LmIRCI4c+YMhoaGpj1rEQ1cyCkZhoFWq6W5q9vtRllZGVauXAmj0UjTl0AggLGxMTgcjrjZfCFwHIeUlBQsXboUa9asgdlshkQioUNL+/btw9GjR+N6uATOBgqdTkdLlTwcDgfq6+tRWVkZ0/t3yQ4+uXzDH2rC4TDtRH6ag7MsC7Vajby8PDzyyCOQyWQYGxvD3r17YbFYEr69wz+VkpOT6bjm8PAwrr32WmzatAkSiQTA2UMpPzqbyJ1H4KwTSaVSFBcXY/369Vi/fj210+VyoaurC2+++SYaGhpikgZcDGq1GqmpqZg3b96U1Kq3txcnTpxAVVVVTK9/SQ7Od/b4R1woFKJfEwqFdDb6Yo/roqIi3HbbbXjggQcgEAjw0ksvYdu2bWhra4tZ2/hSwFcgxGIx5s6di9LSUqSkpGDRokXIy8ujf2N9fT22bNkSlc3vKwHLstSx77//fphMJshkMjrjfvz4cfz85z/HqVOnEkIM9L3vfQ/XX3893dryeDwYHBzEjTfeCJvNFvPrX5KDRyIRBAKBKVE2HA5Tx7xY9ObHNR955BEsWbIEGo0Gb775Jj7++GP09vbGtfFwMfA5bl9fH10QKC0thUajAcMwGB8fx+7du3Hq1CmcOXMm7vnsueAPv/wKGH9IDgQCOHz4MI4ePYq2tjb4fL6EfBDNZjNMJhP979OnT2Pbtm0YHx+PS0C7JAcnhHyikcFHNAAXPKFzHAeFQoH09HSsW7cOOp0OfX192L59O11wmCng/0aLxQKn04mRkREAZ2eXbTYbBgYGsHXrVroHORPATxy6XC66ZxoIBHDy5EmcOHEiZiXM6UChUNClY0II2tvb8cEHH8S0kTcZV1wm5DtR54vA/KRgcnIyli5diieeeAKhUAivvvoqfvGLXyT0xn8a+Gk4n8+H7du3Y8eOHfTrMwnhcBjHjx9HZ2cnxGIxSkpKYDab4fF40N3djYGBgYTazK/6CQQChEIhjI+Po7e3N25nrZjUwRmGAcdxSEpKwv3334+ysjIoFAq0tbXhrbfeQltbG8bHx2ecs1wMM9nWYDAIu92OrVu3oqioCKmpqRCJRGhtbY36ptGl4sUXX0R1dTU2b96M1tZW1NTUxJXjJi6NnkAggNHRUZw4cQKVlZWYmJiIx2X/YcDn3a2trXC5XEhOToZEIqGL1IlEXV0dvF4vNBoNWltb0dzcHNdKWUzJN/mKBF9PnswXEguQzxj55gzAFZNvsiwLjuMgFArpqG6MRhriT77JH3Zm8Y8Lfjzg0yYfY4WYpygzOXedRfyQKD+Y1eiZxWcalxrBLZi50nKZk/591s7o4GqxNfNC35ixCg+TwS+pArjoyOfVcni7WuzEZ0AINuHz4J8GlmUxZ84cunbV1NQEl8uVsEPLPxhmasSeNq4KB09PT4dWq0U4HEZHR0dcOfZmcXVjxju4SCTCvffeC6fTicHBQSoXMlt+nMV0MKOrKDKZDMnJyQDObrArlUqMjY3NOvcspo0ZG8EZhoFUKoVer4darab8fA6HI26TaJ9mm8FggFarhdPpxOjoaFy3kWYxPcxYBxcIBFCr1UhPT0dBQQEGBwcxMDBAKZbjjcmbTCKRCGlpabj55puxcuVK1NXV4YMPPkBdXV3CN5KuRky+t9E+W824MiFPl7ZgwQLceOONuPvuu6
HRaPBf//VfePvtty86ahmr8ptarcY///M/Y+PGjXRxQyqVUv5rXlfouuuum5bsXazs5BfAeX4Xk8kEhUJB/3tgYAAtLS2XsvwQMyFYiUSCoqIiLFiwAI888ggGBwdRU1ODn//855eVgl4VZUL+DRKLxSgrK0NJSQlSU1PR3NyM3t5eDA8PxzVCchwHuVyOL33pS1i3bh3mzZs3pQbPV3OkUilMJhO0Wi0CgUBMhsp4vhOj0UgH2FiWhUwmg9FoRFZWFgQCAX2JRCL6AeQ4DoFAAKdPn4bdbqfkpvGuRPFj1EKhEBqNBvn5+Vi4cCHmzZsHs9kMpVKJ8fFxvP3221FbgplRDs7/8WKxGEuXLsWcOXMgFApx6tQpDAwMxHwacTIEAgEUCgXMZjOeeuopqNVqKsQ6Pj5O1QokEgnkcjkAwGQyUSWDaIPfiiorK4NKpYJYLIZIJILBYMDixYuxbt06SCSSTzzu+Q2l3t5eCIVCtLa2UrGBRGiPCoVCyOVy6HQ6ZGdnY+7cuVCpVFCr1UhJScHcuXOxe/fuz6aD80xSkUiEcke7XC689tpraG1tjYsNfJRZt24drrvuOtx2221UDcJiseDtt9/Ge++9h4GBAUgkEnzpS19CeXk5CgsLsXbtWvh8Plit1qjbZTKZsGDBArzyyitUX1IoFNKIPVmKcTLC4TClpOZlDxMhCguAktz7fD7YbDYMDw+jsrISq1atogWEM2fORHU/d0Y5+GRxIp1OB7lcjnA4jIGBAUrSE2vwj8+srCzIZDIMDg6ira0NnZ2d6OzspPzh/P6jTqdDUlIStFotNmzYgMbGRpw6dSrqdvGb81qtdorECy+KRQihu48AqCRIOByGx+PBiRMnqE5loptkk5ll+XlxPrDwFG/RwoxycIZhIJFIYDKZoNfrIZVKEQqFYLfb47Z1z9OMMQyDsbEx1NbWYnBwEHV1dWhtbUV/fz8ljgRA1Xz59MFgMETdJpFIhIKCAixZsmQKDbLFYkFPTw/l19br9dR5RSIRneEJhUI4efIkurq6ZkwPgd/V5QWy+P3dc/nDrxQJd3D+D+OrE8XFxfjWt76FgoICCIVCjIyMxExS8Hzw+Xzo6+vDb37zG2qbQqGA1+uF3++ndvCMqXl5eUhJSaEEl/zfEq3DMMuyyMjIQElJCUpKSujBtr29HT//+c+xfft2OByO86pm8B8+s9mMw4cPz6g6vVAoxMMPP4wHH3yQPm18Ph8GBwejWgZOqIPzUnO8KCwvklRSUgKWZdHd3Y2qqqqE1L15B2UYBh6P5xMHMqVSiXnz5kGlUlFuxv3790dFOIm/rk6nQ3p6Oj73uc9RbVEeNpsNR48ePa9SGi+vwuuQ2u12BIPBGVGj55t3P/rRj1BRUUHTqkgkAqvVioMHD04RH7hSJNTB+UpFUlISpFIpVVdISkqiEn9Hjx5NyGn/XDUCADQyMwwDlUqF4uJiSCQSSjFRWVkZNWo0oVAIrVaL3NxclJSUQK/XT+H/5jgOIpEIEomEOr5YLKYqZhqNBklJSVQzKNbqddOBUqlEbm4uFi1ahGuvvZaSmBJC4Ha7qTZmNNOohDk4wzCQyWTQaDQ0F9uwYQPmzJkDkUiEzs5OfPzxx3jjjTcSRunGpxsymYxKUPOHIpPJhA0bNtDvjY+P45133sHQ0FBUrqtUKpGeno758+cjMzOTjgvzzm00GnHDDTfg4MGDcLlcIIQgJSUFWVlZyMjIQGlpKU3xPv74Y0qSmkjk5ubi3nvvxb/+679SnnD+wNzT04OGhgYcP378s5Gi8AcJgUAAvV6PW2+9FTk5OVAqlbDb7XjmmWdw4sSJhFG6SaVSylEoFAqhVCpBCIHdbkdBQQGWLl2KkpIScByHtrY2HDlyBMPDw1f8eOU/VKmpqUhPT4fRaERvby8kEgnEYjHEYjFYlkV+fj6+//3v4/HHH6fOMrl0KBQKEQgE0N/fD6/Xi927dyc0B2dZFi+++CKKi4vBsi
x8Ph89WwUCAezcuRP79++PejqaMAfnH5kikQgKhQImkwlyuZxyWbe0tCSE2JJ3lIULF1J5DeDvm0RyuRz5+fnIzs6GRCKBy+Wi5cPJh9DLBX9f7HY7uru7IRKJ0NTUhMbGRpjNZhQXF8NsNkOhUMBoNNKD8Lm/g8/DtVotUlNTKTlqIqI4f+Dl5/qBsyScvNCA0+nEwMAArFYrOI6Lakqa0Bycb3NrNBraVnY6nejp6UnYdJ5AIIBKpcL1118Pu92OU6dOTRGMTU9PR0lJCXJycsCyLEZHR9HS0hLVs0IkEkF/fz9sNhsl1xeJRNDr9bjnnntQXl6OtLQ0JCcnTymrTa6P8618uVyOtLQ0iESihDm4UqlESUkJLb9GIhG4XC7Y7XY4HA5YLBa4XC5wHAeNRkPJQ6Oh1ZPQFEWhUCAnJwdFRUUQCoUIhULo6urC66+/njD2q3nz5uGpp55CUlIS6uvr0dnZid7eXjoeu3z5cqxYsQKpqangOI42gKKtLcQ7wWTu8f7+fjQ0NNADpkqlwrJlyyASiRAIBOB2u2kD6vbbb8f69ethNBoxf/586PX6hHGZFxQU4IUXXoBerwdw9oNos9loIAsEArjrrrug0+kgFovR09ODHTt2YOvWrVdM8ZcwB2dZFikpKUhLS0Nqaiolmu/s7ERdXV1Ccu8lS5bgmmuuQVlZGRWo4ruogUCAirCKRCKIRCKawsSSd/vcag5/X3w+H7xeL6qrq+ljnRfU4jgOPp8P8+bNg9FohFQqxUMPPYQdO3bgo48+imu5UCKRQKlUQqfT0YNuKBRCd3c3LBYLAoEAsrKykJmZCa1WC6FQCLVaTZ9YzzzzzBX5QlwcfPIjFPj71GBaWhrMZjMMBgMEAgEsFgt6e3vR29sb99o3y7JYvHgxVqxYgfT0dNTX12NiYgIjIyPweDwIBAL0gMcf5PgDUiLKmDwfIS+DeC5GRkbQ09OD/Px86PV63HTTTbDZbNi/f3/cxh6As+XOyV1VPo2y2WwIBoOQyWTIysqi3WA+rZJIJFCr1Xj++ednroPzFQH+4DS5ciKXy7F8+XIUFRXBZDJBKpWiu7sbHR0dCXFupVKJTZs2Yc2aNWAYBrt27cLHH3+MoaEhKtGiVquRkZEBg8EAtVoNv98PuVxONXtmEiKRCPbu3QuWZXHnnXciLS0NhYWFWLhwYVx7C36/Hy6XCzabjTbFGIaB0WiEXq+HwWCA2Wym8iaT+dlbWlqu2M6Y7mROPvTwn15+xjolJQVFRUVU1MlqtWL37t04ePBgLE36BCQSCTIyMvDd734XWVlZ8Hg8aGtrw1/+8hfU1tbSUhYfWYqLi6FQKOgQE38YjpX0+JXg5MmTqK2tpSmU0WhEWVnZlI5oLMGyLPR6PZKTk6FUKqlYgs/ng1KppN/jD8D8U8nr9aK9vR179uy5YgePCzfh5DefH9znD21yuRyEEPT396O7uxvDw8OxNonawSsGFxYWYsWKFVAoFBgbG8ORI0fQ3t4Om81G0yqZTAatVks14/myodPpTNj46adhaGgIPT09sNlsSE5Opls+sfwwntvfWLp0KcrLy6coHTMMg6SkJKjVairxbbfbKTm+w+FATU0N6uvrZ76DA1PnOvhWt1gshlqthlgspnqTNpstLgcg/kNWUlKCO+64A6tWrYLRaAQhBPX19Xj++eenODdwdh577ty5UKvVtOIjFArhcrnOO6syEzA6Ooru7m60tbVBoVCA4zio1eqYRHA+HeX/qVQqUVFRge9973u0IcbX7GUyGXzlavgAACAASURBVAoLC+l/E0LQ2dmJY8eO4Q9/+AO6urrgcDiiUvGJaxWFlxkMhUL0gCEUCuF2u9Hc3By3EpZQKIROp8N3vvMdqv0eDofx17/+FYcOHUJ/fz89BzAMg9TUVNx///343Oc+B4FAgEgkQtXCWltbMTIyMmPJiCKRCLxeL9xuNyYmJmIm1ahQKKDT6SCVSlFQUICSkhI8+O
CDNAWd/NSY/AHzeDw4duwYnn/+edTW1tLqVbRsTEiZkHdyvhrBzyLEayWNd8SUlBTaYLLZbDhz5gyampro7ItKpUJycjI2b96M5cuXw2w200MQn38PDQ1hYmJiRjo3r5/Jl9/4fddYIBgMwu12IxQK0bSNv+65a3SBQAB/+9vfYLPZMD4+jo6ODjQ0NGBsbCzqdiWsDs6vpQkEAoTDYXR2dkZ1TPJi4FWKAdB5E6/Xi87OTvT390Mul4NhGDq09J3vfAcKhYLqq7vdblitViryNBMlWfhGWlJSEtLS0ugOp0gkisn1eMEuftRBLpfD6/VOUYXmiw4OhwP/+Z//iZaWlpg49WQkxMGTk5OxaNEi6jR+vx9WqzVu2yZ8irFz505s2LABJSUlyM/Px+OPP47x8XGoVCranDAajVCpVLQCYLVasXfvXlRWVuLdd9+FzWaLe/7Nt+EnM+6Gw2H6dd7BHnvsMaxatQp6vR52ux0HDhzAz372s5g20QghGBwchEQiwdjYGM6cOYPa2lq8/PLLsNvt9F55vd64nLcS5uALFiwAx3Hwer2YmJiAx+OJa4ctGAzi/fffR2ZmJnJzcyGVSlFYWEgbOvx2Pz/vHQwGMT4+ju3bt2P79u1oaWmZ8obFEzwpUlpaGsLhMBQKBfR6PYqKiuguq0gkwrx585CSkgIA2Lt3L44fPx4XdbtwOIyRkRE888wzNJUbHh6mUTyeSIiD85wYfHloeHiYChTFC+FwGNXV1Th58iTy8/Oh0+loKY1fbAiFQvB6vRgdHYXH48Hw8DD27t2L6urquHO0TMbklTS+I1xQUIB169YhJSWFPnE8Hg+8Xi96e3tx8OBBNDQ0xGUEghCCiYkJ/OlPf4r5taZlzHRfAEg0XnfffTdpaGggoVCIfPDBB+Sxxx6Lyu+9XDvFYjEpKCgge/fuJV6vl0QiEeJ0OklzczPZtm0bKSgoIBqNhnAcl1A7J7+EQiHR6XRk8+bN5Kc//Sk5evQo8Xq9JBgMkkAgQNxuN6msrCQvvfQSKS8vJxKJ5HKuczza732sXhfy2bhHcP7Rz4/GHj9+HHv27Im3GVMQCATQ29uLb3/721CpVHR4iW8z9/b2Jmzm5ELgqxVHjhxBU1MT9uzZg40bN8Lj8cBut6Onpwf9/f2wWCwYGBiYMdv08UbcHZxhGDidTrS3t6OpqQn19fXo7++PtxlTQAih3CFXC/hzwcjICGw2G/r7++kCxsTEBPr7+2G32/9hHZtH3Mk3RSIR8vLyMG/ePNTX12N4eBh2u/1Kfy0AXDXaN1eLnYgh+Wa0cSHyzbg7OM8ey9eeeaq2aOBqcZyrxU58Bhw87ikKP7QfCARmBE/HLD7bSIiDT/7nLGYRS8wKwcYfV4udwNVja+aFvnFJOfgsZnG14ZIi+NVy0Ji1M2q46pWOZ7SM4CwSjpmakkwbsw7+D46ZuEsaTcw6+D8oeJbXWQefBQDQOetoKxAkAiqVChkZGcjNzU20KTFHwhUezofzOVCiqj38Eu3k9bpgMIhwOHxVNqoEAgHy8vJQXFwMnU6XaHMATOXPifY9nTEOzlMCL126FF/96lchkUjAsiyCwSB27tyJ999/HzU1NXFzKo7jsHTpUlx//fXo6uqC3W6HxWJBVVXVFK3MqwUCgQDr16/HQw89hJKSEhw5cgSHDx9O+N+RmZmJP/3pT4hEIjh16hSee+45DA8PR40TPmEOLhAIcOONN8JgMFDKrpSUFOTn51MyToZhEA6HMTExgerq6rimBrfffjuWLFmC0tJSNDY2wmq10o30RDvFpYBfjuBZu8rLy2EymXDo0CGMjY0l7G/hOA4rV67EmjVrUFRUBEIIZDIZHnroIYyOjtIx32PHjl0R01lMHJxhGIhEoilOyi+e8pIbCoUCX/3qV1FSUgKz2fyJ3zGZBrigoIDySscDAoEAX//611FQUAAAlEE2Hute0QTLspBKpTAajXj88ceRn58PhUJB1++ioU
ZxOWAYBlqtFl/84hfxwAMPUI3P+fPno6ioCF6vF01NTTh69Cja29uvbOw32hs9DMOQlJQUsmXLFlJdXU06OjrI3/72N/L000+TzMxM8tvf/pbU1tYSp9NJQqEQiUQihEcoFCJ+v5+43W7i8XiI0+kkFouFPP/882TJkiVx2ZRRq9WkoqKCdHR0EJvNRurq6oharSYsy0Z9+yRav+9874FIJCILFiwgjz32GKmvrycej4eEQiHi8/nI9u3bycaNG6ezoRSTjR61Wk36+vqI3++n7304HCY+n49YLBbidDqJ0+kkVquV1NbWkjvvvPNTbY3bRg/HcSgtLUVBQQEyMzOhVCohkUiQkpKCnJwcVFRU0LQEAF1K3bZtGwghSE5ORkVFBYxGIyKRCCYmJrBjxw709MSn55CdnY0nnngCSqUSp06dwvvvvx+3DfArgVQqxQ9+8AMIhUJ4vV40NzejsLAQhYWFMBqNYFkWVqsVPT09+NWvfoW6urqEbCjl5eVhxYoV0Ol0EAqFlAf9tddeQ0tLC3w+H4RCIVQqFYxGIzZu3IicnBwUFhbizJkzl3y9qDo4T4lWWlqK9PR0aDQaiMViyGQymM1mlJWV0f+XEIKxsTEMDw+jtbUVf/zjH6FUKlFUVISioiLo9XoEg0GqshAt7fKLISUlBQsWLMCmTZtgs9nQ1NSEjz76KGEiWJ8GXu2Nl4B5+OGHwXEcrFYr/vznPyM3NxcGgwF+vx8+nw/Dw8M4ffo0du7cGTcOmnORlZWFG264gTKaORwOdHd346233qICVLxIbH5+Pm677TbMmTMHCxcuRGNj4yUHmqg6OE9nsGnTJuTm5l5QPx04SxTz3HPPYf/+/WhqagLDMLjttttQXFyMvLw8yGQy9PX1ob29PW4b98899xzWr18PlmXR3NyMM2fOoKOjI+bXvVTw9XihUIjNmzfjxhtvxC233AKBQACbzYbR0VG89dZbYBgG6enp2LBhA8rKynDq1Cl8+OGHCV1jy8/Px+233w6GYXDgwAHs2rUL//M//zPl/SX/Jytos9mg1+txxx13YN68eXjnnXfg8/kuyRei5uAMw1B1r8HBQSpHAQBOpxMOhwODg4NUgSASiaCmpgZutxvZ2dn4/Oc/j3Xr1iE3NxcKhQKEEFRXV+OFF16Im9qDwWCAwWAAIQS/+MUvUF1dHZfrTgcikQhGoxHf+MY3kJmZSck0U1JSkJSUhEgkgtbWVvzlL3/BgQMH0NvbCwCwWq2w2WzYs2cPRkZG0N7enpB0i2EY/OxnP8PKlSsRiUTwy1/+Env27EFtbe157eHp8UKhECQSCaRSKYRC4SX7QlQjOM/vXFVVhdHRUSQlJYEQApfLBYfDgZGREWRnZ1M+QJZlkZ6eDpPJhLVr16KkpARarRYMw6CmpgbHjh1DfX19zN8QlmWhVquhUCio3s2ZM2cwMDAQ0+tOxy5ejyczMxPz5s3DmjVrkJWVBZlMhlAohImJCdhsNjQ3N6OhoQEHDhzA8ePHqYoDn5709vbC4/EkhGZOKpXCbDZj9erVSE9Ph81mw969e3Hy5En09fWd92f4hprT6aSB8nyKcp+GqDk4v4o2OjqKF1544RPf50uHP/3pTzFnzhykpqaioqICmZmZmDt3LpYtW0ZVuEKhEH70ox/h1KlTcYk2YrEY8+fPn1JCc7vdCc29WZaFRCKBXC5HUlIS7r77blx//fXIzs6GSCSCz+eDzWZDTU0NqqqqcODAAbS1tU2ht+BLtC6XC+Pj4wn7W0wmE77whS8gNTUVfr8fDQ0N2LVr16dqG0UiEfT19YFhGJqaXGqZNm6NHp5V9Nlnn0VpaSmWL18Oq9WKefPmwWQyUZb/kZERHD16FA0NDRgdHY2LbUajEf/7v/8Lk8mE6upqfPnLX05Y9OZlDH/1q18hNzeXKiNIpVIAQF1dHVpbW9Ha2ooDBw5gbGyMcpSfS43Gl8oSyeeyefNmrF27Fps3b4bVasX777
+PV1555VMPuXwlJTc3FxKJBCMjI+eVFfw0+Zi484OPjY2hubkZDMNgzpw5UKvVU+QtBgcH8dFHH8WN989gMCAvLw8ZGRngOA4ej+eiIlix5gE3mUy4+eabsXDhQphMJkgkEkoj5/P5qMrE6OgoFRTgI71arZ7Co+hwOKKiNXm5YBgGCxcuxOLFi6HT6bB161YcOHAA/f39n2pTeno61q5dC6VSSSP3+X7m06J6Qlr1POXwihUroNVqaXTy+/3o6enBu+++GzclsOzsbCxcuJDKApL/0+OZ7Mh8HszTPfPyJTxLbTQdKD09HY899hjMZjNN2XhKYolEgpycHEqw2dPTg66uLgBnJVZycnJoXbm1tRU9PT2YmJiA2+2O+8GS1zRauHAhSkpKEA6H8frrr6O1tXVa96uwsBD33HMPZDIZXC7XBQPOpwXBhDg4T/ebk5MDmUwGv9+PUCgEl8sFp9MJp9MZtzdk06ZNePjhh0EIgc/nQzAYhFgsprksy7JYuXIl7r77bqxevRpJSUlgWZZKeN97771RZeZqbm7GN7/5Tbz00kvIyMigpT/grNPwpPwGgwHLli2jXwfOfhD5SOf3+zEyMoLKykr827/9W1ylGTmOQ2pqKl544QXMnz8ffr8f/f39cDqd0y5Rms1mVFRUQCAQoK2tDVVVVZdFNZKwYatQKITa2lpaKUhKSqLiVCqVCg6HI+ZOzkdGhmHgcrnwu9/9DgcPHqTE7QsXLsTq1atRUVGBwsJCmM1miEQicBxHy1Y//vGPcerUKRw/fhzV1dVXnFa5XC40NjbimWeeoUNovAhtUlISbr31VqoVxEf480EsFkMgEGDFihX44Q9/iB/84AcYGhqKuZMLBAIUFxdjyZIlWLx4MTQaDSYmJi6J2Xb9+vUoLS2lWqTHjh3Dhx9+eFlPyoQ5eDgcRkNDA7RaLVQqFebMmUMdXKlUwuVyxaU8aLPZ0NXVhXA4jPfeew/Hjx+H3++HTCZDfn4+Nm/ejAULFiAcDtNyG/+hFIvFuOuuu1BQUAClUgmbzQaLxQKn03nZtftAIICRkRG8/fbb9Gu8cJPJZIJer0c4HIZer6eR+1wyfOBs3Tw5ORm5ubnIzMzEiy++CIvFElMHZ1kWWq0WZWVlWLt2LbKyskAIgcPhwOnTp6d1TxiGwTXXXEMnDL1eLxobG1FTU3N5qWAi6JMBEJZlSWZmJrn99tvJT37yE2KxWMjExATZvXs3KS4uJiKRKOZDTFKplGRkZJC5c+cSkUhE/m9znAAgAoGAPPDAA6S9vZ34fD5SX19PXnvtNSKVSolEIiGpqank1ltvJX19fSQcDpNwOEzGx8fJs88+S0pLS2M6bMUwDGEYhrAsS4RCIVEoFMRgMBCDwUAUCgWRy+WkpKSEHDhwgHi9XuLz+ci6deuIwWC41GtNe9iKZVkil8vJww8/TPbu3UuCwSAhhBCHw0E+/PBDkpKSMi36aYFAQJqbm0k4HCbBYJAcPnyY3HbbbdN+72M+bHUp8Hg8lA3V4/FAJpMBOCvOGo/Z70AggNHRUXAcRyMbLzP+5S9/GevWrUNycjIYhoHD4YDVaoVOp4PH40F6ejruvPNOSjbPa+LceeedKCwsxPe+9z3aXIk2JrOD8ecZPjryKdJkzSO+9R3LjrBWq0VeXh4effRR5OTk0GWV119/HXv37p2W1Etubi7uu+8+6PV6Wvt+6qmn0NjYeNl2JcTBJ+sjOp1OWCwWDA4Owmg0IhgMUrnnWIOvhIjFYhQXF0Mmk0EqlUIul2P9+vUoKiqCTCYDIQRyuRxGoxFr1qyBw+FAdnY2ioqKaFWFH1XIzs6GTCbDDTfcgJqaGvT29sacHpr/OyaDrzzwH4ZAIBDT9CQtLQ1r1qxBYWEhvWculwu1tbU4derUBQ+XDMNALpejoKAAixYtwrp162jhwWKxoLa29ooG7eLu4CzL0hdfIWhtbUVlZSXKy8vhdrtpbhkPW9RqNTIzM/GNb3
wD+fn5SElJgdls/sSgWFFREfLz87FhwwZ6ABaLxdRp+OEn4Gzj6D/+4z+wbds27N69G7/97W8TUovmozsA2giKFZYuXYrvfve7kMvlAM5+oAYGBtDa2krnYiaDD2BisRhZWVl47rnnUFFRQeeQBgcH0djYeMXd5Lg4OO/MEokEaWlp8Hg8sFqt8Hg8VPSzp6cHZrMZLpcLarU65k7OT+Jt2rQJy5Ytg0ajoRUSgeCTt4VfOpZIJFAqlRgcHMSePXtw9OhRDAwMYGxsDEqlEsBZx+Kn4RwOR8JEYnnZwMkSfrECX1niHZc/iJvNZphMJoyMjNCyq0wmw9NPP43S0lJkZGRAJBJBr9fTnz9y5Aj+8pe/4A9/+MMVa6fG1MH5+fDFixfDZDJBqVSira2N1r35COP1euHxeCCRSCAQCKDT6WIiNz0Z/JyD3++HSCRCKBSiMnznAy9e63A4qGzIyZMn0dzcTB2ZHwqKRCIIBAL0w5sI5+Y31XlHu1AnMFoYGhpCdXU1VqxYQe+jRqPBTTfdhMLCQiqWy8u4X3PNNcjKyoJerwfw9/eDnzA8ceJEVFbqYurgLMtCoVDglltuwaJFi6DRaPCjH/0I/f398Pv99A/mb7xOp6OlplhH8HA4jMrKSixatAhpaWlQKBTQaDSQy+VUCJYHP0fDt/FffPFF1NTUfOIAGa/u66eBT71EIhF18FiXXNva2rBt2zYUFxdDqVRCIBAgOTkZ999/P7Xp3HOV3++H0+mk4wW1tbX4/ve/j7GxsaiNacTMwRmGgUQiQUlJCVQqFZxOJ7q6utDU1ITh4eGzJRyBACUlJVi8eDEeeeQRmM1mnD59Gvv374/Lxkk4HMbLL7+M3/zmN9TmTzvc8iPBM0mQ6lxkZ2fj6NGjdFGbH0GIZQRvampCZ2cnWlpa8JWvfAUbNmy4qKpyJBLB008/jddffx0ul4sOhfl8vqjaGfMIrlKp6KBQKBSCUqmkA/rl5eVYuXIlysrKYDKZ6Ml5cHAwbm3lYDA4Y1fSLgdisZiqNAsEAlitVtTU1MQ8YEQiEfh8PtTV1eGVV17B9u3bP/Up/PHHH8NiscT0/sfMwScPKvEVBoVCgezsbJqKrF+/HhUVFcjPzwfDMGhra0N7e3tCZ5evdiQnJyM7OxsCgQBOpxPd3d3Yt29fTOrx54IQgqGhoYTRUZwPMY3goVAIo6OjyM7Oxvz588GyLAoLC+mBjm/Ph8NhdHd3Y8uWLaisrIylSZ95PPTQQ3jggQfAMAw+/PBD7Nq1C1u3bk20WQlDXKooIpGIVkjMZjMikQgdQQ0GgxgYGMBPfvITNDc3/8PrOl4p3nvvPTQ2NiInJwcHDx6MG93GTEVMHTwcDmN8fBy1tbVwOp2Qy+VQKpXQaDTQ6XRUYbi+vh6VlZUYHx+f8fwjMx0dHR0YHR1FW1sbOjo64HQ6E21SQhFznUx+JJXnRlm2bBkqKiqwevVqsCyLbdu2YdeuXTh48OAVOze5SqRBrhY78RnQyYybECxfj5XJZCgoKEBZWRlOnjyJgYEBWK3WqJzyrxbHuVrsxGfAweM2i8IPBDkcDvT399NNDZfLNZt3zyJmSIgQrN1uR0dHBxwOx4xumMzi6kdChGDdbjfcbveV/ppzkTnp368W0dKZbCdw9diaeaFvzArBzuIzjVkh2ATgarETcRKCZVk2ahW0czFjNHpmMSMRk5SEZVmYzWZIJBKIxWIIhcLY1ewTtXQci9esnVF/RV3hgWEYolarya5du0hfXx9xOp2ksbGRrFix4opUNGbk0vEsogt+lt5kMmHjxo3o7u5GX18fGhoa4sIzcyHwyxdGoxHXXnstvvCFL6CsrIzyvshkMrpN9ZmVEUw0Ji9CX20Hb35zRyQSQavVIi0tDQsWLEBKSgqys7NhNptx8uRJWCwW2O32uNum0WhQUFCA3NxcrFu3DmvWrIFMJkM4HIbH40FPTw/8fn9MFs1nHRx/J+/nxUj5dbqrAf
woMj8Hzu9ACgQCLFu2DAaDAWazGU8//TT279+PqqqquNonFApRXFyMZ555BgsWLKB7lzyt88DAAN544w0MDAzE5J7POjhAV6b4CMITcM7kaC4SiaBSqbBgwQJYrVY6ZGWz2XD69Gns2LEDmzZtwurVq/H5z38ejz76KAQCQdwdfPny5Vi5ciWMRiNsNhtkMhkUCgX8fj/6+vpQV1eH9957DxMTEzFZcpl18Ek4l1ubx+RVtpkQ2TUaDfR6PZV7mZiYoJtJ/NjDxMQE3fZnGAZGoxEajSaudrIsi9zcXEoE1NXVRTlkWlpa0NfXh66uLoyPj192R9tgMFx0FzYhDi4QCOihggcfRfnt9USDd3D+gMQfhHi+bX4NLxERPiUlBXl5eVi6dCnq6+vh8/nOS6/gdrvh8XjoJvv56DBiBT7ty8/PR15eHgCgtbUVY2NjsNvt2LZtG8bGxq64o20ymTA8PHzB78fdwZVKJa677jrcc889WLx4Md3oGR0dxV//+ldUVVVhx44d8TbrokhKSsIPfvAD3HXXXRAKhXC5XDh8+DC++c1vwmKxxH2eJiMjA1KpFB999BEaGxsvuI5WXl6O8vJyAJ9OFB9t8AJZJSUlmD9/PjiOg9FohN/vh81mg9VqvWLOE4Zh0NDQcNGnalwcXCgUYu7cuXjwwQehVCqpLg8vUEoIgVKpxK233orFixfj5ptvxo9//GOMjY3Fw7yLgqeX++ijjzAxMYH77rsPHMchOzsbDz30EH75y1/GbYeU35DiD2j9/f3wer2feOLxekiFhYWYM2cOgNhTt50Lnoi/qakJer0e+fn5kEgkEAqFlC7kSsFv4l8McXFwk8mExYsX48tf/jLkcjlCoRC8Xi8GBgZACIFQKERaWhrKyspQWlqKUCiE//7v/4bFYpkRhzyv14vDhw+jr68PRUVF0Gq1EAqFuP7667F169a4OjjHcfD7/fD7/RgfHz+v0woEAhiNRmRlZcFsNgNAzMk3zwUhBB6PBwMDAxgaGqLapyKRiObh8eCfjLmDcxyHJ598EuvXr6fy3b29vaisrMS3vvUt+Hw+ZGVlYffu3dBoNBAIBAiFQpQqbaZQOng8HjQ2NmLz5s3QarVYvHgxvvvd70IgEIDjuJinKTw7Ac9pfj7CTeDs0zI5ORmPPvooUlNTKWVbR0fHRXPVaIO/rlKphFarhVwuR1paGiwWC/17rnoHF4lEmDNnDvLz82E0GhEKhfDLX/4Sx48fx4kTJ+gOZl9fHx5//HF8/etfR35+Pnw+H31DZ4qD8yCEUGoGPirF2sH5yM0vbxuNRiQlJWHBggX4+OOPEQgEIBaLodPpkJSURInyAdDvXYyWLlY2CwQCFBUVobCwkFLyMQyDwcFBKvIaa8R8q57XqpdKpQiHw+jp6UFTUxPOnDkDALSSwrNFBYNBuggxE9KT8yErKwu5ubnQaDQQCoUxdxyWZSEUCiEUCunhLS8vD6mpqVQQlpc40Wq10Gg0SElJmcIspVQqqdhXPMBxHHQ6HQwGAzQaDfUFfhE9XupvMedFGRoaouU0QghVCOMfT1KpFBkZGXjiiSeQnZ2NcDiM4eHhSxIsiicYhsGKFSuwcuVKAH9nnY0lRCIRxGIxddiCggKsXLkSS5YswQMPPAAAlAovFArB7/djeHgYKpWKcq1nZGTAYDDEjelWJBKhqKgIKpVqSjnY6/XCarXG/Po8Yk4bMTo6in379sHr9SIrK4tGoLlz58LtdmPt2rVYuXIlcnJyIBAIUF9fj6eeegpjY2MzLoKzLAulUgmDwQCFQgGn00lVkWN1PaFQiKSkJCrJzXEcNBoN5R0cGRmhnI+Dg4OwWCywWCxoa2vDli1bUFFRgZSUFEgkEixatAj/9E//hNdeey3mqZ9EIsHixYuhVqvp18RiMUpKSnDffffh+eefh1gshkQioUrMsTgEx/yQGQqFUFVVBZfLhYULF8Lr9SIlJQWrVq2C0+nE0qVLUVJSAq
lUStUeeEnqmQReouSGG25ATk4OJBIJRkdHP6EuHE0oFArodDpKMx0IBGiTTCAQwG6345133kF7ezv6+/up+KvD4YDFYkFNTQ2USiUtx2ZmZmLdunXYunVrzB08EAigsbFxyoefEAKxWIyUlBTcf//9kMlkkEgk8Pl8U7QwCSFoaGhAdXX1FVPOxaVMeOjQIbS3t8Nut0OhUFCdep/Ph+LiYuTk5EAoFMJut2N4eBhjY2MzoiU+GSKRCCkpKXjwwQdRWFiISCSCU6dOxdRRNBoN8vPz0dLSQtvwfMPG5/Ohu7sbL730Evr6+s57vw4fPgyRSISFCxdCpVIhIyMDarUaWq0WoVAoprZ7PB7s27cP//Iv/0K/5na74fV6wbIsnnjiCajVakgkkk/8LCEEW7duRW9vL7q7u6/sAB+vhQeWZYlIJCIymYyUl5eTr3/962Tnzp2ku7ub+Hw+EggEyJNPPkmWLFkyIxcJ7rzzTrJ161bidDqJ1Wol27ZtI1qt9rKG9Kdjp0QiIRUVFeTxxx8nxcXFRKfTEY7jiFAoJCtXriTXXHMNUSgUU5Thzn1xHEcUCgXJysoiNTU1ZGJigoRCIVJXV0e+8IUvTMfWy154YBiGSKVSsnv3bhIKhYjP5yM//vGPyYYNG0hOTg75zW9+QxobG0koFCKRSIREIhGqrBYOh4nb7SZdXV0kLy+PyOXymb/wwKsehEIhdHd3gxCCway9XwAACDJJREFUVatW0ceSy+VCR0cHlaaeSeA7sUuWLAHHcfj973+PvXv3xnSJQCAQQK1WIy0tDfPnz4dAIEBfXx98Ph86OztpN/Ni4OetR0ZG8NOf/hS33XYb7rnnHmRkZFC5lViCJ1Xdt28fDh8+jF27dmFgYADj4+PYunUr9u7dSyssPFiWxTe/+U0YjUYYjUb88Ic/xAcffIDKykoMDAxcsg1xn0WJRCKUycrpdFIH5yX94nnCng5YlkV2djby8/ORk5ODQCCAXbt2Yf/+/TGtfYvFYirbXVhYCK/XC5/Ph4GBAYyOjk67hhyJROD1erFt2zakpKTg7rvvhlKpvCg5fTTAR9DGxkZ0dHTgrbfewuDgID1bHTp06Lw/JxAIcM0112Dx4sXIysrC3Xffjf7+fjQ3N18dDg78fYbA7XYjHA5DIBDQ0uFMy71lMhneeOMN5ObmUllt3tliCZPJhNTUVJhMJiqrotVq8bvf/e6y7hF/z4PB4Hnz3ljhlVdeASFk2hWSUCiEr3zlK3j00Ufx7LPPgmVZ+P3+y5aHiV9r6xxwHIeCggJotVoEg0G0tbXFhaT9UsEwDNXwGRwcxF133YW6urqYXzc9PR05OTnIy8tDaWkpNBrNFTdHmpub8fbbb9MoGqtWuUgkokzCfGp6Kbj99tuxePFihMNhDA0Nob+//7LHDBK28MAwDHQ6HS0TnThxAhMTE4ky54LgdeIBwG63Y//+/XEZH/D5fGAYBhqNBhzHIT09Henp6Vfk4F6vFxaLJaZPSX65WK1W02nQS7FZIBBgyZIlyM3NRTAYxJEjR9Dd3X3ZT8yERXBe4Zav577++uszSvqCB9+Gd7vdcW0xnzhxAr29vZBKpZBIJFi5ciXuuOOOy466/IclKytrympetCEWi7FkyRLcdNNNlzzCwLMPr169GgUFBfB4PHjqqaeuSPUjIQ7O6/XwrWSPx4PTp0/PGBk+Hmq1GllZWWBZFkeOHMGbb74Zt+6qy+VCe3s7jh49ikgkAolEAoVCAZlMdsmOw7IsXnvtNbzwwgtYvXo1tm7dioaGhqjbzIvr3nHHHbjuuusgFounbSvHcVi5ciWqqqqQnp6O999/HzfffDO6u7uvKHVNSIqi0WiQm5sLiUSCUChEGwAzjWnWYDBgwYIF4DgO7e3tOHHiRNyuHYlE0NrainfffRdLliyBRCKBTqfDV77yFRw4cAA9PT3TWghJT0/Hxo0bsXTpUhiNRoTDYezatSsm0iYcx6G8vBy5ub
mQyWQoKyvDkSNHLppeqFQq3HLLLVCpVJg/fz5yc3OxZ88e7Nu3D01NTVfe0U4Es1VxcTH5xje+QdxuNxkeHiY7duy4aMNiuq9o27l69Wry61//mvj9fvK1r32NcBwXld97KXYqlUrS19dHfD4fbYhs2bKFrFy5kqhUKiKRSIhIJCJCoZAIhUIiFouJ9P+3d+4grWxRGP5NTEwy44xBDQrGkwf4Jk0KC7FNJ2IhWEgqwcZS0NLO3sJTqIUBCxEsLESbNBYSJD6jQiQJooSYYMxDMyGP2bc4JMjxHL14k8zcsL9qYJrF5t+zZ++91vq1WsIwDOE4joyPj5N0Ok3y+TxJJBLk8vLyX12c4BsXPQzDELfbTR4eHkgkEiGrq6vEZrMRtVpNlEpl+bKPZVnCcRzheZ7YbDZye3tLEokEyWQy5Pn5mUxMTBCTyfStMZVFZ6uBgQE4nU6o1WqcnJzg8PBQdolVAGCxWDA2NgaVSlWzCpTfSafT6O/vx9LSEpxOJ9rb27G4uIj5+XkUi0VsbGwgEAggFouhpaUFRqMRXV1d6OnpQXd3d/k3MBKJYHd3FysrK1U7rcrlclhbW8P09DRGR0cxNTUFnU6Hi4sLnJ+f4/7+Hg6HA7OzszCbzWX3Y61Wi/39fbjdbqyvryObzcrf6fgzNBoN9Hp9Oanm6OhIijC+pKmpCSzLSiLs97y+vmJnZwc+nw9arRZzc3Po6+tDQ0MDHA4HkskkBEEoN/9hWRZ6vR4sy0IQBFxfX8PlcpX936v1MSkWi/B6vbDb7eXJNTQ0BKPRiJGRESSTSZjNZphMJnAcBwCIx+P4+fMnjo+P4ff7K74Pk0TgKpUKDMOUq3lubm6kCONLlEplTYsEPsPj8cDj8QD4VS2v0+lgMBgwODj4YSOXz+cRDofx8vKCWCwGr9eL7e1tRKPRqq6UoigiFArh6uoKBoMBra2taGtrQ0dHBywWC1KpFBQKBZ6enhCNRiGKIsLhMLa2thAIBKpyyFBzgSsUCmg0GvA8D1EUZW2lXSoVA/DXGkgpmJmZgd1ux/LyMoaHh8vn9AqFAoVCAaFQCJOTkwiFQnh7e6tp3Pl8Hpubm3C5XAAAq9UKo9GI3t5e7O3tIR6Pf0gxruakq7nAGxsb8fj4iIODA2QyGVkmV71HFMVyg3a57BMIIfD7/VhYWADP8x8aKAmCgGAwCEEQJJuUpbEquejd3d2VfelrOY6S/KKkUikEg8Fy7rdcKVWGl8QtF4EDv8bw9PRU6jC+RBAECIJQs9YavyNJNmGpp8fZ2ZlsBV5K/MpmsxBFEYVCoWb1jJTKUTMj2PcwDAOO45BMJivacYlU2Pums7MTVqsVPM/D5/NV7HKk0nFWEWoE+x0EQUAul5Pt5rJEPB5HNptFc3OzZEss5b8hicDldCLxGaUWaYIgyH4yUv6MJEawVeLHu+eKxlnh4oaqxVkF/i+x/vjbC2oES6lrJMsHp1BqARU4pa6hAqfUNVTglLqGCpxS11CBU+oaKnBKXUMFTqlrqMApdc0/iw5rF1WgW2cAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plot_digits(gan.predict_gan_generator(batch_size=16))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "etw8X24pr0cr" + }, + "source": [ + "Not too bad. Many of the generated images look plausibly like handwritten digits. A larger model trained for a longer time can do much better, of course." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "LTtjqIsnr0ct" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/16_Conditional_Generative_Adversarial_Networks.ipynb b/examples/tutorials/16_Conditional_Generative_Adversarial_Networks.ipynb deleted file mode 100644 index 7d58e7aefb62bb061765213e318c1012bc375745..0000000000000000000000000000000000000000 --- a/examples/tutorials/16_Conditional_Generative_Adversarial_Networks.ipynb +++ /dev/null @@ -1,512 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "16_Conditional_Generative_Adversarial_Networks.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "gG-V_KZzqSSr", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 16: Conditional Generative Adversarial Network\n", - "\n", - "*Note: This example implements a GAN from scratch. The same model could be implemented much more easily with the `dc.models.GAN` class. See the MNIST GAN notebook for an example of using that class. 
It can still be useful to know how to implement a GAN from scratch for advanced situations that are beyond the scope of what the standard GAN class supports.*\n", - "\n", - "A Generative Adversarial Network (GAN) is a type of generative model. It consists of two parts called the \"generator\" and the \"discriminator\". The generator takes random values as input and transforms them into an output that (hopefully) resembles the training data. The discriminator takes a set of samples as input and tries to distinguish the real training samples from the ones created by the generator. Both of them are trained together. The discriminator tries to get better and better at telling real from false data, while the generator tries to get better and better at fooling the discriminator.\n", - "\n", - "A Conditional GAN (CGAN) allows additional inputs to the generator and discriminator that their output is conditioned on. For example, this might be a class label, and the GAN tries to learn how the data distribution varies between classes.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/16_Conditional_Generative_Adversarial_Networks.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "gXeKc6O9qSSw", - "colab_type": "code", - "outputId": "d5fe43f7-107e-404a-ee21-70f5960d41b7", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 7902 0 --:--:-- --:--:-- --:--:-- 7884\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. 
If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.44 s, sys: 517 ms, total: 2.96 s\n", - "Wall time: 1min 57s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Vr4T07_aqSS_", - "colab_type": "text" - }, - "source": [ - "For this example, we will create a data distribution consisting of a set of ellipses in 2D, each with a random position, shape, and orientation. Each class corresponds to a different ellipse. Let's randomly generate the ellipses." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "IdfLLsjGqSTC", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc\n", - "import numpy as np\n", - "import tensorflow as tf\n", - "\n", - "n_classes = 4\n", - "class_centers = np.random.uniform(-4, 4, (n_classes, 2))\n", - "class_transforms = []\n", - "for i in range(n_classes):\n", - " xscale = np.random.uniform(0.5, 2)\n", - " yscale = np.random.uniform(0.5, 2)\n", - " angle = np.random.uniform(0, np.pi)\n", - " m = [[xscale*np.cos(angle), -yscale*np.sin(angle)],\n", - " [xscale*np.sin(angle), yscale*np.cos(angle)]]\n", - " class_transforms.append(m)\n", - "class_transforms = np.array(class_transforms)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "xPml_fFGqSTK", - "colab_type": "text" - }, - "source": [ - "This function generates random data from the distribution. For each point it chooses a random class, then a random position in that class' ellipse." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ksP0E2KHqSTM", - "colab_type": "code", - "colab": {} - }, - "source": [ - "def generate_data(n_points):\n", - " classes = np.random.randint(n_classes, size=n_points)\n", - " r = np.random.random(n_points)\n", - " angle = 2*np.pi*np.random.random(n_points)\n", - " points = (r*np.array([np.cos(angle), np.sin(angle)])).T\n", - " points = np.einsum('ijk,ik->ij', class_transforms[classes], points)\n", - " points += class_centers[classes]\n", - " return classes, points" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "yvf85D4KqSTW", - "colab_type": "text" - }, - "source": [ - "Let's plot a bunch of random points drawn from this distribution to see what it looks like. Points are colored based on their class label." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "CXy5-cJkqSTk", - "colab_type": "code", - "outputId": "835283bd-9b9a-4684-e69d-adda03180486", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 283 - } - }, - "source": [ - "%matplotlib inline\n", - "import matplotlib.pyplot as plot\n", - "classes, points = generate_data(1000)\n", - "plot.scatter(x=points[:,0], y=points[:,1], c=classes)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 4 - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD5CAYAAAA6JL6mAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydZ3gUVReA35mtSUgCKYRQQ+i9i3RBqQJKFRC7In6KiL33ir2CIiAogiJK7733FgglQEJIaAkhfcvsznw/NqxZdjYFApIw7/P4yM7cvXNns3vm3FMFRVHQ0NDQ0Ci9iP/1AjQ0NDQ0rg5NkGtoaGiUcjRBrqGhoVHK0QS5hoaGRilHE+QaGhoapRxNkGtoaGiUcvRXO4EgCGZgPWDKm+8vRVHeKug9YWFhSlRU1NVeWkNDQ+OmYteuXamKooRffvyqBTlgA7opipItCIIB2CgIwhJFUbb6ekNUVBQ7d+4sgUtraGho3DwIgnBS7fhVC3LFlVGUnffSkPeflmWkoaGhcZ0oERu5IAg6QRD2AueBFYqibFMZM0oQhJ2CIOxMSUkpictqaGhoaFBCglxRFKeiKM2BqsAtgiA0Vhnzk6IorRVFaR0e7mXi0dDQ0NC4Qko0akVRlHRgDdCrJOfV0NDQ0PDNVQtyQRDCBUEon/dvP6A7cPhq59XQ0Cib5Frt7DmWzIkzF/7rpZQZSiJqJRKYJgiCDteD4U9FURaWwLwaGhpljFlr9/D13xvR60ScskyNihX45sm7CS9f7r9eWqmmJKJW9gMtSmAtGhoaZQDJ6WTt3uPExJ+hSlgwvdvUJyjAzPbDiXzzz0ZskgOb5Bp77HQqY3+Yy++vjvxvF13KKQmNXENDQwOAbIuNBz+dxdm0LHJtEmajnu/nb+bnZ4cwc80erHaHx3inrJBw9iLxZ9OoWSnkP1p16UcT5BoaGiXGpMXbOJWSgeRwAuQJbgevT12CyaAubnQ6kfRsy3VcZdlDq7WioaFxRSSnZjB38wFW7z2GTXJp2st2HnEL8fycPJ9O67rVMBp0Xuecskz9ahWv+XrLMppGrqGhUSwUReGrv9fzx9p96EQRURTQiQI/PD0InSiov0eWiYqoQIDJiCLbkJwyAKIgUM5s5Ot/NvBQzzZEVAi8nrdSZhD+i56drVu3VrRaKxoapZONB+J5adJCLJfZu0MC/RnSuSlTl+/ALv2rlQuAIAj4mQygKNgcTgL9TGTkWJHz5I9eJ+JnMjDz1ZEYdCJpWbnUiAjBbNR0zfwIgrBLUZTWlx/XPiUNDY1i8ffGGC8hDmCVHNSqHM
rluqGCS4vPsdrdxy5eZhN3OGVyLDYe+nQWGTlWDHodsqIw5u4ODLvNFRQnywpLdx5m7qYDKAr0b9eQ3rc0QK/TLMSaINfQ0CgWVrukelwA/lq/H6fT20ZeFGQFUjJyALDn2dm//nsDGdlWTpxNY/+J01zMsrjPxZ48x8o9cXz1xF0IgrpJ52ZBe5RpaGgUi15t6uOnYvKQFYWY+DPIJWittUlOJi3exopdRzl3MdstxAEsdomdR5LYfSy55C5YStEEuYaGRrHofUt9GkVVctm8cdm3TQY9b9/XA73OOypFDVEQimwSkQvw41ntEjsOJxZpnrKMZlrR0NAoFKcsk2u1E2A2YdDpmDB2EBtj4lkfc4KQQD/6tWtM9Yrl2RWXxD+bDniEIAqAIArIeaq6n9FAu4Y1sEkOdhw5hUGvw+F0IisKkkMu1roUICb+LIqi3NTmFU2Qa2hoeCHLCjuPnuL0hUxiE8+xeNsh7HnRJmMHdqJ/u0Z0aVaLLs1qebxvzN0dORB/loRzaThkGb0oEhLoz5i7OrJ2/3EcskzvNvXp3CQaURRISc8mJSObkEB/Ji7cwpLthz1CEwvSxi+x+1gSs9bsZXi3m7dSiBZ+qKGh4cH59Gwe+2I2FzJzsDucOJyeWrLZqOf9h3rTrXlt1fcrisKuuCSOJadSrWIFbm1QHZ2obkZxyjJTl+3g58Xb8gppKdgdTsKC/GnfqAZLdxzBJhXuPA0vH8Cyj0YV/2ZLGVr4oYaGhiqyrCDmS+R5bcoSTl/IwOnDa2m1O5i4YItPQX4mLYsl2w+z8+gpIioEIgjQvmGU17iNB+J5fcoSMi02AA9HZmaulf/170DX5nV4dfJiBEFAURRybeoRM+nZ1qLebplEE+QaGjchTllm0uJt/L56N9kWO9GRobx0T1fqVg1n/4nTPoX4Jc6kZaoeP30hk+Ef/EauzY5TVjiVksGBhLM8N7gLgzo1dY9LPJ/OCz8tKFDbXrU7jmFdW7By/Gh2Hj2FAHzy5xqSUjK8xt7sKf5a1IqGxk3IZ3+uZfqKnWRbXEk6J85cYOz3czmYcKZITsM6VcJUj09atNUtxC9htTv4cs56klLS3U7QORv2e2jgl+N0ym4hbzbq6di4Jh0a1+TlYd08im8Jguv8s4M7F37TZRhNI9fQuMnIstiYu/mAlzZskxzMXrefSiGBJJ5P9/l+k0HPmLs7qp7bGXdKVZvPtUkMeW86ep2O4V2bs2pPnFcGaH4cssKqPXEM7tyUALPRfbx9wyh+fGYwPy3aSvzZC9SrVpHH77yVeje5Rq4Jcg2Nm4xzaVnodTovQa4Ax86kUiUs2EuQ60URg16kTtVwnr67Iy1qV1Gdu2JwOZJT1c0uNsmJTXLy85LtRVrnkaQUPvx9FR883NvjeNPoSL4bM6BIc9wsaKYVDY2bjMjQIBwqafQuM4WB/SfOeBwXBYHW9aqy6esx/PLCMFrWqepz7od63VJiha4kh5OVe+LcJXI1fKMJcg2Nm4wAs5GhXZp5CVyTQU9WjtWri4+sKOw6mkR2XnRJQXRsXJOxAzrhbzKopvEXF0VRsKkU6NLwRBPkGho3IWMHdObxO28lJNAPnSjSsEYEE54ehEP2kVkpCB6laQvintuas+rT0Ux7cRhB/qarWmeV0GACr3KOmwHNRq6hcRMiigIP9GjDAz3aeBzv3KQWC7bF4rwsCahySBAVAv2KPL/JoKd2lXAqhwaRmZtS7PXpRAGjXsfr995xU6feFxVNkGtoaLj5X//2bDoYT5bFhtXuwKDTodeLvH1/jysSqP4mY+GDVOjZuh6P9m5LVDEbMiuKwsYD8czZEIPFLtH7lvrc2bYBhiIW8yqtaIJcQ0PDTVhwAH+99QDzNx9kd1wSUZVCGNy5KZEhQQDsOHKKyUu3cfpCJs1rVWFQpyas2h3HltiTBJcz82CP1nRsHO2er2fresQmnvOyuxeEUa+7IiEO8OWc9W4hDhATf4aFW2P58Z
nBPssElAU0Qa6hoeFBoJ+JoV2aEehvYvnOI3z19wYGd2pKSkY27/+2EmteFMnp1EwWbo1FwBW6CLA7LpnWdasyYewgdKLIXe0bsWBrLMfPXMBik9DlVUEsKG800M9E9YoV3K/Tsy1k5lqpHBpcYOnb5NQM/ly3zyPRyGp3cCjxPOv3n6Crj5ICZQFNkGto3IScScvEandQo2IFjzorAJLTyeNf/cXhU+fdmvTqvASe/NUIL/37cqG882gSkxZvY3TfdhgNeiY/P5TVu4+xPuYEoUH+RFQI5Pv5m1S1dJNBx3sP9UIUBbIsNl6fuoSthxLRiyJGg46Xh3WjZ+t6qve082hSntbt6ZS12CTmbIzhtma1yqy9/aoFuSAI1YDpQASuv+lPiqJ8fbXzamholAwXs3KZsnQH6/Yfx8+oJ8cmkZqRg050NTx+78FetGtYwz1+9e5jHElK8RC0hdVeuZyZq/cwum87AAw6HT3b1KNnm38FsNGg4/t5LmEuKzKVQ4Pp3DSaIZ2bUS28PAAv/LiAPceSkZwyEk4sdol3fl1OpZBAmkVX9rpmkL/J66F0iW2HTjL6qzl8N2YABn3Zs5eXhEbuAJ5TFGW3IAiBwC5BEFYoihJbAnNraGhcBVm5VoZ/OIO0rFyvcrTkCcdnJszjt5eGU6dqOADr9h/H4qPKYFHJ32hZjSGdmzGgQxPSsy0E+ZswGjxF0ekLmew7cdpdm/wSNruD6St28vnj/b3m7NAoqoByuQr74k/z2BezsdglalYK4eFet1A3755LO1dt/VcU5YyiKLvz/p0FHALU83c1NDSuK39tiCEjx6IixP9FcjgZ8dEMVu2OAyC4nBnxKk0QsqIw4sMZHEw463OMXicSFhzgJcQBUjKyVSNNFFxCXg2jQc/EZwZRvpx6mKRdcrI//gxxyams3B3Hg5/OYlsZaRNXom5cQRCigBbANpVzowRB2CkIws6UlOLHlWpoaBSPi1m5zNmwv0iNGZyywuu/LOV8ejYDOjTBWETzQ/WICj7PHT51nlFf/kVyqmfZWacsczQphYSzafhqbFMrMhRJpYyAQSfSpm41n9esX60iP40bXOj6ZUXBanfw8czVBY4rLZSYIBcEoRwwB3hGURSvR6aiKD8pitJaUZTW4eFlYzujoXGjkpKRzeB3p3PWR91wNRRFYfnOI9StGs5Lw7piNugLtCebDXo+eLAX014cRvNa3jZrcDlOZ67Z436948gper48iYc/+4MRH81gwNu/cOLMBa/3lfMz8VBPz7otOlHA32zkvu6tCryPWpGhhAb5F3a7AJxKTb9qM9KNQIkIckEQDLiE+AxFUf4uiTk1NDSunEmLtpFpsVEcH6XkdLpt23e1b8yK8Y8z/tE7qVlJXesWBIG6VcNpUjOSh3vdQjmzd/KPwykTl5wKQEp6Ns/8MJe0rFxybRJWu4NT59N57IvZSA4nCWfT+HHRFr6dt5GDCWd5rE9b3r6/Bw1rRBAZEkj/do2Y+epIwoPLAa5olJSMbHdT5/zr+nRUP8r5GfEzGtD5cICCyxFbFpyfJRG1IgCTgUOKonxx9UvS0NC4WjbEnPBKs79EePkAUtNzvMIGTQZXA4dL+JsMdGoSzf74M5w8t8ProWCxS/R9fTK/vjyC6MgQdVOIXqRxVCUA5m856BX9ouCqj/7Qp39w7HQqTkVBlmVmrt7D3R0a8+LQrvRo5Yp2uZCZw6aDCciywtbDiazdewxBcGnvL9/Tjdtb1nHP27BGBEs+fIyVu+NIy8ohJT2buZsPekTimAx67mrfqMDY9NJCSUStdADuA2IEQdibd+xVRVEWl8DcGhoaxUBRFL75ZyPn07NVzxt0IrNeu49Ji7Ywb3OsOwPSz2ige6s6NIqqhMUu8fHM1SzefginrLiSeHxo9ikZOYz6cjZz33mIzk2iWR8T7y47KwguYTnstuZY7BI7j55S7QrkcMrEJp7zOGa1O5i76QC9WtenaXQkf63fz2
ez16LTidglh8cDwSbl8sa0pYQG+9O81r9xFgFmI3e1bwSQp7UL/L0xBqNBh11y0rlpNM8OKhudha5akCuKshEom1H2GhqljLX7jvPnur2qmZN6nUjretWoUM6PF4Z2pUuz2izcEousKPRp24D2ebHkT337D3uOJbvfV1gMeeL5dFbvieP9h3vz8+Jt/LV+P7k2O63rVuO5wV24mG1hyHvTi5WmD65QwxW7j1K+nJnP/1rnegj4aA9ntTuYsnQH3zypHjAnigIv3tOVx/u24+S5i1QODSIsOKBY67mR0TI7NTRuYOySg58WbeXvTQew2iXa1q/Oc4O7UDUvaSb+bBrbDicS5G+ibpVwJi7cgsWHwIyODGXcwE5kW2yU8zPRtn512tavjl1y8OuqXYz/Yw12yeFTmy+I539aiFGvIzw4gAEdm9CidhUa1oigfICZXq9OIjO38FrmlyMIAnqdyJIdh32X181HUko6NsnBmr3HOJuWRcMaEbSpV80jmzM4wEzT6Mhir+VGR/AV/nMtad26tbJz587rfl0NjdLGMz/MZdvhU25zhSgIlPMz8fdb9/Pjoq3M3xKLosg4nHKBjk2jXoef0YBVciArCp0a1+Tt+3sQYDYy6su/OJBwtkQ78Rh0IoIg0K15LZbuPHpFc5gMeqa9OIxlO4/wy7IdBdZnEQWBWxtU51DieWySA5vkwGjQU7dKGBPGDi6xrkX/NYIg7FIUpfXlx0u/lV9Do4yScDbNQ4jDpfhnied/WsjcTQewSQ7sjoKFOIDd4SQj14pNciA5nGw8EM/zPy1k97FkYhPPFUmIF8d+Kjll7A4ny3ddmRAHaFKzEnWqhNGtee1CI0tkRWFz7EkuZlvItUk4ZQWLTeLwqRSmryj7SqMmyDU0blCOn76gGlFhdzjZe9w7fb042B1O9h1PZvPBBFUHpBo6USy2M6yo4Y9q8x5IOMuCrbGEBQcgq5hWRIFCM1BtkoP5Ww4WbRGlmLKx39DQKINUq1geZxFsw1eKQadDrxMx6XXkXvZQEAS4ZHUVALNRj+RwFmjeuBrU5rXaHUxbvpOklPS8VXiO0ut1CAiF7iaK+qAqzWgauYbGDUrdquE0qB5R5HT54mK1OxBFEVEUuFyxDTCb+PDh3pj0Oox6HRa7A0cxKyCWBBezLcSfvajq7LRLziKZhHIsdnf2ZlxyKuv2H+dsWlaJr/W/RHN2amjcwORY7XwyazXLdh3F4XQiIHjUBL9SBECnExEFAUEQEHDVQHHKMqLgqv3tKjF7/eVD/jXe0bIOzWpV4bu5G90NLYqLQa8jIjiAlMxcnLKMyeDaXfRqU483RnYvVZ2DNGenhkYpJMBs5N0He7H566fY8MWT+JsNVz2nKACCKxHH7nBptZeEuKyAQ5bJtUn/qRAH8DMZePKuDvRv15BAfxO6K8zAlBxOki5kYpMcOJwyOVa72xE7a83ewicoBWiCXEOjFKATRfzNRt5/qDdmg95nA4WiIXC5jJYKCV8sKXSiQP9bG7pqnOhEgvxNqg7d8uX8+PON+6lesQLl/EzMeOVe+rVtWKCzNSTQv1jp9la7gz/WaoJcQ0PjOtO5STRz3n6Avm0bFFgMyhc6FXv49UIAQoMC2HLoJFGRFXhxaFdWjR/N//q1x2TQEWA2YjbqqVMljFmvjaRyaJD7vWHBAbx5X3d384vL8TMZWPTBI0RFVMBQDGFeWAOM0oImyDU0ShmRIUG8dV8PGkVVKrIjVMBVdvbRPm1pFFXpPxHmCq4KiCkZOcQlpfL5nHX8uGgrccmpOGVXfHyQn5lnB3ehYvlyqnOM7tsO82WNKMxGPSO6tsBk0DP5uaH0vbUh/iYDRr2uwIedThTokK9IWGlGc3ZqaJRSLHaJKUu28/fGGDJzrT5rotSpEkabetXoc0t9GtaoRMLZNB767A/skgOL3YFeJyLLyjWziYsCBAf4qa5RFAREAY+IGKNBx28vjaB2lTDV+RZsOcjX/2wgK9
eGXq/j3m4tGXl7S9JzLFSqEOjuOKQoCoPfnU7i+Yte1zXodZQzG5nxyr1UCgks4Tu+dvhydmqCXEPjBiYpJZ3JS7az98RpqoQG83CvNkRHhuJnMmAy6Dlx5gIjP/69wIJUZoOe54Z0YVCnpu5j2RYbv63czeSl25AVxctmfrUYdCJt6lXHz2Sg760N+G7uJo6rNJDwRdv61ZkwdpDP87KskG2xodeJjP9zDUt3HEGncyUsje7bjpF3uJpPnE/P5tmJ8zl+OhUBAaciExURwh0t6zC4czMq+GgLd6PiS5BrCUEaGjcoJ89dzBPSrpTzk+cusjk2AVEAvU5Hj1Z1sUnOQhNerJKDb+du5O4Ojd2hdqkZOUxdtr3QyoZXggBMfn4ojaP+LU71z8aYYgnyXXFJyLLi06krigJBAWben7GSZTuPelRG/GHBZiIqBNK9VV0qli/Hby+P4PSFDLItdmpGhqj2Ai3taIJcQ+MG5Yf5m7CohAHKiitbccWuo4h5ZpHCsElO0rJy3d11Xpu65KpS/AtEgMohLkfltkMnmbp8BydOF12IgyurNPlCBtXyqjyqYbU7WLgtFvtlPUmtdgc/L9lG91Z13ccqhwYX6/qlDU2Qa2jcoOyKSy7Qbm1zOBGLKIwFIMjfDEB6toWjSdeuAbqiQN/Xp9ClWTSr9x5HKmDHIArq9VhEAQJM3q3j8pNlsSL4CEhMzcgB8hpQb4zhyKkU6lcPZ2CHJlQILFo/z9KEJsj/Q5KOnuaPT+ZydPcJajapwbCX7iaqke8O4Ro3FyGB/qRl5RY4xmTU48xL7PGF2ahncKemmPKcgHuOJV8Tk0p+rJKDZUUoXysr3sJcEKBi+XJsOBDPHS3rEKDSCxQgNDAAP5PBK01fAJpER5JwNo0Hxs/C7nBgk5xsPHCCX1fsYtqLw6kRod6HtLSihR/+R8TtPsETrV5i+fR1nNh3kjW/b2DMra9wYNPhIr3/9PGzvNr7A3oZh9EvcCRfjf4RS7blGq9a43rycK82hdbRdjplPnqkD9GRoQiCq89m+4ZRBAeYMep1mI16hnRuytMDOrnf8/vq3dd66cVDEDDoRPxNBnexruQLmXzw+0p6vfwTsSfPqr5NFAWeH9zF4zMSBQGzycBT/Tvw8azVZFtt2PJMLzbJSZbFxid/rLkut3U90TTy/4gfnpmKNcfqfi3LCtYcG9+NmczE3Z8W+N7MtCzG3PoKWRdzUGQFp8PJ8mnrSDhwii83vOfREUWj9NKzdT2SUzOZvHQbgiC4Cz9dwmzUM7BjE7o2r03X5rVxOOW8hB8BpyyTkW0l0N/kVcv7VErG9byNQlFkhbo1KiIKAjEJ/wpth9PVMGPcxPks/fAx1e91n7YNCAnyZ9LibZy+kEnTmpV4vG87alYKYefRJK9oHEWBHUcSr/UtXXc0Qf4fcXjbMdXjJ/adxOl0oivAs75s6hpsuXaUfPtRySZxeMcx/vxsHoPH9UNXhESRjNRMdq+MweRnpFWPppj8TMW/EY1rhiAIPNL7FkZ0a0Hi+YtY7Q6mr9zJzqNJBPubGXl7S4Z0aeYenz89XSeKhASp24LrV6tISnr2NStJW1wUIPbkOZ/ruZhp4djpC9TxEVd+a4Ma3Nqghtdxo16nWmjLaCh7Yq/s3dF1QFEUDm46zL51sQSHBdFlaDsCK6hnovkioLw/GSmZXseNfkYW/riCLfN3UiEimP7/60WDtnU8xsTtjsdm8U4tdkpOpr35J2tmbuLL9e/iV0CM7Lzvl/DTC7+iM7hqOiPAe/NfpmnnhsW6D41rj5/JQL1qFQH4vFb/q57viX7t2H4ksdjNkK+WghKPCnqoOBWZK8l3ufPWBizYEuvhPzDqdfRt26DYc93oaDbyYuJ0Onnz7vG80vsDpr/1BxOfm8aIGk9wYOOhYs1z95g+mPw9nThGswGTn5FJL/7GruX7WPXbBl64/W0WTVrhMa528yiMfuoOIMkmcepwMn98Ot/ntY
/vS2DSi79ht0pYsqzkZlnIzbTwer+PsVls7vuc89VCHqg7hqGRj/HFqIlcOHOxWPeocWNSr1pFJo0b4nZ+Xi/8TQY6N40u9nX1okjtyuraeEGMG9iZxlGVMBv1+JsMmI16mtSM5JmBnYs9142OltlZTJb9sobvxkzGmuPZFbx8xWBmJf+I3Sox4/2/WDF9HbJT4bZ72vPAO/dQrnyAx3in08k3//uZlb+uw2AyYLdKRDWqSuKhZC9t2+RvYva5n/ELcIWPZV7I4sF6T5N9McenphJZK4Lpcd+pnvth3FTmfbvEK/7Y5Gek/5O9GP7KAH54Ziob5mzDltf9XKfXERwexOSDX3rdi8b1IzPHyu5jyfibDLSsU7VY1f7ycz49m/5vTLnu3XPMRj2j+7bju3mbcBQxdLLPLfV4/6E+Ps/bJQcz1+xl/paDKED/WxsyolsLtwnl8KnzJJxNo2alEPfOprSipej7wJJtwWaxExwW5NNJeDL2FIsmrSTtTDrH98aTdPSM1xi/QDPjV7zJ92OncmJfAnaryzFlMOqpFB3BT/s+Q6+iiWSkZpIcd4bI6AjeHfI5BzZ6R60EBPnxztyXaHZbI/expLgzfP7oBA5sUN8JVK1bmamHv1Y99+HIr1nz+0bVc0azAUUB2enE6fD8oZn8jNz/zj0Mff7qt/caxWfm6j18M3cDep0OUDAZDHw/ZsAVCaczFzLp+8Zkn6n5IYF+5Nqka2J+qRZenk5NajJnQ0yROvyYDHreuPcO+rRtwMWsXM6nZ1MtvDz+ZiOKovDYl7OJTTjntoebDHoa1ojg52eHlDnHv5aifxmZaVl89tAP7FjmqkccUT2M56f8j8YdPe1n62Zv4dMHv8MhOXA65ALrQB/ddYKEg6fcQhxAsjtITbrA5nk76Dy4ndd7gsOCsNskNvy9jZSkVNV5nU6ZgGBPx1XVOpF8ue5dHm/+PPExiR6aud6op8dDt6nOlZ6SwY4lvmsw51/75dgsdvavj9UE+X/AgYSzfDtvIzbJ6Q6ny7FK/O+bv1n28ahia+Y/zN+kapjWiyJP3tWBB3q0ZsWuo7w1bdkVd+bxxZm0TDYeiEdRFERBwM9kINdq92knt0kO3v99JetiTrBu33EMeh0Op8z93VvRsnYVDiWe91ijTXJw+NR5th9OpK2KE7QsclPayBVF4ZWe77Nj6R4cdgcOu4PkY2d5pfcHnIk/5x5nt0l88egEbBa7Wzv1lQ5t9jdhzbHisHkLQku2lbhdJ7yOpyZfYFSz5xhRbTTfPvkz5xLUBXlo5QrUah6leu6N2c8RFBrooXnITieLflzBxfPeYWZzvlyINefK4s31Bh1V60YWPlCjyJw8d5G/N8aweu8x7AUIzDkb9nulooMrVX9XXFKxrpljtbNid5yq4HTIMh0bRwHQvVVdPni4d4lXB3Q6ZRLPp+eZdRR3u7mCsNodrNrtqqmSY7Vjk1xp+F/OWe8VlglgtUnsj/feOZdVSkSQC4IwRRCE84IgHCiJ+a41x/bEk3g4GcdlPwyH3cm875e6X8ftOkFBLUkEUcDkb8KvnJk3/3qeyJoRGFRacZkDTIRVDSXh4Cmy012pwzaLjSdav0R8TCExrQK8NecFn1/0qnUiad61EYKYX5ArpCal8d2YyV7jdyzZi8N+ZXZRvVHPXf/rdUXv1fBEURTem7GSYR/8xmez1/LWL0vp+cokn6nzmbk2n+n6xW2OkJFjKVBw/rbq34Shrs1rl7hTNP9duOrGOBCLYAK5XIdSFDianKr6XrPRQFVozRIAACAASURBVLiPmuZlkZL6C/0CfAdML6H5rilnE1IQVbaiDslB0pHT7tfmABOyD4eMIAjc2rc1bfu0oMvQ9pQrH4Bkl/AP8seWa3e/TxAEZKfMTy9MR2/QI9kd3HFfZxp3qEfOxZxC12rOe1D4IinuDOvnbPWIKQdwOpxsnrcDRVE8frShVSpwfF9CodfNj8FkIKxKCM9P+R+R0R
GASxDN+34pf46fR0ZqJnVaRjP6iweof0udQmbTAFix+yhLtx/2tBHbJJ75YR6LPnjES9De3qI2W2NPYrF7ap8Op5OWtasU69pB/n4FOhrzP0wyc6wkp5ZMAlFokD85FruXqcYmOQkPDiA9x1pgXRZfqD3g9DqR7i3rqowum5SIRq4oynogrSTmuh7Ubh6FQ8WJY/Qz0qSTy0aemnyBoNByXrbpSyiKQk5GDneO6k5AsD/718cy9fVZ3HZPe+q0ikZv0KE36AivHoaCy/acm2VBskms/n0D8ycsRyqCI0kQBEIi1etCpJ5O46m2L3sJ8XyL9Do0aFw/TP5FT/wxmPT8Fv890+K+dceYnzyUxKONx/HD2CmkJF3AbpU4uPkIz3d7x+shcWDjIT5/9Ac+vu8bti7chSxfo4p7pYw5G2K8hDJARq6VIypaefdWdalXLRy/vHR0QXDVGX/qro6UL2ZN7Q9/XwmK+t9BFFwJQ5cwGvQF9sksKnqdyGej+qnucC916qlfLRw/45U1lw70M2Ey6DEZ9FSvWJ5Jzw7xWaOlLHLdnJ2CIIwCRgFUr179el1WlcjoCDoOvJVNc7dhy3VtS3V6kYAgPxp1rM+jjcdx5oQr06xCeJDry6ciK03+Lq/5x/d9w+Z5O7Dm2NAbdIh6HU99/yhdBrdjdMsXkC5zINpy7ZzYm4DJ3+i+vhrmABP3vj4Io8n7y52ZlsXkV2ZguywM8hKizrVjkOwOsi9mExwehE6no+XtTRg1fiQ/Pj+9QMfmJRq2r0dIpX8fJPExJxnT/jXV69otdn579y/emvM8AFPfnMWcLxZit9hQFNg0dztt72zFazOfKXPRBMXFV7SGKAiqtnCDTserI27n7enLiUtKwWQ00LFRFFtiE1i5+yh3tKzLwE5NPAShXXJw9mIWIYH+GPQ6Fm6NZc6G/Rw+5bvyoclgoFeb+nw3byOnL2TStn4NOjapycYD8UiOK3sImw167u/emma1KlMrMpSjSSkeJXQNeh3339GKGhEVOHjyHFtiE5iyZDu2fNq5ThR8FvoSgA6NoohJOENqRi5B/mayLOq/i7JKiYUfCoIQBSxUFKVxYWOvd/jh+cQU9q8/RGBIOVp1b4reoMfpdPLP14uZ991SLNlWbu3XintevIun279Gdj6ThyDkyXCVj+n+t4ZSt3U07w/70iuu3ORn5I8zkxhWZZTXOXAJ2tDIEFJPp3lp1KJOJKJGGMNfHUivh7p5CD2n08n3T09h2dQ1OCSnT9NPcFggHQfdyspf16PIMiZ/E498NII7H+sOwOzPFzDltRlFspdXrVeZ0Z8/QNs+LXmt74dsX7zH59hKURX59cT3nIk/x6ONxnk9LMwBJt5f8IpHKOXNyJ/r9vLVnA1eZoZAfxMrxz/u1fwgKSWd4R/OINdmVw0ZvKSJTn9pOCaDnt9X7eaHBZtRAIfDib/ZSI7FjqOQHVHL2lWITTyH0ykjOWXMRj3hwQEEB5g5lldTXHI4qV+tIk2iI/lj7V7V9YiCazdZvWIFHurVhjtvaYAgCGTkWHl50kK2Hznl/kk1jqrEp6P6ElHhX6dqXHIqExZs5mDCWSqHBhNdqQL/bD7oc90mg97j4Wg26PluzABa1qla4P2WNm7K8ENFUZj00m/M+24JOr0OQRQwmAx8uvJNajapweBn+zH42X4AZKdn80ynNz2EuGsOMPoZsFu8tdc/Pp1Lm54tVAW1Tq9j98oY6rWpzb61al9Agdvv60xy3Bm2zN+JIitUqVOJpl1c5osmnRrSaVBbL8111sdzWT5tXYHatN6op22/Vqz8db07ocdulZgwbhpBIYF0GnQr1etXxmAyFEmQJx05zXtDPue9BS9zaGtcgWOr1a8MwK7l+z0csJew5drYPH/HTS/I727fmCXbD3M0ORWLTUIviuh0Is8PuY2tsSeJDAny6Fk5eel2n0IcXBr+qZR0lmw/jL/ZwHfzN3nEgGfkK9BWELuPJXu8tt
odnErJoHFUJG+M7MHpCxnUqRJG5dBgJi3eil4UvRpUGHQi93dvzeh+7dwdiS7hbzaQmJqBmE/Djj15jgfGz2Leuw+5Hat1qoTxxeh/w1wnLNjsc816nei1w7FKDr6bt4kpz99TpPsu7ZS58ENrro1VMzYw+/MFzPlyIQsmLHOlomdbyc20kJGSyWt9P/Kw1cqyzH21nuLkwVOqc0pWCb3BuwiVIiukJF/w2ZFcb9Ax6tP7MAeYvISa7JT55+tF+Af5scQ6k682vkdKUhrLf1nLwokr+OrxHxnd4gVyMj3rUf/zzWK3cFbDYDLQtHMD1v2xxWucLdfGr+/OBqB1z+YEhhTdq2+z2Pnx+elUiPDdacXoZ2TkG4MBl+Ytit5fL1Gvwz+wdPVJvBYYDXomjB1Ms+hIRAEUFCSHg3d/Xc5rU5Zw//iZPDB+Jpl5AnjPseRC+2pa7Q7W7T/O5CXbSzyRZ+mOw+h1Il2a1nJ320nNyFXtMqQTRSJCAr2EOMCGmHgysi0eZhJZcfXfXL1HvZAcuOz2fj5K+vpy3B47rR7OWxYpqfDDmcAWoJ4gCEmCIDxSEvMWlxP7TzKi2uN8/cRPTHltBpNe+k1VW86+mOMR171w4nIvTTw/OoNeNX7cbpWoWjcSo0rVQAWFlnc0oW6rWny79SMq16rkNcaWa2f1jI2kJF/go5HfYMmyuDVtS7aVMyfO8/PLv7Ft8W5OH3eV97wUvuiLLkPa8cyPo5F9eP9TklzbY51eR7t+rQtMcLqc43sTaHFHU8wqzlL/ID/G/fg4x/cmMP+HZdRpFa1aPkCn13H7yLJX6+JK+HHhFvYeO42sgFNW3P/Pttqx2h0cSjzPW9OXAVCuCI47URQIDw5wd8cpSRRg1BezWbbziPvv2q5hDfxU/DcK0MqHSePkuTRV/0CuTSLhnHq8xMWsXBpFVSIyNAhDMRKfynp7t/yUiGlFUZThJTHPVa6BdwZ9SlYRQvoEUcCaT1vdVoDNFwHM/kay0707tZj8jfR++HbCq4Ty99eLABBFEbtVwmaxMyDkQToOaMuT3zxMcHgQyXHeCQpGs4GYdbGkJnt/iSWbxMKJK1gzcxOSTaJV92bUaRnN4W3q5g2Tv5GImuG82P0dnxExtVvUBFy29uW/rC1Sv8f8LJm0koHP9GXut4sRRAHJ5uC2oe1p2L4uX46aiCAIeT4FheZdG7N9iednO+T5flStoyUVOWWZP9ftKzBr0uGU2XwwgRyrne6t6hKbeL7AOY06HYM7N+N8Rg7r93snoF0tF7JyeffX5ew/cYYXht5GpyY1qVc1nMP5Miv9jAZ6tKpLzUohqnPUqhyGyaAn97IkHn+TgVqVQz2OnTx3kVenLM6zzSvUqFiBzk2jWVWA5n4Js1HPE329M6nLKmXGRp509DQXzqQXaayiKB6lYcOrqn/pwOWw8SUU7VaJ+ROW8fKvY+j1SDe2LtzFb+/9hd0mITtkZIfMhjlbidt9giadGnB4W5yXc1KySUTWruQ7hBDIyXA9RHat2Ef7u9pwfG88ks17TbZcO7M+movThzZu8jfyyIcjXGu32LH7SCQRBFdZW7U12a0Sc75YwKu/j6V6w2qEVCpPbmYuD9Uf62W3v1yIA8z9ZgnDXx5w09c+tzuc2BxFCz+12CTuua0FExZsxq4SOWI26BFEgdeG307dquE81b8Dmw8kFOrYvBIsdgdzNuzn/u6tiKgQyMSxg5i7+QCLth3CZNAzqFNTerTyHb/doVEUFcuXIyk1w20SEQUBu8PJxzNXs3JXHE/d3QGnrDDy4989sjaPn77A2YtZmI16VdORIIAoiFQo58czAzvRpVmtEr//G5UyYyOXnbJPW/Ul+7ROr8PkZ+S5SU9gzLdVHfqi79ohiqz4DBFUZIWtC3cx/e3ZVKkdSXBYEM7LIkkckpMLZy5Sp3UtDJdtQ41mA827NabBLXWIblq9UDOH3SqxbdFuxnz/mGpCk8sxqv5AqNmkOp
+vfdedsGMOMBMaqf4Aa9i+boFas2R38MHwryhfMYhy5QPY+Pf2Qu23l5Blma0LdhVtcBnGbNATWSGo0HFhwQGEBvljNur54om7MBl0mAw69KKAyaCnZ+t6fPvU3awaP5o+eXW2a1cJ4/G+t6p+nww6kUEdm1CxfLkiZVOqYdCLxOSlvxsNeoZ2ac60F4fz07gh9Gxdr8DQUp0oMvWFYfRuUx8/ox69TkRRFBxOmfQcK6v3HmPIe9MZ+t50r9R7BVd6v1oYoigI3N68Nus+f4JlHz/m/ixuFsqMIK/eoKqq885oNtBtREfuGNmZAWP7MGH3eLoMbe8xZuWvG674unaLnYUTlwMQu+UIlmzv6ABLlpW/v1rEIx8OJ6JGGAiuh0vd1rV4+bexALw2axwhkRUQ9QX/Say5NhQU9cQaAa+KheBK6un1cDfqta5F2tmL7Fi2l8TDyTzx1YNedc31Bj2jxt/Hwx+OwBzgW2t2SE5e6PYOiqLgdBa98L/slItk/irrCILAS8O6YvaR/q4TBcxGPW+O7O4WjO0bRrH0o1G8MLQrYwZ0YsYrI/jokT60qlvNq7fnvbe3UrWr+5kMvDysGzNfG0mb+tUw6HXoiuEnAVckV8hVdKIPDjDzzgM9WfbxKK8UDVlRkBzqwhpcO4IWtSp73K+Aqwn16H7t8Tcbb8ochTJVxjZ2yxFeuONdHHYJ2amgN+qp0zKaz9a8rZpUA7BhzlY+vv9b7Codd4qKTq9jWty3PNJoXIERJTq9iCCK7qxSUSdiMBm4bVh7hr88AFEUebjBWK8aMPmp3bImyXFnsGR5PzAad6zPsb0JWC97mJgDTHyy4k1W/76BxZNWYTQbcEgOohpV4/Sxs2Sl57h/TQaTnnb92/Dq72P56YVfmfvtEp+x6gaTns/XvktgSDkeb/58kT5Do9nAT/s/p0ptzU4OsO/4aX5avJWT5y5Sq3IokSFBJJxNo0ZEBYZ3bUGUiq1ZURSOJKVgsztoWCPCqycnQFaule4v/eRVb9zPaOCjR/vQuUk0AG9NW8bSHYdVo0/AOxdOFAQiQ4OY985Dbo3fanfw98YYlu86QoDJyJAuzejSNLpQgbpk+yFem7q0wDFqGPU6BnVqwo4jSaRmZNMkOpIxd3X0CNcsq5SpOPKEg6f4/cO/ObYnnuim1Rn+ykBqNYviyM7jgKtoFLhsZpYsi09BBPDXFwuuSoiDy2wx9Y2ZSCpV2PLj0pbzhT06ZWy5NpZNWcO6PzYz+vMHMPmbcGR4O1YvcWxPPDofnntLjpWqdSI5GXvKbUM3+Rlp3LEB8ftPsnTyaiSb5F7nkZ3HXdvrfL9UyeZg/ewt7Ft3kJd+eYo6LWsy/qHvUZwq7bkUOLwtjgFP92HkG4P57b2/cNgdPj9vUSdy56jumhDPR7Nalfl+zMAijz+WnMozE+ZxMduSVzUQ3n2gF52a1ERWFHci0fYjpzDodV6C3GKXWLbjCJ2bRHMqJZ1lu474FOKiIPBk//ZMW7ETR55Jo0ZEBb4Y3d8txCWHk4c/+4OEs2luh+fe48kM6tSUZwd3KfBeVu4pOCfBF3aHk382HmDGq/f6dKrebJQ6QX5oWxwv3v4OdqsdWVZIOnqarQt38+bs5/j5pRkeDjzJ5uBM/DmW/bLGZ9W+syd9pysXSp66knT0NMf2xhfceLAQrDk2lk9fq2qa8UBRN58AlA8Lomq9SE7sP5nnLxCQZYURrw7g80cnevf5VNQLDgFknM/k1T4fElDen5BK5bmQ7N3mzWgyEFbVFWkw/OUBtOnZjA1/b+Pvrxd77QpEUeTBd+9h2MsDCr4/DZ9IDiePf/UXF7M9yxA//+MCRNH1t25SM5I3Rt7hsz65gKv64Y4jpziVko5OEAH1HWDl0CD2HE/m2cFdqF05jCB/E1XDy3uMWbH7KCfPXfSIvrHYHfy5bh8jurUssATumQtZRbtxFSSnk382xfDsoIIfFjcLpU6Q//DMVI/QQZcz0s
Y3T01Cb9Rhv0wO2nLtzPr4HxZOWE5YtVDueeEumnd1VRE4dTSZtCvpQyngUSdFLVb9Sji0Na7A6JUClyQIpKdmErPhUD6NWEGySbw98FMsV7jGnPRccjJyEUTBY22XSvje2rclx/cl8MVjEzm2Jx5RJ9K8a2OO7jyOQ3KAApLk4N5XBzL8laJrnhrebDl0UrU1m6wo7l3o/hOnefDTWfz5+n2qfgsFV/bmvhNnkBUFpQDtIyk1g6TUDHYdTaZBjYpMHDvYa8zGmHjV4l96ncieY8n0vqW+6tx2yYFeV7gtWxAE1ftwygppmb53rjcbpU6QqzVoADgXn+Kz3OuF5DRSk9JIOHiKmPWxPPnNw/R8sCvPd33nyrRoBWw5V2eOUZ32CoU4uOymx/ckqJ5zSE6cV5Ppp7jmD61cgay0bBRFoUajarw+axyZF7J5tsub5Ga6tETZKbN3zQGim1TnwfeHk5uRS9MuDakQ8a8ml3b2IgkHThERFa6ZWYpBRrbF5w7qEgouzX3x9sN8Mbo/4ybOR8Al+C4l4hQ369NilziUeJ41e4/R/bLQQqNKxjO4zJq+qjIeTjzH09/PIzXTt9NbAPq3b0SQv5lZa/Z4mX/8TAY65dn5NUqhIC9XIYCMlEyv436BZsqHB2PNsXk9wfO/tOXa+fG56URGR7jjs9Wo2yqaprc1Yv4PS5GsUpHD625U9CaDT19AUGg5Mi9kFzpHRI1wvtnyITq9jtC80rrT3/nTK6bdYXeQeDiZoJBytOnZ3H1clmW+fWoyy6auwWg2INkdNGxXl3f+eVFL2y8CLWpXwVmEhsU2ycmx06k83OsWln88inX7j7N6TxwbYuK9BKIgqFY79sJik1izz1OQn0nLZOVudTu3n9FAm3rVvI6v23ecl35eVGDTZ5NBxzv396RH63qAK2P1z7V7seQ9gMxGPbUrh9KtRe3CF36TUOrCDweOvROTv2dYlcnfyIAxffho6WtUqlkRv3Jm/IP8fHb3kWWZhAOnEH1s7USdwF1P9ebxT+/nqw3vM/yVgdRtVXqf/rJT5o6RndH7qFWRlVa0cMCUpAtUrBbmFuIACQdOqTt5BVjx6zpW/76BzDxb6Lzvl7Ji+jokm0RORi52i52Dmw7z5aiJxb+pm5Cq4eUZ2LFxofXBzUY9jWq4SkIEmI30uaUB1SuGqDo1i6qgiIJA+QDPh+2vK3f5LMf76eP9vOz0TlnmvRkrChTid7dvxOavx7iFOMDTd3fkk8f60qlJTVrVqcJzg7swadwQrwqRNzOlSiN3Opz0eLALqckXWDZ1DXqjHofdQbcRnbj/7aHuMMCjO4+TdTGH6W//oVqtz2F30rxrIy+H3CWq1avC7SM7AVCnZTR1WkYTtyf+mt7btSQyOoIuQ9uxf91Bko56lwkoaghq3dbemXL129Zh++LdXo5Ua7aNZVPXsPyXtTgdTp7+4TH++dq74Jdkc7Bp7nasuTbVGi4anjzSuy3/bDpYYPd5P6OB/u0aehxr36gGf6zbq9rfsigIuHYE+Vm/74RqvHeA2YBNcrBi11F2H0siMiSYvm0bkGOze6Xm58ds0FOrcphX2KIgCHRsXJOOjWte0dpvBkqFIFcUhRkf/s3v77vC2wRRpOuw9gwYeyeR0REE5fOMC4JAvTauLZfD7uD9YV96CA+DUU/zbo3JzbQg6nU4VWK2ZUXBmm0lIDjAfaxNz2bsXLqnVJpY4mMSebP/J3QZ2p6kuDNXHF2zd/UBZn8+n8HP9nP/2Ho/0o3Zn87DbvUusZrfCfzNkz9jNKl/3RTFVZlRE+SF8/b05YW2QwsLDsB4WaJRqzpVubV+dbYeTnQLc6Neh1TEZC4ZhTenLUOWZXq2qU9KejZnL6pHndglV7r9+fRscm0SJoOOnxZtYfxjfX0m+gAgwJ03WUZmSVEqTCv/fLuYaW/OQrI5UBSXqWDVjI38OX6ehxC/nFv7tuLB94Zh8jfhH+SH0WygaZeGvPb7WJ
LjzvpMEko6cpo3+n/icazXw90ICi08pfpGxWaxs3vVfvwCfPf/LIycjFymvfUH09+Z7T4WWKEc7e9ug6BSsjQ/ssNJWNUw1bTxsCohBIWWbKf2skSWxcbsdfv4eNZqtsSeLNThefLcReas3+9xTBAExo/qy1v3dadT45o0qhGBQoG9xT1QFFfN8/d/X4VdcrBqT5zPEMcAs5HTaZlu7dsmOcm1SXw0cxWt6lQpsBSFv4/fpEbBlApBPu3NP1S1yPV/bcVeyFZx8Li+/HV+Mp+ueotfjn7Lx8veICA4gOhmNXxW/lNkhaO7jpN4+N8i+37l/KhYo3Rnjl1ITlOt0VIcbLl2/vp8vvtzP7w9jlUzNhaYdAWu2PemXRrgH+yPIU8zF3UiJn8T434afVOmVReF+LNp9Ht9Cl/+vZ4/1+0rVIiDK2Fm0fZDXsd1okiPVvV4+4GeHDt9AcnhLNJ8l3MkKQWHD01ewLWjVWtXl5qZy5i7OxLoo2CaThTZGZdU7PVolBJBfim0TY0T+xMKfb/Z30TdVrUIr/pvmcxazaJo1N53gR+9Qc+5fMlClhwrJ4rYfb44DRuuJ+Zyfgx5vn/R1TAfKAqkn3d1Vl//19aipeb7Gel+Xxd+PvAlA5+5k0Yd6tPjgdv4fvtHtLy9ydUtqAzz1rRlZOVaix0yeLlpBeD0hUy2xJ5k7sYYnwXmCsMpywSYjTSoHqFqJjEa9AT5MJEpikJIoD+t63pHs1yiuGWVNVyUChu5qBN9anwhEeVVjxeF9+a/xAu3v0PslqNe5ySbRM0m/zaJdqXFF+3bn5VWeCjff0H1BlVIOnL6qjJQwZUMVL6iq2i/qBNdyUIqKfwIeQWN/E3cPrKTu/Liox+NvLoF3CTkWO0cSjxf7D+XXhQZ0OHf1rmSw8lrU5awPuaEuy3aldRYEgWByqFB6ESBcRPneZ8XBVrUrkKrOlWYvNSzS5EoCERVCiGiQiB9bqnP5oMJXolEsqzQqm7Z6rF5vbjhNfLEw8k+tYeg0HJUrB5+xXMbzUbe/udFgkIDPUwOJn8Td9zXhbDKrjoOiqKwb10soZUrlFoTgKgTadC2DuvnbL2qeQwmPUNf6O/2L3Qd1gGDSlijwain54Nd6fPYHby/4BXG/jDqqq57MyL6yGoEV/x331sb0itfmN4lHLLMpEVb2X44EUVR+GHBZtbFHMfucNmqL3UjUsOUl+DjZ9QT6G/CoNcRYDbibzIQGRLI1/+7m6//2UiOxe5llpFlhb3Hk5mydDt1q4RjNuoxGfT4mwyEBPox/rG+AHRpWovOTWviZzQg4HK6mgx63nuwJ35GzUZ+JdzwGvnJ2CRM/iZV80qt5lFXPX+FisFM2PUJU16fxc6lewgI9ufup/tw15Ou2iyyLPPu4M/YtSIGaxEb2N6IGM0GylUIQClCswFRJ/Lxstd5uef7HjshUS/yyEf3MnDsne5jtZpFMeK1gcx4fw6K4orBVxR4bvITdBvW8Zrcy82Cn8mAQa9TDTUUgDF3deDnJdu8KhQCJF/I5JkJ8+hzSwOW7TyC5KM+z6W5TEY9vdrUp0WtyiSeT6du1XBua16LzBwr+0+coUKgH82iK7P9cCJr9x33OdclLTzhXBoTxw7m8KnzVCxfjg6No9xx36Io8OHDfdh34jSbDiYQ6GeiV5v6VCx/Y5okSwM3vCCvVq+yascbg8lAg7ae6cJ2m8SKaWtZN3sL/kF+9Bvdg1bdm3mMcUgO9q+LRbJJNO3S0OXErB7Oy9PHeF1DskvM+mQeO5buLbBr/Y2MTq+jfMUgXpkxlsTYJNWGyPkRBIGG7ery7VOTvcxZRpOBChWDvXYlI14dRNdhHdmyYCcGo56OA9t6pORrXDkh5fw4oxLmp9eJyIrCjiOnfJperHYHi7cfwlaIff3Otg24u0NjWtSu4vW3DQ0KoGtzVzjvhcwcxk2cX6R1O2UFyeFkaJdmqucFQaB5rSo0r1VF9bxG8bjhBXlUo2
o0bFeXAxuPeGQQGkx6+v2vp/u1ZJd47ra3iI9JdMeN71q+j8HP9eeBt4cCrnrlr/f72P1gcDqcPPPj49xxb2eyLmaTfOws879fyqGtcZQLCXBlLVol1QeJqHOVf1VzzlyqRFcS6I06HHb1uOHaLWty6nAydot3DPclnA4ndz52B43a16N6/SpMfH66z2uZ/E0YzQZGvDqQdwZ/5nXemmNj/oRldBvRyetcZHSEh6auUTL0a9+IqUu3e2VlVgkrj8UmcSql4PaGVrvDZ2s0AFFwldJtqdIs2Wp3sDk2gVyrnVvqV2fFrqPFinIpqCCXRslywwtygHfmvsSPz093p3fXah7FoHF9CcoXHbJ+9lYSDiR6JP9Yc2z88clc+j7enYBgf17t86FXfZUvHp3IX58vIOHgKdXkIF/ITkXVdm/yNzLy9cFMeX3mVRXBukTTzg3ZvTJG9ZzJz8gny9/kj0/mcmDjIZ+dd3597y/2rYtl/Mo3eWnaU3zywHcosuxe323DO4Diaszc88GunDlxzmeYYklVetQoGg/0aM3GmHgSzqWRa5MwG/XodTo+eqQPM9fuLVRUCkCj6hHsOpasel5WXGaVy9l3/DRjvvsHBVc4odMp0ziqkmpYoS8aR1Uq8liNq+OGd3YC+AWYeWbCKKYc+oqaTapz6nAy3z75M4MrPsKyaWsA2LJgp6qQ0Rt1xKyPZdui3aqCVbJJHN+bUCwhZXKokQAAIABJREFUfonLlRODUU9YlVAGjutL6x7qW8ri4B/kx761sT7PH9oWx4RnppKdkUNY1VB0PirRKbJC7Jaj7Fy2j7AqIe4dg0NyogCrftvA+tlb+OWNWcz+fD5RjathUHE6Gf2MdB3e4arvS6Po+BkNTHtpGB8+0odHe7flucFdWPTBI9StGk7iuYtF2vk9eXf7As93aepZekFyOBn7w1yyrXZyrHYsNgm7w8mBhLM+qx1ejl1y0Oe1ySzZfrhI468HiiKhWJcgZ7yGnPUViqPsxKyXCo0cXJEjr/b+kKSjpz1st98++TM1GlQlODxINUxRQKBchXKknEpV73NZQugMOvo+0YP73hyC0WSg6W2N2LF071XNWVD8PIDskN1dkVQ9XvmQbBI7l+1h4z87PB54lx5gl3wAc75chMnPyEvTx/DukM9xSk4ckgNzgInKtSvR30eDDo1rh04U6dwk2t2e7RKt6lZl7/FkbAUoIc1qVaZZdBWqhAWTnJrhdb5dwxoE+pk4n57NlKXb2XooEZNBpzqn5HASEuhPutNScKo9IDll0rMtvDdjBWHBAaqVEEsSRXGCbS2KfReCLhL8+iGI5fOdt6Gk3QuOY6DkAgaUnClQ4RsE023XdG3Xg1KhkQMc35vA+cQUL0Ftt0jM/XYJfUfdoRoGZ/Qz0qJbY1rc3qTQLNCrwSk5WTF9HUMqPcrT7V8lOCxQtXmxwUe9kaumCFYcQSe6KxH6wpZr489P59OmV3Mm7f+cwc/1446RnRk7YRTfbv3oqlL8NUqWwZ2aEmA2+Wye7Gc08NZ9PRAEwRXaZzK40+oNOpHQIH/evr8HKenZ3PP+r/y9MYbE8xeJS05VjZRRgIvZFnSFOMzzY7U7mLx0W7HvTZFzkXP+QE5/ETn7BxTned9jFQvKhSEoGc9B7s8oWZ+ipHRFkf41SSq5M0E6mifEASTAipL+Aoqicq/OZBTpqOsBUQooNRr5xfMZ6FSazCqKwomYk9RsUoOxE0fx9ROT0OlFFEWhXHAAHyx+FZ1ex7zvl17zGPDsPBv1oa1xxO9PpE7raI7uPOG225v8jNRqUZPYzUeu6Tp80b5fGxZNXFHouNwsC/EHEjmy/RiNO9Sjdc/mqp+9xn9L+f+3d95hcpXVH/+ce6fuztZk0wsQEiDwAwJJgAQh9CIQQREQpUoTQRCQqoCIFKkiogERlaIiTWkCIoQeAtJDDekkm7J9p9/z++Od3ezszmzJzmaTzft5njzs3Pve9567IWfee9
5zvicS5oFLjuWOf73G7Pe/QDWTLZJOs/3mwzn3m3swdqiRHN5x3Ege/OlxPDj7XRYsX8NO40fxjenbURIOct3fnqcpliDVDa1zT7VTGdpcLF3VsX9ALjRdDfGXUBLQ8DvQWiAKBNCmWVD5Z8S/fcfrmu6G1GdAy5tmzDRDqT0XBj9r/t1HHzfHO5CC1EeQmVfTy9CaM83KXVwgBOXXbvCr9oI4chE5ELgVcIG7VPXaQszblq2mjMvboX7B+4v48NVP2O97e7L7Ebsw77VPCUVCbD11SxzHobG2iX/e/nSP4+Clg0vYdpopuNh66nieuPNZqheu6ta1iViCwSMr2euo6Tx19/N4aY/9j5/BIafvzw92/gkLP+oYnxs+biiBoJ9FHy8tyEZpe1Z/VcPQzapY/PHSTlUcg+EAZ+16CY4jiCOEi0Pc+MKVjJowouA2WXpHVXmEn31vP2C/LseOGFTKjw7vmHE05+PFeZ24I+QtHgKzsg/4fQwpj7B0VS2JdvnqjiPsOK7r/2+8pnug4QYQH2gCaLtKToAm0JrzoeImcIYj7lq5DaL/ZK0Tb0O6GtJLwDcaJN+bpAeYN2dVD11znLkGL/OW24zWnA2DH0N8G66Mbq9DKyLiArcDBwETgWNEZGLnV/Wc0soSSgbnVshThVkXmLS6cHGInfbdnm12Gc9r/5zLBfteyVm7XrxO8rP1qxt444m3ef3xt3jizmc5764zOP3m47t1recpn8z5gkPPOIAfzzqd8ZM2Z85T/+Px3/2bpZ931AQHiDfFufP9mxg8Mn9n8EDYj+M6+Pwuri/7r6+rF44vP1jEz/5xPqWDSwmXhPHl2LjyBVzSKY9ENEGsKU60IUbNijouP/z6rh/aslGSrxDH7zp8Y/p2HDB5Ql6lw5KiEC/dfCYPXX48Zxw6jVCb8KYjQsjv45SDd+n0/pr8GBpuwjjsZrKdeBu8Beia76Erv4a3+ki81IKWO+WbObOqBik6GsjRhcoZDL5MPUryLfBWY5x7W1ImNLMBU4gV+VTgc1WdDyAifwVmAvnTLdYRXyev9ws+XJz1+Y+XPcAjv36yd+lyGclcgOqFq7hwv6tw/W6322OtWVHDf+6fzc2n/J5kPInnKR+8NC9vXnhzYwwR4bQbjuMXR92cc4zP7+fhVX/EF/Dx6G1PcfclD+ALuHhpj6rRgwkEfXzx7sIO17k+lzFbj2TsNqO4f9HveP1fc1m9rIZwJMRz985mwQeLGLHlMBprm1j88bLsX4MqKxasZMlnXzFqvO2xOdA4Yf/JvDt/WVauud/nMHnCaC47dj9UlYMuuYvq2mwNIUeEXbZeq0d0/P5TGFZZyh///Sar6pqYtOUIfnDYdMYMqaAzNPoY0M0euJpJsU2+C6sOxCs6EcJHQuMtZIdOBNwxiJt5Gwh9HRJvQPRRwAFxgCBS/ru1IVcv39t2CtK50zc3FArhyEcCbb3oEqDDV7CInAqcCjBmzJj2p7vF1IMm8cSs53KeG7bZkNafa1bU8o+bHyfZB9WYLeEZcQW/38fEaVvxxbsLaMjR89JLe9xy6qys7jnxaCJnZ3BxhEl7G6GjaTOn4PP7TBf6dkTKiwhmZEC3230bvnPJETTVNzNt5hS2nbYVq5et4btbnNkhjFQ2uIQ9vrUrYCo09/jWbq3nDjxp79afT9vx/JzP7bgOyVjhG05b+p9dthnLed/ak1seno0qpNIek7cazTUnHwyYKszLjt2Xn9z5OImkkb71uw7hYIAzZ2anNh4weSsOyKH/0inppXRcBXcHD5rvg7IbITAZEm8BaRA/EEQqft06UkSQsqvQ4pMh8SY4gyC4OyJt2kb6d4AcG58QhsCGnXa73jY7VXUWMAtg8uTJ6xQA/t7l3+a5e1/qECv3B3wcf+VRrZ/nvf4Zrs8lyTq2tXKkyxi1phW3yGXmmQfy4I3/yrmB6ThOTtEjVUUcwXUdUsk0/qCPYDjIaTccl3keP5tvP4bP3pqfbZ
cIh56xP+l0ml9+51beeOJt0skUvoCPJ2Y9y7X//inb7DKe+xbcwVXfvrFV1XHKQZP40W9Paf0C6Iy9jp7Oks+WkYhm/+6CRUHGTLTKdAOVb35tew7ddSILq2upiIQZ3KY7FsDu223OPRcczZ+fncui6lp2Gj+SY/fZiaqy3umjqMYgPrsXM8Qg9hBS8QezSk/+D5yhENoHkY7/v4tvM/BtlnMmcUeg4W9C9BHMJitAANwhSPgbvbCx7ymEI18KtE0SHZU5VnAGDa/gz1/cxlVH3sRHr36CApHyYk6/6XimzZzSOq5mRW3efpyd4bgOgVCARDzRrc3GWFOchR8uYdtpW+V05F7Kyxu+22bXCWw1eRwLPlzMNruMZ+YPD6RymHkFbahp7BAqMvYJRaVFPH//y6ZPZuYLLZVZfV9x+PU8sOT3DBpewS0v/QJVRVW71Fdpy8yzDuK/f3uFZV8sJ9YYxx/w4fhcLr73bFzb7HZAE/D7GD8yf/OUCaOq+MWJB/V4XlUFrxrEjzjt9n/iL/Z4vg54zeYt11uORh+C9HKI/hVKLkD8PdO6l9LLIbAT2vxn8JogdBBSfCLiFPXezj6kEI78TWC8iGyOceBHA98pwLw5qRxawc2zryKdShNtjFFUGs5yVKrK/dc80uN5/UEf4jgkEwnjgLuB4wiVI8p55bE5Oc9vtv0YUvEUCz5YlFWBFyoOctRPZjLtsCk5r/vg5Y9zZtikUx6vPvYmyXgyZ+w/2hjjs7e/ZKtMk2QR6XHKZbg4xG/euIaXH57D28++y+DRgzjopL17JRds2XTR5Pto7fmZ8Imi/m2R8psR14hlaXo5OTNOuk0AQl/Ha7ofGq6lNU6eeB1dfSwMuh/xb9fpDAAaew5tusekPAb3QSruyioo2tDptSNX1ZSI/BD4Nyb98G5V/bDXlnWB63OJlBd3OL562RpWLu5eiiCYhhGDRlbSVN9MU21z1xe0IZ3yeODqh1mxKPf9Fs1byh8+uIkL97+K1ctqEEdIJVJ84+yDiDXF+cMl9zFm61HsceSuWWGPD1/9OG8jjUhFMauXrsltkNAtmdqu8Af87HX0dPY6esOOC1o2bNRbY9L5tI0GUPI9dPUxUPW8+dx8H9BVWrADUgW6mo4ZLSkT8068QMc88RjacBNSeXens3uNt0HjXbSGU1ILzAbs4H8hzsbRS7YgMXJVfRJ4shBz9ZZAKNCtsEjliApOv/E4RowbxnkzLifevG4beTUr6/I2DgqGAwwZU8Xd827lkzc/p7a6nmGbV3HpIdfQsPppoo0xQpEQd110L7e9/kuqF63ixlN+Z7r45GHCTltQsu/2fP6/LzusygNBP+N33iLPlRbL+kWbH86xeeiBNkD8JSBhQi5d4oGuyH8u/gx5N0uTuZPn1GvCfCl40DiL7LeCBHir0ea/IpFTumFf/7PRVHZ2l5ZuP/lWtOFIiEhFMbe9fg2Dhlfwy2Nv7VJr3HEdHNchlUMKNBlLsdm2Y1g0b0nW+UDIz7TDJvPL79zC/PcWMm7HzTnm4sO5/+qHWL20plUaN9YYIxFNcN3xv+HTuV90mi7pD/rYcqct2GHPicz+x2u8/9I84s0J/EEfrs/lp38/z8axLRsO6cXkDJtoGrzlaHppm5L53hDDBANy4GbrnWt6JVp3ISQynbKcYSbXvMPaL27i9xuJI99otFZ6wp7f3i3n8ZHjh3HurNO559PbGDTcbCwu7qSK0h/0UzIowoV/+iGHnr4foRxNZQNhP0df+A3G7TCWUHGQcEmYYDjAFjtsxn/uf5kX/v4qCz9awgt/fZmzdrmYlx+Z00Hf3Et7vD97XpdaMEUlReyw50Rcn8t3LvkmxWXFOK5DOuUxYfI4m1Vi2aCQwM4guTcJ1R2HuKPIWaTTY/yZEvv21ZshJPLDtfdUzwhnJV7DrMZT4C3J82XigLvx1EwMuBU5wNm3n8L89xby1fxqUokUgZCf8iFl3PTiz1szQ1qYuNsEvv
xgUYfNRcd1OOHnR/H10/ajuLSI3Y/YhZcfmUMinsBr02jYcR0m7bMdM46axgevzOO1x95i8KgKnv7jf7O6y3ueEmuOI3kEjhRFU7m/UILhAIFwgF8+ZXRjli+o5pKDr85avX/46idcuN/P+f07N2y0fUUtA4zQgdB4e6bkvWWRIkAcak5AfRMwa8kupDu7xIXS66H+Eki+2eZ4EvXq10Y+E6+Bt5KOMfmWEW1tCCDFx/XCpvXLgHTkkfJiZr17I2898y5fvr+IURNGsMvXd8op/PTtC2by3L2ziaZirTnfwaIgB5+yD9++YGbruEAowG2vX8MZO/+EmuVru7LEmxOcvdulnPnrk7j6mFtay+TzSdCqp/gCvqwwjOtzGTl+GMu/rO4Q5nH9Lmf/9hT2/s7u+Pzmr+tfdzzTmnLYQjqZ5qsvq/l4zudss8v4Hvy2LJbCoF4t2vQXSMwGZxhSfAIy6B9o428h+gToSkwsWzFiVfNAIuBMgHSmZsIdAekV5Ba4ao8L+KD0KkSSWWqHhjTUX4YGd0fcQWjys4yOSwfLQSpAo5mSfgdKLu9x6mJ/MiAdOZhinCkHTmLKgZM6HTd0bBW/eeMa7rzwXt578SNKKiN889xDWpsvt6W2uo7m+uzXsFQiRc2KWq444lc5Y+jtKamIUDm8nOpFq0jEk+ZtoaqMnz14Hj+ecTmpZLo1vh8sCrD7Ebuy//EzsuZY8tmynPdyRFixoNo6cst6R70adNVh4NVi4uLvofHZUPoznNIL0cAkE5tum8GCgqaQ4mMhdADgQPJdtPacbizQ/VB6BRI6AHFK8Rp+DTkLAB2IP4snEWi8Ic+YMFJyPgR2Ba0H3/jsis+NgAHryHvC6K1G8vNHL+xy3CdvfkGuFJVYU7yDgFUugkUBjjjnYI655AjeeuY9Fn64mFFbjWDqQZNwfS53zL2OP1xyP28+/Q7hkhAzzzyIb57bsQ/mDntuy1vPvNehwjWVTDFh8rgO4y2Wvkab7gavhrWaKQpEof4qvOA+SHpJntVwFE0txHFMyFMDu5LbLQngB1xwKkwuemAS6jWaHPL48+ROY1TUq4PGq8mt52LmI3wIIvnj9aoJiD0H6S/BNx6CeyOy4bjPDceSjYAhYwabpsvtcH1u3kaz4gjhSIhUIsV+x83gmEuOwHVdph40iakHZb8tDBlTxcX3/qhLOw44cS8evOGfpJKp1th+sCjAtJlTGTHO9km09APxF8jtKJth5TTUvz3G3bRfERdlhTBEAmjFnbDmBNaGVxwoPj0ThqmA0KGI46KpxejqI01IhHzdtBTj4PPsG/m2RSpnde7E0yvQ1d82q3VtNhu4ThUM+mvHStV+wjryHjBpn+0oG1RKvDmRld7oD/pIp9IdKkJDxUEuue8cKkdUMHyLIZRWFqa4oLi0iN/OvY4/X/F3Xv3nXELFQQ77wQF846yel09bLAWhU4eWguT7RsxKg6xNSfSDOxRC+7aOVPWg8deYWHqbFXbT7UZTXIHGX+GV3QBNd2aaT+RKNc7Ez0vOR/DQnGMcCM7o0hlr3c8y+e4Ze7QJ0nG0/lqk/Hpjc+I1NDEXcasg9HXEKet0zkIjuUSd+prJkyfr3Llz1/t9C8Gqpav55bG3Mu/1zxARho6t4qK/nMXLj7zBo7c9Rbw5jqpx4rt8fWcufeAcm0ViGfBo/AXTgKHTTcpwRk52NpCG4MFIydlZTk9j/0Xrzu1GfnlnmS4OFJ+FhA9CfFugqQXoqkPpmNMeQgb9DfFvk/+51ENXbEvOsI2EkSFvomtOhtT7GZvDIA5ScQ8S6H0D9g63FHlLVSe3P25X5D1k8MhB3PTCz6lf00AynmrNR99qypZMPmBHnvnTCyTjKfY+ZnemHjzJOnHLJoEEZ6DOUPA6auGvHeQiob2R8l/mHaLx/3azSKiTBagEcUrOXPvRtxkaORUa78SEfxQIQdHRnTrx7mB6gb7L2i+waKbN3NlQ9cJ6+/dvHfk6ki
tMssOe27LDntv2gzUWS/+iyc/Ay1dG3zIohnqNoB4ieZIDnDKMW+o6Ayw3Au4WaHo54q7dL3IiZ6HBvdHo44CHhA7u1opZxEEDe6x9i2jFB8EDIPowOd9CtBbSX4Bvy3V8jp4xICs7LRZL36KaRBPvosmPTf1Fen5rW7X8pKH+EnTFRLzlk/HqLke97MbMEj6cvBuTOTGdfrKKelKfoSsPQOOvZM/t39a0fEsvQWtOwquegdd0j4lxd4KU/RycISDF5j5SDO5IpPQi8rrQ9Ryxtityi8XSIzT+Alp7AZACTQJimjloV0U8mUIgAOoh+g80MceoDGZS+cS3BeqfDskXumeMhCB4GMQeYm1GjMme0dpzYchrSOYLRtPL0dVHZHLZM+JdDTehqfnGWee7hTsUqp5tl364FyJ+NPwtaJyfyZxpgzMI3PWXCmxX5BaLpdtoaonZ1NS6jENMAHHwFtHzdm1J8L6C+Iuo14Am/oemlyFFh5Ffg6XFZQkQgpJLIfURuQt9kplzoF4duuYU47yz7IxB9GE03bn0tUgACR+MRM5EQvsj4jfHi74N/ikgYcy6uAiIQNm1rHt4qOfYFbnFYuk2Gn2I/Prhikn7y2SU+CdDcm4n4wGNmoYOteeABEzRkH8ncMrBS7LWGfrBHQOhQyDxolnx+rZBcNG8G4oKBFBNGw309Bf5zag9H/UWg5SbOH16IThVSPEpSGifvNeJ+KHiTtNiLjHXNMqIPQU1x6O4aPgopPTCPq8UtY7cYrF0H28luVe/LaTBPwVn0H1m+JpTjePNGzQOQPJtM6dm0gOTb5lyeWcKxJ8DxORml/wEcUrR2ERTxp943RTiaQJT9dnergBac47ReNHmTmxIQPLVzM9tWiymF6N1P0bTP8IpPgnApDJGHwRvDRKcYboJiQ8CO+FpHBpvYu2KPw3Re1GtRcpv7OR31nusI7dYLN1Ggrujscc7SREUcIes/eiOpdN8b1J0XLEnIPE6MuQVxLkh64x69Wjtj4BYu2kdIGD+K66J3WsUNP8qvFtoFBpuRYuOQWMvQt1PaJHA1dhT4NsGKv9kVtx1P6VjeEkh9jjqXdGn3YZsjNxisXSf4L5ms4+O2vyZAUjR8QCoxiH69/xzBaYa1cFciAvtMloAiP+H3G5LIPwNKD4OnMG0xu4Lgbho8mOovxiTapgJ92izicFHHzWfvXw95xVNLyqMLXmwjtxisXQbER9SeS+UXADuNpjNPb/RQZEiKP0ZEtjRDE6vgHzxa2cITuWfIbQnubv7hIykbXs0Tu4VvgfpVdD8FxPfLiSazEjr5ngWjaLRf2U+dOJOncGFtakdNrRisVh6hEjQNF3INF7Q1Hzw6sC/DSJtuvS4VaatWy7czc1ckbPQ2HOZUE0mlZEghL8NiTloYHJrhggAwRnA1Tkm1EwD5kIncPshsAviDs8rjNfaBSmwJySey3G+AscdWmC7srErcovF0ivEtwUSmNTqxDW1CK/x92jjXa0OuwMZxybuCGTw41B0LPgmgrsdoBC9D609E62ehibeRjWKl14JibkQOgizudmelqYVBcQ/CSm/Bfz/Z946OhA2RUYAkZPI/XbhounVhbWrHXZFbrFYCobX/Deo/wVmA9Mjb255/PnWH8UdipRegqYWrhW30rXxbV1zTMtPKD7Mpmbn/W0Lg4DWI07GgVfMQtccn7m3aYpB0dEQnIF6NVBzOjlTLbUGbbwRKcuvMdNbrCO3WCwFQdMrM068G5uM2oiXXo0kXgRV4wyjD5O7iKbtKjuVZ0xfoJD6AvXWIE6lEdga8jLEXzKhpMBUxGcanntND2d9+WSThtizYB25xWLZ4Im/AOJ0L7rhjISVM1BcExavvwL8kyiIk5ZyU3lakDBLCvWitGh8iQQgV4FQ6iM6/QLr425CNkZusVgKREtVZy5ajjtAKNOoIQ40ZzY645kq0FCe67tLEBPeKKDjrP9Z12P825Hfdj+Ev1k4e3LQK0cuIk
eKyIci4olIB7Fzi8WyCRGakSdLJQCBGeDbCkKHQtEJ5N4UxKQctmSBrBPxjJ5KoWLoarJn0l91OkrCR2T0Vtq7VAHf9kjkhwWyJze9XZF/ABwBzC6ALRaLZSNGnEoouwazKg5hVsUCeOAtQ0rOwyn/FeKWk3sTVE3XoNJfQHAvk87X6xV6AZAApJd3PsQpQwb9I2OzHwgZMa2KPyGD7s9Oy+wDevX+oarzANsFx2LZhFCNog03QvQRo3MS/BpSeinijsQJH4IGdkEbbobYo7RK16Y+MaqJ5TcbJ91wc46Z/Uh4P8S/DRI+xNwr+SnacA0k3gYJgjsKUkuA2gI8STBjX66m0W0fONGtBhHiG41U/r4AdvWc9bbZKSKnAqcCjBkzZn3d1mKxFBhd831Ivkfr5l78eXTVW1D1LOKUIm4VmnyNjhuXMbThepyqf2dar81irRMNQdEx4JuAxp5BY0+DFCPhb+FU/jFrFm/5unTh8mECEC36Lj4Ttw4dBg3XZDYrI0BzxqaWN4YwFB/fpzophaBLRy4izwHDcpy6VFUf6+6NVHUWMAtM8+VuW2ixWDYYNPkBpD4gO0PDy5SqP4wUn5DpGJRHdyS9yBTHJOfR6uidIVByKRLaH609HeJzMA7VQaP/RItPAm81RJ/KHF+H+LczGCofRuJPm83V4B6If2u85n9A+pOMhG4UnGHgHw/Jd8CpRIq/b5z9Bk6XjlxV910fhlgslo2A1KfkzkyJQfJ9wIRa1RkMXo5mDc4QU+CTXkJr8Yy3EuovR0lDosWJg1kVR6HpdnrXxxOIXIDjG4y6R0LsSbTpDyb1MfYEWV9K3iJIOTD4RSTxLBp9EuKvQtGRSGDDzeeweeQWi6X7uFvkOREyWSktFP8QGq9r1wItDKEDIPog2U7ZM23imv7UiTxuZ068Tb/OfNRfhJd4HZJvmtRHbaa1AUYWHnjLYc130PSnreM09jQaOQ0n8oNO7Og/ept+eLiILAF2A54QkX8XxiyLxbJB4t8h48zbap0ISAAp+tbaI0XHQOQ8U5yDa+RqSy5C3NGmtL0D0cyfdXVJXUVrkxD7B6QXtPmyyHONepCa125cFBrvQNMr1tG+vqW3WSuPAI8UyBaLxbKBIyJQ+Se0/ucQexLTEWjnTPPiIKopI3UrghQfhxZ9z6zKJWxCLok5mcYP7ScuguABkFqI0fzuLi2Ovzvbbt3dmmu72dkGcSHxKurbFm3+k9kHCExHio5CnNJuzt032NCKxWLpGd5KU4rv2xzc8RDcFa05LbPBaTrLS+lFpmGxSHaBj3+KWdGnPmVtxooPpBSJfB+VYmi8nrVOtyU2nq+xs0fe4qJ1ImwaZ6Tey3FOjGRv3eW0OvvE22jzX2DwoyaPvp+wJfoWi6XbaPJ9dPXhEH3MOOP4U1D/U0gvwmxexiD6D7TuopzXiwhS+WeT+idFQBBCByCDHgIciD2Mccwtyol5VsdZdNLcubvIIAjsiVTcTt4vBo1B0/2YN4YWm2LgrUYbZ/Xehl5gHbnFYuk2Wn91ZgOzxZHlcrIxiP0br/kRNP4a2j4mnngTYo9h+mv6If4ipD4xGSTpxXRZoNMXaCNSfj0EdobU+3kGpYGGHMeTmSbR/YcNrVgslu6T/F/Ow6rtu7olof4KVByQEFTcjfiX76pWAAAgAElEQVS3QdMr1zZPbnt9zRkQ3KOTrJU+RgKQWgC+CayTaqKsjZGr14g23Wn2ECQI4WOQoqMRKWQIKBu7IrdYLF2iqjy99E1T7JOD3MGPKGiTCT2sOREv/oYJy+TczIxnNjr7aW2pcXBHIE6x6VSUV8UxF2Gk+AQzjcbR1d+Cpj+Y3qGpT6HherTu/L6wuhW7IrdYLK2oxs2mpTOktTPOJ/XLOPetPxFhKTO2clgWL+aPy7dmXrSSMcEGThj6MUP8zXz3k/0Q4MCKhZw6/COK3TYhFY1Czcl0GjZJL6DXhT/rRBCCM5CW9nPl16GrjzYaK51m0B
QDSdMlKHSoORR70uShZz1nFGL/wUvOQ5wKUzEqgYI+gXXkFosFVTXhgKbfYlqcpdDQoUR9u/PIJ39B00NZ4YV5rmYUv1o6ibjn4uGwOB5hbsMQTh72IfXpIONCdaTU4TfLtmNMsJGIm2Kv8iVE3DjdClmEvg6xpzFr/Fj3rukxrgmFaIP5OfwNpPSy1rPi2xKqnkej/4SGn+eZQ6DsRiSwPeIObj2qiTfyhIfisPoI06pOHLToRCRyNiKFCYpIvlelvmTy5Mk6d+7c9X5fi8WSG6/5UWi4vF0lpvB6/VAu/HJX4mqaRkhrL/ns0ENQUlw8+i1mlC8DIK1C3HM594vpLEqUcNMWrzApkqNkvwNFgIJTnlnZ9oV/CiGD/m6UFCWISK5Gzgav5ge5NzKDB+BU3JZ1SNPL0ZrTMwJcXRGGyBk4kdN7ZLmIvKWqHbQCbIzcYrFA4x3tnDikFH66cCpxbdEVB0XIFT9OqMvkSDUhJ03ISVPspij3xfn5ZnOIej4u/HI3UtqduHMzEO1DJw5ICI0+CtrcqRMHkLLrMhugfoy79IM7Him7Omucplejq2aa7JtuEYWmu/LuOfQU68gtFgt4Szoceq9pEF63nC/4xKPcnx3/dgSGBKKMDjaSVof3mgblupLcG4t9GCnQWmj+C7ryILzV38Fbvj1e9TS8xjvQ9h2OtAmcUbRu5/onQeWfO1RyavOfzdie5LRrA4XaD7CO3GLZxNH0MnI5FO2mEwfYs3QpPunofFUhIMYJZn8puFB8OpTfnpW6t/5IAg2ZPqExo9TYeAda/9PWESYD5UhIvIhx0B4k34Y1R+fIjX+NHue/u6O6fCPoLtaRWyybOol3yNVSbfviVSS1Oy7CY7A/SizdMU+62fMzP1aamW91mzMK7lgjUasJepbu11fEIPpPNL0y8/HpzKq57So7ZZx+vF13S3c0PXOnIaTkkt6Z2wbryC2WTZivojX8bVk9f1+1OcsT4axzn8XK6U4It9QXYJdRlzE/XkpzxpnHPYfmtMuVCyfjI82JQ+chWeESD+ovznQaitKnoZSeIEFIfwmApj7PnYGicUjPz76s+GSgfUqhP6P+GMr8yXQo8m2HVPwOCe1TMLNt+qHFsony94Wv8utPnjYfdGt+s3QrfjTiXQ4dtIAXakdy27L/I9mNtV59KsWw4hEsjh/L/YvnsGPxSqqTRTy9Zgx16TA+x+OPK7bhnhVbc8XYN9m9rJOO9M4w8G8P8edZ//nkmLcDdzQA4puASlFHZy5BcMehyXkmpCLlENofKb/JhGa8JsCDwG5I+Q1GZCz5PrgjwT+5YCmHWSbZ9EOLZdNgWXMNv//sWT6oW0SRE+SLphWkNLsm0ydpfOoRa13jdS/ksWtFFdsGXqEuDWHxCDkp7lu5Fc1edgw4KCnu2/oZ/KKU+RKEnPabg34oux7qLqTvNVdcssMmAv6dcAY9AGRi5Cv3N40oWsf5jKP37wixpzLH/SCCVNxljqeXgVOCOOUFtzhf+qF15BbLJsAdnz7DPfNf6EYAQ1m3eLXiwyONQ9BJMz5Uyxexsg6O3MHDxcMV+L/iVVy3+WuEnXQbnRYxK1ytWQcbekIQSs6FhuvIDuv4oexanLCp1NR0NVr/i8wbgkDoQAjsBvVXYkJCbZAKZMgriPRdoCOfI7ehFYtlgPNy9cfc++VLfRyFFlIZ+deY52Nec0U7ES2Dh4OHQ9hJcN3mr1Pktl+Ra4GdeK52bgApaLgpx7kk1F2EFzwAxwkg7hCk4tdZI7w1p9DBiQOQME2b+6G3p3XkFssA528LXyXZPj+6j0nh4mhHKa2ApNm/fBGD/dF2m59tKaTeSr57pMmf852EhmtR3xhjR3BvxNe2V2k+24SutdP7BuvILZYBTn0y1+oxH4VMA9RMSb+ZMyQpti1ew/mj3sYVcHLknQPg2970zMy56u2KfCtwMFkjLiaHvAui96IEAQ8abkUjp+JEzjJ3CB+OJt7OYZ+YGH
k/YNMPLZYBzl5Dt8W/zlrY6x6Q8XAAjymR5ewUqea8Ue9w67iXCLmK39E8Xxl+KLkASs4BiRTY3p5+ScUxTj8OjXeiyYyGSujrENytTQu7IBBCym8uuKphd7ErcotlgHPk2N14ePEclsdq1+Hq3q3QFZefbzaHCl/HDBQFommXcCZOntAAgdCeSGAnJLgzHmFo+CU9a8bcuTXrTgKNPYX4J5oGEeV3QGIOmnjZSNOGDkHcIQWys+fYFbnFMsAp9gW5c9fTcNZ79aQJc9Sncq9S0yrcumwH5jQM4a2GKp6vGW6yRGpORmP/yVxfSJtLuzFfZyXza78IRAQJ7oJTch5SfFK/OnGwjtxi2SQYGirj8NFTCXaaGqd5fu4NwoOrxhFtV76f9IR3Gqt4dPUWnDd/OiVugn3KFyGpdyHxMlp7FiQ/onCbhy6UXQlSkuf0BAgdBMU/pGOFJkAACR1cIFsKj3XkFssmwvkTD+WEcTMIO53FcZV1d+K5r3t41ZY8XzeSuOfQmPbRnHZZFC/hikVTAdi3fDHjwnX4nbbXpyD2V4ichYlB+zEblZ25LD9rV9wt/3XAGQvldyKhA8gfplGc8ltxSs6A0p9l7hnARJ9DUHwS4p/Y5W+gv7AxcotlgJP0Utw07wkeX/oWSS9NVaiU4b4K5jeuaDdS8JEmjaA9WuNpJojSPmxhios8hKsWTeUPyyeyVbiW6mSYD5srkcz4mYO+xM0Z8VDQNDL4yUzXoBTa+BcgX4OKZPa1CEgZUvUEIgHUazQ6KbloI+PrFH0bDU7PiGalILSP6Rq0AdMrRy4ivwIOxdTSfgGcqKrrsqNisVj6iFPfmMWHdWsd1YpYHVCXc6wp6ulpOCNfRni2d16WiLAsYTJR/JLmiMrPGRlqZvNgff6pveWIbzRETgFAmx/sgXkKJCD+XwgdYLJMpCR3wZE7LttydyQUn2xm0SQaf9lorgSm9knpfW/pbWjlWWA7Vd0e+BS4uPcmWSyWQjGvbikf1XVsGpGfdS/R7/55JakOMfVxaOWXlPmTeVQWBQJTsg8FepinrRnZWTBiVSU/pqNkbwgpOS/35cn30OrpaO1ZaN1FaPXX8Jru7fyWXi1e7QV4y/8Pb/lEvJoz0PTyntndQ3q1IlfVZ9p8fB34Vu/MsVgsheCdmgXc+fl/+KRuaQ8j3uvqxPN/AQgekyKreLuxiraZKE/VjGVlsoibxr2Ss5wfKUFC+2UfCx0Escd7Zl26Fq29wDRyKPo2WhqGpl9DegX4NkdKfoIEp3e8ThPompNB2729NFyPBiYh/m1zXOOhq78D6QW0VoDG/4uufg+qnkMk3OGaQlDIGPlJwN/ynRSRU4FTAcaMGVPA21oslra8XP0xF7/zAHGvGxWMPSAoab475GMOGbQQB+XfNaO5Z8U2NHu+jI5gbme+RbCOLYJ1GUe+loT6eKuxiiXxYkYFmwBIK4j4cdxRSMXtiASzJ2u6u2dGiw+aZ2X6kQbQ5ruRijuRqhwNlduTeI3c5fgJtPnvED4MbbwL0l+AFIN/B/CNB++rdtd5Rto2+iQUfbNn9neTLh25iDwHDMtx6lJVfSwz5lKM5fflm0dVZwGzwKgfrpO1FoulS26Y96+CO3FQbh03m62Lagg5Jkh9VNXnTCtdzvGf7Eu6NUrb8k9bCEuCfSuWcO6Id7lowW7kcvI+8VgcjzAq2ISn8HFsK7YbfTu4o5F2y3T1miH5bvdNliGmP2erHG7C7J+u+R7qDIGi40wOuLho6gu0/ipIzAEJQfhI8OXLUvEg9jwafRhT/Zkh9SHGpeYK4jejqU/6LJO/S0euqvt2dl5ETgAOAfbR/tDEtVgsrcTTSZZHC59vsFNkJRPCta1OHCDoeAwPNDO97Ctm143MHBUcPPYsXcJPx85tlai9cdwr/GrxjvxzTfamYlJdNgs1AODhY5vhFwEJSMxGfVshbps1pHSmo5IDbSC3prmCtwIab0PTX0LkXHT1t0EbzT
lthOb7wb+TibHnnLt9xk8L+QS1ihDfVt23vYf0arNTRA4EfgIcppqrJ5LFYlmf+B2XoFuYhr5t2Tpcg186rjSL3RTbhtdkHQs5KX46di5F7lqdcb8oPx71HmODa+PNAVLsUbaUoX7jOnyiOLVnoKuOQGvPRVfui1d3GZpRURQJg78nErFdiW5lenQ23plJS2z7JRGH5P+g6Hv0PAItZLtWB5xiCPddQVFvs1Z+A5QAz4rIOyLyuwLYZLFY1hFHHI4asxshp7DOfEWyiITmaK6cdlmWLM46Vu5LcMXCqTxfO5J0G9/o4nFgxWJaNkdTOCjSpmlzGlOwE8usjhPG0TavzRKR8uvI1SjaFO+sgzuTICTfJufKXXxIYEc6L9vPRQh8/0drQVFwL2TQP/psoxN66chVdUtVHa2qO2b+nF4owywWy7px6vh9OXTUZIKOjyI3gF9cXHFaC3DWhdl1I4h5bpZj9hRS6vBszeisscsSxcyuH8lViyZz/vzpeJlrXFGK3BQtmSseDi/VjeDKxVM7uXMMmv/c+kncEVD1ArhbYFbKYSAIvgmZn3PRIl+bA02Av8Xptj+XQt3N6LmcriKVs3CGfYAz7COcijsQd3gP5+gZtrLTYhlg+ByXCyYeyg8m7M+qeANDQ2U0pWI899X71Kei/PerD/i8KV+MNzdJdTn1s724cuwcJoRNDH5BrITLF07t0M6tZVMz6vl5u3Ewr9UPY3rZcmKey4t1I7JGJtTl1fphrIiHGBrMUz7vNWR9dNxKqHoaTc5DU/PBGQriwpoTclwcgPAR4I6Axt+QvfIOmrZtxadB7OF24fcgBHbC8U/AczeH9Jdd/IagVUag7EajiLgesY7cYhmgFPuCFPtM+l7I9XPUZtMAeGzxuvXLXZqI8P3P9qbMjeOIUpPKFeLIJq5+Zi2fyKTISl6qH5GVghiUNF8rW0a5L87Vi3fmyrFvUuFvH+JwILhHh3lVFY0/B01/AE2bNEMpBU2SteEoPiTyA5BSs7puvAXSCwEfhA+FyDlQcwamIsnop4MD4cOQ0svMHJHTM82gcyCVUHY1pFcgTrEJozilXf5eCo115BbLAGRVrJ5l0VpK/SHuX/AKL1Z/RMDxMTRUxqp4JyXx3aAu3Ta3u+tK0E+jFfx4/u680zS4deyEcA2/GTcbVxQXj6DjZYVtDEGQIqTk3A5zatNd0HgXrWEPjQNpo2KY/szY5RsHpVegjb+B6GO0Fi45Y6Dou0jxkWjdZZD6mGydFh9IcWtMW0LfQOuvA83e1IUgFJ+EE9qn0+dfH0h/ZAxOnjxZ585dt1WBxWLJT8JLccV7DzK7eh5+x6UpFe+0+dn6xVghwKMTn2RooLPYs0BgLyj7BY47OHsWVbR6aseKSzCr8sFPIFKMOBG8uksg+jgdVQ8D4G4G6fnkTBmUUpyha32UJt5Ca042q3/iRrfFtyVSeS8iXb+ZFAoReUtVO6TuWBlbi2UAccvHT/JS9TwSXoqmlClW2RCcuIPHMYM/5S8T/sPW4TVE3K4KlhQSr5hQSAfSuZ04gNbDqgMgNc+oHUb/RW7p2gSkF5E377udSqIEdkYG/wciP4aiE5GyG5DKv61XJ94ZNrRisQwQ0urxryVziXuF6kBfKBQFHluzBWkcdi/9qpvXxSH6GFp8OuIb1XpUxIe6oyG9OM/tmtGas6DyATpfq8YyaoiZQqBWHAh+rcNocQchkRO7afv6xa7ILZYBQtJLkdR0f5uRoW2DCkVxaPb8PLp6c95urMLrdiqkD5LvdTwcuZjc+eQtxE1Mu9OOSAL+SZkmzy1zhYyGeckla58kvQpt/iva/ECfqxiuK9aRWywDhJAbYERofaW9dRawUb4/7CO2Ca/JjFvrZhLq44PmQdSl/KRVSKmvi9lSkKMfphPeF6m4w4hV5UHEb0IhBPOMCCGR05CqZyFyJoS+DpFzkKpnEN8oNL0cr+ZcdOXX0Pqr0fpr0JX74TXllZTqN6wjt1
gGELsM7vtONn5JU+VrJp/7He5rYnEswsfRCnJltLiifBKtwPVthq/kbPDvlH99LkHw75zn1HSk9GfkLASSEPi2xSk+Fim/EdwtaS0MkiIgCCXnIoEpiFOJEzkNp/xmnMhJiFOGF30KXbkfxJ/AVJzGMbH2ODRci6YWdfo7Wt/YGLnFMoCojvUutbA7JNWlKZW/bH1Fqojn68J528V5KowNJpGy65HAFNQ3Dq37BLSp3UiB4u93UEHMInQYND8GydfI+mIpOgURU80pof2R0P6oqgnTaB34JyFO7kbM6jVm8sbztIUjbdrARU7Nb9d6xq7ILZYBxJjiwfik7/9ZNxMgX/64h5DMocsCZjU/scRl/Oh7kJbuP8EZme727e0OQrBdY4kOKKQ/73i48VY0lb0ZKiJIYAckuEdeJw5A4mVTKdrJPVULLRPcO6wjt1gGEN8asys+pzMn1Pc4edyKg8e4yHBunnplVjNjET8y6AEjG9uqiZKZY/U38BrvyH+zxGuZlXz7ME8Kjf59HZ+gq41Yf8fORf2MdeQWywBiZFElt+x8PCPCFQQcH35xKXJzCEL1GYqQYsfi6qyjPtJsVzaYu3c7i7Cvoz3ijkQq/2hi24Aplc/EpBvvQBPv5L6dt4rcsfoktMswUVW6VQAZmA6ar8NzAIpPQPwTup5nPWJj5BbLAGOnyi14ZI/zWRmvJ+QGOGL2DWa/br0gpHGoTQU5cehH/HHFRAShyB/hpp1PxU2+gnp1EJjS2jRCNQGxp9DoYxmtlPbE0ehDGUnZdvh3ylRbtjejqLUPp3qNaMPVmQrPJBrYDSm9AvGNbR2umoT4f9D46+AOh5KfQcOVmbMpTMn/RCj9OU6gY6/O/sY6cotlACIiDAmV8cKKj2hI5lEV7Lu7syQe4dDKBfxr9QS2rtiOCydsQ6Rmf5R4RqAqhRadgEROQ1d/y3TsydubRvOeE99YNDwzU8HZUvIfBHc0hA42q/A1x0PqE1qVDxOvoauPhKpnEacM9ZrRNZmGydqMUTEUKLkSkZjp9xnYE/GPL+DvqLBYR26xDFBSXppffPBQpq6ycwTTlCLsBNi+cizLmtdQHaujOZ2rVVrXCOB3wzyx+0EQ3BNdOaOj6FT0L2h6IaSXkC1a1Z4iJJS/u46UXgWBKWjz/cYRhw5Bir6HSABN/M80R86Sr/VAY2jzQ0jkJLT5Hkh9wdoslYwtDRejRWfglHYU7drQsI7cYhmgLGhaScrrOqYiCN8cvQtnTNiPEr/JyfbUY+7q+Xxa/xUvVc/jf7ULenBnZXSokSp/FJxKSL5vNFA6DItC/AU6d+KAOwaCe+W3XwTCM5HwzI4nU/MzbwDtiUHT7/B0FcSeI2+qYfPdaHAXJDitcxv7GevILZYBSrEvSDrvph04CHsN3ZZztj6YWDrJ5e89yNtrviTiC3LU2Gl8Z/PdmTp4S47ebBrTnvlp3nl8IqQy93FQipwUV4x5g2Vxlwe/WMSpY8sI9yavIr0ErbsYlQASPhwJ7NT9a33jyCv/qLXQ9Cdyd71vIY42/806covF0j8MD1cwrmQYH9UtyXne57icP/FQUl6aE1//LU2pBIrSnI5z5+f/YXHzai7Z7nB8jkupP0x9Mrfs7GB/KadtXsH8mucISZywk2RO41AeXLkla1JzmLMqwr3jc6mrhCHwNUjMJrdCYQuNEHsEEDT6T7T4ZJySs7v3S/DvYNrAJeeRsy9nV28DkKNQacPDph9aLAOY6yYdm7dAKOml+Pp/r+Xol28lmnHiLcS8JE8u+x+r46bN2rdG74ovT5FMfTpKeWRPooGjuGvFRG77agd+s2x7ViSLSWqaZbEo73rfxwhTtcxRBP4JUH4dBHfLnOsqf1uBKDTdiaY6fjmpptDEu2jyPTSTySIiSMUfIXw4nYts5UfCh6zTdesT68gtlgHM0FAZR4yeipvDSSrgoTSl46RzxB4Cjo8FjSbOftK4GYwI5xbkSmqaN1Z9xmNL30
ERNNNcuYVoOsHTa0Yigx+Gou9B6FCk7BdI5f2IFCHho0waoTOK7gUJBI3PRr16NBPS0fgbaPV0tOYEdM3xaPV0vNgLqNeIOBGcsquQIS+Ss8lyp7cqNmJaGzg2tGKxDHBOGrc3z6/4kIZkjLjX/dLyaDrBr+b9k/mN1QgwtngwAXFJtMvb9onL4qbVpPJI6PrEYVTRIMS3JVLaRh5WFa27COJPm41PAAIZjfAoUAQ00jGGnYCGa0xuuBShxadA0+1t5gBogtpTUfxoYHek/FrEqUAD00wJfr6GEln4oew2o6K4gWNX5BbLAKcyGOGvu5/D98ftzZTKcd2+Lq0e8xtNhaYCC5pWkdB01uo+4LhsXlxFiT+cN8nRweGQkTk2KJPvGfGpLAecAE0igx5CBj+Iyeluj4fJMkkaAazGW/MUEmHGJF5G15wMgJTfAO4WnTx1APCBuyVS8Tuc0O6djN1wsI7cYtkEKPWHOX7cntw+9WSGh8p7NdeookFEfEFK/WEOHz2V3079PvsP355wDikAAW6efByVwUjHiRIvkzvtLwXxlxDf5hDYtRsWJel8hZ2E9Bdo8iPEKUUqZ5E7xBKA4lNxhn2EU/UkkqNL0IaKDa1YLJsYp4/fj2s+fJRYD8IsbWlOJ3h+38uzjk2rmsC0wRN4ddWnRNMJXATXcTl/m0OYMiiPRrpEMCvu9s7cB63qhIXqOOqa1nD+iYg7Ag0dBLF/szZbxgEJI0XHFuh+65deOXIRuQqYiXnXqQZOUNVlhTDMYrH0DQeNnISHcsenz1Adr6fMH6YhGcPrptMs9nXsuOOIwy93PIY3V3/BC9UfUewLcvCISWwe6djdp5XQwdBwY55zB5j/+rfLpCd2RRGtWS250CT41mqkSNk1qG88NN9r0gsD05GSCxB3UDfuteEh3VIDy3exSKmqKdkSkbOBiap6elfXTZ48WefOnbvO97VYLIXBUw9HHGa+cD1fxWq7dc1FE2dyxJhdCnJ/jf0XrTuXtVFeRcp/3RrW0HQ1urKrOHUIym9FvBq0+W+Q+gCjEuatPR86AKf8VwWxuT8RkbdUdXL7471akbc48QzFFO49yGKxrAecTI752EhVtxx5iS/EzNFTCnZ/Ce0Fwdch8TogENgVkbUrfnGHoL6tMqJXOXC3RUrPb1U6lKIj0PQKtPEWU/4vRVD0XaTouILZvCHS6xi5iFwNHAfUAXkFEUTkVOBUgDFjxvT2thaLpYB8c/QuvFuzkGgXIlnjSobhFrgDkUjIdAnKd77kMrTmFLKrP8NQehlO0ZEdx7tDkbJrCmrjhk6XfyMi8pyIfJDjz0wAVb1UVUcD9wE/zDePqs5S1cmqOrmqqqpwT2CxWHrNHkO24fDRUwg4PsJuIGeNZcj1c8ToqevdNgnuglTcCf7tzQrbHYeUX5vTiW+q9CpGnjWRyBjgSVXdrquxNkZusWyYfBWt4Z2aBTSn4vzu02dJqUdaPRRlv+Hbc9l2R7SGYyzrnz6JkYvIeFX9LPNxJvBxb+azWCz9y/BwBcMzpfiHjZrMKys/pSbRyKSKzdisswwUS7/S2xj5tSKyFWZ7eCHQZcaKxWLZOPA7PmYMndjfZli6QW+zVr5ZKEMsFovFsm7YYJfFYrFs5FhHbrFYLBs51pFbLBbLRo515BaLxbKRU7A88h7dVGQlJsulLYOBVevdmP5jU3te2PSe2T7vwGd9P/NYVe1QUdkvjjwXIjI3V6L7QGVTe17Y9J7ZPu/AZ0N5ZhtasVgslo0c68gtFotlI2dDcuSz+tuA9cym9ryw6T2zfd6BzwbxzBtMjNxisVgs68aGtCK3WCwWyzpgHbnFYrFs5GxwjlxEzhKRj0XkQxG5vr/tWR+IyHkioiIyuL9t6UtE5FeZv9v3ROQRESnvb5v6AhE5UEQ+EZHPReSi/ranrxGR0SLyXxH5KPPv9kf9bdP6QERcEfmfiDze37ZsUI5cRPbC6JrvoKrbAjf0s0l9joiMBv
YHFvW3LeuBZ4HtVHV74FPg4n62p+CIiAvcDhwETASOEZGBrgWbAs5T1YnArsCZm8AzA/wImNffRsAG5siBM4BrVTUOoKrV/WzP+uBm4CdsAo2rVfUZVU1lPr4OjOpPe/qIqcDnqjpfVRPAXzGLkwGLqn6lqm9nfm7AOLeR/WtV3yIio4CvA3f1ty2w4TnyCcDXROQNEXlRRArXrnsDJNP3dKmqvtvftvQDJwFP9bcRfcBIYHGbz0sY4E6tLSKyGTAJeKN/LelzbsEswLz+NgR63yGox4jIc8CwHKcuxdhTiXk9mwL8XUS20I04R7KL570EE1YZMHT2vKr6WGbMpZjX8fvWp22WvkVEIsBDwDmqWt/f9vQVInIIUK2qb4nIjP62B/rBkavqvvnOicgZwMMZxz1HRDyMKM3K9WVfocn3vCLyf8DmwLsiAibM8LaITFXV5evRxILS2d8vgIicABwC7LMxf0F3wlJgdJvPozLHBjQi4sc48ftU9eH+tqePmQ4cJiIHAyGgVETuVdXv9pdBG1RBkIicDoxQ1Z+JyATgP8CYAfoPPgsRWQBMVtUBqx4nIgcCNwF7qupG++XcGSLiw2zk7hocACYAAACtSURBVINx4G8C31HVD/vVsD5EzErkT8AaVT2nv+1Zn2RW5Oer6iH9aceGFiO/G9hCRD7AbBIdvyk48U2I3wAlwLMi8o6I/K6/DSo0mc3cHwL/xmz6/X0gO/EM04HvAXtn/l7fyaxWLeuJDWpFbrFYLJaes6GtyC0Wi8XSQ6wjt1gslo0c68gtFotlI8c6covFYtnIsY7cYrFYNnKsI7dYLJaNHOvILRaLZSPn/wFN7acekyxWAQAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "rskLHEI9qSUg", - "colab_type": "text" - }, - "source": [ - "Now let's create the model for our CGAN." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Q5s_qNouqSUk", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem.models.tensorgraph.layers as layers\n", - "model = dc.models.TensorGraph(learning_rate=1e-4, use_queue=False)\n", - "\n", - "# Inputs to the model\n", - "\n", - "random_in = layers.Feature(shape=(None, 10)) # Random input to the generator\n", - "generator_classes = layers.Feature(shape=(None, n_classes)) # The classes of the generated samples\n", - "real_data_points = layers.Feature(shape=(None, 2)) # The training samples\n", - "real_data_classes = layers.Feature(shape=(None, n_classes)) # The classes of the training samples\n", - "is_real = layers.Weights(shape=(None, 1)) # Flags to distinguish real from generated samples\n", - "\n", - "# The generator\n", - "\n", - "gen_in = layers.Concat([random_in, generator_classes])\n", - "gen_dense1 = layers.Dense(30, in_layers=gen_in, activation_fn=tf.nn.relu)\n", - "gen_dense2 = layers.Dense(30, in_layers=gen_dense1, activation_fn=tf.nn.relu)\n", - "generator_points = layers.Dense(2, in_layers=gen_dense2)\n", - "model.add_output(generator_points)\n", - "\n", - "# The discriminator\n", - "\n", - "all_points = layers.Concat([generator_points, real_data_points], axis=0)\n", - "all_classes = layers.Concat([generator_classes, real_data_classes], axis=0)\n", - "discrim_in = layers.Concat([all_points, all_classes])\n", - "discrim_dense1 = layers.Dense(30, in_layers=discrim_in, activation_fn=tf.nn.relu)\n", - "discrim_dense2 = layers.Dense(30, in_layers=discrim_dense1, activation_fn=tf.nn.relu)\n", - "discrim_prob = layers.Dense(1, in_layers=discrim_dense2, activation_fn=tf.sigmoid)" - ], - "execution_count": 0, - "outputs": [] - }, - { - 
"cell_type": "markdown", - "metadata": { - "id": "cAY2ZyrGqSU3", - "colab_type": "text" - }, - "source": [ - "We'll use different loss functions for training the generator and discriminator. The discriminator outputs its predictions in the form of a probability that each sample is a real sample (that is, that it came from the training set rather than the generator). Its loss consists of two terms. The first term tries to maximize the output probability for real data, and the second term tries to minimize the output probability for generated samples. The loss function for the generator is just a single term: it tries to maximize the discriminator's output probability for generated samples.\n", - "\n", - "For each one, we create a \"submodel\" specifying a set of layers that will be optimized based on a loss function." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "tKzSpzBuqSU8", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# Discriminator\n", - "\n", - "discrim_real_data_loss = -layers.Log(discrim_prob+1e-10) * is_real\n", - "discrim_gen_data_loss = -layers.Log(1-discrim_prob+1e-10) * (1-is_real)\n", - "discrim_loss = layers.ReduceMean(discrim_real_data_loss + discrim_gen_data_loss)\n", - "discrim_submodel = model.create_submodel(layers=[discrim_dense1, discrim_dense2, discrim_prob], loss=discrim_loss)\n", - "\n", - "# Generator\n", - "\n", - "gen_loss = -layers.ReduceMean(layers.Log(discrim_prob+1e-10) * (1-is_real))\n", - "gen_submodel = model.create_submodel(layers=[gen_dense1, gen_dense2, generator_points], loss=gen_loss)" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Lnd0Wk9WqSU_", - "colab_type": "text" - }, - "source": [ - "Now to fit the model. Here are some important points to notice about the code.\n", - "\n", - "- We use `fit_generator()` to train only a single batch at a time, and we alternate between the discriminator and the generator. That way. 
both parts of the model improve together.\n", - "- We only train the generator half as often as the discriminator. On this particular model, that gives much better results. You will often need to adjust `(# of discriminator steps)/(# of generator steps)` to get good results on a given problem.\n", - "- We disable checkpointing by specifying `checkpoint_interval=0`. Since each call to `fit_generator()` includes only a single batch, it would otherwise save a checkpoint to disk after every batch, which would be very slow. If this were a real project and not just an example, we would want to occasionally call `model.save_checkpoint()` to write checkpoints at a reasonable interval." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "3o85U5VJqSVG", - "colab_type": "code", - "outputId": "bea5fb15-5498-4940-cef5-d4fb1c8fb261", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 700 - } - }, - "source": [ - "batch_size = model.batch_size\n", - "discrim_error = []\n", - "gen_error = []\n", - "for step in range(20000):\n", - " classes, points = generate_data(batch_size)\n", - " class_flags = dc.metrics.to_one_hot(classes, n_classes)\n", - " feed_dict={random_in: np.random.random((batch_size, 10)),\n", - " generator_classes: class_flags,\n", - " real_data_points: points,\n", - " real_data_classes: class_flags,\n", - " is_real: np.concatenate([np.zeros((batch_size,1)), np.ones((batch_size,1))])}\n", - " discrim_error.append(model.fit_generator([feed_dict],\n", - " submodel=discrim_submodel,\n", - " checkpoint_interval=0))\n", - " if step%2 == 0:\n", - " gen_error.append(model.fit_generator([feed_dict],\n", - " submodel=gen_submodel,\n", - " checkpoint_interval=0))\n", - " if step%1000 == 999:\n", - " print(step, np.mean(discrim_error), np.mean(gen_error))\n", - " discrim_error = []\n", - " gen_error = []" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From 
/root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:714: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/layers.py:1634: The name tf.log is deprecated. Please use tf.math.log instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:727: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:1012: The name tf.get_collection is deprecated. Please use tf.compat.v1.get_collection instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:1012: The name tf.GraphKeys is deprecated. Please use tf.compat.v1.GraphKeys instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:738: The name tf.global_variables_initializer is deprecated. 
Please use tf.compat.v1.global_variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/tensorgraph/tensor_graph.py:748: The name tf.summary.scalar is deprecated. Please use tf.compat.v1.summary.scalar instead.\n", - "\n", - "999 0.5156213084459305 0.37282696121931075\n", - "1999 0.39635649234056475 0.6554632024765015\n", - "2999 0.4816185410916805 0.6439448493719101\n", - "3999 0.6881231372356414 0.41854076969623566\n", - "4999 0.6954806981682777 0.36900784534215925\n", - "5999 0.6934329395890236 0.34684676861763003\n", - "6999 0.6871857723593712 0.3469327309727669\n", - "7999 0.6882104944586754 0.35844097477197645\n", - "8999 0.6879851130247117 0.34883454167842864\n", - "9999 0.6891423400640487 0.3533225782513619\n", - "10999 0.6890938600897789 0.352202350795269\n", - "11999 0.6911078352332115 0.3480358254909515\n", - "12999 0.6913300577402115 0.34874600952863694\n", - "13999 0.6922475056052207 0.34867041957378386\n", - "14999 0.691593163728714 0.34903139680624007\n", - "15999 0.6911602554917335 0.35044702333211897\n", - "16999 0.6909645751714707 0.35226673740148545\n", - "17999 0.6911768457889557 0.3513581330180168\n", - "18999 0.6894893513917923 0.3482932530641556\n", - "19999 0.6915659754276275 0.35546432530879973\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "m91nmqWgqSV1", - "colab_type": "text" - }, - "source": [ - "Have the trained model generate some data, and see how well it matches the training distribution we plotted before." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "JqJCBFIcqSV3", - "colab_type": "code", - "outputId": "fa621046-ad55-490b-c0d1-cb341051df27", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 282 - } - }, - "source": [ - "classes, points = generate_data(1000)\n", - "feed_dict = {random_in: np.random.random((1000, 10)),\n", - " generator_classes: dc.metrics.to_one_hot(classes, n_classes)}\n", - "gen_points = model.predict_on_generator([feed_dict])\n", - "plot.scatter(x=gen_points[:,0], y=gen_points[:,1], c=classes)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 8 - }, - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdd3RU1fbA8e+509MLgdCbVClKFxUbotiw8Swoz967PvXZe/fZEJWfXUDFCjZUkKIgSO89tNBJL9Pv+f1xQyCZO8kkmTQ4n7VcwswtZ1iw52afffYRUkoURVGUxkur7wEoiqIoNaMCuaIoSiOnArmiKEojpwK5oihKI6cCuaIoSiNnrY+bNmnSRLZr164+bq0oitJoLVq0aL+UMq386/USyNu1a8fChQvr49aKoiiNlhBiq9nrKrWiKIrSyKlAriiK0sipQK4oitLIqUCuKIrSyKlArijKYSGvyMOiDZlk7sut76HUuXqpWlEURYkWKSVjJs9hwh+LsVss+IM6vTo055UbzyXe5ajv4dUJ9USuKEqj9tM/a/h8xhJ8/iCFHh9ef4Clm3bw2MdT63todUYFckVRGrXx0xbh8QXKvOYP6MxdvZWCYk89japuqUCuKEqjlltoHqwtQlDg9tXxaOqHCuSKojRqg7q1waKJkNdjnHbSk+PrYUR1TwVyRVEatZvOGUycy4HNYoQzIcBpt/LIqKFoJgH+cKSqVhRFadTSU+L56tHRTPhjMQvXbadVWiJXDu1H97bN6ntodUYFckVRGr0mibHcecGJ9T2MeqMCuaIoURMI6vw4bzWT564EYMTgHpwzqDtWi8ri1iYVyBVFiQopJfe8O4VF67fjLikHXJ+5jxnLNvL6zSMQIrJ8tZQSKTli8tvRoAK5oihRsWTjDhatzywN4gBuX4CF6zJZumknxx7VssLz3V4/r3w9i5/mr8YfCNKrfQsevvw0jmrZpNbG7Pb5sVksjf4nBhXIFUWJisUbduD1B0Je9/r9TF2wlo7NU0mIdYY9/86x37M8Yxe+QBCAZRk7ufqVL/n28X+TlhQX1bEu2biDZydOY8ueHKyaxvABXbn/klNw2W1RvU9dadxfQ4qiNBjJ8S7sNkvI67qEyXNXMey/43jhiz8I6nrIMRt27Gfllt2lQfwAfyDIV7OXR3WcW3Znc
+tb35KxKxtdl/gCQX75Zy0P/N+PUb1PXVKBXFGUqBjWtzNamDy4LxDE5w8y5e9VfPzrgpD3t+7JxqKFhiNfIMi6zH1RHedn0xbhL/eF4QsEWbBuOzv250X1XnVFBXJFUaIiPsbJ23dcSJOEGGIc5ikKjy/AxD+WhLzeoXkqgWDok7rdauHoKNeDb9qVRVCXIa/brBYyVSBXFOVI17tDC6Y+fwPj7hoZ9pi8Ig/fz13Jq1/NYsrfq3D7/HRonkrfzq1wHJKaEQIcNisXn9gr7LWklOzLK6xSc6z26SlYTCY3ff4gHZqnRHydhkRNdiqKElWaJujerhmdWjZhw479oe8LwctfzsTt8+Ny2Hh78hweuvw02qQlkVNQzNY9Ofj8Adqmp+DzB7nk2c8Y0KUNt5w3mJZNEkuvs2TjDh7/5Ff25hYipaRv51Y8c9VwUhJiTMe1L6+Qe9/9gfWZ+wiWe/p32q0M69uZtMToTqrWFSFl6I8Yta1fv35y4cKFdX5fRVHqzuINmdw25jt8/iC6lGXy53q5uCMASclTuNVK1zZNWbt9b2l7Wk0IYl12Jj1yJc2S49mZlc/Ipz7F7fOXuUb79BS+emy0ac36Zc+OZ+PO/SFplaQ4F1cO7cvo0/ua5ukbEiHEIillv/KvN+xRK4rSaKWnxHPH+ScwqFsb2qenMKxvZyA0iIMRxAGkBI8/wNJNO8v0GNelxOP189nviwD4evZyfIFAyDUydmczadbSkOtv3LGfrXtzTHPjPdo145TeHcNO1DYGKrWiKEpU+YNBHv1oKrOWb8JmsRAI6vRon87Dl5/GtCUb0IPVywL4gzqLNmQCsGWP+YQlwMuTZnLqsZ3KpEmyCorDLvqZu2orC9dPICU+hpeuP8e02VbmvlymL9lIUNc5uXdHOjRPrdZnqC3qiVxRlKj6eOoCZi3PwFuy9ZrHH2BZxi5e/HIGpx5zVLVXUQoBrZsmARDrDL8Xpy7hm3K1593bNA0pOTx4vMTjC7AzK5+bXv+aQre3zPtfz17OxU9/ytgpc3jnh7mMen4i7/wwt1qfobaoQK4oSlRNmr0sZIWnPxDk10XruW/kybRqklimOiVSAjj+6Ha8/8t8pi1eX+Gx5WvP42OcXDt8IC57xUmIoK7z+6KD196bW8grX8/E5w/iD+oEdYnXH+DT3xexPsr17TWhUiuKokRVsddv+nogEGTU8xPw+oO0TE0kY3d2la6rS3jys98rPc6iCXq0Tw95/brhA+ncKo0J0xeTsSuLrPzikGM8vkCZ12cv32SaO/cHgkxbvJ7OrdKq9BlqiwrkiqJE1YAubfhzRYbppOa+vCIA8mtxU+QYh52zB3bns2mLmLpgLQ6blYtO7MXw/l0Z0rMDQ3p2YOH67dw5djLucl86ToeN3h1bHHwhzASo8XLDmRxVqRVFUaLqnouHEOeyY7ca6RNrSTvauih0HtKzA5/cfyn/ee8Hxk6Zy5pte1m6aSfPTZzOU+N/Kz2ub6dW9GyXjtN28FnWabPSo206/Tq3Kn3tpF4dMCvRtlospVU4DYEK5IqiRFXrtCS+efzfjD69HwO7tqFf59ZlAmZtaZoUx+u3jGDjzv1k7M4uk6d3+/z8unA9m0vSObuyC+jdsQXd2jSlRWoCnVqkctv5JzDm9gvK1KCnJcbx4KWnYrdZsFst2CwaDpuF64YPrNX2ulWlUiuKokRdakIst5w3GIAF67Zz59jvIz7XYbMiMCYe/Sb9V8Lp3aE5APPWbAtJmYCRDlmycQfb9uTw4Ac/o+sSfzCIy2GjdVoSI0/qhc0SOgk7YnAPBnVryx9LNhLQdU7q1ZE2JdUzDYUK5IqiVJuUkqz8YmIcNmKcdtP3f1u4rszinorEuezcePYg9uQUmjbXCsdhs3D9WYMA48ncZtFCvgQsmkaCy8EjH08t+7Tu9bM8Yye//LOW/l1a8/7P81mwfjtpiXFcfUZ/TujRnmbJ8Vx26rERj6euqUCuKEq1z
F29hWfGTyO7oBgJnNSzA49deTpxroM13tOXbODnf9ZGfE1dl6SnJJBTWLXJ0BevO7s01XHucd35+NcFIYFcSkl8jPnGFm5fgO/nrOTVr2dR7PER1CWZ+/JYs20Pd1xwApee3HCDOKgcuaIo1bBhx37ue+8HducU4AsE8QeCzF6Rwb3v/VDmuK9nLy/TD6Uybp+fzbuyOb1PJ2zWyGvNe7RrXvrr5ikJdG3TNPTaXj+vfzsr7MKg3dkFpUH8AI8vwJjv55jufNSQ1DiQCyFaCyFmCCFWCyFWCSHujMbAFEVpmIo9Ph58/6eQdIkvEGTZpp1s25tb+lpVA6DLbqN9egpdWjdlVISpDE3A7JUZpb/PLXSzeuuekOMksC5zf8guRAfuG9B182X/gjKfqSGKxhN5ALhXStkdGATcKoToHoXrKorSwEgpufH1r9kSZjGPLxDkwfd/ZF9eIQDDB3SNuGJFCEiMdTGkdwcAbjv/hEpXYoKxUGjqgnWlv88pdEfcBkAIsFk0mibFhe1pHgjopMS7IrpefalxIJdS7pJSLi75dQGwBqh4u2xFURqlRRsyydidXWFN+PrMfVz76iR0XXL+4B50bp2GJYK1M00T4/jk/ktLK0f25hZGXLWyessuHvnoFz79fSHxLke4dTwhpDSacW3dm4PXH/qkbrdaGNitDakJsZFdsJ5EdbJTCNEOOBaYb/LeDcANAG3atInmbRVFqSWBoM6Uv1cxee4qwGhNGwiTYz5Al7Anu4A/V2SwK7ugJLAKI2qG4bBZuOzUY1i7fS8puS66tWnG9r25OO1WCt2+SsdZ6PHz8z9rmb5kIx9O/YdTe3fih/mrq/Zhy7FZNI7r3pZnrh5eo+vUhahtLCGEiANmAc9KKb+t6Fi1sYSiNHxSSu4Y+z2L1meW5sPtVgvBcLnkcmIcNoJBHW8lgR+M/iiHXlMIGNytLf+szww7OVmRSr43IvLt46Npl96w2tXW6sYSQggb8A0wobIgrihK47B0004Wr99RZlLTFwii6xItgtRFsdcfURAHQr4YpIQ5q7eCpFqdEqPxfOqwmW8g3RBFo2pFAB8Aa6SU/6v5kBRFiYYd+/O4f9yPDLnnbYY/9D6f/LaQoF55zllKSbHHx4J1202rToSAlk0S62RbNCl1jm6bjq2aPcxr4vs5K+v8ntUVjRz58cCVwAohxIE9lh6SUv4chWsrilIN+/OKGPX8RArdXnQpKXT7eO+nv9m0K4un/n1G2PO+/WsFb0+eQ36xB6vFgqaJkB19HDYro0/vx5BeHVi7fS//ee9H05K+aAjokiKvD6fdhr/chg81Feu0c2zHFvy1aovp+0s27Yjq/WpTjQO5lPIvGlI/R0VR+HLmUjw+f5lWsh5fgN8WruPW8wbTLDk+5Jxf/lnDK1/NLE2lBHXzGnAhBH8s3chLk2aiCYHNqqFLSVDXo5LSKG/Tjv0EIsjJR0oAQ/t05oXrzmLLnhwWrN8eUrFi0QTt01Oids/aplZ2KsphaFnGTtOnZLvVwsadWabnvPPD36Y9UYQwJi5jHDaaJMbSOi2Jheu24w8E8foDFHn8SCmxiNoJJ9EM4mAsDFqxeRdCGMH66LbpIatIbVYLl53SsJflH0r1WlGUw1D75qks2bgjZBLRH9RpmZpges7e3ELT16WEd+64CJvNwuL1mbzy9ayQY4z71EXH8ejQpaTI7eXFL2ewced+NIwVopomaJ6SwKNXnE479USuKEp9GnXqsSFPmXarhZ7t08MGqLbNkk1fT0uM5eh2zZi1bBOvfTs76mONJgFYLRo2ixY232u3WhjWpzNn/Pf/+HH+GvKLvXgDQXQJ3dum8/2TV9Ovc+u6HHaNqUCuKIehNk2TGXPbBbRtlmwENqvGqcccxWs3jwh7zp0Xnoij3HJ6p93KTecex79f+oJxP82LqH68PrVoksCMl2/ilRvP5aoz+nPpyccQ67DjshulhDEOG+3Sk3H7fKZ7iy7P2MX6H
fvretg1plIrinKY6tOpFd89cRUFxR7sNmtIkA45/qhWnNizPTOWbiSoS2Kddu65+CTWbtvD+h37G3zixGbVePG6s3l7yly+n7MSXyCIxSIQQnBW/240KfnJ4oQe7Tn3kQ/CXmfynJXcf8kpdTjymlOBXFEOc+F6cJd3+5jvWLF5V+lTt8fn550pcyj2+qu1urK6EmOdeHyBKndOHHpsJzy+AN/PXYmn5Fw9YHyWXxasYerzN5AYa/xZxLmckGM+J5CaEFOD0dcPlVpRFIXVW3ezeuvuMpUuQV1S6PFFFFDNnvZtFo14V+iuQZXx+gMMH9AVmzXy8GS3atx54Yn8unAd3jC7Ec09pF78hrMHmh4jgFGn9anKcBsEFcgVRQmbOvH4AqQlxaGFaSdo0TQ0IfCZBPvubZtFvMVb+Xt+P2el6e714fgCOhc9+Sl5RR7Tz+H1B9my52Dr3aF9OnPecUeXOUYTghevPxunvfEszT9ApVYURaF1WpJpsHbYrJw1oCtT5q6myOPF7QuUaXBV0ZL/rXtzsJjsnRmpQLBqWfkij491mfvCvr9k484yv39i9DBuG3E8k/9eRWKMkxGDj67SrkQNiQrkhyEpJYunLWfa+NlIXXLaqBPpd8YxRitRRTHR56iWtEhNYMueHAIlgVdglOpdcVpfrjljAL8sWMuabXtKW9pWpqDYW+kEa7Rt25MT0knxgN05+SGvNUmM5dozB9TF0GqVSq0cht66/QOeuPBlpn02m+kT/uSpka/y2g3v1fewlAZMCMG4u0dyUq8OpbvrCCGwWTS+mr0Mm83CRSf24pKTjqlSCaJex+WKktBOimAs9OnVvjk+f4BfF6zj/Z/nM3PZptIvrcZOPZEfZjKWb+W3j2bgPaQZv6fIyx+f/8U5N51O574d63F0SkMVCOrMW7OVvCJPaXDTpSS70M37P89n9da9vHbzeXw3t2odAXUp0TRRut2bWe12bROA02bl/ON7cu6jH1Lk8eH2+nE5bDRNjufj+y4hITayyp6GSj2RH2YWTF1quoOL3+NjwS9LTc5QjnSBoM6tb37L0+N/Z+H6zJD3/UGdv1dvIWNXFjv250V83aAu8QWCWDWNfl1ac8PZg3BGsAdntHVokcon91/Kh1Pnk1VQTLHXj8T4Utm2N4c3vvuzzscUbSqQN2D7d2Qxd8oC1i3cFPEMvivOidVkwsZqt+KKa9xPHUrNrM/cx21jvuOke8dywRMf8+O81UgpmbF0Iyu37MZdQYWJLiXrtu9jYJc2EW+mfIAvEGTxhkyG9+9aq6mWcJtdBIM6hW6jv3r5++u65IeSP4fG7LBOrQT8AWZ8PoeZk+YSE+/knBuH0fvkoys/MQp2bd7DlpXbaXFUOs3apjH/x0UU5hZx7Gk9adExvcJzpZQ8e/nrzJ70t/EXTEBKehLvLHqJlHTzfhgHDBk5iHH3fxr6hhCcdMngmnwkpRHL2JXF1a98iafkabSg2Mtzn09nb24hG3bsw+2rOOUhpaRFkwQGdW/L+1PnGzsFVak8MEiTxFi6tE5jxebdNfw0oWxWCwKJLxCa8966J4eb3/gmbG4/ENRZsnEHfTq1ivq46sphG8iDgSD3D32KDYsz8BQZDen//mERlz4wgiseHRm1++i6ztbVmdidNloe1ZyAP8DzV7zJvB8WYrVb8Xv9BAM6DpcdXZdIXefcm4dx+cMXkbs3n/R2adidZRdNfPnSZGZ9OffgCxKyd+Vy64AH+XxbxZOWSWmJPPLFPTx72WtoJZNWwYDOg5/dTmrz0C+BHRt38d2bP7N97Q56ntiNc28+g8Qm5t3xlMbrvZ/m4fUFytRYe3wBxv00jzP7d0UTosLAnJYUR2KMk5FPfVrau1sAFosWUR/ylDgXp/3nXfKKPTX/MCYSY53kF7lN35OAxx8wNskIE8ynL93YqAN51DZfroq62Hx51qS5vHrdO7gLy/7FsTlsjN/8dqVPtpFYPH0FL1zxBp4iL7quk
96uKcec2pOpH0wvM9lYnmbVEEJgd9iQwOjHRzLy3vNK37+wydUUZJsvH/5o7Ru06tyi0rG5izwsmb4CJBx7Wg9cca6QY5bPXs3DZz2H3xcgGAhid9pwxTkZu/BFmrZJq/wPQGk0znnkA3ZmhZbfQeUbKjvtVqY8dQ23vvUtGxpgQymn3croof0YP31RhZOpVotmWqVi0QSjT+/H7eefUJvDjIpa3Xy5IZo7ZUFIEAew2i0sm7m6xtffu30/j414kZw9ebgLPXiLfWxbs4MpY6dWGMQB9IBO0B/EXejBU+jh0ycmMWvSwSfw4gLzJwuALau2RzQ+V6yTwef1Z/CI/qZBXErJq9e+g6fYS7BkctTn8ZOfXcij573ID+/8Sn52QZlzfB4f//yyhPk/L8Yb5W23lNrVKi0x7Hu+QLDCksILj+9BUNfZFGZDivrUv3Nrxj94OTFOW6XpISHMN3K2WiycPbBbbQ2xThy2gTwhJb40tXAoIQSxiTVvivPrR3+gl6sOkVIiqzGZ4ynyMvbuj3j7zg/567v5NGsb/mm468Cjwr7n9/nJzy6IaOImP6uAvdtDn66kLslYvpV37/uUUe1uYdksY/HHgqlLuLjZtTx72Ws8d/nrjGx2HfN+XBTBp1MaguuHD8JezVWLKfEu7nl3SpVy4lVhs1ho1yyZLq3STANtRRZu2E4gqPP36q0VpnfsVgsn9OjAyzeci9NuJcZhw+WwYbdauOvCE+nQPLWGn6J+HbaplYzlW7njuIdCno4TmsTz4erX0XVJUlqC6WrHdQs38eFDE9iwZDNNWzdh9OP/YvCI/qXv67rOncc/wtr5G0LOFYIa7VvoinOS2iKZzA27QjZcEULQslM6D028i059OgDwx+d/Mea29ynIKSo5CJKbJXHrG9eQ1DSBXz+agd8b4JRLj2fQuX3RSnY+dxd5uDD1agKV9MJIbBLPuBX/Y3THW/EWl/2zdLjsfLppTFTSVErtu+n1r/lnXWQ/0R3qmA7NWbl1T60snrFbLfz52q3YrBZ2ZuUx4rGPqtzz/Pij2zF/7baw47NbNY7t1IqXrz+HOJeDQreXv1Zuxh8IMvjodqQmxEbjo9SJcKmVw3ays0Ovttz61jW8ffuHWGwWkGBzWGneMZ1LW92IENCsbRr/+fg2ug/qXHreuoWbuPfkx/EWG6mDgqxCnhv1Ore+cQ3Drz0NgLdu+4CNSzab3rem34vuQg+7t+5j6BVDmP/T4jK5ciklmet3cd+pT/BZxtssmb6C50e9UW4AkLM7l2cvfw2rzUrA60dKmPfjIvqfeQyPTroHIQSuWCeDzu7LvJ8WVRjMPcVevn71hzCfVTLzy7lceOfZNfvQSp0IVNAXxVJSu2cWRJdm7Kq1MfXq0Ly0v0lBsbda/3627MmpsKxxSK+OvHT9OaW/j3M5OLN/16rfqAE7bFMrAMOvOY1Ju9/n4c/v5qkpD5DQJIENizII+AL4vQEy1+/iwWFPs3/HwdzfB/8dXxrED/AW+/i/B8YTDAbJ2ZPLrx/NqPRJtiYC3gD//LKEu969AadJ7XcwEGT6hD8rXHYvgxK/x1/6D8NT5GHB1CX888sSJjz7Ndf1uJudm3bTtHUqNkf4bm9+bwBvsY+AL3SRUcAXoDg/fD5faTh27M9j7ba9Yd9PT47nohN6li7Pr6m4CNrXagLap6eU9jr/Y+nGatVzt0yNrzDtM2t5Bv5g3fVTrw+HdSAHiIl3MWD4sVgsGvszs0on9g7wenxMGfsrYOSY1y3YZHqdguxCHhj2NIunrcDmqP0fZPL3FzDhma9DvlTA+GKZPOYXivKKq3RNT5GXl64aw8Rnv2Xr6kwylm8le3curTo3D3uOHtQZePaxWO2huUub007/M4+p0hiU+vHQhz/jDlPR0bN9Op8/fAXXDB+IVYtOSDiue9tKj9El/PD3aq555UsmTFvE17OXm7agtWgCa5jVPlaLVulPDLouw/YoP1wctqmV8nZvM
W9vqQd0fv6/abTv1ZY3bhpnWulywPKZq1g+axWyjvrsZCzfZvq60AR7t1W9DExYBMV5xQT8B7/MPEVeNq8wvw8YeflPHp/EaaNO5I+Jf5XW5DtjHZxw4UC69A8/+ao0DLmFbtZu22saJBNjHHz8n0sRQvDp7wtLd9apiM1qQRMi7IYTNotWukdmZTz+AGu27WV95r6w7W6tFo17LzqZXxauYfOubAo9PqSU9GjXnOyCIrbvq7htQPOUeGKdVd/gojE5YgJ5p74dygSwQxXmFvHSv8dUmi6RkpAJyPogdYnfW40nDEnYP4Owp0jJ+oWb2Lx8K537dSQ+JRa7087QK09i0Dl9qz4Gpc4Fdd2YhTfhsNsQQpCVX8T46YsrvZbVojF6aB8mz1kVNpCfM6g7Zw3sxm+L1ke0sYQuJbpJ73GbRUNogocvH8o5g7pz8Um9Qo4ZdPublY73v5eddti3cG40qZV9mVl89coUPnr0c1bOWVvlXFrbbq1o1cU8hRAM6LWa824oRLhmFBHw+wKs/ns9S2es4vSSIH64/+M4XKQmxNK2aVLI61aLxpn9uwBwz7tTIgq615zRnyKPn9wwKzQFcO2ZA+jbqRWXnHQMNfkb0rVNM3565lrOGdQ97DEtUitehXz+4B4RpXkau0YRyOd8/w9Xd7mDjx79nInPfct/z3yG5694Az3MLLxRTTGHxy98iScvfplp42fj9/kZee95prneI4Vu0oeiKqSUeIq8PPmvV3n47OdC5huUhuuZq4cT67SVVqeA0Uxqb04hm3dlVTgReqhPfl/ItCXrw5b6SeB/38wiqOvccM6gan/Za0KwOzufO8dO5qtZy8JOVt464viw9fEuu40zSr6oDncNvo7cU+xlZLNrS3OzBzhjHfx3wp0MPq9/mdd3bd7DzX3vpyj34ESgZtXo2KstD46/gxt630ewiukFJZQz1sGtb17LmVefUt9DUSI0cfpi3vjuzzK5aIfNgs1iodBT8WrkAzQBCTFOcovCzyVpQnBK7448d91ZnHDnmGpv9XaA026lb6dWvHnr+aZfDL/MX8NTE6aVSfU4bRb6d2nDJScfw6zlGcQ6bZwzqHvjX/jTWJfoL5ux0nSFpqfIy7TPZpV5TUrJPUMeKxPEwXgS3bhsC3ef+JgxVa6EJSyRPUF5irz8/unM2h2MElVfzloaElS9/mDEQRyMfz42qyVcyr3kGMmc1VvYuGM/Q/t0rvE+mB5fgMUbdrAsY6fp+8MHduOv12/l8SuH0bZpEk67Fafdxubd2dz33g98NXsZn01bxKjnJ/LdXytqNJaGqsEHcrMgXvpeub8gW1ZuY//ObNNjZVBSkF1A8DDZ2qm2xCXG4ox1RHSsJUo1x0rdyKmgh09VZOUXV7pwR9clSzfu5L+XncrRbZvhtFvLpHXCCVfH7gsYwTwci6axYvNO9uQW4vEFyC3ykLk/r7QKJ6hLvP4AL375B/kV/DTRWDX4f4nh+oc7Yx2ccVXZH+uL8oorrCpp5L3j60RhThE+r5/KZqkcMXba9WjD23d+yNf/+4HcSkrAlPrXo3349QJVEUnPFatFIzUxhjiXgw/vu4RP7r+Mq87oX/l5mma6i5DdZq1wKf2u7Hx+nLem0glbX0Dnla9nVjqOxiYqgVwI8aEQYq8Qomob+kXA7rTz+Nf34Yx14Ix1YLNbcbjsDPv3yfQb1rvMsUf16VBhALI5j5hqy2qTUhqTopX8W7XarPzy4R98/9YvfPTI54w+6nbW/hPae6YihblFqotiHbrrwhNxOWxoh+RFLJoWsghIE4L2zZJxRLgTkNk/OQkM6nawWiQ9OY4/lmys8PlAAM9fOxyLyaIkixAM7dMp7Llrtu2NuKnXbwvXM2uZ+cK/xipaT+QfA2dG6Voh+gztxcRt73Lza1dzzf/yInoAACAASURBVHOXM+afF7h9zHUhEx/OGAdnXz/U9BpHH9+FmPiadz1UDMUFbjwli6d8Hj/uAjfPX
vZ6RGWhK+es5ZpudzKy2bWcn3wVT1/yP4ryimp7yEe8zq3SGP/g5Qzr15nWaYkcf3Q7xtx2Ph1bphLjsCEExDhsJMY5ee2W8/m/e0bSo106mhDEOuymARagffMUksq1StZ1yRUvTCSvJI0xZvIcMvflVvh8IIF3f5zH2DsupGlSHC67DZfdRnpyPO/edVGFi3q8Pn/ETb18gSAf/7YgomMbi6hVrQgh2gE/Sil7VHZsbXc//OTxL5n08mR8Hj82u5UL7z6Ha5+7nMta30RWmBy6Eh1D/nUcd797I3FJ5j8G79y0mxuPua9MFZLNbqVz/6N4/c+n62qYyiF0XfL36i2s2b6XFqkJnHpMJ9P0xr+e/oyMXVllnnxddhv/vfxUPp66gIzdZf9t2SwaFw3pxf3/OoVT7nunNKhX5p07L2JAl9Zs2pmFENCheWqlZYxPfPorU/6OfJ+BlqkJ/PDMtREf31DUe9WKEOIGIcRCIcTCffvMl8tHy7+fvIQfiybwQ+F4fnJP5LrnRyGEIHt3Tq3eV4E53/3D3UMeDVvj//1bv+Avl8f0+wJsXJLB5pXhWwUotUfTBMf3aM91wwdy1oBuYXe6f/Wmc0lLiiXWaSPGYcNus3Bm/y4c17Ut2/eHzpH4gzrTFq0Hwi4sNTV+2iKEEBzVsgkdWzQxDeLFHh+5hQcnb/fmmu+oZcaiCQZ0bRP5gBqBOksaSynHAePAeCKv7fsJIXDGlK2+SG/XlF0Ze2r71ke0oD/I7i17mTt5Iced1xeLpWxl0dY1maZ1/N5iH/cPfZK73r2R488fUFfDVaqgdVoSPz5zLQvXbWd/fjG9OzSnVVqSUQUS5id7m9UIMWf268o3fy3HH8GitH0VpNlyCt088cmvzFuzFYBWaUk8MXoYfTu3Yt6ayh8ErBaNGIeN684aWOmxjUmDr1qJputeGIUjJjTPZnPYsDvt2By2Cssdlch4Cr08PfIVLky9mgnPflMmb97j+K7YneYNlXL35vP8FW+w6PdldTVUpYosmsbAbm05e2A3WqUZy/4TYp307NA8pLzQYbNywQlGpvWW8wbTrlkKMSWTrc4wOwHZLBYGdGlt2l9cSslNr3/N32u24g/q+IM6m3dnc/Mb33DC0e3DTs5qApJinbRPT+HCE3ryxcNX0jzl8Npg/LDMkVfkz2/n8+FDE9i9eR/N2qcx+vGRWCwWcvbm0WtId1697h0ylm89InqvHMrhsuP3+its0F/d64565CIu+++FAOTuy+Pa7ndTkFMYdlu8bgM78ebfz0V1HErt2pNTwLWvTiK30E1QlwgBx3Zsyeu3jChdEHQgF79+xz5aNklkX24hb30/B19JqweLJtClRBOCGIed64YP4IqhB3v6LM/Yxc1vfhPSjlcTglGnHstpfTpzzStfmlavWC0as/53S8RdGRuqcDnyqARyIcTnwMlAE2AP8LiU8oNwx9dnIK9MQU4hr980jrnf/4OUkpadWjDwrGP5cdzvuAsOv4UEYDTTSmmeRPbO3IiqTmwOa5W6L8YkuPgu++PSbeb2bN3H2Ls+Yu5k88qBhNR4vtn3YcTXVxqGoK4zb802dmXn071NM7q3bVbpOf+s3cZn0xexaUcW+/IKy+xQ5LRbuW3E8Vx+ah8Afl2wjqcnTqPYZCWqw2blpevP5u53zPcWtVo0/nj5JuJckS12a6hqNZBXVUMO5Af4fX4C/iCuWCfBQJArO95K1o7sqD+xNkYJqfHkZxVEfLzQBJNzP8F1SIlawB/gorRrTHcY6nVSd16d8WRUxqo0Dmc//D67skP/TiXHuZj+8k0AbNmdzaXPji99gj+UJgRCmG9VB9C1dVMmPjQquoOuB/VetdLY2Ow2XLHGNmsWq4XXZj9N5/5HYXNYsdqtVZqFP9y4C91V6mpnd9pwxpbdss5qszLqkYtxlJuQdsTYufqZy6IyTqX25RV5WLZpZ5WqRszsyTE/P6fQbfRTB9qlp9C3U0vT43Qpw
wbxGIeNJ0YPq9H4Gjq11DFCzdqm8dbfz5GzJ5eAP0h8SixXdriN3L1H3tL0Q9MqFquFYCCI0ETYnPcJFww0Dfwj7z2XuKQYJjzzDdm7c2nXozU3vjyaHscfXhvjHo6klLz61UwmzV6OJgS6Ljm+Rzueu/asauWhbVYNr0k1k82ilVmI9MgVp3Peox+GDdrl2a0az1w9nM6t0qo8psZEBfIqSm5mzNQX5RVRGOYp4khhd9roNrATXrePVl1asOj35eTuzSsT0J2xDm57y3zhhRCCs64bylnXma/GVRqu8dMX8/nMpWWqDmctz+DJT3/jhevOrvL1zNIlAIGgjpSy9EGgeUoCvTq0YHnGzjLB/MBEaWimWNCzfXqVx9PYqNRKNR3uDbiEEFz97GW079Um7M5Cfl+AY07tyVvznueBT25nzPzn6XliN6x2I/3UqU973pr3fNhVnkrjNe6neab/Bn5fvB5fBPt+lhfnNJ+EjHXZy/w0t21vLmmJsWhClP53QPnxOO1Whg/oWmGzrcOFeiKvprikWDoe2571CzaVqfQQQpSmGVJbJNP26NZGXXQjC/xWh5X4pFjGzHueG465jx3rQ3cqd7js9Dn94D6KTVs34dUZT1KUX4we1IlPjqvLISt1qChMD3MpIb/YS5PEqoWWf53cmwnTFpfZ/NlpszJyyMHGeJt3Z3PlixPxeAMhlSkHns6FAKvFQkKMg0tPPiaijouHA/VEXgMPfHIb8alxpf27XXFO2h7dim+zPuK34CQ+3/4eefvyG10QB+MLqfvgLvz68UyyMs370/Qb1ptuA0M70sUmxKggfpgr3zHxUDFhFnxVZOixnejYIhWLphHrtGO3WhjapxM3nXtc6TFvT56D2+uvtMvhyb078vuLN3Lt8IFhG30dbtQTeQ207tKS8ZvHMmvS3+zZupdOfTow8Kw+WA7Z8KKqk6GueGe916sLAQPP6kPH3u14995P8BSHtprVNEHfYb3VBsxHqBN7tmeGSSvY9ukpxDjCdyk8lJSSReszefXrWWzalQWAzSLwB4I8e/VwTitpW7s+cx9fzlzK7BUZlaY0pYTd2flV+zCHARXIa8gV66xw38o+Q3sxbfxs9EpabMYmxvDWvOdYNXcdb936Pj6Pv8LjqyM+JZb2Pduxeu5ahBD4/QHTnxbOv/0sbnx1tHFOsnl+Udclb9z8f/z9w0Ie/+Y/2B2Ne8WcUjUPXz6UVVv3kF1QTCCoY9EETruNV248N6LzdV3ywPs/8ueKzWUmOg/88snxvzOkVwdmLNvI45/8hj8YjGgNh91qYXD3dtX5SI3akfFzRz0a/cS/iE2IwVrSW0IIo1b6xIsHkdQ0EWesg0Hn9uPtBS/QuktLThp5XCVXrB4hBGPmv8CrM57gs4y3+df9I0y3arM7bTRplVra7Orcm88IqfU+1IKpSxlz+wes/nsdefuPvCehI1VKQgzfP3k1j11xOpec3Jv7Rp7Mz89dR/v0lIjO/2PpBuau2hq2WkVKyT9rt/FMyabKkQRxm0UjMdbJJScfU6XPcjhQT+S1rFnbNMYtf4VJr0xh2cxVNG/flH/9ZwTdj+tierwrzsWwf5/Mb5/MjOpTeXr7NFp0NMqwmrRMpUOvtjhiHCErK30eP9++/iNLpi/npJGDOXXUiVxy/wg+fWKS6XWlLvnl/enMnvQ3Pp+foVcM4c53rg/peqgcfpx2K+cM6s45g7pX+dyf/1mL21fx3+/t+3PDplI0IbBaNLq3bYbUJXnFHk7s2YGrhvUL2eTiSKACeR1o0jKVW167OuLjb33zGqx2Kz/93zSCgSAWq4Xm7ZuSuX5XpSkaM44YO5c+eGGZ15q1axr2Wlk7c8jamcPKv9by8/vTeGnaY0x89hsCJgs2DijKLwbgj4l/0rRNGlc8clGVx6kcOSqbhJRS0veo1mF3/WmfnsK4uy8mWe36BajUSoNktVm59Y1r+D7nE77IHMePheP5YNXrvPbn08Qmx
Zap67ZYNboMOCo0/VFyiDPWQa8h3TnjqpPLvN25bwdadW5RmvIx4ynysmb+Bt6751OGjDwuoolNb7GP79/8KeLPqhyZRgw+OuwKUIfNyrNXD6dTqya0bZaMZrKOYeveHBZtyKztYTYaqmlWI+Mp9vLTe78x48u5xCbEcN4tZzB4RH+Wz17NN//7kayd2fQ+pQdJTRPxFnvpdVJ3eg3pbhqE87MKePmasSz8dSlS19GD0rT7oWbRuOWNq/n5/6aRsWxrpWO02CxM9X4Rlc+rHJ6klLz45Qwmz12FrutomiAQ1Lng+B5cd9YgmiYZ5au7svO58oWJZBeENleLcdiY/vJNEW8SfThQ3Q+VsNyFbhb+uoxXrhlLsck/GICU5sl8uWMc835cxHdv/kzWzmyyd+dSkB3apqDHCV15bbbaf1OpXMauLOat2Uqcy8Gpxxxl2mb2ulcnsXjjjpDXY512Xr95BH07t6qLoTYI4QL5kfNVpoTlinMxeER/HDH2sIE8e1cOwUCQQef0ZdA5fQFYOWctD57xjLEhRVBHs2jYnXZueT3y+QDlyNaheSodmqdWeEyM07wuXUqJI8z+okcalSNXAKOL4Yu/PRp2q7uU9KQyC53A2LZtzPznOfXyE+jQuy3DRp/EO4tepFOfDnUxZOUIcfGQXrhMAnasy073NpVvXlEdUs9F6o2nKZ76OlNKte/Zlocm3smLo8fgP2Q7LUeMg6uevtT0nHZHt+aBT26vqyEqR6ATe7TnwhN68tWfy7FoWmnp4Vu3XmA6EVoT0r8KmfcABDYDEmkfhEh8EWFp2G1wVY5cCTHzyzl88N8J7Nm6n9SWyVz11KWccVX41auKUhd2ZuWxcH0mibFOBndvV7oXaLTI4H7k/tNBFh3yqhUsrRFNfkGIgz+tShlAFn0K7okg3eA4DRF3B8LSJKpjKk9NdipVdmgfaEU53OmF70DhWKBcbyERi0h6B+EYdPDY3LvBMx040BfJClqqEfC12msYp7Z6U6pMBXHliBLYTEgQB5A6BA9WzcjAFvBM42AQBwiAno90f1vLgzSnArmiKAqArS9gtrxfgu2QNgT+lZhPL7rB90/tjK0SKpAriqIAwnUuaEmUDdJOsA9A2LodfMnSEoRZStoG1va1PEpzKpAriqIAQouB1G/AdRGIFNCaQ9wNiOSxZQ+0HWME8/JP5cKGiLmszsZ7KFV+qCjKEU96/0TmPwPBzSASIfY6ROz1ZSpVDhBCQPKnyLz/gG8+IMCSXlKm2CL02r5lSPcXoOcinGeA8yyEiGzzjUipQK4oyhFN+hYic26ldPJS5kHhWKQsQsTfY3qOsKQiUj5E6vkgvaA1MS0O0Is+g4KXMSZRJdI7F4o/h5TPohrMVWpFUZQjmix8k7IVKABuKPoEKUO3XZR6ITKYZZTnagkIS5ppEJd6HhS8VHLtAzl1N/jXgufnqH4GFcgVRTmyBUL3HgWMVtDB/aW/lXoOevYNyL0DkftOQu4fhvRVsB7GtxCEWateN9LzS42GXJ4K5IqiHNmsncK/V7I0X0qJzL4afH8BfsAHwa3InGuRge3m54o4TDfFRYBIquGgy1KBXFGUI5qIuxNwlnvVZUx4ipK2uoGVENgCBMoeJv3I4gnmF7b3A2FWl+5ExJj3LqouFcgVRTmiCfuxiORxYO0KWEBLg/i7EbG3HTwouBNMKlggYFS6mF1XWBDJH4KWCiK25AndDvF3IezHRvUzqKoVRVGOeMIxCOGYEv4AazejOVYIO9gGhL+urSuk/Wnky2UB2PsjtOimVUAFckVRDjPSvw7p/h5kMcI5DOyDa943SM/GPN/tB9eICk8VwgqHNNyqDVFJrQghzhRCrBNCbBRCPBiNayqKolSVXjQemTUSij8G9+fInFuQuXei6wGkXlzt60r3V4Bu8o4L4V9R7etGS40DuRDCArwNDAe6A5cJIbpXfJaiKEr1Sb0AGdiGlP5DXsuGghcw6raDJa+6wTsN9
vZG7u2Lvu90pHdONW5ovgUiQmDaMbGOReOJfACwUUqZIaX0AV8AFf+soSiKUg1SetFz/4Pcexwy6zzk3kHoxV8ab3rnAGZ12wGMksFgScngzUj/6qrd2NolzID8SD0PPed29LxHkf6VVbtulEQjR94SOLSQMhMYWP4gIcQNwA0Abdq0icJtFUU50si8h8HzK+AD6TNezH8WqTUFYTeekCvdK8eHLHgTaUk1+ooLJ8T8q6S3SuiyeSndUPSe+aVECuQ/B7gBDen+HmntCsGtRulhzGWI2GsRpguDoqfOJjullOOAcWDsEFRX91UU5fAg9QLwTAV85d7xIIveheSPQAbNTi1HB9/skl8HjMBf+B7SvwKR/G7o4d7ZhF3YI/dxMI2jA14ILCsZcK7Rs8W3DFxngywGxwmmjbVqKhqBfAfQ+pDftyp5TVEUJXr0HBDWg0/ih/JnwP7TOZivtmFkjs3y1wIjMB8a9D3gnYv0r0fYOpe7byHmE53lr2HGA77pSN/cAxdDxt2IFndbhWdVVTRy5AuATkKI9sL4ueRSoIKCTEVRlGqwpANmGy4LoAD0fRwMuALsQ8A5ktBdfzRMA7DQIGCSO3cMDvOkX5XNn90l/3mhcBzSt6QK51auxoFcShkAbgN+BdYAk6SUq2p6XUVRlEMJYYe4eykbmA+EsPJPzD7wz4WEJyD+bmOTCOEC+2Bw/QswaSEr3ciCl9ELxyLlwSd5YWkOsdeXLLc/UI/uAEtX4/9V5o363p5RyZFLKX8GotuXUVEU5RBG29hEpLUNBDNBauA4AXyzjPxzyAk+BMWI2Ksg9qqDLwf3Ij1TTFI00niqL3zX6Bue8lnpQiIt/k50W2/IexBkDuCF4CqMFM6BVE3EnwRM2uPWhOq1oihKoyDzH0bmPQSBdSCLMHau3weWMN0LRRyI+NCXLU0RKZ+WnGeWHvEYTbL8i8q+XDzBmMAsE7T9GIG8Cs/EwoVwnhX58RFQgVxRlAZPBjaC+weMPPMBbvCvBudphHQvFC6Iv8d0qzYAYeuJlvYTOM4Ic0M/HLJiU+o54Psb80lPHfNJTwvYTyoZ24FA7zJec5xkft9qUr1WFEWpVdK/Bln0MQS3g/04ROwVCC25ahfxLeBgfvpQxRDci0gehyx4GQIbjf0z425HuM4pOw4ZNMoUiz4GmQ9aM9B3m99P2MHS3DgvsBWZ/yyhZY9lrm7ymhWR8F9AGr1f9EKEc2h0er+E3ElRFKWWSM80ZO49GEFQB/8KpPtzSJ2MKNm0AUAGMiC4A6ydEZZmoRfSkkFYTOKlHSypJd0Lv6l4LAXPQ/EkSrd1CxfEwVgk5DgVGdiOzLqgJJVTAa1NSe68ZIDSDwlPIKwdjMvF31vx+TWkArmiKLVCyiAy/xHK7ofpBT2ILByLSHzc2P8y50YjjSFsIL1I1/mIhKfKpkUcp2AerjSE68LKx6IXQvEXVPxUffCaJP4PIezoRe+W9FmpZDLTdTbE3gDuL4xSRdclaJZEZGAL0j0JgvsRjpPAOaxWVnmqQK4oSu0IZoJu1mwqAN6ZwOPG5KV/GcaS+5KA756CtHZGxI4uPUMIB6R8gsy5yUiLIAALIulVozyw0rHsxDw1Y8aGsPU0fulbROWLfgT45hsdF9EACcWfosf8GwrfMj4vAaTnNyj+CFImHNx5KEpUIFcUpXZo8YQNglqS0VbW+wdmS+4p/gQOCeQAwtYd0mZCYI2RurAdHfnTraUFIdu0hR13K+T+s5DCATKSRT8S/IvLvVQMhS+XO64Y/OuQRV8h4q6IbCwRUlUriqLUCqGlgL0/oc+LLqO2mwpqqfUC82sKDWE7GmE/pkopCqHFga1vZAfrm4z8eXAr6BkR3yMyXih8AemP7ppJFcgVRak1Iul/YDsacJXsWemAmFHgPA9EMphNbKKB48SIri+ljpQRLsZJegvzNrdVZcVI01S38sSHzLrMyNtHiUqtKIpSa4SWjEj9yqgDD+4BWzfjS
f2AhGeRuTeWrLIMAnYQMYi4uyu8rlES+JiRm8aCdJ6JSHgMRIzxmiwq2R/TuJcM7ofcmzGCr8bBevBDfx2pCFM0FfIgC19HJDwShWuBiPjbLIr69esnFy5cWOf3VRSl4ZGBTUadeWAz2PshYq5AWJqEP17PR+4bWjLpeSAI24w8uJ5Pac5dBiD+LrTYa9GzRoJ/FWWDsB2jGsVPvdDS0JpWbbciIcQiKWW/8q+rJ3JFUeqM1HPB+ycgwDEEoSUgrB0RiU9Xfm5gIwQ2IX0rQHop+yTtN3La5RW8jq41B/86Qp+kfcYSfilN3quOKj7dq9SKoiiNjV48GfIfAawlfaYCyMSX0FzDKzxPSg8y5xbwLSzpR+6m8pLAA7xQ/FXJeSZvi3jQ0o1Vp8JasvCnOlmKeLB1Bf9yIt7DM9z2cdWgArmiKLVCygCyeIKxEEcWg76H0l10DsTKvPuR9n5lVnmGXKfglZIl+t7qxdjgTiPNYkbfCdjB2gEcp0LRB1RvM+VgBUHcgvG5Dx28A5Hwn2rcx5wK5Iqi1AqZe2dJGqWilq3C2IMztoK6avc3VB5crYRNj8gCKk55+CCwAYL7IrhPGFqisYORmZgrwdoNisZAcC9YOyLiH0DY+1fvXiZUIFcUJeqkf20EQRyMFEklwVNW9L7NaHBlOwl8UzEN2NJD5ROaQZDZlRwTjgDn2VD8WZj3NbSYCyDmgmpev3KqjlxRlOjzLyeyOmtL5S1d7f1NriXANgAtfRVasyUlKzfNOxBCpHnvalbwxfynpAFXmC8c3/zqXbcKVCBXFCX6LM2MPTDDEoALYq5EWI+q8FJGfXgcB7dns4OIQyQ8DpQ0xHJ/hnkgDoZ5PYo8n4NnWvj3AxuQwb21OgSVWlEUJfrsx5eU9rkpm+5wgPMsY9GP6zyE/diQU6XUQd9lBGstEWHtCE1+RRZ/buzcY+2OiLkMYWmK1AuQBa8RPnVSB+tk9O2VHBBE6gUIS9NaG4IK5IqiRJ0QVkiZaEx4BtYBGmipRrdCe5+w50nvTGTeIyULe3SkfTAi6WWEpQki/vYyx+rBfbD/bJB5VC1g24zNmPVt1flo1RAE7zSwday1O6hArihKrRDWVogm3xhpBekDS8sKd8aR/rXInDsoM0Hqm4PMuQmR+nnZY6Ub9o8o2UOzKmyQ/L7x/5xR1MkTO0DhGKSlKTjPinoLW1A5ckVRapmwNDWCekVBXEpk4duEThj6wb8KGdhU9vjir6tRZSLA0g7NcRxC5gPVCajVbZTlReY/abTHDVemWAMqkCuKUq+kfwVy/zDw/ob5E7KEoLEtm9QLke4fjB3tq9zsCogZafzffkyYe1U6WowFPtU5tRiCu5EFb1Xv/AqoQK4oSr2Reg4ye3RJn5RwgdWLDO5Geuci951gdD0066tS+d2g4DX0og+MrohxtwCualwn0vYAZvzgnVqD882pQK4oSr2R7h+MPS4rU/QxMvcW46lWFhE+mNqoODi7jXy19KHF3YxIHmssza9T0d+zUwVyRVHqT3Anla/+xGhqFTY/bQUcoDVHNPkFkfKpsWlF2OMlBDONzaEDG8G/oTojryYnuC6O+lVVIFcUpd4Iex9jM4jKWFqHf88+EJH6OSJtJsLaBmHvjWg6B6ydzY+XAdDSkHn3QcH/IqgDjxYn2I9FxN0Y9SurQK4oSv1xnAqWtlRcQaJBMKMkpVKOiEHEXoWw9ShTFSOE1ViQZHrPU0DfB57fAXdNRl811k4gEpFF7yODWdG9dFSvpiiKUgXGwqHPkUUfgGcyYAXbMeBfW5J2OZAPD7NyU0tDBvaDWIS0Hg3uL8A9GdCNjoahdwSRjCz8mNKdhOpKYBUEVoB3hvF5U75A2DpF5dJqqzdFURokvWgCFLxE5U/NVow+LAd6flfSTVEklbS2rWiSVVC7i4UE2PqglVvoVOlZYbZ6U6kVRVEapsBKIkt9BIBijEnTCPqJy1wqDuI20JpFcN+akOBfggy34
UUV1SiQCyFGCiFWCSF0IUTIt4SiKEq1WbsAzrq9p/0EY9I04aHIJmFrxEK0nqVrepWVwIXA7CiMRVEUpZRwXQDCQfWXxR8Q6VJ8JyL+bmPbOcdQsPagbE26DSP4RqMO3A7OsxEVtvqNXI2uIqVcI6VcF5WRKIqiHErEQ/z9oIXfzzMiia8SUTDX0sDS3ri1sCJSPoKEJ8DSreTpXAJW0FpSsy8XJ9i6GX3Wo6TOqlaEEDcANwC0adOmrm6rKEojJGUQmXsr+OYZqzmxUO2l8YH1Rk15YBUV9mfRdyHz7kckj0V6fkEWvFnSCuDQPHYA9C3VGweApQ0i6TWw9qiwiVhVVRrIhRDTgHSTtx6WUk6O9EZSynHAODCqViIeoaIoRx7v74cEcahRfxP3V4fUoFf0hRAA70z0wg+g8A0iWnFaKQfGBKwdhB2RNAZh6xqF65ZVaSCXUg6N+l0VRVEqIN0/HRLEa0jfdchvKnsKDkDRW0QniNvBPrhkc+guCNclRv69FqgFQYqiNDzCXvkx1RJBMiBaXyD4wNIaLfGRKF0vvJqWH14ghMgEjgN+EkL8Gp1hKYpyJJJ6vrE3J3Bws+XGyoFw1E1Vdo2eyKWU3wHfRWksiqIcwaR/NTL7ipK2tm4OPmfW9irLqoo1qlhkNhXn7v1I26AaF09GQq3sVBSl3nkCPmZueYKpWclk+Q9Ulhz4f0MJ4hZwjUY0W4Ro+hc4L6TiZ2EbFP7PaJVby1SOXFGUerU8Zyt3LfoIXW8LtCUgNa5PX8WVzdbX99DKCRoVMM5TEI7jkb5ZlC1NLM8L7m+Q7u+QjiGIpNcRIvqbSoB6IlcUpR759AB3L/qEwoCPYt1GuD0yIAAAECZJREFUsW7DJy18sLs7K4tS6nt4JtzI4i+NX8owHRnL8ANe8P6JLPq41kalnsgVRak1utTJ97t5c91U/ti9EolOn+QOPNbrIpLtcSzI2oRu0oHVKzV+yGpHj9jsehh1JfxLkXo2OM8A99dU/FR+gMdosRt3fa0MSQVyRVGiyqcHeGPtz0zJXIhXDw1yc/avY/gfzzNu4I14gj7TDLhEozBoZa/PyZKiNOItfgbE78EqDhxtw8ih12Qj5GrSdyP3j4DUL8H3F+jZJSWLtpLxSEzz+jIatenmVCBXFCVqAnqQK+e8xeaifRUepyO5fv67jGp3IoEwmy//U9Cci1a3xKbpCMAmgrx11J90cuUBfrB0gGAmdb5BBIC+B9y/IZr8DJ6fkb6lRp8W1wjIvhSCW8qdYAPH6bU2HJUjVxQlaqbvXsmWov0RHSuBiVv+QpPmBXoFug0/Fop1G0W6jdygg3syjkeXAALkgSdyjdrYmb5SxZ8ihBPhuhAt8Sm0uKvRLCmIxJdLmmwdqIN3gdYEEX9HrQ1FPZErihI1s/euQVahXFBH4olo0hBAUBS0sao4hZ6x2eWaV0kqrje3lzzBbyaizScioZt/YQl7b2gyFVn8lbHXqG0AwnUeQouNzn1NqECuKErUJNlrdzMGAbh1s7AVJoCLFJASRBC0FAjuIGqBvILwKSzpiPjby45QLwL8CC0pSvePZCSKoihVlGiNZiA/8JR9kI6gV2wVdqCX2Qcv5Z9bycE2SHwb9H3GJKb3VypseyuksdjH0hohwvc7l3o2Mu9B8M4xfm9pg0h8wXhyjxIVyBVFiZrf9yyPynUs6MRofnzSgldasaBjFToPtFqEU6uNShULJL2D5hwCgHSejNw3nQonUmURMmskSB0ZMwoRczFY2pfpMy6lRGZfBYGNlJYpBjchc/4NTaYiLGYdwqtOBXJFUaIm329slmxBp1fsflYXp+CVVQ0zkq6ubN7rPIsZuS35M68FyVYPI1K30MGVH/1Bi2aIxCcRziFIKcE7A+n+AmMCtZKKmAN9zovfRxZ/CpbmkPQ2wtbZeN2/DILbCKk1lwFk8ReI+Lui8hFUIFcUJWoGpB7Fn3sW826nP4jT/Ixad3o1WqUIVrlTy
Q3YOT05k9OTM2tjqCVs0ORnhCUeAFnwDBRPonp5dB8EtxqNv5r+aaRbgpmY90D3QWBTDcZdlio/VBQlam7uNIwrmm6mjaOAZ7f3w6dbqn2tb/d1jOLI/r+9e4+RszrvOP79vTM7l/Xsri9rY7CNFwIFnJJC4poUlxqDkwA2uJg0CQgINSq20spOigN1aaW2isjFbUklUBKDIprGASWChDbFCmyA3FwuBkMQhBrHMriEBBtf1t717OzOPP3jHV93dnc8t9fDPh9ppZ0z77znObb0zNnznkspCUheQlBM4oWBLdD3ANU/DB2A7I/DX1tmgZVa+ZmGxIeqrOcwT+TOuZo5pXUCN57SQ2++hV/un0S+4hQj1u+eWbK8OgEoAyQh8SHU8UUALP92uJCnrOX2o7CBQ1MTFT8dkvOA1BEXxCHIoPQ11dd1+I7OOVc78SDNm/0ZBqvsJ+4YTLFrIMn4eD+B0qCTwLZT2bL8ANruROn5MPg6BFNR/PAh8Lbn82D7yrsPAeEXynDz3wNIHD5QQuPvwnq/Gfb27QCkLkGZz6GgrYJ2lOaJ3DlXW63Xcu9r3VXvIj5oAa8f6GDA4uRTS5iX/AaV761SgH2rsf2Z8CT7tpVQTORW2A8DLzD6YH4CUkvQuE9BYTe2/+swsImwF39wmmIakhehllmHPiW1oMwyyCyrMPbReSJ3ztXUvvhCXu7dRPXDIGLl1nA64DmZncw7o9qRYAt73YOvYLtXYB13EqQXUf7T2BzkurHEBwhaPx7uSW79WO86yP4AaIH0J8JpiA3midw5VxNmxk/f+RXfeL2bgWH2T6nUGwcEwQQo/HaUK2OU12vPwr4vYamFKGjDYtNLbHRVQmEn9PwTBRIErVchJVFmKWSWllFn/fjDTudcTXxt82PcvmkdW/aPlmyP3wcnvq/4YDJNmKxLSULiwuI1ZSi8G45ZA8TPOo5ostD71eO4vv48kTvnqrarfz//se1nFOp0vuYtZ1yKknNh/L9B7EyglaHpKw/pGyh7+qBS4Q9A0MlxDQXla/9lVQ1P5M65qr2yd3u4KrIOUkELm/e9TeHAf8GeFZB/Hehj6D4og7B3eYnyUtIw7makMAUq/adAqf1ShknusVNLl0fEE7lzrmoTE5m6nXWfLQywec9W6Pk7IMvIY+AjJfFE8ScJrdegcZ8BwAY2Y/vv4fAmXXHC4ZkUpJdy9BxwgBRqu62yxtSJP+x0zlVtVsd0Olpa2TPQW5f753LPMfzYeDniEJsF+ZfCl33fwQa2Yu2rYdcni2PlB7+KBImL0Pg7UdCOJc/D9t0VLrePz0SZVSg1v7oG1Zgncudc1SRx3wW3cP2Gu8kWyj0oonxv9u7DzCqf0KhTIf/iEQUWbmu7e3nxLM0j/54YgNwT2M7FYWn6SjTpoboeDFEt1WtcaySzZ8+2jRs3Nrxe51x95a3AI9s3smn3VjoT7Tz4xgbyZY1ZjywdBHT/QTexg/uLH7eRTg8aTQLip6NJDyNF2/eV9LyZzT623HvkzrmaiSlgyalzWHLqHACuO+2PueonXyFv1SXzTEuGWHI+ZB86/g+3XFjGoRIjyYVb0fY/CamjD1A2y0P/T7D+H4PaUes1KH5GFXVVxh92Oufqpi+fI65qxrZDy85YEO7tPaIEqPOYogUw/l5GnlpYRnzWh+VePLrIBrHdt2B7/hoOfA/67sd2LqHQV8GXTZU8kTvn6qZ3MEtM1aeZ7t+9DKNtMhW0EZy0ASZvhNTVQBJyT8KuyyA2woKfYCYEUwhntMQpPVCRRvFpRxdlfwQDzxNOhYRwNk0Wev4x3L+lgTyRO+fqxoAD+VFO2SnDMzu3QOsNI18UKw5p9Pw9ZB8lXBiUh/z24tzzUuKQuhhN/hma8lOY8gwEHQxNjTEsmIrlnsOK+4tb9r/B+obcEcUh92z5jasBT+TOubp4o3cnn3n2PqwGM8wNY/27J0N81jBXCLWtwvI7oP8Jhq7uHGbuuVJo3KeRh
IKJBEEbmvgAxM/h0Jzz4GRQAfauCodS3rkQyz0HSjPskM0IhzHXgydy51xd3P/rJ8kVanBQQ9E3tz6FJn4LNIOjx7UDaP9yeCp9/jegRHk3VCea9F0UO/no4ngXQef3oWMNJP4QCr8Le962Pzyj0/Zgu/8CUosOL/E/ShwScyptZkWqmrUiaQ1wJeEJpb8G/tzM9tQiMOdcc3t171tVz1Y50rv9+1DQDlPWQ7YbG3gZxWdA6srDhzTEu8DKHMoJMsPOMCn0rIG+bwMHSn/WQIVdWOvN0Ls2HE5BQIAmrEVqOc7WVafa6YePA6vNbFDSl4HVwO3Vh+Wca3Zdmcls691Rk6EVgLPbw4eNUgLSV6D0FUOuUdCBtV4HfQ8ybBI+qLC3ZLENvgl932LkzbcGwPYStK3AWj8BuQ3hEXLJeeGhyw1W1dCKmT1mduhk0aeB6dWH5Jx7L7jp9ItJBrVZqhJXjBVnX17WtWq7HdpuheAU0DiGnV5o+7H8zqHluf9h9J0QY8Utc0GxqSi9BKU+GkkSh9qOkS8F1g/3pqRbJG2UtHHHjh01rNY5dyI6p2Maaz54PdNbJxJTQFyxiqcinj+hi1kd5fUTpYBg3I0EU54iOGlTeNZnSQEle93KwEhz39Ua/kXQcnZZ8TTCqF+XkrqBqSXeusPMHilecwfhwXXrhruPma0F1kK4RL+iaJ1zTeWCzjN5+E9WcWAwRyDx6Q33sLX3neO+z9vZKh69tS6E3vsZclhybHLYaz9Wcj6le+QBtMxG426E5EdKvB+dURO5mS0Y6X1JNwGLgEstio1bnHMnvHQ8nEnyZzM/zL+8+kMGj3P/ldMzUyquW+OWY9nuw7NPSIDiqOOfkYTlNmHZR4EApRehlnNhwr3Y7uWE/VOB5aHjCwTpKyuOo56qnbVyGXAbMM+s1Mx455w77I86zyIIHoVC+Yk8QNz8vksqrlNBG3T+J2QfxXLPQmwGSn8cxaZQ6Pki9D1AOPEOrO8BbNzNBG0rYcqGcGGP5SAx54Te/bDaJxF3Ex6r8bgkgKfNbHnVUTnn3pNOaZ3ADV0XsW7bz8ve7va29y/mnI5po184AikJ6atR+upDZTbwajGJZ4+4Mgu992HpxSjeBcm5VdXbKFUlcjNr/DZfzrmmtuz3PsIFk89k2TNrR5yYOCXZzhfO+xTnTeiqSxyWfYKDPfGjFcLVofGldam3HnwbW+dcw503oYtru+bynW2/GPJeezzNDy7+PJl4qVWTtSMlMWIMPR4uRrg8v3n4En3nXCRWnHU5C6aee1TZtPREvnfR5+qexAFIXU7pFGiQ+lj9668hPyHIORepA4P9vNbzG2a0TqIz1d7Qugt9D0HPP4Tzxg0gDx1fIkgvbGgc5fITgpxzJ6R0PMn5E0+LpO6g9RosNR/6nwIEyfkoGB9JLNXwRO6cG9MUTIT0kqjDqIqPkTvnXJPzRO6cc03OE7lzzjU5T+TOOdfkPJE751yTi2QeuaQdwBsNr/jE0AmU2M1+zPD2e/u9/ZWbaWaTjy2MJJGPZZI2lprQP1Z4+7393v7at9+HVpxzrsl5InfOuSbnibzx1kYdQMS8/WObt78OfIzcOeeanPfInXOuyXkid865JueJPCKSbpVkkjqjjqWRJK2R9JqkX0r6vqTm2zO0ApIuk/S/krZI+puo42k0STMkPSnpVUmvSFoZdUyNJikmaZOkH9b63p7IIyBpBvBR4M2oY4nA48Dvm9kHgM3A6ojjqTtJMeAe4HJgFnCtpFnRRtVwg8CtZjYL+DDwl2Pw32Al8Kt63NgTeTTuAm6DEc+efU8ys8fMbLD48mlgepTxNMgcYIuZbTWzHPAgsDjimBrKzN42sxeKv+8jTGjToo2qcSRNBxYC99Xj/p7IG0zSYuAtM3sp6lhOAEuB9VEH0QDTgO1HvP4/xlASO5akLuB84JloI2morxJ23o496bkm/ISgOpDUDUwt8dYdwN8SDqu8Z43UfjN7pHjNH
YR/bq9rZGwuWpIywEPAZ82sJ+p4GkHSIuAdM3te0sX1qMMTeR2Y2YJS5ZLOBU4DXpIE4bDCC5LmmNlvGxhiXQ3X/oMk3QQsAi61sbGQ4S1gxhGvpxfLxhRJLYRJfJ2ZPRx1PA00F7hK0hVACmiX9G0zu75WFfiCoAhJ2gbMNrMxsxucpMuAfwXmmdmOqONpBElxwge7lxIm8OeA68zslUgDayCFPZd/B3aZ2WejjicqxR75KjNbVMv7+hi5a7S7gTbgcUkvSvp61AHVW/Hh7l8BPyJ8yPfdsZTEi+YCNwCXFP/fXyz2UF0NeI/cOeeanPfInXOuyXkid865JueJ3Dnnmpwncueca3KeyJ1zrsl5InfOuSbnidw555rc/wPCHdn0SBQE0AAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "StyDTNfRqSV8", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/16_Learning_Unsupervised_Embeddings_for_Molecules.ipynb b/examples/tutorials/16_Learning_Unsupervised_Embeddings_for_Molecules.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..fffaec7444a982fa09fdc9907575141e3c66feeb --- /dev/null +++ b/examples/tutorials/16_Learning_Unsupervised_Embeddings_for_Molecules.ipynb @@ -0,0 +1,375 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "hzpae9-r2aoK" + }, + "source": [ + "# Tutorial Part 16: Learning Unsupervised Embeddings for Molecules\n", + "\n", + "In this tutorial, we will use a `SeqToSeq` model to generate fingerprints for classifying molecules. 
This is based on the following paper, although some of the implementation details are different: Xu et al., \"Seq2seq Fingerprint: An Unsupervised Deep Molecular Embedding for Drug Discovery\" (https://doi.org/10.1145/3107411.3107424).\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/16_Learning_Unsupervised_Embeddings_for_Molecules.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine. This notebook can take up to a few hours to run on a GPU, so we encourage you to run it on Google colab unless you have a good GPU machine available." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "ci69aRSm2aoO", + "outputId": "9071e7f3-15a7-4e3e-add8-fb1b7134a85a" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "2uo2i6arBiMS", + "outputId": "d9d1d0ba-09c0-44ee-b315-84d87af40cf2" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "6bm1iYbw2aoT" + }, + "source": [ + "# Learning Embeddings with SeqToSeq\n", + "\n", + "Many types of models require their inputs to have a fixed shape. Since molecules can vary widely in the numbers of atoms and bonds they contain, this makes it hard to apply those models to them. We need a way of generating a fixed length \"fingerprint\" for each molecule. Various ways of doing this have been designed, such as the Extended-Connectivity Fingerprints (ECFPs) we used in earlier tutorials. But in this example, instead of designing a fingerprint by hand, we will let a `SeqToSeq` model learn its own method of creating fingerprints.\n", + "\n", + "A `SeqToSeq` model performs sequence to sequence translation. For example, they are often used to translate text from one language to another. It consists of two parts called the \"encoder\" and \"decoder\". The encoder is a stack of recurrent layers. 
The input sequence is fed into it, one token at a time, and it generates a fixed length vector called the \"embedding vector\". The decoder is another stack of recurrent layers that performs the inverse operation: it takes the embedding vector as input, and generates the output sequence. By training it on appropriately chosen input/output pairs, you can create a model that performs many sorts of transformations.\n", + "\n", + "In this case, we will use SMILES strings describing molecules as the input sequences. We will train the model as an autoencoder, so it tries to make the output sequences identical to the input sequences. For that to work, the encoder must create embedding vectors that contain all information from the original sequence. That's exactly what we want in a fingerprint, so perhaps those embedding vectors will then be useful as a way to represent molecules in other models!\n", + "\n", + "Let's start by loading the data. We will use the MUV dataset. It includes 74,501 molecules in the training set, and 9313 molecules in the validation set, so it gives us plenty of SMILES strings to work with." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "YnAnjl9d2aoU" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "tasks, datasets, transformers = dc.molnet.load_muv(split='stratified')\n", + "train_dataset, valid_dataset, test_dataset = datasets\n", + "train_smiles = train_dataset.ids\n", + "valid_smiles = valid_dataset.ids" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "EslVHE2m2aoY" + }, + "source": [ + "We need to define the \"alphabet\" for our `SeqToSeq` model, the list of all tokens that can appear in sequences. (It's also possible for input and output sequences to have different alphabets, but since we're training it as an autoencoder, they're identical in this case.) 
Make a list of every character that appears in any training sequence." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "nsE8e9xn2aoa" + }, + "outputs": [], + "source": [ + "tokens = set()\n", + "for s in train_smiles:\n", + " tokens = tokens.union(set(c for c in s))\n", + "tokens = sorted(list(tokens))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "vgzyJ1-42aog" + }, + "source": [ + "Create the model and define the optimization method to use. In this case, learning works much better if we gradually decrease the learning rate. We use an `ExponentialDecay` to multiply the learning rate by 0.9 after each epoch." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "NHKrymnM2aoh" + }, + "outputs": [], + "source": [ + "from deepchem.models.optimizers import Adam, ExponentialDecay\n", + "max_length = max(len(s) for s in train_smiles)\n", + "batch_size = 100\n", + "batches_per_epoch = len(train_smiles)/batch_size\n", + "model = dc.models.SeqToSeq(tokens,\n", + " tokens,\n", + " max_length,\n", + " encoder_layers=2,\n", + " decoder_layers=2,\n", + " embedding_dimension=256,\n", + " model_dir='fingerprint',\n", + " batch_size=batch_size,\n", + " learning_rate=ExponentialDecay(0.001, 0.9, batches_per_epoch))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "hSr7FkSW2aok" + }, + "source": [ + "Let's train it! The input to `fit_sequences()` is a generator that produces input/output pairs. On a good GPU, this should take a few hours or less." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "NZ5l_g1E2aok" + }, + "outputs": [], + "source": [ + "def generate_sequences(epochs):\n", + " for i in range(epochs):\n", + " for s in train_smiles:\n", + " yield (s, s)\n", + "\n", + "model.fit_sequences(generate_sequences(40))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_lxf1lmX2aoo" + }, + "source": [ + "Let's see how well it works as an autoencoder. We'll run the first 500 molecules from the validation set through it, and see how many of them are exactly reproduced." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "NXDBtIvn2aop" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "reproduced 161 of 500 validation SMILES strings\n" + ] + } + ], + "source": [ + "predicted = model.predict_from_sequences(valid_smiles[:500])\n", + "count = 0\n", + "for s,p in zip(valid_smiles[:500], predicted):\n", + " if ''.join(p) == s:\n", + " count += 1\n", + "print('reproduced', count, 'of 500 validation SMILES strings')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Rt9GLy502aou" + }, + "source": [ + "Now we'll try using the encoder as a way to generate molecular fingerprints. We compute the embedding vectors for all molecules in the training and validation datasets, and create new datasets that have those as their feature vectors. The amount of data is small enough that we can just store everything in memory."
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "kdUfsbtZ2aov" + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "train_embeddings = model.predict_embeddings(train_smiles)\n", + "train_embeddings_dataset = dc.data.NumpyDataset(train_embeddings,\n", + " train_dataset.y,\n", + " train_dataset.w.astype(np.float32),\n", + " train_dataset.ids)\n", + "\n", + "valid_embeddings = model.predict_embeddings(valid_smiles)\n", + "valid_embeddings_dataset = dc.data.NumpyDataset(valid_embeddings,\n", + " valid_dataset.y,\n", + " valid_dataset.w.astype(np.float32),\n", + " valid_dataset.ids)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "lVvfGr562aoz" + }, + "source": [ + "For classification, we'll use a simple fully connected network with one hidden layer." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "tFmnnVNm2aoz" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.0014195525646209716" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "classifier = dc.models.MultitaskClassifier(n_tasks=len(tasks),\n", + " n_features=256,\n", + " layer_sizes=[512])\n", + "classifier.fit(train_embeddings_dataset, nb_epoch=10)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "khdB2v7R2ao2" + }, + "source": [ + "Find out how well it worked. Compute the ROC AUC for the training and validation datasets." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "ZlilhPvm2ao2" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Training set ROC AUC: {'mean-roc_auc_score': 0.9598792603154332}\n", + "Validation set ROC AUC: {'mean-roc_auc_score': 0.7251350862464794}\n" + ] + } + ], + "source": [ + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean, mode=\"classification\")\n", + "train_score = classifier.evaluate(train_embeddings_dataset, [metric], transformers)\n", + "valid_score = classifier.evaluate(valid_embeddings_dataset, [metric], transformers)\n", + "print('Training set ROC AUC:', train_score)\n", + "print('Validation set ROC AUC:', valid_score)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "ixqbRXnW2ao6" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "colab": { + "name": "11_Learning_Unsupervised_Embeddings_for_Molecules.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb b/examples/tutorials/17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb deleted file mode 100644 index f4e7c56810c2bb6ff109c73e5e63faf99166e2eb..0000000000000000000000000000000000000000 --- a/examples/tutorials/17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb +++ /dev/null @@ -1,453 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "_PGI_Rvgr0bo", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 17: Training a Generative Adversarial Network on MNIST\n", - "\n", - "\n", - "In this tutorial, we will train a Generative Adversarial Network (GAN) on the MNIST dataset. This is a large collection of 28x28 pixel images of handwritten digits. 
We will try to train a network to produce new images of handwritten digits.\n", - "\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "4qlydaTAr0bv", - "colab_type": "code", - "outputId": "6cd0618b-b782-49c3-d329-7e1d65773c7d", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - } - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 9794 0 --:--:-- --:--:-- --:--:-- 9794\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: 
FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.4 s, sys: 524 ms, total: 2.92 s\n", - "Wall time: 1min 55s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "06xelFpir0b6", - "colab_type": "text" - }, - "source": [ - "To begin, let's import all the libraries we'll need and load the dataset (which comes bundled with Tensorflow)." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "23zZTDoar0b7", - "colab_type": "code", - "outputId": "a8572f18-6c52-4512-faaf-7a2192b56c95", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 530 - } - }, - "source": [ - "import deepchem as dc\n", - "import tensorflow as tf\n", - "from deepchem.models.optimizers import ExponentialDecay\n", - "from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Reshape\n", - "from tensorflow.examples.tutorials.mnist import input_data\n", - "import matplotlib.pyplot as plot\n", - "import matplotlib.gridspec as gridspec\n", - "%matplotlib inline\n", - "\n", - "mnist = input_data.read_data_sets('MNIST_data', one_hot=True)\n", - "images = mnist.train.images.reshape((-1, 28, 28, 1))\n", - "dataset = dc.data.NumpyDataset(images)" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From :10: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please write your own downloading logic.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/base.py:252: _internal_retry..wrap..wrapped_fn (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use urllib or similar directly.\n", - "Successfully downloaded train-images-idx3-ubyte.gz 9912422 bytes.\n", - "WARNING:tensorflow:From 
/tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", - "Successfully downloaded train-labels-idx1-ubyte.gz 28881 bytes.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.data to implement this functionality.\n", - "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use tf.one_hot on tensors.\n", - "Successfully downloaded t10k-images-idx3-ubyte.gz 1648877 bytes.\n", - "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", - "Successfully downloaded t10k-labels-idx1-ubyte.gz 4542 bytes.\n", - "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "qijPRZXOr0cI", - "colab_type": "text" - }, - "source": [ - "Let's view some of the 
images to get an idea of what they look like." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "mmhulNHor0cK", - "colab_type": "code", - "outputId": "a0e60e8e-6df4-48dd-eca6-ae9068856b6f", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 197 - } - }, - "source": [ - "def plot_digits(im):\n", - " plot.figure(figsize=(3, 3))\n", - " grid = gridspec.GridSpec(4, 4, wspace=0.05, hspace=0.05)\n", - " for i, g in enumerate(grid):\n", - " ax = plot.subplot(g)\n", - " ax.set_xticks([])\n", - " ax.set_yticks([])\n", - " ax.imshow(im[i,:,:,0], cmap='gray')\n", - "\n", - "plot_digits(images)" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAALgAAAC0CAYAAAAn8ea8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydd3Rb53n/P9gAAZAASBAc4BCXKFIcEilSsjYlUSuyrSiOm8Sx06Yjbuo2TZM67ek4SZq2qXPSNG2Gm6RJjtM6cmLFsWQn2luiRWpRJMW99wBBggPEur8/+Ls3ovYAQcrm9xwcyyDA+/Dii/d93md9ZYIgsIAFvF8hn2sDFrCA2cQCwRfwvsYCwRfwvsYCwRfwvsYCwRfwvobyQV4sk8nmdchFEAQZLNgZRAwKgmCF+W+reE9vxsIKvoC7oW2uDXhULBB8Ae9rLBB8Ae9rPJAPvoAFBBtms5mPfvSjrF69mqGhIb7zne/Q1taG1+sNyu9fWMEXMCdQqVTY7Xby8vLYunUrW7ZsYeXKlcTExKBQKIJ2nYUV/D4hk932kM5CLc+DQyaTYTKZ2Lp1K6WlpezevRuA3t5eYmJiUCqDR8sFgt8Fer2eqKgonnzySUpKSsjOzkYQBGQyGRMTE3R3d/OTn/yEyspKamtr59rcxwI6nQ6LxcInP/lJtm/fzvLlywGor6/n4sWLXLlyhampqaBdb4HgN0Eul6PRaMjOzmbRokWkpaXxxBNPkJubS0JCgkTwqakprFYrW7ZsQRAEWltbcbvdIbFRJpMRExPDE088QXR0tLSlezweLly4QH19PRMTEyGx5UEgk8mIjY1l+fLlbNy4kbS0NNRqNdeuXePgwYOUl5czNDSE3+8P2jVDRnCZTCZtPeK2LpfLCQQC0mOuoVKp0Ol0WK1WnnrqKYqKiigoKMBkMgHMsFOpVBIdHc2OHTtwuVyUlZXR2dkZEpdFp9ORmZnJF77wBfLz81Gr1fj9fgYHB/n6179OT0/PHQkuk8nmzK3SarVkZGSwc+dONm7ciFwuZ2hoiEOHDvGjH/2IxsZGfD5fUK8ZMoLHxsZSUlKCIAhMTU0xOTlJamoq7e3ttLa2cuXKlVCZckc8/fTTbNq0iU984hMolUoUCoW0OjqdTkZHRxkeHgamvwzh4eHExsby8Y9/nOXLl7Njx45ZXzlVKhUvvvgimzZtYsWKFcjlclpbW7l8+TKvvvoq169fZ3Bw8Lbv1el06PV6hoaG5oTkn/
nMZ9iyZQubN29GpVJx/vx5Tpw4wVe+8hU8Hs+sLHKPRPCMjAyio6OxWq0sW7YMrVZ7x9eGh4eTlpYGgM/nw+v1EhkZidPpZHh4mK6uLoaGhmhqauK3v/0t4+PjIf8QFi9eTFZWFnq9fsa1+/v7eeedd3jvvfckghsMBhYtWsRnP/tZIiIiSEpKuuNBNFjQ6/WsW7eO9evXk5OTg1wup7y8nHPnznHo0CGuXbvGyMjIbYlSWlpKZmYmKSkpvPvuu1RXV9PV1TWr9oqwWq0UFxezc+dOFi9ejFwu58yZM+zfv59Tp07Nqmv3UARXKBRYLBYKCgpIT08nKSmJ0tJSDAbDnS+kVBIWFgZMuyiCIKBUKvH5fPh8PtxuN52dnZIP2dDQwNTUVMhcF7VaTWJiIvHx8Xi9Xvx+v/SoqamhrKyMY8eO0d7ejt/vx2g0kpGRwac+9SnMZjMmkwmTyYTX68Xj8QTdPoVCQUREBCUlJeTm5hIfH08gEOD8+fMcOXKEo0eP3va64pliw4YNrFq1iqysLLq7u+nt7Q0JwdVqNbGxsZSWllJQUIBOp2NoaIgTJ05w4sQJLl68OLsGiGS7nwcgAILVahVeeeUVoampSfD7/UIwMTQ0JBw5ckTIzc0VjEajIF7zfh4323m/D7VaLWRmZgpHjx4VHA6H0NraKly+fFk4ffq08NZbbwn5+fnCiy++KHzta18TEhMTBa1WKwCCXC4XGhoaBEEQhPHxceGll14SsrKyZsXO2NhYYfv27YLL5RJ8Pp/g9XqFkZERISMjQ5DL5Xd8X3h4uFBUVCRUVlYKLpdLmJiYEF5++WUhNzf3fq5b8bD3VHxkZmYKL774ojA0NCT4/X7h2rVrwre+9S0hLCxM+P8FXEF53ImzD7WC63Q6NmzYQFRUFG63m76+Pjwezy0uxdTUFA0NDdTU1OB0Om/7uxISEkhKSmL16tWYzWbCw8NZtmwZn/vc5zh8+DDvvPMOo6OjD2PmfcPr9dLR0cErr7yC2WxmdHSUoaEhfD4fgiCQlpbG+vXrSUpK4vXXX8fhcGAymVi6dCkajYZAIIDP5+P8+fP09PTMio2FhYVs375dcgNra2v54Q9/SH9//113uZiYGJ555hliYmLQaDRMTExQVlZGb2/vrNh5M0pLS9myZQtGo5GrV69y7NgxfvnLXzI1NRUSF/ShCO7z+Whvb5e24/r6eiYnJ29L8MbGRmpqahgZGbnt77Lb7SQnJ+P3+ykqKsJms2GxWFi1ahUOh4Nr165RU1Mzq66KIAiMj49z8eJFFAoFExMTjI2NEQgEUKlUZGZmEh4eTnh4OAA2mw2r1crGjRvR6XQEAgGmpqbo7u5mbGxsVmyMj48nMzMThULB8PAwLS0tHD16dMahVi6fmZjWaDRER0eTn5+PXq/H7XbT3d1NW1sbLpdrVuy8GampqaSlpaFSqbh+/TqVlZXU19cHNRR4NzwUwQcGBnj55ZdJSUnB7XZz6dKlhz4UVlVVoVKp+O1vf8vXvvY11qxZQ2JiIpmZmQwODtLe3k5dXV1IfPGBgYHbPj86OkpfXx8qlYqxsTHWrFlDcXExzz77LCaTCY/Hw8jICCMjI0GrobgZVquVxMREYDopcunSJaqqqoBpYsvlctRq9Yz3xMXFkZ6eTm5uLmq1mqamJs6ePUtvb2/IYvYxMTHYbDYEQeDkyZNcunQJh8MRkmvDQxLc6/XS1tZGd3e3tHo9ynbj9Xrp7Ozk9ddfZ2hoiJdeegmAnJwc9Ho9P/rRj2aNODcjPDycjIwMKakzPj5OR0cHSqWSoqIifvnLX2K1WjEajRgMBlwuF4cOHeInP/lJyEgjhjBhOvKzcuVKNm/eTFFR0Yw6DoVCIWUOR0dHKSsrC9m91Ol0FBcXk5ycjNfr5be//a10SA8lHjpM6PV6g3qjfD4fTqdzhiuj0WikJEuoIJ4BVq5ciUqlwu12MzAwQGZmJnq9nvT0dCka5Ha7qa6u5vLly1
y7dm1Wd5mOjg6qq6tJSUnBarWSk5PD7t27WbZsGUuWLCEvL4/U1NQ7hirr6uqoqamhpaUlJLuhRqMhJyeHiIgIxsfHqaysZHh4eFYiTHfDvEnVq1QqVCrVLYU2MpkMhUIRsgxceHg4hYWFfPjDHyYiIgJASs/DdPxbEAQmJycZGBjgxIkTXLhwYdZDbrW1tZSVlbFr1y7sdjt6vR6LxcKyZcvQ6XQIgoDH40EmkyGTyVCpVDPeX15ezrVr1+jr65tVO0VoNBry8/MxGo0MDg5y+fLlu9aYiG6WSqVCLpfj8/mCUpMyLwgeERHBM888Q2ZmJllZWdLzYux86dKl1NfX3zFDNxu4ITwm/b+I69evc+HCBf7nf/6HysrKkNR9mM1mbDYbME0G8SCuUCjo7++nubmZw4cPo9friYuL4/d+7/dm2F5bWxuyxA5Mu0dmsxmVSoXX62V4ePiuO0d6ejpZWVls27aN6Ohojh07xmuvvXbH6Nv9ImQEVyqV6PV6cnJySEtLIyMjQ/qZTqdj+fLlmEwmzGaz9LxCocBoNPLSSy/xwx/+kKNHj866nSMjI5SXl5OVlUV0dDRarRar1YpKpUKhUCAIAufOnePEiRPU1dUxPj4eki2/paWFs2fPzsiYCoJAVVUVvb29UnQkLy9PcqEAJicn6erq4tq1ayEjuFjGILp1d9p5FQoFVquVP/7jPyY5OZnY2FgWLVpEWFgYgUCA0dFR/u///u+RXOFZJbhMJkOn06FSqTAajcTHx1NSUsKKFSsoKiqSXqdQKIiMjLzt+7VaLWvWrOE3v/nNbJoqweVyceXKFZYsWUJ0dDQGg4G0tDQiIyMxGo2EhYXR29tLR0fHPVelYKKrqwuv14vJZJpB8OPHjzM4OMjo6CiBQICoqCiys7Ol942NjXH9+nVaW1tDFr1QqVQYDAYSEhLuWL5hMBgwm80sXryYF154gejoaHQ6nXRInpiYYHBwkL17985fgut0OlatWkVGRgZZWVls376dxMTEW/zDu8Hr9fL222/T1NQ0i5b+DqOjo1RUVFBRUYFMJkMul5OWlsZTTz3Fpk2b2LRpE88++yzR0dFcv349ZCR3uVy4XK573gePx8Pk5KT0/93d3fzsZz/D6XSGrLZHLpdLO/btDr0qlYpt27axfft2Vq5cSUpKym1fc7fSj/tF0Aku1iqXlJSwcuVKVq1ahV6vx2AwEBkZiVKpZHJyktHRUSl2Pjo6yr59+/D5fCQkJPDkk09KnR2CIOBwOIIaglOr1Wi12ntmSAVBwO/309HRwd69e7ly5Qomk4mEhATy8vLYtGkT77777qwldx4GN54dxMNwV1dX0MtQ7wafz8fExASdnZ1ER0dLz2u1WuLi4sjJyeFLX/oSsbGxUvLsZvj9/vl3yDQajURHR7N8+XI2bdrEypUrSU9PJxAIMDExQXt7Ox6PB4fDQV9fn1S2OTo6yqFDh/D5fOTk5LBu3Tqio6MRBAGv10tXV1fQSBQeHk5cXByJiYn09fXR2dnJ0NDQXd8zMTFBW1sbk5OTXLhwgdjYWCIjI0lNTX2g3SgUMJlMxMTEANO70cDAAH19fSHLHMLvCN7f34/JZJLCqxqNhri4OPLz88nNzb3jvRsbG6O7u5va2tpH3h2DSvDU1FS2bdvGCy+8QFxcHAaDgfHxccbHx2lsbGTfvn0MDAzQ2dlJU1PTbRsE1Gq1lPb3+XyMjY1RXl5Of39/UGzMzMxky5YtPPnkk9TX1/OTn/zkvg+vLpeLX/ziFxQXF2Oz2UhISAhq/2AwsHTpUtavXw9AQ0MDlZWVIXPvRIgEb29vJzExkbi4OF544QXMZjMWiwWLxXLH9wr/vzvq+PHj/M///M8j51qC+ulYrVby8vJISkqitbWV69ev84tf/ILr168zNDTE2NgYfr9fqge/mdyJiYmkp6cTGxuLUqnE6/Xidrtpbm6+Yy3L/UI82X/+858nNjaW4eFh/u7v/u6+vzgqlY
r4+Hi+/OUvk5qaOq/cEhEGgwGDwSBFUQ4ePMjhw4fnxBaXy8XevXtJSUkhOzub3Nxc5HL5HTvmBUGgt7eXEydO8IMf/IDGxkYcDscjnxuCSvCenh7Onz+P3W6nvLycixcvUlFRQVdX14yDz51gMpmIiooiIiICuVyOy+Wis7MzKHXhYWFhpKenk5GRgUwmo7Ozk76+vvvy7U0mE/n5+axYsYKMjAz0ej3Dw8MhjaLcC3K5nJiYGKKiojAajQQCARwOR0jrPm7E1NQUV69elaoy7xRN8Xg8tLa20tHRQWNjI8ePH5eqT4Nxb4NK8IaGBvr7+5HL5Zw4ceKB29AsFotU5wEwNDREdXV1UP5Qg8HA0qVLiYuLY2BggKGhIammQxCEGdcQs6cKhQKVSkVKSgp79uxhx44d2Gw2/H4/LpeLjo6OkNXI3AtyuZz09HTi4uIIDw/H6/UyMTERsvqYm+HxeKitrWVwcJDJyUm0Wi0+n0/qaxV38NHRUU6fPs2pU6eoqamhoqIiqHYEleBTU1MMDAzw3e9+96FO7WlpaVJbm8vloqKigtdeey0oJBoZGaGsrIypqSliY2NZtWoVu3fvpqOjg97eXmnsg5jYWb58OUuWLGHZsmWUlpZK8XyA3/zmNxw5ciSkRWD3gkaj4e///u/JyMggEAgwNjZGfX19yP3vu+HAgQO0trbS1tbGL3/5SzweD36/n/Hxcal7KtgI+glJrIl4EFgsFl588UXWrFkjEbyxsZG6ujoaGhqCsoKLtdCVlZVkZmaSmJjIpz71KVwuFyMjI9TU1ADTBLfZbCQlJREVFYXVasVgMCCXy5mYmOD06dMcOHCAS5cuzRtyw/Suo9frpTZA0S0MVez7bujt7eXw4cP8/Oc/p6enB5fLJTVqiKHY2cK8CAGEh4fz3HPPER8fT1hYGH6/n9raWsnlCQZ8Ph/Dw8NcvHgRvV7PypUr2bBhAzC984grnUajkbKWYgPB1NQUExMT9PT0cPDgQc6dOxfyss/7gViwJI6QmA9fQLGk+p133uHYsWOMjo6G9Es3LwiuVColcguCwNjYGPv27Qu6PyYIAt/+9rfp6OjAZDKRl5cnlQPcOLVKhNgM3djYyG9+8xtOnjzJ4cOH58WqeDeIfu58sPMv//Iv53QWy5wTPCMjg+LiYjQaDV6vl+bmZt544w3Ky8tnpbRzdHSUo0eP0tvby6c//WlUKhV6vZ7169dTXV3NwMCAVB3Y2NjIpUuXaGhoYGBggOHh4XlBmntBq9WSk5PD008/TUREBMePH59Te+byns0pwWUyGSkpKSxbtgyZTMbg4CD19fUcPXr0vkN4Dwqfz0d3dzcul4uEhASJ4GLzwuDg4AyCV1ZW0t3dPe+JHQgE6OzsJCIiAq1Wi8ViISEhQcpqflAxpwRXKBQUFBSwZcsWfD4flZWVnDx5kjNnzszqdT0eD0NDQ3z/+9+XnvvXf/3XWb3mbMPn83HgwAHkcjlGo5Hw8PBbato/iJA9yA0IthCRQqFg586d5OXl0d7eTnl5Od3d3Q9d5C48JuJOs2VnREQEer0erVaLXC5ndHRUmhDwkLgoCEIhPD739GbM6QouCAL19fWMjIzQ19dHe3v7vJyK+rhA7OxfwO8wpyt4sPFBX8FnAR+4FXyQ+Sstl3TDvxfsDA4eF1uT7vSDhRV8DvC42MmCEOwC3ueYryv2fWOB4At4X2OB4At4X2OB4At4X2PO4uByuZyMjAwsFgtyuZzKykqpLngBD4+oqCgiIyOJioqisbFxTuYB3g0qlQqtVkt8fDxRUVFoNBp6enpoa2tjfHw8+Be802T82z0I0jR+QNBqtcJ//ud/ClevXhXq6uqEgoKCB1Z0uPkxG3be/JDJZHd8zAc7t2/fLnzjG98Qqqurhd27dwsxMTGP8vseWeHh5ofFYhHy8/OF73znO0JNTY3gdDqFV199VcjMzAzKZx8UhYdgQBAEuru7cbvdWCwWEhMT6ejoCN
lg9gdFQkICqamp7Ny5kyeeeAKr1Sr9rK+vj9bWVk6ePElFRQVNTU1z8ndERETwxBNP8OSTT5KYmDhDj2c+oLS0lLVr17J161ZSU1PR6/UArF+/np/+9KfS6zZt2sTw8DC1tbWPnNmeU4I7nU48Hg9arZa8vDyuXr0atAaHYEGpVJKfn09BQQHZ2dkUFxezePHiGQNroqOjpTFvZrMZs9kc8hJVhULBkiVLSE5OxmazoVar0Wg0cz63RRzfFx0dzfbt2yXhMvH++Xw+9Ho9crkcg8FAcnIyu3btYnJykvr6ekkW5mFLEOaU4MPDw0xNTaHValmxYgVvvfXWXJlzWyiVSsLDw6WxbUuXLpVWHTFBFggEMBgMhIeHS02/0dHRnDhxIqSVfAqFgpUrV5Kamio1bc8HiDIqxcXF7Nmzh9jYWKlTSmxXEwUUzGYz69ev59lnn5V88//4j//gzJkzjI+PP1Sf75w3PIgQBZ/mEwoKCvjwhz/MZz7zGcLCwqQO/Btx9epVzGYzixYtAqYbp6empjAYDExMTITs0KxQKCgqKpp39d87d+5k06ZNvPDCC2g0mhkdUwMDA7S2tvL666/T3t6OSqXCYrGgVCqJiIjAaDTyla98hZ///Oe8++67HDp06IGvP6dRlJycHCIjI3G73VRUVMybSji5XM6mTZvYsGEDpaWl0iAdp9NJWVmZFJ0QR0fk5uaydetW8vLypMHtoWwZi4iIICEhgfT0dGlo/3yAOKslLi7ulrkoIyMjnD59mrNnz3L06FEcDgcxMTEzvgByuRyTycS6deuwWCzo9XrOnj37QG7snBBcoVAQFhZGdnY2ZrOZiYkJLl++PC+mRYluSUlJCWvWrCEnJwefzyeNnDt48CAXL16ku7sbh8PB+Pg4TqeTlJQUcnJycLvd0gSvUEGv12O324mPj5dcqLmGOBL75q4iv9/P5OQk169f58yZMxw5ckSaaCCOkBgdHSUsLAytVotKpWLJkiVERUXR0dFBZWXl/Ce42WwmMzOT9evXI5fLqa2t5dixY3M2pOZGREdHU1paygsvvIDNZiMQCNDW1sYPfvAD9u/fT11d3S3v8Xg8Uge7w+Ggs7MzpH+LWq3GZDIRERFxi9LaXEAmk2E2m/n0pz/N7t27pVEgMB1xqqqq4o/+6I8YHByccZ8cDgcnTpzAbrdTXFxMUVERMpkMtVqNUqmksbHxviak3Yg5Ibg4P1ocjywqtc2HMWixsbE8++yzGI1GyQX5h3/4B6qrq+ns7JReJ+rJFBcXs3btWpYsWcL4+DhvvfUWe/funRPbxe1dVEcYHBx8ZAmQh4HNZmPp0qV86lOfIi4uTnq+qamJgwcP8vrrrzM4OHhLAmp8fJz6+nq+973v0d/fT3JyMtHR0TPclgd1++aE4GFhYcTExKBQKKTD5XwgN0wP7U9OTkatVtPW1sbFixc5f/48DodD+kDCwsKIjIwkIyOD9evXk5+fT0xMDA6Hg8bGRmlKVqgQHh5OYmKiRAS/38/Q0BD9/f0hPdfIZDJsNhu5ubkUFRVJK7c4JfjUqVOcPn2aS5cu3XaHE0fiuVwuurq6grILzgnB4+PjJQGlUA5mfxAIgkB1dTWvvfYaAwMDkk8tl8ux2+2sXr2aL37xi6SkpKBWq/H5fNJhKdRITU1l165d0ihnj8dDc3MztbW1dHR0hMwOpVLJxo0b2bVrFytXrpSeHxsbo7q6mn/6p3+io6Pjoc8nD3NonxOC63Q6oqKi7qjpOF/gdrulIf0w7eump6fzta99jeLiYklFrKuri+rqar74xS/OycQrk8lESkqKFF+enJzkyJEjDA8Ph8wGg8GA3W7nz//8z1m0aJEkJlZfX8+ZM2d45ZVX6O7uvi9yx8fHEx0dLUWvpqamGBkZkSYNPwjmhODi9Nb5iBuVm1NTU9m9ezcZGRm4XC5J7TgnJ4fo6GgCgQA1NTVcuXKFM2fO0NraOidN0zfLeE9NTVFVVRXSqF
RKSgqlpaUsWrSIiIgIaTdpbm6mpqaGtra2+x4ll5WVJamtyWQy3G43TqeT1tbWx+OQKYo7we+yWfMFHo9Hmvudl5dHZmYmtbW1kgzIpk2bpFHA4+PjHD9+nCNHjnDw4MGgaMo8KMTD+o2YmpqipqZmdqrz7mBDbm4uv//7v09kZCRyuVw6V9XU1FBTU3Pf90Yul7Ny5Uqys7OlkOfk5CSDg4M0NTXN/0OmUqnEZDJht9uRyWQMDAyE1E+8F+rr6/nCF77Avn37sNvtKJVKli1bJt1YcecRT/t79+5leHh4zs4S27ZtY8WKFRiNRmQymaSy5vf7Q5ZoEnMGopvk9XoZGxujsrKSY8eOcfny5fv6PWq1GovFwp49e0hPT5eeFyNCD4OQE1ytVhMZGSndjJ6eHhobG+dFmj42NpaEhAQKCgrQarXSGUGUEofpHae5uZmKigqOHDmC0+mcE3LL5XK0Wi3FxcVkZGRIq3hdXR3nz58PqV2RkZFERERIhV1Op5OGhga+9a1vUVVVdU81OxFms5k1a9ZgsVgkl8vv91NVVcWZM2cej0OmRqMhIiJCkpfr6+ujpaUl1GZIEFUcoqKiWLp0qTQMVKfTSa+ZmprC4/EQCAQk+ZKenh7a29vnbOVWKBTodDpycnJITEyUXL62tjauXr0aUv/bYDCg0+mkRcDhcNDQ0MD+/fsfiJQGg4GsrCx0Op3094yPj1NbW8vFixcfyraQEzwyMnJGOrmpqYny8vJQmyHBZDKRnp7OP//zP5OXl4fJZJJ+JvqRVVVV1NbW4nK52LVrFykpKUxMTPDEE09w9uzZBz74BAOizHleXh6JiYnAtGju0NDQnNd/19fXU1ZW9sArrhh8uDGeX1VVRUVFxQPL4YgIOcF37dpFUVERfr+fy5cvc+3atTkJrYnlpU8++SRr1qwhMzNT8mNFN6Sjo4OGhgb27dtHX18fgUCA8vJyPve5z5GWlsaLL75IVVXVnBBcdFFudJ1Ev3e2h5feC93d3TQ0NDzQezIzM1m7di2f+MQnpHJfv9/PlStXHtr/hhATXCaTSSuOKBs3NDQU8u4XcbxwSUkJ69evZ9myZcC04KvL5aKnp4dLly7R2tpKY2Mj7733nhSRmJiYYNu2bRQUFLBs2TKioqJwOp0hr6MxGo1kZ2dLvqogCPT19dHd3c3AwEBIbbkZBoPhrlqYIhQKBWq1GpvNxurVq1m9erVUdixGqWpqau4p1Hs3hHwFX7JkCXa7HUEQGB8fx+PxhPyAGR0dTUFBAZ///OclPfSRkRFaWlqorKxk7969vPfee7etCmxubuadd97B4/Hw0Y9+lPT0dFwuF21toZ2Rk5CQwGc+8xmpPNbv93Px4sV50RFVXFyMTCZj3759d/1sw8LCsNls7NmzhxdeeGFGUdb4+DidnZ0cP36crq6uh7YlZARXqVSYTCY0Gs0MP2suYDabycjIQK1W43K5aGxs5OWXX6ajo4PR0VHGxsZwu9231MfIZDJUKhVpaWmkpKSgUChQKpUh/1uMRiNms5nw8HDkcjm9vb3U1NTwta997ZHIEEz7rFYrNpttRg0PTJf2RkZGUlpayhNPPEFOTg5JSUmEh4fPSP6dOXOG7373u7S2tj5SfiFkBI+MjGTjxo2YTCaUSiWBQICGhoY5qXZTKBRoNBrkcjl9fX3U1tZSVVWF0+m8Y7ZNoVBIdeLLli3DbrcDzMkXNTo6GrvdTkxMjKQI7XK5GBgYwO12h1wTp7m5mdbWVoaGhqTGhISEBJ555pnbEtxsNrN27VoWL15MfHz8DJ97YmKC1tZWrly5wvXr16Ws8sMiJASXyWTEx8fz3Fol5QgAACAASURBVHPPERkZiUKhYGJiYtZ0eO6FQCAguR49PT3U19fjcrlum1EVs65Go5FFixbxp3/6p+Tn52M2myVtx1C7WHa7nbS0NCl6AtM+uF6vx+fzScmeUOHq1askJiayatUqTCYTBoOB1NRU/uIv/g
KXyzVj0dBqtRgMhhnVj6L9ExMTdHR0cOTIES5cuEBvb+/8kvK+E8SxEDk5OWg0Grq7u7ly5QonT56ck/EK4kgCt9vNihUrSE1NRafTsXfvXhobG6V6Eo1Gg9FoJDk5mRdeeIFt27aRlJSEQqFgfHyc9vZ2qqur6enpCfnfAEiqcHa7ndjYWDZv3szhw4c5cuQI3/ve90Jmx9DQEGfPnkUQBL71rW+h1+tRqVQkJyfflqAymWwGuf1+Px0dHZSVlfHKK6/Q2NiI2+0OSo4hJAQPCwvDZDJhNptRKBT09PTw3nvvPfL287AYGhri6tWrvPXWW6xatYr4+Hi2b9+O3W6ns7NTOjBGR0eTmJiI2WwmKytLqmHv6+ujrq6OX//61wwMDIQ82eN2uyWZbrGRV6lUEhYWRiAQCHlER4yIVVRU0NraSmxsLEaj8ZYm45sxOjrKwMAAx48f5/Lly9TV1UkFa8HqDwgJwTUaDWFhYej1eqampujq6uLKlStzVmQ1NjZGW1sbp06dwm63k5yczNKlS0lLS2N4eJj6+noA4uLiSEtLk84MYiKlurqasrIyDhw4wOjoaMibNVwuF4ODg3R2dko7iiisNVeDfkZHR2lqauLSpUskJSVJh0ydTodarUalUjE8PDzjM+/s7KShoYG33nqLCxcuPFI48E4IeZiwqqqK8+fPc/LkyTltdggEArS0tNDc3ExCQgJGo1FqkhUPkDdicHCQ9vZ2Dhw4wN69e2lpaZkzJeGamhqcTieTk5P827/9GwqFgubmZr7//e9z9uxZ6QsaakxOTvLpT3+aiIgIYmJi+NCHPsSaNWvIyMhg8eLF/PjHP2ZgYEDatS9dukRtbe2sRn5CovAgNjgsXryY0dFR+vr66OzsDPoK/iDKCTKZDJPJhM1mIyoqCovFwoc+9CGJ3FlZWcD0alleXs7p06eprq6mv7+f/v7+R9KBD4bCg1i0lp6ejkwmY3Jyks7OTpxOZzBr0h9Ko0epVKJWq7FarZhMJnQ6HQaDgdbW1hmLgqgCF4wy4zspPCxImDCd9g4PD6e0tJTY2FhkMhlZWVlS+ru8vJzy8nJJz36u7JwDPPYiVAsEnwM8LnbyPiD4wgD8BbyvsUDwBbyvsUDwBbyvsSAEG3o8LnbC42Nr0p1+8ECHzAUs4HHDA63gj8tJesHOoGFB6XgB72vMV5fkvrFA8AW8r7FA8AW8rzFvNHrmG1QqFWq1Wqqj0ev16HQ69Ho9g4ODUj9pZ2cnXq9XqkuRy+XI5fJ5OzU31AgLC0Oj0aDT6VCpVJIQ7I3tix6PR1LMeJQO+tthgeB3QEREBDabjeTkZEpLS8nMzCQ5OZm0tDQOHTpEY2Mj3d3d/PjHP2ZwcFAitPhBhqoV735a5uYyUpaQkEBcXBwJCQlERkZitVqJjY2luLiYuLg49Ho9vb29/O///i8nT57kN7/5TVCvv0DwmxATE8O6det47rnnSEhIICoqirCwMKmmGWDNmjUUFRXh8/nw+XycPn2asrIyAPbs2UN2djYvv/zyrNopSvN98pOfJDExkcjIyFte09PTw4ULF3jllVcYHh6esdPMNnQ6HcXFxfzVX/0Vubm5Ug+sqO6h0+mk5nObzcYf/MEfsGLFCkwmE/v37w/aZK6QE9xgMGCz2UhISCA6Ohq9Xk8gEOC9996jvb19TsYPi9DpdNjtdkpKSiQFuLCwMKn30uv14vV6pS0XplV5YboNrqGhAavVKvUbzhaZRMGr3bt3U1hYSFRUFAaDAZ/PJ7WDKRQKoqKi0Gq1OJ1Ourq6uH79OlevXg1Jv6ZGoyE3N5fU1FRp0KrX62ViYoLR0VFGR0cZHx/H7XajVCqJj48nMTGRFStWcPToUcbHx4Ny/0JGcLGtym63U1hYyPr168nOzsZut+Pz+fiXf/kX3n333TkluMViIT09na1bt0qCpWLH+uTkpDTDxWKxoNFo0Gg0bN68GYVCQW9vL83NzWi12llTOhMFmY
qKiti8eTPPP/888LshOS6XSxptIYrTFhYWUlhYSFNTE/v27aOnp4eOjo5Z70JSq9VkZmai0+mkxmxxpqM4Tbinpwen04lOp2PLli0YDAZWrFhBeHi4pIL9qAgJwRUKBbGxsaxZs4Z/+qd/Ijo6Go/Hw3//938TExNDTEwMzz33HJcvX57TuR67du1iy5Yt2O12/H4/DQ0NVFRU8OMf/5iWlhbpALR27VqKior48Ic/TEZGhjSGAh5t1O+9YDQa2bNnD3/+538uNWQEAgEuXbrE/v37+d73vieNbHj++efZvn078fHxyOVyUlJSeP755ykqKuLZZ5+VXJbZwuDgIH//93+PIAhkZWUxPDzMN77xDVpbWyXlCUEQpFX6D//wD9m0aRM7d+7kmWee4eTJk5w7d+6R7Zh1gisUClatWkVhYSE7d+5EoVBw6dIlLly4wNtvv43ZbCYqKoro6GiUSiUpKSn4fD66u7tDHonIysoiKysLmUxGe3s7J0+e5I033qCmpobR0VGpmbeyslKaevUXf/EXM35HZ2cnOp0u6O6JTCZDq9WSlZWFyWSSzgOnT5/m2LFjvPPOOzidTsbHx5mYmOBnP/sZfX19LFu2jHXr1mEwGDCZTCxatAidTsfo6OisEjwQCDAyMsKFCxfo7Oykv7+f5ubmW+akiBgfH2dychKlUkleXh4tLS3zn+AqlYqYmBjWrl3LmjVrWLNmDRUVFZw9e5a3336b8vJy9Ho94eHh2Gw24uLiiI2NlSb6h5rgN/ZjdnV1cfXqVU6cOHHL67q7uyVX6tOf/jRer1f60IaGhm5R9Q0G5HI5YWFhLF68GL1ejyAIeDwezp8/z5kzZ6Tpq1NTU0xNTXHmzBncbjejo6Pk5eVJ4TpRKlscTzyb8Pl8VFVVSbvfncgtvlZsYVy0aJE0XvtRMWsEVyqVxMbG8tWvfpXNmzdjNBqpr6/nn//5n6mqqpL8MJfLxfj4OGNjY3zpS18iNzeXtrY2KisrQ+6P6/V6iTxtbW13dTUmJydpbW1lYGCArq4uGhsbJb92Nsij1+uJj49n586dyGQyxsfHaW1t5e23375jk3FFRQUKhYI9e/ZgtVpvkToJBR5mrrfNZguaJPmsfI2VSiUrV67kE5/4BJs3b8br9XLy5Em+9KUv3XZApE6nY+3atSQlJREdHU1cXBxhYWEh/0Cqq6uprq5GEIQZOo83Iz4+nrVr1/Lyyy8TGxvL0qVLeeaZZyR7Z+MAl5KSwvLly5HJZAQCAVpbW/nhD39IS0vLXRUUmpub+dd//Vfa2tqYnJxEoVCwadOmWyZLzRXEw2hxcbF0rhgYGLhvVYh7IegMEuOaBQUFrFmzBrPZzPHjx6VY8cjIyC0EEKMrRqMRlUo1JysNTPvWUVFRJCYmEhMTw6JFi0hLS6Ojo0PqDBfHpmVlZbF27VrCw8ORyWQsX74ci8WCVqsNuoKcQqFg0aJF5ObmAtO7R19fH2fPnr2nVInoBzc2NqLX67HZbKxZs4aOjg46OjpCJlR1M8RxeFFRUZJSdGxsLD6fj9ra2qBNCws6kxQKBUVFRWzfvp2SkhK6u7t59dVXOX369B11G+VyOQaDAYVCQSAQYHx8nKmpqZD74AcOHGB4eJj8/HxSU1NZvnw5zz//PD/4wQ9ITExk6dKl/Mmf/Anx8fEYDAYpcqLX6yX5jejoaOkAGCzodDpWrFhBaWkpgCSpV1FRcc/3ejwe+vr6OHnyJBqNBrvdznPPPUdLSwt1dXU0NzcH1db7gXhgXrJkCQUFBbz00kvEx8ej1WoZGxtj//799/W33Q+CSnC1Wk1UVBSf+9znSElJobOzky9/+ctcunTpjpLSBoOBhIQEtm7dSlRUVDDNeWA4HA6uXLnCd77zHf7xH/+R7OxsFi1axMc//nFUKhUajYbw8PAZI5PFcWM1NTU0NTWxevVqsrKy+MlPfhKUSIoY146MjCQ2NvahfocgCLS3t884UyxfvpyBgQ
H+67/+65FtvBe0Wi02m42srCxsNhsxMTGsWrWK5ORkbDabFBXq7OzknXfe4fz58/NzBRfn4yUnJ6NUKuns7KSiokLSnbwRCoWC9PR0lixZQk5ODmlpaeh0ujmbFgXTej0xMTEkJSVJhNZoNJjN5hlSIUNDQ/T09FBZWUl9fT1dXV10dXUxPDyMQqEIeqJHTJKJrpvb7X7gbKTX650xaEl0uWYDolTkjh070Ol06HQ6TCYTiYmJREREEBERQXp6OhaLRVIzLisr48qVKxw8eBCHwxG03XtWCG6xWHA6nfT19VFfX48gCNKHo1QqUSgUhIWFsX79erZu3UpBQQE2mw2VSiWpmYUaSqWSRYsWUVBQwObNm2eohokQBIHJyUkaGxs5f/48P/jBD+js7MTtdkvkmY0vqEqlmhGZcTqdQTuEzQZEV+jLX/4yUVFRUh3K3Q61Bw4c4PDhw1y8eDGoOYRZO81FRESQmprKhz70IUktQSaTsWHDBnJycqQZ2y6Xi/7+ft58800+/OEPExYWxsDAQEgHc+r1ejZs2MDzzz9PXl4eqampt4T65HI5AwMDfP3rX2f//v2SNPVsfxlVKhVPPvkkKSkp0nMnT57k5MmTs3rdR4FarSYiIgK73X7fIVMxqxnsBFlQCe52u+nv7+fgwYPk5eURFxfHZz7zGXw+n0QEcdAlwLvvvktlZSXNzc04nU42bNgATEsLBqMO4X6g0WiIiYnh937v98jPz8dms0n13IODg/T19ZGeno5Wq8Xv90tx+1DJdotRKXErB6Tir/kKj8eD0+nk0qVLGI1G3G43ra2tjI2NSQudwWAgPT0du91OREQETzzxBDCd0WxqagraThhUgns8HhwOB4cPH0aj0VBYWMjKlSuB32nSj4+PMzIyIok5nT9/npaWFqxWKw6Hg7CwsKBVkt0LYjIqNzeXkpISzGYzMB2HdTgctLa20tTUhMlkIioqSjr9321VEoueggWRDKKa2uMAj8fD8PAwp0+fxmKxSPKG4vhkmUyGxWKhqKiInJwcsrOzyc7OBqbj9mITSTAQdBfF4/Hw6quvUlFRQX5+PiUlJchkMqampnA4HPz2t7+loaGBtra2GSQWZ0VbLBa2bNnCd7/73VmZFy1CXBn/+q//mo997GMYjUbJv/6///s/3nzzTfr7+6Uy2c2bN5OcnMzGjRs5deoUnZ2dt/0SXrx4MSjSG48zvF4v7e3tfOELX7hr2fDPf/5zkpKS+MpXvsLq1atZsWIFgUCAs2fPzv9y2fr6eknJAaaze+K2PzExMcN4cWU0GAyEhYWFRIZapVLx0Y9+VJJVcTgc/OpXv6KsrIxjx47R19cnuUnf//73mZqaYtu2baxcuZL169cD3FZ9V4yszAbE+hOHw/HIHUNOpzMkg/LvRtLJyUna2tr4yle+wt/8zd9QUFDAypUrWbNmDRcvXqSxsfGRrz9rBHe5XLhcLrq7u+/5WplMhk6nkzKZNysBzAYUCgUrVqwgLi4Or9fLe++9x4kTJ7h8+fItmpf19fVcv36dzMxMcnJyWLZsGf39/bcl+Gy3qgmCMKMw6V6Qy+VoNBoSEhJmiLMODg5K9UDBgCjWJcqf3w/EpJ64KCQnJ5OamkphYSEOhyMoBJ8XXfUKhQKTyURkZCRyuZyqqqpZP2QqlUqKi4uJjo7G4XDw9a9/nXfffZfa2trbvr65uZnLly8jCAIbN25k48aNs2rfzRAFp8R2r/utWBSVhHfu3El+fr70vFjrHiyIIWKxkfhB0djYKMl/79y5k+XLlwfFrnlBcK1WS1FREREREXi9XhwOR8iiBOKq43Q673rN6upqjh49Snt7OyqVCrPZjF6vn/WCJbHxQtwZlEolu3btYufOnff1/szMTF555RUKCwulRpNvf/vbM+TJHxURERE89dRT/OhHP+ITn/gE6enp9/U+uVyO2WzmueeeY/fu3VJAQtT1CQbmRdOxQqHAbDZLyaBQ1KAIgsDo6K
jUfpaXlyfJktzug3e73bhcLqampoiIiMBkMpGcnExDQ8OM3cZsNqPVaoOWag4EAtTV1c1wfSIjI+9LCz4tLY1ly5aRl5cn9Ww6HA5OnjxJR0dH0BaRyMhIUlJSKCgoQC6XS4m8pqamW3zwiIgIrFYrarWamJgY7HY769atIysrSyrVqKioCIp7AvOE4DKZjLCwsKBX4d0NgUCA3t5eqWF369atUk9jT0/PLV+ysLAwqVNHlCVPS0ujtbV1BsETEhKwWq1BJXh1dTUDAwMEAgHkcrk0n0Wr1Up9ojcTSaFQsHr1akpKSkhOTkYulzM8PExTUxPnzp3D4XAELdJjtVqJi4sjKSkJu92OwWAgOjqat99++5bXLlq0iJycHCIiIsjPz2fx4sXExcVJZcBut5sDBw5w+fLloNg2Lwg+F/D5fBw7dgyTycSKFSv46Ec/yo4dOxgeHqa1tZXz58/PqPdYu3Yt+fn5REVFMT4+TltbG4cPH76lJmT79u0UFRVx7NixoBBIEASmpqbo7u6mtbWVlJQUoqOjWb16NV/96lf52c9+RkdHBw6HQ3qPOLLhD//wDyksLESpVNLa2sqvfvUrvve970lflmBhaGiItrY2rl+/zpIlSygpKWH9+vX89V//9W1fLxariWMkYDq83Nrayq9//WuOHDkSNAXseUVwMV5+uxU02PD7/Zw6dYr8/Hyys7MJCwsjPDxc0vS0WCwzkg3x8fFYLBbkcjljY2OMjIzgdrtvIbHYdBzMOLjH45G+TJ/97GelHtatW7diMBioqamhqqoKgOTkZJKSksjKyiItLU3yZcvKyrh8+TI9PT1BLy/o7+/nwoULaLVaPvKRj0gRG7Gc+GYEAgGGh4cl4doLFy5QV1dHQ0MDZ8+eDeoZbF4RHKZ7Cnt7e2f9kOn3+6murqampoYlS5Zgt9ulqVQ2mw2bzTbj9aIGvLjNt7e335YoXV1dQe/J9Pl8XL58GafTydatW1Gr1ZjNZpYuXYrZbCYlJUUqpc3LyyMjI4OYmBhUKhVjY2MMDAxw/vx5amtrZ6XBYXR0lOvXrzM5OUlMTAx5eXkkJycTEREhHcTdbjfj4+MEAgHp4GyxWAgEAhw5coSKigpaWlqC5nuLmBcqa2azmT/4gz/gj//4j+nv7+eP/uiPHqoe4WHmbpvNZuLj43nmmWdYvXo1KSkpJCYm3vK6pqYm6uvrOXbsGEePHqWtre2ONe6zYSdM185v27aNT37yk+Tm5pKUdEdhA2D6y3b69Gn+9m//lu7u7oeZbPVQKmtpaWlkZmaya9cuSktLUavVVFRUcOjQIcbGxvB4PBw8eFDqgBLbBB8Fd5oPPq9W8LnoEXS5XLS0tPDTn/6UX//611L98s2YmJhgYmICh8OBw+EIuR48TGf+Tp06RWNjI1FRUVitVjZs2EBCQgIxMTFcv35dSp6cOXOGvr4++vr66O3tDenYtq6uLpxOJ42Njbz22mvA9H0eGhrC7/cTCAQYHR1lcnJSCtPOFuYVwf1+vzTvL1QQrzcXrVsPCr/fT39/P4ODg6hUKoxGIx6Ph4SEBGw2G7W1tQQCAcbGxjh37hwjIyNzUl8/OTkpjf64G0LxOc8bggcCASYnJxkbG2NycnJOmh4eFwQCAWn+yZtvvjnX5sxrzAsfXJTSNhqN+Hw+qYrvQfG4aN88LnbyPlA6nhcreCAQwOl0hmym9gI+OJgXtSgLWMBsYUEINvR4XOyEx8fWBSHYBXwwsSAEOwd4XOxkQQh2Ae9zzFeX5L6xQPAFvK+xQPAFvK8xJ3Fws9lMXFwccXFxdHR00NPT89CFSwt4PKHVajEajRiNRiwWC0aj8ZY2Nb/fz5UrV6ROqofCjSOz7vUAhGA81q1bJ7z66quC2+0W/v3f/10oKioKyu8Nlp0ymeyWR7D+9mDaGYJHxWzYKpfLhaSkJOHJJ58U/uZv/kY4fvy44HA4BEEQBL/fLz1cLpewfft2IS4u7r7v6c2POc
tkigaIvXwXLlyYK1OkntCcnBzsdjtxcXEkJiaSnJxMYmIie/fupbq6mra2Ntrb2yVNHrEaLiUlhfz8fDQaDRcvXqShoWFej1abS8TFxZGdnc1nP/tZUlNTsdls0pSAm+uPNBoN3/zmN/nWt77Fvn37GBgYeODrzXmq3mw2S5qUc1FgJYrSbtmyhaSkJCwWC2azGavVitVqJSoqik2bNrF48WKpkm90dJSuri7KysqIiYmhoKCADRs2oFarpfmMN7aQLWC63igrK4tly5axdu1a8vLyJKFd+F2p9I15GblcTmJiItnZ2dTV1d1WEOxemBcEj4uLQ6lUhrRmGaZvanp6OiUlJfzjP/4jwC3XFwSBdevWzfhZf38/FRUVjI2NSTJ927dvRxAE6urquHr1alAJLo6cfph6eVE6G6Zb3+aijl3sst+0aROlpaVs27YNuPVeBwIBqYFboVCgUqnQarVkZ2fT09PD6dOnH3hnnHOCZ2ZmSkKqoZyaKpPJiI6O5iMf+Qgf+9jHbvsasfcyPj5+BrmsViulpaVs3LgRhUKBQqGQZqu4XK6gkkipVPL000+Tmpr6wNJ6KpUKu90uyY2/+uqrd2wEnk0kJCSwZ88ePve5z931b2hra+ONN94AkOa0A2RnZyOTyfjVr371wHLvc05wcY5GqLt55HI5ixcvJj4+nvDwcGC6La2lpUU6D4jyhjabDbVajcVi4amnnpJkTMTucJ/Px8jICD/96U+5cOHCHbWIHgaiEkZxcTGLFi0Cbr+di7jxZ6LQk8FgwOFwhFQ9Q6lUYrPZWLp0KXl5eTz99NOSRqco693Z2Ul3dze1tbV0dnbS29tLQ0MDkZGRmEwmieBGoxGr1UpkZOQDj+OYc4LPFRQKBUuWLJFEo0Qdm4sXL3LgwAFgJsHFQT+lpaXo9foZSnAej4eRkRH279/P9evXg97Yq1QqUavVko6nqJZwO4IrFAqUSqUkTyIO+6mpqbmvOZHBgkqlIjU1lZKSEgoLCykqKgKmJ8+63W5qa2upqqqitraWs2fPUl9fL6lWJCUlSfPCYTqkKPYLPKgC3weW4CqVio985COkpaUhCII0pN3lcrF27VoA+vr66OzsJD8/n/z8fJYsWUJ8fPwtv2tiYoLu7m7Ky8uDLl47NTXFN7/5TWkH2bFjh7SD3A4Gg4GYmBjJ7erq6uKtt97i29/+dkimyYoIDw/nmWeeYdeuXZJ6NEyrRF++fJl/+Zd/oaWlZVZHZMMHmOB+v5+amhpiYmKkUNXWrVtntMt5PB6mpqYIDw/HYDBIIxB8Ph/d3d2cOnWKHTt2EBYWJskO1tbW3rMX8UExOTnJ1NQUbrebt956C4VCccch/KKe5sc+9jGmpqZob2/nzTffZHBwMCSqGUqlkvz8fFauXMmuXbuwWq3Sz1wuF8ePH+c//uM/aG1tfWAhrYeyZ9avME/h9/upqKggPj5ekuiLj4+/7bYvl8vxeDx4PB76+/vp7OyktraWQ4cOkZaWRkpKCkajkfT0dHp7e4NO8EAgIM1Xv9fIY3E4EUwTqre3l/r6emnu4mwjLCyMnJwcNmzYIK3cYgNyQ0MDFy5coLKyctbtEPGBJbjX6+W1114jEAggCAK7d+++IwEEQWBkZIShoSFqamr4xS9+wZUrV6irqyM+Pp4tW7awevVqli9fTl1dXdCH1zwIUlNTpTHJXV1dtLS0BP0LdyfI5XKsVislJSU8/fTT0vODg4McOHCAvXv3hnx6wQeW4CJOnTpFU1OTdLC8E3p6eujv76e3t5exsbFbaiPEjGZERMRsmntPrFixgieffBKZTEZ9fb00czsUUCgULF++HJvNJkVzhoaGqKqq4n//93+prq5+4DOKODdFJpM9VKTtA0/woaEh3G73PUN7IyMjjI2NzZBXkclkmEwmKRs3Pj4+p0K2gKSUIcqdhEqtThTALSgomOF3t7e3U19fT0tLCxMTE/eV58jNzZ0xXczpdEpiuw9adP
WBJ7g4seph6hwUCgUxMTGYTCb8fj8dHR0h0Re6mz1i+FB0txQKhaQMJ050hd8pHwfrC6nRaLBYLKxevZrY2FhJaqWmpoarV6/e17RYUW6lpKREUl0LBAJ0dXVRU1NDY2PjAyfR5ozg4pZz4wjdxwnh4eGkp6ezfv16rFYrfX19vP766zQ1Nc2JPaLm0I1DQw0GA8uXL6ewsBC73Y7NZpPId+jQIU6cOMHevXuDcv2oqChyc3PJzc2VpCBPnTrFf//3f9/3rO+MjAxeeuklNm/ePCPjee7cOX75y18+VAh2TqsJxf+GhYWxY8cOjh8/HrS50LMJhUJBSkoKf/Znf0ZUVBR+v1/aRmdjeisg1WXYbDa0Wi0Wi2WGVIhSqWT16tUsXbpUei4vL4+0tDQpoylOx62pqaGsrIyrV68GzT6r1Up+fj4qlQqZTIbf76evrw+n03nf4cDw8HCys7OJiopCp9Ph9/tpa2ujtrb2oReOOXVRxK1Uo9GwYsUKKioq5j3BxRqWJUuWsGvXLgwGA319fQwMDOB0OmfF59VoNNKwzaVLl0rJHDE7CL9bwW9URI6Li2NyclLSH+rr66OhoYHTp09TXl4e1N0mMjKSrKwsyRXyer00NTVJ6sb3gsViIT4+nqSkJPR6PSqVCq/XS21tLS0tLQ+dpJoXPrhMJkOlUoVUwuRhIJPJ0Ov1vPjii2zcuBGTyQRMi9ieOXNmVgrFFAoFS5cu5emnn2bDhg2sXLnyrtGEG39WV1fHuXPn+O53vwuAw+GYNZFaIRIz5gAABoBJREFUm83G8uXLpQyrqFx3vyXQf/qnf0ppaemMw6XX6+WNN97g2rVrD50hnhcEf1wQFhbGxo0bWbNmDVlZWcB0lOC9997j3XffnZVpqQqFgoKCAgoLC1m8eDG9vb3SoNKBgQFqampwu92o1WqefvppTCaTpJog+r9icigU5chiSE/8992gVCqJjIzk4x//ODt37iQjI0P6WV1dHWfOnOHEiROPFMdfIPh9QC6XExcXR3JyMuvXryc5ORmDwcDY2BhVVVVUV1fT3Nw8aw0bWq0Wr9fLwMAA5eXleL1eJiYmGBoaorq6mqmpKbRaLSUlJej1emC6Zv3atWt31P0MNm6M3NxOFOtmhIeHY7fbycjIYMuWLaSmpmIymaTwZnt7O2fPnpVmmz8sPvAEv5/kgVarZceOHXzoQx9i3bp16HQ6JicnaW1t5Y033uDixYtBLZG9EYFAgPb2dk6fPs25c+f4z//8z9uOl9br9Xzxi18kMjJSqo6cr1AqlaSnp7Nnzx42bdpEQUHBjM9BPAgfO3bskXfFDyzBlUol27dvZ8mSJaSkpJCbm3vLawYHB+nv7yc7O5vY2FgsFgs6nQ6ZTEZNTQ2f//znqaurm7XICUyXux4+fJgTJ04gk8mYmJi4J3lHR0fZt28f/f39s2bXvaDX69m6dSvl5eUzKgYLCwvZsWMHiYmJFBYW3lY09siRI5w/f56enp5H/qJ+IAkuRiGeeuopkpKSsNlskt7NjTfU5XLhdDpJTExEq9WiVCrx+/1cvnyZM2fOUF9fz8jIyKz3kt7rCyQmSJRKpdTb6nA4QqqUMT4+Tm9vL0lJSZKW5xNPPIHf72diYoLExERMJhOLFy9m2bJlREREEB8fLwl2yWQynE4n1dXVHDlyhJqamqDYP6eJnrlCZGQk+fn5PPvss4SFhUlJp5t9R6PRKLWrCYJAIBDA6/Vy8OBBjh07Nm8ai9VqNQaDAa1WK0WifD5fSN2U4eFh6urqKCwslNL2GzduRKvVolar2bp1K4sWLUKtVt9yr8Vqyc7OTt588032798ftFnxISe4uNqIxJoL2O121q5dK4W0bjwc3YwbfzY5OUlVVRXl5eVcv349dAbfAxaLhaysLOx2O3q9fk6EBGpra3n99dfZvHkzkZGR6HQ6ioqKJP9arGG/+V57PB6am5s5ceIEZWVlvPnm/2vvbFZa18Iw/JSQ1k
JAKpEiFhVBvAARZ+LEK3DmpXgNzvQadCB74sBBQIUMagQJSAz4VxIUrAUtjTWJlUbPYNPg4VQ5nONO27ieSQdd0I/ydtH15V3v9+tbfeKJPyNXFIWpqSnm5+eRJIm3tzeCIMCyrER8HNlsltHRUWZmZrp+4V3CjuLXbDbLxMQEq6urrKysxP/He81H20PHA5L08CnP87i4uGB7ezt2MEqSRDabRZblv9kxOtOba7UalmWxubnJzs4OR0dHXYfr/h8S38FzuRyKolAoFAjDkGazieu62LadiMDz+TyqqjI5OfmlOMMwjN14mUyGfD5PLpejWCyyvLyMJElsbW3RarX6qmPRbrcJgoDn5+dEw4fCMKRaraJpGqVSiVKpxMjISNe17+/vVKtVHMfBtm12d3d5eHj47/FsX5C4wIMg4Pj4mI2NDRYXF9E0DV3XE0u2GhsbY3p6mtnZ2S/XGYbB+fk5l5eXZDIZlpaWmJubY3x8nOHhYQqFQnyrvp+o1+u4rotlWYn/8KIowjRNZFnGcRzW1ta6rmu1Wqyvr3N4ePjH+/SJC/zl5QXbtrm7u2Nvb4/Hx8c/1kPuRrPZpFKpUC6XWVhYiDNNXl9f2d/fx7IsTNPk9vYWz/PiDka5XEZVVRRFoVgsxjtOP+3e8PuitOM4PavL931OTk6oVCrout51TWeUdxKH9MQFHkURnufheR6u6yb98fi+z9XVFZqm0Wg04nNAu93m4OCAs7MzTNPE932iKIqFUqvVkGU5zvvopDD1g8CHhoZQVRX4/QTz5uamZ7V0Yirq9XrPrMMf+XF98Eajga7rn+4un/Hxdky/RT13rKqSJOE4zr/2X/8EBu+mgeAfdDpR19fXGIZBuVzudUl9w4/bwdPI/f09hmHw9PTE6elpInkjg0JfjPL+LgZletmg1MkPHOU9KMNARZ3fw6DUOvnZG2IQrCDViEOmINUIgQtSjRC4INUIgQtSjRC4INUIgQtSjRC4INUIgQtSjRC4INX8BZY1nN9ndRVEAAAAAElFTkSuQmCC\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "rVeSdnNJr0cV", - "colab_type": "text" - }, - "source": [ - "Now we can create our GAN. It consists of two parts:\n", - "\n", - "1. The generator takes random noise as its input and produces output that will hopefully resemble the training data.\n", - "2. The discriminator takes a set of samples as input (possibly training data, possibly created by the generator), and tries to determine which are which. Its output is interpreted as a measure of how likely it is that each sample is from the training set." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "8zLMNX5Xr0cW", - "colab_type": "code", - "outputId": "48aaa8ce-f06c-430d-cdc6-33608f4d2bd2", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 122 - } - }, - "source": [ - "class DigitGAN(dc.models.WGAN):\n", - "\n", - " def get_noise_input_shape(self):\n", - " return (10,)\n", - "\n", - " def get_data_input_shapes(self):\n", - " return [(28, 28, 1)]\n", - "\n", - " def create_generator(self):\n", - " return tf.keras.Sequential([\n", - " Dense(7*7*8, activation=tf.nn.relu),\n", - " Reshape((7, 7, 8)),\n", - " Conv2DTranspose(filters=16, kernel_size=5, strides=2, activation=tf.nn.relu, padding='same'),\n", - " Conv2DTranspose(filters=1, kernel_size=5, strides=2, activation=tf.sigmoid, padding='same')\n", - " ])\n", - "\n", - " def create_discriminator(self):\n", - " return tf.keras.Sequential([\n", - " Conv2D(filters=32, kernel_size=5, strides=2, activation=tf.nn.leaky_relu, padding='same'),\n", - " Conv2D(filters=64, kernel_size=5, strides=2, activation=tf.nn.leaky_relu, padding='same'),\n", - " Dense(1, activation=tf.math.softplus)\n", - " ])\n", - "\n", - "gan = DigitGAN(learning_rate=ExponentialDecay(0.001, 0.9, 5000))" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From 
/tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "69GHTt_2r0cb", - "colab_type": "text" - }, - "source": [ - "Now to train it. The generator and discriminator are both trained together. The generator tries to get better at fooling the discriminator, while the discriminator tries to get better at distinguishing real data from generated data (which in turn gives the generator a better training signal to learn from)." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "lP7x5ZT1r0cc", - "colab_type": "code", - "outputId": "0fa74aa3-9874-4968-fcc6-64b73b04a755", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 513 - } - }, - "source": [ - "def iterbatches(epochs):\n", - " for i in range(epochs):\n", - " for batch in dataset.iterbatches(batch_size=gan.batch_size):\n", - " yield {gan.data_inputs[0]: batch[0]}\n", - "\n", - "gan.fit_gan(iterbatches(100), generator_steps=0.2, checkpoint_interval=5000)" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:191: The name tf.train.exponential_decay is deprecated. Please use tf.compat.v1.train.exponential_decay instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/gan.py:314: The name tf.placeholder is deprecated. 
Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/gan.py:315: The name tf.assign is deprecated. Please use tf.compat.v1.assign instead.\n", - "\n", - "WARNING:tensorflow:Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n", - "WARNING: Entity > could not be transformed and will be executed as-is. Please report this to the AutoGraph team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output. Cause: Bad argument number for Name: 3, expecting 4\n", - "Ending global_step 4999: generator average loss 0.561798, discriminator average loss 0.555924\n", - "Ending global_step 9999: generator average loss 0.568906, discriminator average loss 0.56287\n", - "Ending global_step 14999: generator average loss 0.620129, discriminator average loss 0.613639\n", - "Ending global_step 19999: generator average loss 0.57319, discriminator average loss 0.567482\n", - "Ending global_step 24999: generator average loss 0.632365, discriminator average loss 0.625501\n", - "Ending global_step 29999: generator average loss 0.629756, discriminator average loss 0.623243\n", - "Ending global_step 34999: generator average loss 0.59844, discriminator average loss 0.592471\n", - "Ending global_step 39999: generator average loss 0.5675, discriminator average loss 0.5617\n", - "Ending global_step 44999: generator average loss 0.574203, discriminator average loss 0.568346\n", - "Ending global_step 49999: generator average loss 0.562267, discriminator average loss 0.556616\n", - "Ending global_step 54999: generator average loss 0.551284, discriminator average loss 0.545583\n", - "TIMING: model fitting took 379.419 s\n" - ], - 
"name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "UW60zOZGr0ci", - "colab_type": "text" - }, - "source": [ - "Let's generate some data and see how the results look." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "fSQtVhSer0ck", - "colab_type": "code", - "outputId": "01a29c56-4cc0-4694-faec-42b243cfda1e", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 197 - } - }, - "source": [ - "plot_digits(gan.predict_gan_generator(batch_size=16))" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "display_data", - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAALgAAAC0CAYAAAAn8ea8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9eXRb5Zn+oytd6WpfbFm25d2xndiOdzt7QhISICmlKWEpZNg6UMoUGBh+3ZlO29OhQ1ump9MzTIGhaekEKBB2QkKMsxPHiXGczY6XeN8kWfu+3N8f6ffVTuyskqyAn3N0SmNLen316L3f937v+zwCnucxi1l8UcHMdACzmEUsMUvwWXyhMUvwWXyhMUvwWXyhMUvwWXyhIbqcXxYIBAldcuF5XgDMxhlFmHme1wOJHyu5pudiNoN/iSAQCCAUCi/nKb2xiiVemCX4lwgCgQAM8+X6yL9cf+2XFAKBAAKBABzHQa1WQyCY8m7+hcQswb/AYFkWEokELMtCJBJBIpFALpfPdFhxxWVtMmdxbYBkawCIRCIg7Rg8zyMSicxkaGAYBhKJBAsXLoRIJKJ3l0gkAqfTicHBQQwPDyMUCkXnDXmev+QHAD6RH7Nxnn2wLMvn5OTwCoWCFwqF9N/FYjGvUqn4v1VELuVxOFqxCgQCXiAQ8DKZjM/Ly+NNJhPv9/v5YDDI8zzP+3w+vrm5mf/2t7/NazSay4lx0jU99yG4nGareJaKWJaFVCqFVCrF2NgYLiXOa6X8Fss4yZIkGAwiGAxOytgsy0IsFiMQCCAcDl9KNj/C83zN1cSqVCqRl5eHdevWoaCgAGlpaTAajZg3bx6EQiHdD/A8j0AgAIfDAZPJhFdeeQW//OUvL/l9pisTJtwSRa1Wo6CgAFVVVZDL5QiFQvif//kfBIPBuLw/wzBgGAYikQjhcBihUOiSvlyJAFIhCYVCCAQC58XNcRy0Wi1MJlNclipk+eF2u3H69Gk4HA6oVCpoNBo0NDRQcguFQpSWliIzMxO5ublQq9VISUmJSgwJRXCGYWAwGHDzzTfjW9/6FuRyOcbGxvDHP/4xZgQnpTOS3UQiEcRiMTiOQzAYhNvtPo8MPM8jFArB5/MlFPnJl3LiunsiVCoVsrKyYLPZ4pIwhEIhIpEIRkdH8d577yESiUwZG8uy+OY3v4nrr78eOTk5YFn2cuv10yKhCJ6Wlobi4mIsW7YMcrkcUqkUGo0mZmUtsViMlJQUZGdnY+PGjVi1ahWMRiPEYvGkWyf5QMhmyO12Y8+ePXj00UcxPj4+4xs3gkAgMO3PGIbB/Pnzcdttt+GZZ57ByMgIXC5XTOMJhUKX9B7BYBBbtmyBxWLBhg0b
aOaPBhKC4EKhEElJSdBoNJDL5RCJROB5HuFwGOFwOCbvJ5PJcMstt6CwsBA5OTmorKxEZmYmFArFBbMHz/OQSqVYsGABnn32WTidTuzZswdbt26dkWwuFosRiUQuWnWoqKhAdnY2IpEIbDYb/H5/zGKaWBm5VASDwUl/Q7SuZUIQnGEYyGQyeksLhUJ0/RsMBqNKHIFAALVajaysLHzlK1/BvHnzkJGRAa1We8GsQW6vAoEAIpEIOTk5uP/+++H3+6FUKnHs2DF0dnbGLZtLpVKkpaXB7XbD7XZfMFMyDIOqqiro9XoMDAzA6XTGdIlyJRnYYDAgJSUFAoEAfr//i1kmZFmWr6mp4V977TV+cHCQN5vNfEdHBy+Xy6NWfhOLxfxtt93G7927lw8Gg3wkEuEnIhKJnPdvPM/zZrOZ7+np4YeHh3m73c77fD76s3A4zJvNZl6hUMStTLhixQq+r6+Pv/POO/k5c+ZM+3tCoZBXKpX8/v37+Z///Oe8SCS6nPeJWpnwYo+XXnqJ7+3t5SORCH/s2DH+u9/97mU9fzrOJkQGJyDfWrJMCQaDcDgcUX0PcocIBAK0lGa1WrF161Y0NTXRCkNaWhpdQ7rdblpaYxiG7vofe+wxZGVl0RPD2267DXv37kVnZ2dUYz4XK1euxMqVKyGVStHR0YGRkZFpf3f+/Pn40Y9+hEAgALPZHJMl35WCZVmkpaXh+9//PpYtWwatVgu3241//dd/RXNzc1TeI6EIzvM8RCIRkpKSIBaL4fV6o16p4HkeLpcLAwMDMJlMMJvNOHPmDD755BM0NTVREpxL8HPh9XoxPj4Oo9EIlmXBsixqamrQ0dGBrq6umK7Hc3JykJaWhv7+flgslinjA4Dk5GTMmTMHS5YswRtvvIHOzs6EqPoIhULU1tZCo9EgMzMTa9asQWpqKtxuN1paWtDY2IihoaGovFdCERw4u7acM2cO5HI5JXg0EQ6H0dvbi507d0KlUuHTTz/F3r17cfTo0Ukf/uDg4AVfJxQKwe12IxwOg+d5us7dtWsXvfvECgaDAZFIBFu3boXdbp+WtJWVlaisrIRAIMDvfvc7dHd3xyymy4FUKsXvfvc75OfnQ6vVAjj7uRw+fBiPPfYYTCZT1N4r4QhOEA6H0dnZiQ8++CB6G46/oaenB6Ojo6ivr4fH44HX672szCaTyZCamoqysjIIhUK4XC5YLBY8+uij6O7ujnq8EyESibBlyxaIRCLY7XbY7fYpf49hGJSWloJhGDz22GMYHR1NiOw9d+5crFixAoWFhZDL5bRaNjQ0hK6uLvT09EQ1OcSV4AzDXLTKEAwGYbfbwXEcxsfHY1KZIOvv6chxITAMgwULFmDBggUQi8VwuVwYGhrCyZMn0dPTA6fTGXMiWSwWAIDf75/y2rAsi+zsbGi1Wvj9fjQ3N8e0LHipkEqlKCgowPXXXw+O48DzPN0HnThxAidOnLhgLf9KEJd2WbIxu5TTqWAwiJGREQQCAYyPj6O7uzsmpbcrIaFAIIBUKsW6deuwdu1ahEIhmEwmtLa24uOPP6ZLlliCnKKSevzEfg7g7LVWKBSoqakBx3Gw2Wzo6uqKW6vDhaDT6VBaWoobb7wRAoEAwWAQfr8fXq8XLS0tOH78eNQHMuKSwQsKCgAA7e3tF/1dk8mEHTt2ID09HVarFWfOnEmIk0KGYaDT6bBp0yaUlJSA4ziMjIzg448/RmtrK1pbWyESiSj5YpHFBQIBWJZFcnIyUlNTUVJSgv3798NiscDhcEAkEmHevHmorKzEd77zHfz4xz/GwYMHox7HlYBhGNx///1YunQpAoEAXRr6fD54vV6UlpbC7/fj2LFjMJlMUUsUMSW4Xq/Hk08+ie7ubnR0dFwSwTUaDRYuXEjXZ4mQeYCzmZNkGpZlkZmZifT0dAiFQqSlpYHneVRWVqK9vR1nzpzB8PBw1GPQaDS45557kJWVhaSkJBgMBlRWVsJkMsFisSA1
NZXGpVQq4fP5pq2wxBNyuRwZGRnIz8+HXC7H6Ogozpw5g+7uboyNjUEkEiEjIwN6vR5r167Fhx9+CLvdHpW9TMwInpKSgrKyMtx///148cUXcebMmUt6nkqlQllZGaRSKQBE9ZZPTtiuJMPyPA+/34+TJ08iFAphaGgIZWVl0Ol0yM7Opidx4XAYY2NjUYuZQCgUQq1WY/369cjJyYFKpYJYLEZubi6sVitsNhsKCgqgVqshEonQ29sLr9c74wmCnFJnZGRAJBJhfHwcIyMjOHr0KI4fP46BgQEoFAqsWLECBoMBCxcupPsZs9l81e8fM4I//fTTeOihh+D3+7F79258+umnl/Q8juOQnp4Oj8dDj8ejATJJwrIsAoHAFZUfyZrbbrdjeHgYcrkcdXV1SElJgUwmg8vlwkcffYTe3ugPo8tkMmg0GkgkEvh8PoTDYQQCAfj9fsjlcpq9eZ6H2WzGm2++iZGRkRmtnAiFQsjlcsjlcojFYvzv//4vxsfHMTo6ivHxcVpiFQqFcDgcqKmpwZ133omnnnoKH330Ef785z9fdQxRJ7hMJsOWLVtQVVUFnufR399PyXoxPPzww1i7di0EAgEaGxvR0dERlZhYlkVxcTEefvhhhEIh7Nq1C2+99dYVv55KpUJBQQE2bNiAwsJCyGQy+P1+fP/730djY2NMyoQejwddXV34l3/5F9rtGA6HIRKJkJmZidLSUjzxxBP0C9zZ2Qmv1xv1OC4FLMsiJSUF3/rWtzA6OgqPx4NAIIDGxkaMj4/D5XJN6rMPh8OwWCwYHh7G2NgYvF4vPTW+2gQXVYILBAJIJBKsXLkSKpUKNpsN9fX1tKw1HcRiMfLy8rB8+XJUVlYiFArhwIEDOH36dFTiEolEMBgMWLlyJRwOxxUfeDAMA6VSiaKiIpSVlaGoqAgajQYA4HK50NjYGLN6czgchtPpxJEjRyb9O8uytIHqkUceAcMwCIfDcLlcM3YsL5PJkJaWhpUrV6KnpweDg4M4efIkHA4HXC7XeaVAnufhdDphtVrhdDrhcrng9/shEomuumwYVYKzLAu5XE5LPaOjo3j88ccv+oFrtVo8+uijWLp0KQwGA5xOJzZv3hy1kzexWAy1Wo3c3Fz09PTQgdzLARnkLSgowDe+8Q3U1tYiLS0NQqEQTqcTFosFJpMp5lnz3GsZCATQ29sLm80Gh8NBCT6T0hB6vR4FBQWoqKiA0WjEsWPH0NjYCJ/PN+2ewOVy0b9haGgIDocDMpnsqrtJo0rwDRs24Ac/+AFkMhna29svqUS1ZMkSLF++HN/4xjfAcRz27t2Lhx56CH19fdEMjao6ZWZmIikp6aK/KxAIIBaLIZVKwTAM8vPzUVJSgnvvvRdGoxEqlQqhUAiHDh3Ctm3b8Nprr8Hj8UQ15ksFqTadPHkSJSUlUCqVqKurw8mTJ6OyUbtcCIVCsCwLjuOQnJyMnJwc1NTUYO/evdM+Ry6XQ6fTQa/XIy8vDzk5OcjLy8OHH34Ip9MJn893RfumqBJcq9WioKAADMPAZDKhr69v2m8fy7K44447UFNTg9LSUmg0Grz33nvYvn171HsmJlZNxGIxysvLcc899+DNN9+kmxye5zF37lxotVo6viaRSCjBc3NzkZ+fj/z8fEgkEtjtdnz22Wf49NNPcfjwYfT29s7oho7nedjtdgSDQajValRUVECpVM5ILDabDaOjo4hEIhCLxdDpdCgpKYFGo4HL5ZpEVJZlkZqaiuzsbMyfP5/OZCYnJ0Ov1yMpKQkejwdjY2N47bXXLnvZFVWCi0Qievu3Wq0YHh6mZTkCiUQCkUgEtVqNn/zkJzAYDOA4Dg6HA3/4wx+wa9euaIYEAFQPhJB5yZIlyM/Px4EDB8DzPFiWBQDcfPPNtFarUCgoyQUCAfR6PTQaDUQiERwOB7q6uvCnP/0Jn3zyScxHvy4FZB0bCoUgk8lQW1sLvV5Pp+jjibGxMXR1dcHlckGhUECn
06G8vBy5ubnw+/0YHx+nDWpyuRwVFRUoKytDaWkpSkpKIJFIEA6HMW/ePKxfvx7hcBinT5/Gm2++Oe286XSIKsH9fj/sdjvUajXy8/NRW1uLV155BX6/HzzPQyKR4Ic//CFqa2tRVlYGrVaL/v5+NDc34x//8R/h8XhiUoEIh8Nwu93o7e1FZmYmOI5DdnY2mpub6cUi2Ya0E0xsmo9EIgiHw/B6vThx4gSee+45HD58GCMjIzFtrLocRCIRdHd3o7y8HEKhEMnJyXjooYeQk5ODF198Ma6x8DyPsbExPPXUU3jyySdRUlKCrKwsvPnmmxgbG0N/fz8GBweRnJxMlzCkhEsqRDzPQyaTATjbmiwQCK5I4SCqBG9sbMS///u/08y8cOFCPPfcc/S2IhQKJ9WNn3/+ebS3t6OjoyPqgw0TEQgE0NbWhl//+td48sknYTQaIZPJoFAo6O/wPD9p0JjU4Mn43MGDB/H555+jsbERzc3NMJvNCUNu4GyN/rPPPkNdXR2qqqogFAoxd+7cmNTkLwVerxd79+7F3XffjTlz5oDjOCgUCohEIqhUKmRnZ4PjOEilUigUCjAMM+XGOBKJwO/3X/GJbFQJ3tbWBrPZjDvuuAOZmZnIycnBvHnzJv0Oz/OwWq04ffo0/vrXv+L06dMx3wiFQiEMDg7ijTfewIoVK2Cz2aDRaKBSqSCXy8FxHF2HE2K73W44nU7Y7XZ4PB7s3r0bDQ0NOHToUEL0xpyLSCSCEydO0D4O0kKQlpY2I/EEg0F0dnbi1KlT0Ol0UCgUSE5OphtPnU5HMzU58IlEIvB6vXC73bTLMBQK0a7SK0FUCR4IBDA0NISVK1fiiSeewO23347S0tJJv+Pz+bB582Y89dRTcd2UBYNBWCwW3HXXXbTkd/fdd2P9+vWora1FcnIybf5xOp04cOAAdu3ahY8++ghms/my137xBkkcDocDHo8HYrGYThrNJB599FEIhUKIxWL89Kc/xaJFi1BRUQGXy0UPcsxmM2QyGWw2Gz7//HPs2LGD1s/JiSdZJl4uYiLdRspxqampdBkw8dva398fk7lF/jIk0RiGQVpaGpKTk6FUKiGRSOhFDIfDsNlssFqtMJvNUd+kXU6clwOBQICCggKUl5fjqaeewubNm+mS6gpx1dJtBAzDIC8vDxqNBkqlchJZA4EAhEIhQqEQ7HY7rFYrTTaX2p3JTyPdllBT9Vf7mI3z7CM5OZn/2c9+xhcVFV3uFP25j7hN1Ufrmp77SFjxzStBrDJjtHGtxIkoZvBYY7oMPiuAP4svNGYJPosvNGYJPosvNC63TGhG4lrLZU/479k4o4NrJdbs6X4wu8mcAVwrcWLWCHYWX3Akasa+ZMwSfBZfaMSd4NeKCel0zT+zuLYQd21CqVSK3NxcFBcXQyQSwWKxwGKxYGhoCBaLJe69yxNBeiaUSiXS09Ph8/lgs9mo4mwi96LMYmrEXZtw/vz5KC8vR3V1NSX4+Pg4BgcHqWBOV1dX3GIipDYYDEhKSoJKpYLBYEBGRgYCgQBsNhtGRkbQ29uL0dFR2Gy2uMX2RQS5MxJVgFgnjbgRnMiOPfLII6iqqkJeXt4kq4tIJIKmpibs3LkTzzzzTNwmwjmOg8FgwO23346amhpkZ2cjOzsbKpWKDk/b7Xa8/PLLePfdd3Hw4MGEbJdNZEy0C5w41ODz+egwTKwQN4JzHIe0tDSkpKTA6/Xi4MGD2LFjBzQaDVJSUjB//nzk5OSguroaBoMBo6OjMSe5QqHA3XffjRtvvBHz58+HWq0Gx3GQSCRU1FIgEECn0+GBBx7ATTfdhG3btuHFF1+MuYvDtQq5XA6WZSESicAwDOrq6jB37lxUVVVh7ty5k3rvh4aG0NbWhl/84hcwm80xUcCNG8FlMhny8vJw5swZnDp1CmfOnEFTUxMUCgX0ej2EQiFSUlKg0+mQkZEBi8USU4IzDAOj0YiioiKUlJTAYDBALBZTI9hz
FVt1Oh04jsOKFSvQ09MDtVp9nkbJlxnEa3T+/PngOA4cxyElJQW1tbXIy8tDUVER0tPTJ42kJSUlQafT4b777kNzczPOnDmDtra2qMYVN4LL5XLMnTsXTU1N6OjowNGjR+FyuSAQCKDVapGUlIS6ujoolUrk5ubi5MmTMdW0Jv3JxA6EaCFOXDKRlktCeoVCgbq6Ovj9fiQlJaGlpSUuSymR6O8fEyHSuTLDU1V8SP/95Qr8XwmIBuGiRYugVCqhUqlQXl6OwsJC6HQ6yOXy82Ikkz1FRUV499130dDQgN7e3qja1sSN4ES8cvv27RgfH6fSAUTu4O2338bXvvY16PV6mtFjCaKTQh4kq5CRNYfDgbGxMQwPD0OtVsNoNCIpKQkikQgLFy4Ez/P46KOPcOLEiZh9EYk2y1e+8hUqd1ZWVoZ169ZReQWXywWJRDJpvpTA6/Vi//79+Od//mcMDQ3FfIaU53lwHAe9Xk8HSchdkeiaE2tIhmHozwQCAdavX4/KykpUV1fjxz/+cdQETONGcIfDgaNHj8Jut59XCoxEInC5XIhEIkhOTsaaNWvw1ltvXZEDw6Vgon03yYREPMdut6O7uxvbt2/H4OAgLBYLkpKSqBJAWVkZxGIxCgsL8d3vfhdPPvkkhoeHY5IhiabI17/+dZr9UlJSUFhYCK1WC7FYTI1rpxpNk0qlqK6uxs9//nP09fWht7cXp06dwsGDB6N+5yHipnq9HllZWUhJSYFUKkUoFMLw8DA6OzvR1NSE8fFxeDweSCQSiMViqFQqLF26FMXFxdDr9Vi2bBkefvhhNDU1oampCRaLJXGUrS4Ej8eDzs5O+Hy+Kb3f/X4//H4/OI5DcXExxGJxzGMiE/NkKeJwONDX14f9+/fjrbfewtDQEFwuF5KSkqh7Axm7Sk1NxYYNG/Dss8/SEatogxB88eLF4HkebrcbXq8XNpsNTqcTAOggbzgchsfjmSSsQ6Q6vv71r8NqteLEiRPYs2cPmpqaok5woVAIiUSC5ORkpKWlQa/Xw+/3w2q1YmBgAPX19fjwww8xNjYGp9MJjuMgFouRnJwM4KyxVm5uLgoLC3HLLbdAoVBgfHwcVqv1qmKNG8FDoRAcDseUwfI8D5/Ph76+Puj1erjd7pjeTkm2PnbsGIaGhuDxeMAwDPbv349PP/0UL7zwwiRNvNHRUezfvx82mw3V1dUoKCigZcTFixcjEomgpaUl6nESpTCtVovt27dj27Zt2LJlyyTZ4YcffphWKHbu3Il3332Xbn4FAgFqamrwi1/8AnV1dVi9ejVqa2vxX//1X1E/UGNZlgqTElkOq9WK1157Dfv27cOnn346KbERjUKbzYbf//73KC4uRkpKCiQSCYqLi6loPtHLuVLEheCXeux9+vRpJCcno7i4OOZr8EgkgvHxcZoNxWIxBgcHMTw8fN4dRigU0nVkf38/DAYDvcXed999YFkWra2tUa+Pj42NYf/+/diwYQN1cZj4xeN5Hu+88w7q6+spoUwm06Sfd3d34ze/+Q2ef/55pKamxuykOBwOUwtuYnlO9GhOnz497bWJRCLweDyor69HJBLBTTfdRDesBoPhqtsl4kJwkUhECXuh0ytCsPLy8rj0gXi9XphMJgwODkImk1E3Y7FYTIlENk5kGtzpdMLpdEIul0MoFCI/Px/p6elgWTbqm00iONnQ0DDlz8kX7kIIh8N0yUIMqWJxUEVEkiKRCK06iUQieL3ei2bgUCiEgYEB9PX1wefzgWEY2O32qEhRx4XgROcvEolQEk2Fzs5OGAwGLF++PC6nhcFgEG1tbWhsbKTuylqtFjqdbpJIu16vR35+PubMmUMtsUUiEUQiEa10EBH8REN6ejo2bdoErVaLY8eO4a9//WvMbE1IFYpUSNRqNdRqNZVgmw4CgYDKJ5PCQmtrK955553E0gc/F0QQf/Xq1ZDL5Th+/Pi063AASEpKgtFoRG5uLs2Qsa4zHzx4EBaL
BbW1tVi+fDkWLlyIr371q7DZbLQyIBKJIJfLIZPJwHEcreuKRCLwPI+0tDTU1NTg008/TSgveABIS0vDrbfeCqFQiJaWFvz5z3+OCcGDwSCcTidNUkSs9Mknn8TixYvx/PPPo7e3lx7NMwwDjuOg1Wpx/fXXY968ecjLy4NEIkFnZyeam5vR0NBw1dczZgQnbgirVq1CZWUlVV660ElVQUEBCgsLIZVKsXDhQgCI+snWuSDi9ZFIBBqNhhLY7XZDIBDQQxZy2yXrQ4lEQsXmpVIpUlJSrvoLSd7rYhtsouB7Mb3stWvXYu3atVCr1Th8+DA6OztjVnola/DOzk6UlpbSUmx2djZVviX+p0RgiXj4zJ8/H0ajETqdDizLore3FwMDAxe0Kb9UxIzgLMvCYDDg/vvvh0gkgtlsRn9//7SbR6FQiIqKClRUVIBlWdx88810CRFLkIMHoVAIqVRKb6vkJHPiiSZZZ048oADOSkKTqsq5ctGXCnK3IAciU4FIzhkMBgBnJaojkQjV8SOkISW7Bx54ACtXrgQA7NmzBydPnrzCq3RxkFJva2sr6urq4PP56PUsKyvD/Pnz4ff7EQ6HEQqF4Ha76bVlGIaa2kYiEbS3t6O/vz8qd5qYEFwoFNJTKYlEgr1796KlpQUNDQ1Tbjg4jkNFRQVKSkqQmZkJhmGwYsWKmJMbwKSOxol9KBO1CAmxeZ6n627ygTqdTgwODqKjo+OK5H2JC7RarQaAC645dTodHn74Ydx5553IyspCIBCA1WrFzp078Ze//AXHjx9HcXExamtr8cMf/hAajQZisRihUAgvv/xy1DyPpkMoFML27dthsViwZ88e/L//9/+oMxwhMbk+SqVyUuJgGAaBQAAOhyO6/kxReZUJYFkWS5cuxfz585GVlYX29nYcOnQI7e3tU/ZESKVSGI1G/MM//AOMRiMtxxEjoniAlAw9Hg9kMhlEIhGt9hCtQiIAGYlEJlWEgsEg/YCutPJD1GzJa04Ht9tNT1irq6tx9913w2Aw4LrrrkNqaiosFgt0Oh1SU1ORlJQEoVCIjo4OvPTSSxgeHo6L3HMoFEJnZydcLhcsFgvkcjlycnJw4403Uh3wiX00pGWC/O0OhwMWiyVqBrZRd3iQy+WoqamBXq+HQCDAsWPHcOrUKQwODk6qjAgEAqjVaqSmpmLevHlYvXo1dDodJVFnZydGRkaiGd60IOatTqcTCoUCHMfB7/dPkvClWnd/62snyqj83xwiyKb4crM4ed1LaTDy+Xw4fPgwWltb0d3djaqqKmpAW1hYSO9G5A4zODiIxsZGvPzyyzFbe08Fs9kMs9mM48ePQyQSobi4GCqVijbWEZ1wUj4m8fp8PgwNDcHpdEYtuUWV4Hq9HkVFRcjPz8fu3btx6NAh9PT0TJmViEfPDTfcgKqqKqoX7fV6Ybfb8dRTT8X8lkrg9Xrx+eefIzU1FcDZY+Pe3l56VEziFYlEUCgUSElJoeamxMW3qqoKBw4cgM1mu+wP53KXNYFAAHv27MHSpUuxaNEi3HrrrfjmN78JhUJByRIIBPDggw+ivr5+Rt2OQ6EQjh8/jieffBK33norqqqqUF1djYyMDGg0GurKF4lEMDQ0hC1bttC9RTQQNYKTJqn77rsPycnJaG9vp5uGc0Fs5jZt2oTMzEyoVCpEIhF6EPHBBx9gYGDgily1Lhdkc5eamgqWZeFwODAyMoJt27ZhZPPnnssAACAASURBVGQEDoeDli8NBgOKiopooxbZGGVmZqK6uhqLFy9Ga2sr+vr6LplUV1Ml4HmedjN2dnbi2WefhUwmg8PhwBtvvIG+vr6EcKEgLg319fVoaWnBxx9/jNWrV2PZsmUoLi6mdoEmkwn79u2Lal9P1AgukUhgNBpRXV2NYDCI3Nxc5ObmnudWrNPpUFhYiMWLF6OoqIi6mAUCASpS39zcTHfZsYZUKoVOp0NWVhYUCgV4/qwV9sDAAPW9lMvl4Hme
2goScXlye51qYxov2O12nD59mnpQEsPYnTt3XnUnXjQRiUQwPDxMB8zlcjmys7ORk5MDqVRKfZSi3dYbEyPYSCSCZcuWgWVZNDQ00IAZhkF5eTluvPFG3HrrrVAqlXRTSTIpgLiMqxEYDAbMmzcPixYtgkQigcPhQCgUglKppD0oJSUlyMvLQ1paGtLT06FSqehkit/vx9DQEI4ePYo9e/ZQp7N4wul0or+/H16vFyzLwmq1UkeyRAOp/OzduxeVlZUoKyuDRqNBIBCg9iXR/FJGjeDj4+MYHx+Hw+GAXC7HnDlzkJGRgSVLlqC9vR0CgYBaXxObvomVBzLdvmjRIvzqV7/C448/HrVa6HRgGAZarRZGoxFSqZSeWlZUVNBGH+IpQzI32QyRuj4pv/X19cFut88YqUg7LcMwcLlcCZO5pwKpknm9XoRCIQiFQroxjXbcUSN4IBBAS0sLXnjhBSiVSjpEnJ6eTk8Ec3NzaZ83qZYQkHKRRqNBSUkJli1bhsOHD+PEiRPRCvE88DxPZSFINYTY2ZGlE1mWkJO64eFhHDt2DAMDA+jv78fhw4cxNjYWU5e4S/1bnE4n3agnOqYrrSYswcPhMPbt24f9+/dDoVDggQcewLp165CRkUGzs8vlgkgkmnRwQgP5m4ksy7JIT0/Hhg0bIBKJ0NPTE7Wa6LkgpTSZTAafz0cHi6VSKTQaDS25kQ2wxWLB0aNH8Ze//AWnTp2KSrdbtEBaIch01LUAwgsAtGwYbUT9FUkm+e///m+88MIL5xX1yTeW/DdpusnOzsZDDz2E2tpazJ07FzfccAPy8/OxePFi/NM//VPMKiqkZbaxsRGrVq2i9eyJw8c2mw1bt27Fxx9/jE8++YT2UyQKuYGz150sES9koZ4okMlkUKlUtD4eDAZj0qses14UckhyIRCSu1wuBAIB/OlPf0JXVxcef/xxaDQaZGZmIhKJ0F7rWH1odrsdmzdvxsjICAoLC5GTk0NbZkdHR7F9+3YcOXKEnsYmKgQCAeRyOb37JCrJSd9Reno6Pdm8ZjL45WBiL4LFYsHu3bvhdruxevVqLFiwACqVipaRyIYkFvB4PGhoaEAwGERxcTHmz5+P9PR02Gw2dHd34/XXX4fJZIrZUika4HkeHo8HycnJtK8lUSEUCpGbmwuNRkOP6UmTWLQxowSfCs3Nzbj55pvR3NyM/Px8CAQC5ObmUsfbWGLfvn3Yt28fgL/rjCRqFjwX4XAYe/bswY033giVSjXT4VwUpM+HTAFdyh3/SpBwBCd2zg888ACtXrS1tV3V4OmV4FohNkE4HEZXVxcaGxuhVCoTeokSCoVQX1+Pjo4OFBQU4N5770Vrayuampqi/l4JR3DgLMn3798/02FcU+B5HgMDA5BIJFSlK1ERiUSo5szg4CDS0tLQ1dWF7u7uqL/XrEfPDCBWcZL5UKFQGK07XlyMYKOxHJzOoychM/gsrgyki/BawxdCPnkW8UGirrtnCrMmVLP4QmPWCDb+uFbiBK6dWKNjBDuLWVxruKwM/mWtTkQbVxsnaetlGIbOjMbI0OmadzpOmE3mxPG22bvK1BAIBJBKpVi/fj2ysrLAsiw6OjrQ1dWFzs5OeL3eaDeBXdWSRCgUUtk2r9dLZy/PbZWOJRKC4AzDQKVSUT3AWUwNiUSCsrIyPPbYY6iurqanl/X19XjjjTdw8uRJeL3eGR0yJmAYBnK5HNXV1RAKhThx4gQkEgncbnfMWy4mYsYJLhAIkJycjMcffxyvv/46Ojs7434sf61Ar9fjP/7jPzBv3jzaR52XlweFQoGysjJs2bIFx44dQ29vb0ymYy4VLMsiOzsbv//976HVaiEQCOBwOOBwONDQ0IC//vWvGBsbi0t8M07w5ORkFBYWYvny5WhqaoLZbJ4l+DQg3kHDw8MIBAJQKpVUyo3oOWo0GmRkZKClpQVjY2NxMaA6F6THv6Kigs62BoNBOBwO2Gw2tLa2Uu2TWM/e
zmgdXCAQoLq6Ghs2bEB1dTUdc5vF1BgeHsY999yDl156CQ0NDRgYGIDb7aZqUMuXL8djjz2Gn//859iwYQPS09Nj0mN9MUQiEfh8PrS3t8Nms1GNcJFIhHnz5mHjxo1ISUkBx3Ex14GfsV4UlmWxceNG3HrrrVi0aBF0Oh0eeeQR1NfXo6+v74pe88tQRSH7FYlEAo7jqF5LZmYm7rzzTmRmZkKpVMJkMuFHP/oR9u3bh6GhoSsN9Yp7UViWRUpKChVM4jgO//qv/wqj0QiO42C32/HGG2+gvr4+KgJPCVdFYVkWq1atopP2Ho8H4+PjcZUYu1ywLAuj0Qjg73LBsTasPRdkhI7AbrdjaGgIo6OjSEtLw/Lly1FcXIzU1FSUlpZidHT0agh+xQgGgxgcHAQAOrzt9/shlUqh1+uRlpaG6667DgDQ0dERs2XUjBBcLBZDo9FgzZo1VMPQbDbDYrHM+HT6RLAsO+k2qlAoqEem3++HzWbDiRMn4PF4YjZTeDHYbDaqxhUIBKDT6ZCZmYnU1FRUVlbCbDZj9+7dcY9rIoizh1gshlKphF6vB8MwWLlyJfR6PV588cWYVc9mhODr16/HU089BYPBAKFQCKvViq1bt8ZNbPNSwDAMNm3ahJ/97GdQq9WU5MTVAfi7Zcfhw4fx4Ycf4ne/+92MkJyseY8ePYr6+nqwLIvbbruNOg4nAog0NXG4E4lEVL2A4zh4vd6Y3AnjTnCJRAKDwYCCggIIhUKMjIygvb0dO3fuxPj4+Iwf8pDb6Q9+8AOsWLECKSkpEIvFVCrZ6XSira0NLMuiurqaqqcKBAL84Q9/mOSCFm+EQiHqlQmctS9JS0ubkVgmguji2Gw2WK1WyOVyuvk8c+YMJBJJzFQT4k7wOXPmIDc3F8nJyfD5fOju7sahQ4fQ0tJCvetnkuRisRh6vR4bN26EXq/HwMAAQqEQAoEA/H4/xsfHsX//fkilUqhUKuTn50Ov14PjOEgkEng8nhn16QkGg1TdNikpCUlJSTMWC4FQKATHcVQNzOv10v8dGRmBRCKh0tPRRlwJLhAI8NZbb6GgoAA8z2N4eBivvvoq3njjDdhsNjpZHaO+iotCKBTCaDTia1/7Gvr6+vDqq69i8+bN53ln8jwPqVSKF154Adu2bUNubi6As1ofTqcz7gSfqAlOLFkAUC/PmQYRZm1tbcWePXtw5swZDA0NQafTQa1WQ6vVwuFwxMTwIG4ENxgMWLp0KfWyCYVCOHr0KEZGRhAKhSCXy5GVlUX9fEZGRuJGFCJqL5fL4fV68f7772PXrl2wWCwwmUzTujOTzB4Oh8EwDFatWoV9+/ahs7MzLnEDf7f7Tk5ORldXF5KSkqiHD1n3zjQyMzPx4IMPwu/34/DhwxgdHaUtBQqFAvfeey9efvllnDx5MupFhrj99UlJSVi7di3kcjkA0Gl5q9UKkUiEpKQklJSUoLi4GFqtNuZOxwRkjjErK4vqr/T09KC5uRk9PT3TZhWJRIK0tDSwLEvVccvLy6n3erygVquh0+mQlJREtf7Ihtjr9SaEUJFGo8GCBQuQlJREzWF5nofBYEBJSQlqa2tj9pnHjeCZmZl46KGHoFKp6Ozg3r17YbFYoNFoUFhYiBtuuIGWjuJFcJFIBK1Wi6985SvgOA5OpxOBQOCiS6Tk5GRcf/31UKvVVOpt0aJFcd/U5ebmIi0tjRrt2mw2jI2NATh78jk6OhrXeKaCUqlEaWkp1aUkQqcrV67Efffdh+zsbHqcH23EZYlCNOgI3G43RkdH0dfXB7fbDZlMhvz8fKhUKthstriJ3wPAvffei1WrVqGvr4/KI08HhmGg0WiwbNkyLFiwALfffjs4joPP54Pf78exY8fi1ilHXMtWr14Np9OJlpYWhEIh9PT04MSJE2AYBsPDw/SwZaYgEomoQu+KFStgNBqxZMkSpKamorCwEBkZGVTqIhb7hbgQfPXq1Vi9evWkfyMbNbVaDb1e
j7lz50KlUsHpdMZVY9toNKKkpAQ+n2/KCywUClFQUIDU1FQkJyfT38/Ozp50u/X7/XQ9Ho9KEFmKKJXKSZkvKSkJGRkZYBgG/f39GBgYiGkcF4NSqaRGB2QZpdPpoNFoqM032cNcs9Jtd955J9asWUP/P6k1G41G6PV6ZGdno6KiAlKpFGNjY+A4DiKRiFr0xQpEAD89PR2Dg4PgOI66FxMfHqVSiZUrV6K6uhpFRUWYM2cO1eAeHh6GRCKBQCBAKBSKay87OWQiEz3k7ykoKEB1dTUYhkFnZ+d5FjLxhk6noy2zSqUSCoWCiqoS+TZSHr5mxTdlMhmd7CB+kyzLYt26ddDr9UhOToZcLodKpUJ1dTWefvpp/Pa3v8XJkyfR2xubOVeGYZCbm4ukpCSqS042azabDfn5+VizZg1uv/12epATCoVgsViwZcsWnDhxAoODg2AYBqmpqcjMzKTWh6TvIpaIRCLweDz4zW9+Qz2DyGbu+uuvh9PpxJEjR9Da2hrTOC6GrKwsZGVl0f8/UeTHarViZGQEu3fvhkajQVFREUZHR6Oa1OJC8IlGqm63Gz6fD4FAADk5ObTuPT4+Tm20RSIRbr31VigUCgwMDMSkXCiRSPC9730PNTU1cLlc+Pjjj9HT0wOXy0WNYdvb27Fnzx7s3LkTVqsVbrcbTqcTp0+fpmalAoEAIyMjGBgYQGVlJUQiEXQ6HQ4dOhT1mKeC2+2GUqlEfn4+br75ZixcuBAsy6K/vx8ul2vGp3s8Hg+t5BC3Nbvdjm3btmFgYADDw8Noa2tDamoq0tLSsGLFChw6dAgejycqy7y4EHxkZAQ9PT1QKpW0Bur3+yeRPRAIIDU1FWq1GlKpFIsWLUJXVxddMkQTZEjg61//OgCgu7sbe/fuxfDwMDweDxiGgc1mo7f3M2fOYGxsDHa7fVIsJBvZbDbYbDZ87Wtfg16vh0ajiRvBI5EItFotysrKcMcddyApKQnhcBinT59OCK8el8tFD7+cTifGxsbQ3d2Nd999F729vbBYLLBYLFi7di1ycnJQUFCA9vZ26rp3tYgLwX/xi1/AYDAgNzeXVhrsdju1MRGLxSgsLIRCoYBUKqWN+izLgmXZqGehiooKbNy4EZFIBB988AG2bds2iZDEYqWzs/OChzYTvex5nqejZEajEZs3b47PSJZIhPLycjz44INIT0+HUCjEmTNn8KMf/eiK++qjCXJo53A48Mknn+DDDz/E66+/PqkUKxQKIZVKUVBQgNWrV2PPnj0IhUK03Hk1iAvBx8fHqQei3W5HIBCY5NETCoVgNpvpoQsAtLe3o7+/PyZNOMT9ze/3U/u/q4FWq0VeXh46Ojroujce5BYIBLjrrrtwww03ICcnh55ahkIhDAwMxHwfcClYsGABSktL0d7ejhdeeAGnTp0675yBYRgUFxejqKgI4XAYNpstamOLcSE4qROfe4snIFMqHMdBKBRSa+3u7u6YVFEUCgXS09Ph9/thNpuv6DCE+AtpNBpkZWVhzpw5aG1txeeff4729vaoxzzV+5MRsJycHGrjDfx9AzrTPpksy9L4RkZG0NbWhuHh4UnkFovFyM3NRXZ2NvR6PUKhEDW1jQbiQvCL6Z1wHIeFCxciJSUFAGAymfB///d/sdGL/ttdQi6XY3h4+IpKe8TyUCaTobKyEqWlpSgsLMR3v/vdqBuZTgcy2KvX66ngPfD38uFMr72JJWRtbS3y8vLQ2Nh4XnIjCeKOO+6g5yAWi2VS6fNqMeNT9RzHISMjA9/+9reRnp4Or9eLkydPxlQjhWS/1NRUWr68lOewLIuMjAxUV1cjOTkZHMehra0Nu3btwmuvvRY3cgNn+zsqKytRU1OD7OxsSnCfz5cQqgQpKSn49a9/jQULFkCj0UCr1eK9997DqVOnYLFYUFxcjOuuuw41NTWoqamBVqudVBOP1jDyVROcHIgQtyyhUAilUgngbHnQ5/PRW87E9TTDMMjLy0NJ
SQnKy8uRmZkJsVgMs9mMo0ePxqwBnmS4UCgEjuOQkpKC9PT08+YWs7KykJqaSodkRSIRJBIJ7f12Op1ob29HX18fTCZT3HVIlEolKioqqLcnwYEDB7Bz584Zz+DA2SxOTHWTk5OxYcMGLFiwAC6XCxkZGXT5otPp4Pf70d/fj507d0b1NPuqCc6yLLRaLdRqNTiOA8dxyMrKoqNJZNbS6XRifHycOtxyHIdly5Zhw4YNWLRoETiOQzAYxPj4OD777LOYERw4OxTg8XigUqmQl5eH8vJyWnfleZ7KWdTV1WHp0qXQaDQQi8VUeuzAgQM4cOAAduzYcUmNWdGGQCCghzpEdwQ4W3N+++238fLLL8c1nqkQDAbR1dWFuro6CIVCyOVy3HffffTnE9sb3G43hoeH0djYiOeffx4WiyUxCC6TyVBQUICNGzdSX8v8/HzaUUdcd10uF0ZGRnDkyBE4HA5kZGSgtLQUpaWlkMlktO58+vRpHDhwALt27YrpbOPAwAD27duHW2+9Fffeey/uuusu2kNCwLIshEIhGIaBz+fDqVOn0NTUhB/+8Ie0CjRThyg8z1M1K3L9fD4fnnrqKTQ0NCREi6zVasVzzz2HhQsXIjMzc9JRPDGt3bVrF/bs2YPNmzcjEolQpYJoJoyrIjgxaO3q6oJWq6VdgcTnHQCkUim0Wi2SkpKg1+sRCASgUCig0+mgUCgQiUQwOjqKl156CadPn0ZnZ2fMidPW1oYtW7ZgzZo1UKlUkMlk5637eJ5He3s7PvvsMzQ1NcFkMtF6biLc/l0uF1pbW5Geng6GYeByuXDgwAEMDw/PdGgAzi5PHQ4Hfvvb3+KNN96AQCAAx3F0wt7n82FgYACDg4NwOp0xi+OqlyherxdtbW3QarW0t5osQTiOg0ajgVQqhVwuh8FgmDQJ43Q6YTabcfr0abz11lvo7e2Ni8/6wMAAbDYbjh8/Dp1OR5cfE8HzPD777DNs3bqVLkUSCS6XC6dOncKqVavg8/nQ1taGjo6OhNhgEgSDQbz//vsAzi6rSOcgsUKPB6KqbEVqw6TXd/78+bj77rtptpbL5QgEAhgdHUVPTw/a29vxzjvv4LPPPotK1r5cxahL2anHIltHQ4ErNTUVK1euxG9+8xt88MEH+MEPfhALVYK4uKxFA9MpW8VEuk0ikUAmk0GhUNCxLjJQTKZ5PB4PPB4P7fGIxgfzZZBuI5BKpTAYDPjGN76BU6dO4YMPPohFWXWW4JeDWNtjf5kIzjAMJBIJVa+Khr7fFJgleCLhy0Twvz0/1hvea57gM68pMIsrRiJUcxIdswSfxRcaswSfxRcas0aw8ce1Eidw7cQ6awQ7iy8nZo1gZwDRrKL87fWiENWUmDWCncXMgLhkSKVS+Hw+OJ3OSdrkE3VlruILkKhLkktG3OWTZ5dE0YFOp8Py5ctRV1eHgYEBHD58GAMDA7TlQa1WUxe2mdQrn2kkHMGJI1c8p2OuRSQnJ+Omm27CggULwPM87rzzTlitVtp26na78dprr+HYsWPo7++fcX2UmUJcCX4xwqalpcFgMCA/Px/bt2+PS2fhtQoiOW00GqkkNfGqJ0JKra2t8Hg88Pl8dNZxpgeR442EIvh1112HtWvXYtOmTSgpKYmpvdy1jlAoBJPJRCeQeJ6HRCKhen8ajQZVVVUQi8WQSCRoamqC3W6P6aRUIiKhNpnENFQgECTEZDhBIu4dOjs78dOf/hStra0YHx/HyZMn4Xa76RJPr9ejpqYGWVlZWLhwIRobG7Fv3z7s3LlzpkMHcHZeUyKRgGVZKnPxhXBZuxCIfkoikYkMR5vN5kkGrDMNv9+P4eFh7Nu3D06nkwr9CIVCiMVijIyMIDs7G9nZ2cjPz6eTU42NjTGdoJkKpOKzevVqahhAlMuEQiG1QRweHsaBAwcwOjoaNbInFMH9fj9dRyYCyVmWhUKhwOLFi3HkyJEZMZiaDmSo+8iRI5P+nUzMEJMBr9cL
g8EAuVyOjo4OaLXauGoWymQyaLVaFBQU4Omnn0ZmZiZVKZj4t4TDYRw5cgQ//elPsWfPHrjd7ugEQCbJL+UBgI/l4z//8z/5lpYW3mQy8Tk5OZf9/GjHeffdd/NtbW283W7nf/WrX/ELFiyIyuvG63qmp6fzd999Nz86OsqbzWb+hRde4IuKiniGYS71NQ5faawCgYCXSqX8K6+8wvf39/NOp5MPh8N8JBLhz0UkEuEjkQjv9/v5sbExvqqqipfL5Vd0Tc99JFQGT0lJgUqlwsjIyIzv9p944gmsWrUKRqMRMpkMRqMReXl5aGpqmvHYLhU2mw29vb347LPPkJ6eDpPJFLcloEajwaZNm1BWVga9Xg+JREKXIj6fj95hSGmzoKCAHlzV1tYiEAjg+PHjVx1HQhGcWF0QgcaZgEwmQ1ZWFu666y6qeEsqFBN9668FTFzXVlRUYGxsLC6CnERr8pZbbqEePDzPw+l0wmQyURHWrq4ujIyM0DHGvLw8pKeno6CgAD09PV88gnMch87OTjz88MMztqFbtmwZtm3bNonIwWAQTU1N+PjjjxGJROLRAxIVRCIR9Pb24oUXXsDGjRvR398Pp9MZ87iJ20RVVRWVBgkGg2hoaMB7772H119/fVJNXiKRoKWlBQsWLMDGjRvBMEzU7EwSguAcx6G0tBQqlQpWqxUOh2NGlgFPP/001q5dSwlMrLsffvhhtLa2wmKxgOd5ahsYT0+eKwWR6TAajfB6vTG3ZxQIBKitrcWyZcuoiH0wGITL5UJbWxuGhoYQiUTo5ysUCqFSqZCVlYXMzEzIZDKMjY1FLcElBMGlUimWLVsGpVIJk8k0I8RhGAbV1dWoqKgAAHR0dGB4eBhdXV2or6+fZKlBvgBEyi2RQbRqjEYjLBZLzJ2PiRYhwzBobm5GdnY2tVp0u93UbS0UClH1haysLBQVFcFoNIJlWVoujgYSguBqtRoPPfQQ9Hp9rKbDLwiBQACpVEpdwHiex8svv4xdu3bh4MGD0z7nWiC4SqVCeXk55s+fT00GYgWGYSCXy9Hf3w+3242GhgZ89atfRW5uLpURycvLo4c7aWlpSE9PR2VlJXJzc6FSqaJ+h0kIgotEImRkZEAsFsfESu5iyMzMxKuvvoqSkhKEQiG43W5s27ZtWiH7lStXIiUlBR6PB5988klC98wUFRXhmWeegcFgwNGjR6HRaGC1WqO+DifSdz6fD729vejp6QFw1iSrqqoK99xzD9avXw+xWEw1H8mBj1gshlgspsu+pUuXwmw24/Dhw1cd14wTXKFQUK3tWJmBXggLFy7EihUrUFJSArlcDrPZjN27d2N0dPS8vg2GYaBUKrF48WIUFRXB6/UiFArh1KlTF/TymSnMmTMHpaWl1N6E5/noHaBMAXL4NHGNPTg4iPT0dITDYSQlJUGhUFAyk+dMNEjgeR6ZmZnUzv1qD9ZmnOCpqanIzc2NO7EFAgHkcjk2btyIe+65B2q1GpFIBP39/di8eTMcDsd5z2FZFpmZmVi9ejXq6uoQCAQgFovx/vvvJxzBRSIRamtrsWDBAsjlcvj9fvh8vphkbwKyoZ0Iq9UKk8kEt9sNjuMglUqpwhnpfJzo1wQA6enp9PR1qs/hcjDjBH/ooYfw4IMPxrWhiWEYqNVqfPDBB5gzZw50Oh1t+DGbzVPW4ZVKJQoLC/HHP/4RWVlZVPh/zpw51BUuUaoqCoUCq1evxne+8x26aW5oaEBra2vMzhem++zC4TCGh4exdetWlJeXQy6Xg2EYqgVvNptx7NgxhMNhqFQqFBQUQKVSoaamBo8//jieffbZq9pwzijBiX0c2di9/fbb2LFjR0zfUyQSoaqqCuvWrcOcOXOgVqtpZYEItefm5sJqtcLn80EoFKK4uBiLFi1CTU0N0tLSIBaLqZb11q1bcfDgwYTZbBYWFqKkpAT3338/cnNzwTAMnE4nXn311Zh5d07MyFPB6XTixIkTGBoaonfq
V155BYODgzCbzdQukLgd33TTTRCLxSgvL8fSpUtx8uTJK5aFnjGCMwyD7OxsaDQaAGf9FHfs2IFdu3bF9H0lEgnmzZuHW265BVqtln44wNksxLIsNXYi9uJLlizB+vXrUVtbC47jwPM8XC4X+vv78cknn6CrqytuBJ+uciMQCJCWlobq6mosW7YM119/PcLhMKxWK7q6utDQ0ICBgYGoxkLMuMRiMZXEngo+nw/9/f3o6uqizsdbtmzB4OAg7HY7PTxTKpXo7u5Geno6cnNzYTQasXTpUircekXxz1SzlVwu53ft2sWPjIzwY2Nj/E9+8hM+Ly8vak1Mf5sCP68BKC8vj3/iiSf4trY23u12816vl/d4PLzb7eYHBwf5999/n7/++uv5lStX8t/+9rf5LVu28Farlfd6vbzP5+OdTiff19fHv/7663xRURHPsmzcmq0EAgEvl8t5oVB43s+kUim/ZcsWvquri49EInw4HOaPHTvGP/fcc5fTXHVZzVYcx/HZ2dl8UVERbzAYpn0dhmF4juP4r371q/zChQsv2EjFMAyvUCj4O+64g6+vr+dHR0f57u5u/sMPP5zyMz33miZMs5VQKEReXh7kcjmGhobw+uuvY2RkJGqvP9WakOd5jI2Nobm5GW+//TY2bdoElUoFkUgEr9cLW6uRrQAABnxJREFUi8WCQCCAgoICzJs3D3PmzMG8efOoTQgxr2poaMCuXbvQ398f13U3/zdfm6kyOMuyqKurQ2pqKqxWK1566SXs2bMHHR0dMbm7SCQSZGVl4aabbkJfXx9Onz49pd8owzD02h08eJC2RE8H0pDlcDhgNpshk8kgk8ngdDqRkpICq9V6WfuIGSM4UfwnmzMyMxhreL1e9Pb24sCBA5g7dy6ysrKgVqvh9XoxPj4O4GztuKysDEajESkpKbQpPxQKwWq1orW1FSdPnpwRN4VwOHzel1cikUCr1dKuPZPJhE8++QTNzc30b4oGSGlPIBBAp9MhJycHNTU10Ol08Hg8OHXq1KQvEzkMIxNaZMTuYiD1cfJgWRZyuRxqtRpOp/PaIDhwttdDIpFAJBLBaDRG1eF2OoTDYfT29mJoaAgmkwlLly7FvHnzqN62QqHAsmXLkJWVRZ2XgbMzkC6XCydOnMD+/ftx9OjRmMY5HaYiiE6nw9y5c8GyLILBICwWC+rr62NymAOcvVsUFhairq4OK1asQFVVFUKhEPbu3Quv10sPfYjxFGm2utT3UKvVMBgMtFpF5k61Wu1l3+VnjODBYBC7d+9GbW0tMjIy8Nprr+EPf/gD9u/fH7PdPgFpz2xpaUFHRweSk5OxfPly3HTTTdRWWiqV0tKl1+vF4cOHcejQIfzxj39MGB944Cwhli9fju9973tgWRa//OUv8Ze//CUmJVeSnUOhEHWlIxM7999/P5YtW4bdu3dTn6bt27ejpaUFZrP5kv4OtVqNoqIibNq0CeXl5XTUbnBwEEePHkVra+tlX/cZJfj777+PcDiM2tpaZGdnY82aNcjOzkZdXR0OHjyIwcHBmLqG+Xw+6vj1+eefIzc3F0qlEvn5+RAIBHA6nRgdHcWOHTvoaWVfXx98Pl/CtMrm5+ejoKAAGRkZcDgcGBgYoMfksYJAIIDH46GjbxKJBAaDgR7iyGQyupzr7++H3W4/70RyYoYny6vs7GwsWbIEdXV1SE9PB8dxsFqt2Lt37xXbI84owV9//XU6f5mVlYW1a9fihhtuAM/zeOaZZ7B37144nc6Y9npEIhG43W4cOXKEHg8XFxeD4zj09/ejpaUFP/nJT+BwOBJmHnMiampqUFRUBI7j0N3djfHx8ZjeXci62uFwwGazwev1QqVSgeM4SCQSLF26lB6a+Xw+HD58GHa7ndovEmKTfhSpVIqCggKUlZWhrKwM69ato01X4XAYXV1deOedd/Duu+9eWbyXk4liIcBI1t//9m//hpUrVyI9PR0sy1KbwYGBASxZsuSSeij4
qxS1nDjtPXHtGG1j1auNk0AgEODNN99ERUUF1Go17r//fnz++efRrHdPaWHCMAz0ej3KysrwzW9+EytWrIBarYZEIpnUrej3+2EymeDxeBAMBuF2uyGRSOgZg0gkolUWkUhE690k6QwODuLGG2+k1a0LgU9U8c1QKISxsTFs3rwZn376Kaqrq/HII49ALBbT/pC0tDQMDQ3FvGoRDofpCeW1MrXDcRxYlkU4HEZ3dzfsdnvM35OMn3V0dNDybnl5OSorK6FQKOjJMMuySE5OppWfYDBI/esnNlxFIhFYLBZ0d3djx44dMJvN8Hg8cLvdl0Tu/9/e3aQoDMNRAH+CiiKINxgRqXdw68Z7egc3uvFj1YILL6AURRBFTFSEtjgLSRBBmRlrGzPvB67chORR/yZN8kzqAQeuU3eDwQCZTAaz2QytVguO4yCfz+ul/Fwul2ibTA+2os4WAYDtdpvIyVWXywWn0wnL5RJCCByPR+z3e33+iXoil0olFItF/Tqs2ngchqEuYc7nMw6HAxaLBSaTCTqdjj5ENI4xSL1EuZfNZlEul+F5Hmq1GoQQqFarekn3mbh++t8tzhJFzedHUYRGo6F3qcfkV7es3f5xrFQqaLfbaDabqNfrcBwHUkpdu6/Xa3ieh+l0ivF4fLta/ifGlij3oiiClBLdbhebzQbD4RBSSmNeZjJNv9/HfD7X28LS7CcV0iAIsNvt0Ov14LquXo0Mw1B/giCAEOLtY2tcwFUHjUYj+L7/cMsYXfvKdV34vo9CoZDaURv31BiuVqu3TvP+hHElyiv+W4mSAF4ES2QyBpysxoCT1XgRbPI+pZ3A57T169EXvAiWrMYShazGgJPVGHCyGgNOVmPAyWoMOFmNASerMeBkNQacrPYNGAtT06KTBnEAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [] - } - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "etw8X24pr0cr", - "colab_type": "text" - }, - "source": [ - "Not too bad. Many of the generated images look plausibly like handwritten digits. A larger model trained for a longer time can do much better, of course." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "LTtjqIsnr0ct", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
- ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/18_Using_Reinforcement_Learning_to_Play_Pong.ipynb b/examples/tutorials/18_Using_Reinforcement_Learning_to_Play_Pong.ipynb deleted file mode 100644 index 7d7287c97504365105689bbc8b055fbb1077bd46..0000000000000000000000000000000000000000 --- a/examples/tutorials/18_Using_Reinforcement_Learning_to_Play_Pong.ipynb +++ /dev/null @@ -1,392 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "18_Using_Reinforcement_Learning_to_Play_Pong.ipynb", - "provenance": [] - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "m0jRtbRGsoZy", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 18: Using Reinforcement Learning to Play Pong\n", - "\n", - "This notebook demonstrates using reinforcement learning to train an agent to play Pong.\n", - "\n", - "The first step is to create an `Environment` that implements this task. Fortunately,\n", - "OpenAI Gym already provides an implementation of Pong (and many other tasks appropriate\n", - "for reinforcement learning). DeepChem's `GymEnvironment` class provides an easy way to\n", - "use environments from OpenAI Gym. We could just use it directly, but in this case we\n", - "subclass it and preprocess the screen image a little bit to make learning easier.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/18_Using_Reinforcement_Learning_to_Play_Pong.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment. To install `gym` you should also use `pip install 'gym[atari]'` (We need the extra modifier since we'll be using an atari game). We'll add this command onto our usual Colab installation commands for you" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "qXdmcnhtst-z", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - }, - "outputId": "30790158-71f8-40de-f11d-7ea9c936b71c" - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 2814 100 2814 0 0 35620 0 --:--:-- --:--:-- --:--:-- 35175\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: 
FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.69 s, sys: 598 ms, total: 3.28 s\n", - "Wall time: 3min 48s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "9sv6kX_VsoZ1", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 187 - }, - "outputId": "4563471c-497e-42a7-b5ed-22f205381510" - }, - "source": [ - "!pip install 'gym[atari]'" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Requirement already satisfied: gym[atari] in /usr/local/lib/python3.6/dist-packages (0.17.2)\n", - "Requirement already satisfied: numpy>=1.10.4 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.18.4)\n", - "Requirement already satisfied: cloudpickle<1.4.0,>=1.2.0 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.3.0)\n", - "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from gym[atari]) 
(1.4.1)\n", - "Requirement already satisfied: pyglet<=1.5.0,>=1.4.0 in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (1.5.0)\n", - "Requirement already satisfied: Pillow; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (7.0.0)\n", - "Requirement already satisfied: atari-py~=0.2.0; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (0.2.6)\n", - "Requirement already satisfied: opencv-python; extra == \"atari\" in /usr/local/lib/python3.6/dist-packages (from gym[atari]) (4.1.2.30)\n", - "Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from pyglet<=1.5.0,>=1.4.0->gym[atari]) (0.16.0)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from atari-py~=0.2.0; extra == \"atari\"->gym[atari]) (1.12.0)\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "EuRrb3vpsoZ_", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import deepchem as dc\n", - "import numpy as np\n", - "\n", - "class PongEnv(dc.rl.GymEnvironment):\n", - " def __init__(self):\n", - " super(PongEnv, self).__init__('Pong-v0')\n", - " self._state_shape = (80, 80)\n", - " \n", - " @property\n", - " def state(self):\n", - " # Crop everything outside the play area, reduce the image size,\n", - " # and convert it to black and white.\n", - " cropped = np.array(self._state)[34:194, :, :]\n", - " reduced = cropped[0:-1:2, 0:-1:2]\n", - " grayscale = np.sum(reduced, axis=2)\n", - " bw = np.zeros(grayscale.shape)\n", - " bw[grayscale != 233] = 1\n", - " return bw\n", - "\n", - " def __deepcopy__(self, memo):\n", - " return PongEnv()\n", - "\n", - "env = PongEnv()" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "GNnO3MZ_soaG", - "colab_type": "text" - }, - "source": [ - "Next we create a network to implement the policy. 
We begin with two convolutional layers to process\n", - "the image. That is followed by a dense (fully connected) layer to provide plenty of capacity for game\n", - "logic. We also add a small Gated Recurrent Unit. That gives the network a little bit of memory, so\n", - "it can keep track of which way the ball is moving.\n", - "\n", - "We concatenate the dense and GRU outputs together, and use them as inputs to two final layers that serve as the\n", - "network's outputs. One computes the action probabilities, and the other computes an estimate of the\n", - "state value function.\n", - "\n", - "We also provide an input for the initial state of the GRU, and returned its final state at the end. This is required by the learning algorithm" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "BLdt8WAQsoaH", - "colab_type": "code", - "colab": {} - }, - "source": [ - "import tensorflow as tf\n", - "from tensorflow.keras.layers import Input, Concatenate, Conv2D, Dense, Flatten, GRU, Reshape\n", - "\n", - "class PongPolicy(dc.rl.Policy):\n", - " def __init__(self):\n", - " super(PongPolicy, self).__init__(['action_prob', 'value', 'rnn_state'], [np.zeros(16)])\n", - "\n", - " def create_model(self, **kwargs):\n", - " state = Input(shape=(80, 80))\n", - " rnn_state = Input(shape=(16,))\n", - " conv1 = Conv2D(16, kernel_size=8, strides=4, activation=tf.nn.relu)(Reshape((80, 80, 1))(state))\n", - " conv2 = Conv2D(32, kernel_size=4, strides=2, activation=tf.nn.relu)(conv1)\n", - " dense = Dense(256, activation=tf.nn.relu)(Flatten()(conv2))\n", - " gru, rnn_final_state = GRU(16, return_state=True, return_sequences=True)(\n", - " Reshape((-1, 256))(dense), initial_state=rnn_state)\n", - " concat = Concatenate()([dense, Reshape((16,))(gru)])\n", - " action_prob = Dense(env.n_actions, activation=tf.nn.softmax)(concat)\n", - " value = Dense(1)(concat)\n", - " return tf.keras.Model(inputs=[state, rnn_state], outputs=[action_prob, value, rnn_final_state])\n", - "\n", - "policy = 
PongPolicy()" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "YU19h0aUsoaN", - "colab_type": "text" - }, - "source": [ - "We will optimize the policy using the Asynchronous Advantage Actor Critic (A3C) algorithm. There are lots of hyperparameters we could specify at this point, but the default values for most of them work well on this problem. The only one we need to customize is the learning rate." - ] - }, - { - "cell_type": "code", - "metadata": { - "scrolled": true, - "id": "Fw_wu511soaO", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 343 - }, - "outputId": "64a01d40-960f-4f4a-a21e-cd42457fcc37" - }, - "source": [ - "from deepchem.models.optimizers import Adam\n", - "a3c = dc.rl.A3C(env, policy, model_dir='model', optimizer=Adam(learning_rate=0.0002))" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. 
Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/rl/a3c.py:32: The name tf.log is deprecated. Please use tf.math.log instead.\n", - "\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/math_grad.py:1424: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "Use tf.where in 2.0, which has the same broadcast rule as np.where\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "-PUD4JG2soaU", - "colab_type": "text" - }, - "source": [ - "Optimize for as long as you have patience to. By 1 million steps you should see clear signs of learning. Around 3 million steps it should start to occasionally beat the game's built in AI. By 7 million steps it should be winning almost every time. Running on my laptop, training takes about 20 minutes for every million steps." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Wa18EQlmsoaV", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 105 - }, - "outputId": "39aa4c1a-6da2-4b18-a83b-0bac0a62155a" - }, - "source": [ - "# Change this to train as many steps as you have patience for.\n", - "a3c.fit(1000)" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/rl/a3c.py:412: The name tf.assign is deprecated. Please use tf.compat.v1.assign instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/rl/a3c.py:253: The name tf.global_variables_initializer is deprecated. Please use tf.compat.v1.global_variables_initializer instead.\n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_xHNjusSsoaa", - "colab_type": "text" - }, - "source": [ - "Let's watch it play and see how it does! " - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Ud6DB_ndsoab", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# This code doesn't work well on Colab\n", - "env.reset()\n", - "while not env.terminated:\n", - " env.env.render()\n", - " env.step(a3c.select_action(env.state))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "3MGK4nrhsoah", - "colab_type": "text" - }, - "source": [ - "# Congratulations! Time to join the Community!\n", - "\n", - "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. 
You can also help the DeepChem community in the following ways:\n", - "\n", - "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", - "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", - "\n", - "## Join the DeepChem Gitter\n", - "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" - ] - } - ] -} \ No newline at end of file diff --git a/examples/tutorials/21_Introduction_to_Bioinformatics.ipynb b/examples/tutorials/21_Introduction_to_Bioinformatics.ipynb index 1fcf28ba22629dc2a7e8382601015ff66c7a25dc..0eaf0aa39f48c63dce9a36ae882f7951bbc53690 100644 --- a/examples/tutorials/21_Introduction_to_Bioinformatics.ipynb +++ b/examples/tutorials/21_Introduction_to_Bioinformatics.ipynb @@ -52,29 +52,28 @@ { "cell_type": "code", "metadata": { - "id": "9k2qhejltgQo", + "id": "g21hWuDwGAsC", "colab_type": "code", - "outputId": "41f75690-8054-4d36-94ed-83e2f6b86b4d", "colab": { "base_uri": "https://localhost:8080/", - "height": 462 - } + "height": 323 + }, + "outputId": "839eeca2-c652-4f60-a05f-23897b43c915" }, "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" ], "execution_count": 1, "outputs": [ { "output_type": "stream", "text": [ - "TensorFlow 1.x selected.\n", " % Total % Received % Xferd Average Speed Time Time Time Current\n", " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 36600 0 --:--:-- --:--:-- --:--:-- 
36600\n" + "\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r100 3489 100 3489 0 0 47148 0 --:--:-- --:--:-- --:--:-- 47148\n" ], "name": "stdout" }, @@ -87,41 +86,82 @@ "done\n", "installing miniconda to /root/miniconda\n", "done\n", - "installing deepchem\n", + "installing rdkit, openmm, pdbfixer\n", + "added omnia to channels\n", + "added conda-forge to channels\n", "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" + "conda packages installation finished!\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", + "# conda environments:\n", + "#\n", + "base * /root/miniconda\n", "\n" ], "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "9k2qhejltgQo", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 }, + "outputId": "549f88b8-1619-41d5-f3ce-5e238edf8adf" + }, + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ], + "execution_count": 2, + "outputs": [ { "output_type": "stream", "text": [ - "deepchem-2.3.0 installation finished!\n" + "Collecting deepchem\n", + "\u001b[?25l Downloading 
https://files.pythonhosted.org/packages/b5/d7/3ba15ec6f676ef4d93855d01e40cba75e231339e7d9ea403a2f53cabbab0/deepchem-2.4.0rc1.dev20200805054153.tar.gz (351kB)\n", + "\r\u001b[K |█ | 10kB 15.4MB/s eta 0:00:01\r\u001b[K |█▉ | 20kB 3.1MB/s eta 0:00:01\r\u001b[K |██▉ | 30kB 4.1MB/s eta 0:00:01\r\u001b[K |███▊ | 40kB 4.4MB/s eta 0:00:01\r\u001b[K |████▋ | 51kB 3.5MB/s eta 0:00:01\r\u001b[K |█████▋ | 61kB 3.9MB/s eta 0:00:01\r\u001b[K |██████▌ | 71kB 4.2MB/s eta 0:00:01\r\u001b[K |███████▌ | 81kB 4.5MB/s eta 0:00:01\r\u001b[K |████████▍ | 92kB 4.9MB/s eta 0:00:01\r\u001b[K |█████████▎ | 102kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████▎ | 112kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████▏ | 122kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████▏ | 133kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████ | 143kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████ | 153kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████ | 163kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████▉ | 174kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████▊ | 184kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████▊ | 194kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████▋ | 204kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████▋ | 215kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████▌ | 225kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████▍ | 235kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████▍ | 245kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████▎ | 256kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████▎ | 266kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████▏ | 276kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████ | 286kB 4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████ | 296kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████ | 307kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████ | 317kB 4.7MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▉ | 327kB 4.7MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▊ | 337kB 
4.7MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▊| 348kB 4.7MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 358kB 4.7MB/s \n", + "\u001b[?25hRequirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n", + "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n", + "Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n", + "Building wheels for collected packages: deepchem\n", + " Building wheel for deepchem (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for deepchem: filename=deepchem-2.4.0rc1.dev20200805145043-cp36-none-any.whl size=438623 sha256=b76201fc01bf910a8490d4ed5cc195b109d08f019ce7afc25cdf254c62c4eab3\n", + " Stored in directory: /root/.cache/pip/wheels/41/0f/fe/5f2659dc8e26624863654100f689d8f36cae7c872d2b310394\n", + "Successfully built deepchem\n", + "Installing collected packages: deepchem\n", + "Successfully installed deepchem-2.4.0rc1.dev20200805145043\n" ], - "name": "stderr" + "name": "stdout" }, { - "output_type": "stream", - "text": [ - "CPU times: user 2.91 s, sys: 622 ms, total: 3.54 s\n", - "Wall time: 2min 16s\n" - ], - "name": "stdout" + "output_type": "execute_result", + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'2.4.0-rc1.dev'" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 2 } ] }, @@ -140,23 +180,23 @@ "metadata": { "id": "HeYSJWSAtgQt", "colab_type": "code", - "outputId": "f4aea39d-1bca-4cc4-c01f-4c04a440076d", "colab": { "base_uri": "https://localhost:8080/", "height": 139 - } + }, + "outputId": "d725fb9a-2580-42d7-f6c9-42d79bf7d797" }, "source": [ "!pip install biopython" ], - "execution_count": 2, + "execution_count": 3, "outputs": [ { "output_type": "stream", "text": [ "Collecting biopython\n", "\u001b[?25l Downloading https://files.pythonhosted.org/packages/a8/66/134dbd5f885fc71493c61b6cf04c9ea08082da28da5ed07709b02857cbd0/biopython-1.77-cp36-cp36m-manylinux1_x86_64.whl (2.3MB)\n", - "\u001b[K |████████████████████████████████| 2.3MB 2.7MB/s \n", + "\u001b[K |████████████████████████████████| 2.3MB 4.5MB/s \n", "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from biopython) (1.18.5)\n", "Installing collected packages: biopython\n", "Successfully installed biopython-1.77\n" @@ -170,21 +210,24 @@ "metadata": { "id": "4CxSQrxptgQx", "colab_type": "code", - "outputId": 
"d3403ab5-0cc3-480a-ab99-4064ba4aa044", "colab": { "base_uri": "https://localhost:8080/", - "height": 34 - } + "height": 35 + }, + "outputId": "685a37c8-c4fe-4dc1-e751-eaca5ed02f1e" }, "source": [ "import Bio\n", "Bio.__version__" ], - "execution_count": 3, + "execution_count": 4, "outputs": [ { "output_type": "execute_result", "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, "text/plain": [ "'1.77'" ] @@ -192,7 +235,7 @@ "metadata": { "tags": [] }, - "execution_count": 3 + "execution_count": 4 } ] }, @@ -201,18 +244,18 @@ "metadata": { "id": "7eXZ-43CtgQ6", "colab_type": "code", - "outputId": "8cc0c9f4-7ee5-447c-ab4b-caa4b0db2e4a", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "20e88297-7ca8-4a98-9b3b-01e643eca4c1" }, "source": [ "from Bio.Seq import Seq\n", "my_seq = Seq(\"AGTACACATTG\")\n", "my_seq" ], - "execution_count": 4, + "execution_count": 5, "outputs": [ { "output_type": "execute_result", @@ -224,7 +267,7 @@ "metadata": { "tags": [] }, - "execution_count": 4 + "execution_count": 5 } ] }, @@ -233,16 +276,16 @@ "metadata": { "id": "Fd-wViuTtgRB", "colab_type": "code", - "outputId": "92b43663-3ceb-420f-f41f-a9b290f80858", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "6896f8b4-a4f2-453b-90ac-40fa8f88f0a6" }, "source": [ "my_seq.complement()" ], - "execution_count": 5, + "execution_count": 6, "outputs": [ { "output_type": "execute_result", @@ -254,7 +297,7 @@ "metadata": { "tags": [] }, - "execution_count": 5 + "execution_count": 6 } ] }, @@ -263,16 +306,16 @@ "metadata": { "id": "GlO-43FNtgRF", "colab_type": "code", - "outputId": "5adf1324-d675-4644-dd00-6670f72b0532", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "0cfaf125-ecea-45ef-b174-29be60f25b0c" }, "source": [ "my_seq.reverse_complement()" ], - "execution_count": 6, + "execution_count": 7, "outputs": [ { "output_type": "execute_result", @@ 
-284,7 +327,7 @@ "metadata": { "tags": [] }, - "execution_count": 6 + "execution_count": 7 } ] }, @@ -305,30 +348,30 @@ "metadata": { "id": "U0A0B3-FtgRK", "colab_type": "code", - "outputId": "c70346e5-19b9-4994-abd8-f7ecaee55fc7", "colab": { "base_uri": "https://localhost:8080/", "height": 204 - } + }, + "outputId": "a4483a68-c59d-4698-c208-e8e7ba660d40" }, "source": [ "!wget https://raw.githubusercontent.com/biopython/biopython/master/Doc/examples/ls_orchid.fasta" ], - "execution_count": 7, + "execution_count": 8, "outputs": [ { "output_type": "stream", "text": [ - "--2020-06-12 02:47:50-- https://raw.githubusercontent.com/biopython/biopython/master/Doc/examples/ls_orchid.fasta\n", + "--2020-08-05 14:50:55-- https://raw.githubusercontent.com/biopython/biopython/master/Doc/examples/ls_orchid.fasta\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", "HTTP request sent, awaiting response... 
200 OK\n", "Length: 76480 (75K) [text/plain]\n", "Saving to: ‘ls_orchid.fasta’\n", "\n", - "\rls_orchid.fasta 0%[ ] 0 --.-KB/s \rls_orchid.fasta 100%[===================>] 74.69K --.-KB/s in 0.03s \n", + "\rls_orchid.fasta 0%[ ] 0 --.-KB/s \rls_orchid.fasta 100%[===================>] 74.69K --.-KB/s in 0.01s \n", "\n", - "2020-06-12 02:47:51 (2.36 MB/s) - ‘ls_orchid.fasta’ saved [76480/76480]\n", + "2020-08-05 14:50:55 (4.97 MB/s) - ‘ls_orchid.fasta’ saved [76480/76480]\n", "\n" ], "name": "stdout" @@ -350,11 +393,11 @@ "metadata": { "id": "5ZudMHxttgRQ", "colab_type": "code", - "outputId": "65c2458b-6a7b-47b0-be32-a8564a6f1cf7", "colab": { "base_uri": "https://localhost:8080/", "height": 1000 - } + }, + "outputId": "2f6069e9-7300-440f-e232-15d7a4c5e89d" }, "source": [ "from Bio import SeqIO\n", @@ -364,7 +407,7 @@ " print(repr(seq_record.seq))\n", " print(len(seq_record))" ], - "execution_count": 8, + "execution_count": 9, "outputs": [ { "output_type": "stream", @@ -673,11 +716,11 @@ "metadata": { "id": "kdkqKHmgtgRW", "colab_type": "code", - "outputId": "2cdece26-333d-4401-a6c2-8486d4721c83", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "c9799b6a-48ee-4d3f-d090-c288a665dd4b" }, "source": [ "from Bio.Seq import Seq\n", @@ -685,7 +728,7 @@ "my_seq = Seq(\"ACAGTAGAC\", IUPAC.unambiguous_dna)\n", "my_seq" ], - "execution_count": 9, + "execution_count": 10, "outputs": [ { "output_type": "execute_result", @@ -697,7 +740,7 @@ "metadata": { "tags": [] }, - "execution_count": 9 + "execution_count": 10 } ] }, @@ -706,16 +749,16 @@ "metadata": { "id": "j5xDuf7DtgRb", "colab_type": "code", - "outputId": "ca808df9-e5ed-409f-a0d9-fdb86fe8ce6e", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "0004bcbd-834a-4e3f-a63b-9bcb5b15654e" }, "source": [ "my_seq.alphabet" ], - "execution_count": 10, + "execution_count": 11, "outputs": [ { "output_type": "execute_result", @@ -727,7 +770,7 @@ "metadata": { 
"tags": [] }, - "execution_count": 10 + "execution_count": 11 } ] }, @@ -746,17 +789,17 @@ "metadata": { "id": "O6WUnJEftgRs", "colab_type": "code", - "outputId": "c9d45805-3166-41ee-cf14-74dacb39c011", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "53fa4143-0fe8-441c-9630-840bfab7bbb1" }, "source": [ "my_prot = Seq(\"AAAAA\", IUPAC.protein) # Alanine pentapeptide\n", "my_prot" ], - "execution_count": 11, + "execution_count": 12, "outputs": [ { "output_type": "execute_result", @@ -768,7 +811,7 @@ "metadata": { "tags": [] }, - "execution_count": 11 + "execution_count": 12 } ] }, @@ -777,16 +820,16 @@ "metadata": { "id": "jdgRxL6qtgR0", "colab_type": "code", - "outputId": "08119aad-7aa6-4346-b81b-fd23f636f531", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "cb78014a-55a6-4531-e473-013ee8b72c90" }, "source": [ "my_prot.alphabet" ], - "execution_count": 12, + "execution_count": 13, "outputs": [ { "output_type": "execute_result", @@ -798,7 +841,7 @@ "metadata": { "tags": [] }, - "execution_count": 12 + "execution_count": 13 } ] }, @@ -817,16 +860,16 @@ "metadata": { "id": "OkY6Tx60tgR4", "colab_type": "code", - "outputId": "302f7833-2068-428a-a7fc-5431ee7bfd2c", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "fc113b98-aaac-48e1-cc5a-c305c8c1a310" }, "source": [ "print(len(my_prot))" ], - "execution_count": 13, + "execution_count": 14, "outputs": [ { "output_type": "stream", @@ -842,20 +885,23 @@ "metadata": { "id": "YSOUpm8FtgR8", "colab_type": "code", - "outputId": "eca74488-5978-425b-9df1-7a28e0a525bd", "colab": { "base_uri": "https://localhost:8080/", - "height": 34 - } + "height": 35 + }, + "outputId": "a1f085e2-4304-460b-d335-d8d0521e7955" }, "source": [ "my_prot[0]" ], - "execution_count": 14, + "execution_count": 15, "outputs": [ { "output_type": "execute_result", "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": 
"string" + }, "text/plain": [ "'A'" ] @@ -863,7 +909,7 @@ "metadata": { "tags": [] }, - "execution_count": 14 + "execution_count": 15 } ] }, @@ -882,16 +928,16 @@ "metadata": { "id": "U5v3swWFtgSA", "colab_type": "code", - "outputId": "f1fdd7bf-c504-4177-c22c-28ffa64466b6", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "9b2bc945-326e-4908-cf59-cc4776e9ada6" }, "source": [ "my_prot[0:3]" ], - "execution_count": 15, + "execution_count": 16, "outputs": [ { "output_type": "execute_result", @@ -903,7 +949,7 @@ "metadata": { "tags": [] }, - "execution_count": 15 + "execution_count": 16 } ] }, @@ -922,16 +968,16 @@ "metadata": { "id": "ZG77QUj2tgSJ", "colab_type": "code", - "outputId": "d9242318-f133-44b7-c7cd-bd6e21ab3d54", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "1bc85ce7-fcf7-4359-8fcd-f7164971b403" }, "source": [ "my_prot + my_prot" ], - "execution_count": 16, + "execution_count": 17, "outputs": [ { "output_type": "execute_result", @@ -943,7 +989,7 @@ "metadata": { "tags": [] }, - "execution_count": 16 + "execution_count": 17 } ] }, @@ -962,16 +1008,16 @@ "metadata": { "id": "MZ53Yjr1tgSO", "colab_type": "code", - "outputId": "ca95ef4f-cdf6-4c72-c632-926e5b6b572e", "colab": { "base_uri": "https://localhost:8080/", "height": 287 - } + }, + "outputId": "028fbd95-1cc8-4f12-a708-4b5ea10be37a" }, "source": [ "my_prot + my_seq" ], - "execution_count": 17, + "execution_count": 18, "outputs": [ { "output_type": "error", @@ -980,7 +1026,7 @@ "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmy_prot\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mmy_seq\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in 
\u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmy_prot\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mmy_seq\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/Bio/Seq.py\u001b[0m in \u001b[0;36m__add__\u001b[0;34m(self, other)\u001b[0m\n\u001b[1;32m 335\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mAlphabet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_check_type_compatible\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0malphabet\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mother\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0malphabet\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 336\u001b[0m raise TypeError(\n\u001b[0;32m--> 337\u001b[0;31m \u001b[0;34mf\"Incompatible alphabets {self.alphabet!r} and {other.alphabet!r}\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 338\u001b[0m )\n\u001b[1;32m 339\u001b[0m \u001b[0;31m# They should be the same sequence type (or one of them is generic)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mTypeError\u001b[0m: Incompatible alphabets IUPACProtein() and IUPACUnambiguousDNA()" ] @@ -1016,11 +1062,11 @@ "metadata": { "id": "TvPiRx_0tgSU", "colab_type": "code", - "outputId": "5dad0985-4ba9-4509-d918-5166a852e241", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "97e9bfb5-5ed6-4e8f-cbc5-abc5fdf4e949" }, "source": [ "from Bio.Seq import Seq\n", @@ -1029,7 +1075,7 @@ "coding_dna = Seq(\"ATGATCTCGTAA\", IUPAC.unambiguous_dna)\n", "coding_dna" ], - "execution_count": 18, + "execution_count": 19, "outputs": [ { "output_type": "execute_result", @@ -1041,7 +1087,7 @@ "metadata": { "tags": [] }, - "execution_count": 18 + "execution_count": 19 } ] }, @@ -1050,17 
+1096,17 @@ "metadata": { "id": "arGizrBztgSX", "colab_type": "code", - "outputId": "998c3c72-7ac3-40c2-9075-80d3b1ce7b6a", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "e5917aec-5ef9-40a2-b6be-b9fa21dc1729" }, "source": [ "template_dna = coding_dna.reverse_complement()\n", "template_dna" ], - "execution_count": 19, + "execution_count": 20, "outputs": [ { "output_type": "execute_result", @@ -1072,7 +1118,7 @@ "metadata": { "tags": [] }, - "execution_count": 19 + "execution_count": 20 } ] }, @@ -1093,17 +1139,17 @@ "metadata": { "id": "oo8bBugUtgSa", "colab_type": "code", - "outputId": "f3124064-c9a5-4c7b-a3a5-5f2660068df1", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "b739dc26-bbaf-480a-e3c1-ee44f0103b8b" }, "source": [ "messenger_rna = coding_dna.transcribe()\n", "messenger_rna" ], - "execution_count": 20, + "execution_count": 21, "outputs": [ { "output_type": "execute_result", @@ -1115,7 +1161,7 @@ "metadata": { "tags": [] }, - "execution_count": 20 + "execution_count": 21 } ] }, @@ -1134,16 +1180,16 @@ "metadata": { "id": "edClUMputgSf", "colab_type": "code", - "outputId": "55b1fb1a-72dd-4754-c168-af2c67b61766", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "3c7106fd-20a8-4ecf-8634-d032eda5fedc" }, "source": [ "messenger_rna.back_transcribe()" ], - "execution_count": 21, + "execution_count": 22, "outputs": [ { "output_type": "execute_result", @@ -1155,7 +1201,7 @@ "metadata": { "tags": [] }, - "execution_count": 21 + "execution_count": 22 } ] }, @@ -1200,16 +1246,16 @@ "metadata": { "id": "cy8y6y9CtgSn", "colab_type": "code", - "outputId": "b1fbfcb2-dfd3-4ab9-d102-1d6bb110af7a", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "0fa91c72-1fa3-49ec-9946-0f1d44354de4" }, "source": [ "coding_dna.translate()" ], - "execution_count": 22, + "execution_count": 23, "outputs": [ { "output_type": "execute_result", 
@@ -1221,7 +1267,7 @@ "metadata": { "tags": [] }, - "execution_count": 22 + "execution_count": 23 } ] }, @@ -1240,17 +1286,17 @@ "metadata": { "id": "iwpB4lYatgSs", "colab_type": "code", - "outputId": "12cbe03d-14a5-4c51-cc22-b2b6f0398018", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "84ece633-17df-4dad-b182-e40307b6d8f5" }, "source": [ "coding_dna = Seq(\"ATGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG\", IUPAC.unambiguous_dna)\n", "coding_dna.translate()" ], - "execution_count": 23, + "execution_count": 24, "outputs": [ { "output_type": "execute_result", @@ -1262,7 +1308,7 @@ "metadata": { "tags": [] }, - "execution_count": 23 + "execution_count": 24 } ] }, @@ -1281,16 +1327,16 @@ "metadata": { "id": "6uScm61FtgSw", "colab_type": "code", - "outputId": "254f7d2f-17e6-496e-fccd-3986cd3f0631", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "19811569-0a34-4b87-f4ba-a8563d44bb05" }, "source": [ "coding_dna.translate(to_stop=True)" ], - "execution_count": 24, + "execution_count": 25, "outputs": [ { "output_type": "execute_result", @@ -1302,7 +1348,7 @@ "metadata": { "tags": [] }, - "execution_count": 24 + "execution_count": 25 } ] }, @@ -1323,11 +1369,11 @@ "metadata": { "id": "iy9-Co_WtgS3", "colab_type": "code", - "outputId": "447eea41-332a-45f3-e831-6cde61bbab86", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "4de820f9-5ff0-4c37-f694-516ff1772fe7" }, "source": [ "from Bio.Alphabet import generic_dna\n", @@ -1341,7 +1387,7 @@ "# We specify a \"table\" to use a different translation table for bacterial proteins\n", "gene.translate(table=\"Bacterial\")" ], - "execution_count": 25, + "execution_count": 26, "outputs": [ { "output_type": "execute_result", @@ -1353,7 +1399,7 @@ "metadata": { "tags": [] }, - "execution_count": 25 + "execution_count": 26 } ] }, @@ -1362,16 +1408,16 @@ "metadata": { "id": "yWmqHt3GtgS6", "colab_type": "code", - "outputId": 
"ee9dc0ee-bd5c-4ff0-a1e9-bdbf50840005", "colab": { "base_uri": "https://localhost:8080/", "height": 34 - } + }, + "outputId": "4e423693-9a44-4c93-c815-4065ef35e191" }, "source": [ "gene.translate(table=\"Bacterial\", to_stop=True)" ], - "execution_count": 26, + "execution_count": 27, "outputs": [ { "output_type": "execute_result", @@ -1383,7 +1429,7 @@ "metadata": { "tags": [] }, - "execution_count": 26 + "execution_count": 27 } ] }, @@ -1404,17 +1450,17 @@ "metadata": { "id": "nnHQ_fObtgS9", "colab_type": "code", - "outputId": "446e3606-18d9-434c-87cf-81483a3b146c", "colab": { "base_uri": "https://localhost:8080/", "height": 1000 - } + }, + "outputId": "1cb1ab38-b3dd-48ac-8374-97f30e99b423" }, "source": [ "from Bio.SeqRecord import SeqRecord\n", "help(SeqRecord)" ], - "execution_count": 27, + "execution_count": 28, "outputs": [ { "output_type": "stream", @@ -2278,7 +2324,7 @@ "simple_seq = Seq(\"GATC\")\n", "simple_seq_r = SeqRecord(simple_seq)" ], - "execution_count": 0, + "execution_count": 29, "outputs": [] }, { @@ -2286,11 +2332,11 @@ "metadata": { "id": "3FItR96PtgTG", "colab_type": "code", - "outputId": "7be1b5fd-9029-48e7-915a-d8dd73fcb346", "colab": { "base_uri": "https://localhost:8080/", "height": 51 - } + }, + "outputId": "348f645c-8f5d-4394-d33c-e03bc8dcc04c" }, "source": [ "simple_seq_r.id = \"AC12345\"\n", @@ -2298,7 +2344,7 @@ "print(simple_seq_r.id)\n", "print(simple_seq_r.description)" ], - "execution_count": 29, + "execution_count": 30, "outputs": [ { "output_type": "stream", @@ -2325,21 +2371,21 @@ "metadata": { "id": "vNxAQJkqtgTL", "colab_type": "code", - "outputId": "127a850c-6681-439b-eb71-e29030beff3e", "colab": { "base_uri": "https://localhost:8080/", "height": 204 - } + }, + "outputId": "5851122b-6dcd-4947-c24b-a7d81eb94b01" }, "source": [ "!wget https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.fna" ], - "execution_count": 30, + "execution_count": 31, "outputs": [ { "output_type": "stream", "text": [ - 
"--2020-06-12 02:48:39-- https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.fna\n", + "--2020-08-05 14:52:05-- https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.fna\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", @@ -2348,7 +2394,7 @@ "\n", "\rNC_005816.fna 0%[ ] 0 --.-KB/s \rNC_005816.fna 100%[===================>] 9.62K --.-KB/s in 0s \n", "\n", - "2020-06-12 02:48:39 (63.4 MB/s) - ‘NC_005816.fna’ saved [9853/9853]\n", + "2020-08-05 14:52:05 (50.1 MB/s) - ‘NC_005816.fna’ saved [9853/9853]\n", "\n" ], "name": "stdout" @@ -2360,11 +2406,11 @@ "metadata": { "id": "mvFt3fVqtgTP", "colab_type": "code", - "outputId": "3b1c7a3f-9f60-4aac-ef8e-2667a80327d1", "colab": { "base_uri": "https://localhost:8080/", "height": 54 - } + }, + "outputId": "bb2ec02f-4f4c-4faf-9c4d-6e5bf8f32f36" }, "source": [ "from Bio import SeqIO\n", @@ -2372,7 +2418,7 @@ "record = SeqIO.read(\"NC_005816.fna\", \"fasta\")\n", "record" ], - "execution_count": 31, + "execution_count": 32, "outputs": [ { "output_type": "execute_result", @@ -2384,7 +2430,7 @@ "metadata": { "tags": [] }, - "execution_count": 31 + "execution_count": 32 } ] }, @@ -2405,20 +2451,23 @@ "metadata": { "id": "N7OdmewwtgTa", "colab_type": "code", - "outputId": "9184860d-7abf-4db4-e6b7-8167fc7f4240", "colab": { "base_uri": "https://localhost:8080/", - "height": 34 - } + "height": 35 + }, + "outputId": "0c4c4494-7343-4e64-fd94-9f1b0037c859" }, "source": [ "record.id" ], - "execution_count": 32, + "execution_count": 33, "outputs": [ { "output_type": "execute_result", "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, "text/plain": [ "'gi|45478711|ref|NC_005816.1|'" ] @@ -2426,7 +2475,7 @@ 
"metadata": { "tags": [] }, - "execution_count": 32 + "execution_count": 33 } ] }, @@ -2435,20 +2484,23 @@ "metadata": { "id": "156aQviwtgTd", "colab_type": "code", - "outputId": "dff034ee-633e-473c-94cf-e85b7f6d38d7", "colab": { "base_uri": "https://localhost:8080/", - "height": 34 - } + "height": 35 + }, + "outputId": "95c138e0-1d46-449f-8f0f-5aca1889bc19" }, "source": [ "record.name" ], - "execution_count": 33, + "execution_count": 34, "outputs": [ { "output_type": "execute_result", "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, "text/plain": [ "'gi|45478711|ref|NC_005816.1|'" ] @@ -2456,7 +2508,7 @@ "metadata": { "tags": [] }, - "execution_count": 33 + "execution_count": 34 } ] }, @@ -2465,20 +2517,23 @@ "metadata": { "id": "Ov2neH1XtgTk", "colab_type": "code", - "outputId": "3f3e85a3-3d56-4e04-9fa0-1f1c3f897991", "colab": { "base_uri": "https://localhost:8080/", - "height": 34 - } + "height": 35 + }, + "outputId": "ea6c2f6c-d1c9-40a6-bbe7-dd94d03e09be" }, "source": [ "record.description" ], - "execution_count": 34, + "execution_count": 35, "outputs": [ { "output_type": "execute_result", "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, "text/plain": [ "'gi|45478711|ref|NC_005816.1| Yersinia pestis biovar Microtus str. 
91001 plasmid pPCP1, complete sequence'" ] @@ -2486,7 +2541,7 @@ "metadata": { "tags": [] }, - "execution_count": 34 + "execution_count": 35 } ] }, @@ -2505,30 +2560,30 @@ "metadata": { "id": "LpqMN5Z_tgTs", "colab_type": "code", - "outputId": "8f2d3366-4aba-4182-a922-105ded3c4bfb", "colab": { "base_uri": "https://localhost:8080/", "height": 204 - } + }, + "outputId": "c8b3ddfd-d2fc-4609-d11e-986b628237f8" }, "source": [ "!wget https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.gb" ], - "execution_count": 35, + "execution_count": 36, "outputs": [ { "output_type": "stream", "text": [ - "--2020-06-12 02:48:55-- https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.gb\n", + "--2020-08-05 14:52:19-- https://raw.githubusercontent.com/biopython/biopython/master/Tests/GenBank/NC_005816.gb\n", "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n", "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n", "HTTP request sent, awaiting response... 
200 OK\n", "Length: 31838 (31K) [text/plain]\n", "Saving to: ‘NC_005816.gb’\n", "\n", - "\rNC_005816.gb 0%[ ] 0 --.-KB/s \rNC_005816.gb 100%[===================>] 31.09K --.-KB/s in 0.01s \n", + "\rNC_005816.gb 0%[ ] 0 --.-KB/s \rNC_005816.gb 100%[===================>] 31.09K --.-KB/s in 0.008s \n", "\n", - "2020-06-12 02:48:56 (2.17 MB/s) - ‘NC_005816.gb’ saved [31838/31838]\n", + "2020-08-05 14:52:20 (3.80 MB/s) - ‘NC_005816.gb’ saved [31838/31838]\n", "\n" ], "name": "stdout" @@ -2540,11 +2595,11 @@ "metadata": { "id": "PhalU4PRtgTw", "colab_type": "code", - "outputId": "dd19f359-9385-4a2c-89fb-61b2eec70081", "colab": { "base_uri": "https://localhost:8080/", "height": 54 - } + }, + "outputId": "83c7bb30-d106-4ea2-9922-5a58b99fb3fa" }, "source": [ "from Bio import SeqIO\n", @@ -2552,7 +2607,7 @@ "record = SeqIO.read(\"NC_005816.gb\", \"genbank\")\n", "record" ], - "execution_count": 36, + "execution_count": 37, "outputs": [ { "output_type": "execute_result", @@ -2564,7 +2619,7 @@ "metadata": { "tags": [] }, - "execution_count": 36 + "execution_count": 37 } ] }, @@ -2590,7 +2645,7 @@ "source": [ "" ], - "execution_count": 0, + "execution_count": null, "outputs": [] } ] diff --git a/examples/tutorials/22_Transfer_Learning_With_HuggingFace_tox21.ipynb b/examples/tutorials/22_Transfer_Learning_With_HuggingFace_tox21.ipynb index dc816204fbdffd2896ea87b752d914d51229f1f2..86e5b28b4d64060e2e5c501bec196ffd7cb91236 100644 --- a/examples/tutorials/22_Transfer_Learning_With_HuggingFace_tox21.ipynb +++ b/examples/tutorials/22_Transfer_Learning_With_HuggingFace_tox21.ipynb @@ -6,9 +6,7 @@ "name": "22_Transfer_Learning_With_HuggingFace_tox21.ipynb", "provenance": [], "collapsed_sections": [], - "mount_file_id": "1pD0fsKpYujJgNAttRn9vkdBYGpwCeVC0", - "authorship_tag": "ABX9TyPyKWYOalt7P45/PzaAkzRP", - "include_colab_link": true + "toc_visible": true }, "kernelspec": { "name": "python3", @@ -17,7 +15,7 @@ "accelerator": "GPU", "widgets": { 
"application/vnd.jupyter.widget-state+json": { - "af2449a85886477eb1d774c35945ea7d": { + "98acba3fe53644a8ba4252de10f9a426": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -29,15 +27,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_b510b5c9444a4f7d9dbf5e7f370bcb00", + "layout": "IPY_MODEL_a9173bc7f1fb4d79b5a7122628646485", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_625f9ed2e54044bcb54a80d8adfd36c6", - "IPY_MODEL_656a9e87d904492ea39c2372c15e68cb" + "IPY_MODEL_1ce379976f2743b9b606616e8b8d45f5", + "IPY_MODEL_e00dc06324554fe88258b206a1b2c80c" ] } }, - "b510b5c9444a4f7d9dbf5e7f370bcb00": { + "a9173bc7f1fb4d79b5a7122628646485": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -88,50 +86,50 @@ "left": null } }, - "625f9ed2e54044bcb54a80d8adfd36c6": { + "1ce379976f2743b9b606616e8b8d45f5": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_0d636f90b41d4bae95fe4f41c641c35e", + "style": "IPY_MODEL_8feffc04f07d41bb9467a46ef1664481", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 501, + "max": 515, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 501, + "value": 515, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_444e92b80c5c4c7fb7b9a7e0076de66a" + "layout": "IPY_MODEL_ecdc065df020489b89b59e85ff7aa90a" } }, - "656a9e87d904492ea39c2372c15e68cb": { + "e00dc06324554fe88258b206a1b2c80c": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_dd9ef67b16e84af096ea9def685067b1", + "style": 
"IPY_MODEL_feab1dff569e4d51ae00e06f09de1a45", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 501/501 [00:05<00:00, 87.1B/s]", + "value": " 515/515 [02:35<00:00, 3.31B/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_4633e4426e764ca6a0b74b452461f5ec" + "layout": "IPY_MODEL_f8f963d730154041b9accba63822f0b9" } }, - "0d636f90b41d4bae95fe4f41c641c35e": { + "8feffc04f07d41bb9467a46ef1664481": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -146,7 +144,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "444e92b80c5c4c7fb7b9a7e0076de66a": { + "ecdc065df020489b89b59e85ff7aa90a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -197,7 +195,7 @@ "left": null } }, - "dd9ef67b16e84af096ea9def685067b1": { + "feab1dff569e4d51ae00e06f09de1a45": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -211,7 +209,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "4633e4426e764ca6a0b74b452461f5ec": { + "f8f963d730154041b9accba63822f0b9": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -262,7 +260,7 @@ "left": null } }, - "e3c293267cf74acfa6b1a30285bd8cd8": { + "4b9531aadec94d6997f4df3e48fe9dd5": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -274,15 +272,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_1cea9d510e99411d85de2989133206a5", + "layout": "IPY_MODEL_75f8becf86194588807bd8e118c6e448", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_1afca71c542c418eafff01eeef65e3ec", - "IPY_MODEL_2b673da9114441c88c2150e76b518259" + 
"IPY_MODEL_e3ab7fc4fb4249b092f40eec57017f2b", + "IPY_MODEL_9e049bb8977c42729d3fa05e8e23bef5" ] } }, - "1cea9d510e99411d85de2989133206a5": { + "75f8becf86194588807bd8e118c6e448": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -333,50 +331,50 @@ "left": null } }, - "1afca71c542c418eafff01eeef65e3ec": { + "e3ab7fc4fb4249b092f40eec57017f2b": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_25ccb68cdb014280a769f9b546b5c426", + "style": "IPY_MODEL_7ab4d5afc39f42c582f7d2fee9ba29dc", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 178812144, + "max": 336423582, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 178812144, + "value": 336423582, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_179af9da6aed4ddb827eeb6974b49284" + "layout": "IPY_MODEL_acacca6484d747608fd27537490c490f" } }, - "2b673da9114441c88c2150e76b518259": { + "9e049bb8977c42729d3fa05e8e23bef5": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_8c336ac1a7bd474499b34cfc6ded05ec", + "style": "IPY_MODEL_891d126ceafd4b65bcdcd69959086931", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 179M/179M [00:02<00:00, 73.5MB/s]", + "value": " 336M/336M [00:12<00:00, 27.3MB/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_eb4ab62124f24b239f8219fd212becf6" + "layout": "IPY_MODEL_05d4f7694b4b4d2687dbc0125f444ea0" } }, 
- "25ccb68cdb014280a769f9b546b5c426": { + "7ab4d5afc39f42c582f7d2fee9ba29dc": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -391,7 +389,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "179af9da6aed4ddb827eeb6974b49284": { + "acacca6484d747608fd27537490c490f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -442,7 +440,7 @@ "left": null } }, - "8c336ac1a7bd474499b34cfc6ded05ec": { + "891d126ceafd4b65bcdcd69959086931": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -456,7 +454,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "eb4ab62124f24b239f8219fd212becf6": { + "05d4f7694b4b4d2687dbc0125f444ea0": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -507,7 +505,7 @@ "left": null } }, - "e49da45c84a34da9b66917afdb9060a0": { + "f67218c34f29439b879de2b02da1309d": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -519,15 +517,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_ed2a0c847c834b02896ed12439e286bb", + "layout": "IPY_MODEL_25982cceede845d8a6478b54ab8d6906", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_bfa6ad8f732b4687afbe77181e98cb93", - "IPY_MODEL_a49239fda632493db1e8f1284be9c1c5" + "IPY_MODEL_e58b80417b444cda8a46111c8142d0b1", + "IPY_MODEL_bd945062ce944393adfac4f1bc2dca3f" ] } }, - "ed2a0c847c834b02896ed12439e286bb": { + "25982cceede845d8a6478b54ab8d6906": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -578,50 +576,50 @@ "left": null } }, - "bfa6ad8f732b4687afbe77181e98cb93": { + "e58b80417b444cda8a46111c8142d0b1": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_d68594cf5441469d9fc3340032adde3b", + "style": 
"IPY_MODEL_c0332264f8f74816a32832eae7f81ab1", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 9429, + "max": 11058, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 9429, + "value": 11058, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_c3bf797b8cc34c44a929e9309de06ef4" + "layout": "IPY_MODEL_e280e56118874c728e693b3da661ac16" } }, - "a49239fda632493db1e8f1284be9c1c5": { + "bd945062ce944393adfac4f1bc2dca3f": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_4b380e9403a643489305d6cdf797f99f", + "style": "IPY_MODEL_41a1514a959a48a991556d0a5bef9d26", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 9.43k/9.43k [00:00<00:00, 13.9kB/s]", + "value": " 11.1k/11.1k [00:02<00:00, 5.10kB/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_bf215f351bcd4237a7179b890466155c" + "layout": "IPY_MODEL_00da13a2e5154e52b5408e5bf08da994" } }, - "d68594cf5441469d9fc3340032adde3b": { + "c0332264f8f74816a32832eae7f81ab1": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -636,7 +634,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "c3bf797b8cc34c44a929e9309de06ef4": { + "e280e56118874c728e693b3da661ac16": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -687,7 +685,7 @@ "left": null } }, - "4b380e9403a643489305d6cdf797f99f": { + "41a1514a959a48a991556d0a5bef9d26": { "model_module": "@jupyter-widgets/controls", "model_name": 
"DescriptionStyleModel", "state": { @@ -701,7 +699,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "bf215f351bcd4237a7179b890466155c": { + "00da13a2e5154e52b5408e5bf08da994": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -752,7 +750,7 @@ "left": null } }, - "09daf8e819ad451794ac88654cb7d942": { + "a5f0a5ad353c41c69a275ef766cf7775": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -764,15 +762,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_1741c16025b542988affef0ae2c658e1", + "layout": "IPY_MODEL_4275d2d29e98438ca62e695a534372b9", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_fed80eb0a92b4351af2e9e8ebff99bdc", - "IPY_MODEL_15dffad155504eff99165df54f7e7656" + "IPY_MODEL_970028ca53f244079abe68559bedc62b", + "IPY_MODEL_797465f4f03441968e15b260aef38859" ] } }, - "1741c16025b542988affef0ae2c658e1": { + "4275d2d29e98438ca62e695a534372b9": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -823,50 +821,50 @@ "left": null } }, - "fed80eb0a92b4351af2e9e8ebff99bdc": { + "970028ca53f244079abe68559bedc62b": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_9cfd4f77d1fa485ca4d6ac8d1cdc6738", + "style": "IPY_MODEL_b37d03ab7f0f4ae9b52edbde9ed586e1", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 3213, + "max": 4056, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 3213, + "value": 4056, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_fda92cac1a5e4d8887d31cea9249ba40" + "layout": "IPY_MODEL_fa3f808ac29147e28181d2838a9a5822" } }, - 
"15dffad155504eff99165df54f7e7656": { + "797465f4f03441968e15b260aef38859": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_1d2524191b334cba86943987e3b751ee", + "style": "IPY_MODEL_539ed619d7364d9ca0bd9a11cb2e2498", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 3.21k/3.21k [00:01<00:00, 1.86kB/s]", + "value": " 4.06k/4.06k [00:01<00:00, 2.59kB/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_de1426d650f0450e92bb4cdd02b90d69" + "layout": "IPY_MODEL_00133158eee24e068220037a27a30ad8" } }, - "9cfd4f77d1fa485ca4d6ac8d1cdc6738": { + "b37d03ab7f0f4ae9b52edbde9ed586e1": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -881,7 +879,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "fda92cac1a5e4d8887d31cea9249ba40": { + "fa3f808ac29147e28181d2838a9a5822": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -932,7 +930,7 @@ "left": null } }, - "1d2524191b334cba86943987e3b751ee": { + "539ed619d7364d9ca0bd9a11cb2e2498": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -946,7 +944,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "de1426d650f0450e92bb4cdd02b90d69": { + "00133158eee24e068220037a27a30ad8": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -997,7 +995,7 @@ "left": null } }, - "fa7e397dcc424d1c9685744df739e488": { + "45795699e2f247ae916dbec650640fdb": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -1009,15 +1007,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": 
"IPY_MODEL_c58dd7d8b78b450bad74c780d69a7daf", + "layout": "IPY_MODEL_25bf8f1dd099424993de36ffe8e34577", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_357d3fc89e95460c822a8f1a8e5e2737", - "IPY_MODEL_91bf59c36b344912bf91cb80b132555d" + "IPY_MODEL_c5125dcb1e664845aee1fe54650a8ab6", + "IPY_MODEL_bd227e553de240e1b89a2dbae023ff16" ] } }, - "c58dd7d8b78b450bad74c780d69a7daf": { + "25bf8f1dd099424993de36ffe8e34577": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1068,50 +1066,50 @@ "left": null } }, - "357d3fc89e95460c822a8f1a8e5e2737": { + "c5125dcb1e664845aee1fe54650a8ab6": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_9f250f5430924e3cb87b0d71c1301be0", + "style": "IPY_MODEL_63d86d07dd7042baaca655f6c063f975", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 150, + "max": 772, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 150, + "value": 772, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_b8ef824d51a44562a819194c66f3d77d" + "layout": "IPY_MODEL_45a316a41c7346fab66b505c9bb2d4cc" } }, - "91bf59c36b344912bf91cb80b132555d": { + "bd227e553de240e1b89a2dbae023ff16": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_3e14aa06a7944ffc911268afe00e77ce", + "style": "IPY_MODEL_edaaea155fc6457385127ad5695ecca5", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 150/150 [00:00<00:00, 197B/s]", + "value": " 772/772 [00:00<00:00, 1.23kB/s]", "_view_count": 
null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_d72af554bf5846ceb23a700e34b2cd28" + "layout": "IPY_MODEL_d806297355ab40a0a2d895e041c1e193" } }, - "9f250f5430924e3cb87b0d71c1301be0": { + "63d86d07dd7042baaca655f6c063f975": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -1126,7 +1124,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "b8ef824d51a44562a819194c66f3d77d": { + "45a316a41c7346fab66b505c9bb2d4cc": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1177,7 +1175,7 @@ "left": null } }, - "3e14aa06a7944ffc911268afe00e77ce": { + "edaaea155fc6457385127ad5695ecca5": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -1191,7 +1189,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "d72af554bf5846ceb23a700e34b2cd28": { + "d806297355ab40a0a2d895e041c1e193": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1242,7 +1240,7 @@ "left": null } }, - "a383c283f06f4c309357acc2ecb3bdbb": { + "8004a4812f6144aca56648a6ee5d1c6b": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -1254,15 +1252,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_c0a3ddc86fd549db9213b42166ac1097", + "layout": "IPY_MODEL_9899a51144a34e579335d112aa132c74", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_32ac6cc843864ee7b2b01f4c7c2caca6", - "IPY_MODEL_b9cdf760c72a4c80a3d7d628ed8fd765" + "IPY_MODEL_0e2414f3bd134e848936c7170f14a029", + "IPY_MODEL_7bdd46ac04a94263a4ca942fcb96b001" ] } }, - "c0a3ddc86fd549db9213b42166ac1097": { + "9899a51144a34e579335d112aa132c74": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1313,50 +1311,50 @@ "left": null } }, - "32ac6cc843864ee7b2b01f4c7c2caca6": { 
+ "0e2414f3bd134e848936c7170f14a029": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_8aa8a9fdca414cc3bf6cfef38b4df57c", + "style": "IPY_MODEL_79c4e433d95a47dfb1df0d403e51fd20", "_dom_classes": [], "description": "Downloading: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 166, + "max": 62, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 166, + "value": 62, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_81d61ea6566e4ed6ae2bdc21f1c22faa" + "layout": "IPY_MODEL_7858ea077dd14a4e9ff5f48a3a72d639" } }, - "b9cdf760c72a4c80a3d7d628ed8fd765": { + "7bdd46ac04a94263a4ca942fcb96b001": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_6ecab3cb0ec24b3689db9682c000a325", + "style": "IPY_MODEL_2a989ac5aab849779a18abd94603d1be", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 166/166 [00:00<00:00, 3.17kB/s]", + "value": " 62.0/62.0 [01:33<00:00, 1.52s/B]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_3cbc597bdcbf43f98791115e65aecab4" + "layout": "IPY_MODEL_217f5c224f5a416db001133a1a679b41" } }, - "8aa8a9fdca414cc3bf6cfef38b4df57c": { + "79c4e433d95a47dfb1df0d403e51fd20": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -1371,7 +1369,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "81d61ea6566e4ed6ae2bdc21f1c22faa": { + "7858ea077dd14a4e9ff5f48a3a72d639": { "model_module": "@jupyter-widgets/base", 
"model_name": "LayoutModel", "state": { @@ -1422,7 +1420,7 @@ "left": null } }, - "6ecab3cb0ec24b3689db9682c000a325": { + "2a989ac5aab849779a18abd94603d1be": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -1436,7 +1434,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "3cbc597bdcbf43f98791115e65aecab4": { + "217f5c224f5a416db001133a1a679b41": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1487,7 +1485,7 @@ "left": null } }, - "dde0ff73c3544b1ca17f15054f7afb8b": { + "7807561b736c45d49c3ef812c4aad335": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -1499,15 +1497,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_33343d7e01eb49dbacc8094b2432f8ff", + "layout": "IPY_MODEL_56300d613550401dbef1e7a106ccfb60", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_b36fc55690694e2cae051eda093406a8", - "IPY_MODEL_43739e5bee4c46ccb2ed246983386607" + "IPY_MODEL_ad7e3577ea9c460b98509d9dd5983317", + "IPY_MODEL_2ded2ded871c4925b7332e4f0b84b0d0" ] } }, - "33343d7e01eb49dbacc8094b2432f8ff": { + "56300d613550401dbef1e7a106ccfb60": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1558,50 +1556,50 @@ "left": null } }, - "b36fc55690694e2cae051eda093406a8": { + "ad7e3577ea9c460b98509d9dd5983317": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_36ca4c7b9f7f4309ae67833715ff7290", + "style": "IPY_MODEL_cefa942491b34d04869607504ff25803", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 480, + "max": 1714, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 480, + "value": 1714, "_view_count": null, 
"_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_d95b880d008e4e2892d23d5521bbf996" + "layout": "IPY_MODEL_fd10992442904b90abc0146a28084394" } }, - "43739e5bee4c46ccb2ed246983386607": { + "2ded2ded871c4925b7332e4f0b84b0d0": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_8282fd0873424a50a0e94f2f61269f2f", + "style": "IPY_MODEL_48bdadca9c9745ec89e4c1632ea64830", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 480/480 [01:23<00:00, 5.78B/s]", + "value": " 1714/1714 [00:00<00:00, 4508.38it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_1e9eecc206df42b6abc38f879ece9fbd" + "layout": "IPY_MODEL_e5e25620988048debb93a24b35d974cd" } }, - "36ca4c7b9f7f4309ae67833715ff7290": { + "cefa942491b34d04869607504ff25803": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -1616,7 +1614,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "d95b880d008e4e2892d23d5521bbf996": { + "fd10992442904b90abc0146a28084394": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1667,7 +1665,7 @@ "left": null } }, - "8282fd0873424a50a0e94f2f61269f2f": { + "48bdadca9c9745ec89e4c1632ea64830": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -1681,7 +1679,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "1e9eecc206df42b6abc38f879ece9fbd": { + "e5e25620988048debb93a24b35d974cd": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1732,7 +1730,7 @@ "left": null } }, - 
"d21d80567a4b47e79a377806fd89be34": { + "279b3e3dc6314303a87a96af4185ddba": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -1744,15 +1742,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_3a6b4fd9fdb1470b838b5bbb2b140dab", + "layout": "IPY_MODEL_bfd86388a7ad48189b3a23b2fe7e3360", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_8acf67a7eb5c4038929b65110a9e726d", - "IPY_MODEL_53bd772af72540fb98683953071d2ce9" + "IPY_MODEL_aab774ea207d4dcbbd9337f1e91d3df7", + "IPY_MODEL_c623373ac42a41e68f00f23fdfe50a12" ] } }, - "3a6b4fd9fdb1470b838b5bbb2b140dab": { + "bfd86388a7ad48189b3a23b2fe7e3360": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1803,50 +1801,50 @@ "left": null } }, - "8acf67a7eb5c4038929b65110a9e726d": { + "aab774ea207d4dcbbd9337f1e91d3df7": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_3c4fbeba7daf4c29be0641c14c391082", + "style": "IPY_MODEL_f698206397bb425e9f3f398c87fc4e9e", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "Epoch 3 of 3: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 336404667, + "max": 3, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 336404667, + "value": 3, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_d622d59af30e44dd95ccb49d42e7b7ae" + "layout": "IPY_MODEL_e73e875d811e4d6b9736854de6ece77f" } }, - "53bd772af72540fb98683953071d2ce9": { + "c623373ac42a41e68f00f23fdfe50a12": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_f90877640e3a43c381bd5ed8b802dda0", + "style": 
"IPY_MODEL_84a880bc358c4ea5ab1042ce68dc5471", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 336M/336M [00:04<00:00, 68.5MB/s]", + "value": " 3/3 [01:00<00:00, 20.00s/it]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_db17e76c0d0f4eba8dd01e35c642c11e" + "layout": "IPY_MODEL_fcefafceb5c5452a9fa1ef933c401fee" } }, - "3c4fbeba7daf4c29be0641c14c391082": { + "f698206397bb425e9f3f398c87fc4e9e": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -1861,7 +1859,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "d622d59af30e44dd95ccb49d42e7b7ae": { + "e73e875d811e4d6b9736854de6ece77f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1912,7 +1910,7 @@ "left": null } }, - "f90877640e3a43c381bd5ed8b802dda0": { + "84a880bc358c4ea5ab1042ce68dc5471": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -1926,7 +1924,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "db17e76c0d0f4eba8dd01e35c642c11e": { + "fcefafceb5c5452a9fa1ef933c401fee": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -1977,7 +1975,7 @@ "left": null } }, - "987ddef0ff664b6eb491597364bf3cb9": { + "465f65693fbb424e8be75d5a93db43cd": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -1989,15 +1987,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_8bc4a38a6d0e43e8a4d332817c8f9406", + "layout": "IPY_MODEL_fd04c65e25624b5eb92f57a5b5193c9f", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_634462afacee43f89e93e5413d0daa6b", - "IPY_MODEL_dd527df79ed844efb2b10916c7d0c955" + 
"IPY_MODEL_4249f25837d84083a1b0cff9ef90ec17", + "IPY_MODEL_26047712683443e8b87c124d7f735438" ] } }, - "8bc4a38a6d0e43e8a4d332817c8f9406": { + "fd04c65e25624b5eb92f57a5b5193c9f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2048,50 +2046,50 @@ "left": null } }, - "634462afacee43f89e93e5413d0daa6b": { + "4249f25837d84083a1b0cff9ef90ec17": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_6a8d7546b69c4818896449daa3127a27", + "style": "IPY_MODEL_b2a663d0d51745e5bf810f2c48eda368", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "Epochs 0/3. Running Loss: 0.1666: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 11058, + "max": 215, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 11058, + "value": 215, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_3e3ca6b4229e4fb3b985260c60eaec52" + "layout": "IPY_MODEL_9d7fcf3d445249ec966b74f2b91f866a" } }, - "dd527df79ed844efb2b10916c7d0c955": { + "26047712683443e8b87c124d7f735438": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_4e1c338648354a2eb50054cf4245fe47", + "style": "IPY_MODEL_f25bd28c1e934954b5ee214580384d6f", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 11.1k/11.1k [00:01<00:00, 6.48kB/s]", + "value": " 215/215 [00:15<00:00, 13.68it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_5b9f6eaa15a14a1d90ad4402ee67bf19" + "layout": 
"IPY_MODEL_a6b01b4bb4ed41caba3190451f52f2b4" } }, - "6a8d7546b69c4818896449daa3127a27": { + "b2a663d0d51745e5bf810f2c48eda368": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -2106,7 +2104,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "3e3ca6b4229e4fb3b985260c60eaec52": { + "9d7fcf3d445249ec966b74f2b91f866a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2157,7 +2155,7 @@ "left": null } }, - "4e1c338648354a2eb50054cf4245fe47": { + "f25bd28c1e934954b5ee214580384d6f": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -2171,7 +2169,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "5b9f6eaa15a14a1d90ad4402ee67bf19": { + "a6b01b4bb4ed41caba3190451f52f2b4": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2222,7 +2220,7 @@ "left": null } }, - "736e44e3cb374895bedcf188c410381e": { + "0d3b6b7b5bc944d99a5557088d8d6c92": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -2234,15 +2232,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_6b97fbdac2f34443ac9f8d7c8902b5c5", + "layout": "IPY_MODEL_a3eb9a29c70443a793de600754fdd508", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_7b75be2cfb7a4012a4f90e81401034c1", - "IPY_MODEL_85cc12ea1050448e9f14b6841db97b5c" + "IPY_MODEL_742dbb8f102143e69d76ca57420068e3", + "IPY_MODEL_9eef2984c1d347faace0a46de7982a39" ] } }, - "6b97fbdac2f34443ac9f8d7c8902b5c5": { + "a3eb9a29c70443a793de600754fdd508": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2293,50 +2291,50 @@ "left": null } }, - "7b75be2cfb7a4012a4f90e81401034c1": { + "742dbb8f102143e69d76ca57420068e3": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": 
"IPY_MODEL_ef3e457fd62149e8aa4dc0a5b6356c4b", + "style": "IPY_MODEL_d74f785a6f814941be68867872b4c93d", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "Epochs 1/3. Running Loss: 0.0323: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 4056, + "max": 215, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 4056, + "value": 215, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_1095ce8d23d643fc8095ae7d509744e6" + "layout": "IPY_MODEL_19b07e0fa3b8429091462844f4d152e7" } }, - "85cc12ea1050448e9f14b6841db97b5c": { + "9eef2984c1d347faace0a46de7982a39": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_bf963742546d4254937e679300ca10ea", + "style": "IPY_MODEL_fabc8b6b78704ddb94fb79e90c72bba9", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 4.06k/4.06k [00:00<00:00, 4.20kB/s]", + "value": " 215/215 [00:21<00:00, 10.10it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_294b001c57e4444dae15bde61cf9ba54" + "layout": "IPY_MODEL_3be6b90e331841deb02c05df7b718757" } }, - "ef3e457fd62149e8aa4dc0a5b6356c4b": { + "d74f785a6f814941be68867872b4c93d": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -2351,7 +2349,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "1095ce8d23d643fc8095ae7d509744e6": { + "19b07e0fa3b8429091462844f4d152e7": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2402,7 +2400,7 @@ "left": null } }, - 
"bf963742546d4254937e679300ca10ea": { + "fabc8b6b78704ddb94fb79e90c72bba9": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -2416,7 +2414,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "294b001c57e4444dae15bde61cf9ba54": { + "3be6b90e331841deb02c05df7b718757": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2467,7 +2465,7 @@ "left": null } }, - "83c90fda230a4a089bcee7905d765ee9": { + "4d8412a635904a129289253a75d68d6a": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -2479,15 +2477,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_5ffe945d78da49cd997595479764c10d", + "layout": "IPY_MODEL_2d1d3df881e84076bcd3870dd40a542e", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_c385de22e24a41e1bd819911c0928c58", - "IPY_MODEL_3cb96b04a2bd43ca939155e73804a529" + "IPY_MODEL_45e65053977d4028a23b4e1b57a37c86", + "IPY_MODEL_d8d4f82380074174aa4a3405a396b084" ] } }, - "5ffe945d78da49cd997595479764c10d": { + "2d1d3df881e84076bcd3870dd40a542e": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2538,50 +2536,50 @@ "left": null } }, - "c385de22e24a41e1bd819911c0928c58": { + "45e65053977d4028a23b4e1b57a37c86": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_48216c031181421fb44f6623d9052951", + "style": "IPY_MODEL_91c6d5dfa6b64da6803b076999751b71", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "Epochs 2/3. 
Running Loss: 0.0014: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 150, + "max": 215, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 150, + "value": 215, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_dd91954841e64caab850c137d4866d00" + "layout": "IPY_MODEL_d06e91d24b324a8ea9552aed0075994f" } }, - "3cb96b04a2bd43ca939155e73804a529": { + "d8d4f82380074174aa4a3405a396b084": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_01b86bfcbd8f4b0ba8cf8b995ba97e98", + "style": "IPY_MODEL_df3e87efb0ba4666adc6e86e40940d80", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 150/150 [01:12<00:00, 2.06B/s]", + "value": " 215/215 [00:15<00:00, 13.93it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_9498d0a02f104a07833f9b8fce78e43b" + "layout": "IPY_MODEL_930cc053f1c449d495016847039bf32b" } }, - "48216c031181421fb44f6623d9052951": { + "91c6d5dfa6b64da6803b076999751b71": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -2596,7 +2594,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "dd91954841e64caab850c137d4866d00": { + "d06e91d24b324a8ea9552aed0075994f": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2647,7 +2645,7 @@ "left": null } }, - "01b86bfcbd8f4b0ba8cf8b995ba97e98": { + "df3e87efb0ba4666adc6e86e40940d80": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -2661,7 +2659,7 @@ "_model_module": 
"@jupyter-widgets/controls" } }, - "9498d0a02f104a07833f9b8fce78e43b": { + "930cc053f1c449d495016847039bf32b": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2712,7 +2710,7 @@ "left": null } }, - "eadc3ece700643ee8dcfc62c6ac9390e": { + "825b4279ccc44474a7623ccd1e7e7f69": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -2724,15 +2722,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_b25e2925e32748f9abc0f2fa9f061dae", + "layout": "IPY_MODEL_8eda205d9f7c4e8081f924bd740ec742", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_ec951b3c633048e4953622abfcf1ed77", - "IPY_MODEL_93706b45524b4e61948b437a3c2bf75a" + "IPY_MODEL_7c9c0f9b8f5d490f8cd7b77e6ead14ea", + "IPY_MODEL_a847855e7d35468b8fd0cbce5775d271" ] } }, - "b25e2925e32748f9abc0f2fa9f061dae": { + "8eda205d9f7c4e8081f924bd740ec742": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2783,50 +2781,50 @@ "left": null } }, - "ec951b3c633048e4953622abfcf1ed77": { + "7c9c0f9b8f5d490f8cd7b77e6ead14ea": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_4be1b2f15c55402a9c11ffc611555769", + "style": "IPY_MODEL_e71cc479dbe74ba8a8bfd11ffcec70bb", "_dom_classes": [], - "description": "Downloading: 100%", + "description": "100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 16, + "max": 428, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 16, + "value": 428, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_b21308fc036b434a8479c88985adacf8" + "layout": "IPY_MODEL_e91d33e27c81443c9ec8a8b7768bda36" } }, - "93706b45524b4e61948b437a3c2bf75a": { + 
"a847855e7d35468b8fd0cbce5775d271": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_9e82afe32c1e4503bde2f6cdfc31abe4", + "style": "IPY_MODEL_712d56d1289247ba92d1d195e53ad578", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 16.0/16.0 [00:00<00:00, 138B/s]", + "value": " 428/428 [00:00<00:00, 3165.93it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f0f78df7f8144c0b9e621a85c1be8bec" + "layout": "IPY_MODEL_900af4baa3604152a2294b979a73cfc5" } }, - "4be1b2f15c55402a9c11ffc611555769": { + "e71cc479dbe74ba8a8bfd11ffcec70bb": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -2841,7 +2839,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "b21308fc036b434a8479c88985adacf8": { + "e91d33e27c81443c9ec8a8b7768bda36": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2892,7 +2890,7 @@ "left": null } }, - "9e82afe32c1e4503bde2f6cdfc31abe4": { + "712d56d1289247ba92d1d195e53ad578": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -2906,7 +2904,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "f0f78df7f8144c0b9e621a85c1be8bec": { + "900af4baa3604152a2294b979a73cfc5": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -2957,7 +2955,7 @@ "left": null } }, - "136b015c75e34642bd689b4ef456218e": { + "883c0f6063364ddfaa1bf0c00fd62a61": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -2969,15 +2967,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_e8f6a120219d462dbfe855f4a063435f", + "layout": 
"IPY_MODEL_526a14329c7540fc8abfa2105a7f8ef5", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_7c42ba33692848b9bced35360ff3d003", - "IPY_MODEL_bff1343b5c724187b92702de133f6a03" + "IPY_MODEL_3ec543b9508f4f8d85d4179ec14f97fa", + "IPY_MODEL_8472dd2d50474e4f81062aaf7366aaa2" ] } }, - "e8f6a120219d462dbfe855f4a063435f": { + "526a14329c7540fc8abfa2105a7f8ef5": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3028,50 +3026,50 @@ "left": null } }, - "7c42ba33692848b9bced35360ff3d003": { + "3ec543b9508f4f8d85d4179ec14f97fa": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_311b578ab682442d94b772f6365c2b7f", + "style": "IPY_MODEL_f99e5b80c68048e6b92a9139fc41773f", "_dom_classes": [], - "description": "100%", + "description": "Running Evaluation: 100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 1714, + "max": 54, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 1714, + "value": 54, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_b2b573bfb1a54c8bac35b908ad32b835" + "layout": "IPY_MODEL_5c5192e6e50c4f439204c735bccd40d3" } }, - "bff1343b5c724187b92702de133f6a03": { + "8472dd2d50474e4f81062aaf7366aaa2": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_db7a1ccfc79e4758bc85c767dbadd162", + "style": "IPY_MODEL_05e42d0e4fd34968b8327bfb1e6b00f9", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 1714/1714 [00:00<00:00, 5779.01it/s]", + "value": " 54/54 [00:02<00:00, 22.79it/s]", "_view_count": null, 
"_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_37a98680611d40eba5026d930be4ca5c" + "layout": "IPY_MODEL_5c5920fb6c964332b7e380011cd23ec8" } }, - "311b578ab682442d94b772f6365c2b7f": { + "f99e5b80c68048e6b92a9139fc41773f": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -3086,7 +3084,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "b2b573bfb1a54c8bac35b908ad32b835": { + "5c5192e6e50c4f439204c735bccd40d3": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3137,7 +3135,7 @@ "left": null } }, - "db7a1ccfc79e4758bc85c767dbadd162": { + "05e42d0e4fd34968b8327bfb1e6b00f9": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -3151,7 +3149,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "37a98680611d40eba5026d930be4ca5c": { + "5c5920fb6c964332b7e380011cd23ec8": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3202,7 +3200,7 @@ "left": null } }, - "c39c27352ce140bfa650c266ac205cb2": { + "7e5cba5c2747441f8d03d888dc9b933b": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -3214,15 +3212,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_607426d9589b4e84b4fcfd3a64392374", + "layout": "IPY_MODEL_e7942a62f62c413d927abfcb081d685a", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_5649cf1a33504fcca606dd75f1db4e1a", - "IPY_MODEL_205da1ebc6d3432d9be53adf2ad87633" + "IPY_MODEL_65cdde6d617142bea6bb287ad35d8861", + "IPY_MODEL_7a955bc78f0749199bd82fae712c9f75" ] } }, - "607426d9589b4e84b4fcfd3a64392374": { + "e7942a62f62c413d927abfcb081d685a": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3273,50 +3271,50 @@ "left": null } }, - "5649cf1a33504fcca606dd75f1db4e1a": { + 
"65cdde6d617142bea6bb287ad35d8861": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_ca6ec52d47284cf8ab617f2dfbc04358", + "style": "IPY_MODEL_44d74c51151a4311a37fba97c6175249", "_dom_classes": [], - "description": "Epoch: 100%", + "description": "100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 3, + "max": 1, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 3, + "value": 1, "_view_count": null, "_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_59878a92f1b74e8b92e73ad7ab509020" + "layout": "IPY_MODEL_c354a0c446e648f6af555bbad692f79c" } }, - "205da1ebc6d3432d9be53adf2ad87633": { + "7a955bc78f0749199bd82fae712c9f75": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_9b51b5951e7d445ba307dd539dd28f75", + "style": "IPY_MODEL_d69caa93921e4b2897a07ce2bf0cce5a", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 3/3 [01:07<00:00, 22.60s/it]", + "value": " 1/1 [00:00<00:00, 30.16it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_73ae0afccecb42489812b849a17a1dfc" + "layout": "IPY_MODEL_6571a194af084dd7b6edb7ba3716c0cf" } }, - "ca6ec52d47284cf8ab617f2dfbc04358": { + "44d74c51151a4311a37fba97c6175249": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -3331,7 +3329,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "59878a92f1b74e8b92e73ad7ab509020": { + "c354a0c446e648f6af555bbad692f79c": { "model_module": "@jupyter-widgets/base", 
"model_name": "LayoutModel", "state": { @@ -3382,7 +3380,7 @@ "left": null } }, - "9b51b5951e7d445ba307dd539dd28f75": { + "d69caa93921e4b2897a07ce2bf0cce5a": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -3396,7 +3394,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "73ae0afccecb42489812b849a17a1dfc": { + "6571a194af084dd7b6edb7ba3716c0cf": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3447,7 +3445,7 @@ "left": null } }, - "50d49a1384cb474dbb51e38375c005e3": { + "b85c5d27c8e64499b0b38b3bbf836afa": { "model_module": "@jupyter-widgets/controls", "model_name": "HBoxModel", "state": { @@ -3459,15 +3457,15 @@ "_view_count": null, "_view_module_version": "1.5.0", "box_style": "", - "layout": "IPY_MODEL_3175c0c02b9340319f23790cda3f741a", + "layout": "IPY_MODEL_7429d08b7f14425393c08d9521918655", "_model_module": "@jupyter-widgets/controls", "children": [ - "IPY_MODEL_12c7dafc2f5b4f4e99b646dc987e305a", - "IPY_MODEL_19f4fb0189574f659be5f677b176049b" + "IPY_MODEL_e27d53e7ef84443d8e6339de513f9e0b", + "IPY_MODEL_0ff672cb082f4c4996cac50c632c1a8e" ] } }, - "3175c0c02b9340319f23790cda3f741a": { + "7429d08b7f14425393c08d9521918655": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3518,50 +3516,50 @@ "left": null } }, - "12c7dafc2f5b4f4e99b646dc987e305a": { + "e27d53e7ef84443d8e6339de513f9e0b": { "model_module": "@jupyter-widgets/controls", "model_name": "FloatProgressModel", "state": { "_view_name": "ProgressView", - "style": "IPY_MODEL_b617fd70d5e44dfc8aaf9e2e70dd96b8", + "style": "IPY_MODEL_1227fa30365b44fab9b9dfabfb73e851", "_dom_classes": [], - "description": "Current iteration: 100%", + "description": "100%", "_model_name": "FloatProgressModel", "bar_style": "success", - "max": 215, + "max": 1, "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": 215, + "value": 1, "_view_count": null, 
"_view_module_version": "1.5.0", "orientation": "horizontal", "min": 0, "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_0716ea9d615f43f5979a3ec4bb97433d" + "layout": "IPY_MODEL_84788d321e9942e883ebb51375679bbd" } }, - "19f4fb0189574f659be5f677b176049b": { + "0ff672cb082f4c4996cac50c632c1a8e": { "model_module": "@jupyter-widgets/controls", "model_name": "HTMLModel", "state": { "_view_name": "HTMLView", - "style": "IPY_MODEL_ab22977b97de485c8e7ff5ad32401a42", + "style": "IPY_MODEL_f2924e39f1054f41a16f1546d2b3db16", "_dom_classes": [], "description": "", "_model_name": "HTMLModel", "placeholder": "​", "_view_module": "@jupyter-widgets/controls", "_model_module_version": "1.5.0", - "value": " 215/215 [00:21<00:00, 10.22it/s]", + "value": " 1/1 [00:00<00:00, 18.00it/s]", "_view_count": null, "_view_module_version": "1.5.0", "description_tooltip": null, "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f289b20aaf2c4d6fb4f03b436fef6836" + "layout": "IPY_MODEL_ceb6ea7c05e244d7b6c0e335ea8d71c2" } }, - "b617fd70d5e44dfc8aaf9e2e70dd96b8": { + "1227fa30365b44fab9b9dfabfb73e851": { "model_module": "@jupyter-widgets/controls", "model_name": "ProgressStyleModel", "state": { @@ -3576,7 +3574,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "0716ea9d615f43f5979a3ec4bb97433d": { + "84788d321e9942e883ebb51375679bbd": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3627,7 +3625,7 @@ "left": null } }, - "ab22977b97de485c8e7ff5ad32401a42": { + "f2924e39f1054f41a16f1546d2b3db16": { "model_module": "@jupyter-widgets/controls", "model_name": "DescriptionStyleModel", "state": { @@ -3641,7 +3639,7 @@ "_model_module": "@jupyter-widgets/controls" } }, - "f289b20aaf2c4d6fb4f03b436fef6836": { + "ceb6ea7c05e244d7b6c0e335ea8d71c2": { "model_module": "@jupyter-widgets/base", "model_name": "LayoutModel", "state": { @@ -3691,1757 +3689,866 @@ "display": null, "left": null } + } + } + } 
+ }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "QqB-9snlWZk9", + "colab_type": "text" + }, + "source": [ + "# Part 22, ChemBERTa: Pre-training a BERT-like model for masked language modelling of SMILES and molecular property prediction.\n", + "\n", + "![alt text](https://huggingface.co/front/assets/huggingface_mask.svg)\n", + "\n", + "By Seyone Chithrananda ([Twitter](https://twitter.com/SeyoneC))\n", + "\n", + "Deep learning for chemistry and materials science remains a novel field with lots of potiential. However, the popularity of transfer learning based methods in areas such as NLP and computer vision have not yet been effectively developed in computational chemistry + machine learning. Using HuggingFace's suite of models and the ByteLevel tokenizer, we are able to train a large-transformer model, RoBERTa, on a large corpus of 100k SMILES strings from a commonly known benchmark chemistry dataset, ZINC.\n", + "\n", + "Training RoBERTa over 5 epochs, the model achieves a pretty good loss of 0.398, and may likely continue to decrease if trained for a larger number of epochs. The model can predict tokens within a SMILES sequence/molecule, allowing for variants of a molecule within discoverable chemical space to be predicted.\n", + "\n", + "By applying the representations of functional groups and atoms learned by the model, we can try to tackle problems of toxicity, solubility, drug-likeness, and synthesis accessibility on smaller datasets using the learned representations as features for graph convolution and attention models on the graph structure of molecules, as well as fine-tuning of BERT. 
Finally, we propose the use of attention visualization as a helpful tool for chemistry practitioners and students to quickly identify important substructures in various chemical properties.\n", + "\n", + "Additionally, visualization of the attention mechanism have been seen through previous research as incredibly valuable towards chemical reaction classification. The applications of open-sourcing large-scale transformer models such as RoBERTa with HuggingFace may allow for the acceleration of these individual research directions.\n", + "\n", + "A link to a repository which includes the training, uploading and evaluation notebook (with sample predictions on compounds such as Remdesivir) can be found [here](https://github.com/seyonechithrananda/bert-loves-chemistry). All of the notebooks can be copied into a new Colab runtime for easy execution.\n", + "\n", + "For the sake of this tutorial, we'll be fine-tuning RoBERTa on a small-scale molecule dataset, to show the potiential and effectiveness of HuggingFace's NLP-based transfer learning applied to computational chemistry. Output for some cells are purposely cleared for readability, so do not worry if some output messages for your cells differ!" 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "6CMz5kaBWc_Y", + "colab_type": "text" + }, + "source": [ + "Installing DeepChem from source, alongside RDKit for molecule visualizations" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "VjDBOn0Wmybe", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 }, - "bfa661dfa3de41df810e0b5035d52c1e": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_1dd271d6a49445bf81488cb92a81247f", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_b9b287012e704eaea45d48f21836b8c4", - "IPY_MODEL_7b5168a54bba443980f471c5623d8a3b" + "outputId": "fc28d8e4-e7e6-4915-c7d3-bc306f394fc9" + }, + "source": [ + "!git clone https://github.com/NVIDIA/apex\n", + "!cd /content/apex\n", + "!pip install -v --no-cache-dir /content/apex\n", + "!pip install transformers\n", + "!pip install simpletransformers\n", + "!pip install wandb\n", + "!pip install scikit-learn\n", + "!cd .." 
+ ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'apex'...\n", + "remote: Enumerating objects: 24, done.\u001b[K\n", + "remote: Counting objects: 100% (24/24), done.\u001b[K\n", + "remote: Compressing objects: 100% (23/23), done.\u001b[K\n", + "remote: Total 7424 (delta 6), reused 4 (delta 1), pack-reused 7400\u001b[K\n", + "Receiving objects: 100% (7424/7424), 13.92 MiB | 18.46 MiB/s, done.\n", + "Resolving deltas: 100% (5005/5005), done.\n", + "Created temporary directory: /tmp/pip-ephem-wheel-cache-37xcttr2\n", + "Created temporary directory: /tmp/pip-req-tracker-6oxhyrc_\n", + "Created requirements tracker '/tmp/pip-req-tracker-6oxhyrc_'\n", + "Created temporary directory: /tmp/pip-install-5_3a38h9\n", + "Processing ./apex\n", + " Created temporary directory: /tmp/pip-req-build-8s0yfy62\n", + " Added file:///content/apex to build tracker '/tmp/pip-req-tracker-6oxhyrc_'\n", + " Running setup.py (path:/tmp/pip-req-build-8s0yfy62/setup.py) egg_info for package from file:///content/apex\n", + " Running command python setup.py egg_info\n", + "\n", + "\n", + " torch.__version__ = 1.6.0+cu101\n", + "\n", + "\n", + " running egg_info\n", + " creating /tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info\n", + " writing /tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info/PKG-INFO\n", + " writing dependency_links to /tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info/dependency_links.txt\n", + " writing top-level names to /tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info/top_level.txt\n", + " writing manifest file '/tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info/SOURCES.txt'\n", + " writing manifest file '/tmp/pip-req-build-8s0yfy62/pip-egg-info/apex.egg-info/SOURCES.txt'\n", + " /tmp/pip-req-build-8s0yfy62/setup.py:67: UserWarning: Option --pyprof not specified. Not installing PyProf dependencies!\n", + " warnings.warn(\"Option --pyprof not specified. 
Not installing PyProf dependencies!\")\n", + " Source in /tmp/pip-req-build-8s0yfy62 has version 0.1, which satisfies requirement apex==0.1 from file:///content/apex\n", + " Removed apex==0.1 from file:///content/apex from build tracker '/tmp/pip-req-tracker-6oxhyrc_'\n", + "Building wheels for collected packages: apex\n", + " Created temporary directory: /tmp/pip-wheel-nxntosk1\n", + " Building wheel for apex (setup.py) ... \u001b[?25l Destination directory: /tmp/pip-wheel-nxntosk1\n", + " Running command /usr/bin/python3 -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'/tmp/pip-req-build-8s0yfy62/setup.py'\"'\"'; __file__='\"'\"'/tmp/pip-req-build-8s0yfy62/setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' bdist_wheel -d /tmp/pip-wheel-nxntosk1 --python-tag cp36\n", + "\n", + "\n", + " torch.__version__ = 1.6.0+cu101\n", + "\n", + "\n", + " /tmp/pip-req-build-8s0yfy62/setup.py:67: UserWarning: Option --pyprof not specified. Not installing PyProf dependencies!\n", + " warnings.warn(\"Option --pyprof not specified. 
Not installing PyProf dependencies!\")\n", + " running bdist_wheel\n", + " running build\n", + " running build_py\n", + " creating build\n", + " creating build/lib\n", + " creating build/lib/apex\n", + " copying apex/__init__.py -> build/lib/apex\n", + " creating build/lib/apex/parallel\n", + " copying apex/parallel/optimized_sync_batchnorm_kernel.py -> build/lib/apex/parallel\n", + " copying apex/parallel/LARC.py -> build/lib/apex/parallel\n", + " copying apex/parallel/__init__.py -> build/lib/apex/parallel\n", + " copying apex/parallel/distributed.py -> build/lib/apex/parallel\n", + " copying apex/parallel/optimized_sync_batchnorm.py -> build/lib/apex/parallel\n", + " copying apex/parallel/sync_batchnorm.py -> build/lib/apex/parallel\n", + " copying apex/parallel/multiproc.py -> build/lib/apex/parallel\n", + " copying apex/parallel/sync_batchnorm_kernel.py -> build/lib/apex/parallel\n", + " creating build/lib/apex/fp16_utils\n", + " copying apex/fp16_utils/fp16_optimizer.py -> build/lib/apex/fp16_utils\n", + " copying apex/fp16_utils/fp16util.py -> build/lib/apex/fp16_utils\n", + " copying apex/fp16_utils/__init__.py -> build/lib/apex/fp16_utils\n", + " copying apex/fp16_utils/loss_scaler.py -> build/lib/apex/fp16_utils\n", + " creating build/lib/apex/amp\n", + " copying apex/amp/scaler.py -> build/lib/apex/amp\n", + " copying apex/amp/utils.py -> build/lib/apex/amp\n", + " copying apex/amp/rnn_compat.py -> build/lib/apex/amp\n", + " copying apex/amp/amp.py -> build/lib/apex/amp\n", + " copying apex/amp/compat.py -> build/lib/apex/amp\n", + " copying apex/amp/__init__.py -> build/lib/apex/amp\n", + " copying apex/amp/frontend.py -> build/lib/apex/amp\n", + " copying apex/amp/_process_optimizer.py -> build/lib/apex/amp\n", + " copying apex/amp/opt.py -> build/lib/apex/amp\n", + " copying apex/amp/_initialize.py -> build/lib/apex/amp\n", + " copying apex/amp/_amp_state.py -> build/lib/apex/amp\n", + " copying apex/amp/__version__.py -> build/lib/apex/amp\n", + " 
copying apex/amp/handle.py -> build/lib/apex/amp\n", + " copying apex/amp/wrap.py -> build/lib/apex/amp\n", + " creating build/lib/apex/normalization\n", + " copying apex/normalization/fused_layer_norm.py -> build/lib/apex/normalization\n", + " copying apex/normalization/__init__.py -> build/lib/apex/normalization\n", + " creating build/lib/apex/optimizers\n", + " copying apex/optimizers/fused_adam.py -> build/lib/apex/optimizers\n", + " copying apex/optimizers/fused_novograd.py -> build/lib/apex/optimizers\n", + " copying apex/optimizers/fused_sgd.py -> build/lib/apex/optimizers\n", + " copying apex/optimizers/fused_adagrad.py -> build/lib/apex/optimizers\n", + " copying apex/optimizers/__init__.py -> build/lib/apex/optimizers\n", + " copying apex/optimizers/fused_lamb.py -> build/lib/apex/optimizers\n", + " creating build/lib/apex/contrib\n", + " copying apex/contrib/__init__.py -> build/lib/apex/contrib\n", + " creating build/lib/apex/RNN\n", + " copying apex/RNN/models.py -> build/lib/apex/RNN\n", + " copying apex/RNN/RNNBackend.py -> build/lib/apex/RNN\n", + " copying apex/RNN/__init__.py -> build/lib/apex/RNN\n", + " copying apex/RNN/cells.py -> build/lib/apex/RNN\n", + " creating build/lib/apex/mlp\n", + " copying apex/mlp/mlp.py -> build/lib/apex/mlp\n", + " copying apex/mlp/__init__.py -> build/lib/apex/mlp\n", + " creating build/lib/apex/pyprof\n", + " copying apex/pyprof/__init__.py -> build/lib/apex/pyprof\n", + " creating build/lib/apex/multi_tensor_apply\n", + " copying apex/multi_tensor_apply/__init__.py -> build/lib/apex/multi_tensor_apply\n", + " copying apex/multi_tensor_apply/multi_tensor_apply.py -> build/lib/apex/multi_tensor_apply\n", + " creating build/lib/apex/reparameterization\n", + " copying apex/reparameterization/reparameterization.py -> build/lib/apex/reparameterization\n", + " copying apex/reparameterization/__init__.py -> build/lib/apex/reparameterization\n", + " copying apex/reparameterization/weight_norm.py -> 
build/lib/apex/reparameterization\n", + " creating build/lib/apex/amp/lists\n", + " copying apex/amp/lists/tensor_overrides.py -> build/lib/apex/amp/lists\n", + " copying apex/amp/lists/__init__.py -> build/lib/apex/amp/lists\n", + " copying apex/amp/lists/functional_overrides.py -> build/lib/apex/amp/lists\n", + " copying apex/amp/lists/torch_overrides.py -> build/lib/apex/amp/lists\n", + " creating build/lib/apex/contrib/sparsity\n", + " copying apex/contrib/sparsity/asp.py -> build/lib/apex/contrib/sparsity\n", + " copying apex/contrib/sparsity/__init__.py -> build/lib/apex/contrib/sparsity\n", + " copying apex/contrib/sparsity/sparse_masklib.py -> build/lib/apex/contrib/sparsity\n", + " creating build/lib/apex/contrib/xentropy\n", + " copying apex/contrib/xentropy/softmax_xentropy.py -> build/lib/apex/contrib/xentropy\n", + " copying apex/contrib/xentropy/__init__.py -> build/lib/apex/contrib/xentropy\n", + " creating build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/distributed_fused_adam.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/fused_adam.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/fused_sgd.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/fp16_optimizer.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/distributed_fused_adam_v2.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/__init__.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/distributed_fused_adam_v3.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/fused_lamb.py -> build/lib/apex/contrib/optimizers\n", + " copying apex/contrib/optimizers/distributed_fused_lamb.py -> build/lib/apex/contrib/optimizers\n", + " creating build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/self_multihead_attn_func.py -> 
build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/fast_self_multihead_attn_norm_add_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/fast_encdec_multihead_attn_norm_add_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/__init__.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/encdec_multihead_attn_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/fast_encdec_multihead_attn_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/encdec_multihead_attn.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/mask_softmax_dropout_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/fast_self_multihead_attn_func.py -> build/lib/apex/contrib/multihead_attn\n", + " copying apex/contrib/multihead_attn/self_multihead_attn.py -> build/lib/apex/contrib/multihead_attn\n", + " creating build/lib/apex/contrib/groupbn\n", + " copying apex/contrib/groupbn/__init__.py -> build/lib/apex/contrib/groupbn\n", + " copying apex/contrib/groupbn/batch_norm.py -> build/lib/apex/contrib/groupbn\n", + " creating build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/output.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/softmax.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/pointwise.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/loss.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/conv.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/activation.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/randomSample.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/normalization.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/embedding.py -> 
build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/optim.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/linear.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/__init__.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/__main__.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/usage.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/pooling.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/recurrentCell.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/data.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/base.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/utility.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/misc.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/dropout.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/index_slice_join_mutate.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/prof.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/convert.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/blas.py -> build/lib/apex/pyprof/prof\n", + " copying apex/pyprof/prof/reduction.py -> build/lib/apex/pyprof/prof\n", + " creating build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/nvvp.py -> build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/db.py -> build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/__init__.py -> build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/__main__.py -> build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/kernel.py -> build/lib/apex/pyprof/parse\n", + " copying apex/pyprof/parse/parse.py -> build/lib/apex/pyprof/parse\n", + " creating build/lib/apex/pyprof/nvtx\n", + " copying apex/pyprof/nvtx/__init__.py -> build/lib/apex/pyprof/nvtx\n", + " copying apex/pyprof/nvtx/nvmarker.py -> build/lib/apex/pyprof/nvtx\n", + " installing to 
build/bdist.linux-x86_64/wheel\n", + " running install\n", + " running install_lib\n", + " creating build/bdist.linux-x86_64\n", + " creating build/bdist.linux-x86_64/wheel\n", + " creating build/bdist.linux-x86_64/wheel/apex\n", + " creating build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/optimized_sync_batchnorm_kernel.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/LARC.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/__init__.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/distributed.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/optimized_sync_batchnorm.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/sync_batchnorm.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/multiproc.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " copying build/lib/apex/parallel/sync_batchnorm_kernel.py -> build/bdist.linux-x86_64/wheel/apex/parallel\n", + " creating build/bdist.linux-x86_64/wheel/apex/fp16_utils\n", + " copying build/lib/apex/fp16_utils/fp16_optimizer.py -> build/bdist.linux-x86_64/wheel/apex/fp16_utils\n", + " copying build/lib/apex/fp16_utils/fp16util.py -> build/bdist.linux-x86_64/wheel/apex/fp16_utils\n", + " copying build/lib/apex/fp16_utils/__init__.py -> build/bdist.linux-x86_64/wheel/apex/fp16_utils\n", + " copying build/lib/apex/fp16_utils/loss_scaler.py -> build/bdist.linux-x86_64/wheel/apex/fp16_utils\n", + " creating build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/scaler.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/utils.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/rnn_compat.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying 
build/lib/apex/amp/amp.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " creating build/bdist.linux-x86_64/wheel/apex/amp/lists\n", + " copying build/lib/apex/amp/lists/tensor_overrides.py -> build/bdist.linux-x86_64/wheel/apex/amp/lists\n", + " copying build/lib/apex/amp/lists/__init__.py -> build/bdist.linux-x86_64/wheel/apex/amp/lists\n", + " copying build/lib/apex/amp/lists/functional_overrides.py -> build/bdist.linux-x86_64/wheel/apex/amp/lists\n", + " copying build/lib/apex/amp/lists/torch_overrides.py -> build/bdist.linux-x86_64/wheel/apex/amp/lists\n", + " copying build/lib/apex/amp/compat.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/__init__.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/frontend.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/_process_optimizer.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/opt.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/_initialize.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/_amp_state.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/__version__.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/handle.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " copying build/lib/apex/amp/wrap.py -> build/bdist.linux-x86_64/wheel/apex/amp\n", + " creating build/bdist.linux-x86_64/wheel/apex/normalization\n", + " copying build/lib/apex/normalization/fused_layer_norm.py -> build/bdist.linux-x86_64/wheel/apex/normalization\n", + " copying build/lib/apex/normalization/__init__.py -> build/bdist.linux-x86_64/wheel/apex/normalization\n", + " creating build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/fused_adam.py -> build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/fused_novograd.py -> 
build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/fused_sgd.py -> build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/fused_adagrad.py -> build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/__init__.py -> build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/optimizers/fused_lamb.py -> build/bdist.linux-x86_64/wheel/apex/optimizers\n", + " copying build/lib/apex/__init__.py -> build/bdist.linux-x86_64/wheel/apex\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib/sparsity\n", + " copying build/lib/apex/contrib/sparsity/asp.py -> build/bdist.linux-x86_64/wheel/apex/contrib/sparsity\n", + " copying build/lib/apex/contrib/sparsity/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib/sparsity\n", + " copying build/lib/apex/contrib/sparsity/sparse_masklib.py -> build/bdist.linux-x86_64/wheel/apex/contrib/sparsity\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib/xentropy\n", + " copying build/lib/apex/contrib/xentropy/softmax_xentropy.py -> build/bdist.linux-x86_64/wheel/apex/contrib/xentropy\n", + " copying build/lib/apex/contrib/xentropy/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib/xentropy\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/distributed_fused_adam.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/fused_adam.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/fused_sgd.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/fp16_optimizer.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/distributed_fused_adam_v2.py -> 
build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/distributed_fused_adam_v3.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/fused_lamb.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/optimizers/distributed_fused_lamb.py -> build/bdist.linux-x86_64/wheel/apex/contrib/optimizers\n", + " copying build/lib/apex/contrib/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/self_multihead_attn_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/fast_self_multihead_attn_norm_add_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/fast_encdec_multihead_attn_norm_add_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/encdec_multihead_attn_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/fast_encdec_multihead_attn_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/encdec_multihead_attn.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/mask_softmax_dropout_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying 
build/lib/apex/contrib/multihead_attn/fast_self_multihead_attn_func.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " copying build/lib/apex/contrib/multihead_attn/self_multihead_attn.py -> build/bdist.linux-x86_64/wheel/apex/contrib/multihead_attn\n", + " creating build/bdist.linux-x86_64/wheel/apex/contrib/groupbn\n", + " copying build/lib/apex/contrib/groupbn/__init__.py -> build/bdist.linux-x86_64/wheel/apex/contrib/groupbn\n", + " copying build/lib/apex/contrib/groupbn/batch_norm.py -> build/bdist.linux-x86_64/wheel/apex/contrib/groupbn\n", + " creating build/bdist.linux-x86_64/wheel/apex/RNN\n", + " copying build/lib/apex/RNN/models.py -> build/bdist.linux-x86_64/wheel/apex/RNN\n", + " copying build/lib/apex/RNN/RNNBackend.py -> build/bdist.linux-x86_64/wheel/apex/RNN\n", + " copying build/lib/apex/RNN/__init__.py -> build/bdist.linux-x86_64/wheel/apex/RNN\n", + " copying build/lib/apex/RNN/cells.py -> build/bdist.linux-x86_64/wheel/apex/RNN\n", + " creating build/bdist.linux-x86_64/wheel/apex/mlp\n", + " copying build/lib/apex/mlp/mlp.py -> build/bdist.linux-x86_64/wheel/apex/mlp\n", + " copying build/lib/apex/mlp/__init__.py -> build/bdist.linux-x86_64/wheel/apex/mlp\n", + " creating build/bdist.linux-x86_64/wheel/apex/pyprof\n", + " copying build/lib/apex/pyprof/__init__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof\n", + " creating build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/output.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/softmax.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/pointwise.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/loss.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/conv.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying 
build/lib/apex/pyprof/prof/activation.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/randomSample.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/normalization.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/embedding.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/optim.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/linear.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/__init__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/__main__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/usage.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/pooling.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/recurrentCell.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/data.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/base.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/utility.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/misc.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/dropout.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/index_slice_join_mutate.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/prof.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/convert.py -> 
build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/blas.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " copying build/lib/apex/pyprof/prof/reduction.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/prof\n", + " creating build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/nvvp.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/db.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/__init__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/__main__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/kernel.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " copying build/lib/apex/pyprof/parse/parse.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/parse\n", + " creating build/bdist.linux-x86_64/wheel/apex/pyprof/nvtx\n", + " copying build/lib/apex/pyprof/nvtx/__init__.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/nvtx\n", + " copying build/lib/apex/pyprof/nvtx/nvmarker.py -> build/bdist.linux-x86_64/wheel/apex/pyprof/nvtx\n", + " creating build/bdist.linux-x86_64/wheel/apex/multi_tensor_apply\n", + " copying build/lib/apex/multi_tensor_apply/__init__.py -> build/bdist.linux-x86_64/wheel/apex/multi_tensor_apply\n", + " copying build/lib/apex/multi_tensor_apply/multi_tensor_apply.py -> build/bdist.linux-x86_64/wheel/apex/multi_tensor_apply\n", + " creating build/bdist.linux-x86_64/wheel/apex/reparameterization\n", + " copying build/lib/apex/reparameterization/reparameterization.py -> build/bdist.linux-x86_64/wheel/apex/reparameterization\n", + " copying build/lib/apex/reparameterization/__init__.py -> build/bdist.linux-x86_64/wheel/apex/reparameterization\n", + " copying build/lib/apex/reparameterization/weight_norm.py -> 
build/bdist.linux-x86_64/wheel/apex/reparameterization\n", + " running install_egg_info\n", + " running egg_info\n", + " creating apex.egg-info\n", + " writing apex.egg-info/PKG-INFO\n", + " writing dependency_links to apex.egg-info/dependency_links.txt\n", + " writing top-level names to apex.egg-info/top_level.txt\n", + " writing manifest file 'apex.egg-info/SOURCES.txt'\n", + " writing manifest file 'apex.egg-info/SOURCES.txt'\n", + " Copying apex.egg-info to build/bdist.linux-x86_64/wheel/apex-0.1-py3.6.egg-info\n", + " running install_scripts\n", + " adding license file \"LICENSE\" (matched pattern \"LICEN[CS]E*\")\n", + " creating build/bdist.linux-x86_64/wheel/apex-0.1.dist-info/WHEEL\n", + " creating '/tmp/pip-wheel-nxntosk1/apex-0.1-cp36-none-any.whl' and adding 'build/bdist.linux-x86_64/wheel' to it\n", + " adding 'apex/__init__.py'\n", + " adding 'apex/RNN/RNNBackend.py'\n", + " adding 'apex/RNN/__init__.py'\n", + " adding 'apex/RNN/cells.py'\n", + " adding 'apex/RNN/models.py'\n", + " adding 'apex/amp/__init__.py'\n", + " adding 'apex/amp/__version__.py'\n", + " adding 'apex/amp/_amp_state.py'\n", + " adding 'apex/amp/_initialize.py'\n", + " adding 'apex/amp/_process_optimizer.py'\n", + " adding 'apex/amp/amp.py'\n", + " adding 'apex/amp/compat.py'\n", + " adding 'apex/amp/frontend.py'\n", + " adding 'apex/amp/handle.py'\n", + " adding 'apex/amp/opt.py'\n", + " adding 'apex/amp/rnn_compat.py'\n", + " adding 'apex/amp/scaler.py'\n", + " adding 'apex/amp/utils.py'\n", + " adding 'apex/amp/wrap.py'\n", + " adding 'apex/amp/lists/__init__.py'\n", + " adding 'apex/amp/lists/functional_overrides.py'\n", + " adding 'apex/amp/lists/tensor_overrides.py'\n", + " adding 'apex/amp/lists/torch_overrides.py'\n", + " adding 'apex/contrib/__init__.py'\n", + " adding 'apex/contrib/groupbn/__init__.py'\n", + " adding 'apex/contrib/groupbn/batch_norm.py'\n", + " adding 'apex/contrib/multihead_attn/__init__.py'\n", + " adding 
'apex/contrib/multihead_attn/encdec_multihead_attn.py'\n", + " adding 'apex/contrib/multihead_attn/encdec_multihead_attn_func.py'\n", + " adding 'apex/contrib/multihead_attn/fast_encdec_multihead_attn_func.py'\n", + " adding 'apex/contrib/multihead_attn/fast_encdec_multihead_attn_norm_add_func.py'\n", + " adding 'apex/contrib/multihead_attn/fast_self_multihead_attn_func.py'\n", + " adding 'apex/contrib/multihead_attn/fast_self_multihead_attn_norm_add_func.py'\n", + " adding 'apex/contrib/multihead_attn/mask_softmax_dropout_func.py'\n", + " adding 'apex/contrib/multihead_attn/self_multihead_attn.py'\n", + " adding 'apex/contrib/multihead_attn/self_multihead_attn_func.py'\n", + " adding 'apex/contrib/optimizers/__init__.py'\n", + " adding 'apex/contrib/optimizers/distributed_fused_adam.py'\n", + " adding 'apex/contrib/optimizers/distributed_fused_adam_v2.py'\n", + " adding 'apex/contrib/optimizers/distributed_fused_adam_v3.py'\n", + " adding 'apex/contrib/optimizers/distributed_fused_lamb.py'\n", + " adding 'apex/contrib/optimizers/fp16_optimizer.py'\n", + " adding 'apex/contrib/optimizers/fused_adam.py'\n", + " adding 'apex/contrib/optimizers/fused_lamb.py'\n", + " adding 'apex/contrib/optimizers/fused_sgd.py'\n", + " adding 'apex/contrib/sparsity/__init__.py'\n", + " adding 'apex/contrib/sparsity/asp.py'\n", + " adding 'apex/contrib/sparsity/sparse_masklib.py'\n", + " adding 'apex/contrib/xentropy/__init__.py'\n", + " adding 'apex/contrib/xentropy/softmax_xentropy.py'\n", + " adding 'apex/fp16_utils/__init__.py'\n", + " adding 'apex/fp16_utils/fp16_optimizer.py'\n", + " adding 'apex/fp16_utils/fp16util.py'\n", + " adding 'apex/fp16_utils/loss_scaler.py'\n", + " adding 'apex/mlp/__init__.py'\n", + " adding 'apex/mlp/mlp.py'\n", + " adding 'apex/multi_tensor_apply/__init__.py'\n", + " adding 'apex/multi_tensor_apply/multi_tensor_apply.py'\n", + " adding 'apex/normalization/__init__.py'\n", + " adding 'apex/normalization/fused_layer_norm.py'\n", + " adding 
'apex/optimizers/__init__.py'\n", + " adding 'apex/optimizers/fused_adagrad.py'\n", + " adding 'apex/optimizers/fused_adam.py'\n", + " adding 'apex/optimizers/fused_lamb.py'\n", + " adding 'apex/optimizers/fused_novograd.py'\n", + " adding 'apex/optimizers/fused_sgd.py'\n", + " adding 'apex/parallel/LARC.py'\n", + " adding 'apex/parallel/__init__.py'\n", + " adding 'apex/parallel/distributed.py'\n", + " adding 'apex/parallel/multiproc.py'\n", + " adding 'apex/parallel/optimized_sync_batchnorm.py'\n", + " adding 'apex/parallel/optimized_sync_batchnorm_kernel.py'\n", + " adding 'apex/parallel/sync_batchnorm.py'\n", + " adding 'apex/parallel/sync_batchnorm_kernel.py'\n", + " adding 'apex/pyprof/__init__.py'\n", + " adding 'apex/pyprof/nvtx/__init__.py'\n", + " adding 'apex/pyprof/nvtx/nvmarker.py'\n", + " adding 'apex/pyprof/parse/__init__.py'\n", + " adding 'apex/pyprof/parse/__main__.py'\n", + " adding 'apex/pyprof/parse/db.py'\n", + " adding 'apex/pyprof/parse/kernel.py'\n", + " adding 'apex/pyprof/parse/nvvp.py'\n", + " adding 'apex/pyprof/parse/parse.py'\n", + " adding 'apex/pyprof/prof/__init__.py'\n", + " adding 'apex/pyprof/prof/__main__.py'\n", + " adding 'apex/pyprof/prof/activation.py'\n", + " adding 'apex/pyprof/prof/base.py'\n", + " adding 'apex/pyprof/prof/blas.py'\n", + " adding 'apex/pyprof/prof/conv.py'\n", + " adding 'apex/pyprof/prof/convert.py'\n", + " adding 'apex/pyprof/prof/data.py'\n", + " adding 'apex/pyprof/prof/dropout.py'\n", + " adding 'apex/pyprof/prof/embedding.py'\n", + " adding 'apex/pyprof/prof/index_slice_join_mutate.py'\n", + " adding 'apex/pyprof/prof/linear.py'\n", + " adding 'apex/pyprof/prof/loss.py'\n", + " adding 'apex/pyprof/prof/misc.py'\n", + " adding 'apex/pyprof/prof/normalization.py'\n", + " adding 'apex/pyprof/prof/optim.py'\n", + " adding 'apex/pyprof/prof/output.py'\n", + " adding 'apex/pyprof/prof/pointwise.py'\n", + " adding 'apex/pyprof/prof/pooling.py'\n", + " adding 'apex/pyprof/prof/prof.py'\n", + " adding 
'apex/pyprof/prof/randomSample.py'\n", + " adding 'apex/pyprof/prof/recurrentCell.py'\n", + " adding 'apex/pyprof/prof/reduction.py'\n", + " adding 'apex/pyprof/prof/softmax.py'\n", + " adding 'apex/pyprof/prof/usage.py'\n", + " adding 'apex/pyprof/prof/utility.py'\n", + " adding 'apex/reparameterization/__init__.py'\n", + " adding 'apex/reparameterization/reparameterization.py'\n", + " adding 'apex/reparameterization/weight_norm.py'\n", + " adding 'apex-0.1.dist-info/LICENSE'\n", + " adding 'apex-0.1.dist-info/METADATA'\n", + " adding 'apex-0.1.dist-info/WHEEL'\n", + " adding 'apex-0.1.dist-info/top_level.txt'\n", + " adding 'apex-0.1.dist-info/RECORD'\n", + " removing build/bdist.linux-x86_64/wheel\n", + "\u001b[?25hdone\n", + " Created wheel for apex: filename=apex-0.1-cp36-none-any.whl size=192840 sha256=e82c97643c1d760c31ebf4f164f1a7c851fc853bfaac36dedcab8946d1eca982\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-37xcttr2/wheels/b1/3a/aa/d84906eaab780ae580c7a5686a33bf2820d8590ac3b60d5967\n", + " Removing source in /tmp/pip-req-build-8s0yfy62\n", + "Successfully built apex\n", + "Installing collected packages: apex\n", + "\n", + "Successfully installed apex-0.1\n", + "Cleaning up...\n", + "Removed build tracker '/tmp/pip-req-tracker-6oxhyrc_'\n", + "Collecting transformers\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/27/3c/91ed8f5c4e7ef3227b4119200fc0ed4b4fd965b1f0172021c25701087825/transformers-3.0.2-py3-none-any.whl (769kB)\n", + "\u001b[K |████████████████████████████████| 778kB 4.7MB/s \n", + "\u001b[?25hCollecting sentencepiece!=0.1.92\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d4/a4/d0a884c4300004a78cca907a6ff9a5e9fe4f090f5d95ab341c53d28cbc58/sentencepiece-0.1.91-cp36-cp36m-manylinux1_x86_64.whl (1.1MB)\n", + "\u001b[K |████████████████████████████████| 1.1MB 25kB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from transformers) 
(1.18.5)\n", + "Collecting sacremoses\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7d/34/09d19aff26edcc8eb2a01bed8e98f13a1537005d31e95233fd48216eed10/sacremoses-0.0.43.tar.gz (883kB)\n", + "\u001b[K |████████████████████████████████| 890kB 20.4MB/s \n", + "\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.6/dist-packages (from transformers) (2.23.0)\n", + "Requirement already satisfied: filelock in /usr/local/lib/python3.6/dist-packages (from transformers) (3.0.12)\n", + "Collecting tokenizers==0.8.1.rc1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/40/d0/30d5f8d221a0ed981a186c8eb986ce1c94e3a6e87f994eae9f4aa5250217/tokenizers-0.8.1rc1-cp36-cp36m-manylinux1_x86_64.whl (3.0MB)\n", + "\u001b[K |████████████████████████████████| 3.0MB 39.1MB/s \n", + "\u001b[?25hRequirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.6/dist-packages (from transformers) (4.41.1)\n", + "Requirement already satisfied: dataclasses; python_version < \"3.7\" in /usr/local/lib/python3.6/dist-packages (from transformers) (0.7)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.6/dist-packages (from transformers) (20.4)\n", + "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.6/dist-packages (from transformers) (2019.12.20)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (1.15.0)\n", + "Requirement already satisfied: click in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (7.1.2)\n", + "Requirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (0.16.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2020.6.20)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) 
(3.0.4)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2.10)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (1.24.3)\n", + "Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.6/dist-packages (from packaging->transformers) (2.4.7)\n", + "Building wheels for collected packages: sacremoses\n", + " Building wheel for sacremoses (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for sacremoses: filename=sacremoses-0.0.43-cp36-none-any.whl size=893260 sha256=c6d416987729e676dd16e71b4384adce00b7e12657c1c3203ce173cffc52af09\n", + " Stored in directory: /root/.cache/pip/wheels/29/3c/fd/7ce5c3f0666dab31a50123635e6fb5e19ceb42ce38d4e58f45\n", + "Successfully built sacremoses\n", + "Installing collected packages: sentencepiece, sacremoses, tokenizers, transformers\n", + "Successfully installed sacremoses-0.0.43 sentencepiece-0.1.91 tokenizers-0.8.1rc1 transformers-3.0.2\n", + "Collecting simpletransformers\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/3b/36/884727c20a4777105705cd6d01d57abfa7274d63a7aebb6d23d46b589d2d/simpletransformers-0.46.6-py3-none-any.whl (199kB)\n", + "\u001b[K |████████████████████████████████| 204kB 4.5MB/s \n", + "\u001b[?25hRequirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (0.22.2.post1)\n", + "Collecting tqdm>=4.47.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/28/7e/281edb5bc3274dfb894d90f4dbacfceaca381c2435ec6187a2c6f329aed7/tqdm-4.48.2-py2.py3-none-any.whl (68kB)\n", + "\u001b[K |████████████████████████████████| 71kB 8.2MB/s \n", + "\u001b[?25hRequirement already satisfied: requests in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (2.23.0)\n", + "Requirement already satisfied: transformers>=3.0.2 in 
/usr/local/lib/python3.6/dist-packages (from simpletransformers) (3.0.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (1.18.5)\n", + "Collecting wandb\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/94/19/f8db9eff4b0173adf6dd2e8b0c3d8de0bfe10ec9ed63d247665980d82258/wandb-0.9.4-py2.py3-none-any.whl (1.4MB)\n", + "\u001b[K |████████████████████████████████| 1.4MB 13.2MB/s \n", + "\u001b[?25hCollecting tensorboardx\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/af/0c/4f41bcd45db376e6fe5c619c01100e9b7531c55791b7244815bac6eac32c/tensorboardX-2.1-py2.py3-none-any.whl (308kB)\n", + "\u001b[K |████████████████████████████████| 317kB 30.7MB/s \n", + "\u001b[?25hRequirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (1.4.1)\n", + "Requirement already satisfied: regex in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (2019.12.20)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (1.0.5)\n", + "Collecting seqeval\n", + " Downloading https://files.pythonhosted.org/packages/34/91/068aca8d60ce56dd9ba4506850e876aba5e66a6f2f29aa223224b50df0de/seqeval-0.0.12.tar.gz\n", + "Requirement already satisfied: tokenizers in /usr/local/lib/python3.6/dist-packages (from simpletransformers) (0.8.1rc1)\n", + "Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.6/dist-packages (from scikit-learn->simpletransformers) (0.16.0)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests->simpletransformers) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests->simpletransformers) (1.24.3)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from 
requests->simpletransformers) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests->simpletransformers) (2020.6.20)\n", + "Requirement already satisfied: sentencepiece!=0.1.92 in /usr/local/lib/python3.6/dist-packages (from transformers>=3.0.2->simpletransformers) (0.1.91)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.6/dist-packages (from transformers>=3.0.2->simpletransformers) (20.4)\n", + "Requirement already satisfied: dataclasses; python_version < \"3.7\" in /usr/local/lib/python3.6/dist-packages (from transformers>=3.0.2->simpletransformers) (0.7)\n", + "Requirement already satisfied: filelock in /usr/local/lib/python3.6/dist-packages (from transformers>=3.0.2->simpletransformers) (3.0.12)\n", + "Requirement already satisfied: sacremoses in /usr/local/lib/python3.6/dist-packages (from transformers>=3.0.2->simpletransformers) (0.0.43)\n", + "Collecting subprocess32>=3.5.3\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz (97kB)\n", + "\u001b[K |████████████████████████████████| 102kB 12.0MB/s \n", + "\u001b[?25hRequirement already satisfied: nvidia-ml-py3>=7.352.0 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (7.352.0)\n", + "Collecting GitPython>=1.0.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/f9/1e/a45320cab182bf1c8656107b3d4c042e659742822fc6bff150d769a984dd/GitPython-3.1.7-py3-none-any.whl (158kB)\n", + "\u001b[K |████████████████████████████████| 163kB 31.1MB/s \n", + "\u001b[?25hRequirement already satisfied: Click>=7.0 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (7.1.2)\n", + "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (1.15.0)\n", + "Requirement already satisfied: 
python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (2.8.1)\n", + "Requirement already satisfied: PyYAML>=3.10 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (3.13)\n", + "Collecting configparser>=3.8.1\n", + " Downloading https://files.pythonhosted.org/packages/4b/6b/01baa293090240cf0562cc5eccb69c6f5006282127f2b846fad011305c79/configparser-5.0.0-py3-none-any.whl\n", + "Collecting shortuuid>=0.5.0\n", + " Downloading https://files.pythonhosted.org/packages/25/a6/2ecc1daa6a304e7f1b216f0896b26156b78e7c38e1211e9b798b4716c53d/shortuuid-1.0.1-py3-none-any.whl\n", + "Collecting sentry-sdk>=0.4.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/4b/23/811fcdfc9d67fea7e47c91dd553081218d53dda744c28384f4d2f69206c9/sentry_sdk-0.16.3-py2.py3-none-any.whl (110kB)\n", + "\u001b[K |████████████████████████████████| 112kB 30.4MB/s \n", + "\u001b[?25hCollecting watchdog>=0.8.3\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/0e/06/121302598a4fc01aca942d937f4a2c33430b7181137b35758913a8db10ad/watchdog-0.10.3.tar.gz (94kB)\n", + "\u001b[K |████████████████████████████████| 102kB 12.1MB/s \n", + "\u001b[?25hRequirement already satisfied: psutil>=5.0.0 in /usr/local/lib/python3.6/dist-packages (from wandb->simpletransformers) (5.4.8)\n", + "Collecting gql==0.2.0\n", + " Downloading https://files.pythonhosted.org/packages/c4/6f/cf9a3056045518f06184e804bae89390eb706168349daa9dff8ac609962a/gql-0.2.0.tar.gz\n", + "Collecting docker-pycreds>=0.4.0\n", + " Downloading https://files.pythonhosted.org/packages/f5/e8/f6bd1eee09314e7e6dee49cbe2c5e22314ccdb38db16c9fc72d2fa80d054/docker_pycreds-0.4.0-py2.py3-none-any.whl\n", + "Requirement already satisfied: protobuf>=3.8.0 in /usr/local/lib/python3.6/dist-packages (from tensorboardx->simpletransformers) (3.12.4)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from 
pandas->simpletransformers) (2018.9)\n", + "Requirement already satisfied: Keras>=2.2.4 in /usr/local/lib/python3.6/dist-packages (from seqeval->simpletransformers) (2.4.3)\n", + "Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.6/dist-packages (from packaging->transformers>=3.0.2->simpletransformers) (2.4.7)\n", + "Collecting gitdb<5,>=4.0.1\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/48/11/d1800bca0a3bae820b84b7d813ad1eff15a48a64caea9c823fc8c1b119e8/gitdb-4.0.5-py3-none-any.whl (63kB)\n", + "\u001b[K |████████████████████████████████| 71kB 9.5MB/s \n", + "\u001b[?25hCollecting pathtools>=0.1.1\n", + " Downloading https://files.pythonhosted.org/packages/e7/7f/470d6fcdf23f9f3518f6b0b76be9df16dcc8630ad409947f8be2eb0ed13a/pathtools-0.1.2.tar.gz\n", + "Collecting graphql-core<2,>=0.5.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/b0/89/00ad5e07524d8c523b14d70c685e0299a8b0de6d0727e368c41b89b7ed0b/graphql-core-1.1.tar.gz (70kB)\n", + "\u001b[K |████████████████████████████████| 71kB 10.7MB/s \n", + "\u001b[?25hRequirement already satisfied: promise<3,>=2.0 in /usr/local/lib/python3.6/dist-packages (from gql==0.2.0->wandb->simpletransformers) (2.3)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.8.0->tensorboardx->simpletransformers) (49.2.0)\n", + "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.4->seqeval->simpletransformers) (2.10.0)\n", + "Collecting smmap<4,>=3.0.1\n", + " Downloading https://files.pythonhosted.org/packages/b0/9a/4d409a6234eb940e6a78dfdfc66156e7522262f5f2fecca07dc55915952d/smmap-3.0.4-py2.py3-none-any.whl\n", + "Building wheels for collected packages: seqeval, subprocess32, watchdog, gql, pathtools, graphql-core\n", + " Building wheel for seqeval (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for seqeval: filename=seqeval-0.0.12-cp36-none-any.whl size=7424 sha256=7648f2165adcf379da0ab12a3becb2793cdd4c5deeeefa29cb14e2e221e787f0\n", + " Stored in directory: /root/.cache/pip/wheels/4f/32/0a/df3b340a82583566975377d65e724895b3fad101a3fb729f68\n", + " Building wheel for subprocess32 (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for subprocess32: filename=subprocess32-3.5.4-cp36-none-any.whl size=6489 sha256=d9c39500378a7efa0d6b3bef9da004356342d521f8328e1d4ca43bcc6bac10e1\n", + " Stored in directory: /root/.cache/pip/wheels/68/39/1a/5e402bdfdf004af1786c8b853fd92f8c4a04f22aad179654d1\n", + " Building wheel for watchdog (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for watchdog: filename=watchdog-0.10.3-cp36-none-any.whl size=73870 sha256=7b5c1ef1ebc091ca95975c72baddee2ee80c10cd561672569c6b09fdf155e375\n", + " Stored in directory: /root/.cache/pip/wheels/a8/1d/38/2c19bb311f67cc7b4d07a2ec5ea36ab1a0a0ea50db994a5bc7\n", + " Building wheel for gql (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for gql: filename=gql-0.2.0-cp36-none-any.whl size=7630 sha256=02d6277d53f2d270f7f303e4ac7f1f16b0deff44b431548aaadb1f7200532a5f\n", + " Stored in directory: /root/.cache/pip/wheels/ce/0e/7b/58a8a5268655b3ad74feef5aa97946f0addafb3cbb6bd2da23\n", + " Building wheel for pathtools (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for pathtools: filename=pathtools-0.1.2-cp36-none-any.whl size=8784 sha256=5e1250a157a061872a327756664acce5545d8f2dcb130ec90865c6f93d3099ff\n", + " Stored in directory: /root/.cache/pip/wheels/0b/04/79/c3b0c3a0266a3cb4376da31e5bfe8bba0c489246968a68e843\n", + " Building wheel for graphql-core (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for graphql-core: filename=graphql_core-1.1-cp36-none-any.whl size=104650 sha256=0932af254b3a7080e16bc944f622942fcac16af76a4aed3b1deb06e66ae2f424\n", + " Stored in directory: /root/.cache/pip/wheels/45/99/d7/c424029bb0fe910c63b68dbf2aa20d3283d023042521bcd7d5\n", + "Successfully built seqeval subprocess32 watchdog gql pathtools graphql-core\n", + "Installing collected packages: tqdm, subprocess32, smmap, gitdb, GitPython, configparser, shortuuid, sentry-sdk, pathtools, watchdog, graphql-core, gql, docker-pycreds, wandb, tensorboardx, seqeval, simpletransformers\n", + " Found existing installation: tqdm 4.41.1\n", + " Uninstalling tqdm-4.41.1:\n", + " Successfully uninstalled tqdm-4.41.1\n", + "Successfully installed GitPython-3.1.7 configparser-5.0.0 docker-pycreds-0.4.0 gitdb-4.0.5 gql-0.2.0 graphql-core-1.1 pathtools-0.1.2 sentry-sdk-0.16.3 seqeval-0.0.12 shortuuid-1.0.1 simpletransformers-0.46.6 smmap-3.0.4 subprocess32-3.5.4 tensorboardx-2.1 tqdm-4.48.2 wandb-0.9.4 watchdog-0.10.3\n", + "Requirement already satisfied: wandb in /usr/local/lib/python3.6/dist-packages (0.9.4)\n", + "Requirement already satisfied: gql==0.2.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (0.2.0)\n", + "Requirement already satisfied: Click>=7.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (7.1.2)\n", + "Requirement already satisfied: shortuuid>=0.5.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (1.0.1)\n", + "Requirement already satisfied: docker-pycreds>=0.4.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (0.4.0)\n", + "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (1.15.0)\n", + "Requirement already satisfied: nvidia-ml-py3>=7.352.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (7.352.0)\n", + "Requirement already satisfied: sentry-sdk>=0.4.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (0.16.3)\n", + "Requirement already 
satisfied: GitPython>=1.0.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (3.1.7)\n", + "Requirement already satisfied: psutil>=5.0.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (5.4.8)\n", + "Requirement already satisfied: configparser>=3.8.1 in /usr/local/lib/python3.6/dist-packages (from wandb) (5.0.0)\n", + "Requirement already satisfied: subprocess32>=3.5.3 in /usr/local/lib/python3.6/dist-packages (from wandb) (3.5.4)\n", + "Requirement already satisfied: PyYAML>=3.10 in /usr/local/lib/python3.6/dist-packages (from wandb) (3.13)\n", + "Requirement already satisfied: watchdog>=0.8.3 in /usr/local/lib/python3.6/dist-packages (from wandb) (0.10.3)\n", + "Requirement already satisfied: requests>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from wandb) (2.23.0)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from wandb) (2.8.1)\n", + "Requirement already satisfied: graphql-core<2,>=0.5.0 in /usr/local/lib/python3.6/dist-packages (from gql==0.2.0->wandb) (1.1)\n", + "Requirement already satisfied: promise<3,>=2.0 in /usr/local/lib/python3.6/dist-packages (from gql==0.2.0->wandb) (2.3)\n", + "Requirement already satisfied: urllib3>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from sentry-sdk>=0.4.0->wandb) (1.24.3)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.6/dist-packages (from sentry-sdk>=0.4.0->wandb) (2020.6.20)\n", + "Requirement already satisfied: gitdb<5,>=4.0.1 in /usr/local/lib/python3.6/dist-packages (from GitPython>=1.0.0->wandb) (4.0.5)\n", + "Requirement already satisfied: pathtools>=0.1.1 in /usr/local/lib/python3.6/dist-packages (from watchdog>=0.8.3->wandb) (0.1.2)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->wandb) (2.10)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->wandb) (3.0.4)\n", + 
"Requirement already satisfied: smmap<4,>=3.0.1 in /usr/local/lib/python3.6/dist-packages (from gitdb<5,>=4.0.1->GitPython>=1.0.0->wandb) (3.0.4)\n", + "Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (0.22.2.post1)\n", + "Requirement already satisfied: scipy>=0.17.0 in /usr/local/lib/python3.6/dist-packages (from scikit-learn) (1.4.1)\n", + "Requirement already satisfied: numpy>=1.11.0 in /usr/local/lib/python3.6/dist-packages (from scikit-learn) (1.18.5)\n", + "Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.6/dist-packages (from scikit-learn) (0.16.0)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ZE1C_baibNUh", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 115 + }, + "outputId": "7f687f62-09aa-401d-959d-9358146cdacf" + }, + "source": [ + "import sys\n", + "!test -d bertviz_repo && echo \"FYI: bertviz_repo directory already exists, to pull latest version uncomment this line: !rm -r bertviz_repo\"\n", + "# !rm -r bertviz_repo # Uncomment if you need a clean pull from repo\n", + "!test -d bertviz_repo || git clone https://github.com/jessevig/bertviz bertviz_repo\n", + "if not 'bertviz_repo' in sys.path:\n", + " sys.path += ['bertviz_repo']\n", + "!pip install regex" + ], + "execution_count": 3, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'bertviz_repo'...\n", + "remote: Enumerating objects: 1074, done.\u001b[K\n", + "remote: Total 1074 (delta 0), reused 0 (delta 0), pack-reused 1074\u001b[K\n", + "Receiving objects: 100% (1074/1074), 99.41 MiB | 25.80 MiB/s, done.\n", + "Resolving deltas: 100% (687/687), done.\n", + "Requirement already satisfied: regex in /usr/local/lib/python3.6/dist-packages (2019.12.20)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "GOAEt4gsTZ5u", + "colab_type": "text" + }, + "source": [ + "We want to install NVIDIA's 
Apex tool, for the training pipeline used by `simple-transformers` and Weights and Biases." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "uSuLMmOSW531", + "colab_type": "text" + }, + "source": [ + "Now, to ensure our model demonstrates an understanding of chemical syntax and molecular structure, we'll be testing it on predicting a masked token/character within the SMILES molecule for benzene." + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "I1MLAix0pB-C", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# Test if NVIDIA apex training tool works\n", + "from apex import amp" + ], + "execution_count": 4, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "9OLp-fX5W3Ah", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 377, + "referenced_widgets": [ + "98acba3fe53644a8ba4252de10f9a426", + "a9173bc7f1fb4d79b5a7122628646485", + "1ce379976f2743b9b606616e8b8d45f5", + "e00dc06324554fe88258b206a1b2c80c", + "8feffc04f07d41bb9467a46ef1664481", + "ecdc065df020489b89b59e85ff7aa90a", + "feab1dff569e4d51ae00e06f09de1a45", + "f8f963d730154041b9accba63822f0b9", + "4b9531aadec94d6997f4df3e48fe9dd5", + "75f8becf86194588807bd8e118c6e448", + "e3ab7fc4fb4249b092f40eec57017f2b", + "9e049bb8977c42729d3fa05e8e23bef5", + "7ab4d5afc39f42c582f7d2fee9ba29dc", + "acacca6484d747608fd27537490c490f", + "891d126ceafd4b65bcdcd69959086931", + "05d4f7694b4b4d2687dbc0125f444ea0", + "f67218c34f29439b879de2b02da1309d", + "25982cceede845d8a6478b54ab8d6906", + "e58b80417b444cda8a46111c8142d0b1", + "bd945062ce944393adfac4f1bc2dca3f", + "c0332264f8f74816a32832eae7f81ab1", + "e280e56118874c728e693b3da661ac16", + "41a1514a959a48a991556d0a5bef9d26", + "00da13a2e5154e52b5408e5bf08da994", + "a5f0a5ad353c41c69a275ef766cf7775", + "4275d2d29e98438ca62e695a534372b9", + "970028ca53f244079abe68559bedc62b", + "797465f4f03441968e15b260aef38859", + "b37d03ab7f0f4ae9b52edbde9ed586e1", + 
"fa3f808ac29147e28181d2838a9a5822", + "539ed619d7364d9ca0bd9a11cb2e2498", + "00133158eee24e068220037a27a30ad8", + "45795699e2f247ae916dbec650640fdb", + "25bf8f1dd099424993de36ffe8e34577", + "c5125dcb1e664845aee1fe54650a8ab6", + "bd227e553de240e1b89a2dbae023ff16", + "63d86d07dd7042baaca655f6c063f975", + "45a316a41c7346fab66b505c9bb2d4cc", + "edaaea155fc6457385127ad5695ecca5", + "d806297355ab40a0a2d895e041c1e193", + "8004a4812f6144aca56648a6ee5d1c6b", + "9899a51144a34e579335d112aa132c74", + "0e2414f3bd134e848936c7170f14a029", + "7bdd46ac04a94263a4ca942fcb96b001", + "79c4e433d95a47dfb1df0d403e51fd20", + "7858ea077dd14a4e9ff5f48a3a72d639", + "2a989ac5aab849779a18abd94603d1be", + "217f5c224f5a416db001133a1a679b41" + ] + }, + "outputId": "82321015-08fc-4e05-9a2f-fd1955a9c072" + }, + "source": [ + "from transformers import AutoModelWithLMHead, AutoTokenizer, pipeline, RobertaModel, RobertaTokenizer\n", + "from bertviz import head_view\n", + "\n", + "model = AutoModelWithLMHead.from_pretrained(\"seyonec/ChemBERTa_zinc250k_v2_40k\")\n", + "tokenizer = AutoTokenizer.from_pretrained(\"seyonec/ChemBERTa_zinc250k_v2_40k\")\n", + "\n", + "fill_mask = pipeline('fill-mask', model=model, tokenizer=tokenizer)\n" + ], + "execution_count": 5, + "outputs": [ + { + "output_type": "stream", + "text": [ + "\u001b[34m\u001b[1mwandb\u001b[0m: \u001b[33mWARNING\u001b[0m W&B installed but not logged in. Run `wandb login` or set the WANDB_API_KEY env variable.\n", + "/usr/local/lib/python3.6/dist-packages/transformers/modeling_auto.py:798: FutureWarning: The class `AutoModelWithLMHead` is deprecated and will be removed in a future version. 
Please use `AutoModelForCausalLM` for causal language models, `AutoModelForMaskedLM` for masked language models and `AutoModelForSeq2SeqLM` for encoder-decoder models.\n", + " FutureWarning,\n" + ], + "name": "stderr" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "98acba3fe53644a8ba4252de10f9a426", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=515.0, style=ProgressStyle(description_…" ] - } - }, - "1dd271d6a49445bf81488cb92a81247f": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "b9b287012e704eaea45d48f21836b8c4": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_1875a1424a154f9b87b0958dcdc303e9", 
- "_dom_classes": [], - "description": "Current iteration: 100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 215, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 215, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_a1c637d057214aa4bf961115718540aa" - } - }, - "7b5168a54bba443980f471c5623d8a3b": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_ced6f8685ae84e23b517fe4c10d5e543", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 215/215 [00:20<00:00, 10.29it/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_fe94273739cc403987d47549aa894c25" - } - }, - "1875a1424a154f9b87b0958dcdc303e9": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "a1c637d057214aa4bf961115718540aa": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, 
- "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "ced6f8685ae84e23b517fe4c10d5e543": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "fe94273739cc403987d47549aa894c25": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, 
- "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "fc42b7f3c9f5486688649c44e5340390": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_992037580a774f959acab6acd413da36", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_82272780aabb457d88ba7448161327b9", - "IPY_MODEL_0cb45d8fb7604d6aabbf35abeee0b83b" - ] - } - }, - "992037580a774f959acab6acd413da36": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": 
null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "82272780aabb457d88ba7448161327b9": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_d0385dfa020641a1b1867ce53612a4c1", - "_dom_classes": [], - "description": "Current iteration: 100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 215, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 215, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_3858db9d16a0482f917e2829c24090d0" - } - }, - "0cb45d8fb7604d6aabbf35abeee0b83b": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_197e5ce104f945f8bac84604295592e7", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 215/215 [00:20<00:00, 10.30it/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_ee59e545a93e4bb0a66595729f815bf3" - } - }, - "d0385dfa020641a1b1867ce53612a4c1": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - 
"bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "3858db9d16a0482f917e2829c24090d0": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "197e5ce104f945f8bac84604295592e7": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "ee59e545a93e4bb0a66595729f815bf3": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - 
"overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "a669df427e2149caa9ee0edec40dc3a4": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_0e519978fc6c476d936aac1fe0abf4bc", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_ed3005e49f84416a82794c3dfc31cfcc", - "IPY_MODEL_dade9df974f245b0b54c508f168f936b" - ] - } - }, - "0e519978fc6c476d936aac1fe0abf4bc": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": 
null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "ed3005e49f84416a82794c3dfc31cfcc": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_f00dfb7fd4854a34b4619af817f62c05", - "_dom_classes": [], - "description": "100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 428, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 428, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_a54cfb4828f14b06a35a3e6d363cf7c2" - } - }, - "dade9df974f245b0b54c508f168f936b": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_67f19078963043f8b728d5efd232929a", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 428/428 [00:00<00:00, 890.92it/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": 
"@jupyter-widgets/controls", - "layout": "IPY_MODEL_57c6e4e82402447398a4868fa8c873a5" - } - }, - "f00dfb7fd4854a34b4619af817f62c05": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "a54cfb4828f14b06a35a3e6d363cf7c2": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "67f19078963043f8b728d5efd232929a": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - 
"_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "57c6e4e82402447398a4868fa8c873a5": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "804b202d17654dfe96a61d35f6f69d78": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_0e67f75ca3b34c718f903182760c3d25", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_cfc1c56037cf439d99ea7ced4cd606d5", - "IPY_MODEL_902809efcf36405d87a89aa7d01d76f4" - ] - } - }, 
- "0e67f75ca3b34c718f903182760c3d25": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "cfc1c56037cf439d99ea7ced4cd606d5": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_57a01101a9fb43d9823e216af0be1172", - "_dom_classes": [], - "description": "100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 54, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 54, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_c36b55e07c06403384d805e0d3622f1f" - } - }, - "902809efcf36405d87a89aa7d01d76f4": { - "model_module": "@jupyter-widgets/controls", - 
"model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_5d4e138304ae4257a1695c676cc365fc", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 54/54 [00:01<00:00, 50.64it/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_ffbb31034601480f87cf76ca6f51e49f" - } - }, - "57a01101a9fb43d9823e216af0be1172": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "c36b55e07c06403384d805e0d3622f1f": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - 
"order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "5d4e138304ae4257a1695c676cc365fc": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "ffbb31034601480f87cf76ca6f51e49f": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "74a6932964bc4ef6b37c1ae144d79e87": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - 
"_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_a2bf6c0cb9b94f5fbaa73253bbb65072", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_42f84c7b1df44a46a246558859f7474f", - "IPY_MODEL_ee13fe2a66764746bd33f9b0927dd8b9" - ] - } - }, - "a2bf6c0cb9b94f5fbaa73253bbb65072": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "42f84c7b1df44a46a246558859f7474f": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_3b411759bd0a4886bbea0e959f57b849", - "_dom_classes": [], - "description": "100%", - "_model_name": "FloatProgressModel", - 
"bar_style": "success", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_febbff92575f4bcb9426c89f2b0ab2f9" - } - }, - "ee13fe2a66764746bd33f9b0927dd8b9": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_27a442ed10ba4f938f57f8473bbb9e1d", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 1/1 [09:51<00:00, 591.34s/it]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_7945f511bd9a4626bb79d0e2fae49cee" - } - }, - "3b411759bd0a4886bbea0e959f57b849": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "febbff92575f4bcb9426c89f2b0ab2f9": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": 
null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "27a442ed10ba4f938f57f8473bbb9e1d": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "7945f511bd9a4626bb79d0e2fae49cee": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - 
"min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "c230feee9b8a4d9e98a3344118988bb8": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_6ac527d01f8045b5a3441e7b88d02769", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_34b780f478994748afefefed7482aa42", - "IPY_MODEL_b51ffede8497455ca6f8a330e7543496" - ] - } - }, - "6ac527d01f8045b5a3441e7b88d02769": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - 
"object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "34b780f478994748afefefed7482aa42": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_47f1dfb0492c4033b52ed81923349840", - "_dom_classes": [], - "description": "100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_736e39657a204c2abbcfed7f76730b1e" - } - }, - "b51ffede8497455ca6f8a330e7543496": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_f19328ab2db9490f88c5c893bc07cfbf", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 1/1 [09:51<00:00, 591.22s/it]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f0620f9a62684f5ba8a9b9a61a7b8751" - } - }, - "47f1dfb0492c4033b52ed81923349840": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "736e39657a204c2abbcfed7f76730b1e": { - "model_module": 
"@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "f19328ab2db9490f88c5c893bc07cfbf": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "f0620f9a62684f5ba8a9b9a61a7b8751": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - 
"border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - } - } - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "QqB-9snlWZk9", - "colab_type": "text" - }, - "source": [ - "# Part 22, ChemBERTa: Pre-training a BERT-like model for masked language modelling of SMILES and molecular property prediction.\n", - "\n", - "![alt text](https://huggingface.co/front/assets/huggingface_mask.svg)\n", - "\n", - "By Seyone Chithrananda ([Twitter](https://twitter.com/SeyoneC))\n", - "\n", - "Deep learning for chemistry and materials science remains a novel field with lots of potiential. However, the popularity of transfer learning based methods in areas such as NLP and computer vision have not yet been effectively developed in computational chemistry + machine learning. 
Using HuggingFace's suite of models and the ByteLevel tokenizer, we are able to train a large-transformer model, RoBERTa, on a large corpus of 100k SMILES strings from a commonly known benchmark chemistry dataset, ZINC.\n", - "\n", - "Training RoBERTa over 5 epochs, the model achieves a pretty good loss of 0.398, and may likely continue to decrease if trained for a larger number of epochs. The model can predict tokens within a SMILES sequence/molecule, allowing for variants of a molecule within discoverable chemical space to be predicted.\n", - "\n", - "By applying the representations of functional groups and atoms learned by the model, we can try to tackle problems of toxicity, solubility, drug-likeness, and synthesis accessibility on smaller datasets using the learned representations as features for graph convolution and attention models on the graph structure of molecules, as well as fine-tuning of BERT. Finally, we propose the use of attention visualization as a helpful tool for chemistry practitioners and students to quickly identify important substructures in various chemical properties.\n", - "\n", - "Additionally, visualization of the attention mechanism have been seen through previous research as incredibly valuable towards chemical reaction classification. The applications of open-sourcing large-scale transformer models such as RoBERTa with HuggingFace may allow for the acceleration of these individual research directions.\n", - "\n", - "A link to a repository which includes the training, uploading and evaluation notebook (with sample predictions on compounds such as Remdesivir) can be found [here](https://github.com/seyonechithrananda/bert-loves-chemistry). 
All of the notebooks can be copied into a new Colab runtime for easy execution.\n", - "\n", - "For the sake of this tutorial, we'll be fine-tuning RoBERTa on a small-scale molecule dataset, to show the potiential and effectiveness of HuggingFace's NLP-based transfer learning applied to computational chemistry. Output for some cells are purposely cleared for readability, so do not worry if some output messages for your cells differ!" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "6CMz5kaBWc_Y", - "colab_type": "text" - }, - "source": [ - "Installing DeepChem from source, alongside RDKit for molecule visualizations" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "8l8SDyyNWv0N", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 621 - }, - "outputId": "ef6ac53d-6b2c-4aa5-d0b6-a2f16572a8a9" - }, - "source": [ - "!pip install transformers\n" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Collecting transformers\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/48/35/ad2c5b1b8f99feaaf9d7cdadaeef261f098c6e1a6a2935d4d07662a6b780/transformers-2.11.0-py3-none-any.whl (674kB)\n", - "\u001b[K |████████████████████████████████| 675kB 4.6MB/s \n", - "\u001b[?25hRequirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.6/dist-packages (from transformers) (2019.12.20)\n", - "Collecting sentencepiece\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/d4/a4/d0a884c4300004a78cca907a6ff9a5e9fe4f090f5d95ab341c53d28cbc58/sentencepiece-0.1.91-cp36-cp36m-manylinux1_x86_64.whl (1.1MB)\n", - "\u001b[K |████████████████████████████████| 1.1MB 23.9MB/s \n", - "\u001b[?25hRequirement already satisfied: packaging in /usr/local/lib/python3.6/dist-packages (from transformers) (20.4)\n", - "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.6/dist-packages (from transformers) (4.41.1)\n", - "Requirement already 
satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from transformers) (1.18.5)\n", - "Collecting tokenizers==0.7.0\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/14/e5/a26eb4716523808bb0a799fcfdceb6ebf77a18169d9591b2f46a9adb87d9/tokenizers-0.7.0-cp36-cp36m-manylinux1_x86_64.whl (3.8MB)\n", - "\u001b[K |████████████████████████████████| 3.8MB 40.2MB/s \n", - "\u001b[?25hRequirement already satisfied: dataclasses; python_version < \"3.7\" in /usr/local/lib/python3.6/dist-packages (from transformers) (0.7)\n", - "Requirement already satisfied: requests in /usr/local/lib/python3.6/dist-packages (from transformers) (2.23.0)\n", - "Collecting sacremoses\n", - "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7d/34/09d19aff26edcc8eb2a01bed8e98f13a1537005d31e95233fd48216eed10/sacremoses-0.0.43.tar.gz (883kB)\n", - "\u001b[K |████████████████████████████████| 890kB 57.9MB/s \n", - "\u001b[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.6/dist-packages (from transformers) (3.0.12)\n", - "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from packaging->transformers) (1.12.0)\n", - "Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.6/dist-packages (from packaging->transformers) (2.4.7)\n", - "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (1.24.3)\n", - "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2020.4.5.2)\n", - "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (2.9)\n", - "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests->transformers) (3.0.4)\n", - "Requirement already satisfied: click in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) 
(7.1.2)\n", - "Requirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from sacremoses->transformers) (0.15.1)\n", - "Building wheels for collected packages: sacremoses\n", - " Building wheel for sacremoses (setup.py) ... \u001b[?25l\u001b[?25hdone\n", - " Created wheel for sacremoses: filename=sacremoses-0.0.43-cp36-none-any.whl size=893260 sha256=5b83ab4c2e1f1420040b2a1c7b2a43e2f0eb4c3ae1c251ab5ff24cc5baf3bff9\n", - " Stored in directory: /root/.cache/pip/wheels/29/3c/fd/7ce5c3f0666dab31a50123635e6fb5e19ceb42ce38d4e58f45\n", - "Successfully built sacremoses\n", - "Installing collected packages: sentencepiece, tokenizers, sacremoses, transformers\n", - "Successfully installed sacremoses-0.0.43 sentencepiece-0.1.91 tokenizers-0.7.0 transformers-2.11.0\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ZE1C_baibNUh", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 123 - }, - "outputId": "847617a3-dc37-4bae-c425-cc6ab2dfd047" - }, - "source": [ - "import sys\n", - "!test -d bertviz_repo && echo \"FYI: bertviz_repo directory already exists, to pull latest version uncomment this line: !rm -r bertviz_repo\"\n", - "# !rm -r bertviz_repo # Uncomment if you need a clean pull from repo\n", - "!test -d bertviz_repo || git clone https://github.com/jessevig/bertviz bertviz_repo\n", - "if not 'bertviz_repo' in sys.path:\n", - " sys.path += ['bertviz_repo']\n", - "!pip install regex" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Cloning into 'bertviz_repo'...\n", - "remote: Enumerating objects: 1074, done.\u001b[K\n", - "remote: Total 1074 (delta 0), reused 0 (delta 0), pack-reused 1074\u001b[K\n", - "Receiving objects: 100% (1074/1074), 99.41 MiB | 27.70 MiB/s, done.\n", - "Resolving deltas: 100% (687/687), done.\n", - "Requirement already satisfied: regex in /usr/local/lib/python3.6/dist-packages (2019.12.20)\n" - ], - "name": 
"stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "GOAEt4gsTZ5u", - "colab_type": "text" - }, - "source": [ - "We want to install NVIDIA's Apex tool, for the training pipeline used by `simple-transformers` and Weights and Biases." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "VjDBOn0Wmybe", - "colab_type": "code", - "colab": {} - }, - "source": [ - "!git clone https://github.com/NVIDIA/apex\n", - "!cd /content/apex\n", - "!pip install -v --no-cache-dir /content/apex\n", - "!cd .." - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "uSuLMmOSW531", - "colab_type": "text" - }, - "source": [ - "Now, to ensure our model demonstrates an understanding of chemical syntax and molecular structure, we'll be testing it on predicting a masked token/character within the SMILES molecule for Remdesivir." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "I1MLAix0pB-C", - "colab_type": "code", - "colab": {} - }, - "source": [ - "# Test if NVIDIA apex training tool works\n", - "from apex import amp" - ], - "execution_count": 4, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "9OLp-fX5W3Ah", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 351, - "referenced_widgets": [ - "af2449a85886477eb1d774c35945ea7d", - "b510b5c9444a4f7d9dbf5e7f370bcb00", - "625f9ed2e54044bcb54a80d8adfd36c6", - "656a9e87d904492ea39c2372c15e68cb", - "0d636f90b41d4bae95fe4f41c641c35e", - "444e92b80c5c4c7fb7b9a7e0076de66a", - "dd9ef67b16e84af096ea9def685067b1", - "4633e4426e764ca6a0b74b452461f5ec", - "e3c293267cf74acfa6b1a30285bd8cd8", - "1cea9d510e99411d85de2989133206a5", - "1afca71c542c418eafff01eeef65e3ec", - "2b673da9114441c88c2150e76b518259", - "25ccb68cdb014280a769f9b546b5c426", - "179af9da6aed4ddb827eeb6974b49284", - "8c336ac1a7bd474499b34cfc6ded05ec", - "eb4ab62124f24b239f8219fd212becf6", - "e49da45c84a34da9b66917afdb9060a0", - 
"ed2a0c847c834b02896ed12439e286bb", - "bfa6ad8f732b4687afbe77181e98cb93", - "a49239fda632493db1e8f1284be9c1c5", - "d68594cf5441469d9fc3340032adde3b", - "c3bf797b8cc34c44a929e9309de06ef4", - "4b380e9403a643489305d6cdf797f99f", - "bf215f351bcd4237a7179b890466155c", - "09daf8e819ad451794ac88654cb7d942", - "1741c16025b542988affef0ae2c658e1", - "fed80eb0a92b4351af2e9e8ebff99bdc", - "15dffad155504eff99165df54f7e7656", - "9cfd4f77d1fa485ca4d6ac8d1cdc6738", - "fda92cac1a5e4d8887d31cea9249ba40", - "1d2524191b334cba86943987e3b751ee", - "de1426d650f0450e92bb4cdd02b90d69", - "fa7e397dcc424d1c9685744df739e488", - "c58dd7d8b78b450bad74c780d69a7daf", - "357d3fc89e95460c822a8f1a8e5e2737", - "91bf59c36b344912bf91cb80b132555d", - "9f250f5430924e3cb87b0d71c1301be0", - "b8ef824d51a44562a819194c66f3d77d", - "3e14aa06a7944ffc911268afe00e77ce", - "d72af554bf5846ceb23a700e34b2cd28", - "a383c283f06f4c309357acc2ecb3bdbb", - "c0a3ddc86fd549db9213b42166ac1097", - "32ac6cc843864ee7b2b01f4c7c2caca6", - "b9cdf760c72a4c80a3d7d628ed8fd765", - "8aa8a9fdca414cc3bf6cfef38b4df57c", - "81d61ea6566e4ed6ae2bdc21f1c22faa", - "6ecab3cb0ec24b3689db9682c000a325", - "3cbc597bdcbf43f98791115e65aecab4" - ] - }, - "outputId": "652be3a4-16a2-467d-a9c9-9d816191c1bb" - }, - "source": [ - "from transformers import AutoModelWithLMHead, AutoTokenizer, pipeline, RobertaModel, RobertaTokenizer\n", - "from bertviz import head_view\n", - "\n", - "model = AutoModelWithLMHead.from_pretrained(\"seyonec/ChemBERTa-zinc-base-v1\")\n", - "tokenizer = AutoTokenizer.from_pretrained(\"seyonec/ChemBERTa-zinc-base-v1\")\n", - "\n", - "fill_mask = pipeline('fill-mask', model=model, tokenizer=tokenizer)\n" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "af2449a85886477eb1d774c35945ea7d", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', 
max=501.0, style=ProgressStyle(description_…" - ] - }, - "metadata": { - "tags": [] + }, + "metadata": { + "tags": [] } }, { @@ -5455,12 +4562,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "e3c293267cf74acfa6b1a30285bd8cd8", + "model_id": "4b9531aadec94d6997f4df3e48fe9dd5", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=178812144.0, style=ProgressStyle(descri…" + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=336423582.0, style=ProgressStyle(descri…" ] }, "metadata": { @@ -5478,12 +4585,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "e49da45c84a34da9b66917afdb9060a0", + "model_id": "f67218c34f29439b879de2b02da1309d", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=9429.0, style=ProgressStyle(description…" + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=11058.0, style=ProgressStyle(descriptio…" ] }, "metadata": { @@ -5501,12 +4608,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "09daf8e819ad451794ac88654cb7d942", + "model_id": "a5f0a5ad353c41c69a275ef766cf7775", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=3213.0, style=ProgressStyle(description…" + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=4056.0, style=ProgressStyle(description…" ] }, "metadata": { @@ -5524,12 +4631,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "fa7e397dcc424d1c9685744df739e488", + "model_id": "45795699e2f247ae916dbec650640fdb", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, 
description='Downloading', max=150.0, style=ProgressStyle(description_…" + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=772.0, style=ProgressStyle(description_…" ] }, "metadata": { @@ -5547,12 +4654,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "a383c283f06f4c309357acc2ecb3bdbb", + "model_id": "8004a4812f6144aca56648a6ee5d1c6b", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=166.0, style=ProgressStyle(description_…" + "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=62.0, style=ProgressStyle(description_w…" ] }, "metadata": { @@ -5565,14 +4672,6 @@ "\n" ], "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py:831: FutureWarning: Parameter max_len is deprecated and will be removed in a future release. Use model_max_length instead.\n", - " category=FutureWarning,\n" - ], - "name": "stderr" } ] }, @@ -5583,17 +4682,16 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 105 + "height": 181 }, - "outputId": "a54e4885-f920-4841-b4ce-da35ac53433a" + "outputId": "81415d51-2a2e-4398-b057-1075b0e9bc36" }, "source": [ - "remdesivir_mask = \"CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=1\"\n", - "remdesivir = \"CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=C1\"\n", + "smiles_mask = \"C1=CC=CCC1\"\n", + "smiles = \"C1=CC=CC=C1\"\n", "\n", - "\"CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=O1\"\n", "\n", - "masked_smi = fill_mask(remdesivir_mask)\n", + "masked_smi = fill_mask(smiles_mask)\n", "\n", "for smi in masked_smi:\n", " print(smi)" @@ -5603,13 +4701,24 @@ { "output_type": "stream", "text": [ - "{'sequence': 
' CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=C1', 'score': 0.5986589789390564, 'token': 39}\n", - "{'sequence': ' CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=O1', 'score': 0.09766950458288193, 'token': 51}\n", - "{'sequence': ' CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=N1', 'score': 0.0769445151090622, 'token': 50}\n", - "{'sequence': ' CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=21', 'score': 0.024126358330249786, 'token': 22}\n", - "{'sequence': ' CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=H1', 'score': 0.018853096291422844, 'token': 44}\n" + "{'sequence': 'C1=CC=CC=C1', 'score': 0.9903193712234497, 'token': 33, 'token_str': '='}\n", + "{'sequence': 'C1=CC=CC2C1', 'score': 0.006178670562803745, 'token': 22, 'token_str': '2'}\n", + "{'sequence': 'C1=CC=CC1C1', 'score': 0.0012479453580453992, 'token': 21, 'token_str': '1'}\n", + "{'sequence': 'C1=CC=CC)C1', 'score': 0.000855799880810082, 'token': 13, 'token_str': ')'}\n", + "{'sequence': 'C1=CC=CC/C1', 'score': 0.00035406468668952584, 'token': 19, 'token_str': '/'}\n" ], "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.6/dist-packages/transformers/pipelines.py:882: UserWarning: This overload of nonzero is deprecated:\n", + "\tnonzero()\n", + "Consider using one of the following signatures instead:\n", + "\tnonzero(*, bool as_tuple) (Triggered internally at /pytorch/torch/csrc/utils/python_arg_parser.cpp:766.)\n", + " masked_index = (input_ids == self.tokenizer.mask_token_id).nonzero()\n" + ], + "name": "stderr" } ] }, @@ -5628,18 +4737,281 @@ "metadata": { "id": "gM0KLeoqWACR", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": 
"152e8d17-393b-4638-d120-6d3e4aed8f17" }, "source": [ - "!wget -c https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "!chmod +x Miniconda3-latest-Linux-x86_64.sh\n", - "!bash ./Miniconda3-latest-Linux-x86_64.sh -b -f -p /usr/local\n", + "!wget -c https://repo.continuum.io/miniconda/Miniconda3-py37_4.8.3-Linux-x86_64.sh\n", + "!chmod +x Miniconda3-py37_4.8.3-Linux-x86_64.sh\n", + "!time bash ./Miniconda3-py37_4.8.3-Linux-x86_64.sh -b -f -p /usr/local\n", "!time conda install -q -y -c conda-forge rdkit\n", + "\n", "import sys\n", - "sys.path.append('/usr/local/lib/python3.7/site-packages/')" + "sys.path.append('/usr/local/lib/python3.7/site-packages/')\n" ], - "execution_count": null, - "outputs": [] + "execution_count": 7, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--2020-08-07 23:54:04-- https://repo.continuum.io/miniconda/Miniconda3-py37_4.8.3-Linux-x86_64.sh\n", + "Resolving repo.continuum.io (repo.continuum.io)... 104.18.200.79, 104.18.201.79, 2606:4700::6812:c94f, ...\n", + "Connecting to repo.continuum.io (repo.continuum.io)|104.18.200.79|:443... connected.\n", + "HTTP request sent, awaiting response... 301 Moved Permanently\n", + "Location: https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.3-Linux-x86_64.sh [following]\n", + "--2020-08-07 23:54:04-- https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.3-Linux-x86_64.sh\n", + "Resolving repo.anaconda.com (repo.anaconda.com)... 104.16.130.3, 104.16.131.3, 2606:4700::6810:8203, ...\n", + "Connecting to repo.anaconda.com (repo.anaconda.com)|104.16.130.3|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 88867207 (85M) [application/x-sh]\n", + "Saving to: ‘Miniconda3-py37_4.8.3-Linux-x86_64.sh’\n", + "\n", + "Miniconda3-py37_4.8 100%[===================>] 84.75M 185MB/s in 0.5s \n", + "\n", + "2020-08-07 23:54:04 (185 MB/s) - ‘Miniconda3-py37_4.8.3-Linux-x86_64.sh’ saved [88867207/88867207]\n", + "\n", + "PREFIX=/usr/local\n", + "Unpacking payload ...\n", + "Collecting package metadata (current_repodata.json): - \b\b\\ \b\b| \b\bdone\n", + "Solving environment: - \b\b\\ \b\bdone\n", + "\n", + "## Package Plan ##\n", + "\n", + " environment location: /usr/local\n", + "\n", + " added / updated specs:\n", + " - _libgcc_mutex==0.1=main\n", + " - ca-certificates==2020.1.1=0\n", + " - certifi==2020.4.5.1=py37_0\n", + " - cffi==1.14.0=py37he30daa8_1\n", + " - chardet==3.0.4=py37_1003\n", + " - conda-package-handling==1.6.1=py37h7b6447c_0\n", + " - conda==4.8.3=py37_0\n", + " - cryptography==2.9.2=py37h1ba5d50_0\n", + " - idna==2.9=py_1\n", + " - ld_impl_linux-64==2.33.1=h53a641e_7\n", + " - libedit==3.1.20181209=hc058e9b_0\n", + " - libffi==3.3=he6710b0_1\n", + " - libgcc-ng==9.1.0=hdf63c60_0\n", + " - libstdcxx-ng==9.1.0=hdf63c60_0\n", + " - ncurses==6.2=he6710b0_1\n", + " - openssl==1.1.1g=h7b6447c_0\n", + " - pip==20.0.2=py37_3\n", + " - pycosat==0.6.3=py37h7b6447c_0\n", + " - pycparser==2.20=py_0\n", + " - pyopenssl==19.1.0=py37_0\n", + " - pysocks==1.7.1=py37_0\n", + " - python==3.7.7=hcff3b4d_5\n", + " - readline==8.0=h7b6447c_0\n", + " - requests==2.23.0=py37_0\n", + " - ruamel_yaml==0.15.87=py37h7b6447c_0\n", + " - setuptools==46.4.0=py37_0\n", + " - six==1.14.0=py37_0\n", + " - sqlite==3.31.1=h62c20be_1\n", + " - tk==8.6.8=hbc83047_0\n", + " - tqdm==4.46.0=py_0\n", + " - urllib3==1.25.8=py37_0\n", + " - wheel==0.34.2=py37_0\n", + " - xz==5.2.5=h7b6447c_0\n", + " - yaml==0.1.7=had09818_2\n", + " - zlib==1.2.11=h7b6447c_3\n", + "\n", + "\n", + "The following NEW packages will be INSTALLED:\n", + "\n", + " _libgcc_mutex 
pkgs/main/linux-64::_libgcc_mutex-0.1-main\n", + " ca-certificates pkgs/main/linux-64::ca-certificates-2020.1.1-0\n", + " certifi pkgs/main/linux-64::certifi-2020.4.5.1-py37_0\n", + " cffi pkgs/main/linux-64::cffi-1.14.0-py37he30daa8_1\n", + " chardet pkgs/main/linux-64::chardet-3.0.4-py37_1003\n", + " conda pkgs/main/linux-64::conda-4.8.3-py37_0\n", + " conda-package-han~ pkgs/main/linux-64::conda-package-handling-1.6.1-py37h7b6447c_0\n", + " cryptography pkgs/main/linux-64::cryptography-2.9.2-py37h1ba5d50_0\n", + " idna pkgs/main/noarch::idna-2.9-py_1\n", + " ld_impl_linux-64 pkgs/main/linux-64::ld_impl_linux-64-2.33.1-h53a641e_7\n", + " libedit pkgs/main/linux-64::libedit-3.1.20181209-hc058e9b_0\n", + " libffi pkgs/main/linux-64::libffi-3.3-he6710b0_1\n", + " libgcc-ng pkgs/main/linux-64::libgcc-ng-9.1.0-hdf63c60_0\n", + " libstdcxx-ng pkgs/main/linux-64::libstdcxx-ng-9.1.0-hdf63c60_0\n", + " ncurses pkgs/main/linux-64::ncurses-6.2-he6710b0_1\n", + " openssl pkgs/main/linux-64::openssl-1.1.1g-h7b6447c_0\n", + " pip pkgs/main/linux-64::pip-20.0.2-py37_3\n", + " pycosat pkgs/main/linux-64::pycosat-0.6.3-py37h7b6447c_0\n", + " pycparser pkgs/main/noarch::pycparser-2.20-py_0\n", + " pyopenssl pkgs/main/linux-64::pyopenssl-19.1.0-py37_0\n", + " pysocks pkgs/main/linux-64::pysocks-1.7.1-py37_0\n", + " python pkgs/main/linux-64::python-3.7.7-hcff3b4d_5\n", + " readline pkgs/main/linux-64::readline-8.0-h7b6447c_0\n", + " requests pkgs/main/linux-64::requests-2.23.0-py37_0\n", + " ruamel_yaml pkgs/main/linux-64::ruamel_yaml-0.15.87-py37h7b6447c_0\n", + " setuptools pkgs/main/linux-64::setuptools-46.4.0-py37_0\n", + " six pkgs/main/linux-64::six-1.14.0-py37_0\n", + " sqlite pkgs/main/linux-64::sqlite-3.31.1-h62c20be_1\n", + " tk pkgs/main/linux-64::tk-8.6.8-hbc83047_0\n", + " tqdm pkgs/main/noarch::tqdm-4.46.0-py_0\n", + " urllib3 pkgs/main/linux-64::urllib3-1.25.8-py37_0\n", + " wheel pkgs/main/linux-64::wheel-0.34.2-py37_0\n", + " xz 
pkgs/main/linux-64::xz-5.2.5-h7b6447c_0\n", + " yaml pkgs/main/linux-64::yaml-0.1.7-had09818_2\n", + " zlib pkgs/main/linux-64::zlib-1.2.11-h7b6447c_3\n", + "\n", + "\n", + "Preparing transaction: / \b\b- \b\b\\ \b\bdone\n", + "Executing transaction: / \b\b- \b\b\\ \b\b| \b\b/ \b\b- \b\b\\ \b\b| \b\b/ \b\b- \b\b\\ \b\b| \b\bdone\n", + "installation finished.\n", + "WARNING:\n", + " You currently have a PYTHONPATH environment variable set. This may cause\n", + " unexpected behavior when running the Python interpreter in Miniconda3.\n", + " For best results, please verify that your PYTHONPATH only points to\n", + " directories of packages that are compatible with the Python interpreter\n", + " in Miniconda3: /usr/local\n", + "\n", + "real\t0m30.174s\n", + "user\t0m13.043s\n", + "sys\t0m3.861s\n", + "Collecting package metadata (current_repodata.json): ...working... done\n", + "Solving environment: ...working... done\n", + "\n", + "## Package Plan ##\n", + "\n", + " environment location: /usr/local\n", + "\n", + " added / updated specs:\n", + " - rdkit\n", + "\n", + "\n", + "The following packages will be downloaded:\n", + "\n", + " package | build\n", + " ---------------------------|-----------------\n", + " boost-1.72.0 | py37h9de70de_0 316 KB conda-forge\n", + " boost-cpp-1.72.0 | h7b93d67_2 16.3 MB conda-forge\n", + " bzip2-1.0.8 | h516909a_2 396 KB conda-forge\n", + " ca-certificates-2020.6.20 | hecda079_0 145 KB conda-forge\n", + " cairo-1.16.0 | h3fc0475_1005 1.5 MB conda-forge\n", + " certifi-2020.6.20 | py37hc8dfbb8_0 151 KB conda-forge\n", + " conda-4.8.3 | py37hc8dfbb8_1 3.0 MB conda-forge\n", + " fontconfig-2.13.1 | h1056068_1002 365 KB conda-forge\n", + " freetype-2.10.2 | he06d7ca_0 905 KB conda-forge\n", + " glib-2.65.0 | h3eb4bd4_0 2.9 MB\n", + " icu-67.1 | he1b5a44_0 12.9 MB conda-forge\n", + " jpeg-9d | h516909a_0 266 KB conda-forge\n", + " lcms2-2.11 | hbd6801e_0 431 KB conda-forge\n", + " libblas-3.8.0 | 17_openblas 11 KB conda-forge\n", + " 
libcblas-3.8.0 | 17_openblas 11 KB conda-forge\n", + " libgfortran-ng-7.5.0 | hdf63c60_14 1.3 MB conda-forge\n", + " libiconv-1.15 | h516909a_1006 2.0 MB conda-forge\n", + " liblapack-3.8.0 | 17_openblas 11 KB conda-forge\n", + " libopenblas-0.3.10 |pthreads_hb3c22a3_4 7.8 MB conda-forge\n", + " libpng-1.6.37 | hed695b0_1 308 KB conda-forge\n", + " libtiff-4.1.0 | hc7e4089_6 668 KB conda-forge\n", + " libuuid-2.32.1 | h14c3975_1000 26 KB conda-forge\n", + " libwebp-base-1.1.0 | h516909a_3 845 KB conda-forge\n", + " libxcb-1.13 | h14c3975_1002 396 KB conda-forge\n", + " libxml2-2.9.10 | h72b56ed_2 1.3 MB conda-forge\n", + " lz4-c-1.9.2 | he1b5a44_1 226 KB conda-forge\n", + " numpy-1.19.1 | py37h8960a57_0 5.2 MB conda-forge\n", + " olefile-0.46 | py_0 31 KB conda-forge\n", + " openssl-1.1.1g | h516909a_1 2.1 MB conda-forge\n", + " pandas-1.1.0 | py37h3340039_0 10.5 MB conda-forge\n", + " pcre-8.44 | he1b5a44_0 261 KB conda-forge\n", + " pillow-7.2.0 | py37h718be6c_1 675 KB conda-forge\n", + " pixman-0.38.0 | h516909a_1003 594 KB conda-forge\n", + " pthread-stubs-0.4 | h14c3975_1001 5 KB conda-forge\n", + " pycairo-1.19.1 | py37h01af8b0_3 77 KB conda-forge\n", + " python-dateutil-2.8.1 | py_0 220 KB conda-forge\n", + " python_abi-3.7 | 1_cp37m 4 KB conda-forge\n", + " pytz-2020.1 | pyh9f0ad1d_0 227 KB conda-forge\n", + " rdkit-2020.03.4 | py37hdd87690_0 24.6 MB conda-forge\n", + " tk-8.6.10 | hed695b0_0 3.2 MB conda-forge\n", + " xorg-kbproto-1.0.7 | h14c3975_1002 26 KB conda-forge\n", + " xorg-libice-1.0.10 | h516909a_0 57 KB conda-forge\n", + " xorg-libsm-1.2.3 | h84519dc_1000 25 KB conda-forge\n", + " xorg-libx11-1.6.11 | h516909a_0 920 KB conda-forge\n", + " xorg-libxau-1.0.9 | h14c3975_0 13 KB conda-forge\n", + " xorg-libxdmcp-1.1.3 | h516909a_0 18 KB conda-forge\n", + " xorg-libxext-1.3.4 | h516909a_0 51 KB conda-forge\n", + " xorg-libxrender-0.9.10 | h516909a_1002 31 KB conda-forge\n", + " xorg-renderproto-0.11.1 | h14c3975_1002 8 KB conda-forge\n", + " 
xorg-xextproto-7.3.0 | h14c3975_1002 27 KB conda-forge\n", + " xorg-xproto-7.0.31 | h14c3975_1007 72 KB conda-forge\n", + " zstd-1.4.5 | h6597ccf_2 712 KB conda-forge\n", + " ------------------------------------------------------------\n", + " Total: 103.8 MB\n", + "\n", + "The following NEW packages will be INSTALLED:\n", + "\n", + " boost conda-forge/linux-64::boost-1.72.0-py37h9de70de_0\n", + " boost-cpp conda-forge/linux-64::boost-cpp-1.72.0-h7b93d67_2\n", + " bzip2 conda-forge/linux-64::bzip2-1.0.8-h516909a_2\n", + " cairo conda-forge/linux-64::cairo-1.16.0-h3fc0475_1005\n", + " fontconfig conda-forge/linux-64::fontconfig-2.13.1-h1056068_1002\n", + " freetype conda-forge/linux-64::freetype-2.10.2-he06d7ca_0\n", + " glib pkgs/main/linux-64::glib-2.65.0-h3eb4bd4_0\n", + " icu conda-forge/linux-64::icu-67.1-he1b5a44_0\n", + " jpeg conda-forge/linux-64::jpeg-9d-h516909a_0\n", + " lcms2 conda-forge/linux-64::lcms2-2.11-hbd6801e_0\n", + " libblas conda-forge/linux-64::libblas-3.8.0-17_openblas\n", + " libcblas conda-forge/linux-64::libcblas-3.8.0-17_openblas\n", + " libgfortran-ng conda-forge/linux-64::libgfortran-ng-7.5.0-hdf63c60_14\n", + " libiconv conda-forge/linux-64::libiconv-1.15-h516909a_1006\n", + " liblapack conda-forge/linux-64::liblapack-3.8.0-17_openblas\n", + " libopenblas conda-forge/linux-64::libopenblas-0.3.10-pthreads_hb3c22a3_4\n", + " libpng conda-forge/linux-64::libpng-1.6.37-hed695b0_1\n", + " libtiff conda-forge/linux-64::libtiff-4.1.0-hc7e4089_6\n", + " libuuid conda-forge/linux-64::libuuid-2.32.1-h14c3975_1000\n", + " libwebp-base conda-forge/linux-64::libwebp-base-1.1.0-h516909a_3\n", + " libxcb conda-forge/linux-64::libxcb-1.13-h14c3975_1002\n", + " libxml2 conda-forge/linux-64::libxml2-2.9.10-h72b56ed_2\n", + " lz4-c conda-forge/linux-64::lz4-c-1.9.2-he1b5a44_1\n", + " numpy conda-forge/linux-64::numpy-1.19.1-py37h8960a57_0\n", + " olefile conda-forge/noarch::olefile-0.46-py_0\n", + " pandas 
conda-forge/linux-64::pandas-1.1.0-py37h3340039_0\n", + " pcre conda-forge/linux-64::pcre-8.44-he1b5a44_0\n", + " pillow conda-forge/linux-64::pillow-7.2.0-py37h718be6c_1\n", + " pixman conda-forge/linux-64::pixman-0.38.0-h516909a_1003\n", + " pthread-stubs conda-forge/linux-64::pthread-stubs-0.4-h14c3975_1001\n", + " pycairo conda-forge/linux-64::pycairo-1.19.1-py37h01af8b0_3\n", + " python-dateutil conda-forge/noarch::python-dateutil-2.8.1-py_0\n", + " python_abi conda-forge/linux-64::python_abi-3.7-1_cp37m\n", + " pytz conda-forge/noarch::pytz-2020.1-pyh9f0ad1d_0\n", + " rdkit conda-forge/linux-64::rdkit-2020.03.4-py37hdd87690_0\n", + " xorg-kbproto conda-forge/linux-64::xorg-kbproto-1.0.7-h14c3975_1002\n", + " xorg-libice conda-forge/linux-64::xorg-libice-1.0.10-h516909a_0\n", + " xorg-libsm conda-forge/linux-64::xorg-libsm-1.2.3-h84519dc_1000\n", + " xorg-libx11 conda-forge/linux-64::xorg-libx11-1.6.11-h516909a_0\n", + " xorg-libxau conda-forge/linux-64::xorg-libxau-1.0.9-h14c3975_0\n", + " xorg-libxdmcp conda-forge/linux-64::xorg-libxdmcp-1.1.3-h516909a_0\n", + " xorg-libxext conda-forge/linux-64::xorg-libxext-1.3.4-h516909a_0\n", + " xorg-libxrender conda-forge/linux-64::xorg-libxrender-0.9.10-h516909a_1002\n", + " xorg-renderproto conda-forge/linux-64::xorg-renderproto-0.11.1-h14c3975_1002\n", + " xorg-xextproto conda-forge/linux-64::xorg-xextproto-7.3.0-h14c3975_1002\n", + " xorg-xproto conda-forge/linux-64::xorg-xproto-7.0.31-h14c3975_1007\n", + " zstd conda-forge/linux-64::zstd-1.4.5-h6597ccf_2\n", + "\n", + "The following packages will be UPDATED:\n", + "\n", + " ca-certificates pkgs/main::ca-certificates-2020.1.1-0 --> conda-forge::ca-certificates-2020.6.20-hecda079_0\n", + " certifi pkgs/main::certifi-2020.4.5.1-py37_0 --> conda-forge::certifi-2020.6.20-py37hc8dfbb8_0\n", + " conda pkgs/main::conda-4.8.3-py37_0 --> conda-forge::conda-4.8.3-py37hc8dfbb8_1\n", + " openssl pkgs/main::openssl-1.1.1g-h7b6447c_0 --> 
conda-forge::openssl-1.1.1g-h516909a_1\n", + " tk pkgs/main::tk-8.6.8-hbc83047_0 --> conda-forge::tk-8.6.10-hed695b0_0\n", + "\n", + "\n", + "Preparing transaction: ...working... done\n", + "Verifying transaction: ...working... done\n", + "Executing transaction: ...working... done\n", + "\n", + "real\t0m40.151s\n", + "user\t0m34.384s\n", + "sys\t0m4.069s\n" + ], + "name": "stdout" + } + ] }, { "cell_type": "code", @@ -5696,12 +5068,12 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 105 + "height": 99 }, - "outputId": "12d1a5ee-f184-4278-c6ed-346a8e6eb06d" + "outputId": "20e6cd93-aa8b-422d-c6c8-e29af7e29e2f" }, "source": [ - "sequence = f\"CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC={tokenizer.mask_token}1\"\n", + "sequence = f\"C1=CC=CC={tokenizer.mask_token}1\"\n", "substructure = \"CC=CC\"\n", "image_list = []\n", "\n", @@ -5732,11 +5104,11 @@ { "output_type": "stream", "text": [ - "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=C1\n", - "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=O1\n", - "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=N1\n", - "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=21\n", - "CCC(CC)COC(=O)[C@H](C)N[P@](=O)(OC[C@H]1O[C@](C#N)([C@H](O)[C@@H]1O)C1=CC=C2N1N=CN=C2N)OC1=CC=CC=H1\n" + "C1=CC=CC=C1\n", + "C1=CC=CC=CC1\n", + "C1=CC=CC=N1\n", + "C1=CC=CC=CN1\n", + "C1=CC=CC=CCC1\n" ], "name": "stdout" } @@ -5751,7 +5123,7 @@ "base_uri": "https://localhost:8080/", "height": 1000 }, - "outputId": "b764a21e-26b9-462f-807e-969e32a2e758" + "outputId": "0ed272c3-7cf3-4abd-e0ce-de572d36e692" }, "source": [ "from IPython.display import Image \n", @@ -5764,9 +5136,45 @@ { "output_type": "display_data", "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAACCrklEQVR4nO3dd3hTZf8G8Dvdu7R0T6DsPWWUpYLvy96oYAuIAoK0gEpxFtTfKxWQgsgS2SBLQIYLUIGyNxSQVTronnSPNOf3R2pbOtJBkpNxf65eXO3JOcm3BXL3ec4zJIIggIiISF8ZiF0AERGRmBiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk1xiERESk14zELkAlTpw4YW9v7+Li4uTkZGhoKHY5RESkuXQtCB88ePDZZ58dOHCgoKBAfsTOzs7V1dXNza30Tzs7O/knnp6eNjY24hZMRETikgiCIHYNSlNUVOTr63vp0iUvLy9bW9ukpKSkpCTF36A8Jl1cXNzc3Jydnd3d3Z2cnNzd3Z2dnd3c3GxtbdVWPBERiUKngvCjjz766quvPD09b9y4YWdnJz+Ynp4eFxcXHx9f4c/09PTo6Ojs7GwFT2hqampvb19la9LNzc3Ly8vISNea1ERE+kZ3gvD06dMvvviiIAh//vlnv379anlVXl5e5YwsTcr4+HjFlyvod/Xy8rK2tn7ub4uIiFRLR4IwIyOjY8eOUVFRwcHBCxcuVNbT5ufnp6WlVZeUSUlJxcXFCi43MzMrn5EVktLV1VUikSirVCIiqh8dCcLXX399165d3bp1O3PmjLGxsXpetLCwMCkpKTY2NjExMT4+Pj4+PiEhIS4uLjExMTY2NikpqaioSMHlQ4cOXbJkScuWLdVTLRERVUkXgnDjxo1Tp061srK6evVqs2bNxC6njIJ+19jYWEEQjI2No6OjLSwsxK6UiEh/aX0QPnr0qFOnTllZWdu3b584caLY5dRBz549z58/v2rVqlmzZoldCxGR/tLulWWkUukbb7yRlZU1btw47UpBAB988AGAZcuWKb7RSEREKqXdQfjZZ5+dP3/e09Nz3bp1YtdSZyNHjmzWrNnjx48PHDggdi1ERPpLi4Pw9OnTX3/9tYGBwbZt20pnDWoRAwODuXPnAvj666/FroWISH9p6z3CesyXOHv2bGZmpnztGGdnZxUXWCt5eXne3t7JycmnTp3q06eP2OUQEekjbQ3CesyXGDx48K+//lr6ZeW58KXz/Ly9va2srFRW+zMWLly4aNGi4cOH//zzz+p5RSIiKk8rg3DTpk1vvvlmXedLBAcHnz17Vj7bLzU1VfHJjo6O8uVGXVxcKqxE6uHhYWlp+dzfRInU1FRvb+/c3Nzw8PDWrVsr62mJiKiWtC8IS+dLbNu27Y033qjfkxQWFqakpMgXUas8zy8mJkbxXHgzM7Pyi45W+NPZ2blOez+98847a9euffvtt9evX1+/b4eIiOpNy4JQKpX26dPn/Pnz48aN27Nnj4peRRCExMTE0lVj4uLiEhISSteOiY+Pz83NVXC5sbGxvDXp7Oy8aNGiTp06KX65iIiI5s2bGxkZPX782NXVVanfChER1UDLgrDK/SXUr3TJmCrblImJiTKZTH7mmTNnevXqVeMTjh49+sCBA5988skXX3yh4tqJiOgZ2hSE9dtfQv0KCgpKlxvt169fgwYNarzk4sWL3bt3t7e
3j4qKUts4HSIighYFoYr2l9Acvr6+Z8+eXbly5ezZs8WuhYhIj2hNEIqyv4Q6HThwYPTo0Y0bN75//z73+yUiUhvtWFlm06ZNu3btsrKy2rFjh06mIIARI0a0atXq8ePH+/fvF7sWIiI9ogVB+OjRo8DAQABr1qzRqF2WlMvAwCAgIADAkiVLxK6FiEiPaHrXqHrmS2iI/Pz8xo0bJyQk/P3335o8GoiISJdoeoswODhYe/eXqCszM7MZM2YAWLp0qdi1ENUkMxMffIDGjWFqCldXTJmCmJiyRxcsgESC8+fLjuTnQyLB0KHqr5RIMY0OwtOnT4eEhGjv/hL1MHv2bEtLy6NHj965c0fsWoiqV1iIgQOxdCm6dsX//ofhw7F9O3r2RFyc2JUR1ZnmBmFGRoafn19xcfEnn3yiP/2E9vb2kyZNEgRh+fLlYtdCVL01a3DxIubPx969eO89rFuHH35AbCw++0zsyojqTHOD8J133omKiurWrdsnn3widi1q9d577xkaGm7bti0+Pl7sWoiqsXMnDAzwwQdlR/z84OGB3btRXCxeWUT1oaFBqA/zJarTpEmTESNGFBQUfPfdd2LXQlQVQcCNG2jcGA4OZQclEnTrhuxsRESUHYyPR2RkyUdUlPorJaoNTQxCPZkvocCCBQsArF69Ojs7W+xaiCrJzkZBwTMpKOfoCAApKWVHRo9G48YlHy1bqq9CorrQuCCUSqVvvPFGVlbWuHHj6r3Lkrbr1q2br69venr6xo0bxa6FqBqVZ17JjxiUe1cJCcGBAyUfuj79ibSXxi3lpVfzJRR4//33z5w5s3z58pkzZ3LFNdIsVlawsEBiYsXjyckA4OxcdqRvX/ToUfJ5fr5aiiOqM81qEZbOl9i6dauezJeojnzFtcjIyJ9++knsWoieJZGgUydERT0zWUIQcOkSHBzQqJFohRHViwYFYfn5Ev379xe7HJFJJBL5jVJOridNNHEiACxeXHZk927ExsLPDwCio3HvnjiFEdWdBi2xpvP7S9RVQUFBo0aNEhIS/vrrL/5mQJqlqAh9++L8eYwZg759cfcufvgBNjbo2RNXrqB05k9YGHx9Sz7Pz4e5OYYMwZEjYlVNVCVNaRHq83yJ6piamr7zzjtgo5A0kLExvv8eQ4fi2DHMmYO1a1FUhNRUHDmC8vNf09LEK5GotjSiRRgREdGpU6fMzMytW7f6ybtWCACQlpbm5eWVm5t769atNm3aiF0O0b927cLrr9d82sWL6NZN9dUQPRfxW4RSqXTixImZmZljx45lClZgb28/efJkQRC++eYbsWshKmf+/FqdFhur4jqIlED8ICydL7F+/Xqxa9FE8hXXduzYwRXXSIOMH1+r07gGN2kDkYOQ8yVq1Lhx41GjRhUUFHz77bdi10L0r8BASCQ1n8YgJG0gZhByvkQtzZ8/H8DatWu54hppCk9P9OpV82nsGiVtIGYQ6u3+EnXVrVu3Pn36pKen//DDD2LXQvSv2gyWYYuQtIFoQbh582bOl6i9999/H8Dy5culUqnYtRABAMaPR42L/zEISRuIE4QRERHyZVNWr16tn/tL1NWwYcNatWoVFRW1b98+sWshAgA4OuKll2o4h12jpA1ECELOl6gHiUQyd+5ccHI9aZRXX63hhPR05OWppRSi+hNhQv3HH3/8v//9z9PT88aNGxwpWnulK679+eefL774otjlEAEZGXBxQUGBonMePoSPj7oKIqoPdbcIExISQkNDDQ0Nd+7cyRSsE1NT01mzZoGNQtIcDRrgv/+t4Rz2jpLGU3cQpqenu7u7N2rUqHfv3mp+aR0wc+ZMKyurX3/99ebNm2LXQgSgFr2jHC9DGk/dQejl5ZWWlvbo0aNz586p+aV1QOmKa6GhoWLXQgQAGDECVlaKTmAQksZTdxBaWlpOnz4dwLJly2o8OTs7O5+7Wj9r3rx5RkZG27dvf/Lkidi1EAEWFhg6VNEJDELSeCKMGg0
ICDAzMztw4MDDhw8VnLZhwwYvL6+NGzeqrTCtIF9xraio6LvvvhO7FiIANfWO8h4haTwRgtDZ2XnixIkymWz58uUKTrOzs0tPT1+6dGlxcbHaatMKH3zwAYDVq1dnZmaKXQsRMHgw7O2rfZQtQtJ44kyo/+CDDwwMDDZv3pySklLdOaNGjWratOnjx48PHjyoxtK0QLdu3fr27ZuZmcnmMmkEExOMGFHtowxC0njiBGGLFi0GDx6cm5u7evXq6s4xMDCQTyEPCQlRY2naQb7i2rJly4qKisSuhUhh7yi7RknjibZD/cmTJ/v37+/o6BgVFWVubl7lObm5ud7e3ikpKadPn+Z0i/IEQWjbtu2dO3d27tz5em3WPiZSKakUHh5ITKz60bQ0cNIwaTDRFt3u169f9+7dk5OTt27dWt05FhYWM2fOBKeQV1K64lpISIhYv8oQlTEywpgx1T7K3lHSbGJuwzRv3jwA33zzjUwmq+6cWbNmmZubHzp06O7du2osTQv4+fm5urreuHHjr7/+ErsWIvaOkhYTMwjHjBnj4+Nz//79Q4cOVXeOk5OTv7+/Xk0hLywsTEtLq/E0U1PT119/3dDQ8OrVq2qoiqgGvXvDw6Pqh2Ji1FsKUd2IGYSGhoYBAQGoqedz3rx5BgYGW7ZsSUhIUFdpYvr000/bt29/+vRpxadJpdKwsDCZTObs7KyewogUMTDA+PFVPpLATgvSbGIGIYC33nqrYcOGZ86cUbDiWvPmzYcPH15QUKBgiKnOOHXq1LJlyxITE41q2vL0iy++uHjxoru7+5AhQ9RTG1ENXnut8rF/gC9tbNRfC1HtiRyEFhYWM2bMQE2NQvlsge+++y4nJ0dNlYkhPT3dz8+vuLj4s88+69mzp4Izz5w583//938GBgZbt261VzCXmUidunU77OhYAFwAVgCvA42AVkBsfLzYlREpInIQ4t8V1w4ePPjgwYPqzvH19e3Zs2daWtrmzZvVWJq6zZgxIzo62tfX96OPPlJw2tOnT994443i4uIFCxZwY0LSKBemTbMEegBzgF1AFAAgjqNGSbOJH4ROTk5vvPFGjSuulU4h19UV177//vs9e/bY2tpu377d0NBQwZkzZ86MjIzs0qVLcHCw2sojqo2JEydW/v8Zy1GjpNlEm1Bf3r1791q3bm1qahoVFeXo6FjlOTKZrGXLlg8ePNi7d+/YsWPVXKGqPXz4sHPnzllZWTt27JgwYYKCM7dt2+bv729paXn16tXmzZurrUKiWmrXrl14eHj5I4aGhgUFBYp/vSMSkfgtQgAtWrQYMmRIXl5ebVZc+/rrr9VYmjoUFRW98cYbWVlZ/v7+ilPw8ePH7777LoBVq1YxBUkzvVppQmFxcXFSUpIoxRDViqAZTp48CaBhw4bZ2dnVnZObmytvL546dUqdtana/PnzATRu3Pjp06cKTisqKpKPoBkzZozaaiOqq4cPH0okkgrvM5cvXxa7LqJqaUSLEEDfvn179OiRmpq6bdu26s4xNzfXvRXX5PMljIyMduzYYaNwlPkXX3xx7tw5Dw+P9evXq608orry8fHp2rVrhYO8TUiaTFOCEP+uuLZkyRIFw2HkK64dPnxYN1Zc43wJ0kmVe0c5cJQ0mQYF4ejRo318fCIiIhSsuObo6Dhp0iRBEBQPMdUWnC9BOun11183MHjmvYVBSJpMg4LQ0NBwzpw5ABYvXqzgNPmKa1u3btX2Fdc2bNjA+RKkk9zc3Crsm8auUdJkGhSEAN58800HB4eLFy+ePXu2unOaNWs2YsSIgoKC7777Tp21KdfDhw/lXcGrV69u1KiRgjO3bdu2c+dOS0vLnTt3mpiYqKk+oudToXeULULSZJoVhLVccW3BggUAVq9enZ2drabKlKp0voSfnx/nS5BOGjduXPn1chmEpMk0KwgBzJ4929zc/Oeff1YwHOaFF17o1auX9q649sknn1y4cKFx48arVq1ScJpUKp04cWJ
mZuaYMWMmT56sruqIlMDR0fHll18u/ZJdozXLzMQHH6BxY5iawtUVU6Y8s33VggWQSHD+fNmR/HxIJBg6VP2V6h6NC8LSFddWrlyp4DT5imvffPONVCpVV2nKwfkSpCfK946mpaXl5+eLWIymKyzEwIFYuhRdu+J//8Pw4di+HT17gi1p9RB7ImMV7t27Z2BgYGZmFh8fX905xcXFLVu2BLBnzx511vac0tPTvby8AHz++eeKzwwLCzM0NDQwMPjzzz/VUxuRcqWnp5uampa+1URERIhdkSap8OYWGioAwvz5ZUe2bBEAYerUki+DggRAOHeu7IS8PAEQhgxRfa26T+NahACaN28+dOjQ/Pz8tWvXVneOgYGBfFPfJUuWqLG058X5EqQ/GjRoMGjQoNIv2TtaQhAwfz7c3GBkhOPHSw7u3AkDA3zwQdlpfn7w8MDu3dDRbQY0iiYGIf7t+fz2228VbEA4ZcoUZ2fnS5cunTp1So2l1d+GDRt2797N+RKkP8r3jnK8DACkp+O//8WSJRAEFBcjMBBFRRAE3LiBxo3h4FB2pkSCbt2QnY2IiLKD8fGIjCz5iIpSf/m6SkODsE+fPvINCLdu3VrdOWZmZrUZYqohOF+C9NDw4cOtrKzkn7NFiMuX0bkz/vij7MidO1i1CtnZKCh4JgXl5FvxpKSUHRk9Go0bl3y0bKn6ivWFhgYh/l1xbenSpQpWXJs9e7alpeWRI0fu3LmjxtLqjPMlSD9ZWFgMGzZM/nm8Pu9TLwhYsQK+voiMrPjQwoWQrw1SeUc8+ZHya/SEhODAgZKPPXtUVq7e0dwgHDVqVNOmTSMiIn7++efqzmnYsKG/v7+g8Suuffrpp5wvQfqptHdUf1uEmZl49VXMmYPCwqofXbwYFhZITKz4UHIyADg7lx3p2xcjR5Z8/PsbBimB2KN1FJHHRrdu3RSc8+jRI0NDQ1NT07i4OLUVVicnT540NDQ0MjI6e/as4jM/++wzAB4eHqmpqeqpjUjVCgoK5MvE9+/fX+xaxHDpktCkiQAo+jAwENq3FwAhNrbsQplMcHcXHBxKvuSoUVXS3BYhgClTpjg4OFy6dOnMmTPVndOkSRNNXnEtIyNDvr/Ep59+yv0lSA+ZmJiMHDkSetgiFASEhsLX95nRLlWSyZCRAQDll1nevRuxsfDzU12BVEbsJK7Bp59+CmDkyJEKzrlw4QIAOzu7rKwstRVWS/J+IV9fX6lUquC0jIwM+Qiajz76SG21EanHr7/+CsDExOSPP/6QyWRil6MWT58K48bV0BCs8OHjIwDCmDHCihXCu+8KJiaCj4+QllbyhGwRqpKmB2FiYqK5ublEIrlz546C03x9fQGsXLlSbYXVxvfffw/A1tb28ePHis+Uj6Dp0qVLQUGBWkojUp+pU6caGBjIN2Zq1qxZaGhodna22EWp0uXLNXeHVv5wdBQCA4VGjQRjY8HVVXj7bSEhoew5GYSqpOlBKAjCtGnTAEyfPl3BOQcOHADQqFGjoqIitRWm2IMHD6ytrQHs2LFD8ZnyKSKWlpb37t1TT21EarNo0SL8O9nJ3d1d3hHVsGHDBQsWxMTEiF2dCqxbJ5iY1DkF5R8ffyx29XpKC4JQvuKaqamp4hXXmjdv/uKLLyo4R52Kioq6d+8OwM/PT/GZERER8hVHN23apJbSiNRn+/btEonE0NDwp59+EgRBKpUeOnRI3n8DwMDAYOjQoceOHRO7TCV5+lQYP76eESj/WLVK7O9BT2lBEAqCMGLECACfffaZgnPS09PVVU7NgoKCADRu3Pjp06cKTisqKpKPoBkzZozaaiNSjxMnTshXhKh8z+Ly5ct+fn7GxsbyROzcufO6devy8vJEqVM5Ll8uuclXv49GjYRvvhFycsT+NvSUdgTh6dOnAdjb22vFrYXS+RJnzpxRfCbnS5Cuunz5snxNGQXjv+Lj44ODgx3+XVHFxcUlODg4OTlZnXUqx/N0h3bpImz
ZImjMPR39pB1BKAiCvOW0SuO7Dkr3l1i0aJHiM7m/BOmqiIgIFxcXAK+//nqNw0Tz8/O3bNnSrl07eRyampr6+fnduHFDPaU+r3p3hxoYCEOHCjrTLazltCYI9+3bJ+9sVDwPQXQhISGcL0H6LCUlRb5F2osvvpifn1/7C0+fPj1u3LjS9eh9fX337Nmjyf/fY37/XerlVecItLISpk0T/vlH7PKpjNYEYXFxcbNmzQDs3btX7FoUkclkq1ev5nwJ0k+5ubnysTBt27at3237hw8fBgUFNWjQQB6HTZo0Wbx4cVrpdDqNsWLFimWGhnWLQFdXIThY4H0QzaM1QSgIgnztGMUrrmkFzpcgnVRcXDx69GgA7u7u0dHRz/NUmZmZ69ata9GihTwOra2tp02bdvfuXWWV+jwyMjLk3+aU2kdgp07Cli1CYaHYtVPVtCkIc3JyHB0dAZw6dUrsWuqP8yVIV8n3yra1tVXWHb7i4uJjx44NHTpUIpHIp1sMGDDg0KFDIi5Pc+XKFR8fH3k8S4DzNd4IHDVK0Ob3Kz2hTUEoCIJ8o9rhw4eLXUg9cb4E6arFixfL11E7fvy40p/8+vXr06ZNMzc3lydQixYtQkNDc9Q+2WDdunWmpqbll6h8AShWcCNQM5qwVCMtC8KkpKTarLimsThfgnTSjz/+aGBgIJFItm/frrpXSUxMXLx4sYeHhzyEHB0dg4KCnrMPtpYyMjLGjBlT5XLNP1SIQHd3YfFiQfNuapICWhaEgiDId6V/++23xS6kzjhfgnTSX3/9JW8nLV++XA0vV1BQsGfPntK9XExMTMaNG1fjnN3ncfny5dLu0MqcgGh5BDZrJmzfzhuB2kj7gvD+/fs1rrimgThfgnRSeHi4fITnvHnz1PzS8uVpjIyM5IHUpUuXLVu2FCo7h1atWlWhO7QCAwODYcOG3fnjD+W+LqmTRBAEBX/HmmnUqFE///yzs7Nz06ZN3dzcXFxcXFxc3NzcnJ2d3d3dnZycnMvv6awZJk6cuHPnzi5dupw9e1a+7hSRtouNje3Zs2dMTMz48ePlvaPqryE+Pn7dunWrVq1KTU0F4OrqOm3atNmzZzds2PA5nzkrK2vatGm7du2q7gRTU9Px48cvWLCgdevWz/laJC6tDMJ79+4dPHhwwYIFCs6xs7NzdXV1c3Or8KednZ23t7d88Se12bZtm7+/v6Wl5dWrV5s3b67OlyZSkczMzD59+ty8ebNv376///67mZmZiMXk5+fv2bPn66+/vn37Nv6NqPnz57dt27Z+T3jt2rXx48c/fPiwykednZ1nzJjx7rvvlq4PR1pNK4MQQH5+fmRkZFJSUmxsbGJiYlxcXEJCQnx8fHx8fEJCgvx3QwUcHR2dnZ3lrUlXV1dXV9fS1qSHh4elpaUSS338+HHHjh0zMzM3bdo0efJkJT6zhsrMxBdfYN8+xMXB3h7//S8+/xyenmKXRcpUWFg4ePDgEydOtG7dOiwszM7OTuyKSoSFhYWEhBw9elT+zubr6xsYGDh69OjSBWtqY+vWrTNmzMjLy6v8UIcOHWbOnOnv7y9u8JNyaWsQKlZYWJiSkpKenh4fHx8XF1fhz5iYmKKiIgWXm5mZ2dnZVW5Nyv90dnau/X8qqVTat2/fc+fOjRkzRr5KnI4rLESfPrh4EWPHokcP3L+PjRvh7IyLF+HmJnZxpByCIPj7+2/fvt3Nze3s2bPe3t5iV1TRgwcPVq1a9cMPP+Tk5ABo2rTpu++++9Zbb9X4O25mZuZbb721d+/eCsclEsmgQYPmzZv38ssvq6poEo9uBmGN0tPT4+LiqkzK2NjYp0+fKr68un5XNzc3Dw+P8rcAg4ODP//8cw8Pjxs3btjb26v429IAK1ZgzhzMn4+QkJIjW7di0iRMnYoNG0StjJTmvffe++abb2xsbE6ePNmxY0exy6mWvBtm+fLlUVFRAGxsbCZPnjx37lz5sLXKquwOlfeyBgUFtWn
TRg01kyj0NAgVy8zMjIuLS0xMrNDvKv8zLS1N8eVOTk5OTk7u7u6Ghoa//fabRCI5ceJEv3791FO8yLp3x+XLSExE6b0TQYCXFzIykJGBunRPkWZas2bNzJkzjY2Njx49OnDgQLHLqZlMJjt69OjKlSuPHz8OwMDAYPDgwYGBgQMGDCh/WuXuUCcnpylTpgQEBLixM0PXMQjrrKCgIDU1Vd58rNymrNDv6uHh0aZNm99++03EgtVHEGBuDg8PVBhiMHo0DhzA/fto1kykykg5Dh06NHr0aJlMtnnzZn9/f7HLqZtr166tXbt269at+fn5ADp16jRjxgw/Pz+pVDp9+vQff/yx9MzmzZvPnDmz/Fo2pNsYhEomk8mSkpLkrcljx46FhoZ6eHhERESUbsaty7KyYGOD7t1x/vwzx6dPx/r1OHsW/06CJm10+vTpV155JT8/f/HixUFBQWKXU0/x8fGrV69et25dcnIyAEdHR0NDw4SEBAASieSVV16ZN2/ewIED5aubkr4QawKjPpDJZPIJRjt27BC7FrXIzBQA4YUXKh5/+20BEM6fF6MmUo47d+7Ib3JPnz5d7FqUQL48Tffu3QHY2tqamppOnTo1PDxc7LpIHGwRqtaGDRvefvvtDh06XLt2Tfd/xxQEWFnB0RGRkc8cHzUKBw/i8WNUM0iBNFx8fHzPnj2joqKGDh168ODBOk1F0HA+Pj4REREnTpx46aWXxK6FRCPCShB6xc/Pz9XV9caNG3/++afYtaieRIJOnRAVhbi4soOCgEuX4ODAFNRSWVlZQ4YMiYqKeuGFF3bt2qVLKQggKysLAEeE6jkGoWqZmprOmjULwNKlS8WuRS0mTgSAxYvLjuzejdhY+PmJVRE9j6KiorFjx167ds3Hx+fw4cPKXWtCdEVFRampqYaGhlwgRs+xa1Tl0tPTvby8srOzr1+/3qFDB7HLUbGiIvTti/PnMWYM+vbFgwdYvx6enrh0CRqz+AjVkiAIb7755ubNmx0dHc+cOdNM5wb9PnnyxNPT09XVNa58HwbpH7YIVc7Ozu7NN98EsHz5crFrUZkzZ3D2LHJzYWyMY8cwfz6uXMH77+OnnzBpEs6cYQpqo48//njz5s0WFhaHDh3SvRQEEB8fD8DV1VXsQkhkbBGqQ2RkZLNmzSQSyaNHjzx1ctXNjh1x4wauXEHnzlWf8OABpFI0bgyu0Kgl1q9fP336dENDw/379w8fPlzsclTi0KFDI0aMGDJkyJEjR8SuhcTEFqE6NGrUaMyYMUVFRatWrRK7FtWIjwcABb9Zf/ghWrfG4cNqq4iex9GjR2fNmiWRSNavX6+rKQi2COlfDEI1ef/99wGsXbu2xoVMtY9UipQUGBjA0bHac+RJ6eKitqKo3i5duvTqq69KpdKFCxfKe/V1lXwePYOQGIRq0rVr1/79+2dmZv7www9i16JsiYmQyeDkhH/3Cq9CjU1GXXH+/HlLS8vRo0eLXUg9PXr0aOjQoTk5OVOnTv3ss8/ELke15C1CF/5+pvcYhOojbxQuX75c8SZQ2qc2IZeQAOhFizA+Pj43N1cmk5UemTRpUvv27c9XWHZOI6WkpAwaNCgpKWnw4MFr164VuxyVY9coyTEI1Wfw4MFt2rR58uTJnj17xK5FqeQhp+DdJCMDeXmwtoaVldqKEkvlRsbt27dv3bplYFDyf+3+/fu9e/eeO3euOPVVLzc3d9iwYQ8ePOjSpcvu3buNFLTvdQWDkOQYhOojkUjmzZsH4Ouvv9apwbo13v/Tm35RVPXeWuHI48ePz5w5Ex4eXnrCV199NXny5Bs3bqi30mcUFxdPmDDh/PnzjRs3Pnr0qJUe/MoCBiH9i0GoVhMnTnR1db158+aJEyfErkV5asw5PQ5CmUyWnJwskUicnZ3lRyoP0Pjtt9+2bNlSus9lfHz87Nmzv/vuO3WWHRAQ8PPPPzds2PDXX38tLVW3CYKQlJQEQE++X1KAQahWpqa
ms2fPho6tuFZj12iNJ+iKgoIC+Yy00pxLSUkpKiqyt7c3MTGRH6ncd1ohOx89erRq1aodO3aUnrBv374VK1Y8evRIRWUvWrRo9erV5ubmhw4datGihYpeRdOkpqYWFhba2dmZcW6r3mMQqtuMGTOsrKx+//3369evi12LktSya1QPRsp8+OGHiYmJAJycnORHauwprXyk8gkbN26cM2fOP//8I/9SPvZYWZ0KO3bsWLRokaGh4Y4dO3r16qWU59QK7BelUgxCdbOzs5s6dSp0acU1do0CAOT7MMs/d3d3l39SY+zl5ORkZ2ebm5vb2toqvqS0Efno0aO33npLfr9Z7sqVK6dOnUpNTa1rzb/++uvkyZMFQVi+fPmoUaPqerlWYxBSKQahCObOnWtkZPTjjz9GR0eLXYsyMAiBlJSUSZMmCYJgYGBQ/o5gPdp/lW8i1njJV1991a9fv9I2YmFh4eXLl+WnKXDlypXx48dLpdKPPvpI3mOvVxiEVIpBKAJvb++xY8fqyIprgoDERABQMOJAD+4RTp06NT4+vnv37jKZzMHBwdjYWH5cnmrl7whWyLnqbhmWHpFKpSkpKQYGBrXvbn38+HG3bt369u1bekJqamqFJY0eP348dOjQ7Ozs119//csvv3z+n4DWYRBSKQahOOST69evX5+bmSl2Lc8nLQ0FBWjQABYW1Z6j6/cI16xZc+jQoQYNGgQHB6OmO4IVorHGVEtKSiouLnZ0dCyd2FdjI7LyCfPnz2/QoMHGjRtLj2zYsCEhIeHll1/evHmzRCJ53h+BFqr8OwrpL4FE8sWUKZdbtRKWLBG7kOcTHi4AQqtWis5p0EAAhJQUddWkVnfu3LGwsACwa9euvLy8mzdvXr58ufTRlJSUy5cvx8bGlh6Jjo4+e/Zs6Zf5+fn3799/+PBh6ZGMjIw7d+5kZWWVHklISLh3717pl0VFRbGxsYmJieWfJCoqqrCwsPRIdnZ2QkJC+TrT09Ozs7NLv3zrrbcArFq1qt7fuLYbP348gJ07d4pdCImP2zCJ55dfMGQI3N0REYF/x9Zrn+PHMXAgXnwRf/5Z9Qn5+TA3h4kJ8vOhcy2PgoKCHj16XL9+/c0339SuVWR37NjxxhtvODk53bt3r0GDBmKXI4K+ffuePn36r7/+6t+/v9i1kMjYNSqeQYPQoQNiY7F7t9ilPIdajpRxcdG9FATw4YcfXr9+3cfHp3S8qLaYMGFCv379kpKSPv/8c7FrEQfvEVIpBqF4JBIEBgLAkiXQ3na5Hg8Zlc+XMDIy2rFjh7W1tdjl1I1EIgkNDTU0NPz2229v3boldjki4B5MVIpBKKo33oCHB27dwrFjYpdSXzVuK6GjQVg6X+Lzzz/v3r272OXUR8eOHadNmyaVSufMmSN2LeqWnZ0tn75pY2Mjdi0kPgahqIyNMWsWAGjvims15pyOzp1466234uPj+/TpM3/+fLFrqb8vv/zSwcHhzz//3L9/v9i1qBX7Rak8BqHYZs6ErS2OHcO1a2KXUi+9TDCrD9p4VHuCLs6dWLNmzc8//9ygQYNt27YZGhqKXU792dvbL1q0CMCcOXNyc3PFLkd9GIRUHoNQbDY2ePNNAPjmG7FLqRfJeTiehodTtSc0leGtfujqo8aaVOvu3bvyaaBr1qzx9vYWu5znNX369I4dO8bExCxZskTsWtSHQUjlMQg1wLx5MDbG7t3QxhXXshMAwKr6NxTj6/A4iaa2aqtIpQoKCiZMmJCbm/vmm2++9tprYpejBIaGhqtWrZJIJCEhIZGRkWKXoyYMQiqPQagBPDwwbhyKirBypdil1FFRLgoyYWQGswbVnpMdDwDWOvKO89FHH2npfAkFfH19X3vttby8PHlLVx9wyCiVxyDUDPPnQyLB+vXIyBC7lLqQh5yVwvt/NTYZtUfpfInt27dr3XwJxZYsWWJlZfXTTz/9/vvvYteiDpWXeCV9xiDUDB064KWXkJWF778Xu5S
6yJIHYfUhJxQjJxkSA1g6qq0oFZHPl5DJZJ9//nmPHj3ELkfJ3N3dP/roIwDz5s0rKioSuxyVY9colccg1BjyXqkVK1BYKHYptVZjt2dOEoRiWDjAwFhtRamIbsyXUOC9995r3rz5nTt3vvvuO7FrUTkGIZXHINQY//1vyYpru3aJXUqt1dgirPEELZG7YYM0Ls7Ozm7Hjh1aPV9CARMTk2XLlgEIDg6ucS9DbccgpPIYhJpk7lxAq1Zck9//U9Ai1I2RMnfvWgQGHr58+eLmzZ6enmJXo0JDhw4dMmRIZmbmJ598InYtKlRYWJiWlmZkZOTg4CB2LaQRGISaZMIEeHoiPBx//CF2KbVT42AZHWgRFhRg4kTk5kqmTGk6fLjY1ajcihUrTE1NN2/efOHCBbFrURX5DlbOzs4GBnwDJIBBqFmMjfHuu4D2rLhWY87V2GTUfB99hGvX4OMDHZovoYCPj8+cOXNkMtmsWbNkMpnY5agEh4xSBQxCDTNjBmxtcfw4rl4Vu5RaqLHnszbzKzTZsWMIDYWREbZvh27Nl1Dgk08+cXd3v3LlypYtW8SuRSV4g5AqYBBqGBsbTJ0KaMmKa7o9WCYlBZMmQSbD559D5+ZLKGBlZRUSEgJgwYIFGdo1sbV2GIRUAYNQ88ydW7LiWlSU2KUoJJMiLxUSQ0VzBLV6sMxbbyE+Hn36QEfnSyig29v2MgipAgah5vHwwPjxkEo1fcW1nEQIMlg6QVL9dALtbRGuWYOff0aDBti2DTo6X0IB+ba9r7dq9dnff0Pntu2Vr6/Ge4RUikGokeST63ftgiav8ZFVi/t/OYk1n6OB7t4t+StYswbav79E/XTs2HFn//4Nrl3DvHli16JkbBFSBQxCjdSxI7ZvR3g4jDV4QZYauz3z0yHNh6ktjC3UVpQSFBRgwgTk5uLNN6ET+0vU35dfwsEBx49Dt7btZRBSBUZiF0DVmDhR7ApqUtuRMtrWHPzoI1y/rj/zJRSxt8eiRZg1C3Pn4r//hYVW/UJTPa0Owg0bNjx8+BBAYWFhTk5O5ROkUmlWVlbl4zKZ7OnTp56e42Ni3q7wUGYmioureK28POTnVzx46hTc3etVugZjEFJ91XLuhHaNlCmdL7Fjh/7Ml1Bk+nR8/z2uX8eSJQgOFrua+oqLw+3biIpCSoosPz8pIUEikTgfP47mzdG2LRo0ELu+Oti7d+8fz7Hgxgsv9Lt48bkK0OTbNfXGINRgmZn44gvs24e4ONjb47//xeefQ3OW+KpxfyWtGylTOl/iyy/RvbvY1WgGQ0OsWoU+fRASgkmT0KiR2AXV0ZMnOHas/JbXKbm5RcXFDS0sTGNiEBODP/9EmzYYMAC22rF39NSpU1966SUAJiYmlpaWlU8wMjKqco8wAwMDW1tbIyMXqbTiQzY2VQ8IMzeHmVnFg7rXHASDUHMVFmLgQFy8iLFj0aMH7t/Hxo04dgwXL8LNTeziAAAFmQBg5VztCVrXItTj+RKK+Pritdfw4494/33s2yd2NbUmCPjrL4SFVVi5NyE7G4CrlVXZaeHhuH8fQ4eiXTv1l1lX48ePF7sEHcTBMppqzRpcvIj587F3L957D+vW4YcfEBuLzz4Tu7J/jd6Bj/PQfFi1J5Q0GbXkHqF+z5eowZIlsLLCTz9BW7btlcnw0084fbry+vXxWVkAXEqDUK6wEPv34+xZtRVIGoVBqKl27oSBAT74oOyInx88PLB7N4qL8fSpeJWVY2QGQ5NqHx24BO/FofNbaiyovjhfQjF3d3z0EQDMm6cd94h++w23b1f5SLy8RVjlDeBjx3DzpkrrIs3ErlGNJAi4cQONG6P8NjESCbp1w4EDePQI7dpBIoG9Pdzc4OoKO7uST0r/9PKCkdh/uRID7bhByPkStfHee9i8GXfu4LvvMGeO2NUodO8eLl2q7kF5i9C1Qouw1JEj8PSEnZ2KSqusuLg4ISEhOjo6Li7uyZMnT548iYuLi4m
JycnJ2bRpU/v27dVWiT4T+72SqpSdjYICVN4szdERAKKiYGaGzEzEx6O6DVQNDODsDGdnuLnB2Rnu7iWfu7jA1RUuLjA3V06pxxfgTMi/X0hgag3n9ugyDe39lPP8avDhh5wvUTMTEyxbhmHDEByMV1+Fxs49KC7Gb78peDxBQYsQQFERjh2Dsu/D5efnx8bGxsXFVQ68hISE4irnLgCzZ88+efKkcitRigULEBKCl1/G8eNlB5s2RcuWOHJEvLKeA4NQg1Xenld+xMYGT58iN7ckCBMSSv6Mi0NiImJjkZiIpKSSR69fr/rJPxgA71hYucDaDVbOsHaHlTOs3WDpDGs3mDWoW6l9P4GdD2RSZMUh/Ecc8EfKPbz0ZV2/YxFwvkTtDR2KIUNw9Cg++QQ//CB2NdW4fRsKFwqXd41WvEdY3t27SE1Fw4Z1feWsrKyYmJjSwIuNjY2NjY2JiYmLi0tOTq7rswGoLiA1xIkTuHQJ3bqJXYcyMAg1kpUVLCyQmFjxuPy/k7MzAFhYwMcHPj7VPkl6OuLiEB9f8c/0dERHwyAVKXeRcrfqaw1NYW4PazdYu8LaDVauMLcr+cTaDbZeMHj2X06zIfD4d38G3w+wthPOfI3eQTDR+Gj54w8IAj7/nPMlamXFChw/js2bMW2ahv7EbtxQ/HgNXaNyN2/ixRerfCQ9PT0uLi4+Pl7+Z0REROkn6enp9S26akLlX4U1hq0tHB3x1Vc6sugQg1AjSSTo1AlnziAurmyyhCDg0iU4ONR2LpedHezs0KZN1Y/mpCA3EVlxyE5AVjyy45GdiKxY5CQh8wkKs5Edj+x4VNnzKjGEpRNaj8WgqpYFNzKHV2+k3kPmEzi0qlWpIlqyBAMHYsAAsevQEj4+mDMHISGYNQsXL0LTdngvLi4/ZbBKigbL/Cs9PDzCxqZy4MXExFS5aIuKaHKLMCcHX3+NGTNw5w5atxa7mufGINRUEyfizBksXly2B8Xu3YiNxdy5ynl+SwdYOsCxmpiU5iMvDdnxyIpDVjyy4p75PCcJ2fGQVlp8qYSAhOswMEZ2Ak7/D5ZOZf2uVi6wcq1zv6uqvfKK2BVolU8+wfbtuHIFW7ZgyhSxq3lWaioqTxd/lvweYUMLi7isrPisrIj09LisrPjs7NIvo58+lcpkaim3BqmpqXv37q18PC8vL7/y0mdAUVFRdnZ2lU+VkRFUZfPy6VNU+b1mZVX9g8zJwbJlACCV4o03sHAhQkKgA/s3SzS59a3XiorQty/On8eYMejbFw8eYP16eHri0iV1DmmrmqwIOUkAYO1eMlhmwlG4doZQjIxIXFiJ23vQPQDO7XGoqrkTde13VQoNX6ZHu+zYgTfegJMT7t3TrPXJIiKwbVuVj0Q/fXouJuZUVNTqS5ckgL696xkaCspqXp47h4MHERKCoiKEhuLDD/HwIby9tXuwDINQg2Vn44svsGcPYmPh4IChQ/HFFyU3CDXHM6NGAQCGpnhhFgYsxtMYRIchOwHZ8chOQFZcSe9rYdW/tJaQ97tau8LKFVYuuO6Bho5lo16dnatY9KlGhYXo06fiMj3Ozhq0TI92EQS8+CJOnsTcufjmG7GrKefhQ+zYIf+0qLj4ZmJiWHT0lfj4sOjox//ewDOQSGRa8qbXoEGDgQMHVj5ubm5uVtX/guoWVwNgb/+VIEgqH7e1rbp729q66ulXlpbw9cVXX5UEYV4evL0xcSK+/Va7g5BdoxrMygohIQgJqflM0f1nORxaQiKBqQ2c28PYEgDsmsCuSRUn16bfVb48m2CJRZXW1zczqzhpsvxMSldXSCr9hy9dpqf0h+nri0mT8Nln2LBBmT8HPSGRIDQUXbvi228xZYrmrEwWmZJy7tat80+enH/y5FpCQlG5RlBDC4vu7u49PDz+7/TpAql019ixTwsKErKzk3Jy4rOyEnNyknJy4rKycgoLRay/Am9v7z179ohdhSLW1pg1C8uW4dNPq/hvp0U
YhKQMHj3KRo3WyMgM1m6wdoNrlyoelfe7ZsYiJxHpKZBEl8wDkc8MSUpCfj4iIhARUfWTm5uXTZd0dYWzM8aNq3qZno8/xu7dWLeOC6rVR8eOePttrF2LuXOfmU2mXkVFRTdv3gwLC7ty5cqpU6eioqJKHzI0MGjt6NjFza23l5evp2crR0cDiQTA9ps376emtnd2biWflfusfKk0LS8v3tAwrnfv9PR0+WCZ0k/i4uIyFM7NUC6ZZtyqVCwwEN98UzIFSXtpc+2kkwyMYe0O63+XuK88RD8vr4o5IaV/pqfj0SM8elR2fvv2ipbpiYhAs2aq/Y501f/9H/btw4kT2L8fo0er7WXj4uLOnDkjD7/Lly8XFBSUPmRra9vN1dXXxaWLq2tvLy+7qlaN8LS1vZ+aGpOZWWUQmhkZuVlbu3Xu3GVY1Yvo5uXlleZihZhMT09/8uRJZmamsr5TrQhCBwdMnYrVq7X7hjuDkLSNuTmaNEGTqjpdAeTk4MkTJCUhLq5kqYFGjRQt05OSwiCsp9Jte3/6SbVBmJODS5fWXr3668mT58+fT0pKKn3EyMioU6dOPXv27NGjR48ePZo1a4ZTp/DXXwqezNPGBkCM4tV6q5t0BJibm5ubm7u5uXXpUlV/BpCRkREfH5+cnBwXF5eUlJSUlCT/JCEhISEhISkpqajWi7Vq8vSJ8t5/H2vXIjxci5fpZRCSbrG0RIsWaNGi7Ih84ld1y/Ro2kw47TJ9OtzdMXy48p85Lg5nziAsDFeu4NIlFBYe79Hj0PnzkDf7unXz9fXt0qVLnz59GlQYttqlC06fVjCJwtPWFkCMgnaboyMaN6534Q0aNGjQoEGrVtXOoE1JSUlMTJQHZHJycmlAyvMyOTlZ+m/xVY6U0UBeXpgwQbsnUXDUKOk6QYCVFRwdERn5zPFRo3DwIB4/1r7NZnVSVhYuXsS5czh/HhcuICWl7CFjY3To8Ofw4fFNmvTs2bNJdZ0Bpf7+G9Uv0fn9lSvTDh9+s1OnH0aMqPqMiRPRtGk9vgOlEARBHofp6em9e/eWaPUQFO3BFiHpOqUs00OK1W+aZkRESZvvyhVcvPjMBk8uLujaFV26oHdv9OoFC4uXal9Mnz64f7+69ehraBF27ixiCgKQSCTOzs7OmjZLStcxCEkPqHqZHj1XWIiBAytO0zx2rIppmllZuHGjpM/z/Plnmn1GRmjdGr17w9cXXboouEtXM0NDvPYaNm2qcvVtRfcImzTB4MH1f13SWgxCUoZz3yDtIYasFruOarz1FrZuxbffIi6ubJkeHx98+qnYlekExdM0S5t9Z87g2rVnVvRydUWXLiXNPl9fpW0NBsDGBlOm4McfkZBQ4RGv6lqEbdpg5EjNn0vz5MmTxErL8RcWFubkVJpxCwiCUGG+h6GhTXHxfwBkZqLKsTjZ2VVvvZyXB/mybpMmofoboNqK9whJGX6bA4kB/qNJi4xUoBXL9Gip7t1x+TISE8uG5goCvLyQkYHNmzF2bNmZpqbo0gXdu6NnT/TsCQ8P1RYmleLkSZw7V+Et3/arrzILCtKCgkrmV1hYYMAAdOqk2mKU5IMPPli6dGm9L3d37xUbe+Z5Cjh4ENXdXdVebBGSMgwMQbEGLclRBS1apke7CIKiaZqurvDyQq9e6NED3bujc2eYmKivNiMjvPwyXngBly7h9m2kpckPe9ra3k5KisnKsvPxQfv26NRJrVU9H09Pz8ozN0xMTCwtLSufLJFIKoyqtbT0kjcdFSyiVuUPw9y8ZHFDHdhrojIGISmDoSkMTcUugsSQna1omqZEgnKrvYjD2hovvYSXXkJWFlJTkZvrefz47aSkmAED2o8aJXJtdRcQEBAQECB2FbqGk6joud3YijXtcWS62HWQeLRimqa1NRo1QuvWnq1aAYipvPE16Su2COm5pd5H0i1Y8X6bXrKygoUFKodKcjIAzbwL6+npCSAmJkbsQkhTMAjpufV6D23GAZz
5q5e0cJomg5Aq0KSOC9JSZnZw7gDn9mLXQSKZOBEAFi8uOyKfpunnJ1ZFijEIqQJOn6Dnc/8wziyBjTus3fDKMrGrITEUFaFvX5w/jzFjyqZpenri0iXY2YldXBXu37/fokULHx+fhw8fil0LaQQGIT2fMyE4vgAArFzwXtWLWpHu06ppmnl5eZaWliYmJnl5eVzMk8AgpOeVHY+Ue8iKhawYHfzFroaoVhwcHFJTUxMTE52cnMSuhcTHwTL0fKxcYeUqdhFEdePp6ZmamhoTE8MgJHCwDD2X6NPYNQJHZ+Lk57j6PQqzxC6IqFY4XobKY4uQnkPiLdw7VPZly1EwsRavGqLaYhBSeQxCeg7Nh8DaFZmxyElCVizMG4pdEFGtMAipPAYhPQdbb9h6i10EUZ0xCKk8BiHVV8J1XFgBa3dYOcPaDVYucGoHUxuxyyKqGYOQymMQUn0l3cL1zc8cmfQXGvUXpRaiOvHy8gKDkP7FeYRUX2kPEXWy7AZhdiJGbYV9U7HLIqpZUVGRmZmZgYFBfn6+ocbvSk+qxiAkIn3k5uYWHx//5MkTd3d3sWshkbFrlOrj0SN89x3c3eHsDDc3uLjA1VUz15Ukqpqnp2d8fHxMTAyDkBiEVB+3bmH58ooHV67E7NliVENUd56enhcvXoyJienRo4fYtZDIGIRUH61aYckSxMYiKQmxsUhMRGwsXFzELouo1jhwlEoxCKk+WrRAixZiF0H0HBiEVIpBSHUWH48NG565QejkBI68I+3CIKRSDEKqs7t38dlnzxwxNISTE955B59+KlJNRHXEIKRSDEKqM3d3fPQR4uKQmIgnT5CUhKQkxMdDKhW7MqJaYxBSKc4jJCUoKkJSEkxM4OgodilEtVNcXGxubl5cXJyXl2diYiJ2OSQmBiHVTUYGtm+HhwecnEpuE5qZiV0TUb14e3tHR0c/fvy4UaNGYtdCYmLXKNXNgwcVJwva2cHVFS4u6Nev4r1DIk3m6ekZHR0dExPDINRzDEKqGxsbzJhRcoNQPo8wPR3p6bhzBw0aiF0cUV3wNiHJMQipblq0wJo1zxxJTkZiIuLiGISkZRiEJMcgpDrIy8P+/SU3CN3cYGsLAI6OcHRE27ZiF0dURwxCkmMQUh1ERuKNN8q+NDeHq2vJDUI3N3h74733xCuOqI4YhCTHIKQ6MDbGq6+W3SDMyUFEBCIiSh5t2pRBSNpDEDytrADE/PMPzpyBhQXs7eHuDiO+K+od/pVTHTRtil27yr7MySlZcTsuDgkJMDUVrzKi2svOxoULuHnTMz4eQMyTJzh+vOQhIyM0b44XXoC3t5gVknoxCKm2pFL88UfJ3EEnJxgYwNISzZujeXOxKyOqJUHAuXP4+28UFQFwtLAwMzJKyc3NLSqyMDYGAKkUd+7gzh00b46hQ2FtLXLBpBYMQqqthAQMGVLyuZERnJyeuUHo5AQPDwwaBK7RQRqqsBB79+Lhw9IDEsDdxuZRWlpsZmazhg2fOfn+faxdi9dfh4eHuusktWMQUm1JpfjPf0rmDiYlIS4OcXEVz8nLE6MyohpJpdixA9HRzxyUSDxtbB6lpcVUDkIAubnYtg1+fsxCnccgpNpq1Ai//VbyeWFh2Za88fGIj0dCAjIyuNwaaaqjRyumIADA09YWQMzTp1VfVViI3bsxYwYsLVVaHYmLQUj1YWICDw/+okxa4uFDXL9e5SOeNjYAYjIzq702Oxu//oqxY1VTGWkEA7ELIA2yYAEkEpw/X3YkPx8SCYYOfeaEAQOeuapp07ITiDSOIJQNCq2khhah3O3biI9Xel2kORiEVGcnTuDSJbGLIKqlmBgkJlb3YM0tQjn+i9dpDEKqG1tbNG2Kr74Suw6iWrp7V8GDtWoRAvjnH3DHOt3FIKS6ycnBBx/g4EHcuSN2KUS1UdUYmVK1bRHm5SElRYlFkUZhEFJF8fGIjCz5iIqq+KhUijfegIsLQkLEKI6ortLSFDxoZ25
uZWLyND8/s6DgeZ6HtBqDkCoaPRqNG5d8tGxZxQkmJpg3Dzt3VhGTRBpHYcLdTU42NTJysrQctnPnneTkej8PaTUGIVUUEoIDB0o+9uyp+pzp02FtjaVL1VsZUT0YGlb3yPknT3pv3Jiam5uen38qKqrT2rXv/f57enWrQlT/PKTtGIRUUd++GDmy5GPYsKrPsbbGrFn44QckJUEiKTn4zz8KRufpkdjYWLFLoHKqWS/0RETEK9u2peXlDWvRIiIgIKB792JB+ObcuaYrV4aEhRUWF9fyeUgHMAipngIDIZEgNLRk15rcXIwejXbtcPiw2JWJ5/bt2wMHDvTw8PD19ZXJZGKXQwAAV9fKx3beujVox46sggL/Dh32v/qqh63tikGDbr7zzqBmzdLy8hYcP95+zZq9t2+XXSCRwNlZfTWTejEIqZ4cHDB1KlavLgnCnBy4uyM5GSNGICAA+fli16de6enpAQEBHTt2PH78OICzZ88e1uffCDSKj0+FA6suXvTbv7+ouDige/fNI0caGZS8DbZ2dPxl4sRj/v6tHR3vpaSM37t3wNatN+W9HJ6e3GZMhzEIqf7efx+5uQgPBwBHR/zxB0JDYWKCb79F587VrWmla4qLi9euXdu8efNvv/1WKpWWHl/KO6gaok2b8luihISFzf7lF0EQQgYOXDFokKS0Z/9fA5o0uT5jxrphwxwsLE5ERHRau9b/wIFET0/1Fk1qxSCk+vPywoQJZV9KJAgMxOXLaNcOd++iZ0+EhEC3OwhPnjzZpUuXd955J6XSJLOwsLBz586JUhU9w9QU3boBKJbJZhw5suD4cUMDg++HD5/v61vdFcaGhtO6dLk3e3ZQ795GBgbbbtxoOmrUwoUL8/Wto6Mc3V5eUSJwuQRStrw8LFiAb7+FIGDAAGzZAjc3sWtSttjY2A8//HD79u0K/geNGTNm37596qyKqlZYWPjtt36bNu25fdvU0HDn2LGjW7Wq5aX3UlI+ffhw72+/AfDy8vriiy/8/PwqtyN13oIFJVOHL16U/14BAE2bomVLHDkiYl3KwRYhKZ+5OVaswK+/wsUFx4+jY0edGkGTl5cXEhLSsmXLbdu2Kf498sCBAw/LbQNLYskuLBx2+PCe27cbmJkdnzSp9ikIoMWwYXt+/fX48ePt27ePjo6eNGlSr1699LOtr8PLKzIISVX+8x9cv45Bg0pG0EyfjtxcsWt6bocPH27duvWCBQuys7NrPFkmky1fvlwNVZECqampAwYM+OPkSRdHx7/fequ3l1cdLu7UCQMHAnj55ZevXbu2ZcsWZ2fn8+fP+/r6jh8/Plrh4m26R4eXV2QQkgo5O+Po0ZIRNOvXo2tXLR5Bc/369X79+g0fPjwyMrL2V23evLny7UNSm8jIyF69el24cKFJkyanz57t8NFHsLev1ZWGhhg4EMOGlc6TNTAw8Pf3f/jwYXBwsKmp6d69e1u1alXLX4l0gw4vr8ggJNWqMIKmRw/tG0GTlpYWGBjYtWvXU6dO1fXa3Nzc1atXq6IqqtHt27f79Olz//79Ll26nDt3rmnTpnB1xYwZePFFmJtXe5lEglatMGMGevVCpXuBVlZWCxcuvHfvnp+fX2kn+fr163V12mhmJnbsKFtvXFeXV+RgGVITbRxBI5VKV69evXDhwvT09Ho/iZOTU2RkpLmCd15SgQsXLgwZMiQ1NbV///4///yzjY3NMw9LpXjwAI8eITEROTkAYGQEBwd4e6NlS9ja1uYlTp06NXfu3KtXrzZo0LhLl3uff27cq5cKvhMx5ObixAns3Yv9+5GTg7Vr8fgxQkJQVIS8PHh7Y+JEfPut7gyWgUCkRr/9Jri4CIDg6CgcOiR2NQodO3asTZs2Svlf9ueff4r93eiXw4cPW1hYABgxYkRubm6N50ul0vq9UHFx8aZNm4YOvQYIEonw6qtCZGT9nkkjZGYKO3YII0cKZmYCIACCgYHQr59w6JAQFCQAQlG
RIAjCJ58I5uZCYqLQtKkwZIjYRSsDg5DULSFBGDy45L+Zn5+QkyN2QZU8evRo5MiRSolAAO3bty+Sv3+QWmzbts3Y2BjA5MmTa/OTz8rKeuWVV5YtW1bvV8zJERYvFqysBEAwMRECAoSnT+v9ZCLIzRUOHRL8/ARLy7L88/UVQkOF2NiSc8oHYXKyYGEhfPih0LJlSRDm5Qnp6WKVrwQMQhKBTCaEhgqmpgIgtGolXLsmdkH/ysnJCQ4ONjMze/788/DwCAgIOH36tEwmE/vb0iMrVqwwMDAAEBQUVJuffGJiYpcuXQC4u7tnZmY+z0s/eSJMmyYYGAiA4OAghIYK9W1nqklt8q9U+SAUBGH2bMHWVmjbVhgyRCguFsaOFVq3FqKi1PwdKA2DkERz9arQqpUACGZmwqpVBeIGhkwm27Nnj1edxtZXxcHBYdq0acw/9ZPJZMHBwQAkEsnSpUtrc0lkZGSLFi0ANG7c+MGDB0op49IloU+fklxp1Ur45RelPKsy1Sn/SlUIwqgowdhYAIQhQ4TkZKFNGwEQ3N2FGzfU800oGYOQxJSXJwQECBKJ0LNn6IABA2IV/EdUpStXrvhWv+BWbdjb2/v5+R06dIi9oKKQSqVvv/02ACMjo40bN9bmktu3b3t4eABo27at0v/hHTokNGlSEjMDBgjh4cp9+voozT95F24t869UhSAUBGHSpJIgFAQhPV3o318ABCsr4ddfVfUtqA6DkMT3yy/xDg4OAJycnI4cOaLOl05JSQkICDCs756rdnZ28vwrLCxUZ9lUXn5+/tixYwFYWFj8Ursm2IULF+T/5Pr165eRkaGKqgoKhNBQwcZGAARjY2HaNCEpSRWvU4PnzL/aKygQJkwouUu6bZsyn1kNGISkERISEgYPHixPFz8/vxzVD6EpLCwMDQ21rd1A+QpsbW3l+VdQUKDqOkmxrKysAQMGyH8pCQsLq80lx44ds7KyAjB8+PDajCl9HsnJQkCAYGgoAIK9vbB4saCefzIK8u/JE1W9qEwmBAcL8gG0wcGqehVVYBCSppDJZKGhoaampgBatWp1TZVDaOo3NcLc3Hzo0KFbtmxRQ05TbSQkJHTs2BGAq6vrjdrdntq+fbt8TOmkSZPU1o99544waFBJIDVvLuzZo6oXEiX/KlixomTE0NSpgrbcKGAQkmYJDw9v164dAFNT08WLFxcXFyv3+e/fvz+0jjvHmJmZyfMvOztbucXQ84iIiGjWrBmAli1bRtVuwOLKlSvrNKZUuY4dKxlUAggvv6zMcSWV8w8QWrcWFi9WX/6Vt3+/YG4uAMKIEZo4P6oyBiFpnLy8vICAAPlON0ocQZOdnS1fJbKu+feco+pJFW7duuXu7g6ga9euSbW4+VZ+TOmSJUvUUGGVCguFdesER8eStpqfn5CQUP9nqy7/goOFR4+UV3S9nDsnODgIgNC9uzg3R+uEQUga6rfffnNxcQHg6Oh46PkWoZHJZFu2bJE/W40MDQ0HDBiwZcuWp9o1KVqf/P333/Kbuy+99FJt/prKjyn94Ycf1FChYqmpQlCQYGJSMswyOFjIyysZlvnyy8+c6eNTxdItmpx/5d25I3h7C4Dg4yPcvy92NQoxCElzKWUEzcWLF3v27Fmb/PP19Q0NDa1N84JEdOjQIfnCraNGjcrLy6vx/PJjSo8ePaqGCmvp7l1hyJCSGJs6tSQIAeHixbJzygdhaf5ZW1fMv4cPRfkOahYXJ3TuLACCs7Nw+bLY1VSPQUga7XlG0MTGxk6bNk1+T6g6BgYG8vxLeJ4uKlKXLVu2GBkZAZg5c2Zt7h9nZWUNHDgQdRlTqmbHjwudOwvh4UJQkGBrKzRtKowaVfZoaRBOnVo2/10iEXr2FJYvF6Kjxaq6DrKySgYKWVoK6p0bVQcMQtICdR1BI58aUXHDgaryLy4uTj3fAlVBJhOePBHCwoQDB4QdO4SdO4WffxbOnavuvlloaKj8znFQUFBtnj4
hIaFTp06oy5hSEQUFCUZGwrp1gkQi3L5dcrA0COVT9DS8/VedoiLhrbcEQDA0FNauFbuaqnAbJtIO+fn5QUFB3377rSAI8nt4btVs43Tw4MH33nsvIiKi8kMSiaRnz57jx48fN25cdZeTOhQX4/JlXLiA6va3cnJCr15o316+I6AgCEFBQUuWLJFIJMuWLZs7d26NrxAZGfnKK688ePCgSZMmf/zxh4+Pj3K/A6VbsAAhIcjJQdOmGDgQW7YAKNvn6OFDmJjguVcAVIniYtS4IoUgYNEiLFoEAEFBWLxYDXXVAYOQtMnvv/8+efLkhIQER0fHH374YdiwYeUfvXfv3ty5c3/99dfKF7Zu3XrcuHF+fn6a/4ao++LicOBA2WavCri7Y/ToYlvbadOmbdy40cTEZMuWLa+99lqN192+ffs///lPbGxsly5dfvnlFycnJyWUrWLyICwqQmgoPvwQDx/C21sLNvwLCcGJE9i3D9X3v5TZuBHTp0MqxeTJWL8exsaqr692GISkZRITE998881ffvkFgJ+f39q1ay0sLDIyMhYvXrx8+fLCwsLyJ8vzb8KECc2bNxepXnrWnTs4cABSaS1PLzAymhAWtv+33ywtLfft2/ff//63xkvOnz8/dOjQ1NTUF1988eDBgwp6yDVKaRBq0c63GRlo0QJJSejaFUeOwNm55ksOHcLrryM3FwMH4qefYG2t+iprgUFI2kcQhJUrVwYFBRUUFLRq1Wr06NHr1q1LKdfCaNu27fjx41999VXmn2Z5+BA//giZrJanZ+TnD9u5Myw62r5BgyO//FKb0b+HDx9+9dVX8/LyRo4c+eOPPyplRy31KA1CIyN8+imWLUNkJHx90aKF5gYhgIgIDBqE+/fRuDF+/RUtWtR8yfnzGDYMKSno1g1HjuQ4OVmqvswaKBpQR6SZJBJJYGDguXPnWrVqdffu3ZCQEHkKtmjR4tNPPw0PD79169ann37KFNQsmZn46afap2BCdnb/zZvDoqO9bG3PzpzZs3PnGi/Ztm3bmDFj8vLy3nnnnZ9++kmLUrCCwEBIJAgNhZGR2KXUpEkTnD2LXr3w+DF69UJYWM2X9OiBCxfQrBnMzNb36NHu3r17qi+zBgxC0ladOnW6fPmyi4uLVCp98803r1279s8//3z++ef1WESU1OG335CfDwC16IWKSE/vs3HjjYSEVo6OYW++2cLEBCdPKr5kxYoV8uVDg4KCVq9erXjajIZzcMDUqVi9WguCEEDDhvjjDwwdirQ0DBiAvXtrvqRJE4SFFRcUbHz8+HGfPn3Onz+v+jIV0eJ/K0QWFhZSqRTA//73P/niy6ShEhNx927J5xKJ4nOvxMX13LDhYVpaN3f3U1OmeMp3CLl0CTk5VZ4vH1M6Z84cAMuWLVusaUMS6+X995Gbi/BwseuoHUtLHDyIGTNQUIDXX8d339V8iZOT4Z9/nhg6dGhycvKLL764tzb5qTIMQtJiRUVFaWlphoaG8r3lSHNdvVrLE/+OjHxpy5aknJyXmzQ54e/vYGFR8oBUihs3Kp9fXFz89ttvf/311yYmJjt37pw3b56yShaXlxcmTBC7iLowNMSaNVi8GDIZ3n0XgYE194JbWloePHhwxowZ+fn5r7/++urVq9VSaRU4WIa02JMnTzw9Pd3c3GJjY8WuhRRasQIZGTWe9fM//7y2b1++VDqhXbvNI0caV5ie5u2NyZPLHygoKJgwYcL+/ftrP6ZUwy1ditxcTJkCT0+xS6mvLVvw9tsoKsIbb+CHH2BiUvMlISEhH374oSAIAQEBy5cvV3+3NluEpMXi4+MB1HI1bRJNbm5tUnDz9etj9+zJl0rffeGFbaNHV0xBAPHx5e8vZmRkDBw4cP/+/fb29n/88YcOpCCANWsQHFybn5bmmjQJv/wCGxts345Bg/D0ac2XBAUFbdq0ydjYeOXKlZMnT64wCUoNGISkxeRB6OrqKnYhpFAt3tdXX7o05eD
BYpnsy5de+nbwYIMq7yMWFiIvT/5pQkLCiy++ePr0aTc3t7///rtXr15KrVgcaWl4/BgWFmjVSuxSns+AATh9Gu7u+PNP9O6NmJiaL5k0adIvv/xiY2Ozbdu2QYMGPa1NfioPg5C0GINQOxQV1XiKBDA1NPx64MCP+/at8ankQw2vX7/eqlWr8+fPy9eh1QGXL0MQ0KmTdgwWVax9e4SFoWVLhIejRw/cvFnzJQMGDDh9+rS7u/uff/7Zu3fvJ0+eqL7MEgxC0mIMQu1Qi9tEvz96VFBcnFVjn5iJya1bt3r37v3w4cNu3bqdPHnSU3tvplVy+TIAdO0qdh1K0qgRwsLQqxfi4rBgwdLTp0/XeEn79u1Pnz7dsmXL8PDwHj163KxNfioDg5C0WEJCAniPUPM1aFDjKe/36gVg1cWLOQqy0NQUZmbz5s2Li4v7z3/+89dffzk6OiqvSvHpWBACaNgQx49j9uw/fvtt/iuvvLJv374aL2ncuPGZM2d69+4dGxvbv3//U6dOqaFOBiFpMbYItYO5OeztFZ/S28urp6dnWl7elqrmSJRwd4dE8uOPPwYFBR06dMjSUvyluZRL94IQgLk5li9/eebMmfn5+ePHjw8JCanxEnt7++PHj48fPz49Pf2VV17ZtWuXqotkEJIWYxBqjVqsQflez54Alp49W1zdBLTmzQE4ODgsXrzYpDaj8rVKcjJiYmBtDd1bGdDQ0HDVqlXy7SQXLFgQGBgoq2mOoamp6c6dO2fNmiWfIfP111+rtEIGIWkxBqHW6Ny5xgVlRrVq1dTe/nF6+sF//qniYWNjtG+vkto0w8WLANClC7R5bThFAgMDd+/ebWZmtnLlynHjxuX9OwC4OuXjMygoqDbxWW86+iMnPSAIQlJSkkQica7N7i8kLgcHtG2r+BQDiWRuz54AQs6cqeLhHj1gbq6K0jSEvF+0Wzex61ClsWPH/vLLL7a2tvv373/55ZdTarEnZfn4HD9+fL58uVplYxCStkpJSSksLLSzs9PeTQb0y3/+g9K7etUsaDW5Y0cHC4tLsbFh0dHPPODggD59VFyfyORB2KWL2HWo2IsvvhgWFubp6Xnu3Ll+/fpFV/iLrsrYsWOPHj1qa2v7008/vfzyy6mpqUqvikFI2opDRrWMpSXGjSuZIldNN6mFsfHMbt0ALD17tuyouTlefVWDtjNXjStXAJ0bKVOltm3bnj9/vmPHjnfu3OnRo8e1a9dqvOSll146efKkm5vb2bNn+/fvHxcXp9ySGISkrXiDUPt4e+O11xRPK5z1wgvmxsaH/vnnbnIyAFhZwc8Pur6o+pMniI+HvT2aNBG7FLVwc3P766+/+vfvHx8f37dv399++63GSzp06HDp0qUWLVqkp6crfW1hBiFpKwahVvLxwdtvw82tusedLC39O3QQgNDz5+Hjg2nToAd/xaUTJ2oaUaQ7GjRo8Pvvv0+YMCE7O3vEiBHbt2+v8RI3N7fx48fHxsbW5uQ6YRCStmIQaisHB7z1FkaORDXd2vN69jSQSLbcupUwYACsrdVcnSh0cgZhjUxMTLZv3x4cHFxYWOjv779w4cIaL7lz5w6ALsq+lar9S9qRvpLfI2QQaiWJBB06oEMHpKYiMhLJycjNhUQCKys4OjZv3Hh4TMzBgwdXr179+eefi12rOujJSJnKJBLJwoUL7ezs5s2bt2jRol69er3yyisKzr906RKAbsoeXMv9CElbvfrqq3v27Nm5c+frr78udi2kZPJFtuzt7aOjo3VvBZnKnJyQnIzoaC3ehvA57du37+zZs998842Cc5KTk52cnKysrJ4+farcPQvZNUrail2jOszX17dnz55paWmbN28WuxaVe/wYyclwdNTfFAQwduxYxSmIf5uDXbp0UfrOvQxC0lYMQt32/vvvA1i2bFlxcbHYtajWpUsA8MILYteh8S5fvgygqwpupTIISVvxHqFuGzlyZLNmzR4/fnzgwAGxa1E
t/ZlB+JyuXLkCBiFRqaysrOzsbAsLCxsbG7FrIZUwMDCYO3cuAFUvuCw6/RwyWg9sERI9g81BfTB58mRHR8dLly7VZk9XLSUIuHoV0Msho3USGxsbFxdna2vr4+Oj9CdnEJJWkt8g5Ppqus3c3HzmzJkAli5dKnYtqnL/PjIy4OGhD8sGPBd5c7Bbt24SFSw6wCAkrcSRMnpi9uzZlpaWhw8fls+k1j3sF60l1d0gBIOQtBSDUE80bNjQz89PEITQ0FCxa1GJ8PCvOnde8NJLV8UuRNPJ504wCInKMAj1xwcffGBoaLh161b5X7qOCQv75erVkObNk8UuRNOxRUhUEfdg0h9NmjQZPnx4QUHB6tWrxa5FyYqLi+WbEHXu3FnsWjRaZGRkcnKyg4ODt7e3Kp6fQUhaiS1CvbJgwQIAq1evzs7OFrsWZbp7925OTk7jxo0dHR3FrkWjlY6UUdHzMwhJKzEI9coLL7zQq1evtLS0TZs2iV2LMqluYpyOYRASVYFBqG/kK64tX75cKpWKXYvSMAhrSf6DUvruS6UYhKR9CgsL09LSjIyMHHR943IqNWLEiFatWunYimsMwtoQBEGlI2XAICRtlJCQIAiCs7Oz0hehJ41lYGAQEBAAHVpxraio6ObNmxKJhCNlFHv48GFGRoarq6ubm5uKXoLvI6R9uL6afpo8ebKzs/Ply5dPnToldi1KEB4enpeX17x58wYNGohdi0aTt5tfUOX2HAxC0j5cX00/mZmZvfPOO9CVFddUOkNcl6j6BiEYhKSNOFJGb8lXXDty5IgOrLgmv++l0vd33aCGO6kMQtI+DEK9ZW9v7+/vLwjC8uXLxa7leclbhKqbEqAbZDKZfM0BtgiJnsF7hPrs/fffNzQ03LZtmyavuJaXl6f4hPz8/PDwcENDw06dOqmnJC11/35x+/bXR4z4ycnJSXWvYqS6pyZSEd4j1GdNmjQZMWLE/v37v/vuuy+//FLESgoLC1NSUuLj4yMiIuLi4uSfyD9PSkrKzc01MTGp7tobN24UFRW1bdvW0tJSnTVrnQsXjM+caTJ2bBOVvgqDkLQPu0b13IIFC/bv37969eoFCxZYWVmp9LWkUml8fHx0dHRMTMyTJ09iYmKio6OfPHny5MkTec9ElczMzBITEz09Pas7gTMIa+nKFUD1uxYzCEn7MAj1XLdu3Xx9fc+cObNx40b55MLnl56eXr5hV/pJdHR0dWvZGBsbOzg4uLm5NWnSRD7LrfSTRo0aKZ7kyiCsJfl+jaq+kSoRBEG1r0CkVDKZzMzMTCqV5uXlmZqail0OiePgwYOjRo1q1KjRgwcPjIxq+wt9enp65aiLi4uLiorKycmp7io7O7vKUdekSRNvb29DQ8P61d+uXbvw8PDz58937969fs+gD6RS2NggPx8pKbC3V+ELMQhJyyQlJTk7Ozds2DAlJUXsWkg0giC0adPm7t27u3btevXVVxWfPHbs2PDw8OjoaAVjWJydnT08PDw8PLy9veWfeHl5eXp6urm51T5oayk3N9fW1lYikTx9+tTc3Fy5T65Lrl9Hp05o1gz376v2hdg1SlqG/aIEQCKRBAYGzpgxY+nSpa927YrHj5GUBPkmTWZmcHSEtzeaNYOxMYBHjx7du3cPgJmZWYVWnfxzb29vVd9rLO/q1atSqbRTp05MQcXk/aJq6D9mEJKW4Za8JDfZz2/hxx9fvnz57y+/7N+o0TOPPX6MixdhYoIuXdCnz6ZNm0xNTb28vDRkiCZnENaSekbKgPMISeuwRUgAEB9vunHjO+3bA1h69mzV5xQW4tw5rFrV0dy8VatWGpKC4JoytXbpEqD6kTJgEJLWYRASHjzApk1ITX33hRcsTUx+uX//dlJStSfn5mLXLpw7p8b6aqDqbWZ1Q2EhwsNhYAA1LDnAICQtwyDUd9HR2L0bRUUA7M3NJ3fsKADfVJdzpYMB//gDV6+qq0RFnj59+uDBAzMzs7Zt24p
di0a7cQMFBWjZEtbWKn8t3iMkLVN+fbWlS5f26NGjd+/eYhdF6pKXh337UFxceuC9nj3XXr6849atL196ybXyW6ZEUvb5L7/A3R3OzmopFAAKCwvj4uLKz8GPioq6d++efDdNpQ9G1TFqGykDBiFpndL11c6ePRsUFCSRSD766KPPPvuMbyt64a+/kJVV/kBjO7tRLVvuu3Pn24sX//fyy4quLS7G0aN4802lF1XdZPyoqKjicpldysnJKSoqavLkyRs2bDA2NlZ6PbpBPlJGPUHIeYSkZZo2bSofDd+kSZMvv/zyyy+/LC4u7tat244dO5o1ayZ2daRK2dkIDUWlaLkUG/vC99/bmZtHz51rVf3yniX8/dG4cT1ePDExsbRVJ/9Evu5aXFxcdUvPGBkZubq6yucjenh4eHp6enl5eXh4REVFvfnmm5mZmS+99NL+/fttbW3rUY/O69ABN2/i7Fn07Kny12IQkpaxsrLKycnJzMy0trYGcP78+YkTJ0ZERFhbWy9dunTatGliF0gqc/Ysjh2r8pG+mzadjooK/e9/A3v0qOFJ2rXD6NHVPZiXl1flItrR0dHZ8kmKValu6RkvL6/qOipu3rw5ePDg2NjYtm3b/vrrrx4eHjWUrWdyc2FrC0FAZiYsLFT+cgxC0ibJyclOTk6Wlpbl35UyMzNnzpy5Y8cOAGPHjl23bp29SpdjIrFs2YLIyCofOXTv3ogff2zUoMGDgAAjhYt8wtw8f/bsuPj4ClEXHx//8OHDp0+fVnednZ1d5ahzdXVt1KhR/SZmPH78ePDgwf/884+7u/svv/zSvn37ejyJrjp7Fr6+6NAB16+r4+V4W4W0Q2Zm5qZNm77++mtHR8fJkyeXf8jGxmb79u0jRoyYNm3avn37Ll68uG3btr59+4pUKalM9RsQDmvevJWj493k5H137rzWti2AwuLilNzc+KysiPT0iPT0uKys+Oxs+ScJCxZU1wCovPSM/JNmzZrZ2Ngo97tp3LjxmTNnRowYERYW1r9//4MHD/IfbSl1jpQBW4Sk+SIjI0NDQ3/44Qd5K7Bjx46//fabc1Vj/yIjI994440zZ84YGBi8++67S5cu5UgE3VFQgMWLFTz+/ZUr0w4ftjMza2Jv/yQzM7H6nkxzMzOvfxcUrbCyqNLTrkYFBQX+/v579uwxNTXdvHnza6+9puYCNJO/P7Ztw5o1mDFDHS/HICTNdf369W+++ebHH3+UD0bw9fUNCgoaOnSopPyY+GdJpVKOoNFNOTlYulTB4/lS6bzff99/9648Ak0MDRtaWLhZWzexs3O1spJ/0sTOztXa2nXGDEnTpuqqu2bFxcWBgYHfffedRCJZvHjx/Pnzxa5IfK1b4+5dXLrEUaOkx8LCwkJCQo4cOQLA2Nh45MiRH3zwQe1X4uAIGh0kleL//q/Gs/6OjLQwNva0sXGxsqr2F6apU6F5g1NWrFgxb948mUwWEBCwfPlyxdsZ6rbsbNjawsgImZlQz05r+vuzJg1UWFi4devWdu3a9enT58iRI9bW1gEBAY8ePdqzZ0+TJk2++eabKmdlVdajR49r165NnDgxKytr+vTp48aNS0tLU3XxpFpGRrVZYqR/o0YvuLu7Wlsr6DaAnZ0yC1OSwMDA3bt3m5mZrVy5cvz48fn5+WJXJJorVyCToX17NaUgGISkITIzM1esWOHj4zNp0qTw8HAXF5fg4OCoqKgVK1Z4enoCmD59+nvvvffiiy9GR0fX5gnlI2j27NnToEGDffv2derU6dSpUyr+JkjFlNKMs7ODxqy+XcHYsWOPHj1qa2v7008/vfzyy6mpqWJXJA717EpfHoOQRBYVFbVgwQJvb+85c+Y8efKkffv269ati4yMXLhwoV2539xnzpzp7u5++vTpdu3abd++vZZPPm7cuGvXrvn6+kZHR7/44ouBgYFFRUWq+T5I9Vq21JQnUZmXXnopLCzM09Pz7Nmzffv2reWvfTpGHoT
q3JyDQUiiuX79ur+/f7NmzUJCQjIyMnx9fQ8dOnT9+vVp06aZVuoTeemll8LDw1977bXMzEw/P7/x48dnZGTU5lUaNWr0999/BwcHSySSlStX+vr6PnjwQPnfDKlB69bP25iTSNT6/lovbdu2PX/+fIcOHe7cudOjR4/r6plJp0nUPHcCDEISRVhY2LBhwzp37rxt2zYA48aNu3jxovyggls7DRo0+PHHH7ds2WJlZbV3795OnTqdOXOmNi9nZGS0cOHCsLCwJk2aXLp0qUuXLuvXr1faN0NqY2SE55xp17EjGjZUUjUq5Obm9vfff/fr1y8+Pr5Pnz6///672BXVWVoaNm7EoEH1mRH/xx/Yswdt2ii/qupw1CipT2Fh4a5du5YsWRIeHg7A2tp6ypQp7733npeXV52e5969exMmTLh69aqRkdHHH3/86aefGhoa1ubCjIyMGTNm7N69G8DeuXPHfvYZGjSo+/dB4hEEbN6M+nUY2thgxgyYmyu7JlUpKCiYMmXKjz/+aGJisnHjxokTJ4pdUc3S03H4MPbuxR9/oLAQAD78EP/7n9hl1YRBSGqRlYWNGzPXrfN88iQzK8vFxWX69OmBgYF29R2/J58v+MUXX8hksh49euzYsaNJkya1vHbv3r3rPv30WFKSxNoa27Y9byOD1Cw7Gz/8gNp1jJcxMcGkSXBzU0lJKiMIwqJFixYtWiSRSD777LOFCxeKXVHVKuefoSF69MC4cXjtNXXufFVPDEJSsehohIZiwwb57jmhr7/ecNCg1157TSlrvvz555/+/v6xsbE2NjbffffdG2+8UcsLix8+NHzjDVy4AENDfPghgoPBXZy0yNOn2LEDycm1Pd/CAq+/roFzB2updIrhW2+9tWbNGs3ZcUxZ+bdgAUJC8PLLOH687GDTpmjZEkeOKL/syhiEpDI3bmDZMuzaJd9MHL6+CArC0KFQMMGr7jIyMt55551du3YBGDdu3Pr16xvUsrdTKsWXX+LLL1FcjG7dsGMHuAaNFiksLNl0vsZ3MB8fDB8Ota+dplwHDhyYOHFiXl7eyJEjd+7caS5qB29GBg4dUmb7Tx6EAC5eLJs1wSAkLRcWhpAQHD0KQYCBAQYPxqef4oUXVPeCW7dunTVrVnZ2dqNGjbZt21aHPevPn8fEiYiIgLU1li4F16DRLgkJOHcOd+6g8o6AEgl8fNCjB3x8xKhM+c6dOzd8+PCUlJTu3bsfPnzY0dFRzQWkp6f//nva1q0+J06U5J+REV58EePHY9So5xqEtGAB1q6FoyPatcP+/SUH1RmEEIiUpbBQ2LNH6NpVAARAsLYWAgKEqCj1vPg///zTuXNnAEZGRsHBwVKptLZXPn0qTJxYUvPYsUJqqirLJBUoLBQiIoTz54Xjx4Vjx4QzZ4R794TcXLHLUr7bt297e3sD8PHxefDggXpeND09fcuWLUOHDjUxMenWbSggGBoKvr5CaKiQkKCclwgKEoyMhHXrBIlEuH275KCPjzBkiHKev0YMQlKGzEwhNFTw9CyJE2dnIThYSEtTcxVFRUXBwcHyRRp79Ojx6NGjOly8Z4/QoIEACF5ewsmTKquR6LnExcV16tQJgIuLy+XLl1X3Qunp6Zs3bx48eLCJiYm81WRkZDRo0JD162XJyUp+raAgARBycgRXV8Hfv+Qgg5C0R3y8EBxcEiGA0K6dsG6dkJcnYkUnTpxwd3cHYGNjs23btjpc+fix4OsrAIKBgRAQIBQUqKxGovrLysr673//C8DS0vLo0aPKffLy7T95/hkaGvr6+oaGhsbHxyvxhYqKhD/+EL74QhD+DcKiImHJEsHISIiMFAQGIWmH69cFPz/B2LgkAn19hUOHBJlM7LIEQRDS09NL93UbN25cenp6ba8sKhKCgwVDQwEQunUT7t9XYZVE9VVUVPTWW2/JW2nr1q17/icszb/SRZ1UlH9SqXD6tBAQILi4lLxzPHpUFoSZmYKdnfDuu4L
AICRNd/q0MHSoIJGUNJ6GDhUuXBC7piqsW7fOwsICQMtmzfIvXqzDlefOCU2alNzmVMa7DJHSyWSy4OBgABKJJDg4uH5PkpGRUSH/DAwMVJR/x44J06YJjo4l+QcIrVoJn30mxMeXBaEgCJ98IpibC4mJQtOmDEISy9OnwvvvC40aCSYmgouLMHmyEB1d8pB8LEy3biX/iq2shICAkl4MTSUfQXOsf3/ByEgIDhY4goZ0y4YNG+TTCqdMmVJYWFina5OTk83MzErbfy+99NKaNWsSExOVWF5xccX2HyA0biwEBAinT5edVj4Ik5MFCwvhww+Fli0ZhCSKggLhhRdK3vqXLhWmTROMjAR3dyE2VhAEoX//kn/Ibm7C4sVC7fsbRVVQUFAcFCQYGAiA0KdP3ZKbI2hI4x08eFDe8/HKK69kZmbW6dqePXuqov1Xy/wrVT4IBUGYPVuwtRXatmUQkihCQwVAmD+/7MiWLQIgTJ0qCIKwerXQrJkQGiruWJh6OnFC8PAQAMHGRuAIGtItFy5ckE8r7NatW52adEWl4aMMdc2/UhWCMCqqZOwBg5DE8MILgoGBUH5wtEwmeHgIVlaCVCoUFWnIWJh6Sk8XXnut5H/nuHF1aNFyBA1pvIcPHzZr1gxA48aN//nnH3W+dL3zr1SFIBQEYdIkBiGJQiYTTE0FH5+Kx0eNEgDdefffskWwshIAwdu7tv9N5U6dEry8SkbQHDyosvqI6ikhIaFr164A7O3tw8LCVP1ypfnn6lqWf40a1SH/NAf3I6R/ZWejoAAODhWPy1dySklRf0Uq4e+Py5fRuTOiovDii1i4EMXFtbqwTx/cuoWJE1FYCG9vFVdJVGfOzs5///33kCFD0tLSXnnllcOHD6viVYqLi//666+ZM2e2aJHepw9WrkR8PJo3x8cf4/p1PH6MFStQ+yUONQSDkJ5Vee1Z+REDHfqn0qIFLlxAcDBkMixahN698ehRrS60scH27bh+HR07qrZConqxtLT8+eefp0+fnpubO2rUqDVr1ijrmWUyWVhYWGBgoKenp3x8qYvLz40aISAAp0/j3j18+SU6dFDWq6kbF92mfwkCrKzg6IjIyGeOjxqFgwfx+DEaNRKlLhX6809MmoQnT2Bjg+++Q613cSLScCEhIR9++KEgCAEBAaGhoZL6bvlSXFx8+vTpvXv37t+/PyEhQX6wWbNm48aNGz9+UocOzZVXspgYhFRO7944cwaxsWX7lwoCPD1RUFCHvd+0S0YG3nkHu3YBwLhxWL+ee9aTbti8efO0adOKior8/f03bNhQpx1AZTLZ2bNn9+7du3fv3vj4ePnBRo0aDR8+fNy4cb6+vvVOVg0l8j1K0iirVwuAMHt22ZEffxQAYe5c8WpSizqNoFGw5gCRJvnjjz9sbGwAvPzyy0+fPq3xfHn7LyAgwNXVtTQjvL29AwICTp8+LdPqQeMKsUVI5RQVoW9fnD+PMWPQty8ePMD69fD0xKVLsLMTuzgVu3cPEybg6lUYGeHjj/HppzA0rOK0wkL06YOLFzF2LHr0wP372LgRzs64eLGsGU2kMS5fvjx06NDExMR27dr98ssvHh4elc8pbf/t27cvLi5OftDb23vEiBG62f6rTOwkJg2TlSXMny94eAjGxoKrq/D220rbc0zzyecLyteg6dFDePiwinMUrzlApHkiIiJatGgBoFGjRnfu3Ck9Xtr+cyv3O5w+tP8qY4uQqtK5M65dw+XL6NJF7FLU7q+/4O9f7Qia7t1x+TISE8vmmQgCvLyQkYGMjKobkURiS0tLGzFiRFhYmJ2d3f79+42MjPS6/VcJg5Cq4uqKhAQ8eQJ3d7FLEUNqKt5+GwcOAIC/P9atg3xtYkGAuTk8PPDw4TPnjx6NAwdw/z6aNROhWqJayM3NnTBhws8//2xhYZGbmys/2KRJk3Hjxo0bN66LHv7KW46R2AWQ5ikuRnIyDAzg5CR2KSJp2BD792PrVsy
ahYQE/LtDTc1rDjAISVNZWFj89NNPnTt3fvLkiZWV1Wuvvaa37b/KGIRUSVISiovh5IS6jLfWQf7+6N4dtrao8E6hD2sOkC4yNDQ0NTVNS0v7+++/+/XrJ3Y5GoRBSJXIpw2VGz+tv1q0eOZLKytYWCAxseJp8kmWzs5qqoqoXgoLC2/evGlgYNCpUyexa9Es/B2WKmEQVkciQadOiIrCv0MMAEAQcOkSHBx0cOUd0i03b94sKCho0aKFfHIhlWIQUiXyhZQYhFWaOBEAFi8uO7J7N2Jj4ecnVkVEtXT58mUA8h0qqDx2jVIl8hahi4vYdWikt97C1q349lvExZWtOeDjg08/FbsyohpcuXIFgJ4PEK0SW4RUCbtGFTA2xrFjmD8fV67g/ffx00+YNAlnzuj+yjuk/S5dugSgW7duYheicTiPkCqRz4rbuxdjx4pdChEpR35+vo2NjUwme/r0qaWlpdjlaBa2CKkStgiJdM61a9eKioratGnDFKyMQUiVyAfL8B4hkQ7hSBkFGIRUiXyeHIOQSIcwCBVgENKz0tORlwcbG7D/hEiHMAgVYBDSs3iDkEjnZGdn37t3z8TEpH379mLXookYhPQsBiGRzrl69WpxcXG7du1MS1eQp3IYhPQsBiGRzmG/qGIMQnoW11cj0jkMQsUYhPQsrq9GpHMYhIoxCOlZ7Bol0i1Pnz59+PChmZlZmzZtxK5FQzEI6VkMQiLdcvnyZUEQOnbsaKznW21Xj0FIz+I9QiLdIu8X5VrbCjAI6Vm8R0ikW+RByN2XFGAQUjl5eXj6FKam3FSISGdwpEyNGIRUJjIhoZOz88zu3SGRiF0LESlBSkpKZGSklZVVy5Ytxa5Fc3GHeioTGxd3PTHRrHFjsQshIuW4cSPOyal98+Y2hoaGYteiudgipDLx8fEAXDlShkhXnD/fPinphq/vCbEL0WgMQiqTkJAABiGRDrl0CQDatzcRuxCNxiCkMvIWoQuHjBLpisuXAYBTJxRjEFIZdo0S6ZKEBMTGwtYWTZuKXYpmYxBSGQYhkS65eBEAunblMPAaMAipDO8REumSK1cAgBMIa8QgpDK8R0ikS+Q3CBmENZIIgiB2DaQRpFKpmZmZIAgFBQVGRpxgSqT1nJ2RlITHj9GokdilaDa2CKlEUlJScXGxo6MjU5BIB0RFISkJDg5MwZoxCKkER8oQ6RL2i9Yeg5BKMAiJdAlHytQeg5BKMAiJdIl8TRkGYW3ofhDKZLIDBw6MHDlyzZo1J05wwb1qyedOcMgokQ4QBFy9CjAIa0eXh0Xk5eVt3bp12bJlDx48AHD06FGZTPb+++9/8cUXJiZcea8itgiJdMajR0hLg4sL3N3FLkUb6GaLMCUlJSQkpGnTpjNmzHjw4EHjxo2XL1/+v//9z9DQ8Ouvv+7SpcutW7fErlHjMAiJdAaXGK0TXWsRRkRErFixYsOGDbm5uQA6deo0Z86cCRMmyKcE9O/ff+LEieHh4d27d//qq68CAwPFrleDMAiJdAaHjNaJ7rQIr1y54u/v36JFi5UrV+bl5Q0YMODQoUNXr1719/cvnRjXrVu3K1euTJs2LS8vb86cOaNGjUpNTRW3bM3B9dWIdAaDsE60fmUZmUx29OjRlStXHj9+HICJicmrr74aFBTUpk0bBVft27dv+vTpaWlpzs7OmzZtGjRokLrq1VCCIFhYWOTn5+fk5FhYWIhdDhHVn0wGOztkZiIxEU5OYlejDbQ4CAsKCnbv3r148eK7d+8CsLGxmTx58vz5891rd3c4Ojraz8/v1KlTEolk9uzZS5Ys0ecRNGlpaQ0bNrS1tc3IyBC7FiJ6LnfvonVreHkhKkrsUrSEVnaNPn36dMWKFU2aNJk0adLdu3cbNWq0ePHi6OjoFStW1DIFAXh5ef3111+hoaFGRkYrV67U8xE0vEFIpDPYL1pXWhaEjx8/DgwMdHd3nzNnTlxcXKdOnbZs2fLgwYOgoCBbW9u
6PpuBgUFgYOCZM2eaNWsmH0GzYsUKVZSt+RiERDqDQVhXWhOE8mEvzZs3X7lyZW5ubpVjYeqHI2gA/Pnnn2AQEukEBmGdCZpNJpMdOnRowIAB8mpNTEz8/PzCw8NV8Vp79+61t7cH4Ozs/Msvv6jiJTRNcXHxoUOHevToAaBPnz5//fWX2BUR0XORSgVLS0EiEVJSxC5Fe2huEBYUFGzZsqV169byCLSxsQkICIiJiVHpi0ZFRfXt2xeARCIJCAjIz89X6cuJKCsra8WKFY3+3aDFyclp8eLFYhdFRM8rNVUYO1bo10/sOrSKJo4affr06ebNm5csWRIbGwugUaNGM2bMmDFjRj3uAtaDTCb79ttv58+fX1hY2LZt2507d7Zr104Nr6s2SUlJq1evXrVqlbwH2MfHZ/bs2W+//TZnTRCRnhI7iZ8REREREBBgaWkpr61jx45btmwpKipSfyUXL15s1qwZAHNz89DQUPUXoAoPHjwICAgwNzeX/3i7dOmyZcsWqVQqdl1ERGLSlCC8evWqn59f6bAXX1/fQ4cOiVtSZmbmtGnT5PWMHDkyRZt73E+fPj1u3DhDQ0MABgYGQ4cOPXPmjNhFEZFyBAUJgPDyy88c9PERhgx55oRz58oezcsTgLIT9JzIo0YFQTh+/PiwYcM6d+68bds2AwMDPz+/W7duhYWFDRs2TNzarK2t161bt2/fPnt7+4MHD7Zp0+bXX38Vt6S6kslkhw8f7tWrV58+ffbu3WtkZOTn53f79m35QbGrIyJlOnGiZA9CqivRgrCwsHDr1q1t27YdOHDgkSNH5GNhHj16JD8oVlWVjRkz5tq1a3379k1MTBwyZEhgYGBBQYHYRdUsJydn/fr1rVq1Gj58+Llz5xwdHYOCgh4/frx169aWLVuKXR0RKZmtLZo2xVdfiV2HdhIhCDMzM0vXhblz546rq2twcHBUVNSKFSs8PDzUX0+NStegMTY2XrlyZdeuXTV5DZqkpKSFCxd6e3tPnz79/v37Pj4+oaGhkZGRixcv5jRBIl2Vk4MPPsDBg7hzR+xStJE6+2EfP35ceSxMYWGhOmt4Hho+goZjYYj0k/wWYE6O4Ooq+PuXHKx8j3D/fuHx45KPf/7hPcIyagrCKsfCyGQy9by6EmnmCJrKY2HCwsLELoqI1ESec0VFwpIlgpGREBkpCFUFYeUPBqGcartGBYVjYSQSiUpfXRU0agSNgrEwvr6+YlVFRGKZPh3W1li6tOpHQ0Jw4EDJx5496q1Mw6koYOXrwpRuCmhtbR0QEBAdHa2il1O/qKiofv36QaQ1aPLz87ds2dKiRQv5j9fBwSEoKCguLk6dNRCRhihtEQqC8Mkngrm5kJgoNG3K6RO1pfwWYfmxMLdv35aPhZHvkeTp6an0lxOLl5fXn3/+qf4RNMnJyQsXLnR3d580adK9e/eaNGkSGhoaFRXFsTBEBCAwEBIJQkPxfJsR6Bnl5mp+fr7Tvzsid+rUafv27Vo0FqZ+1DaC5uHDhxwLQ0SVlW8RCoIwe7Zgayu0bVuHFmFBgTB/vuDpKVhZCX36CBcuqLF6DaDkFqGpqem4cePkY2GuXLkyceJEY2Nj5b6EplHDLk5Xrlzx9/dv0aLFypUrCwoKhg4deuzYscuXL/v7+8sHyBARlXr/feTmIjy8Dpd8+SV278bOnbh1Cy1bYtgw5OWprD7No/yu0RUrVmjvWJj6UdEIGvlYGF9f365du27btk0+FiY8PPzw4cOl+1IREVXg5YUJE+p2SWws/u//0Ls3GjXCV18hKQl376qmOI2kibtPaK/o6Gh/f/+TJ09KJJLZs2d//fXXpqam9XiegoKC3bt3f/XVV//88w8ABweHqVOnBgQEuLm5KbtkIqJnXLuGLl0QHQ2NXOBEJRiESvacuzglJyd/99133333XUpKCoAmTZoEBARwjyQiUo+cHPTujR49sGaN2KWoEYNQJS5dujRx4sQHDx6Ym5t/9dV
XgYGBNV7y6NGjlStXbtiwITc3F0Dnzp0DAwMnTJhgxLFfRKQW6ekYPBiWljh8GP8Oy9MLDEJVycrKev/999evXw9g5MiR33//vYODQ5VnXrlyZcWKFTt37iwuLjYwMBg8eHBgYCDvAhKROsXFYeBAtG2LLVtgZiZ2NerFIFStn376adq0aWlpac7Ozps2bRo0aFDpQzKZ7OjRoyEhIWfOnAFgamo6fvz4Dz/8sFWrVuLVS0T6KD0dvXujTx+sWQO9GeZYhkGocjExMX5+fuVH0AAoPxbG1tZ20qRJQUFBHAtDRKKYMgW3b+O332Dw70wCCwuYmIhakxoxCNWhuLj4q6++WrRokVQq9fb2zs7Ols819PHxmTt37pQpUzgWhojEUlwMY2NUiII1azBjhkgFqR2DUH0uXbo0duzY4uLi2NhYjoUhItIQDEK1+vrrr4OCgsaMGbNv3z6xayEiIkCUHer1WVpaGoDOnTuLXQgREZVgEKpVfHw8AG4TQUSkORiEasUgJCLSNAxCtUpISADg4uIidiFERFSCQahWbBESEWkajhpVn6KiIjMzMwMDg/z8fO4jSESkIdgiVJ+EhASZTObk5MQUJCLSHAxC9WG/KBGRBmIQqo98pAyDkIhIozAI1UfeIuSQUSIijcIgVB92jRIRaSAGofowCImINBCDUH14j5CISAMxCNWH9wiJiDQQg1B92DVKRKSBuLKMmgiCYGpqKpVKc3NzzczMxC6HiIhKsEWoJikpKUVFRXZ2dkxBIiKNwiBUE/aLEhFpJgahmnDIKBGRZmIQqgmHjBIRaSYGoZqwa5SISDMxCNWEQUhEpJkYhGrCe4RERJqJQagmvEdIRKSZGIRqwq5RIiLNxCBUE3aNEhFpJgahOmRlZWVnZ1tYWNjY2IhdCxERPYNBqA7sFyUi0lgMQnVgvygRkcZiEKoDh4wSEWksBqE6sGuUiEhjMQjVgUFIRKSxGITqwHuEREQai0GoDrxHSESksRiE6sCuUSIijcUgVAcGIRGRxpIIgiB2DTqusLDQzMzM0NCwoKDAwIC/eRARaRa+L6tcQkKCIAjOzs5MQSIiDcS3ZpWTDxnlSBkiIs3EIFQ53iAkItJkDEKVYxASEWkyBqHKMQiJiDQZg1DleI+QiEiTMQhVji1CIiJNxiBUOQYhEZEmYxCqHIOQiEiTcWUZ1RIEwdTUVCqV5uXlmZqail0OERFVxBahaqWkpBQVFdnb2zMFiYg0E4NQtbgBExGRhmMQqhZvEBIRaTgGoWoxCImINByDULUYhEREGo5BqFpcVoaISMMxCFXo8ePHv//+O9giJCLSYAxClbh27Zq/v3/z5s3v3bv3wQcfDBgwQOyKiIioakZiF6BTBEH45Zdfli5d+vfffwMwMTGZNGnSpEmTHBwcxC6NiIiqxiBUjsLCwl27dn399de3b98GYG1tPWXKlPfff9/T01Ps0oiISBEG4fPKzMzctGnT0qVLnzx5AsDFxWX69Olz5sxp0KCB2KUREVHNGIT1FxkZuXbt2rVr1z59+hRAhw4d5s2b9/rrrxsbG4tdGhER1RaDsD6uX7/+zTff/Pjjj1KpFICvr29QUNDQoUMlEonYpRERUd1w94m6CQsLCwkJOXLkCABjY+ORI0d+8MEH3bp1E7suIiKqJ7YIa0U+FmbJkiXh4eHgWBgiIh3CIKxBlWNhAgMD7ezsxC6NiIiUgEFYrfj4+HXr1q1YsSIjIwNA+/btZ82aNWnSJO4sSESkSxiEVbhx48ayZct27dpVVFQEjoUhItJpHCzzDPlYmKNHjwqCwLEwRET6gC1CoJqxMO+9956Xl5fYpRERkWrpexBmZWVt3Lhx2bJlMTEx4FgYIiL9o79BWOVYGH9/fzMzM7FLIyIi9dHHIORYGCIiKqVfg2XKj4UxMDAYPHjwp59++sILL4hdFxERiUYvWoRFRUUHDx5csmTJpUuXAFhZWb355pscC0NERND5IKwwFsbZ2Xn
GjBkcC0NERKV0NggTEhLWrl1bOhamXbt27777LsfCEBFRBToYhDdv3ly6dCnHwhARUW3o1GCZymNhPvnkk+7du4tdFxERaS7daRH269fv1KlTAKytrd966605c+ZwLAwREdVId4Kwe/fu9+7dmzFjRkBAgL29vdjlEBGRdtCdrtGsrCxTU1MTExOxCyEiIm2iO0FIRERUDwZiF0BERCQmBiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREek1BiEREem1/wdwP6RB3QGhwQAAAABJRU5ErkJggg==\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAABtAElEQVR4nO3dd1hU19YG8DUMXUBABRR7oSsoWLD3Gk0zMUbTjF+Sm5gISLAHe0XBaHLTe4/GmoiKWBEbAipVrNhABUWkz8z3h15z2GdUyszsM3Pe3/M933NdyswKMPPOPmVthUajIQAAALky490AAAAATwhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyZs67ATAeZWVEREolWVjwbgXg0SorSaUiIrK25t0KGAcEIVSnUlFeHuXl0c2bVFBAd+5QcTGVlVFlZbV/ZmVFNjZkb0+OjuTsTE2akJsbOTuTQsGpb5AZjYYKCuj6dbpxgwoK6PZtunuXSkupvLzaP7OwIGtrsrOjhg3J2ZkaNyZXV3J1JaWSU98gRQqNRsO7B+BNraZLl+jsWbpwga5de/Bpug6sralFC2rdmtq3JxcXnbYIQERE+fmUk0MXLlBu7oNDFHWgVFLTptS6NbVrRy1bkhnOEMkdglDG1Go6e5ZOn6bs7Lq/pzyKoyP5+FDHjuTmpuNHBhm6fp1OnaL0dLp9W8ePbG1NHh7k50ft2iERZQtBKEtFRXT8OCUnU3Ex+1cajY4Pb7q5UWAg+fvjzCLUWmUlpaZSUhJdv67Lh9X6S25nR507U1AQOTjo8rnAGCAIZebGDTp4kE6fJrXaoM9rY0Ndu1KPHmRjY9DnBSNVWkqHD9
OxY1RaatDnNTMjX1/q04eaNDHo8wJXCELZKCykPXvo9Gni+BO3tKTgYOrZkywtufUAEldRQYcOUWIiVVRw60GhID8/GjCAnJy49QAGhCCUgcpK2rePDh+u+1UwumVnR4MHU6dOuMQUqtFo6ORJiovTcsSeC6WSevSgvn3xuc3kIQhN3Zkz9PffdOfOgz/q/BRgnbVuTaNHk7Mz7z5AGgoKaMsWuniRdx9EVP1l0rAhPfUUtW/PtSHQLwSh6aqspNhYOnGCdx+PZmFBQ4dSYKBUshm40Gjo+HHatYu9V1VSunSh4cNxwZepQhCaqPx8+uMPunWrWlE6y0EhLy96+mkMAZGp8nLatIkyM3n3ISJ+sTRuTC+8gBtkTRKC0BSlp9OmTZL+fM1wdqaXXsJ1erJz4wb99hsVFPDuo8YsLOiZZ8jHh3cfoGMIQpNz4ADFx/NuosYefu62sqJx46hNG94NgaGcP0+///5gIpo0j1U8ysCB1KcP7yZAlxCEJkSjoe3b6dgx3n3UlVJJzz5Lvr68+wD9S0ujjRulchlzHXTtSiNGGFN4w2MhCE2FRkNbtlBKCu8+6kehoKefJn9/3n2APqWm0ubNPO9n1YmAABozBlloGjBbzyRoNLR1q9GnIBFpNLR5M506xbsP0JtTp0whBYkoJYW2bjWF/xBAEJqIuDhKTubdhI5oNLRpE+Xk8O4D9CAnhzZtMp3wSE6muDjeTYAOIAiN37FjdOjQv380gXcZtZr+/FPHc5aBu+vX6c8/DT3kVh+EL7FDh4z4rDz8D4LQyJ0/T7Gx1SqmcdKiooJ++43u3ePdB+jIvXv06688x4fqEPMSi42l8+c5tQK6gSA0Znfv0vr1pvARW6s7d2jDBlNY4IJGQxs2UFER7z70Q62m9evp7l3efUDdIQiN1v03l5IS3n3o0/nzdOAA7yag3g4cMPE1U0kJPrQZNQSh0Tp0SCoTivVq3z66epV3E1APV6/Svn28m9C/ixernaoHo4IgNE63btHevbybMAi1mjZtMuI7r2VOpaLNm0326D1j7166eZN3E1AXCEIjpNHQtm1UVcW7D0O5cQOftY3VoUOUn8+7CUOpqqK//8YBUmOEIDRC6el04QLvJgzrwAGTvdTChBUVye4U74ULlJ7OuwmoNQShsVGp5HgPb2Ul7dnDuwmopfh4Y9oCRVfi4nAk3+ggCI1NUhLdvs27CR5SU+nGDd5NQI3duEEnT/JugofbtykpiXcTUDsIQqOiUtHBg7yb4ESjof37eTcBNbZ/v3zPlh08KKNT+CYBQWhUUlNlet/u/bfUtDQqLOTdCtRAYSGlpRGZxMC/Orh7V6arYaOFIDQeGg0dPsy7CU7uD7XSaOjoUd6tQA0cPfogAk1j4F8dHD4s0w8BxglBaDwuXcJJMkpJwUEnqauqMoUdwerpxg26dIl3E1BTCELjYTIbLdXBww/XZWWUkcG1FXiS9HQqK3vwv+W8KpLzC9bYIAiNRGWlrANAeIQNZ18kTrivsmwPjRJRRoYc7x4xTghCI5GTYyJb2NTH/eXFuXNUWsq7FXiE0lI6d45I3mvB+yoqsL+0sUAQGonMTN4dSMD95YVaTWfO8G4FHuHMmQeTReW8FnwIL1sjgSA0BhoNnT3LuwkpwQdtycKPRujsWayMjQKC0Bjk5/+7VzteV0R07hy+D1Kk0Tw4LipzD385792T0cxxY4YgNAbCfQflfMRJ+P5SUMC1FdCmoODfT2xyJnyRymHTUOOHIDQGV67w7kAahO8vly/z6wMeAT8UMbx4jQGC0Bhcu8a7A+m5fp13ByCCH4oYXrzGAEEoeWo13brFuwnpwakXCcIPRezWrQeX0YKEIQglr7AQLyQt8OFAgvBDEVOrMSle+hCEkifP3QefqKgInw+kRa2moiLeTUgSXsKShyCUPLy5aKXRUHEx7yZAoLgY97Roh5ew5JnzbgCepKSEdwf/ulNWViKd8Ynnz5OLC+8m4H/y86
WzWaathUVDa2veXfyPlF7CoBWCUPIeDvKXgCn//POTdGZer1rFuwOQqImdOv343HO8u/gfKb2EQSscGpU8CazADuXmpuLKeDA2qdevH8rN5d2FJF7C8HhYEUqeSsX3+StVqkmbN5+5dWtCp05l2BQXjEGFSjV1+/ZPjh1r4+h4+t13rcy5vtHxfgnDEyEIJY/3TLWPjxzJunmTiH5MTbVUKvk2A1ATm7OyyquqiCinoOCTY8fCgoN5dsP7JQxPhEOjksc1e27cu7do//6Hf6zAZ1swBuWCQxcL9+27wXcCKj4+Sh5WhJJnYcHxyWfHx98Wnep3tLa25nus6T4bG7zFSIhKJYUNk8uqqpjf2NtlZXPi4z8fPZpXS3xfwlATEng7g8ezseH1zCnXr3+TnMwUB7RpE//aa1z6Yf3nP7h9QkLy8uizz3g3QUQ09Mcfd1Xfv/OrEyfeCgwMbNaMT0P8XsJQQzg0KnkNGvB65pDYWFX16S1KM7PoYcN49cOyteXdAQjY2fHu4IHoYcPMzaq9s6k1mqmxsRpe9/vzewlDDSEIJc/BgcvT/pGWtu/CBab4VmCgv5sbj3YE7r+dKZV4f5EWW9sHR6p5z5fxdXGZ3KULU0y4dGl9ejqXfni9hKHmEISS5+ho+OcsraycvmsX24i19YIBAwzfDOv+NXiOjrgYT1oUige/qxL4uSweNMhZdEBy2s6dfOYi8XgJQ60gCCWvYUMy+JUpKw8duiCaFBzZv39j6RyNbNSIdwcgIpkfirONzZy+fZli7p07qw4dMnQr5ubUsKGhnxRqCUEoeQqFgS8JuVJUtCIhgSl6NW78XteuhmzjCXCZjARJ6YfyfvfuvqJ+lh48eOnOHYP24eIihSUyPB6C0Bg0bWrIZ5seF3evooIprh42zEJS9yrwugIQHkNKPxRzM7OY4cOZYmll5ezduw3ah2FfvFA3CEJj0Ly5wZ4qMTf3F9FY7ac8PEZ06GCwHmrEgN8TqCmJ/VAGt207ysODKf588uTBS5cM14TEviegFYLQGLRqZZjnUWs0IbGxzDV/Fkpl1NChhmmgppycyN6edxMgYm9PTk68m6gmZvhwZtCohmjq9u1qg13aaqgXL9QHgtAYODkZ5v3l+5SUo1euMMUPunf3bNzYAM9eC23b8u4AHkFiP5r2zs7ic9snrl37ITXVEE9vqFcu1BOC0Ejo/8hkcUXF7Ph4pujSoIH46jv+pHacFh6S3o8msn9/N9HN/jPi4orKy/X+3NL7boBWCEIj4emp72dYvH//NdEO44sGDnSUzk7f91laUrt2vJuAR2jXjiwteTdRjYOV1XzR/a95xcXLDh7U+3Pr/2ULOoEgNBJt2uh1kMq5wsKYw4eZYoCb26TOnfX3pHXk4WH4GyuhpszNSXR9CneTu3QJEl3RuurQoTO3bunxWRs0oDZt9Pj4oDsIQiOhUFDHjvp7+Gk7dog33Y0ZPlxpJr3fEH9/3h3AY0nvB2SmUMQMH87czVehUk2Pi9Pjs3bsiDsIjYX03ubgUUTjE3Ul/vz5TZmZTPFFX99+rVvr6RnrrmFDHBeVunbtJDhLpVfLlmN9fZnixoyMndX3qdAlvb1gQecQhMajSRPSQzKp1OqQ2FimaGNhsXzIEJ0/Vx0Jr3QPCsKnbKlTKCgo6N8/8p7B/dCqoUNtRVsDhsbGVlXfYkU3WremJk10/7CgHwhCo9Kjh84f8rPjx0/l5THFD3v2bC2dScEPk8/CggIDubYCNRMY+O9utJL54NKiYcNpPXsyxfQbN75IStL9k+nhpQr6gyA0Kh4euh3nWFhaOm/vXqbo7uAQ0auXDp+lXoTricBA7HFqHGxsqn1kkcyicGbv3i1Fh23nxsffKinR5dO4uEjwiiF4DAShUVEoqH9/HT5e5N69N0VvASuGDGkgnSvghctB6cQzPFGvXhJcFNpYWCwZNIgpFpSWLti3T5dP07+/dP6ToSYQhMbGy4vc3XXySBk3bn
x2/DhTDG7RYryfn04eX8d69JDOHujwZHZ20jw8+HLHjr1btmSKnx47Jj5BUEfu7uTlpZuHAkNBEBobhYKGDdPJI4Xu2FGpUgkrD64yl+CHWTs76t2bdxNQS717S/Czi0KhWDNihFn1X/IqtTp0xw7dPMGwYVgOGh0EoRFq0aL+t2ptycrakZPDFF8LCOimo+Wmjg0dKrV5JfBklpYknWuPBbo0bfqq6BW0+9y5rVlZ9X1of39q0aK+DwIGhyA0TkOHUj02i69QqT7cuZMp2ltZLR44sH5t6Ue7dnodJgB61KmTNO/7XDZ4sIOVFVMM3bGjXDRWohZsbUlq+7RAzSAIjZOtLY0aVeevjjl8OFs0XGp2nz5NJbi3kZUVjR7Nuwmoh9GjSRQ53Lna2c3s04cpni0o+PjIkbo/6KhR9fl4ChwhCI2Wjw8FBNTh6/Lv3Vty4ABTbOvkNFWSlzbQU09JcEwJ1ELDhvTUU7yb0CIsOLhDo0ZMcaG20fM1EhBAPj46aAt4QBAas5Ej6zC9YmZc3J2yMqa4atgwawlOsu7ShaR5CSvUip+fBOeNWSqVywcPZop3y8vn7tlT68dq0oRGjtRNW8ADgtCYWVjQuHFUm22Skq9d+y4lhSkObNPmGQle8O3uTiNG8G4CdGTECF3d9qNDz3p7DxWdwvw2OfmYaHvqx7G2pnHjSDS8DYwIgtDINWpEL7xANd4jIiQ2Vl19zIfSzCxm+HA9dFY/DRvSSy9huyXTYW5OL70kwaPc0cOHm1d/+ag1mpDYWE0Np+GYmdELL5DoECsYFwSh8WvblsaMqck//PXUqf0XLzLFd4KCOrq66qGterCxoQkTJHgLGtSLnR1NmCC1IXk+TZq8JRpgeyg39/e0tBp9/Zgx1Lat7tsCw0IQmgR/f3rSqq60snLm7t1M0cnGZp5OZ7bpgJUVTZiAyf2mqUkTmjBBaheRLhw4sJHoas8Pd+68V1HxhK8cPlyCmy9CHSAITUX37o+fOLM8IeHi7dtMcX7//o0ldcG3lRVNnCjBk0mgM+7uNHGipLLQ2cbmo379mOLloqKoQ4ce92XDhlH37npsCwwIQWhCevSg0aO1jnfS+qr2btLkHeG+cbw8PBnToAG9/jo1b861G9C/5s3ptdeoQYMHf5TA3hTvdu0qPkGg9bMjEZFCQaNHS3OSKtQNgtC0dOlCL7307zSy/73FaD3OEz1smIVSacjutLuf3I0b05tvkpsb727AIJo2pTffpMaNiSSxN4W5mVm06IBKtbMJD9Pa0pJeekmCd4NAfSAITY6HB02aRPe31VUo6P6Z/9OnmX81xtNzWPv2Bm/uEdq3pzffJCcn3n2AATk50ZtvkmR+CQe1bTva05Mp/nt92f20dnSkSZOw16DpQRCaIldXevtt8vSkh9eCV/97S6VypUSGIt7fYfHll2t1NySYCGtrevll6ezeFz1smJXojp1/7zjy9KS33yapXWINuoAgNFH3b/IdNerbkyfFdweH9OjhIYU7n5yc6I03qF8/ibwPAgcKBfXrR2+8IYXjAe2cnT8QXf+SfO3ad6dO0ahRtR1eAUZEUdP7RsEI3b1717NDh2vVdxx1adAg+/33G/J9SSsU1L07DRyIeRzwQGUlxcfTkSN8r525W17uuW4dM27UpUmT7DNnGkpvGgDoClaEpmzhwoXXRPtuLx08mHMKtmxJb71Fw4YhBeFfFhY0bBi99RaJto83JHsrq4UDBjDF/Bs3lixZwqUfMAysCE3W2bNnfX19y8vLhcXO7u7HJ08243Uo0sWFBg4k0SUJANVkZVF8POXnc3lytUbT45tvjuXmCouWlpanTp3ywGUyJgpBaLLGjBmzdetWprgvLq6vrS0dPkxFRQbtpnlz6tWLPD1rcjrw119//fvvvw3QFBjeqFGjxo8f/+R/p9FQVhYlJNDly/pvSsDBgXr0OFRR0XvAAOa9ccyYMZs3bzZoM2AoGGpsmnbv3i1OwfHjx/
cdNIiIqHt3ysqipCQ6d06/p2QsLcnHh4KCajUsJikp6eeff9ZfU8CRm5tbjYJQoSAvL/LyoitX6PhxSk+nJw48qw+Fgtq2pcBA8vQkM7OeROPGjfvtt9+E/2TLli07duwY9tj5TWCkEIQmqKqqKjQ0lCna2NgsXbr0wR/MzMjbm7y96e5dSkujjAzKzdVlIlpYUPv25ONDnp44EQj14u5O7u40ciRlZVF6OuXkUGWlzh5coaAWLcjbm3x9yd5e+DcrV67cunXrvXv3hMXQ0NDU1FQL/EqbHAShCfr0009PnTrFFKdPn96qVSv2n9rbU48e1KMHlZbS+fN04QLl5lJ+PqnVtX5WS0tq2pRataLWrallS5LCzBowGRYW5OdHfn6kUtGlS3ThAl28SNeu1WWZaGZGLi7UogW1bk1t2jxqN4zmzZuHh4fPnz9fWMzIyPjss8/ef//9uv1HgGThHKGpKSgo8PDwuHXrlrDYvHnzzMzMBg+nOz5eVRXduEG3blFBAd25Q8XFVFpKFRX/vulYW5OlJdnakr09NWxIzs7k4kJOTrq6HTA8PHzVqlU6eSiQmmnTpkVFRenmsTQaKiyk/PwHv6h371JJCVVUUFnZg39gaUmWlmRjQ3Z2D35RGzWiJk1quM9laWmpt7f3xeo7lzk5OWVnZze+PxwOTAVWhKZm7ty5TAoS0cqVK2uagkRkbk5Nm1LTpjruDEC3FApydiZnZz09/P2zCS+//LKwWFhYOG/evHXr1unpSYELrAhNSnp6ur+/f1VVlbDYs2fPgwcPKoxnesuJEyfS09N5dwF64ePj08WoJlb369dv//79wopSqUxOTu7YsSOvlkDnEIQmZdiwYTt37hRWzMzMDh8+3LVrV14tARi15OTkoKAgdfWz5gMHDtwt2uYajBcmy5iOjRs3MilIRJMmTUIKAtRZ586d33jjDaYYHx+/adMmHu2AXmBFaCIqKir8/PzOnDkjLNrb22dlZTXF2T6AesjPz/fw8Lhz546w2LZt27S0NGuM4TYJWBGaiFWrVjEpSERz585FCgLUk4uLy6xZs5jiuXPnYmJieLQDuocVoSnIy8vz8PAoqj41rV27dmlpaVZWVry6AjAZFRUVHTt2zM7OFhbt7Oyys7PxWdMEYEVoCmbMmFEkmh0aHR2NFATQCUtLy5UrVzLF4uLi2bNnc+kHdAsrQqN34sSJrl27Mle1DRo0KC4ujldLACZpxIgRsbGxwoqZmVliYmK3bt14tQQ6gSA0bhqNpm/fvgcPHhQWzc3Nk5OT/fz8eHUFYJIyMjL8/f0rqw87DQ4OTkhIMKL7dEEMh0aN288//8ykIBG9++67SEEAnfP29n7nnXeYYmJi4i+//MKlH9AVrAiNWGlpqZeX16VLl4RFZ2fn7OzsRo0a8eoKwIQVFhZ6eHjcvHlTWHR3d8/KyqrFFEOQGKwIjdjSpUuZFCSihQsXIgUB9MTJyWnevHlM8cqVKytWrODRDugGVoTGKjc318vLq6SkRFj08fFJTU01r9lwfQCoA5VK1aVLl5MnTwqLNjY26enprVu35tQU1AtWhMZq2rRpTAoSUXR0NFIQQK+USmV0dDRTLC0tnT59Opd+oP6wIjRKCQkJffr0YX52zz777F9//cWrJQBZefbZZ8XjRvfu3duvXz8e7UC9IAiNj1qt7tatW1JSkrBoaWl5+vTpDh068OoKQFbOnTvn6+tb9nATYCIiCggIOH78uFKp5NUV1A0OjRqfr776iklBIpo2bRpSEMBg2rZtGxISwhRTUlK++eYbHu1AvWBFaGSKioo8PT2vX78uLLq6umZnZzs4OPDqCkCGiouLPT09r169Kiy6uLhkZWU5OjpyagrqAitCIzN//nwmBYlo+fLlSEEAA7Ozs1u8eDFTzM/PX7RoEZd+oM6wIjQmOTk5fn5+5eXlwmKXLl2OHTtmZobPNACGplarg4ODjx49KixaWFicOnXK09OTV1dQW3j3NC
ZTp05lUlChUKxZswYpCMCFmZlZTEwMM2i0srJy2rRpvFqCOsAbqNGIi4v7559/mOKECRN69+7NpR8AIKLg4OCXX36ZKf7999/bt2/n0g/UAQ6NGoeqqqqAgIC0tDRh0dbWNiMjo2XLlry6AgAiunLliqen571794RFLy+vkydPWlhY8OoKag4rQuOwdu1aJgWJaObMmUhBAO7c3d3FY2UyMzM/+eQTLv1AbWFFaAQKCgo6dOhQUFAgLLZo0SIzM9PW1pZXVwDwUGlpqY+Pz4ULF4RFR0fH7OzsJk2acGoKagorQiMwa9YsJgWJaNWqVUhBAImwsbFZvnw5U7x9+/ZHH33EpR+oFawIpS4tLS0gIKCqqkpY7NWr14EDB7ApNoCk9O/ff9++fcKKUqlMSkry9/fn1RLUBFaEUhcaGsqkoJmZ2Zo1a5CCAFITExPDDBpVqVShoaG8+oEaQhBK2vr163ft2sUUJ0+eHBgYyKUfAHiMgICASZMmMcU9e/Zs2LCBSz9QQzg0Kl3l5eV+fn45OTnCooODQ1ZWlpubG6+uAOAx8vPzPT09b9++LSy2adMmPT3d2tqaU1PwBFgRSldUVBSTgkQUGRmJFASQLBcXlzlz5jDF8+fPr169mks/UBNYEUrUlStXvLy8iouLhcX27dufPn3aysqKV1cA8ESVlZUdO3bMysoSFm1tbTMzM1u0aMGrK3gMrAglaubMmUwKEtGaNWuQggASZ2FhsWrVKqZYUlIiXimCRGBFKEWHDx/u2bMn86MZPHiw+MIZAJCmkSNHMuNGFQrF/v37MRxYghCEkqPRaIKDg48cOSIsmpubp6Sk+Pr68uoKAGolMzOzU6dOlZWVwmJgYODRo0exXYzU4OchOT/++COTgkT0/vvvIwUBjIiXl9d7773HFJOSkn7++Wcu/cBjYEUoLcXFxZ6enlevXhUWnZ2dz5w54+zszKsrAKiD27dve3h43LhxQ1h0c3PLyspycHDg1RWIYUUoLUuXLmVSkIiWLFmCFAQwOo6OjgsWLGCK169fX7FiBZd+4FGwIpSQ8+fP+/j4lJWVCYu+vr4pKSnm5ua8ugKAOlOpVIGBgampqcKilZXV6dOn27dvz6srYGBFKCHh4eFMChJRdHQ0UhDASCmVyujoaKZYXl4+Y8YMLv2AVghCqdizZ89ff/3FFMeOHTtkyBAu/QCATgwYMOD5559nihs2bMDdUNKBQ6OSoFKpunTpcvLkSWHR2to6PT29TZs2vLoCAJ24dOmSt7d3SUmJsIizHtKBFaEkfPHFF0wKElF4eDhSEMAEtGzZUrwZU1pa2ldffcWlH2BgRchfYWGhh4fHzZs3hUV3d/fMzEw7OzteXQGADpWUlHh7e1+6dElYdHZ2zs7ObtSoEa+u4D6sCPmbP38+k4JEtGzZMqQggMmwtbVdtGgRUywoKBAXwfCwIuQsIyPD39+fmcPUo0ePQ4cOYQ96AFOi0Wj69OmTkJAgLJqbmycnJ/v5+fHqCggrQu7CwsKYFFQoFDExMUhBABOjUCjWrFnDDBqtqqoSnz4EA0MQ8rRt27bY2Fim+Oqrr3bv3p1LPwCgV4GBgRMnTmSKcXFxf//9N5d+4D4cGuWmoqKiU6dOzO6ddnZ2WVlZzZo149UVAOhVXl6eh4dHUVGRsIg9t/nCipCbjz/+mElBIpo1axZSEMCEubq6Tp8+nSnm5OSsW7eOSz9AWBHykp+f7+HhcefOHWGxTZs26enp1tbWvLoCAAMoLy/38/PLyckRFu3t7bOzs93c3Hh1JWdYEfIxe/ZsJgWJKCoqCikIYPKsrKyWLVvGFO/evfvRRx9x6QewIuQgJSUlKChIpVIJiwMGDIiPj+fVEgAY2NChQ5lxo2ZmZkeOHAkKCuLVkmwhCDno37//vn37hBWlUnnixIlOnTrxagkADCwtLS0gIKCqqkpY7NWr14EDB3D3lIHh0Kih/f7770wKEtFbb72FFASQFV9f38mTJz
PFhISEP//8k0s/coYVoUGVlpb6+PhcuHBBWHRycsrOzm7cuDGnpgCAj4KCAg8Pj1u3bgmLLVq0yMzMtLW15dWVDGFFaFArV65kUpCIIiMjkYIAMuTs7DxnzhymmJubu2rVKi79yBZWhIZz5coVT0/Pe/fuCYve3t6pqakWFha8ugIAjqqqqjp37nz69Glh0cbGJjMzs2XLlry6khusCA0nIiKCSUEiWr16NVIQQLbMzc2jo6OZYmlp6axZs7j0I09YERpIYmJir169mO/2U089tXXrVl4tAYBEPPXUU8y4UYVCsW/fvj59+vBqSVYQhIagVquDg4OPHj0qLFpaWp48edLT05NXVwAgETk5OX5+fuXl5cJily5djh07xuxWAfqAb7EhfP/990wKEtEHH3yAFAQAImrfvv2UKVOY4okTJ3744Qcu/cgNVoR6d/fuXU9Pz2vXrgmLLi4u2dnZDRs25NUVAEjK3bt3PTw8rl+/Liy6urpmZ2c7ODjw6komsCLUu8WLFzMpeL+IFASAh+zt7RcsWMAU8/Lyli5dyqUfWcGKUL/OnTvn4+PDHPoPCAg4fvy4Uqnk1RUASJBare7evfvx48eFRUtLy9OnT3fo0IFXV3KAFaF+hYWFMSlIRDExMUhBAGCYmZnFxMQwg0YrKioiIiJ4tSQTCEI9io+P37x5M1McN25cv379uPQDABLXq1evF154gSlu2rRp586dXPqRCRwa1ReVStW5c+dTp04JizY2Nunp6a1bt+bUFABIXW5urpeXV0lJibDo4+OTmppqbm7OqyvThhWhvvz3v/9lUpCIIiIikIIA8BgtWrQIDw9niunp6Z9//jmXfuQAK0K9KCws9PDwuHnzprDo7u6elZXVoEEDXl0BgFEoLS318vK6dOmSsOjk5HTmzJlGjRrx6sqEYUWoFx999BGTgkS0YsUKpCAAPJGNjc2SJUuYYmFh4fz587n0Y/KwItS9jIwMf3//yspKYTE4ODghIQEbTwNATWg0mn79+h04cEBYVCqVycnJHTt25NWVqcKKUPdCQ0OZFNR6VTQAwKMoFIqYmBhm0KhKpQoNDeXVkglDEOrY5s2bd+zYwRRff/31bt26cekHAIxUly5dXnvtNaa4e/fuLVu2cOnHhOHQqC5VVFR07NgxOztbWLS3t8/KymratCmvrgDASOXl5Xl4eBQVFQmL7dq1S0tLs7Ky4tWV6cGKUJeio6OZFCSi2bNnIwUBoA5cXV1nzpzJFM+ePbtmzRou/ZgqrAh1Jj8/38PD486dO8Ji27Zt09PT8dkNAOqmoqLCz8/vzJkzwiKOM+kWVoQ6M3PmTCYFiWj16tVIQQCoM0tLyxUrVjDFu3fvzp07l0s/JgkrQt1ITk4OCgpSq9XC4sCBA3fv3s2rJQAwGcOHD2euwjMzMzt8+HDXrl15tWRKEIQ6gDt+AECv0tPT/f39q6qqhEXcnawrODSqA7/++iuTgkT0n//8BykIADrh4+Pz9ttvM8XExMTffvuNSz8mBivC+iotLfX29r548aKw6OTklJ2d3bhxY15dAYCJKSws7NChw61bt4TF5s2bZ2ZmYnZjPWFFWF/Lly9nUpCIFixYgBQEAB1ycnKKjIxkipcvX165ciWXfkwJVoT1cvnyZS8vr3v37gmLPj4+KSkpFhYWvLoCAJP0qF1OMzIyWrVqxasrE4AVYb2Eh4czKUhEq1evRgoCgM4plcqYmBimWFpaOmPGDB7tmA6sCOvu0KFDvXv3Zr6BTz/99KZNmzh1BACm7+mnnxaPG923b1/fvn259GMCEIR1pFaru3fvfvz4cWHR0tLy1KlTHh4evLoCAJN39uxZX1/f8vJyYbFz587Hjx9ndquAGsJ3rY6++eYbJgWJKDQ0FCkIAHrVrl27qVOnMsXk5ORvv/2WSz8mACvCurh7966np+e1a9eERVdX16ysrIYNG/LqCgBkQutbkIuLS3Z2Nt6C6gArwrpYsGAB8ytIRE
uXLsWvIAAYgL29/aJFi5hifn7+4sWLufRj7LAirDUcoAcA7tRqdY8ePY4dOyYs4jKFusEbd62FhIQwKahQKNasWYMUBACDMTMzW7NmDTNotKKiIjw8nFdLxgvv3bWze/fubdu2McXx48f36dOHSz8AIFvBwcEvvfQSU9y6dWtsbCyXfowXDo3WQlVVVefOnU+fPi0sYqwDAPCidbiVt7d3amoqxnrUHFaEtfDJJ58wKUhEM2bMQAoCABfNmzf/8MMPmWJGRsZ///tfLv0YKawIa6qgoMDDwwOj3wFAUrABTv1hRVhTc+bMYVKQiKKiopCCAMCRjY3NsmXLmGJhYaF4qwp4FKwIa0Tr9tA9e/Y8ePAgtocGAO769eu3f/9+YUWpVJ44caJTp068WjIiWBHWSGhoKJOCWq9dBgDgIiYmRqlUCisqlSo0NJRXP8YFQfhkf/31186dO5nipEmTgoKCuPQDAMDo3Lnz66+/zhTj4+M3btzIox0jg0OjT1BRUeHn53fmzBlh0d7ePisrq2nTpry6AgBg5Ofne3h43LlzR1hs27ZtWlqatbU1r66MgmmtCKuqqKiI8vPp2rV//6+ggEpK6vyQq1atYlKQiD766COkIABIiouLy+zZs5niuXPnxHv51kJJCRUUVHtHzc+noiKqfqrI2BnzirC4mK5coevXKT+fCgro9m0qK3vkP1YqycGBHB2pcWNydaVmzcjVlZ40FC0vL8/Dw6OoqEhYbNeuXVpampWVlU7+IwAAdKWioqJjx47Z2dnCop2dXVZWVrNmzZ7wxWo15eXR1auUl0c3b1JhId29SyrVI/+9tTU5OpKzM7m4kJsbubuTnZ0u/iM4MLYgrKigs2cpJ4fOn6fCwno9lIUFtWxJbduShwc94m6b119//fvvv2eKW7dufeqpp+r11AAA+rF169YxY8Ywxddff/2RuxXevEnZ2XTuHF26RJWV9XpuJydq04bat6d27cjSsl4PZVhGEoQqFWVl0alTlJOjlyV548bk50edOpGT08NaUlJSt27d1Gq18B8OGjQoLi5O9w0AAOjIiBEjmHGjCoXi8OHD3bp1+7dUWEgnT9Lp03Tzpu47MDendu2oUyfy9KTq17JKk+SD8M4dOnqUUlLqc56vFtq2pW7dyMNDQ9S3b9+DBw8K/9Lc3Dw5OdnPz88QnQAA1ElGRoa/v39l9eVdjx49Dh06pCCi7Gw6epTOnTNEKzY21Lkzde1Kjo6GeLq6knAQ3rhBBw5QWhpVX5MZgrPzT0VFr8yZw5SnTp1ar9POAAAGMXXq1I8//pgp/rR48QQHBxJNyNI7MzPy9aU+fahJE0M/dc1IMgiLiig+nk6eJE69lVRWeq9bd6n6VcjOzs7Z2dmNGjXi0hIAQM0VFhZ6eHjcrH7Y093BIWvKlAa8zt4pFNSpEw0cSA4OfBp4NIndPqFS0b59tG4dpabySkEiWnrgAJOCRLRo0SKkIAAYBScnp/nz5zPFK0VFyxMSuPRDRKTRUGoqrVtH+/Y97mJUHqS0IszNpc2bOSzbmS7u3PFat66k+uF1n1atUrOyzHHLBAAYCVVFRRdv75PVzwVam5tnTJnSmvsZu8aNacwYatGCcxv/I40VoVpNu3fTt99yT0EiCtuxo0R0DXF0377m335LN25waQkAoHZu3FB+8010nz5MuayqKmLXLi4dVXPzJn37Le3ezeESEG0kEIRFRfTtt3TwIMdjoQ8lXLq0IT2dKT7n7T20XTvKy6MvvqCUFB59AQDUWEoKffEF5eUNbNPmWW9v5i//TEvbd+ECj7aq02jo4EH67juqPrGEC95BmJtLX3xBly9zboOIiNQazdTYWCaNLZXKZYMHP/hDVRVt3kz//CORTzEAANWo1fTPP7R588P7raOGDrU2N2f+VUhsrEoib2L3IyA3l28XXIMwLY2+/57u3fu3wnVR+GVSUtLVq0wxvGfPDsw1MseO0a+/UkWF4ToDAHiiigr65R
c6dkxYa+vkFBoczPzDlOvXv05ONmBnIsK3+nv36Pvv6fRpft1wDMJjx2jDBvbaIX7b+xWVl8/bu5cputrZTe/dW8u/zsmh77+n0lIDNAYA8GSlpfT993T2rPhvZvXp08zeninOiY+//ZjhzPrGvNWrVPTXX3T0KKdueAVhYiL9848UTgo+NG/v3uvFxUxx+eDBDsIrRe83fP//X71K331noHk3AACPUVJC339P9w9oCd+miIjIztJy8aBBzFfcuHdv4b59huvwiTQa2r6dEhO5PDmPIDx6lET73PKVU1DwiejDSGCzZq/4+1cr3f8U8/CzTH4+/fjj47a8AADQt7Iy+vFHyst78EfmbYqIiF7z9+/m7s583dqjR7P0MWi0PnbuZA7tGobBgzAtjapPg5WCqdu3V1Q/SKsgihk+3OyJh2qvX6fffjOxrbkAwGhUVdGvv9L164//VwqFYs2IEczbWaVKNU1iaxIiou3bKS3NwM9p2CDMzaWNGyV1RJSIdp09+49o692J/v69W7Z8wlfe/w+5eJE2b5bafxQAmD6NhjZvpkuXHvzvx+rRvPmETp2Y4t/Z2dtF736caTS0caOBryM1YBAWFdHvv0ttsk6VWh26YwdTtLWwWDRw4JO/+OF68fRp4ji4CADkKSHh34sta3Cl4bLBg8WDRsN27KiU2NsyqVT0+++GvL/QUEGoVtOff1a7U0IaPj5yJC0/nynO7NOnZcOGtXug+Hg6f15nbQEAPN758xQfX6uvcHdwmN6rF1PMvHlzHb/LNR/p3j3680+D3bFtqCDcs0cid80Lab1uqkXDhmGi226eTKOhv/7CRaQAYAglJfTXX3U4I/Nhr17iQaPz9u7NE10zz9/ly7Rnj2GeyiBBmJsrzSOHc/fsEd9Js3rYMFsLi7o8XHExbdumg7YAAB5v2zaqU3RZm5uvGDKEKWq9i1oSEhIMc7JQ/0GoUtGWLf9+cpHMRSWp169/deIEU+zdsuXzotF8NXL/vysjgzIy6t0aAMCjPXyfqdPb6Qu+vv1bt2aKX2ibq8WNMC82bzbAlSX6D8KDB0l4qwq/2TEM8bQ9M4UiZvhwRd06fPhV27dj+hoA6EtFBW3f/uB/1/XtNGb4cKVZtTd/tUYTEhsrlV35hP9dt27RwYP6fkI9B2FRkTQPiv6ZlrZXNH/9/wIDA5s1q+9D371L+/fX90EAALTav5/u3q3nY/i7ub3ZuTNTPHjp0gZpHtBKSCDRTum6pecgjI8n0d5+3GndkcvBympe//66eYIjR/T9YwMAObpzh44c0ckjLRo40NHamilq3Y2Vv8pKfV81o88gvHGDTp7U4+PXVdShQxdu32aK8/r3d7Ozq9fjPjyqUFWFRSEA6N7+/f/OsarfYcwmDRrM7dePKebeuRPNadrnE5w8SfqcBqfPIDxwQDqXxjx0pahoueiIc3tn5/e6davvQwuPa6ekYFEIALp05061jcHrfb3F+926eTZuzBSXHDhwSYLvXRqNXlcXegvC27cNPy+uJmbExRWLLmb5eMQIS6VSl0+jVtPhw7p8QACQucRE3d5gbqFUrh42jCmWVFbOqeV9+gaSlqa/1YXegvDYMQlu43748uWfRUdrh7RrN6JDB90/WXIyLh8FAN2oqCA9bKU7skMH8bvfT6mpB+/PL5UUtVp/GxbqJwhVKn38zOpJo9FM3b6dOVZroVSuHTFCt0/z4H+Ul0vzFCkAGJ+TJ//9YK3TU07i42EaopDYWLX0TmxRSoqe7inUTxBmZUlw9/YfUlOPXrnCFLUeJa8X5kwhAED96fTsoJDWKySSrl79SYKf40tKKCtLHw+snyA8dUovD1sPxRUVs3bvZopar5vSpStXqKBAj48PAHJQUECiD/E6pPWa+Yhdu4rKy/X3pHWkn3jWQxBWVFBOju4ftn6WHDhwVXQXqtY7aXQsPV2/jw8AJk/PbyNa76LOKy4WX2DP39mz+rj2Qg9BePas1HZsP1dYKL
45RutsBd3Tz0IeAGRE/28jWudqRR06dObWLX0/de1UVdHZszp/VD0EofSWg+E7d5aJslk8bU8vrlyR4OlSADAapaV6PS5634NJy9WLFSrVjLg4fT91rekhYvSQBBLbn3bP+fMbRQP0tM5f1wuNhi5eNMQTAYBJunjRMJNJerds+byPD1P8KyNjlx5WYPWih4jRdRAWF1NhoY4fsx5UanVIbCxT1Lojlx5J8I4cADAWBnwD0boba+iOHVWSuim8sLBuezE+hq6DUP9L+Fr5PCnpZF4eUwzv2VO8R7MeSex7AgDGxIBvIC0aNgwNDmaKafn5XyYlGayHGtH190TXQXj9uo4fsB4KS0sjRTPL3R0cpvfubdA+rl+X4MxVADACGo2B31Rn9enTsmFDpjgnPv5WSYkh23gCXX9PdB2E+fk6fsB6mLd3703RD2/Z4MF2lpYG7aOigoqKDPqMAGAaiooMPKnR1sJi0cCBTLGgtHShpHbU0XXQ6DoIJXP/eMaNG/89fpwp9mjefELHjhy6kdolyABgFHi8dUzs1Kl3y5ZM8ZOjR09LZ52j66DRdRCK9vnjJWzHjsrqU+kURGtGjFDodDpRTUlwWxMAkD4ebx0KhSJm+HCz6m+VVWp1qOjCQ250HTQ6DcKqKiore/C/uZ4V25qVFSu61+S1gIBu7u5c+iHRUBsAgCfj9NYR2KzZK/7+TDHu3Llt2dlc+nngYayUlel2bItOg1B4Qo7LwouIiCpUqvCdO5minaXl4kGDuPRDVP07AwBQQ/zeOpYPHuxgZcUUQ2JjyzkODhPGik6/MzoNwofLQa7WHD6cLTqwPqtPn2b29lz6IZLKdwYAjAy/tw5XOzvxBfZnCwrW6m1TwNrR6XdGp0FYWanLR6srBysrZpR2Wycn8c0xBiWN7wwAGBmubx1hwcFtnZyEFUdra3sDX3X/KDr9zug0CKUxfeDtoKCzU6e+6u//cBU9o3dva3Nznj1J4zsDAEaG61uHtbn5zP8tChVEoz09s95//+2gII4t/Uun3xmdBiG/84JCt8vKViQk/Hb69MPLdQ5wH3Imje8MABgZ3m8d+/43KllDtP3Mmbnx8eKbs/nQ6XdGp0HId9X1P//Ztm35wYMVgnsnfkpNPXz5MseWJPKdAQAjw/Wt4/Dlyz8LNsKtUqu/SEp6/59/OLb0L51+Z3QahNI4drxw4EBLpVJY0RBN+ecfNcc7OqTxnQEAI8PvrUOj0Uzdvp1507RQKsVb+PKh0++MToPQxkaXj1ZX7Z2d3+vWjSkmXb36k+CjjaFJ4zsDAEaG31vHD6mpR0Wzrd/v1s2zcWMu/bB0+p3RaRBaW1P1pRgv8/r3d7OzY4oRu3YVlZdz6YcaNODzvABg1Di9dRRXVMzavZspNmnQYG6/flz6YSmVVP3WgHrS9cUyDg66fMC6crCyEq/f84qLlx88yKMdItE0d8aePXv69et38+ZNw7QDAHwVFBT06tVr+/btT/h3T3rr0JMlBw5cFQ21WTRwoKNO46fuHBwkfLEMEVW/6YSj/wsMDGzWjClGHTp0hsv860d/W3Jzc1999dWBAwfu378/MjLSkE0BAC9z5sw5dOjQyJEjR48effYxW8DzeEc9V1gYnZjIFP3d3N7s3NnwzWin62+LroOwUSMdP2BdmSkUMcOHM58ZKlSqGXFxhm5FodD6bbl37968efM8PDx+/PHH+5XPP//8JMcTmQBgEGlpaV9++eX9/71t2zZvb++pU6cWad2srVEjw99BEb5zZ5lojlrM8OFKM13nRZ3pOmh0/R/m5qbjB6yH3i1bPu/jwxT/ysjY9ZjPX/rg5EQWFuLymTNnFi1aVCYYFKRSqUJCQgzXGADwEBoaWiVImsrKynXr1mVrnWdtYWHgReGe8+c3ZmQwxRd8ffu3bm3INp5A10Gj6yBs2lTHD1g/q4cNsxWFUOiOHVWGnNcgOkJ7X0BAwBtvvMEU9+zZ89dff+m/JwDgY8OGDb
t27WKKb775ZtCjJrY84g1EH1RqdYhoryVrc/MVQ4YYrIca0XXQ6DoIXV21rn54adGwYZhoymhafv6XSUkGbKLFo/5m8eLFDUUnw8PDw8swpBvAFFVUVMyYMYMp2tvbL1iw4JFf8+g3EJ37PCnpZF4eU/ywV6/Wjo4G6+HJLCzI1VW3D6nrIDQzI9HWxnzN7NOnpShs5sTH3zLYoKBWrR71Ny4uLnPmzGGK58+fj46O1nNPAMBBVFRUjmir1MjISLfHHOt79BuIbhWWlkbu2cMU3R0cpvfqZZgGaqplS9L12Uo9nPxs21b3j1kPthYWiwYOZIoFpaUL9+83xNPb2ZGLy2P+/oMPPvD09GSKS5YsuXr1qj7bAgBDy8vLW758OVNs3779lClTHvdlLi4kuitaH+bt3SueI7ps8OAGUhuMpYeI0UMQenjo/jHrZ2KnTr1F69RPjh49nZ+v9+fu0OHxF31ZWlpGRUUxxeLi4lmzZumzLQAwtIiICPGloTExMVai/W+rUSioQwc9tkVERBk3bvz3+HGm2KN58wkdO+r7qWtNDxGjhyBs3JgkMoPnfxQKRczw4WbVA6lKrQ4VnRbWDeFQU2/vJ/7zp556avjw4Uzxhx9+OHLkiG77AgBekpKSfvrpJ6Y4ePDgUaNGPfmLvbz+/d/6mZkctmNHpWCjAiJSEK0ZMULBe/sLln7yRT/3hfj56eVh6yGwWbNX/P2ZYty5c9u0XrJcTw9/dWxsariKX716tUX1i4w0Gk1ISIiG46BwANARjUYzdepUdfWL1c3NzWt6NUC7dv+O1tRDMm3NyooVnbl8LSCgm7u7zp+rvvQTLvoJwk6d9PKw9bN88GAH0SGIkNjYctGtozrj51fD4ave3t7vvvsuUzx8+PDPP/+sh7YAwKB++umnhIQEpjhlyhS/Gr6tK5X6W11UqFThO3cyRTtLy8WDBunpGetFP+GinyB0cpLaJTNE5GpnN/1/uy0/dLagYO3Ro/p6yi5dav5vIyMjG4uW/DNmzCguLtZpTwBgUCUlJeKLw52dncXFx6nNm0mtrDl8OFs0eHJWnz7N7O319Ix117atnsYL6G1kjmgjJCkI79mzg2g2z4J9+67rI2xatqzV+AMnJyfxvURXrlwRX2YGAEZkyZIlly5dYoqLFy9uVKs5YW5u+rgzLf/evcUHDjDFtk5OoaLbryVBb7GityD08JDO3NGHLJXKZYMHM8W75eUfie6e0YHa/ya99dZbnUQL/6ioqPPnz+uoJwAwqEuXLolPBPr6+k6ePLnWj6WHcJq9e/cd0fiOqKFDrXW6/7tuNGqkv1sS9BaECgWJjkNKwXPe3kPbtWOKX584cVy39+25uJDo7sAnUiqVMTExTLGsrCwiIkI3XQGAYYWFhZWIbs6Ljo42r0PSeHo+/qbk2kq+du3blBSmOLBNm2drcK07B71762/+uD6niXfqRM7Oenz8uooePty8+mACtUYzdft2XV6i2b9/3X5mAwYMeO6555ji+vXr9+7dq4OuAMCADh48KB4d/Pzzzw+p2+hOhYJE26zWR0hsrKr6haxKM7No0a1ckuDsrNdrMPUZhGZmJMnrjnyaNPm/wECmeCg394+0NN08gbt7tft+aikqKspatPtlSEiIqvpdPgAgZWq1WnwHlJWV1bJly+r+oF5epKNbGn47fXr/xYtM8e3AwE66HuOpG4MG6XysmpCe95fy9q52glcyd8UtGjiwka0tUwzfubOkslIHjz58eH2W8G3atAkNDWWKqampX331Vf3aAgDD+eKLL5JEw/2nTZvWvn37uj+oQkG6WLGVVlaKd2Z1srGZP2BA/R9cN4Rh0bJlTSaT1Ieeg1ChoFGj/k1yyQwpcLaxmdu3L1O8XFQUdehQfR+6c2dq3ryejzFr1qxmor1XZs2aVVBQUM9HBgADuH379kcffcQUXV1dp0+fXt+Hbt6c6r1T/IqEhIu3bzPFef37NxYtD7h5GBZmZjRqlL
6zQ/87Dru4kNSGlxMR0XvduvmJzjwvO3hQ/PtRC3Z2pIuNu+zs7JYsWcIUCwoKFi1aVP8HBwB9mz9//o0bN5jiihUrHBwcdPDoQ4bUZwz35aKilaJP/N5NmvznURsi8tWrl24vEdJK/0FIRP366Xz7qPozNzOLER1kKK2snLV7d90fdPTofych1c+rr77avXt3prh27do0XZ3IBAD9yMzM/OSTT5hiYGDgxIkTdfMENjY0enSdvzpi1657FRVMcfWwYRY1G4NlUK6u1K+fAZ7HIEGoVNJzz1XbsFcaJwsHtW37lOjGlF9PnTogOodcI1276vA2F4VCERMTw0y8raqqEp8+BABJCQsLq6x+tcH9l7OZDi/38PCgrl3r8HWJubm/nTrFFEd7eg6vz5lLHRJGg4UFPf98DadU1pNBgpCIXFxo5Mh//yiZk4Uxw4dbVb+hR0M0NTZWXduobtqUhg7VZWdEPXr0mDBhAlPctWvXP//8o9snAgBd+fvvv7dv384UJ06c2Fvn91UPHUpNm9bqK9QazdTYWOatzVKpjNL1e1fdCaNh5Ehq0sQwT2uoICSigIB/P8JIY0VIRO2cnd8Xje1Jvnbte9F9po/ToAGNG0d6mMWwbNkyO9HJgKlTp5aXl+v8uQCgniorK6dNm8YUbW1t9XJ239ycxo2jBg1q/hXfpaQcu3KFKU7t0cNDOlPAHkZD164UEGCwpzVgEBLR8OF0f6qLZFaERPRRv35NReNlZ2qbPKSduTm99BI1bKj7zojc3d3Fl5nl5OR8+umn+ng6AKiPtWvXZmVlMcVZs2a11MOYUCKihg3ppZdq+BH8bnn5nPh4pujSoMHsPn300Fld3Y+Gdu10cpdIzRk2CM3M6MUXSXRjAF/2VlYLRHfP5BUXLz148MlfbGZGY8fW/36JxwgPD2/Tpg1TnDdv3vXr1/X3pABQWzdu3Fi4cCFTbNmypX7P6zdvTmPH1uRm80X791+7e5cpLhk0qKFofAdnzZrRiy/q9fZ5McMGIRFZWtLEif9eRCqNY6STOncOEsVzdGKieHeSahQKevrpOswUrRVra+sVK1YwxaKionnz5un1eQGgVubMmXNbdPPV6tWrbfV9c56nJz399OMPs50tKFhz5AhT7Ny06esGPPz4OA+DwNWVJk4kS0sDP7/Bg5CIbGzo1VcfbFEkjWOkZgrFmhEjmFYqVKqIXbse/TVm9MwzhtmCeOzYsf1FMwa//PJL8dwKAOAiJSXl66+/Zoq9e/cWjw7Wi06d6JlnHrOKCtuxQ7wDeczw4UrDLrwe6X4QuLnRq6/q6g60WuH0XbC1pdde08f2WnXWs0WLF0V7QG/OzNyRk6PlX5ub04svGiYF71u3bh0zrl6tVk+dOlWXg8IBoK7E04CVSuW6desUBvus36kTvfii1vOF8efPbxGduXzJz69vq1YG6axmWrak114jTqNt+H0csLamV14hUfZwFDV0aAPRkjxsx45KZtp1gwb02mv6PiLK8PX1ffPNN5liQkLChg0bDNkGAIj9+eef+/btY4r/93//5+/vb9A+PD3ptdeY60hVanVIbCzzD20sLMQ7s/Lk50evvEL8zlYqOC8pNBpKSKD4+AfHiDUavgdLI/fsWSD6hV47cuSUh7dYNG1K48bp6RrRxysoKOjQoQMzbrRFixaZmZl6PwkBAI9QVlbm7e194cIFYdHR0TE7O7uJoW6Dq+bOHfr9d7p27f6f1h458oHovsbI/v3n6XRHp1p7+FavUNDAgdSrF993ft4HiO/v3/vKKw9G5wm/FzwSekbv3q0cHZniR3v23Ly/tWbXrjRpEpcUJCJnZ+c5c+Ywxdzc3NWrV3PpBwCIaOXKlUwKElFkZCSfFCSihg1p0qT7N20XlpbOF32yb+7g8GHPnhwaE76l33+rt7OjV17R6467NcR7RfhQSQlt20YZGbz7oF9OnZogOt44pVevtd98o8MJanVTVVUVEBDAjBu1tbXNyM
jQ141KAPBoV65c8fT0vHfvnrDo5eV18uRJC+FQSS6ys6dMmvRJQgJT/uX558d37Milo2q8vempp3idFGTwXhE+ZGtLL75IL75IonvbiQy6Ohzv59dHdA75v4cPn5LAMBdzc/Po6GimWFJSMnv2bC79AMjc9OnTmRQkotWrV/NPQaL0qqovjh5lisEtWrxkyCsztL5129s/eLeXRgqShFaED1VU0P79dOQIiS721aPq5yaTr10L+uILZtzowIEDd9dnYwrdGTVqFDNuVKFQ7N+/X/fDDAHg0Q4fPtyzZ0/mLXTUqFHbtm3j1ZLQ8OHDd+zYIayYKRSHJ0/uKtzj3sCXZZibU/fu1Lev4e8UfDzpBeF9d+7Q/v2UkkJqNZfnn7Rjx7eJiUxx06ZNTz/9NJd+hHJycvz8/Jhxo4GBgUePHtXleHsAeDSNRtOjR4+j1ZdcFhYWp06d8jTsJeVabdq06dlnn2WKk3r2/JrXfG0zMwoIoL59eV1j8XhSDcL77tyhxERKTibR7ll61LIlBQfnOTp6ennduXNH+Ddt27ZNT0+3srIyXDOPMG3aNPE1Mt99991rr73GpR8Aufnuu+/eeOMNpjht2rSoqCgu/QhVVFT4+fmdOXNGWLS3t8/KzGxaVESJiXTpkuG6sbSkzp0pOFiaEXiftIPwvooKOnmSUlJINDddl2xsyM+PunR5MPKGaPny5TNmzGD+1fLlyyMiIvTYRs0UFRV5enoy40ZdXV2zs7N1swU2ADxacXGxp6fn1atXhcUmTZpkZ2c7ii47N7wnv3ddv04nTtDp01Raqsc+3N0pIIA6dZLagVAxYwjChwoKKD2dsrLoyhWdXT5jZ0cdOpC3N7Vty+wAWVFR0bFjx+zsbGHR3t4+KyuraS23AdOHzz///J133mGKM2fOXLJkCZd+AORj5syZy5YtY4qff/75W2+9xaUfoby8PE9PT+ZoVrt27dLS0tijWSoVnTtHGRl05gwVF+vm6RUKcncnT0/y8SFnZ908pv4ZVRA+VFpKFy/SxYt09Spdv167A6cKBTk5UbNm1KIFtWpFLi6POVe8efPmZ555hilOmjRJPFTQ8NRqdbdu3Zhxo5aWlqdPn+7QoQOvrgBM3rlz53x9fcuqb9MWEBBw/PhxpUG2U3+8SZMmffvtt0xx8+bNY8aMeeTXaDSUn08XL1JuLl29SoWFtVtpWFqSmxs1a0atWlGrVlyGhdaTcQahkEZDRUV06xbduUN371JJCZWVUWXlg79VKMjammxsyM6OHBzIyYkaNaLaXNms5corM7PExMRuou18DS8hIaFPnz7MT/C5557D3DUA/Xnuuec2btzIFPfu3duvXz8u/QidOHGia9eu6urXGNb6ivfKSrp1iwoLqaiI7t178Kb68H3GwoKsrcnWluztqWFDatSIHBy43xFfT8YfhHqWnp4eEBBQ+TBZiYgoODg4ISHBcON0H+3FF1/8888/meKOHTuG8ro2DMCkxcfHDxo0iCm++OKLv//+O5d+hDQaTb9+/Q4cOCAsKpXK5OTkjlK4g17CcLX9E/j4+Lz99ttMMTEx8bfffuPSD2PVqlXiQaOhoaFVhrwLE0AeVCqVeJdda2vr5cuXc+mH8euvvzIpSETvvvsuUvCJEIRPtmDBgsaNGzPFiIgI8UQJw2vRokVYWBhTTE9P/+KLL7j0A2DCPvvss5MnTzLFDz/8sHXr1jzaqaa0tHTWrFlM0cnJKTIykks/xgVB+GRaf5kuX768cuVKLv0wZs6cKR40Onfu3Fu3bnHpB8AkFRYWzps3jym6u7tPnz6dRzusZcuWXbx4kSkuXLiwUaNGXPoxLgjCGvnPf/4jPrywYsUK8dR5w7O1tV28eDFTLCgoWLBgAZd+AExSZGTkzZs3meLy5csbVN//j4vc3FzxjfxaT+uAVrhYpqa0niQfN26cFE4WajSavn37Hjx4UFg0NzdPTk72k9LWxwBGKiMjw9/fX7IXzY0bN+
6PP/5girGxscOGDePSj9HBirCmBg4cKB40+vvvv+/fv59LP0IKhWLNmjXMoNGqqqqQkBBOHQGYlLCwMCYFzczMYmJipJCChw4dEl86/swzzyAFaw5BWAurV68WDxoNCQlRqVRc+hHq0qXLK6+8whR37969detWLv0AmIwtW7bExsYyxVdffVUKNxOr1eqpU6cyB/YsLS1XrFjBqyVjhCCshbZt24rXWMnJyeI5DlwsX75cPGg0NDS0XAI7KQIYqYqKig8//JAp2tnZiU/Mc/H1118fP36cKYaFhWG8VK0gCGtn9uzZ4kGjs2fPZib7ceHq6iqetHv27Nm1a9dy6QfABKxZs4YZOExEs2fPbtasGZd+hO7evfvRRx8xRVdX15kzZ3Lpx4hpoJa0DhoNDw/n3ZdGo9GUl5eLPwna29tfvXqVd2sAxicvL6+haPOgtm3blpaW8m5No9Fopk2bJn4v+uabb3j3ZXywIqy1119/XXxu4OOPPxZ/bDQ8S0tL8ZALrR8bAeCJZs2aJT7Ys2rVKmtray79CJ09e3bdunVMsUuXLtiUtA5w+0RdJCYm9urVi/nWjR49esuWLbxaEho2bNjOnTuFFTMzs8OHD3ft2pVXSwBGJzk5OSgoqL4DrPVm9OjR27ZtE1YUCsW+ffv69OnDqyXjhRVhXQQHB48fP54pbt26VXxpGRfR0dHm5ubCilqtDgkJwYcegJoLCQlhUlCpVEZHR/PqRyguLo5JQSJ6+eWXkYJ1gyCsoxUrVognSohvNuLCx8dHvEHooUOHxLfcAoBWv/32m/gW4XfeeadTp05c+hGqqqoSz/62sbHBptx1hiCsI3d3d/FF1RkZGZ9++imXfhhaZwyGh4dLYVA4gMSVlpaKL8B2cnISzxrlYt26dadPn2aKM2bMEM8chhpCENZdRESEeOr8/PnzxQMJDc/Z2Vl8gczly5fFAwkBgLF8+XLxAOv58+eLd6ExvIKCgkWLFjHFFi1ahIeHc+nHNOBimXr5/fffX3rpJab47rvvfvLJJ1z6EaqqqurcuTPzydHGxiYjI6NVq1a8ugKQuMuXL3t5eTHHTry9vVNTUy0sLHh19dC777773//+lyn+/vvvL774Ipd+TAOCsL769++/b98+YUWpVJ44cUIK5xJ27949ePBgpjh+/PhffvmFSz8A0jd+/HjxJP3t27cPHz6cSz9CaWlpAQEBzLbbvXr1OnDggBSmnhovBGF9paSkBAUFMeNGBwwYEB8fz6sloTFjxojHje7fvx9XlwGIab0zasyYMZs3b+bVktDQoUN37dolrJiZmR05ciQoKIhXS6YB5wjrKyAg4I033mCKe/bs+euvv7j0w4iOjhYPCp86dSpzXTgAPGqAtUS24N6wYQOTgkT05ptvIgXrD0GoA4sXLxbPYQoPDy8rK+PSj1C7du0++OADppicnPzdd9/xaAdAur799ttjx44xxZCQEA8PDy79CFVUVIgvZLW3t8f+2zqBINQBFxeXOXPmMMXz589L5N7buXPnigeFz5w5UwqDwgEk4u7du3PnzmWKLi4us2bN4tIPIyoqKicnhylGRka6ublx6cfEIAh144MPPvD09GSKS5YsuXr1Kpd+hLR+bMzPz8fttwAPLVy48Nq1a0xxyZIl4oM9hpeXlyeeIdy+ffspU6Zw6cf04GIZndm2bdvo0aOZ4muvvSaFg5BqtbpHjx7MYR9LS8tTp05J4bAPAF9nz5719fVldu7s3Lnz8ePHzcz4rxZee+21H374gSlu27Zt1KhRXPoxPfx/xibjqaeeEl9g/cMPPxw5coRLP0JmZmYxMTHMBdYVFRURERG8WgKQjrCwMPH+1TExMVJIwaSkpJ9++okpDh48GCmoQ/x/zKZk9erVzC23Go1GItOue/bsOW7cOKa4efPmHTt2cOkHQCJ2794t3jdm/Pjxffv25dKPkEajEV/jbW5uLpHrD0wGglCXvL293333XaZ4+PDhn3/+mU
s/jJUrV4oHhYeGhkphUDgAF48aYL106VIu/TB++umnhIQEpjhlyhQ/Pz8u/ZgqBKGORUZGigcSzpgxo7i4mEs/Qs2bNxcPJMzIyPjss8+49APA3aeffnrq1CmmGBERIYUxhCUlJeLL0Z2dncVFqCcEoY45OTmJL9G8cuWK+KIvLqZPny5+hUdGRkphUDiAgRUWFopfrc2bNxdvLMPFkiVLLl26xBQXL14s3lgG6glBqHtvvfWWeNBoVFTU+fPnufQjpPWYT2Fh4fz587n0A8DR3Llzb926xRS1nkEwvEuXLolPBPr6+k6ePJlLP6YNt0/oxZ49ewYOHMgUx44d++eff3Lph9GvXz9m01GlUpmcnNyxY0deLQEYWHp6ur+/PzPAumfPngcPHpTCAOuxY8du2LCBKe7cuXPIkCFc+jFtWBHqxYABA5577jmmuH79+r179/JohyW+LlylUoWEhHBqB4CD0NBQJgW13mXExcGDB8XDip9//nmkoJ4gCPUlKirK2tqaKU6ZMoV57XHRuXPn119/nSnGx8dv2rSJQzcABrdx48adO3cyxTfeeKNr165c+hFSqVRTpkxhjtVZWVktW7aMV0smD0GoL23atBFflp2Wlvb1119z6YexdOlS8eyoadOmiW8rBjAxFRUV06dPZ4r29vYLFy7k0g/jyy+/TE1NZYrTpk1r3749l37kAEGoR7NmzWrWrJm4WFBQwKUfIa3ThM+dOxcTE8OjHQDDWb169ZkzZ5ii1tn0hnf79u2PPvqIKbq6uoqTG3QIQahHdnZ24sHWBQUFixYt4tIPQ+v+MosXLxaPHgYwGXl5eeILp7XuVsbF/Pnzb9y4wRRXrFjh4ODApR+ZQBDq16uvvtq9e3emuHbt2rS0NC79CGndcfTu3buzZ8/m0g+AAcyYMaOoqIgpat2/2vAyMzM/+eQTphgYGDhx4kQu/cgHbp/Qu8OHD/fs2ZP5Pg8ZMkR8rp6L4cOHM+NGzczMEhMTu3XrxqslAD05ceJE165dmdGdgwYNiouL49WS0MiRI7dv3y6sKBSK/fv39+7dm1dLMoEVod716NFjwoQJTHHXrl3//PMPl34Y0dHRzKBwtVotkUHhADok8QHWf//9N5OCRDRx4kSkoAEgCA1h2bJldnZ2THHq1KlSuETT29v7nXfeYYqJiYm//vorl34A9OSXX345ePAgU3z33XelMEeisrJy2rRpTNHW1lYi1xOYPAShIbi7u4sv+srJyfn000+59MOYP3++eFB4RETEvXv3uPQDoHOlpaXiy6SdnZ3Fl2hysXbt2qysLKY4a9asli1bculHbhCEBhIeHt6mTRumOG/evOvXr3PpR8jJyWnevHlM8cqVKytWrODRDoDuLV26VDzAeuHChVIYYH3jxg3xLYwtW7YU34gMeoKLZQxn/fr1L7zwAlN8++23pbALkkql6ty5M7MfjY2NTXp6euvWrTk1BaAbubm5Xl5eJSUlwqKPj09qaqq5uTmvrh56++23v/jiC6a4fv36559/nks/MoQVoeGMHTt28ODBTPHLL79MSkri0o+QUqkU30pfWlqK23jBBEybNo1JQSKKjo6WQgqmpKSIp031798fKWhIWBEaVFpaWkBAADNutFevXgcOHJDCqN9nn31WPG507969/fr149EOgA4kJCT06dOHeaN79tlnxVOtuejfv/++ffuEFaVSmZSU5O/vz6slGcKK0KB8fX3ffPNNppiQkCDeb4WLVatWiQeFh4SEqFQqLv0A1JPWe4EsLS0lslH2n3/+yaQgEf3f//0fUtDAEISGtmTJEmdnZ6YYFhYmPnRjeG3btp06dSpTTElJ+eabb7j0A1BPX3311fHjx5liWFhYhw4duPQjVFZWFhERwRQdHR0XLFjApR85QxAamrOz85w5c5hibm7u6tWrufTDmD17tnj08Jw5c27fvs2jHYC6KyoqioyMZIqurq4zZ87k0g9j5cqVFy5cYIqRkZFNmj
Th0Y6sIQg5eP/99319fZmi1su7Dc/e3n7x4sVMMT8/Hzf2gtGZP3+++PakZcuWSWGA9ZUrV8SHZ728vN577z0u/cgcLpbhY9euXUOHDmWKEydO/PHHH7n0I6RWq4ODg48ePSosWlpanjx50tPTk1dXALWSk5Pj5+fHDG/q0qXLsWPHzMz4LwAmTpz4888/M8V//vlnxIgRXPqROf6/EPI0ZMiQkSNHMsWff/5ZPALK8MzMzGJiYpirWCsqKsLDw3m1BFBbISEhTAoqFIo1a9ZIIQUTExN/+eUXpjhq1CikIC9YEXIj8U+sEyZMEL9W8YkVjEJcXNyQIUOY4oQJE3766Scu/QhpNJoePXowR1wsLCxOnTqFIy688H/Dla327duLzwecOHFCCkdHiWjFihUNGjRgimFhYZWVlVz6AaihqqqqkJAQpmhjYyPeJZuL77//nklBIvrggw+QghwhCHmKjIx0c3NjitOnTxdvHGp47u7u4mu7MzMzJTIoHOBR1q1bJ974eubMmVIYYF1cXCze+NrFxUV8JTkYEoKQJwcHh/nz5zPFvLy8ZcuWcemH8eGHH4oHjc6fP//mzZs82gF4soKCAvEVzi1atBBvcsTF4sWLr169yhQXLVrk6OjIox14AEHI2eTJkwMDA5niqlWrzpw5w6UfIRsbG/EV3oWFhRLZuQZAbPbs2bdu3WKKq1atsrW15dKP0Llz58QTfQMCAiZNmsSjHfgXLpbhD7MQAXQCs3yhbrAi5K9Xr15jx45lihs3bty5cyeXfhgxMTFKpVJYUalU2CkNJCg0NJRJQa33AnERHx8vTsEXX3wRKSgFCEJJ0HroRvyq5kLroZs9e/ZIZFA4wH3r16/ftWsXU5w8eXJQUBCXfoS0fnbUeuoBuEAQSkKLFi3CwsKYYnp6uni7Ti60nsz/8MMPy8rKeLQDwCovLxdPENV6MRoXn3322cmTJ5lieHg4dr2WCAShVGi9vHvu3LniM/+Gp/Xy7vPnz0tkUDhAVFRUTk4OU9R6e5LhFRYWzps3jym6u7tj12vpQBBKha2trXjadUFBgUT2ZNF6w+/SpUvF14IDGNj169dXrFjBFLUOrOAiMjJSfMfR8uXLxQMrgBcEoYRMmDChd+/eTPHTTz89ffo0l36ELCwsVq1axRSLi4slsqMNyFlERIR4BsWaNWusrKy49COUkZHx2WefMcXg4OCXX36ZSz+gFW6fkJYTJ0507dpVrVYLi4MGDYqLi+PVktDIkSO3b98urCgUisTExO7du/NqCWQuKSmpW7duzEtm8ODB4gtnuBgxYkRsbKywYmZmlpiY2K1bN14tgRhWhNLSpUuXV155hSnu3r1769atXPphrF692sLCQljRaDTvvfce8zYEYBhaf/3Mzc3F961zsWXLFiYFiejVV19FCkoNglByli9fLt44NDQ0lNmnggutG4cmJSWJd1YDMIAff/zxyJEjTFHrxteGV1FR8eGHHzJFOzs78aUAwB2CUHJcXV1nzJjBFM+ePbt27Vou/TAiIyObNGnCFLWepAHQK62nqJ2dnSUywHrNmjXZ2dlMcfbs2c2aNePSDzwGglCKpk2b1qFDB6a4YMGCa9eucelHyNHRUXwhq9bL9gD0SutFy0uWLHF2dubSj1B+fr545de2bVvx/lAgBbhYRqI2btz43HPPMcXJkyd/+eWXXPoRUqlUgYGBqampwqKVldXp06fbt2/PqyuQlfPnz/v4+DAjHXx9fVNSUszNzXl19dDkyZO//vprprhx48ZnnnmGRzvwBFgRStSzzz47dOhQpvjNN98cO3aMSz9CSqUyOjqaKZaXl4uP6ALoSXh4uHiwUXR0tBRSMDk5+dtvv2WKAwcORApKFlaE0pWenu7v78+MG+3Zs+fBgwelMER47Nix4nGjO3fuHDJkCJd+QD727NkzcOBApjh27Ng///yTSz+Mfv367d+/X1hRKpUnTpzo1KkTr5bg8bAilC4fH5+33n
qLKR46dOiPP/7g0g9j5cqV1tbWTFEig8LBhGkdYG1lZbV06VIu/TB+++03JgWJ6J133kEKShmCUNIWLlzYqFEjphgeHn7v3j0u/Qi1adNGPCg8LS3tq6++4tIPyMQXX3zBnJ8movDwcCmcny4tLRWfIHBychLPGgVJQRBKmrOzs3g7+MuXL0dFRXHphzFz5kzxteBatwgH0InCwkLxK8LNzS0iIoJLP4zly5dfvHiRKc6fP79x48Zc+oEaQhBK3bvvvuvn58cUtb7eDM/Ozk58PKqgoGDRokVc+gGTN3/+fPEA6xUrVohnUBie1k+o3t7e77zzDpd+oOZwsYwR2L179+DBg5ni+PHjf/nlFy79CGk0mj59+iQkJAiL5ubmKSkpUpjuAaYkMzOzU6dOlZWVwmKPHj0OHTokhcvHxo8f/9tvvzHF7du3Dx8+nEs/UHNYERqBQYMGjR49min++uuvBw4c4NKPkEKhWLNmjZlZtV+kqqoq3DgMOhcWFsakoEKhiImJkUIKJiYm/v7770xxzJgxSEGjgCA0DtHR0eI9ZaZOnSqFadeBgYETJkxginFxcX///TeXfsAkbdu2jdn5hIheeeUVKex8olarp06dyhxds7S0XLlyJa+WoFYQhMahXbt2H3zwAVNMTk7+7rvveLTD0nqSJiQkRAqDwsEEVFRUhIeHM0Wtp6i5+Pbbb8WTLkJCQjw8PLj0A7WFIDQac+fObdq0KVOcOXPmnTt3uPQjpPWyvZycnE8++YRLP2Bi1q5dm5WVxRS1XrRseHfv3p07dy5TdHFxmTVrFpd+oC40YDy0DhqNiIjg3ZdGo9GUlZWJb+RycHC4du0a79bAuOXl5Tk6OjK/Wm3atCktLeXdmkaj0Yj3WiKir776indfUAtYERqTSZMmde3alSnGxMSId3sxPCsrq2XLljHFoqKiyMhILv2AyZgzZ87t27eZYlRUlHiwkeGdPXv2448/ZoqdO3d+4403uPQDdYPbJ4zMoUOHevfuzfzUnn766U2bNnHqqJqhQ4fu2rVLWDEzMzty5EhQUBCvlsCopaSkBAUFqVQqYXHAgAHx8fG8WhJ6+umnt2zZwhT37dvXt29fLv1A3WBFaGR69uw5btw4prh58+YdO3Zw6YchHv+vVqtDQkLweQvqJiQkhElBrZufcLF7925xCo4fPx4paHQQhMZn5cqVDRo0YIqhoaHMLVZc+Pr6Tp48mSkmJCSsX7+eSz9g1P744499+/Yxxbfeesvf359LP0JVVVXi2d82NjYSuZAVagVBaHyaN28uvpQ8IyPjs88+49IPY/HixeJB4dOmTSspKeHSDxip0tLS6dOnM0UnJ6cFCxZw6Yfx6aefnjp1iilGRES0atWKSz9QHwhCozR9+nTx6y0yMlI8htHwnJ2d58yZwxRzc3NXrVrFpR8wUitXrrxw4QJTjIyMlMIA68LCQnEeN2/eXOsVpCB9uFjGWP36668vv/wyU5wyZcratWu59CNUVVUVEBCQlpYmLNrY2GRmZrZs2ZJXV2BErly54unpyWw35uXldfLkSQsLC15dPTRlyhTxPbK//vrrSy+9xKUfqCcEoRHTuhF2cnJyx44debX0UFxcnHir+gkTJvz0009c+gHjMmHCBPFM+X/++WfEiBFc+hFKT0/39/dnNqDu2bPnwYMHpTD1FOoAQWjEkpOTg4KCmHGjAwcO3L17N6+WhJ566ilm3KhCodi/f3/v3r15tQRGITExsVevXsxb01NPPbV161ZeLQkNGzZs586dwoqZmdnhw4fF9/iCscA5QiPWuXPn119/nSnGx8dL5J7CmJgYZlC4RqORyKBwkCyt99tYWlpKZDPqjRs3MilIRG+88QZS0KghCI3b0qVLGzZsyBSnTZsmhWnX7du3nzJlClM8ceLEDz/8wKUfMArff//90aNHmeIHH3zg6enJpR+hiooK8YWs9vb2Cxcu5NIP6AqC0Lhpne177ty5mJgYHu2wIi
Mj3dzcmOKMGTOKioq49AMSd/fu3dmzZzNFFxcX8XXIXKxevfrMmTNMUes0fDAuCEKjp3W3l8WLF1+7do1LP0L29vbz589ninl5ebjpGLTS+nu7aNEi8WEPw9P6e6t1fzQwOrhYxhRs2bLl6aefZopvvPHGN998w6UfIZVK1bVr1+TkZGHRysrq4sWLrq6uvLoCCcrLy2vVqhVzVL9z587Hjh1TKpW8unrojTfeEG//uWXLltGjR/NoB3QJK0JTMGbMmGHDhjFFredaDE+pVK5du1Z4Wbm3t/fmzZuRgsBwdXWNjY1lxqdFR0dLIQW1ntseNGgQUtBE8Nr/CXQrPT1dfKNxcHCwWq3m3ZpGo9G8+OKLROTs7BwTE1NVVcW7HZAulUr1/fffu7i4ENG4ceN4t6PRaDRqtVp8z4+5ufnJkyd5twa6gSA0He+//774g87PP//Muy+NRqO5cOFCWFhYYWEh70bAOBQWFoaFhV24cIF3IxqNRqN1CsQHH3zAuy/QGZwjNB2FhYUeHh7MuFF3d/esrCzxbhUAUBOlpaVeXl6XLl0SFp2dnbOzs8XD5cFI4Ryh6XBycpo3bx5TvHLlyooVK3i0A2AKli5dyqQgES1cuBApaEqwIjQpKpWqc+fOzO4wNjY26enprVu35tQUgLHKzc318vJidhDz8fFJTU1lNqAGo4YVoUlRKpXiW+m17usGAE+kdR/N6OhopKCJwYrQBD377LPicaN79+7t168fj3YAjFJCQkKfPn2Yd8hnn332r7/+4tUS6AmC0ASdO3fO19e3rKxMWAwICDh+/LgUbskCkD61Wt29e/fjx48Li5aWlqdPn+7QoQOvrkBPcGjUBLVt23bq1KlMMSUlRQqDZgCMwldffcWkIBGFhYUhBU0SVoSm6e7du56enszYRhcXl6ysLEdHR05NARiHoqIiT0/P69evC4uurq7Z2dkODg68ugL9wYrQNNnb2y9evJgp5ufnL1q0iEs/AEZk/vz5TAoS0bJly5CCpgorQpOlVquDg4OZcaOWlpYnT56UwtZuANKUk5Pj5+fHzP7u0qXLsWPHzMywcjBN+LmaLDMzs5iYGOG0ayKqqKgIDw/n1RKA9IWEhDApqFAo1qxZgxQ0YfjRmrLg4ODx48czxW3btm3fvp1LPwASFxcX9/fffzPFl19+WTx0G0wJDo2auCtXrnh6et67d09Y9PLyOnnypHi3CgA5q6qqCggISEtLExZtbGwyMzNbtmzJqyswAKwITZy7u3tERARTzMzM/PTTT7n0AyBZ69atY1KQiGbOnIkUNHlYEZq+0tJSHx+fCxcuCItOTk7Z2dmNGzfm1BSAtBQUFHh4eNy6dUtYbNGiRWZmpq2tLa+uwDCwIjR9NjY2y5cvZ4qFhYUfffQRl34AJGj27NlMChLRqlWrkIJygBWhXPTv33/fvn3CilKpTEpK8vf359USgESkpaUFBARUVVUJi7169Tpw4ABz3TWYJKwI5SImJoYZNKpSqUJDQ3n1AyAdoaGhTApqvfsITBWCUC4CAgImTZrEFPfs2YNR+iBzGzZs2LVrF1OcPHlyUFAQl37A8HBoVEby8/M9PT1v374tLLZp0yY9Pd3a2ppTUwA8lZeX+/n55eTkCIsODg5ZWVlubm68ugIDw4pQRlxcXObMmcMUz58/v3r1ai79AHAXFRXFpCARRUZGIgVlBStCeamsrOzYsWNWVpawaGdnl5WV1axZM15dAXBx/fp1T0/PoqIiYbF9+/anT5+2srLi1RUYHlaE8mJhYREVFcUUi4uLZ86cyaUfAI4iIiKYFCSimJgYpKDcYEUoRyNHjmTGjSoUisTExO7du/NqCcDAkpKSunXrplarhcXBgweLL5wBk4cglKPMzMxOnTpVVlYKi4GBgUePHsWIfZADjUYTHBx85MgRYdHc3DwlJcXX15dXV8AL3vXkyMvL67333mOKSUlJP//8M5d+AAzsxx9/ZFKQiN5//32koD
xhRShTt2/f7tChw82bN4VFNze3rKwsbMMNpq24uNjT0/Pq1avCorOz85kzZ5ydnXl1BRxhRShTjo6OCxYsYIrXr19fsWIFl34ADGbp0qVMChLR4sWLkYKyhRWhfKlUqsDAwNTUVGHRysrq9OnT7du359UVgF6dP3/ex8enrKxMWPT19U1JSTE3N+fVFfCFFaF8KZXK6OhoplheXo5bKcCEffjhh0wKElF0dDRSUM4QhLI2YMCA559/nimuX78eV5CDSdqzZ8+GDRuY4tixY4cMGcKlH5AIHBqVOxwpApnAuQB4FKwI5a5NmzZhYWFMMS0t7auvvuLSD4CefPHFF0wKElF4eDhSELAiBFxNDqYP9wvBY2BFCGRnZ7d06VKmWFBQsGjRIi79AOjc/PnzmRQkohUrViAFgbAihPs0Gk2fPn0SEhKERUycAtOgdaZgjx49Dh06hD3ogbAihPsUCsWaNWuYQaNVVVUhISGcOgLQmbCwMCYFFQpFTEwMUhDuQxDCA4GBgRMmTGCKcXFxf//9N5d+AHRi27ZtzF4rRPTKK69grxV4CIdG4V/YpxRMDHaihprAihD+5ebmFhERwRRzcnI++eQTLv0A1NPHH3/MpCARzZw5EykIQlgRQjXl5eV+fn45OTnCooODQ1ZWlpubG6+uAOogPz/f09Pz9u3bwmKbNm3S09Otra05NQVShBUhVGNlZbVs2TKmWFRUFBkZyaUfgDqbM2cOk4JEFBUVhRQEBlaEoMXQoUOZcaNmZmZHjhwJCgri1RJAraSkpAQFBalUKmFxwIAB8fHxvFoCyUIQghZpaWkBAQFVVVXCYq9evQ4cOIArzsEo9O/ff9++fcKKUqlMSkry9/fn1RJIFg6Ngha+vr6TJ09migkJCevXr+fSD0Ct/PHHH0wKEtFbb72FFAStsCIE7QoKCjw8PG7duiUstmjRIjMz09bWlldXAE9UWlrq4+Nz4cIFYdHJySk7O7tx48acmgJJw4oQtHN2dp4zZw5TzM3NXbVqFZd+AGpo5cqVTAoSUWRkJFIQHgUrQnikqqqqgICAtLQ0YdHCwmL69OmOjo6cmgJ4nDt37ixfvryiokJY9PLyOnnypIWFBa+uQOIQhPA4cXFx2LwbjN0///wzYsQI3l2AdOHQKDzO4MGDR40axbsLgLobNWoUUhAeD0EITxATE3N/0GiDBg06derEux2AJwsKCrq/0aClpSXOasMTIQjhCdq3bz9lypQXXnghPT0dQQhGwcvLKzMz86233goJCfH09OTdDkidOe8GwAisXLkS99GDcWnatOnnn3+OayCgJhCE8GQPU3DkyJEuLi58mwF4ooezAPEBDmoCV40CAICs4RwhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZQxACAICsIQgBAEDWEIQAACBrCEIAAJA1BCEAAMgaghAAAGQNQQgAALKGIAQAAFlDEAIAgKwhCAEAQNYQhAAAIGsIQgAAkDUEIQAAyBqCEAAAZA1BCAAAsoYgBAAAWUMQAgCArCEIAQBA1hCEAAAgawhCAACQNQQhAADIGoIQAABkDUEIAACyhiAEAABZ+3+8Lux/614AmwAAAABJRU5ErkJggg==\n", + 
"text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAAB3XklEQVR4nO3dd1wU1/o/8GfpUlVEKYoFu8ZGsYCKvRewV1QUzTXFNEUxub+bxJrERKNJIDYssYtdY6eqFLH3CgoqRemd/f2x+W7GWQssu3Nmdj/v1/3DedCZJ3uVD2fOzDkyuVxOAAAA+sqAdQMAAAAsIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvIQgBAECvGbFuAEBrSkooPZ0yM+nVK8rNpYICKi6moqJ/vmpmRiYmVK0aWVtT9epUsybZ2pKhIdOOAYABBCHolsxMeviQkpLo6VPKzCS5vBJ/1sCAatcmJydydqZGjcjSUmtdAoCIyOSV+k4BIE5PntCNG3T7NmVmauyc9vbUvDm1akW1amnsnAAgPghCkLK8PEpMpMRETeafKkdH6tCBPviATEy0eBUAYARBCNKUkUHR0XTlCpWVCXRFMzNydaVOnXDLFEDHIAhBal6+pDNn6Nq1ys3/aYqREbm7k5cXmZszuDoAaAGCEKSjqIjCwyk2VrhR4NuYmVG3btSxIxngBSQAyUMQgkTcukVHjlBODus+OGrXpmHDyNGRdR8AUCUIQhC9oiI6fJiuXmXdx5sYGJCnJ3l7Y2gIIF0IQhC3lBTatYtevWLdxzs5OdGoUWRjw7oPAFAHghBELDGRDh9mPyNYEebmNGoUNWjAug8AqDQEIYiSXE6nTlF0NOs+KsPAgAYPpvbtWfcBAJWDIATxKS+n/fvpyhXWfailZ0/q2pV1EwBQCQhCEJnyctq9m27eZN1HFXTrRj16sG4CACoKj7qBmMjltHevtFOQiCIiKDycdRMAUFEIQhCTI0fo+nXWTWjC2bMUF8e6CQCoEAQhiEZMDMXHs25Cc44epbt3WTcBAO+HIARxuHePTp5k3YRGyeW0Zw9lZLDuAwDeA0EIIpCVRXv3sllEW0sU/y1FRbRjB5WUsO4GAN4FQQisKR6QKSj459e6QSb75xdpafT330xbAYD3QBACazExlJT0z6+V+aFLEhIwWQggZghCYCojg86eZd2E9h06RMXFrJsAgDdDEAJTR45QaSnrJrQvO1sv8h5AmhCEwM7t2/TgAesmhHLhAp4gBRAnBCEwIpfr2vsS71ZeTqdOsW4CAN4AQQiMXLlC6emsmxDWzZuUmsq6CQDgQxACC3I5RUayboIF/fyvBhA3BCGwcOeOnk6Y3bpFmZmsmwCA1yAIgYULF1h3wIhcjsW4AcQGQQiCy8ykhw9ZN8HO5ctUVsa6CQD4F4IQBCfRrec1paCAbt9m3QQA/AtBCIK7do11B6zpxp6LALoCQQjCSkvT08dkuO7e1Yv1dAAkAkEIwrpzh3UHIlBSQo8esW4CAP6BIARh6c+aau+GzwFANBCEIKDyckpOZt2EOGBECCAaCEIQ0PPn2K79H/goAEQDQQgCwkqbSuXl9Pw56yYAgAhBCILCt36uFy9YdwAARAhCEBRenODCpwEgDghCENCrV6w7EJOXL1l3AABECEIQVHY26w7EBJ8GgDggCEEoxcV4TvI1+fmsOwAAIgQhCKewkHUHVC6XE1FWYeEP0dHRSUmFbNc5KyhgeXUA+D9GrBsAvcF6OPg0O3vkzp2fde5sZWIy98QJIjIyMGhrb+9Zr56ro6N3gwbONjaCNsT6AwEABQQhCKW8nOHFo5KSRu3c+Sw
313///glt2iiKpeXlCSkpCSkpikMHKysvZ2fPevW8nJ3bOzgYyGTa7YnpBwIASjK5XM66B9APaWn0229MrhySkPDxkSPF/7cdrpmR0XtvilqamLS1t1fmYo1q1TTflkxG33yj+dMCQCVhRAhCMTQU/pqFpaWzDh0KvXSJV3zvH8wtLo5OSopOSiIiQwODZra2Xs7Ons7Org4OrWrX1kxzRvjXByAK+KcIQjExEfiCT7KzR+zYEfv0Ka9uY2bm6uh4Ky0tJSenIucpKy+/kZZ2Iy0tJCGBiJysrbvUq6f4X3t7e2O1A17wDwQA3ghBCEJR3l2Uy0nb029EkY8fj9q163luLq/u5ey8a/Roe0tLIkrJyYlOSopKSkpITY17+lR57/TdnmZn77p+fdf160RkbGjYpk4dxe3THg0b1jI3r0SL2rjdCpIgl1NRERUV/TNPbGpKxsZkbMy6Lf2FOUIQ0PLlwrwzEJKQ8NGRIyUqwRbg6vrrwIEmbxrD5RUXJz57psjFmOTkTLX6bFSjhuL2aYUet6lfn6ZMUeMqICVyOb14QamplJZGGRn08iXl5Lz5X4GREVlYUPXqVLMm2dqSgwM5OOCnJWEgCEFAf/yh7XW3C0tL/3P48IbERF7d1Mjot0GDprVvX8HzPHj5MiopSZGLN9PS1PhHYm1q6uHk5Ons7OXs3KVePXPVn/fbtCEfn8qfGKQgPZ3u3qUHDygpiYqL1T+PnR01aECNG1OjRphU1h4EIQhoxw66dUt7p3+Sne27Y0ecyqRgXWvrPWPGeDg5qXfa57m5sU+fJqSmKnJRjdfwjQwMmv7f4zbd6tdvUL06EVH37uTtrV5LIFKZmXTlCl2/TunpGj6zsTE1bUoffEBNmpABFkLRMAQhCOj0aYqM1NK5Ix4/Hv2mScGu9evvGjWqjqWlRq5SWl5++dmzqKSk6OTk8EePXuTlqXGSf15YHDzYdfBgDw8PEzw1I3VyOd2+TbGx9PCh1q9lYUEdOpC7O1lZaf1aegNBCAK6fp1279bGid8xKbh64ED1H+x8H+XjNtHJyYmpqeWV/9dkYWHRrl07Ly8vT09PT0/PmjVraqNP0Jbycrp8maKiKDNT0OsaGlKbNtS1K9WoIeh1dRSCEAT08iWtWqXZU77xTUEiMjMy+n3w4Cnt2mn2cu+QU1R04enTqKSkhJSUqKSkV2qtrdqoUSNPT09FLrZs2VKm/cdrQU1yOd28SadOCR2BXAYG1KEDeXuThQWzHnQCghCE9dNPpHL3Um3JWVm+O3bE/98aaUr1bGz2jB7tru6kYNWVlZffSk+PTk5W5OKNtDQ1TlKnTh13d3dXV1cvLy8vLy8zMzON9wlqevGCjhyhx49Z90FERKam1KMHeXgI8FaSrkIQgrB276br1zVyprOPHo3ZtUt1lq5b/fq7Ro+uLaafkVNychJSUhS5GJ+SUlT5x22MjY3btGmjGCx27969tqZWt4HKKi+niAiKjBTdUrGOjjR8ONnZse5DkhCEIKxLl2j//qqfJiQhYfbhw6Uq34y0PSlYVXJ5fmnpxdTUhAYNoq9ePXPmTLpajxc6ODgobp96eXm1b9/eAI8RCiMzk/bsIZU7EGJhZES9e2NoqAYEIQgrL49++omq8Lcut7h42v79u1SGlWZGRn8MHuwn4KSg+qys6LPPFN+tHjx4EBUVFR0dHRUVdfPmTTX+PVpZWXXs2NHT09PV1bVr167VFe9mgMbduEH791fppUBhNG9Ow4eTqSnrPqQEQQiC27hR7cmV+5mZPjt2XFV5K7+ejc3eMWPcHB2r3JwgPDxowADVclZWVlxcnCIXo6OjCyq/uo2hoWGzZs2Ug8VGjRppol29J5fTmTPae/NH82rVonHjCE8gVxiCEASXkECHDqnx547duzd+z56XKvHg3aDBjlGjRDUp+B7+/lS37rt/S2lp6eXLl6OiohISEiIiIh6r9aODg4OD4lkbT09PvLCoprIy2rePrl1
j3UclmZvT+PHE7nkxaUEQguAKC+mnn6gyD4zI5fLl0dFBp0+XvWlScM2gQUYSmiSrVYtmz67sH0pJSVHcPo2Ojk5MTCyv/JMaihcWFbnYs2dPW1vbyp5BH5WW0o4ddO8e6z7UYmJCY8dSw4as+5AABCGwcOAAqSwH+ja5xcVT9+3bfeMGr25pYrJ+2LBRrVppujkt69ePOnWqyglyc3MvXbqkzMWXL1+qcRK8sPh+paW0bRs9eMC6jyowMqJx4wh3yN8HQQgsPH9Of/xRkd94LzPTZ/v2ay9e8OouNWuGjRnzQZ06WmhOm0xN6bPPNPggQ1lZ2a1btxISEhS5eEPlx4WKsLGxcXd3V+ZiNex4QETl5bR9O929y7oPdSk3OzM2pkmTqF491g2JGoIQGNm69b13nI7evTth717VScH+jRv/NWJEDSl+v/b0pN69tXf6Z8+excXFKUIxPj6+qKiosmcwMjJq27at4hnU7t27169fXxt9SsD+/aSyXJHEKLOwWjWaNo1q1WLdkHghCIGR5GRav/5tX1RMCi44dYq3eqeMaK6X16KePQ0lNCmoZGxMn34q2GpYJSUlV65cUdw+PXv2bJpaq9vo6QuL0dF08iTrJjSqRg2aMQO7G74NghDY+euvN956yikqmrp//543TQpuGD58ZMuWgjSnOcofzL28qFcvVl0oXlhU3ERV73EbS0vLtm3bKnOxhq4u93z/Pm3dWpVXXUXKxYUmTMC79m+EIAR20tLojz94S1Xdzcjw2bHjusqkYOOaNcPGjm0t3aXFLCzo449F8ppzdnZ2bGysYrAYExOTn59f2TMoXlhUvpvRSnKPLL1NTg798QdV/gORhh49qFs31k2IEYIQmPr7bzp/Xnl0+M6diXv3qu7bMKBJk62+vpKcFFQaNoxEuepNaWnp7du3FdOKUVFRD9XaUc/e3t7NzU0Riu7u7qbiyPtKk8tpyxZpPyb6bgYGNG0aXi5UhSAEpoqL6bffKCvr3ZOCi3v1MpD0LZ0GDWjyZEnclUpJSVE+gxoXF1dc+RXFuOuDe3t720loGej4eDp8mHUTWlarFs2cSUZGrPsQFwQhsPbgQc7atX779oXdvMn7ipWp6YZhw0ZIblKQx8SEZs2S4gaqeXl5iYmJilCMiYnJVGvjPcULi4qbqKJ+3CYnh9asoco/Zys93bpRjx6smxAXBCEwdufOHZ8+fW4kJfHqTWxtw8aMaSXdSUGlIUOoQwfWTWhA1dcHt7a29vDwUAwWu3TpYm5uro0+1bR3L129yroJQRga0uzZUvzJTHsQhMDSoUOHJk6cmJWVxasPatp0i69vdR3YirZ1axoxgnUTmvf8+fPY2FjlTdRClWnd9zIyMmratKliWrFbt24NGjTQQpsV9vQprV3LsgGBtWhBo0ezbkJEEITAhlwuX758+YIFC3jP8evIpKBCnTrk70/Gxqz70C7l+uDR0dHh4eEvVJ74rQjlC4uurq4M1gcPDaVHjwS9InPTp+OpGSUEITCQnZ3t5+e3b98+Xt3K1DR0+HCfFi1YNKVpFhY0fTrp3+6AmlofXJGLnp6eNbW9ndDjx7Rxo3YvIUJNmtD48aybEAsEIQjt9u3bPj4+N1UejWnatGnYDz+0vHRJwu8yK9+dNzEhPz+Syv6IWpOTk3P58mXluxmvXr1S4yRaXx+8Aqv96aZZs0hyq/VqB4IQBHXw4MFJkyapTgoOHjx4y5YtNjY2dPEiHTzIpDeNMTKiCROI7aSX+CjWB1eEYkJCgnrrg9epU8fd3V3xDKqXl5dZ1WeR09Lot9+qehKJatuWhg9n3YQoIAhBIG+dFJTJ5s6du3jx4n8frE9IoMOH/xkXKsdYUmFsTOPGYRO490pNTY2Pj9fI+uBeXl7du3evrd4DxkeOUFycOn9QBxgZ0eefYwFSQhCCMLKzsydNmnTgwAFe3draOjQ0dLjqj6VXr9K+fVT56SXGzMxo/HhseVNZ+fn5Fy9eVDy
DeubMmfT0dDVOos764KWl9NNPVPlHXnXHgAHk4cG6CfYQhKB1V65c8fX1vX//Pq/erFmzsLCwFm97NObBA9q1S0rfpGrUoPHjsdlN1VX9hUUrK6uOHTsqnkHt2rVr9bc9snTzJu3cWcVupc3JiaZPZ90EewhC0K4dO3b4+/vn5eXx6kOGDNm8ebONjc27/nBGBm3fTmqND4TWsCGNHEmiekNcJ3DXB4+Oji5Q2ZzyvRTrgyvfzXhtffDdu+n6dU22K0WffqqHzzbzIAhBW8rKyoKCgpYvX877O/aGScF3KC6mQ4dEveSHTEZdu5K3t8TmMiVI+cJiQkJCRETE48eP1TiJg4PDP5tmdO7sHhFhWlam8T4lpn9/6tiRdROMIQhBKzIzM8eNG3f8+HFe3draevPmzUOHDq3c6a5epaNHqfKjAa2rUYOGDydnZ9Z96CPlC4sJCQmxsbElJSWVPYO5sXF7BwdXBwcvZ+ceDRvW0s8BfePGNGEC6yYYQxCC5l2+fNnX1/eBynY2bdq02bt3r4uLizonzc2lY8dEdCPLwIA6dqQePXR+4RhJyM3NvXTpkvJF/pcvX6pxkkY1ang6O3s5O3vWq9fSzk7zLyyKk7ExBQaSaBdDFwSCEDRs+/bt06dPV50UHDNmzLp16ywsLKp09ocP6e+/6fnzKp2k6lxcqG9f0oEFwXWR4oVF5Tqo6r2waG1q6uHkpMzFarr9447eL7eGIASNUUwKLlu2jFc3NDRctGjR3LlzNfMjtlxO165ReDhlZGjgbJVVrx716IHXBCXk2bNncXFxypuo6qwPbmDQ1t7es149V0fH7vXr19e9R0v0/iUKBCFoRkZGxtixY0+ePMmr16xZc9u2bX379tXw9eRyunWLzp2j5GQNn/mNZDJq2pQ6d6b69YW4HGhHSUnJlStXoqKios+cOXvyZJrKfYuKcLCyUgwTXR0dOzo5GRsaarxPobVvT5WdttctCELQgEuXLvn6+j58+JBXb9u27d69exs1aqTFaz9/TomJdO0ave2bWhXXpqlRg9q0ofbt6d1veoC03LpFO3Y8ePkyKikpISUlOjk5MTW1vPLfDC1NTNra2yty0cvZuYZEV2mpW5f8/Vk3wRKCEKpq27Zt06dPz8/P59XHjh27du3aqk4KVpBcTo8e0e3bdO+eBm6ZymRkb0+NG1OLFuTgoIn+QGRiYujECW4hu6go9ulTRS5GJiVlVf4OqqGBQTNbW1dHR0UuSmlPaXNz+uor1k2whCAE9ZWWli5cuPBtk4Lz5s1j0hVlZ1NSEj19Ss+f04sXbx0p8tjYUO3aZG9PdetSvXpYgFHHHT1KsbFv+2Jpefnt9PTo5OSopKSopKSHaj2Dam9p6aYIRWdnd0dHUyOjKrSrNcr7JQsW6PPzzwhCUFN6evrYsWNPnTrFq9va2m7fvr13795MunqD4mLKyqKcHCoo+GfBtqIiMjUlmYzMzMjCgiwtycaGxPl9CrRk505S2QjsbVJychS3T6OSkuKePi2u/Dv4xoaGberUUdw+9W7QwE6Y2ySV8vHHpO19H0UMQQjqSExM9PX1faSyqXe7du327t3bEA9Vgsht3EhqLUyTV1yc+OxZdFJSVFLSuSdPMlRmBCpC8cKi4kX+9g4OBmJ4YVG/36BAEEKlbd26NSAgQHVScNy4cWvXrjXXz+U5QFqCg+nZs6qfRvG4jSIXb6alqfHN9Op//tNaDLOJkyaRVh9qEzfcDoJKeNukoJGR0ffff89sUhCgskpLNXKaRjVqNKpRY3LbtkT0PDc39unThNRURS4WVuAS1c3MGtes+WdCwrDmzWuzvV+qoQ9EohCEUFHp6eljxow5ffo0r16rVq3t27f36tWLSVcA6tDCnbA6lpZDmjUb0qwZEZWWl19+9iwqKSk6OTn80aMXb3liy8nauumvvyZnZT189Wox239Bktv7U6NwaxQq5OLFi76+vqrr/bdv337v3r0NGjRg0RSAuta
sEXJ7r5ScHMUw8W0vLFqbmj6aM4fla4jjxlHTpsyuzhpGhPB+W7ZsCQgIUN0KbsKECSEhIZgUBOkR9lUBRyurUa1ajWrVioheFRaeS04++eDBL+fPKxMxu6hodWzs1927C9nVa/T7qWm9XnEc3qu0tDQwMHDSpEm8FDQyMlq6dOmWLVuQgiBJpqasrlzdzGxAkyY/9es3y82NW//l/PmcoiJWXZGZGbNLiwCCEN4qLS2tT58+qo/G1KpV6++//5buozE3K/wCGegsEfwAF+jlZcJZpzSzoCAkIYFZN/q9ggSCEN4sISHBzc3t7NmzvHqHDh3i4+N79uzJoqmqUgxw27ZtGx4ezroXYMrKinUHVM/GZkKbNtzKjzExBZXfXlgzRPCBMIQghDfYtGmTl5dXUlISrz5x4sSoqKj60tyBQTnALSkpGTNmzJMnT1h3BOyIYwn1oK5dDTk74j7Lzd1w6RKDPiwsMEcI8K+ioqKZM2f6+fnxtm1TTApu3ry5mjRvofAGuM+fPx86dKjq4z+gL2xtWXdARORSs+aoli25laVRUWos4VZV4vg0GEIQwr9SUlJ69OgREhLCq9vZ2Z04cUK6k4IhISFdunThDXATExN37tzJqiVgzM6OdQf/+Lp7d+4Sa8lZWVuvXBG6CdF8GqwgCOEf0dHRbm5u586d49VdXV3j4+O9vb1ZNFVVRUVFAQEBM2fOLC4u5tYVA1w/Pz9WjQFj1auL5DnJlnZ2g19/gW9RZGSZwK+329sLejnxQRACEVFISEjPnj1TU1N59YCAgJiYGGdnZyZdVVFKSoq3t/eff/7Jq0t9gAsaIJORoyPrJv4R1K0b9/B+ZubuGzcE7aBuXUEvJz4IQn1XVFQ0Y8YM1TGTqalpcHBwcHCwiYkJq96qQjHAPX/+PK8u6QEuaJJonvnycHLq4+LCrXwXEaG6+oy2mJpSnToCXUusEIR67enTp97e3mvXruXVHR0dz5w5ExAQwKSrqtPJAS5omJg2Cwvq2pV7eP3Fi0N37gh07QYNSAz7QDGFINRfUVFRbxwzeXp6xsfHd+7cmUlXVfSOAW5ISIh0B7igeU5O4nmLvHuDBl6v/3y2KCJCoGs3bizQhUQMQainQkJCevXq9UxlS7aAgIDTp087ODgw6aqKnj592r179zcOcM+ePTtjxgwmXYFIGRiIapnpBa8PCmOfPj354IHWryqTUbNmWr+K6CEI9U5RUZG/v/8bx0x//vmndMdMigHuhQsXeHXFALdTp05MugJRa9WKdQf/GtCkidvrz+8IMSh0dtbzNWUUEIT65cmTJ926dVu/fj2v7uTkdPbs2enTpzPpquoUk4I6NsAFrXNxIbbb4b5u/uuDwrOPHkWprO6kYa+v8aa3EIR6JDIy0s3NLTY2llf38vKS7phJOcAteX2RRqkPcEEIBgbUti3rJv7l07x569q1uZXFkZFavJ6JiajGxAwhCPWFYlLw+fPnvLpizGQvzTdq3zHADQ8Pl+4AF4Tj5iaeZyZlMtk8Ly9u5ejdu/EpKdq6Xps2DLejEhUEoe4rLCycNm3aG8dM69atCw4ONhZ2k1JNefcAt2PHjky6AompUYOaN2fdxL/GtW7d5PWVP5dGRWnlSjIZSfMmkDYgCHVccnJyt27dNmzYwKvXrVs3IiJi2rRpTLqqOp0c4AIbrw/C2DI0MPiqSxduZe+NG9devND8lVq0wFrbSghCXRYREeHm5hYXF8erd+3aNT4+3sPDg0lXVVRYWDh16lTdG+ACM46OonqPYkq7dvWrV1ceyomWaXxQKJNR9+4aPqeUIQh1VkhISO/evV+o/CwZEBBw6tSpOtJcVEkxwN24cSOvLvUBLjDWq5d4ZgqNDQ0/f305i23Xrt3NyNDkNdq0odefytFzCEIdVFhYOGXKFNUxk5mZ2YYNG6Q7ZtLJAS6IQu3a5Or6z68FW+Tz7QJcXR04r/eVlZf/EBOjsbObmFCvXho7m05AEOqa5OT
krl27hoaG8ur16tWLiIiYMmUKi6Y0QCcHuCAiPXuSuTkRiWFoaGZk9MnrT3ttvHQpKStLM2fv3h0v0fMgCHXK2bNn3dzc4uPjefVu3brFx8e7u7sz6aqKdHWAC+JSrRoNGMC6iX/9x929Bmcp1JKyshUqe4Wqw9ERD4uqQhDqjpCQkD59+rxxzHTy5Mna0pwS0NUBLohR69bUogXrJv5hbWr68et3+4Pj41Nzcqp0UiMjGj6cDPBtnw+fiC7Izc0dPXr0zJkzS0tLuXUzM7ONGzdKd8ykkwNcELUhQ8jamnUT//i0UycrzgvvhaWlq1SW0q2cPn3Izq6qbekiBKHk3b9/v0uXLrt27eLV69WrFxkZ6efnx6SrKpLL5StXrtS9AS6IXbVqNGoUGRqy7oOIqGa1arPc3LiV3+LiXhYUqHm6Vq0ID5S9BYJQ2o4dO+bu7n716lVe3dvbOz4+3u31f0VSkZubO2bMmDlz5ujYABekoW5dGjiQdRP/+KJz52qcv+3ZRUW/qiylVCF16tCwYRprS+cgCKVKLpcvW7Zs8ODBL1++5H0pICDgxIkTEh0z6eQAFySmQwd6fXkXVupYWk5r355bWXn+fE5RUeXOYmVF48cTfnx8OwShJCkmBQMDA8vKyrh1S0vLnTt3BgcHGxkZseqtKnRygAuS1Lu3SLYomufpacK5VZtZUBCckFCJP1+tGk2cKJ6JT3FCEErPvXv3OnfuvHv3bl7dxcUlJiZm1KhRTLqqorcNcGUy2SeffCLdAS5IlUxGw4ZRy5as+6B6NjYTX4/kH2NiCl5/leitzMxo4kQsIvNeCEKJOXr0qIeHx7Vr13j1/v37x8XFffDBB0y6qqJ3DHB37NixcuVKiQ5wQdoMDGjECBLBv6kFXbsacd55eJ6buz4x8f1/rFo1mjSJXt/1Ht4IQSgZ7xgzzZs379ChQzVq1GDVW1Xo5AAXdISBAfn4EOstvVxq1hz5+th0WXR08es/NfLZ2NC0aUjBCkIQSkNOTs6oUaMCAwPLy8u5dcWk4NKlSw3F8cB3ZenkABd0ikxG/fvTgAFs30P/unt3A87ab8lZWVuvXHnr765bl6ZPp1q1hOhMJyAIJeDu3budO3fes2cPr964ceNz586NHDmSSVdVpKsDXNBNHh40aRJZWLC6fks7uyHNmnEriyIjS1//sfgfHTrQlClkaSlQZzoBQSh2hw8f9vDwuH79Oq8+YMCA2NjY1q1bM+mqinR1gAu6rEEDmjWLXFxYXf+b7t25y4Hfz8zcfePGa3tlmJnRiBE0ZIhIFgSQEASheCnGTEOHDn316hW3LvUxk04OcEEvWFrShAk0cCCZmAh/8Q4ODr1fj+HvwsP//UGycWP68EOS5k/GzMnkIth8C1Tl5OT4+fmFhYXx6lZWVhs2bBgxYgSTrqru8OHDEydO5EU7EQ0cOHDr1q3VORtzA4hXdjYdO0Y3bwp82fBHj7xf35V639ixw9zdqW9fRGBVIAjF6M6dOz4+Pjdu3ODVmzRpEhYW1qpVKyZdVZFcLl++fPmCBQt4t0NlMtncuXMXL15sgEXxQVoePaKTJ+npUyGv2W3DhsjHj5WHHZo0ib96VcZZmxvUgCAUnUOHDk2cODFLZRPOQYMGbdmyRaJjpncMcDdu3Ojr68ukK4Cqksvp7l2KjKQnT4S54LF79wZs2cKtHD9+vE+fPsJcXVchCEVEV8dMd+7cGT58+E2V+0iSHuACvCY5mS5coFu36N2v91VdrVoev/0Wx7ld1L1797Nnz2r3oroOQSgW2dnZfn5++/bt49WtrKxCQ0N9fHxYNKUBOjnABXizggK6do2uX6ekJNLst1ZLS2rZklq3pnr1wsLCeDdRIiIiunbtqsnL6RldDMLcXEpLo5cvKTubcnOpoIAKC//9qqkpmZqShQVZWVGNGlSrFtWoQTLZ208nhNu3b/v4+KiOmZo2bRoWFtZSBAseqkFXB7gA75efT/fu0YMH9Pg
xqTwaVlHGxuTkRA0bkosLOToqv03J5fI2bdpwl6Ho37//0aNHq9y0/tKJICwvpydP6NEjSk6mlBTKz6/cHzc2Jnt7qluX6tenBg1I8GnngwcPTpo0SXXMNHjw4C1bttjY2Ajcj0ZkZ2dPnjx5//79vLrUB7gAlZaXR6mp9OIFZWTQq1eUnU35+VRQ8NqQ0cSEzMzI2pqsralmTapVi+ztyc7ubcvZbN26deLEidxKXFwctmdRm5SDsLSU7tyhGzfo3j2q7AZdb2NgQPXrU4sW1LKlAKtI6OqY6fbt28OHD7916xav3rRp03379rVo0YJJVwCiU1RExsZqLN5WVlbWokWLu3fvKiu+vr6q7+ZCBUkzCFNTKT6erl/XWP6pMjAgFxdydaWmTbV04zQ7O3vSpEkHDhzg1a2trUNDQ4cPH66NiwrgwIEDkyZNys7O5tUlPcAFEJu1a9fOmDFDeSiTya5cuSLRpaaYk1QQKp5Ujo6mpCThLlqjBnXqRB06kEZ3Arpy5Yqvr+/9+/d59WbNmoWFhUl0zKQY4M6fP5/3l0rqA1wAVYWFhWZmZgwbKCkpadKkyWPOO4UTJkzY8vqbFVBB0gnCe/fo9GlKTWVzdUtL6tqVXF01sojfjh07/P398/LyePUhQ4Zs3rxZomOmzMzM8ePH//3337y6tbX1pk2bhg0bxqQrAI2Lior6f//v/zk4OGzevJltJ6tWrfr000+Vh4aGhjdv3mzSpAnDliRKCkGYnk5Hj9KDB6z7IKpZk/r1o6ZN1T5BWVlZUFDQ8uXLdWzMdOXKFR8fnwcq/x81a9Zs3759zZs3Z9IVgAbJ5fJDhw59//33sbGxJI7UKSwsbNSoUSpneDBjxoyQkBCGLUmUuIOwrIwiIig6WuvvqFZK8+Y0aJAau5xkZmaOGzfu+PHjvLq1tfXmzZuHDh2qof6E9rYB7tChQzdv3mxtbc2kKwDNEmfqLFu2LDAwUHlobGx89+7d+vXrM2xJikQ8/nj+nEJCKCJCXClIRLdu0W+/kcq+SO92+fJld3d31RRs06bNxYsXJZqCZWVlgYGB48aN46WgYn+Mffv2IQVBZ5iZmX3yySfcysaNG5OEfF7hTWbPnm1ra6s8LCkpWbFiBcN+JEqsQRgfT2vX0osXrPt4i4IC2r2bDhyg0tKK/Pbt27d7enqq3jkcM2ZMTEyMC7sdzqoiIyNjwIABy5Yt491UqFmz5tGjR5cuXSpjvUwBgGbNnj2bu/eZGFLH0tLyo48+4lZCQkJSWT1LIVniC8KyMtq3jw4frmDGsJSYSOvWkcqL8FxvGzMZGhouXbp027ZtFuz2vK4KxQD3xIkTvHqbNm3i4uL69evHpCsArbKysvr444+5leDgYOap88knn1hZWSkPCwsLV65cybAfKRJZEBYUUGgoXb7Muo8Ke/aM1q6llJQ3fjEjI6N///7Lli3j1WvWrHnkyJF58+ZJdMy0ffv2Ll26PHz4kFdXDHAbNWrEpCsAAXz66ae81Fm1ahXDfoioZs2as2bN4lZ+++23ly9fsupHisQUhNnZtH49JSez7qOScnMpNFT1odZLly65u7ufPHmSV2/btm1cXFzfvn2F6k+TlAPc/NfXsZP6ABeggsSZOl988UW1atWUhzk5Ob/++ivDfiRHNEGYlUUbNlB6Ous+1FJcTNu20b17ysK2bds8PT1Vx0xjx46Njo6W6JjpbQNcW1vbo0ePSneAC1ApvNTJzs5mnjp16tTx9/fnVlauXJmTk8OqH8kRRxDm5FBoqPprtLOleFSktJR27KCHD0tLSwMDA8ePH69jY6Z3D3CxLyjojzp16kybNo1bEUPqzJ0718TERHmYmZn5xx9/MOxHWkTwHmFREa1fL94HRCtCLlesR5peUjI2PPxUVBTv67a2ttu3b+/duzeL5jTgr7/+mjFjRr7Kth5jx45dt26dubk5k64AWElOTm7cuHFxcbGy8sMPP3z55ZcMWyIif3//9ev
XKw/r1Knz8OFD7uAV3ob1iLC8nHbulHYKEpFMRnJ5Ymqq+5o1qinYrl27uLg4iaagYoA7YcKEtw1wkYKgh+rVq8fbBenHH38sKChg1Y/CggULjDhLIj9//nzdunUM+5EQ1kF4/Lgo1k6rsq1Xr3qtX/9I5e7uuHHjoqOjGzZsyKKpqkpPT3/bpOCxY8fmzZvHpCsAMVBNHe5ojAkXF5dRo0ZxK8uXL+cOW+FtmAbh9et04QLLBjShtLw88OTJiXv35peUcOtGRkZLly7966+/JDpmSkxMdHd3P3XqFK8u6QEugKaops6yZcuYp87ChQu56xUnJydjP4qKYBeEr17RwYPMrq4h6fn5/TZvXqZyO7SWufmx4GDpjpm2bt3q6en56NEjXn38+PHSHeACaJYIU6dly5a89RoXL15cKv7FSVhjFIRyOe3fr8VtdQVxMTXVLSTktMo7Eu0dHOICAnqlp1NuLpPGqkIxKThx4kTehIdigLt161aJDnABNK5ly5ZDhgzhVsSQOt988w33Rab79+/v2rWLYT+SwOip0fh4OnyYwXU1Z8uVKwEHDxa8fjuUiCa0aRMyZIi5sTERUYsWNHo0g+bUlZaWNmbMmDNnzvDqtWrV2r59e69evZh0BSBaFy9edHNz434X3bZt29ixYxm2RET9+vXjru/fsmXLq1evSnSLN2Gw+Gjy8khl5klCFJOCk/bu5aWgkYHB0t69t/j6/pOCRHTzJvcte5G7ePGiu7u7agq2b98+Li4OKQigqkOHDryXaL/77rvy8nJW/Sj897//5R7euHHjoPTnobSKRRCeOUOFhQyuqwlpeXl9Nm1646Tg35MmzfPy4v+BY8eI9b+Kiti8ebOXl9fjx4959QkTJkRHRzdo0IBFUwASEBQUxD0UQ+p06dKla9eu3Mq3337L/pVxERM8CNPT6eJFoS+qIQkpKW4hIWdVHiHp4OAQHxDQ842PkGRkiPy/VzEpOHny5DdOCm7ZsgUv5AK8Q7du3USYOrx4vnjxoupeMaAkeBCePUus/4qoJyQhocu6dUkqmy5NbNMmatq0+tWrv/VPRkSIdkuptLS0Pn36qL4paGdnd/z4cek+9QogpAULFnAPL168qLoYocD69evn7u7OrSxatIhVM+InbBBmZNCNG4JeUROKSktnHjw48+DB4rIybl0xKbjZ17eaclKQRxH5OTl06ZLWu6y8mJiYtm3bnj17llfv0KFDXFxcjx49WDQFID39+/cXYerMnz+fexgREREZGcmqGZETNgjPnZPccDAlJ6dHaGhIQgKvbmdhcWLy5DdMCnIpH2I+f15s/+EhISE9evRQ3VN00qRJUVFR9evXZ9IVgETxUic8PJx56gwfPvyDDz7gVhYvXsyqGZETMAgLC+nKFeEupwnRSUluISHnVLZIdHV0jA8I8K74IyQZGeJ5fLSoqCggIGDmzJm8VTAUk4KbNm3CpCBAZQ0fPrx169bcCvPUkclkgYGB3MqxY8fi4uJY9SNmAgbh1auk8tadmIUkJPQMDU1V2V0lwNU1xt/f2camcqcTxyMzKSkp3t7ef/75J69uZ2d34sQJTAoCqOeNqRMfH8+qH4UxY8Y0bdqUW1myZAmrZsRMwCAU5TzZGxWVls44cEB1UtDUyCh4yJDgIUNMDA0rfdI7d0hlGyOBRUdHu7m5nT9/nlfv0qXLpUuXvL29WTQFoCPGjh3bpEkTboV56hgaGs6dO5db2bdv39WrV1n1I1pCBeHLl5SSItC1quZpdrb3xo1rVQZwjlZWZ/z8Alxd1TxveTndvFnV5qogJCSkZ8+eqpOCAQEBZ86ccXR0ZNIVgM5QTZ2wsLBr166x6kdh8uTJ3Cl/uVy+dOlShv2Ik1BByDQDKi4qKcktJOT8kye8uqezc3xAQOd69ap0dkYfQlFR0YwZM1QnBU1NTUNCQoKDg7kbWwOA2vz8/MSWOsbGxl988QW3smPHjrt377LqR5yECkIpfO4hCQm
9QkOfqayUHeDqetrPz8HKqqoXePRI+FnSp0+fdu/efe3atby6o6Pj2bNnZ8yYIXA/ADpMNXW2b9/OPHVmzJjh4OCgPCwrK1u+fDnDfkRIkCAsKSGVBy9Fpai01H///jdOCv45dKiak4KqyspIZQ0zrYqKinJzc7ugsumjp6dnfHx8p06dhGwGQB+IMHXMzMzmzJnDrYSGhqqup6jPBAnCJ0/o9YARlSfZ2d02bFifmMirO1lbh0+ZMr1DB01eTMC/fIpJwWfPnvHqAQEBp0+f5v5bBQBNMTMz+/TTT7kVMaTO7Nmza9WqpTwsKSn56aefGPYjNkIFoVhFPn7sFhIS+/Qpr+7l7BwfENCxbl0NX0+Qj6KwsNDf33/mzJklr9+JNTU1Xbt2LSYFAbRKNXVWrFjBsB8isrCwmD17Nrfy559/qj46p7cECUKxftwhCQm9Nm16/pZJQXtLS81fMjVV20vMPHnypHv37uvXr+fVnZycwsPD/f39tXp1ALC0tOSlTkhICPPU+eSTT6w4DzoUFhauXLmSYT+iIkgQvnghxFUqo7C0dOq+fTMPHixRmRRcN2xY8JAhxhqZFFRVVETZ2Vo5MxERRUREuLm5xcbG8upeXl7x8fEdO3bU3qUBQEmEqVOzZs0PP/yQW1m9enV6ejqrfkRF+0FYXk4vX2r9KpWRnJXVbcOGjSov+Ne1to6YOnVa+/bavXxGhpZOHBIS0rt37+fPn/PqiklBe3t7LV0XAHhUU2fNmjUZWvu3X0Gff/45dwHFvLy8NWvWMOxHPLQfhFlZotqZNuLxY7eQkDiVScGu9evHBwR4ODlpvQMt/FhQWFg4depU1UlBMzOzdevWBQcHG79tfwwA0A5e6uTm5q5evZphP0RUp06d6dOncyurVq3KUVlFUg9pPwjF9CmHJCT03rTpRV4erx7g6npq8uQ62pgUVKXpW6PJycndunXbuHEjr163bt3w8PBp06Zp9nIAUBF16tThTcmLIXXmzp3LfVYuMzPz999/Z9iPSGg/CFVSh4nC0tIpb5oUNDMy2jB8uBYnBVVpdMXR8PBwNzc31RXlu3XrFh8f7+HhocFrAUClqKbOH3/8wbAfIqpbt+6kSZO4lRUrVhQUFLDqRyS0H4SFhVq/xPskZWV1Wrs2VGVSsEH16jH+/lPatRO0G819ICEhIX369Hmh8ixSQEDAyZMn69Spo6kLAYAa6tWrx0udn376iXnqzJ8/38jISHn4/PnzdevWMexHDLQfhCLYesnKxCTv9WU2iahb/foXZsxoL/x75Zr4QAoLC/38/N44KbhhwwZMCgKIhAhTx8XFZfTo0dzK8uXLi1W+Q+oV7Qch6zVlsgoLU3Nzf+jbtxonGwJcXU9OnlzbwoJBQ1X+QJKSkry8vDZt2sSr16tXLyIiYsqUKVU8PwBoiouLy6hRo7gVMaTOwoULDQz+/eafnJy8efNmhv0wp/0glMm0fol32n/7dqs1a3y2by8oKSEiQ5lso8CTgjxV+0DOnj3r5uaWkJDAq3fv3j0+Pt7d3b0qJwcAjVNNnS1btjDsh4hatGgxdOhQbmXJkiWlpaWs+mFO+0HIKm/eYkCTJn4CTwryqPuByOXyZcuW9e7dOy0tjfelgICAEydO1K5du8rNAYCGtWzZkpc6ixcvZp4633zzjYzzQ/n9+/d37drFsB+2tB+EIlvWsgbnzR421PpAcnNzx4wZExgYWMZ76tXMLDQ0FJOCAGImwtRp37593759uZXvv/++XEzvfAtJ+0HIPHjEpvIfyL1797p06aL6L8fZ2TkqKmry5Mka6gwAtKJ9+/Z9+vThVsSQOv/973+5hzdu3Dhw4ACrZtjSfhAK85a6hFTyAzl27JiHh8fVq1d5dW9v7/j4eFdXV811BgDaopo6Bw8eZNWMQufOnbt168atfPvtt3It7wogTtoPQmtrrV9CWir8gSgmBQcPHvzy9VXZZDLZvHnzTp48aWdnp4X+AEDzunT
p0rVrV25FDKkTFBTEPUxMTDx+/DirZhjSfhBaWIhtmpCxmjUr8rtyc3NHjx6tOiloaWm5Y8eOpUuXGorsKSQAeDde6ly8ePHEiROsmlHo27cv71Hz//3vf6yaYUiQ1yc4e1RCRT6Ne/fude7ceffu3bx648aNY2JieK8lAYAk9OvXj5c6ixYtYtWM0oIFC7iH586di4yMZNUMK4LsR4i1vpRsbMjM7N2/5ciRI+7u7teuXePV+/fvHxsb+8EHH2itOQDQLl7qREREME+dYcOG8b6riCGeBSZIEDo6CnEVSXjnR6GYFBwyZMirV6+4dcWk4KFDh2rUqKHd9gBAm0SYOjKZbP78+dzK33//rbqOv24TJAjr1RPiKpLw9o8iJydn5MiRgYGBvIeqLS0td+7ciUlBAB0gk8kCAwO5FTGkzujRo5s2bcqtLF68mFUzTAgShLVrk7m5EBcSvwYN3li+e/euh4fH3r17efWmTZteuHBh5MiRWm8MAAQxZswYXuosWbKEVTMKhoaGc+fO5Vb279+v+sqWDhMkCGUyatRIiAuJnIUF2du/8StWVlaqO3YOHDjwwoULLVu21H5nACAQ1dTZt28f89SZPHly/fr1lYdyuXzp0qUM+xGYIEFIRK//BKSnmjR524rb9vb2u3fvNjU1VRwqJgUPHjxYvXp14doDAEGIMHWMjY2//PJLbmXHjh137txh1Y/ABAxCzqZceuqdY7tOnTr98ssvRGRlZbV79+6lS5dyV6wHAJ1hbGz8xRdfcCs7duy4e/cuq34Upk+f7sDZn7WsrGz58uUM+xGSUN9qTU31fVBobk4uLu/+LbNmzfrf//4XGxvr6+srTFMAwMSMGTPEljpmZmZz5szhVjZt2vT48WNG7QhKwDEH282PmGvThiowwvvmm2+aN28uQDsAwJBq6oSGhjJPndmzZ9firPhRUlLy008/MexHMAIGYePGpM8zXlgdGwA4RJg6FhYWH330Ebfy559/pqamsupHMAIGoUxGHh7CXU5UGjfGOnMAwGVhYTF79mxuRQypM2fOHBsbG+VhYWGh4tkF3Sbs4xiuru9dYEw3eXqy7gAAROeTTz6xsrJSHhYWFq5cuZJhP0RkY2Mza9YsbmXNmjXp6ems+hGGsEFoYkKdOgl6RTFwdn7be/QAoM9q1qz54YcfciurV69mnjqff/65OWcJlLy8vNWrVzPsRwCCP6DfqZPerTLTqxfrDgBApFRTZ82aNQz7IaLatWv7+/tzK7/88ktWVharfgQgeBCampK3t9AXZah5c3J2Zt0EAIhUnTp1eKmzatUq1XWmBDZ37lwTzj6yWVlZwcHBDPvRNhavbLu5vW2lMV1jbEz9+rFuAgBEjZc6mZmZv//+O8N+iKhu3bqTJ0/mVlasWJGfn8+qH21jEYQyGQ0Z8rbFxnRK9+56/cYIAFRA3bp1J02axK2sWLGioKCAVT8K8+fPN+IsB/b8+fN169Yx7EerGC3i5ehIXbqwubRgnJyoc2fWTQCABKimztq1axn2Q0SNGjUaM2YMt7J8+fLi4mJW/WgVu9Use/TQ5RukJibk41ORpWQAAFxcXEaPHs2tiCF1goKCuCseP3nyZNOmTQz70R5236kNDWnkSPq//RZ0zaBBZGvLugkAkIyFCxfyUmfz5s0M+yGiFi1aDBs2jFtZsmRJaWkpq360h+mQxdaWhg/XwclCDw9q04Z1EwAgJeJMna+//lrG+Rb94MGDnTt3MuxHS1jfu2venHr2ZNyDZrm44ElRAFADL3Xu37/PPHXat2/f7/VvaN9//315eTmrfrSEdRASkZcXubmxbkJDHBxo1ChMDQKAGtq3b9+3b19uRQyp880333APb968uX//flbNaIk4vmUPHEgffMC6iSqzs6OJE3V21hMAtO+///0v9/DmzZsHDhxg1YxC586du3Xrxq189913crmcVT/aII4glMnIx0faWWhnR35+erd6HABolGrqfPvtt8x
TJygoiHuYmJh4/PhxVs1ogziCkP4vC93d/62w/v++EpycaOpUsrBg3QcASJ4IU6dv376dX38r+n//+x+rZrRBNEFIRDIZDRxIvXr98xypmJ8m5YZ0s2bk50fVqrHrBgB0R9++fd25QwJxpM68efO4h+fOnYuIiGDVjMaJKQgVvLxozBixz7QpQ7pbNxozhoyNmXYDADplwYIF3MNz585FRkayakZh6NChH7w+e7Vo0SJWzWic+IKQiJo1o4AAcnBg3YcK7kDQ3JzGj6cePUQ9cgUACRo2bJjYUkcmk82fP59bOX78eFxcHKt+NEuUQUhENWuSvz95eYnrVQRl5jVtSh9+SE2aMO0GAHSTaur8/fffzFNn9OjRTZs25VYWL17MqhnNElPM8BgaUq9eNH26uIaGFhbk60vjxpGlJetWAEBniTB1DA0NeTOF+/fvv3r1Kqt+NEjEQajg4EAzZtDgweyfyTQ0pM6d6aOPpP2aBwBIgaGh4dy5c7kVMaTOpEmTGjRooDyUy+VLlixh147GiD4IiUgmI1dX+uQT6tmTzcOZBgbUvj199BH17UtmZgwaAAD9M3ny5Pr16ysPxZA6xsbGX375Jbeyc+fOO3fusOpHU6QQhAomJtS1K82ZQ/37U40aAl3U1JQ6d6ZPPqGhQ7HFLgAISZyp4+/v7+joqDwsKytbtmwZw340QjpBqGBiQh070scf08SJ1Lq1tt5bkMmofn0aNoy++IL69iUbG61cBQDgnaZPn85LneXLlzPsh4jMzMzmzJnDrWzevPnx48eM2tEMqQWhgkxGLi40YgR99RWNGkVt22rm0RVjY2rShAYNos8/pylTqF07vCAIAAypps6mTZuYp85//vOfWrVqKQ9LSkp+/PFHhv1UnYz5KnYak55OycmUkkLPnlF6OhUWKsqbLl/2CwtT/q5Jbdtu8vH558DIiGxtqXZtcnCgunXJyUlcb2sAgN7Ly8tr0KBBenq6svLRRx/9+uuvDFsiom+//Za7PriZmdn9+/e5g1dp0aEg5CkspOxsysnZtHOnH2eZhklDhmz66ScyNydra7K0xOvwACBy//vf//7f//t/ykMzM7MHDx44MH2vLCsrq379+llZWcrKV199xfy2rdp0dwBkZka1a5OLCzk5vVavXp2aNCEnJ7KyQgoCgPjNmTPHhvOkQmFh4S+//MKuHSIiGxubDz/8kFv57bffuMNWadHdIAQA0Ak2NjazZs3iVtasWcM8db744gsLzuvdeXl5q1evZthPVSAIAQDE7vPPPzfnbHcqhtSpVauWv78/t/LLL79wb5ZKCIIQAEDsateuPX36dG5FDKnz1VdfmZiYKA+zsrL++OMPhv2oDUEIACABIkydunXr+vn5cSsrVqzIz89n1Y/aEIQAABJQt27dyZMncytiSJ3AwEAjIyPl4YsXL9auXcuwH/UgCAEApGH+/Pm81Fm3bh3DfoioUaNGY8aM4VZ++OGH4uJiVv2oB0EIACANqqmzfPly5qkTFBRkwFmK5MmTJ5s2bWLYjxoQhAAAkiHC1GnRosXw4cO5lSVLlpSWljJqRx0IQgAAyWjRosWwYcO4FTGkztdffy3jrE/y4MGDHTt2MOynshCEAABSopo6O3fuZNgPEbVr165fv37cyqJFi8rLy1n1U1kIQgAAKWnfvj0vdb7//nvmqfPNN99wD2/evLlv3z5GvVQaghAAQGJUU2f//v2smlHo3Llz9+7duZXvvvtOKps6IAgBACRGnKkTFBTEPbx06dLff//NqplKQRACAEgPL3USExOZp06fPn26dOnCrXz77besmqkUBCEAgPT06dOnc+fO3IoYUmfevHncw3PnzoWHh7NqpuIQhAAAkqSaOhEREayaURgyZEibNm24lUWLFrFqpuIQhAAAkjR06FCxpY5MJps/fz63cuLEiXPnzrHqp4IQhAAAkqSaOsePH2eeOqNGjWrWrBm3smzZMlbNVBCCEABAqkaNGtW
0aVNuhXnqGBoa8u7ZHjhw4MqVK6z6qQgEIQCAVL0xda5evcqqH4WJEyc2aNBAeSiXy5csWcKunfdDEAIASNikSZPEljrGxsZffvklt7Jr1647d+6w6ue9EIQAABKmmjo7d+5knjrTp093dHRUHpaVlTG/Z/sOCEIAAGnz9/cXW+qYmpp+9tln3MrmzZsfPXrEqJ33QBACAEibmZnZnDlzuJXNmzc/fvyYUTv/+PDDD2vVqqU8LCkp+fHHHxn28w4IQgAAyfvPf/4jttSxsLD4+OOPuZV169alpKSw6ucdEIQAAJKnmjpr165lnjqffvpp9erVlYeFhYU///wzu3beCkEIAKALVFPnl19+YdYNERHZ2NjMmjWLW/n999/T09NZ9fM2CEIAAF2gmjq//fYb89T54osvLC0tlYd5eXm//vorw37eCEEIAKAjvvjiCwsLC+WhGFKnVq1a/v7+3MrKlStfvXrFqJ03QxACAOiIN6ZOVlYWq34UvvzyS1NTU+VhVlbWH3/8wbAfVQhCAADd8dVXX4ktderWrevn58et/PTTT3l5eaz6UYUgBADQHXXr1p08eTK3smLFivz8fFb9KMybN8/IyEh5mJ6evnbtWob98CAIAQB0SmBgIDd1Xrx4wTx1GjVqNHbsWG7lxx9/LCoqYtUPD4IQAECnqKbODz/8UFxczKofhQULFhgY/Js4T5482bRpE8N+uBCEAAC6RjV1QkNDGfZDRC1atPDx8eFWli5dWlpayqofLgQhAICuadGixfDhw7kVMaTOwoULZTKZ8vDBgwfbt29n2I8SghAAQAd9/fXXvNTZsWMHw36IqF27dv379+dWFi9eXF5ezqofJQQhAIAOUk2dRYsWMU+db775hnt48+bNffv2MerlXwhCAADdJMLU6dSpk7e3N7fy3XffyeVyRu38A0EIAKCbOnXq1L17d25FDKkTFBTEPbx06dKxY8dYNaOAIAQA0FmqqfP333+zakahd+/eXbp04Va+/fZbVs0oIAgBAHRWnz59xJY6RDRv3jzu4fnz58PDw1k1QwhCAADdxkudc+fOsU0dIhoyZEiHDh24lUWLFrFqhhCEAAC6bciQIe3bt+dW2KYOEclksrlz53IrJ06ciImJYdUPghAAQJe9MXXOnTvHqh+FkSNHNmvWjFtZtmwZq2YQhAAAOm7UqFHiSR0FQ0PDwMBAbuXgwYMXL15k0gyCEABAxxkaGvJmCg8cOHDlyhVW/ShMmDChQYMGykO5XL58+XImnSAIAQB038SJE3mps2TJEnbtEBEZGxt/9dVX3Mru3btv374tfCcIQgAA3aeaOrt27WKSOlz+/v6Ojo7Kw7KyMib3bBGEAAB6QSSpw2Vqavr5559zK1u2bHn06JHAbSAIAQD0gqmp6WeffcatMEkdnlmzZtnZ2SkPS0pKfvjhB4F7QBACAOiLDz/8kJc6P/74I8N+iMjCwuLjjz/mVtatW5eSkiJkDwhCAAB9wUsdAwODV69esWvnH5988kn16tWVh0VFRT///LOQDSAIAQD0iCJ1DAwMRo0adfXq1S1btrDuiGxsbD788ENu5ffff09LSxOsAQQhAIAesbGx2bBhw507d3bu3NmyZUvW7fzjiy++sLS0VB7m5eX9+uuvgl0dQQgAoF+GDx/u4uLCuovX2NraTp8+nVtZtWqVYLdtEYQAAMDe3LlzzczMlIdZWVm///67MJdGEAIAAHsODg6TJ0/mVn766afc3FwBLo0gBAAAUZg3b56RkZHyMCMjY926dQJcF0EIAACi0KhRo3HjxnEry5YtKyws1PZ1EYQAACAWCxYsMDD4N5hSU1M3bdqk7YsiCAEAQCyaN2/u4+PDrSxbtqy0tFSrF0UQAgCAiCxcuFAmkykPHzx4sH37dq1eEUEIAAAi0q5du379+ikPO3XqVLduXa1eEUEIAADi0qJFC8Uv3N3df/nlF29vb61eDkEIAAAiUlxcvHv3bsWv4+LiOnX
qtHz5cq1eEUEIAAAiEhoampycrDyUyWQDBw7U6hURhAAAIBZlZWW8jXl9fHxat26t1YsiCAEAQCy2b99+9+5dbmX+/PnaviiCEAAAREEuly9dupRbGTBggJubm7aviyAEAABRCAsLu3btGreyYMECAa6LIAQAAFFYsmQJ97B79+5eXl4CXBdBCAAA7B07diw+Pp5bCQoKEubSCEIAAGBv8eLF3EMPD48+ffoIc2kEIQAAMBYeHh4ZGcmtCDYcJAQhAAAwt2jRIu5hy5YtBw8eLNjVEYQAAMDSxYsXT548ya18/fXX3F0JtQ1BCAAALH377bdyuVx56OLiMmrUKCEbQBACAAAzN27cOHjwILcSFBRkaGgoZA8IQgAAYOa7774rLy9XHtarV2/ChAkC94AgBAAANu7fv6/ccUkhMDDQxMRE4DYQhAAAwMaiRYtKS0uVh3Xq1Jk6darwbSAIAQCAgeTk5K1bt3IrX375ZbVq1YTvBEEIAAAMLFu2rLi4WHlYs2bNmTNnMukEQQgAAEJ7/vz5+vXruZU5c+ZYWVkxaQZBCAAAQvvxxx8LCgqUh9bW1h999BGrZhCEAAAgqMzMzODgYG5l9uzZNWrUYNUPghAAAAS1cuXKnJwc5aG5ufmcOXPYtYMgBAAAAWVnZ//666/cyowZM2rXrs2qH0IQAgCAkH777beXL18qD42NjT///HOG/RCCEAAABFNYWLhq1SpuZerUqc7Ozqz6UUAQAgCAQIKDg1NTU5WHhoaGX375JcN+FBCEAAAghJKSkhUrVnAr48aNa9KkCat+lBCEAAAghI0bNyYlJSkPZTLZvHnzGPajhCAEAACtKysr++GHH7gVX1/f1q1bs+qHC0EIAABat23btrt373IrgYGBrJrhQRACAIB2yeXyZcuWcSsDBgxwc3Nj1Q8PghAAALQrLCzs2rVr3MqCBQtYNaMKQQgAANq1ZMkS7qG3t7eXlxerZlQhCAEAQIuOHj0aHx/PrQQFBbFq5o0QhAAAoEWLFy/mHnp4ePTu3ZtVM2+EIAQAAG05e/ZsVFQUt7Jw4UJWzbwNghAAALRl0aJF3MNWrVoNGjSIVTNvgyAEAACtiI2NPXnyJLfy9ddfGxiILndE1xAAAOgG3nCwcePGI0eOZNXMOyAIAQBA865fv37o0CFuJSgoyNDQkFU/74AgBAAAzfvuu+/Ky8uVh/Xq1Rs/fjzDft4BQQgAABp2//793bt3cyuBgYEmJias+nk3BCEAAGjYokWLysrKlIf29vZTp05l2M+7IQgBAECTkpOTt27dyq18+eWX1apVY9XPeyEIAQBAk5YuXVpcXKw8rFmzZkBAAMN+3gtBCAAAGvP8+fMNGzZwK3PmzLGysmLVT0UgCAEAQGN+/PHHgoIC5aG1tfVHH33EsJ+KQBACAIBmZGZmBgcHcyuzZ8+uUaMGq34qCEEIAACa8csvv+Tk5CgPzc3N58yZw66dikIQAgCABmRnZ69evZpbCQgIqF27Nqt+Kg5BCAAAGrBmzZqXL18qD42NjT/77DOG/VQcghAAAKoqPz//l19+4VamTp3q7OzMqJ3KQRACAEBV/fnnny9evFAeGhoafvnllwz7qRQEIQAAVElJScmKFSu4lfHjxzdp0oRVP5WFIAQAgCrZsGFDUlKS8lAmk82dO5dhP5WFIAQAAPWVlZX9+OOP3Iqvr2/r1q1Z9aMGBCEAAKjvr7/+unv3LrcSGBjIqhn1IAgBAEBNcrl8+fLl3MrAgQPd3NxY9aMeBCEAAKhp7969165d41bmz5/Pqhm1IQgBAEBNS5cu5R726NHDy8uLVTNqQxACAIA6jhw5Eh8fz60EBQWxaqYqEIQAAKCOJUuWcA89PDx69erFqpmqQBACAEClnT17NioqiltZuHAhq2aqCEEIAACVtmjRIu5h27ZtBw8ezKqZKjJi3YCmlZXRixeUkUGZmZSdTbm5FBn52m+4d4/++ovMzcnGhqpXJ1tbql2
bzMwYtQsAID2xsbEnT57kVhYsWCCTyVj1U0U6EYSFhfTwIT16RElJ9OIFlZe/9tWMjNcO8/Pp9Xc/iYhq1KB69ah+fWrUiKpX12qzAABS9/3333MPGzduPGLECFbNVJ2UgzAvj65fp5s3KSmJH36V9fIlvXxJV64QEdnZUYsW1Lo12dlppE0AAF1y5cqVQ4cOcStBQUGGhoas+qk6CQahXE7371N8PN29W9X8e6O0NEpLo4gIcnQkV1f64AMyNtb8VQAApGnx4sVyuVx56OzsPH78eIb9VJ2kgrC8nC5fppgYSk8X4nIpKZSSQidPkrs7dexI5uZCXBQAQMTu3bu3e/dubiUwMNDExIRVPxohkSCUy+nyZQoPp1evhL50QQFFRND589SxI3l6kqmp0A0AAIjGokWLysrKlIf29vZTpkxh145mSCEIk5PpyBF69oxlD8XFFBlJFy9Sr17Urh1J9uEoAAC1JSUl/fXXX9zKl19+Wa1aNVb9aIq4g7CoiI4fp4sXWffxf/Ly6MABSkykYcPI1pZ1NwAAglq6dGlxcbHy0NbWdubMmQz70RQRv1D/6BH9/ruIUlApOZn++IMuXCDOdDEAgG579uzZxo0buZU5c+ZYWloyakeTRDkilMspPJwiIsSbNKWldOwYPXhAPj54GR8A9MGPP/5YUFCgPLS2tp49ezbDfjRIfCPCoiLato3Cw8Wbgkp37lBICL14wboPAADtysjICAkJ4VY++uijGjVqsOpHs0QWhFlZtH79G1Z+Ea2XL2n9enrwgHUfAABa9Msvv+Tk5CgPzc3NP/30U4b9aJaYgjA9ndavl94Aq6iI/vqLrl9n3QcAgFZkZ2evWbOGWwkICKhduzarfjRONEGYlkYbN1J2Nus+Kklx/7asjPbs+WeFNgAA3bJ69eqXL18qD01NTb/88kuG/WicOIIwM5M2baK8PNZ9VJ7yhUK5nPbtoxs3mHYDAKBh+fn5K1eu5FamTJni5OTEqh9tEEEQ5ubS5s2Um8u6jyqTy2nvXnr4kHUfAAAaExIS8oIzY2VoaKhjw0FiH4SlpbRtG4OF07SkrIx27hRoKVQAAC0rKSn5+eefuZUJEyY0btyYVT9awjoIDx6klBTGPWhWYSFt305FRaz7AACoqvXr1yclJSkPDQwM5s6dy7AfLWEahPHxuvmASUYG7d/PugkAgCopKyv78ccfuRVfX99WrVqx6kd72AVhWhr9/Tezq2vbzZuUkMC6CQAA9f3111/37t3jVgIDA1k1o1WMgrC8nMLCqLSUzdWFcfy47sx9AoCeKS8vX758ObcyaNAgV1dXVv1oFaMgPHeOUlPZXFowxcV08CDrJgAA1LF3795r165xK/Pnz2fVjLaxCMKsLAoPZ3Bd4T14gBVnAECKli5dyj3s0aOHp6cnq2a0jUUQnjxJJSUMrsvEiRM6fgcYAHTOkSNHEl5/yiEoKIhVMwIQPAhTU+n14baOy8qi2FjWTQAAVMKSJUu4hx07duzVqxerZgQgeBCePSv0FZmLjibOns4AAGJ25syZqKgobmXhwoWsmhGGsEH47BnduSPoFcUgPx+vUgCAVCxatIh72LZt20GDBrFqRhjCBuG5c4JeTjwuXKDyctZNAAC8x4ULF06dOsWtLFiwQKbcXUBHCRiEeXn6+AilYp+mrCy6fZt1KwAA7/H9999zDxs3bjxixAhWzQhGwCC8fJnKyoS7nEgof5Kq2N3RXB3YhQMApOny5cuHDx/mVhYuXGhoaMiqH8EIG4T67MEDysl592/JzMxs3759YGBgmR7+xAAArC1evFiuuIlFRETOzs7jxo1j2I9ghArCtDTi7Gilj+RyunnzHV8vLy+fOHHivXv3li1bNnjwYO5+0AAA2nbv3r09e/ZwK/PnzzcxMWHVj5CECsJbtwS6kJi9MwgDAwOPHj2q+PWxY8fc3d2vXr0qSFsAAPT9999z70XZ29v7+fkx7EdIQgXh3bsCXUjMkpL
etk9hZGQkb7uT+/fvd+nSZdeuXYJ0BgB6LSkpadu2bdzKV199Va1aNVb9CEyQICwqoqdPhbiQyJWX06NHb/yKl5fXkiVLeJPSubm5Y8aM+fTTT0uxSBsAaNPSpUuLOet+2NraBgQEMOxHYIIE4ZMneIvuH48fv7Esk8nmzZt36NChGjVqcOtyuXzVqlV9+vR5oeczrACgNc+ePdu4cSO38tlnn1laWjJqhwGhghAU3vlR9O/fPy4u7oMPPuDVz5496+bmFh8fr83OAEBP/fDDDwUFBcpDa2vr2bNnM+xHeIIEoc5vPVhxz58T5+lkVS4uLjExMaNGjeLVk5OTu3btGhoaqs3mAEDvZGRkhISEcCsff/xx9erVGbXDhiBBiNt6SsXF79223tLScufOncHBwUZGRtx6YWHhlClTZs6cWaI/m1gBgJb98ssv3HU8zM3NP/30U4b9MKH9ICwre++3fv2SkVGR3xUQEHDixInatWvz6iEhIb1798aUIQBUXXZ29urVq7mVmTNn2tnZseqHFe0HYVbWu28G6p0K/1jg7e0dHx/v5ubGq0dERLi5ucXFxWm4MQDQM6tXr37F+Y5kamr6xRdfsGuHGe0H4fvWFdM72dkV/7316tWLjIxUfa01OTm5W7duvAe9AAAqLj8/f+XKldzK1KlTnZycWPXDkPaDMC9P65eQlvz8Sv12MzOzjRs3BgcHGxsbc+uFhYVTp07FlCEAqCc4OJg7yWJoaKifw0ESIggLC7V+CWlR6wMJCAg4efLkG6cMe/Xq9fz5c010BgD6oqio6KeffuJWJkyY0LhxY1b9sKX9IBTZeOX8kyclbPd2UPcD6datW3x8vLu7O68eGRnp5uYWGxtb5c4AQF9s3LjxKWfBLwMDg7lz5zLshy3tByHrNWXsLS29GzTwbtCgtoUFEd3NyOi1adNzhtv+VeEDqVevXkRExJQpU3j1J0+edOvWbf369VVqDAD0Q1lZGW9x4xEjRrRq1YpVP8xpPwiVO9My0tfF5cyUKX1dXF7832xl5OPHbiEhF1itd1O1D8TMzGzDhg2qU4ZFRUX+/v6YMgSA99q6deu9e/e4lXnz5rFqRgy0H4Qi2N348rNnX585w608yc723rgx9NIlBt1o4gMJCAg4depUnTp1ePWQkJCePXs+e/as6pcAAJ1UXl6+fPlybmXQoEGurq6s+hED7QehqanWL/E+be3tfxs0yPj1BCosLZ2yb9/MgweLBZ4y1NAH0rVr1/j4eA8PD149KirKzc3twoULGrkKAOiYvXv3Xr9+nVuZP38+q2ZEQvtBKI4drQJcXU9Nnmyvsp56SEJCz9DQZ0JOGWruA6lbt25ERMS0adN49adPn3bv3n3t2rWauhAA6IylS5dyD3v27Onp6cmqGZHQfhCKZi+PrvXrxwcEdKxbl1ePTkpyCwk5L9iUoUY/EFNT03Xr1r1xynDGjBkzZ87k7jEGAHru8OHDCQkJ3EpQUBCrZsRD+0FoY6P1S1SYk7V1+JQp/h068OpPs7O9N25ce/GiEE1o4QMJCAg4ffq0vb09r66YMkzF7h8AQERES5Ys4R527NixZ8+erJoRD+0Hobm5GKYJlUyNjNYOHRo8ZIjJ61OGRaWlMw4cEGLK8PWtdzXFy8srPj6+U6dOvHp0dLSbm9u5c+e0cVEAkJDTp09HR0dzK19//TWrZkRFkG2YxLeWeYCr62k/vzdOGfbYuDFVq+uj1qqlpRM7OTmdPXt2+vTpvHpKSkqPHj14W44BgL5ZtGgR97Bt27YDBw5k1YyoCBKEKk/5i4Gns3N8QEAnlSnDmORkt5CQc8nJWrlqjRpaHR+bmpr++eefwcHBJiYm3HpRUdHMmTMxZQigty5cuHD69GluJSgoSMb6PW+RECQIHR2FuErlOVlbn50yZbrKlGFKTo73xo0rz5/X/CUF+SgUU4YODg68ekhISJcuXZKSkgToAQBE5fvvv+ceNm/efMSIEayaERt
BgtDZWYirqMXUyOjPN00ZFpeVzTl2bHJYWIFmF2oR6qPw9PSMj4/v3Lkzr56QkODm5nb27Flh2gAAMbh8+fLhw4e5lfnz5xsYCPL9XwoE+SBsbcXzEsUbBbi6npkyxcHKilfffPly1w0bkrKyqnoB5dbEDRpU9VQV5ujoeObMmYCAAF49LS2tT58+y5YtE6wTAGBr0aJFcs4G6c7OzuPGjWPYj9gIEoQyGbm4CHGhKuhSr158QEDnevV49YSUFLeQkDMPH1bp7Iob8dbWAj83ZGpqGhwcrDplWFpaGhgYOGnSpIKCAiH7AQDh3b59e8+ePdzK/PnzeW8e6zmhhsbNmgl0oSpwtLI6O2XKJx078uppeXl9N29eFhVV1Qs0bcpkCfKAgICYmBhnlbuyW7Zs8fLyevz4sfAtAYBglixZUs7Z9Mbe3l51Bxs9J1QQNm5Mrw9KxMnE0HDlgAGhPj7VXv9xqbS8PPDkyUl796ozZai8I9GypSZ6VIerq2t8fLy3tzevfvHiRTc3N96zZACgM5KSkv766y9uZe7cuWZmZqz6ESehgtDYmJo3F+haVTa5bdvIqVOdVZaA2XLlitf69Y9fvarc6RSjQCsrIScIVdnZ2Z04cUJ1s5X09PR+/fphyhBAJy1ZsoS7NZutre2MGTMY9iNOAj411K6dcNeqMldHx/iAgB4NG/LqF1NT3UJCTqsxZdi2LfOtGY2MjJYuXbp58+Zqry/8rZgynDhxYn5+PqveAEDjnj17Fhoayq189tlnluJ+dJEJAYOwQQPtLaqiDXYWFscnTZrn5cWrp+fn96vslKFMRqLZ7mvixIlRUVH169fn1bdu3erl5fXo0SMWTQGA5v3www/cB+Ksra1nz57NsB/REjAIZTJSeQ5F5IwMDJb27r3Z1/eNU4YT9+7Nr+CUYfPmVL26NjpUT4cOHeLj41UX201MTHR3dz916hSTrgBAgzIyMngLK3788cfVxfSNSDyEfaGyXTuysBD0ipowsU2bqGnT6qv8Bdp65YrX+vWPKjJlKL7tvmrVqvX333+/ccqwf//+mDIEkLqff/45l7PTqoWFxaeffsqwHzETNgiNjEQYCRXRwcEhPiCgp8qUYWJqqntIyKkHD971hxs3JicnLTanLsWU4ZYtW8zNzbl1xZTh+PHjMWUIIFHZ2dlr1qzhVmbOnGknvv0PRELwJXbc3cnaWuiLakItc/O/3zJl2H/LlrdOGcpkJO7tviZMmBAVFdVA5YnWbdu2eXp6PqziSgIAwMKvv/76inOzytTU9PPPP2fXjtgJHoRGRtSrl9AX1RDFlOEWX1/zN00Zjt+z5w1Thm3akMri12LTvn37uLi4Xir/v1y6dMnd3f3kyZNMugIA9eTn569cuZJbmTp1qpMo70uJBItFVz/4QMzLcL/XhDZtoqZNa6AyZbjt6lXPdesevnz5b8nUlHr3FrI3tdWqVevYsWOqU4YZGRmYMgSQluDg4LS0NOWhsbHx3LlzGfYjfjLuSqzCSUuj4GDS9l7w2pSenz92927V2UFbc/NtI0b0UaytOniweN6aqKBt27ZNnz5ddXZw7Nixa9eutZDgs04AeqWoqMjFxeXp06fKip+f38aNG9l1JAGMtuGwsyOV5b6kpZa5+bGJE1WnDDPy8wds3bosKkresCGp7HQofuPGjYuOjm6o8ljQ9u3bPT09H7z7sSAAYG3Dhg3cFDQwMPjqq68Y9iMJ7Paj8vQklXe6pUUxZfjXiBG8KcMyxZThgQN50nzqsl27dnFxcb1VbupevnzZ3d39+PHjTLoCgPcqKyv76aefuJWRI0e2atWKVT9SwS4IZTLy9ZXia4U84z74INrfv2GNGrz69j17unTpItEhlK2t7RunDDMzMwcOHLhs2TI2d9QB4J22bNly7949bkX1XzGoYjRHqPToEW3eTJwtQiQqIz9/3J49J+7f59Vr1qy5bdu2vn37Mumq6rZv3z59+vS8vDxefcyYMev
WrcOUIYB4lJeXt2nT5vr168rK4MGDDx48yLAlqWA3IlRo0IAGDWLcgybYmpsfnTBhnpeX7PWVtaU+hBo7dmx0dHSjRo149R07dnTp0uW+SvADACt79uzhpiARBQYGsmpGWliPCBXOnKGICNZNaEKDBtuNjacHBKgOoUaPHr1+/XqJDqEyMzPHjRunOjtobW29efPmoUOHMukKALjc3NwSEhKUh7169cJLwBXEekSo0KOH5NbjfoO6dWncuLHjx8fExKgOoXbu3Onm5nbr1i0mrVVRzZo1jxw5Mm/ePN54Nzs7e/jw4YGBgeXSv7kNIGmHDh3ipiARBQUFsWpGcsQxIiQiuZyOH6fz51n3oa569WjCBDI1VRy9Ywi1adOmYcOGCd6fZuzYscPf3191vDtkyJDNmzfbqGxlDADC8PLyio6OVh527NjxvHS/nQpOHCNCIpLJqF8/qb5c6OJCkyYpU5DeOYTy8fGR7hBqzJgxMTExLorlAjgOHjzYsWPHmzdvMukKQM+dPn2am4JE9PXXX7NqRopEMyJUunSJDh2S0qIzHTrQoEFk8OYfKXbu3Dlt2jQdG0JlZ2dPmjTpwIEDvLq1tXVoaOjw4cNZNAWgv3r16nX69GnlYdu2bRMTE3k/hcM7iGZEqNSuHfn5kaUl6z4qwNCQBgygIUPeloJENHr06Pj4+ObNm/Pqkh5CWVtb79u3b+nSpQav/4dnZ2f7+vpKd7wLIEUXLlzgpiARLVy4EClYKeIbESrk5dHevSTmt9GrV6cRI6hu3Yr83uzs7MmTJ+/fv59Xl/oQ6uDBg5MmTcrKyuLVBw8evGXLFomOdwGkZfDgwYcPH1YeNm/e/Pr16wZv/+kcVIk1CIlILqfYWDp1ilT3NmKufXvq1487Kfhecrl8+fLlCxYs4I2WZDLZ3LlzFy9eLNG/uLdv3/bx8VEd2jZt2jQsLKxly5ZMugLQE5cvX27fvj3323hoaOjkyZMZtiRFIg5ChZcv6fBhEs+L2zVr0qBBpPJ2RAXp5BAqOzvbz89v3759vLqVlVVoaKiPjw+LpgD0wujRo3ft2qU8bNiw4Z07d4yMjBi2JEWiD0KF27fpxAnKyBD0onI5ce+zm5mRlxd16kSGhlU5q04OoXR1vAsgZrdu3WrVqhX3H90ff/wxc+ZMhi1JlESCkIjKy+nyZYqMJO7Ot8IwNSUPD+rcmapV08j5dHUIdejQoYkTJ6qOdwcNGrRly5bqKlsZA0BV+Pn5bdq0SXno4ODw4MEDMzMzhi1JlHSCUKG8nG7epPPn6ckTDZ+ZN/5TsLEhDw/q0IE0/XdLV4dQd+7c8fHxuXHjBq/epEmTsLAwbAcDoClJSUmNGzcu4TxC8fPPP8+ZM4ddRxImtSBUev6cEhPp+nXKzdX8yY2NqWlTatuWGjd+QzpqztuGUAMHDty6datEh1A5OTl+fn5hYWG8upWV1YYNG0aMGMGkKwAdM2vWrODgYOWhra3to0ePLCXx4pn4SDYIFeRySk6m27fp/n16/ryqZ7O2JhcXatKEGjem1/fa1R6dHELp6ngXQCSePXvWsGHDwsJCZWXRokULFixg2JKkSTwIuQoK6MkTevqUXrygtDR6+fI9y9PIZGRtTbVqUe3a5OBA9eoRoxHY24ZQlpaWGzdulO4Q6vDhwxMnTnz16hWvPmDAgK1bt9ZQ2coYACro888///nnn5WHNjY2jx49kug9JDHQoSBUlZtLeXlUUEDFxVRaSkRkYEAmJlStGllYkKXlO1aEEZiuDqHu3r3r4+PD2yONiBo3bhwWFta6dWsmXQFIWkZGRoMGDXI5s0ILFy787rvvGLYkdTodhFJz5MiRCRMm6NgQKjc3d8qUKXv27OHVLS0tN2zYMHLkSCZdAUhXUFDQ4sWLlYcWFhYPHz60s7Nj2JLUSXKcoasGDhwYGxurOi949OhRDw+Pa9euMemqiiwtLXft2qW6MGlubu7o0aMDAwPLJLTAOgBr2dn
Zv/32G7cya9YspGAVIQjFpUmTJufPn1edF7x3717nzp13797NpKsqkslk8+bNO3ToEG9QK5fLly1bNnjw4JfCvxsKIE2rVq3i3jQyNTX9/PPP2bWjIxCEoqOrQ6gBAwbExsaqzgseO3bM3d396tWrTLoCkJC8vLxVq1ZxK9OmTXN0dGTVj85AEIqRrg6hGjdufO7cOdV5wfv373fu3Hnnzp1MugKQiuDg4LS0NOWhsbHxV199xbAfnYEgFC+dHEJZWlru3Llz6dKlhq8v2ZqXlzdmzJiZM2eWKp7vBYDXFRUVrVixgluZMGFCw4YNWfWjSxCEovaOIVTHjh25ywxKyNvGu0QUEhLSu3fvFy9eMGkMQMzWr1//9OlT5aGBgQGGg5qCIBS7tw2hCgoK/Pz8pDuE6t+/f1xc3AcffMCrh4eHu7m5xcfHM+kKQJxKSkp++OEHbmXkyJES3axGhBCEEqCrQygXF5dz586NHj2aV09OTu7atWtoaCiTrgBE6K+//nr48KHyUPE9gWE/OgZBKBk6OYSysLDYsWNHcHAwbyvRwsLCKVOmzJw5k7u4PoB+Ki8v5w0HBw8e3KFDB1b96B4EoZS4uLhcuHBh8uTJvLrUh1ABAQEnT56sXbs2r64Y7z6v+nLqAFK2e/du3jqFgYGBrJrRSVhiTZJCQkJmz56tOjsYEBCwevVqY6G2ztCs5ORkX19f1aFt3bp19+7d6+7uzqQrAOZcXV0vXryoPOzVq9fJkycZ9qN7MCKUJJ0cQtWrVy8yMtLPz49Xf/LkSbdu3TZs2MCkKwC2Dh06xE1BIgoKCmLVjK7CiFDCdHUIFRIS8tFHH6nODkp6vAugHi8vr+joaOVhp06dzp07x7AfnYQRoYQphlBTpkzh1aU+hFKMd+vUqcOrh4SE9OrVS6LjXQA1nDp1ipuCRPT111+zakaHYUSoC3RyCPXkyRNfX9+4uDhevW7dunv27PHw8GDSFYCQevbseebMGeVhu3btLl68KJPJGLakkzAi1AXvHkI9e/aMSVdVVLdu3YiIiKlTp/LqivHu+vXrmXQFIJjz589zU5CIFi5ciBTUBowIdcfbhlBOTk579uzp2LEjk66q7h3j3V9//dXExIRJVwDaNmjQoCNHjigPmzdvfv36dd6mNKAR+Ex1x9uGUE+fPu3evfu6deuYdFV1AQEBp06d0rHxLsC7Xb58+ejRo9zKggULkIJaghGhDtLJIdSTJ09GjBgRGxvLqzs5Oe3evbtTp05MugLQklGjRnE34m7UqNHt27d5CzCBpuDnCx2kGELZ29vz6iEhIT179pToEEox3p02bRqv/vTpU29v77Vr1zLpCkAbbt26tXfvXm5l3rx5SEHtQRDqpq5du8bHx6vOC0ZHR7u5uZ0/f55JV1Vkamq6bt264OBg3qC2qKhoxowZM2fOLC4uZtUbgAYtXry4vLxceejg4KC6sCJoEIJQZzk5OYWHh/v7+/PqUh9CvXu8m5qayqQrAE15+PDhtm3buJW5c+eamZmx6kcfIAh1mamp6dq1a3VvCOXl5RUfH686L6gY72LdDZC0ZcuWcZcRtrW1nT59OsN+9AGCUPcFBAScPn36jUOoHj16SHQI5eTkdPbsWdVvECkpKT169AgJCWHSFUAVpaam8raR+eKLLywtLVn1oycQhHrB09PzjUOomJgY6Q6hTE1N//zzzzeOd2fOnDl58uSCggJWvQGoZ/ny5YWFhcpDGxubDz/8kGE/egJBqC/eMYTy9vZeuXIlk66qTjHedXBw4NU3b97ctWvXpKQkJl0BqCEjI4M3ef/JJ59Ur16dUTt6BEGoR942hCouLp4zZ450h1CK8W7nzp159YSEBDc3t7Nnz7JoCqDSVqxYkZubqzy0sLD4+OOPGfajPxCEeicgIODMmTM6NoRydHQ8c+ZMQEAAr56WltanT59ly5Yx6Qqg4rKysn777TduZdasWXZ2dqz60SsIQn3UpUuXdwyheOv
8SoWpqWlwcHBoaCjvQfPS0tLAwMBJkyZJdLwLeuLXX3999eqV8tDU1PTzzz9n145+QRDqKUdHx7Nnz37yySe8elpaWt++faU7hJo8eXJUVJSzszOvvmXLFi8vr8ePHzPpCuDd8vLyVq1axa34+/s7Ojqy6kffIAj1l4mJycqVK0NDQ6tVq8atS30I5erqGh8f7+3tzatfvHjRzc3t9OnTLJoCeJc//vgjLS1NeWhsbPzVV18x7EffIAj13eTJkyMjI3VsCGVnZ3fixIl58+bx6unp6f369ZPueBd0UlFR0YoVK7iVCRMmNGjQgFE7+ghBCP8MoXr06MGrS3oIZWRktHTp0s2bN79xvDtx4sT8/HxWvQFwrV+/PiUlRXloaGgYGBjIsB89hCAEIiI7O7vjx4/r3hBq4sSJUVFR9evX59W3bt3q5eX16NEjFk0B/KukpOSHH37gVkaOHNmsWTNW/egnBCH8Q1eHUB06dLhw4ULXrl159cuXL9+8eZNJSwBKW7duffjwofJQJpPNnTuXYT/6CUEIr9HJIVSdOnVOnz7NG+9+++23AwYMYNUSABGVl5cvXbqUWxkyZEiHDh1Y9aO3sEM9vEF6evqYMWNUZwdr1aq1ffv2Xr16Memq6rZu3RoQEJCfnz9s2LCwsDCZTMa6I9BrO3bsGDt2LLcSHR3dpUsXVv3oLYwI4Q1q1ar1999/v3HKsH///tKdMpwwYUJ4eHi/fv02b96MFAS25HL58uXLuZXevXsjBZnAiBDeRTmE4tXHjRu3du1ac3NzJl0B6ICDBw8OHTqUWzlz5ozq+68gAAQhvEdiYqKvr6/q7GC7du327t3bsGFDFk0BSJtcLnd1dU1MTFRWOnXqJNEN0XQAghDeLz09fezYsadOneLVbW1tt23b1qdPHyZdAUhLSUnJlStXoqKioqOjjx8/npWVxf3qkSNH8PQWKwhCqJDS0tKFCxeqzg4aGhouWrRo7ty5mHIDUPXs2bO4uLjo6OioqKj4+PiioiLuV42NjUtKSoioffv2CQkJ+EfECoIQKmHbtm3Tp09XnTIcO3bs2rVrLSwsmHQFIB5lZWW3bt1SJF9CQsKNGzfe8Zs/+OCDCRMm/PDDD8HBwSNGjBCsSeBBEELlXLp0ydfXl/sKsEKbNm3CwsIaNWrEpCsAhnJzcy9duqQIv+jo6JcvX1bwDxoYGGRmZhoYGFhYWBgY4Bl+ZhCEUGkZGRnjxo07ceIEr16zZs1t27b17duXSVcAQrp//35MTExMTEx0dPT169fLy8srewZLS0t3d/e1a9fix0fmEISgjrKysqCgoOXLl/P+/mDKEHRVaWnp5cuXFTc8IyIi1NuYxcHBwdXV1cvLy9PT08PDw8TERON9ghoQhKC+7du3T58+PS8vj1cfPXr0+vXrMWUIUpeVlRUXF6e44RkdHa3GDp2GhobNmjVTJF/Xrl3xupE4IQihSq5cueLj4/PgwQNevXnz5mFhYc2bN2fSFYDaHjx4oEi+qKiomzdvqvEd0srKqmPHjp6enq6urt26dbOxsdFGn6BBCEKoqszMzHHjxh0/fpxXt7a23rRp07Bhw5h0BVBBeXl5iYmJCQkJ0dHRp0+fzsjIUOMkDg4OimGfl5dX+/bt8eSLtCAIQQPeNmWo2FNm8eLF+L4AopKSkqJIvqioqLi4uOLi4sqewdjYuE2bNork8/b2trOz00afIAwEIWjMgQMHJk2alJ2dzasPGTJk8+bNuEEEDHFf74uOjla9mV8R9vb2bm5uiqddvLy8zMzMNN4nMIEgBE26deuWj4/PrVu3ePVmzZqFhYW1aNGCSVegn3Jyci5cuKB4zjMqKurVq1eVPYPiURflc54tW7bE49A6CUEIGpadnT158uT9+/fz6tbW1qGhocOHD2fRFOiLlJQU5bAvMTFRvdf72rZtq0g+T0/PmjVraqNPEBUEIWieYqO1BQsW8L4NYcoQNI67knV4ePiLFy/UOInyURd
XV9eOHTsaGxtrvE8QMwQhaMvBgwcnTZrEW2KfiAYPHrxlyxZMGYLanj9/Hhsbq3zapbCwsLJnMDIyatq0qSL8unXr1qBBAy20CZKBIAQtun37to+Pz82bN3n1pk2bhoWFtWzZkklXIDmKR12Uyafe633W1tYeHh6K5zw9PT2rVaumjVZBihCEoF3Z2dl+fn779u3j1a2srEJDQ318fFg0BRLAXck6JiYmMzNTjZM0atRIccPTy8urQ4cOeNQF3ghBCFqHKUOoIOWjLgkJCbGxsYq9+irF3Ny8ffv2iuTr0aNHrVq1tNEn6BgEIQjk0KFDEydOVJ0yHDhw4NatW6tXr86iKWCstLT09u3bivCLjIx89OiRGifhrmTt7u5uamqq6TZBxyEIQTh37tzx8fFR3aq0SZMmYWFhrVq1YtIVCCw7Ozs2NlZTK1m7urribw5UEYIQBJWTk+Pn5xcWFsarW1pabty4EZt06yrFStaKp12q/nqfl5dXjRo1tNEn6CcEIQgNU4b6gPt635kzZ9LT09U4CVayBmEgCIGNI0eOTJgwQXXVqwEDBmzduhU/70tRampqfHy8YsIvPj6+qKiosmcwMjJq27at4oant7e3s7OzNvoE4EEQAjN379718fG5fv06r964ceOwsLDWrVsz6QoqjruSdUJCgursb0XUrl3bw8ND+bQLXu8D4SEIgaXc3NwpU6bs2bOHV7e0tNywYcPIkSOZdAXvwH29Lzo6+uXLl2qcRPF6H1ayBpFAEAJj754yXLRokaGhIaveQKHqK1lbWFi0a9dOkXxdunSxtbXVRp8A6kEQgigcPXp0woQJqsOL/v37//XXX5gyFFhpaenly5cVNzzDw8OTkpLUOAn39T4PDw8TExON9wmgEQhCEIt79+75+Phcu3aNV3dxcQkLC/vggw+YdKU/srKy4uLiNPV6X9euXRs2bKiNPgE0DkEIIpKbmzt16tTdu3fz6tWqVfvjjz8mT57MpCsdpni9ryorWVtZWXXs2FHxnGe3bt2wqQhIEYIQxEUxZRgUFFRWVsb7UkBAwJo1a4yMjJg0phvy8vISExMVyXfu3LmMjAw1ToLX+0DHIAhBjI4dOzZ+/HjVKcPu3bvv3Lmzdu3aTLqSqJSUFOUGRnFxccXFxZU9g7GxcZs2bRTJ5+3tbWdnp40+AVhBEIJI3b9/38fH5+rVq7x6vXr19u7d6+bmxqQrSeC+3hcVFfXw4UM1TmJvb+/m5qYY+bm5uZmZmWm8TwCRQBCCeBUUFMyaNWvTpk28upmZ2R9//OHn58ekK3HKycm5cOGC4jnPyMhI1V0+3kvxqIvyOU+83gf6A0EIYhcSEjJ79uzS0lJePSAgYPXq1cbGxky6EoOqv97HXcna09OzZs2a2ugTQOQQhCAB4eHho0ePfvHiBa/erVu3nTt31qlTh0lXwuOuZH327Nm0tDQ1TqJ81MXV1bVjx476/JMEgAKCEKQhOTnZ19c3Pj6eV69bt+7evXvd3d2ZdCWAZ8+excXFKdfzLCwsrOwZjIyMmjZtqgi/7t27169fXxt9AkgXghAko7Cw8MMPP9y4cSOvbmZm9ttvv02dOpVFU5qneNRF+Zyneq/3WVtbe3h4KNfzxErWAO+AIASJCQkJ+eijj0pKSnh1SU8ZcleyjomJyczMVOMkWMkaQD0IQpCeiIiI0aNHP3/+nFfv2rXrzp077e3tmXRVWcpHXRISEmJjY1Wj/b3Mzc3bt2+veM6zR48etWrV0kafADoPQQiS9OTJE19f37i4OF7dyclpz549HTt2ZNLVu5WWlt6+fVsRfpGRkY8ePVLjJNyVrN3d3U1NTTXdJoDeQRCCVBUWFv7nP//ZsGEDr25qarpmzRp/f38mXfFkZ2fHxsZqaiVrV1fXVq1aaaNPAH2GIARpe8eU4a+//spk6x/FStaKp10uXryo3krWbdq0UW7jUL16dS20CQD/QBCC5EVGRo4ePfrZs2e8uqen5+7
duwWYMszPz7948aIi+c6cOZOenq7GSbCSNQArCELQBU+fPh0xYsSFCxd4dScnp927d3fq1EnjV0xNTY2Pj1dM+MXHxxcVFVX2DEZGRm3btlXc8PT29nZ2dtZ4kwBQEQhC0BFFRUWzZ89et24dr25qarp69erp06dX8fzclawTEhJu3Lihxknq1Knj7u6ueNrFy8sLK1kDiAGCEHRKSEjIxx9/rLrTkHpThtzX+6Kiol69eqVGS3i9D0DkEISga6Kjo0eOHKk6ZdilS5fdu3c7ODi8+49XfSVrCwuLdu3aKZKvS5cutra2lT0DAAgJQQg66OnTpyNHjjx//jyv7ujouHv37s6dO3OLpaWlly9fVtzwDA8PT0pKUuOK3Nf7PDw8mDytCgDqQRCCbioqKvroo4/Wrl3Lq5uYmCxfvnzcuHEXLlxQruepxkrW3Nf7unbt2rBhQw01DgBCQxCCLvv999/nzJmjOmWonpo1a3bu3LlLly6KVV3Mzc01cloAYAtBCDouJiZm5MiRqamp6v1xxaMuitueeL0PQCchCEH3paSkjBw58ty5c4rDpk2b3rlz522/2djYuE2bNornPL29ve3s7IRqEwDYQBCCXiguLv7qq69WrVrVuXPnsWPHfvrpp9yv2tvbu7m5KSb83Nzc8HofgF5BEIIe2bhxY79+/Z4/f+7m5tasWTPlc554vQ9AnyEIQe+Ul5fn5eVZWVmxbgQARAFBCAAAeg2PwAEAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF5DEAIAgF77/yvg40k0zAY9AAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "display_data", + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAAB0qUlEQVR4nO3deVxU1fsH8Id9EVBQQEVcQIdVAcUFUMvdzGwxy7SvZmValgKiuIY7LshiWZltai5pZmkLbrgvuaOyJrjgBigooOzM7w/96XDuOLLMzLlz5/N+ff+wJ5v7+Y44z5xzzz3HQC6XEwAAgL4y5B0AAACAJzRCAADQa2iEAACg19AIAQBAr6ERAgCAXkMjBAAAvYZGCAAAeg2NEAAA9BoaIQAA6DU0QgAA0GtohAAAoNfQCAEAQK+hEQIAgF5DIwQAAL2GRggAAHoNjRAAAPQaGiEAAOg1NEIAANBraIQAAKDX0AgBAECvoRECAIBeQyMEAAC9hkYIAAB6DY0QAAD0GhohAADoNTRCAADQa2iEAACg19AIAQBAr6ERAgCAXkMjBAAAvYZGCAAAeg2NEAAA9BoaIQAA6DVj3gFAx92/T3fvUl4eFRRQYSEVF1NpKZWWPv63hoZkZkbm5tSgAVlZUaNGZGdHTZqQuTnX0AAAT6ERQi2VldHVq3TtGl2/TrduPe15tdKwITVvTs7O1KoVNWtGBgbqTgkAUFMGcrmcdwbQBffuUUoKpaVRVhZVVanzlS0syNWV3N1JJiMTE3W+MgBADaARgkqlpXTxIp07R9eva/xaJibk7k6+vtSmDcaIAKA1aITwDHl5dPw4JSZSWZm2L21nR126kJ8fmZpq+9IAoH/QCEEgJ4cOHKCUFOL7s2FhQV27UrduZGbGMwYASB0aISi4f58SEujCBc4tUJGFBfXoQV26kJER7ygAIE1ohEBERBUVdOQIHT5MFRWPK3K5iG7U2dnRSy9R27a8cwCABKERAtG1a7R9O929yzvH83h50aBBZGnJOwcASAoaoX6rrKSEBDp2TERzoao1aEBDhpBMxjsHAEgHGqEey8+nLVvo1i3eOWpGcaq2a1fq1w93DQFALdAI9dWlS7R1K5WU8M5RVy1a0Ntvk5UV7xwAoPPQCPXSv//Szp3VpkNFtTSmhmxsaMQIcnTknQMAdBsaoZ6Ry2n3bjp2jHeOumIatpkZvf02tWnDLxAA6Dw0Qn0il9OOHXT2LO8camVkRG+9heUzAFBnaIR6Qy6nP/6gxETeOTTAyIiGDSM3N945AEAn4WBevfH339LsgkRUWUlbtlBmJu8cAKCT0Aj1w4EDdOrU03+U3jRAZSX98ovOPAoCAGKCRqgHLlyg/furVXRugWhNlJXRxo1UWMg7BwDoGDRCqbt5k7Zv5x1CWwoLadOmp9ulAgDUABqhpJWU0ObN+tUYbt6k+HjeIQBAl6ARStr27XT/Pu8QWnf6NCUn8w4BADoDjVC6zp2jlBQiKS6Nea4//8TNQgCoITRCiSosfDpDKMmlMaoVF9Off/IOAQC6AY1Qov75h0pLeYfgKj0dE6QAUBNohFKUkaG/k6KKdu6k8nLeIQBA7NAIJUcup507H/9aDydFFRUU0NGjvEMAgNihEUrOuXOUm8s7hGgcPUoPHvAOAQCihkYoLVVVdPAg7xBiUlZGR47wDgEAooZGKC3nz9O9e7xDiMypU/TwIe8QACBeaIQSIpfjlthTTxYKlZfTyZNcowCAqKERSkhmJu4OPqW4UOjUKaqs5BcFAEQNjVBCFA9aAkVFRZSWxjsEAIgUGqFUPHhA6elEev/s4LOcPcs7AQCIFBqhVCQlUVUVkd4/O/gsGRl4jgIAlEIjlApsJ6aaXP54tx0AgOrQCCXhwQO6do13CNFDIwQAZdAIJSEjA7cGVXn05ly9iq1HAUAIjVASMjN5JxC3R/dNKyvp6lXeUQBAdNAIJeHKFd4JdATeKAAQQCPUfQUFdP8+7xA6IiuLdwLQSd7eZGDw+H8DBqj6ndOmPf5t3t7aCgf1hkao+27e5J1Ad9y6hZupUE+7dtH587xDgFqhEeq+7GzeCXRHeTnl5fEOATpv+XLeCUCt0Ah1H/YXrRW8XVB
vGzfSjRu8Q4D6oBHqPgxxagVvF9SDrS0RUXk5rVjBOwqoDxqh7sMBhLWCtwvqYdiwx79YtYoKC7lGAfVBI9RxFRVUXPz411gGUhP49IJ66NGD2rQhIrp/n777jncaUBM0Qh2nuJE0ttuuCWy9DfVQUEDBwY9/HRdHFRU8w4C6GPMOAPVTUsI7gXJX790b9+efvFMoY2JCmzfzDqEGjo49srNn8k4hZd7eFBXFFh8+pPHjKSKC7t2jq1fp119p+HAe4UCt0Ah1XFkZ7wSsgtJSGzOzwrKynZcu8c7yDJLYfdvDw1ES/z/ES+ncQWkpWVnRuHG0ZAkRUVQUGqEUYGpUx1VW8k5Qzdlbt1pER8/Zv78Uc0YgRY9uxH/2GZmYEBGdPk3793MNBOqARqjjRLZAJjg+vrC0dO7+/S9v2MA7C4CmODk9HQgKp09B56AR6jhDEf0J/nLx4sH/P94hu6iIbxgAjZo8+fEv/v5bGnPtek1EH6NQF8ZiuctbXF4evmcP7xQAWuLjQ336EBHJ5dhxTeeJ5WMU6sjMjHeCx5YeOXJV8Kz6eH//V93decR5NltbGjSIdwg1MDa2w31YjbKze85vmDyZ9u4lIvr5Z1q4kBwdtRAKNAKNUMdZWvJOQER0vaBg2dGjTNHD3n7FSy+ZGBlxifRMbdrQwIG8Q4AUDBxInp6UnEylpfTFF7RgAe9AUFeYGtVxFhaPbxNyXTUzdffuB4IHOaIHDBBdFyQiKyveCUAiDAwoNPTxr7/+mh4+5JoG6gGNUMcZGJC19eNfcHIsK2vThQtM8RU3t4Ft23LJ8xwNG/JOANLx7ruPZ0Tz8ujHH4nEtXwNagp/aLrv0X74nFTJ5ZPi45nRqKmRUVT//nwCPRfXtwskxsyMJkx4/OuYGKqqevx8IegWNELd16QJx4v/dO7cScHJbJO6dZM1bswlz/OJNhjopo8/JgsLIqKMDNq27fGvQbegEeo+BwdeVy4sLZ2VkMAUHRo0mNmjB5c8NcLv7QJJatKERo9+/OuoKMw46CQ0Qt3XtCmvKy84ePCW4FSjRX36NDQ355Ln+Ro2xDd2ULuQkMf36I8fp//+450Gag+NUPc1a0aPFmdqd+FoRl5e3L//MkW/Zs3G+PlpM0btODnxTgASJJPRK688/vWmTVyjQJ2gEeo+Y2Nq1oxI2wtHQ3fuFO6sHTtwoKGYj0Vs2ZJ3ApCmJzuuCe6Ygw5AI5SER2dma1HC5cvb09KY4nBv756tWmk5Se1o/Y0CPdGzJ/n78w4BdYVGKAmurk9/rfkJ0sqqquD4eKZoYWKyuG9fTV+6XmxsyN6edwiQrCeDQtA5aISS4Oz8dA2I5mcmvzp58kJ2NlOcGhTUqlEjTV+6XmQyjtsOgOS9+aaSqfcdO8jfn+7c4REIagx7jUqCoSG5u9PZs1q4VH5x8dwDB5hiCxubKYGBWrh6vXh48E4Auurixef/HmNj+v9TyIiIUlNp0CD65x8ioogIWrlSU9mg/jAilApvb+1cZ/a+fXcFmyou7devgampdgLUUYMG1Lo17xCgF/LyaNIkat/+cRckolWr6Px5rplAJTRCqWjThmxsNH2R5NzcVadOMcUAZ+fh2mrDdde+PXaBBO24epVWriTFJdWVlRQczC0PPBc+GqTCwIA0/wBf6M6dFVVVihVDA4O4gQMNxH/vrVMn3glAX/j50ZgxbHHfPtq2jUcaqAE0Qgnp1Emjg57fU1N3XrrEFMf4+XUW/1Pqbdrw3ZEV9M3ChUqOOQkLo5ISHmngedAIJcTamry8NPTaZZWVU3fvZi9oZja/Vy8NXVGdunXjnQD0i4MDzZrFFjMzKSaGRxp4HjRCaQkKevwLdT9NGH3s2H937zLFWT17Nnt0GqKYOThQu3a8Q4DemTiRZDK2uGgR3bzJIw2ohEYoLY6Ojx8SUOtNu+yioshDh5iiq53dpK5d1XgVTXnhBTw+CNpnakpRUWyxqIhmzOCRBlRCI5ScXr3
U/rk/fe/egtJSphg9YICZseifQ23WDI8PAi+vvEIDB7LFtWvpxAkeaeDZ0Aglx96eOnZU4+uduXVrzblzTLGPi8sQNzc1XkXNnswM9++P4SBwFB3Nnlkvl9OkSVo+KgaeA41Qinr3JjWdCCiXy4Pj46uq/601MjSMGTBALa+vKY+an6cnHqIHvjw86JNP2OLx47R+PY808AxohFJkaUlq2v96w4ULhxS3jSIiok86d27v6KiW19cgU1MSebcG/RARoeThnWnTqKiIRxpQBo1Qojp2pHqfiFRcXj5j716maGthEfHCC/V8ZW3o108LW+0APJetLc2dyxZv3KAlS3ikAWXQCCXKwIBefZXqt//n4sOHr92/zxTn9+rV2NKyPi+rDS4u2EoGxGPcOOrQgS1GRdGVKxzCgBAaoXTZ2tLLL9f5v866fz/q6FGm6GlvP078x49aWtLrr2ONDIiHkRHFxrLFkhKaOpVDGBBCI5S0Dh3qvAFp2K5dD8vLmWLMwIHGIt+62sCA3niDrKx45wCoplcveuMNtrhlC+3fzyEMMMT9oQb1N2gQ1X4v0CPXrm1JSmKKr7m793d1VVMsjendm8QfEvRSVJSS1dzBwVRZySMNKEAjlDpjYxo+vFbLRqrk8uD4eOYxJ1Mjo6X9+qk3mto8ebqjQ4enm8wBiEybNhQSwhYTE+m773ikAQVohHrAyopGjqz5k4XfnzlzSrAfYmhAQLvGjdWdTE0e3Q5s04aGDMGtQRCzGTOoeXO2OHs23bvHIQw8gUaoHxwcaOTImiwiLSwt/XzfPqboaGU1vUcPzSSrnydjQScnGj6cjIy4pgF4DisrWrSILebm0vz5PNLA/0Mj1BstWtCIEex2TwJzDxy4LXjQd3HfvjZmZhpLVntP+t+j8V/z5vTuu/V8VgRAO0aNIuFm9StWkOCmPGgPGqE+adWK/vc/FXOkl/LyvhTsB9yxWbNRPj4aTlZLivOfLVvSqFHq2lIOQNMMDCg2lp3Cr6hQcvsQtAaNUM84O9OYMfSMQwSD4+NLKyoUKwZEsQMHGor2xpubG/3vfySq0SrA83TrRiNHssXdu+nvv3mkASIDOXZB10OFhbRxI926pVjbk5nZb+1a5jeO7NDhZ+HTTyIREED9+mF1DOiiGzfI3Z3dbrRtW0pKwhw/BxgR6iVraxozRnHTp4qqqpD4eOZ3WZiYLOrTR7vJasbEhF5/HUcsge5ycqLwcLZ46RKtXMkjjd7DiFC/nTlD8fFUXh57/LiwEc7r1Wu2CPfXtrenN98kBwfeOQDqpaSEPD3p8uVqRRsbSkujpk05ZdJXGBHqt44dady4PFvbBQcPMv/GuWHDyYGBXEI9JvyKZmBA3brRRx+hC4IEmJvT0qVssaCA5szhEEbPoRHqvcaNZ6ak3H34kClH9e9v+bxnLTTlUQtkpj3t7WnMGBowgIyNuYQCULs336QXX2SLq1fT6dMcwugzTI3qu6SkJF9f34rqi0WDWrY8NGaMwZNWJJfzvBtnbk49e1LXriTy/b4Bai8piXx9qfrfPwoKokOHcAdce/DJou9CQkKYLmhoYBA7cKCB4t9CXn8jjY0pIIAmTqSAAHRBkCQvL/rgA7Z45Aht3cojjb7CiFCvbd269c0332SKY8eO/Xb2bDp0iP77j0sqIiIzM+rUiQICanKg0q1bt0pKSrQQSlQMDc2rqprxTsGBvb3UTtnKy6N27Sgvr1rR2ZlSU0n8Z2BLAxqh/iotLfX29r506ZJi0draOj09vemjVWt37tCpU5SYSNpsM/b25O9PPj41f0y+a9euJwQb4kieh8eolJQ1vFNw8NNPNHo07xDqFhNDoaFscf58mjWLRxr9g3UH+mv58uVMFySiiIiIpk/WbjdpQgMHUt++lJpKFy9SRgZ7K0ONrK3Jw4M6dKjD6YkAuu6zz+j779ntRiMjadQoatmSUyZ9gkaop7Kzs5csWcIU27Zt++mnn7K/1diYvL3J25v
KyigjgzIy6PJldh6nbgwNycmJXFyoXTtq3hxrA0BvGRtTTAz171+t+PAhzZxJ69ZxyqRP0Aj11NSpUwsKCphibGysmYoJSVNT8vAgDw8ioqIiun6dbt2inBy6c4fy82t0zLaFBdnZUZMm1LQpNWtGzZs/9zQMAD3Rrx8NGsRuN7p+PY0bR927c8qkN9AI9dHp06d//vlnpti3b9+XX365pi9hZUXu7uTu/rRSVEQPHtDDh1RaSlVVVFZGRkZkZETGxmRuTpaWZGODXRQBVIiLo717qbT0aUUup+BgOnECi6Y1C41Q78jl8kmTJlVVVSkWjY2NY2Ji6vW6Vla8FvM1b968devWXC7NUdOmtsXFvEPw8IyjU6SgbVuaMIGio6sVT5+mdeskuD5IVLBqVO+sW7du1KhRTDE4OLi+jRAA6q2ggNzc6PbtakVHR0pPJxsbTpn0ABqhfnn48KGHh8e1a9cUi3Z2dunp6Y0bN+aVCgCeWLWKxo9ni9On06JFPNLoB0w865dFixYxXZCIFi5ciC4IIBJjx1KnTmxx+XKe+1tIHkaEeuTatWseHh4Pq++v7eXlde7cOWPsZA0gGkeOUI8e7Pkrb7yBfdc0BSNCPRIaGvpQcMpETEwMuiCAqAQFkWDrQ/rtN9q1i0caPYARob7Yv39/r169mOLQoUN//fVXLnkAQIWsLHJ3J+aLq6cnJSbiIDL1w4hQL1RWVgYHBzNFMzOzxYsX84gDAM/h7Kxk99HkZPr2Wx5ppA6NUC+sXr06MTGRKYaFhbVt25ZLHgB4runTlWw0Ons23b3LI42kYWpU+u7duyeTyXJzcxWLjo6O6enpNng0CUDEfv6Z/vc/tjhpEsXGcggjYRgRSt/cuXOZLkhES5cuRRcEELmRI5VsNLpyJV28yCONdGFEKHGpqakdOnQoLy9XLHbq1OnEiROG2L4QQPTOnKHOnan6lojUpw/t2cMpkBTho1DiQkNDmS5oYGAQGxuLLgigEzp2VDI7uncv7djBI41EYUQoZX/99dfgwYOZ4qhRo9as0ceTzQF0VHY2yWTEHJvm6kpJSaTi2DSoOQwLJKu8vHzy5MlM0dLScsGCBVzyAEDdODrStGlsMSODvviCRxopQiOUrBUrVqSlpTHFmTNnOjs7c8kDAHU2eTK1a8cW582jW7d4pJEcTI1KU05Ojpub27179xSLbdq0SU5ONjc35xQKAOpu2zZ64w22+OGHtHo1jzTSghGhNM2aNYvpgkS0bNkydEEAHfX669S/P1v84Qc6eZJHGmnBiFCCzp075+/vX1lZqVjs1atXQkICr0gAUH/JyeTjQxUV1YqBgXT4MBkYcMokCRgRSlBwcDDTBY2MjHAAPYCu8/Skjz5ii0eP0ubNPNJICBqh1GzevPnAgQNMcezYsT4+PlzyAIAazZ9PwlO0w8LowQMeaaQCjVBSiouLw8PDmWKjRo3mzZvHJQ8AqJedHX3+OVu8fp2ionikkQo0QkmJioq6cuUKU4yIiLC3t+cRBwDU75NPyNubLS5ZQlev8kgjCVgsIx03btxwc3N7UH2KxN3d/fz58yYmJrxSAYDa7d1LffuyxXfeoQ0beKTRfRgRSkd4ePgDwY2C6OhodEEAienTh155hS1u3EiHDvFIo/swIpSIY8eOBQUFMX+aL7/88p9//skrEgBoTkYGeXlRaWm1op8fnTpF2FG/tvCGSUFVVVVwcDDTBU1MTJYvX84rEgBolKsrTZzIFs+epZ9+4hBG16ERSsHatWtPnDjBFCdOnOjm5sYlDwBowezZ1KwZW5w+ne7f55FGl2FqVOcVFRW5ubndvHlTsejg4JCWltaoUSNOoQBAG777jsaOZYtTp9KSJTzS6CyMCHXewoULmS5IRAsWLEAXBJC899+nzp3ZYmwspafzSKOzMCLUbZmZmV5eXiUlJYpFX1/fU6dOGRkZ8UoFAFpz9Ch1707MB/mrr9Lvv/PJo4swItRtkydPZrogEcX
GxqILAuiJwEB6+222+McftHMnjzS6CSNCHZaQkNCnTx+m+NZbb/3yyy9c8gAAF9evk7s7u92ohwclJhKeIq4JjAh1VWVlZUhICFO0sLBYgrvkAHqmRQsKC2OLKSn0zTc80uggNEJd9c0335w/f54pTpkypXXr1jziAABP4eHUqhVbjIigO3d4pNE1mBrVSfn5+TKZ7E71n3EnJ6e0tLQGDRrwSgUAHG3cSCNGsMVPP6UvvuCRRqdgRKiTIiIi7gi+6S1ZsgRdEEBvvfMO9ezJFr/+mi5c4JFGp2BEqHtSUlJ8fHzKy8sViwEBAUeOHDEwMOCVCgC4O3uW/P2pqqpasXdv2ruXUyAdgRGh7gkNDWW6oKGhYWxsLLoggJ7z86P33mOLCQl4pvA5MCLUMdu3b3/11VeZ4pgxY3744QcueQBAVHJySCZjtxt1caHkZDIz45RJ9DAi1CVlZWVTpkxhilZWVgsXLuSSBwDExsGBZsxgi5mZFBvLIYyuQCPUJbGxsemCPQRnzpzZTLgFPQDoq+BgksnY4sKFdOsWjzS6AFOjOiMnJ0cmk92vPuXh4uKSlJRkbm7OKxUAiND27SS4hUJjxhBuoSiFEaHOmDFjxn3BOWPLly9HFwQAxpAhNGAAW1yzhgTnlgIRRoS64uzZs/7+/lXVl0X37t17L5ZFA4AyKSnk40PVF5hTQAAdOUJYYM7AiFA3BAcHM13QyMgoFre/AeAZPDxo/Hi2eOwYbdzII424oRHqgI0bNx48eJApjh8/vn379lzyAIBOmDuXmjRhi1OnsudUABqh2BUXF0+fPp0p2trazpkzh0ccANAZtrYk/Jy4cYOWLuUQRszQCMVuyZIlV69eZYpz585tIvymBwBQ3fjxJJw5WraMrlzhEEa0sFhG1K5fv+7u7v6g+kSGh4dHYmKiCQ7cBIAaSEggwQHe9NZbhAO8n8CIUNSmTJnyQDCdHxMTgy4IADXUuze99hpb3LyZDhzgEEacMCIUr6NHj3bv3p35AxoyZMgff/zBKxIA6KLMTPLyopKSakVfXzp1ioyMOGUSE4wIRaqqqio4OJjpgqampsuWLeMVCQB0lIsLBQezxXPnsNHMY2iEIvXjjz+ePHmSKQYHB8uEewgCADzPzJkk3JN41iy6d49DGLHB1KgYFRYWurm53aq+Ra6Dg0N6enrDhg15pQIAnfbjj/T++2xx8mSKiuKRRkwwIhSj+fPn3xJsFB8ZGYkuCAB1Nno0denCFlesoLQ0HmnEBCNC0cnIyPDy8iotLVUs+vn5nTp1ytAQX1wAoO6OHaOgIGI+9QcPph07OAUSB3ywik5ISAjTBYkoNjYWXRAA6ikggEaMYIt//kn//MMjjWhgRCgue/fu7du3L1N85513NmzYwCUPAEjMjRvk5sZuN+ruTufPk94+n4xBhohUVFSEhIQwRQsLi8jISC55AEB6nJxo6lS2mJpKK1fySCMOaIQi8tVXX124cIEphoeHt2rVikseAJCkKVOodWu2OHcu3bnDIYwYYGpULPLy8mQy2d27dxWLLVq0SE1NbdCgAa9UACBJmzfT22+zxY8/pq++4pGGN4wIxWL27NlMFySiZcuWoQsCgNq99Rb17FmtYmpKqan/nj9/nlMinjAiFIXk5GQfH5+KigrFYmBg4OHDhw0MDHilAgAJO3eO/P2pspKIqGPHvPv338vI2NGrV6+EhATe0bQNjVAUBgwYsGvXLsWKoaHh8ePHO3fuzCsSAEjeRx/Rnj1lTZosOnly7pPir7/+OnToUI6ptA+NkL9t27a98cYbTPGDDz747rvvuOQBAD2Rk1Ps5eVy585txWKbNm2Sk5PNzc15pdI+3CPkrKysLDw8nClaW1vPnz+fSx4A0B8ODhbTpoUxxcuXL0dHR3PJwwsaIWfR0dH//fcfU5w9e3Yz4UbxAADqNnHiRDc3N6YYGRl58+ZNLnm4wNQoT9nZ2TKZrKCgQLHo6uqalJRkZmbGKxU
A6JW//vpr8ODBTHHUqFFr1qzhkkf7MCLkadq0aUwXJKKYmBh0QQDQmpdffvmll15iiuvWrfv333+55NE+jAi5OXPmTOfOnauqqhSLffr02bNnD69IAKCfUlNTO3ToUF5erljs1KnTiRMn9GG7f+n/PxQnuVw+adIkpgsaGxvHxMTwigQAesvd3X3ChAlM8fTp0+vXr+eSR8swIuRj/fr17777LlOcOHFiXFwclzwAoOfu3bsnk8lyc3MVi02bNk1LS7OxseGVSjswIuSguLh4xowZTNHOzu7zzz/nkgcAoFGjRvPmzWOKt2/fXrp0KZc82oRGyEFkZOS1a9eY4vz58xs3bswlDwAAEY0dO9bHx4cpRkVFXbp0iUsercHUqLZlZWW5u7s/fPhQsejp6ZmYmGhsbMwrFQAAEe3bt693795McejQob/++iuXPNqBEaG2TZ48memCRBQTE4MuCADc9erVS7jR6NatW3fv3s0lj3ZgRKhVR44c6dGjB/Oev/7667/99huvSAAAii5fvuzp6VlSUqJY9PLyOnfunFS/r2NEqD1VVVXBwcFMFzQ1NV2yZAmvSAAAjDZt2oSGhjLFpKQkCR8DgEaoPd99992pU6eY4uTJk9u1a8clDwCAUtOnT2/evDlTnDlzpvDwcGlAI9SSgoKCiIgIpujo6Dht2jQueQAAnsXKyioyMpIp5uXlLViwgEseTUMj1JK5c+fevn2bKS5evFjyT6oCgC763//+FxQUxBS//PLLpKQkLnk0CotltOHSpUve3t6lpaWKxY4dO548eVIf9vEDAF10+vTpLl26MDtB9u3bV3orSPEprA3BwcFMFzQwMIiLi0MXBADR6tSp08iRI5ninj17/vrrLy55NAcjQo3bs2dPv379mOK77767bt06LnkAAGro9u3bbm5uzGlxbdu2vXjxopROi8OIRLMqKiqCg4OZooWFxcKFC3nEAQCohaZNm06dOpUpXrp0aeXKlVzyaAgaoWZ98cUXwnvL06dPb9myJZc8AAC1EhYW1rZtW6aodPWf7sLUqAbl5eW1a9cuLy9Psejs7JyammppackrFQBArWzduvXNN99kih999NGqVau45FE7jAg1aMaMGUwXJKLly5ejCwKADhk6dKhwoYPSHUJ0FEaEmpKUlOTr61tRUaFYDAoKOnTokIGBAa9UAAB1IO0PNIwINSUkJIT5oTE0NIyLi5PADw0A6BsvL68PP/yQKR45ckQaxzNhRKgRv/7667Bhw5iilKbUAUDf5OXlyWQyZrtRaSx6wIhQ/UpLS6dPn84UbWxs5s6dyyUPAED92dnZzZo1iylmZWUtX76cSx41QiNUv6ioqEuXLjHFiIiIpk2bcskDAKAWn376qZeXF1OMjIy8du0alzzqgqlRNdOTjRgAQD8p3Spr5MiRP//8M5c8aoERoZpNnTqV6YJEFBcXhy4IABLQt2/fl19+mSlu2LDh8OHDXPKoBUaE6nT8+PHAwEDmLZXkZu0AoLekd5yOToYWJ7lcHhwczHRBY2Pj2NhYTokAANSvbdu2n376KVM8c+bM2rVrueSpP4wI1Wbt2rWjR49miiEhIdHR0VzyAABoSGFhoUwmY7YbdXR0TE9P18XDxjEiVI+ioiLhIxNKVxsDAOg6a2tr4fNg2dnZkZGRXPLUExqhekRGRt68eZMpLlq0yM7OjkseAACN+vDDD/39/ZlidHT0f//9xyVPfWBqVA0uX77s6elZUlKiWPTy8jp37pyxsTGvVAAAGnXkyJEePXowTeT111//7bffeEWqG4wI1SAsLIzpgkQUExODLggAEhYUFCQ8nmnbtm27du3ikqfOMCKsr3379vXu3Zspvvnmm1u2bOGSBwBAa7Kystzd3R8+fKhY9PT0TExM1KGRAEaE9VJZWRkSEsIUzczMdPSOMQBArTg7O0+ePJkpJicnf/vtt1zy1A0aYb18++23iYmJTHHKlClt27blkgcAQMumT5/esmV
Lpjh79mzmnAoxw9Ro3eXn58tksjt37igWnZycUlNTrayseKUCANCy9evXv/vuu0xx4sSJcXFxXPLUFkaEdTd37lymCxJRZGQkuiAA6JURI0Z0796dKX711VcXLlzgkqe2MCKso9TU1A4dOpSXlysWu3XrdvToUZxBDwD65syZM507d66qqlIs9unTZ8+ePbwi1RxGhHUUGhrKdEEDA4PY2Fh0QQDQQx07dhw1ahRT3Lt3744dO7jkqRWMCOvizz//fOWVV5ji6NGjf/rpJx5xAAD4y87OlslkzDl0rq6uSUlJIj+HTuojwooKKil5+r/qw/a6KSsrCwsLY4pWVlaLFi2q/4sDAOgoR0dH4ZbLGRkZK1as4JKn5qQyIiwvp+xsysmhO3coL4/u36eiIioupspK9neamJClJdnYUMOGZGdH9vbk6EhNmlCNpzSjoqKmTJnCFBcuXDhjxoz6//8AANBdZWVl3t7ezHaj1tbWaWlpzZo145XquXS5EZaV0eXLdPkyXb1KOTn1Gu2ZmlLz5tS6Nbm4kJMTPftsyZycHJlMdv/+fcVimzZtkpOTzc3N6x4AAEAStm3b9sYbbzDFDz744LvvvuOSpyZ0sBEWF1NKCiUn05UrSgZ8qsnlzx/5WViQTEaenuTqSkZGzL/86KOPVq9ezRS3bt0q/IMHANBPAwYMYLYbNTQ0PH78eOfOnXlFUk13GqFcTpmZdOYMpaXVuv/VjaUltW9P/v7UpMmjwrlz5/z9/SurX71Xr14JCQnayAMAoAuSk5N9fHwqKioUi4GBgYcPHxbnunpdaIQVFZSYSMeOEbNhT02Gd2rh4kKBgeTq+uKLLx44cEDx3xgZGZ05c6ZDhw7aiAEAoCM+/fTTlStXMsWNGzcOHz6cSx7VxN0IKyvp1Ck6coQKC3lHoV9u3BgumBT9+OOPv/rqKy55AABEKz8/v127dsx2oy1atEhNTW3QoAGvVM8i1scn5HK6eJG+/JLi48XQBYvLy6cJjlWytbWdN28elzwAAGJma2v7+eefM8Xr168vW7aMSx7VRDkizM2lv/6iq1d553hq3oEDEfv2McXY2NhJkyZxyQMAIHIVFRUdO3Zkthu1sLBISUlp1aoVr1RKiWxEWFVFBw/SqlWi6oI3CgqWHjnCFD0cHT/p149LHgAA8TM2No6JiWGKxcXFwofuuRNTI8zLox9+oH37tLQotMam7t79oKyMKUb362eyZQvt2EHVdxwFAIBH+vTpM2TIEKa4cePGgwcPcsnzLKKZGk1Opu3bqbSUdw7WsaysoO+/Z96jwTLZjhEjHv+DvT299daTRywAAOCJjIwMLy+v0uqf7X5+fqdOnTJ89tYlWiaCHHI57d1LW7aIsAtWyeXB8fFMFzQ1Morq3//pP+fm0urVlJ6u3WgAADrA1dV14sSJTPHs2bM//vgjlzxK8R4RVlTQ1q2Umsozw7P9ePbs+3/8wRTDAgOXKTbCRwwMqF8/CgjQUjIAAB1RWFjo5uZ269YtxaKDg0N6enrDhg15pVLEdURYUkJr14q2CxaWls4UbBnj0KDBrJ49lfxuuZx27aLdu0kkU80AAOJgbW09f/58ppiTkyOeE3v4NcLiYlqzhrKyuAV4noWHDt0SPMK4sE+fhio21z56lP7+G70QAEDRmDFjhBuNxsbGpovjphKnRvhoLHj7Np+r10Bmfn7s8eNM0bdp0zG+vs/5L0+dQi8EAFBkaGgYFxfHbDRaVlYmPNKOCx6NsLyc1q8XcxckotCdO0ur7xhLRLEDBxrVZJnTqVO0d69GYgEA6KaAgIC3336bKW7fvj0+Pp5LHkVab4RyOf36K12/ru3r1kbC5ct/CO5cvu3t/ULr1jV9iSNH6MQJ9aYCANBpy5YtE240GhoaWs77aWytN8Ldu0X+pEFlVVWw4BuKhYnJ4r59a/dC8fF06ZLaYgEA6LgWLVqEhYUxxZSUlG+++YZLnie02wjPn6djx7R6xdr7+tS
pC9nZTHFKYGDrRo1q90JyOW3dSnl56goGAKDrwsPDhRuNRkRE3Llzh0ueR7TYCHNz6c8/n/6jKJeT5BcXz92/nyk62dhMDQqqy8uVlNCWLSS41wgAoJ8sLCwWL17MFPPz8+fMmcMjzmPaaoSVlbR1a7VtOUV5TvHn+/bdefiQKS7t16+BqWkdX/H2bdqzp76xAACkYvjw4T0FT2N/880358+f55KHtNcI9+0jwXyj2KTk5q46fZopBjg7v+PtXa/X/fdfuny5Xq8AACAhsbGxzEajlZWVISEhvPJopRHevElHj2rjQvUTsnNnefWDLwwNDGIHDjSo/+B1+3YcUgEA8Iifn9+YMWOYYkJCwu+//84jjhYaoVxOf/759I6gKG8NEtEfqak7BYs8R/v6dnFyUsOr37tHBw6o4XUAACRh0aJFwo1GJ0+eXFJSov0wmm+Ep0+T4l6rorw1WFZZOXX3bqZobWa2sHdvtV3j+HG6e1dtrwYAoMscHBxmzpzJFDMzM2NjY7UfRsONsKyMBIswRSj2+PF0QZea2aNHM2vr+r70kxFwZSVWzQAAPDFp0iSZTMYUFy5cePPmTS0n0XAjPHaMHjzQ7CXqLefBg0WHDjFFF1vb4G7d1PDqiiPg1FSRb6kDAKA1pqamy5YtY4pFRUXCkaKmabIRlpSI//F5Ipq+Z899wax09IABZsbG6r+YLoyPAQC0Y8iQIQMHDmSKa9euPaHdLSo12QhPnhThofOMs7du/XTuHFPs3abNq+7uGrleRgZVP50SAECfRUdHm5iYKFaqqqqCg4O1eWi8xhphZaX4d52Wy+WT4uOrqr/dRoaGsYJvKOqkC0+SAABoh4eHx/jx45nisWPHNmzYoLUMGmuEyclUVKSpF1eTTRcvHrp6lSl+7O/f3tFRg1fVhXcGAEBr5s6d26RJE6YYHh7+QFtLTDTWCE+d0tQrq0lxefl0wamBthYWES++qNkLV1XR2bOavQQAgO6wtbUV7jV648aNJUuWaCeAZhphfj5du6aRV1afJUeOXL13jynO69WriaWlxq8tuCsJAKDPxo8f36FDB6YYFRV15coVLVxdM43wwgWNvKz6XC8oiBLcq/Owtx/XqZM2Lp+XR1p/UAYAQLSMjIxiYmKYYnFxcXh4uBaurplGmJyskZdVn7Bdux6UlTHFmAEDTIyMtJRA9G8RAIA29e7d+7XXXmOKmzdvPqD5/Sk10Ajv3RP5QRNHs7I2X7zIFF91dx/Qtq32QqSlae9aAAC6YPny5ebm5kwxODi4svpxCGqngUYo2LpaVKrk8uD4eOb5FFMjo6X9+mk1x507JLhDCQCgz1xcXIKDg5niuXPnfvjhB41eVwONUNxn7/1w9uzJGzeYYkhAgKxxY21HyczU9hUBAMRt5syZzZs3Z4qzZs26p8mRgwYaoeDJPPEoLC39fN8+pujQoMH07t05pBH9wloAAC2zsrJauHAhU8zJyVmwYIHmLqruRpif/3SXbfEdPTjvwIFbhYVMMbJv34aCWWltyMricFEAAHEbPXp0ly5dmOKKFSvSNLa0Qt2NUMRHD2bk5X0h2PXNr1mz93x9ecQhyssT/16sAABaZmBgEBsba1C9g5SXl0+ePFlDV1R3IxTxetGQnTtLKyoUKwZEcQMHGnJs2Dk53C4NACBWAQEBI0aMYIp//fXXP//8o4nLqbsR5uaq+QXVZG9m5g7BsPqd9u17tGrFJc9jYn27AAD4WrJkSYMGDZhiaGhoeXm52q+lgXuE4lNRVRUcH88ULUxMFvXpwyXPU6J8uwAAuHNychJuK5Oamrpy5Uq1X0vdjfD+fTW/oDqsPHHiomASMjwoqFWjRjziKMCjhAAAzzBlypTWrVszxblz5+aqey5NrY2wooKKix//WjRLRvOKi+cfPMgUW9jYhAUGcslTDc5jAgB4BnNzc+EBFPfu3fv888/VeyG1NsInXZBEtGR0dkLC3YcPmWJU//4NTE255KlGEAw
AAJ546623XnjhBaa4evXqxMRENV5FrY2wpESdr6YOybm5354+zRQDnZ3f8vLikoclvncMAEBUYmNjjaofh1BZWRkSEqLGS6i1EWpgMU89/f3ff5VVVYoVQwODLwYNMhDJgFV87xgAgKj4+vq+//77TDEpKemq+nYxU2sjrN5yxCAsMPDERx8FOjs/qQz39u7YrBnHSNWI7x0DABCbhQsXWlhYPPq1sbHxxIkT09PTW6nv4TfNnEcoYmJZwwMAADUjl8vl1RdgytW6HlOtjdBQdG016ujRLt9+e1RhV89fLl48o7gPHF/ie8cAAMRm1qxZJf+/oqKiomLFihVubm5inRo1MVHnq6nDoHbtjKo3myq5/LO//1bvt4m6E987BgAgKkrPI/Ty8hLr1CiXMxxU8rS3H+fvzxSPZmX9kpTEJQ9LfO8YAICoCE+oNzIyio2NVeMl1NoI//9mJpGIHqif36tXY0tLpjhl164HZWVc8lQjCAYAAE/88ssvBw4cYIofffRRhw4d1HgVtTZCY+OnvVAkzycQ2VpYfC54HvN6QcGyo0e55KnGyqomv6sKi0sBQE106POkuLh42rRpTNHW1nbevHnqvZC6F2s0bKjmF1SHTzp3bu/oyBSXHjlylftWnzXY7PTkyZPdu3ffvHmz5tMAgPQNGDBg3Lhxd+7c4R3k+ZYtW3blyhWmGBER0aRJE/VeSN2N0NZWzS+oDsaGhjEDBjDF4vLy6Xv3csnzlMq36+bNm6NGjerateuxY8fCwsIeYj82AKifX3/9dc+ePd9++62bm1tcXBxz701Ubty4sXTpUqbo4eHxySefqP1a6m6E9vZqfkE16ePiMsTNjSluvHDhoPoW4NbFM96u0tLS+fPny2SydevWPVrgmpWVtXz5cu2GAwBJKSkpmTp16qNf5+XlBQcHd+rU6aDgTAKRmDp16oMHD5hidHS0iQYW26u7EQpmIMUjesAAM2NjphgcH1/FcV2Pg4PSspGR0ebNm5kfgsjIyGvXrmklFgBIUFRU1OXLlxUriYmJpwW7MYvBsWPHNm7cyBQHDx48cOBATVxO3Y1Qcfcy0SwcfcTVzm5i165M8eytWz+ePcslD9nZkZmZ0n9jbGwcExPDFIuLi2fMmKH5WAAgQTdu3BAeadS2bdsJEyZwyaNCVVVVcHAw87S3qalpVFSUhq6ogXuEDRo8/rVoFo4+Mbtnz2bW1kxxxt6997mcAtGihYp/2bdv35dffpkpbtiw4dChQ5rMBADSNG3atCLBAahxcXGmYjiQrro1a9acOHGCKU6cONFNcHtLXTSwxVfLlup/TTWxNjOb36sXU8x58GARl+7yvG0RYmNjzaoPGeVyeXBwsA6tfgYAMTh+/Pj69euZYr9+/QYNGsQljwqFhYUzZ85kig4ODrNmzdLcRTXQCF1c1P+a6jPGz6+zkxNTjD1+PP3uXW1Hed4b1bZt208//ZQpnjlzZu3atRrLBABS8+gLNDPTqPT+ixgsXLjwlmA76IULFzbU5LN5GmiEbduq/zXVx9DAIG7gQGbStqyycsquXVrN0aRJTR4ijIiIaNq0KVOcNm1aQUGBRlIBgOSsXbv233//ZYoTJ070Esn55AoyMzOFe6f5+vqOGTNGo9fVQCNs1EjMa0eJKMDZ+W1vb6a4PS0t/tIl7YWo2WS3tbW1cA+F7OzsyMhIDWQCAKkpKioSLrKzs7MTTj+KQWhoaGlpKVMUnlCvdpo5BsjTUyMvqz7L+vdvILhFHLpzZ7nWHi+t8Vv0wQcf+Av2DY+Ojv7vv//UnQkApGbRokU3b94UFu3s7LjkUSEhIeGPP/5gim+//fYLgj0y1U4zjVAw3hKbFjY2YYGBTDElN/ebU6e0cXk7O2revIa/19DQMDY21qD6EtyysrInD8YCACh1+fJl4Y1AHx+fDz/8kEseFSorK4ODg5mihYXF4sWLtXB1zTRCO7vnLonkLjwoqJXgLl3E/v13tLCTma9vrX5
7UFDQsGHDmOLvv/++S8v3NQFAp4SFhZUIng3TwkxjHXz99dcXLlxgilOmTGndurUWrq6xE9I7ddLUK6uJhYnJ4r59mWJ+cfGc/fs1e2FDQ/Lzq+1/FBUVZSk4sykkJKSiokJNsQBAUvbt2/fbb78xxWHDhr344os84qiSn58/d+5cpujk5KS1eS+NNUJPzxqeMcTRcG/vnoKR6zenTp3PztbgVev0zjg7O0+ePJkpJicnr1q1Sk2xAEA6lM40mpubC7exFoPPP/9ceBrG0qVLGzzZnkXDNNYIjYyoSxdNvbj6xA4caFj99ltlVVVIfLwGLym4N1lD06dPbynYrGD27Nl3tf8EJACI26pVq86fP88Uw8LCtDPTWCspKSnCL/QBAQHvvPOO1jJorBESUefOz9pLUzz8mjUbI5ioTLh8+ffUVI1cz9W12nastWFhYbFo0SKmqHRKAQD0WX5+fkREBFN0cnIKDw/nkke1kJCQ8vJyxYrSFYIapclGaG5O3bpp8PXVZFGfPg3NzZni5J07SzRx+61+s/MjRozo0aMHU1R6kxkA9NbcuXOFM42LFy+2Et/tqj/++GPnzp1McfTo0V20O6GoyUZIRIGBpK1J3jpzaNBgpqC7ZObnxx4/roZXV9zWyN1d9Ubbz2VgYBAbG2toWO1PraKiIiQkpD4vCwCSkZKS8tVXXzHFbt26jRw5kkseFZQ+BmZtbb1w4UItJ9FwIzQ1JcEm1yI0qVs3WePGTHHhwYM3Cwvr+9JPRvdGRiRYpFoHHTt2HDVqFFPcu3fv9u3b6//iAKDrQkNDmZlGAwODuLg4bc401lBsbGx6ejpTnDlzZrO63j+qMwO5pk8NlMtp9Wp6somqXC7C45mIaHta2quCcyDf8/X98bXX1HOBoCC1NEIiys7OlslkzHajrq6uSUlJZqK/KQsAmrNjx44hQ4YwxdGjR//000884qiSk5Mjk8nu37+vWHRxcUlOTtb+55iGR4REZGBAgwc/bX6i7IJENMTNbaBgu/C1iYknbtxQw6s3akTq2yXI0dFx+vTpTDEjI2PFihXqugQA6JyysrKwsDCmaGVlJVxkJwbTp09nuiARRUdHc/k2r/lGSETNm9f5mQFtih4wwKT6hgtVcnlwfLwaBs1DhpCJSX1fREFoaGi7du2Y4vz584XHlwCAnlixYoVwpnHGjBnNa7yho9acPXtWOEjt3bv3q6++yiOOdhohEfXqJfIjKYjIw95+vGB762NZWRvquSaza1dq06ZeryBgamoqfDC2sLBw9uzZ6r0QAOiEnJycBQsWMEUXFxcRrqSTy+WTJk1iDhg3MjISHsCkNdpqhEZGNHRotVGRpu9N1sncF19sItjJLHzPngdlZXV8xaZN1XVrkPHaa6/179+fKf74448nT57UxOUAQMxmzpwpnGmMiooyFzwbxt2mTZsOHTrEFD/++OP27dtzyUPaWCyj6Px52rZNe5erk5UnTnz6999McfYLL8yrw/JXc3MaO5Y0dtxJcnKyj48Ps91oQEDAkSNHRLhCDAA05OzZs507d66sfopcr169EhISeEV6luLiYg8Pj6tXryoWbW1t09PTmzRpwiuVtkaEj3ToQAEBWr1i7Y339+8gmMWNOnr0yr17tXshAwMaOlRzXZCIPD09x40bxxSPHTv2yy+/aO6iACA2wcHBTBfkO9OowpIlS5guSETz5s3j2AVJ2yNCIpLLadMmEtzRFZWEy5f7rFnDFN/y8vpFcBaSKi+9pIXdVvPz89u1a8dsN9qiRYvU1FSt7VcLABz98ssvw4cPZ4qffPLJypUrueRR4fr16+7u7g8ePFAsenh4JCYmmqh1OWFtaXdESEQGBvTmm/XcYEXTerdp85q7O1PcnJR04MqVmr5EUJB29hy3tbUVbip4/fr1ZcuWaeHqAMBXcXHxtGnTmKKtra04tyAOCwtjuiARxcTE8O2CxKEREpGJCY0cSU2bPv5HUa6aWT5ggLmxMVM
Mjo+vrL7SSTl/f+rTRyOxlPnkk0+EN5mXLl0qnH8AAIlZunTpFcEX9Dlz5vCdaVTq6NGjmzdvZoqvvvrqgAEDuORRxKMREpG5OY0a9fgcBlEu63CxtQ0W7Bh+7vbtH86efc5/2akTDRqkzf9TRkZGMTExTFHp90QAkBKlcz8eHh4ff/wxlzwqVFVVBQcHM3filD4GxgWnRkhEFhY0ahQ5O3ML8Dwze/Zsbm3NFGclJNwrKXnmfxMYSC+/rP3W3qdPH+G+Sps2bTp48KCWkwCA1kydOlU40xgdHc19plHohx9+ED7ZFRISIpPJuORhaH2xDKOign77jVJSeGZ4tp/OnRvz++9McXJgYJTgAT4yMKD+/TkeO5WRkeHl5VVaWqpY9PPzO3XqFHNaBQBIwLFjx4KCgpgP8FdeeUWE++8XFha6ubkx+145ODikp6c3bNiQVypFvD8ijY1p2DDq3p1zjGcY7ePTxcmJKa7499805qwvU1N6+22+hy+6urpOmjSJKZ49e/bHH3/kkgcANOdZM41RUVG8Iqkwb9484e6PkZGRIumCxH9E+ERyMm3fTtUHNGJwLCsr6PvvmffoZZnszxEjHv+DvT299RaJ4Na0+L92AYBa/PDDDx988AFTDAsLE+FycZ2YrBJLDvL0pI8+IvFtDhvg7DyiQwem+Fd6+j///UdE1LEjjR0rhi5IRNbW1vPnz2eKOTk52j/lEgA0p7CwcNasWUzRwcFBWBSDkJAQpgs+Oh9RPF2QRDQifKSqig4fpoMHqfouCXzdKChw+/JLZrtRdweH8/v2mXh68kqlVFVVVbdu3Zib0qamphcuXBDJTWkAqKfw8HDhYsvVq1d/+OGHXPKosHfv3r6CzZZHjBixfv16LnmeRUQ9mYjI0JB69qRx46hVK95RnnKysQkPCmKKqTk5K3ft4pJHBUNDQ+FR1GVlZVOmTOEVCQDUKDMzMy4ujin6+fmNGTOGSx4VKioqgoODmaKFhYUIz0cUWSN8xN6eRo+moUNJNHe2pgQFtW7UiCnOnTs3NzeXRxxVAgIChPstbd++PT4+nkseAFCj0NDQUsFaitjYWKPqZ6mKwcqVKy9evMgUw8PDW4lpnPOIyKZGGZWVdOoUHT5MRUW8o9Dmmzff/vZbpjh+/Pivv/6aSx4VRLuhHwDUR0JCQh/BrlVvv/32pk2buORRIS8vTyaT6co2yKIcET5hZERdu9KkSTR4MDVuzP5brbVwFxd69923Vq164YUXmH+zevXqxMRELcWosRYtWoSFhTHFlJQUEfZsAKihyspKpTONixcv5hHnOWbPns10QSKKiooSYRcksY8IFcnllJlJZ85QWpqWltJYWlL79uTv/2RR6Llz5/z9/XHoFwBo35dffvnZZ58xxYiIiDlz5vCIo4rSo1IDAwMPHz4szqNSdacRPlFcTCkplJxMV67UuiPK5c/f/8zCgmQy8vQkV1cSTLuPGzfuW8EE6a+//jp06NDaJdG8TZs2vfPOO0xxwoQJX375JZc8AFBn+fn5MpnsTvWtPEQ70zhgwIBd1dcSGhoa/vvvv/7+/rwiqaaDjfCJsjK6fJkuX6Zr1yg7m2pyLsSzmJqSkxO1akUuLtSihYpmmZubK5PJ7lU/pLdly5YpKSmWlpZ1D6AZL7zwArPdqJGR0ZkzZzoInowEADH77LPPhF9hN2zYIPyyy91vv/0mHBh8+OGHq1ev5pKnJnS5ESoqL6fsbMrJoTt3KC+P7t+noiIqLlYyZDQxIUtLsrGhhg3Jzo7s7cnRkZo0qflO2cuXLxfegVuwYMHMmTPr//9Dvc6ePevv719V/StC79699+7dyysSANRWcnKyr69veXm5YjEgIODIkSNim2ksKyvz9vb+79F+I//P2to6LS2t2aPjhsRJLm3l5fLi4qf/q6ys/0uWlZW5ubkxb6OlpeW1a9fq/+JqJ9yHiYh+++033rkAoKaEJ/Y9mmnknUsJpc8ILlu2jHeu55D
KiFC7/vrrr8GDBzPFUaNGrVmzhkseFXJycmQy2f379xWLLi4uSUlJ5ubmvFIBQA398ccfr732GlN8//33v//+ex5xVMnOzpbJZAUFBYpFV1fXpKQkMzMzXqlqQtyPT4jVyy+//NJLLzHFdevWHT58mEseFRwcHIRztpmZmbGxsTziAEAtlJWVTZ06lSlaW1svWLCASx7VwsPDmS5IRLGxsSLvgiSde4Ral5qa2qFDB2bWvlOnTidOnBDVZrJEVFZW1r59+/T0dMWilZVVWlpac/Htcg4ATyxZsmTatGnCorA7cnf69OkuXbowKxL69OmzZ88eXpFqTlwf2TrE3d19woQJTPH06dM///wzlzwqKD2lrKioSISrewDgiezs7MjISKbo4uIiPHmUO7lcHhwczHRBY2NjXZl5QiOsu4iICHt7e6Y4depU4eQAd6+88srAgQOZ4po1a06cOMElDwA81/Tp05m7+0QUHR0twpnG9evXC28MTZgwwdvbm0ue2sLUaL188803H3/8MVOcOXOmCGfwU1JSfHx8mLncbt26HT16VGwrsAFAh559evjwoYeHx7Vr1xSLdnZ26enpjYVbY4oSRoT1MnbsWB8fH6YYFRV16dIlLnlU8PDwEPbs48ePb9iwgUseAHgWuVw+adIkpgsaGRmJc6YxMjKS6YJEtGDBAl3pgoQRYf0dPny4Z8+ezNs4dOjQX3/9lVekZ1G6S5OTk1NaWpoId2kC0FsbNmwYOXIkU/zss89WrFjBJY8KWVlZ7u7uDx8+VCx6enomJiYaGxvzSlVbGBHWV/fu3d944w2muHXr1t27d3PJo4Ktre3cuXOZ4o0bN5YsWcIlDwAIFRcXz5gxgyna2tpGRERwyaNaaGgo0wWJKCYmRoe6IGFEqBbXrl3z8PBgfhq8vLzOnTsntp+GysrKjh07nj9/XrFobm6ekpLSunVrTqEA4KmIiIh58+YxxS+++OLTTz/lkkeFI0eO9OjRg2kib7zxxtatW3lFqhuMCNWgZcuWISEhTDEpKem7777jkkcFIyOjmJgYplhSUiLCx5IA9FBWVpbwYSdPT89x48ZxyaNCVVXVpEmTmC5oamoqzvMRVUMjVI8ZM2a0bNmSKc6cOVN4NCV3vXv3fv3115nili1bDhw4wCUPADwRFhYmnGmMjo42MTHhkkeF1atXnz59milOnjy5Xbt2XPLUB6ZG1WbdunWjRo1iisHBwcIRGHeZmZleXl4lJSWKRV9f31OnThkJjmAEAO04evRo9+7dmc/k1157bdu2bbwiPUtBQYGbm9vt27cVi46Ojunp6TY2NrxS1RlGhGrz7rvvBgUFMcUvv/zy4sWLXPKo4OLiIpzLPXfunAi38QXQE8+aaVy6dCmvSCrMmTOH6YJEtGTJEl3sgoQRoXop3W2vb9++IlxBWlRU5ObmdvPmTcWivb19enp6o0aNOIUC0F+rV6/+6KOPmGJ4eLgIb7ldunTJ29u7tLRUsSjOnZZrSCdDi1anTp3effddprhnz56//vqLSx4VrKysFi5cyBRzc3Pnz5/PJQ+APissLPz888+ZoqOj4/Tp07nkUW3SpElMFzQwMIiNjdXRLkgYEaqd0hO52rZte/HiRbHtECiXy7t168ZsN2piYnLhwgXhycMAoDlhYWHLly9nij/88MOYMWO45FFh9+7d/fv3Z4rvvvvuunXruORRC11t4KLl6OgYHh7OFC9duvTll19yyaOCgYFBXFwcs9FoeXn55MmTeUUC0EMZGRnCz4eOHTuOHj2aSx4VKioqhMsLLC0thdNLugWNUP0mT57ctm1bpjh37lzhvWXuunXrJtzJ6a+//vrnn3+45AHQQ8HBwboy0/jFF18kJSUxxenTpwsfHtMtmBrViK1bt7755ptMcezYsd9++y2XPCrcuHHDzc3twYMHikV3d/fz58+L8NElAInZs2dPv379mOKIESPWr1/PJY8KeXl57dq1y8vLUyw6OzunpqZ
aWlrySqUWovvGIQ1Dhw4V/nB///33p06d4pJHBScnJ+FcbmpqqgjncgEkRulMo4WFxaJFi7jkUW3GjBlMFySi6OhoXe+ChBGh5iQlJfn6+lZUVCgWg4KCDh06JLbz/0pKSjw8PK5cuaJYtLGxSU9Pd3R05BQKQPri4uKCg4OZ4ty5c4UrSLlLTEzs1KlTZWWlYlGcH2h1gBGhpnh5eX344YdM8ciRI1u2bOGSRwVzc3PhQ7sFBQVz5szhEQdAL+Tl5QmfVnJ2dg4LC+OSR7Xg4GCmCxoaGgpX2+koNEINWrhwofBoSqV7CXI3bNiwF198kSl+++23wr0EAUAtZs2aJdyLOCoqSoQzjb/++uv+/fuZ4tixYzt16sQjjvqhEWqQnZ3drFmzmKLS3eXFIDY2ltlotKqqKjg4GJPnAGqXlJS0evVqphgYGDhs2DAueVRQejqNjY2NlGaM0Ag169NPP/X29maKixcvvnbtGpc8Kvj4+HzwwQdM8fDhwzp3tBiA+IWEhDALCEQ70xgVFXX58mWmOGfOnKZNm3LJowlYLKNxSpdHjxw58ueff+aSR4Xc3FyZTHbv3j3FojSWRwOIx2+//TZ06FCmKNrHq9zd3YuKihSLbdu2TUpKMjU15ZVK7TAi1Li+ffsOHjyYKW7YsOHQoUNc8qhgb28/e/ZsppiVlSXCk6QAdFRZWdm0adOYorW1tfBUejGYNm0a0wWJKC4uTkpdkDAi1I6MjAwvLy9m84iOHTuePHlSbJtHlJeXt2/fPi0tTbFoaWmZkpKi65tHAIjBokWLZs6cyRSjoqJEuLXh8ePHAwMDmR7Rr1+/Xbt28YqkIeL6FJYqV1fXTz/9lCmeOXNmzZo1XPKoYGJiEh0dzRQfPnwoXPUDALWVnZ29ZMkSpqj084E7uVwuXCtnbGwsyfkhNEItiYiIEN5bnj59OnNOhRgMGjTopZdeYoo///zz4cOHueQBkIypU6cK/8rHxsaK7WgaIlq7du2///7LFCdOnOjl5cUlj0ZhalR7lB68OW3atMjISC55VLh06ZKXl1dZWZliUacP3gTgDgd3ixY+1LTngw8+8Pf3Z4rR0dH//fcflzwqtG3bdsKECUzx9OnTIlzpCqATHs00Ml1QtDONixYtYrogEc2fP1+SXZAwItSyo0ePdu/enXnPX3vttW3btvGK9CwFBQVubm7M0VGOjo7p6ek2Nja8UgHoqHXr1o0aNYopBgcHi7ARXr582dPTs6SkRLHo4+Nz+vRpZs8NycCIUKuU7hzx+++/79y5k0seFZTuHKH0Vj8AqKZ0uZnSnafEICwsjOmCpGznKSnBiFDbsrKy3N3dme1GPT09ExMTjY2NeaVSqqqqqkuXLsx2o6amphcvXmzXrh2vVAA6Z9asWcIz3L/66quPP/6YSx4V9u3b17t3b6Y4bNiwzZs3c8mjHRgRapvS3eWTk5NXrVrFJY8KhoaGsbGxzJ5PSh8HBoBnUbolhZeX19ixY7nkUaGyslJ4LJTS02kkBo2Qg2nTprVq1Yopzp49W7gVPXfdu3cX7gX122+/iXCdG4A4hYaGCg+ciYmJEdsMEBGtWrXq/PnzTDEsLKx169Y84mgPpkb52LBhw8iRI5niZ599tmLFCi55VFA6l+vl5XXu3DkR/k0GEJXDhw/37NmT+ZgdOnTor7/+yivSs+Tn58tksjt37igWnZycUlNTrayseKXSDowI+XjnnXd69OjBFL/66qsLFy5wyaOCs7NzSEgIU1R6iAwAKFJ6kJmpqenixYt5RVJh7ty5TBckosWLF0u+CxJGhBydOXOmc+fOzHNFvXv33rt3L69Iz/Lw4UMPDw/m6Cg7O7v09HThycMA8Mg333wjXA4zY8YM4cIZ7lJSUnx8fMrLyxWL3bp1O3r0qAhPhlI7jAi56dix4+jRo5liQkLC9u3bueRRwdLScsGCBUwxLy9v/vz5XPIAiF9BQYHwASRHR8fw8HAecZ4jNDSU6YI
GBgbiPB9RI+TAz+3btxs2bMj8ibi6upaUlPCOxqqqqurevTsT1djY+MKFC7yjAYiR8IYCEa1Zs4Z3LiWUfvkePXo071zagxEhT46OjtOnT2eKGRkZcXFxXPKoYGBgEBsby2w0WlFRofRvO4CeS01N/fLLL5lip06d3n33XS55VCgrKxM+0GVlZbVo0SIuebhAI+QsJCRE+HD6ggULbt26xSWPCkr/Gu/Zs+fPP//kkgdAtJTONAq/SorBihUr0tPTmeKMGTOaN2/OJQ8XWCzD3++///76668zxffff//777/nkkeF7OxsmUzGnCPj6uqalJQkwnNkALj466+/Bg8ezBT/97//rV27lkseFXJycmQy2f379xWLLi4uSUlJ5ubmvFJpn+i+nuih1157bcCAAUzxp59+OnnyJJc8Kii91Z+RkSGcBQLQT+Xl5cKz5pUuNxODmTNnMl2QiKKiovSqCxJGhCKRnJzs6+vLzKUEBAQcOXJEbKu2ysrKvL29maOjrK2t09PThScPA+ib6OhoYSNcsGDBzJkzueRR4dy5c/7+/pWVlYrFXr16JSQk8IrEC0aEouDp6Sk8s/fYsWObNm3ikkcFpY8DFxYWfv7551zyAIhHbm6u8Jmili1binNNWXBwMNMFjYyMYmNjOcXhCY1QLObPny98OH3q1KkPHjzgkkeFN954o1+/fkzx+++/P3XqFJc8ACIxa9ase/fuMcXo6GhLS0secVT55ZdfDhw4wBTHjRvXoUMHLnn4wtSoiHzxxRcTJ05kihEREcLHcrlLSkry9fWtqKhQLAYGBh4+fFhsc7kA2qF0prF79+4HDx4U21+K4uJiT0/PK1euKBZtbW3T09ObNGnCKRRPGBGKyCeffNK+fXumuHTp0qtXr3LJo4LSQ2SOHj26ZcsWLnkAuBPONCo9yEwMli5dynRBIoqIiNDPLkgYEYpNQkJCnz59mOLw4cM3btzIJY8KeXl5MpmMOTrK2dk5NTVVhBNBABq1ZcuWt956iymOHz/+66+/5pJHhRs3bri5uTH3XDw8PBITE01MTHil4gsjQnHp3bv3kCFDmOKmTZsOHjzIJY8KdnZ2s2fPZopZWVlRUVFc8gDwUlJSMnXqVKbYqFGjefPmccmjmtKVB9HR0XrbBQkjQhHKyMjw8vIqLS1VLPr5+Z08edLIyIhXKqUqKir8/PwuXryoWLSwsEhJSRGePAwgVfPnzxeumo6JiRGe9s7dsWPHgoKCmI/9V155RYR7/WsTRoSi4+rqOmnSJKZ49uzZn376iUccVYyNjWNiYphicXHxjBkzuOQB0L4bN24sWbKEKbq7u0+YMIFLHhWedT4iZnHQCMVo1qxZzZo1Y4ozZswQ7gHBXd++fYW7SW3cuPHQoUNc8gBoWXh4uK7MNP70008nTpxgihMnTpTJZFzyiAemRkXqhx9++OCDD5jilClTli5dyiWPCkrncjt27Hjy5EkRbjEMoEbHjx8PDAxkPkVffvllEe5EX1hY6Obmxuzm7+DgkJ6eLjwMTt/gc0qk3nvvvc6dOzPFuLg44T7x3Lm6un722WdM8cyZM2vWrOGSB0A75HL5pEmTmC5oYmKyfPlyXpFUWLhwofBMm4ULF6ILEkaEYqZDt7ULCwtlMtnt27cVi46OjmlpafhrBlL1008/jRkzhimGhoaKsBFmZmZ6enrqxBI8LjAiFK+AgIDhw4czxR07dsTHx3PJo4K1tbVwpXh2dnZkZCSXPACaVlRUJNxH297eXvhMkRiEhoYyXZCIYmNj0QUfwYhQ1K5fv+7u7q4Tj75WVVV17dqV2W7U1NT04sWLwpOHAXTd9OnThbvPr1q1Srh7PndKt+l4++23RbinPy8YEYpaixYtpkyZwhRTUlJEuF2FoaFhXFwcs5tUWVmZ8EFjAF2XmZkpPKXB19dXuMCNu8rKSuHjjBYWFsIurs/QCMVu6tSpwofT58yZc+fOHS55VAgMDBTuMvX777/v3Lm
TSx4ADQkLCyspKWGK4pxp/Prrry9cuMAUp06d2rp1ax5xRApTozpg06ZN77zzDlP85JNPVq5cySWPCtevX3dzc3v48KFi0dPT89y5c2KbywWoG6UzjcOGDdu8eTOXPCrk5+fLZDLmS3OLFi1SU1MbNGjAK5UIYUSoA4YPH96zZ0+muGrVqvPnz3PJo0KLFi3CwsKYYnJy8qpVq7jkAVCvyspK4Sm75ubmInzAl4g+//xz4dTR0qVL0QUZGBHqhrNnz3bu3Jk55KV379579+7lFelZiouLPTw8mKOj9PmoM5CSlStXfvrpp0xx9uzZItxfOzk52dfXt7y8XLEYEBBw5MgREZ4MxRdGhLrBz89P+MRSQkLCtm3buORRwcLCYtGiRUwxPz9/7ty5XPIAqEt+fr7wlGwnJ6fw8HAecZ4jNDSU6YKiPR+RPznoiOzsbOHD6S4uLsXFxbyjsaqqqnr06MFENTIyOn/+PO9oAHU3ceJE4Ufozz//zDuXEr///rsw6vvvv887l0hhRKgzHBwchA/wZmZmCs9/4M7AwCAuLo7ZaFTpMm4AXaH0saWAgIARI0ZwyaOC0seWrK2tFyxYwCWP+KER6pJJkyYJ94lftGjRzZs3ueRRwc/Pb/To0UwxISHhjz/+4JIHoJ6EM40GBgbinGmMiYkRbkqs9EwbeASLZXTMjh07hEfYv/feez/++COXPCpkZ2e7ubkxR0e5uLgkJyebmZnxSgVQB9u3b3/11VeZIv7eSQZGhDrmlVdeGThwIFNcs2aN8Jgx7hwdHadPn84UMzMz4+LiuOQBqJuysjLhBk9WVlYLFy7kkke16dOnCw8ujY6ORhdUASNC3ZOSkuLj48PM0nTr1u3o0aNim6UpKytr3749M0tjbW2dlpaGWRrQFcuWLRPecouMjJw2bRqXPCqcPXvW39+/qqpKsSjO56xEBSNC3ePh4fHxxx8zxePHj69fv55LHhVMTU2FDxoXFhbOmjWLSx6A2srJyRGO/FxcXES48ksul0+aNInpgkZGRsJtUYGBRqiT5syZI3w4fdq0acw5FWLw6quvDhgwgCn+9NNPIpzLBRCaMWOGcKZx+fLl5ubmXPKosHHjxkOHDjHFjz/+uH379lzy6BBMjeqqr776asKECUwRO1wAqJEOzTRiR6f6wIhQV40bN65Dhw5McdmyZVeuXOERRxVPT89x48YxxWPHjuE4NBC54OBg4UyjCJ/cJaLFixczXZCI5s2bhy5YExgR6rB9+/b17t2bKWIXfAC1UHrqy4QJE7788ksueVRQeoI3Tn2pOYwIdVivXr1ef/11prhly5b9+/fziKOKra1tREQEU7x+/fqyZcu45AFQrbi4WLgo1NbWVrjXqBiEhYUJ1wdER0ejC9YQRoS6LTMz08vLizkj1MfH5/Tp02I7I7SystLPz485I9TCwiI5ORlnhILYzJkzR7hN/IoVKz777DMueVQ4evRo9+7dmU/yV199Vel2o6AURoS6zcXFRXg6WmJi4vfff88ljwpKl3Er/d4NwNf169ejoqKYooeHx/jx47nkUaGqqmrSpElMF1T62BKogEao82bMmNG8eXOmOGvWrHv37vGIo0rv3r2F+1T98ssvBw8e5JIHQKkpU6boykzjDz/8cOrUKaYYEhIi3JQYVMDUqBSsWbPmvffeY4qhoaHLly/nEUeVzMxMT0/P0tJSxaKfn9/JkyfFNpcL+unYsWNBQUHMB+OQIUNEuF98YWGhm5vbrVu3FIuOjo5paWnCI9tABYwIpWDUqFFdunRhil988UVaWhqXPCoo3ZLj7NmzIty8GPTQs2Yaxbmqa968eUwXJKLIyEh0wdrCiFAijh8/HhgYyPxpDho06K+//uIV6VmUfo11cHBIT0/HX2Dg6/vvv//www+Z4pQpU0R4yy0jI8PLy0s4uXLq1CnmKFB4LrxfEtGtW7eRI0cyxb///vuff/7hkkcFpQeE5uTk4NRQ4KuwsHD27Nl
MUemB2GIQHBzMdEGlB2JDTWBEKB03btxwd3cvKipSLLZt2zYpKcnU1JRXKqWqqqoCAgKY7UZNTU0vXLiAm/zAy9SpU4VToN99990HH3zAJY8Ke/fu7du3L1McMWKECHfe1wn47iAdTk5O4eHhTPHSpUsrV67kkkcFQ0ND4dHeZWVlYWFhvCKBnsvIyFixYgVT9PPzGzNmDJc8KlRUVAhvtFtYWCxatIhHHClAI5SUsLAw4cPpc+bMuX37No84qgQEBAj3r9qxY0d8fDyXPKDnQkNDmZlGIoqNjRXhTOPKlSsvXrzIFKdNm9aqVSsueSQAU6NSs2XLlrfeeospjhs37ptvvuGSR4UbN264ubkxD2x5eHgkJiaK8IEtkDClM43Dhw/fuHEjlzwq5OXlyWSyu3fvKhZbtGiRlpZmaWnJK5WuE92XHainYcOGvfjii0xx9erVp0+f5hFHFScnpylTpjDFlJSUr776ikse0E8VFRXC7ZksLCwWL17MJY9qs2bNYrogEUVFRaEL1gdGhBKUmJjYqVOnyspKxWL37t0PHjwotvP/iouLPT09maOjcIgaaNMXX3wxceJEphgRESHC/bWTk5N9fHwqKioUi4GBgYcPHxbbX23dghGhBPn4+AjXuR0+fHjr1q1c8qig9Ht3fn6+8KgKAE3Iz88Xbq7dokUL4VyFGISEhDBd0NDQMC4uDl2wvuQgRTk5OY0aNWL+rJ2dnR88eMA7mhIvvPACE9XIyCgxMZF3LpC+CRMmCD8VN27cyDuXEkq/yI4dO5Z3LinAiFCa7O3thY8GZ2VlifNw7djYWGaj0crKSuECcQD1Sk5OXrVqFVMMCAh4++23ueRRoaysTHhOi7W19bx587jkkRg0QsmaOHGil5cXU1y0aNG1a9e45FHB19dX+LTWvn37fvvtNy55QE/o0ExjVFTUf//9xxQ///zzpk2bcskjMVgsI2W7d+/u378/U/zf//63du1aLnlUyMnJkclk9+/fVyy2adMmOTnZ3NycVyqQsN9///31119nih988MF3333HJY8K2dnZMpmsoKBAsejq6pqUlGRmZsYrlZRgRChl/fr1e+mll5jizz//fPjwYS55VHBwcJg1axZTvHz5sjjnckHXlZWVTZ06lSlaW1vPnz+fSx7VwsPDmS5IRLGxseiC6oIRocRdunTJy8urrKxMsdipU6cTJ06IbcuMsrKyDh06MEdHWVlZpaWlCU8eBqiPxYsXT58+nSkuXbpUhItFT58+3aVLl6qqKsVi3759d+/ezSuS9IjroxDUrm3btsJ1cadPn163bh2XPCqYmppGRUUxxaKiohkzZnDJA1KVnZ0dGRnJFF1dXYVPE3Inl8uDg4OZLmhsbIyZEvVCI5S+OXPmCO+oK51s4W7w4MEDBw5kimvXrv3333+55AFJmjZtmvCHPzo6WoQzjUpvZEyYMMHb25tLHqnC1KheWLVq1fjx45nijBkzFi5cyCWPCikpKT4+PuXl5YrFbt26HT16VIRr+UDnnDlzpnPnzswYq0+fPnv27OEV6VkePnzo4eHBLPO2s7NLT09v3Lgxr1SShBGhXhg7dmynTp2YotIF2dx5eHh88sknTPH48eM4aA3qT7dmGiMjI4UPOy1YsABdUO3QCPWC0qejlD6iKwYRERHMRqPNmjWzsLDglQcko6SkRCaTMcvExo8f3759e16RniUrKys6Opopenl5jR07lkseieO6rw1o1Ztvvin8Adi5cyfvXEo8OYDCxMRk4sSJ9+/f550IpOPMmTM9evR49ANma2t7584d3omUUPq3ddeuXbxzSRPuEeqRrKwsd3f3hw8fKhY9PT0TExONjY15pVKqsrKyY8eOLVu2jI2NdXV15R0HJGjHjh0TJ04MCwtTut0oX4cPH+7Zsyfz4fzGG2+IcN98aUAj1C+zZ89esGABU/zqq68+/vhjLnlUKCwstLa25p0CpOzhw4empqZi+xZYVVXVpUsX5gB
RU1PTixcvtmvXjlcqaUMj1C9YhwYgcjq0xlsysFhGv1haWgr/OuXl5YlzZykAfVNQUCA8ENjR0TE8PJxHHH2BRqh3Ro4c2b17d6a4cuXKixcvcskDAE/MmTPn9u3bTHHp0qU2NjZc8ugJTI3qI6W7F9b3meKiInrwgB4+pNJSqqqisjIyMiIjIzI2JnNzsrQkGxsyNa1vdACO5HJ68ODx/0pLSS6n0lIyMSFDQzIxITMzatCAbGzIxKRuL69DOwNLDBqhnnrvvffWrFnDFHfs2DF48OAa/fdFRXT9Ot26RTk5dOcO5edTZeXz/ysLC7KzI3t7cnSkZs2oefM6f2QAaENBweOf89xcunOH7t2r0c+5peXTn/Pmzal5c6p+7vSzDBo06J9//lGsGBgYHDx4UDiFA+qFRqin6nLCWVkZZWRQRgZdvkx5eWoIYWhITk7k4kLt2lHz5oQd1EAMSkvp0qXHP+f37qnhBY2MqEWLxz/nTZs+6+dch04PlR40Qv0VGRkpPNhh2bJlYWFh1UoVFZSWRhcuUEYGVT/OW52srcnDgzp0ICcnTV0CQIXyckpJoQsX6PLlGg376qZhQ/L0pA4dqPo++OXl5e3bt2fOILO0tExJSWnZsqWmwsD/QyPUX2VlZd7e3sx2o9bW1mlpac2aNSMiunuXTpyg8+eppEQjCeRyJd+O7e3J3598fEh8RwGANGVn08mTdOECVb85V0dKf6qFmjYlf3/q0OHR3YHo6OjJkyczv2XBggUzZ85UQyR4HjRCvfbbb78NHTqUKX744YerZ8+mw4eJ45bcZmbUqRMFBJCVFbcMIHmZmXT4MF2+XK1Yw06mFhYW1LlzrqurzMfnXvVpWGdn59TUVEtLSy0l0W9ohPpuwIABu3btUqwYGhj8O3asvxgOhTc2ps6dqUcPwo7boF5Xr9LevZSVxTsHEdG4v//+9sQJprhlyxal242CJqAR6rvk5GQfH5+K6jf/Ap2dD7///tPTKrT5HVnI3Jx69qSuXQkryKH+8vNp1y5KTeWd47HE27c7ffttZfVnmbp3737w4EEcwKk1+GTRd56ensKDXY5mZW1OSnr6z1r+C8l8OSspoV276JtvRPL9HXRVZSUdPEhffSWeLkhEwfHxTBc0NDSMjY1FF9QmNEK9d/fuAje3xoJbEWG7dj1Qy9qBOlD6EZCbSz/+SPHxGly5ChKWnU2rV9O+faL6+dmSlLT/yhWmOLZTp07372tw5SoIoBHqtzNnaNUqu3v3Zvfsyfyb6wUFy48d4xLqmeRy+vdf+vZbysnhHQV0h1xOx4/T6tWUnc07SjUlFRVTd+9mijZmZnNeeIEOHaIfflDP07pQA2iE+qq8nLZtox07qLyciCZ06eLt4MD8lsWHD19VywPF6pWbS6tX0/nzvHOALigtpc2baedOEQ6woo4evSL4+zXnxRebPlopffMmffutqGZxJQyNUC8VFtKPPyr2EmNDw9iBA5nfVVxePmPvXu0mq5mKCtq2jXbtYu8mAijKz6fvvxdnL7lRULDk8GGm2NbObkLnzk//ubSUfvmFDhzAz7mmoRHqn0c3S27dYsp9XFwGy2RMceOFC4euXtVWslo6dow2bxbVLR8QkevX6bvvKDeXdw7lpu3ZUyS4B7/ipZdMhacE799Pv/9O1RfUgHqhEeqZrCz66ScqLFT6L2MHDjSr/vdQTjQpPr5KtF9IU1Np3ToqLeWdA0QmI4PWrqWHD3nnUO749evrBXP7g9q1e+lZB9CfP08bNz66iwGagEaoT65epXXrVOyX5mpn91mXLkzx7K1ba86d02yw2lJszNeu0dq1mtoEDnTRf/+JuW3I5fJJ//zDfLU0MTKKHjBA1X926RJt2CDa/1O6Do1Qb1y/XpO/SJ+/8EIza2umOH3v3vui6jSKT/oT0c2btH69ejaKBF2XkUG//CLCpTFPrE1MPHHjBlP8rEsXtyZNnvNfXrlCmzbhXoAmoBHqh5ycGrY
KazOzeb16McXsoqJIwY19UXjSEa9fp02bxPzxB9pw44bIu2BRWZlwAZp9gwazX3ihRv99ZiZt3Yq1M2qHRqgHiopo/fqaTx6+7+cn3Gg05tix9Lt31Z1MTR59Lly+TNu34zNCf+Xni3/ycNGhQzcFd+gX9O7dyNy8pi+Rmkrx8WqOpffQCKWuooI2baLqB/CqZmhgEPfSS8zmLmWVlcKHf8Xiybjw/Hk6coRrFOCkrIw2bhTt6phHMvPzYwSbVPg0bfqBn1/tXujECTp1Sm2xAI1Q+v7+mwQ3JJ4r0Nn5LW9vpvhHaurOS5fUFEtjEhIoI4N3CNAuuZx+/120T0o8EbZrV4ngDl/swIFGNdxNXnG2Iz6erl9XXzR9h0YoaefP09mzdftPo/r3b2BqyhRDd+4sF/ENGCIiuZx++42KinjnAC06eZJSUniHeI59ly9vE4Qc5uX1YuvWNX0JxT14Kyvp11+xWFpd0AilKz+f/v67zv91CxubsMBAppicm7vq9On6xdK8hw9p2zbcLNQXOTkk2kn7/1dZVRUsuLFnbmy8tF+/ur/o/fu0Y0e9YsH/QyOUKLmc/vijnk+ahwcFtWrUiCl+vm/fHXHfiSEiyswk8TdsqL+qKtq2TfxPFKw6ffq8YMvvKUFBrQV/v2rq0fe85GS6eLFeyYCI0Agl68wZqvfWaBYmJov69GGK+cXFc/fvr+cra8Pu3bVaIgQ66ehRun2bd4jnyC8ujti3jyk62diEBwXV/UWfTJP+8w8VF9f9dYCI0Ail6eFD2rNHLa/0jrd3j1atmOLXp05dENmJNkqUldHOnbxDgCbdv08HD/IO8Xxz9u8XTqIs7ttXeA++LtT3l12foRFKUUKCuu6iGxgYxA0caFj9pFylNzzE5cnEkeDUU5COXbtE/tQgEaXk5n4teNShW4sWI9u3V9s1zp4V7qEPtYJGKDm5uXTmjBpfz69Zs/d8fZliwuXLf4jydJvHnnRuHNUkVdevU3IyEYn8z1e40NqAKO6llwwMDJ71n9SaXC7+5UIih0YoOfv2qf2jIbJv34aCnS9Cd+4sFf0iBbp1S/wL66EunmxUpsaOom470tLiBY/ejvb17eLkpOYrXb5MmZlqfk19gkYoLdnZjz/31doLHRo0mN69O1PMzM+PPX5cjVfRFJxrKj3Xrol/0russjJs1y6maGVqulCwAE09DhzQyMvqBzRCaXmywZi6vyaHBATIGjdmigsPHbr1jKMNRSQnh/77j3cIUCtxbgFfXdzx48LteWf06NFccLqLely7RllZGnllPYBGKCGFhZSUpKHXNjUyEj78W1haOishQUNXVCfBBo+gw+7cEf83m5wHDxYeOsQUXWxtQwICNHhV/JzXFRqhhJw+TVVVmnv5V93dB7RtyxR/OndOeLia6Fy5Qnfu8A4BanLyJO8EzzdT2RGeUf37mxsba+R6jyb/09JI/DM0ooRGKBVyeZ23Fa25mAEDTIyMFCtVcnlwfLxc/DfhsNGMNFRU0PnzvEM8x9lbt348d44p9m7T5nUPD01d8tGtkKoqLXwISBIaoVRcvqyFjVQ87O3HderEFI9lZW0U/z5PFy5odLgMWpKW9vQZWbF+/QqOj6+s/sNmZGgYM3CgNq6dmKiNq0gOGqFUaKsVzevVq4mlJVOcunv3g7Iy7QSoowcPxL/OEJ5P8edclA9O/HLx4kHB7objOnXq4Oiojcvn5dHNm9q4kLSgEUpCVRVp6/F2WwuLiBdfZIo3CgqWHT2qnQB1hwcKdV15OYn7RMzi8vJpgg3PbC0s5vbqpb0Q+DmvPTRCSbh+/enGu5qfL/rY37+94Ovt0iNHrty7p+lL10tammgn06BGMjNFftCE0r8Fc158UTiJokFpadq7llSgEUqC4tdkzc8XGRkaxgpueCj9LiwuhYXiP8QcVMnI4J1AlevK5kU87O0/9vfXao7cXKwdrS00Qkm4fFnLF+zdps2r7u5M8ZeLFw+I/D6
cyOOBalr/Oa8VpXfKowULrbVB3G+UCKER6r6Kisd7z2t33i96wAAzwUNRwvVy4lLvMxqBm4cPxfww6LGsrE0XLjDFV9zcBgoevdWGa9c4XFSXoRHqvlu36NH29tpdROdiaxvcrRtTPHf7tvAJKhER/7P/8Cwi/rN7/DRt9aKpkVFU//58Al2/zue6OguNUPfxO6F7Zo8ezQQbJyrdU0Ms7t/Hcd66SsRH7indX2lSt27C7Xm1JDeXqp/9BKqhEeq+nBxeV7Y2M1vYuzdTzHnwYIGYzw3n93ZBvYj1D07pjrsODRrM7NGDSx4ioqoqMU8jixAaoe7j+hOv9HC1Ff/+mybav4eCAwFAN4j1D27BwYPCM1gW9ukjPMJTq8T6dokTGqHuy8/neHFDA4PYgQOZm5NKT2ITC65vF9SdKP/gMvPz4/79lyn6NWs2xteXRxwFony7RAuNUMfJ5Y+fGeL3qHiAs/M77dszxT/T04Vnc4vC/fu8E0DtlZZSaSnvEEqE7txZKnjGP3bgQCND3h+tmt95WEp4/2lBPRUXP95Lmuu+i0v79WtgasoUQ3fuLBfhHfuiIt4JoPZE+aeWcPnyH4KtDYd7e/ds1YpLnmrwTH1taOZwLNCahw95JyAicrKxmRoUFLFvn2IxJTf3s3/+eU3w3D1nd++SgwPvEFBLd+6IbZfRqqqqCf/8wxQtTEwW9+3LJQ9LtCu3RclAB06SAxVu3KDvvuMdgoiouLzcc+VKsW83CqBJES++OEewJT0fzZvT2LG8Q+gMTI3qONHsQSyi78IAPLSwsZkaFMQ7xf8rL+edQJegEeo4Me1n9ra39wutWz/6taOVFdcsANrQwsbG9P+3El3ar5+liQnfPE+J6ZNB/NAIdZzIziaNHTjQ2sws4sUX/xoxgncWAI1r3ajRhU8+eVkmC3B2Hu7tzTuOApF9MogcFsvoOO1vbK+Sb9Om10NDbczMLop1HxAA9ZI1bvzniBEFpaUGouo9IvtkEDk0Qh0neGiBOxszMyKyNjUdwGXf/ecyMaGWLXmHgFoqLqabN3mHUML7/1cgP/qxFxHxfTKIGRqhjuO7jdOztWrUKP7dd3mnUMbZmd5/n3cIqKXsbPrmG94hdIpYPxnECfcIdVyDBk9/jSdhakLxHQNdYWnJO4Guwc95baAR6jhjY7KwePxrUd2iEC3BuVGgA6ys6NGmZfi2V0P4Oa8NNELd17Ah7wQ6pVEj3gmg9gwMyMbm8S+gJvBzXhtohLqP1+GfOsrOjncCqBP8wdUK3q7aQCPUffb2vBPoFLxdOgp/cLWCt6s20Ah1n6Mj7wS6w8QE35R1VdOmvBPoDisrLJapFTRC3de8Oe8EuqNZM9xk0lX4Oa85JyfeCXQMGqHus7HBepmacnbmnQDqyt6exPbQumjh57yW0Agl4f+3uobnwBuluwwMSAwH3uoEvFG1hEYoCS4uvBOI26OHz4yM8AGh2/BzXhPm5phGri00QklwdcWtL1UevTmtWpF4TsmBOmjXjncCXeDq+njzAagxvF+S0KAB7go8n4cH7wRQP3Z29P+bXMMzubvzTqB70AilwsuLdwJxMzBAI5QC/JyrZmJCMhnvELoHjVAqvLywGaMqrq54skoK2rfnnUDc3NxwAFMdoBFKRYMG5OZGhM0Yn8HPj3cCUAdbWyz9VQU/53WCRighnTrxTiBWVla4cSId/v68E4iVnR21acM7hE5CI5QQFxdsMPiU4hSxvz/W0UmHh8fjkyiA0bUrJoTqBp8OEmJgQAEBvEOIxpNPBBMT6tyZaxRQK0ND6taNdwjxsbDAvGidoRFKi48Ptltj+fvjfHOp6dQJf6asbt3wmGydoRFKi6EhvfAC7xBiYmpKQUG8Q4C6mZpS9+68Q4iJpSVGyfWBRig5vr64U/hUYCCempCmzp0x+fFUz554aqI+0Aglx8CABgx4/Gs9f6bQxoYCA3mHAM0wNqZ+/XiHEIcmTXAXvJ7QCKXI1fX
xLip6voRswADcNZEyLy88LUBENGgQFkXXE94+iXrpJX0/vE0mI09P3iFAwwYPJmNj3iG48vHBt4H6QyOUKGtrGjjw8a/1cILUwoIGD+YdAjTPzo769OEdgh9r66f3QaAe0Aily9dXfydIBw8ma2veIUArunbV0yGRgQG99hpZWPDOIQVohJI2ZAg1asQ7hNZ16oRJUT1iYEBvvKGPa4ODgnBSsbqgEUqauTkNG6ZfN1GaN386Jwx6wsqK3nxTvxaMtGlDvXrxDiEd+vSjo5+aN6chQ3iH0BZraxo+XL8aPzzSurUefQGytaVhw/Sr8WsY3ko90L49vfhitYokl8+YmtI77+DWoP7q3FkvdlexsKCRI3FrUL3QCPXDCy9UO7xGestnjIzo7bepWTPeOYCr/v3Zk3sl9p3PxIRGjKDGjXnnkBo0Qr0xaBD5+PAOoRlGRjRsGBYOwOOFlIpnT0rpO5+JCb3zDrVowTuHBBnIJfaNCVSQy2nHDjp7lncOtXrUBd3ceOcA0aiqoq1bKTmZdw61ejQWbN2adw5pQiPUM3I57d5Nx47xzlFXcnm17/impjR8uJ4+RgYqyOX055905gzvHGpiYUEjRmAsqDlohHrp339p585qt0+YBqMTbGzonXeoaVPeOUCU5HI6dIj27eOdo97s7HBfUNPQCPXVpUu0dSuVlPDOUVctWtDbb5OVFe8cIG7JyfT771RezjtHXbm40JtvYo2opqER6rH8fNqyhW7d4p2jZhTHrF27Ur9+ZGTENRDoiNxc2rKFcnOrFcU/BWJgQN27U69eYs8pCWiE+q2ykhIS6NgxnVll3qABDRlCMhnvHKBTystp1y46dYp3jhqzsaHXXsPNb61BIwSia9do+3a6e5d3jufx8qJBg8jSkncO0E0ZGbRjB92///gfRTso9POj/v3J3Jx3Dj2CRghERFRRQUeO0OHDVFHxuCKqjwk7O3rpJWrblncO0HHl5XTgAB0/TpWVvKMo4+BAL72EZyS0D40QFNy/TwkJdOGCiGZKLSyoRw/q0gV3BEFt8vJozx5KSeGdQ0GDBvTii9SxI3YQ5QKNEARycujAAUpJ4dwOLSyoa1fq1o3MzHjGAKm6dYv276f0dG1cS8X8SoMGFBBAXbqQiYk2koAyaITwDHl5dPw4JSZSWZm2L21nR126kJ8fmZpq+9Kgb3Jy6PhxunDh6U0BrbG3p65dyccH56Vwh0YIKpWW0sWLdO4cXb+u8WuZmJC7O/n6Ups2Iro9CfqgpIQSEykxURtPE5makocHdexILVtq/FpQM2iEUDP37lFKCqWlUVYWVVWp85UtLMjVldzdqV07DAGBs7w8Sk6m9HS6fl3NtwYaNKC2bcndndq2xRBQbNAIoZbKyujqVbp2ja5fp1u3qLS0Li/SsCE1b07OztSqFTVrhvEfiE5JCV29Slev0o0bdPt2HW8Q2No+/jlv3ZocHPBzLlpohFA/9+/T3buUl0cFBVRYSMXFVFb2dOc2Q0MyMyNzc2rQgKysqFEjsrOjJk3wjBToErmc7t+nO3coP7/az/mTb4FGRmRqShYWT3/OGzemxo2xzktXoBECAIBewzMrAACg19AIAQBAr6ERAgCAXkMjBAAAvYZGCAAAeg2NEAAA9BoaIQAA6DU0QgAA0GtohAAAoNfQCAEAQK+hEQIAgF5DIwQAAL2GRggAAHoNjRAAAPQaGiEAAOg1NEIAANBraIQAAKDX0AgBAECvoRECAIBeQyMEAAC9hkYIAAB6DY0QAAD0GhohAADoNTRCAADQa2iEAACg19AIAQBAr6ERAgCAXkMjBAAAvYZGCAAAeg2NEAAA9BoaIQAA6DU0QgAA0GtohAAAoNf+D6rGEwQ8soCtAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": 
"display_data", + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAAB38klEQVR4nO3dd1wU1/o/8GfpUhVR6Rqxa2wURUCx9wL2ihWTmGJioihJ7u8msWCMNyaaBGJHYxdb7A0EVIq9d0FBpChNOvv7Y/2uw64iLLtzZnY/79f9w3nEmSd7lQ9nzpwzEqlUSgAAALpKj3UDAAAALCEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApyEIAQBApxmwbgBAY0pKKCODsrLo5UvKy6OCAioupqKi179rYkJGRlSrFllaUu3aZG1NdeuSvj7TjgGAAQQhaJesLHr4kJKS6OlTysoiqbQaf1ZPj+rXJwcHcnamxo3J3FxjXQKAgEik1fpOASBMT57QjRt0+zZlZantnLa21KIFtW5NNjZqOycACA+CEMQsP58uXqSLF9WZf8rs7aljR/rwQzIy0uBVAIARBCGIU2YmxcTQlStUVsbTFU1MyNWVOnfGLVMALYMgBLF58YJOnaJr16o3/6cuBgbk7k7e3mRqyuDqAKABCEIQj6IiioykuDj+RoHvYmJCXbtSp06khwVIAKKHIASRuHWLDh6k3FzWfXDUr09Dh5K9Pes+AKBGEIQgeEVF9O+/dPUq6z7eRk+PvLzI1xdDQwDxQhCCsKWk0I4d9PIl6z4q5eBAI0eSlRXrPgBAFQhCELCLF+nff9nPCFaFqSmNHEmNGrHuAwCqDUEIgiSV0okTFBPDuo/q0NOjQYOoQwfWfQBA9SAIQXjKy2nvXrpyhXUfKunRg3x8WDcBANWAIASBKS+nnTvp5k3WfdRA167UvTvrJgCgqvCoGwiJVEq7d4s7BYkoKooiI1k3AQBVhSAEITl4kK5fZ92EOpw+TfHxrJsAgCpBEIJgxMZSQgLrJtTn0CG6e5d1EwDwfghCEIZ79+j4cdZNqJVUSrt2UWYm6z4A4D0QhCAA2dm0ezebTbQ1RPbfUlRE27ZRSQnrbgCgMghCYE32gExBwetfaweJ5PUv0tPpyBGmrQDAeyAIgbXYWEpKev1reX5ok8RETBYCCBmCEJjKzKTTp1k3oXkHDlBxMesmAODtEITA1MGDVFrKugnNy8nRibwHECcEIbBz+zY9eMC6Cb6cP48nSAGECUEIjEil2rZeonLl5XTiBOsmAOAtEITAyJUrlJHBugl+3bxJqamsmwAARQhCYEEqpTNnWDfBgm7+VwMIG4IQWLhzR0cnzG7doqws1k0AQAUIQmDh/HnWHTAilWIzbgChQRAC77Ky6OFD1k2wc/kylZWxbgIA3kAQAu9E+up5dSkooNu3WTcBAG8gCIF3166x7oA17XjnIoC2QBACv9LTdfQxGa67d3ViPx0AkUAQAr/u3GHdgQCUlNCjR6ybAIDXEITAL93ZU61y+BwABANBCDwqL6fkZNZNCANGhACCgSAEHqWl4XXtr+GjABAMBCHwCDttypWXU1oa6yYAgAhBCLzCt36u589ZdwAARAhC4BUWTnDh0wAQBgQh8OjlS9YdCMmLF6w7AAAiBCHwKieHdQdCgk8DQBgQhMCX4mI8J1nBq1esOwAAIgQh8KewkHUHVC6VElF2YeHPMTExSUmFbPc5KyhgeXUA+D8GrBsAncF6OPg0J2fE9u1fenpaGBnNPXaMiAz09NrZ2no5Obna2/s2auRsZcVrQ6w/EACQQRACX8rLGV48Oilp5Pbtz/Lypu3dO75tW1mxtLw8MSUlMSVFdmhnYeHt7Ozl5OTt7NzBzk5PItFsT0w/EACQk0ilUtY9gG5IT6c//mBy5bDExM8O
Hiz+v9fhmhgYvPemqLmRUTtbW3ku1qlVS/1tSST0/ffqPy0AVBNGhMAXfX3+r1lYWvrRgQMbLl1SKL73D+YVF8ckJcUkJRGRvp5e87p1vZ2dvZydXe3sWtevr57mDPCvD0AQ8E8R+GJkxPMFn+TkDN+2Le7pU4W6lYmJq739rfT0lNzcqpynrLz8Rnr6jfT0sMREInKwtOzi5CT7XwdbW0OVA573DwQA3gpBCHyR312USknT029EZx4/HrljR1penkLd29l5x6hRtubmRJSSmxuTlBSdlJSYmhr/9Kn83mnlnubk7Lh+fcf160RkqK/ftkED2e3T7h98YGNqWo0WNXG7FURBKqWiIioqej1PbGxMhoZkaMi6Ld2FOULg0dKl/KwZCEtM/PTgwRKlYAt0df19wACjt43h8ouLLz57JsvF2OTkLJX6bFynjuz2aZUet2nYkCZPVuEqICZSKT1/TqmplJ5OmZn04gXl5r79X4GBAZmZUe3aZG1NdeuSnR3Z2eGnJX4gCIFHf/2l6X23C0tLP/n333UXLyrUjQ0M/hg4cGqHDlU8z4MXL6KTkmS5eDM9XYV/JJbGxh4ODl7Ozt7Ozl2cnEyVf95v25b8/Kp/YhCDjAy6e5cePKCkJCouVv089epRo0bUpAk1boxJZc1BEAKPtm2jW7c0d/onOTn+27bFK00KOlpa7ho92sPBQbXTpuXlxT19mpiaKstFFZbhG+jpNfu/x226NmzYqHZtIqJu3cjXV7WWQKCysujKFbp+nTIy1HxmQ0Nq1ow+/JCaNiU9bISiZghC4NHJk3TmjIbOHfX48ai3TQr6NGy4Y+TIBubmarlKaXn55WfPopOSYpKTIx89ep6fr8JJXi9YHDTIddAgDw8PIzw1I3ZSKd2+TXFx9PChxq9lZkYdO5K7O1lYaPxaOgNBCDy6fp127tTEiSuZFFw5YIDqD3a+j/xxm5jk5IupqeXV/9dkZmbWvn17b29vLy8vLy8va2trTfQJmlJeTpcvU3Q0ZWXxel19fWrblnx8qE4dXq+rpRCEwKMXL+i339R7yreuFCQiEwODPwcNmty+vXovV4ncoqLzT59GJyUlpqREJyW9VGlv1caNG3t5eclysVWrVhLNP14LKpJK6eZNOnGC7wjk0tOjjh3J15fMzJj1oBUQhMCvX34hpbuXKkvOzvbfti3h//ZIk3Oysto1apS7qpOCNVdWXn4rIyMmOVmWizfS01U4SYMGDdzd3V1dXb29vb29vU1MTNTeJ6jo+XM6eJAeP2bdBxERGRtT9+7k4cHDqiRthSAEfu3cSdevq+VMpx89Gr1jh/IsXdeGDXeMGlVfSD8jp+TmJqakyHIxISWlqPqP2xgaGrZt21Y2WOzWrVt9de1uA9VVXk5RUXTmjOC2irW3p2HDqF491n2IEoIQ+HXpEu3dW/PThCUmzvr331Klb0aanhSsKan0VWnphdTUxEaNYq5ePXXqVIZKjxfa2dnJbp96e3t36NBBD48R8iMri3btIqU7EEJhYEC9emFoqAIEIfArP59++YVq8Lcur7h46t69O5SGlSYGBn8NGhTA46Sg6iws6MsvZd+tHjx4EB0dHRMTEx0dffPmTRX+PVpYWHTq1MnLy8vV1dXHx6e2bG0GqN2NG7R3b40WBfKjRQsaNoyMjVn3ISYIQuDd+vUqT67cz8ry27btqtKqfCcrq92jR7vZ29e4OV54eFD//srl7Ozs+Ph4WS7GxMQUVH93G319/ebNm8sHi40bN1ZHuzpPKqVTpzS38kf9bGxo7FjCE8hVhiAE3iUm0oEDKvy5w/fujdu164VSPPg2arRt5EhBTQq+x7Rp5OhY+ZeUlpZevnw5Ojo6MTExKirqsUo/OtjZ2cmetfHy8sKCRRWVldGePXTtGus+qsnUlMaNI3bPi4kLghB4V1hIv/xC1XlgRCqVLo2JCT55suxtk4KrBg40ENEkmY0NzZpV3T+UkpIiu30aExNz8eLF
8uo/qSFbsCjLxR49etStW7e6Z9BFpaW0bRvdu8e6D5UYGdGYMfTBB6z7EAEEIbCwbx8pbQf6LnnFxVP27Nl544ZC3dzIaO3QoSNbt1Z3cxrWty917lyTE+Tl5V26dEmeiy9evFDhJFiw+H6lpbRlCz14wLqPGjAwoLFjCXfI3wdBCCykpdFff1XlC+9lZflt3Xrt+XOFuou1dcTo0R82aKCB5jTJ2Ji+/FKNDzKUlZXdunUrMTFRlos3lH5cqAorKyt3d3d5LtbCGw+IqLyctm6lu3dZ96Eq+cvODA1p4kRycmLdkKAhCIGRzZvfe8fp0N2743fvVp4U7NekyT/Dh9cR4/drLy/q1Utzp3/27Fl8fLwsFBMSEoqKiqp7BgMDg3bt2smeQe3WrVvDhg010acI7N1LStsViYw8C2vVoqlTycaGdUPChSAERpKTae3ad/2mbFJwwYkTCrt3Sojmensv7NFDX0STgnKGhvTFF7zthlVSUnLlyhXZ7dPTp0+nq7S7jY4uWIyJoePHWTehVnXq0IwZeLvhuyAIgZ1//nnrrafcoqIpe/fuetuk4Lphw0a0asVLc+oj/8Hc25t69mTVhWzBouwmqmqP25ibm7dr106ei3W0dbvn+/dp8+aaLHUVKBcXGj8ea+3fCkEI7KSn019/KWxVdTcz02/btutKk4JNrK0jxoxpI96txczM6LPPBLLMOScnJy4uTjZYjI2NffXqVXXPIFuwKF+b0Vp0jyy9S24u/fUXVf8DEYfu3alrV9ZNCBGCEJg6coTOnZMf/XvnzoTdu5Xf29C/adPN/v6inBSUGzqUBLnrTWlp6e3bt2XTitHR0Q9VeqOera2tm5ubLBTd3d2NhZH31SaV0qZN4n5MtHJ6ejR1KhYXKkMQAlPFxfTHH5SdXfmk4KKePfVEfUunUSOaNEkUd6VSUlLkz6DGx8cXV39HMe7+4L6+vvVEtA10QgL9+y/rJjTMxoZmziQDA9Z9CAuCEFh78CB39eqAPXsibt5U+B0LY+N1Q4cOF92koAIjI/roIzG+QDU/P//ixYuyUIyNjc1S6cV7sgWLspuogn7cJjeXVq2i6j9nKz5du1L37qybEBYEITB2584dv969byQlKdSb1q0bMXp0a/FOCsoNHkwdO7JuQg1qvj+4paWlh4eHbLDYpUsXU1NTTfSpot276epV1k3wQl+fZs0S409mmoMgBJYOHDgwYcKE7OxshfrAZs02+fvX1oJX0bZpQ8OHs25C/dLS0uLi4uQ3UQuVpnXfy8DAoFmzZrJpxa5duzZq1EgDbVbZ06e0ejXLBnjWsiWNGsW6CQFBEAIbUql06dKlCxYsUHiOX0smBWUaNKBp08jQkHUfmiXfHzwmJiYyMvK50hO/VSFfsOjq6spgf/ANG+jRI16vyNz06XhqRg5BCAzk5OQEBATs2bNHoW5hbLxh2DC/li1ZNKVuZmY0fTrp3tsB1bU/uCwXvby8rDX9OqHHj2n9es1eQoCaNqVx41g3IRQIQuDb7du3/fz8bio9GtOsWbOIn39udemSiNcyy9fOGxlRQACJ5f2IGpObm3v58mX52oyXL1+qcBKN7w9ehd3+tNNHH5HoduvVDAQh8Gr//v0TJ05UnhQcNGjQpk2brKys6MIF2r+fSW9qY2BA48cT20kv4ZHtDy4LxcTERNX2B2/QoIG7u7vsGVRvb2+Tms8ip6fTH3/U9CQi1a4dDRvGuglBQBACT945KSiRzJ07d9GiRW8erE9MpH//fT0ulI+xxMLQkMaOxUvg3is1NTUhIUEt+4N7e3t369atvmoPGB88SPHxqvxBLWBgQF99hQ1ICUEI/MjJyZk4ceK+ffsU6paWlhs2bBim/GPp1au0Zw9Vf3qJMRMTGjcOr7yprlevXl24cEH2DOqpU6cyMjJUOIkq+4OXltIvv1D1H3nVHv37k4cH6ybYQxCCxl25csXf3//+/fsK9ebNm0dERLR816MxDx7Qjh1i
+iZVpw6NG4eX3dRczRcsWlhYdOrUSfYMqo+PT+13PbJ08yZt317DbsXNwYGmT2fdBHsIQtCsbdu2TZs2LT8/X6E+ePDg8PBwKyuryv5wZiZt3UoqjQ/49sEHNGIECWqFuFbg7g8eExNToPRyyveS7Q8uX5tRYX/wnTvp+nV1titGX3yhg882K0AQgqaUlZUFBwcvXbpU4e/YWyYFK1FcTAcOCHrLD4mEfHzI11dkc5kiJF+wmJiYGBUV9fjxYxVOYmdn9/qlGZ6e7lFRxmVlau9TZPr1o06dWDfBGIIQNCIrK2vs2LFHjx5VqFtaWoaHhw8ZMqR6p7t6lQ4douqPBjSuTh0aNoycnVn3oYvkCxYTExPj4uJKSkqqewZTQ8MOdnaudnbezs7dP/jARjcH9E2a0PjxrJtgDEEI6nf58mV/f/8HSq+zadu27e7du11cXFQ5aV4eHT4soBtZenrUqRN17671G8eIQl5e3qVLl+QL+V+8eKHCSRrXqePl7Ozt7Ozl5NSqXj31L1gUJkNDCgoiwW6GzgsEIajZ1q1bp0+frjwpOHr06DVr1piZmdXo7A8f0pEjlJZWo5PUnIsL9elDWrAhuDaSLViU74Oq2oJFS2NjDwcHeS7W0u4fd3R+uzUEIaiNbFIwJCREoa6vr79w4cK5c+eq50dsqZSuXaPISMrMVMPZqsvJibp3xzJBEXn27Fl8fLz8Jqoq+4Pr6bWztfVycnK1t+/WsGFD7Xu0ROcXUSAIQT0yMzPHjBlz/Phxhbq1tfWWLVv69Omj5utJpXTrFp09S8nJaj7zW0kk1KwZeXpSw4Z8XA40o6Sk5MqVK9HR0TGnTp0+fjxd6b5FVdhZWMiGia729p0cHAz19dXeJ986dKDqTttrFwQhqMGlS5f8/f0fPnyoUG/Xrt3u3bsbN26swWunpdHFi3TtGr3rm1oN96apU4fatqUOHajylR4gLrdu0bZtD168iE5KSkxJiUlOvpiaWl79b4bmRkbtbG1luejt7FxHpLu0ODrStGmsm2AJQQg1tWXLlunTp7969UqhPmbMmNWrV9d0UrCKpFJ69Ihu36Z799Rwy1QiIVtbatKEWrYkOzt19AcCExtLx45xCzlFRXFPn8py8UxSUnb176Dq6+k1r1vX1d5elotieqe0qSl98w3rJlhCEILqSktLv/3223dNCs6bN49JV5STQ0lJ9PQppaXR8+fvHCkqsLKi+vXJ1pYcHcnJCRswarlDhygu7l2/WVpefjsjIyY5OTopKTop6aFKz6Dampu7yULR2dnd3t7YwKAG7WqM/H7JggW6/PwzghBUlJGRMWbMmBMnTijU69atu3Xr1l69ejHp6i2Kiyk7m3JzqaDg9YZtRUVkbEwSCZmYkJkZmZuTlRUJ8/sUaMj27aT0IrB3ScnNld0+jU5Kin/6tLj6a/AN9fXbNmggu33q26hRPX5uk1TLZ5+Rpt/7KGAIQlDFxYsX/f39Hym91Lt9+/a7d+/+AA9VgsCtX08qbUyTX1x88dmzmKSk6KSks0+eZCrNCFSFbMGibCF/Bzs7PSEsWNTtFRQIQqi2zZs3BwYGKk8Kjh07dvXq1aa6uT0HiEtoKD17VvPTyB63keXizfR0Fb6ZXv3kkzZCmE2cOJE0+lCbsOF2EFTDuyYFDQwMfvrpJ2aTggDVVVqqltM0rlOncZ06k9q1I6K0vLy4p08TU1NluVhYhUvUNjFpYm39d2Li0BYt6rO9X6qmD0SkEIRQVRkZGaNHjz558qRC3cbGZuvWrT179mTSFYAqNHAnrIG5+eDmzQc3b05EpeXll589i05KiklOjnz06Pk7nthysLRs9vvvydnZD1++XMT2X5Do3v2pVrg1ClVy4cIFf39/5f3+O3TosHv37kaNGrFoCkBVq1bx+XqvlNxc2TDxXQsWLY2NH82ezXIZ4tix1KwZs6uzhhEhvN+mTZsCAwOVXwU3fvz4sLAwTAqC+PC7VMDewmJk69Yj
W7cmopeFhWeTk48/ePDruXPyRMwpKloZF/ddt258dlWBbj81rdM7jsN7lZaWBgUFTZw4USEFDQwMlixZsmnTJqQgiJKxMasr1zYx6d+06S99+37k5sat/3ruXG5REauuyMSE2aUFAEEI75Sent67d2/lR2NsbGyOHDki3kdjblZ5ARloLQH8ABfk7W3E2ac0q6AgLDGRWTe6vYMEghDeLjEx0c3N7fTp0wr1jh07JiQk9OjRg0VTNSUb4LZr1y4yMpJ1L8CUhQXrDsjJymp827bcyrLY2ILqv15YPQTwgTCEIIS32Lhxo7e3d1JSkkJ9woQJ0dHRDcX5Bgb5ALekpGT06NFPnjxh3RGwI4wt1IN9fPQ5b8R9lpe37tIlBn2YmWGOEOCNoqKimTNnBgQEKLy2TTYpGB4eXkuct1AUBrhpaWlDhgxRfvwHdEXduqw7ICJysbYe2aoVt7IkOlqFLdxqShifBkMIQngjJSWle/fuYWFhCvV69eodO3ZMvJOCYWFhXbp0URjgXrx4cfv27axaAsbq1WPdwWvfdevG3WItOTt785UrfDchmE+DFQQhvBYTE+Pm5nb27FmFuqura0JCgq+vL4umaqqoqCgwMHDmzJnFxcXcumyAGxAQwKoxYKx2bYE8J9mqXr1BFRfwLTxzpozn5e22trxeTngQhEBEFBYW1qNHj9TUVIV6YGBgbGyss7Mzk65qKCUlxdfX9++//1aoi32AC2ogkZC9PesmXgvu2pV7eD8ra+eNG7x24OjI6+WEB0Go64qKimbMmKE8ZjI2Ng4NDQ0NDTUyMmLVW03IBrjnzp1TqIt6gAvqJJhnvjwcHHq7uHArP0ZFKe8+oynGxtSgAU/XEioEoU57+vSpr6/v6tWrFer29vanTp0KDAxk0lXNaeUAF9RMSC8LC/bx4R5ef/78wJ07PF27USMSwnugmEIQ6q7o6Oi3jpm8vLwSEhI8PT2ZdFVDlQxww8LCxDvABfVzcBDOKvJujRp5V/z5bGFUFE/XbtKEpwsJGIJQR4WFhfXs2fOZ0ivZAgMDT548aWdnx6SrGnr69Gm3bt3eOsA9ffr0jBkzmHQFAqWnJ6htphdUHBTGPX16/MEDjV9VIqHmzTV+FcFDEOqcoqKiadOmvXXM9Pfff4t3zCQb4J4/f16hLhvgdu7cmUlXIGitW7Pu4I3+TZu6VXx+h49BobOzju8pI4Mg1C1Pnjzp2rXr2rVrFeoODg6nT5+ePn06k65qTjYpqGUDXNA4Fxdi+zrciuZXHBSefvQoWml3JzWruMebzkIQ6pAzZ864ubnFxcUp1L29vcU7ZpIPcEsqbtIo9gEu8EFPj9q1Y93EG34tWrSpX59bWXTmjAavZ2QkqDExQwhCXSGbFExLS1Ooy8ZMtuJcUVvJADcyMlK8A1zgj5ubcJ6ZlEgk87y9uZVDd+8mpKRo6npt2zJ8HZWgIAi1X2Fh4dSpU986ZlqzZk1oaKghvy8pVZfKB7idOnVi0hWITJ061KIF6ybeGNumTdOKO38uiY7WyJUkEhLnTSBNQBBqueTk5K5du65bt06h7ujoGBUVNXXqVCZd1ZxWDnCBjYqDMLb09fS+6dKFW9l948a158/Vf6WWLbHXthyCUJtFRUW5ubnFx8cr1H18fBISEjw8PJh0VUOFhYVTpkzRvgEuMGNvL6h1FJPbt29Yu7b8UEoUovZBoURC3bqp+ZxihiDUWmFhYb169Xqu9LNkYGDgiRMnGohzUyXZAHf9+vUKdbEPcIGxnj2FM1NoqK//VcXtLLZcu3Y3M1Od12jblio+laPjEIRaqLCwcPLkycpjJhMTk3Xr1ol3zKSVA1wQhPr1ydX19a952+Tz3QJdXe04y/vKyst/jo1V29mNjKhnT7WdTSsgCLVNcnKyj4/Phg0bFOpOTk5RUVGTJ09m0ZQaaOUAFwSkRw8yNSUiIQwNTQwMPq/4tNf6S5eSsrPVc/Zu3bCIXgGCUKucPn3azc0t
ISFBod61a9eEhAR3d3cmXdWQtg5wQVhq1aL+/Vk38cYn7u51OFuhlpSVLVd6V6gq7O3xsKgyBKH2CAsL692791vHTMePH68vzikBbR3gghC1aUMtW7Ju4jVLY+PPKt7tD01ISM3NrdFJDQxo2DDSw7d9RfhEtEFeXt6oUaNmzpxZWlrKrZuYmKxfv168YyatHOCCoA0eTJaWrJt47YvOnS04C94LS0t/U9pKt3p696Z69WraljZCEIre/fv3u3TpsmPHDoW6k5PTmTNnAgICmHRVQ1KpdMWKFdo3wAWhq1WLRo4kfX3WfRARWdeq9ZGbG7fyR3z8i4ICFU/XujXhgbJ3QBCK2+HDh93d3a9evapQ9/X1TUhIcKv4r0gs8vLyRo8ePXv2bC0b4II4ODrSgAGsm3htjqdnLc7f9pyiot+VtlKqkgYNaOhQtbWldRCEYiWVSkNCQgYNGvTixQuF3woMDDx27JhIx0xaOcAFkenYkSpu78JKA3PzqR06cCsrzp3LLSqq3lksLGjcOMKPj++GIBQl2aRgUFBQWVkZt25ubr59+/bQ0FADAwNWvdWEVg5wQZR69RLIK4rmeXkZcW7VZhUUhCYmVuPP16pFEyYIZ+JTmBCE4nPv3j1PT8+dO3cq1F1cXGJjY0eOHMmkqxp61wBXIpF8/vnn4h3gglhJJDR0KLVqxboPcrKymlAxkpfFxhZUXEr0TiYmNGECNpF5LwShyBw6dMjDw+PatWsK9X79+sXHx3/44YdMuqqhSga427ZtW7FihUgHuCBueno0fDgJ4N/UAh8fA86ah7S8vLUXL77/j9WqRRMnUsW33sNbIQhFo5Ix07x58w4cOFCnTh1WvdWEVg5wQUvo6ZGfH7F+pZeLtfWIimPTkJiY4oo/NSqysqKpU5GCVYQgFIfc3NyRI0cGBQWVl5dz67JJwSVLlugL44Hv6tLKAS5oFYmE+vWj/v3ZrkP/rls3Pc7eb8nZ2ZuvXHnnVzs60vTpZGPDR2daAUEoAnfv3vX09Ny1a5dCvUmTJmfPnh0xYgSTrmpIWwe4oJ08PGjiRDIzY3X9VvXqDW7enFtZeOZMacUfi1/r2JEmTyZzc5460woIQqH7999/PTw8rl+/rlDv379/XFxcmzZtmHRVQ9o6wAVt1qgRffQRubiwuv733bpxtwO/n5W188aNCu/KMDGh4cNp8GCBbAggIghC4ZKNmYYMGfLy5UtuXexjJq0c4IJOMDen8eNpwAAyMuL/4h3t7HpVjOEfIyPf/CDZpAl9/DGJ8ydj5iRSAbx8C5Tl5uYGBAREREQo1C0sLNatWzd8+HAmXdXcv//+O2HCBIVoJ6IBAwZs3ry5NufF3ADClZNDhw/TzZs8Xzby0SPfim+l3jNmzFB3d+rTBxFYEwhCIbpz546fn9+NGzcU6k2bNo2IiGjdujWTrmpIKpUuXbp0wYIFCrdDJRLJ3LlzFy1apIdN8UFcHj2i48fp6VM+r9l13bozjx/LDzs2bZpw9aqEszc3qABBKDgHDhyYMGFCttJLOAcOHLhp0yaRjpkqGeCuX7/e39+fSVcANSWV0t27dOYMPXnCzwUP37vXf9MmbuXo0aO9e/fm5+raCkEoINo6Zrpz586wYcNuKt1HEvUAF6CC5GQ6f55u3aLKl/fVnI2Nxx9/xHNuF3Xr1u306dOavai2QxAKRU5OTkBAwJ49exTqFhYWGzZs8PPzY9GUGmjlABfg7QoK6No1un6dkpJIvd9azc2pVStq04acnCIiIhRuokRFRfn4+KjzcjpGG4MwL4/S0+nFC8rJobw8KiigwsI3v2tsTMbGZGZGFhZUpw7Z2FCdOiSRvPt0fLh9+7afn5/ymKlZs2YRERGtBLDhoQq0dYAL8H6vXtG9e/TgAT1+TEqPhlWVoSE5ONAHH5CLC9nby79NSaXStm3bcreh6Nev36FDh2rctO7SiiAsL6cnT+jRI0pOppQUevWqen/c0JBsbcnRkRo2
pEaNiPdp5/3790+cOFF5zDRo0KBNmzZZWVnx3I9a5OTkTJo0ae/evQp1sQ9wAaotP59SU+n5c8rMpJcvKSeHXr2igoIKQ0YjIzIxIUtLsrQka2uysSFbW6pX713b2WzevHnChAncSnx8PF7PojIxB2FpKd25Qzdu0L17VN0XdL2Lnh41bEgtW1KrVjzsIqGtY6bbt28PGzbs1q1bCvVmzZrt2bOnZcuWTLoCEJyiIjI0VGHztrKyspYtW969e1de8ff3V16bC1UkziBMTaWEBLp+XW35p0xPj1xcyNWVmjXT0I3TnJyciRMn7tu3T6FuaWm5YcOGYcOGaeKiPNi3b9/EiRNzcnIU6qIe4AIIzerVq2fMmCE/lEgkV65cEelWU8yJKghlTyrHxFBSEn8XrVOHOnemjh1JrW8CunLlir+///379xXqzZs3j4iIEOmYSTbAnT9/vsJfKrEPcAGUFRYWmpiYMGygpKSkadOmjzlrCsePH7+p4soKqCLxBOG9e3TyJKWmsrm6uTn5+JCrq1o28du2bdu0adPy8/MV6oMHDw4PDxfpmCkrK2vcuHFHjhxRqFtaWm7cuHHo0KFMugJQu+jo6P/3//6fnZ1deHg4205+++23L774Qn6or69/8+bNpk2bMmxJpMQQhBkZdOgQPXjAug8ia2vq25eaNVP5BGVlZcHBwUuXLtWyMdOVK1f8/PweKP1/1Lx58z179rRo0YJJVwBqJJVKDxw48NNPP8XFxZEwUqewsLBx48apnOHBjBkzwsLCGLYkUsIOwrIyioqimBiNr1GtlhYtaOBAFd5ykpWVNXbs2KNHjyrULS0tw8PDhwwZoqb++PauAe6QIUPCw8MtLS2ZdAWgXsJMnZCQkKCgIPmhoaHh3bt3GzZsyLAlMRLw+CMtjcLCKCpKWClIRLdu0R9/kNJ7kSp3+fJld3d35RRs27bthQsXRJqCZWVlQUFBY8eOVUhB2fsx9uzZgxQErWFiYvL5559zK+vXr0/i83mFt5k1a1bdunXlhyUlJcuXL2fYj0gJNQgTEmj1anr+nHUf71BQQDt30r59VFpalS/funWrl5eX8p3D0aNHx8bGurB7w1lNZGZm9u/fPyQkROGmgrW19aFDh5YsWSJhvU0BgHrNmjWL++4zIaSOubn5p59+yq2EhYWlsnqWQrSEF4RlZbRnD/37bxUzhqWLF2nNGlJaCM/1rjGTvr7+kiVLtmzZYsbundc1IRvgHjt2TKHetm3b+Pj4vn37MukKQKMsLCw+++wzbiU0NJR56nz++ecWFhbyw8LCwhUrVjDsR4wEFoQFBbRhA12+zLqPKnv2jFavppSUt/5mZmZmv379QkJCFOrW1tYHDx6cN2+eSMdMW7du7dKly8OHDxXqsgFu48aNmXQFwIMvvvhCIXV+++03hv0QkbW19UcffcSt/PHHHy9evGDVjxgJKQhzcmjtWkpOZt1HNeXl0YYNyg+1Xrp0yd3d/fjx4wr1du3axcfH9+nTh6/+1Ek+wH1VcR87sQ9wAapImKkzZ86cWrVqyQ9zc3N///13hv2IjmCCMDub1q2jjAzWfaikuJi2bKF79+SFLVu2eHl5KY+ZxowZExMTI9Ix07sGuHXr1j106JB4B7gA1aKQOjk5OcxTp0GDBtOmTeNWVqxYkZuby6of0RFGEObm0oYNqu/RzpbsUZHSUtq2jR4+LC0tDQoKGjdunJaNmSof4OK9oKA7GjRoMHXqVG5FCKkzd+5cIyMj+WFWVtZff/3FsB9xEcA6wqIiWrtWuA+IVoVUKtuPNKOkZExk5InoaIXfr1u37tatW3v16sWiOTX4559/ZsyY8UrptR5jxoxZs2aNqakpk64AWElOTm7SpElxcbG88vPPP3/99dcMWyKiadOmrV27Vn7YoEGDhw8fcgev8C6sR4Tl5bR9u7hTkIgkEpJKL6amuq9apZyC7du3j4+PF2kKyga448ePf9cAFykIOsjJyUnhLUjL
li0rKChg1Y/MggULDDhbIqelpa1Zs4ZhPyLCOgiPHhXE3mk1tvnqVe+1ax8p3d0dO3ZsTEzMBx98wKKpmsrIyHjXpODhw4fnzZvHpCsAIVBOHe5ojAkXF5eRI0dyK0uXLuUOW+FdmAbh9et0/jzLBtShtLw86PjxCbt3vyop4dYNDAyWLFnyzz//iHTMdPHiRXd39xMnTijURT3ABVAX5dQJCQlhnjrffvstd7/i5ORkvI+iKtgF4cuXtH8/s6urScarV33Dw0OUbofamJoeDg0V75hp8+bNXl5ejx49UqiPGzdOvANcAPUSYOq0atVKYb/GRYsWlQp/cxLWGAWhVEp792rwtbq8uJCa6hYWdlJpjUQHO7v4wMCeGRmUl8eksZqQTQpOmDBBYcJDNsDdvHmzSAe4AGrXqlWrwYMHcytCSJ3vv/+eu5Dp/v37O3bsYNiPKDB6ajQhgf79l8F11WfTlSuB+/cXVLwdSkTj27YNGzzY1NCQiKhlSxo1ikFzqkpPTx89evSpU6cU6jY2Nlu3bu3ZsyeTrgAE68KFC25ubtzvolu2bBkzZgzDloiob9++3P39W7VqdfXqVZG+4o0fLD6a/HxSmnkSEdmk4MTduxVS0EBPb0mvXpv8/V+nIBHdvMldZS9wFy5ccHd3V07BDh06xMfHIwUBlHXs2FFhEe2PP/5YXl7Oqh+Z//znP9zDGzdu7Bf/PJRGsQjCU6eosJDBddUhPT+/98aNb50UPDJx4jxvb8U/cPgwsf5XURXh4eHe3t6PHz9WqI8fPz4mJqZRo0YsmgIQgeDgYO6hEFKnS5cuPj4+3MoPP/zAfsm4gPEehBkZdOEC3xdVk8SUFLewsNNKj5B0tLNLCAzs8dZHSDIzBf7fK5sUnDRp0lsnBTdt2oQFuQCV6Nq1qwBTRyGeL1y4oPyuGJDjPQhPnybWf0VUE5aY2GXNmiSlly5NaNs2eurUhrVrv/NPRkUJ9pVS6enpvXv3Vl4pWK9evaNHj4r3qVcAPi1YsIB7eOHCBeXNCHnWt29fd3d3bmXhwoWsmhE+foMwM5Nu3OD1iupQVFo6c//+mfv3F5eVceuyScFwf/9a8klBBbLIz82lS5c03mX1xcbGtmvX7vTp0wr1jh07xsfHd+/enUVTAOLTr18/AabO/PnzuYdRUVFnzpxh1YzA8RuEZ8+KbjiYkpvbfcOGsMREhXo9M7Njkya9ZVKQS/4Q87lzQvsPDwsL6969u/I7RSdOnBgdHd2wYUMmXQGIlELqREZGMk+dYcOGffjhh9zKokWLWDUjcDwGYWEhXbnC3+XUISYpyS0s7KzSKxJd7e0TAgN9q/4ISWamcB4fLSoqCgwMnDlzpsIuGLJJwY0bN2JSEKC6hg0b1qZNG26FeepIJJKgoCBu5fDhw/Hx8az6ETIeg/DqVVJadSdkYYmJPTZsSFV6u0qgq2vstGnOVlbVO50wHplJSUnx9fX9+++/Fer16tU7duwYJgUBVPPW1ElISGDVj8zo0aObNWvGrSxevJhVM0LGYxAKcp7srYpKS2fs26c8KWhsYBA6eHDo4MFG+vrVPumdO6T0GiOexcTEuLm5nTt3TqHepUuXS5cu+fr6smgKQEuMGTOmadOm3Arz1NHX1587dy63smfPnqtXr7LqR7D4CsIXLyglhadr1czTnBzf9etXKw3g7C0sTgUEBLq6qnje8nK6ebOmzdVAWFhYjx49lCcFAwMDT506ZW9vz6QrAK2hnDoRERHXrl1j1Y/MpEmTuFP+Uql0yZIlDPsRJr6CkGkGVF10UpJbWNi5J08U6l7OzgmBgZ5OTjU6O6MPoaioaMaMGcqTgsbGxmFhYaGhodwXWwOAygICAoSWOoaGhnPmzOFWtm3bdvfuXVb9CBNfQSiGzz0sMbHnhg3PlHbKDnR1PRkQYGdhUdMLPHrE/yzp06dPu3Xrtnr1aoW6vb396dOnZ8yYwXM/AFpMOXW2bt3KPHVmzJhhZ2cn
PywrK1u6dCnDfgSIlyAsKSGlBy8Fpai0dNrevW+dFPx7yBAVJwWVlZWR0h5mGhUdHe3m5nZe6aWPXl5eCQkJnTt35rMZAF0gwNQxMTGZPXs2t7Jhwwbl/RR1GS9B+OQJVQwYQXmSk9N13bq1Fy8q1B0sLSMnT57esaM6L8bjXz7ZpOCzZ88U6oGBgSdPnuT+WwUAdTExMfniiy+4FSGkzqxZs2xsbOSHJSUlv/zyC8N+hIavIBSqM48fu4WFxT19qlD3dnZOCAzs5Oio5uvx8lEUFhZOmzZt5syZJRXvxBobG69evRqTggAapZw6y5cvZ9gPEZmZmc2aNYtb+fvvv5UfndNZvAShUD/usMTEnhs3pr1jUtDW3Fz9l0xN1fQWM0+ePOnWrdvatWsV6g4ODpGRkdOmTdPo1QHA3NxcIXXCwsKYp87nn39uwXnQobCwcMWKFQz7ERRegvD5cz6uUh2FpaVT9uyZuX9/idKk4JqhQ0MHDzZUy6SgsqIiysnRyJmJiCgqKsrNzS0uLk6h7u3tnZCQ0KlTJ81dGgDkBJg61tbWH3/8MbeycuXKjIwMVv0IiuaDsLycXrzQ+FWqIzk7u+u6deuVFvg7WlpGTZkytUMHzV4+M1NDJw4LC+vVq1daWppCXTYpaGtrq6HrAoAC5dRZtWpVpsb+7VfRV199xd1AMT8/f9WqVQz7EQ7NB2F2tqDeTBv1+LFbWFi80qSgT8OGCYGBHg4OGu9AAz8WFBYWTpkyRXlS0MTEZM2aNaGhoYbvej8GAGiGQurk5eWtXLmSYT9E1KBBg+nTp3Mrv/32W67SLpI6SPNBKKRPOSwxsdfGjc/z8xXqga6uJyZNaqCJSUFl6r41mpyc3LVr1/Xr1yvUHR0dIyMjp06dqt7LAUBVNGjQQGFKXgipM3fuXO6zcllZWX/++SfDfgRC80GolDpMFJaWTn7bpKCJgcG6YcM0OCmoTK07jkZGRrq5uSnvKN+1a9eEhAQPDw81XgsAqkU5df766y+G/RCRo6PjxIkTuZXly5cXFBSw6kcgNB+EhYUav8T7JGVnd169eoPSpGCj2rVjp02b3L49r92o7wMJCwvr3bv3c6VnkQIDA48fP96gQQN1XQgAVODk5KSQOr/88gvz1Jk/f76BgYH8MC0tbc2aNQz7EQLNB6EAXr1kYWSUX3GbTSLq2rDh+RkzOvC/rlwdH0hhYWFAQMBbJwXXrVuHSUEAgRBg6ri4uIwaNYpbWbp0abHSd0idovkgZL2nTHZhYWpe3s99+tTiZEOgq+vxSZPqm5kxaKjGH0hSUpK3t/fGjRsV6k5OTlFRUZMnT67h+QFAXVxcXEaOHMmtCCF1vv32Wz29N9/8k5OTw8PDGfbDnOaDUCLR+CUqtff27darVvlt3VpQUkJE+hLJep4nBRXU7AM5ffq0m5tbYmKiQr1bt24JCQnu7u41OTkAqJ1y6mzatIlhP0TUsmXLIUOGcCuLFy8uLS1l1Q9zmg9CVnnzDv2bNg3geVJQgaofiFQqDQkJ6dWrV3p6usJvBQYGHjt2rH79+jVuDgDUrFWrVgqps2jRIuap8/3330s4P5Tfv39/x44dDPthS/NBKLBtLetwVvawodIHkpeXN3r06KCgoDKFp15NTDZs2IBJQQAhE2DqdOjQoU+fPtzKTz/9VC6kNd980nwQMg8eoan+B3Lv3r0uXboo/8txdnaOjo6eNGmSmjoDAI3o0KFD7969uRUhpM5//vMf7uGNGzf27dvHqhm2NB+E/KxSF5FqfiCHDx/28PC4evWqQt3X1zchIcHV1VV9nQGApiinzv79+1k1I+Pp6dm1a1du5YcffpBq+K0AwqT5ILS01PglxKXKH4hsUnDQoEEvKu7KJpFI5s2bd/z48Xr16mmgPwBQvy5duvj4+HArQkid4OBg7uHFixePHj3KqhmGNB+EZmZCmyZkzNq6Kl+Vl5c3atQo5UlBc3Pzbdu2LVmyRF9gTyEBQOUU
UufChQvHjh1j1YxMnz59FB41/+9//8uqGYZ4WT7BeUclVOXTuHfvnqen586dOxXqTZo0iY2NVViWBACi0LdvX4XUWbhwIatm5BYsWMA9PHv27JkzZ1g1wwov7yPEXl9yVlZkYlL5lxw8eNDd3f3atWsK9X79+sXFxX344Ycaaw4ANEshdaKiopinztChQxW+qwghnnnGSxDa2/NxFVGo9KOQTQoOHjz45cuX3LpsUvDAgQN16tTRbHsAoEkCTB2JRDJ//nxu5ciRI8r7+Gs3XoLQyYmPq4jCuz+K3NzcESNGBAUFKTxUbW5uvn37dkwKAmgBiUQSFBTErQghdUaNGtWsWTNuZdGiRayaYYKXIKxfn0xN+biQ8DVq9Nby3bt3PTw8du/erVBv1qzZ+fPnR4wYofHGAIAXo0ePVkidxYsXs2pGRl9ff+7cudzK3r17lZdsaTFeglAiocaN+biQwJmZka3tW3/HwsJC+Y2dAwYMOH/+fKtWrTTfGQDwRDl19uzZwzx1Jk2a1LBhQ/mhVCpdsmQJw354xksQElHFn4B0VNOm79px29bWdufOncbGxrJD2aTg/v37a9euzV97AMALAaaOoaHh119/za1s27btzp07rPrhGY9ByHkpl46qdGzXuXPnX3/9lYgsLCx27ty5ZMkS7o71AKA1DA0N58yZw61s27bt7t27rPqRmT59uh3n/axlZWVLly5l2A+f+PpWa2ys64NCU1Nycan8Sz766KP//ve/cXFx/v7+/DQFAEzMmDFDaKljYmIye/ZsbmXjxo2PHz9m1A6veBxzsH35EXNt21IVRnjff/99ixYteGgHABhSTp0NGzYwT51Zs2bZcHb8KCkp+eWXXxj2wxseg7BJE9LlGS/sjg0AHAJMHTMzs08//ZRb+fvvv1NTU1n1wxseg1AiIQ8P/i4nKE2aYJ85AOAyMzObNWsWtyKE1Jk9e7aVlZX8sLCwUPbsgnbj93EMV9f3bjCmnby8WHcAAILz+eefW1hYyA8LCwtXrFjBsB8isrKy+uijj7iVVatWZWRksOqHH/wGoZERde7M6xWFwNn5XevoAUCXWVtbf/zxx9zKypUrmafOV199ZcrZAiU/P3/lypUM++EB7w/od+6sc7vM9OzJugMAECjl1Fm1ahXDfoiofv3606ZN41Z+/fXX7OxsVv3wgPcgNDYmX1++L8pQixbk7My6CQAQqAYNGiikzm+//aa8zxTP5s6da8R5j2x2dnZoaCjDfjSNxZJtN7d37TSmbQwNqW9f1k0AgKAppE5WVtaff/7JsB8icnR0nDRpEreyfPnyV69esepH01gEoURCgwe/a7MxrdKtm06vGAGAKnB0dJw4cSK3snz58oKCAlb9yMyfP9+Asx1YWlramjVrGPajUYw28bK3py5d2FyaNw4O5OnJugkAEAHl1Fm9ejXDfoiocePGo0eP5laWLl1aXFzMqh+NYrebZffu2nyD1MiI/PyqspUMAICLi8uoUaO4FSGkTnBwMHfH4ydPnmzcuJFhP5rD7ju1vj6NGEH/974FbTNwINWty7oJABCNb7/9ViF1wsPDGfZDRC1bthw6dCi3snjx4tLSUlb9aA7TIUvdujRsmBZOFnp4UNu2rJsAADERZup89913Es636AcPHmzfvp1hPxrC+t5dixbUowfjHtTLxQVPigKAChRS5/79+8xTp0OHDn0rfkP76aefysvLWfWjIayDkIi8vcnNjXUTamJnRyNHYmoQAFTQoUOHPn36cCtCSJ3vv/+ee3jz5s29e/eyakZDhPEte8AA+vBD1k3UWL16NGGC1s56AoDm/ec//+Ee3rx5c9++fayakfH09OzatSu38uOPP0qlUlb9aIIwglAiIT8/cWdhvXoUEKBzu8cBgFopp84PP/zAPHWCg4O5hxcvXjx69CirZjRBGEFI/5eF7u5vKqz/v68GBweaMoXMzFj3AQCiJ8DU6dOnj2fFVdH//e9/WTWjCYIJ
QiKSSGjAAOrZ8/VzpEJ+mpQb0s2bU0AA1arFrhsA0B59+vRx5w4JhJE68+bN4x6ePXs2KiqKVTNqJ6QglPH2ptGjhT7TJg/prl1p9GgyNGTaDQBolQULFnAPz549e+bMGVbNyAwZMuTDirNXCxcuZNWM2gkvCImoeXMKDCQ7O9Z9KOEOBE1Nadw46t5d0CNXABChoUOHCi11JBLJ/PnzuZWjR4/Gx8ez6ke9BBmERGRtTdOmkbe3sJYiyDOvWTP6+GNq2pRpNwCgnZRT58iRI8xTZ9SoUc2aNeNWFi1axKoZ9RJSzCjQ16eePWn6dGENDc3MyN+fxo4lc3PWrQCA1hJg6ujr6yvMFO7du/fq1aus+lEjAQehjJ0dzZhBgwaxfyZTX588PenTT8W9zAMAxEBfX3/u3LncihBSZ+LEiY0aNZIfSqXSxYsXs2tHbQQfhEQkkZCrK33+OfXowebhTD096tCBPv2U+vQhExMGDQCA7pk0aVLDhg3lh0JIHUNDw6+//ppb2b59+507d1j1oy5iCEIZIyPy8aHZs6lfP6pTh6eLGhuTpyd9/jkNGYJX7AIAn4SZOtOmTbO3t5cflpWVhYSEMOxHLcQThDJGRtSpE332GU2YQG3aaGrdgkRCDRvS0KE0Zw716UNWVhq5CgBApaZPn66QOkuXLmXYDxGZmJjMnj2bWwkPD3/8+DGjdtRDbEEoI5GQiwsNH07ffEMjR1K7dup5dMXQkJo2pYED6auvaPJkat8eCwQBgCHl1Nm4cSPz1Pnkk09sbGzkhyUlJcuWLWPYT81JmO9ipzYZGZScTCkp9OwZZWRQYaGsvPHy5YCICPlXTWzXbqOf3+sDAwOqW5fq1yc7O3J0JAcHYa3WAACdl5+f36hRo4yMDHnl008//f333xm2REQ//PADd39wExOT+/fvcwev4qJFQaigsJBycig3d+P27QGcbRomDh688ZdfyNSULC3J3BzL4QFA4P773//+v//3/+SHJiYmDx48sGO6riw7O7thw4bZ2dnyyjfffMP8tq3KtHcAZGJC9euTiws5OFSo165NTZuSgwNZWCAFAUD4Zs+ebcV5UqGwsPDXX39l1w4RkZWV1ccff8yt/PHHH9xhq7hobxACAGgFKyurjz76iFtZtWoV89SZM2eOGWd5d35+/sqVKxn2UxMIQgAAofvqq69MOa87FULq2NjYTJs2jVv59ddfuTdLRQRBCAAgdPXr158+fTq3IoTU+eabb4yMjOSH2dnZf/31F8N+VIYgBAAQAQGmjqOjY0BAALeyfPnyV69esepHZQhCAAARcHR0nDRpErcihNQJCgoyMDCQHz5//nz16tUM+1ENghAAQBzmz5+vkDpr1qxh2A8RNW7cePTo0dzKzz//XFxczKof1SAIAQDEQTl1li5dyjx1goOD9ThbkTx58mTjxo0M+1EBghAAQDQEmDotW7YcNmwYt7J48eLS0lJG7agCQQgAIBotW7YcOnQotyKE1Pnuu+8knP1JHjx4sG3bNob9VBeCEABATJRTZ/v27Qz7IaL27dv37duXW1m4cGF5eTmrfqoLQQgAICYdOnRQSJ2ffvqJeep8//333MObN2/u2bOHUS/VhiAEABAZ5dTZu3cvq2ZkPD09u3Xrxq38+OOPYnmpA4IQAEBkhJk6wcHB3MNLly4dOXKEVTPVgiAEABAfhdS5ePEi89Tp3bt3ly5duJUffviBVTPVgiAEABCf3r17e3p6citCSJ158+ZxD8+ePRsZGcmqmapDEAIAiJJy6kRFRbFqRmbw4MFt27blVhYuXMiqmapDEAIAiNKQIUOEljoSiWT+/PncyrFjx86ePcuqnypCEAIAiJJy6hw9epR56owcObJ58+bcSkhICKtmqghBCAAgViNHjmzWrBm3wjx19PX1Fe7Z7tu378qVK6z6qQoEIQCAWL01da5evcqqH5kJEyY0atRIfiiVShcvXsyunfdDEAIAiNjEiROFljqG
hoZff/01t7Jjx447d+6w6ue9EIQAACKmnDrbt29nnjrTp0+3t7eXH5aVlTG/Z1sJBCEAgLhNmzZNaKljbGz85Zdfcivh4eGPHj1i1M57IAgBAMTNxMRk9uzZ3Ep4ePjjx48ZtfPaxx9/bGNjIz8sKSlZtmwZw34qgSAEABC9Tz75RGipY2Zm9tlnn3Era9asSUlJYdVPJRCEAACip5w6q1evZp46X3zxRe3ateWHhYWF//vf/9i1804IQgAAbaCcOr/++iuzboiIyMrK6qOPPuJW/vzzz4yMDFb9vAuCEABAGyinzh9//ME8debMmWNubi4/zM/P//333xn281YIQgAALTFnzhwzMzP5oRBSx8bGZtq0adzKihUrXr58yaidt0MQAgBoibemTnZ2Nqt+ZL7++mtjY2P5YXZ29l9//cWwH2UIQgAA7fHNN98ILXUcHR0DAgK4lV9++SU/P59VP8oQhAAA2sPR0XHSpEncyvLly1+9esWqH5l58+YZGBjIDzMyMlavXs2wHwUIQgAArRIUFMRNnefPnzNPncaNG48ZM4ZbWbZsWVFREat+FCAIAQC0inLq/Pzzz8XFxaz6kVmwYIGe3pvEefLkycaNGxn2w4UgBADQNsqps2HDBob9EFHLli39/Py4lSVLlpSWlrLqhwtBCACgbVq2bDls2DBuRQip8+2330okEvnhgwcPtm7dyrAfOQQhAIAW+u677xRSZ9u2bQz7IaL27dv369ePW1m0aFF5eTmrfuQQhAAAWkg5dRYuXMg8db7//nvu4c2bN/fs2cOolzcQhAAA2kmAqdO5c2dfX19u5ccff5RKpYzaeQ1BCACgnTp37tytWzduRQipExwczD28dOnS4cOHWTUjgyAEANBayqlz5MgRVs3I9OrVq0uXLtzKDz/8wKoZGQQhAIDW6t27t9BSh4jmzZvHPTx37lxkZCSrZghBCACg3RRS5+zZs2xTh4gGDx7csWNHbmXhwoWsmiEEIQCAdhs8eHCHDh24FbapQ0QSiWTu3LncyrFjx2JjY1n1gyAEANBmb02ds2fPsupHZsSIEc2bN+dWQkJCWDWDIAQA0HIjR44UTurI6OvrBwUFcSv79++/cOECk2YQhAAAWk5fX19hpnDfvn1Xrlxh1Y/M+PHjGzVqJD+USqVLly5l0gmCEABA+02YMEEhdRYvXsyuHSIiQ0PDb775hlvZuXPn7du3+e8EQQgAoP2UU2fHjh1MUodr2rRp9vb28sOysjIm92wRhAAAOkEgqcNlbGz81VdfcSubNm169OgRz20gCAEAdIKxsfGXX37JrTBJHQUfffRRvXr15IclJSU///wzzz0gCAEAdMXHH3+skDrLli1j2A8RmZmZffbZZ9zKmjVrUlJS+OwBQQgAoCsUUkdPT+/ly5fs2nnt888/r127tvywqKjof//7H58NIAgBAHSILHX09PRGjhx59erVTZs2se6IrKysPv74Y27lzz//TE9P560BBCEAgA6xsrJat27dnTt3tm/f3qpVK9btvDZnzhxzc3P5YX5+/u+//87b1RGEAAC6ZdiwYS4uLqy7qKBu3brTp0/nVn777TfebtsiCAEAgL25c+eamJjID7Ozs//8809+Lo0gBAAA9uzs7CZNmsSt/PLLL3l5eTxcGkEIAACCMG/ePAMDA/lhZmbmmjVreLgughAAAAShcePGY8eO5VZCQkIKCws1fV0EIQAACMWCBQv09N4EU2pq6saNGzV9UQQhAAAIRYsWLfz8/LiVkJCQ0tJSjV4UQQgAAALy7bffSiQS+eGDBw+2bt2q0SsiCAEAQEDat2/ft29f+WHnzp0dHR01ekUEIQAACEvLli1lv3B3d//11199fX01ejkEIQAACEhxcfHOnTtlv46Pj+/cufPSpUs1ekUEIQAACMiGDRuSk5PlhxKJZMCAARq9IoIQAACEoqysTOHFvH5+fm3atNHoRRGEAAAgFFu3br179y63Mn/+fE1f
FEEIAACCIJVKlyxZwq3079/fzc1N09dFEAIAgCBERERcu3aNW1mwYAEP10UQAgCAICxevJh72K1bN29vbx6uiyAEAAD2Dh8+nJCQwK0EBwfzc2kEIQAAsLdo0SLuoYeHR+/evfm5NIIQAAAYi4yMPHPmDLfC23CQEIQAAMDcwoULuYetWrUaNGgQb1dHEAIAAEsXLlw4fvw4t/Ldd99x30qoaQhCAABg6YcffpBKpfJDFxeXkSNH8tkAghAAAJi5cePG/v37uZXg4GB9fX0+e0AQAgAAMz/++GN5ebn80MnJafz48Tz3gCAEAAA27t+/L3/jksy8efOMjIx4bgNBCAAAbCxatKi0tFR+2KBBg6lTp/LfBoIQAAAYSE5O3rRpE7fy9ddf16pVi/9OEIQAAMBASEhIcXGx/NDa2nrmzJlMOkEQAgAA39LS0tauXcutzJ4928LCgkkzCEIAAODbsmXLCgoK5IeWlpaffvopq2YQhAAAwKusrKzQ0FBu5ZNPPqlTpw6rfhCEAADAqxUrVuTm5soPTUxMPv/8c4b9IAgBAIA/OTk5v//+O7cyc+ZMOzs7Vv0QghAAAPj0xx9/vHjxQn5oaGj41VdfMeyHEIQAAMCbwsLC3377jVuZPHmys7Mzq35kEIQAAMCT0NDQ1NRU+aG+vv4333zDsB8ZBCEAAPChpKRk+fLl3MrYsWObNm3Kqh85BCEAAPBh/fr1SUlJ8kOJRDJv3jyG/cghCAEAQOPKysp+/vlnbsXf379Nmzas+uFCEAIAgMZt2bLl7t273EpQUBCrZhQgCAEAQLOkUmlISAi30r9/fzc3N1b9KEAQAgCAZkVERFy7do1bWbBgAatmlCEIAQBAsxYvXsw99PX19fb2ZtWMMgQhAABo0KFDhxISEriV4OBgVs28FYIQAAA0aNGiRdxDDw+PXr16sWrmrRCEAACgKZGRkdHR0dyK0IaDhCAEAADNWbhwIfewdevWgwYNYtXMuyAIAQBAI+Li4o4dO8atfPfdd3p6gssdwTUEAADaQWE46OLiMmLECFbNVAJBCAAA6nfjxo0DBw5wK8HBwfr6+qz6qQSCEAAA1O/HH38sLy+XHzo5OY0fP55hP5VAEAIAgJrdv39/x44d3EpQUJCRkRGrfiqHIAQAADVbuHBhWVmZ/LBBgwZTpkxh2E/lEIQAAKBOycnJmzdv5la+/vrrWrVqsernvRCEAACgTiEhIcXFxfJDa2vrmTNnMuznvRCEAACgNmlpaWvXruVWZs+ebWFhwaqfqkAQAgCA2ixbtqygoEB+aGlp+emnnzLspyoQhAAAoB5ZWVmhoaHcyqxZs+rUqcOqnypCEAIAgHr8+uuvubm58kNTU9PZs2eza6eqEIQAAKAGOTk5K1eu5FZmzJhRv359Vv1UHYIQAADU4I8//njx4oX80NDQ8KuvvmLYT9UhCAEAoKYKCwt/++03bmXKlCnOzs6s+qkWBCEAANRUaGhoamqq/FBfX//rr79m2E+1IAgBAKBGSkpKli9fzq2MGzeuadOmrPqpLgQhAADUyLp165KSkuSHEolk7ty5DPupLgQhAACorqysbNmyZdyKv79/mzZtWPWjAgQhAACobsuWLXfv3uVWgoKCWDWjGgQhAACoSCqVhoSEcCsDBgxwc3Nj1Y9qEIQAAKCi3bt3X7t2jVuZP38+q2ZUhiAEAAAVLVmyhHvo6+vr7e3NqhmVIQgBAEAVhw4dSkhI4FaCg4NZNVMTCEIAAFDFokWLuIceHh69evVi1UxNIAgBAKDaTp8+HR0dza18++23rJqpIQQhAABU28KFC7mHrVu3HjhwIKtmasiAdQPqVlZGz59TZiZlZVFODuXl0ZkzFb7g3j365x8yNSUrK6pdm+rWpfr1ycSEUbsAAOITFxd3/PhxbuW7777T0xPryEorgrCwkB4+pEePKCmJnj+n8vIKv5uZWeHw1SuquPaTiKhOHXJyooYNqXFjql1bo80CAIid
wnCwSZMmI0aMYNVMzYk5CPPz6fp1unmTkpIUw6+6XrygFy/oyhUionr1qGVLatOG6tVTS5sAANrk+vXrBw4c4FaCg4P19fVZ9VNzIgxCqZTu36eEBLp7t6b591bp6ZSeTlFRZG9Prq704YdkaKj+qwAAiNOPP/5Yzvne6+TkNG7cOIb91JyogrC8nC5fpthYysjg43IpKZSSQsePk7s7depEpqZ8XBQAQMDu37+/c+dObiUoKMjIyIhVP2ohkiCUSunyZYqMpJcv+b50QQFFRdG5c9SpE3l5kbEx3w0AAAjGwoULy8rK5Ie2trZTpkxh2I9aiCEIk5Pp4EF69oxlD8XFdOYMXbhAPXtS+/YkkbBsBgCAheTk5M2bN3MrX3/9da1atVj1oy7CDsKiIjp6lC5cYN3H/8nPp3376OJFGjqU6tZl3Q0AAK+WLFlSXFwsP6xbt25gYCDDftRFwMs+Hj2iP/8UUArKJSfTX3/R+fMklbJuBQCAJ8+ePVu3bh23Mnv2bAsLC1b9qJEgR4RSKUVGUlSUcJOmtJQOH6YHD8jPD4vxAUAXLFu2rKCgQH5oaWk5a9Yshv2okfBGhEVFtGULRUYKNwXl7tyhsDB6/px1HwAAmpWVlRUWFsatzJo1q06dOqz6US+BBWF2Nq1d+5adXwTrxQtau5YePGDdBwCABv3666+5ubnyQ1NT09mzZ7NrR82EFIQZGbR2rfgGWEVF9M8/dP066z4AADQiJydn5cqV3EpgYGD9+vVZ9aN2ggnC9HRav55yclj3UU2y+7dlZbRr1+sd2gAAtMuqVatevHghPzQ0NPzyyy8Z9qN2wgjCrCzauJHy81n3UX3yBYVSKe3ZQzduMO0GAEDNXr169euvv3IrU6ZMcXZ2ZtSORgggCPPyKDyc8vJY91FjUint3k0PH7LuAwBAbf7+++/nnBkrfX39r7/+mmE/msA6CEtLacsWBhunaUhZGW3fztNWqAAAGlZSUrJ8+XJuZdy4cU2bNmXVj4awDsL9+yklhXEP6lVYSFu3UlER6z4AAGpq3bp1SUlJ8kOJRDJ37lyG/WgI0yBMSNDOB0wyM2nvXtZNAADUSFlZ2bJly7iV4cOHt2nThlU/msMuCNPT6cgRZlfXtJs3KTGRdRMAAKr7559/7lZc1R0UFMSqGY1iFITl5RQRQaWlbK7Oj6NHtWfuEwB0jFQqXbp0KbcyYMAAV1dXVv1oFKMgPHuWUlPZXJo3xcW0fz/rJgAAVLF79+5r165xK/Pnz2fVjKaxCMLsbIqMZHBd/j14gB1nAECMlixZwj3s3r27t7c3q2Y0jUUQHj9OJSUMrsvEsWNafgcYALTOwYMHExISuJXg4GBWzfCA9yBMTaWKw20tl51NcXGsmwAAqIbFixdzDz08PHr27MmqGR7wHoSnT/N9ReZiYojzTmcAACGLjs569KhC5dtvv2XTCl/4DcJnz+jOHV6vKASvXmEpBQCIxX//a52ScsbTM7lx44FE1K5du0GDBrFuSrP4fUP92bO8Xk44zp+nTp1Ij/U+PgAAlTp/no4fJyI6e9ZRT++Ap2fy11/fk8jfLqClePzWnJ+vi49Qyt7TlJ1Nt2+zbgUA4D0WLnzz6/JySk93Gjq0O7t2eMJjEF6+TGVl/F1OIOQ/SVXt7mieFryFAwDE6coVOnCgQiU4mPT1GXXDI36DUJc9eEC5uZV/SVZWVocOHYKCgsp08CcGAGBt0aLX97BknJ1p3Dh23fCIryBMTyfOG610kVRKN29W8vvl5eUTJky4d+9eSEjIoEGDuO+DBgDQtHv3aOfOCpWgIDIyYtQNv/gKwlu3eLqQkFUahEFBQYcOHZL9+vDhw+7u7levXuWlLQAAWriwwuSVrS1NnsysGZ7xFYQVtzDXUUlJ73pP4ZkzZxRed3L//v0uXbrs2LGDl84AQKclJdE//1SofPMN1arFqBve8RKERUX09CkfFxK48nJSWKf6f7y9vRcvXqxfcVY6
Ly9v9OjRX3zxRSk2aQMATVqypMK2H3XrUmAgu254x0sQPnlC5eV8XEj4Hj9+a1kikcybN+/AgQN16tTh1qVS6W+//da7d+/nOj7DCgAa8+wZrV9foTJ7Npmbs2mGCb6CEGQq/Sj69esXHx//4YcfKtRPnz7t5uamsAcuAIBaLFtGBQVvDi0tadYsdt2wwEsQav2rB6suLa3C48lKXFxcYmNjR44cqVBPTk728fHZsGGDJpsDAJ2TmUlhYRUqn35KFe9MaT9eghC39eSKi9/72npzc/Pt27eHhoYaGFTYAK+wsHDy5MkzZ84s0Z2XWAGAhv36a4UVzqam9MUX7LphRPNBWFb23m/9uiUzsypfFRgYeOzYsfr16yvUw8LCevXqhSlDAKi5nBxatapCZeZMUvquo/00H4TZ2ZXfDNQ5Vf6xwNfXNyEhwc3NTaEeFRXl5uYWHx+v5sYAQMesXEncrTuMjWnOHHbdsKP5IHzfvmI6Jyen6l/r5OR05syZgIAAhXpycnLXrl3XKzzpBQBQZa9e0YoVFSqTJ5ODA6NumNJ8EObna/wS4vLqVbW+3MTEZP369aGhoYaGhtx6YWHhlClTMGUIAKoJC6vw/Ia+Pn39NbtumNJ8EBYWavwS4qLSBxIYGHj8+PG3Thn27NkzLS1NHZ0BgK4oLqb//a9CZfx4atKEUTesaT4IBTZeOffkSQnbdzuo+oF07do1ISHB3d1doX7mzBk3N7e4uLgadwYAumLdOkpKenOop0dz57LrhjXNByHrPWVszc19GzXybdSovpkZEd3NzOy5cWMaw9f+1eADcXJyioqKmqy0Fe6TJ0+6du26du3aGjUGALqhrIwq7m1M/v7UujWjbgRA80EofzMtI31cXE5NntzHxeX5/81Wnnn82C0s7Dyr/W5q9oGYmJisW7dOecqwqKho2rRpmDIEgPf65x+6d69CJSiIUSvCoPkgFMDrjS8/e/bdqVPcypOcHN/16zdcusSgG3V8IIGBgSdOnGjQoIFCPSwsrEePHs+ePav5JQBAK5WX09KlFSoDB5KrK6NuhEHzQWhsrPFLvE87W9s/Bg40rJhAhaWlk/fsmbl/fzHPU4Zq+kB8fHwSEhI8PDwU6tHR0W5ubufPn1fLVQBAy+zeTdeuVajMn8+oFcHQfBAK45VWga6uJyZNslXaUD0sMbHHhg3P+JwyVN8H4ujoGBUVNXXqVIX606dPu3Xrtnr1anVdCAC0xpIlFQ579CAvL0atCIbmg1AwL/PwadgwITCwk6OjQj0mKcktLOwcb1OGav1AjI2N16xZ89YpwxkzZsycObOY+5IxANBt//5LiYkVKsHBjFoREs0HoZWVxi9RZQ6WlpGTJ0/r2FGh/jQnx3f9+tUXLvDRhAY+kMDAwJMnT9ra2irUZVOGqXj7BwAQEdHixRUOO3WiHj0YtSIkmg9CU1MhTBPKGRsYrB4yJHTwYKOKU4ZFpaUz9u3jY8pQMy848fb2TkhI6Ny5s0I9JibGzc3t7NmzmrgoAIjIqVMUE1Oh8u23jFoRGF5ew1SvHh9XqY5AV9eTAQFvnTLsvn59qkb3R7Wx0dCJHRwcTp8+PX36dIV6SkpK9+7dwxTeOQYAOmbhwgqH7drRwIGMWhEYXoJQ6Sl/IfBydk4IDOysNGUYm5zsFhZ2NjlZI1etU0ej42NjY+O///47NDTUyMiIWy8qKpo5cyamDAF01vnzdOJEhUpwMPNl3kLBSxDa2/NxlepzsLQ8PXnydKUpw5TcXN/161ecO6f+S/LyUcimDO3s7BTqYWFhXbp0SeJurAQAuuGnnyocNm9Ow4czakV4eAlCZ2c+rqISYwODv982ZVhcVjb78OFJEREF6t2oha+PwsvLKyEhwdPTU6GemJjo5uZ2+vRpftoAACG4fJn+/bdCZf580uPl278o8PJJ1K0rnEUUbxXo6npq8mQ7CwuFevjlyz7r1iVlZ9f0AvJXEzdqVNNTVZm9vf2pU6cC
AwMV6unp6b179w4JCeGtEwBga9GiCu9Hd3amcePYdSM8vAShREIuLnxcqAa6ODklBAZ6Ojkp1BNTUtzCwk49fFijs8vuxFta8vzckLGxcWhoqPKUYWlpaVBQ0MSJEwsKCvjsBwD4d+8e7dpVoTJ/PlVceKzr+BobN2/O04VqwN7C4vTkyZ936qRQT8/P7xMeHhIdXdMLNGvGZG46MDAwNjbWWemu7KZNm7y9vR8/fsx/SwDAm59+Iu6iMFtbCghg140g8RWETZpQxUGJMBnp66/o33+Dn1+tij8vlZaXBx0/PnH3blWmDOW3JFq1UkePqnB1dU1ISPD19VWoX7hwwc3N7eTJkyyaAgCNS0qiLVsqVL75RiAbXwoIX0FoaEgtWvB0rRqb1K7dmSlTnJW2gNl05Yr32rWPX76s3ulko0ALCz4nCJXVq1fv2LFj8+bNU6hnZGT07dsXU4YAWmnxYuKumapbl5QeGwDegpCI2rfn71o15mpvnxAY2P2DDxTqF1JT3cLCTqowZdiuHfM1OwYGBkuWLAkPD69V8QdC2ZThhAkTXr16xao3AFC7Z89ow4YKlS+/FPiTi2zwGISNGmluUxVNqGdmdnTixHne3gr1jFev+lZ3ylAiEc77viZMmBAdHd2wYUOF+ubNm729vR89esSiKQBQv59/Ju7zcJaWNGsWu24EjMcglEhI6TkUgTPQ01vSq1e4v/9bpwwn7N79qopThi1aUO3amuhQNR07dkxISOihtNvuxYsX3d3dTyjsPwEAIpSZSQr7Kn72maC+DwkIvysq27cnMzNer6gOE9q2jZ46taHS36DNV654r137qCpThsJ735eNjc2RI0feOmXYr18/TBkCiN3//kfcF62amtIXX7DrRtj4DUIDAwFGQlV0tLNLCAzsoTRleDE11T0s7MSDB5X94SZNyMFBg82pSjZluGnTJlNTU25dNmU4btw4TBkCiFRODq1aVaHy0UcCfP2BUPC+x467O1la8n1RdbAxNT3yjinDfps2vXPKUCIR+Pu+xo8fHx0d3UjpidYtW7Z4eXk9rOFOAgDAwu+/E/delbExffUVs2aEj/cgNDCgnj35vqiayKYMN/n7m75tynDcrl1vmTJs25aUNr8Wmg4dOsTHx/dU+v/l0qVL7u7ux48fZ9IVAKjm1StasaJCZcoUYd6WEgoWu65++KGQt+F+r/Ft20ZPndpIacpwy9WrXmvWPHzx4k3J2Jh69eKzN5XZ2NgcPnxYecowMzMTU4YA4hIaSunpbw4NDGjuXHbdiIFEyt2KlTfp6RQaSpp+F7wmZbx6NWbnTuXZwbqmpluGD+8t21t10CDhrJqooi1btkyfPl15dnDMmDGrV682E+GzTgA6paiIXFzo6dM3lYAAWr+eWT+iwOg9HPXqkdJ2X+JiY2p6eMIE5SnDzFev+m/eHBIdLf3gA1J606HwjR07NiYm5gOlx4K2bt3q5eX1oPLHggCAtXXrKqSgnh598w27bkSC3QupvLxIaU23uMimDP8ZPlxhyrBMNmW4b1++OJ+6bN++fXx8fC+lm7qXL192d3c/evQok64A4L3KyuiXXypUhg+n1q0ZdSMe7IJQIiF/fzEuK1Qw9sMPY6ZN+6BOHYX61l27unTpItIhVN26dd86ZZiVlTVgwICQkBA2d9QBoFKbN9O9exUqSv+I4S0YzRHKPXpE4eFUXs6yB3XIfPVq7K5dx+7fV6hbW1tv2bKlT58+TLqqua1bt06fPj0/P1+hPnr06DVr1mDKEEA4ysupbVu6fv1NZdAg2r+fXUPiwW5EKNOoEQ0cyLgHdahranpo/Ph53t6Sijtri30INWbMmJiYmMaNGyvUt23b1qVLl/tKwQ8ArOzaVSEFiSgoiFErYsN6RChz6hRFRbFuQh0aNdpqaDg9MFB5CDVq1Ki1a9eKdAiVlZU1duxY5dlBS0vL8PDwIUOGMOkKALjc3Cgx8c1hjx6EbYOriPWIUKZ7d9Ht
x/0Wjo40duyYceNiY2OVh1Dbt293c3O7desWk9ZqyNra+uDBg/PmzVMY7+bk5AwbNiwoKKhc/De3AUTtwIEKKUhEwcGMWhEhYYwIiUgqpaNH6dw51n2oysmJxo8nY2PZUSVDqI0bNw4dOpT3/tRj27Zt06ZNUx7vDh48ODw83ErpVcYAwA9vb4qJeXPYqZOIv5vyTxgjQiKSSKhvX7EuLnRxoYkT5SlIlQ6h/Pz8xDuEGj16dGxsrItsuwCO/fv3d+rU6ebNm0y6AtBxJ09WSEEi+u47Rq2Ik2BGhHKXLtGBA2LadKZjRxo4kPTe/iPF9u3bp06dqmVDqJycnIkTJ+7bt0+hbmlpuWHDhmHDhrFoCkB39exJJ0++OWzXji5epIo/hENlBDMilGvfngICyNycdR9VoK9P/fvT4MHvSkEiGjVqVEJCQosWLRTqoh5CWVpa7tmzZ8mSJXoV/8NzcnL8/f3FO94FEKPz5yukIBF9+y1SsHqENyKUyc+n3btJyKvRa9em4cPJ0bEqX5uTkzNp0qS9e/cq1MU+hNq/f//EiROzs7MV6oMGDdq0aZNIx7sA4jJoEP3775vDFi3o+vVKfjiHtxBqEBKRVEpxcXTiBCm/24i5Dh2ob1/upOB7SaXSpUuXLliwQGG0JJFI5s6du2jRIj1x/s29ffu2n5+f8tC2WbNmERERrVq1YtIVgI64fJk6dCDud/ENG2jSJHYNiZOAg1DmxQv6918SzsJta2saOJCUVkdUkVYOoXJycgICAvbs2aNQt7Cw2LBhg5+fH4umAHTCqFG0Y8ebQ2dnunePKm5+DO8n+CCUuX2bjh2jzExeLyqVVrjRbmJC3t7UuTPp69fkrFo5hNLW8S6AkN26Ra1bV9ih8q+/aOZMdg2JlkiCkIjKy+nyZTpzhrhvvuWHsTF5eJCnJ9WqpZbzaesQ6sCBAxMmTFAe7w4cOHDTpk21lV5lDAA1ERBAGze+ObS1pYcPycSEXUOiJZ4glCkvp5s36dw5evJEzWdWGP/JWFmRhwd17Kj2v1zaOoS6c+eOn5/fjRs3FOpNmzaNiIhojffBAKhJUhI1aVLhCYrly+nLL9k1JGZiC0K5tDS6eJGuX6e8PPWf3NCQmjWjdu2oSRONPob8riHUgAEDNm/eLNIhVG5ubkBAQEREhELdwsJi3bp1w4cPZ9IVgJb5+GP66683h3Xr0qNH4lh3JkCiDUIZqZSSk+n2bbp/n9LSano2S0tycaGmTalJE96mm7VyCKWt410AgXj2jD74gAoL31QWLqQFC9g1JHIiD0KuggJ68oSePqXnzyk9nV68eM/2NBIJWVqSjQ3Vr092duTkRIxGYO8aQpmbm69fv168Q6h///13woQJL1++VKj3799/8+bNdZReZQwAVfTVV/S//705tLKiR49YfQPTBloUhMry8ig/nwoKqLiYSkuJiPT0yMiIatUiMzMyNxfOolNtHULdvXvXz8/vusJL0oiaNGkSERHRpk0bJl0BiFpmJjVqVGFS6Ntv6ccf2TUkflodhGJz8ODB8ePHa9kQKi8vb/Lkybt27VKom5ubr1u3bsSIEUy6AhCv4GBatOjNoZkZPXxI9eqxa0j8RDnO0FYDBgyIi4tTnhc8dOiQh4fHtWvXmHRVQ+bm5jt27FDemDQvL2/UqFFBQUFlItpgHYC1nBz6448KlZkzkYI1hSAUlqZNm547d055XvDevXuenp47d+5k0lUNSSSSefPmHThwQGFQK5VKQ0JCBg0a9IL/taEA4vT778S9Z2RsTF99xawZrYEgFBxtHUL1798/Li5OeV7w8OHD7u7uV69eZdIVgIjk59OKFRUqU6eSgwOjbrQIglCItHUI1aRJk7NnzyrPC96/f9/T03P79u1MugIQi9BQSk9/c2hoSN98w64bLYIgFC6tHEKZm5tv3759yZIl+hW3bM3Pzx89evTMmTNLZc/3AkBFRUW0fHmFyvjx9MEHjLrRLghCQatk
CNWpU6eN3H0GxeNd410iCgsL69Wr1/Pnz5k0BiBka9fS06dvDvX0MBxUGwSh0L1rCFVQUBAQECDeIVS/fv3i4+M//PBDhXpkZKSbm1tCQgKTrgCEqaSEfv65QmXECBLnu2qECEEoAto6hHJxcTl79uyoUaMU6snJyT4+Phs2bGDSFYAA/fMPPXxYoTJvHqNWtBEW1IvJ/fv3/fz8lGcHnZycdu/e7ebmxqSrmgsLC5s1a5by0DYwMHDlypWGeM0o6LbycmrblrgbNA0eTPv2sWtI62BEKCYuLi7nz5+fNGmSQl3sQ6jAwMDjx4/Xr19foS4b76bVfDt1ADHbuZMUtikMCmLUipbCiFCUtHIIlZyc7O/vrzw76OjouHv3bnd3dyZdATDn6koXLrw57NmTjh9n1402wohQlLRyCOXk5HTmzJmAgACF+pMnT7p27bpu3TomXQGwdeBAhRQkouBgRq1oL4wIRUxbh1BhYWGffvppCffd20Qk8vEugGq8vSkm5s1h58509iy7brQURoQiJhtCTZ48WaEu9iGUbLzboEEDhXpYWFjPnj1FOt4FUMGJExVSkIi++45RK1oNI0JtoJVDqCdPnvj7+8fHxyvUHR0dd+3a5eHhwaQrAD716EGnTr05bN+eLlwgiYRdQ1oKI0JtUPkQ6tmzZ0y6qiFHR8eoqKgpU6Yo1GXj3bVr1zLpCoA3585VSEEi+vZbpKBGYESoPd41hHJwcNi1a1enTp2YdFVzlYx3f//9dyMjIyZdAWjawIF08OCbwxYt6Pp10sPgRQPwoWqPdw2hnj592q1btzVr1jDpquYCAwNPnDihZeNdgMpdvkyHDlWoLFiAFNQUjAi1kFYOoZ48eTJ8+PC4uDiFuoODw86dOzt37sykKwANGTmSuO/hbtyYbt8mAwN2DWk1/IChhWRDKFtbW4V6WFhYjx49RDqEko13p06dqlB/+vSpr6/v6tWrmXQFoAm3btHu3RUq8+YhBTUIQaidfHx8EhISlOcFY2Ji3Nzczp07x6SrGjI2Nl6zZk1oaKjCoLaoqGjGjBkzZ84sLi5m1RuAGi1aROXlbw7t7EhpX0VQJwSh1nJwcIiMjJw2bZpCXexDqMrHu6mpqUy6AlCXhw9py5YKlblzycSEUTe6AUGozYyNjVevXq19Qyhvb++EhATleUHZePcsNt4AMQsJIe4uwnXr0vTp7LrRDQhC7RcYGHjy5Mm3DqG6d+8u0iGUg4PD6dOnpyt9h0hJSenevXtYWBiTrgBqKDWVFN4iM2cOmZsz6kZnIAh1gpeX11uHULGxseIdQhkbG//9999vHe/OnDlz0qRJBQUFrHoDUM3vv1Nh4ZvD2rXpk0/YdaMzEIS6opIhlK+v74oVK5h0VXOy8a6dnZ1CPTw83MfHJykpiUlXAKr5z38oNJScnF4ffvYZWVkxbUg3YB2hzgkLC/vss8+UZwcnTpwYGhpaq1YtJl3VUEpKyogRI5SHtvXq1du+fbuvry+LpgBUVFxMGzfSypV0/DjZ2LDuRgcgCHVRbGzsiBEjlGcHXV1dd+/e7ezszKSrGioqKvr888+VZwcNDAx++umnefPmMekKAIQPQaijKhlCbdu2rXv37ky6qrmNGzfOnDmzkDvNQkREEyZMCAsLE+l4FwA0CnOEOsre3v706dOff/65Qj09Pb1Pnz4hISFMuqq5SZMmRUdHKw9qN23a5O3t/fjxYyZdAYCQYUSo6zZu3PjRRx8pP2Ap6iFUenr6qFGjTp8+rVC3sbHZtm1bjx49WDQFAAKFEaGumzRp0pkzZ7RsCFWvXr1jx44pzwtmZGT07dtXvONdEKk2bUgief2/vn0r+8qgoNdf1qZNZedxdHzPFW1tKzsPKEAQArm6uiYkJCjPC164cMHNze3kyZNMuqohAwODJUuWhIeHKwxqS0tLg4KCJkyY8OrVK1a9gS47epSuXGHdBFSEIAQionr16h09elT7hlATJkyIjo5u2LCh
Qn3z5s3e3t6PHj1i0RToul9+Yd0BVIQghNe0dQjVsWPH8+fP+/j4KNQvX7588+ZNJi2BjtuyhZ4+Zd0EcCAIoQKtHEI1aNDg5MmTCuPdH374oX///qxaAt1Upw4RUUkJ/fYb61aAA0EIijp27JiQkKD8aOXFixfd3d1PnDjBpKsako13N23aZGpqSkRDhw5dsGAB66ZA54wc+foXoaGUm8u0FeBAEMJb2NjYHDly5K1Thv369RPvlOH48eMjIyP79u0bHh4ukUhYtwM6x8eHPviAiCg7m0T7SlAthCCEt1MYQsnJpgzHjRsn0ilDNze3w4cPW1hYsG4EdFFODs2e/frXK1ZUeO8gMIQghMqMHz8+Ojq6UaNGCvUtW7Z4eXk9fPiQRVMAYvXqFU2dSrVrExE9fkw7dzLuB2QQhPAeHTp0iI+P79mzp0L90qVL7u7ux44dY9IVgBgVFZG5Oc2c+fpw2TKm3cD/QRDC+9nY2Bw+fFh5yjAzM7N///4hISHYqA+gKmT/UD77jAwNiYgSE0lpH0BgAEEIVSKbMvznn38UpgzLyspkU4b5+fmsegMQjrIyuniRVq6kBw/e+TUODjRmzOtfqzYofPr0zZ5tb/1fWpoqp9VZCEKohrFjx8bExHwge+6NY+vWrV26dHlQyT99AO2Vl0fR0RQSQoMHU7161LEjffYZHTpU2R+ZM+f1Lw4eJOzrwByCEKqnffv28fHxvXv3VqhfuXLF3d396NGjTLoC4FlKCu3YQV98QW5uZGVFPj4UFEQHDtCLF6+/IDa2sj/erh3Jpt2lUuy4xp4B6wZAfOrWrXvo0KHg4OClS5dyZwezsrIGDBiwcOHCuXPnYpUeaJnSUrp8maKjKTGRIiMpKek9X195EBLRnDkk251i0yZauJAaNKhGM/Xr05EjlX1Br16UmVmNE+o4BCGoQl9ff8mSJe3bt58+fTp3dlA2ZXjhwoW1a9eamZkx7BCg5rKzKT6eoqMpJoZiYkjprZ2VefSInj4lB4d3fkG/ftSqFd24QUVF9Pvv9NNP1Ti5oSG1b1/ZFxjgW3t14NYoqG7MmDGxsbGNGzdWqG/fvt3Nze3WrVtMugJQWXk5Xb9Of/9NkydT8+ZUuzb17k3//S8dP16NFKxThwYOpJ9+ev1o6LtIJPTVV69//eefJM4NKrQEfmyAGmnbtm18fPzYsWMVZgdv3brVqVOnjRs3Dh06lFVvAFWRn08XL1JiIsXE0MmTKt5RtLMjb2/y8iJvb+rQgfSqNsSYMIGCgyktjbKyaN06mjWrqn8Q1AtBCDVlbW198OBB5SnDnJwcPz+/uXPnLlq0SA//vkFIUlJeJ190NMXHU3Fxtc9gaEht275OPl9fqldPlTaMjWnWLPr+eyKi//2PPv74PYNI0BAEIaiBbMqwS5cuEydOzMnJkdelUmlISMiNGzfCw8OtrKwYdgg6rqyMbt16nXwxMZUt8quErS25ub0e+bm5kYmJGhr7+GNavJgKCuj+fYqIoIovAwWeIAhBbYYMGXL+/Hk/Pz+F2cH9+/d36tQpIiKiZcuWrHoDHZSbS+fPv37OMzqaXr6s9hn09al5c3J1fR1+rVqR2p+GtrGhgAD66y8iomXLaPJkNZ8fqgJBCOrUokWL8+fPT5o0ae/evdz67du3O3fuvGHDhmHDhjFqDXRCSsqbYd/Fi1ReXu0zmJtTu3avk8/Li6ytNdBlRV9+SaGhJJXSuXPk5aXxy4EyBCGomaWlZURExNKlSxcsWFDO+T6Uk5Pj7++PKUNQr5ISunLldfJFRtLz56qcRP6oi6srderE90Rds2Y0eDDt20dEtHUrr5cGGQQhqJ9EIpk3b16rVq0mTpyYnZ0tr8umDK9fv75p0yZMGYLK0tIoLu7N0y6FhdU+g4EBNWv2Ovy6diWl94zxbc6c10H49CnjTnQTghA0ZfDgwbIpw5sV91I8cOCAh4dHREREq1atWPUGovPgwethX3Q03bxJKrzv
xNKSPDxeP+fp5SWsx1K6diU3N0pIYN2HrkIQggY1b9783LlzAQEBe/bs4dbv3LkjmzL08/Nj1BoInWx5nyz5YmMpK0uVkzRu/PqGp7c3deyo/kdd1GjOHBo7lnUTugpBCJplaWm5e/du5SnD3Nzc4cOHY8oQuOSPuiQmqri8z9SUOnR4nXzdu5ONjQa61IwRI2jevPdvYQqaIMErVYEfBw4cmDBhAnfKUGbAgAGbN2+uXbs2i6aAsdJSun37dfidOUOPHqlyEju7Nysc3N3J2FjNTYLWQxACf+7cuePn53fjxg2FetOmTSMiIlq3bs2kK+BZTg7Fxb2e8IuNVWWPTdnyPvlznviLAzWEIARe5ebmBgQEREREKNTNzc3Xr18/fPhwJl2BpskedZE951nz5X3e3lSnjga6BF2FIAS+SaVS5SlDIpJIJJgy1Brc5X2nTlFGhionUW0na4DqQhACGwcPHhw/fvxLpW2v+vfvv3nz5jr4gV+EUlMpIeH1hF9CAhUVVfsMBgbUrt3rG56+vuTsrIEuAZQgCIGZu3fv+vn5Xb9+XaHepEmTiIiINm3aMOkKqo67k3ViIilN/laJlRW5uwt0eR/oCAQhsJSXlzd58uRdu3Yp1M3NzdetWzdixAgmXUEl8vLo0qU3+3m+eKHKSWTL+zS3kzVAtSAIgbHKpwwXLlyor6/PqjeQuX8/PzbWLDaWYmLo+nUVH3WR7eri6UmenoTFMiAoCEIQhEOHDo0fP/6F0viiX79+//zzD6YMeVZaWnr58uXo6OjExMSoqCh7+y/Pnv2iuifhLu/z8CAjI010CqAGCEIQinv37vn5+V27dk2h7uLiEhER8eGHHzLpSndkZ2fHx8dHR0fHxMTExMQUFBTIf8vW1vXZs/fvg8ld3ufjQx98oMl2AdQHQQgCkpeXN2XKlJ07dyrUa9Wq9ddff02aNIlJV1rswYMHsuSLjo6+efNmJd8N7O1LUlLesiOjhQV16vT6Oc+uXQnvFAExQhCCsMimDIODg8vKyhR+KzAwcNWqVQYG2CBXdfn5+RcvXkxMTIyJiTl58mRmZmYV/2CXLo9jY1+vZsDyPtAyCEIQosOHD48bN055yrBbt27bt2+vX78+k65EKjk5OSYmJjY2NjY29vLly6WlpdU9g4mJyZgx2xs0GNylC3l6Ur16mmgTgBkEIQjU/fv3/fz8rl69qlB3cnLavXu3m5sbk65Eoays7NatW7IbnjExMQ8ePFDhJLa2tm5ubq6urt7e3t7e3iYmJmrvE0AgEIQgXAUFBR999NHGjRsV6iYmJn/99VdAQACTroQpNzf3/Pnzsuc8o6OjlbfseS99ff3mzZvLks/Ly6tVq1YSrO8D3YAgBKELCwubNWuW8g29wMDAlStXGhoaMulKCFJSUuTDvosXL5ZXf32fubl5u3btZMnn5eVlbW2tiT4BBA5BCCIQGRk5atSo58+fK9S7du26ffv2Bg0aMOmKfyUlJVeuXJElX2RkpPIHUhV2dnay5HN1de3UqZMu/yQBIIMgBHFITk729/dPSFBczebo6Lh79253d3cmXfEgLS0tLi5O9pxndHR0YWFhdc9gYGDQrFkzWfh17dq1UaNGGmgTQMQQhCAahYWFH3/88fr16xXqJiYmf/zxx5QpU1g0pX6yR13kyVf58r53sbS09PDw8PLykuVfLWxlDfBuCEIQmbCwsE8//bSkpEShLuopw7y8vEuXLsmSLzY2NisrS4WTNG7cWHbD09vbu2PHjnjUBaCKEIQgPlFRUaNGjUpLS1Oo+/j4bN++3dbWlklX1SV/1CUxMTEuLk452t/L1NS0Q4cOsuTr3r27jY2NJvoE0HoIQhClJ0+e+Pv7x8fHK9QdHBx27drVqVMnJl1VrrS09Pbt27LwO3PmzKNHj1Q4iZ2dnXyFg7u7u7GxsbrbBNA5CEIQq8LCwk8++WTdunUKdWNj41WrVk2bNo1JVwpycnLi4uLeupN1FcmW
98mf82zdurUm+gTQZQhCELdKpgx///13Ixbv/pHtZC172uXChQsq/BOzsLBo27atLPx8fHxq4/V9AJqEIATRO3PmzKhRo549e6ZQ9/Ly2rlzJw9Thq9evbpw4YIs+U6dOpWRkaHCSeTL+7y9vTt06KCHrawB+IIgBG3w9OnT4cOHnz9/XqHu4OCwc+fOzp07q/2KqampCQkJsgm/hISEoqKi6p7BwMCgXbt2shuevr6+zs7Oam8SAKoCQQhaoqioaNasWWvWrFGoGxsbr1y5cvr06TU8P3cn68TExBs3bqhwkgYNGri7u2MnawBBQRCCVgkLC/vss8+Ki4sV6qpNGXKX98XExCi/FqoqZMv7sJM1gGAhCEHbxMTEjBgxQnnKsEuXLjt37rSzs6v8j9d8J2szM7P27dvLkq9Lly5169at7hkAgE8IQtBCT58+HTFixLlz5xTq9vb2O3fu9PT05BZLS0svX74su+EZGRmZlJSkwhW5y/s8PDyYPK0KAKpBEIJ2Kioq+vTTT1evXq1QNzIyWrp06dixY8+fP1+Tnay5y/t8fHw++OADNTUOAHxDEII2+/PPP2fPnq08Zagaa2trT0/PLl26yHZ1MTU1VctpAYAtBCFoudjY2BEjRqSmpqr2x7k7WWN5H4BWQhCC9ktJSRkxYsTZs2dlh82aNbtz5867vtjQ0LBt27ay5zx9fX3r1avHV5sAwAaCEHRCcXHxN99889tvv3l6eo4ZM+aLL77g/q6tra2bm5tsws/NzQ3L+wB0CoIQdMj69ev79u2blpbm5ubWvHlz+XOeWN4HoMsQhKBzysvL8/PzLSwsWDcCAIKAIAQAAJ2GR+AAAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECnIQgBAECn/X9/qZc/Jes5vwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": { @@ -5776,9 +5184,9 @@ { "output_type": "display_data", "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAACCdElEQVR4nO3dd3RU1doG8GfSe0hIIR0IvReREooFvJfeUcEAigKChCIS9KrB8l2JgARE2lUgNGkCgmABG10IPYD0FNJDEtLLZPb3x8QkpEwKM3OmPL/FYg1nzpl5E2Ce7H12kQkhQEREZKxMpC6AiIhISgxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyagxCIiIyamZSF6ARv/76q7Ozc6NGjdzc3ExNTaUuh4iIdJehBeHt27c//PDDffv2FRQUKI84OTl5eHh4enqW/u7k5KR84OPj4+DgIG3BREQkLZkQQuoa1KaoqCggIODcuXO+vr6Ojo7JycnJycmqv0BlTDZq1MjT09Pd3d3Ly8vNzc3Ly8vd3d3T09PR0VFrxRMRkSQMKgjfe++9zz77zMfH5/Lly05OTsqD6enp8fHxCQkJFX5PT0+PiYnJzs5W8YKWlpbOzs5VtiY9PT19fX3NzAytSU1EZGwMJwiPHz/+7LPPCiF+++23fv361fKqvLy8yhlZmpQJCQmqL1fR7+rr62tvb//EXxYREWmWgQRhRkZGp06doqOjQ0JCFi1apK6Xzc/PT0tLqy4pk5OTi4uLVVxuZWVVPiMrJKWHh4dMJlNXqUREVD8GEoQvv/zyjh07unXrdvLkSXNzc+28aWFhYXJyclxcXFJSUkJCQkJCQmJiYnx8fFJSUlxcXHJyclFRkYrLhwwZsmTJklatWmmnWiIiqpIhBOGGDRumTJliZ2d34cKF5s2bS11OGRX9rnFxcUIIc3PzmJgYGxsbqSslIjJeeh+Ed+/e7dy5c1ZW1tatWydMmCB1OXXQs2fPM2fOrFq1aubMmVLXQkRkvPR7ZRm5XP7KK69kZWWNHTtWv1IQwDvvvANg2bJlqm80EhGRRul3EH744Ydnzpzx8fFZt26d1LXU2YgRI5o3b37//v19+/ZJXQsRkfHS4yA8fvz4559/bmJismXLltJZg3rExMRk7ty5AD7//HOpayEiMl76eo+wHvMlTp06lZmZqVw7xt3dXcMF1kpeXp6fn19KSsqxY8f69OkjdTlERMZIX4OwHvMlBg0a9OOPP5b+sfJc+NJ5fn5+fnZ2dhqr/TGLFi366KOPhg0b9v3332vnHYmIqDy9DMKNGze+9tprdZ0vERIScurUKeVsv4cPH6o+2dXVVbncaKNGjSqsROrt7W1ra/vEX0SJhw8f+vn55ebmRkZGtmnTRl0vS0REtaR/QVg6X2LLli2vvPJK/V6ksLAwNTVVuYha5Xl+sbGxqufCW1lZlV90tMLv7u7uddr76c0331y7du0bb7yxfv36+n05RERUb3oWhHK5vE+fPmfOnBk7duyuXbs09C5CiKSkpNJVY+Lj4xMTE0vXjklISMjNzVVxubm5ubI16e7u/tFHH3Xu3Fn12927d69FixZmZmb379/38PBQ65dCREQ10LMgrHJ/Ce0rXTKmyjZlUlKSQqFQnnny5MlevXrV+IKjRo3at2/f+++//8knn2i4diIieow+BWH99pfQvoKCgtLlRvv169egQYMaLzl79mz
37t2dnZ2jo6O1Nk6HiIigR0Goof0ldEdAQMCpU6dWrlw5a9YsqWshIjIiehOEkuwvoU379u0bNWpUkyZNbt26xf1+iYi0Rj9Wltm4ceOOHTvs7Oy2bdtmkCkIYPjw4a1bt75///7evXulroWIyIjoQRDevXt39uzZANasWaNTuyypl4mJSVBQEIAlS5ZIXQsRkRHR9a5R7cyX0BH5+flNmjRJTEz8448/dHk0EBGRIdH1FmFISIj+7i9RV1ZWVtOnTwewdOlSqWshqklmJt55B02awNISHh549VXExpY9u3AhZDKcOVN2JD8fMhmGDNF+pUSq6XQQHj9+PDQ0VH/3l6iHWbNm2draHjp06Pr161LXQlS9wkIMGIClS/HUU/jvfzFsGLZuRc+eiI+XujKiOtPdIMzIyAgMDCwuLn7//feNp5/Q2dl50qRJQojly5dLXQtR9daswdmzWLAAu3fj7bexbh2++QZxcfjwQ6krI6oz3Q3CN998Mzo6ulu3bu+//77UtWjV22+/bWpqumXLloSEBKlrIarG9u0wMcE775QdCQyEtzd27kRxsXRlEdWHjgahMcyXqE7Tpk2HDx9eUFDw1VdfSV0LUVWEwOXLaNIELi5lB2UydOuG7Gzcu1d2MCEBUVElv6KjtV8pUW3oYhAayXwJFRYuXAhg9erV2dnZUtdCVEl2NgoKHktBJVdXAEhNLTsyahSaNCn51aqV9iokqgudC0K5XP7KK69kZWWNHTu23rss6btu3boFBASkp6dv2LBB6lqIqlF55pXyiEm5T5XQUOzbV/LL0Kc/kf7SuaW8jGq+hArz588/efLk8uXLZ8yYwRXXSLfY2cHGBklJFY+npACAu3vZkb590aNHyeP8fK0UR1RnutUiLJ0vsXnzZiOZL1Ed5YprUVFR3333ndS1ED1OJkPnzoiOfmyyhBA4dw4uLmjcWLLCiOpFh4Kw/HyJZ555RupyJCaTyZQ3Sjm5nnTRhAkAsHhx2ZGdOxEXh8BAAIiJwc2b0hRGVHc6tMSawe8vUVcFBQWNGzdOTEz8/fff+ZMB6ZaiIvTtizNnMHo0+vbFjRv45hs4OKBnT5w/j9KZPydOICCg5HF+PqytMXgwfvhBqqqJqqQrLUJjni9RHUtLyzfffBNsFJIOMjfH//6HIUNw5AjmzMHatSgqwsOH+OEHlJ//mpYmXYlEtaUTLcJ79+517tw5MzNz8+bNgcquFQIApKWl+fr65ubmXr16tW3btlKXQ/SPHTvw8ss1n3b2LLp103w1RE9E+hahXC6fMGFCZmbmmDFjmIIVODs7T548WQjxxRdfSF0LUTkLFtTqtLg4DddBpAbSB2HpfIn169dLXYsuUq64tm3bNq64Rjpk3LhancY1uEkfSByEnC9RoyZNmowcObKgoODLL7+Uuhaif8yeDZms5tMYhKQPpAxCzpeopQULFgBYu3YtV1wjXeHjg169aj6NXaOkD6QMQqPdX6KuunXr1qdPn/T09G+++UbqWoj+UZvBMmwRkj6QLAg3bdrE+RK1N3/+fADLly+Xy+VS10IEABg3DjUu/scgJH0gTRDeu3dPuWzK6tWrjXN/iboaOnRo69ato6Oj9+zZI3UtRAAAV1c891wN57BrlPSBBEHI+RL1IJPJ5s6dC06uJ53y4os1nJCejrw8rZRCVH8STKj/z3/+89///tfHx+fy5cscKVp7pSuu/fbbb88++6zU5RABGRlo1AgFBarOuXMH/v7aKoioPrTdIkxMTAwLCzM1Nd2+fTtTsE4sLS1nzpwJNgpJdzRogH//u4Zz2DtKOk/bQZienu7l5dW4cePevXtr+a0NwIwZM+zs7H788ccrV65IXQsRgFr0jnK8DOk8bQehr69vWlra3bt3T58+rfpMIcS9e/e0U5W+KF1xLSwsTOpaiAAAw4fDzk7VCQxC0nnaDkJbW9tp06YBWLZsmYrTHj582LFjx+7du+fm5mqrNP0wb948MzOzrVu
3PnjwQOpaiAAbGwwZouoEBiHpPAlGjQYFBVlZWe3bt+/OnTvVndOwYUM7O7vU1NRNmzZpsTQ9oFxxraio6KuvvpK6FiIANfWO8h4h6TwJgtDd3X3ChAkKhWL58uUqTps3bx6AZcuWFRcXa6s0/fDOO+8AWL16dWZmptS1EAGDBsHZudpn2SIknSfNhPp33nnHxMRk06ZNqamp1Z0zcuTIZs2a3bt3b//+/VosTQ9069atb9++mZmZGzZskLoWIsDCAsOHV/ssg5B0njRB2LJly0GDBuXm5q5evbq6c0xNTefMmQMgNDRUe5XpCeWKa8uWLSsqKpK6FiKVvaPsGiWdJ9kO9X/++eczzzzj6uoaHR1tbW1d5Tm5ubl+fn6pqanHjx/ndIvyhBDt2rW7fv369u3bX67N2sdEGiWXw9sbSUlVP5uWBk4aJh0m2aLb/fr16969e0pKyubNm6s7x8bGZsaMGeAU8kpKV1wLDQ2V6kcZojJmZhg9utpn2TtKuk3KbZhKh8MoFIrqzpk5c6a1tfWBAwdu3LihxdL0QGBgoIeHx+XLl3///XepayFi7yjpMSmDcPTo0f7+/rdv3z5w4EB157i5uU2cOFEIsWLFCm3WJqHCwsK0tLQaT7O0tHz55ZdNTU0vXLighaqIatC7N7y9q34qNla7pRDVjZRBaGpqqtyMSXXP57x585RDTBMTE7VVmpQ++OCDDh06HD9+XPVpcrn8xIkTCoXC3d1dO4URqWJignHjqnwmkZ0WpNukDEIAU6ZMadiw4cmTJ0+dOlXdOS1atBg6dGhBQcGaNWu0WZskjh07tmzZsqSkJLOatjz95JNPzp496+XlNXjwYO3URlSDl16qfOxv4FMHB+3XQlR7EgehjY3N9OnTUdOKa8rZAqtWrcrJydFSZVJIT08PDAwsLi7+8MMPe/bsqeLMkydP/t///Z+JicnmzZudVcxlJtKmbt0OuroWAH8BK4CXgcZAayAuIUHqyohUkTgI8c+Ka/v37//777+rO6d37949e/ZMS0sz7BXXpk+fHhMTExAQ8N5776k47dGjR6+88kpxcfHChQu5MSHplL+mTrUFegBzgB1ANAAgnqNGSbdJH4Rubm6vvPKKQqFYuXKlitPefvttGPSKa//73/927drl6Oi4detWU1NTFWfOmDEjKiqqa9euISEhWiuPqDYmTJhQ+f9nHEeNkm6TbEJ9eTdv3mzTpo2FhUVUVFR1Qz8UCkWrVq1u3769e/fuMWPGaLlCTbtz506XLl2ysrK2bds2fvx4FWdu2bJl4sSJtra2Fy5caNGihdYqJKql9u3bR0ZGlj9iampaUFCg+sc7IglJ3yIE0LJly8GDB+fn56sYDmNiYhIc/Fm/fhGrV4/QYmnaUFRU9Morr2RlZU2cOFF1Ct6/f/+tt94CsGrVKqYg6aYXK00oLC4uTk5OlqQYotrQiRYhgGPHjvXr169hw4bR0dG2trZVnpObi8aNkZKCY8fQp4+WC9Sg4ODgzz//vEmTJpcuXXKofnydXC7v27fv6dOnR48evWfPHm1WSFR7d+/ebd68eYUPloiIiK5du0pVEpFqOtEiBNC3b98ePXo8fPhQ5YprmDEDAAxpwTXlfAkzM7Nt27apSEEAn3zyyenTp729vdevX6+18ojqyt/f/6mnnqpwkLcJSZfpShDinxXXli5dqmI4zMyZsLbGwYMwjAXXOF+CDFLl3lEOHCVdpkNBOGrUKH9//9TUop9+ul/dOa6umDQJQkDlnr56g/MlyCC9/PLLJiaPfbYwCEmX6VAQmpqavv/+j0JE/9//NVNx2jvvwNQUmzdD3xdc+/rrrzlfggySp6dnhX3T2DVKukyHghDAuHHNLS1lp0/j5Mlqz2naFMOGoaAAX32lxcrU7c6dO8qu4NWrVzdu3FjFmVu2bNm+fbutre327dstLCy0VB/Rk6nQO8oWIeky3QpCGxu8+SZQ03CYhQsBYPVqZGdroyq1K50
vERgYyPkSZJDGjh1bfr1cBiHpMt0KQgBvvQVra3z/varhME8/jV69kJYGPV1w7f333//rr7+aNGmyatUqFafJ5fIJEyZkZmaOHj168uTJ2qqOSA1cXV2ff/750j+ya7RmmZl45x00aQJLS3h44NVXH9u+auFCyGQ4c6bsSH4+ZDIMGaL9Sg2PzgWhmxsCAyEEVO8/OH8+AHzxBeRy7dSlNpwvQUaifO9oWlpafn6+hMXousJCDBiApUvx1FP4738xbBi2bkXPnmBLWjuE7rl5U5iYCEtLkZBQ7TnFxaJVKwGIXbu0WNkTS09P9/X1BfDxxx+rPvPEiROmpqYmJia//fabdmojUq/09HRLS8vSj5p79+5JXZEuqfDpFhYmALFgQdmR8HABiClTSv4YHCwAcfp02Ql5eQIQgwdrvlbDp3MtQgAtWmDoUBQUYO3aas8xMcHs2QDw+edaq0sNOF+CjEeDBg0GDhxY+kf2jpYQAgsWwNMTZmY4erTk4PbtMDHBO++UnRYYCG9v7NwJA91mQKfoYhDin57PL7+Eiv0HJ09Go0aIiMCxY1qr64l8/fXXO3fu5HwJMh7le0c5XgYA0tPx739jyRIIgeJizJ6NoiIIgcuX0aQJXFzKzpTJ0K0bsrNx717ZwYQEREWV/IqO1n75hkpHg7B3b/TsibQ0hIdXe46VFaZPB4AlS7RWV/1xvgQZoWHDhtnZ2Skfs0WIiAh06YJffik7cv06Vq1CdjYKCh5LQSVXVwBITS07MmoUmjQp+dWqleYrNhY6GoQA5s0DgGXLVHUMzJoFW1scOoTr17VWV31wvgQZJxsbm6FDhyofJxjzPvXK4X8BAYiKqvjUokUli4NU3v9AeaT8Gj2hodi3r+TXrl0aK9fomNV8ikRGjkSzZrhzB/v3Y/Toqs9xdsakSbh7V9d70T/44APOlyDj9OKLL3777bcw5hZhZiZefx27d1f77OLFsLFBUlLFp1JSAKD8Fq19+6JHj5LHHIWrPrrbIjQ1xZw5ABAaquq0lSvx009o314rNdXLsWPHli5dyvkSZJwGDhyoXCbeSO8RRkSgc+dqU1Bp0yY0a4bo6McmSwiBc+fg4gKVd1JILXQ3CAG8+ipcXHDuHE6cqPYcHd/1OiMjQ7m/xAcffMD9JcgIWVhYjBgxAkbYIhQCYWEICHhstEuVFApkZADA4sVlB3fuRFwcAgM1VyCV0ukgNIANCEvnS/znP/9RcRrnS5ABGzt2LIDo6OgjR44I3dgJXOMyM/Hii5g7F4WFtTo/Jgb+/vjyS4wZg5UrMWsWJk2Cvz8++EDDhRKg40GIfzYgPHBALzcg5HwJIgB79uwxMTGRy+UvvPBCy5YtV6xYkaNiXpQBOH++5u7QyjIzMXs2zp/H/Pn47jtMmoSTJ+HkpJkS6XFSz+iv2bRpAhDTpkldRx3dvn3b3t4ewLZt21SfuXnzZgC2trY3b97UTm1EWvPRRx8BsLKymj59upeXl/Jjp2HDhgsXLoyNjZW6Og1Yt05YWAigPr/+8x+pqzdSehCEtVlxTdcUFRV1794dQGBgoOoz7927pxxBs3HjRq2URqQ9W7dulclkpqam3333nRBCLpcfOHAgICBAGYcmJiZDhgxR9pcagkePxLhx9YxA5a9Vq6T+GoyUHgShEGL4cAGIDz+Uuo5aCw4OBtCkSZNHjx6pOK2oqEg5gmb06NFaq41IO3799VflihArV66s8FRERERgYKC5ubkyEbt06bJu3bq8vDxJ6lSPiAjh71//CGzcWHzxhcjJkfrLMFL6EYTHjwtAODuL7GypS6mFP//809TU1MzM7OTJk6rP/PDDDwF4e3s/fPhQO7URaUdERIRyTZn33nuvunMSEhJCQkJc/llRpVGjRiEhISkpKdqsUz2epDu0a1cRHi6KiqT+GoyafgShEKJnT/3oOSjdX+Kjjz5SfSb3lyBDde/evUaNGgF4+eWXFQqF6pPz8/PDw8Pb/zMX2NL
SMjAw8PLly9op9UnVuzvUxEQMGSIMpltYz+lNEO7ZIwDRpImQy6UuRaXQ0FAAAQEBcpWFZmRkKFccVfHzMpE+Sk1NbdWqFYBnn302Pz+/9hceP3587NixpeOrAwICdu3apfr/kbRif/5Z7utb5wi0sxNTp4q//5a6fCqjN0Eol4tmzUSnTiImRupSVFIoFKtXr75//77q05Qrjnbt2rWgoEArdRFpQ25urnIsTLt27dLT0+vxCnfu3AkODm7QoIEyDps2bbp48eK0tDR1V/qkVqxYsczUtG4R6OEhQkIE74PoHr0JQiFEXJzUFagJ50uQQSouLh41ahQALy+vmCf7iTUzM3PdunUtW7ZUxqG9vf3UqVNv3LihrlKfREZGhvLLfLX2Edi5swgPF4WFUtdOVdOnIDQMnC9BhiooKAiAo6Ojuu7wFRcXHzlyZMiQITKZTDndon///gcOHKjxvqPmnD9/3t/fXxnPMuBMjTcCR44Ux45JVS3Vkj4FYXCwAMTzzz920N9fDB4sUUF1x/kSZKgWL14MwMLC4ujRo2p/8UuXLk2dOtXa2lqZQC1btgwLC8vR+mSDdevWWVpall+Q5GmgWMWNQN1owlKN9C8IAXH2bNlB/QpCzpcgg/Ttt9+amJjIZLKtW7dq7l2SkpIWL17s7e2tDCFXV9fg4OAn7IOtpYyMjNHV7Ab3TYUI9PISixcL3bupSSroWRA6OopmzcTIkWUH9SgIOV+CDNLvv/+ubCctX75cC29XUFCwa9eu0r1cLCwsxo4dW+Oc3ScRERFR2h1amRsQo4zA5s3F1q28EaiP9CwIzczEunVCJhPXrpUc1Jcg5HwJMkiRkZHKEZ7z5s3T8lsrl6cxMyvZXbxr167h4eGF6s6hVatWVegOrcDExGTo0KHXf/lFve9L2qRnQQiInBzh4SEmTiw5WBqEP/8sfvxRXLkiEhOFdLfSq8X5EmR4Hjx44OPjA2DcuHHFxcWS1BAfHx8SEtKwYUNlLHl4eISEhKSmpj75K2dmZr700ksqIlA59/9a6U/lpLf0LwiLisSSJcLMTERFCVEuCNu3f6yj3slJtGkj+vcXgYEiOFiEhYldu8Tx4yIyUmRlabtyzpcgw/Po0aMOHToA6Nu3r+TLhObl5YWHh7dt27Z8RF29erXeL3jhwoVmzZpVF4Hu7u76uhocVUUvgzAzUzg5ibfeEqJcEM6aJQYMEG3bCheXmmf1uLqKdu3EgAFi4kSxYIEICxPffiuOHRN//63+mDS6+RKPHon580XjxsLCQjRqJCZP1vVFEKjuCgoKnn/+eQBt2rTRqdnux48fL51ugfouTxMeHl46QrWCjh076v364FSJTOjPhtELFyI0FEVFMDPDBx9g2TJERSEgAC1b4ocfHjuzsBCpqUhPR0IC4uMr/h4bi6IiVW/Uu3fo3bsrPD09PTw8Kv/u7u6uepfd8uRyed++fU+fPj169Og9e/bU90vXH4WF6NMHZ89izBj06IFbt7BhA9zdcfYsPD2lLo7UQwgxceLErVu3enp6njp1ys/PT+qKKrp9+/aqVau++eYb5Q7AzZo1e+utt15//XVbW1vVF2ZmZr7++uu7K+2pK5PJBg4cOG/ePGX8k6GROonroLRFKIRISRE2NuLdd0WrVnUeLKNQiMREcfmyOHxYbNwo/u//xKxZYswY0bu38PcXNjaidetA1d80JyenNm3a9O/fPzAwMDg4OCwsbNeuXcePH797926FW4BGN18iLEwAYsGCsiPh4QIQU6ZIVxOp2bx58wA4ODhcvHhR6lpUefToUVhYWGlOOzg4BAUFqVj+sMruUGUva2RkpBYLJ23T1xYhgKAgbN4MHx/4+VVsET6hzMzM+Pj4pKSkuLi45OTkuLi4pKSk+Pj4xMTEhISEtLQ01Ze7ubm5ubl5eXmZmpr+9NNPMpns119/7devnzpL1FnduyMiAklJ+GdvHQgBX19kZCAjA7VuSZPOWrNmzYwZM8z
NzQ8dOjRgwACpy6mZQqE4dOjQypUrjx49CsDExGTQoEGzZ8/u379/+dM2b948ffr0vLy80iNubm6vvvpqUFCQJzszDJ0eB2FMDJo1Q1ERBg9WcxCqVlBQ8PDhw4SEhPj4+PT0dOWD0t9jY2OLynW8ent7t23b9qefftJefRISAtbW8PbGnTuPHR81Cvv24dYtNG8uUWWkHgcOHBg1apRCodi0adPEiROlLqduLl68uHbt2s2bN+fn5wPo3Lnz9OnTAwMD5XL5tGnTvv3229IzW7RoMWPGjPJr2ZCBk7pJWgflu0aVJk0SgG7NIywuLk5ISLh06dKhQ4fmzJkDwNvbW+1zm3RUZqYARPfuFY9PnSoAceqUFDWR2hw7dszKygrA4sWLpa6l/uLj499//31XV1flB6Crq6ty60QAMpnsX//6188//yzhWqYkCX0KQr2jUCjatGkDYNu2bVLXohXKIHz66YrH33hDAOLMGSlqIvW4fv26s7MzgGnTpkldixool6fp3r07AEdHR0tLyylTpvBGoNHSp65RffT111+/8cYbHTt2vHjxYumQboMlBOzs4OqKqKjHjo8cif37cf8+GjeWpC56QgkJCT179oyOjh4yZMj+/ftrP2pa9/n7+9+7d+/XX3997rnnpK6FJGMidQEGLjAw0MPD4/Lly7/99pvUtWieTIbOnREdjfj4soNC4Nw5uLgwBfVUVlbW4MGDo6Ojn3766R07dhhSCgLIysoCUDoTn4wTg1CzLC0tZ86cCWDp0qVS16IVEyYAwOLFZUd27kRcHAJrmJFCuqmoqGjMmDEXL1709/c/ePBgjfPw9EtRUdHDhw9NTU1dSgc5k1Fi16jGpaen+/r6ZmdnX7p0qWPHjlKXo2FFRejbF2fOYPRo9O2L27exfj18fHDuHJycpC6O6kYI8dprr23atMnV1fXkyZPNDW7Qr3KtVA8Pj/jyfRhkfNgi1DgnJ6fXXnsNwPLly6WuRWNOnsSpU8jNhbk5jhzBggU4fx7z5+O77zBpEk6eZArqo//85z+bNm2ysbE5cOCA4aUggISEBAAeHh5SF0ISY4tQG6Kiopo3by6Tye7evatcrd/QdOqEy5dx/jy6dKn6hNu3IZejSRNYWWm3Mqqn9evXT5s2zdTUdO/evcOGDZO6HI04cODA8OHDBw8e/IM2ZyKT7mGLUBsaN248evTooqKiVatWSV2LZiQkAICKn6zffRdt2uDgQa1VRE/i0KFDM2fOlMlk69evN9QUBFuE9A8GoZbMnz8fwNq1ax89eiR1LeomlyM1FSYm+GeSchWUSfnPzGXSZefOnXvxxRflcvmiRYuUvfqGKjExEQxCYhBqzVNPPfXMM89kZmZ+8803UteibklJUCjg5oZ/9gqvQo1NRkNx5swZW1vbUaNGSV1IPd29e3fIkCE5OTlTpkxRLhlvwJQtwkb8+czoMQi1R9koXL58eZHqXaD0Tm1CLjERMIoWYUJCQm5urkKhKD0yadKkDh06nDlzRsKqaik1NXXgwIHJycmDBg1au3at1OVoHLtGSYlBqD2DBg1q27btgwcPdu3aJXUtaqUMORWfJhkZyMuDvT3s7LRWlFQqNzKuXbt29epVE5OS/2u3bt3q3bv33Llzpamverm5uUOHDr19+3bXrl137txppqJ9bygYhKTEINQemUym3Mjt888/N6jBujXe/zOaflFU9dla4cj9+/dPnjwZGRlZesJnn302efLky5cva7fSxxQXF48fP/7MmTNNmjQ5dOiQnRH8yAIGIf2DQahVEyZM8PDwuHLlyq+//ip1LepTY84ZcRAqFIqUlBSZTObu7q48UnmAxk8//RQeHl66z2VCQsKsWbO++uorbZYdFBT0/fffN2zY8Mcffywt1bAJIZKTkwEYyddLKjAItcrS0nLWrFkwsBXXauwarfEEQ1FQUKCckVaac6mpqUVFRc7OzhYWFsojlftOK2Tn3bt3V61atW3bttIT9uz
Zs2LFirt372qo7I8++mj16tXW1tYHDhxo2bKlht5F1zx8+LCwsNDJycmKc1uNHoNQ26ZPn25nZ/fzzz9funRJ6lrUpJZdo0YwUubdd99NSkoC4ObmpjxSY09p5SOVT9iwYcOcOXP+/vtv5R+VY4/V1amwbdu2jz76yNTUdNu2bb169VLLa+oF9otSKQahtjk5OU2ZMgWGtOIau0YBAEeOHAkLC1M+9vLyUj6oMfZycnKys7Otra0dHR1VX1LaiLx79+7rr7+uvN+sdP78+WPHjj18+LCuNf/444+TJ08WQixfvnzkyJF1vVyvMQipFINQAnPnzjUzM/v2229jYmKkrkUdGIRAamrqpEmThBAmJibl7wjWo/1X+SZijZd89tln/fr1K20jFhYWRkREKE9T4fz58+PGjZPL5e+9956yx96oMAipFINQAn5+fmPGjDGQFdeEQFISAKgYcWAE9winTJmSkJDQvXt3hULh4uJibm6uPK5MtfJ3BCvkXHW3DEuPyOXy1NRUExOT2ne33r9/v1u3bn379i094eHDhxWWNLp///6QIUOys7NffvnlTz/99Mm/A3qHQUilGITSUE6uX79+fW5mptS1PJm0NBQUoEED2NhUe46h3yNcs2bNgQMHGjRoEBISgpruCFaIxhpTLTk5ubi42NXVtXRiX42NyMonLFiwoEGDBhs2bCg98vXXXycmJj7//PObNm2SyWRP+i3QQ5V/RiHjJUgin7z6akTr1mLJEqkLeTKRkQIQrVurOqdBAwGI1FRt1aRV169ft7GxAbBjx468vLwrV65ERESUPpuamhoREREXF1d6JCYm5tSpU6V/zM/Pv3Xr1p07d0qPZGRkXL9+PSsrq/RIYmLizZs3S/9YVFQUFxeXlJRU/kWio6MLCwtLj2RnZycmJpavMz09PTs7u/SPr7/+OoBVq1bV+wvXd+PGjQOwfft2qQsh6XEbJukcPozBg+HlhXv38M/Yev1z9CgGDMCzz+K336o+IT8f1tawsEB+Pgyu5VFQUNCjR49Lly699tpr+rWK7LZt21555RU3N7ebN282aNBA6nIk0Ldv3+PHj//+++/PPPOM1LWQxNg1Kp2BA9GxI+LisHOn1KU8gVqOlGnUyPBSEMC777576dIlf3//0vGi+mL8+PH9+vVLTk7++OOPpa5FGrxHSKUYhNKRyTB7NgAsWQL9bZcb8ZBR5XwJMzOzbdu22dvbS11O3chksrCwMFNT0y+//PLq1atSlyMB7sFEpRiEknrlFXh74+pVHDkidSn1VeO2EgYahKXzJT7++OPu3btLXU59dOrUaerUqXK5fM6cOVLXom3Z2dnK6ZsODg5S10LSYxBKytwcM2cCgP6uuFZjzhno3InXX389ISGhT58+CxYskLqW+vv0009dXFx+++23vXv3Sl2LVrFflMpjEEptxgw4OuLIEVy8KHUp9dLLAjP7oK13tScY4tyJNWvWfP/99w0aNNiyZYupqanU5dSfs7PzRx99BGDOnDm5ublSl6M9DEIqj0EoNQcHvPYaAHzxhdSl1IvsDFyPw9ut2hOaKfB6Pzzlr8WaNOvGjRvKaaBr1qzx8/OTupwnNW3atE6dOsXGxi5ZskTqWrSHQUjlMQh1wLx5MDfHzp3QxxXXshMBwK76DxTzS/D+E80ctVaRRhUUFIwfPz43N/e111576aWXpC5HDUxNTVetWiWTyUJDQ6OioqQuR0sYhFQeg1AHeHtj7FgUFWHlSqlLqaOiXBRkwswKVg2qPSc7AQDsDeQT57333tPT+RIqBAQEvPTSS3l5ecqWrjHgkFEqj0GoGxYsgEyG9euRkSF1KXWhDDk7lff/amwy6o/S+RJbt27Vu/kSqi1ZssTOzu677777+eefpa5FGyov8UrGjEGoGzp2xHPPISsL//uf1KXURZYyCKsPOVGMnBTITGDrqrWiNEQ5X0KhUHz88cc9evSQuhw18/Lyeu+99wDMmzevqKhI6nI
0jl2jVB6DUGcoe6VWrEBhodSl1FqN3Z45yRDFsHGBibnWitIQw5gvocLbb7/dokWL69evf/XVV1LXonEMQiqPQagz/v3vkhXXduyQupRaq7FFWOMJeiL366/l8fFOTk7btm3T6/kSKlhYWCxbtgxASEhIjXsZ6jsGIZXHINQlc+cCerXimvL+n4oWoWGMlLlxw2b27IMREWc3bfLx8ZG6Gg0aMmTI4MGDMzMz33//falr0aDCwsK0tDQzMzMXFxepayGdwCDUJePHw8cHkZH45RepS6mdGgfLGECLsKAAEyYgN1f26qvNhg2TuhqNW7FihaWl5aZNm/766y+pa9EU5Q5W7u7uJib8ACSAQahbzM3x1luA/qy4VmPO1dhk1H3vvYeLF+HvDwOaL6GCv7//nDlzFArFzJkzFQqF1OVoBIeMUgUMQh0zfTocHXH0KC5ckLqUWqix57M28yt02ZEjCAuDmRm2boVhzZdQ4f333/fy8jp//nx4eLjUtWgEbxBSBQxCHePggClTAD1Zcc2wB8ukpmLSJCgU+PhjGNx8CRXs7OxCQ0MBLFy4MEO/JrbWDoOQKmAQ6p65c0tWXIuOlroUlRRy5D2EzFTVHEG9Hizz+utISECfPjDQ+RIqGPa2vQxCqoBBqHu8vTFuHORyXV9xLScJQgFbN8iqn06gvy3CNWvw/fdo0ABbtsBA50uooNy29+XWrT/84w8Y3La9yvXVeI+QSjEIdZJycv2OHdDlNT6yanH/Lyep5nN00I0bJX8Fa9ZA//eXqJ9OnTptf+aZBhcvYt48qWtRM7YIqQIGoU7q1AlbtyIyEuY6vCBLjd2e+emQ58PSEeY2WitKDQoKMH48cnPx2mswiP0l6u/TT+HigqNHYVjb9jIIqQIzqQugakyYIHUFNantSBl9aw6+9x4uXTKe+RKqODvjo48wcybmzsW//w0bvfqBpnp6HYRff/31nTt3ABQWFubk5FQ+QS6XZ2VlVT6uUCgePXrk4zMuNvaNCk9lZqK4uIr3ystDfn7Fg8eOwcurXqXrMAYh1Vct507o10iZ0vkS27YZz3wJVaZNw//+h0uXsGQJQkKkrqa+4uNx7Rqio5GaqsjPT05MlMlk7kePokULtGuHBg2krq8Odu/e/csTLLjx9NP9zp59ogJ0+XZNvTEIdVhmJj75BHv2ID4ezs7497/x8cfQnSW+atxfSe9GypTOl/j0U3TvLnU1usHUFKtWoU8fhIZi0iQ0bix1QXX04AGOHCm/5XVqbm5RcXFDGxvL2FjExuK339C2Lfr3h6N+7B09ZcqU5557DoCFhYWtrW3lE8zMzKrcI8zExMTR0dHMrJFcXvEpB4eqB4RZW8PKquJBw2sOgkGouwoLMWAAzp7FmDHo0QO3bmHDBhw5grNn4ekpdXEAgIJMALBzr/YEvWsRGvF8CVUCAvDSS/j2W8yfjz17pK6m1oTA77/jxIkKK/cmZmcD8LCzKzstMhK3bmHIELRvr/0y62rcuHFSl2CAOFhGV61Zg7NnsWABdu/G229j3Tp88w3i4vDhh1JX9o9R2/CfPLQYWu0JJU1GPblHaNzzJWqwZAns7PDdd9CXbXsVCnz3HY4fr7x+fUJWFoBGpUGoVFiIvXtx6pTWCiSdwiDUVdu3w8QE77xTdiQwEN7e2LkTxcV49Ei6ysoxs4KpRbXPDliCt+PR5XUtFlRfnC+hmpcX3nsPAObN0497RD/9hGvXqnwmQdkirPIG8JEjuHJFo3WRbmLXqE4SApcvo0kTlN8mRiZDt27Ytw9376J9e8hkcHaGpyc8PODkVPKg9HdfX5hJ/ZcrM9GPG4ScL1Ebb7+NTZtw/Tq++gpz5khdjUo3b+LcueqeVLYIPSq0CEv98AN8fODkpKHSKisuLk5MTIyJiYmPj3/w4MGDBw/i4+NjY2NzcnI2btzYoUMHrVVizKT+rKQqZWejoACVN0tzdQWA6GhYWSEzEwkJqG4DVRMTuLvD3R2
ennB3h5dXyeNGjeDhgUaNYG2tnlKPLsTJ0H/+IIOlPdw7oOtUdAhUz+trwbvvcr5EzSwssGwZhg5FSAhefBE6O/eguBg//aTi+UQVLUIARUU4cgTqvg+Xn58fFxcXHx9fOfASExOLq5y7AMyaNevPP/9UbyVqsXAhQkPx/PM4erTsYLNmaNUKP/wgXVlPgEGowypvz6s84uCAR4+Qm1sShImJJb/HxyMpCXFxSEpCcnLJs5cuVf3i7/SHXxzsGsHeE3busPeCnTvsPWHrDntPWDWoW6l934eTPxRyZMUj8lvsm4jUm3ju07p+xRLgfInaGzIEgwfj0CG8/z6++Ubqaqpx7RpULhSu7BqteI+wvBs38PAhGjas6ztnZWXFxsaWBl5cXFxcXFxsbGx8fHxKSkpdXw1AdQGpI379FefOoVs3qetQBwahTrKzg40NkpIqHlf+d3J3BwAbG/j7w9+/2hdJT0d8PBISKv6eno6YGJg8ROoNpN6o+lpTS1g7w94T9h6w94SdB6ydSh7Ye8LRFyaP/8tpPhje/+zPEPAO1nbGyc/ROxgWOh8tv/wCIfDxx5wvUSsrVuDoUWzahKlTdfQ7dvmy6udr6BpVunIFzz5b5TPp6enx8fEJCQnK3+/du1f6ID09vb5FV01U/lFYZzg6wtUVn31mIIsOMQh1kkyGzp1x8iTi48smSwiBc+fg4lLbuVxOTnByQtu2VT+bk4rcJGTFIzsRWQnITkB2ErLikJOMzAcozEZ2ArITUGXPq8wUtm5oMwYDq1oW3Mwavr3x8CYyH8Clda1KldCSJRgwAP37S12HnvD3x5w5CA3FzJk4exa6tsN7cXH5KYNVUjVY5h/pkZH3HBwqB15sbGyVi7ZoiC63CHNy8PnnmD4d16+jTRupq3liDEJdNWECTp7E4sVle1Ds3Im4OMydq57Xt3WBrQtcq4lJeT7y0pCdgKx4ZCUgK/6xxznJyE6AvNLiSyUEEi/BxBzZiTj+X9i6lfW72jWCnUed+1017YUXpK5Ar7z/PrZuxfnzCA/Hq69KXc3jHj5E5enij1PeI2xoYxOflZWQlXUvPT0+KyshO7v0jzGPHskVCq2UW4OHDx/u3r278vG8vLz8ykufAUVFRdnZ2VW+VEZGcJXNy0ePUOXXmpVV9TcyJwfLlgGAXI5XXsGiRQgNhQHs3yzT5da3USsqQt++OHMGo0ejb1/cvo316+Hjg3PntDmkrWqKIuQkA4C9V8lgmfGH4NEFohgZUfhrJa7tQvcguHfAgarmTtS131UtdHyZHv2ybRteeQVubrh5U7fWJ7t3D1u2VPlMzKNHp2Njj0VHrz53TgYY26eeqalQV/Py9Gns34/QUBQVISwM776LO3fg56ffg2UYhDosOxuffIJduxAXBxcXDBmCTz4puUGoOx4bNQoAMLXE0zPRfzEexSLmBLITkZ2A7ERkxZf0vhZW/UNrCWW/q70H7Dxg1wiXvNHQtWzUq7t7FYs+1aiwEH36VFymx91dh5bp0S9C4Nln8eefmDsXX3whdTXl3LmDbduUD4uKi68kJZ2IiTmfkHAiJub+PzfwTGQyhZ586DVo0GDAgAGVj1tbW1tV9b+gusXVADg7fyaErPJxR8equ7ft7auefmVri4AAfPZZSRDm5cHPDxMm4Msv9TsI2TWqw+zsEBqK0NCaz5Tcv5bDpRVkMlg6wL0DzG0BwKkpnJpWcXJt+l2Vy7MJW3xUaX19K6uKkybLz6T08ICs0n/40mV6Sr+ZAQGYNAkffoivv1bn98FIyGQIC8NTT+HLL/Hqq7qzMllUaurpq1fPPHhw5sGDi4mJReUaQQ1tbLp7efXw9v6/48cL5PIdY8Y8KihIzM5OzslJyMpKyslJzsmJz8rKKSyUsP4K/Pz8du3aJXUVqtjbY+ZMLFuGDz6o4r+dHmEQkjp49ygbNVojMyvYe8LeEx5dq3hW2e+aGYecJKSnQhZTMg9EOTMkORn5+bh3D/f
uVf3i1tZl0yU9PODujrFjq16m5z//wc6dWLeOC6rVR6dOeOMNrF2LuXMfm02mXUVFRVeuXDlx4sT58+ePHTsWHR1d+pSpiUkbV9eunp69fX0DfHxau7qayGQAtl65cuvhww7u7q2Vs3Ifly+Xp+XlJZiaxvfunZ6erhwsU/ogPj4+Q+XcDPVS6MatStVmz8YXX5RMQdJf+lw7GSQTc9h7wf6fJe4rD9HPy6tiTkjp7+npuHsXd++Wnd+hg6pleu7dQ/Pmmv2KDNX//R/27MGvv2LvXowapbW3jY+PP3nypDL8IiIiCgoKSp9ydHTs5uER0KhRVw+P3r6+TlWtGuHj6Hjr4cPYzMwqg9DKzMzT3t6zS5euQ6teRDcvL680FyvEZHp6+oMHDzIzM9X1lepFELq4YMoUrF6t3zfcGYSkb6yt0bQpmlbV6QogJwcPHiA5GfHxJUsNNG6sapme1FQGYT2Vbtv73XeaDcKcHJw7t/bChR///PPMmTPJycmlz5iZmXXu3Llnz549evTo0aNH8+bNcewYfv9dxYv5ODgAiFW9Wm91k44Aa2tra2trT0/Prl2r6s8AMjIyEhISUlJS4uPjk5OTk5OTlQ8SExMTExOTk5OLar1Yqy5Pnyhv/nysXYvISD1eppdBSIbF1hYtW6Jly7Ijyolf1S3To2sz4fTLtGnw8sKwYep/5fh4nDyJEydw/jzOnUNh4dEePQ6cOQNls69bt4CAgK5du/bp06dBhWGrXbvi+HEVkyh8HB0BxKpot7m6okmTehfeoEGDBg0atG5d7Qza1NTUpKQkZUCmpKSUBqQyL1NSUuT/FF/lSBkd5OuL8eP1exIFR42SoRMCdnZwdUVU1GPHR47E/v24f1//Nps1SFlZOHsWp0/jzBn89RdSU8ueMjdHx46/DRuW0LRpz549m1bXGVDqjz9Q/RKd/zt/furBg6917vzN8OFVnzFhApo1q8dXoBZCCGUcpqen9+7dW6bXQ1D0B1uEZOjUskwPqVa/aZr37pW0+c6fx9mzj23w1KgRnnoKXbuid2/06gUbm+dqX0yfPrh1q7r16GtoEXbpImEKApDJZO7u7u66NkvK0DEIyQhoepkeI1dYiAEDKk7TPHKkimmaWVm4fLmkz/PMmceafWZmaNMGvXsjIABdu6q4S1czU1O89BI2bqxy9W1V9wibNsWgQfV/X9JbDEJSh9NfIO0OBq+Wuo5qvP46Nm/Gl18iPr5smR5/f3zwgdSVGQTV0zRLm30nT+LixcdW9PLwQNeuJc2+gAC1bQ0GwMEBr76Kb79FYmKFZ3yraxG2bYsRI3R/Ls2DBw+SKi3HX1hYmJNTacYtIISoMN/D1NShuPhfADIzUeVYnOzsqrdezsuDclm3SZNQ/Q1QfcV7hKQOP82BzAT/0qVFRirQi2V69FT37oiIQFJS2dBcIeDri4wMbNqEMWPKzrS0RNeu6N4dPXuiZ094e2u2MLkcf/6J06crfOQ7fvZZZkFBWnBwyfwKGxv074/OnTVbjJq88847S5curfflXl694uJOPkkB+/ejurur+ostQlKHAaEo1qElOaqgR8v06BchVE3T9PCAry969UKPHujeHV26wMJCe7WZmeH55/H00zh3DteuIS1NedjH0fFacnJsVpaTvz86dEDnzlqt6sn4+PhUnrlhYWFha2tb+WSZTFZhVK2tra+y6ahiEbUqvxnW1iWLGxrAXhOVMQhJHUwtYWopdREkhexsVdM0ZTKUW+1FGvb2eO45PPccsrLw8CFyc32OHr2WnBzbv3+HkSMlrq3ugoKCgoKCpK7C0HASFT2xy5uxpgN+mCZ1HSQdvZimaW+Pxo3Rpo1P69YAYitvfE3Gii1CemIPbyH5Kux4v80o2dnBxgaVQyUlBYBu3oX18fEBEBsbK3UhpCsYhPTEer2NtmMBzvw1Sno4TZNBSBXoUscF6SkrJ7h3hHsHqesgiUyYAACLF5cdUU7TDAyUqiLVGIR
UAadP0JO5dRAnl8DBC/aeeGGZ1NWQFIqK0LcvzpzB6NFl0zR9fHDuHJycpC6uCrdu3WrZsqW/v/+dO3ekroV0AoOQnszJUBxdCAB2jfB21YtakeHTq2maeXl5tra2FhYWeXl5XMyTwCCkJ5WdgNSbyIqDohgdJ0pdDVGtuLi4PHz4MCkpyc3NTepaSHocLENPxs4Ddh5SF0FUNz4+Pg8fPoyNjWUQEjhYhp5IzHHsGI5DM/Dnx7jwPxRmSV0QUa1wvAyVxxYhPYGkq7h5oOyPrUbCwl66aohqi0FI5TEI6Qm0GAx7D2TGIScZWXGwbih1QUS1wiCk8hiE9AQc/eDoJ3URRHXGIKTyGIRUX4mX8NcK2HvBzh32nrBrBLf2sHSQuiyimjEIqTwGIdVX8lVc2vTYkUm/o/EzktRCVCe+vr5gENI/OI+Q6ivtDqL/LLtBmJ2EkZvh3EzqsohqVlRUZGVlZWJikp+fb6rzu9KTpjEIicgYeXp6JiQkPHjwwMvLS+paSGLsGqX6uHsXX30FLy+4u8PTE40awcNDN9eVJKqaj49PQkJCbGwsg5AYhFQfV69i+fKKB1euxKxZUlRDVHc+Pj5nz56NjY3t0aOH1LWQxBiEVB+tW2PJEsTFITkZcXFISkJcHBo1krosolrjwFEqxSCk+mjZEi1bSl0E0RNgEFIpBiHVWUICvv76sRuEbm7gyDvSLwxCKsUgpDq7cQMffvjYEVNTuLnhzTfxwQcS1URURwxCKsUgpDrz8sJ77yE+HklJePAAyclITkZCAuRyqSsjqjUGIZXiPEJSg6IiJCfDwgKurlKXQlQ7xcXF1tbWxcXFeXl5FhYWUpdDUmIQUt1kZGDrVnh7w82t5DahlZXUNRHVi5+fX0xMzP379xs3bix1LSQldo1S3dy+XXGyoJMTPDzQqBH69at475BIl/n4+MTExMTGxjIIjRyDkOrGwQHTp5fcIFTOI0xPR3o6rl9HgwZSF0dUF7xNSEoMQqqbli2xZs1jR1JSkJSE+HgGIekZBiEpMQipDvLysHdvyQ1CT084OgKAqytcXdGundTFEdURg5CUGIRUB1FReOWVsj9aW8PDo+QGoacn/Pzw9tvSFUdURwxCUmIQUh2Ym+PFF8tuEObk4N493LtX8myzZgxC0h9C+NjZAYj9+2+cPAkbGzg7w8sLZvxUNDr8K6c6aNYMO3aU/TEnp2TF7fh4JCbC0lK6yohqLzsbf/2FK1d8EhIAxD54gKNHS54yM0OLFnj6afj5SVkhaReDkGpLLscvv5TMHXRzg4kJbG3RogVatJC6MqJaEgKnT+OPP1BUBMDVxsbKzCw1Nze3qMjG3BwA5HJcv47r19GiBYYMgb29xAWTVjAIqbYSEzF4cMljMzO4uT12g9DNDd7eGDgQXKODdFRhIXbvxp07pQdkgJeDw920tLjMzOYNGz528q1bWLsWL78Mb29t10laxyCk2pLL8a9/lcwdTE5GfDzi4yuek5cnRWVENZLLsW0bYmIeOyiT+Tg43E1Li60chAByc7FlCwIDmYUGj0FItdW4MX76qeRxYWHZlrwJCUhIQGIiMjK43BrpqkOHKqYgAMDH0RFA7KNHVV9VWIidOzF9OmxtNVodSYtBSPVhYQFvb/6gTHrizh1culTlMz4ODgBiMzOrvTY7Gz/+iDFjNFMZ6QQTqQsgHbJwIWQynDlTdiQ/HzIZhgx57IT+/R+7qlmzshOIdI4QZYNCK6mhRah07RoSEtReF+kOBiHV2a+/4tw5qYsgqqXYWCQlVfdkzS1CJf6LN2gMQqobR0c0a4bPPpO6DqJaunFDxZO1ahEC+PtvcMc6w8UgpLrJycE772D/fly/LnUpRLVR1RiZUrVtEeblITVVjUWRTmEQUkUJCYiKKvkVHV3xWbkcr7yCRo0QGipFcUR1lZam4kkna2s7C4tH+fmZBQVP8jqk1xiEVNGoUWjSpORXq1ZVnGBhgXnzsH17FTFJpHN
UJtyNlBRLMzM3W9uh27dfT0mp9+uQXmMQUkWhodi3r+TXrl1VnzNtGuztsXSpdisjqgdT0+qeOfPgQe8NGx7m5qbn5x+Lju68du3bP/+cXt2qENW/Duk7BiFV1LcvRowo+TV0aNXn2Ntj5kx88w2SkyGTlRz8+28Vo/OMSFxcnNQlUDnVrBf66717L2zZkpaXN7Rly3tBQUHduxcL8cXp081Wrgw9caKwuLiWr0MGgEFI9TR7NmQyhIWV7FqTm4tRo9C+PQ4elLoy6Vy7dm3AgAHe3t4BAQEKhULqcggA4OFR+dj2q1cHbtuWVVAwsWPHvS++6O3ouGLgwCtvvjmwefO0vLyFR492WLNm97VrZRfIZHB3117NpF0MQqonFxdMmYLVq0uCMCcHXl5IScHw4QgKQn6+1PVpV3p6elBQUKdOnY4ePQrg1KlTB435JwKd4u9f4cCqs2cD9+4tKi4O6t5904gRZiYlH4NtXF0PT5hwZOLENq6uN1NTx+3e3X/z5ivKXg4fH24zZsAYhFR/8+cjNxeRkQDg6opffkFYGCws8OWX6NKlujWtDE1xcfHatWtbtGjx5ZdfyuXy0uNLeQdVR7RtW35LlNATJ2YdPiyECB0wYMXAgbLSnv1/9G/a9NL06euGDnWxsfn13r3Oa9dO3LcvycdHu0WTVjEIqf58fTF+fNkfZTLMno2ICLRvjxs30LMnQkNh2B2Ef/75Z9euXd98883USpPMTpw4cfr0aUmqosdYWqJbNwDFCsX0H35YePSoqYnJ/4YNWxAQUN0V5qamU7t2vTlrVnDv3mYmJlsuX242cuSiRYvyja2joxzDXl5RJrhcAqlbXh4WLsSXX0II9O+P8HB4ekpdk7rFxcW9++67W7duVfE/aPTo0Xv27NFmVVS1wsLCL78M3Lhx17Vrlqam28eMGdW6dS0vvZma+sGdO7t/+gmAr6/vJ598EhgYWLkdafAWLiyZOnz2rPLnCgBo1gytWuGHHySsSz3YIiT1s7bGihX48Uc0aoSjR9Gpk0GNoMnLywsNDW3VqtWWLVtU/xy5b9++O+W2gSWpZBcWDj14cNe1aw2srI5OmlT7FATQcujQXT/+ePTo0Q4dOsTExEyaNKlXr17G2dY34OUVGYSkKf/6Fy5dwsCBJSNopk1Dbq7UNT2xgwcPtmnTZuHChdnZ2TWerFAoli9froWqSIWHDx/279//lz//bOTq+sfrr/f29a3DxZ07Y8AAAM8///zFixfDw8Pd3d3PnDkTEBAwbty4GJWLtxkeA15ekUFIGuTujkOHSkbQrF+Pp57S4xE0ly5d6tev37Bhw6Kiomp/1aZNmyrfPiStiYqK6tWr119//dW0adPjp051fO89ODvX6kpTUwwYgKFDS+fJmpiYTJw48c6dOyEhIZaWlrt3727dunUtfyQyDAa8vCKDkDSrwgiaHj30bwRNWlra7Nmzn3rqqWPHjtX12tzc3NWrV2uiKqrRtWvX+vTpc+vWra5du54+fbpZs2bw8MD06Xj2WVhbV3uZTIbWrTF9Onr1QqV7gXZ2dosWLbp582ZgYGBpJ/n69esNddpoZia2bStbb9xQl1fkYBnSEn0cQSOXy1evXr1o0aL09PR6v4ibm1tUVJS1ik9e0oC//vpr8ODBDx8+fOaZZ77//nsHB4fHnpbLcfs27t5FUhJycgDAzAwuLvDzQ6tWcHSszVscO3Zs7ty5Fy5caNCgSdeuNz/+2LxXLw18JVLIzcWvv2L3buzdi5wcrF2L+/cRGoqiIuTlwc8PEybgyy8NZ7AMBJEW/fSTaNRIAMLVVRw4IHU1Kh05cqRt27Zq+V/222+/Sf3VGJeDBw/a2NgAGD58eG5ubo3ny+Xy+r1RcXHxxo0bhwy5CAiZTLz4ooiKqt8r6YTMTLFtmxgxQlhZCUAAwsRE9OsnDhwQwcECEEVFQgjx/vvC2lokJYlmzcTgwVIXrQ4MQtK2xEQxaFDJf7PAQJGTI3V
Bldy9e3fEiBFqiUAAHTp0KFJ+fpBWbNmyxdzcHMDkyZNr853Pysp64YUXli1bVu93zMkRixcLOzsBCAsLERQkHj2q94tJIDdXHDggAgOFrW1Z/gUEiLAwERdXck75IExJETY24t13RatWJUGYlyfS06UqXw0YhCQBhUKEhQlLSwGI1q3FxYtSF/SPnJyckJAQKyurJ88/b2/voKCg48ePKxQKqb8sI7JixQoTExMAwcHBtfnOJyUlde3aFYCXl1dmZuaTvPWDB2LqVGFiIgDh4iLCwkR925laUpv8K1U+CIUQs2YJR0fRrp0YPFgUF4sxY0SbNiI6WstfgdowCEkyFy6I1q0FIKysxKpVBdIGhkKh2LVrl2+dxtZXxcXFZerUqcw/7VMoFCEhIQBkMtnSpUtrc0lUVFTLli0BNGnS5Pbt22op49w50adPSa60bi0OH1bLq6pTnfKvVIUgjI4W5uYCEIMHi5QU0batAISXl7h8WTtfhJoxCElKeXkiKEjIZKJnz7D+/fvHqfiPqEnnz58PqH7BrdpwdnYODAw8cOAAe0ElIZfL33jjDQBmZmYbNmyozSXXrl3z9vYG0K5dO7X/wztwQDRtWhIz/fuLyEj1vnx9lOafsgu3lvlXqkIQCiEmTSoJQiFEerp45hkBCDs78eOPmvoSNIdBSNI7fDjBxcUFgJub2w8//KDNt05NTQ0KCjKt756rTk5OyvwrLCzUZtlUXn5+/pgxYwDY2Ngcrl0T7K+//lL+k+vXr19GRoYmqiooEGFhwsFBAMLcXEydKpKTNfE+NXjC/Ku9ggIxfnzJXdItW9T5ylrAICSdkJiYOGjQIGW6BAYG5mh+CE1hYWFYWJhj7QbKV+Do6KjMv4KCAk3XSaplZWX1799f+UPJiRMnanPJkSNH7OzsAAwbNqw2Y0qfREqKCAoSpqYCEM7OYvFioZ1/Miry78EDTb2pQiFCQoRyAG1IiKbeRRMYhKQrFApFWFiYpaUlgNatW1/U5BCa+k2NsLa2HjJkSHh4uBZymmojMTGxU6dOADw8PC7X7vbU1q1blWNKJ02apLV+7OvXxcCBJYHUooXYtUtTbyRJ/lWwYkXJiKEpU4S+3ChgEJJuiYyMbN++PQBLS8vFixcXFxer9/Vv3bo1pI47x1hZWSnzLzs7W73F0JO4d+9e8+bNAbRq1Sq6dgMWV65cWacxpep15EjJoBJAPP+8OseVVM4/QLRpIxYv1l7+lbd3r7C2FoAYPlwX50dVxiAknZOXlxcUFKTc6UaNI2iys7OVq0TWNf+ecFQ9acLVq1e9vLwAPPXUU8m1uPlWfkzpkiVLtFBhlQoLxbp1wtW1pK0WGCgSE+v/atXlX0iIuHtXfUXXy+nTwsVFAKJ7d2lujtYJg5B01E8//dSoUSMArq6uB55sERqFQhEeHq58tRqZmpr2798/PDz8kX5NijYmf/zxh/Lm7nPPPVebv6byY0q/+eYbLVSo2sOHIjhYWFiUDLMMCRF5eSXDMp9//rEz/f2rWLpFl/OvvOvXhZ+fAIS/v7h1S+pqVGIQku5Sywias2fP9uzZszb5FxAQEBYWVpvmBUnowIEDyoVbR44cmZeXV+P55ceUHjp0SAsV1tKNG2Lw4JIYmzKlJAgBcfZs2Tnlg7A0/+ztK+bfnTuSfAU1i48XXboIQLi7i4gIqaupHoOQdNqTjKCJi4ubOnWq8p5QdUxMTJT5l/gkXVSkLeHh4WZmZgBmzJhRm/vHWVlZAwYMQF3GlGrZ0aOiSxcRGSmCg4Wjo2jWTIwcWfZsaRBOmVI2/10mEz17iuXLRUyMVFXXQVZWyUAhW1uh3blRdcAgJD1Q1xE0yqkRFTccqCr/4uPjtfMlUBUUCvHggThxQuzbJ7ZtE9u3i++/F6dPV3ffLCwsTHnnODg4uDYvn5iY2LlzZ9RlTKmEgoOFmZlYt07IZOLatZKDpUGonKKn4+2/6hQViddfF4AwNRVr10pdTVW4DRP
ph/z8/ODg4C+//FIIobyH51nNNk779+9/++237927V/kpmUzWs2fPcePGjR07trrLSRuKixERgb/+QnX7W7m5oVcvdOig3BFQCBEcHLxkyRKZTLZs2bK5c+fW+A5RUVEvvPDC7du3mzZt+ssvv/j7+6v3K1C7hQsRGoqcHDRrhgEDEB4OoGyfozt3YGGBJ14BUCOKi1HjihRC4KOP8NFHABAcjMWLtVBXHTAISZ/8/PPPkydPTkxMdHV1/eabb4YOHVr+2Zs3b86dO/fHH3+sfGGbNm3Gjh0bGBio+x+Ihi8+Hvv2lW32qoKXF0aNKnZ0nDp16oYNGywsLMLDw1966aUar7t27dq//vWvuLi4rl27Hj582M3NTQ1la5gyCIuKEBaGd9/FnTvw89ODDf9CQ/Hrr9izB9X3v5TZsAHTpkEux+TJWL8e5uaar692GISkZ5KSkl577bXDhw8DCAwMXLt2rY2NTUZGxuLFi5cvX15YWFj+ZGX+jR8/vkWLFhLVS4+7fh379kEur+XpBWZm40+c2PvTT7a2tnv27Pn3v/9d4yVnzpwZMmTIw4cPn3322f3796voIdcppUGoRzvfZmSgZUskJ+Opp/DDD3B3r/mSAwfw8svIzcWAAfjuO9jba77KWmAQkv4RQqxcuTI4OLigoKB169ajRo1at25darkWRrt27caNG/fiiy8y/3TLnTv49lsoFLU8PSM/f+j27SdiYpwbNPjh8OHajP49ePDgiy++mJeXN2LEiG+//VYtO2ppR2kQmpnhgw+wbBmiohAQgJYtdTcIAdy7h4EDcesWmjTBjz+iZcuaLzlzBkOHIjUV3brhhx9y3NxsNV9mDVQNqCPSTTKZbPbs2adPn27duvWNGzdCQ0OVKdiyZcsPPvggMjLy6tWrH3zwAVNQt2Rm4rvvap+CidnZz2zadCImxtfR8dSMGT27dKnxki1btowePTovL+/NN9/87rvv9CgFK5g9GzIZwsJgZiZ1KTVp2hSnTqFXL9y/j169cOJEzZf06IG//kLz5rCyWt+jR/ubN29qvswaMAhJX3Xu3DkiIqJRo0Zyufy11167ePHi33///fHHH9djEVHShp9+Qn4+ANSiF+peenqfDRsuJya2dnU98dprLS0s8Oefqi9ZsWKFcvnQ4ODg1atXq542o+NcXDBlClav1oMgBNCwIX75BUOGIC0N/ftj9+6aL2naFCdOFBcUbLh//36fPn3OnDmj+TJV0eN/K0Q2NjZyuRzAf//7X+Xiy6SjkpJw40bJY5lM9bnn4+N7fv31nbS0bl5ex1591Ue5Q8i5c8jJqfJ85ZjSOXPmAFi2bNliXRuSWC/z5yM3F5GRUtdRO7a22L8f06ejoAAvv4yvvqr5Ejc3099++3XIkCEpKSnPPvvs7trkp8YwCEmPFRUVpaWlmZqaKveWI9114UItT/wjKuq58PDknJznmzb9deJEFxubkifkcly+XPn84uLiN9544/PPP7ewsNi+ffu8efPUVbK0fH0xfrzURdSFqSnWrMHixVAo8NZbmD275l5wW1vb/fv3T58+PT8//+WXX169erVWKq0CB8uQHnvw4IGPj4+np2dcXJzUtZBKK1YgI6PGs77/+++X9uzJl8vHt2+/acQI8wrT0/z8MHly+QMFBQXjx4/fu3dv7ceU6rilS5Gbi1dfhY+P1KXUV3g43ngDRUV45RV88w0sLGq+JDQ09N133xVCBAUFLV++XPvd2mwRkh5LSEgAUMvVtEkyubm1ScFNly6N2bUrXy5/6+mnt4waVTEFASQklL+/mJGRMWDAgL179zo7O//yyy8GkIIA1qxBSEhtvlu6a9IkHD4MBwds3YqBA/HoUc2XBAcHb9y40dzcfOXKlZMnT64wCUoLGISkx5RB6OHhIXUhpFItPtdXnzv36v79xQrFp8899+WgQSZV3kcsLERenvJhYmLis88+e/z4cU9Pzz/++KNXr15qrVgaaWm4fx82Nmj
dWupSnkz//jh+HF5e+O039O6N2NiaL5k0adLhw4cdHBy2bNkycODAR7XJT/VhEJIeYxDqh6KiGk+RAZampp8PGPCfvn1rfCnlUMNLly61bt36zJkzynVoDUBEBIRA5876MVhUtQ4dcOIEWrVCZCR69MCVKzVf0r9//+PHj3t5ef3222+9e/d+8OCB5ssswSAkPcYg1A+1uE308927BcXFWTX2iVlYXL16tXfv3nfu3OnWrduff/7po7830yqJiACAp56Sug41adwYJ06gVy/Ex2PhwqXHjx+v8ZIOHTocP368VatWkZGRPXr0uFKb/FQHBiHpscTERPAeoe5r0KDGU+b36gVg1dmzOSqy0NISVlbz5s2Lj4//17/+9fvvv7u6uqqvSukZWBACaNgQR49i1qxffvppwQsvvLBnz54aL2nSpMnJkyd79+4dFxf3zDPPHDt2TAt1MghJj7FFqB+sreHsrPqU3r6+PX180vLywquaI1HCywsy2bfffhscHHzgwAFbW+mX5lIvwwtCANbWWL78+RkzZuTn548bNy40NLTGS5ydnY8ePTpu3Lj09PQXXnhhx44dmi6SQUh6jEGoN2qxBuXbPXsCWHrqVHF1E9BatADg4uKyePFii9qMytcrKSmIjYW9PQxvZUBTU9NVq1Ypt5NcuHDh7NmzFTXNMbS0tNy+ffvMmTOVM2Q+//xzjVbIICQ9xiDUG1261LigzMjWrZs5O99PT9//999VPG1ujg4dNFKbbjh7FgC6doU+rw2nyuzZs3fu3GllZbVy5cqxY8fm/TMAuDrl4zM4OLg28VlvBvotJyMghEhOTpbJZO612f2FpOXignbtVJ9iIpPN7dkTQOjJk1U83aMHrK01UZqOUPaLdusmdR2aNGbMmMOHDzs6Ou7du/f5559PrcWelOXjc9y4cfnK5WrVjUFI+io1NbWwsNDJyUl/NxkwLv/6F0rv6lWzoNXkTp1cbGzOxcWdiIl57AkXF/Tpo+H6JKYMwq5dpa5Dw5599tkTJ074+PicPn26X79+MRX+oqsyZsyYQ4cOOTo6fvfdd88///zDhw/VXhWDkPQVh4zqGVtbjB1bMkWumm5SG3PzGd26AVh66lTZUWtrvPiiDm1nrhnnzwMGN1KmSu3atTtz5kynTp2uX7/eo0ePixcv1njJc8899+eff3p6ep46deqZZ56Jj49Xb0kMQtJXvEGof/z88NJLqqcVznz6aWtz8wN//30jJQUA7OwQGAhDX1T9wQMkJMDZGU2bSl2KVnh6ev7+++/PPPNMQkJC3759f/rppxov6dix47lz51q2bJmenq72tYUZhKSvGIR6yd8fb7wBT8/qnneztZ3YsaMAws6cgb8/pk6FEfwVl06cqGlEkeFo0KDBzz//PH78+Ozs7OHDh2/durXGSzw9PceNGxcXF1ebk+uEQUj6ikGor1xc8PrrGDEC1XRrz+vZ00QmC796NbF/f9jba7k6SRjkDMIaWVhYbN26NSQkpLCwcOLEiYsWLarxkuvXrwPoqu5bqfq/pB0ZK+U9QgahXpLJ0LEjOnbEw4eIikJKCnJzIZPBzg6uri2aNBkWG7t///7Vq1d//PHHUteqDUYyUqYymUy2aNEiJyenefPmffTRR7169XrhhRdUnH/u3DkA3dQ9uJb7EZK+evHFF3ft2rV9+/aXX35Z6lpIzZSLbDk7O8fExBjeCjKVubkhJQUxMXq8DeET2rNnz6lTp7744gsV56SkpLi5udnZ2T169Ei9exaya5T0FbtGDVhAQEDPnj3T0tI2bdokdS0ad/8+UlLg6mq8KQhgzJgxqlMQ/zQHu3btqvadexmEpK8YhIZt/vz5AJYtW1ZcXCx1LZp17hwAPP201HXovIiICABPaeBWKoOQ9BXvERq2ESNGNG/e/P79+/v27ZO6Fs0ynhmET+j8+fNgEBKVysrKys7OtrGxcXBwkLoW0ggTE5O5c+cC0PSCy5IzziGj9cAWIdF
j2Bw0BpMnT3Z1dT137lxt9nTVU0LgwgXAKIeM1klcXFx8fLyjo6O/v7/aX5xBSHpJeYOQ66sZNmtr6xkzZgBYunSp1LVoyq1byMiAt7cxLBvwRJTNwW7dusk0sOgAg5D0EkfKGIlZs2bZ2toePHhQOZPa8LBftJY0d4MQDELSUwxCI9GwYcPAwEAhRFhYmNS1aERk5Gdduix87rkLUhei65RzJxiERGUYhMbjnXfeMTU13bx5s/Iv3cCcOHH4woXQFi1SpC5E17FFSFQR92AyHk2bNh02bFhBQcHq1aulrkXNiouLlZsQdenSRepadFpUVFRKSoqLi4ufn58mXp9BSHqJLUKjsnDhQgCrV6/Ozs6WuhZ1unHjRk5OTpMmTVxdXaWuRaeVjpTR0OszCEkvMQiNytNPP92rV6+0tLSNGzdKXYs6aW5inIFhEBJVgUFobJQrri1fvlwul0tdi9owCGtJ+Y1S++5LpRiEpH8KCwvT0tLMzMxcDH3jcio1fPjw1q1bG9iKawzC2hBCaHSkDBiEpI8SExOFEO7u7mpfhJ50lomJSVBQEAxoxbWioqIrV67IZDKOlFHtzp07GRkZHh4enp6eGnoLfo6Q/uH6asZp8uTJ7u7uERERx44dk7oWNYiMjMzLy2vRokWDBg2krkWnKdvNT2tyew4GIekfrq9mnKysrN58800YyoprGp0hbkg0fYMQDELSRxwpY7SUK6798MMPBrDimvK+l0Y/3w2DFu6kMghJ/zAIjZazs/PEiROFEMuXL5e6lielbBFqbkqAYVAoFMo1B9giJHoM7xEas/nz55uamm7ZskWXV1zLy8tTfUJ+fn5kZKSpqWnnzp21U5KeunWruEOHS8OHf+fm5qa5dzHT3EsTaQjvERqzpk2bDh8+fO/evV999dWnn34qYSWFhYWpqakJCQn37t2Lj49XPlA+Tk5Ozs3NtbCwqO7ay5cvFxUVtWvXztbWVps1652//jI/ebLpmDFNNfouDELSP+waNXILFy7cu3fv6tWrFy5caGdnp9H3ksvlCQkJMTExsbGxDx48iI2NjYmJefDgwYMHD5Q9E1WysrJKSkry8fGp7gTOIKyl8+cBze9azCAk/cMgNHLdunULCAg4efLkhg0blJMLn1x6enr5hl3pg5iYmOrWsjE3N3dxcfH09GzatKlyllvpg8aNG6ue5MogrCXlfo2avpEqE0Jo9h2I1EqhUFhZWcnl8ry8PEtLS6nLIWns379/5MiRjRs3vn37tplZbX+gT09Prxx18fHx0dHROTk51V3l5ORUOeqaNm3q5+dnampav/rbt28fGRl55syZ7t271+8VjIFcDgcH5OcjNRXOzhp8IwYh6Znk5GR3d/eGDRumpqZKXQtJRgjRtm3bGzdu7Nix48UXX1R98pgxYyIjI2NiYlSMYXF3d/f29vb29vbz81M+8PX19fHx8fT0rH3Q1lJubq6jo6NMJnv06JG1tbV6X9yQXLqEzp3RvDlu3dLsG7FrlPQM+0UJgEwmmz179vTp05cuXfriU0/h/n0kJ0O5SZOVFVxd4eeH5s1hbg7g7t27N2/eBGBlZVWhVad87Ofnp+l7jeVduHBBLpd37tyZKaiasl9UC/3HDELSM9ySl5QmBwYu+s9/IiIi/vj002caN37sufv3cfYsLCzQtSv69Nm4caOlpaWvr6+ODNHkDMJa0s5IGXAeIekdtggJABISLDdseLNDBwBLT52q+pzCQpw+jVWrOllbt27dWkdSEFxTptbOnQM0P1IGDELSOwxCwu3b2LgRDx++9fTTthYWh2/dupacXO3JubnYsQOnT2uxvhpoeptZw1BYiMhImJhAC0sOMAhJzzAIjV1MDHbuRFERAGdr68mdOgngi+pyrnQw4C+/4MIFbZWoyqNHj27fvm1lZdWuXTupa9Fply+joACtWsHeXuPvxXuEpGfKr6+2dOnSHj169O7dW+qiSFvy8rBnD4qLSw+83bPn2oi
IbVevfvrccx6VPzJlsrLHhw/Dywvu7lopFAAKCwvj4+PLz8GPjo6+efOmcjdNtQ9GNTBaGykDBiHpndL11U6dOhUcHCyTyd57770PP/yQHytG4fffkZVV/kATJ6eRrVrtuX79y7Nn//v886quLS7GoUN47TW1F1XdZPzo6Ojicpldys3NLTo6evLkyV9//bW5ubna6zEMypEy2glCziMkPdOsWTPlaPimTZt++umnn376aXFxcbdu3bZt29a8eXOpqyNNys5GWBgqRcu5uLin//c/J2vrmLlz7apf3rPExIlo0qQeb56UlFTaqlM+UK67Fh8fX93SM2ZmZh4eHsr5iN7e3j4+Pr6+vt7e3tHR0a+99lpmZuZzzz23d+9eR0fHetRj8Dp2xJUrOHUKPXtq/L0YhKRn7OzscnJyMjMz7e3tAZw5c2bChAn37t2zt7dfunTp1KlTpS6QNObUKRw5UuUzfTduPB4dHfbvf8/u0aOGF2nfHqNGVfdkXl5elYtox8TEZCsnKValuqVnfH19q+uouHLlyqBBg+Li4tq1a/fjjz96e3vXULaRyc2FoyOEQGYmbGw0/nYMQtInKSkpbm5utra25T+VMjMzZ8yYsW3bNgBjxoxZt26ds0aXYyKphIcjKqrKZw7cvDn8228bN2hwOyjITOUin7C2zp81Kz4hoULUJSQk3Llz59GjR9Vd5+TkVDnqPDw8GjduXL+JGffv3x80aNDff//t5eV1+PDhDh061ONFDNWpUwgIQMeOuHRJG2/H2yqkHzIzMzdu3Pj555+7urpOnjy5/FMODg5bt24dPnz41KlT9+zZc/bs2S1btvTt21eiSkljqt+AcGiLFq1dXW+kpOy5fv2ldu0AFBYXp+bmJmRl3UtPv5eeHp+VlZCdrXyQuHBhdQ2AykvPKB80b97cwcFBvV9NkyZNTp48OXz48BMnTjzzzDP79+/nP9pS2hwpA7YISfdFRUWFhYV98803ylZgp06dfvrpJ/eqxv5FRUW98sorJ0+eNDExeeutt5YuXcqRCIajoACLF6t4/n/nz089eNDJyqqps/ODzMyk6nsyra2sfP9ZULTCyqJqT7saFRQUTJw4cdeuXZaWlps2bXrppZe0XIBumjgRW7ZgzRpMn66Nt2MQku66dOnSF1988e233yoHIwQEBAQHBw8ZMkRWfkz84+RyOUfQGKacHCxdquL5fLl83s8/771xQxmBFqamDW1sPO3tmzo5edjZKR80dXLysLf3mD5d1qyZtuquWXFx8ezZs7/66iuZTLZ48eIFCxZIXZH02rTBjRs4d46jRsmInThxIjQ09IcffgBgbm4+YsSId955p/YrcXAEjQGSy/F//1fjWX9ERdmYm/s4ODSys6v2B6YpU6B7g1NWrFgxb948hUIRFBS0fPly1dsZGrbsbDg6wswMmZnQzk5rxvu9Jh1UWFi4efPm9u3b9+nT54cffrC3tw8KCrp79+6uXbuaNm36xRdfVDkrq7IePXpcvHhxwoQJWVlZ06ZNGzt2bFpamqaLJ80yM6vNEiPPNG78tJeXh729im4DODmpszA1mT179s6dO62srFauXDlu3Lj8/HypK5LM+fNQKNChg5ZSEAxC0hGZmZkrVqzw9/efNGlSZGRko0aNQkJCoqOjV6xY4ePjA2DatGlvv/32s88+GxMTU5sXVI6g2bVrV4MGDfbs2dO5c+djx45p+IsgDVNLM87JCTqz+nYFY8aMOXTokKOj43fffff8888/fPhQ6oqkoZ1d6ctjEJLEoqOjFy5c6OfnN2fOnAcPHnTo0GHdunVRUVGLFi1yKveT+4wZM7y8vI4fP96+ffutW7fW8sXHjh178eLFgICAmJiYZ599dvbs2UVFRZr5OkjzWrXSlRfRmOeee+7EiRM+Pj6nTp3q27dvLX/sMzDKINTm5hwMQpLMpUuXJk6c2Lx589DQ0IyMjICAgAMHDly6dGnq1KmWlfpEnnvuucjIyJd
eeikzMzMwMHDcuHEZGRm1eZfGjRv/8ccfISEhMpls5cqVAQEBt2/fVv8XQ1rQps2TNuZkMq1+vtZLu3btzpw507Fjx+vXr/fo0eOSdmbS6RItz50Ag5AkceLEiaFDh3bp0mXLli0Axo4de/bsWeVBFbd2GjRo8O2334aHh9vZ2e3evbtz584nT56szduZmZktWrToxIkTTZs2PXfuXNeuXdevX6+2L4a0xswMTzjTrlMnNGyopmo0yNPT848//ujXr19CQkKfPn1+/vlnqSuqs7Q0bNiAgQPrMyP+l1+waxfatlV/VdXhqFHSnsLCwh07dixZsiQyMhKAvb39q6+++vbbb/v6+tbpdW7evDl+/PgLFy6YmZn95z//+eCDD0xNTWtzYUZGxvTp03fu3Alg99y5Yz78EA0a1P3rIOkIgU2bUL8OQwcHTJ8Oa2t116QpBQUFr7766rfffmthYbFhw4YJEyZIXVHN0tNx8CB278Yvv6CwEADefRf//a/UZdWEQUhakZWFDRsy163zefAgMyurUaNG06ZNmz17tlN9x+8p5wt+8sknCoWiR48e27Zta9q0aS2v3b1797oPPjiSnCyzt8eWLU/ayCAty87GN9+gdh3jZSwsMGkSPD01UpLGCCE++uijjz76SCaTffjhh4sWLZK6oqpVzj9TU/TogbFj8dJL2tz5qp4YhKRhMTEIC8PXXyt3zwl7+eWGAwe+9NJLalnz5bfffps4cWJcXJyDg8NXX331yiuv1PLC4jt3TF95BX/9BVNTvPsuQkLAXZz0yKNH2LYNKSm1Pd/GBi+/rINzB2updIrh66+/vmbNGt3ZcUxd+bdwIUJD8fzzOHq07GCzZmjVCj/8oP6yK2MQksZcvoxly7Bjh3IzcQQEIDgYQ4ZAxQSvusvIyHjzzTd37NgBYOzYsevXr29Qy95OuRyffopPP0VxMbp1w7Zt4Bo0eqSwsGTT+Ro/wfz9MWwYtL52mnrt27dvwoQJeXl5I0aM2L59u7WkHbwZGThwQJ3tP2UQAjh7tmzWBIOQ9NyJEwgNxaFDEAImJhg0CB98gKef1twbbt68eebMmdnZ2Y0bN96yZUsd9qw/cwYTJuDePdjbY+lScA0a/ZKYiNOncf06Ku8IKJPB3x89esDfX4rK1O/06dPDhg1LTU3t3r37wYMHXV1dtVxAenr6zz+nbd7s/+uvJflnZoZnn8W4cRg58okGIS1ciLVr4eqK9u2xd2/JQW0GIQSRuhQWil27xFNPCUAAwt5eBAWJ6GjtvPnff//dpUsXAGZmZiEhIXK5vLZXPnokJkwoqXnMGPHwoSbLJA0oLBT37okzZ8TRo+LIEXHypLh5U+TmSl2W+l27ds3Pzw+Av7//7du3tfOm6enp4eHhQ4YMsbCw6NZtCCBMTUVAgAgLE4mJ6nmL4GBhZibWrRMymbh2reSgv78YPFg9r18jBiGpQ2amCAsTPj4lceLuLkJCRFqalqsoKioKCQlRLtLYo0ePu3fv1uHiXbtEgwYCEL6+4s8/NVYj0ROJj4/v3LkzgEaNGkVERGjujdLT0zdt2jRo0CALCwtlq8nMzGzgwMHr1ytSUtT8XsHBAhA5OcLDQ0ycWHKQQUj6IyFBhISURAgg2rcX69aJvDwJK/r111+9vLwAODg4bNmypQ5X3r8vAgIEIExMRFCQKCjQWI1E9ZeVlfXvf/8bgK2t7aFDh9T74uXbf8r8MzU1DQgICAsLS0hIUOMbFRWJX34Rn3wixD9BWFQkliwRZmYiKkoIBiHph0uXRGCgMDcvicCAAHHggFAopC5LCCHS09NL93UbO3Zsenp6ba8sKhIhIcLUVACiWzdx65YGqySqr6Kiotdff13ZSlu3bt2Tv2Bp/pUu6qSh/JPLxfHjIihINGpU8slx925ZEGZmCicn8dZbQjAISdcdPy6GDBEyWUnjacgQ8ddfUtdUhXXr1tnY2ABo1bx5/tmzdbjy9GnRtGnJbU51fMo
QqZ1CoQgJCQEgk8lCQkLq9yIZGRkV8s/ExERD+XfkiJg6Vbi6luQfIFq3Fh9+KBISyoJQCPH++8LaWiQliWbNGIQklUePxPz5onFjYWEhGjUSkyeLmJiSp5RjYbp1K/lXbGcngoJKejF0lXIEzZFnnhFmZiIkRHAEDRmWr7/+Wjmt8NVXXy0sLKzTtSkpKVZWVqXtv+eee27NmjVJSUlqLK+4uGL7DxBNmoigIHH8eNlp5YMwJUXY2Ih33xWtWjEISRIFBeLpp0s++pcuFVOnCjMz4eUl4uKEEOKZZ0r+IXt6isWLRe37GyVVUFBQHBwsTEwEIPr0qVtycwQN6bz9+/crez5eeOGFzMzMOl3bs2dPTbT/apl/pcoHoRBi1izh6CjatWMQkiTCwgQgFiwoOxIeLgAxZYoQQqxeLZo3F2Fh0o6FqadffxXe3gIQDg6CI2jIsPz111/KaYXdunWrU5OuqDR81KGu+VeqQhBGR5eMPWAQkhSeflqYmIjyg6MVCuHtLezshFwuiop0ZCxMPaWni5deKvnfOXZsHVq0HEFDOu/OnTvNmzcH0KRJk7///lubb13v/CtVIQiFEJMmMQhJEgqFsLQU/v4Vj48cKQDD+fQPDxd2dgIQfn61/W+qdOyY8PUtGUGzf7/G6iOqp8TExKeeegqAs7PziRMnNP12pfnn4VGWf40b1yH/dAf3I6R/ZGejoAAuLhWPK1dySk3VfkUaMXEiIiLQpQuio/Hss1i0CMXFtbqwTx9cvYoJE1BYCD8/DVdJVGfu7u5//PHH4MGD09LSXnjhhYMHD2riXYqLi3///fcZM2a0bJnepw9WrkRCAlq0wH/+g0uXcP8+VqxA7Zc41BEMQnpc5bVnlUdMDOifSsuW+OsvhIRAocBHH6F3b9y9W6sLHRywdSsuXUKnTpqtkKhebG1tv//++2nTpuXm5o4cOXLNmjXqemWFQnHixInZs2f7+Pgox5c2avR948YICsLx47h5E59+io4d1fVu2sZFt+kfQsDODq6uiIp67PjIkdi/H/fvo3FjSerSoN9+w6RJePAADg746ivUehcnIh0XGhr67rvvCiGCgoLCwsJk9d3ypbi4+Pjx47t37967d29iYqLyYPPmzceOHTtu3KSOHVuor2QpMQipnN69cfIk4uLK9i8VAj4+KCiow95v+iUjA2++iR07AGDsWKxfzz3ryTBs2rRp6tSpRUVFEydO/Prrr+u0A6hCoTh16tTu3bt3796dkJCgPNi4ceNhw4aNHTs2ICCg3smqoyS+R0k6ZfVqAYhZs8qOfPutAMTcudLVpBV1GkGjYs0BIl3yyy+/ODg4AHj++ecfPXpU4/nK9l9QUJCHh0dpRvj5+QUFBR0/flyh14PGVWKLkMopKkLfvjhzBqNHo29f3L6N9evh44Nz5+DkJHVxGnbzJsaPx4ULMDPDf/6DDz6AqWkVpxUWok8fnD2LMWPQowdu3cKGDXB3x9mzZc1oIp0RERExZMiQpKSk9u3bHz582Nvbu/I5pe2/PXv2xMfHKw/6+fkNHz7cMNt/lUmdxKRjsrLEggXC21uYmwsPD/HGG2rbc0z3KecLKteg6dFD3LlTxTmq1xwg0j337t1r2bIlgMaNG1+/fr30eGn7z7Pcz3DG0P6rjC1CqkqXLrh4ERER6NpV6lK07vffMXFitSNoundHRASSksrmmQgBX19kZCAjo+pGJJHU0tLShg8ffuLECScnp71795qZmRl1+68SBiFVxcMDiYl48ABeXlKXIoWHD/HGG9i3DwAmTsS6dVCuTSwErK3h7Y07dx47f9Qo7NuHW7fQvLkE1RLVQm5u7vjx47///nsbG5vc3FzlwaZNm44dO3bs2LFdjfBH3nLMpC6AdE9xMVJSYGICNzepS5FIw4bYuxebN2PmTCQm4p8dampec4BBSLrKxsbmu+++69Kly4MHD+zs7F566SWjbf9VxiCkSpKTUVwMNzfUZby1AZo4Ed2
7w9ERFT4pjGHNATJEpqamlpaWaWlpf/zxR79+/aQuR4cwCKkS5bShcuOnjVfLlo/90c4ONjZISqp4mnKSpbu7lqoiqpfCwsIrV66YmJh07txZ6lp0C3+GpUoYhNWRydC5M6Kj8c8QAwAQAufOwcXFAFfeIcNy5cqVgoKCli1bKicXUikGIVWiXEiJQVilCRMAYPHisiM7dyIuDoGBUlVEVEsREREAlDtUUHnsGqVKlC3CRo2krkMnvf46Nm/Gl18iPr5szQF/f3zwgdSVEdXg/PnzAIx8gGiV2CKkStg1qoK5OY4cwYIFOH8e8+fju+8waRJOnjT8lXdI/507dw5At27dpC5E53AeIVWinBW3ezfGjJG6FCJSj/z8fAcHB4VC8ejRI1tbW6nL0S1sEVIlbBESGZyLFy8WFRW1bduWKVgZg5AqUQ6W4T1CIgPCkTIqMAipEuU8OQYhkQFhEKrAIKTHpacjLw8ODmD/CZEBYRCqwCCkx/EGIZHByc7OvnnzpoWFRYcOHaSuRRcxCOlxDEIig3PhwoXi4uL27dtblq4gT+UwCOlxDEIig8N+UdUYhPQ4rq9GZHAYhKoxCOlxXF+NyOAwCFVjENLj2DVKZFgePXp0584dKyurtm3bSl2LjmIQ0uMYhESGJSIiQgjRqVMncyPfart6DEJ6HO8REhkWZb8o19pWgUFIj+M9QiLDogxC7r6kAoOQysnLw6NHsLTkpkJEBoMjZWrEIKQyUYmJnd3dZ3TvDplM6lqISA1SU1OjoqLs7OxatWoldS26izvUU5m4+PhLSUlWTZpIXQgRqcfly/Fubh1atHAwNTWVuhbdxRYhlUlISADgwZEyRIbizJkOycmXAwJ+lboQncYgpDKJiYlgEBIZkHPnAKBDBwupC9FpDEIqo2wRNuKQUSJDEREBAJw6oRqDkMqwa5TIkCQmIi4Ojo5o1kzqUnQbg5DKMAiJDMnZswDw1FMcBl4DBiGV4T1CIkNy/jwAcAJhjRiEVIb3CIkMifIGIYOwRjIhhNQ1kE6Qy+VWVlZCiIKCAjMzTjAl0nvu7khOxv37aNxY6lJ0G1uEVCI5Obm4uNjV1ZUpSGQAoqORnAwXF6ZgzRiEVIIjZYgMCftFa49BSCUYhESGhCNlao9BSCUYhESGRLmmDIOwNgw/CBUKxb59+0aMGLFmzZpff+WCe9VSzp3gkFEiAyAELlwAGIS1Y8jDIvLy8jZv3rxs2bLbt28DOHTokEKhmD9//ieffGJhwZX3KmKLkMhg3L2LtDQ0agQvL6lL0QeG2SJMTU0NDQ1t1qzZ9OnTb9++3aRJk+XLl//3v/81NTX9/PPPu3btevXqValr1DkMQiKDwSVG68TQWoT37t1bsWLF119/nZubC6Bz585z5swZP368ckrAM888M2HChMjIyO7du3/22WezZ8+Wul4dwiAkMhgcMlonhtMiPH/+/MSJE1u2bLly5cq8vLz+/fsfOHDgwoULEydOLJ0Y161bt/Pnz0+dOjUvL2/OnDkjR458+PChtGXrDq6vRmQwGIR1ovcryygUikOHDq1cufLo0aMALCwsXnzxxeDg4LZt26q4as+ePdOmTUtLS3N3d9+4cePAgQO1Va+OEkLY2Njk5+fn5OTY2NhIXQ4R1Z9CAScnZGYiKQlublJXow/0OAgLCgp27ty5ePHiGzduAHBwcJg8efKCBQu8and3OCYmJjAw8NixYzKZbNasWUuWLDHmETRpaWkNGzZ0dHTMyMiQuhYieiI3bqBNG/j6Ijpa6lL0hF52jT569GjFihVNmzadNGnSjRs3GjduvHjx4piYmBUrVtQyBQH4+vr+/vvvYWFhZmZmK1euNPIRNLxBSGQw2C9aV3oWhPfv3589e7aXl9ecOXPi4+M7d+4cHh5++/bt4OBgR0fHur6aiYnJ7NmzT5482bx5c+UImhUrVmiibN3HICQyGAzCutKbIFQOe2nRosXKlStzc3OrHAtTPxxBA+C
3334Dg5DIIDAI60zoNoVCceDAgf79+yurtbCwCAwMjIyM1MR77d6929nZGYC7u/vhw4c18Ra6pri4+MCBAz169ADQp0+f33//XeqKiOiJyOXC1lbIZCI1VepS9IfuBmFBQUF4eHibNm2UEejg4BAUFBQbG6vRN42Oju7bty8AmUwWFBSUn5+v0beTUFZW1ooVKxr/s0GLm5vb4sWLpS6KiJ7Uw4dizBjRr5/UdegVXRw1+ujRo02bNi1ZsiQuLg5A48aNp0+fPn369HrcBawHhULx5ZdfLliwoLCwsF27dtu3b2/fvr0W3ldrkpOTV69evWrVKmUPsL+//6xZs9544w3OmiAiIyV1Ej/m3r17QUFBtra2yto6deoUHh5eVFSk/UrOnj3bvHlzANbW1mFhYdovQBNu374dFBRkbW2t/PZ27do1PDxcLpdLXRcRkZR0JQgvXLgQGBhYOuwlICDgwIED0paUmZk5depUZT0jRoxI1ece9+PHj48dO9bU1BSAiYnJkCFDTp48KXVRRKQewcECEM8//9hBf38xePBjJ5w+XfZsXp4Ayk4wchKPGhVCHD16dOjQoV26dNmyZYuJiUlgYODVq1dPnDgxdOhQaWuzt7dft27dnj17nJ2d9+/f37Zt2x9//FHakupKoVAcPHiwV69effr02b17t5mZWWBg4LVr15QHpa6OiNTp119L9iCkupIsCAsLCzdv3tyuXbsBAwb88MMPyrEwd+/eVR6UqqrKRo8effHixb59+yYlJQ0ePHj27NkFBQVSF1WznJyc9evXt27detiwYadPn3Z1dQ0ODr5///7mzZtbtWoldXVEpGaOjmjWDJ99JnUd+kmCIMzMzCxdF+b69eseHh4hISHR0dErVqzw9vbWfj01Kl2DxtzcfOXKlU899ZQur0GTnJy8aNEiPz+/adOm3bp1y9/fPywsLCoqavHixZwmSGSocnLwzjvYvx/Xr0tdij7SZj/s/fv3K4+FKSws1GYNT0LHR9BwLAyRcVLeAszJER4eYuLEkoOV7xHu3Svu3y/59fffvEdYRktBWOVYGIVCoZ13VyPdHEFTeSzMiRMnpC6KiLREmXNFRWLJEmFmJqKihKgqCCv/YhAqabZrVKgcCyOTyTT67pqgUyNoVIyFCQgIkKoqIpLKtGmwt8fSpVU/GxqKfftKfu3apd3KdJyGAla5LkzppoD29vZBQUExMTEaejvti46O7tevHyRagyY/Pz88PLxly5bKb6+Li0twcHB8fLw2ayAiHVHaIhRCvP++sLYWSUmiWTNOn6gt9bcIy4+FuXbtmnIsjHKPJB8fH7W/nVR8fX1/++037Y+gSUlJWbRokZeX16RJk27evNm0adOwsLDo6GiOhSEiALNnQyZDWBiebDMCI6PeXM3Pz3f7Z0fkzp07b926VY/GwtSP1kbQ3Llzh2NhiKiy8i1CIcSsWcLRUbRrV4cWYUGBWLBA+PgIOzvRp4/46y8tVq8D1NwitLS0HDt2rHIszPnz5ydMmGBubq7et9A1WtjF6fz58xMnTmzZsuXKlSsLCgqGDBly5MiRiIiIiRMnKgfIEBGVmj8fubmIjKzDJZ9+ip07sX07rl5Fq1YYOhR5eRqrT/eov2t0xYoV+jsWpn40NIJGORYmICDgqaee2rJli3IsTGRk5MGDB0v3pSIiqsDXF+PH1+2SuDj83/+hd280bozPPkNyMm7c0ExxOkkXd5/QXzExMRMnTvzzzz9lMtmsWbM+//xzS0vLerxOQUHBzp07P/vss7///huAi4vLlClTgoKCPD091V0yEdFjLl5E166IiYFOLnCiEQxCNXvCXZxSUlK++uqrr776KjU1FUDTpk2DgoK4RxIRaUdODnr3Ro8eWLNG6lK0iEGoEefOnZswYcLt27etra0/++yz2bNn13jJ3bt3V65c+fXXX+fm5gLo0qXL7Nmzx48fb8axX0SkFenpGDQItrY4eBD/DMszCgxCTcnKypo
/f/769esBjBgx4n//+5+Li0uVZ54/f37FihXbt28vLi42MTEZNGjQ7NmzeReQiLQpPh4DBqBdO4SHw8pK6mq0i0GoWd99993UqVPT0tLc3d03btw4cODA0qcUCsWhQ4dCQ0NPnjwJwNLScty4ce+++27r1q2lq5eIjFF6Onr3Rp8+WLMGRjPMsQyDUONiY2MDAwPLj6ABUH4sjKOj46RJk4KDgzkWhogk8eqruHYNP/0Ek39mEtjYwMJC0pq0iEGoDcXFxZ999tlHH30kl8v9/Pyys7OVcw39/f3nzp376quvciwMEUmluBjm5qgQBWvWYPp0iQrSOgah9pw7d27MmDHFxcVxcXEcC0NEpCMYhFr1+eefBwcHjx49es+ePVLXQkREgCQ71BuztLQ0AF26dJG6ECIiKsEg1KqEhAQA3CaCiEh3MAi1ikFIRKRrGIRalZiYCKBRo0ZSF0JERCUYhFrFFiERka7hqFHtKSoqsrKyMjExyc/P5z6CREQ6gi1C7UlMTFQoFG5ubkxBIiLdwSDUHvaLEhHpIAah9ihHyjAIiYh0CoNQe5QtQg4ZJSLSKQxC7WHXKBGRDmIQag+DkIhIBzEItYf3CImIdBCDUHt4j5CISAcxCLWHXaNERDqIK8toiRDC0tJSLpfn5uZaWVlJXQ4REZVgi1BLUlNTi4qKnJycmIJERDqFQagl7BclItJNDEIt4ZBRIiLdxCDUEg4ZJSLSTQxCLWHXKBGRbmIQagmDkIhINzEItYT3CImIdBODUEt4j5CISDcxCLWEXaNERLqJQagl7BolItJNDEJtyMrKys7OtrGxcXBwkLoWIiJ6DINQG9gvSkSksxiE2sB+USIincUg1AYOGSUi0lkMQm1g1ygRkc5iEGoDg5CISGcxCLWB9wiJiHQWg1AbeI+QiEhnMQi1gV2jREQ6i0GoDQxCIiKdJRNCSF2DgSssLLSysjI1NS0oKDAx4U8eRES6hZ/LGpeYmCiEcHd3ZwoSEekgfjRrnHLIKEfKEBHpJgahxvEGIRGRLmMQahyDkIhIlzEINY5BSESkyxiEGsd7hEREuoxBqHFsERIR6TIGocYxCImIdBmDUOMYhEREuowry2iWEMLS0lIul+fl5VlaWkpdDhERVcQWoWalpqYWFRU5OzszBYmIdBODULO4ARMRkY5jEGoWbxASEek4BqFmMQiJiHQcg1CzGIRERDqOQahZXFaGiEjHMQg16P79+z///DPYIiQi0mEMQo24ePHixIkTW7RocfPmzXfeead///5SV0RERFUzk7oAgyKEOHz48NKlS//44w8AFhYWkyZNmjRpkouLi9SlERFR1RiE6lFYWLhjx47PP//82rVrAOzt7V999dX58+f7+PhIXRoREanCIHxSmZmZGzduXLp06YMHDwA0atRo2rRpc+bMadCggdSlERFRzRiE9RcVFbV27dq1a9c+evQIQMeOHefNm/fyyy+bm5tLXRoREdUWg7A+Ll269MUXX3z77bdyuRxAQEBAcHDwkCFDZDKZ1KUREVHdcPeJujlx4kRoaOgPP/wAwNzcfMSIEe+88063bt2krouIiOqJLcJaUY6FWbJkSWRkJDgWhojIgDAIa1DlWJjZs2c7OTlJXRoREakBg7BaCQkJ69atW7FiRUZGBoAOHTrMnDlz0qRJ3FmQiMiQMAircPny5WXLlu3YsaOoqAgcC0NEZNA4WOYxyrEwhw4dEkJwLAwRkTFgixCoZizM22+/7evrK3VpRESkWcYehFlZWRs2bFi2bFlsbCw4FoaIyPgYbxBWORZm4sSJVlZWUpdGRETaY4xByLEwRERUyrgGy5QfC2NiYjJo0KAPPvjg6aeflrouIiKSjFG0CIuKivbv379kyZJz584BsLOze+211zgWhoiIYPBBWGEsjLu7+/Tp0zkWhoiIShlsECYmJq5du7Z0LEz79u3feustjoUhIqIKDDAIr1y5snTpUo6FISKi2jCowTKVx8K
8//773bt3l7ouIiLSXYbTIuzXr9+xY8cA2Nvbv/7663PmzOFYGCIiqpHhBGH37t1v3rw5ffr0oKAgZ2dnqcshIiL9YDhdo1lZWZaWlhYWFlIXQkRE+sRwgpCIiKgeTKQugIiISEoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmoMQiIiMmr/D4Lydc45I7NQAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlgAAAJYCAIAAAAxBA+LAABytUlEQVR4nO3dd1xU57ov8HfoIk0RVARRURGxIXaNKZbExG7siigw1IjZ59yd3HvOvXefz73nnmSfz9lbVNrQsYAYe4s1MfaCgIqICipSVJAivc79Y7IniwUsZmCtd035ff/a7+tinocdnWdmrfd9XolcLicAAAD6ykDsBAAAAMSEQggAAHoNhRAAAPQaCiEAAOg1FEIAANBrKIQAAKDXUAgBAECvoRACAIBeQyEEAAC9hkIIAAB6DYUQAAD0GgohAADoNRRCAADQayiEAACg11AIAQBAr6EQAgCAXkMhBAAAvYZCCAAAeg2FEAAA9BoKIQAA6DUUQgAA0GsohAAAoNdQCAEAQK+hEAIAgF5DIQTQMunp6f7+/lu3bm1ra/Pw8PD398/OzhY7KQAtJpHL5WLnAADda2hoSEtL+9vf/paVlUUIMTY2jouL8/LyUvzp7NmzQ0NDV6xYYWRkJGqaANoHhRBA0z19+jQ+Pj4mJqa8vJw57+rqmpuby5wZPHiwl5dXcHCwk5MT3RwBtBgKIYCGam1tPXPmzK5duy5duqTWv1MTE5Nly5ZJpdJ58+ZJJBLhMgTQDSiEABqnqKhIJpPFxMSUlJRwXCaRdPPvd9y4cYGBgZs3b7a0tOQ7RwDdgUIIoEGuXbu2a9euY8eONTc3c1zm6ekplUqXLVt2/Pjx3bt3P3r0iONiS0vL9evXBwUFTZw4ke98AXQBCiGA+D58+JCamtrjkqZW+fTy8jIzM+MnbwCdgEIIIKaMjIyoqKj9+/fX1tZyXObq6rp161apVNqvX7+urikpKUlOTg4PD3/9+jXHSw0cONDb2zsgIGDYsGE9ThtAl6AQAoigsbHxxIkTMpns4sWLHJcZGhouWrQoNDRU9WUvKi6xMTAw+Oyzz6RS6cqVKw0NDdX+BQB0CAohAFVFRUUxMTERERGlpaUcl/V+I0RXmy5YRo4c6evr6+PjM2DAgJ4FAtB2KIQANLS1tV2+fFkmkx05cqS1tZXjSn63xtfU1Bw4cCAiIkKxDb8rpqamS5cuDQ0NnT17du+DAmgXFEIAYVVWViYlJe3atSs/P5/jMisrq3Xr1m3fvt3d3V2INNLT02Uy2d69e+vr6zkuUyyo2bRpk7m5uRBpAGggFEIAoShqz759++rq6jgumzx5sr+//8aNG/v27St0Su/evUtISIiKinr58iXHZdbW1mvXrg0NDR07dqzQKQGIDoUQgGeKpqA7d+7MyMjguExxN1Iqlc6fP59abgoq3qdVLqhBC1PQbSiEALx59uxZXFxcbGzs+/fvOS4bMWKEVCrdtm2bnZ0dtdw6lZ
eXFxMTExcXV1ZWxnGZg4PD5s2bQ0JCHB0dqeUGQA0KIUBvKb5ghYWFnT59Wht3LKi4lwMtTEFXoRAC9NybN2+SkpIiIiIKCgo4LrOxsfHy8tqxY8fw4cOp5dYDioea3e7uHzNmTEBAgI+Pj4WFBbXcAISDQgjQE+np6WFhYampqap0Ndu8eXOfPn2o5dZLVVVVBw8eDAsLe/z4Mcdlin5vISEh48ePp5YbgBBQCAHUUF1dnZKSsmfPnocPH3JcZmZmtnr16j/96U+TJk2ilRrP5HL5pUuXZDLZ0aNHW1paOK709PTcvn37+vXrjY2NqaUHwCMUQgCVPHnyJDIyMj4+vqamhuOyUaNG+fj4+Pn59e/fn1pugiouLt67d++ePXsKCws5Lhs0aNCWLVsCAwOdnZ2p5QbACxRCAC5NTU3Hjx/vdiGJgYHBl19+qVZTUO2i/P9BlRam27dvX7x4sU7+/wA6CYUQoHOKb0K7d+8uKiriuEzxTSgoKGjo0KHUchPRkydPEhMTo6OjKysrOS5TfDP29fW1tbWllRpAD6EQArSj3GyOZ2McFM9Kw8PDHzx4wHGZ4lnpt99+6+HhQS03AHWhEAL8TsXVkoqmoFgtqaDW6lm0MAXNhEIIoGpTUOyf68rbt28TExMjIyNfvXrFcZliP2VoaOiIESOo5QbQLRRC0F+KjiphYWHXr1/nuEzZUYV+U1Dtou0ddkBvoRCCPnr+/HlsbKwqPTb9/PyCgoLs7e2p5aYDVOy5OmTIEF9f3+DgYNF7roKeQyEEPYJTF2hqaGg4efLkzp07b9y4wXGZiKdwACigEIJeUDzE6vYcPhsbmzVr1uzYscPNzY1WarpPxUewHh4eAQEBdM5lBGBCIQQdp3gXTk5Obmho4LgMyxqFVllZmZSUtGvXrvz8fI7LFItyv/nmm3HjxlHLDfQcCiHoJsVGt4iIiKysLI7LFPflduzYMWvWLGq56TPVt2nOnj07NDR0+fLlerhNEyhDIQRdk5ubm5CQIJPJKioqOC4bOXKkr68vWp+IpaioKCYmJiIiorS0lOOywYMHe3l5BQcHOzk5UcsN9A0KIegINMPURiq2cjU0NFy0aJEOt3IFcaEQgtYrKSlJTk4ODw9//fo1x2UDBw709vbG8QiaKScnJyoqqtvDPUaPHr1t2zZdOtwDNAEKIWixa9eu7dq1S5WmoFKp1MvLy8zMjFpu0AMfPnxITU3dvXv3o0ePOC7TgeMeQaOgEIL2UTQF3bVrV3Z2NsdliiPUg4ODJ0yYQC034IXiI86xY8dUaWG6efPmPn36UMsNdA8KIWiT+/fvR0dH79+/v7a2luMyV1fXrVu3SqXSfv36UcsNePfmzZukpKSIiIiCggKOy+zt7bdu3RoQEDBs2DBaqYFOQSEELaBoCtrtkgplU1AsqdAlra2tZ86c2bVrlyrLoNDCFHoAhRA0Wl5eXkxMTLdNQRWL7ENCQhwdHanlBpQ9ffo0Pj4+JiamvLyc4zIXFxc/Pz8fH58BAwZQyw20GgohaCIVm4JKJJJ58+ahKaheaWhoSEtL+/vf/56ZmclxGVqYgupQCEGzKBpxhYWFvXjxguMya2vrtWvXbt++3d3dnVpuoFEUzfP27t1bX1/PcZliQQ1amAIHFELQFHhfgx5Q65NTaGjo2LFjqeUG2gKFEESGO13Qe7iXDr2BQgiiwdoH4J2Kq6scHBw2b96M1VWggEIItGE1PAgN+21ALSiEQI9a+6P9/f2HDx9OLTfQSejAAKpAIQQa0tPTw8LCUlNT0TEL6ENPPuCGQggCUvRQ3rNnz8OHDzkuQw9loEP1Lu3bt29fv349zgTWEyiEIAicqgMaC+d2AQsKIfAJ56yCtsBJzqCEQgj8KCoqiomJiYyMfPfuHcdliqagwcHBTk5O1HID4JCbm5uQkCCTySoqKjguGzlypK+vr6+vr62tLbXcgA4UQugV5Ubmbp+7zJ49OzQ0dPny5XjuAhqour
o6JSUlIiIiKyuL4zIzM7MlS5bs2LFj1qxZ1HIDoaEQQg9VVlampaXt3LkzJyeH4zIrK6t169Z9880348aNo5YbQI8pWv0lJyc3NDRwXKZY4bxp0yZzc3NquYFAUAhBbYp3in379tXV1XFc5ubm5u/v7+vri6agoHXevn2bmJgYFRX18uVLjstsbGzWrFmzY8cONzc3WqkB/1AIQVUNDQ0nT57cuXPnjRs3OC5TdutAU1DQdoo7/2FhYadPn1alCxJamGopFELo3vPnz2NjY2NjY9+/f89x2ZAhQ3x9fYODg+3s7KjlBkCBWv8EgoKC7O3tqeUGvYdCCF3Cx2EAJkUL07CwsOvXr3NchpsiWgeFEDqheEASGRn56tUrjstsbGy8vLxCQ0NHjBhBLTcA0an1mNzHx8fCwoJabtADKITQjlpNQbFkDvSZooWpigunQ0JCxo8fTy03UAsKIRDyj01U4eHhDx484LhMsYnq22+/nTlzJrXcADSZ6ltp0cJUY6EQ6jsV22qMGjXKx8cHbTUAulJcXCyTybptrjRo0KAtW7YEBQUNHTqUWm7ADYVQT6HRIoAQ0G5XG6EQ6p3i4uK9e/fu2bOnsLCQ4zLF51a03gfomSdPnkRGRuIAFq2AQqgv5HL5pUuX8CQDgCYcyakVUAh1n2JtW1hY2OPHjzkuw/HcAMJRaz325s2b+/TpQy03QCHUZYrdTvv376+treW4bMyYMQEBAdjtBCC0N2/eJCUlRUREFBQUcFym2KG7Y8eO4cOHU8tNn6EQ6iBF/4tuH9cr+1/gcT0ATer2bFq5cqWhoSHNDPUNCqFOycvLi4mJiYuLKysr47jMwcFh8+bNISEhjo6O1HIDAJZnz57FxcXFxMSUl5dzXObi4uLn5+fj4zNgwABquekVFEJdoNzSe+TIkdbW1q4uQ1NQAA3U0NCgONozIyOD4zJTU9OlS5eihakQUAi127t37xISEro9Nc3a2nrt2rWhoaFjx46llRoAqEfxUH/v3r319fUcl02ePNnf33/jxo046ZMvKITaSsV/M2gKCqBdKisrk5KSwsLCXrx4wXGZ4tPt9u3b3d3dqeWmq1AItYziLsrf//73zMxMjssUd1FCQ0Nnz55NKzUA4I2KzzsIIbNnzw4NDcXzjt5AIdQaT58+jY+P7/a5+siRI319ffFcHUA3FBUVxcTERERElJaWclw2ePBgLy+v4OBgJycnarnpDBRCTdfa2nrmzJldu3ap0hQUK60BdBL2RAkKhVDT/Y//8T/+4z/+g+OCgQMH+vj4+Pv7o5k9gM67f/9+ZGTkgQMHuM8EPnbs2LJly6hlpe1QCDXX+/fvHzx44Ojo6Orq2ul/JnRjAtBPihamu3fvfvToUcc/HTRo0LNnzy5fvrx06VL6uWkjFELN9eOPP37//fejR49ua2t7/vy5cl7RFDQoKGjixIkipgcAort27dquXbuOHTvGbGE6b9683NzcwsLCK1euzJ07V8T0tAUKoYZqa2sbNWpUfn4+c9LV1XXr1q1SqbRfv35iJQYAmqakpCQ5OTk8PPz169fM+bVr16ampoqVlRZBIdRQp0+fXrx4sXIokUgOHDiwbt06EVMCAE3W3Nz85z//eefOncoZY2Pjly9fOjg4iJeUdjAQOwHoXEREBHO4ePFiVEEA4GBsbPyf//mfQ4YMUc40NzfHx8eLmJK2QCHURK9evTp37hxzJjAwUKxkAEBbGBkZ+fj4MGeio6O5D+IGgkKomSIiIpi9JEaMGPH555+LmA8AaAt/f39jY2PlsLCw8OTJkyLmoxVQCDVOY2NjYmIicyYgIMDAAP+lAKB7Dg4OS5YsYc5ERkaKlYy2wNurxjl48OC7d++UQ1NTU29vb/HSAQAtw3qScvHixdzcXLGS0QoohBqH9fFt3bp1dnZ2YiUDAFpn3rx5rq6uyqFcLo+OjhYxH82HQqhZMjMzb926xZzBMhkAUItEIgkICGDOJCQk1NbWip
WP5kMh1Czh4eHM4aRJk6ZPny5WMgCgpby9vZnH9lZWVmJnPQcUQg1SWVmZkpLCnAkJCRErGQDQXjY2NuvXr2fO7NmzR6xkNB8KoQZJTExk3r6wsbHBJnoA6Jng4GDmMDMz8/bt22Ilo+FQCDWFXC6PiopizmzdupV5cwMAQHWTJk2aMWMGcwb7KLqCQqgpLl26xFziLJFI/P39RcwHALQda6ldamoq9zH3eguFUFOwPqzNnz+fuQAaAEBda9eutbe3Vw47NusABRRCjVBcXMxqg4RdEwDQSx3bcURFRbW1tYmUjuZCIdQI0dHRzHM1HRwcmGcwAQD0TFBQkKGhoXKYn5/PaugPBIVQE7S0tMTFxTFnAgICmG1zAQB6xtnZmdWyH0tmOkIhFN+RI0eKioqUQ2NjY9ZBKgAAPcZ6znL69OkXL16IlYxmQiEUH+sD2sqVK3GiNADw5csvvxw+fLhy2NbWFhMTI2I+GgiFUGQ5OTlXrlxhzmCZDADwyMDAQCqVMmdiYmIaGhrEykcDoRCKLCIiQi6XK4dubm5z584VMR8A0D2+vr5mZmbKYVlZ2eHDh0XMR9OgEIqppqZm7969zJng4GCJRCJWPgCgkwYMGPD1118zZ7BkhgmFUEz79u2rqqpSDi0sLDZv3ixiPgCgq1jPXK5fv37//n2xktE0KIRikslkzOHmzZutrKzESgYAdNisWbMmT57MnGG9/+gzFELRXLt2LSMjgznDOksTAIBHrPbFe/furaioECsZjYJCKJqIiAjmcO7cuRMmTBArGQDQeZs2berXr59yWFdXt2/fPhHz0RwohOIoLS09cuQIcwa7JgBAUObm5qxVCJGRkcxV63oLhVAcMpmssbFRObSzs1uxYoWI+QCAPggMDGSuS8/Jyfn111/FS0dToBCKoLW1NTY2ljnj7+9vamoqVj4AoCfGjBnz6aefMmewj4KgEIri1KlTL1++VA4NDQ19fX3FSwcA9EhQUBBzePToUWavY/2EQigC1kewJUuWODs7i5UMAOiVZcuWOTo6KoctLS1oPYpCSFteXt6FCxeYM1gmAwDUGBkZsW5ByWQy5nmoegiFkLbIyEjmCdEuLi7z588XMR8A0DdSqZR54mlJScnx48dFzEd0KIRU1dfXJyYmMmeCgoIMDPBfAQDoGTx48PLly5kzer5kBm/BVKWmpr5//1457NOnj7e3t3jpAICeYj2RuXz5cnZ2tljJiA6FkCrWx67169f3799frGQAQG99+umn48aNY87oc+tRFEJ6MjIy7t69y5zBMhkAEAvrtN6kpKTa2lqxkhEXCiE9u3fvZg6nTZs2ZcoUsZIBAD3n7e1taWmpHFZVVe3fv1/EfESEQkhJZWXlwYMHmTP4OggAIrK0tNywYQNzhnUSgP5AIaQkLi6urq5OOezXr9+aNWuoRU9KSoqIiPjw4QO1iACg+VhdZrKysm7cuCFWMiJCIaRBLpezHkT7+PiYm5vTid7W1vaXv/wlODjY0dHR398/KyuLTlwA0HATJkyYPXs2c0Y/91FIcAYHBefOnfviiy+UQ4lE8uTJk9GjR9OJfvLkyaVLlyqHhoaGhYWFgwYNohMdADTZ/v37N23apByamJi8fv3a3t5exJTowzdCGlgfsj7//HNqVZB0uO+/ePFiVEEAUFi9evXAgQOVw6ampvj4eBHzEQUKoeBev3596tQp5gzNZTJ5eXnnz58XKzoAaDgTE5Nt27YxZyIjI1tbW8XKRxQohIKLjo5m/q0aOnToV199RS16VFQUq7XpggULqEUHAM0XEBBgaGioHBYUFJw9e1bEfOhDIRRWU1NTxzN4mX/nBFVfX5+QkMCcCQwMRGtTAGAaOnTol19+yZzRtyUzeE8U1uHDh9++fascmpiY+Pj4UIt+8OBBtDYFgG6xnpicPXv22bNnYiVDHwqhsFgfrL7++mvmc2nK0detW2dra0stOgBoiy+++GLUqFHKoVwuZ93K0m0ohA
LKzs6+du0ac4a1fVVQGRkZd+7cYc5gmQwAdEoikfj5+TFn4uLiGhoaxMqHMhRCAe3Zs4e5TbPj3lWhozOHHh4eU6dOpRYdALSLr68vs8vH+/fv09LSRMyHJhRCoVRXVx84cIA5Q/PrYGVlZWpqKnNm+/bt1KIDgNbp16/f6tWrmTP6s2QGhVAoSUlJzN6eHfvbCio+Pl7E1qYAoI1YT09u3bqVnp4uVjI0oRAKJTo6mjlknXgiKLlczopOs7UpAGip6dOns86Gi4qKEisZmlAIBfHrr78+evSIOcM6A1NQ58+ff/r0qXLY8TE4AECnAgICmMP9+/eXl5eLlQw1KISCYN1b//TTT8eNGydWdMqtTQFAe23YsKF///7KYX19fXJysoj50IFCyL83b94cPXqUOUNz34K4rU0BQKv16dNny5YtzJnw8HCdP6QIhZB/MpmsublZORw8ePDy5cupRRe3tSkAaLvg4GBmI8bnz59funRJxHwoQCHkWUtLS0xMDHNGKpUaGxvTid7c3MxqLkqztSkA6AAXF5f58+czZ1hHuekeFEKenThxorCwUDk0MjLy9fWlFv2nn34qLi5WDim3NgUA3cB6nnLixIlXr16JlQwFKIQ8Yy1UWbZsmaOjo1jRKbc2BQDdsGTJEmdnZ+WwtbU1Li5OxHyEhkLIp44302kuVHn8+DGrtSmWyQBADxgaGrJuZcXExDQ1NYmVj9BQCPnEWl41atSozz77jFp0VmtTd3f3OXPmUIsOALrE19fXxMREOey4GF6XoBDypuOGm+DgYIlEQid6dXX1/v37mTMhISF0QgOA7hk0aNDKlSuZMzrcehSFkDesFgzm5uZeXl7UoicnJ4vY2hQAdA/r2cqVK1cePnwoVjKCQiHkDasp38aNG/v160ctOqu56JYtW6ysrKhFBwDdM3fu3PHjxzNnWO8zOgOFkB8d27T7+/tTi97xkxrN6ACgq1itR1l3nnQGCiE/WHfPZ86c6enpKVb0Tz75hGZrUwDQVV5eXsx7Sx3XIugGFEIedDzKmea+BXFbmwKADrOwsNi0aRNzhrU6XTegEPIgLi6uoaFBORwwYADroGdBsfb3DBo0aMWKFdSiA4BuY61+77hfWQegEPaWXC6PjY1lzvj4+JiZmdGJ3rHjA83WpgCg88aOHfvRRx8xZ3RvHwUKYW+dPXv22bNnyqGBgQHNhSqsHoBGRkY4gxcA+MV62nL48OG3b9+KlYwQUAh7i/XhaNGiRcOHDxcr+tKlS2m2NgUAfbBq1SoHBwflsKmpiXUbTNuhEPbKq1evzp49y5wJCgqiFl3c1qYAoCeMjY23bdvGnGGde6rtUAh7JTIykvm3wdnZ+fPPP6cWPSIioq2tTTkcOXLkvHnzqEUHAP0hlUqNjIyUw9evX586dUrEfPiFQthzjY2NrFNwg4KCqJ2CW19fn5SUxJyh2doUAPSKk5PT4sWLmTO6tGQGhbDn0tLS3r17pxyampp6e3tTi37gwAERW5sCgL5hPXk5f/7806dPxUqGXyiEPcf6QLR27Vp7e3uxom/cuLF///7UogOAvlmwYMHo0aOVQ7lcLpPJRMyHRyiEPZSVlXXz5k3mDM2FKuK2NgUAPSSRSFjvM3FxcXV1dWLlwyMUwh4KDw9nDidNmjRjxgxq0cVtbQoA+mnbtm3m5ubKYWVl5cGDB0XMhy8ohD1RVVV14MAB5kxwcDC16OK2NgUAvWVjY7Nu3TrmzO7du8VKhkcohD2RmJhYW1urHNrY2Kxfv55adHFbmwKAPgsJCWEOMzIy7t69K1YyfEEh7ImYmBjm0Nvbu2/fvnRCi9vaFAD0nIeHx7Rp05gzOrCPAoVQbZcuXcrOzlYOJRIJ6+xKQYnb2hQAgPUsJjU19f3792IlwwsUQrWxPv7MmzfP1dVVrOiUW5sCAKxfv97Ozk45rK+vT0xMFC8dHqAQqqekpOTEiRPMGZoLVQoKClitTbFMBgAo69g8JDIykt
nuUeugEKonOjq6ublZOXRwcFiyZAm16B1bm37xxRfUogMAKAQEBBgY/FE+8vLyzp8/L2I+vYRCqIaWlhbWQhV/f39qp+A2NTXFx8czZwIDA6m1NgUAUBoxYgTrgAGtXjKDQqiGo0ePFhUVKYdGRkY+Pj7Uondsbbp161Zq0QEAmFjPZU6dOvXy5UuRcuktFEI1sD7yrFy5csiQIWJFX7NmDc3WpgAATF999RVzpV5bWxtrX5kWQSFUVU5Ozq+//sqcoblQJSsr68aNG2JFBwBgMTAw8PPzY87ExMQ0NjaKlU9voBCqKjIyUi6XK4dubm4ff/wxtegRERHM4cSJE2fOnEktOgBAR35+fsxuHqWlpYcPHxYxnx5DIVRJTU1NcnIycyYoKIjaKbhVVVX79+9nztBsbQoA0KkBAwasWrWKOaOlS2ZQCFWyf//+qqoq5dDCwmLz5s3UoiclJTFbm1pbW2/YsIFadACArrCe0Vy7di0jI0OsZHoMhVAlrPMnN23aZG1tLVZ0mq1NAQA4zJ49e/LkycwZbTytF4Wwe9evX79//z5zRiqVUot++fJlZmtTQgjrATUAgIhY74f79u1j3j/TCiiE3WPd9Z4zZ46Hh4dY0efNm+fu7k4tOgAAt40bNzLvkNXU1Ozdu1fEfHoAhbAbHddBBQUFUYteUlJy/Phx5gx2TQCARrGwsPDy8mLOREREMNfYaz4Uwm7ExMQwT8G1s7NbuXIlteis1qaDBw9eunQptegAAKoIDAxkrqLPycm5cuWKiPmoC4WQS8deCVKp1NTUlE50cVubAgCoyM3N7ZNPPmHOaNc+ChRCLqzueYaGhr6+vtSiHzt2jNXalGZ0AADVsZ7aHDlyhPn2peFQCLmwPtQsXrx42LBh1KKzuslQbm0KAKC6FStWMN+gWlpa4uLiRMxHLSiEXep4whbNhSritjYFAFBLx1tWrCUOmgyFsEtRUVHMM5ddXFwWLFhALbq4rU0BANTFWsRQXFx88uRJEfNRHQph5xobG5OSkpgzgYGBzBOZBVVXV7dv3z7mDM3WpgAAPdBxWbu2LJlBIexcSkpKaWmpctinTx9vb29q0ffu3VtRUaEcUm5tCgDQM6wnOJcuXcrNzRUrGdWhEHaO9UFm3bp1tra21KKL29oUAKBnWK2v5HJ5VFSUiPmoCIWwExkZGXfu3GHO0FyoIm5rUwCA3mA1Q05MTGQenqOZUAg7sWfPHubQw8Nj6tSp1KKL29oUAKA3WMfjVFZWpqSkiJiPKlAI2SorK1NTU5kz33zzDbXoZWVlrNam2DUBAFqk44Gp4eHhYiWjIhRCtvj4+Lq6OuXQxsZm7dq11KJ3bG3KOgAaAEDDBQcHM4eZmZm3bt0SKxlVoBC2I5fLo6OjmTM+Pj7m5uZ0ondsbern50ettSkAAC8mTpw4c+ZM5oyG76NAIWznwoULT58+VQ4lEgnNhSqnT59+8eKFcmhgYIAzeAFAG7GOqzt48OC7d+/ESqZbKITtsD62LFy4cPTo0WJFp9zaFACAL2vWrLG3t1cOGxsbExISRMyHGwrhH16/fn3q1CnmDM2FKvn5+efOnRMrOgAAj0xMTLZt28aciYyMbG1tFSsfbiiEf5DJZC0tLcqhk5PT4sWLqUXv2Np04cKF1KIDAPArMDDQ0NBQOXz16tXPP/8sYj4cUAh/19zcHB8fz5zx9/dn/lcUVGNjY2JiInMmICCAWmtTAADeDR06dNGiRcwZjV0yg7fa3x0+fLi4uFg5NDExoXkKbmpqKqu16datW6lFBwAQAuv5ztmzZ5nrATUHCuHvWB9VVq1aNXDgQLGir127lmZrUwAAISxatGjUqFHKYVtbG2t/moZAISSEkMePH1+9epU5Q3OhSmZm5u3bt8WKDgAgEIlEwrq1FhcXx+wZoiFQCAkhJDw8nHkKrru7+5w5c6hF79jadNq0adSiAwAIx8fHx8zMTDksKys7dOiQiPl0CoWQVFdXs07BDQ
kJoXYKbsfWpiEhIXRCAwAIzdbWds2aNcwZDVwyg0JIkpOTP3z4oBxaWlqyOsYKKiEhgXlGiY2Nzbp166hFBwAQGutZz82bN9PT08VKplMohIT18HbLli1WVlZ0Qndsbbpt2zZqrU0BACiYMWOGp6cnc0bTlszoeyG8cuXKw4cPmTP+/v7Uol+8eDE3N1c5lEgkNKMDANAREBDAHO7fv7+iokKsZDrS90LIulv9ySefjBs3jlr0iIgI5pBya1MAADo2btzYv39/5bCuri45OVnEfFj0uhC+efPm6NGjzBma+xbEbW0KAEBNnz59vLy8mDOstfri0utCGBMT09TUpBwOGjRoxYoV1KKL29oUAICm4OBg5mr8Z8+eXb58WcR8mPS3ELa2tsbFxTFnpFKpsbExnejitjYFAKBs5MiR8+bNY85ozj4K/S2EJ06cePXqlXJoZGRE8xRcVmtTY2NjNBcFAN3Gevpz/PjxwsJCsZJh0t9CyPowsnTpUkdHR7Gif/311w4ODtSiAwDQx3qbbWlpiY2NFTEfJT0thM+fP7906RJzhuZCFXFbmwIAiKLjjTeZTNbc3CxWPkp6WggjIiKYp+B2vHktKNZyqbFjx9JsbQoAIBbWUoySkpJjx46Jl87v9LEQ1tfXJyUlMWdYy5kEVVNTI2JrUwAAEXVcnK8JS2b0sRAeOHCgvLxcOey4wUVQHVubbty4kVp0AABxsZ4E/fLLL48ePRIrGQV9LIRRUVHMIavlAeXoXl5e1FqbAgCIrmMDL5lMJlYyCnpXCG/fvn3v3j3mDKsJnqB+++03EVubAgBoAtb7XmJiYnV1tVjJED0shKz70R3botOM/vHHH48fP55adAAATcA65Ke6uvrAgQMi5qPNhbCtjdTWkooKUlZGSktJRQWpqSGtrRw/8f79+4MHDzJnaO5bKC0tFbG1KQCAhuh47CvrBALKjESMrZ6WFlJcTIqLydu3pKyMVFaSmprOr+zbl1hbE1tbMnAgcXAgQ4YQExPFn8THxzc0NCgv7Hh0sqCio6MbGxuVQ8qtTQEANEdISEh0dLRyI9mDBw+uX78+e/ZsUZLR+EJYUUFycsjz5+T1a8JoUc2ltpbU1pLiYqJ4GmdgQIYMIaNGyV1dY2JimBf6+PiYmZkJkHQnWltbWT0U/Pz8TP5RoQEA9Iq7u/ucOXOYrUUiIiLEKoQSzTkIo53GRvLwIcnMJEVF6v2gXE662JN39tmzL/fvVw4lEklubu6oUaN6k6bqjh07xvz+Z2homJeX5+zsTCc6AICmSUlJYd4gNTExKSgoGDhwIP1MNO8Z4YcP5Nw58re/kdOn1a6ChHRVBQkhke0Xiy6aN49aFSSdtTZFFQQAfbZq1Spm2WtqamKdCESNJhXC2lpy9izZtYvcukUYxwTyoqCq6syzZ8yZwIEDybFjpKqK30CdysvLu3jxYrvoWCYDAPrNxMTE19eXORMdHd3KueBRIJpRCNvayM2bZPducucO97LPHou6d6+V0VzU2cZmkYsLycoie/aQX35R9eljT4nb2hQAQDOxzmEtKCg4ffo0/TQ0oBC+fUtiYsj584SxopJfTa2t8RkZzJnAKVMMDQwIIaSlhfz2G4mMJAUFAkWvr69PTExkzgQFBRkYaMD/8wAAonJyclq8eDFzRpTWo6K+Hcvl5NYtEhND3rwRNE5advZbxl4LUyOjrR4e7a4oLyeJieTyZcL43saXlJQUVmvTLVu28B4FAEAbsZ4TnTt37unTp5RzEK8QNjWRtDRy7pxA90KZIu/eZQ7XuLvb9+3LvkguJ1evkr17SW0tz9Hbf8DZsGEDzdamAACabOHChaNHj1YO5XI5a58bBSIVwg8fSFwcefKEQqisN29uvH7NnAmcMqXLq1++JLGxpLSUr+h37twRsbUpAICGk0gkUqmUORMXF1dXV0czBzEKYVkZiY0l797RiRbR/uvgxEGDZjo5cf1AZSWJjyfta2fPo7fvGzRjxo
wpHGUYAED/+Pj4mJubK4cVFRVpaWk0E6BeCN+9I4mJhFaj8erGxpT2J10FT53a/Y81NJB9+8irV72MXlFRcejQIeYMdk0AALDY2NisXbuWOUN5yQzdQlheTpKTeX8IxyEhM7OasRjV2sxsg4qnPTQ1kQMHerKjnyE2Npb5BZ9ya1MAAG3xzTffMIcdHyoJimIhrK0l+/bRrIKEEFl6OnPoPWlSX9XbeypqIWPBp1o6PvKl2doUAECLeHh4TG1/u47ml0JahbC1lRw8SCoqKIUjhBBy+cWL7PZPIv0mT1bvJerqSEpKzzY4/vzzz88YvWwkEgmrhwIAACixnhyxNp4JilYhPHeOr+UnqmPtmpg3YoS7vb3ar1JWRo4dI+q3Jmd9nFm0aBHN1qYAANpl3bp1tra2ymHHViTCoVIIc3JI+5pEQUl19fHcXOYM164Jbk+eqJt/QUHBmTNn2kXHMhkAgK716dPH29ubOcNqTikc4QthbS05eVLwKB1Ep6c3M7bqD7a0XOrq2vOXu3CBvH+v+uVRUVHM1rFDhw5dtGhRz6MDAOiBwMBAZvvJjscVCET4Qnj2LKmvJ4T04O5ij7W0tcXev8+c8ff0NGa0dlX/FVvIiRMq/gpNTU3x8fHMmcDAQMPeRAcA0AMuLi4LFixgztBZMiNwIczPJ9nZv//vrk8K5N2xJ0+KPnxQDo0MDHzVXSbTUUEBefBAlQsPHTr09u1b5dDExGTbtm29jQ4AoAdYT5FOnjz5qtdburslZCGUy8m5cwK+ftdYy2RWuLkNsbLi4XUvXSLNzd1Hb/8RZs2aNfY9WKQDAKB/Fi9ePGzYMOWwtbU1NjZW6KBCFsKHD6n1UWN6Ulb2y4sXzJmeL5NRUtwUra4md+5wX/jgwYPr16+3i45lMgAAqjE0NOx4Wm+jYIf0KQhWCOVy8ttvQr04p8i7d5mP8tzs7D5hfL7oIeV93Rs3uL8UspqLTpw4cdasWb2NDgCgN6RSqampqXJYWlp69OhRQSMKVghzc9VaZsmXuubmve2f5AVOmSLh8fFkXR1pf8YvU3V19YEDB5gzQUFBvIUGANADdnZ2K1euZM4IvWRGsEJ4+7ZQr8xp34MHFYpFqoQQQixMTLwmTuQ5xp07XS0fTUxMrGb0E7e2tt64cSPP0QEAdB3ridJvv/328OFD4cIJUwjLy8nLl4K8cnei2/dp3ThhgjXv7T3fvycFBZ3+iUwmYw63bNnSt+MJwAAAwOmjjz6aMGECcyYqKkq4cMIUQtW2GfDuxuvX90tKmDNST09BInX2C758+fLNmzfKoUQiwRm8AAA9w3r/TE5O/sDYFMcvYQrh48eCvGx3WLsmZg8dOnnwYEEi5eSQDo1/hg0bVlhYmJaWNn/+fELIp59+6ubmJkh0AABdt3nzZmtra+WwpqZm3759AsUSoBCWl5PSUv5ftjtldXU/tS/APOya6Ep9fac9xE1NTVevXn3hwoX09PQffvhBqOgAALrOwsJi8+bNzJnw8HC5MB3KBCiEeXn8v6YKYu/fb2hpUQ7t+vZdNXasgPGeP+f4w8mTJ7PO1gIAALUEBQUx1/w/fvz4N2F25QlQCMVYJtMml7PO4PWbPNnMyEjAkMJ3/QEA0Gdubm4ff/wxc0agfRQCFMLCQv5fszunnz59wTj110Ai8RNomYxScTFhnC8BAAC8Y+2jOHLkSHFxMe9R+C6EtbVEsbCH4lkThJDI9rsmFo8ePczGRtiQra2iPAoFANAfK1euHDJkiHLY3NwcFxfHexS+C6GyNlA8ayK/ouJc+yd2gXSez4nRSRUAQH8YGRn5+PgwZ2QyWQtjOQgv+C6E5eU8v6AKou7da2N8AXXp33+hiwuNwGL8sgAAesXf39/Y2Fg5LCwsPMn3Ye98F8KqKp5fsDuNLS2JmZnMmYApUwzofB+l/ssCAOgbBweHJUuWMGd4XzLDdyGsqeH5BbuT+uhRaW2tcmhqZLSF9+aiXWHEBQAAgb
CWzFy8eDE3N5fH1+e7EDIaXtPBWiazftw4O2rtPan/sgAAemjevHmurq7KoVwuj46O5vH1+S6EfD/D5PampuZZ+8OeAoTrJtORCqfVAwBAL3Vs3czvznq+C2GHDpyCGmRhUfxP/xS2aJGjlRUhxMrUdLqjI73wdH9ZAAC9tWHDBkWXmWHDhkVGRt7m9aQ/vgshxV0ThJC3NTVf7NsXevZs4YcPhJCapqZ8xrZ6wRkIdpojAAAwXLp0SdFo9OXLl6Ghodu2beOx7yjfb+WCdjXrwL5v3yLGQbhtcjnrPEJh0f1lAQD0VkREhPJ/NzU1tba2Svj73sV3IeT9FFxOEonEb/Jk5kxcRkYDteeUdH9ZAAD9lJ2dfe3aNeYMax1pL/FdCKkfyO47ebI5Y6/l+7q6tOxsSrFx+jwAgPD27NnDHE6YMGH27Nk8vj7fhdDKiucX7E6/Pn1Wu7szZ1jH8wqI+i8LAKBvqqurDxw4wJwJCgriNwTfhbB/f55fUAWsA3hvFRamC9CevBP9+tGIAgCgx5KSkj4oznIghBBiaWm5YcMGfkPwXQgHDOD5BVUw3dFxioMDcyaKzpIZOzsaUQAA9Bhr77y3t7elpSW/IfguhNbWoiwhYe2j3//wYbnQbV8kEjJwoLAhAAD026+//vro0SPmjFQq5T2KAPsIGWdHUbNh/Pj+ffooh/XNzclZWcKGtLMjJibChgAA0G+s/tqffvrpuHHjeI8iwJZwZ2f+X7M7fYyNt0yaxJwJv3OHx+2WnRDj1wQA0B9v3rw5evQoc4bfXRNKAhRCOmcBdhA0dSrz9KXn5eWXXrwQMJ5IvyYAADX5+flt4vWSlMlkzYyWzoMHD16+fLkQgQQohIMHEwsL/l+2OyP79583YgRzRsB9FEZGpH0sAAAd09bWNn/+fCcnp7/85S/v3r2jHL2lpSUmJoY5I5VKmSf08kiAQiiRkLFj+X9ZFbD2URzPzX1VWSlIpJEjiTD/PQAANMTPP//84sWL4uLif/u3fxs6dKiXlxfNb4cnTpwoLCxUDo2MjHx9fQWKJUzb6PHjBXnZ7ix1dXW2sVEOW9va4jMyBIk0YYIgLwsAoDHCw8OV/7uxsbGystKA4kkDrGUyy5YtcxTscCFhfitHR1H22BkaGPi2bz0qS09vam3lOUzfvmT0aJ5fEwBAk7x69ercuXPMGYEWqnTq+fPnly5dohZdsPI+dapQr8zJd/JkE0ND5fBNTc3RnByeY0yeTBghAAB0T0RERCvjW8SIESM+//xzatHDw8OZy/7HjBnz2WefCRdOsEI4aRIxNxfqxbs2yMJipZsbcyaS3y4zRkZk2jQ+XxAAQMM0NjYmJiYyZwICAqjdF62rq0tOTmbOBAYG8njoUkeC/WLGxmTmzN//t6D7+ToIbP9l9MrLlw/fvuXt1T08RFkTCwBAzcGDB5nLRE1NTb29valF379/f3l5uXJobm6+efNmQSMKWeGnTfu9ZtA9tn6us/P49s3PotPT+XlpY2Py0Uf8vBQAgKZiLVRZt26dHcVlH6zmops2beon8AkHQhZCExPy6acCvn7XWK1Hk7OyPjQ28vC6s2YRvpu9AgBolMzMzFu3bjFnaC6TuXnzZnr7ry7+/v5CBxX4nq+HB2l/LgQdXhMnWpmaKofVjY37Hzzo7Yv260fmzOntiwAAaDbmrglCyKRJk6ZPn04tOuvL6KxZsya33wsgBIELoURCli6lv8bSwsRkU/utfnt633p08WJiZNSrVwAA0GyVlZUpKSnMmZCQEGrRy8rKDh06xJyh82VU+FVAAweSjz8WPEoHwdOmMZ9MPi4tvVZQ0POXmzoVPdUAQOclJibW1tYqhzY2NuvWraMWPS4urqGhQTkcMGDA119/TSEuleWwc+aQ4cNpBGIYa2f3UfsDItTeR6H8BmlvTxYu5CkvAAANJZfLo6KimDNbt27t27cvnehtbW2sZTK+vr5mVA64pVIIJRKyahWxsvp9SG
s3BWsfxeHHj9/W1Kjx84rFrmZmZO1a3BQFAJ136dKl3Nxc5VAikVBYqKJ09uzZF4wjgwwMDIQ4g7dTtBrH9e1L1q37/SRbWrspVrm5OTAWeTa1tsbev6/eSxgYkNWrSf/+PGcGAKB5WAtV5s+f7+rqKlb0L7/8cjitW4n0OqiSwYPJ6tU0F84YGxpu8/BgzkSnp7eq1T196VI8GgQAfVBcXHzy5EnmDM1dE69evfr555/Fik6xEBJCRo4kq1YRiv3LpZ6eRoxwr6uqTj19quoPf/klmThRkLQAADRMdHQ08xRcR0fHJUuWUIseGRnJbG3q7OxMs7Up3UJICHFzo/m90MnaenH7kyJUWjIjkZDFi8XqGw4AQFlLS0tcXBxzRiqVGtFaG9HY2JiQkMCcCQoKMqR4+5B6ISSEjBlDNm0iVNYCkQ5LZs4/f/70/XuuHzAyIl9/TTw9hU0LAEBjHDlypKioSDk0Njb28fGhFj0tLU3E1qZEnEJICBk2jPj4EFtbCqEWjBgxmhFIToiMo/WopSXx9iZjx1JIDABAQ7AWqqxcudKBYlMwVvS1a9fa29tTi05EK4SEkAEDiJ8fhZIjkUj827cejbt/v45xK/wPw4cTf38yZIjQKQEAaI6cnJwrV64wZ2guVMnKyrp586ZY0RXEK4SEEFNTsno1WbaMMPqCCmGbh4e5sbFyWNnQcPDRo3ZXGBmRhQvJ5s2E1tZRAAANERERwexA6ebmNnfuXGrRO7Y2nTFjBrXoCqIWQoVJk0hwsKBfDW3MzNaNG8ec2X3nzh+DESNIYCCZOZPycVEAAKKrqanZu3cvcyYkJETQU3CZqqqqDhw4wJwJDg6mE5pJAwohIcTSkqxeTbZsEe6oipD2x8pnlJTcLSoiAwaQdevI5s3YMg8A+mnv3r1VVVXKoYWFxaZNm6hF79jadP369dSiK2lGIVQYNoz4+pJ164ijI++v7TF48LT2D/8iS0tJUBCh2DcBAEDTsJqLenl5WSnbYQovJiaGOfT29qbW2pRJ0tvDiQRSWEju3SOPH5NOV7X0SGJm5tZjx5TDPn36vH792pbKylUAAA109epV1uPABw8ejB8/nk70S5cuzZ8/XzmUSCQ5OTk0m7opadI3QiZHR7J8OfnnfyYrVxI3t96upjE2JqNGrf/+e7sBA5Rz9fX1iYmJvUwTAEB7sfYtzJ07l1oV7Bh93rx5olRBornfCFna2khxMSkoIEVF5O1bUlFBuFuGSiTExoYMHEgcHMjQoWTIEMXxEX/+85//8z//U3mVi4vL06dPDSi2fAMA0BClpaVOTk6NjY3KmZSUFGqnD5aUlDg7OzObuh0+fHjlypV0orNoSSFkaWsjHz6Q6mpSV0eamkhTEyGEGBsTExNibk4sLIi1dadd3PLz80eNGtXGKKJnz5794osvqCUOAKAh/v3f//1f//VflUM7O7vXr1+bCryZTekvf/nLv/3bvymHDg4OL1++NGbsc6NJO4/ZMzAgNjbExkbdnxsxYsTnn39+9uxZ5UxkZCQKIQDom9bW1tjYWOaMv78/tSrY0tLSMbpYVZBo7jNCwbB6Fpw6derly5ci5QIAIA7WW5+hoaGvry+16EePHmW2NjUyMqLZ2rQjvSuEX331FfOwx7a2Ntb6XQAAncdaqLJkyRJnZ2exoq9cuXKIqL0t9a4QGhgY+Pn5MWdiYmKYj4sBAHRbXl7ehQsXmDM023vm5OT8+uuvzJmgoCBq0Tuld4WQEOLn52fGOASqtLT08OHDIuYDAEBTZGQkc82gi4sLcz+f0MRtbdopfSyEAwYMWLVqFXOG9T0dAEBXddxCHRQURG0XWcfWpsHBwdRam3ZFHwsh6XAf4Nq1axkZGWIlAwBATWpq6nvG4eR9+vSheQruvn37WK1NN2/eTC16V/S0EM6ePXvy5MnMGZlMJlYyAADUsG6ArV+/vj/FUwdY77SbN2+m2dq0K3paCAkhUqmUOWR9TgEA0D0ZGR
l3795lztBcJtPx3ltAQAC16Bz0txBu3LjR2tpaOex45xoAQMfs3r2bOZw2bdqUKVOoRWd9Gf3oo48mTJhALToH/S2EFhYWXl5ezBnWWiYAAF1SWVl58OBB5gzNr4Md1+fTjM5NfwshISQwMJC5WiknJ+fKlSsi5gMAIJy4uLi6ujrlsF+/fmvWrKEWnbVj287OTqwW2x3pdSF0c3P75JNPmDPYRwEAOkkul7MWqvj4+Jibm9OJ3rGHl1QqpdbatFt6XQhJh+/mR44cYXbAAwDQDefPn3/69KlyKJFIWD22BCVua9Nu6XshXLFiBbPHXUtLS1xcnIj5AAAIgXW76/PPPx89erRY0RcvXjxs2DBq0bul74XQyMiI9cEkOjqaeVYkAIC2e/369alTp5gzNBeq5OXlnT9/XqzoqtD3Qkg6nINVXFx88uRJEfMBAOBXdHR0a2urcjh06NCvvvqKWvSoqChWa9MFCxZQi64KFEIyePDgpUuXMmewZAYAdEZTUxPrFNyAgABDQ0M60evr6xMSEpgzgYGB1FqbqkizshEL63v6pUuXcnNzxUoGAIBHhw8ffvv2rXJoYmKybds2atEPHjwoYmtTFaEQEkLIvHnz3N3dlUO5XB4VFSViPgAAfImIiGAOV69ePXDgQGrRWTfY1q1bZ2trSy26ilAIf8daSZyYmFhbWytWMgAAvMjOzr527RpzhuZClYyMjDt37ogVXXUohL/z9vbu27evclhZWZmSkiJiPgAAvbdnzx7mcMKECbNnzxYruoeHx9SpU6lFVx0K4e+sra03btzInGH9JwQA0C7V1dUHDhxgzgQFBVGLXllZmZqaypz55ptvqEVXCwrhH1h/RbKysm7evClWMgAAvZSUlPThwwfl0NLScsOGDdSix8fHM1ub2tjYrF27llp0taAQ/mHixImzZs1izmAfBQBor+joaObQ29vb0tKSTmi5XM6KTrO1qbpQCNthPchNS0t79+6dWMkAAPTYr7/++ujRI+YM6zRyQV24cIHV2pRmdHWhELazZs0ae3t75bCxsZG1FRQAQCuwbmh9+umn48aNEyv6woULabY2VRcKYTsdt5pGRkYyWxMBAGi+N2/eHD16lDlDc9+CuK1NewCFkC0wMJDZfOjVq1c///yziPkAAKhLJpMxDw8YPHjw8uXLaUZvaWlRDp2cnBYvXkwteg+gELINHTp00aJFzBksmQEALdLS0tLxFFzm0QKCam5ujo+PZ874+/tTa23aMyiEnWB9iz979uyLFy/ESgYAQC0nTpwoLCxUDjseNieow4cPFxcXK4cmJiYadQZvp1AIO7Fo0aJRo0Yph21tbax1wAAAGot1E2vZsmWOjo7UorNam3799dc0W5v2DAphJyQSCesjTFxcXENDg1j5AACo6Pnz55cuXWLO0Fyo8vjxYxFbm/YYCmHnfHx8zMzMlMOysrJDhw6JmA8AgCrCw8PlcrlyOGrUqM8++4xa9D179jCju7u7z5kzh1r0HkMh7Jytre2aNWuYM1gyAwAarr6+Pjk5mTkTHBwskUjoRK+urt6/fz9zJiQkhE7oXkIh7BLrG/3NmzfT09PFSgYAoFv79+8vLy9XDs3Nzb28vKhFT05OFrG1aW+gEHZpxowZnp6ezBksmQEATcY6UXzjxo39+vWjFp31DrllyxYrKytq0XsDhZBLQEAAc7h///6KigqxkgEA4HDr1i3WXSt/f39q0a9cufLw4UOxovcSCiGXjRs39u/fXzmsq6tj3X8HANAQrHUMM2fOZN3Tohn9k08+odnatJdQCLn06dOHdYedtSILAEATvH//Pi0tjTlDc9+CuK1New+FsBusNVfPnj27fPmyiPkAAHTE2us8YMCA1atXU4seExPT1NSkHA4aNGjFihXUovceCmE3Ro4cOW/ePOYM9lEAgEaRy+WxsbHMGdZOaEG1trbGxcUxZ2i2NuUFCmH3WN/xjx8/zuzjBwAgrrNnzz579kw5ND
AwoLlQ5cSJE69evVIOjYyM/Pz8qEXnBQph95YtW+bs7KwctrS0sD58AQCIiNXe88svvxw+fDi16KybZEuXLqXZ2pQXKITdMzQ0ZJ3WyzrrCwBALB3PTKW5UEXc1qZ8QSFUiVQqNTExUQ5LSkqOHTsmXjoAAL+LjIxsbW1VDp2dnT///HNq0SMiItra2pTDjosqtAIKoUo6LoJi3YsAAKCvsbExISGBORMUFETtFNz6+vqkpCTmDM3WpjxCIVQV6/v+r7/++ujRI7GSAQAghKSlpb179045NDU19fb2phb9wIEDzNamHTdeawsUQlV9/PHH48ePZ86g9SgAiIu1UGXt2rX29vbUondsbcpsxaVFUAjVwFqRnJSUxGy1DgBAU1ZW1s2bN5kzNBeq3L59+969e8wZVnNmLYJCqAYvLy9mM/Xq6uoDBw6ImA8A6LPw8HDmcNKkSTNmzKAWnfVltONxPVoEhVANlpaWGzduZM7s2bNHrGQAQJ9VVVWxPogHBwdTi/7+/fuDBw8yZ7Rx14QSCqF6QkJCmGuisrOzr127JmI+AKCfEhMTa2trlUMbG5v169dTix4fH89sbWpra7tmzRpq0XmHQqiesWPHzpkzhzmD1qMAQF9MTAxz6O3t3bdvXzqh5XI5KzrN1qZCQCFUG+sOwE8//fT27VuxkgEAPXTp0qXs7GzlUCKR0Fyo8vPPPzNbm0okEl9fX2rRhYBCqLZVq1YNHDhQOWxqamJ1XgcAEBTrRtS8efNcXV3Fir5o0aJRo0ZRiy4EFEK1mZiYsD7+REdHM1scAQAIp6Sk5MSJE8wZmgtVCgoKzpw5I1Z0gaAQ9oS/vz+ziVFBQcHp06dFzAcA9Ed0dDSz6b+Dg8OSJUuoRY+KimK1Nl20aBG16AJBIewJJyenxYsXM2ewZAYAKOh4DJy/vz+1U3Cbmpri4+OZM4GBgdRamwoHhbCHWHcDzp079/TpU7GSAQA9cfTo0aKiIuXQyMjIx8eHWvRDhw4x1waamppu3bqVWnThoBD20MKFC0ePHq0cdlxPDADAO9bNp5UrVw4ZMoRadNapO2vWrKHZ2lQ4KIQ9JJFIpFIpcyYuLq6urk6sfABA5+Xk5Pz666/MGZoLVbKysm7cuCFWdEGhEPacj4+Pubm5clhRUZGWliZiPgCg2yIjI+VyuXLo5ub28ccfU4vO+jo4ceLEmTNnUosuKBTCnrOxsVm7di1zBktmAEAgNTU1ycnJzJmgoCBqp+BWV1enpKQwZ2i2NhUaCmGvfPPNN8zhnTt3WOeSAADwYv/+/VVVVcqhhYXF5s2bqUVPSEiorq5WDq2trTds2EAtutBQCHvFw8Nj6tSpzBl8KQQAIchkMuZw06ZN1tbWYkWn2dqUAhTC3mI9Lk5JSSkvLxcrGQDQSdevX79//z5zhrVYT1CXL19mtjYlhPj5+VGLTgEKYW+tW7fO1tZWOayvr09MTBQvHQDQQayFKh999JGHhwe16B1bm7q7u1OLTgEKYW/16dPH29ubORMREdHW1iZSOgCga0pLSw8fPsycoblvoaSk5Pjx42JFpwOFkAeBgYEGBn/8P5mXl3fx4kUR8wEAXRITE9PY2Kgc2tnZrVy5klp0VmvTwYMHL126lFp0OlAIeeDi4rJw4ULmDOs+BgBAz7S1tbG6VkmlUlNTUzrRxW1tSg0KIT9Y9wpOnTr18uVLkXIBAN3BejMxNDSkeQrusWPHWK1Ntf0M3k6hEPJj8eLFw4YNUw5bW1tZH6MAAHqAtVCF9VZDOfqKFStotjalBoWQHwYGBqz1xDKZjHlbHwBAXXl5eefPn2fO0Fyo8uTJk19++UWs6DShEPLGz8+PeeO+tLT0yJEjIuYDANouKiqKuQTdxcVlwYIF1KJ3bG36ySefUItOEwohb+zs7FatWsWcQZcZAOixxsbGpKQk5gxrgbqg6urq9u7dy4pOrbUpZSiEfGLdN7h69eqDBw/ESgYAtFpKSkppaaly2H
HLsqD27dtXUVGhHFpYWHh5eVGLThkKIZ/mzJnDavcQFRUlVjIAoNVYt5RYTayEFh0dzRxu3LiRZmtTylAIecZqALh3794PHz6IlQwAaKmMjIw7d+4wZ2guVLlx44aIrU3pQyHkGaslfE1NDes+OwBAt/bs2cMcdjzoRlCsL6OzZ8+ePHkytej0oRDyrOMhYeHh4cyVVwAA3CorK1NTU5kzrKNPBVVWVvbTTz8xZ3R114QSCiH/WMdG5+Tk/PbbbyLmAwDaJT4+vq6uTjm0sbFZu3YtteixsbENDQ3K4YABA1jr4XUPCiH/3NzcPv74Y+YM9lEAgIrkcjlroYqPj4+5uTmd6G1tbawzeP38/MzMzOhEFwsKoSBYdxKOHDlSXFwsVjIAoEUuXLjw9OlT5VAikdBcqHLmzJkXL14ohx17ZukkFEJBrFy5ktmRr7m5OS4uTsR8AEBbsG4gLVy4cPTo0WJF/+qrr4YPH04tulhQCAVhZGTk4+PDnImKimKe6QUA0NHr169PnTrFnKG5UCU/P//nn39mzgQFBVGLLiIUQqGwTu0qLi5m/f0GAGCRyWQtLS3KoZOT0+LFi6lF79jalHXSqq5CIRSKg4PDkiVLmDNYMgMAHJqbm+Pj45kz/v7+hoaGdKI3NjYmJiYyZwICAqi1NhWXXvySYmHd07h48WJubq5YyQCAhjt8+DBzVZ2JiQnNU3BTU1OZrU1NTU23bNlCLbq4UAgFNG/ePFdXV+Ww46poAAAl1k2jVatWDRw4UKzo69evt7OzoxZdXCiEApJIJAEBAcyZhISE2tpasfIBAI31+PHjq1evMmdoLlTJzMy8ffs2c0bnu8kwoRAKy9vbu2/fvsphx85JAACEkD179jB7Mbq7u8+ZM4dmdObQw8Nj2rRp1KKLDoVQWDY2NuvXr2fOsP7CAQBUV1fv37+fORMSEkItescP6DSjawIUQsGx/kp1vAUBAHouOTmZeV6bpaXlhg0bqEVnPbKxsbFZt24dteiaAIVQcBMnTpw5cyZzJiIiQqxkAEADsZbRbdmyxcrKik7ojov4tm3bRq21qYZAIaSB9dj54MGD7969EysZANAoV65cefjwIXPG39+fWnTWti6JREIzuoZAIaRhzZo19vb2ymHHjasAoLfGjh37f//v/3VyclIM586dO27cOGrRWbsmFixYQLO1qYZAIaTB1NR069atzJno6GhmKyMA0Ft2dnb/8i//8uLFi6NHjy5YsCA4OJha6I6tH/Vq14SSBIen0/Hq1SsXF5fW1lblzOnTp7/88ksRUwIAPfe//tf/+j//5/8oh05OTvn5+UZGRiKmJAq9+4XF4uzs/MUXX5w+fVo5ExkZ2a4QNjSQ4mLy5g0pKyMVFaS6mtTWEsY50cTQkJibEwsLYm1NbG2JvT0ZPJgMGEAkEoq/BwBohtZW8uYNefOGvHtHKipIVRWprSV1dUT53UYiIX36kL59ibU16deP2NuTQYPIoEHkH3Wu4/FwUqlUD6sgwTdCmk6fPs1sJG9gYPD82bPhBgbk6VOSn0/evSM9+G9hZkacncnIkcTVlVha8pkuAGig0lKSm0vy8khhIWGcU6EqQ0MyZAhxcSGurgd//ZW5TcLY2Pjly5cODg58ZqslUAjpaWtrGzlyJPP05+8//fQ/Pv6Y62fkclW/8EkkxMmJTJxIxo0jJia9yxQANEx1NcnKIg8eEEZf7F76ZN++K8+fK4fr1q1LSUnh68W1CwohVT/+8MP3//2/K4cDzM1f/+lPZvzeizA1JR4eZMYMYm3N58sCgChKSsj16yQnh/C6vC6ntNQ9PJz57n/lypW5c+fyGEKLYNUoLXI5efzYRyJhlr2yurqfHj/mOVBjI7l1i+zeTU6dItXVPL84AFDz5g05cIDIZCQ7m98qSAgJv3uXWQXH2tl99OIFKSriN4q2wDdCKkpKyJkzpLCQEOJ19OjerCzln8xycrru4yNUXGNjMmcOmTWL6OUDcA
BtVVtLLl4kWVk9WTeggpqmpiH/9V8fGhuVM+FffRU0dSohhIwbRxYu1LcFB/hGKLCWFnLuHImJUVRBQkjglCnMP7/x+vX9khKhojc3k19+IVFRpKBAqBAAwK/MTLJnD8nMFKgKEkL2ZmUxq6CFicmmCRN+Hzx6RMLDyb17wkXXQCiEQnrzhkRHk1u3mH+lZjo5ebZflxV9756wabx/TxITyaVLvN9dAQA+1deTgwfJ8ePt9k0JIKr9e47XxIlWpqZ/jBsbyenTZP9+ojeHp6IQCiYjg8TFkbKyjn/i7+nJHO578KCivl7YZORycu0aSUwkNTXCBgKAnikuJtHR5MkToeNcffXqwdu3zJmA9repfpeXpz83k1AIBSCXk59/JidOdLXLZ+OECf369FEO65qb9z54QCOx16+JTEbevKERCwBUl51NEhJIVRWFUJHtvw7OdXYeP3Bg55fW1JDkZJKZSSErcaEQ8q21lRw6RDhPHDQ3NvaaOJE5E3n3LqVVS9XVJCGBMPYyAoDIbt0iP/3Uk93x6iutrT2Sk8OcCVSskelKays5fpz89puwaYkNhZBXLS0kJYW0/3vWqeCpU5n75J+Ulf3y8qVgabXX1EQOHCDPnlEKBwAcrl4l585RiyZLT29kVNxBFhYr3dy6/7FffiEXLwqYlthQCPnT1kbS0khenirXjrK1/WzECOZM5N27wqTVnuJ7Z0sLSUtT8XthfX19FZU7NgB658YNcvkytWitbW2x9+8zZ3wnTzYxNFTph69fJ7/+KkRWmgCFkCdyOTl+XK2vWax9FMeePCn68IHvtDqQSP6ohampqjwv3Ldv36BBg7y8vDIyMgRPD0B/ZGWRCxdoBjz19OnLykrl0NDAwHfyZDV+/soVQufzOnUohDy5do2oueBl2ZgxjlZWymFLW1tM+w9rQlHWQsU90u7WkUZHRzc0NOzdu3fy5MlTpkyRyWT1Qq9xBdB5BQXk5EnKMVnLZJaMHu1sY6PeS5w9q+JNL+2CQsiH58/JL7+o+0NGHT6OydLTmxkHFgpI2ci7upqkpXHsL7xx40Z6erpymJ6e7u/v7+DgEBoa+gIrbgB6pqaGHDpE6Pxj/4e88vIL7WtYEPcymU7J5eSnnwjja6VuQCHstepqcvRoz7owSD09jRk36Euqq4/n5vKXmWpev+ao4pGRkR0nKysrd+3aNXLkyAULFhw6dKiV7r9nAO0ml5MjR+jv6I24e7eN8TY1sn//ee2XKXRP8eMNDeSnn3SsOwcKYe8oHg3W1f3+v9U02NJy+ZgxzBlKS2ZYrl/vatvskCFDbLq4edLW1nbx4sU1a9aMHj36r3/9a1lnrQMAgO3WLfr7l+qbmxPbbwcMmjrVQN0zvZXXFxXp2IYKFMLeefDgjzvmPTopnnV34vKLF4/evet9XuqRy7va/v/DDz+UlJSkpaXNmjWrq5/Oz8//7rvvHB0d16xZc1Gn11gD9FZFBc1lokopjx6VMx7t9zE23jJpUq9e8epVQv+dSjAohL3Q0EDOn+/la3wybNg4e3vmjIzxTI6e9+/J9eud/omZmdnq1auvX79+7949qVRqbm7e6WWNjY2HDh1asGCBp6enTCar1ZsuhQBqOHOGzsZ5Ftatpg3jx/dnNLfqibY2cvq0zjTmRiHshStXfr8p2jv+7fdRJGZmVjMaw9Nz/Tr3+YWenp7R0dFFRUU7d+4c0fXThfv37ysW1Pj7+z969EiARAG00/PnhHEiPDV3ioruFRczZzpvLqquggLC+3GqIkEh7KmqKr621Gxp3/q9urHxwMOHvLyyepqbyZUr3V5lY2MTGhr67NmzCxcurF692qiLkw4/fPggk8nGjx8/Z86cQ4cONTc3850ugFaRy8VqzsL6Ojjd0XFK+wNweu7yZd1YNYNC2FPXrvG1+tnS1HTD+PHMmQixdq1mZqrY9tfAwGD+/PlpaWkvX7783//7f9vZ2XV15fXr19esWePs7Pz999+/fv
2av1wBtEpuLml/5gMdFfX1adnZzJlAXr4OKpSXE1E+tfMNhbBHamv57cgeMm0ac/jg7dvropx+0tpKbt1S6yeGDBnyl7/8pbCwMC0tbf78+V1dVlJS8uOPPw4fPnzJkiUXL16k1GEcQHN08QxeaHEZGXWM+zH9+vRZ7e7OZ4Dr13XgSSEKYY+kp/P7xNvd3n7O0KHMmUihT+vtSkYGaWpS94dMTExWr1594cKFx48fb9++3cLCotPLWltbT506tWDBgjFjxvz444/l5eW9ThdAGxQVkcJC+mHlcnlM+8V3vpMnmxsb8xmjtFQHTrNBIVSfXE4E6IXGOgzlUHb2W1EO0W1sJL1Y4eLm5hYWFlZUVBQdHT1u3LiuLnv69On333/v7Ozs7++flZXV43AA2kGUpeCEnMvLe/r+vXIoIcRPreaiKqLTG1JIKITqe/Xq9wdpvN4Q+Hrs2IGML1JNra3xYjW57nVlsrKykkqlDx8+vHr16urVq427+ARaU1Mjk8kmTZqEFqagy1paxFpdyVom88WoUaNsbfkPk5tLGhr4f1mKUAjVp3zy3KMd9F0xMTT08fBgzkTdu9cqyoqs16+591Gobs6cOWlpaQUFBT/88MPQ9vd+mRQtTIcNG/b999+/pHYuIwAdz58TMTZEFVRVnW5/Hg6fy2SYWloI/d6QvEIhVJNcLtx/cv8pUwwN/vgvUlBVdUaU43P5/h0HDRr03Xff5efnnzhxYv78+ZIuPkC8e/fuxx9/dHFxQQtT0ClPnogSNrr9J+mh1tZfjholVDAUQv3y7h1f35Y6Gmpt/VX7v6miLZkR4KQVQ0PDJUuWXLhw4cmTJ999913//v07vUzZwtTV1fXHH39EC1PQbnK5KOcWNbW2xrV/thLQ/nM2z/LztXpDIQqhmgS+ccdaMvPzs2fPGM+66Xn5Urgl0aNHj/7hhx+KioqSkpImdd3wMC8v7/vvv0cLU9Bu79/TP2iCEPLT48fM1XYmhobb2j954VljIykpEfD1BYZCqCaBt4R/7uIymvE0W04IpdN6WRoaiMBfxczMzBSn3itamPbpovOhsoWpYkENWpiClhGpiQRrmcxqd/eBXWxq4o02t8tAIVSTwJ96JBKJn6cncyb2/v06UfqT0fp8p2hh+vLlyx9++GH48OFdXaZYUDNkyBB/f//HutLhEHRf+yafdGS/e3etfUcOoZbJMOEbob5obiYVFUIH8fHwYO54raivP9S+QxIldM9Ysbe3/+67754/f65oYWrIOK+YqaqqSiaTjRs3TrGgpkWMRv4AahDjrKI9d+4whxMGDpzd9Zpt3mjzqUwohOooL6fQTKhfnz5r2vdAEmfJjBjPJpUtTHNzc7/77rsBAwZ0eplcLlcsqFG0MC0Uo2cHgEqot0/q2LWfdeipUERZzcATFEJ1qNaQuvdYS2ZuFxbeo3+DhdYv2ykXF5cffvih2xamxcXFih0XigU1aGEKmqW1lf5KmaSsrA+MbYsde/oLpbmZl2PpRIFCqA5af6enDRnCOicliv6XQlEavLVnamqqaGGqWFDTt2/fTi9rampSLKgZO3bsjz/+WFlZSTdNgC6I8Y8ouv17hfekSZaMU96EpQFvGj2DQqgOij3AWF8K92Zl5Qv/eLIdTWp4pjwTODo62r3r3vlPnjz5/vvvhw4d6u/v/+DBA5oZAnSC+j+ilEePHrV/Vidtv/hOWJr0pqEWFEJ1UFy9uX7cuP6MHQVNra2uu3evOXSI3vFMLS2adrqKtbW1VCp99OiRooVpV2cCV1dXy2SyiRMnTpkyJTk5GWcCg2ho/d1rk8sv5uevOXRo0+HDzPlPhg0bZ29PJwdCqL5D8guFUB0Um371MTb2njSJEGJiaDjU2poQ0tLWdig7e058/BSZTJaeTmNPhaY2OVO0MH316tUPP/zg6OjY1WXp6elbtmxxcnL6/vvvX716RTNDAEJo/AuqbGgIu3Vr1K5dC5KTD2Vnt8nlhJBx9v
aKNoaUlskoaW1zGQnWF6jh8mVy9Sq1aM/Lyw9lZy8fM8YjOrqxwz4BazOzte7uO2bMcOv6dPje+td/JV1sY9AcTU1Nx48fl8lkly5d4vjLbGBg8Nlnn23fvn3x4sVdNTsF4NmrVyQxUaDXTi8ulqWn73vwoONnYjc7uyNr1yZnZf3bJ58Y0/wnvH49GT2aXjj+oBCq47ffyC+/UI75/65e/ZdLl7r6UwOJ5LPhw6Wenivc3Iz4bSQokZD/+T/5PWFDULm5uQkJCTKZrILzYeqoUaN8fHx8fX1thTiPBoCpqIjExvL7kg0tLSdzc/9+69ZNzk4ul7ds+bTr9hRC2byZjBhBOygfUAjVcfs2+flnyjGfl5dH3r2bkJlZwfkgepiNjf+UKds8POy7WFqpNjMz8t13/LwURdXV1SkpKeHh4dyLZczMzJYsWbJjx45Zs2ZRyw30TlkZCQ/n68Vyy8qi7t1LzMys5Dz8b0S/fv5Tpvh4eNiam/MVWlV+fqT9cndtgUKojkePSPtn0dQ0trScyM0Nu32be7GMiaHhsjFjpJ6e83v/uax/f/LNN719EfGkp6fLZLLk5OQGzncNT09PqVS6adMmc/rvGqDz6uvJX//ay9dok8svv3gRduvW6adPOd6slTeHVrq5CXjKBLfQUGJjI07o3kEhVEdBAUlIEDcFjgcDTGMGDAiYMsVn8mQLE5MeRho2jGzZ0sOf1Rhv375NTEyMjIzkXixjY2Pj5eUVGho6Qjtv7ICGksvJ//t/pKeNAN/U1CRlZkbeu/eKc2usjZmZ18SJO2bMGN6vX88C8UMiIf/yL5q/qqBTKITq+PCB/P3vYidBCCFVDQ0Hs7PDbt16XFrKcZmVqem6ceNCpk0bP3Cg2jE8PMjSpT1PUZO0tbVdvnw5LCzs9OnT3S6okUqlK1as6GpvBoB6IiII5z/STqUXF4fdvp366FEz57pTTwcHqafn5gkT+jC6E4vGyop8+63YSfQQCqE65HLy44+E0b5IXIp7JrL09KM5OS2cC5c9HRy2T5++ftw4NZaQLVxIZs7kIUtN8vz589jY2NjY2PecfRGHDBni6+sbHBxsJ9yKXNATaWkkJ0fFa6sbG1MePQq/c+fB27ccl5kZGa12d/92xgyPwYP5SJEnLi5k0yaxk+ghFEI1JSQQalvaVVZcXb03K2vPnTuFHz5wXDbIwmLLpElBU6cqNiZ2Q2sXgHWrsbHxxIkTO3fuvHHjBsdlJiYmy5Ytk0qlHM1OAbqh2lLzJ2VliZmZ0ffucS+EGWVr6+Ph4efp2b+L8zvFNGsWWbBA7CR6CIVQTefPk5s3xU6ic02trcefPJGlp1/Kz+d+qP7lqFGhM2bMGz68yx11Egn585+JmZkwmWoKxYKaffv21XE2C3Zzc/P39/fx8bEQ+mhT0D15eWTfvq7+UPV/s58NH759+vTFo0dr7i7Y1avJ2LFiJ9FDKIRqys0lqaliJ9GNJ2VlkXfvxmdk1DQ1cVzG9ely4EASECBgipqkqqrq4MGDO3fuzOG8hWVlZbVu3bpvvvlm3Lhx1HIDrdfYSP76144tVwS5iyOuf/5nwtfeLepQCNXUxV9rDaR43rDnzp2HPXjeMHMmWbhQ8BQ1iWJBjUwmO3r0KPd5v7Nnzw4NDV2+fLmxJqxQAM0XH0/+sfldLpdfEu65vojs7UlgoNhJ9BwKofqSksjLl2InoYaerEDbsoUMG0YrQc1SXFwsk8kiIyPfcZ64PWjQoC1btgQFBQ2lcPY3aLVr18ilSyqu9LY0NV3f45XeIpozh8ybJ3YSPYdCqL47d8jZs2InoTY19iRNmbIjMXG4iwut1DSRsoXpxYsXOS4zNDRctGhRaGjovHnzNPfhDYgq/fJl2Z//TGPvr4i0tqeMAgqh+mpryd/+phV3RztStUvFP3bUrVy50lAr7swI5smTJ5GRkfHx8TWch46OHj1627Ztfn5+/fv3p5YbaDLF4uSwsLDr169zXM
ZnNyix2NqSkBCxk+gVFMIeSU0lubliJ9Erz96/j8vIiL1//z3ngknsqFP48OFDamrqnj17Hj58yHGZmZnZ6tWr//SnP02aNIlWaqBx8vLyYmJi4uLiysrKOC5zsLTcPHHiN9OmDbGyopabIObPJ7Nni51Er6AQ9gjnkmgt0tDSkpadvfPWrYySEo7LTE1Nly5dih11hJD09PSwsLDU1FTu834VLUw3b97cRwP3e4EwlOutjhw50tr183gBT4wRhZER+fZbouWtelEIe0QuJ1FRhHMxhXZRsYXp5MmT/f39N27c2Fdr10nz4s2bN0lJSREREQWc3RXs7e23bt3q7+8/nP6BOEDRu3fvEhISoqKiXnIuo1OcIRo6Y8ZYXbq/MnkyWbJE7CR6C4Wwp7KzyU8/iZ0Ezyr79k0yMtq1e3d+fj7HZYodddu3b3d3d6eWmwZqa2s7ffr0rl27VDkTGA9cdZKiJ8PevXvrOU9J8/T0lHp5bSovN9exFVUGBiQkhIjb7JsPKIQ9JZeT6GjCuUVP+6xeTcaOxY46dT179iwuLi4mJqa8vJzjMhcXFz8/Px8fnwEDBlDLDYRQU1Nz4MCBiIiIrKwsjssUzxRCQ0NnKx6haXBfqh7Sia+DBIWwV3TlSeHvHB3Jtm3MI+mLiopiYmIiIiJKOXc+DR482MvLKzg42MnJSfgsNVdDQ0NaWtrf//73zMxMjsvwwFWrPX36ND4+XiaTVVRUcFw2cuRIX19fX19fW1vbP2br68nu3YTzu6M2MTEhISHE0lLsPHiAQtg7Bw+SJ0/EToIPEgnx8yOdNbPHjjp1qXG7TCrFA1et0NraeubMGRVvg2/fvn3x4sWd/yu4d4+cPi1gojRp/2JRJRTC3qmuJuHhmnMwU8/NmEE+/5z7kpycnKioqLi4uNraWo7LXF1dt27dKpVK+2n/k4PeqKysTEpKCgsLe/HiBcdl1tbWa9euxQNXjVVSUpKcnBweHv76H23SOjVw4EBvb++AgIBh3C2Z5HKSkEA4X0o7DBpE/PyIDqx6JYSgEPIgK4scOyZ2Er0zYADx9yeqHUWr2FG3e/fuR48ecVxmaWm5fv36oKCgiRMn8pSlVlJxST35xwNXnAmsOa5du7Zr165jx46pslXGy8vLTMXTWsrLSVQU4XxNTWdoSPz8iHY1geOEQsiHw4cJZ1XQaEZGxMeHDBqk7s+p9TaBHXX5+fkymSw+Pl6VB64hISGOjo7UcgMmGh/1MjPJ8eM9T1F0X3xBpk8XOwk+oRDyoamJxMYSzjc4zbV0KfHw6PFP83zjSNc1NDQcPHgwMjLy9u3bHJeZmJisXLnyv/7rvxy0uX+j1rl//350dPT+/ftp3Pw/cYJkZPT8x0Xk7k5WrSK6tQ4AhZAn5eUkLo4o2pXJ5Vrzt2T6dPLFF71/GbWWEmBHHVHhPdfGxqawsBDraChQNAXtdjmYiYnJsmXLpFIpP8vBWltJcjLhbMigiQYPJlu3Ep3bLoVCyJ/CQpKc/Putf62ohW5uZPVqfvNULC7vdkedYnE5dtQp7sLt2rUrOzub9Ufffvvt3/72N1Gy0h8ibxCqryfx8YSzH6lm6dePbNtGLCzEzoN/KIS8yssjqamEcxO6pnBxIevXE2G+lqm1o+6P7cZ6TPHAVdnBQCKR5OTkuLq6ip2XbtKgFUzV1SQhgXBuSdQUVlbE21sHmsh0CoWQb/n5JDVV05eEjRpF1qxRcZlob6i1o27Tpk3mWt66t5eUD1zHjBlz/vx5sdPRQZq4p+XDB5KcTN6/FzxQb9jYEC8vXa2CBIVQEIWFJCWFcB5vRBvzVu2ECWTZMpobgFRtSWxtvXbt2tDQ0LFjx9JKTRM1NTWVlpYOGTJE7ER0ioqfycRpK19bS1JSSFERvYhqGTiQbNyoGx1kuoJCKIyKCpKSonHrSCUS8umnZM4cUZ5fqnpIzT8W1GBHHfSe4i79zp07MziXaIrf9665mRw/Tjo8Khbf6N
Fk1SpiYiJ2HsJCIRRMUxM5eVKD9heam5MVK8jIkWLnofKxpQ4Omzdvxo466Bnt64Qul5Nbt8jFi6StTeRMFCQS8skn5KOPtGDdX6+hEAosK4ucPSt+DzYXF7JsmUbd3BBnzTroOsWNh7CwsNOnT2vlTp6SEnLkiPhLSfv1IytWEL1po49CKLzqanLmjGi9uc3NycKFZMIEjf1Yp3h40+0u5jFjxgQEBPj4+Fjo4upt6D0VT0u2sbHx8vLasWOH5p6W3NJCrl4l168TzhWtQjEwINOnk08/1b3NghxQCGnJyyMXLgh7fiFr86KhIZk6lXz8MVGxBaKoqqqqDh48GBYW9vjxY47LFH2tQkJCxo8fTy038TU1kbdvSWkpqaggHz6Q2lpSX0+U/3INDUmfPqRvX2JtTWxtiZ0dsbfXmW7IqkhPTw8LC0tNTdWpbn/l5eTiRZKTQzXoqFFkwQJiZ0c1qAZAIaRILifZ2eTqVfLunbCBjIzIpElkzhxibS1sIL7J5fJLly6pciawp6fn9u3b169fr7NnAjc3k/x8kpdHXr0ipaVErX+nhobEwYE4O5ORI4mTk64Wxerq6pSUlD179jx8+JDjMjMzs9WrV//pT3+aNGkSrdT4U1xMrlwhT58KHsjFhcydS4YOFTyQRkIhpE4uJ/n55O5d8uwZ/0/Fra3J5MnE05NoeWuu4uLivXv37tmzp7CwkOOyQYMGbdmyJTAw0NnZmVpuwmptJU+fkgcPyPPn/HRm6NOHuLmRCRPI0KEae3tcXU+ePImMjIyPj6+pqeG4bPTo0du2bfPz8+vfvz+13ARRVkbu3CEPH5KGhk7+tDd9rExMyPjxZOpUXTpKogdQCMVTW0uys0lODiko6G1FtLQkrq7E3Z04O+vMmx1hnAnc29NQtUJNDblzh9y/TziflfacrS2ZPp1MmqS9z370/Yzolhby9Cl5/Jg8f97b9XfGxmTkSDJ2LHF11d6/DzxCIdQAjY3k5Uvy6hUpKiJv36r0V1wiIf36kcGDiZMTGTaM2NvrUv3rKDc3NyEhQSaTVXA2oxo1apSPj4+vr6+trS213HhQXU2uXiUZGTSa85mbk1mzyLRp2vX2p7hDsHv37iLOXeeKOwRBQUFDdfsWX1sbKSwkL1+SwkLy5g2prlbpp/r2JYMHE0dH4uxMnJwEaq+opVAINc+HD6SyklRVkbo60tT0e100MCCmpsTcnPTtS2xsSL9+2vVGxgvFM6Hw8PAHDx5wXKZ4JvTtt9969OJ4KUpaWsi1a+TGDdo9+Swtybx5mryWWEHZhAHPjLnU1/++iqq6mjQ1kYaG358o9+lDTEyIhQWxsiL9+xOtWCIkEhRC0D5qrRLU3Bam+fnk1CkxGy4PHUqWLiUa+e1ZxVXEVlZW69at07tVxMA3FELQVm/fvk1MTIyMjHz16hXHZYp9Y6GhoSNGjKCWWzdaWsj58+TuXXGiM9dWGBmRBQvI1Kma89VQsa903759dZzdet3c3Pz9/bGvFHiBQgjaTd1OIuK3MC0vJwcPCr6FRi1jxpDly4mpqYgpKDoNhYWFXb9+neMyZach0ZqCgi5CIQQd8fz589jY2NjY2PecJ9oMGTLE19c3ODjYTpRdw/n55NChzhfBi2vAALJ+PRFjm4Fa/+GCgoLs7e2p5QZ6AoUQdEpDQ8PJkyd37tx548YNjsvE+WKRlUVOnNCUlsodmZuTDRsIreOftO+rPOguFELQTWo9avL19RX8/Lm7d8mZM7//797sgBaUiQnZuFHo9iKKh7vdnk9pY2OzZs2aHTt2uLm5CZoPAAoh6DLFieS7du3Kz8/nuEyx+PCbb74ZN26cIHmkp5NTpwR5Zd6ZmJBNmwQ6dkDx6SQ5ObmB8+awpi/3BZ2DQgi6T/XtaLNnzw4NDV2+fDmf29EePyY//aRes1BxmZmRbdt47Lys2AAaERGRlZXFcZnidNwdO3bMmjWLr9AAqkAhBD1SVFQUExMTERFRWl
rKcdngwYO9vLyCg4Odev/FqKiIJCbSaBnDLxsb4uvb+461KrYEGjlypK+vr/a1BAJdgUIIeodey8raWhIdrWoHLE3j7Ey8vHp2coV+NYkF7YdCCPorJycnKipKqEMM5HKybx/hfDap6T76iHz2mVo/UVJSkpycHB4e/vr1a47LBg4c6O3trVPHhoA2QyEEfffhw4fU1NTdu3c/evSI4zK1j7W7eZOcP89LhqKRSIi3t4qLSK9du7Zr1y40BQVthEII8Ds+DzovLyeRkdr3aLCj/v1JYCDpegOfoinorl27srOzOV7G0tJy/fr1wcHBEyZMECBLgF5BIQRo582bN0lJSREREQUFBRyX2dvbb926NSAgYNiwYZ388d692n1TlGnuXPLppx2n79+/Hx0dvX///lrOAxRdXV23bt0qlUr79esnWIoAvYJCCNCJ1tbWM2fO7Nq1S5XlHlKpdOXKlYbKA95yc0lqKqVEKTAyIiEhxNpaMVI0Be12qZGyd4+unY4LugiFEIDL06dP4+PjY2JiysvLOS5zcXHx8/Pz8fEZYGtLIiJIWRm1DGmYOJEsX56XlxcTExMXF1fG+dspNp+EhIQ4OjpSSxCgN1AIAbrX0NCQlpb297//PTMzk+MyU1PTpR9/LB00aL7mHPnUa21y+eWXL2WVlUdOnWptbe3qMolEMm/ePDQFBW2EQgigBkWTsL1799bX13Nc5ungIPX03Dh+fF8TE2q58a6yoSEpMzPs9u0XnNvhra2t165du337dnd3d2q5AfAIhRBAbYoWpmFhYS9evOC4zNrMbK27e+iMGWNFOfKpF9KLi2Xp6XsfPKhXYQHtxo0bBW9ZDiAkFEKAHlK2MD1y5AjXPUNC5o0YIfX0XOHmZtSjRi3UNLS0pGVn//3mzcw3bzguUzQFxem4oDNQCAF66/dVJLGxZZxHyzpYWm6eODFk2jRHKytquano6fv38RkZMenp5Zy3fP9YEzRgALXcAISGQgjAj8bbt0/813/J0tMvcu4gNDE0XDZmjNTTc97w4aLvK2htazvz7Nmu27cv5edzvBEYSCSfDR8u9fRcuWuX4aBB9PIDoAKFEIAnBw6QZ88IIfdLSqLv3dv/8GFtUxPH5a4DBmydNEnq6dmPo0ONYN7U1CRlZkbcvVtQVcVxmX3fvls9PPw9PYcrtsN/9hn56CNKKQLQgkIIwIeWFvLXvxLG0pKqhoaD2dm7bt/OfveO4+csTU3XjxsXPG3ahIEDhc+SEELSi4vDbt9OffSouevnmuQfC183T5jQh9kU1MmJbNsmeIoAdKEQAvDh1SuSmNjpn1wrKNh1+/bRnJyWtjaOF/B0cNg+ffr6ceOMlR1qePWhsTH10aM9d+48fPuW4zIzI6PV7u5/mjlzUqe3QA0MyHffEW3eEwLQEQohAB+uXiWXL3P8eUl1dXJW1p47dwo/fOC4bKCFhfekSYFTpjjb2PCVWk5padS9e/EZGTWct2pH29pu8/Dw8/Tsz32r1suLDB/OV24AmgCFEIAPBw+SJ0+6vaqptfX4kyey9HRVFqdsnz598ejRPV5Qo4zFvXjH0MBg0ciRoTNmqLp4Z/58Mnt2z1IC0EwohAB82LWLcLZfYcktK0vIzJSlp1dwblcY2b+/7+TJvpMn25qbq/7iRR8+xNy/H3n37jvOcyEGW1p6TZwYNHXq0H801FbJ+PFk5Uo1rgfQeCiEAL3W2kr+/d+J+v+UqhsbUx49irh7N4tzA7uZkdESV9cdM2bMcnLiuKxNLr/84oUsPb3b55Gzhw4NnT59+ZgxPXke6eBA/PzU/ikADYZCCNBr5eVk9+7evICipVlyVlYD9/HuDg5ST89NEyaYtz/evbKhIS07e+etWzmlpRw/bmVqum7cuG+mTx9nb9/zXM3NyX/7bz3/cQDNg0II0GsFBSQhofcv87amJjEzM+revZeVlRyX2ZiZeU2cGDpjxoh+/RQVdN+DB3WcTUHd7Oz8PT19J0
/mpwn4v/4rEWZpK4AoUAgBeu3JE3LwIF8v1trWdvLp04i7dy/m5XEvqLE0Na1qaOB4KVMjo9VjxwZNnTqT856q2v7pn4iFBZ8vCCAqHBsG0Guc2xLUZWhgsHzMmOVjxjwvL4+9fz/2/v33dXUdL2uTyzmq4BArK9/Jk4OnTrUT4lwIXn9fANHhGyFAr2VkkBMnBHrtxpaWE7m5YbdvXy8oYP2Rfd++rHWhyqagwp50ERREtO1gKQAO+EYIoNFMjYxWu7uvdnfv+Djwm2nT/ucvvyj+N/PBoXjJAmglFEKAXjOi8e/I08Eh2sHhrwsWHMzO3nnr1mALi3+aNWvn7dvDbGw6XUoqICq/LwA1uDUK0Gu5uSQ1lWbANrm8rK7Ovm/fNzU1g+ivW/nnfyY4kh50iEaflw2gHdRp+8ILA4nEvm9fQogIVVAiIWKcGwUgHBRCgF6ztBQ7A4r69iXCLcMBEAP+QgP0mpWVHtUG/o7FANAQevOvF0A4BgZEf9ZqDhggdgYAPEMhBOBDp8fY6qSBA8XOAIBnKIQAfHBwEDsDWvTnNwW9gUIIwIehQ8XOgAojIxRC0D0ohAB8cHAgpqZiJyE8R0fspgfdg0IIwAcDAzJihNhJCG/0aLEzAOAfCiEAT9zcxM5AeGPGiJ0BAP9QCAF44upKqHX7FMWQIXq0SwT0CQohAE9MTMjYsWInIaRJk8TOAEAQKIQA/JkyRewMBGNiQsaPFzsJAEGgEALwx9GRODqKnYQwJk/Wi2WxoJdQCAF4NWeO2BkIwNCQzJwpdhIAQkEhBODV6NE6uOXc05NYWYmdBIBQUAgBeCWRkAULxE6CV6amZO5csZMAEBAKIQDfhg3TqeWjH3+M8+hBt6EQAgjgiy90ZGnJoEFk+nSxkwAQFgohgAAsLckXX4idRK8ZGpLly/XozGHQV/grDiCMSZO0/gbp/Pk4fRD0AQohgGCWLiW2tmIn0VNjxuCmKOgJFEIAwZiaknXriJmZ2Hmoz96erFhBJBKx8wCgAYUQQEgDBpC1a4mhodh5qMPKimzcSExMxM4DgBIUQgCBDRtGvv76jyUncrmo2XTH3Jxs2oTt86BXUAgBhDdmDPn669+/F2ry/ca+fcmWLcTOTuw8AKiSyDX88ymAznj+nBw6RJqaxM6jC/36kU2bSP/+YucBQBsKIQBFb96QlBTy4YPYeRBCCJHL//h66uRE1q5FBxnQTyiEAHTV1pLDh8mLF2LnwTB1Kvn8cy1b0QPAHxRCAOrkcnLtGvn1V9LW9seMKM8O+/QhS5YQNzcRQgNoDBRCAJG8fUtOniRFRaIl4O5OFi3C7VAAFEIA8cjlJCOD/PILqamhGtfennz+ORkxgmpQAE2FQgggtqYmcucOuXmT1NUJHsvWlsydS8aP1+hdHAB0oRACaIbmZpKZSe7eJaWlvXqdrh43DhtGpk8nrq4ogQAsKIQAGqawkDx8SB4/5ud+qZ0dGTuWTJiADYIAXUEhBNBIcjkpKSF5eaSggBQWkoYGNX7Wyoo4OpJhw4iLC+ofQLdQCAG0QWUlKSsj5eXkwwdSW0saG0ljIyGESCTE1JSYmRELC2JtTfr3J/b2xNxc7HQBtAkKIQAA6DU03QYAAL2GQggAAHoNhRAAAPQaCiEAAOg1FEIAANBrKIQAAKDXUAgBAECvoRACAIBeQyEEAAC9hkIIAAB6DYUQAAD0GgohAADoNRRCAADQayiEAACg11AIAQBAr6EQAgCAXkMhBAAAvYZCCAAAeg2FEAAA9BoKIQAA6DUUQgAA0GsohAAAoNdQCAEAQK+hEAIAgF5DIQQAAL32/wG9cQ1iev4DFwAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": { @@ -5794,9 +5202,9 @@ "colab_type": "text" }, "source": [ - "As we can see above, 2 of 4 of 
the model's MLM predictions are chemically valid. The one the model would've chosen (with a score of 0.6), is the first image, in which the top left molecular structure resembles the benzene found in the therapy Remdesivir. Overall, the model seems to understand syntax with a pretty decent degree of certainity. \n", + "As we can see above, 5 out of 5 of the model's MLM predictions are chemically valid. Overall, the model seems to understand syntax with a pretty decent degree of certainity. \n", "\n", - "However, further training on a more specific dataset (say leads for a specific target) may generate a stronger MLM model. Let's now fine-tune our model on a dataset of our choice, Tox21." + "However, further training on a more specific dataset (say leads for a specific target) may generate a stronger chemical transformer model. Let's now fine-tune our model on a dataset of our choice, Tox21." ] }, { @@ -5824,7 +5232,7 @@ "base_uri": "https://localhost:8080/", "height": 16 }, - "outputId": "3a5079d6-ecc1-474a-970c-0e9afc667da3" + "outputId": "e1db2b5e-58c3-4ae0-a81f-f1cfef3b7f48" }, "source": [ "%%javascript\n", @@ -5920,7 +5328,7 @@ "base_uri": "https://localhost:8080/", "height": 394 }, - "outputId": "f557fa2f-dbe5-4343-ec3f-ab88ea1aa1bb" + "outputId": "7b4a267b-07da-4a0a-ba50-320da6b3d517" }, "source": [ "m = Chem.MolFromSmiles('CCCCC[C@@H](Br)CC')\n", @@ -5931,7 +5339,7 @@ { "output_type": "display_data", "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAF5CAYAAAB6A1o9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAY20lEQVR4nO3df5iNdd7A8c89c8ZkxiBMQsasNdJjGoxRJJ5GsVSbrHJl1LKF9dTGFlvsolareMSW6Idqk7IpNnnyY6crlpCMQcxo/djy4wpFY0ajmTHmnM/zR8tKZsaPM+ee87nfr+s618XpzLk/c195u93nO/ftqKoAAGyKcHsAAEDVIfIAYBiRBwDDiDwAGEbkAcAwX2UvcBxnqIgMFRGJjY1t36pVqyofCgBwfjZu3PiNqsaf+bxzPkso09LSNDs7O6iDAQAunuM4G1U17cznOV0DAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAwyqNvOM4Qx3HyXYcJ/vw4cOhmAkAECSVRl5VZ6lqmqqmxcfHh2ImAECQcLoGAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADPO5PQCA4Fv516myat60U7+PiPRJ7QaN5cprust/939Yataq6+J0CCUiDxh23//+n4iI+MtOyMEvcmXlX5+WIwd3S8b4N1yeDKFC5AHDrmjV/tSvmyV3lNLiY7LyralSWlIkNS6J+dHrNRCQQMAvkb6oUI6JKkTkAQ+JjokTDQREA34REXlm8LWSmNxJrmjVXta995Lkf7VPBj25QBL+6xqXJ0WwEHnAsIC/TES+P13z1Re58sn/vSItUtMlOibu1Gs+//Qj+Xr3Z9Lt7kfkktg6UrdhU7fGRRUg8oBhT/Rp9oPfN27RRm5/6NkfPHe8qFD+Z/qHElO7XihHQ4gQecCwIVOXiohIIOCXIwd2y0fvPCNvPpYh905+T6Kia4qISMJVHQi8YUQeMKxxUptTv77iylRp0DRJXn64l2z+8G255pZBIiJSq268S9MhFPhhKMBD4psmiYjIob3b//Ok47g0DUKByAMecjLusXXquzwJQoXTNYBhX27fKCIigUBAjhzcLavfeVZq1IyVNt3udHkyhAqRBwx79ZHbRETEiYiQuHoN5YqWqXJDxiip1yjR3cEQMo6qnvOL09LSNDs7uwrHAQBcCMdxNqpq2pnPV3pO3nGcoY7jZDuOk3348OGqmQ5ASJw4XiwfvPaEFBfmuz0KQqTSyKvqLFVNU9W0+HiWWgHhrOjbI7Ju4YsS8PvdHgUhwuoawEOKvv3+CL5mHJca9goiD3h
IceERiY6Jk4hI1lx4BZEHPKSoMF9qxl3q9hgIISIPeEhxYb7EEHlPIfKAhxR9y5G81xB5wEOKC/MlpjaR9xIiD3hIMefkPYfIAx5SVFhA5D2GyAMewgev3kPkAQ/5/nQNPwjlJUQe8BBO13gPkQc8IuD3S8l3Rzld4zFEHvCIku+OiqhyJO8xRB7wiKJvj4iIsE7eY4g84BHFhfkSEemTGjVruT0KQojIAx5x8uJkjuO4PQpCiMgDHsEaeW8i8oBHfH9xMtbIew2RBzyC69Z4E5EHPIIbhngTkQc8oujbI5yT9yBu9Ah4xLrVq6RhUqF0d3sQhBRH8oBHbPg6WuoldXR7DIQYkQc8Yu/XBXJZk0S3x0CIEXnAI/Ly8qR+/fpuj4EQI/KABxQVFUlJSQmR9yAiD3hAXl6eiAiR9yAiD3jAycjXq1fP5UkQakQe8IC8vDyJi4uTGjVquD0KQozIAx5w5MgRTtV4FJEHPCAvL49TNR5F5F1w8OBBKS0tdXsMeAjLJ72LyIdQYWGhTJ48WRISEiQ9PV0KCgrcHgkeQeS9i8iHQH5+vkyYMEESExNl1qxZMnbsWCkqKpLk5GTJzMx0ezx4AJH3LiJfhQ4dOiRjxoyRZs2aybx582T69OmyY8cOeeyxxyQrK0sGDx4st956qwwbNkyOHTvm9rgwjMh7F5GvAvv375ff/va3kpiYKJmZmfLaa69Jbm6uDBgwQHy+7y/8GRUVJY8//risW7dOVq9eLSkpKfLRRx+5PDmsIvLeReSDaPfu3TJs2DBp3ry5ZGVlyYIFC2Tjxo3St29fiYg4+65OS0s79Zpu3brJyJEjpbi4OMSTwzoi711EPgi2b98uAwcOlKSkJNm1a5f8/e9/l7Vr18rNN98sjuNU+vWXXHKJTJkyRVauXCmLFi2S1NRU2bBhQwgmDy9r166V9evXuz1GWCLy3kXkL8KWLVukX79+0rp1a8nLy5PVq1fL8uXLJT09/Zzifqbrr79etmzZIt26dZPrrrtOxo0bx1JLEdm7d6888MAD0rVrV0lPT5d3333X7ZHCit/vl/z8fNbJexSRvwDr16+X2267TVJTUyUQCEh2drYsXrxYOnXqdNHvHRsbKzNnzpSlS5fK7Nmz5ZprrpGtW7cGYerws2vXLrn33nulRYsWsm3bNlmyZIlMmDBBMjIy5O6775b8/Hy3RwwLBQUFoqocyXsUkT9HqiqrVq2S7t27S+fOnaVOnTqSk5MjCxYskHbt2gV9e927d5ecnBxp166ddOjQQSZNmiRlZWVB3051lJOTI/3795dWrVrJgQMHZMWKFbJy5Urp2bOnjBo1SjZt2iTbt2+X5ORkWbZsmdvjVntcgdLjVPWcH+3bt1evCQQCumzZMu3cubNGRUXpkCFD9F//+ldIZ1i0aJE2bNhQO3bsqDt27AjptkMpKytLe/furY7j6O23364bNmwo97WlpaX6xz/+UX0+nw4ZMkS//fbbEE4aXtatW6eRkZEaCATcHgVVSESy9Szd5ki+HIFAQBYuXCgdOnSQPn36SGpqqnz++ecya9Ys+elPfxrSWW677TbJzc2Vpk2bStu2bWX69OkSCARCOkNVWr16tfTs2VM6duwoMTExsmXLFlm4cKGkpaWV+zVRUVEyfvx4Wb9+vXz88ceSkpIiK1euDN3QYeTkdWsu5HMiGHC28pf38MKR/IkTJ3Tu3LnaunVrrVWrlj7yyCP61VdfuT3WKfPmzdN69erpDTfcoLt373Z7nAsWCAT0gw8+0K5du6rP59Nf/epXF/yvlJKSEn300Uc1MjJSR4wYoUVFRUGeNrxNnjxZGzVqxJG8cVLOkTyR/7fjx4/rK6+8oi1atNC6devqY489pnl5eW6PdVYHDhzQW265RWvVqqUvv/xyWP3hDQQCumjRIu3QoYNGR0fr/fffr3v27AnKe69du1ZbtGihV155pX7yySdBec9wduzYMV2wYIFmZGSoz+fT3r17V6s
DFgQXkS9HUVGRzpgxQ5s2barx8fE6adIkPXr0qNtjVSoQCOirr76qcXFxevPNN+v+/fvdHqlCZWVlOm/ePE1JSdGYmBgdOXKkHjhwIOjbOXbsmD744IPq8/n097//vZaUlAR9G9XZoUOH9JVXXtFbb71VmzdvrkuWLFFV1c8//1y7dOmi9evX1/nz57s8JaoCkT9DYWGhTpkyRRs2bKhNmjTRZ555Rr/77ju3xzpve/bs0fT0dL300kt17ty51e6ovrS0VGfPnq0tW7bU2rVr69ixY/Xw4cNVvt3ly5drQkKCpqSk6Kefflrl23PTF198oVOnTtUuXbpoRESEiohmZGTokSNHfvA6v9+v06ZN0+joaO3fv3+1/ZcqLgyR/7f8/HydMGGC1qtXT3/yk5/oSy+9FPZHe36/X5977jmtWbOm9u3bVw8dOuT2SFpcXKwvvPCCJiYmav369XXixIlaUFAQ0hmOHj2q9913n0ZFRemf/vQnPXHiREi3X5U2bdqk48eP15SUFBWRU4/4+Hj929/+VuHXfvbZZ9qhQwdt1KiRLl68OEQTo6p5PvKHDh3SMWPGaO3atbVVq1Y6Z84cU3/oVVV37typnTp10ssuu0wXLlzoygzHjh3TadOmaePGjbVRo0Y6depUPXbsmCuznPT+++/r5Zdfrh06dNB//vOfrs5yocrKyvQf//iHjhgxQps1a/aDsJ989OnT55z/gj9x4oQ+8cQTGhUVpffdd19YnKJExTwb+f379+tDDz2kMTEx2qZNG50/f76WlZW5PVaVKSsr08mTJ2uNGjX0l7/8pebn54dkuwUFBTpx4kRt0KCBJiQk6PPPP6/FxcUh2fa5+Oabb/Suu+7SSy65RKdNm6Z+v9/tkSpVVFSk7733ng4aNEjr169/1rCLiF566aX65ptvXtA2Nm/erFdffbUmJCTo8uXLg/wdhC+/36/vvvuu3nHHHWHTC89Ffvfu3Tps2DCtUaOGXnvttfr+++9Xu/PVVSknJ0fbtWunTZo00czMzCrbzuHDh3Xs2LFap04dTUpK0tdee01LS0urbHsX6+2339b69etr165d9YsvvnB7nB/Jy8vT119/Xfv06aMxMTHlhv3ko1evXhf9oXtJSYmOGTNGfT6fPvjgg2H52VSwnLmE+ne/+53r/xI9V56J/Pbt23XgwIHq8/k0PT1dP/zwQ0/F/XTHjx/X8ePHq8/n02HDhmlhYWHQ3vvgwYM6atQojY2N1eTkZH3rrbfC5ojn4MGD+vOf/1xjY2P1pZdecv3/j3379un06dM1PT1dfT5fpWEXEY2Li9OXX345qHOsW7dOW7ZsqUlJSfrxxx8H9b2ruzOXUI8fP16/+eYbt8c6L56J/PDhw7VXr166Zs0at0epNjZs2KBXXXWVNm/eXD/66KOLeq+9e/fqAw88oNHR0ZqWlqbvvfdeWJz6OFMgENDZs2dr7dq19Wc/+5l++eWXId1+Tk6OTpgwQVNTU88p6qc/0tPTg/azBWf67rvvdMSIEerz+XT06NFhvyihMmcuoX7qqafC9vMJz0Q+XI4mQ624uFhHjRqlkZGROnLkyPP+qdCdO3fqvffeqz6fT7t06aKZmZmuHwEHw759+/Smm27SunXr6htvvFFl35Pf79c1a9boyJEjtUWLFucddhHRmJgYnT59ekj2+4oVK7RZs2aanJysmzZtqvLthdrpS6gbN24ctkuoT+eZyKNiq1ev1ubNm+tVV12lWVlZlb4+NzdXMzIyNCIiQnv06KGrVq0KwZSh5ff7debMmRoTE6N9+vTRr7/+OijvW1JSokuWLNHBgwdrw4YNLyjsJx+dOnXSnTt3BmWuc3X06FEdMmSIRkVF6YQJE6r1Zy3nyuIS6pOIPE4pLCzU+++/X30+n44bN06PHz/+o9dkZ2drnz591HEc7d279zn9hRDudu3apZ07d9YGDRpUuta8PAUFBTp37ly98847NS4u7qLCLiIaHR2tkyd
PdvWU2JIlS7RRo0aalpam27Ztc22Oi+GFJdREHj+SmZmpV1xxhbZt21a3bt2qqqpr1qzRnj17akREhN51112nnveKsrIynTJlikZHR+uAAQN+9FOjZ7N//359/vnntUePHhoVFXXRYT/5SE1N1dzc3BB815XLy8vTjIwMjY6O1qeffjpsToueuYT6nXfeCZvZzxeRx1nl5+efWo3UvHlz9fl8OmjQIN2+fbvbo7kqNzdX27dvr40bN9Zly5b96L9/+eWX+tRTT+m1116rjuMELewiolFRUfr4449XyyPN+fPna4MGDbRLly4hv6/C+fDiEmoijwpNnDhRU1JSwvryxcF2+o1Jhg4d+oMbk2zbti3ocRcRTU5O1o0bN7r4XVfuq6++0t69e2tsbKy+8MIL1Sqepy+hvuGGGzy1hJrIAxcoOztbW7durYmJibpy5cpTz//iF78IWtwjIyP10UcfPevnI9VRIBDQOXPmaJ06dbRHjx66b98+V+fZsmWL9uvXTyMiIjy7hLq8yHNnKKAS7du3l+zsbOnXr5/ceOON8tBDD0lxcbH84Q9/CMr7t2zZUtasWSOTJk2SGjVqBOU9q5rjOHLPPfdIbm6uiIhcffXVMmfOnO+PHEMoKytLevfuLW3btpWysjLJysqSpUuXSufOnUM6R7V2tvKX9+BIHl535o1JevXqdcFH747j6PDhw8P+TlaBQEBffPFFjY2NDdmNSVatWqXdu3fXiIgIHTBgQLX5gNpNwpE8cPGuu+46+fTTT6V79+5y/fXXS3x8/AW9T2JioqxYsUKeffZZqVmzZpCnDC3HceTXv/61bN26VY4cOSKtW7eWBQsWBH07qiqZmZnSpUsXuemmmyQhIUF27Nghb775prRu3Tro2zPjbOUv78GRPPAfH374oTZt2lRjY2PP6wh+yJAhQb2OUHVSVlamU6dO1ejoaM3IyAjKjUn8fr8uXLhQ27dvr9HR0fqb3/xG9+7dG4RpbRGO5IHguvHGGyUnJ0cGDRp0Tq9v0qSJLFu2TGbNmiW1atWq2uFcEhkZKQ8//LBs3rxZdu7cKcnJybJ06dILei+/3y9vvfWWtGnTRu6++27p1q2b7NmzR5577jlJSEgI8uSGna385T04kgfOrlOnThUevd9zzz0hu7Z/dXH6jUkGDx58zhf+Ki0t1b/85S+alJSkderU0XHjxoXdFSHdICyhBKrO4sWLzxp3N+/SVV2cvDFJs2bNdMWKFeW+rri4WGfOnKkJCQnaoEEDffLJJ0N+y8hwRuSBKta2bdsfBP6OO+4IyU3Lw8HpNyYZPnz4D674WFhYqE8//bRefvnl2rhxY/3zn/8cNjfqqE7Ki7yj57GuNS0tTbOzs4N2qgiwZMGCBXLnnXdKvXr1ZMaMGdK/f3+3R6p2PvnkExk4cKCIiMyYMUPWr18vzzzzjMTFxcno0aNl0KBBEh0d7fKU4clxnI2qmnbm8z43hgEs6tu3rwwfPlxGjx4tjRo1cnucaqljx46yefNmGTVqlPTo0UMSEhJk2rRp0r9/f4mKinJ7PJMqPZJ3HGeoiAwVEUlISGi/d+/eUMwFwLjXX39dMjIyiHuQlHckz+kaADCgvMizTh4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gB
gGJEHAMOIPAAYRuQBwDAiDwCGVRp5x3GGOo6T7ThO9uHDh0MxEwAgSCqNvKrOUtU0VU2Lj48PxUwAgCDhdA0AGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAM81X2AsdxhorI0H//9rjjOLlVO1JYayAi37g9RDXG/qkY+6di7J+KXXm2Jx1VPed3cBwnW1XTgjaSMeyfirF/Ksb+qRj7p2Ll7R9O1wCAYUQeAAw738jPqpIp7GD/VIz9UzH2T8XYPxU76/45r3PyAIDwwukaADCMyAOAYUQeAAwj8gBgGJEHAMP+H1MfbqcuYmutAAAAAElFTkSuQmCC\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAF5CAYAAAB6A1o9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAY20lEQVR4nO3df5iNdd7A8c89c8ZkxiBMQsasNdJjGoxRJJ5GsVSbrHJl1LKF9dTGFlvsolareMSW6Idqk7IpNnnyY6crlpCMQcxo/djy4wpFY0ajmTHmnM/zR8tKZsaPM+ee87nfr+s618XpzLk/c195u93nO/ftqKoAAGyKcHsAAEDVIfIAYBiRBwDDiDwAGEbkAcAwX2UvcBxnqIgMFRGJjY1t36pVqyofCgBwfjZu3PiNqsaf+bxzPkso09LSNDs7O6iDAQAunuM4G1U17cznOV0DAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAwyqNvOM4Qx3HyXYcJ/vw4cOhmAkAECSVRl5VZ6lqmqqmxcfHh2ImAECQcLoGAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADPO5PQCA4Fv516
myat60U7+PiPRJ7QaN5cprust/939Yataq6+J0CCUiDxh23//+n4iI+MtOyMEvcmXlX5+WIwd3S8b4N1yeDKFC5AHDrmjV/tSvmyV3lNLiY7LyralSWlIkNS6J+dHrNRCQQMAvkb6oUI6JKkTkAQ+JjokTDQREA34REXlm8LWSmNxJrmjVXta995Lkf7VPBj25QBL+6xqXJ0WwEHnAsIC/TES+P13z1Re58sn/vSItUtMlOibu1Gs+//Qj+Xr3Z9Lt7kfkktg6UrdhU7fGRRUg8oBhT/Rp9oPfN27RRm5/6NkfPHe8qFD+Z/qHElO7XihHQ4gQecCwIVOXiohIIOCXIwd2y0fvPCNvPpYh905+T6Kia4qISMJVHQi8YUQeMKxxUptTv77iylRp0DRJXn64l2z+8G255pZBIiJSq268S9MhFPhhKMBD4psmiYjIob3b//Ok47g0DUKByAMecjLusXXquzwJQoXTNYBhX27fKCIigUBAjhzcLavfeVZq1IyVNt3udHkyhAqRBwx79ZHbRETEiYiQuHoN5YqWqXJDxiip1yjR3cEQMo6qnvOL09LSNDs7uwrHAQBcCMdxNqpq2pnPV3pO3nGcoY7jZDuOk3348OGqmQ5ASJw4XiwfvPaEFBfmuz0KQqTSyKvqLFVNU9W0+HiWWgHhrOjbI7Ju4YsS8PvdHgUhwuoawEOKvv3+CL5mHJca9goiD3hIceERiY6Jk4hI1lx4BZEHPKSoMF9qxl3q9hgIISIPeEhxYb7EEHlPIfKAhxR9y5G81xB5wEOKC/MlpjaR9xIiD3hIMefkPYfIAx5SVFhA5D2GyAMewgev3kPkAQ/5/nQNPwjlJUQe8BBO13gPkQc8IuD3S8l3Rzld4zFEHvCIku+OiqhyJO8xRB7wiKJvj4iIsE7eY4g84BHFhfkSEemTGjVruT0KQojIAx5x8uJkjuO4PQpCiMgDHsEaeW8i8oBHfH9xMtbIew2RBzyC69Z4E5EHPIIbhngTkQc8oujbI5yT9yBu9Ah4xLrVq6RhUqF0d3sQhBRH8oBHbPg6WuoldXR7DIQYkQc8Yu/XBXJZk0S3x0CIEXnAI/Ly8qR+/fpuj4EQI/KABxQVFUlJSQmR9yAiD3hAXl6eiAiR9yAiD3jAycjXq1fP5UkQakQe8IC8vDyJi4uTGjVquD0KQozIAx5w5MgRTtV4FJEHPCAvL49TNR5F5F1w8OBBKS0tdXsMeAjLJ72LyIdQYWGhTJ48WRISEiQ9PV0KCgrcHgkeQeS9i8iHQH5+vkyYMEESExNl1qxZMnbsWCkqKpLk5GTJzMx0ezx4AJH3LiJfhQ4dOiRjxoyRZs2aybx582T69OmyY8cOeeyxxyQrK0sGDx4st956qwwbNkyOHTvm9rgwjMh7F5GvAvv375ff/va3kpiYKJmZmfLaa69Jbm6uDBgwQHy+7y/8GRUVJY8//risW7dOVq9eLSkpKfLRRx+5PDmsIvLeReSDaPfu3TJs2DBp3ry5ZGVlyYIFC2Tjxo3St29fiYg4+65OS0s79Zpu3brJyJEjpbi4OMSTwzoi711EPgi2b98uAwcOlKSkJNm1a5f8/e9/l7Vr18rNN98sjuNU+vWXXHKJTJkyRVauXCmLFi2S1NRU2bBhQwgmDy9r166V9evXuz1GWCLy3kXkL8KWLVukX79+0rp1a8nLy5PVq1fL8uXLJT09/Zzifqbrr79etmzZIt26dZPrrrtOxo0bx1JLEdm7d6888MAD0rVrV0lPT5d3333X7ZHCit/vl/z8fNbJexSRvwDr16+X2267TVJTUyUQCEh2drYsXrxYOnXqdNHvHRsbKzNnzpSlS5fK7Nmz5ZprrpGtW7cGYerws2vXLrn33nulRYsWsm3bNlmyZIlMmDBBMjIy5O6775b8/Hy3RwwLBQUFoqocyXsUkT9HqiqrVq2S7t27S+fOnaVOnTqSk5MjCxYskHbt2gV9e927d5ecnBxp166ddO
jQQSZNmiRlZWVB3051lJOTI/3795dWrVrJgQMHZMWKFbJy5Urp2bOnjBo1SjZt2iTbt2+X5ORkWbZsmdvjVntcgdLjVPWcH+3bt1evCQQCumzZMu3cubNGRUXpkCFD9F//+ldIZ1i0aJE2bNhQO3bsqDt27AjptkMpKytLe/furY7j6O23364bNmwo97WlpaX6xz/+UX0+nw4ZMkS//fbbEE4aXtatW6eRkZEaCATcHgVVSESy9Szd5ki+HIFAQBYuXCgdOnSQPn36SGpqqnz++ecya9Ys+elPfxrSWW677TbJzc2Vpk2bStu2bWX69OkSCARCOkNVWr16tfTs2VM6duwoMTExsmXLFlm4cKGkpaWV+zVRUVEyfvx4Wb9+vXz88ceSkpIiK1euDN3QYeTkdWsu5HMiGHC28pf38MKR/IkTJ3Tu3LnaunVrrVWrlj7yyCP61VdfuT3WKfPmzdN69erpDTfcoLt373Z7nAsWCAT0gw8+0K5du6rP59Nf/epXF/yvlJKSEn300Uc1MjJSR4wYoUVFRUGeNrxNnjxZGzVqxJG8cVLOkTyR/7fjx4/rK6+8oi1atNC6devqY489pnl5eW6PdVYHDhzQW265RWvVqqUvv/xyWP3hDQQCumjRIu3QoYNGR0fr/fffr3v27AnKe69du1ZbtGihV155pX7yySdBec9wduzYMV2wYIFmZGSoz+fT3r17V6sDFgQXkS9HUVGRzpgxQ5s2barx8fE6adIkPXr0qNtjVSoQCOirr76qcXFxevPNN+v+/fvdHqlCZWVlOm/ePE1JSdGYmBgdOXKkHjhwIOjbOXbsmD744IPq8/n097//vZaUlAR9G9XZoUOH9JVXXtFbb71VmzdvrkuWLFFV1c8//1y7dOmi9evX1/nz57s8JaoCkT9DYWGhTpkyRRs2bKhNmjTRZ555Rr/77ju3xzpve/bs0fT0dL300kt17ty51e6ovrS0VGfPnq0tW7bU2rVr69ixY/Xw4cNVvt3ly5drQkKCpqSk6Kefflrl23PTF198oVOnTtUuXbpoRESEiohmZGTokSNHfvA6v9+v06ZN0+joaO3fv3+1/ZcqLgyR/7f8/HydMGGC1qtXT3/yk5/oSy+9FPZHe36/X5977jmtWbOm9u3bVw8dOuT2SFpcXKwvvPCCJiYmav369XXixIlaUFAQ0hmOHj2q9913n0ZFRemf/vQnPXHiREi3X5U2bdqk48eP15SUFBWRU4/4+Hj929/+VuHXfvbZZ9qhQwdt1KiRLl68OEQTo6p5PvKHDh3SMWPGaO3atbVVq1Y6Z84cU3/oVVV37typnTp10ssuu0wXLlzoygzHjh3TadOmaePGjbVRo0Y6depUPXbsmCuznPT+++/r5Zdfrh06dNB//vOfrs5yocrKyvQf//iHjhgxQps1a/aDsJ989OnT55z/gj9x4oQ+8cQTGhUVpffdd19YnKJExTwb+f379+tDDz2kMTEx2qZNG50/f76WlZW5PVaVKSsr08mTJ2uNGjX0l7/8pebn54dkuwUFBTpx4kRt0KCBJiQk6PPPP6/FxcUh2fa5+Oabb/Suu+7SSy65RKdNm6Z+v9/tkSpVVFSk7733ng4aNEjr169/1rCLiF566aX65ptvXtA2Nm/erFdffbUmJCTo8uXLg/wdhC+/36/vvvuu3nHHHWHTC89Ffvfu3Tps2DCtUaOGXnvttfr+++9Xu/PVVSknJ0fbtWunTZo00czMzCrbzuHDh3Xs2LFap04dTUpK0tdee01LS0urbHsX6+2339b69etr165d9YsvvnB7nB/Jy8vT119/Xfv06aMxMTHlhv3ko1evXhf9oXtJSYmOGTNGfT6fPvjgg2H52VSwnLmE+ne/+53r/xI9V56J/Pbt23XgwIHq8/k0PT1dP/zwQ0/F/XTHjx/X8ePHq8/n02HDhmlhYWHQ3vvgwYM6atQojY2N1eTkZH3rrbfC5ojn4MGD+vOf/1xjY2
P1pZdecv3/j3379un06dM1PT1dfT5fpWEXEY2Li9OXX345qHOsW7dOW7ZsqUlJSfrxxx8H9b2ruzOXUI8fP16/+eYbt8c6L56J/PDhw7VXr166Zs0at0epNjZs2KBXXXWVNm/eXD/66KOLeq+9e/fqAw88oNHR0ZqWlqbvvfdeWJz6OFMgENDZs2dr7dq19Wc/+5l++eWXId1+Tk6OTpgwQVNTU88p6qc/0tPTg/azBWf67rvvdMSIEerz+XT06NFhvyihMmcuoX7qqafC9vMJz0Q+XI4mQ624uFhHjRqlkZGROnLkyPP+qdCdO3fqvffeqz6fT7t06aKZmZmuHwEHw759+/Smm27SunXr6htvvFFl35Pf79c1a9boyJEjtUWLFucddhHRmJgYnT59ekj2+4oVK7RZs2aanJysmzZtqvLthdrpS6gbN24ctkuoT+eZyKNiq1ev1ubNm+tVV12lWVlZlb4+NzdXMzIyNCIiQnv06KGrVq0KwZSh5ff7debMmRoTE6N9+vTRr7/+OijvW1JSokuWLNHBgwdrw4YNLyjsJx+dOnXSnTt3BmWuc3X06FEdMmSIRkVF6YQJE6r1Zy3nyuIS6pOIPE4pLCzU+++/X30+n44bN06PHz/+o9dkZ2drnz591HEc7d279zn9hRDudu3apZ07d9YGDRpUuta8PAUFBTp37ly98847NS4u7qLCLiIaHR2tkydPdvWU2JIlS7RRo0aalpam27Ztc22Oi+GFJdREHj+SmZmpV1xxhbZt21a3bt2qqqpr1qzRnj17akREhN51112nnveKsrIynTJlikZHR+uAAQN+9FOjZ7N//359/vnntUePHhoVFXXRYT/5SE1N1dzc3BB815XLy8vTjIwMjY6O1qeffjpsToueuYT6nXfeCZvZzxeRx1nl5+efWo3UvHlz9fl8OmjQIN2+fbvbo7kqNzdX27dvr40bN9Zly5b96L9/+eWX+tRTT+m1116rjuMELewiolFRUfr4449XyyPN+fPna4MGDbRLly4hv6/C+fDiEmoijwpNnDhRU1JSwvryxcF2+o1Jhg4d+oMbk2zbti3ocRcRTU5O1o0bN7r4XVfuq6++0t69e2tsbKy+8MIL1Sqepy+hvuGGGzy1hJrIAxcoOztbW7durYmJibpy5cpTz//iF78IWtwjIyP10UcfPevnI9VRIBDQOXPmaJ06dbRHjx66b98+V+fZsmWL9uvXTyMiIjy7hLq8yHNnKKAS7du3l+zsbOnXr5/ceOON8tBDD0lxcbH84Q9/CMr7t2zZUtasWSOTJk2SGjVqBOU9q5rjOHLPPfdIbm6uiIhcffXVMmfOnO+PHEMoKytLevfuLW3btpWysjLJysqSpUuXSufOnUM6R7V2tvKX9+BIHl535o1JevXqdcFH747j6PDhw8P+TlaBQEBffPFFjY2NDdmNSVatWqXdu3fXiIgIHTBgQLX5gNpNwpE8cPGuu+46+fTTT6V79+5y/fXXS3x8/AW9T2JioqxYsUKeffZZqVmzZpCnDC3HceTXv/61bN26VY4cOSKtW7eWBQsWBH07qiqZmZnSpUsXuemmmyQhIUF27Nghb775prRu3Tro2zPjbOUv78GRPPAfH374oTZt2lRjY2PP6wh+yJAhQb2OUHVSVlamU6dO1ejoaM3IyAjKjUn8fr8uXLhQ27dvr9HR0fqb3/xG9+7dG4RpbRGO5IHguvHGGyUnJ0cGDRp0Tq9v0qSJLFu2TGbNmiW1atWq2uFcEhkZKQ8//LBs3rxZdu7cKcnJybJ06dILei+/3y9vvfWWtGnTRu6++27p1q2b7NmzR5577jlJSEgI8uSGna385T04kgfOrlOnThUevd9zzz0hu7Z/dXH6jUkGDx58zhf+Ki0t1b/85S+alJSkderU0XHjxoXdFSHdICyhBKrO4sWLzxp3N+/SVV2cvDFJs2bNdMWKFeW+rri4WGfOnKkJCQnaoEEDffLJJ0N+y8
hwRuSBKta2bdsfBP6OO+4IyU3Lw8HpNyYZPnz4D674WFhYqE8//bRefvnl2rhxY/3zn/8cNjfqqE7Ki7yj57GuNS0tTbOzs4N2qgiwZMGCBXLnnXdKvXr1ZMaMGdK/f3+3R6p2PvnkExk4cKCIiMyYMUPWr18vzzzzjMTFxcno0aNl0KBBEh0d7fKU4clxnI2qmnbm8z43hgEs6tu3rwwfPlxGjx4tjRo1cnucaqljx46yefNmGTVqlPTo0UMSEhJk2rRp0r9/f4mKinJ7PJMqPZJ3HGeoiAwVEUlISGi/d+/eUMwFwLjXX39dMjIyiHuQlHckz+kaADCgvMizTh4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGVRp5x3GGOo6T7ThO9uHDh0MxEwAgSCqNvKrOUtU0VU2Lj48PxUwAgCDhdA0AGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAM81X2AsdxhorI0H//9rjjOLlVO1JYayAi37g9RDXG/qkY+6di7J+KXXm2Jx1VPed3cBwnW1XTgjaSMeyfirF/Ksb+qRj7p2Ll7R9O1wCAYUQeAAw738jPqpIp7GD/VIz9UzH2T8XYPxU76/45r3PyAIDwwukaADCMyAOAYUQeAAwj8gBgGJEHAMP+H1MfbqcuYmutAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] @@ -5962,7 +5370,7 @@ "base_uri": "https://localhost:8080/", "height": 394 }, - "outputId": "97793e5b-7148-4923-9894-85ef1ffe7756" + "outputId": "57d86e32-66fe-46e2-80b5-89c9a655181a" }, "source": [ "m = Chem.MolFromSmiles('CCCCC[C@H](Br)CC')\n", @@ -5973,7 +5381,7 @@ { "output_type": "display_data", "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAF5CAYAAAB6A1o9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAZbklEQVR4nO3dfXjN993A8c8vORESEY8lHsJUNCbUw1E6D3fTqzHt3TL0tope02q53KUPu9GxYavWxhSdFqsp1q6tlVWfjOzCIqoWwpaKzlNrYShpJJo0iUjO5/6jN7eQeEzOcT6/9+u6XFf9nJzfx6/t2y/f83WOo6oCALApJNADAABqDpEHAMOIPAAYRuQBwDAiDwCGea70AMdxxojIGBGRyMjI7vHx8TU+FADg2uzcufMrVW1y8XHnWrZQer1ezcjIqNbBAAA3znGcnarqvfg4yzUAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAw7IqRdxxnjOM4GY7jZOTk5PhjJgBANbli5FV1iap6VdXbpEkTf8wEAKgmLNcAgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYZ5ADwCg+qW+NVc2r5x3/uchoR6p17i53HZHkvzH8P+ROnXrB3A6+BORBwx77NcfiIhIedlZOf5FlqS+9aKcOn5Ikqe/EeDJ4C9EHjCsZXz38//cOqGXlBYXSurbc6W0pEhq1Y645PHq84nPVy6hnjB/jokaROQBFwmPiBL1+UR95SIi8tLjPaVNwp3SMr67bHvvVcn78rA88svVEvvdOwI8KaoLkQcM85WXici3yzVffpElf/tgqbTrlijhEVHnH/P5P9LkxKHP5O6Hn5XakdFSv2mrQI2LGkDkAcOeH9y6ws+bt7tdfvDj31Q4dqaoQP57wQaJqNfQn6PBT4g8YNjouX8WERGfr1xOHTskae+8JH/4ebKMmv2ehIXXERGR2A49CLxhRB4wrHnc7ef/
ueVt3aRxqzj53f/cK3/f8Ee54z8fERGRuvWbBGg6+AN/GQpwkSat4kRE5GT23v8/6DgBmgb+QOQBFzkX98joRgGeBP7Ccg1g2L/37hQREZ/PJ6eOH5It7/xGatWJlNvv/q8ATwZ/IfKAYa89O1BERJyQEIlq2FRatu8mdyVPlIYxbQI7GPzGUdWrfrDX69WMjIwaHAcAcD0cx9mpqt6Lj19xTd5xnDGO42Q4jpORk5NTM9MB8IuzZ4rlL8ufl+KCvECPAj+5YuRVdYmqelXV26QJW62AYFb09SnZtua34isvD/Qo8BN21wAuUvT1t3fwdaJ4q2G3IPKAixQXnJLwiCgJCWXPhVsQecBFigrypE5Ug0CPAT8i8oCLFBfkSQSRdxUiD7hI0dfcybsNkQdcpLggTyLqEXk3IfKAixSzJu86RB5wkaKCfCLvMkQecBFeeHUfIg+4yLfLNfxFKDch8oCLsFzjPkQecAlfebmUfHOa5RqXIfKAS5R8c1pElTt5lyHygEsUfX1KRIR98i5D5AGXKC7Ik5BQj9SqUzfQo8CPiDzgEufenMxxnECPAj8i8oBLsEfenYg84BLfvjkZe+TdhsgDLsH71rgTkQdcgg8McSciD7hE0denWJN3IT7oEXCJbVs2S9O4AkkK9CDwK+7kAZfYcSJcGsb1CvQY8DMiD7hE9ol8uaVFm0CPAT8j8oBL5ObmSqNGjQI9BvyMyAMuUFRUJCUlJUTehYg84AK5ubkiIkTehYg84ALnIt+wYcMATwJ/I/KAC+Tm5kpUVJTUqlUr0KPAz4g84AKnTp1iqcaliDzgArm5uSzVuBSRD4Djx49LaWlpoMeAi7B90r2IvB8VFBTI7NmzJTY2VhITEyU/Pz/QI8EliLx7EXk/yMvLkxkzZkibNm1kyZIlMnXqVCkqKpKEhARJSUkJ9HhwASLvXkS+Bp08eVKmTJkirVu3lpUrV8qCBQtk37598vOf/1y2b98ujz/+uNx///0yduxYKSwsDPS4MIzIuxeRrwFHjx6VZ555Rtq0aSMpKSmyfPlyycrKkhEjRojH8+0bf4aFhckvfvEL2bZtm2zZskU6d+4saWlpAZ4cVhF59yLy1ejQoUMyduxYadu2rWzfvl1Wr14tO3fulKFDh0pISOWX2uv1nn/M3XffLRMmTJDi4mI/Tw7riLx7EflqsHfvXhk5cqTExcXJgQMHZP369bJ161a57777xHGcK3597dq1Zc6cOZKamirvv/++dOvWTXbs2OGHyYPL1q1bJT09PdBjBCUi715E/gZkZmbKsGHDpGPHjpKbmytbtmyRjRs3SmJi4lXF/WJ9+vSRzMxMufvuu+V73/ueTJs2ja2WIpKdnS3jxo2Tfv36SWJiorz77ruBHimolJeXS15eHvvkXYrIX4f09HQZOHCgdOvWTXw+n2RkZMhHH30kd9555w0/d2RkpCxcuFD+/Oc/y4oVK+SOO+6QTz/9tBqmDj4HDhyQUaNGSbt27WTPnj2ydu1amTFjhiQnJ8vDDz8seXl5gR4xKOTn54uqcifvUkT+KqmqbN68WZKSkqR3794SHR0tu3fvltWrV0vXrl2r/XxJSUmye/du6dq1q/To0UNmzZolZWVl1X6em9Hu3btl+PDhEh8fL8eOHZNNmzZJamqqDBgwQCZOnCi7du2SvXv3SkJCgqxbty7Q4970eAdKl1PVq/7RvXt3dRufz6fr1q3T3r17a1hYmI4ePVoPHjzo1xnef/99bdq0qfbq1Uv37dvn13P70/bt23XQoEHqOI7+4Ac/0B07dlT52NLSUn3uuefU4/Ho6NGj9euvv/bjpMFl27ZtGhoaqj6fL9CjoAaJSIZW0m3u5Kvg8/lkzZo10qNHDxk8eLB069ZNPv/8c1myZInceuutfp1l4MCBkpWVJa1atZIuXbrIggULxOfz+XWGmrRlyxYZMGCA9OrVSyIiIiQzM1PWrFkjXq+3yq8JCwuT6dOnS3p6unzy
ySfSuXNnSU1N9d/QQeTc+9Zcz+tEMKCy8lf1ww138mfPntU333xTO3bsqHXr1tVnn31Wv/zyy0CPdd7KlSu1YcOGetddd+mhQ4cCPc518/l8+pe//EX79eunHo9HH3300ev+LqWkpER/8pOfaGhoqD799NNaVFRUzdMGt9mzZ2tMTAx38sYJd/KXV1paKq+99pp06NBBxo0bJw8++KBkZ2fL7NmzpWnTpoEe77wf/vCHkpWVJZGRkdKpUydZunSpfPvvNzioqnzwwQfSs2dPeeCBByQhIUEOHjwoy5Ytk/bt21/Xc4aHh8usWbMkLS1N1q5dK127dnXtVktVPb8Gf05+fr7k5OTI4MGD5cSJEwGaDAFTWfmr+mHxTr6oqEhfeeUVbdWqlTZp0kRnzZqlp0+fDvRYV+Tz+fS1117TqKgove+++/To0aOBHumyysrKdOXKldq5c2eNiIjQCRMm6LFjx6r9PIWFhfrkk0+qx+PRn/70p1pSUlLt57iZ3XvvvZqYmHjJ8c8//1z79u2rjRo10lWrVgVgMtQ0qeJO3rWRLygo0Dlz5mjTpk21RYsW+tJLL+k333wT6LGu2b/+9S9NTEzUBg0a6JtvvnnTfUteWlqqK1as0Pbt22u9evV06tSpmpOTU+Pn3bhxo8bGxmrnzp31H//4R42fz998Pp+mpKToxx9/XOH4vHnzVEQuOa6qWl5ervPmzdPw8HAdPny45ubm+mtc+AGR/z95eXk6Y8YMbdiwoX7nO9/RV199Nejv9srLy/Xll1/WOnXq6NChQ/XkyZOBHkmLi4t18eLF2qZNG23UqJHOnDlT8/Pz/TrD6dOn9bHHHtOwsDB94YUX9OzZs349f3W78A/w0tJSbd26tfbs2bPC8cLCQm3WrJnOnz+/yuf57LPPtEePHhoTE6MfffRRjc4M/3F95E+ePKlTpkzRevXqaXx8vL7++utB/z/9xfbv36933nmn3nLLLbpmzZqAzFBYWKjz5s3T5s2ba0xMjM6dO1cLCwsDMss5H374oTZr1kx79Oih//znPwM6y/U4ceKE3nPPPbp69eoKxxcvXqwiohs2bKhw/Gqu99mzZ/X555/XsLAwfeyxx4JiiRKX59rIHz16VH/84x9rRESE3n777bpq1SotKysL9Fg1pqysTGfPnq21atXSH/3oR5qXl+eX8+bn5+vMmTO1cePGGhsbq4sWLdLi4mK/nPtqfPXVV/rQQw9p7dq1dd68eVpeXh7okar097//vcINSFlZmcbFxWmXLl0q3LUXFxdrTExMpWvw13KuTp06aWxsrG7cuPGG5rakvLxc3333XX3wwQeDpheui/yhQ4d07NixWqtWLe3Zs6d++OGHN916dU3avXu3du3aVVu0aKEpKSk1dp6cnBydOnWqRkdHa1xcnC5fvlxLS0tr7Hw36o9//KM2atRI+/Xrp1988UWgx7nE1q1bVUT0jTfeqHB8+fLlKiKXLK+c+67pRpboSkpKdMqUKerxePTJJ58MytemqsvFW6gnTZoU8O9Er5ZrIr93714dOXKkejweTUxM1A0bNrgq7hc6c+aMTp8+XT0ej44dO1YLCgqq7bmPHz+uEydO1MjISE1ISNC33347aO54jh8/rg888IBGRkbqq6++GrD/Pvbt26dPPPFEhdeEysvLtVOnThofH1/hu41za/C9evW65G6+ul5T2rZtm7Zv317j4uL0k08+qZbnDBZnzpzRpUuXart27bR+/fo6ffp0/eqrrwI91jVxTeSfeuopvffeeyvdXeBWO3bs0A4dOmjbtm01LS3thp4rOztbx40bp+Hh4er1evW99967qZc+quLz+XTFihVar149/f73v6///ve/a/ycF/8lrZSUFBURXbJkSYXjK1euVBG5ZKvjokWLVERqdFnlm2++0aefflo9Ho9Onjw56DclXMnFW6h/9atfBe3rE66JfLDcTfpbcXGxTpw4UUNDQ3XChAnX/LdC9+/fr6NG
jVKPx6N9+/bVlJQUE98hHT58WO+55x6tX7++vvHGGzX2e5o/f762bNmywnX3+Xzq9Xq1bdu2l6zBt2/fvso1+MmTJ9fIjBfatGmTtm7dWhMSEnTXrl01fj5/u3ALdfPmzYN2C/WFXBN5XN6WLVu0bdu22qFDB92+ffsVH5+VlaXJyckaEhKi/fv3182bN/thSv8qLy/XhQsXakREhA4ePFhPnDhxQ8935MgRXb9+fYVjmzdvVhHRBQsWVDj+3nvvXdMavD+XEE6fPq2jR4/WsLAwnTFjxk39WsvVsriF+hwij/MKCgr0iSeeUI/Ho9OmTdMzZ85c8piMjAwdPHiwOo6jgwYNuqo/EILdgQMHtHfv3tq4cWP905/+dN3PM2TIEG3QoMEl74zZt29fbdmyZYXrfW4NvkOHDle1Bh8Ia9eu1ZiYGPV6vbpnz56AznK93LCFmsjjEikpKdqyZUvt0qWLfvrpp6qq+vHHH+uAAQM0JCREH3roofPH3aKsrEznzJmj4eHhOmLECD116lSljysqKlKfz6eTJ0/WF154ocKvpaenq4jo7NmzKxxfv369ioj+7ne/q3D87bffVhG5ZB/8okWLNCoq6qbYBZSbm6vJyckaHh6uL774YtAsi168hfqdd94JmtmvFZFHpfLy8s7vRmrbtq16PB595JFHdO/evYEeLaCysrK0e/fu2rx5c123bl2FX0tLS9MGDRrorl27dNiwYRoVFXXJHwZJSUl6yy23XPUafGX74EtKSqr8QyZQVq1apY0bN9a+ffv6/XMVroUbt1ATeVzWzJkztXPnzkH99sXV7cIPJhkzZsz55Zf8/HyNjo7WoUOHamZmpoqIPvfccxW+9kpr8H/4wx8qHK9qDf5m9OWXX+qgQYM0MjJSFy9efFPF88It1HfddZertlATeeA6ZWRkaMeOHbVNmzaampqqqqrTpk1TEdE9e/bowIEDr2kNPiEhQb/73e9Wugb/6KOP+uc3dYN8Pp++/vrrGh0drf3799fDhw8HdJ7MzEwdNmyYhoSEuHYLNZEHbkBxcbE+++yzGhoaqs8884weOXJEIyMj9eGHHz6/Bv/rX/+6wtdcaQ3+4hd3Dx8+HHR3nUeOHNH+/ftrdHS0/v73v/f7/Onp6Tpw4EB1HEeHDBmiGRkZfj3/zYTIA9Vg69at2q5dO73tttt0xIgRGhISogcPHtSkpCRt2rRppWvwt956a6Vr8F27dg26qFfG5/Ppb3/7W42MjNRBgwb55ZPUNm/erElJSRoSEqIjRozQrKysGj/nzY7IA9WksLBQx48fr6GhoRoaGqqjRo3S1NRUFRF9+eWXKzy2qjX4ZcuWqcfj0czMTH+OXqNq+oNJfD6frl+/Xvv06XP+3TMPHDhQ7ecJVkQeqGYbNmzQunXrquM4mpmZqX369LmmNXiLL3KXlZXp3LlzNTw8XJOTk6vlg0nKy8t1zZo12r17dw0PD9fx48drdnZ2NUxrC5EHasDu3bs1JCREx48fr+vWrVMR0aVLl1Z4TFVr8JZ99tln6vV6NSYmRteuXXtdz1FWVqZvvfWWJiQkaGRkpE6aNEmPHz9ezZPaQeSBGjJq1CitXbu2Hjt2TLt3717lGvz9998fwCn978IPJnn88cev+o2/SktLddmyZRoXF6fR0dE6bdq0oHtHyEAg8kAN2b9/v4aEhOikSZN0zZo1la7B79u3r9K3j3CDcx9M0rp1a920aVOVjysuLtaFCxdqbGysNm7cWH/5y1/6/SMjgxmRB2rQ8OHDtW7dunry5Ent2LHjJWvwbnfhB5M89dRTFd7xsaCgQF988UVt1qyZNm/eXOfPnx80H9RxM6kq8s63v3Z1vF6vZmRkXPXjAbfIysqSTp06yfTp0yU+Pl6Sk5MlLS1N+vbtG+jRbip/+9vfZOTIkSIi8sorr0h6erq89NJLEhUVJZMnT5ZHHnlEwsPDAzxlcHIcZ6eqei85TuSB6jFkyBD561//Kl98
8YVkZ2dLly5dAj3STamoqEgmTpwoixcvltjYWHn++edl+PDhEhYWFujRglpVkfdcxReOEZExIiKxsbE1MBpgw89+9jNp0aKF+Hw+An8ZERERsmjRIunZs6ckJycT9xrGnTwAGFDVnXxIIIYBAPgHkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMOyKkXccZ4zjOBmO42Tk5OT4YyYAQDW5YuRVdYmqelXV26RJE3/MBACoJizXAIBhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDDPlR7gOM4YERnzfz894zhOVs2OFNQai8hXgR7iJsb1uTyuz+VxfS7vtsoOOqp61c/gOE6GqnqrbSRjuD6Xx/W5PK7P5XF9Lq+q68NyDQAYRuQBwLBrjfySGpnCDq7P5XF9Lo/rc3lcn8ur9Ppc05o8ACC4sFwDAIYReQAwjMgDgGFEHgAMI/IAYNj/ApwmbOxjKSiqAAAAAElFTkSuQmCC\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAF5CAYAAAB6A1o9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAZbklEQVR4nO3dfXjN993A8c8vORESEY8lHsJUNCbUw1E6D3fTqzHt3TL0tope02q53KUPu9GxYavWxhSdFqsp1q6tlVWfjOzCIqoWwpaKzlNrYShpJJo0iUjO5/6jN7eQeEzOcT6/9+u6XFf9nJzfx6/t2y/f83WOo6oCALApJNADAABqDpEHAMOIPAAYRuQBwDAiDwCGea70AMdxxojIGBGRyMjI7vHx8TU+FADg2uzcufMrVW1y8XHnWrZQer1ezcjIqNbBAAA3znGcnarqvfg4yzUAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAw7IqRdxxnjOM4GY7jZOTk5PhjJgBANbli5FV1iap6VdXbpEkTf8wEAKgmLNcAgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYZ5ADwCg+qW+NVc2r5x3/uchoR6p17i53HZHkvzH8P+ROnXrB3A6+BORBwx77NcfiIhIedlZOf5FlqS+9aKcOn5Ikqe/EeDJ4C9EHjCsZXz38//cOqGXlBYXSurbc6W0pEhq1Y645PHq84nPVy6hnjB/jokaROQBFwmPiBL1+UR95SIi8tLjPaVNwp3SMr67bHvvVcn78rA88svVEvvdOwI8KaoLkQcM85WXici3yzVffpElf/tgqbTrlijhEVHnH/P5P9LkxKHP5O6Hn5XakdFSv2mrQI2LGkDkAcOeH9y6ws+bt7tdfvDj31Q4dqaoQP57wQaJqNfQn6PBT4g8YNjouX8WERGfr1xOHTskae+8JH/4ebKMmv2ehIXXERGR2A49CLxhRB4wrHnc7ef/ueVt3aRxqzj53f/cK3/f8Ee54z8fERGRuvWbBGg6+AN/GQpwkSat4kRE5GT23v8/6DgBmgb+QOQBFzkX98joRgGeBP7Ccg1g2L/37hQREZ/PJ6eOH5It7/xGatWJlNvv/q8ATwZ/IfKAYa89O1BERJyQEIlq2FRatu8mdyVPlIYxbQI7GPzGUdWrfrDX69WMjIwaHAcAcD0cx9mpqt6Lj19xTd5xnDGO42Q4jpORk5NTM9MB8IuzZ4rlL8ufl+KCvECPAj+5YuRVdYmqelXV26QJW62AYFb09SnZtua34isvD/Qo8BN21wAuUvT1t3fwdaJ4q2G3IPK
AixQXnJLwiCgJCWXPhVsQecBFigrypE5Ug0CPAT8i8oCLFBfkSQSRdxUiD7hI0dfcybsNkQdcpLggTyLqEXk3IfKAixSzJu86RB5wkaKCfCLvMkQecBFeeHUfIg+4yLfLNfxFKDch8oCLsFzjPkQecAlfebmUfHOa5RqXIfKAS5R8c1pElTt5lyHygEsUfX1KRIR98i5D5AGXKC7Ik5BQj9SqUzfQo8CPiDzgEufenMxxnECPAj8i8oBLsEfenYg84BLfvjkZe+TdhsgDLsH71rgTkQdcgg8McSciD7hE0denWJN3IT7oEXCJbVs2S9O4AkkK9CDwK+7kAZfYcSJcGsb1CvQY8DMiD7hE9ol8uaVFm0CPAT8j8oBL5ObmSqNGjQI9BvyMyAMuUFRUJCUlJUTehYg84AK5ubkiIkTehYg84ALnIt+wYcMATwJ/I/KAC+Tm5kpUVJTUqlUr0KPAz4g84AKnTp1iqcaliDzgArm5uSzVuBSRD4Djx49LaWlpoMeAi7B90r2IvB8VFBTI7NmzJTY2VhITEyU/Pz/QI8EliLx7EXk/yMvLkxkzZkibNm1kyZIlMnXqVCkqKpKEhARJSUkJ9HhwASLvXkS+Bp08eVKmTJkirVu3lpUrV8qCBQtk37598vOf/1y2b98ujz/+uNx///0yduxYKSwsDPS4MIzIuxeRrwFHjx6VZ555Rtq0aSMpKSmyfPlyycrKkhEjRojH8+0bf4aFhckvfvEL2bZtm2zZskU6d+4saWlpAZ4cVhF59yLy1ejQoUMyduxYadu2rWzfvl1Wr14tO3fulKFDh0pISOWX2uv1nn/M3XffLRMmTJDi4mI/Tw7riLx7EflqsHfvXhk5cqTExcXJgQMHZP369bJ161a57777xHGcK3597dq1Zc6cOZKamirvv/++dOvWTXbs2OGHyYPL1q1bJT09PdBjBCUi715E/gZkZmbKsGHDpGPHjpKbmytbtmyRjRs3SmJi4lXF/WJ9+vSRzMxMufvuu+V73/ueTJs2ja2WIpKdnS3jxo2Tfv36SWJiorz77ruBHimolJeXS15eHvvkXYrIX4f09HQZOHCgdOvWTXw+n2RkZMhHH30kd9555w0/d2RkpCxcuFD+/Oc/y4oVK+SOO+6QTz/9tBqmDj4HDhyQUaNGSbt27WTPnj2ydu1amTFjhiQnJ8vDDz8seXl5gR4xKOTn54uqcifvUkT+KqmqbN68WZKSkqR3794SHR0tu3fvltWrV0vXrl2r/XxJSUmye/du6dq1q/To0UNmzZolZWVl1X6em9Hu3btl+PDhEh8fL8eOHZNNmzZJamqqDBgwQCZOnCi7du2SvXv3SkJCgqxbty7Q4970eAdKl1PVq/7RvXt3dRufz6fr1q3T3r17a1hYmI4ePVoPHjzo1xnef/99bdq0qfbq1Uv37dvn13P70/bt23XQoEHqOI7+4Ac/0B07dlT52NLSUn3uuefU4/Ho6NGj9euvv/bjpMFl27ZtGhoaqj6fL9CjoAaJSIZW0m3u5Kvg8/lkzZo10qNHDxk8eLB069ZNPv/8c1myZInceuutfp1l4MCBkpWVJa1atZIuXbrIggULxOfz+XWGmrRlyxYZMGCA9OrVSyIiIiQzM1PWrFkjXq+3yq8JCwuT6dOnS3p6unzyySfSuXNnSU1N9d/QQeTc+9Zcz+tEMKCy8lf1ww138mfPntU333xTO3bsqHXr1tVnn31Wv/zyy0CPdd7KlSu1YcOGetddd+mhQ4cCPc518/l8+pe//EX79eunHo9HH3300ev+LqWkpER/8pOfaGhoqD799NNaVFRUzdMGt9mzZ2tMTAx38sYJd/KXV1paKq+99pp06NBBxo0bJw8++KBkZ2fL7NmzpWnTpoEe77wf/vCHkpWVJZGRkdKpUydZunSpfPvvNzioqnzwwQfSs2dPeeCBByQhIUEOHjwoy5Ytk/bt21/Xc4aHh8usWbMkLS1N1q5dK127dnX
tVktVPb8Gf05+fr7k5OTI4MGD5cSJEwGaDAFTWfmr+mHxTr6oqEhfeeUVbdWqlTZp0kRnzZqlp0+fDvRYV+Tz+fS1117TqKgove+++/To0aOBHumyysrKdOXKldq5c2eNiIjQCRMm6LFjx6r9PIWFhfrkk0+qx+PRn/70p1pSUlLt57iZ3XvvvZqYmHjJ8c8//1z79u2rjRo10lWrVgVgMtQ0qeJO3rWRLygo0Dlz5mjTpk21RYsW+tJLL+k333wT6LGu2b/+9S9NTEzUBg0a6JtvvnnTfUteWlqqK1as0Pbt22u9evV06tSpmpOTU+Pn3bhxo8bGxmrnzp31H//4R42fz998Pp+mpKToxx9/XOH4vHnzVEQuOa6qWl5ervPmzdPw8HAdPny45ubm+mtc+AGR/z95eXk6Y8YMbdiwoX7nO9/RV199Nejv9srLy/Xll1/WOnXq6NChQ/XkyZOBHkmLi4t18eLF2qZNG23UqJHOnDlT8/Pz/TrD6dOn9bHHHtOwsDB94YUX9OzZs349f3W78A/w0tJSbd26tfbs2bPC8cLCQm3WrJnOnz+/yuf57LPPtEePHhoTE6MfffRRjc4M/3F95E+ePKlTpkzRevXqaXx8vL7++utB/z/9xfbv36933nmn3nLLLbpmzZqAzFBYWKjz5s3T5s2ba0xMjM6dO1cLCwsDMss5H374oTZr1kx79Oih//znPwM6y/U4ceKE3nPPPbp69eoKxxcvXqwiohs2bKhw/Gqu99mzZ/X555/XsLAwfeyxx4JiiRKX59rIHz16VH/84x9rRESE3n777bpq1SotKysL9Fg1pqysTGfPnq21atXSH/3oR5qXl+eX8+bn5+vMmTO1cePGGhsbq4sWLdLi4mK/nPtqfPXVV/rQQw9p7dq1dd68eVpeXh7okar097//vcINSFlZmcbFxWmXLl0q3LUXFxdrTExMpWvw13KuTp06aWxsrG7cuPGG5rakvLxc3333XX3wwQeDpheui/yhQ4d07NixWqtWLe3Zs6d++OGHN916dU3avXu3du3aVVu0aKEpKSk1dp6cnBydOnWqRkdHa1xcnC5fvlxLS0tr7Hw36o9//KM2atRI+/Xrp1988UWgx7nE1q1bVUT0jTfeqHB8+fLlKiKXLK+c+67pRpboSkpKdMqUKerxePTJJ58MytemqsvFW6gnTZoU8O9Er5ZrIr93714dOXKkejweTUxM1A0bNrgq7hc6c+aMTp8+XT0ej44dO1YLCgqq7bmPHz+uEydO1MjISE1ISNC33347aO54jh8/rg888IBGRkbqq6++GrD/Pvbt26dPPPFEhdeEysvLtVOnThofH1/hu41za/C9evW65G6+ul5T2rZtm7Zv317j4uL0k08+qZbnDBZnzpzRpUuXart27bR+/fo6ffp0/eqrrwI91jVxTeSfeuopvffeeyvdXeBWO3bs0A4dOmjbtm01LS3thp4rOztbx40bp+Hh4er1evW99967qZc+quLz+XTFihVar149/f73v6///ve/a/ycF/8lrZSUFBURXbJkSYXjK1euVBG5ZKvjokWLVERqdFnlm2++0aefflo9Ho9Onjw56DclXMnFW6h/9atfBe3rE66JfLDcTfpbcXGxTpw4UUNDQ3XChAnX/LdC9+/fr6NGjVKPx6N9+/bVlJQUE98hHT58WO+55x6tX7++vvHGGzX2e5o/f762bNmywnX3+Xzq9Xq1bdu2l6zBt2/fvso1+MmTJ9fIjBfatGmTtm7dWhMSEnTXrl01fj5/u3ALdfPmzYN2C/WFXBN5XN6WLVu0bdu22qFDB92+ffsVH5+VlaXJyckaEhKi/fv3182bN/thSv8qLy/XhQsXakREhA4ePFhPnDhxQ8935MgRXb9+fYVjmzdvVhHRBQsWVDj+3nvvXdMavD+XEE6fPq2jR4/WsLAwnTFjxk39WsvVsriF+hwij/MKCgr0iSeeUI/Ho9OmTdMzZ85c8pi
MjAwdPHiwOo6jgwYNuqo/EILdgQMHtHfv3tq4cWP905/+dN3PM2TIEG3QoMEl74zZt29fbdmyZYXrfW4NvkOHDle1Bh8Ia9eu1ZiYGPV6vbpnz56AznK93LCFmsjjEikpKdqyZUvt0qWLfvrpp6qq+vHHH+uAAQM0JCREH3roofPH3aKsrEznzJmj4eHhOmLECD116lSljysqKlKfz6eTJ0/WF154ocKvpaenq4jo7NmzKxxfv369ioj+7ne/q3D87bffVhG5ZB/8okWLNCoq6qbYBZSbm6vJyckaHh6uL774YtAsi168hfqdd94JmtmvFZFHpfLy8s7vRmrbtq16PB595JFHdO/evYEeLaCysrK0e/fu2rx5c123bl2FX0tLS9MGDRrorl27dNiwYRoVFXXJHwZJSUl6yy23XPUafGX74EtKSqr8QyZQVq1apY0bN9a+ffv6/XMVroUbt1ATeVzWzJkztXPnzkH99sXV7cIPJhkzZsz55Zf8/HyNjo7WoUOHamZmpoqIPvfccxW+9kpr8H/4wx8qHK9qDf5m9OWXX+qgQYM0MjJSFy9efFPF88It1HfddZertlATeeA6ZWRkaMeOHbVNmzaampqqqqrTpk1TEdE9e/bowIEDr2kNPiEhQb/73e9Wugb/6KOP+uc3dYN8Pp++/vrrGh0drf3799fDhw8HdJ7MzEwdNmyYhoSEuHYLNZEHbkBxcbE+++yzGhoaqs8884weOXJEIyMj9eGHHz6/Bv/rX/+6wtdcaQ3+4hd3Dx8+HHR3nUeOHNH+/ftrdHS0/v73v/f7/Onp6Tpw4EB1HEeHDBmiGRkZfj3/zYTIA9Vg69at2q5dO73tttt0xIgRGhISogcPHtSkpCRt2rRppWvwt956a6Vr8F27dg26qFfG5/Ppb3/7W42MjNRBgwb55ZPUNm/erElJSRoSEqIjRozQrKysGj/nzY7IA9WksLBQx48fr6GhoRoaGqqjRo3S1NRUFRF9+eWXKzy2qjX4ZcuWqcfj0czMTH+OXqNq+oNJfD6frl+/Xvv06XP+3TMPHDhQ7ecJVkQeqGYbNmzQunXrquM4mpmZqX369LmmNXiLL3KXlZXp3LlzNTw8XJOTk6vlg0nKy8t1zZo12r17dw0PD9fx48drdnZ2NUxrC5EHasDu3bs1JCREx48fr+vWrVMR0aVLl1Z4TFVr8JZ99tln6vV6NSYmRteuXXtdz1FWVqZvvfWWJiQkaGRkpE6aNEmPHz9ezZPaQeSBGjJq1CitXbu2Hjt2TLt3717lGvz9998fwCn978IPJnn88cev+o2/SktLddmyZRoXF6fR0dE6bdq0oHtHyEAg8kAN2b9/v4aEhOikSZN0zZo1la7B79u3r9K3j3CDcx9M0rp1a920aVOVjysuLtaFCxdqbGysNm7cWH/5y1/6/SMjgxmRB2rQ8OHDtW7dunry5Ent2LHjJWvwbnfhB5M89dRTFd7xsaCgQF988UVt1qyZNm/eXOfPnx80H9RxM6kq8s63v3Z1vF6vZmRkXPXjAbfIysqSTp06yfTp0yU+Pl6Sk5MlLS1N+vbtG+jRbip/+9vfZOTIkSIi8sorr0h6erq89NJLEhUVJZMnT5ZHHnlEwsPDAzxlcHIcZ6eqei85TuSB6jFkyBD561//Kl988YVkZ2dLly5dAj3STamoqEgmTpwoixcvltjYWHn++edl+PDhEhYWFujRglpVkfdcxReOEZExIiKxsbE1MBpgw89+9jNp0aKF+Hw+An8ZERERsmjRIunZs6ckJycT9xrGnTwAGFDVnXxIIIYBAPgHkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/I
AYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMOyKkXccZ4zjOBmO42Tk5OT4YyYAQDW5YuRVdYmqelXV26RJE3/MBACoJizXAIBhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDAiDwCGEXkAMIzIA4BhRB4ADCPyAGAYkQcAw4g8ABhG5AHAMCIPAIYReQAwjMgDgGFEHgAMI/IAYBiRBwDDiDwAGEbkAcAwIg8AhhF5ADCMyAOAYUQeAAwj8gBgGJEHAMOIPAAYRuQBwDDPlR7gOM4YERnzfz894zhOVs2OFNQai8hXgR7iJsb1uTyuz+VxfS7vtsoOOqp61c/gOE6GqnqrbSRjuD6Xx/W5PK7P5XF9Lq+q68NyDQAYRuQBwLBrjfySGpnCDq7P5XF9Lo/rc3lcn8ur9Ppc05o8ACC4sFwDAIYReQAwjMgDgGFEHgAMI/IAYNj/ApwmbOxjKSiqAAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] @@ -6016,228 +5424,32 @@ } }, "base_uri": "https://localhost:8080/", - "height": 942, - "referenced_widgets": [ - "dde0ff73c3544b1ca17f15054f7afb8b", - "33343d7e01eb49dbacc8094b2432f8ff", - "b36fc55690694e2cae051eda093406a8", - "43739e5bee4c46ccb2ed246983386607", - "36ca4c7b9f7f4309ae67833715ff7290", - "d95b880d008e4e2892d23d5521bbf996", - "8282fd0873424a50a0e94f2f61269f2f", - "1e9eecc206df42b6abc38f879ece9fbd", - "d21d80567a4b47e79a377806fd89be34", - "3a6b4fd9fdb1470b838b5bbb2b140dab", - "8acf67a7eb5c4038929b65110a9e726d", - "53bd772af72540fb98683953071d2ce9", - "3c4fbeba7daf4c29be0641c14c391082", - "d622d59af30e44dd95ccb49d42e7b7ae", - "f90877640e3a43c381bd5ed8b802dda0", - "db17e76c0d0f4eba8dd01e35c642c11e", - "987ddef0ff664b6eb491597364bf3cb9", - "8bc4a38a6d0e43e8a4d332817c8f9406", - "634462afacee43f89e93e5413d0daa6b", - "dd527df79ed844efb2b10916c7d0c955", - "6a8d7546b69c4818896449daa3127a27", - "3e3ca6b4229e4fb3b985260c60eaec52", - "4e1c338648354a2eb50054cf4245fe47", - "5b9f6eaa15a14a1d90ad4402ee67bf19", - "736e44e3cb374895bedcf188c410381e", - "6b97fbdac2f34443ac9f8d7c8902b5c5", - "7b75be2cfb7a4012a4f90e81401034c1", - "85cc12ea1050448e9f14b6841db97b5c", - "ef3e457fd62149e8aa4dc0a5b6356c4b", - "1095ce8d23d643fc8095ae7d509744e6", - "bf963742546d4254937e679300ca10ea", - "294b001c57e4444dae15bde61cf9ba54", - "83c90fda230a4a089bcee7905d765ee9", - "5ffe945d78da49cd997595479764c10d", - "c385de22e24a41e1bd819911c0928c58", - "3cb96b04a2bd43ca939155e73804a529", - "48216c031181421fb44f6623d9052951", - "dd91954841e64caab850c137d4866d00", - "01b86bfcbd8f4b0ba8cf8b995ba97e98", - "9498d0a02f104a07833f9b8fce78e43b", - "eadc3ece700643ee8dcfc62c6ac9390e", - "b25e2925e32748f9abc0f2fa9f061dae", - "ec951b3c633048e4953622abfcf1ed77", - "93706b45524b4e61948b437a3c2bf75a", - "4be1b2f15c55402a9c11ffc611555769", - "b21308fc036b434a8479c88985adacf8", - "9e82afe32c1e4503bde2f6cdfc31abe4", - "f0f78df7f8144c0b9e621a85c1be8bec" - ] + "height": 568 }, - "outputId": 
"bd31afcd-6ad4-47b8-e58d-80a61101b664" + "outputId": "e172699c-37e3-4205-d6f7-68f024283570" }, "source": [ "from transformers import RobertaModel, RobertaTokenizer\n", "from bertviz import head_view\n", "\n", - "model_version = 'seyonec/ChemBERTa-zinc250k-v1'\n", + "model_version = 'seyonec/ChemBERTa_zinc250k_v2_40k'\n", "model = RobertaModel.from_pretrained(model_version, output_attentions=True)\n", - "tokenizer = RobertaTokenizer.from_pretrained(model_version)\n", - "\n", - "sentence_a = \"CCCCC[C@@H](Br)CC\"\n", - "sentence_b = \"CCCCC[C@H](Br)CC\"\n", - "inputs = tokenizer.encode_plus(sentence_a, sentence_b, return_tensors='pt', add_special_tokens=True)\n", - "input_ids = inputs['input_ids']\n", - "attention = model(input_ids)[-1]\n", - "input_id_list = input_ids[0].tolist() # Batch index 0\n", - "tokens = tokenizer.convert_ids_to_tokens(input_id_list)\n", - "\n", - "call_html()\n", - "\n", - "head_view(attention, tokens)" - ], - "execution_count": 15, - "outputs": [ - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "dde0ff73c3544b1ca17f15054f7afb8b", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=480.0, style=ProgressStyle(description_…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "d21d80567a4b47e79a377806fd89be34", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=336404667.0, style=ProgressStyle(descri…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - 
"model_id": "987ddef0ff664b6eb491597364bf3cb9", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=11058.0, style=ProgressStyle(descriptio…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "736e44e3cb374895bedcf188c410381e", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=4056.0, style=ProgressStyle(description…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "83c90fda230a4a089bcee7905d765ee9", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=150.0, style=ProgressStyle(description_…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "eadc3ece700643ee8dcfc62c6ac9390e", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=16.0, style=ProgressStyle(description_w…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py:831: FutureWarning: Parameter max_len is deprecated and will be removed in a future release. 
Use model_max_length instead.\n", - " category=FutureWarning,\n" - ], - "name": "stderr" - }, + "tokenizer = RobertaTokenizer.from_pretrained(model_version)\n", + "\n", + "sentence_a = \"CCCCC[C@@H](Br)CC\"\n", + "sentence_b = \"CCCCC[C@H](Br)CC\"\n", + "inputs = tokenizer.encode_plus(sentence_a, sentence_b, return_tensors='pt', add_special_tokens=True)\n", + "input_ids = inputs['input_ids']\n", + "attention = model(input_ids)[-1]\n", + "input_id_list = input_ids[0].tolist() # Batch index 0\n", + "tokens = tokenizer.convert_ids_to_tokens(input_id_list)\n", + "\n", + "call_html()\n", + "\n", + "head_view(attention, tokens)" + ], + "execution_count": 15, + "outputs": [ { "output_type": "display_data", "data": { @@ -6286,7 +5498,7 @@ "output_type": "display_data", "data": { "application/javascript": [ - "window.params = {\"attention\": {\"all\": {\"attn\": [[[[0.015762679278850555, 0.024463526904582977, 0.31396323442459106, 0.05895601958036423, 0.016421372070908546, 0.011737994849681854, 0.03874201700091362, 0.03660546615719795, 0.029645103961229324, 0.0678732842206955, 0.011365757323801517, 0.042948395013809204, 0.03178062289953232, 0.017082469537854195, 0.02014056220650673, 0.06245425343513489, 0.014991723001003265, 0.027286306023597717, 0.016096610575914383, 0.02376537211239338, 0.030847594141960144, 0.04167555272579193, 0.01630471833050251, 0.029089277610182762], [0.030142389237880707, 0.05453120917081833, 0.07882066071033478, 0.09012992680072784, 0.01871202141046524, 0.017929283902049065, 0.043508123606443405, 0.03757813572883606, 0.032126929610967636, 0.15299779176712036, 0.016828063875436783, 0.08753278106451035, 0.023751547560095787, 0.028420398011803627, 0.010115685872733593, 0.03235689178109169, 0.024995338171720505, 0.05611937865614891, 0.03409217670559883, 0.041342370212078094, 0.03890709951519966, 0.024429678916931152, 0.008010783232748508, 0.016621319577097893], [0.016468187794089317, 0.027264606207609177, 0.16388411819934845, 0.07733185589313507, 
0.0403577983379364, 0.014584922231733799, 0.05401241034269333, 0.015347698703408241, 0.029911084100604057, 0.025385668501257896, 0.03148777782917023, 0.022254016250371933, 0.023791441693902016, 0.02672765962779522, 0.029567722231149673, 0.027592018246650696, 0.05426017940044403, 0.062157124280929565, 0.03427448868751526, 0.027845682576298714, 0.06013811379671097, 0.05128742381930351, 0.031011776998639107, 0.05305611714720726], [0.06461041420698166, 0.029304351657629013, 0.12740053236484528, 0.022483352571725845, 0.009188227355480194, 0.03398508578538895, 0.013407074846327305, 0.05435388535261154, 0.045294784009456635, 0.0773269534111023, 0.03043787181377411, 0.020937900990247726, 0.012796806171536446, 0.02356344647705555, 0.09629786014556885, 0.013914219103753567, 0.013628297485411167, 0.027292372658848763, 0.009468404576182365, 0.1443931758403778, 0.01554164569824934, 0.07220336049795151, 0.011363821104168892, 0.03080618940293789], [0.00883458275347948, 0.038431908935308456, 0.007826928049325943, 0.2471485137939453, 0.05742489919066429, 0.007093418855220079, 0.067841537296772, 0.00139536801725626, 0.027717847377061844, 0.005287783686071634, 0.07867342233657837, 0.0013721669092774391, 0.07307202368974686, 0.0023300834000110626, 0.034575268626213074, 0.012349236756563187, 0.0868939459323883, 0.004269605968147516, 0.11470718681812286, 0.0012942980974912643, 0.03587285056710243, 0.01442044135183096, 0.0633949488401413, 0.007771735079586506], [0.03865044564008713, 0.05373422056436539, 0.11162200570106506, 0.033116914331912994, 0.039598122239112854, 0.019708245992660522, 0.0391925573348999, 0.008839752525091171, 0.027649562805891037, 0.013211739249527454, 0.01764822006225586, 0.002580540254712105, 0.012656345032155514, 0.005710262339562178, 0.09960854798555374, 0.00564418314024806, 0.030158353969454765, 0.021978916600346565, 0.09694251418113708, 0.02756977081298828, 0.09706124663352966, 0.09826093167066574, 0.07808677107095718, 0.020769841969013214], 
[0.026822742074728012, 0.03408430889248848, 0.04227762296795845, 0.013264903798699379, 0.025792459025979042, 0.0726829394698143, 0.09646104276180267, 0.06238896772265434, 0.03554973006248474, 0.027690470218658447, 0.05526658147573471, 0.005705276969820261, 0.03489705175161362, 0.014459202066063881, 0.06414204835891724, 0.002798195229843259, 0.03851733356714249, 0.004200316965579987, 0.04591827839612961, 0.024824731051921844, 0.02932056039571762, 0.11021335422992706, 0.11868678033351898, 0.014035097323358059], [0.02396298013627529, 0.028185734525322914, 0.24582868814468384, 0.012620334513485432, 0.04640713334083557, 0.020806828513741493, 0.056957073509693146, 0.031897976994514465, 0.0650811642408371, 0.02272331900894642, 0.04514170065522194, 0.028026117011904716, 0.03633681684732437, 0.013016169890761375, 0.10631608217954636, 0.010840585455298424, 0.02597932703793049, 0.005207057576626539, 0.013682179152965546, 0.014815070666372776, 0.029145004227757454, 0.057586245238780975, 0.03986281156539917, 0.019573599100112915], [0.017582323402166367, 0.019032331183552742, 0.08176509290933609, 0.005678306333720684, 0.017487742006778717, 0.19054846465587616, 0.0534183606505394, 0.2890831232070923, 0.020336855202913284, 0.1780560314655304, 0.010331468656659126, 0.005913447123020887, 0.003584324149414897, 0.005806654691696167, 0.016262724995613098, 0.0012810686603188515, 0.00406300462782383, 0.0034551762510091066, 0.005425740033388138, 0.008689974434673786, 0.008592690341174603, 0.023252246901392937, 0.016111234202980995, 0.014241652563214302], [0.05546436458826065, 0.022706393152475357, 0.08478473126888275, 0.014924895949661732, 0.017711900174617767, 0.03641828894615173, 0.054160211235284805, 0.11751717329025269, 0.10328083485364914, 0.14892426133155823, 0.07042554020881653, 0.018958697095513344, 0.014116067439317703, 0.012923620641231537, 0.04918067529797554, 0.016089417040348053, 0.013301897794008255, 0.017937887459993362, 0.010340635664761066, 0.05828748270869255, 
0.015895644202828407, 0.02620791830122471, 0.009568259119987488, 0.010873175226151943], [0.002710341941565275, 0.000988575047813356, 0.05989323556423187, 0.0015990155516192317, 0.0011487379670143127, 0.009077084250748158, 0.0205343309789896, 0.6426239013671875, 0.006958905141800642, 0.21060334146022797, 0.005971413105726242, 0.020612744614481926, 0.0015554464189335704, 0.0011573232477530837, 0.002081860089674592, 0.001408578478731215, 0.0004431517154444009, 0.0007042562938295305, 0.0005247892113402486, 0.0034983763471245766, 0.0007013534777797759, 0.0011262251064181328, 0.0006450965302065015, 0.0034319369588047266], [0.010643727146089077, 0.00833797175437212, 0.05228384956717491, 0.015590811148285866, 0.013316798023879528, 0.007536173798143864, 0.030865781009197235, 0.03781968355178833, 0.13791640102863312, 0.13916292786598206, 0.3583192825317383, 0.011166825890541077, 0.04794953763484955, 0.009130812250077724, 0.02381097339093685, 0.03551948070526123, 0.02287175878882408, 0.0039088851772248745, 0.0037622905801981688, 0.0039961873553693295, 0.0037148911505937576, 0.012459812685847282, 0.004753545857965946, 0.005161583423614502], [0.004566307179629803, 0.004159293603152037, 0.009212720207870007, 0.005605729296803474, 0.0010219617979601026, 0.01183972880244255, 0.00125782354734838, 0.03261004760861397, 0.006743623409420252, 0.7518895864486694, 0.0036732761655002832, 0.07948249578475952, 0.0030304458923637867, 0.007342629600316286, 0.0015284080291166902, 0.014284235425293446, 0.001268404652364552, 0.03555556386709213, 0.00035779079189524055, 0.016237279400229454, 0.0014919526875019073, 0.0021887964103370905, 0.0003058934526052326, 0.004345929250121117], [0.0050406684167683125, 0.012716449797153473, 0.014003932476043701, 0.03479583188891411, 0.007054895628243685, 0.003367739263921976, 0.019927846267819405, 0.013581814244389534, 0.10281942784786224, 0.15202024579048157, 0.3866932690143585, 0.02275068871676922, 0.10492293536663055, 0.007439795415848494, 
0.01858443021774292, 0.016285300254821777, 0.035766903311014175, 0.004741146229207516, 0.012796576134860516, 0.0037187219131737947, 0.010078145191073418, 0.005512998905032873, 0.003852218622341752, 0.0015280491206794977], [0.0026315120048820972, 0.00229522492736578, 0.07824766635894775, 0.005273914895951748, 0.0019244770519435406, 0.004240210168063641, 0.0029216152615845203, 0.01144114974886179, 0.005695781670510769, 0.019802546128630638, 0.005040714517235756, 0.705732524394989, 0.009270558133721352, 0.05209682509303093, 0.011419904418289661, 0.024522744119167328, 0.0023685090709477663, 0.01285997498780489, 0.0011947338934987783, 0.0136563116684556, 0.005043524783104658, 0.009766336530447006, 0.0020402290392667055, 0.010512946173548698], [0.0020401158835738897, 0.003927676938474178, 0.045233845710754395, 0.011749864555895329, 0.002814143430441618, 0.0024209467228502035, 0.006607451941817999, 0.011492149904370308, 0.04646245017647743, 0.015790030360221863, 0.08482850342988968, 0.0030557350255548954, 0.13922199606895447, 0.0444193109869957, 0.34634867310523987, 0.056255046278238297, 0.01235159207135439, 0.004446808248758316, 0.00259069399908185, 0.013058866374194622, 0.005751613061875105, 0.12377618998289108, 0.008180495351552963, 0.007175807375460863], [0.0010380259482190013, 0.004466721322387457, 0.003198940074071288, 0.04844358190894127, 0.007840416394174099, 0.0016122923698276281, 0.00799855962395668, 0.0010527035919949412, 0.010291093029081821, 0.0009376915404573083, 0.04000012204051018, 0.004288796801120043, 0.12791314721107483, 0.1436910182237625, 0.02643596939742565, 0.4566892087459564, 0.05096709355711937, 0.016519881784915924, 0.005718008615076542, 0.001714396639727056, 0.002577840583398938, 0.020443374291062355, 0.010782941244542599, 0.005378222558647394], [0.0018275437178090215, 0.003507254645228386, 0.01412270963191986, 0.003002611454576254, 0.0033935480751097202, 0.0006546186632476747, 0.0034080713521689177, 0.004234778694808483, 0.03482084721326828, 
0.003126733237877488, 0.10069078207015991, 0.0004352650430519134, 0.01750331185758114, 0.0039316811598837376, 0.682522714138031, 0.005828946828842163, 0.032880764454603195, 0.004165558144450188, 0.01323634386062622, 0.007797720842063427, 0.013610069639980793, 0.021591363474726677, 0.022383613511919975, 0.0013232359196990728], [0.007173168007284403, 0.0057199569419026375, 0.023305373266339302, 0.004403858911246061, 0.006055888254195452, 0.0036759458016604185, 0.010500490665435791, 0.03876242786645889, 0.015636572614312172, 0.007583717815577984, 0.005554604344069958, 0.004684435669332743, 0.01532567199319601, 0.01582288183271885, 0.02620071917772293, 0.2705627679824829, 0.03951359912753105, 0.2043084353208542, 0.0288863442838192, 0.11216584593057632, 0.016227712854743004, 0.07540969550609589, 0.012437895871698856, 0.0500820130109787], [0.004963899962604046, 0.005713841412216425, 0.01393347978591919, 0.004152959678322077, 0.01549807470291853, 0.0008370212744921446, 0.0035736432764679193, 0.001364616327919066, 0.023313356563448906, 0.00251566618680954, 0.05766954645514488, 0.0019842395558953285, 0.027660252526402473, 0.0024263570085167885, 0.27836892008781433, 0.0071371858939528465, 0.33260056376457214, 0.00313896918669343, 0.05953202024102211, 0.005171565338969231, 0.02260439470410347, 0.019568154588341713, 0.10463922470808029, 0.0016320813447237015], [0.0013018905883654952, 0.0022461467888206244, 0.011533088982105255, 0.002851085038855672, 0.0010752829257398844, 0.001029213541187346, 0.0008151145884767175, 0.003683604998514056, 0.0009654220775701106, 0.004610789939761162, 0.0005807846318930387, 0.0014103958383202553, 0.000631710106972605, 0.0020353335421532393, 0.004374789539724588, 0.014436627738177776, 0.0027821515686810017, 0.8246915340423584, 0.002404544735327363, 0.09383156150579453, 0.005514699500054121, 0.00872588437050581, 0.0007254900992847979, 0.007742894347757101], [0.01105394959449768, 0.006916990969330072, 0.014448482543230057, 0.008169994689524174, 
0.017269520089030266, 0.008214415982365608, 0.006370447110384703, 0.0060040648095309734, 0.012292549014091492, 0.027369605377316475, 0.014999760314822197, 0.003106846008449793, 0.010417910292744637, 0.0019883650820702314, 0.11139582842588425, 0.012493069283664227, 0.07439304143190384, 0.07867418974637985, 0.3023281991481781, 0.042653393000364304, 0.13393986225128174, 0.027782989665865898, 0.06282725185155869, 0.004889342002570629], [0.003885796060785651, 0.0011199864093214273, 0.01715654507279396, 0.002697428921237588, 0.0018518554279580712, 0.003092391649261117, 0.006686271168291569, 0.019578203558921814, 0.0027947372291237116, 0.006526059936732054, 0.00299064046703279, 0.006962302606552839, 0.0024820889811962843, 0.0026086869183927774, 0.015887724235653877, 0.005736963823437691, 0.0023097791709005833, 0.03825583681464195, 0.009442129172384739, 0.7699679732322693, 0.012286358512938023, 0.030486956238746643, 0.005787451285868883, 0.029405750334262848], [0.02216438204050064, 0.014309332706034184, 0.06368351727724075, 0.013206930831074715, 0.038592904806137085, 0.018284190446138382, 0.027531199157238007, 0.018201559782028198, 0.01654529757797718, 0.0219870638102293, 0.02736026421189308, 0.01102377288043499, 0.023504381999373436, 0.009365817531943321, 0.083177849650383, 0.021099675446748734, 0.04498191922903061, 0.03264209255576134, 0.07612068206071854, 0.03810139745473862, 0.11020611971616745, 0.05622332915663719, 0.15540820360183716, 0.05627816915512085]], [[0.004169648978859186, 0.0026631357613950968, 0.8531606197357178, 0.001252102549187839, 0.024372847750782967, 0.010058499872684479, 0.007964002899825573, 0.01518664974719286, 0.011638079769909382, 0.0049317097291350365, 0.01086623128503561, 0.006501068826764822, 0.007240790408104658, 0.00204801675863564, 0.017905086278915405, 0.0007130177109502256, 0.0007124410476535559, 0.0015739047667011619, 0.003262285841628909, 0.005454348865896463, 0.001981649547815323, 0.0015189256519079208, 0.0031962187495082617, 
0.0016288601327687502], [0.004911305382847786, 0.002856919774785638, 0.7038610577583313, 0.002036504680290818, 0.045844003558158875, 0.012354346923530102, 0.010328538715839386, 0.03150061145424843, 0.02545035257935524, 0.004745430778712034, 0.02720535360276699, 0.021233929321169853, 0.021258415654301643, 0.004030017182230949, 0.035077616572380066, 0.0030049749184399843, 0.0019629874732345343, 0.002375861629843712, 0.0023614848032593727, 0.012581253424286842, 0.006568193435668945, 0.0018921502633020282, 0.009586505591869354, 0.006972186267375946], [0.007219742052257061, 0.004406445659697056, 0.18199001252651215, 0.00114752899389714, 0.016821768134832382, 0.050324320793151855, 0.10512349754571915, 0.07105983048677444, 0.05229127034544945, 0.03975888714194298, 0.010263738222420216, 0.08373971283435822, 0.0891132578253746, 0.017652101814746857, 0.07640070468187332, 0.002639925805851817, 0.0036724014207720757, 0.014238509349524975, 0.0688081681728363, 0.03403175249695778, 0.030196409672498703, 0.005497362464666367, 0.004109039902687073, 0.029493656009435654], [0.0016970850992947817, 0.0028025482315570116, 0.9074742794036865, 0.00041699386201798916, 0.03641310706734657, 0.0030381132382899523, 0.004103853367269039, 0.005725167226046324, 0.0017681613098829985, 0.003978161606937647, 0.0073699988424777985, 0.001614232431165874, 0.0038390096742659807, 0.0016750978538766503, 0.008330672048032284, 0.00023367925314232707, 0.0003132833226118237, 0.00027688450063578784, 0.001515097450464964, 0.0019626787398010492, 0.0006032938254065812, 0.00155863375402987, 0.002703150035813451, 0.0005868189036846161], [0.0027857802342623472, 0.0031908575911074877, 0.3436507284641266, 0.011970116756856441, 0.07538251578807831, 0.010109350085258484, 0.04036739096045494, 0.0927075669169426, 0.01870913803577423, 0.0053907535038888454, 0.02226058766245842, 0.08362647145986557, 0.02117360569536686, 0.006828144192695618, 0.038316547870635986, 0.011208673939108849, 0.05788058415055275, 
0.021332671865820885, 0.013083497993648052, 0.0504031665623188, 0.028180398046970367, 0.001518918783403933, 0.01140770222991705, 0.02851477451622486], [0.010189676657319069, 0.005557059310376644, 0.7609386444091797, 0.0008863233379088342, 0.040121570229530334, 0.03669393062591553, 0.017707370221614838, 0.019869977608323097, 0.010142717510461807, 0.02384151704609394, 0.02167576365172863, 0.0047689443454146385, 0.007582290098071098, 0.004552485886961222, 0.014473335817456245, 0.0004134033515583724, 0.0006543574272654951, 0.001009596511721611, 0.0033437104430049658, 0.005450098309665918, 0.0007659941329620779, 0.0049790432676672935, 0.0033161884639412165, 0.001066002412699163], [0.02173837274312973, 0.006562079302966595, 0.4317232072353363, 0.0019734264351427555, 0.02489071898162365, 0.0500442199409008, 0.03263849392533302, 0.08113046735525131, 0.041999589651823044, 0.06286901235580444, 0.019103463739156723, 0.04333879053592682, 0.03623221814632416, 0.01682388037443161, 0.05069119855761528, 0.0022411211393773556, 0.000800616922788322, 0.006076381541788578, 0.013361768797039986, 0.026365183293819427, 0.004061169922351837, 0.010608017444610596, 0.005339889787137508, 0.009386790916323662], [0.011456061154603958, 0.007919606752693653, 0.3940826952457428, 0.0035631752107292414, 0.09933822602033615, 0.04451245069503784, 0.07202211022377014, 0.05077657476067543, 0.036058418452739716, 0.05268307030200958, 0.023884981870651245, 0.02151196263730526, 0.017597923055291176, 0.013588907197117805, 0.03627493605017662, 0.0024811201728880405, 0.011296778917312622, 0.003759595798328519, 0.025650516152381897, 0.025973886251449585, 0.009474911727011204, 0.02025924250483513, 0.008140134625136852, 0.007692710030823946], [0.019935600459575653, 0.010475019924342632, 0.2182050496339798, 0.010785725899040699, 0.05674422159790993, 0.04720943421125412, 0.04391677677631378, 0.05896596610546112, 0.052744749933481216, 0.04929749295115471, 0.06284105032682419, 0.09566831588745117, 
0.05709400027990341, 0.023791233077645302, 0.06449656933546066, 0.012532074935734272, 0.010680004023015499, 0.023471571505069733, 0.010784626938402653, 0.020100269466638565, 0.014933368191123009, 0.008948438800871372, 0.007502690888941288, 0.0188757237046957], [0.01423995103687048, 0.0070901489816606045, 0.2051030546426773, 0.003623482072725892, 0.046500563621520996, 0.10536251962184906, 0.1447012573480606, 0.061709754168987274, 0.03959881514310837, 0.10193664580583572, 0.012610775418579578, 0.051867108792066574, 0.053192492574453354, 0.012121761217713356, 0.05755341053009033, 0.005458611063659191, 0.007051229942589998, 0.003379120957106352, 0.020214488729834557, 0.012171139940619469, 0.004994209855794907, 0.016651995480060577, 0.0018486448097974062, 0.01101888157427311], [0.0160951130092144, 0.005252243019640446, 0.12229171395301819, 0.004401017911732197, 0.04036625847220421, 0.045639585703611374, 0.11048223078250885, 0.04243640601634979, 0.08516588807106018, 0.08909431099891663, 0.020053399726748466, 0.14693324267864227, 0.08194123953580856, 0.01895984821021557, 0.07150740176439285, 0.008369159884750843, 0.007501989137381315, 0.006539505440741777, 0.02404731884598732, 0.01468956470489502, 0.011458657681941986, 0.00895814411342144, 0.0033179575111716986, 0.014497887343168259], [0.016038112342357635, 0.002338879741728306, 0.2615593373775482, 0.0009291854221373796, 0.017567971721291542, 0.07067564129829407, 0.0688423216342926, 0.06192425265908241, 0.05433228611946106, 0.18144747614860535, 0.023476410657167435, 0.041466306895017624, 0.04387688264250755, 0.011193210259079933, 0.08245822787284851, 0.001503421925008297, 0.0013924349332228303, 0.0037488339003175497, 0.020438862964510918, 0.01402752660214901, 0.0026011853478848934, 0.011089724488556385, 0.0016221099067479372, 0.005449363030493259], [0.020894087851047516, 0.0021146959625184536, 0.26286324858665466, 0.00156545196659863, 0.014730902388691902, 0.06491214781999588, 0.08794447779655457, 0.09596788138151169, 
0.06627264618873596, 0.0586087629199028, 0.02567869983613491, 0.07457412779331207, 0.05413339287042618, 0.008917603641748428, 0.0721806138753891, 0.003252636408433318, 0.0021156813018023968, 0.005708423908799887, 0.02450258657336235, 0.027064679190516472, 0.004842798691242933, 0.0046164304949343204, 0.002786134136840701, 0.013751818798482418], [0.023507410660386086, 0.01226556021720171, 0.2243046909570694, 0.009396389126777649, 0.061209436506032944, 0.02243482880294323, 0.048829447478055954, 0.06776325404644012, 0.07946852594614029, 0.035229798406362534, 0.05599804222583771, 0.07676989585161209, 0.044214919209480286, 0.015696877613663673, 0.08099880069494247, 0.016618406400084496, 0.008163615129888058, 0.010373798198997974, 0.014293627813458443, 0.03306732699275017, 0.013004186563193798, 0.015475915744900703, 0.01594880223274231, 0.014966459944844246], [0.018289539963006973, 0.010133355855941772, 0.023497944697737694, 0.0034620927181094885, 0.007737031672149897, 0.04129291698336601, 0.2600119411945343, 0.039861880242824554, 0.06870682537555695, 0.08034989982843399, 0.0102548124268651, 0.06804264336824417, 0.0691932886838913, 0.032767701894044876, 0.0530153252184391, 0.012664604932069778, 0.003896083915606141, 0.012372688390314579, 0.10234920680522919, 0.017766837030649185, 0.01505843922495842, 0.019283024594187737, 0.005745001137256622, 0.024246983230113983], [0.015196969732642174, 0.01984419859945774, 0.2907249331474304, 0.00558173144236207, 0.052012816071510315, 0.03332233801484108, 0.07220309227705002, 0.027724696323275566, 0.03813258558511734, 0.07606236636638641, 0.01959490403532982, 0.033957574516534805, 0.06084810197353363, 0.037924494594335556, 0.0584888681769371, 0.00629595248028636, 0.005666425917297602, 0.0075609865598380566, 0.04306232929229736, 0.015140804462134838, 0.013358129188418388, 0.04685576632618904, 0.007085275370627642, 0.013354677706956863], [0.010750558227300644, 0.003369424259290099, 0.029776252806186676, 0.011220558546483517, 
0.00727890245616436, 0.01891704462468624, 0.07291524857282639, 0.0658603310585022, 0.064809150993824, 0.016745522618293762, 0.010732468217611313, 0.15011709928512573, 0.05011870339512825, 0.014386248774826527, 0.09091740846633911, 0.04792076721787453, 0.02080845646560192, 0.0818934440612793, 0.07757385820150375, 0.055977702140808105, 0.04299824684858322, 0.006516754161566496, 0.004006960894912481, 0.04438883811235428], [0.035856518894433975, 0.01599724218249321, 0.06987765431404114, 0.011515075340867043, 0.0205059964209795, 0.07501786947250366, 0.07459155470132828, 0.03708796575665474, 0.07848449796438217, 0.04998321831226349, 0.036652322858572006, 0.0454694889485836, 0.05292704328894615, 0.03737418353557587, 0.07597095519304276, 0.02072373405098915, 0.011134224012494087, 0.025287210941314697, 0.05865773558616638, 0.043006863445043564, 0.0342755950987339, 0.03899819403886795, 0.02017052471637726, 0.030434364452958107], [0.02402568981051445, 0.018187489360570908, 0.05472191795706749, 0.01598050631582737, 0.03905654326081276, 0.05685233697295189, 0.027406439185142517, 0.06576994061470032, 0.06301363557577133, 0.06340718269348145, 0.04986264184117317, 0.04787427932024002, 0.05103763937950134, 0.043991878628730774, 0.06103840097784996, 0.025342876091599464, 0.030208397656679153, 0.0380227230489254, 0.025004589930176735, 0.04652377590537071, 0.03410761430859566, 0.0439458005130291, 0.029460549354553223, 0.04515715688467026], [0.030159927904605865, 0.031625013798475266, 0.11941058933734894, 0.015381733886897564, 0.05594457685947418, 0.028808562085032463, 0.056920066475868225, 0.02617153339087963, 0.024337071925401688, 0.037078965455293655, 0.03341009095311165, 0.013931956142187119, 0.018459804356098175, 0.04080318287014961, 0.058984752744436264, 0.014198402874171734, 0.03135441616177559, 0.020602066069841385, 0.09700290858745575, 0.05744202435016632, 0.05182687193155289, 0.06813916563987732, 0.04289582744240761, 0.025110580027103424], [0.030712630599737167, 
0.022750629112124443, 0.05111785978078842, 0.022345667704939842, 0.020319581031799316, 0.05262414738535881, 0.03817394748330116, 0.04403434321284294, 0.0355767160654068, 0.06579948216676712, 0.05111263319849968, 0.08134229481220245, 0.07441569864749908, 0.03762604668736458, 0.07431406527757645, 0.03439565375447273, 0.012352201156318188, 0.054100748151540756, 0.038287822157144547, 0.027109308168292046, 0.03313959017395973, 0.026617132127285004, 0.02956690825521946, 0.0421648733317852], [0.023434892296791077, 0.02048959955573082, 0.027106042951345444, 0.018083389848470688, 0.016230277717113495, 0.06533866375684738, 0.0994505062699318, 0.041869599372148514, 0.03438471630215645, 0.03498801216483116, 0.015072026289999485, 0.03787156939506531, 0.04421338066458702, 0.03719402849674225, 0.0618777796626091, 0.03124585747718811, 0.024771159514784813, 0.04697689041495323, 0.11612334102392197, 0.042033400386571884, 0.068056620657444, 0.02366224303841591, 0.01860206015408039, 0.05092395097017288], [0.01912236027419567, 0.00799344852566719, 0.003128709737211466, 0.04238731041550636, 0.0030851424671709538, 0.013026055879890919, 0.03322131931781769, 0.010063692927360535, 0.03028709813952446, 0.02046641893684864, 0.011571726761758327, 0.07644850015640259, 0.030946552753448486, 0.026840059086680412, 0.031141027808189392, 0.1212657019495964, 0.03011101298034191, 0.18480102717876434, 0.07408512383699417, 0.0317385196685791, 0.1060289740562439, 0.015248102135956287, 0.014468920417129993, 0.06252310425043106], [0.0470246858894825, 0.00977203156799078, 0.1041429415345192, 0.012882817536592484, 0.013994788751006126, 0.059377044439315796, 0.042136989533901215, 0.05652027949690819, 0.05159711837768555, 0.05133823677897453, 0.04338163509964943, 0.04588989168405533, 0.03971175104379654, 0.02230820618569851, 0.07929510623216629, 0.027606384828686714, 0.007087633013725281, 0.056441109627485275, 0.06691744923591614, 0.06332654505968094, 0.026032796129584312, 0.024499304592609406, 
0.021169135347008705, 0.027546217665076256]], [[0.015819285064935684, 0.026924125850200653, 0.042775921523571014, 0.02240678481757641, 0.009192337282001972, 0.014498492702841759, 0.05742539092898369, 0.0247067678719759, 0.07627016305923462, 0.024947158992290497, 0.045215968042612076, 0.08423014730215073, 0.09769445657730103, 0.037242528051137924, 0.08560913801193237, 0.040443334728479385, 0.023708615452051163, 0.017200738191604614, 0.03387461602687836, 0.014965608716011047, 0.03815624490380287, 0.036739904433488846, 0.04364349693059921, 0.08630873262882233], [0.015577632002532482, 0.008143957704305649, 0.031591035425662994, 0.021193429827690125, 0.010488497093319893, 0.01406208984553814, 0.055376891046762466, 0.028569437563419342, 0.06615139544010162, 0.026977049186825752, 0.07340992987155914, 0.08112452179193497, 0.08154318481683731, 0.01815582998096943, 0.10173408687114716, 0.0383727103471756, 0.023049987852573395, 0.047920580953359604, 0.028946585953235626, 0.013872754760086536, 0.03640979528427124, 0.056531187146902084, 0.0594320073723793, 0.06136539578437805], [0.007375726941972971, 0.007035403978079557, 0.05774497985839844, 0.01280373614281416, 0.009374410845339298, 0.0026843769010156393, 0.05871366709470749, 0.020142044872045517, 0.057348333299160004, 0.0420360192656517, 0.044826850295066833, 0.09346815943717957, 0.06147973611950874, 0.01251076441258192, 0.1438879519701004, 0.07139606773853302, 0.04182921722531319, 0.028076784685254097, 0.015695134177803993, 0.010660221800208092, 0.0069993711076676846, 0.13255615532398224, 0.016593443229794502, 0.04476146027445793], [0.006483416073024273, 0.005644343327730894, 0.03183538839221001, 0.022166844457387924, 0.009189301170408726, 0.002706758212298155, 0.04073796048760414, 0.022116709500551224, 0.0998995304107666, 0.03432492911815643, 0.033161524683237076, 0.043253351002931595, 0.10140874981880188, 0.01373384427279234, 0.15632124245166779, 0.09080728143453598, 0.0392439179122448, 0.029768560081720352, 
0.027180779725313187, 0.014006325975060463, 0.028569448739290237, 0.07500026375055313, 0.017560867592692375, 0.054878681898117065], [0.004506794270128012, 0.002312267431989312, 0.04331909120082855, 0.016858579590916634, 0.0021372949704527855, 0.005422212649136782, 0.0833166316151619, 0.010714022442698479, 0.019625714048743248, 0.014123807661235332, 0.04105384275317192, 0.035965390503406525, 0.04737154394388199, 0.008831944316625595, 0.46674713492393494, 0.03312591835856438, 0.004471112042665482, 0.04269065707921982, 0.015126973390579224, 0.015270392410457134, 0.010530935600399971, 0.041218504309654236, 0.012330357916653156, 0.022928891703486443], [0.01361851766705513, 0.016854697838425636, 0.06089509651064873, 0.026829324662685394, 0.01870936155319214, 0.014037185348570347, 0.08747139573097229, 0.020617244765162468, 0.06187679246068001, 0.02311631664633751, 0.0700736716389656, 0.026962358504533768, 0.04933270439505577, 0.0345279835164547, 0.15263406932353973, 0.04405709356069565, 0.017725348472595215, 0.06018052250146866, 0.024418456479907036, 0.015218528918921947, 0.042030587792396545, 0.06691553443670273, 0.02607269585132599, 0.02582447975873947], [0.020198490470647812, 0.00572221027687192, 0.05234304815530777, 0.010621036402881145, 0.00474315881729126, 0.015585023909807205, 0.10813885927200317, 0.03795843571424484, 0.026108860969543457, 0.014110100455582142, 0.05898719280958176, 0.0478847362101078, 0.07296131551265717, 0.012162097729742527, 0.2299162894487381, 0.02657872997224331, 0.008269090205430984, 0.022416021674871445, 0.05640954151749611, 0.04253079369664192, 0.02424859069287777, 0.029317043721675873, 0.028418265283107758, 0.04437113553285599], [0.005323055200278759, 0.004246942233294249, 0.03594833239912987, 0.011424291878938675, 0.00573565112426877, 0.004393060225993395, 0.06798447668552399, 0.009107949212193489, 0.05532107874751091, 0.014095459133386612, 0.06427759677171707, 0.1459210366010666, 0.08890976011753082, 0.007095170672982931, 
0.20912158489227295, 0.05798886716365814, 0.02841350808739662, 0.016304291784763336, 0.025888539850711823, 0.005767578724771738, 0.008539164438843727, 0.05544493347406387, 0.03143080696463585, 0.04131679609417915], [0.006888173054903746, 0.005888954736292362, 0.055983766913414, 0.004564840812236071, 0.002856846898794174, 0.012821217067539692, 0.08836081624031067, 0.02933535911142826, 0.012379192747175694, 0.01940612867474556, 0.11824164539575577, 0.033861614763736725, 0.07047968357801437, 0.00986458733677864, 0.34870630502700806, 0.007873800583183765, 0.005459833890199661, 0.01588498428463936, 0.021591825410723686, 0.00906410813331604, 0.007738722488284111, 0.02881006710231304, 0.06094397231936455, 0.022993527352809906], [0.007739379070699215, 0.0035704888869076967, 0.027197252959012985, 0.02204066514968872, 0.012057292275130749, 0.0070341709069907665, 0.04346088692545891, 0.031170301139354706, 0.02544984593987465, 0.022557659074664116, 0.0426739938557148, 0.09692857414484024, 0.10625512897968292, 0.012783946469426155, 0.19654731452465057, 0.04543667286634445, 0.038537461310625076, 0.04426654428243637, 0.029638269916176796, 0.022622467949986458, 0.013589609414339066, 0.07996873557567596, 0.028924886137247086, 0.03954849764704704], [0.0026955583598464727, 0.0013384043704718351, 0.04249623045325279, 0.005333033390343189, 0.0006768426392227411, 0.003587909508496523, 0.130182683467865, 0.012217887677252293, 0.030162258073687553, 0.014796728268265724, 0.06770054996013641, 0.020068060606718063, 0.032931629568338394, 0.005243957042694092, 0.45201966166496277, 0.020960349589586258, 0.002191907027736306, 0.02935807593166828, 0.03177417814731598, 0.007948758080601692, 0.01080187875777483, 0.030606640502810478, 0.02522677555680275, 0.01968011073768139], [0.005830694455653429, 0.004881970584392548, 0.049054104834795, 0.009207397699356079, 0.0033965681213885546, 0.006408302579075098, 0.0560116246342659, 0.01447529997676611, 0.04503266140818596, 0.021931838244199753, 
0.12464922666549683, 0.05087114870548248, 0.07861587405204773, 0.012002440169453621, 0.2343657910823822, 0.027741527184844017, 0.01226719468832016, 0.04534469544887543, 0.029765011742711067, 0.011489585041999817, 0.03475075587630272, 0.05598649010062218, 0.019602037966251373, 0.04631779342889786], [0.011973466724157333, 0.00821115355938673, 0.050550512969493866, 0.00932349544018507, 0.009419888257980347, 0.010000393725931644, 0.04817905277013779, 0.044203538447618484, 0.04359981417655945, 0.02871367521584034, 0.08514997363090515, 0.05709832161664963, 0.06378915160894394, 0.015546993352472782, 0.15106411278247833, 0.029789438471198082, 0.029706090688705444, 0.04696820676326752, 0.04829583689570427, 0.036956630647182465, 0.03808603435754776, 0.05083045735955238, 0.02643917128443718, 0.0561046339571476], [0.013464822433888912, 0.013215594924986362, 0.017758704721927643, 0.03660162165760994, 0.014732546173036098, 0.009572304785251617, 0.027449825778603554, 0.03482463210821152, 0.05050887539982796, 0.018204694613814354, 0.04323364049196243, 0.08126205950975418, 0.10090174525976181, 0.0237989854067564, 0.049628593027591705, 0.07563869655132294, 0.0614963099360466, 0.03909948468208313, 0.029279716312885284, 0.024425355717539787, 0.03716461732983589, 0.04162425547838211, 0.060532934963703156, 0.09557998180389404], [0.015825534239411354, 0.015478378161787987, 0.08148988336324692, 0.007189614232629538, 0.006836214102804661, 0.01929348334670067, 0.06677643954753876, 0.020012307912111282, 0.03462541475892067, 0.0854221060872078, 0.17204312980175018, 0.020258327946066856, 0.029241161420941353, 0.01678495667874813, 0.12369884550571442, 0.014112833887338638, 0.008093651384115219, 0.03714800253510475, 0.05446021631360054, 0.031203070655465126, 0.020701073110103607, 0.05059920623898506, 0.04007088765501976, 0.02863527275621891], [0.010560587048530579, 0.010280352085828781, 0.06575015932321548, 0.01995682716369629, 0.009108413010835648, 0.007820547558367252, 0.029732108116149902, 
0.023993797600269318, 0.08296177536249161, 0.06298288702964783, 0.08828325569629669, 0.028176410123705864, 0.05637047812342644, 0.013582304120063782, 0.17027242481708527, 0.042777322232723236, 0.023579280823469162, 0.039093729108572006, 0.041939686983823776, 0.01592344045639038, 0.03643452003598213, 0.046082962304353714, 0.033442698419094086, 0.04089409112930298], [0.005951763596385717, 0.004207103047519922, 0.0724625438451767, 0.009987544268369675, 0.001788630150258541, 0.009268262423574924, 0.06827990710735321, 0.01294653583317995, 0.018514586612582207, 0.032138314098119736, 0.05741463601589203, 0.03856053575873375, 0.04350529983639717, 0.008942664600908756, 0.4225136637687683, 0.015388591215014458, 0.004021224100142717, 0.02199258655309677, 0.030536770820617676, 0.01177630852907896, 0.012985843233764172, 0.03875783458352089, 0.02898409403860569, 0.029074767604470253], [0.0687570571899414, 0.03190179914236069, 0.05907980352640152, 0.027225565165281296, 0.025799307972192764, 0.05282806605100632, 0.023529518395662308, 0.036684129387140274, 0.08606965839862823, 0.08135754615068436, 0.0721484050154686, 0.02348901703953743, 0.032380178570747375, 0.024813147261738777, 0.04499392956495285, 0.026031088083982468, 0.015225382521748543, 0.03927023336291313, 0.0246469397097826, 0.02515445649623871, 0.04454340785741806, 0.05584648624062538, 0.04915141686797142, 0.029073411598801613], [0.046102125197649, 0.01842459663748741, 0.06757502257823944, 0.01714194193482399, 0.008194896392524242, 0.06086503714323044, 0.0604681521654129, 0.03855670616030693, 0.028956105932593346, 0.03121415339410305, 0.11226887255907059, 0.020873719826340675, 0.028379209339618683, 0.01619740203022957, 0.12190455198287964, 0.025725066661834717, 0.008334606885910034, 0.027769025415182114, 0.04964492842555046, 0.041948847472667694, 0.044008709490299225, 0.015785282477736473, 0.0776844248175621, 0.03197658434510231], [0.034550830721855164, 0.03426187485456467, 0.06105315685272217, 0.01603134535253048, 
0.022478261962532997, 0.023193322122097015, 0.024587756022810936, 0.027541905641555786, 0.07372730225324631, 0.06309740990400314, 0.06773073971271515, 0.07581689953804016, 0.054884303361177444, 0.016503848135471344, 0.08271624147891998, 0.03523476794362068, 0.04657650366425514, 0.011063291691243649, 0.04175909608602524, 0.013515826314687729, 0.025788867846131325, 0.04484469071030617, 0.04887351766228676, 0.054168302565813065], [0.05901459977030754, 0.06951946765184402, 0.06713695824146271, 0.01248626783490181, 0.019180769100785255, 0.12499696016311646, 0.01993347704410553, 0.07491602003574371, 0.0130996685475111, 0.06618563830852509, 0.11016455292701721, 0.02636280469596386, 0.018865853548049927, 0.02671900950372219, 0.050265803933143616, 0.009697937406599522, 0.012705300003290176, 0.017543550580739975, 0.03715306147933006, 0.03720582276582718, 0.0246921107172966, 0.015440010465681553, 0.0632215216755867, 0.02349284663796425], [0.07028453797101974, 0.03803817555308342, 0.06484199315309525, 0.01629164069890976, 0.052715253084897995, 0.06614629179239273, 0.00814906321465969, 0.06756555289030075, 0.015926901251077652, 0.04303313419222832, 0.1042247787117958, 0.014194218441843987, 0.01161638181656599, 0.020347202196717262, 0.05507032945752144, 0.013839290477335453, 0.03323501721024513, 0.0428585410118103, 0.023137252777814865, 0.07685285061597824, 0.04192281514406204, 0.023343699052929878, 0.0769646093249321, 0.01940038986504078], [0.03907002508640289, 0.025523794814944267, 0.09840674698352814, 0.014514436945319176, 0.0061791217885911465, 0.041704095900058746, 0.037996795028448105, 0.038921695202589035, 0.0371793657541275, 0.07667599618434906, 0.13808637857437134, 0.014228308573365211, 0.018335619941353798, 0.021949738264083862, 0.15228348970413208, 0.022441279143095016, 0.006293612066656351, 0.028412124142050743, 0.036041259765625, 0.01991061493754387, 0.02826876938343048, 0.03171888366341591, 0.04807493835687637, 0.017782896757125854], [0.04081736505031586, 
0.054070744663476944, 0.09273099899291992, 0.012232346460223198, 0.02726481668651104, 0.036969076842069626, 0.01925075240433216, 0.027663379907608032, 0.03000355325639248, 0.05391421541571617, 0.18642310798168182, 0.025519469752907753, 0.025082705542445183, 0.023509599268436432, 0.061750221997499466, 0.011668363586068153, 0.026676030829548836, 0.013590282760560513, 0.024639926850795746, 0.021113196387887, 0.04716289043426514, 0.027379700914025307, 0.07744047790765762, 0.03312687203288078]], [[0.057467103004455566, 0.02076822705566883, 0.018417280167341232, 0.02561381831765175, 0.07382692396640778, 0.04245009645819664, 0.11719062924385071, 0.05155020207166672, 0.13851507008075714, 0.0865674540400505, 0.03346595913171768, 0.03656884655356407, 0.07092194259166718, 0.022079836577177048, 0.01434214785695076, 0.010874290019273758, 0.022745750844478607, 0.011435085907578468, 0.02741556614637375, 0.01943863555788994, 0.04430045187473297, 0.01299966685473919, 0.008208712562918663, 0.03283639997243881], [0.037933360785245895, 0.01957595720887184, 0.0561896376311779, 0.023228077217936516, 0.035687949508428574, 0.048181790858507156, 0.05842788144946098, 0.07652390748262405, 0.04927201196551323, 0.03568287938833237, 0.07641520351171494, 0.044957634061574936, 0.03353789821267128, 0.019777672365307808, 0.07266319543123245, 0.031661488115787506, 0.03023282065987587, 0.03612106665968895, 0.035454150289297104, 0.0406542643904686, 0.0321112796664238, 0.02546040527522564, 0.05570710450410843, 0.02454228512942791], [0.04008086398243904, 0.011255201883614063, 0.008743281476199627, 0.0466369166970253, 0.11897250264883041, 0.5223038196563721, 0.015145760960876942, 0.013440211303532124, 0.041746899485588074, 0.04091993719339371, 0.015575146302580833, 0.019331689924001694, 0.017368149012327194, 0.025305651128292084, 0.003121240297332406, 0.009315765462815762, 0.013179266825318336, 0.0026122250128537416, 0.00484081357717514, 0.008764786645770073, 0.00599551061168313, 0.006331634242087603, 
0.0032677671406418085, 0.005744996480643749], [0.007642517797648907, 0.0032454708125442266, 0.007471208926290274, 0.024463940411806107, 0.05364113673567772, 0.7457591891288757, 0.012826516292989254, 0.01723094843327999, 0.06925132125616074, 0.02479429915547371, 0.004803826101124287, 0.0039897495880723, 0.005170508287847042, 0.0030552088283002377, 0.0005295266746543348, 0.0038461789954453707, 0.0005925959558226168, 0.0003186811227351427, 0.0005909849423915148, 0.003836205694824457, 0.0016983632231131196, 0.0021697923075407743, 0.0005684405914507806, 0.0025034844875335693], [0.008578835055232048, 0.0029878122732043266, 0.002834792248904705, 0.012459455989301205, 0.01930934190750122, 0.798172116279602, 0.020811766386032104, 0.006530069280415773, 0.05876186490058899, 0.005303625017404556, 0.0068059517070651054, 0.0016001994954422116, 0.004058254417032003, 0.003544124076142907, 0.002062755636870861, 0.006297771818935871, 0.0006965077482163906, 0.003345916513353586, 0.002701355842873454, 0.004216022789478302, 0.011158586479723454, 0.0066623627208173275, 0.005729188211262226, 0.005371324252337217], [0.04058092087507248, 0.020502395927906036, 0.03228716179728508, 0.023677831515669823, 0.10709626227617264, 0.030679043382406235, 0.0717281848192215, 0.10444001108407974, 0.06563395261764526, 0.14053845405578613, 0.0833560973405838, 0.03223579749464989, 0.03532945737242699, 0.03392625227570534, 0.022565213963389397, 0.008515791967511177, 0.010549359023571014, 0.0022742555011063814, 0.02996104769408703, 0.03614110127091408, 0.013155143707990646, 0.038085468113422394, 0.009788410738110542, 0.006952312774956226], [0.046089738607406616, 0.04987785220146179, 0.0768977552652359, 0.025143392384052277, 0.053960978984832764, 0.023907383903861046, 0.031389448791742325, 0.09628899395465851, 0.18185359239578247, 0.04132020100951195, 0.10671504586935043, 0.02574271522462368, 0.03740697726607323, 0.04003571346402168, 0.03656509146094322, 0.011823429726064205, 0.008815146051347256, 
0.006850611884146929, 0.01230232510715723, 0.012525258585810661, 0.01539839617908001, 0.02052428387105465, 0.02465352602303028, 0.013912123627960682], [0.006654892582446337, 0.003810916095972061, 0.009182722307741642, 0.020447073504328728, 0.0706256777048111, 0.3241981267929077, 0.04477633535861969, 0.013196531683206558, 0.21898598968982697, 0.15637299418449402, 0.059636663645505905, 0.008803079836070538, 0.023786423727869987, 0.0023167768958956003, 0.00491896690800786, 0.0071455989964306355, 0.000672442780341953, 0.0028438365552574396, 0.0021514352411031723, 0.0017287349328398705, 0.004445524886250496, 0.009579467587172985, 0.0020330138504505157, 0.0016868385719135404], [0.05364329367876053, 0.008494672365486622, 0.02327561378479004, 0.012081699445843697, 0.029927857220172882, 0.010309172794222832, 0.237191841006279, 0.04296811297535896, 0.09266691654920578, 0.05840868875384331, 0.11325012892484665, 0.05814412981271744, 0.0770462155342102, 0.025091035291552544, 0.03565044328570366, 0.009104723110795021, 0.008463933132588863, 0.006554081104695797, 0.021259956061840057, 0.005253759678453207, 0.015452228486537933, 0.0072280946187675, 0.0258382186293602, 0.02269514463841915], [0.019732961431145668, 0.0035395189188420773, 0.029007339850068092, 0.011773071251809597, 0.01423447672277689, 0.055100273340940475, 0.11088111251592636, 0.1472545713186264, 0.16315609216690063, 0.0367932952940464, 0.1821071058511734, 0.06951412558555603, 0.05210605263710022, 0.006641406565904617, 0.017143236473202705, 0.013275686651468277, 0.0011523026041686535, 0.004624498542398214, 0.011569511145353317, 0.014785360544919968, 0.007774027064442635, 0.00776966568082571, 0.011852141469717026, 0.008212181739509106], [0.03356535732746124, 0.015957145020365715, 0.03225395455956459, 0.004478755407035351, 0.007666046731173992, 0.0004306508635636419, 0.06701331585645676, 0.04936273396015167, 0.05929394066333771, 0.06111788749694824, 0.1542510986328125, 0.06716404855251312, 0.17511871457099915, 
0.07028904557228088, 0.07528570294380188, 0.006737357936799526, 0.019605180248618126, 0.006666585803031921, 0.020331447944045067, 0.008884786628186703, 0.012247066013514996, 0.016481218859553337, 0.02007589302957058, 0.015722062438726425], [0.01467908639460802, 0.007737939711660147, 0.027475222945213318, 0.004811993800103664, 0.015063794329762459, 0.017374491319060326, 0.07559449225664139, 0.056220825761556625, 0.07464340329170227, 0.12456865608692169, 0.14719565212726593, 0.043345704674720764, 0.12849225103855133, 0.12580664455890656, 0.03820578008890152, 0.00942477211356163, 0.007635494228452444, 0.010102530010044575, 0.0071206120774149895, 0.008548039011657238, 0.006231627892702818, 0.016808051615953445, 0.01184109691530466, 0.02107175625860691], [0.01600884459912777, 0.005145729519426823, 0.027156641706824303, 0.0020217953715473413, 0.0077863833867013454, 0.0032823127694427967, 0.03294295445084572, 0.08336564153432846, 0.09549587219953537, 0.0672764852643013, 0.30016565322875977, 0.07058988511562347, 0.111845001578331, 0.03249667212367058, 0.07693304866552353, 0.004954291973263025, 0.007514502387493849, 0.005598192568868399, 0.006665930617600679, 0.007556634489446878, 0.004451546352356672, 0.006419571582227945, 0.013633955270051956, 0.010692421346902847], [0.025485293939709663, 0.018294410780072212, 0.03833390772342682, 0.008506162092089653, 0.0244775228202343, 0.027656851336359978, 0.06045101210474968, 0.048017632216215134, 0.10475408285856247, 0.047360509634017944, 0.21725726127624512, 0.09323097765445709, 0.08463367074728012, 0.03593306615948677, 0.06683879345655441, 0.017204521223902702, 0.006151220761239529, 0.012733378447592258, 0.010246739722788334, 0.00725402170792222, 0.009430940262973309, 0.008941445499658585, 0.01806476339697838, 0.008741834200918674], [0.017875155434012413, 0.020908795297145844, 0.043729268014431, 0.0025638570077717304, 0.0019467034144327044, 0.00045522378059104085, 0.008497321978211403, 0.013906078413128853, 0.0215266402810812, 
0.04915907233953476, 0.16988900303840637, 0.049809884279966354, 0.11173925548791885, 0.060203585773706436, 0.23081812262535095, 0.010133699513971806, 0.05068828910589218, 0.03521211817860603, 0.015760080888867378, 0.016403522342443466, 0.015780465677380562, 0.00759484525769949, 0.03817965090274811, 0.007219389081001282], [0.02032269723713398, 0.025101739913225174, 0.08256281167268753, 0.018190165981650352, 0.009577390737831593, 0.004654210992157459, 0.021949198096990585, 0.05544991046190262, 0.027559425681829453, 0.19021670520305634, 0.03600965440273285, 0.0492413155734539, 0.09767445921897888, 0.05224694684147835, 0.08844916522502899, 0.03197755292057991, 0.0323345921933651, 0.04084879159927368, 0.011568893678486347, 0.027643734589219093, 0.016050850972533226, 0.03178354352712631, 0.01151084341108799, 0.017075397074222565], [0.016721302643418312, 0.01708456128835678, 0.017034078016877174, 0.020835280418395996, 0.010479575023055077, 0.13948944211006165, 0.02726030722260475, 0.011824817396700382, 0.03876955062150955, 0.02964916080236435, 0.051887400448322296, 0.012891624122858047, 0.07191171497106552, 0.030676083639264107, 0.07446575909852982, 0.05610420182347298, 0.01456863060593605, 0.11140840500593185, 0.03458592668175697, 0.025024186819791794, 0.06745501607656479, 0.04769079014658928, 0.05278167501091957, 0.019400568678975105], [0.009806032292544842, 0.023082168772816658, 0.06091272085905075, 0.006709100678563118, 0.0037564353551715612, 0.001337511115707457, 0.005906734615564346, 0.02453574538230896, 0.005505817010998726, 0.023695914074778557, 0.053872086107730865, 0.032290536910295486, 0.035838544368743896, 0.03947479650378227, 0.15569178760051727, 0.03175187110900879, 0.07172133028507233, 0.06467388570308685, 0.03941154479980469, 0.1867319643497467, 0.023142265155911446, 0.026632115244865417, 0.05911898985505104, 0.014400084502995014], [0.005054273642599583, 0.01813516765832901, 0.02798866666853428, 0.0024045640602707863, 0.001292683300562203, 
0.0017932128394022584, 0.0036530219949781895, 0.014592713676393032, 0.0051286304369568825, 0.022797372192144394, 0.02858620509505272, 0.008598526008427143, 0.02162034437060356, 0.016832217574119568, 0.25257036089897156, 0.027770301327109337, 0.03379521891474724, 0.27538350224494934, 0.029579639434814453, 0.04298021271824837, 0.046133801341056824, 0.05591816082596779, 0.04716838523745537, 0.010222850367426872], [0.0033653879072517157, 0.02358970418572426, 0.029282886534929276, 0.0058023217134177685, 0.004208091180771589, 0.0031398090068250895, 0.0010066042887046933, 0.00939235184341669, 0.0065404148772358894, 0.0105655612424016, 0.015361515805125237, 0.005870065651834011, 0.010093709453940392, 0.010963012464344501, 0.05248498544096947, 0.047225479036569595, 0.05562417209148407, 0.23263658583164215, 0.016672343015670776, 0.12392102926969528, 0.05159799009561539, 0.19547466933727264, 0.07457894831895828, 0.01060232613235712], [0.0061793578788638115, 0.014770357869565487, 0.0184787604957819, 0.002901839092373848, 0.0017925172578543425, 0.001125697628594935, 0.0017769791884347796, 0.005476669408380985, 0.0024495210964232683, 0.0032367431558668613, 0.018852803856134415, 0.007186245638877153, 0.010282302275300026, 0.025498902425169945, 0.1101582869887352, 0.016749562695622444, 0.12888604402542114, 0.18675796687602997, 0.022675497457385063, 0.04517098888754845, 0.04567031189799309, 0.033889614045619965, 0.26960131525993347, 0.020431768149137497], [0.004900042433291674, 0.005690551828593016, 0.013112809509038925, 0.010101048275828362, 0.0012795276707038283, 0.011956354603171349, 0.0024731045123189688, 0.013627604581415653, 0.0025016837753355503, 0.005775552708655596, 0.0030169119127094746, 0.00471189571544528, 0.0035946620628237724, 0.0040058293379843235, 0.00713814003393054, 0.03800360485911369, 0.009419070556759834, 0.1070062667131424, 0.010729227215051651, 0.597217321395874, 0.03696981444954872, 0.03678596392273903, 0.03279627487063408, 0.037186723202466965], 
[0.006910581141710281, 0.013096684589982033, 0.03231871500611305, 0.008032205514609814, 0.0016331080114468932, 0.00014017226931173354, 0.004705635830760002, 0.012928028590977192, 0.003083623945713043, 0.005898316856473684, 0.009762322530150414, 0.006847570650279522, 0.01116273459047079, 0.012060582637786865, 0.07551455497741699, 0.018287431448698044, 0.06851671636104584, 0.06939228624105453, 0.08305674046278, 0.15870632231235504, 0.08727966248989105, 0.129718616604805, 0.14495648443698883, 0.03599090874195099], [0.0023149040061980486, 0.0032241486478596926, 0.011726626195013523, 0.005867440719157457, 0.0013391555985435843, 0.0032203886657953262, 0.0007649276521988213, 0.006816201377660036, 0.0010026684030890465, 0.0027952431701123714, 0.001688696793280542, 0.002438761293888092, 0.0020803730003535748, 0.0016559719806537032, 0.007539732381701469, 0.027059072628617287, 0.015995962545275688, 0.11510548740625381, 0.012670216150581837, 0.5237204432487488, 0.04711448773741722, 0.11329527944326401, 0.06866388767957687, 0.021899988874793053]], [[0.03409641608595848, 0.02131110243499279, 0.07901372015476227, 0.039774589240550995, 0.05015566945075989, 0.03638526797294617, 0.07282435148954391, 0.08322229981422424, 0.08066504448652267, 0.03806992992758751, 0.07779485732316971, 0.016935214400291443, 0.02146166004240513, 0.017147613689303398, 0.023298872634768486, 0.040381237864494324, 0.01728481985628605, 0.03936396539211273, 0.037073634564876556, 0.06281313300132751, 0.02301480993628502, 0.04321381077170372, 0.024366924539208412, 0.02033110521733761], [0.03481725975871086, 0.02328414097428322, 0.03866223618388176, 0.014535670168697834, 0.028706246986985207, 0.025438999757170677, 0.03930852189660072, 0.09683404862880707, 0.04914024472236633, 0.06651882827281952, 0.05541878566145897, 0.06685015559196472, 0.04026160016655922, 0.06993526220321655, 0.058009687811136246, 0.037296831607818604, 0.04786492884159088, 0.04582170397043228, 0.030449647456407547, 0.03048362396657467, 
0.01963799260556698, 0.025441709905862808, 0.02900543063879013, 0.026276450604200363], [0.04415871575474739, 0.059246987104415894, 0.02793949842453003, 0.09683815389871597, 0.07391901314258575, 0.04695655778050423, 0.04382891207933426, 0.04429240897297859, 0.04560456424951553, 0.02830681763589382, 0.030740221962332726, 0.026316728442907333, 0.02657938376069069, 0.06702135503292084, 0.024041494354605675, 0.12102462351322174, 0.0425887256860733, 0.041974470019340515, 0.022526372224092484, 0.02184413932263851, 0.017035849392414093, 0.007253405172377825, 0.03202719986438751, 0.007934335619211197], [0.03176043555140495, 0.03907507285475731, 0.08238822966814041, 0.08469106256961823, 0.020504184067249298, 0.03878532722592354, 0.06246420368552208, 0.21815000474452972, 0.023461036384105682, 0.24046431481838226, 0.00593183096498251, 0.0483531728386879, 0.020474905148148537, 0.006026759278029203, 0.015549221076071262, 0.002261400455608964, 0.0009118790621869266, 0.0059516578912734985, 0.014120342209935188, 0.007846325635910034, 0.00704552186653018, 0.008255287073552608, 0.0020176239777356386, 0.013510186225175858], [0.006157738622277975, 0.04649084061384201, 0.015343084931373596, 0.23181229829788208, 0.05574040859937668, 0.5205127000808716, 0.022866642102599144, 0.003856360912322998, 0.005135274492204189, 0.006845998112112284, 0.007592817768454552, 0.00905103050172329, 0.01794704981148243, 0.009924941696226597, 0.010058386251330376, 0.002564667724072933, 0.0009639008203521371, 0.0025462531484663486, 0.004294385202229023, 0.0006139291217550635, 0.005113258957862854, 0.004318069200962782, 0.00739908404648304, 0.00285096513107419], [0.04336733743548393, 0.05925924330949783, 0.04687505587935448, 0.13893641531467438, 0.1436775177717209, 0.053896546363830566, 0.15200957655906677, 0.031336598098278046, 0.1669500172138214, 0.020957093685865402, 0.007949293591082096, 0.006394407711923122, 0.01190140936523676, 0.003130050143226981, 0.010148391127586365, 0.009413785301148891, 
0.0010420220205560327, 0.0024390656035393476, 0.004457823932170868, 0.012078963220119476, 0.009577046148478985, 0.02266760915517807, 0.005749909207224846, 0.035784829407930374], [0.012220812030136585, 0.06464997678995132, 0.027815287932753563, 0.030687255784869194, 0.02078494243323803, 0.6308772563934326, 0.022656317800283432, 0.055411119014024734, 0.012686026282608509, 0.033156994730234146, 0.004768884740769863, 0.01813925988972187, 0.013522337190806866, 0.019801165908575058, 0.002393001224845648, 0.0008404234540648758, 0.0007866889354772866, 0.0024659852497279644, 0.0018694396130740643, 0.0015273410826921463, 0.007651580963283777, 0.001193201169371605, 0.008776049129664898, 0.005318670533597469], [0.032372042536735535, 0.03007032535970211, 0.0651448667049408, 0.03587115928530693, 0.14738516509532928, 0.06744907051324844, 0.16899625957012177, 0.0306081660091877, 0.12056346237659454, 0.033631738275289536, 0.021161921322345734, 0.027972131967544556, 0.075668103992939, 0.006520355585962534, 0.0309526938945055, 0.004573270678520203, 0.007984839379787445, 0.004936708137392998, 0.0026003301609307528, 0.005331103224307299, 0.009785205125808716, 0.012461477890610695, 0.007186287082731724, 0.050773344933986664], [0.002017183229327202, 0.0009960634633898735, 0.009619226679205894, 0.0030720029026269913, 0.0028314031660556793, 0.050843533128499985, 0.008003728464245796, 0.7538034319877625, 0.004161028191447258, 0.04997789487242699, 0.003400868969038129, 0.09011739492416382, 0.00416715769097209, 0.006729124579578638, 0.0029816629830747843, 0.000805737916380167, 0.0002450532920192927, 0.0018242503283545375, 0.0006507543148472905, 0.0010296566179022193, 0.0002585098845884204, 0.00043281071702949703, 0.0009117299341596663, 0.0011197674321010709], [0.001686559058725834, 0.0020048220176249743, 0.0027298072818666697, 0.0014570910716429353, 0.0040487125515937805, 0.001954730600118637, 0.08455199003219604, 0.028569413349032402, 0.8058176040649414, 0.024623865261673927, 
0.015127033926546574, 0.0038202644791454077, 0.011658879928290844, 0.00046471250243484974, 0.0010692658834159374, 0.0006820702110417187, 0.0002648688096087426, 0.0006221556686796248, 0.0006986354128457606, 0.0017693731933832169, 0.000906103930901736, 0.0022986261174082756, 0.00015839101979508996, 0.0030149950180202723], [0.006651302333921194, 0.00356566091068089, 0.029643112793564796, 0.017341334372758865, 0.017182262614369392, 0.02040557935833931, 0.017664920538663864, 0.45953723788261414, 0.01465473510324955, 0.18652121722698212, 0.021661337465047836, 0.06368586421012878, 0.0018357934895902872, 0.008122658357024193, 0.002641830127686262, 0.007894358597695827, 0.0018847205210477114, 0.02322852425277233, 0.0019362125312909484, 0.08576645702123642, 0.0008786905673332512, 0.004048475064337254, 0.0007003481150604784, 0.002547350712120533], [0.0014561648713424802, 0.0008713615243323147, 0.0023046082351356745, 0.0008322681533172727, 0.010388635098934174, 0.00018739279767032713, 0.02079407498240471, 0.005153916776180267, 0.2580963969230652, 0.04076235741376877, 0.5727391242980957, 0.002347108442336321, 0.023041803389787674, 0.0002726152597460896, 0.033989571034908295, 0.0007344166515395045, 0.0111940773203969, 0.002034028759226203, 0.0037504020147025585, 0.004911040421575308, 0.0012070373632013798, 0.0026990522164851427, 0.00011594167881412432, 0.00011667040962493047], [0.00470432685688138, 0.0004792682302650064, 0.0051914299838244915, 0.0011292273411527276, 0.0048290882259607315, 0.0009575962903909385, 0.00631891842931509, 0.06678230315446854, 0.0034565231762826443, 0.20947447419166565, 0.01668722741305828, 0.5393936038017273, 0.015558137558400631, 0.017591752111911774, 0.01371049601584673, 0.003270061919465661, 0.008137037977576256, 0.02858162112534046, 0.007239439990371466, 0.04244302958250046, 0.000686347542796284, 0.002340365666896105, 0.000823355105239898, 0.00021432657376863062], [0.004433403257280588, 0.004885478876531124, 0.008160842582583427, 
0.0031906762160360813, 0.00994165614247322, 0.0029735651332885027, 0.023084213957190514, 0.012462816201150417, 0.059534501284360886, 0.008717312477529049, 0.16581352055072784, 0.0072707426734268665, 0.25107210874557495, 0.010329273529350758, 0.2947591245174408, 0.004071222618222237, 0.05829644575715065, 0.004055400844663382, 0.024437852203845978, 0.003216243814677, 0.0198249202221632, 0.004261606838554144, 0.01311197318136692, 0.0020950722973793745], [0.004236764740198851, 0.0008264032658189535, 0.0017504135612398386, 0.0036667243111878633, 0.001513686147518456, 0.00395633839070797, 0.0023851697333157063, 0.05945531651377678, 0.0006676155608147383, 0.0032329687383025885, 0.0014522485435009003, 0.06997597217559814, 0.0029292753897607327, 0.27101877331733704, 0.0018988142255693674, 0.4388323128223419, 0.004322742111980915, 0.0965508446097374, 0.0015723485266789794, 0.015926161780953407, 0.0002604158944450319, 0.0010170135647058487, 0.009942814707756042, 0.002608785405755043], [0.012514036148786545, 0.006541287526488304, 0.021292656660079956, 0.00970767717808485, 0.0018719220533967018, 0.0017943094717338681, 0.018030749633908272, 0.07211057096719742, 0.01296956092119217, 0.07108136266469955, 0.01198886800557375, 0.025890953838825226, 0.061987996101379395, 0.0037267382722347975, 0.5856818556785583, 0.004876724444329739, 0.0110412472859025, 0.003989990334957838, 0.044229235500097275, 0.0013193346094340086, 0.0044715567491948605, 0.003408709540963173, 0.0016026162775233388, 0.007869962602853775], [0.002169216750189662, 0.001396584790199995, 0.0021934357937425375, 0.006629745941609144, 0.0023354862350970507, 0.008983091451227665, 0.006275989580899477, 0.008778166957199574, 0.003778161946684122, 0.00413304939866066, 0.006921872496604919, 0.01612788438796997, 0.005344551056623459, 0.017184613272547722, 0.001917011453770101, 0.5154634118080139, 0.004578659776598215, 0.3204120099544525, 0.003797625657171011, 0.033143166452646255, 0.000587755988817662, 0.015698080882430077, 
0.0035218121483922005, 0.008628576062619686], [0.006448242347687483, 0.005055154673755169, 0.009047010913491249, 0.0016590767772868276, 0.0010288109770044684, 0.00017765708616934717, 0.0018602035706862807, 0.0017886862624436617, 0.0052144587971270084, 0.0023919863160699606, 0.0027091887313872576, 0.0009739061933942139, 0.007703406736254692, 0.0016087195836007595, 0.07504051178693771, 0.023617910221219063, 0.261697918176651, 0.0217637550085783, 0.46851226687431335, 0.006483266595751047, 0.059425242245197296, 0.013112138956785202, 0.007313187699764967, 0.015367298386991024], [0.002227051882073283, 0.002141711302101612, 0.002345064654946327, 0.0010928927222266793, 0.00042760922224260867, 0.0008984743035398424, 0.0010012887651100755, 0.004480778705328703, 0.0006250610458664596, 0.005192126147449017, 0.0007733172969892621, 0.0009287027060054243, 0.0002797123452182859, 0.0016745569882914424, 0.0002779986534733325, 0.01040485966950655, 0.0006967399967834353, 0.46799537539482117, 0.005682948045432568, 0.4728659689426422, 0.0019166098209097981, 0.013488083146512508, 0.0014889542944729328, 0.0010940809734165668], [0.004901896696537733, 0.0051522161811590195, 0.00925877969712019, 0.0033241629134863615, 0.004646445624530315, 0.0012139775790274143, 0.0007867084932513535, 0.0005256670992821455, 0.0003058931033592671, 0.0027224866207689047, 0.0011244597844779491, 0.001597885275259614, 0.0030683595687150955, 0.0010087640257552266, 0.017563384026288986, 0.0005729681579396129, 0.07078557461500168, 0.0052031767554581165, 0.5008592009544373, 0.005808450281620026, 0.30835360288619995, 0.010037598200142384, 0.03855695575475693, 0.002621286315843463], [0.0005833529867231846, 0.00030121137388050556, 0.002359499456360936, 0.001589720486663282, 0.0036789593286812305, 0.0014612622326239944, 0.0018594545545056462, 0.0030951949302107096, 0.0006982979830354452, 0.0009507957147434354, 0.0011473593767732382, 0.001232491573318839, 0.00025493119028396904, 0.00032719236332923174, 
0.0006873178645037115, 0.0012008203193545341, 0.001175577868707478, 0.028555549681186676, 0.003586023347452283, 0.8136497735977173, 0.004873383790254593, 0.11703049391508102, 0.005002783611416817, 0.00469836313277483], [0.005025045946240425, 0.01862274296581745, 0.016100125387310982, 0.0024122935719788074, 0.0026296309661120176, 0.0034814151003956795, 0.006479276344180107, 0.0031890443060547113, 0.0004795632266905159, 0.007059089373797178, 0.0004505925753619522, 0.0035489306319504976, 0.005678058601915836, 0.0024892096407711506, 0.0058579109609127045, 0.000334842101437971, 0.002890333067625761, 0.002068981295451522, 0.24180495738983154, 0.006085576489567757, 0.5276426076889038, 0.03028440661728382, 0.09908973425626755, 0.006295736879110336], [0.0006239608628675342, 0.0010187061270698905, 0.008264495059847832, 0.004431003704667091, 0.004471987020224333, 0.002363055245950818, 0.004685568157583475, 0.002719455398619175, 0.0016832553083077073, 0.00015388532483484596, 0.0008936995291151106, 0.0002723880752455443, 0.0005251271068118513, 0.00027996551943942904, 0.0031628275755792856, 0.004563149530440569, 0.0006927828653715551, 0.004841150250285864, 0.00114941515494138, 0.09456675499677658, 0.005987474229186773, 0.5722424387931824, 0.01391004677861929, 0.2664973735809326], [0.017284950241446495, 0.013339528813958168, 0.028274795040488243, 0.006540087517350912, 0.029317794367671013, 0.006112768780440092, 0.03702850267291069, 0.040293559432029724, 0.009112573228776455, 0.012600786983966827, 0.006561080925166607, 0.015464117750525475, 0.014698371291160583, 0.010358540341258049, 0.03193448856472969, 0.007718951907008886, 0.014181969687342644, 0.01630707085132599, 0.03979339450597763, 0.03888218477368355, 0.09647706151008606, 0.025630556046962738, 0.4657244384288788, 0.016362471505999565]], [[0.02703859657049179, 0.01672639138996601, 0.05082635581493378, 0.017601214349269867, 0.033871881663799286, 0.02016550302505493, 0.049165140837430954, 0.09673435240983963, 
0.0656290203332901, 0.053858377039432526, 0.03937919810414314, 0.017896253615617752, 0.0458114892244339, 0.057815805077552795, 0.07430478930473328, 0.03496570512652397, 0.01327573973685503, 0.06687159836292267, 0.0577755831182003, 0.05817895755171776, 0.02175319194793701, 0.030032463371753693, 0.033461734652519226, 0.016860537230968475], [0.017516113817691803, 0.021245039999485016, 0.1041758805513382, 0.03329765424132347, 0.05239866301417351, 0.009247860871255398, 0.07098852843046188, 0.08854254335165024, 0.07719919830560684, 0.1016676053404808, 0.07404850423336029, 0.0641883909702301, 0.035184770822525024, 0.03136444464325905, 0.07758332788944244, 0.03382422402501106, 0.005474430974572897, 0.013986297883093357, 0.010209738276898861, 0.01974002830684185, 0.009786482900381088, 0.024385971948504448, 0.014421183615922928, 0.009523089043796062], [0.03539532050490379, 0.06907296925783157, 0.018403418362140656, 0.0053923167288303375, 0.008711506612598896, 0.016704626381397247, 0.007305896375328302, 0.007252044510096312, 0.010524573735892773, 0.015258201397955418, 0.030144287273287773, 0.024655381217598915, 0.030192963778972626, 0.19991077482700348, 0.07143058627843857, 0.03356381505727768, 0.06700505316257477, 0.11029313504695892, 0.07457809150218964, 0.018223894760012627, 0.05600089952349663, 0.020172277465462685, 0.036077212542295456, 0.03373078629374504], [0.004353268072009087, 0.006782354786992073, 0.026531057432293892, 0.006372067611664534, 0.030505813658237457, 0.005598739255219698, 0.01823139190673828, 0.4106789827346802, 0.00936783105134964, 0.01762971840798855, 0.032269228249788284, 0.007994906045496464, 0.02775733917951584, 0.01255231536924839, 0.01578463241457939, 0.009852810762822628, 0.00033843747223727405, 0.010865806601941586, 0.008790896274149418, 0.3078921437263489, 0.004196890629827976, 0.012049296870827675, 0.00837713573127985, 0.005226988811045885], [0.0514773465692997, 0.02966010756790638, 0.03842241317033768, 0.06001311168074608, 
0.012010370381176472, 0.04357780143618584, 0.06322558224201202, 0.08946872502565384, 0.061046019196510315, 0.2375672310590744, 0.041106536984443665, 0.03273535892367363, 0.014255058951675892, 0.020448651164770126, 0.01226652693003416, 0.017423540353775024, 0.0073634046129882336, 0.015524381771683693, 0.028817590326070786, 0.027428558096289635, 0.007317529525607824, 0.05927696451544762, 0.017460504546761513, 0.01210673339664936], [0.008915907703340054, 0.022419050335884094, 0.0302151869982481, 0.07600444555282593, 0.011720329523086548, 0.02712557278573513, 0.09626726061105728, 0.3482580780982971, 0.02552769146859646, 0.10733744502067566, 0.017000995576381683, 0.04212388023734093, 0.04415613040328026, 0.006546743214130402, 0.015941888093948364, 0.014048154465854168, 0.0011271745897829533, 0.005210287868976593, 0.005949507467448711, 0.01820964552462101, 0.0011310490081086755, 0.05882396548986435, 0.004454738460481167, 0.011484784074127674], [0.00537040876224637, 0.00852535106241703, 0.03700622543692589, 0.009508252143859863, 0.0026192760560661554, 0.00713829742744565, 0.14731259644031525, 0.29035162925720215, 0.1879209727048874, 0.10680414736270905, 0.03341070935130119, 0.040661394596099854, 0.029183445498347282, 0.0071402378380298615, 0.016808461397886276, 0.007298568729311228, 0.0008841899107210338, 0.016703380271792412, 0.010862801223993301, 0.011975622735917568, 0.0023163247387856245, 0.007587164640426636, 0.0034214507322758436, 0.00918920710682869], [0.006602777633816004, 0.013304116204380989, 0.013803629204630852, 0.006862284615635872, 0.0053022997453808784, 0.03732534125447273, 0.06003939360380173, 0.02565467730164528, 0.3706296384334564, 0.2453511655330658, 0.030717499554157257, 0.022028852254152298, 0.06679283827543259, 0.014533153735101223, 0.0158474650233984, 0.0027993526309728622, 0.003983175382018089, 0.022371243685483932, 0.019455188885331154, 0.0013138331705704331, 0.0017572061624377966, 0.007602367550134659, 0.0029875938780605793, 
0.002934873104095459], [0.018094433471560478, 0.018540555611252785, 0.04337028041481972, 0.014240880496799946, 0.030066825449466705, 0.023383062332868576, 0.28671762347221375, 0.05579095333814621, 0.1023380383849144, 0.10652703791856766, 0.06739833205938339, 0.0684865266084671, 0.029793912544846535, 0.03604437783360481, 0.03847609460353851, 0.015412325039505959, 0.001738967141136527, 0.007170377764850855, 0.007230129558593035, 0.0025356898549944162, 0.006739737931638956, 0.009991941042244434, 0.00579115329310298, 0.004120738245546818], [0.005350831430405378, 0.005953433457762003, 0.024565650150179863, 0.010428723879158497, 0.00456323241814971, 0.010045217350125313, 0.05414076894521713, 0.375232458114624, 0.046899136155843735, 0.1546710729598999, 0.07546474039554596, 0.03896743804216385, 0.052482880651950836, 0.007180359214544296, 0.06132902204990387, 0.014797660522162914, 0.0007276780088432133, 0.01830960251390934, 0.004761947318911552, 0.007283939514309168, 0.0016080618370324373, 0.01916923001408577, 0.0032903924584388733, 0.0027765214908868074], [0.01186602097004652, 0.027599729597568512, 0.038925252854824066, 0.013756037689745426, 0.0019489424303174019, 0.020499616861343384, 0.022697489708662033, 0.043820302933454514, 0.02905644103884697, 0.076581671833992, 0.03313283249735832, 0.0414288304746151, 0.2349117398262024, 0.08294572681188583, 0.17007872462272644, 0.04288975149393082, 0.007202619686722755, 0.02981899492442608, 0.012988559901714325, 0.008623647503554821, 0.004331439267843962, 0.017019610852003098, 0.014033131301403046, 0.013842913322150707], [0.0031476698350161314, 0.008463547565042973, 0.03226882591843605, 0.0024302301462739706, 0.0048124357126653194, 0.0035598513204604387, 0.00861453264951706, 0.025173841044306755, 0.017369752749800682, 0.0504082553088665, 0.12061767280101776, 0.01641857996582985, 0.41074442863464355, 0.06047436222434044, 0.16538798809051514, 0.015542160719633102, 0.0068549225106835365, 0.013013189658522606, 0.006796826608479023, 
0.006502860225737095, 0.0029024016112089157, 0.005376932676881552, 0.011248057708144188, 0.0018705782713368535], [0.0075231147930026054, 0.014733902178704739, 0.04657052457332611, 0.00375565979629755, 0.0027891071513295174, 0.006254573352634907, 0.0069873095490038395, 0.03500434011220932, 0.07689543813467026, 0.10916585475206375, 0.05559484288096428, 0.04115833714604378, 0.12424596399068832, 0.13588935136795044, 0.14503054320812225, 0.04322505742311478, 0.023008223623037338, 0.08239022642374039, 0.010217467322945595, 0.00971250794827938, 0.004669103771448135, 0.0030710718128830194, 0.004810159094631672, 0.007297332864254713], [0.02012629620730877, 0.021882543340325356, 0.0455753318965435, 0.01598350517451763, 0.01009273063391447, 0.0077710384503006935, 0.03051232360303402, 0.04597490653395653, 0.0837022140622139, 0.05992259457707405, 0.08733680844306946, 0.04344193637371063, 0.030608762055635452, 0.035264041274785995, 0.3231031000614166, 0.04250996187329292, 0.015027480199933052, 0.018982429057359695, 0.018473608419299126, 0.009106325916945934, 0.006225219462066889, 0.012435190379619598, 0.012063110247254372, 0.0038785552605986595], [0.009017778560519218, 0.01901455968618393, 0.018009690567851067, 0.002448579529300332, 0.0016946085961535573, 0.007906123995780945, 0.004314210265874863, 0.024886807426810265, 0.013212469406425953, 0.045721180737018585, 0.022013701498508453, 0.04261372238397598, 0.1395924836397171, 0.15735994279384613, 0.05945555865764618, 0.02979062683880329, 0.06315948069095612, 0.1741572469472885, 0.03754069656133652, 0.0509624183177948, 0.0227705929428339, 0.018789466470479965, 0.014300044625997543, 0.021267998963594437], [0.0006762910634279251, 0.0022935671731829643, 0.004746744409203529, 0.00034855384728871286, 0.0001634370710235089, 0.00032777205342426896, 0.00018614117288962007, 0.02500550076365471, 0.0014264563797041774, 0.002998140174895525, 0.00393709447234869, 0.004154981579631567, 0.06640208512544632, 0.02728031761944294, 
0.03249038755893707, 0.00702145230025053, 0.02515111118555069, 0.048397600650787354, 0.010658406652510166, 0.7088426947593689, 0.01195836067199707, 0.0031403014436364174, 0.003950058948248625, 0.008442508056759834], [0.011691943742334843, 0.012372874654829502, 0.015798017382621765, 0.010507948696613312, 0.0027631197590380907, 0.013505452312529087, 0.005674378480762243, 0.05241209641098976, 0.026928238570690155, 0.08699612319469452, 0.01335303857922554, 0.025473617017269135, 0.047397345304489136, 0.08067610114812851, 0.028878524899482727, 0.038577400147914886, 0.029461558908224106, 0.13741885125637054, 0.028398334980010986, 0.24730044603347778, 0.02263832278549671, 0.03402819484472275, 0.010913820937275887, 0.016834355890750885], [0.014634974300861359, 0.015217545442283154, 0.020509647205471992, 0.01358384545892477, 0.008751807734370232, 0.006667179986834526, 0.0059771849773824215, 0.07820812612771988, 0.005551627371460199, 0.02760174870491028, 0.022500913590192795, 0.033580683171749115, 0.03881732374429703, 0.021049682050943375, 0.07278414070606232, 0.024329954758286476, 0.016488030552864075, 0.020093636587262154, 0.04563440382480621, 0.3207828998565674, 0.020029786974191666, 0.11550536751747131, 0.02391325682401657, 0.02778625674545765], [0.003196379402652383, 0.005580044351518154, 0.01750207506120205, 0.0020715147256851196, 0.0013164780102670193, 0.001554305898025632, 0.006498999893665314, 0.09043418616056442, 0.017225749790668488, 0.006753728725016117, 0.009675558656454086, 0.015771761536598206, 0.01678040437400341, 0.02180170826613903, 0.04024870693683624, 0.013399829156696796, 0.005955891218036413, 0.07774243503808975, 0.021125473082065582, 0.5018184185028076, 0.051616378128528595, 0.018575279042124748, 0.018737122416496277, 0.03461763635277748], [0.012874328531324863, 0.012916233390569687, 0.022793669253587723, 0.004761595278978348, 0.004534109961241484, 0.00900179985910654, 0.004119632299989462, 0.007315461989492178, 0.007802996318787336, 
0.022124813869595528, 0.04136965796351433, 0.015566867776215076, 0.03320403769612312, 0.03634029999375343, 0.1517428159713745, 0.01850098744034767, 0.03870721906423569, 0.08354011923074722, 0.06831406056880951, 0.048262644559144974, 0.21997812390327454, 0.038227379322052, 0.07343526184558868, 0.02456582710146904], [0.011382071301341057, 0.015264932997524738, 0.025776250287890434, 0.003190363757312298, 0.01613348349928856, 0.0037343159783631563, 0.008655370213091373, 0.028381360694766045, 0.011401534080505371, 0.005176024977117777, 0.02114655077457428, 0.017427755519747734, 0.027880476787686348, 0.05000115558505058, 0.0566716194152832, 0.02232777699828148, 0.057379428297281265, 0.07154744118452072, 0.065787672996521, 0.16395263373851776, 0.11131139099597931, 0.04088450223207474, 0.09564747661352158, 0.06893841177225113], [0.008001764304935932, 0.005858518183231354, 0.012160349637269974, 0.006949397269636393, 0.003076865803450346, 0.006484643090516329, 0.008783242665231228, 0.1449359804391861, 0.01793661154806614, 0.030351504683494568, 0.009507489390671253, 0.009076807647943497, 0.021395057439804077, 0.0058720167726278305, 0.02348736859858036, 0.018646493554115295, 0.008921676315367222, 0.28192153573036194, 0.04687130078673363, 0.21643871068954468, 0.020311275497078896, 0.03437425196170807, 0.02159113623201847, 0.037045978009700775], [0.020004138350486755, 0.024079615250229836, 0.019402002915740013, 0.010498632676899433, 0.006930164527148008, 0.005408950615674257, 0.002797874854877591, 0.01770990714430809, 0.002546515315771103, 0.005534319207072258, 0.010351220145821571, 0.005988758988678455, 0.012040969915688038, 0.015627555549144745, 0.03742412477731705, 0.027166832238435745, 0.03945783153176308, 0.0563199408352375, 0.061259228736162186, 0.39007768034935, 0.04690517485141754, 0.03905278816819191, 0.066676564514637, 0.07673925906419754], [0.022694643586874008, 0.01691923476755619, 0.041600968688726425, 0.006740243639796972, 0.024939948692917824, 
0.004617534577846527, 0.005217378027737141, 0.023239364847540855, 0.008341366425156593, 0.009366383776068687, 0.04258549585938454, 0.010610519908368587, 0.017757084220647812, 0.019083766266703606, 0.05815267190337181, 0.020042704418301582, 0.052197620272636414, 0.05266466736793518, 0.05341299623250961, 0.24806994199752808, 0.10319642722606659, 0.033054009079933167, 0.096622034907341, 0.028873000293970108]], [[0.039607733488082886, 0.03536931425333023, 0.07658465206623077, 0.04303257539868355, 0.058567892760038376, 0.03462882712483406, 0.04951738193631172, 0.016818655654788017, 0.05135660991072655, 0.05616849660873413, 0.03372275084257126, 0.06580345332622528, 0.05752340331673622, 0.05673551559448242, 0.035652048885822296, 0.03278655186295509, 0.03905467689037323, 0.02954220026731491, 0.04194045066833496, 0.015073884278535843, 0.029003093019127846, 0.04823656752705574, 0.017767341807484627, 0.035505905747413635], [0.029848678037524223, 0.06148405373096466, 0.06697716563940048, 0.054699547588825226, 0.05907110869884491, 0.041370753198862076, 0.036793746054172516, 0.02310461923480034, 0.08032361418008804, 0.033130861818790436, 0.03492508456110954, 0.03518173098564148, 0.023567862808704376, 0.0645672008395195, 0.022587278857827187, 0.03412715718150139, 0.03782971575856209, 0.030410058796405792, 0.03463001921772957, 0.024459071457386017, 0.06616667658090591, 0.05379891395568848, 0.02471039816737175, 0.026234736666083336], [0.02018905058503151, 0.026830976828932762, 0.37626177072525024, 0.11489327251911163, 0.18788255751132965, 0.08712229132652283, 0.009820585139095783, 0.003150043310597539, 0.006738572381436825, 0.014962323941290379, 0.0008461562683805823, 0.017651673406362534, 0.01367176789790392, 0.018705522641539574, 0.004700292367488146, 0.0163496695458889, 0.02322169952094555, 0.01677182875573635, 0.007151409052312374, 0.00359390489757061, 0.012782435864210129, 0.009523862972855568, 0.0013525169342756271, 0.005825763568282127], [0.0010623226407915354, 
0.002982367994263768, 0.966486394405365, 0.0012075083795934916, 0.010280906222760677, 0.009393028914928436, 0.0017793525476008654, 0.0004008370160590857, 5.839059303980321e-05, 0.001113938633352518, 2.780419890768826e-06, 0.00030353065812960267, 9.647633123677224e-05, 0.0018738532671704888, 0.00011480778630357236, 4.05443825002294e-05, 0.00012553292617667466, 0.00026379601331427693, 0.00018858243129216135, 0.00041913942550309, 8.284837531391531e-05, 0.0014374471502378583, 5.957191660854733e-06, 0.00027976103592664003], [0.0006099499296396971, 0.0013372857356444001, 0.13256236910820007, 0.057539425790309906, 0.02116267755627632, 0.7782805562019348, 0.0002883325796574354, 0.0006779131945222616, 0.004082402214407921, 0.0005254417774267495, 7.809890666976571e-05, 0.0007095023756846786, 0.00023302533372770995, 0.0005809114663861692, 0.0002945291926153004, 9.267870336771011e-05, 0.00013932943693362176, 0.00021724410180468112, 3.2147145248018205e-05, 0.00011107314639957622, 6.107086664997041e-05, 0.00010614636266836897, 7.269441266544163e-05, 0.00020523369312286377], [0.01741407997906208, 0.01856810972094536, 0.42543157935142517, 0.026386642828583717, 0.08278072625398636, 0.1314731389284134, 0.013297018595039845, 0.005928136873990297, 0.050298161804676056, 0.010869216173887253, 0.014674903824925423, 0.05453452095389366, 0.004643081221729517, 0.019990423694252968, 0.01541033573448658, 0.002245474373921752, 0.003428044728934765, 0.005663315299898386, 0.008381315506994724, 0.014026056043803692, 0.006643933244049549, 0.015884269028902054, 0.01619582250714302, 0.03583161160349846], [0.002861554268747568, 0.005259277299046516, 0.007828882895410061, 0.10853175073862076, 0.00530166644603014, 0.7074840664863586, 0.0028992488514631987, 0.010716424323618412, 0.0990002453327179, 0.007293408270925283, 0.0066763246431946754, 0.0036874369252473116, 0.0030344368424266577, 0.004578243941068649, 0.0015349462628364563, 0.004521591123193502, 0.001965489936992526, 0.007020077668130398, 
0.0006133473361842334, 0.0017502516275271773, 0.0006459844880737364, 0.0030853883363306522, 0.00204846472479403, 0.001661485992372036], [0.0002070654882118106, 0.0003281007520854473, 0.0019497681641951203, 0.16723382472991943, 0.002115407958626747, 0.8101412057876587, 7.00369564583525e-05, 0.0007360474555753171, 0.013027239590883255, 0.0005333389854058623, 0.00033019413240253925, 0.0003448444767855108, 0.0003054917906410992, 0.000375989853637293, 0.00011509145406307653, 0.0007161767571233213, 0.00034929075627587736, 0.0006094170385040343, 2.072815186693333e-05, 7.089720747899264e-05, 2.50704943027813e-05, 0.00016698837862350047, 9.624774975236505e-05, 0.00013152346946299076], [0.003098880872130394, 0.009779969230294228, 0.008141648955643177, 0.06061221659183502, 0.015591896139085293, 0.2340194433927536, 0.0075678699649870396, 0.39361611008644104, 0.02345862239599228, 0.040581658482551575, 0.037248168140649796, 0.008083767257630825, 0.06375490874052048, 0.006484936457127333, 0.014481666497886181, 0.025416741147637367, 0.0058930073864758015, 0.01257232390344143, 0.0018307658610865474, 0.007416080217808485, 0.0012084650807082653, 0.00493775587528944, 0.010901217348873615, 0.003301857504993677], [0.015105457976460457, 0.031138475984334946, 0.14610399305820465, 0.0026034079492092133, 0.006468450650572777, 0.03295037895441055, 0.014437837526202202, 0.12005197256803513, 0.12398842722177505, 0.08627337217330933, 0.1411156952381134, 0.026797372847795486, 0.021175026893615723, 0.021087775006890297, 0.06742298603057861, 0.0038954736664891243, 0.008607257157564163, 0.007434427738189697, 0.005682363640516996, 0.009664785116910934, 0.006677664816379547, 0.03471605107188225, 0.04685095697641373, 0.01975039578974247], [0.010593047365546227, 0.010739867575466633, 0.05702624469995499, 0.00041220997809432447, 0.0015023979358375072, 0.0009385565062984824, 0.015115432441234589, 0.0677577331662178, 0.005363296251744032, 0.1251462697982788, 0.12635326385498047, 0.02754429168999195, 
0.08906897157430649, 0.03876635059714317, 0.32473793625831604, 0.01074633002281189, 0.021279966458678246, 0.0035989475436508656, 0.007331258617341518, 0.0067289299331605434, 0.013216378167271614, 0.00811395887285471, 0.019965853542089462, 0.007952533662319183], [0.012244106270372868, 0.024041246622800827, 0.01920875534415245, 0.022841138765215874, 0.0024904669262468815, 0.07559852302074432, 0.004565137438476086, 0.21629515290260315, 0.006808259058743715, 0.16023020446300507, 0.09416552633047104, 0.015865584835410118, 0.2039085328578949, 0.02542888931930065, 0.02798936888575554, 0.02047768421471119, 0.009708931669592857, 0.016746830195188522, 0.0020125126466155052, 0.006246791686862707, 0.004651014227420092, 0.010290581732988358, 0.015090183354914188, 0.003094507846981287], [0.04498300328850746, 0.03220139443874359, 0.0339878648519516, 0.0676887184381485, 0.008523927070200443, 0.10639648884534836, 0.01695019006729126, 0.06323417276144028, 0.05943436548113823, 0.05773409828543663, 0.08846337348222733, 0.04439851641654968, 0.07419778406620026, 0.0476478636264801, 0.04110806807875633, 0.03259601444005966, 0.02761712484061718, 0.018860360607504845, 0.013960395939648151, 0.022943750023841858, 0.02239665575325489, 0.03226887434720993, 0.02524918131530285, 0.01715785637497902], [0.018324561417102814, 0.022765839472413063, 0.028208497911691666, 0.01184710580855608, 0.005171327386051416, 0.012249778024852276, 0.008928864262998104, 0.015819482505321503, 0.020720256492495537, 0.03318203240633011, 0.04775823652744293, 0.04030653089284897, 0.14931116998195648, 0.04466591030359268, 0.35184869170188904, 0.030484285205602646, 0.038502324372529984, 0.02375178039073944, 0.007654052227735519, 0.0033564637415111065, 0.04014093801379204, 0.013516117818653584, 0.02071959525346756, 0.01076614297926426], [0.06776005029678345, 0.04105527698993683, 0.039375267922878265, 0.0009677361231297255, 0.0011746595846489072, 0.0035139480605721474, 0.03532091900706291, 0.006512404885143042, 
0.00785661768168211, 0.07438148558139801, 0.05698239430785179, 0.03663153573870659, 0.032575853168964386, 0.15565526485443115, 0.0807977169752121, 0.018562814220786095, 0.0505068339407444, 0.014853446744382381, 0.04367045313119888, 0.018913935869932175, 0.06773567944765091, 0.09143196791410446, 0.0335952527821064, 0.020168565213680267], [0.06973010301589966, 0.06024301052093506, 0.058292340487241745, 0.0054946173913776875, 0.00192832772154361, 0.0160963237285614, 0.029658274725079536, 0.007843462750315666, 0.006826245691627264, 0.049523256719112396, 0.017875052988529205, 0.04068993404507637, 0.01781676709651947, 0.13152366876602173, 0.081678606569767, 0.02867073379456997, 0.04768923297524452, 0.04441245645284653, 0.05088568106293678, 0.02259085886180401, 0.03190666437149048, 0.11214913427829742, 0.025010429322719574, 0.04146481677889824], [0.009172676131129265, 0.027247941121459007, 0.46918460726737976, 0.04020821675658226, 0.026698917150497437, 0.13090136647224426, 0.005939210765063763, 0.011238335631787777, 0.014110115356743336, 0.02104114554822445, 0.008970295079052448, 0.028663916513323784, 0.054022595286369324, 0.03310992568731308, 0.06228947266936302, 0.008045827969908714, 0.013272524811327457, 0.0066447085700929165, 0.0018259919015690684, 0.0021883875597268343, 0.009813525713980198, 0.0031508258543908596, 0.0056021385826170444, 0.006657312158495188], [0.05254676565527916, 0.03222344070672989, 0.02569274790585041, 0.0010239563416689634, 0.0012810073094442487, 0.0015900750877335668, 0.025115706026554108, 0.0033664063084870577, 0.009415225125849247, 0.015242827124893665, 0.048512112349271774, 0.04258070886135101, 0.007352378219366074, 0.08672652393579483, 0.08963204175233841, 0.030049454420804977, 0.0472705103456974, 0.023896466940641403, 0.14881515502929688, 0.04961550608277321, 0.0729612484574318, 0.0587189644575119, 0.05244053155183792, 0.07393023371696472], [0.0328693687915802, 0.04319300130009651, 0.02942880429327488, 0.014764176681637764, 
0.00871001835912466, 0.01150229200720787, 0.024310950189828873, 0.012833398766815662, 0.03191725164651871, 0.028269115835428238, 0.07486086338758469, 0.02897213213145733, 0.024070782586932182, 0.0560368075966835, 0.12298433482646942, 0.053426820784807205, 0.03646932914853096, 0.054177574813365936, 0.02857411839067936, 0.030106965452432632, 0.08038285374641418, 0.04757973551750183, 0.08739251643419266, 0.037166789174079895], [0.020870203152298927, 0.031708624213933945, 0.12680160999298096, 0.0360335074365139, 0.005348767153918743, 0.023204006254673004, 0.006500779185444117, 0.0077880253084003925, 0.010434857569634914, 0.02884586527943611, 0.03478240966796875, 0.033167265355587006, 0.018610218539834023, 0.08780866861343384, 0.06444652378559113, 0.11724511533975601, 0.02654922753572464, 0.07245441526174545, 0.026278197765350342, 0.02003738097846508, 0.09270317852497101, 0.03546193987131119, 0.04309296980500221, 0.029826253652572632], [0.03060328960418701, 0.024286441504955292, 0.0206963662058115, 0.0398944616317749, 0.027318306267261505, 0.01589318923652172, 0.027796978130936623, 0.013014115393161774, 0.017148053273558617, 0.027871835976839066, 0.04396307095885277, 0.03687147796154022, 0.023844484239816666, 0.030169086530804634, 0.04282607510685921, 0.05923499912023544, 0.06057173013687134, 0.07444695383310318, 0.08007123321294785, 0.0700341984629631, 0.05899174511432648, 0.047879498451948166, 0.07468339055776596, 0.05188904330134392], [0.03680902719497681, 0.03637406602501869, 0.10774548351764679, 0.0008553644875064492, 0.0032541437540203333, 0.0019331302028149366, 0.04664193093776703, 0.007491250056773424, 0.0024522177409380674, 0.031142545863986015, 0.026702800765633583, 0.016010504215955734, 0.012448897585272789, 0.05236091464757919, 0.07299438863992691, 0.010746268555521965, 0.010605890303850174, 0.06883375346660614, 0.08436472713947296, 0.06766091287136078, 0.06767648458480835, 0.10621567070484161, 0.080161914229393, 0.048517752438783646], [0.054824747145175934, 
0.03058644011616707, 0.10513477027416229, 0.0011129033518955112, 0.003525319742038846, 0.001121917157433927, 0.05490529164671898, 0.01209670677781105, 0.007428795099258423, 0.051688361912965775, 0.045846495777368546, 0.030475476756691933, 0.015041593462228775, 0.05452875792980194, 0.06495744735002518, 0.015769144520163536, 0.023255592212080956, 0.013476820662617683, 0.06624451279640198, 0.032046057283878326, 0.14288361370563507, 0.08731251955032349, 0.043270401656627655, 0.042466286569833755], [0.07060243934392929, 0.04715189337730408, 0.10231591761112213, 0.011694613844156265, 0.014982023276388645, 0.024998677894473076, 0.03749072924256325, 0.054576046764850616, 0.012082289904356003, 0.07473523914813995, 0.02538296952843666, 0.022879047319293022, 0.02583305537700653, 0.041649505496025085, 0.03983130306005478, 0.018882116302847862, 0.016730574890971184, 0.02283741720020771, 0.03178240358829498, 0.05883293226361275, 0.041112322360277176, 0.12990258634090424, 0.03427725285291672, 0.03943667933344841]], [[0.0738314613699913, 0.040088068693876266, 0.06733904778957367, 0.048215702176094055, 0.15014971792697906, 0.016561053693294525, 0.04737505316734314, 0.03173613175749779, 0.0730186253786087, 0.011965631507337093, 0.06412685662508011, 0.04834179952740669, 0.037316180765628815, 0.03772832825779915, 0.02763017639517784, 0.01866842992603779, 0.0464596152305603, 0.004645919427275658, 0.011272726580500603, 0.020928509533405304, 0.035005535930395126, 0.013038435950875282, 0.030757423490285873, 0.04379955679178238], [0.06643112748861313, 0.05546043813228607, 0.03779228404164314, 0.046085771173238754, 0.05355154350399971, 0.012287070043385029, 0.0607416070997715, 0.02578343078494072, 0.03545811027288437, 0.011789598502218723, 0.04225975647568703, 0.09869398921728134, 0.05876004695892334, 0.07884576171636581, 0.031606707721948624, 0.02097085863351822, 0.05948413908481598, 0.03074776753783226, 0.031011031940579414, 0.01850762963294983, 0.03241017833352089, 0.008553748950362206, 
0.027759192511439323, 0.05500825121998787], [0.09227404743432999, 0.06486936658620834, 0.08110400289297104, 0.1419483721256256, 0.09071498364210129, 0.018200233578681946, 0.08500368893146515, 0.014504133723676205, 0.06679294258356094, 0.0147174634039402, 0.05522897467017174, 0.040240198373794556, 0.017024753615260124, 0.05188451707363129, 0.041725922375917435, 0.009433547966182232, 0.026541482657194138, 0.006800093688070774, 0.007537134923040867, 0.006765525788068771, 0.016911165788769722, 0.006410330533981323, 0.02196394093334675, 0.021403079852461815], [0.03639883175492287, 0.02082228474318981, 0.06463950872421265, 0.03709087893366814, 0.025052495300769806, 0.03662008047103882, 0.0617300346493721, 0.062058113515377045, 0.014910684898495674, 0.02728644199669361, 0.017105232924222946, 0.027129707857966423, 0.016374893486499786, 0.03577738255262375, 0.02552351914346218, 0.041449591517448425, 0.013907255604863167, 0.2554090619087219, 0.016319304704666138, 0.06550465524196625, 0.014067554846405983, 0.034961502999067307, 0.009941039606928825, 0.03991985693573952], [0.0033038894180208445, 0.0018108240328729153, 0.0013138955691829324, 0.9756816029548645, 0.004695202223956585, 0.0015791907208040357, 0.0005553778610192239, 0.0006478069117292762, 0.0008246484794653952, 0.0009108746889978647, 0.00066944066202268, 0.0005507204332388937, 0.00024206453235819936, 0.0006909735384397209, 0.000279106548987329, 0.004143883008509874, 0.0001727238850435242, 0.0002173000102629885, 2.598998798930552e-05, 0.00017527145973872393, 0.00018191069830209017, 0.00040725633152760565, 0.00023031310411170125, 0.0006896138074807823], [0.0338159017264843, 0.030329974368214607, 0.01647198013961315, 0.6158331036567688, 0.18697205185890198, 0.0026433407329022884, 0.010348351672291756, 0.0037142354995012283, 0.0360553003847599, 0.0025434617418795824, 0.005452561192214489, 0.00892479345202446, 0.005146427545696497, 0.009009003639221191, 0.003722851164638996, 0.00365378987044096, 0.00427134009078145, 
0.0007777179125696421, 0.0003675154293887317, 0.0006025088950991631, 0.004176270216703415, 0.0014585416065528989, 0.0008926691371016204, 0.01281627919524908], [0.061484575271606445, 0.03225281834602356, 0.0511750653386116, 0.03575573116540909, 0.11834963411092758, 0.09368386119604111, 0.02876114472746849, 0.05310206860303879, 0.11188770830631256, 0.024186182767152786, 0.058517683297395706, 0.04735235497355461, 0.04095655679702759, 0.02646247297525406, 0.016534525901079178, 0.028294546529650688, 0.019184015691280365, 0.0032255006954073906, 0.013679473660886288, 0.013574501499533653, 0.025391576811671257, 0.03037385083734989, 0.04298953339457512, 0.022824665531516075], [0.06524144113063812, 0.04722035676240921, 0.05144186690449715, 0.4597463309764862, 0.23596824705600739, 0.006534748710691929, 0.0152991758659482, 0.008439971134066582, 0.02691132016479969, 0.006888409145176411, 0.021322786808013916, 0.02016444504261017, 0.004678189288824797, 0.008553240448236465, 0.004161381628364325, 0.002550289500504732, 0.002224820898845792, 0.0007787555223330855, 0.00038476227200590074, 0.0004072840674780309, 0.0021035184618085623, 0.0017152894288301468, 0.0024768419098109007, 0.004786476492881775], [0.026564927771687508, 0.06705231964588165, 0.029266441240906715, 0.016304267570376396, 0.0840240865945816, 0.046030718833208084, 0.0826721265912056, 0.26703691482543945, 0.05480283871293068, 0.05368093401193619, 0.06058166176080704, 0.03210964798927307, 0.018305055797100067, 0.03139099106192589, 0.027011990547180176, 0.011121122166514397, 0.016580011695623398, 0.008383027277886868, 0.008347841911017895, 0.010430889204144478, 0.00580202741548419, 0.009456099942326546, 0.01974373683333397, 0.013300412334501743], [0.01800825260579586, 0.01744852028787136, 0.04902833700180054, 0.013211783021688461, 0.027471870183944702, 0.025751778855919838, 0.03571994975209236, 0.24407216906547546, 0.03509732335805893, 0.11188635230064392, 0.03298259526491165, 0.08901641517877579, 0.04438596963882446, 
0.016849137842655182, 0.022982077673077583, 0.03293919935822487, 0.012780913151800632, 0.012959638610482216, 0.009416606277227402, 0.08467516303062439, 0.007804171647876501, 0.03730931878089905, 0.006107242777943611, 0.012095311656594276], [0.003455354832112789, 0.01213790848851204, 0.009663446806371212, 1.7007801943691447e-05, 0.00559291522949934, 0.04720272123813629, 0.06470798701047897, 0.02980571985244751, 0.02964044362306595, 0.08215989172458649, 0.0989178866147995, 0.023844780400395393, 0.01844952069222927, 0.036723531782627106, 0.04441186413168907, 0.005466345697641373, 0.022998275235295296, 0.1364843249320984, 0.17771579325199127, 0.06120907887816429, 0.040331825613975525, 0.0035437571350485086, 0.04127679392695427, 0.004242747090756893], [0.016658127307891846, 0.022344090044498444, 0.09140025079250336, 0.0024795413482934237, 0.0522235669195652, 0.026464760303497314, 0.05011648312211037, 0.05021898075938225, 0.08371690660715103, 0.07200726121664047, 0.09780683368444443, 0.06907744705677032, 0.02871386893093586, 0.026568567380309105, 0.11823788285255432, 0.01510667148977518, 0.021790580824017525, 0.032410163432359695, 0.026520296931266785, 0.04441074654459953, 0.024939026683568954, 0.007925229147076607, 0.012723048217594624, 0.006139679346233606], [0.020134177058935165, 0.01596922241151333, 0.08324001729488373, 0.0019640016835182905, 0.03795035555958748, 0.014715954661369324, 0.05143406242132187, 0.032137516885995865, 0.03708094730973244, 0.025350557640194893, 0.05658086761832237, 0.13894858956336975, 0.04756180942058563, 0.04063710942864418, 0.13278436660766602, 0.01994568109512329, 0.05926235392689705, 0.04183756187558174, 0.039161067456007004, 0.051050636917352676, 0.017556805163621902, 0.00920196995139122, 0.016816403716802597, 0.008677888661623001], [0.07012484222650528, 0.04732619225978851, 0.03998512029647827, 0.013243419118225574, 0.04201997071504593, 0.008242937736213207, 0.03299794718623161, 0.01818227954208851, 0.0215609110891819, 
0.015695128589868546, 0.06918992102146149, 0.11127061396837234, 0.07049605995416641, 0.05100754275918007, 0.16616831719875336, 0.03216711804270744, 0.056151073426008224, 0.01359082106500864, 0.03269129991531372, 0.022754203528165817, 0.014950310811400414, 0.008902167901396751, 0.030364444479346275, 0.010917275212705135], [0.013837607577443123, 0.010949688032269478, 0.05482720956206322, 7.388208177872002e-05, 0.009427006356418133, 0.012187168002128601, 0.04709351435303688, 0.006007287185639143, 0.05256539583206177, 0.009347166866064072, 0.09248549491167068, 0.05733661353588104, 0.0468313992023468, 0.16423682868480682, 0.15653859078884125, 0.007466873154044151, 0.03403107449412346, 0.02730000764131546, 0.07681108266115189, 0.030538206920027733, 0.03021993674337864, 0.011059749871492386, 0.03484371304512024, 0.01398452091962099], [0.011519107036292553, 0.007222061511129141, 0.01608133316040039, 0.0021491306833922863, 0.0019375085830688477, 0.009957280941307545, 0.02462841384112835, 0.015494802966713905, 0.007600704208016396, 0.007763323839753866, 0.014571798965334892, 0.006494673900306225, 0.011641599237918854, 0.04074953496456146, 0.31658822298049927, 0.026113316416740417, 0.014470446854829788, 0.29010793566703796, 0.0324561633169651, 0.04804912209510803, 0.011465718038380146, 0.027557916939258575, 0.02586839348077774, 0.029511582106351852], [0.028397273272275925, 0.01232057437300682, 0.042855385690927505, 0.009032746776938438, 0.00993234384804964, 0.02363046258687973, 0.024104110896587372, 0.013953838497400284, 0.01412756834179163, 0.013436046428978443, 0.03499222546815872, 0.02412961609661579, 0.016256393864750862, 0.023674746975302696, 0.06310716271400452, 0.18612483143806458, 0.016533609479665756, 0.14881910383701324, 0.04485750570893288, 0.1337457001209259, 0.023577040061354637, 0.03397178649902344, 0.03270537033677101, 0.02571457251906395], [0.028447629883885384, 0.013680722564458847, 0.020569199696183205, 0.0004271202487871051, 0.0020371561404317617, 
0.0045829215086996555, 0.030995694920420647, 0.014102267101407051, 0.013281886465847492, 0.005399501416832209, 0.018786687403917313, 0.014821702614426613, 0.017203984782099724, 0.033297087997198105, 0.07124493271112442, 0.015033012256026268, 0.04678124189376831, 0.1349441409111023, 0.22934700548648834, 0.13081258535385132, 0.048594359308481216, 0.03389114513993263, 0.045131415128707886, 0.026586614549160004], [0.032755352556705475, 0.018853874877095222, 0.026990516111254692, 0.004313352983444929, 0.012492701411247253, 0.022809937596321106, 0.02775229886174202, 0.046119630336761475, 0.024132607504725456, 0.03155822679400444, 0.05453499034047127, 0.017528580501675606, 0.017396148294210434, 0.009853334166109562, 0.03157588467001915, 0.022513246163725853, 0.03284094110131264, 0.1516200304031372, 0.13763722777366638, 0.11834356188774109, 0.04122070595622063, 0.04639531672000885, 0.056370824575424194, 0.014390695840120316], [0.07435733824968338, 0.029451271519064903, 0.0811595767736435, 0.01982004940509796, 0.02108561061322689, 0.014938141219317913, 0.029438000172376633, 0.012366357259452343, 0.02037815749645233, 0.018025370314717293, 0.05803104117512703, 0.020026840269565582, 0.012695586308836937, 0.023410512134432793, 0.06139848753809929, 0.019727015867829323, 0.03205786645412445, 0.07645393162965775, 0.07507984340190887, 0.038245294243097305, 0.07989727705717087, 0.05854320526123047, 0.09124120324850082, 0.03217202425003052], [0.01600085385143757, 0.019306905567646027, 0.033341895788908005, 0.002542163012549281, 0.009919191710650921, 0.03485408052802086, 0.05473216995596886, 0.044479671865701675, 0.01576976105570793, 0.034379687160253525, 0.029469406232237816, 0.023129448294639587, 0.020351415500044823, 0.034190982580184937, 0.062267325818538666, 0.03445405513048172, 0.03609774261713028, 0.09792649745941162, 0.08229156583547592, 0.18189536035060883, 0.02016255259513855, 0.03848979249596596, 0.04835430905222893, 0.025593237951397896], [0.004887537565082312, 
0.007354453206062317, 0.027191922068595886, 0.005942732095718384, 0.002600920619443059, 0.022219395264983177, 0.018254274502396584, 0.020083127543330193, 0.010276333428919315, 0.07721488177776337, 0.009987376630306244, 0.014814235270023346, 0.016715778037905693, 0.020582472905516624, 0.03105158545076847, 0.0516933798789978, 0.011615843512117863, 0.10706155747175217, 0.059248629957437515, 0.2912929058074951, 0.09923514723777771, 0.043543823063373566, 0.025393513962626457, 0.021738147363066673], [0.003489825641736388, 0.0018922288436442614, 0.003945999313145876, 1.0187355655943975e-05, 0.00039113237289711833, 0.014388930052518845, 0.016521329060196877, 0.0037964137736707926, 0.005682417191565037, 0.0020882785320281982, 0.010104739107191563, 0.0014621746959164739, 0.002331616822630167, 0.009168927557766438, 0.02419396862387657, 0.012944705784320831, 0.010016496293246746, 0.1994781345129013, 0.3592076599597931, 0.11474297195672989, 0.06671269983053207, 0.03550034388899803, 0.0903443917632103, 0.011584416963160038], [0.028953615576028824, 0.01008299458771944, 0.0400543250143528, 0.0013348560314625502, 0.006403060629963875, 0.02424914762377739, 0.02237357199192047, 0.02379726804792881, 0.014794941060245037, 0.0077782743610441685, 0.024790504947304726, 0.013465555384755135, 0.008173905313014984, 0.013823236338794231, 0.07164204120635986, 0.025461560115218163, 0.0280673298984766, 0.0872398167848587, 0.056689951568841934, 0.21760597825050354, 0.05035353824496269, 0.039387401193380356, 0.1610221266746521, 0.02245498262345791]], [[0.05772469937801361, 0.01785699650645256, 0.03858008608222008, 0.049059607088565826, 0.035157471895217896, 0.037686411291360855, 0.02734125591814518, 0.03650331124663353, 0.03812403976917267, 0.037230439484119415, 0.020644502714276314, 0.03837139531970024, 0.053240757435560226, 0.020667677745223045, 0.04461449757218361, 0.03219857066869736, 0.0393412820994854, 0.0635838583111763, 0.06195122376084328, 0.03903406858444214, 0.06992912292480469, 
0.04413424804806709, 0.03568970412015915, 0.0613347664475441], [0.044619474560022354, 0.011347807943820953, 0.011974857188761234, 0.034502822905778885, 0.010421490296721458, 0.01529239397495985, 0.029387040063738823, 0.01825781725347042, 0.019314836710691452, 0.013353826478123665, 0.01094763819128275, 0.02190352790057659, 0.030320806428790092, 0.03326335921883583, 0.02485935017466545, 0.06400679796934128, 0.026938682422041893, 0.07407370954751968, 0.13466934859752655, 0.07991917431354523, 0.14066796004772186, 0.05006439983844757, 0.036396000534296036, 0.06349684298038483], [0.02390729822218418, 0.002269284799695015, 0.011156812310218811, 0.014223545789718628, 0.003592365887016058, 0.008917135186493397, 0.012688535265624523, 0.009822065010666847, 0.006823393050581217, 0.005791848059743643, 0.012445596978068352, 0.00589120713993907, 0.0034955074079334736, 0.009664085693657398, 0.038211580365896225, 0.0903332531452179, 0.029665058478713036, 0.10764234513044357, 0.17516086995601654, 0.10203826427459717, 0.08329259604215622, 0.057820748537778854, 0.1224077045917511, 0.06273896992206573], [0.016538945958018303, 0.003881556447595358, 0.01607932150363922, 0.016804207116365433, 0.00910292100161314, 0.020436273887753487, 0.01994023099541664, 0.022194847464561462, 0.00946525763720274, 0.017033860087394714, 0.010552849620580673, 0.01528695784509182, 0.019651003181934357, 0.013859757222235203, 0.0284135565161705, 0.042590074241161346, 0.03584141284227371, 0.1286717802286148, 0.13444888591766357, 0.13436348736286163, 0.09601368755102158, 0.06577567756175995, 0.058021172881126404, 0.06503231823444366], [0.022392714396119118, 0.0027194905560463667, 0.00818886049091816, 0.015025215223431587, 0.0047485120594501495, 0.006518403999507427, 0.013685513287782669, 0.0048092082142829895, 0.006165609695017338, 0.0021061780862510204, 0.006782804615795612, 0.002597131999209523, 0.0041113547049462795, 0.013380688615143299, 0.03421904891729355, 0.05436829477548599, 0.03893100097775459, 
0.08542334288358688, 0.23729898035526276, 0.0629395842552185, 0.2030811607837677, 0.026033254340291023, 0.09007168561220169, 0.05440202355384827], [0.010776778683066368, 0.012508252635598183, 0.014779571443796158, 0.030826449394226074, 0.007896224968135357, 0.021075382828712463, 0.01918371394276619, 0.0125499926507473, 0.018543623387813568, 0.01422369945794344, 0.017012162134051323, 0.02141190692782402, 0.01932842843234539, 0.026502810418605804, 0.04159136489033699, 0.0695599764585495, 0.028999408707022667, 0.15067967772483826, 0.1315421462059021, 0.061697885394096375, 0.09992831200361252, 0.0410260371863842, 0.04940430074930191, 0.07895182818174362], [0.014995662495493889, 0.00414509791880846, 0.01706686057150364, 0.00905236043035984, 0.005950352642685175, 0.022610977292060852, 0.03442833200097084, 0.014315711334347725, 0.015573552809655666, 0.026476705446839333, 0.01819666102528572, 0.011003490537405014, 0.013845388777554035, 0.021727625280618668, 0.05480727553367615, 0.046352047473192215, 0.05428303778171539, 0.09932392835617065, 0.17188087105751038, 0.030806906521320343, 0.0678255632519722, 0.048924922943115234, 0.07661626487970352, 0.11979037523269653], [0.023785896599292755, 0.008682480081915855, 0.015179719775915146, 0.01903798244893551, 0.006518739741295576, 0.02227470837533474, 0.023610295727849007, 0.010392668657004833, 0.021028488874435425, 0.020802827551960945, 0.014801464043557644, 0.017007607966661453, 0.02197929471731186, 0.014953440055251122, 0.04588630422949791, 0.05187257379293442, 0.04047323763370514, 0.13251300156116486, 0.16950780153274536, 0.03501368314027786, 0.10456093400716782, 0.04418788477778435, 0.059720780700445175, 0.0762082189321518], [0.019153451547026634, 0.007702284958213568, 0.013837018050253391, 0.02330627664923668, 0.0027276284527033567, 0.010796694085001945, 0.01615450717508793, 0.012477675452828407, 0.010684353299438953, 0.008067801594734192, 0.005805949680507183, 0.013879399746656418, 0.012859742157161236, 
0.013039390556514263, 0.04148184135556221, 0.08407142013311386, 0.014301304705440998, 0.11397457867860794, 0.16507552564144135, 0.06522667407989502, 0.1253531128168106, 0.035789333283901215, 0.08095196634531021, 0.10328210145235062], [0.014762173406779766, 0.003234800649806857, 0.01116246823221445, 0.011306053027510643, 0.0025900588370859623, 0.008658348582684994, 0.022751187905669212, 0.010514292865991592, 0.006040335167199373, 0.006694147828966379, 0.008098273538053036, 0.005981341004371643, 0.00766708143055439, 0.0064109754748642445, 0.04349591210484505, 0.056907471269369125, 0.02635008469223976, 0.13011032342910767, 0.2580812871456146, 0.05923449620604515, 0.07395509630441666, 0.03476402163505554, 0.11706900596618652, 0.07416074723005295], [0.038664527237415314, 0.002855088096112013, 0.007602888625115156, 0.013149920850992203, 0.0051644123159348965, 0.010359317064285278, 0.009917406365275383, 0.006143857724964619, 0.007226176094263792, 0.004830851219594479, 0.012834346853196621, 0.003438100218772888, 0.004084022715687752, 0.016797786578536034, 0.02509629912674427, 0.03784355893731117, 0.0325351282954216, 0.10976247489452362, 0.16465072333812714, 0.07135981321334839, 0.14156733453273773, 0.04782147333025932, 0.17964741587638855, 0.0466470830142498], [0.045988794416189194, 0.0032398102339357138, 0.007552777882665396, 0.012383703142404556, 0.004137675277888775, 0.005343886092305183, 0.006042514927685261, 0.009658673778176308, 0.007218279875814915, 0.011877506040036678, 0.021083258092403412, 0.00819089263677597, 0.009933595545589924, 0.015192409977316856, 0.03222697600722313, 0.07472064346075058, 0.05495183914899826, 0.14903002977371216, 0.11766844987869263, 0.07081371545791626, 0.08759120106697083, 0.05887196958065033, 0.1205902248620987, 0.06569118797779083], [0.050550881773233414, 0.005067578982561827, 0.008814082480967045, 0.012439798563718796, 0.00409979373216629, 0.005959323141723871, 0.009160012938082218, 0.01118423417210579, 0.0066678994335234165, 
0.017701607197523117, 0.012562427669763565, 0.016006583347916603, 0.01500658132135868, 0.01885126903653145, 0.03810692951083183, 0.07656131684780121, 0.043024927377700806, 0.1195773035287857, 0.13405603170394897, 0.06893879175186157, 0.07418782263994217, 0.0721719041466713, 0.07207941263914108, 0.10722348839044571], [0.03739388659596443, 0.006168350111693144, 0.00902664102613926, 0.02941468171775341, 0.004831169731914997, 0.008964849635958672, 0.015522005036473274, 0.012400410138070583, 0.01072180550545454, 0.0042765079997479916, 0.007341167889535427, 0.007804198656231165, 0.00967743992805481, 0.014778634533286095, 0.02758220210671425, 0.09782113879919052, 0.018755359575152397, 0.06141999736428261, 0.16930748522281647, 0.12186210602521896, 0.180310919880867, 0.02666369639337063, 0.05761617422103882, 0.06033918634057045], [0.03504415974020958, 0.004392706323415041, 0.017267432063817978, 0.010275471955537796, 0.004991549998521805, 0.0109008913859725, 0.01181645505130291, 0.011678471229970455, 0.0063712759874761105, 0.01352598238736391, 0.01685519516468048, 0.010283323936164379, 0.007221993058919907, 0.01562614180147648, 0.051049333065748215, 0.047129757702350616, 0.045180585235357285, 0.09444508701562881, 0.15885832905769348, 0.0652298852801323, 0.07232480496168137, 0.07471944391727448, 0.1318952441215515, 0.08291643857955933], [0.03754059597849846, 0.004217840265482664, 0.01706215739250183, 0.01860419288277626, 0.005930120125412941, 0.013770516961812973, 0.010878235101699829, 0.021930046379566193, 0.00925840251147747, 0.01906256005167961, 0.012948192656040192, 0.00874898862093687, 0.00998871959745884, 0.012022261507809162, 0.03216071426868439, 0.04008913412690163, 0.02922568842768669, 0.12464214861392975, 0.11129927635192871, 0.18431462347507477, 0.10033746808767319, 0.06036479398608208, 0.06607484817504883, 0.04952853173017502], [0.05702696740627289, 0.006487166974693537, 0.012289025820791721, 0.015842048451304436, 0.003215731354430318, 0.006625736132264137, 
0.007100250106304884, 0.005779166240245104, 0.004819578491151333, 0.0034411607775837183, 0.007267378270626068, 0.004307721741497517, 0.006018306128680706, 0.016127170994877815, 0.028149373829364777, 0.06080656126141548, 0.02204790711402893, 0.11508171260356903, 0.12384132295846939, 0.11333955824375153, 0.18134842813014984, 0.0573606938123703, 0.07446993142366409, 0.0672072246670723], [0.0404120497405529, 0.009339975193142891, 0.012049315497279167, 0.027865149080753326, 0.003917608875781298, 0.014226442202925682, 0.012587418779730797, 0.014151349663734436, 0.007169964723289013, 0.006758755072951317, 0.007656296249479055, 0.0094848508015275, 0.009194505400955677, 0.011807886883616447, 0.03494597226381302, 0.08003036677837372, 0.015345696359872818, 0.09122582525014877, 0.11041796952486038, 0.15889590978622437, 0.1363348364830017, 0.04854349046945572, 0.06525306403636932, 0.0723852887749672], [0.020097142085433006, 0.004209454171359539, 0.01954452507197857, 0.012518924660980701, 0.011351373046636581, 0.01862790621817112, 0.019512180238962173, 0.01277462113648653, 0.009332885965704918, 0.027311963960528374, 0.019935112446546555, 0.0065279630944132805, 0.008634637109935284, 0.016370132565498352, 0.05433756113052368, 0.04009552299976349, 0.08610446751117706, 0.11183571070432663, 0.13185201585292816, 0.07594156265258789, 0.07864362001419067, 0.053602006286382675, 0.09824170172214508, 0.06259704381227493], [0.057769980281591415, 0.01857016794383526, 0.01343091856688261, 0.02793087437748909, 0.008226493373513222, 0.03346223384141922, 0.014422047883272171, 0.01160412561148405, 0.0156721044331789, 0.02069150283932686, 0.01040448248386383, 0.014124455861747265, 0.02050723135471344, 0.017496101558208466, 0.03334250673651695, 0.06733162701129913, 0.03458251804113388, 0.0997999981045723, 0.09795710444450378, 0.06313259899616241, 0.1349153220653534, 0.06793347001075745, 0.05354994907975197, 0.06314225494861603], [0.045873988419771194, 0.020186619833111763, 0.017957305535674095, 
0.0305064357817173, 0.004600078333169222, 0.014933987520635128, 0.009838257916271687, 0.008402290754020214, 0.011115815490484238, 0.006846048403531313, 0.00959035661071539, 0.013532878831028938, 0.017255321145057678, 0.02032538875937462, 0.054674096405506134, 0.07635901123285294, 0.027534445747733116, 0.06526120007038116, 0.08549293130636215, 0.06896814703941345, 0.20293372869491577, 0.03486654534935951, 0.0721215158700943, 0.08082357048988342], [0.030789362266659737, 0.004078610334545374, 0.012831066735088825, 0.014072609134018421, 0.00439415592700243, 0.004938360303640366, 0.018029896542429924, 0.011033104732632637, 0.00582413375377655, 0.004951178096234798, 0.004926706198602915, 0.00504196947440505, 0.006381570361554623, 0.007852076552808285, 0.050527364015579224, 0.06260412186384201, 0.03915474936366081, 0.06330545246601105, 0.20344704389572144, 0.132169708609581, 0.13713745772838593, 0.03603456914424896, 0.08066225051879883, 0.05981256812810898], [0.04702379181981087, 0.004140866920351982, 0.011350955814123154, 0.02047084830701351, 0.006363881751894951, 0.0077681830152869225, 0.009240607731044292, 0.007115424610674381, 0.010711288079619408, 0.009714704938232899, 0.021665319800376892, 0.006692619528621435, 0.006157737225294113, 0.022682465612888336, 0.03938237577676773, 0.06081400811672211, 0.04304014518857002, 0.1003982201218605, 0.10315583646297455, 0.07591617852449417, 0.14074142277240753, 0.061404772102832794, 0.12904991209506989, 0.054998427629470825], [0.09805618971586227, 0.0074311248026788235, 0.011619512923061848, 0.018143590539693832, 0.008942404761910439, 0.005412144120782614, 0.009866023436188698, 0.016229460015892982, 0.011486880481243134, 0.02055761031806469, 0.030756963416934013, 0.01250616554170847, 0.008148528635501862, 0.0155067453160882, 0.032114990055561066, 0.07205846905708313, 0.05942051485180855, 0.08097056299448013, 0.1131284311413765, 0.09236040711402893, 0.0735621526837349, 0.05240772292017937, 0.09949145466089249, 
0.04982197657227516]], [[0.025521917268633842, 0.026624739170074463, 0.02366539090871811, 0.038268428295850754, 0.04402834177017212, 0.027899187058210373, 0.0264778733253479, 0.03568527102470398, 0.04316236078739166, 0.06855333596467972, 0.034936148673295975, 0.042437732219696045, 0.047747354954481125, 0.05071854591369629, 0.0592600479722023, 0.038229357451200485, 0.022447794675827026, 0.039170730859041214, 0.026112360879778862, 0.02960561215877533, 0.03488791733980179, 0.11844193190336227, 0.03637957572937012, 0.059738095849752426], [0.057019926607608795, 0.06374318897724152, 0.025477377697825432, 0.04109261929988861, 0.038418643176555634, 0.08115497976541519, 0.03930036723613739, 0.030812138691544533, 0.0478813536465168, 0.03562138229608536, 0.0379241444170475, 0.0356232225894928, 0.03461729735136032, 0.08719199895858765, 0.03075091354548931, 0.022495534271001816, 0.023485267534852028, 0.04408823326230049, 0.027806181460618973, 0.030738018453121185, 0.025268318131566048, 0.04179584980010986, 0.03340427204966545, 0.06428880244493484], [0.010284407064318657, 0.009176220744848251, 0.029692599549889565, 0.006468544248491526, 0.03190822899341583, 0.006784751545637846, 0.0154738649725914, 0.013032901100814342, 0.03859572112560272, 0.06865068525075912, 0.11137672513723373, 0.02499721571803093, 0.022986281663179398, 0.012608022429049015, 0.08915853500366211, 0.038024287670850754, 0.024788595736026764, 0.027969177812337875, 0.030848627910017967, 0.033029038459062576, 0.06269552558660507, 0.15462565422058105, 0.10890939086675644, 0.027915053069591522], [0.024939436465501785, 0.025398967787623405, 0.054108746349811554, 0.02177431434392929, 0.056670308113098145, 0.038593556731939316, 0.029961617663502693, 0.03450027480721474, 0.06200749799609184, 0.06348700821399689, 0.038727086037397385, 0.028454281389713287, 0.04888088256120682, 0.028582051396369934, 0.06747936457395554, 0.038539350032806396, 0.05962493270635605, 0.03285093605518341, 0.018264351412653923, 
0.03263511881232262, 0.024834590032696724, 0.12442667037248611, 0.024095473811030388, 0.021163182333111763], [0.013652696274220943, 0.012808253057301044, 0.05000005289912224, 0.03249334543943405, 0.06565413624048233, 0.023142103105783463, 0.0226789228618145, 0.019238140434026718, 0.02845761366188526, 0.08480911701917648, 0.07675085216760635, 0.008931751362979412, 0.011951673775911331, 0.01921275071799755, 0.0836964100599289, 0.0945180356502533, 0.024233436211943626, 0.027435442432761192, 0.0420563779771328, 0.027021925896406174, 0.03852074220776558, 0.049357421696186066, 0.1348811835050583, 0.008497600443661213], [0.0366462767124176, 0.0457763634622097, 0.03541788458824158, 0.028970841318368912, 0.05396945774555206, 0.057509250938892365, 0.04432770609855652, 0.0474834069609642, 0.05698836222290993, 0.05952220410108566, 0.03349241986870766, 0.024528922513127327, 0.030013831332325935, 0.045618437230587006, 0.03473229333758354, 0.025299055501818657, 0.018694566562771797, 0.05962038040161133, 0.023770079016685486, 0.02908284403383732, 0.03368542715907097, 0.10741642117500305, 0.040865458548069, 0.02656814642250538], [0.014390457421541214, 0.01633933186531067, 0.02801039069890976, 0.021694285795092583, 0.04435521364212036, 0.03353194519877434, 0.014273817650973797, 0.02818474918603897, 0.05363565683364868, 0.11775845289230347, 0.04467831552028656, 0.02407657727599144, 0.028311101719737053, 0.04336007684469223, 0.044993285089731216, 0.04123583808541298, 0.022110769525170326, 0.05599794536828995, 0.017240328714251518, 0.05069909989833832, 0.03922606632113457, 0.15607106685638428, 0.03844935819506645, 0.021375924348831177], [0.004106605891138315, 0.004237595945596695, 0.011229968629777431, 0.005085643846541643, 0.015901681035757065, 0.03098919987678528, 0.004404915496706963, 0.021161234006285667, 0.08581683784723282, 0.24595898389816284, 0.03896681219339371, 0.010155629366636276, 0.012723241001367569, 0.007378897629678249, 0.036305204033851624, 0.006653294898569584, 
0.007053507026284933, 0.035990677773952484, 0.002987263258546591, 0.01072673313319683, 0.017632637172937393, 0.3601089417934418, 0.01826467178761959, 0.0061598531901836395], [0.008544649928808212, 0.0107567198574543, 0.018265917897224426, 0.016773493960499763, 0.06281191110610962, 0.02608022280037403, 0.018037645146250725, 0.023959435522556305, 0.046662963926792145, 0.0802343338727951, 0.06215309724211693, 0.02758972719311714, 0.031018156558275223, 0.0232625063508749, 0.06802640855312347, 0.037275590002536774, 0.03119083121418953, 0.08504176139831543, 0.019305454567074776, 0.014340843074023724, 0.032002195715904236, 0.17737345397472382, 0.061756253242492676, 0.017536405473947525], [0.01492026261985302, 0.012304721400141716, 0.02985474281013012, 0.013493803329765797, 0.019534535706043243, 0.034177232533693314, 0.01960313320159912, 0.039602458477020264, 0.03994147479534149, 0.08430854976177216, 0.07248099893331528, 0.050184350460767746, 0.04968933388590813, 0.014295142143964767, 0.05810560658574104, 0.03667515888810158, 0.016487130895256996, 0.056039538234472275, 0.019285162910819054, 0.04701174050569534, 0.023360276594758034, 0.16762636601924896, 0.03322438895702362, 0.0477939210832119], [0.016735786572098732, 0.012529697269201279, 0.0333675853908062, 0.01291579008102417, 0.16281823813915253, 0.012992325238883495, 0.025054842233657837, 0.011582308448851109, 0.07024794816970825, 0.06732882559299469, 0.036133114248514175, 0.021748000755906105, 0.01829848624765873, 0.015406081452965736, 0.035364747047424316, 0.015351683832705021, 0.027178993448615074, 0.041756436228752136, 0.03494453430175781, 0.023743970319628716, 0.06122703477740288, 0.17390097677707672, 0.04689827188849449, 0.022474275901913643], [0.014528430998325348, 0.009786466136574745, 0.029834583401679993, 0.015426138415932655, 0.04576258733868599, 0.03414810448884964, 0.020027223974466324, 0.03192778304219246, 0.07142575085163116, 0.11329378932714462, 0.06923861056566238, 0.018220998346805573, 
0.01810886338353157, 0.023792844265699387, 0.060290589928627014, 0.045205116271972656, 0.025099484249949455, 0.050400227308273315, 0.015588534064590931, 0.02728256583213806, 0.034324876964092255, 0.1473117619752884, 0.059975557029247284, 0.018999144434928894], [0.013345961458981037, 0.00849216990172863, 0.026886485517024994, 0.01973998360335827, 0.030632635578513145, 0.014061370864510536, 0.01827671192586422, 0.044332824647426605, 0.04534594714641571, 0.10077585279941559, 0.08484520018100739, 0.014579767361283302, 0.017053848132491112, 0.015088227577507496, 0.07115635275840759, 0.06682193279266357, 0.02645746059715748, 0.03383168578147888, 0.019625555723905563, 0.045838434249162674, 0.027048101648688316, 0.1708941012620926, 0.06347909569740295, 0.02139028161764145], [0.056734222918748856, 0.05969052016735077, 0.022365057840943336, 0.04259224236011505, 0.047932229936122894, 0.07736105471849442, 0.026861391961574554, 0.04402421414852142, 0.06893378496170044, 0.04312509670853615, 0.03997968137264252, 0.028632251545786858, 0.024451380595564842, 0.07997040450572968, 0.021400654688477516, 0.033632006496191025, 0.024861019104719162, 0.033862799406051636, 0.018894221633672714, 0.032797835767269135, 0.029143700376152992, 0.05270792543888092, 0.035813938826322556, 0.05423242971301079], [0.024553624913096428, 0.016241298988461494, 0.03410661593079567, 0.03841717168688774, 0.03734353929758072, 0.01415776927024126, 0.02652984857559204, 0.08087242394685745, 0.046349115669727325, 0.07070410996675491, 0.044323213398456573, 0.043982405215501785, 0.02190502919256687, 0.018273789435625076, 0.025365496054291725, 0.09939440339803696, 0.03822718933224678, 0.04674863442778587, 0.030961239710450172, 0.053372666239738464, 0.04189383611083031, 0.06716398894786835, 0.028584716841578484, 0.05052784085273743], [0.019111355766654015, 0.010077062994241714, 0.0351221039891243, 0.013247963041067123, 0.029805224388837814, 0.04201542213559151, 0.018446223810315132, 0.04918467253446579, 
0.06344663351774216, 0.14912723004817963, 0.05082438141107559, 0.02346489578485489, 0.027590151876211166, 0.020548582077026367, 0.046547435224056244, 0.034817397594451904, 0.03681853041052818, 0.06231764703989029, 0.011730419471859932, 0.03436477482318878, 0.016499819234013557, 0.1691371202468872, 0.01802685856819153, 0.017728030681610107], [0.021616501733660698, 0.015412166714668274, 0.06492681056261063, 0.03481828421354294, 0.09982695430517197, 0.02117069624364376, 0.01948116347193718, 0.0433063879609108, 0.03686848282814026, 0.06994765251874924, 0.05207207798957825, 0.00888814963400364, 0.010343175381422043, 0.022879261523485184, 0.05701269581913948, 0.08844849467277527, 0.02404625341296196, 0.038892198354005814, 0.03240601718425751, 0.05483049154281616, 0.0361182875931263, 0.0405513271689415, 0.09580235183238983, 0.010334111750125885], [0.0242540892213583, 0.024808689951896667, 0.050721801817417145, 0.02114507555961609, 0.030391553416848183, 0.040124837309122086, 0.02619965374469757, 0.10764186084270477, 0.053107064217329025, 0.05561678856611252, 0.046714115887880325, 0.03736988455057144, 0.024333376437425613, 0.03129100054502487, 0.045498382300138474, 0.05456582456827164, 0.033607497811317444, 0.03171406686306, 0.014941916801035404, 0.07133569568395615, 0.022195471450686455, 0.06313259899616241, 0.0349767692387104, 0.05431196093559265], [0.017324356362223625, 0.016634300351142883, 0.0334748700261116, 0.03361289203166962, 0.028673022985458374, 0.031143059954047203, 0.027679122984409332, 0.08327389508485794, 0.04538995400071144, 0.05789753049612045, 0.042737845331430435, 0.026823610067367554, 0.0237954780459404, 0.036752842366695404, 0.03391590341925621, 0.07001068443059921, 0.0311770997941494, 0.03768577054142952, 0.0348108634352684, 0.13661997020244598, 0.04426577687263489, 0.04681027680635452, 0.03351476415991783, 0.0259760320186615], [0.005617646500468254, 0.00473429448902607, 0.043317873030900955, 0.009687177836894989, 0.011133173480629921, 
0.018548892810940742, 0.008256541565060616, 0.08465985953807831, 0.06225435435771942, 0.20744501054286957, 0.03905400633811951, 0.01708410680294037, 0.018212977796792984, 0.009606321342289448, 0.051740244030952454, 0.057347506284713745, 0.02189098484814167, 0.019868412986397743, 0.008567657321691513, 0.07315832376480103, 0.02315700426697731, 0.16615551710128784, 0.020700538530945778, 0.01780167780816555], [0.021129339933395386, 0.018348416313529015, 0.04199491813778877, 0.03592982888221741, 0.03259267657995224, 0.043794166296720505, 0.030952829867601395, 0.07697740942239761, 0.0492260716855526, 0.031795188784599304, 0.027551783248782158, 0.02954055927693844, 0.042402662336826324, 0.04191099852323532, 0.033940572291612625, 0.08696645498275757, 0.045810405164957047, 0.04923590272665024, 0.03628068417310715, 0.09634923189878464, 0.039792876690626144, 0.020754113793373108, 0.03330134227871895, 0.03342154622077942], [0.01643206924200058, 0.006819251924753189, 0.04664117470383644, 0.014973045326769352, 0.014418579638004303, 0.026690203696489334, 0.021931402385234833, 0.08688752353191376, 0.061050910502672195, 0.05833292752504349, 0.03264018893241882, 0.028140680864453316, 0.0302385576069355, 0.01157311536371708, 0.03239059820771217, 0.07932011783123016, 0.02668059431016445, 0.026028424501419067, 0.02034628391265869, 0.20006221532821655, 0.02507145144045353, 0.0619238056242466, 0.01889001578092575, 0.05251680687069893], [0.0343845970928669, 0.028212400153279305, 0.048272229731082916, 0.021288607269525528, 0.09699810296297073, 0.025627268478274345, 0.031166279688477516, 0.020171506330370903, 0.06281182914972305, 0.045749031007289886, 0.06163505092263222, 0.01126064732670784, 0.011571248061954975, 0.019457288086414337, 0.041808322072029114, 0.0414312444627285, 0.05194805562496185, 0.023189492523670197, 0.0687924474477768, 0.051534272730350494, 0.05991378426551819, 0.05429030954837799, 0.06797222048044205, 0.020513691008090973], [0.017953045666217804, 0.008264790289103985, 
0.028422614559531212, 0.015501082874834538, 0.02434946969151497, 0.02992270328104496, 0.023245884105563164, 0.03049343265593052, 0.06123138591647148, 0.11189354956150055, 0.07802245020866394, 0.021621325984597206, 0.027940819039940834, 0.013253011740744114, 0.0391826406121254, 0.06949732452630997, 0.02744435891509056, 0.02715560607612133, 0.02360704354941845, 0.07991143316030502, 0.028628606349229813, 0.13473311066627502, 0.0542604960501194, 0.023463822901248932]], [[0.028765428811311722, 0.04051727056503296, 0.04004944860935211, 0.028539255261421204, 0.04798516258597374, 0.09194047003984451, 0.08895497769117355, 0.08142950385808945, 0.028943253681063652, 0.027862058952450752, 0.06928082555532455, 0.04245155304670334, 0.036774490028619766, 0.027048850432038307, 0.03427129238843918, 0.04613348841667175, 0.01646948978304863, 0.03273282200098038, 0.035343389958143234, 0.040598705410957336, 0.030911331996321678, 0.02239646576344967, 0.04772953316569328, 0.012870941311120987], [0.025248203426599503, 0.01595926098525524, 0.016193656250834465, 0.027774428948760033, 0.04543246701359749, 0.05599263682961464, 0.04030517116189003, 0.05406760424375534, 0.015711480751633644, 0.07312841713428497, 0.04014868661761284, 0.22228237986564636, 0.0621972382068634, 0.03302927687764168, 0.017374299466609955, 0.049081284552812576, 0.03348185867071152, 0.06095884367823601, 0.031087178736925125, 0.01927543617784977, 0.00795671809464693, 0.012381981126964092, 0.02002905122935772, 0.020902486518025398], [0.026128316298127174, 0.015577850863337517, 0.04488038644194603, 0.02454887516796589, 0.025393739342689514, 0.04997264966368675, 0.031141629442572594, 0.13757488131523132, 0.012274650856852531, 0.011958062648773193, 0.06068502366542816, 0.09397739917039871, 0.03127438947558403, 0.03613127022981644, 0.04159288853406906, 0.07180461287498474, 0.027057815343141556, 0.04808235540986061, 0.02890109457075596, 0.04283580183982849, 0.009141863323748112, 0.038744036108255386, 0.05461455136537552, 
0.03570588305592537], [0.02726878598332405, 0.017115794122219086, 0.042975954711437225, 0.029206519946455956, 0.07345734536647797, 0.11054780334234238, 0.033468086272478104, 0.12878891825675964, 0.03679812327027321, 0.0852092057466507, 0.02177743799984455, 0.1584528684616089, 0.03566009923815727, 0.008692574687302113, 0.02025471068918705, 0.018533723428845406, 0.01771661266684532, 0.011599424295127392, 0.019019847735762596, 0.013730854727327824, 0.015941070392727852, 0.017131725326180458, 0.009366569109261036, 0.04728599265217781], [0.021703559905290604, 0.006662921980023384, 0.04215303435921669, 0.021534861996769905, 0.01373929064720869, 0.2931908071041107, 0.040165532380342484, 0.33404868841171265, 0.011544063687324524, 0.0480927899479866, 0.014667770825326443, 0.0441894493997097, 0.010703301057219505, 0.009910529479384422, 0.015897907316684723, 0.017441479489207268, 0.0019824353512376547, 0.0058241649530828, 0.0186375193297863, 0.0050114854238927364, 0.005466865841299295, 0.0025522157084196806, 0.009235559031367302, 0.0056437281891703606], [0.06012622267007828, 0.029941746965050697, 0.06321346759796143, 0.03485305234789848, 0.04918783903121948, 0.061713118106126785, 0.03507891669869423, 0.1016695573925972, 0.04633977636694908, 0.05986344441771507, 0.02875657007098198, 0.06920771300792694, 0.05558478459715843, 0.03331337869167328, 0.04988160729408264, 0.02637241780757904, 0.017880452796816826, 0.008453141897916794, 0.021882878616452217, 0.02229001559317112, 0.03340941295027733, 0.0273758377879858, 0.0219260361045599, 0.041678592562675476], [0.011998251080513, 0.006215905304998159, 0.010284966789186, 0.008079051971435547, 0.011723016388714314, 0.026259275153279305, 0.007308793254196644, 0.8350272178649902, 0.011014467105269432, 0.01258019357919693, 0.00791653897613287, 0.007589646615087986, 0.003988068550825119, 0.004648410715162754, 0.007463967427611351, 0.003683994757011533, 0.005555171985179186, 0.0016277108807116747, 0.0036848413292318583, 
0.0015281803207471967, 0.004622144158929586, 0.0007087915437296033, 0.005225847940891981, 0.0012655751779675484], [0.01528799906373024, 0.012760485522449017, 0.019141102209687233, 0.030267128720879555, 0.023408550769090652, 0.026874341070652008, 0.011382633820176125, 0.02852472849190235, 0.015049746260046959, 0.5206554532051086, 0.13751688599586487, 0.01440581027418375, 0.007489616051316261, 0.0029296616557985544, 0.008448359556496143, 0.042778801172971725, 0.013516273349523544, 0.00337469344958663, 0.004514921456575394, 0.0016594474436715245, 0.007485539186745882, 0.0074224392883479595, 0.043234001845121384, 0.0018713462632149458], [0.02081231400370598, 0.010655495338141918, 0.01976187154650688, 0.008553651161491871, 0.005635491106659174, 0.21784427762031555, 0.014379038475453854, 0.3306500017642975, 0.004672781564295292, 0.2781198024749756, 0.01956290565431118, 0.03232812508940697, 0.0019079487537965178, 0.006032121833413839, 0.00646099541336298, 0.005887734238058329, 0.004922908265143633, 0.0014062859117984772, 0.0048834336921572685, 0.0005738554755225778, 0.0008285412332043052, 0.00010239038965664804, 0.003606664016842842, 0.00041135947685688734], [0.022633492946624756, 0.005149535369127989, 0.018242713063955307, 0.04299996420741081, 0.008748914115130901, 0.051007382571697235, 0.03367521986365318, 0.09488089382648468, 0.02624489553272724, 0.03066924214363098, 0.028008796274662018, 0.35623863339424133, 0.08222591876983643, 0.017203422263264656, 0.01797148957848549, 0.04609714075922966, 0.006505830679088831, 0.02361857332289219, 0.011351281777024269, 0.0416533388197422, 0.007537117227911949, 0.006031114608049393, 0.007264170330017805, 0.01404102984815836], [0.0045962026342749596, 0.0019389491062611341, 0.009677628986537457, 0.0015211534919217229, 0.0018587701488286257, 0.019054610282182693, 0.0026473053731024265, 0.14890973269939423, 0.0004305407637730241, 0.08703286945819855, 0.024147331714630127, 0.6561999320983887, 0.0024765573907643557, 0.014224588871002197, 
0.003962626215070486, 0.012842187657952309, 0.0017578218830749393, 0.0019701020792126656, 0.0008652149699628353, 0.0009442387381568551, 9.202575165545568e-05, 0.0003320295363664627, 0.0019927890971302986, 0.0005246758810244501], [0.049528226256370544, 0.01777065172791481, 0.03223191574215889, 0.02348695509135723, 0.02138610929250717, 0.029040809720754623, 0.06318388134241104, 0.02114216983318329, 0.046288035809993744, 0.010021771304309368, 0.08177924156188965, 0.16342222690582275, 0.12375883758068085, 0.013606260530650616, 0.04716962203383446, 0.032774828374385834, 0.03167518228292465, 0.010852981358766556, 0.04002777114510536, 0.019480399787425995, 0.03433239459991455, 0.013368598185479641, 0.035569917410612106, 0.03810114413499832], [0.004849510733038187, 0.0025807449128478765, 0.00662267254665494, 0.00212936126627028, 0.0029529130551964045, 0.010673047974705696, 0.007010770961642265, 0.013140959665179253, 0.0004396717413328588, 0.018284784629940987, 0.0019820278976112604, 0.5575461983680725, 0.007182675413787365, 0.2924516201019287, 0.004909663926810026, 0.03663616254925728, 0.002668406581506133, 0.015438353642821312, 0.0037353853695094585, 0.0042985351756215096, 0.0001747371134115383, 0.0009404465090483427, 0.0008006578427739441, 0.002550732810050249], [0.04411806911230087, 0.0385998860001564, 0.01844855397939682, 0.023900067433714867, 0.040889229625463486, 0.047346390783786774, 0.08343293517827988, 0.021483659744262695, 0.037420421838760376, 0.034419335424900055, 0.034956566989421844, 0.05966819077730179, 0.04568404331803322, 0.03351147100329399, 0.026523450389504433, 0.05017015337944031, 0.05828752741217613, 0.053246285766363144, 0.08720672875642776, 0.013651572167873383, 0.02810661494731903, 0.04286857694387436, 0.023400483652949333, 0.05265980586409569], [0.002873230492696166, 0.002638811944052577, 0.0075695570558309555, 0.0021491723600775003, 0.001529341097921133, 0.008134901523590088, 0.0054143196903169155, 0.02198275923728943, 0.00035443154047243297, 
0.0024744076654314995, 0.0035073065664619207, 0.08406862616539001, 0.0030940112192183733, 0.138546422123909, 0.007253999821841717, 0.5941351652145386, 0.0022648025769740343, 0.07093403488397598, 0.005600810516625643, 0.009536925703287125, 0.00024344128905795515, 0.009292750619351864, 0.0061739785596728325, 0.010226775892078876], [0.026413587853312492, 0.028490673750638962, 0.044125013053417206, 0.02270974963903427, 0.030031897127628326, 0.08060099929571152, 0.06586631387472153, 0.033779773861169815, 0.04489739239215851, 0.03340492397546768, 0.03494676575064659, 0.07871819287538528, 0.05125296488404274, 0.031142182648181915, 0.04927694424986839, 0.06527085602283478, 0.03802938014268875, 0.027386415749788284, 0.042597729712724686, 0.00969692226499319, 0.029127411544322968, 0.021903129294514656, 0.0339772067964077, 0.07635349780321121], [0.004266486968845129, 0.0029275703709572554, 0.011358128860592842, 0.01100288238376379, 0.004926283378154039, 0.0062408833764493465, 0.026506220921874046, 0.003198788268491626, 0.0008222296601161361, 0.008831331506371498, 0.007307791616767645, 0.014126420952379704, 0.0038273350801318884, 0.04794676601886749, 0.005179544910788536, 0.20022226870059967, 0.003065419150516391, 0.47324129939079285, 0.04636358842253685, 0.037555236369371414, 0.0015409457264468074, 0.06128900870680809, 0.010338041000068188, 0.007915529422461987], [0.05072883516550064, 0.03367036208510399, 0.057028863579034805, 0.024112142622470856, 0.031260211020708084, 0.020788537338376045, 0.030948419123888016, 0.018103713169693947, 0.063751220703125, 0.04376557469367981, 0.04505765810608864, 0.056323423981666565, 0.06323055922985077, 0.022051826119422913, 0.058803729712963104, 0.026981182396411896, 0.07337969541549683, 0.018770674243569374, 0.03917727619409561, 0.013048103079199791, 0.07498360425233841, 0.03486190736293793, 0.0398978665471077, 0.059274688363075256], [0.004803771153092384, 0.0020404697861522436, 0.00547065818682313, 0.006994579918682575, 
0.005949170328676701, 0.001353679457679391, 0.006260568276047707, 0.0005709612742066383, 0.001511265174485743, 0.0007919033523648977, 0.00580189935863018, 0.004089703317731619, 0.005183090455830097, 0.0037895895075052977, 0.0045628356747329235, 0.026689641177654266, 0.004739296156913042, 0.20718318223953247, 0.03064313903450966, 0.42672404646873474, 0.008773915469646454, 0.21221283078193665, 0.009023179300129414, 0.014836495742201805], [0.02809581533074379, 0.022442884743213654, 0.02634679339826107, 0.03805916756391525, 0.025827398523688316, 0.033497072756290436, 0.03644775226712227, 0.011165055446326733, 0.02967541292309761, 0.04844776913523674, 0.08247184008359909, 0.03235059604048729, 0.0302907582372427, 0.00609277468174696, 0.027271665632724762, 0.10238172113895416, 0.02181076630949974, 0.019810572266578674, 0.042975425720214844, 0.021633367985486984, 0.06183435767889023, 0.11675386130809784, 0.09749586135149002, 0.03682125359773636], [0.010263410396873951, 0.004554999992251396, 0.012853216379880905, 0.005235398653894663, 0.003874377813190222, 0.00659565394744277, 0.024478457868099213, 0.0009628177504055202, 0.002687780885025859, 0.0013258290709927678, 0.007479973137378693, 0.005196539219468832, 0.004765888676047325, 0.004674715455621481, 0.007982964627444744, 0.018772156909108162, 0.00470859045162797, 0.08512937277555466, 0.09715133905410767, 0.13670481741428375, 0.01609685644507408, 0.47705593705177307, 0.013139713555574417, 0.048309169709682465], [0.024331681430339813, 0.01701674982905388, 0.025316821411252022, 0.01963430643081665, 0.005388517398387194, 0.014841115102171898, 0.01772376522421837, 0.037867624312639236, 0.007918908260762691, 0.011524482630193233, 0.004168423358350992, 0.20758336782455444, 0.051767878234386444, 0.12104713916778564, 0.044780977070331573, 0.08263345062732697, 0.012095375917851925, 0.07554251700639725, 0.027381569147109985, 0.05592596158385277, 0.01909179985523224, 0.021118393167853355, 0.01235763356089592, 0.08294162154197693], 
[0.013524515554308891, 0.01999000273644924, 0.10146911442279816, 0.004284179303795099, 0.008156723342835903, 0.01811741106212139, 0.029825257137417793, 0.05013274401426315, 0.010899249464273453, 0.019068840891122818, 0.020379196852445602, 0.015798745676875114, 0.01050097681581974, 0.027838261798024178, 0.059040289372205734, 0.012587863020598888, 0.004391103517264128, 0.011786725372076035, 0.02858663536608219, 0.017319677397608757, 0.02156345546245575, 0.12891526520252228, 0.043814633041620255, 0.32200905680656433], [0.021390171721577644, 0.036982450634241104, 0.043505214154720306, 0.015278241597115993, 0.026576213538646698, 0.007606164552271366, 0.05357956886291504, 0.01419835351407528, 0.024665992707014084, 0.002349943621084094, 0.0240265391767025, 0.011445529758930206, 0.03961286321282387, 0.022613614797592163, 0.06620893627405167, 0.028293007984757423, 0.045992206782102585, 0.030652208253741264, 0.08186108618974686, 0.03348594903945923, 0.16225138306617737, 0.021856551989912987, 0.12375690042972565, 0.0618109405040741]], [[0.020332133397459984, 0.03675532341003418, 0.06841706484556198, 0.023099534213542938, 0.017871303483843803, 0.03369784727692604, 0.02552301436662674, 0.022972989827394485, 0.060679636895656586, 0.03482970595359802, 0.050575703382492065, 0.04267881438136101, 0.07000209391117096, 0.03585165739059448, 0.09057188779115677, 0.038461290299892426, 0.014986326918005943, 0.027113769203424454, 0.026475634425878525, 0.057998839765787125, 0.04078793153166771, 0.03990600258111954, 0.05917920917272568, 0.06123228743672371], [0.050090137869119644, 0.07633300125598907, 0.07563960552215576, 0.049396876245737076, 0.040387898683547974, 0.06591536849737167, 0.025950275361537933, 0.04222841188311577, 0.039568524807691574, 0.03981032222509384, 0.04128989204764366, 0.04143502190709114, 0.04889748990535736, 0.0534248985350132, 0.04478615149855614, 0.022075045853853226, 0.029558762907981873, 0.0376620814204216, 0.04234999418258667, 0.035177554935216904, 
0.021110666915774345, 0.020094122737646103, 0.02728511579334736, 0.02953271009027958], [0.009342573583126068, 0.015957359224557877, 0.0992676168680191, 0.03212207183241844, 0.01363056804984808, 0.014263165183365345, 0.017426514998078346, 0.028028016909956932, 0.029782569035887718, 0.008458118885755539, 0.05171196535229683, 0.010580355301499367, 0.0065277740359306335, 0.021625980734825134, 0.07471899688243866, 0.10540463775396347, 0.019571371376514435, 0.10461673140525818, 0.01767268404364586, 0.1127721294760704, 0.10410672426223755, 0.02138698473572731, 0.07035473734140396, 0.010670317336916924], [0.012170792557299137, 0.023852456361055374, 0.08652652055025101, 0.010731051675975323, 0.010327907279133797, 0.017449192702770233, 0.025366442278027534, 0.03977242112159729, 0.028678379952907562, 0.040260013192892075, 0.02115027979016304, 0.0487109012901783, 0.04589169844985008, 0.06844936311244965, 0.09670547395944595, 0.04745039343833923, 0.020432423800230026, 0.05371056869626045, 0.023756692185997963, 0.10174136608839035, 0.03927179053425789, 0.07072389125823975, 0.020777462050318718, 0.04609246179461479], [0.007183551788330078, 0.0127639165148139, 0.21788792312145233, 0.014402572065591812, 0.005694212391972542, 0.013719498179852962, 0.4012366831302643, 0.014859132468700409, 0.01461873110383749, 0.003263076301664114, 0.020413560792803764, 0.02739257737994194, 0.009238683618605137, 0.032621413469314575, 0.024176953360438347, 0.022867996245622635, 0.005678014829754829, 0.0272385161370039, 0.03597891330718994, 0.023160340264439583, 0.0220914538949728, 0.005823273677378893, 0.021717770025134087, 0.01597118005156517], [0.02063399739563465, 0.023316234350204468, 0.04661306366324425, 0.01833093725144863, 0.017012255266308784, 0.01947771944105625, 0.07079807668924332, 0.0664568841457367, 0.08953364938497543, 0.06509412825107574, 0.01066845003515482, 0.06211376190185547, 0.1030401736497879, 0.04965996369719505, 0.06207609921693802, 0.018640320748090744, 0.02191656082868576, 
0.017460988834500313, 0.0271464791148901, 0.028417719528079033, 0.04857087507843971, 0.05428675562143326, 0.013451781123876572, 0.04528312757611275], [0.012207414023578167, 0.016707394272089005, 0.06725575029850006, 0.01613703928887844, 0.013530796393752098, 0.04218301177024841, 0.018012940883636475, 0.04131172224879265, 0.059737931936979294, 0.08474716544151306, 0.038714878261089325, 0.03114684298634529, 0.03280907869338989, 0.05370396003127098, 0.08850999921560287, 0.026313098147511482, 0.015292786993086338, 0.029477113857865334, 0.0397547222673893, 0.06931662559509277, 0.027779122814536095, 0.04402471333742142, 0.06576374918222427, 0.06556205451488495], [0.01164016779512167, 0.01510701421648264, 0.07608164101839066, 0.02272151969373226, 0.009090975858271122, 0.03899570554494858, 0.041062965989112854, 0.07700268179178238, 0.05410098284482956, 0.05228047072887421, 0.05405024439096451, 0.021106816828250885, 0.018692484125494957, 0.03606090694665909, 0.0770009458065033, 0.0653509572148323, 0.006918023806065321, 0.021295206621289253, 0.01970662549138069, 0.11128643900156021, 0.03466316685080528, 0.0376180075109005, 0.08023255318403244, 0.017933465540409088], [0.008747267536818981, 0.008928910829126835, 0.02520878240466118, 0.021338440477848053, 0.013801567256450653, 0.04813973233103752, 0.0469750314950943, 0.02480100654065609, 0.028376327827572823, 0.012598716653883457, 0.10271725058555603, 0.032943278551101685, 0.02719648741185665, 0.026210207492113113, 0.09673100709915161, 0.06425485759973526, 0.01799456961452961, 0.02383159101009369, 0.01858256384730339, 0.048685070127248764, 0.047114040702581406, 0.020315544679760933, 0.13775373995304108, 0.0967540591955185], [0.013321969658136368, 0.024025410413742065, 0.04002277925610542, 0.02769191563129425, 0.012242875061929226, 0.012402734719216824, 0.021371541544795036, 0.03517795354127884, 0.035146456211805344, 0.023632043972611427, 0.027866479009389877, 0.029339388012886047, 0.019104784354567528, 0.02963169664144516, 
0.04432126134634018, 0.10999230295419693, 0.017637677490711212, 0.04969719424843788, 0.011797213926911354, 0.11432360112667084, 0.11655928939580917, 0.09856533259153366, 0.049247074872255325, 0.03688092902302742], [0.013622868806123734, 0.013428892940282822, 0.07482093572616577, 0.019416045397520065, 0.011638960801064968, 0.026660334318876266, 0.01794208213686943, 0.04626407474279404, 0.03571954742074013, 0.013971471227705479, 0.09955446422100067, 0.03175020590424538, 0.02979169599711895, 0.09870771318674088, 0.11109183728694916, 0.04879293218255043, 0.018908429890871048, 0.06188912317156792, 0.02050926350057125, 0.040445588529109955, 0.04723167046904564, 0.01935724727809429, 0.06617170572280884, 0.03231291472911835], [0.006453040521591902, 0.006332305260002613, 0.05567342787981033, 0.00653213681653142, 0.005654457025229931, 0.025495389476418495, 0.00633396627381444, 0.016657745465636253, 0.023155858740210533, 0.08770221471786499, 0.16684147715568542, 0.02587084472179413, 0.042590975761413574, 0.03837820887565613, 0.11839428544044495, 0.02370205521583557, 0.011244640685617924, 0.024305082857608795, 0.008550734259188175, 0.017497600987553596, 0.018449578434228897, 0.032320450991392136, 0.16784676909446716, 0.06401680409908295], [0.008627829141914845, 0.006804103963077068, 0.037087637931108475, 0.006722611375153065, 0.010703129693865776, 0.04698660597205162, 0.00560133857652545, 0.01882861740887165, 0.03944949433207512, 0.1516202986240387, 0.0944063737988472, 0.04527682811021805, 0.0403858907520771, 0.027533169835805893, 0.07196692377328873, 0.014770706184208393, 0.013867545872926712, 0.020204834640026093, 0.006911836098879576, 0.019740290939807892, 0.01747814752161503, 0.0351945199072361, 0.14014974236488342, 0.11968151479959488], [0.023226937279105186, 0.028427697718143463, 0.026291877031326294, 0.02993505261838436, 0.013696367852389812, 0.03435865789651871, 0.02556360885500908, 0.04137638583779335, 0.05121397599577904, 0.021732931956648827, 0.10601059347391129, 
0.025069689378142357, 0.03648700937628746, 0.05359341949224472, 0.09522240608930588, 0.05933792144060135, 0.031519897282123566, 0.04295308515429497, 0.03991786763072014, 0.06764505803585052, 0.042832765728235245, 0.0256251972168684, 0.05155519023537636, 0.02640637755393982], [0.00922238826751709, 0.006380717270076275, 0.03543655574321747, 0.009160999208688736, 0.010459104552865028, 0.01654880680143833, 0.006550470367074013, 0.023331457749009132, 0.017842328175902367, 0.011402478441596031, 0.29796460270881653, 0.009182218462228775, 0.009440938010811806, 0.017916491255164146, 0.029757866635918617, 0.06668853014707565, 0.010991348884999752, 0.028885813429951668, 0.014040376991033554, 0.06380073726177216, 0.019599352031946182, 0.0150324497371912, 0.2576903700828552, 0.012673555873334408], [0.009831036441028118, 0.016222286969423294, 0.053124163299798965, 0.005800317041575909, 0.009087003767490387, 0.017773644998669624, 0.0068016438744962215, 0.027739068493247032, 0.04570027440786362, 0.042523227632045746, 0.056682754307985306, 0.013531140983104706, 0.03258270025253296, 0.05195075646042824, 0.14799225330352783, 0.020907824859023094, 0.018402772024273872, 0.030374538153409958, 0.025105806067585945, 0.07289542257785797, 0.08990202099084854, 0.05438739061355591, 0.1106310486793518, 0.040050942450761795], [0.009908963926136494, 0.009243253618478775, 0.072079136967659, 0.006245187018066645, 0.007744770962744951, 0.01734505407512188, 0.09840168803930283, 0.02571781910955906, 0.03878409415483475, 0.008316133171319962, 0.04280681535601616, 0.01582563854753971, 0.013239424675703049, 0.03410279378294945, 0.09889306128025055, 0.049509599804878235, 0.017681488767266273, 0.05726536735892296, 0.08755816519260406, 0.08259723335504532, 0.07377263903617859, 0.028378618881106377, 0.06587263196706772, 0.03871039301156998], [0.014194686897099018, 0.025622224435210228, 0.05137190595269203, 0.004139121621847153, 0.009437286294996738, 0.020730996504426003, 0.008771904744207859, 
0.025486420840024948, 0.051071129739284515, 0.050347886979579926, 0.07646362483501434, 0.02070770226418972, 0.04137995466589928, 0.042466845363378525, 0.06917704641819, 0.020350176841020584, 0.015356103889644146, 0.024000070989131927, 0.029952887445688248, 0.06956746429204941, 0.06380818039178848, 0.0861266478896141, 0.11270420253276825, 0.06676559150218964], [0.013637371361255646, 0.017134664580225945, 0.05996683984994888, 0.006901200395077467, 0.01332040410488844, 0.028013555333018303, 0.027153540402650833, 0.03183848783373833, 0.05816122889518738, 0.05911718308925629, 0.043295565992593765, 0.025032110512256622, 0.03104369156062603, 0.04133940115571022, 0.06053508445620537, 0.016284463927149773, 0.02020280808210373, 0.034847453236579895, 0.0870504379272461, 0.10367287695407867, 0.022639937698841095, 0.060981385409832, 0.07297404110431671, 0.06485629081726074], [0.00867766235023737, 0.017821110785007477, 0.027749495580792427, 0.005085039418190718, 0.009952329099178314, 0.021819185465574265, 0.016949355602264404, 0.05044430121779442, 0.06206309795379639, 0.06848271936178207, 0.0189650971442461, 0.010226542130112648, 0.026265574619174004, 0.03043166920542717, 0.11692019551992416, 0.03232913464307785, 0.02166965790092945, 0.030599389225244522, 0.042146362364292145, 0.109872005879879, 0.05729923024773598, 0.08830294013023376, 0.0629086121916771, 0.06301926076412201], [0.014835931360721588, 0.0166308656334877, 0.013316511176526546, 0.007671067491173744, 0.016054637730121613, 0.0390324629843235, 0.026483744382858276, 0.023347733542323112, 0.07802190631628036, 0.017333664000034332, 0.05689888074994087, 0.013967993669211864, 0.03509032353758812, 0.017173979431390762, 0.07121749222278595, 0.03866969794034958, 0.03479793295264244, 0.04350026696920395, 0.06183303892612457, 0.08839482069015503, 0.046313200145959854, 0.06016905978322029, 0.09467536956071854, 0.08456944674253464], [0.016803612932562828, 0.021738039329648018, 0.02067248336970806, 0.007906620390713215, 
0.018153410404920578, 0.019439632073044777, 0.012803932651877403, 0.020872555673122406, 0.0703393742442131, 0.06017669662833214, 0.04093114659190178, 0.018521690741181374, 0.022148512303829193, 0.01656808890402317, 0.028385447338223457, 0.021997051313519478, 0.02916734851896763, 0.03787603601813316, 0.03105262853205204, 0.10969585180282593, 0.08810044080018997, 0.0830894410610199, 0.11695510894060135, 0.08660484850406647], [0.018667815253138542, 0.022367063909769058, 0.05679779127240181, 0.009530487470328808, 0.022681482136249542, 0.02820640243589878, 0.027642391622066498, 0.03576705977320671, 0.046224795281887054, 0.018956050276756287, 0.03252825140953064, 0.036293815821409225, 0.06389173865318298, 0.0678667277097702, 0.0840504914522171, 0.02151571400463581, 0.0538482666015625, 0.047921162098646164, 0.06516722589731216, 0.03768618404865265, 0.06547180563211441, 0.028720486909151077, 0.027745729312300682, 0.0804511234164238], [0.011613546870648861, 0.013281309977173805, 0.03194555267691612, 0.006538077257573605, 0.009657280519604683, 0.018373355269432068, 0.007001005113124847, 0.021570419892668724, 0.0843641459941864, 0.11413142830133438, 0.04211501404643059, 0.024001486599445343, 0.05040564388036728, 0.02314945124089718, 0.09064650535583496, 0.010324847884476185, 0.019771423190832138, 0.02317666821181774, 0.018889687955379486, 0.04388263076543808, 0.0666278675198555, 0.08231355994939804, 0.08685935288667679, 0.09935972094535828]]], [[[0.04673907533288002, 0.06729947775602341, 0.01923380419611931, 0.05372636765241623, 0.11894576996564865, 0.045413557440042496, 0.1255384087562561, 0.10800886899232864, 0.039190638810396194, 0.014797481708228588, 0.0286489836871624, 0.017825616523623466, 0.021079039201140404, 0.03780185058712959, 0.015190423466265202, 0.007283841259777546, 0.02623186632990837, 0.009488116949796677, 0.030133401975035667, 0.012022772803902626, 0.036199577152729034, 0.015482550486922264, 0.06911905109882355, 0.03459953889250755], [0.03399592265486717, 
0.04776058718562126, 0.01693769358098507, 0.05645010247826576, 0.15289145708084106, 0.09401208907365799, 0.028778666630387306, 0.022624768316745758, 0.029212113469839096, 0.06850624829530716, 0.02954038232564926, 0.026884065940976143, 0.019749434664845467, 0.024583283811807632, 0.015372347086668015, 0.049114715307950974, 0.11878102272748947, 0.03636976704001427, 0.022163039073348045, 0.006231867242604494, 0.022502996027469635, 0.012048622593283653, 0.023053806275129318, 0.04243501275777817], [0.04462376609444618, 0.039318621158599854, 0.07008501887321472, 0.12472739815711975, 0.05995956063270569, 0.05519333854317665, 0.03673812374472618, 0.039379652589559555, 0.07522348314523697, 0.04016001150012016, 0.09520953893661499, 0.025728927925229073, 0.0366424098610878, 0.01231159083545208, 0.061165619641542435, 0.041192080825567245, 0.019226111471652985, 0.015622667968273163, 0.022876102477312088, 0.01144260261207819, 0.017158381640911102, 0.01174930203706026, 0.029919704422354698, 0.014346071518957615], [0.05618274584412575, 0.024519063532352448, 0.0519283264875412, 0.032654404640197754, 0.05412948131561279, 0.0717015415430069, 0.08036664873361588, 0.0705852061510086, 0.06270748376846313, 0.005858021788299084, 0.015189753845334053, 0.008205980062484741, 0.022892985492944717, 0.017113590613007545, 0.05084816738963127, 0.07411422580480576, 0.016550203785300255, 0.04893684387207031, 0.03225075080990791, 0.017242617905139923, 0.03455497324466705, 0.021299146115779877, 0.05214754492044449, 0.07802028954029083], [0.026931460946798325, 0.01682864874601364, 0.05328533425927162, 0.06255347281694412, 0.030004853382706642, 0.2330365926027298, 0.08064053952693939, 0.051811881363391876, 0.12627215683460236, 0.12378884106874466, 0.03991526737809181, 0.015489851124584675, 0.018824411556124687, 0.007230482995510101, 0.033665917813777924, 0.016891485080122948, 0.004065495450049639, 0.011000474914908409, 0.019813720136880875, 0.005666963756084442, 0.004661251790821552, 
0.005831694696098566, 0.0059001450426876545, 0.005889083258807659], [0.0016549426363781095, 0.002476759720593691, 0.002193358726799488, 0.0067526549100875854, 0.010555225424468517, 0.01730796881020069, 0.013062379322946072, 0.8968229293823242, 0.01826358772814274, 0.0072055901400744915, 0.0031853297259658575, 0.0069343410432338715, 0.0015747162979096174, 0.005620671436190605, 0.0023568226024508476, 0.0013218584936112165, 0.00031448135268874466, 0.00011872239701915532, 0.00010075502359541133, 0.00042507852776907384, 8.141637226799503e-05, 0.00020467877038754523, 0.0007913335575722158, 0.0006744895945303142], [0.008101106621325016, 0.014954525046050549, 0.026560023427009583, 0.02388627454638481, 0.014528175815939903, 0.13726480305194855, 0.0276053287088871, 0.11281032860279083, 0.2071295976638794, 0.3660505414009094, 0.017805548384785652, 0.010424057953059673, 0.007442566100507975, 0.004080342128872871, 0.010389049537479877, 0.002744204830378294, 0.0021703180391341448, 0.0017961066914722323, 0.0011600992875173688, 0.0005832227761857212, 0.000256392580922693, 0.0003812731883954257, 0.0007608016021549702, 0.0011153023224323988], [0.0008474793867208064, 0.0013348518405109644, 0.013977937400341034, 0.0017129466868937016, 0.0009942672913894057, 0.04726096987724304, 0.008581224828958511, 0.011576784774661064, 0.024166520684957504, 0.8740216493606567, 0.008566539734601974, 0.0024183078203350306, 0.0012398998951539397, 0.0001734936813591048, 0.0018506125779822469, 0.0003390488272998482, 7.446663948940113e-05, 0.0004179369716439396, 0.000171386418514885, 8.544916636310518e-05, 1.9123175661661662e-05, 1.724152207316365e-05, 2.8308510081842542e-05, 0.00012359698303043842], [0.024764396250247955, 0.009337575174868107, 0.014713303185999393, 0.028568988665938377, 0.015497521497309208, 0.22815272212028503, 0.11158885061740875, 0.053744010627269745, 0.09170109778642654, 0.14041152596473694, 0.2104177474975586, 0.011934799142181873, 0.026363616809248924, 0.002896079560741782, 
0.010143626481294632, 0.0011253156699240208, 0.0024892615620046854, 0.0014513572677969933, 0.009388704784214497, 0.0007142634713090956, 0.0014076001243665814, 0.00033878866815939546, 0.0018028839258477092, 0.0010458639590069652], [0.001104910857975483, 0.0007505848188884556, 0.01684037409722805, 0.0036582136526703835, 0.003980859648436308, 0.012995674274861813, 0.007503615692257881, 0.012458820827305317, 0.011359826661646366, 0.014371516183018684, 0.02797398902475834, 0.863287091255188, 0.010688716545701027, 0.0025299994740635157, 0.005160559434443712, 0.0010393926640972495, 0.00014878937508910894, 0.00027449859771877527, 0.0004884011577814817, 0.0029376428574323654, 0.00018586385704111308, 0.000137324386741966, 8.075817459030077e-05, 4.270056524546817e-05], [0.003388076089322567, 0.0035107058938592672, 0.023033643141388893, 0.0016681203851476312, 0.010618109256029129, 0.11364465206861496, 0.034187231212854385, 0.05641891062259674, 0.08036863803863525, 0.22209250926971436, 0.038196928799152374, 0.059557490050792694, 0.21981456875801086, 0.04371517151594162, 0.06945909559726715, 0.0019293990917503834, 0.007228340022265911, 0.0021771772298961878, 0.003972719889134169, 0.0029431581497192383, 0.0012429279740899801, 0.00022870888642501086, 0.0002765447716228664, 0.0003271917812526226], [0.009100047871470451, 0.004869026131927967, 0.02600514143705368, 0.004665972199290991, 0.007558744866400957, 0.007576073054224253, 0.00584274809807539, 0.00186169205699116, 0.009815561585128307, 0.006318329833447933, 0.02656596153974533, 0.04127451404929161, 0.033253420144319534, 0.6530637741088867, 0.10224307328462601, 0.015790991485118866, 0.01051523070782423, 0.004328027367591858, 0.0028869081288576126, 0.002167114522308111, 0.009342803619801998, 0.009035307914018631, 0.0033307932317256927, 0.002588696079328656], [0.011584167368710041, 0.006078717764467001, 0.021693186834454536, 0.014575645327568054, 0.0077241333201527596, 0.005589890293776989, 0.01127054076641798, 
0.0026654282119125128, 0.008722683414816856, 0.0018870477797463536, 0.048725713044404984, 0.09420333057641983, 0.1911611109972, 0.1139817014336586, 0.38279011845588684, 0.016663504764437675, 0.017548007890582085, 0.000938229844905436, 0.005558133590966463, 0.0007742441375739872, 0.013211140409111977, 0.005708654411137104, 0.01163003034889698, 0.0053145745769143105], [0.0012153394054621458, 0.001359176472760737, 0.0007542706443928182, 0.002150654559955001, 0.0005657793954014778, 0.0011798992054536939, 0.0005548761691898108, 0.0019544477108865976, 0.0011903695994988084, 0.0014445931883528829, 0.0004446991952136159, 0.0029359720647335052, 0.0019513292936608195, 0.003010594053193927, 0.014901289716362953, 0.9431464672088623, 0.008194678463041782, 0.004358640871942043, 0.001755829551257193, 0.00027566339122131467, 0.00012257677735760808, 0.0012355047510936856, 0.0006585849332623184, 0.004638821817934513], [0.003343217307701707, 0.00478028878569603, 0.00404778216034174, 0.0022769742645323277, 0.0024967051576822996, 0.004289229866117239, 0.0024438060354441404, 0.0022266169544309378, 0.009650155901908875, 0.0073572127148509026, 0.0064128004014492035, 0.0030779296066612005, 0.04423045367002487, 0.07172122597694397, 0.16000990569591522, 0.2318580001592636, 0.35597580671310425, 0.04586192965507507, 0.025912905111908913, 0.0016524741658940911, 0.002033652039244771, 0.002309455769136548, 0.0022315029054880142, 0.003800018224865198], [0.00734944362193346, 0.001493290881626308, 0.01839984767138958, 0.0006816611276008189, 0.0006276469794102013, 0.001779831130988896, 0.0008916958468034863, 0.0008582869195379317, 0.00218074768781662, 0.001476787612773478, 0.0013172447215765715, 0.0005547496839426458, 0.0007462062640115619, 0.001112902769818902, 0.00893314741551876, 0.024412726983428, 0.00450280774384737, 0.8275958299636841, 0.030807146802544594, 0.023026149719953537, 0.016480350866913795, 0.01748368702828884, 0.0012069741496816278, 0.006080819759517908], [0.011490924283862114, 
0.003140907734632492, 0.005327205639332533, 0.0025130638387054205, 0.0035938944201916456, 0.010546942241489887, 0.0050694942474365234, 0.0005300916382111609, 0.015729855746030807, 0.010240698233246803, 0.008941774256527424, 0.0020996283274143934, 0.015885457396507263, 0.0008033456397242844, 0.019122730940580368, 0.027109429240226746, 0.0552828349173069, 0.1300658881664276, 0.6315604448318481, 0.009613344445824623, 0.023599136620759964, 0.004768868442624807, 0.0011875188210979104, 0.0017764940857887268], [0.006990671157836914, 0.0026265729684382677, 0.0019124229438602924, 0.0011628976790234447, 0.006881749257445335, 0.001874025329016149, 0.001935372012667358, 0.00043099973117932677, 0.0020564808510243893, 0.000994849018752575, 0.00168700166977942, 0.012490087188780308, 0.007427839562296867, 0.0026088557206094265, 0.0012413081713020802, 0.013032895512878895, 0.04197064787149429, 0.08287063241004944, 0.19570618867874146, 0.44204676151275635, 0.13319912552833557, 0.025699324905872345, 0.003690708428621292, 0.009462742134928703], [0.013073903508484364, 0.006006366573274136, 0.029932256788015366, 0.0044023022055625916, 0.005828989204019308, 0.00391788873821497, 0.003468069015070796, 0.00045580952428281307, 0.00637587858363986, 0.0041208951734006405, 0.01631280593574047, 0.004861446563154459, 0.018094493076205254, 0.001143645029515028, 0.019526610150933266, 0.0020215907134115696, 0.029767563566565514, 0.07545467466115952, 0.18686549365520477, 0.034367769956588745, 0.4800204038619995, 0.035746920853853226, 0.011251288466155529, 0.006982959806919098], [0.013183352537453175, 0.00606828648597002, 0.04371201992034912, 0.007869078777730465, 0.0028841558378189802, 0.002186036668717861, 0.007355420850217342, 0.002247971249744296, 0.0020242517348378897, 0.0011260116007179022, 0.00986594520509243, 0.020870525389909744, 0.008602458983659744, 0.0036604302003979683, 0.03817679360508919, 0.01614450477063656, 0.0014421300729736686, 0.013882307335734367, 0.044586192816495895, 
0.08810165524482727, 0.1558205932378769, 0.38856908679008484, 0.0663227066397667, 0.0552980937063694], [0.01182261761277914, 0.005532050505280495, 0.0023349046241492033, 0.0145005714148283, 0.010969232767820358, 0.0045503913424909115, 0.0156833715736866, 0.002326061250641942, 0.003351418301463127, 0.00014472100883722305, 0.0057787164114415646, 0.0016109752468764782, 0.020383767783641815, 0.0034720192197710276, 0.014797317795455456, 0.006515772547572851, 0.015139810740947723, 0.0017869712319225073, 0.05909935012459755, 0.011031294241547585, 0.10530183464288712, 0.0628022849559784, 0.5425258278846741, 0.07853870838880539], [0.015515835955739021, 0.013174076564610004, 0.038906529545784, 0.03927542269229889, 0.028824256733059883, 0.01972975954413414, 0.015503555536270142, 0.005663018673658371, 0.008894513361155987, 0.005356607027351856, 0.009984097443521023, 0.022106986492872238, 0.020820247009396553, 0.08228179067373276, 0.0543237030506134, 0.0978378877043724, 0.014303945004940033, 0.02373676188290119, 0.009728537872433662, 0.015604916960000992, 0.04863398149609566, 0.13385657966136932, 0.11942289024591446, 0.15651407837867737], [0.024747712537646294, 0.019691811874508858, 0.03579956293106079, 0.012804465368390083, 0.02101944573223591, 0.04395277053117752, 0.03141142055392265, 0.04332989826798439, 0.05580271780490875, 0.028985371813178062, 0.01768355630338192, 0.006139832083135843, 0.03557944670319557, 0.01738612726330757, 0.14919932186603546, 0.08379825204610825, 0.05807644501328468, 0.03176683932542801, 0.05261371657252312, 0.01302699837833643, 0.027522221207618713, 0.04884996637701988, 0.05832931026816368, 0.0824827328324318], [0.03188948333263397, 0.026720423251390457, 0.08058828115463257, 0.02020794153213501, 0.013519353233277798, 0.014530926011502743, 0.009145776741206646, 0.0063169607892632484, 0.03380216658115387, 0.03192969784140587, 0.026320764794945717, 0.011473853141069412, 0.0043532452546060085, 0.005488107446581125, 0.023783477023243904, 
0.07785624265670776, 0.014490040950477123, 0.07291986048221588, 0.026410076767206192, 0.027711618691682816, 0.07443947345018387, 0.10985586792230606, 0.08373779058456421, 0.1725085824728012]], [[0.010531526990234852, 0.019602179527282715, 0.08841779083013535, 0.037032730877399445, 0.02230132929980755, 0.012777971103787422, 0.02493879571557045, 0.03931030258536339, 0.11139558255672455, 0.011795501224696636, 0.04680943489074707, 0.07944482564926147, 0.12166284024715424, 0.016143502667546272, 0.11239403486251831, 0.025248493999242783, 0.012123683467507362, 0.020478829741477966, 0.041621532291173935, 0.015776516869664192, 0.049790360033512115, 0.021711552515625954, 0.02848081663250923, 0.03020990453660488], [0.09107287973165512, 0.05646840110421181, 0.056672628968954086, 0.06261498481035233, 0.1331772804260254, 0.03748919814825058, 0.0752907246351242, 0.058298129588365555, 0.048969972878694534, 0.022723032161593437, 0.03345705196261406, 0.026078278198838234, 0.029669668525457382, 0.017579367384314537, 0.029179390519857407, 0.020320482552051544, 0.0358562134206295, 0.018897319212555885, 0.04285752773284912, 0.037645164877176285, 0.025379996746778488, 0.008091241121292114, 0.020849816501140594, 0.011361290700733662], [0.027100998908281326, 0.024277452379465103, 0.12756501138210297, 0.014512203633785248, 0.040391962975263596, 0.021453579887747765, 0.03129350021481514, 0.021774310618638992, 0.09852132946252823, 0.019327852874994278, 0.05602674558758736, 0.025359565392136574, 0.06845852732658386, 0.016363004222512245, 0.12505587935447693, 0.01503444742411375, 0.026195110753178596, 0.023106055334210396, 0.04574427753686905, 0.011137370951473713, 0.062048133462667465, 0.017781509086489677, 0.05625757575035095, 0.02521354705095291], [0.015192708931863308, 0.017062809318304062, 0.0955146998167038, 0.10280724614858627, 0.16170735657215118, 0.03632630035281181, 0.05284767970442772, 0.041365768760442734, 0.10851401090621948, 0.005106489639729261, 0.004022706300020218, 
0.04902193322777748, 0.07050826400518417, 0.008316758088767529, 0.03671417757868767, 0.05674281716346741, 0.0026467889547348022, 0.042010147124528885, 0.024116693064570427, 0.012557274661958218, 0.023653516545891762, 0.012767738662660122, 0.003411057638004422, 0.017065027728676796], [0.02554117515683174, 0.024343475699424744, 0.25670525431632996, 0.08728709071874619, 0.018707184121012688, 0.05389879643917084, 0.051122721284627914, 0.03279249370098114, 0.15766099095344543, 0.006754433736205101, 0.024940723553299904, 0.005427863914519548, 0.014601606875658035, 0.005303957499563694, 0.090137779712677, 0.01538288313895464, 0.002644820138812065, 0.017432652413845062, 0.016267919912934303, 0.008075220510363579, 0.0363730750977993, 0.009316151961684227, 0.031199341639876366, 0.008082353509962559], [0.02892460860311985, 0.02538408897817135, 0.04090559482574463, 0.2583002746105194, 0.05109727382659912, 0.020490026101469994, 0.07087023556232452, 0.07928856462240219, 0.0474201962351799, 0.03375257924199104, 0.022975722327828407, 0.03662557527422905, 0.028735091909766197, 0.017054539173841476, 0.025400785729289055, 0.0935787633061409, 0.00967460684478283, 0.03283298760652542, 0.014404678717255592, 0.01833713985979557, 0.012566547840833664, 0.013914409093558788, 0.0055024875327944756, 0.011963201686739922], [0.01672358624637127, 0.016648368909955025, 0.17659227550029755, 0.10735438764095306, 0.02402419224381447, 0.028576387092471123, 0.024078086018562317, 0.02651640959084034, 0.17072607576847076, 0.007853376679122448, 0.021970828995108604, 0.01735406368970871, 0.07698407024145126, 0.0077188825234770775, 0.1148025318980217, 0.04448646679520607, 0.003053272608667612, 0.019689468666911125, 0.014103487133979797, 0.006655941717326641, 0.04205821827054024, 0.008275188505649567, 0.01151941902935505, 0.012234942987561226], [0.010125458240509033, 0.0057203564792871475, 0.06247415766119957, 0.01680104434490204, 0.002499884692952037, 0.012820570729672909, 0.015669547021389008, 
0.016333485022187233, 0.16490879654884338, 0.025744741782546043, 0.01498015969991684, 0.05782865360379219, 0.06625119596719742, 0.025835897773504257, 0.0842699185013771, 0.030722014605998993, 0.006282973103225231, 0.03143816813826561, 0.024825988337397575, 0.01024511456489563, 0.08686821162700653, 0.13127140700817108, 0.030986346304416656, 0.06509587913751602], [0.005220601800829172, 0.00683791097253561, 0.11335619539022446, 0.07934043556451797, 0.04476797208189964, 0.03632371872663498, 0.02198983170092106, 0.03791114687919617, 0.15600642561912537, 0.016504965722560883, 0.033827442675828934, 0.03250958397984505, 0.06954056024551392, 0.011526164598762989, 0.12125390022993088, 0.03284606337547302, 0.010949593968689442, 0.03419739753007889, 0.014474114403128624, 0.004932331386953592, 0.05132247880101204, 0.016415497288107872, 0.02096695825457573, 0.026978710666298866], [0.00495510920882225, 0.0030511373188346624, 0.010672098957002163, 0.021704526618123055, 0.007296880707144737, 0.032489314675331116, 0.014065166004002094, 0.03974407538771629, 0.06525792181491852, 0.04588739573955536, 0.016335759311914444, 0.1918850839138031, 0.12217096239328384, 0.06094419211149216, 0.03329683840274811, 0.09702205657958984, 0.006776357535272837, 0.01645166054368019, 0.006810489110648632, 0.0105079161003232, 0.025855017825961113, 0.04558461159467697, 0.009189853444695473, 0.11204554885625839], [0.015777481719851494, 0.005973454099148512, 0.05042113736271858, 0.013338776305317879, 0.015991032123565674, 0.019385922700166702, 0.01818985491991043, 0.013222143054008484, 0.17958548665046692, 0.023107966408133507, 0.0620894581079483, 0.057325731962919235, 0.14160515367984772, 0.01348297018557787, 0.09630391746759415, 0.018164874985814095, 0.013941595330834389, 0.014462944120168686, 0.02057665027678013, 0.005865307990461588, 0.09220701456069946, 0.027405375614762306, 0.03771493211388588, 0.04386083409190178], [0.0059347692877054214, 0.002169274492189288, 0.02442353218793869, 
0.005105071235448122, 0.008517829701304436, 0.01357704121619463, 0.007541060447692871, 0.01877766102552414, 0.05594496428966522, 0.019414585083723068, 0.022470872849225998, 0.18003717064857483, 0.20940105617046356, 0.01638488844037056, 0.08413943648338318, 0.022749653086066246, 0.012573403306305408, 0.01803755946457386, 0.013411230407655239, 0.009064804762601852, 0.04114478826522827, 0.033942148089408875, 0.029468825086951256, 0.1457684189081192], [0.004461625125259161, 0.0032840485218912363, 0.03733060136437416, 0.004671450238674879, 0.00597093440592289, 0.01601041853427887, 0.005658282898366451, 0.008486696518957615, 0.08877697587013245, 0.009617163799703121, 0.030737122520804405, 0.05757156386971474, 0.2000092715024948, 0.01956353522837162, 0.1567506492137909, 0.013371752575039864, 0.007750583812594414, 0.011168958619236946, 0.011490728706121445, 0.005886377301067114, 0.07999221980571747, 0.032086338847875595, 0.08333182334899902, 0.10602088272571564], [0.020906977355480194, 0.0060279835015535355, 0.013332054018974304, 0.028252746909856796, 0.06268561631441116, 0.023212039843201637, 0.0187741219997406, 0.051780816167593, 0.017184602096676826, 0.01653473637998104, 0.017393579706549644, 0.08504379540681839, 0.06049006059765816, 0.030779723078012466, 0.027861226350069046, 0.05359398573637009, 0.03377198427915573, 0.0678040087223053, 0.04255397617816925, 0.08433477580547333, 0.031876422464847565, 0.06397878378629684, 0.04018282890319824, 0.10164305567741394], [0.01592230796813965, 0.00629850197583437, 0.02597089111804962, 0.009256025776267052, 0.02428458444774151, 0.019638504832983017, 0.01552597340196371, 0.014341834932565689, 0.046327851712703705, 0.012861036695539951, 0.042992718517780304, 0.018955355510115623, 0.04385416582226753, 0.02253143861889839, 0.0716967061161995, 0.022604813799262047, 0.033258307725191116, 0.0237027145922184, 0.04302069544792175, 0.02974248118698597, 0.0959896370768547, 0.07053100317716599, 0.19488760828971863, 0.09580481052398682], 
[0.00847064983099699, 0.006904810667037964, 0.02086762711405754, 0.00901790615171194, 0.006257228087633848, 0.01280138548463583, 0.008472996763885021, 0.016266807913780212, 0.027890782803297043, 0.009543756023049355, 0.01591223105788231, 0.038195572793483734, 0.04284412041306496, 0.05074593797326088, 0.07687431573867798, 0.06524747610092163, 0.024205826222896576, 0.07884097844362259, 0.048226505517959595, 0.04678455740213394, 0.0581151582300663, 0.14388807117938995, 0.08494109660387039, 0.09868421405553818], [0.01611669361591339, 0.009645499289035797, 0.028543882071971893, 0.00736713781952858, 0.01063117291778326, 0.017711685970425606, 0.02237863838672638, 0.008993362076580524, 0.03603619709610939, 0.002139675198122859, 0.032484885305166245, 0.0029765376821160316, 0.011825061403214931, 0.00994242262095213, 0.05761949345469475, 0.010797183960676193, 0.022112147882580757, 0.015945695340633392, 0.052825264632701874, 0.021995004266500473, 0.08384591341018677, 0.031455520540475845, 0.44158676266670227, 0.04502410814166069], [0.025528335943818092, 0.017217446118593216, 0.025154590606689453, 0.014226487837731838, 0.02233121357858181, 0.019917288795113564, 0.01981324888765812, 0.03207007795572281, 0.023052100092172623, 0.014220085926353931, 0.049131669104099274, 0.014305731281638145, 0.014165752567350864, 0.054245904088020325, 0.039867185056209564, 0.030592134222388268, 0.07810661196708679, 0.060893964022397995, 0.039130765944719315, 0.07456635683774948, 0.041463468223810196, 0.03911778703331947, 0.18890078365802765, 0.061980973929166794], [0.012562121264636517, 0.009086056612432003, 0.02131493203341961, 0.005345901474356651, 0.009169238619506359, 0.017327426001429558, 0.005232313647866249, 0.004411157686263323, 0.032203588634729385, 0.0015331243630498648, 0.03662877902388573, 0.003366172080859542, 0.01867706887423992, 0.011784454807639122, 0.05513821169734001, 0.00917837955057621, 0.03466200828552246, 0.023982780054211617, 0.032635971903800964, 0.020137373358011246, 
0.10618048161268234, 0.01760380156338215, 0.47642529010772705, 0.035413309931755066], [0.016405461356043816, 0.007659297436475754, 0.02712409198284149, 0.006304378621280193, 0.0056149628944695, 0.014346510171890259, 0.00730314152315259, 0.007965298369526863, 0.04032185301184654, 0.00508722523227334, 0.02319113165140152, 0.008186849765479565, 0.016591345891356468, 0.015665438026189804, 0.056287411600351334, 0.014865965582430363, 0.031662534922361374, 0.04435133561491966, 0.04795730113983154, 0.034439150243997574, 0.09476902335882187, 0.08577712625265121, 0.33505749702453613, 0.05306565389037132], [0.015602333471179008, 0.01007692888379097, 0.025736317038536072, 0.006918812170624733, 0.01986958645284176, 0.016172433272004128, 0.006359036546200514, 0.008256674744188786, 0.01596459373831749, 0.003838881151750684, 0.05109727010130882, 0.004332309123128653, 0.011032868176698685, 0.00961657427251339, 0.06463440507650375, 0.008246154524385929, 0.08880071341991425, 0.03879059478640556, 0.04057752713561058, 0.023318663239479065, 0.06231819465756416, 0.03263716772198677, 0.40521734952926636, 0.0305845495313406], [0.01274376455694437, 0.013432069681584835, 0.019972078502178192, 0.00846666656434536, 0.011865893378853798, 0.04281618446111679, 0.01032815407961607, 0.024133311584591866, 0.0217044148594141, 0.012778007425367832, 0.03637619689106941, 0.009235655888915062, 0.012518465518951416, 0.049687668681144714, 0.06345347315073013, 0.024815939366817474, 0.04019223526120186, 0.0230789165943861, 0.02379082329571247, 0.07772190123796463, 0.040525954216718674, 0.05857323855161667, 0.295856773853302, 0.06593216210603714], [0.046117156744003296, 0.04767489433288574, 0.12267673760652542, 0.014650861732661724, 0.035408005118370056, 0.036766115576028824, 0.04803536459803581, 0.023735912516713142, 0.062226392328739166, 0.007544384803622961, 0.08542648702859879, 0.0032084693666547537, 0.0083073191344738, 0.009413506835699081, 0.09028310328722, 0.005692929495126009, 0.03436102718114853, 
0.012954415753483772, 0.029598383232951164, 0.02684175595641136, 0.044189102947711945, 0.009094077162444592, 0.1859622299671173, 0.009831459261476994], [0.01690184697508812, 0.0231503713876009, 0.10260387510061264, 0.007307597901672125, 0.015762802213430405, 0.04726281017065048, 0.02404550276696682, 0.07028497010469437, 0.05784686282277107, 0.016059063374996185, 0.07269410789012909, 0.015315031632781029, 0.02029634639620781, 0.01757919415831566, 0.18805617094039917, 0.009743082337081432, 0.02203679271042347, 0.012205064296722412, 0.012634129263460636, 0.04611274600028992, 0.02376023679971695, 0.013967865146696568, 0.13558413088321686, 0.028789479285478592]], [[0.022232145071029663, 0.01062980480492115, 0.0427093580365181, 0.026409123092889786, 0.015185973607003689, 0.06335382908582687, 0.028223123401403427, 0.08465839177370071, 0.1333189159631729, 0.02835019864141941, 0.0367516465485096, 0.08620656281709671, 0.06861495971679688, 0.01718197949230671, 0.027358027175068855, 0.01612197607755661, 0.005368147976696491, 0.015192116610705853, 0.011895607225596905, 0.029000096023082733, 0.04897037148475647, 0.04125967249274254, 0.057015229016542435, 0.08399269729852676], [0.04605935513973236, 0.02714066579937935, 0.08568768948316574, 0.07394775748252869, 0.02149832807481289, 0.04623260349035263, 0.05403025075793266, 0.028021620586514473, 0.06357923150062561, 0.05704623460769653, 0.042132578790187836, 0.05599578842520714, 0.046413905918598175, 0.014321858063340187, 0.0285051092505455, 0.02590985968708992, 0.011829100549221039, 0.03059675171971321, 0.03556717187166214, 0.020373636856675148, 0.037716370075941086, 0.05018553510308266, 0.048910293728113174, 0.04829828441143036], [0.006562103983014822, 0.005991069599986076, 0.11960314959287643, 0.013786903582513332, 0.01840001903474331, 0.015337967313826084, 0.02925133891403675, 0.020003436133265495, 0.12108425050973892, 0.03403715044260025, 0.17547444999217987, 0.0628310814499855, 0.05005206912755966, 0.015323299914598465, 
0.09292525053024292, 0.008954423479735851, 0.012621757574379444, 0.01321529969573021, 0.04782063141465187, 0.01862826570868492, 0.03924105688929558, 0.015936672687530518, 0.048419419676065445, 0.014498880133032799], [0.007644977420568466, 0.00403391569852829, 0.09457482397556305, 0.015889683738350868, 0.0023261725436896086, 0.057230569422245026, 0.024223681539297104, 0.012926708906888962, 0.14202940464019775, 0.058687444776296616, 0.23836424946784973, 0.0970849022269249, 0.04603094980120659, 0.01682271435856819, 0.08129315078258514, 0.011469002813100815, 0.0014489946188405156, 0.012066050432622433, 0.007888739928603172, 0.004262836184352636, 0.016835270449519157, 0.013497618958353996, 0.023817114531993866, 0.009550920687615871], [0.0044908965937793255, 0.010642382316291332, 0.25546956062316895, 0.02155541069805622, 0.018520815297961235, 0.015112289227545261, 0.08636286109685898, 0.06150420010089874, 0.08248322457075119, 0.06976691633462906, 0.06378433108329773, 0.04083798825740814, 0.029079219326376915, 0.005119931418448687, 0.12284580618143082, 0.01066588144749403, 0.008552263490855694, 0.010390742681920528, 0.03444647789001465, 0.005506466142833233, 0.00800994224846363, 0.012175479903817177, 0.01434908714145422, 0.00832786038517952], [0.062078483402729034, 0.03229597210884094, 0.07528489828109741, 0.0879492536187172, 0.003402107860893011, 0.04799828305840492, 0.024746054783463478, 0.006296214647591114, 0.17921221256256104, 0.06479880213737488, 0.061691273003816605, 0.10614606738090515, 0.05950305238366127, 0.029054660350084305, 0.0243851225823164, 0.017573487013578415, 0.0030311529990285635, 0.02004922181367874, 0.011629197746515274, 0.006735712755471468, 0.032596927136182785, 0.014988220296800137, 0.01977686770260334, 0.008776752278208733], [0.020678309723734856, 0.02708139829337597, 0.36216476559638977, 0.06561736017465591, 0.05258515104651451, 0.007662664167582989, 0.04132867604494095, 0.020599735900759697, 0.03756646811962128, 0.019184978678822517, 
0.03889746591448784, 0.024788236245512962, 0.028305601328611374, 0.009420580230653286, 0.04977695643901825, 0.018197819590568542, 0.02957482822239399, 0.01055977214127779, 0.02731766737997532, 0.022169729694724083, 0.02594459243118763, 0.014372692443430424, 0.03411083295941353, 0.012093712575733662], [0.004749135114252567, 0.0030205855146050453, 0.14164234697818756, 0.007076209411025047, 0.0026248469948768616, 0.019181782379746437, 0.020866278558969498, 0.017464490607380867, 0.07516779005527496, 0.14637890458106995, 0.138546884059906, 0.09971652179956436, 0.07554621994495392, 0.006532686296850443, 0.10487710684537888, 0.005439234897494316, 0.005557992495596409, 0.014311911538243294, 0.022645941004157066, 0.009727642871439457, 0.01605871133506298, 0.03171028569340706, 0.017158837988972664, 0.013997595757246017], [0.008019831962883472, 0.010166003368794918, 0.23824934661388397, 0.04338764771819115, 0.007494428660720587, 0.02735130861401558, 0.029201185330748558, 0.018373752012848854, 0.06265810877084732, 0.035654179751873016, 0.15770113468170166, 0.0781986191868782, 0.044825222343206406, 0.020765112712979317, 0.102704256772995, 0.017110003158450127, 0.003410805482417345, 0.00992024876177311, 0.014691620133817196, 0.005010335240513086, 0.012924134731292725, 0.01511572115123272, 0.022954842075705528, 0.014112171716988087], [0.005498736165463924, 0.007137062028050423, 0.2402637004852295, 0.025568393990397453, 0.006262998096644878, 0.03539254143834114, 0.032386112958192825, 0.08171817660331726, 0.09010078012943268, 0.07838865369558334, 0.09040220826864243, 0.061216846108436584, 0.02582276239991188, 0.019544528797268867, 0.09192690253257751, 0.009321313351392746, 0.0029892930760979652, 0.022340765222907066, 0.018283428624272346, 0.02024298720061779, 0.013358947820961475, 0.012227911502122879, 0.006884999573230743, 0.0027200165204703808], [0.019304392859339714, 0.02324908785521984, 0.17669455707073212, 0.042235519737005234, 0.011499679647386074, 0.026009034365415573, 
0.04424202814698219, 0.02700442261993885, 0.05990198627114296, 0.04776803404092789, 0.10343653708696365, 0.06363728642463684, 0.03588046133518219, 0.03472528234124184, 0.08701489120721817, 0.021221669390797615, 0.016232917085289955, 0.028756819665431976, 0.04842947795987129, 0.024887513369321823, 0.018037209287285805, 0.009878590703010559, 0.018928859382867813, 0.011023728176951408], [0.007912960834801197, 0.012818200513720512, 0.07662022113800049, 0.00987508799880743, 0.01822456158697605, 0.03357509896159172, 0.025066684931516647, 0.04223566874861717, 0.03244994208216667, 0.03636223450303078, 0.12631440162658691, 0.06014446169137955, 0.051211997866630554, 0.028635574504733086, 0.210327610373497, 0.021933820098638535, 0.023735342547297478, 0.04276654124259949, 0.026396960020065308, 0.02015010453760624, 0.013238775543868542, 0.021475784480571747, 0.038019951432943344, 0.020507941022515297], [0.006512368097901344, 0.01279484760016203, 0.11563064903020859, 0.01228225976228714, 0.03244277834892273, 0.037376768887043, 0.029949752613902092, 0.06583954393863678, 0.030323926359415054, 0.01465710811316967, 0.08006372302770615, 0.053588904440402985, 0.05878344550728798, 0.020320750772953033, 0.19064053893089294, 0.02109389379620552, 0.024312833324074745, 0.03205680474638939, 0.02106671966612339, 0.019521988928318024, 0.01256392989307642, 0.013130915351212025, 0.046807099133729935, 0.04823843389749527], [0.0024602171033620834, 0.0031007141806185246, 0.34375059604644775, 0.012909884564578533, 0.02082723006606102, 0.017355147749185562, 0.017906207591295242, 0.08431114256381989, 0.07882934808731079, 0.01759813167154789, 0.06501106172800064, 0.05771530419588089, 0.042736250907182693, 0.006717446725815535, 0.14304903149604797, 0.008390926755964756, 0.005662080831825733, 0.008239359594881535, 0.007364357355982065, 0.008578399196267128, 0.009219350293278694, 0.00831923820078373, 0.017424996942281723, 0.012523526325821877], [0.0012917127460241318, 0.0013362891040742397, 
0.0544942244887352, 0.004389537964016199, 0.029290398582816124, 0.027551233768463135, 0.009362081065773964, 0.03858792409300804, 0.05336175113916397, 0.014794173650443554, 0.14313609898090363, 0.10128972679376602, 0.12993048131465912, 0.025666071102023125, 0.17281146347522736, 0.008501467294991016, 0.02602524682879448, 0.024580707773566246, 0.016302919015288353, 0.027372704818844795, 0.022997912019491196, 0.007750502787530422, 0.024842891842126846, 0.03433242812752724], [0.0010777448769658804, 0.0010901422938331962, 0.12376166880130768, 0.008518008515238762, 0.012559878639876842, 0.03557449206709862, 0.010085714049637318, 0.0718720331788063, 0.09865641593933105, 0.024915190413594246, 0.23984608054161072, 0.08538675308227539, 0.040884554386138916, 0.013681965880095959, 0.16458465158939362, 0.011914282105863094, 0.0036258078180253506, 0.011332998052239418, 0.005286132916808128, 0.006987551227211952, 0.009607438929378986, 0.00545347249135375, 0.00772693008184433, 0.005570220295339823], [0.0016492678551003337, 0.0017853631870821118, 0.07240227609872818, 0.005085534881800413, 0.026983045041561127, 0.02898513711988926, 0.015510768629610538, 0.07652619481086731, 0.11088354885578156, 0.027655556797981262, 0.09414764493703842, 0.0569772906601429, 0.07987053692340851, 0.013982265256345272, 0.2550395429134369, 0.009284872561693192, 0.01703396439552307, 0.02318720705807209, 0.019820690155029297, 0.010970895178616047, 0.018472149968147278, 0.009259033016860485, 0.011596642434597015, 0.012890603393316269], [0.005249433685094118, 0.003377513960003853, 0.06768320500850677, 0.009803984314203262, 0.023531217128038406, 0.05993345379829407, 0.014481565915048122, 0.08718852698802948, 0.14484034478664398, 0.025013351812958717, 0.09244637191295624, 0.0690622553229332, 0.0750509575009346, 0.03432422876358032, 0.14499938488006592, 0.017494549974799156, 0.01636146567761898, 0.014689779840409756, 0.007238597143441439, 0.010104740038514137, 0.027460094541311264, 0.012851793318986893, 
0.02041114680469036, 0.016402091830968857], [0.002017578575760126, 0.003935160581022501, 0.11503592878580093, 0.014208463951945305, 0.21349339187145233, 0.011301184073090553, 0.01564738154411316, 0.08355855196714401, 0.03586454689502716, 0.007733624428510666, 0.03269859030842781, 0.018459377810359, 0.03975202143192291, 0.010294144973158836, 0.15471971035003662, 0.020963186398148537, 0.09024032205343246, 0.01009163074195385, 0.01077589113265276, 0.011536028236150742, 0.028829263523221016, 0.016202501952648163, 0.028539059683680534, 0.02410244755446911], [0.0011040962999686599, 0.001262314384803176, 0.08454131335020065, 0.0028347305487841368, 0.01924767717719078, 0.014688441529870033, 0.021230574697256088, 0.0889568105340004, 0.06573604047298431, 0.03600262850522995, 0.08608690649271011, 0.05110006406903267, 0.07166630029678345, 0.006416788790374994, 0.29718491435050964, 0.00737447664141655, 0.016643116250634193, 0.009553897194564342, 0.012211090885102749, 0.008395210839807987, 0.016616493463516235, 0.024087322875857353, 0.02605043724179268, 0.031008396297693253], [0.006093372590839863, 0.009890624321997166, 0.0769159346818924, 0.011087669059634209, 0.0655049979686737, 0.02656317502260208, 0.032568782567977905, 0.07726182788610458, 0.06704995781183243, 0.016901139169931412, 0.08415454626083374, 0.03944366052746773, 0.06416100263595581, 0.02074768953025341, 0.13221915066242218, 0.010215569287538528, 0.021629175171256065, 0.015393850393593311, 0.025334177538752556, 0.019363220781087875, 0.031802691519260406, 0.02253437414765358, 0.06876100599765778, 0.054402489215135574], [0.0022472827695310116, 0.0037771877832710743, 0.06159811466932297, 0.006160805933177471, 0.046493858098983765, 0.017783425748348236, 0.018143638968467712, 0.10689759254455566, 0.048000793904066086, 0.027186982333660126, 0.13095080852508545, 0.05002017691731453, 0.05143914744257927, 0.01712241768836975, 0.1980578750371933, 0.00751508167013526, 0.022039487957954407, 0.018279146403074265, 
0.02089069038629532, 0.051694534718990326, 0.027174144983291626, 0.0163717158138752, 0.031807493418455124, 0.01834765635430813], [0.009132573381066322, 0.009978665970265865, 0.07491440325975418, 0.014692127704620361, 0.011223693378269672, 0.01429725717753172, 0.021986093372106552, 0.016420913860201836, 0.06383524090051651, 0.0523751936852932, 0.1162029579281807, 0.08356600999832153, 0.06280887126922607, 0.022298619151115417, 0.08172640949487686, 0.01139131747186184, 0.03117205947637558, 0.04461796581745148, 0.08980110287666321, 0.05501917377114296, 0.03817128390073776, 0.0166509710252285, 0.029975995421409607, 0.027741096913814545], [0.0035281002055853605, 0.004181285388767719, 0.04986373707652092, 0.006977716460824013, 0.025892453268170357, 0.013137648813426495, 0.0145995132625103, 0.03577357903122902, 0.01776873506605625, 0.03154610097408295, 0.08175810426473618, 0.09038738161325455, 0.09322593361139297, 0.013671455904841423, 0.11224103718996048, 0.01931108348071575, 0.0611027255654335, 0.050593286752700806, 0.058033984154462814, 0.06730414927005768, 0.022344067692756653, 0.02797814831137657, 0.037902671843767166, 0.06087709590792656]], [[0.0029304891359061003, 0.008953476324677467, 0.2793901860713959, 0.03383907303214073, 0.32548758387565613, 0.1024077832698822, 0.013802197761833668, 0.03311879187822342, 0.026686809957027435, 0.018491676077246666, 0.007740766275674105, 0.015451361425220966, 0.02045990526676178, 0.009562094695866108, 0.013407662510871887, 0.005806176923215389, 0.013729949481785297, 0.0019608167931437492, 0.0031762518920004368, 0.011444443836808205, 0.010528219863772392, 0.013288582675158978, 0.01691826619207859, 0.011417336761951447], [0.003510013921186328, 0.019926799461245537, 0.3349233865737915, 0.0534987598657608, 0.2859921157360077, 0.06974251568317413, 0.023745490238070488, 0.013066809624433517, 0.023091400042176247, 0.024180367588996887, 0.022143861278891563, 0.01720651611685753, 0.013759150169789791, 0.01899315044283867, 
0.006581311579793692, 0.008467662148177624, 0.0205838643014431, 0.002686494728550315, 0.006670236587524414, 0.005231661256402731, 0.004047771915793419, 0.008592582307755947, 0.009715458378195763, 0.0036426750011742115], [0.0021351375617086887, 0.002322245156392455, 0.672610878944397, 0.00647863419726491, 0.09752721339464188, 0.17250196635723114, 0.00234602321870625, 0.006254278123378754, 0.004195005167275667, 0.002125231781974435, 0.006168851628899574, 0.005771205760538578, 0.0015914830146357417, 0.0011178788263350725, 0.0023395505268126726, 0.0006744691054336727, 0.0011618990683928132, 0.0006829042104072869, 0.00012729191803373396, 0.0010766413761302829, 0.0008138494449667633, 0.0014700175961479545, 0.006435515824705362, 0.0020717910956591368], [0.019215084612369537, 0.028973419219255447, 0.6491565704345703, 0.013187752105295658, 0.02330949157476425, 0.014132421463727951, 0.012739225290715694, 0.028091154992580414, 0.047289226204156876, 0.010563221760094166, 0.007804378401488066, 0.01559489592909813, 0.020424215123057365, 0.007268925663083792, 0.011395568028092384, 0.006334890145808458, 0.004485463723540306, 0.0019867313094437122, 0.003814364317804575, 0.007913796231150627, 0.02628060057759285, 0.008384042419493198, 0.009974386543035507, 0.021680140867829323], [2.5185565391439013e-05, 1.9936005628551356e-05, 0.9980103373527527, 1.7277065126108937e-05, 3.835369716398418e-05, 5.8704583352664486e-05, 3.739552266779356e-05, 2.0080507965758443e-05, 0.0009666724945418537, 2.950049292849144e-06, 0.00012111943942727521, 6.720927103742724e-06, 2.3084876374923624e-05, 1.4402889974007849e-06, 4.668928886530921e-05, 4.9031482376449276e-06, 1.6953507611106033e-06, 3.6641006317950087e-07, 9.343282727058977e-06, 2.7167202460987028e-06, 0.0003944068739656359, 3.575280061340891e-06, 0.00017578277038410306, 1.1123053809569683e-05], [0.00438398402184248, 0.003903312375769019, 0.9442117810249329, 0.008657003752887249, 0.002919434104114771, 0.003088211640715599, 0.007836215198040009, 
0.002486646408215165, 0.009978881105780602, 0.0019500487251207232, 0.0007782948669046164, 0.0003160043270327151, 0.0005271218251436949, 0.00014472728071268648, 0.00021622126223519444, 0.0003399497363716364, 6.19418133283034e-05, 7.387703226413578e-05, 0.0004377971345093101, 0.0003772165218833834, 0.0032276995480060577, 0.001324513228610158, 0.00174643041100353, 0.0010124711552634835], [0.0024856426753103733, 0.001436402671970427, 0.9430878758430481, 0.003912855871021748, 0.022420957684516907, 0.008815121836960316, 0.0043364232406020164, 0.0029753490816801786, 0.0019397798459976912, 0.0008663616026751697, 0.000804332026746124, 0.0007793845725245774, 0.0004328500363044441, 0.000284601585008204, 0.0008535137749277055, 0.0002900463587138802, 0.0002642290201038122, 6.73876129440032e-05, 0.0001597385562490672, 0.00028361723525449634, 0.0006981759215705097, 0.0006330151809379458, 0.001616830937564373, 0.0005556272226385772], [0.039217106997966766, 0.052304141223430634, 0.3652294874191284, 0.10176534950733185, 0.06083134189248085, 0.046540215611457825, 0.050798751413822174, 0.13059888780117035, 0.02594105340540409, 0.03333931416273117, 0.0012705517001450062, 0.010495511814951897, 0.007425510790199041, 0.011024989187717438, 0.0027998813893646, 0.00879198219627142, 0.000517148117069155, 0.006709571927785873, 0.0010177789954468608, 0.01255449466407299, 0.002079723170027137, 0.006358571350574493, 0.002244234085083008, 0.020144324749708176], [0.01820007711648941, 0.013166580349206924, 0.5704882144927979, 0.012148047797381878, 0.005513601005077362, 0.0043854122050106525, 0.14741568267345428, 0.07019872218370438, 0.054363057017326355, 0.006854628212749958, 0.04788986220955849, 0.0019421122269704938, 0.0023337171878665686, 0.0022124627139419317, 0.012903043068945408, 0.0037536576855927706, 0.00036333949537947774, 0.0011952221393585205, 0.0011847029672935605, 0.0009017193224281073, 0.005000599659979343, 0.0011399115901440382, 0.015227947384119034, 0.0012176607269793749], 
[0.0019440415780991316, 0.0009523846092633903, 0.9303693175315857, 0.007728490978479385, 0.0070729805156588554, 0.005092701409012079, 0.009260229766368866, 0.02306412346661091, 0.004836163017898798, 0.0021495164837688208, 0.00046844425378367305, 0.001282984740100801, 0.0011199663858860731, 0.0001010784981190227, 0.0009353129426017404, 0.0003551281406544149, 3.698304499266669e-05, 7.724691386101767e-05, 4.772306783706881e-05, 0.00026686314959079027, 0.00043594822636805475, 0.0004611280746757984, 0.0006005847244523466, 0.001340704271569848], [0.014954338781535625, 0.010558456182479858, 0.15442749857902527, 0.11820007115602493, 0.0035705198533833027, 0.006079946644604206, 0.07901143282651901, 0.3264351487159729, 0.1286155730485916, 0.08539383858442307, 0.0022268416360020638, 0.015448097139596939, 0.012606265023350716, 0.0035613514482975006, 0.010842693038284779, 0.01674688048660755, 0.00021382153499871492, 0.0023700897581875324, 0.0003272466128692031, 0.0012477334821596742, 0.002083443570882082, 0.001255964394658804, 0.00019037550373468548, 0.0036323859822005033], [0.002262198133394122, 0.006412186194211245, 0.1056530699133873, 0.08466164767742157, 0.004999485332518816, 0.04912619665265083, 0.0070892078801989555, 0.128708153963089, 0.270058810710907, 0.05827532336115837, 0.022052349522709846, 0.09733182936906815, 0.02457568235695362, 0.011861568316817284, 0.026033207774162292, 0.043913304805755615, 0.0003606485261116177, 0.03698848560452461, 0.0005479915416799486, 0.0031211217865347862, 0.003099855501204729, 0.0012608608230948448, 0.0012350027682259679, 0.010371755808591843], [0.004455339629203081, 0.0077650765888392925, 0.1761852502822876, 0.032220564782619476, 0.001748913899064064, 0.008568903431296349, 0.005430165678262711, 0.041403476148843765, 0.3815901577472687, 0.019793279469013214, 0.08090049773454666, 0.05146541818976402, 0.05076082795858383, 0.010510865598917007, 0.0530376136302948, 0.026015209034085274, 0.0007259220001287758, 0.01111368928104639, 
0.0020137690007686615, 0.0030662519857287407, 0.021049270406365395, 0.0020937789231538773, 0.003575572744011879, 0.004510162398219109], [0.007235214579850435, 0.007754152175039053, 0.34539029002189636, 0.040331315249204636, 0.02888382598757744, 0.15279345214366913, 0.009374875575304031, 0.03452660143375397, 0.049908362329006195, 0.01641807332634926, 0.1964532732963562, 0.0385366827249527, 0.014044860377907753, 0.009772485122084618, 0.015848837792873383, 0.011798612773418427, 0.002714748028665781, 0.005448779556900263, 0.0007664341246709228, 0.0016885697841644287, 0.0020497054792940617, 0.0005304106161929667, 0.006724389735609293, 0.0010060155764222145], [0.03975763916969299, 0.022105496376752853, 0.06577277928590775, 0.06402063369750977, 0.0008611080702394247, 0.010693411342799664, 0.005290708038955927, 0.05578169599175453, 0.13408559560775757, 0.052176494151353836, 0.01660853996872902, 0.05173340439796448, 0.09399112313985825, 0.04529272019863129, 0.12753647565841675, 0.06276021897792816, 0.0021767145954072475, 0.030372964218258858, 0.005577677395194769, 0.03082399070262909, 0.05618174374103546, 0.01237279362976551, 0.002426740014925599, 0.011599410325288773], [0.0081217335537076, 0.010824103839695454, 0.006884838454425335, 0.006125963758677244, 0.0018650845158845186, 0.012912891805171967, 0.0013067316031083465, 0.052374228835105896, 0.0510135218501091, 0.006657651625573635, 0.06850121915340424, 0.1408419907093048, 0.06266388297080994, 0.06789495795965195, 0.3138241469860077, 0.07000277191400528, 0.005635259207338095, 0.0553089939057827, 0.0020054751075804234, 0.020299965515732765, 0.011736118234694004, 0.0019367823842912912, 0.005157758481800556, 0.01610392890870571], [0.002482261275872588, 0.0027707619592547417, 0.3199738562107086, 0.0005683166091330349, 0.00014687224756926298, 0.0007267958717420697, 0.0010548433056101203, 0.004477460868656635, 0.183846578001976, 0.0005978619446977973, 0.022658545523881912, 0.007029500789940357, 0.06026327610015869, 
0.005902586504817009, 0.21251218020915985, 0.005982781760394573, 0.0007198494859039783, 0.0009342337143607438, 0.0075825778767466545, 0.002759807277470827, 0.14757342636585236, 0.0008720917976461351, 0.006155200302600861, 0.002408368280157447], [0.021197373047471046, 0.02350635640323162, 0.022101864218711853, 0.01900169625878334, 0.0032655552495270967, 0.014708778820931911, 0.0035452963784337044, 0.031931713223457336, 0.053638603538274765, 0.023248765617609024, 0.013078281655907631, 0.0821147933602333, 0.08312925696372986, 0.07899316400289536, 0.15939167141914368, 0.09374497830867767, 0.009617136791348457, 0.03166230022907257, 0.009344507940113544, 0.0669325664639473, 0.04955274611711502, 0.022876963019371033, 0.009782295674085617, 0.0736333429813385], [0.012865250930190086, 0.014301794581115246, 0.008924451656639576, 0.004647658206522465, 0.0016279424307867885, 0.001529152155853808, 0.0015373502392321825, 0.011346589773893356, 0.04858466237783432, 0.010673345997929573, 0.013644592836499214, 0.04315614700317383, 0.07115968316793442, 0.07922052592039108, 0.3088066875934601, 0.09441989660263062, 0.043726846575737, 0.025413569062948227, 0.019896958023309708, 0.02994345873594284, 0.10112638771533966, 0.016438093036413193, 0.009229215793311596, 0.027779750525951385], [0.02232244983315468, 0.025396760553121567, 0.007614856120198965, 0.01352405734360218, 0.00429999316111207, 0.010606079362332821, 0.0031512873247265816, 0.0382024310529232, 0.027025578543543816, 0.04367763176560402, 0.009720168076455593, 0.08030489832162857, 0.06044682115316391, 0.11160608381032944, 0.06279215216636658, 0.15311583876609802, 0.03551279753446579, 0.12455437332391739, 0.008798071183264256, 0.05008791759610176, 0.01374463364481926, 0.012867987155914307, 0.00513090007007122, 0.07549627125263214], [0.012822219170629978, 0.01014432031661272, 0.00607940461486578, 0.001306617632508278, 0.0003233755414839834, 0.0006623807712458074, 0.0020613372325897217, 0.0030357094947248697, 0.13533315062522888, 
0.00520901195704937, 0.037121716886758804, 0.005251334048807621, 0.030784040689468384, 0.022653236985206604, 0.1302773356437683, 0.027117038145661354, 0.026017816737294197, 0.0221982654184103, 0.1719510853290558, 0.018082760274410248, 0.28737396001815796, 0.013108175247907639, 0.02219030074775219, 0.008895349688827991], [0.009533846750855446, 0.004291556775569916, 0.051296137273311615, 0.019998589530587196, 0.004113550763577223, 0.01948367804288864, 0.001238340395502746, 0.009750733152031898, 0.050278034061193466, 0.01199146918952465, 0.0034501736517995596, 0.04257926717400551, 0.03853446617722511, 0.006088955793529749, 0.06512579321861267, 0.060289375483989716, 0.006573808379471302, 0.03003956377506256, 0.022327199578285217, 0.09400920569896698, 0.15701916813850403, 0.08243054896593094, 0.013662791810929775, 0.19589383900165558], [0.023941559717059135, 0.010599375702440739, 0.02716570347547531, 0.031233981251716614, 0.0012511396780610085, 0.0020661058370023966, 0.004560051951557398, 0.016831088811159134, 0.13374397158622742, 0.020468737930059433, 0.0009301466634497046, 0.020487403497099876, 0.05486280471086502, 0.00779486121609807, 0.06506115198135376, 0.05505156144499779, 0.005725502502173185, 0.008920488879084587, 0.03457652032375336, 0.05172932893037796, 0.31503933668136597, 0.05023353174328804, 0.0014238683506846428, 0.05630182847380638], [0.003251962596550584, 0.005268697161227465, 0.027795597910881042, 0.006863276474177837, 0.004936366342008114, 0.009403674863278866, 0.0019664387218654156, 0.0032806515228003263, 0.06354130059480667, 0.003721693530678749, 0.0035090043675154448, 0.032970137894153595, 0.03618022799491882, 0.0063668848015367985, 0.055796053260564804, 0.017265217378735542, 0.009697173722088337, 0.02191433683037758, 0.05939248576760292, 0.04739179462194443, 0.4032696783542633, 0.07035183906555176, 0.014138038270175457, 0.09172745048999786]], [[1.4792226465942804e-05, 4.6932367695262656e-05, 0.0002596964768599719, 0.00013942796795163304, 
0.00015343718405347317, 5.03626542922575e-05, 0.0010671357158571482, 5.0787333748303354e-05, 0.000329767819494009, 0.0006830388447269797, 0.00010058022598968819, 0.17152240872383118, 0.708656370639801, 9.964439232135192e-05, 0.0006179120973683894, 0.0002868551528081298, 0.00033835467183962464, 0.00023220482398755848, 0.003927909303456545, 0.0001508842979092151, 0.0002370062720729038, 0.0003933164698537439, 4.1957435314543545e-05, 0.11059917509555817], [0.001819581724703312, 0.003558157477527857, 0.004983999766409397, 0.003401821246370673, 0.0024912988301366568, 0.0023969190660864115, 0.011233914643526077, 0.0028044532518833876, 0.003001793287694454, 0.011539927683770657, 0.0013989288127049804, 0.3502565920352936, 0.38039687275886536, 0.004050597548484802, 0.005958701949566603, 0.003896738402545452, 0.002685040235519409, 0.005700611509382725, 0.017951354384422302, 0.004243805538862944, 0.0018354204948991537, 0.004694228991866112, 0.0005981974536553025, 0.16910098493099213], [0.0256815105676651, 0.016414670273661613, 0.03540201112627983, 0.08897300809621811, 0.019765321165323257, 0.06279630213975906, 0.04086069390177727, 0.05706116929650307, 0.04212593287229538, 0.06552272289991379, 0.08836273849010468, 0.005172180477529764, 0.004573192447423935, 0.01703709550201893, 0.03253885731101036, 0.0849742516875267, 0.01780891977250576, 0.055922940373420715, 0.028556406497955322, 0.042714089155197144, 0.03366284817457199, 0.04992087185382843, 0.07723492383956909, 0.006917333696037531], [0.039348892867565155, 0.036692481487989426, 0.01777839846909046, 0.04599366709589958, 0.01556604728102684, 0.0505661740899086, 0.03985193744301796, 0.02465054579079151, 0.03292600065469742, 0.03380430117249489, 0.026562750339508057, 0.10305868089199066, 0.10362915694713593, 0.05712062865495682, 0.03158140927553177, 0.04400566592812538, 0.018427135422825813, 0.03293813019990921, 0.052017826586961746, 0.017951948568224907, 0.03351947292685509, 0.030751517042517662, 0.029988577589392662, 
0.08126869052648544], [0.010810035280883312, 0.008481285534799099, 0.016865968704223633, 0.07637897878885269, 0.01499552559107542, 0.038073960691690445, 0.047774605453014374, 0.02583283744752407, 0.038798294961452484, 0.032204899936914444, 0.10675802081823349, 0.011552728712558746, 0.015389373525977135, 0.02651682123541832, 0.04973040893673897, 0.09898248314857483, 0.01929406262934208, 0.028128821402788162, 0.036830756813287735, 0.03203325718641281, 0.07815612107515335, 0.04545294865965843, 0.12324021011590958, 0.017717663198709488], [0.04066057503223419, 0.04493315517902374, 0.04278101027011871, 0.08173812925815582, 0.03977871313691139, 0.04257526993751526, 0.031373098492622375, 0.04260219261050224, 0.029402099549770355, 0.045842256397008896, 0.0506785623729229, 0.023877274245023727, 0.01926540397107601, 0.03725104406476021, 0.027141094207763672, 0.06465394794940948, 0.03664736822247505, 0.05070396885275841, 0.03317407891154289, 0.056848905980587006, 0.03211904317140579, 0.05508838966488838, 0.044144634157419205, 0.026719819754362106], [0.007873914204537868, 0.008950588293373585, 0.018092399463057518, 0.034419357776641846, 0.02419651672244072, 0.043071433901786804, 0.02105996385216713, 0.029764650389552116, 0.04988636076450348, 0.08839208632707596, 0.08918612450361252, 0.005548767279833555, 0.005232126452028751, 0.057851944118738174, 0.036977507174015045, 0.07589990645647049, 0.0437125563621521, 0.039351657032966614, 0.022715874016284943, 0.06525281816720963, 0.07310758531093597, 0.07705610245466232, 0.0766456350684166, 0.005754084791988134], [0.014540034346282482, 0.017395872622728348, 0.036181528121232986, 0.05140141025185585, 0.04543042182922363, 0.01908046379685402, 0.04361795261502266, 0.018837537616491318, 0.04331180453300476, 0.018098721280694008, 0.05629498511552811, 0.012000723741948605, 0.018261171877384186, 0.018367450684309006, 0.02477819100022316, 0.06833084672689438, 0.10953469574451447, 0.04314883053302765, 0.06091514974832535, 0.03655670955777168, 
0.10472583025693893, 0.035886071622371674, 0.07540106773376465, 0.027902476489543915], [0.015776176005601883, 0.01103205792605877, 0.024905845522880554, 0.0322912223637104, 0.03338082879781723, 0.021838882938027382, 0.033975034952163696, 0.039540376514196396, 0.05215590074658394, 0.051369115710258484, 0.11021576821804047, 0.005758966784924269, 0.005083235912024975, 0.015158028341829777, 0.046261146664619446, 0.04300900921225548, 0.0480625256896019, 0.03508439287543297, 0.03092433698475361, 0.06533065438270569, 0.059645071625709534, 0.08077343553304672, 0.13050228357315063, 0.007925722748041153], [0.00524466298520565, 0.007393545936793089, 0.020743107423186302, 0.04953240975737572, 0.023852191865444183, 0.011969984509050846, 0.02440204657614231, 0.025583792477846146, 0.04081406816840172, 0.045334454625844955, 0.06548354029655457, 0.012434535659849644, 0.011250892654061317, 0.023361310362815857, 0.034172117710113525, 0.090855173766613, 0.029885342344641685, 0.029094040393829346, 0.029856206849217415, 0.07776582986116409, 0.08887293189764023, 0.13983140885829926, 0.07986316084861755, 0.032403286546468735], [0.024640792980790138, 0.013908912427723408, 0.02707444317638874, 0.10037686675786972, 0.01894368976354599, 0.042301759123802185, 0.04901191592216492, 0.029626814648509026, 0.03432677686214447, 0.06124081462621689, 0.05750252678990364, 0.01479683443903923, 0.01607144996523857, 0.025640929117798805, 0.04768570885062218, 0.13540266454219818, 0.017319759353995323, 0.04259064793586731, 0.043057359755039215, 0.03937039151787758, 0.030084902420639992, 0.05952124670147896, 0.052559807896614075, 0.01694287545979023], [0.006829413119703531, 0.008343765512108803, 0.038000643253326416, 0.045766398310661316, 0.022315742447972298, 0.015228223986923695, 0.04941494017839432, 0.0177175160497427, 0.040506284683942795, 0.047484997659921646, 0.05926540493965149, 0.0416727252304554, 0.02471642754971981, 0.027065422385931015, 0.04110891371965408, 0.12161197513341904, 
0.024586232379078865, 0.03218654543161392, 0.04684960097074509, 0.02154628001153469, 0.047110579907894135, 0.05851128697395325, 0.0457574799656868, 0.11640319973230362], [0.012182527221739292, 0.011238504201173782, 0.03567780926823616, 0.04486263915896416, 0.026783738285303116, 0.023589754477143288, 0.05276549234986305, 0.03140103444457054, 0.050001293420791626, 0.040684495121240616, 0.0907205268740654, 0.016614988446235657, 0.01083819568157196, 0.022232305258512497, 0.04914741963148117, 0.08626225590705872, 0.02685002237558365, 0.04116281867027283, 0.04522646591067314, 0.03530348464846611, 0.05932642146945, 0.05781136453151703, 0.09630339592695236, 0.03301297873258591], [0.0018488488858565688, 0.003295579692348838, 0.025502735748887062, 0.03401517868041992, 0.014638388529419899, 0.007169199176132679, 0.05482516437768936, 0.015201042406260967, 0.032976873219013214, 0.04511169716715813, 0.02902069129049778, 0.10420940816402435, 0.13912774622440338, 0.006868486292660236, 0.03169366344809532, 0.060010846704244614, 0.01734398864209652, 0.026348480954766273, 0.049711454659700394, 0.026249883696436882, 0.023111719638109207, 0.051943741738796234, 0.01996898278594017, 0.17980600893497467], [0.024912657216191292, 0.014166293665766716, 0.021592119708657265, 0.05681798607110977, 0.02513689547777176, 0.04771783947944641, 0.02434523031115532, 0.029938440769910812, 0.05539445951581001, 0.04513169080018997, 0.10070767253637314, 0.0038332815747708082, 0.004883876536041498, 0.021759621798992157, 0.04074782878160477, 0.08266733586788177, 0.03554176911711693, 0.04043205827474594, 0.021769311279058456, 0.032985132187604904, 0.07263029366731644, 0.06279779970645905, 0.12967827916145325, 0.004412161186337471], [0.0395582914352417, 0.02744392305612564, 0.017744068056344986, 0.04998385161161423, 0.04069150239229202, 0.050934210419654846, 0.03764467313885689, 0.03446003794670105, 0.0564151294529438, 0.05002093315124512, 0.057453226298093796, 0.019050080329179764, 0.022385312244296074, 
0.03748500347137451, 0.03626143932342529, 0.050457101315259933, 0.03417307883501053, 0.03523100167512894, 0.028570789843797684, 0.02458670176565647, 0.08825619518756866, 0.06316237151622772, 0.0724097266793251, 0.025621414184570312], [0.009791632182896137, 0.006345310714095831, 0.010609750635921955, 0.0455096960067749, 0.01801425777375698, 0.03054819442331791, 0.040611088275909424, 0.022053301334381104, 0.04997948929667473, 0.030925795435905457, 0.15698467195034027, 0.006543029099702835, 0.008290586993098259, 0.024638663977384567, 0.04502737149596214, 0.09221777319908142, 0.030212080106139183, 0.020965151488780975, 0.02836841344833374, 0.01964244432747364, 0.08799594640731812, 0.03940504416823387, 0.16491776704788208, 0.010402633808553219], [0.015215140767395496, 0.00833135936409235, 0.013876455835998058, 0.03151703625917435, 0.0215658750385046, 0.02393367514014244, 0.02878474071621895, 0.035973142832517624, 0.05391460657119751, 0.07167179137468338, 0.10025880485773087, 0.01531956810504198, 0.00897596962749958, 0.040219996124506, 0.02891373634338379, 0.10312704741954803, 0.057075418531894684, 0.03438153490424156, 0.039469163864851, 0.05637282505631447, 0.05580547824501991, 0.062230080366134644, 0.07567647099494934, 0.017390085384249687], [0.004590080585330725, 0.004854025784879923, 0.012336674146354198, 0.025055713951587677, 0.017526879906654358, 0.024213723838329315, 0.019979387521743774, 0.018935762345790863, 0.05388876423239708, 0.044936519116163254, 0.09897639602422714, 0.010552529245615005, 0.014101220294833183, 0.05801638588309288, 0.04998180642724037, 0.0855836570262909, 0.05497872084379196, 0.03397638723254204, 0.030239220708608627, 0.04592263698577881, 0.11706937849521637, 0.05812838301062584, 0.10314956307411194, 0.013006138615310192], [0.005037004593759775, 0.00457302900031209, 0.025765003636479378, 0.01864488236606121, 0.02782740630209446, 0.011374259367585182, 0.026448838412761688, 0.011717617511749268, 0.05761878192424774, 0.020619841292500496, 
0.10804048925638199, 0.007532276213169098, 0.008894093334674835, 0.02491135150194168, 0.03544039651751518, 0.07769183069467545, 0.16129063069820404, 0.0386253260076046, 0.047859080135822296, 0.028026755899190903, 0.11056377738714218, 0.034123364835977554, 0.08955083042383194, 0.017823167145252228], [0.010852398350834846, 0.00388871761970222, 0.016359830275177956, 0.017381085082888603, 0.03367830440402031, 0.019460387527942657, 0.015011020004749298, 0.024044770747423172, 0.06626524031162262, 0.04784337431192398, 0.13176487386226654, 0.002302807290107012, 0.0024587989319115877, 0.014693912118673325, 0.04058356210589409, 0.05166362598538399, 0.08617419004440308, 0.03202393651008606, 0.015235639177262783, 0.03437086567282677, 0.06757251173257828, 0.07246483862400055, 0.1898813545703888, 0.00402390630915761], [0.003320622257888317, 0.002632369287312031, 0.01363975927233696, 0.023766450583934784, 0.017957329750061035, 0.011048349551856518, 0.007959975861012936, 0.023493556305766106, 0.03318997472524643, 0.05349306762218475, 0.11466772854328156, 0.0009732228354550898, 0.0006321780965663493, 0.028878768905997276, 0.028751108795404434, 0.10206856578588486, 0.036235153675079346, 0.027978450059890747, 0.010152952745556831, 0.08695413172245026, 0.0719345360994339, 0.1551777422428131, 0.14284648001194, 0.0022474913857877254], [0.025570319965481758, 0.008560623973608017, 0.019164837896823883, 0.06702311336994171, 0.02126442827284336, 0.03404964879155159, 0.027570897713303566, 0.02522781863808632, 0.03392700105905533, 0.07524576783180237, 0.09338050335645676, 0.005898992531001568, 0.007813628762960434, 0.03079129196703434, 0.053836923092603683, 0.09603199362754822, 0.03189671039581299, 0.04011256620287895, 0.02848172001540661, 0.04597054049372673, 0.0425952710211277, 0.09549938887357712, 0.08363277465105057, 0.006453254725784063], [0.007186983246356249, 0.006362755782902241, 0.020420441403985023, 0.021318087354302406, 0.024462586268782616, 0.011797307059168816, 
0.016679959371685982, 0.017226068302989006, 0.054123155772686005, 0.06348367035388947, 0.10989446192979813, 0.006663308013230562, 0.0033908169716596603, 0.03801470994949341, 0.03017176315188408, 0.09674709290266037, 0.05103026330471039, 0.030815185979008675, 0.022284751757979393, 0.03594357520341873, 0.08006951957941055, 0.1173226609826088, 0.11796418577432632, 0.016626615077257156]], [[0.08588650822639465, 0.1451805830001831, 0.07787468284368515, 0.07046253979206085, 0.06887409836053848, 0.07296250760555267, 0.024886716157197952, 0.004186274018138647, 0.027455657720565796, 0.023147236555814743, 0.045607905834913254, 0.015670331194996834, 0.019417356699705124, 0.0999322459101677, 0.07239680737257004, 0.0442483089864254, 0.031183794140815735, 0.017894666641950607, 0.006050356198102236, 0.0031807334162294865, 0.008289387449622154, 0.00575541565194726, 0.0206731166690588, 0.00878283940255642], [0.2866157293319702, 0.2358066737651825, 0.04515852406620979, 0.03365936875343323, 0.08294814079999924, 0.05317237228155136, 0.010228519327938557, 0.0012690513394773006, 0.009313439950346947, 0.006734724622219801, 0.03324011340737343, 0.0056004305370152, 0.01038165669888258, 0.05641566589474678, 0.029258405789732933, 0.023377148434519768, 0.03519744426012039, 0.008879667147994041, 0.002656285185366869, 0.0006849888013675809, 0.0025849270168691874, 0.0018981577595695853, 0.020368125289678574, 0.004550443962216377], [0.019075827673077583, 0.04923047497868538, 0.03389867767691612, 0.2218417376279831, 0.019471924751996994, 0.030472764745354652, 0.007326045073568821, 0.013130792416632175, 0.03973453491926193, 0.019436758011579514, 0.04191043972969055, 0.11368804425001144, 0.061695460230112076, 0.0594695545732975, 0.11374343186616898, 0.07633843272924423, 0.01733304373919964, 0.01145758293569088, 0.008012289181351662, 0.007504443638026714, 0.011869559995830059, 0.002394117182120681, 0.005456257611513138, 0.01550793182104826], [0.11539266258478165, 0.11222848296165466, 
0.049976129084825516, 0.04361201077699661, 0.050911594182252884, 0.19502651691436768, 0.017361437901854515, 0.011809449642896652, 0.03685053810477257, 0.026962412521243095, 0.037435322999954224, 0.038591090589761734, 0.04405929520726204, 0.06179855763912201, 0.0505150705575943, 0.03345450013875961, 0.02095463126897812, 0.006605928298085928, 0.0048924763686954975, 0.0035489134024828672, 0.009898951277136803, 0.00454370304942131, 0.011766298674046993, 0.011804000474512577], [0.11678502708673477, 0.1985565423965454, 0.04771653935313225, 0.20128147304058075, 0.03867649659514427, 0.04657973721623421, 0.008731954731047153, 0.01025957241654396, 0.025380687788128853, 0.004689499270170927, 0.06442274153232574, 0.016908816993236542, 0.013809029012918472, 0.03604888170957565, 0.07542092353105545, 0.04718603938817978, 0.013526072725653648, 0.004461649339646101, 0.002337767742574215, 0.0031809546053409576, 0.006077366881072521, 0.0006377575919032097, 0.013192933052778244, 0.004131616093218327], [0.026021553203463554, 0.058882467448711395, 0.06167897582054138, 0.23856647312641144, 0.07804788649082184, 0.012129922397434711, 0.02238573506474495, 0.00949589628726244, 0.024705952033400536, 0.011638840660452843, 0.04250162094831467, 0.035028353333473206, 0.02298772521317005, 0.040353331714868546, 0.11495683342218399, 0.06785237789154053, 0.04180489107966423, 0.019205566495656967, 0.018412234261631966, 0.007934067398309708, 0.011090758256614208, 0.006606848910450935, 0.012083790265023708, 0.015627898275852203], [0.010069256648421288, 0.008449142798781395, 0.02822037786245346, 0.06546960026025772, 0.018825599923729897, 0.05829734727740288, 0.00802026130259037, 0.12689682841300964, 0.04594532027840614, 0.0428607352077961, 0.07401610910892487, 0.15947601199150085, 0.056773535907268524, 0.010619424283504486, 0.06973852217197418, 0.06272611767053604, 0.015519291162490845, 0.022358661517500877, 0.009278475306928158, 0.036526795476675034, 0.014322567731142044, 0.01014635618776083, 
0.01528928428888321, 0.030154351145029068], [0.0019137648632749915, 0.0061024995520710945, 0.020497458055615425, 0.023156914860010147, 0.010465291328728199, 0.01675630360841751, 0.0018155052093788981, 0.01610882580280304, 0.026910895481705666, 0.06882713735103607, 0.0530216209590435, 0.4509044289588928, 0.09616676717996597, 0.03340791538357735, 0.05389447137713432, 0.07423896342515945, 0.01618664525449276, 0.01128621306270361, 0.0006638542981818318, 0.0017473552143201232, 0.001907467725686729, 0.0006864581955596805, 0.0010464427759870887, 0.012286754325032234], [0.048226140439510345, 0.2506250739097595, 0.0762055292725563, 0.15166564285755157, 0.04791652411222458, 0.025177376344799995, 0.014441273175179958, 0.0025622027460485697, 0.03260897845029831, 0.010411783121526241, 0.04165951535105705, 0.022648178040981293, 0.017763303592801094, 0.06374169141054153, 0.10284023731946945, 0.024631241336464882, 0.024380628019571304, 0.009432118386030197, 0.0046991268172860146, 0.0024385603610426188, 0.010452156886458397, 0.002591772237792611, 0.007489129900932312, 0.005391832906752825], [0.00019738732953555882, 0.0010397747391834855, 0.009306303225457668, 0.044520094990730286, 0.0036992712412029505, 0.0014555989764630795, 0.004961303900927305, 0.12369338423013687, 0.008354319259524345, 0.054416485130786896, 0.016304774209856987, 0.4818505644798279, 0.08250299841165543, 0.0038252947852015495, 0.010601812042295933, 0.023252133280038834, 0.006929389666765928, 0.014540884643793106, 0.010653064586222172, 0.044387537986040115, 0.005539777688682079, 0.015069671906530857, 0.0011580713326111436, 0.03174012154340744], [0.0213426873087883, 0.03662749379873276, 0.026609525084495544, 0.007673217449337244, 0.03966864198446274, 0.018607186153531075, 0.025177840143442154, 0.0788143128156662, 0.029003076255321503, 0.0349586196243763, 0.04727252200245857, 0.14290304481983185, 0.07385670393705368, 0.05393805727362633, 0.024601206183433533, 0.04267582669854164, 0.054360054433345795, 
0.02900790423154831, 0.02290884219110012, 0.05776212736964226, 0.03223109617829323, 0.014462231658399105, 0.02987835742533207, 0.0556594617664814], [0.0011350339045748115, 0.0009040817385539412, 0.005748441442847252, 0.004316026344895363, 0.008329554460942745, 0.002444574609398842, 0.007529381662607193, 0.11995424330234528, 0.007849683053791523, 0.04809688404202461, 0.017001483589410782, 0.23471228778362274, 0.07926072925329208, 0.004618159029632807, 0.005212969146668911, 0.020731190219521523, 0.03174377605319023, 0.03357229754328728, 0.02132694236934185, 0.12982752919197083, 0.019911011680960655, 0.045379288494586945, 0.012890285812318325, 0.13750408589839935], [0.003988174721598625, 0.0028339338023215532, 0.01247863844037056, 0.009371782653033733, 0.013353623449802399, 0.008535945788025856, 0.017537450417876244, 0.07171181589365005, 0.014251578599214554, 0.05594430863857269, 0.019687224179506302, 0.1192953810095787, 0.07930702716112137, 0.005015800707042217, 0.011667176149785519, 0.016352925449609756, 0.03532643988728523, 0.03533496707677841, 0.05484523996710777, 0.1387663632631302, 0.04802611470222473, 0.07798057049512863, 0.030175557360053062, 0.11821196973323822], [0.004968194756656885, 0.004922006744891405, 0.028467999771237373, 0.039144255220890045, 0.022798359394073486, 0.008983074687421322, 0.009178981184959412, 0.10867810994386673, 0.019961224868893623, 0.04045655578374863, 0.03021114505827427, 0.13979600369930267, 0.0701642856001854, 0.0058294846676290035, 0.02712290920317173, 0.0352095328271389, 0.04261084273457527, 0.048305850476026535, 0.025837862864136696, 0.08380106091499329, 0.023509077727794647, 0.06168343871831894, 0.024974381551146507, 0.09338536113500595], [0.02118634805083275, 0.03924032300710678, 0.011233231984078884, 0.005781347397714853, 0.014343210496008396, 0.03959069028496742, 0.029077330604195595, 0.059333436191082, 0.04634176567196846, 0.03815637156367302, 0.019821427762508392, 0.07501908391714096, 0.05398467555642128, 
0.07214631140232086, 0.019120140001177788, 0.019478535279631615, 0.06810247898101807, 0.06907883286476135, 0.07583972066640854, 0.07699882239103317, 0.05841813236474991, 0.02001490257680416, 0.019009847193956375, 0.04868294298648834], [0.022898763418197632, 0.01854119822382927, 0.020734230056405067, 0.01030010636895895, 0.022724755108356476, 0.012151944451034069, 0.018591538071632385, 0.13760675489902496, 0.028310028836131096, 0.03440532088279724, 0.04233310744166374, 0.08932404965162277, 0.049146827310323715, 0.045213665813207626, 0.019706670194864273, 0.023496432229876518, 0.05079955607652664, 0.04671206325292587, 0.0352211557328701, 0.12186864018440247, 0.03863377124071121, 0.0180705226957798, 0.030214538797736168, 0.0629943236708641], [0.08848412334918976, 0.08296577632427216, 0.016514580696821213, 0.009181381203234196, 0.048425160348415375, 0.05150386318564415, 0.03117240220308304, 0.04345986247062683, 0.028563419356942177, 0.011787287890911102, 0.037921447306871414, 0.015284057706594467, 0.01983034610748291, 0.030018560588359833, 0.02941039763391018, 0.02897929772734642, 0.08422308415174484, 0.054101698100566864, 0.05904855579137802, 0.060609083622694016, 0.04890119656920433, 0.014412224292755127, 0.08309147506952286, 0.02211063914000988], [0.0064049591310322285, 0.004528742749243975, 0.007120887748897076, 0.005169575568288565, 0.01841513067483902, 0.008622797206044197, 0.021929407492280006, 0.118111252784729, 0.023671533912420273, 0.01905495673418045, 0.016379661858081818, 0.029232554137706757, 0.01634589023888111, 0.007129725068807602, 0.010911774821579456, 0.02446936070919037, 0.03878825157880783, 0.06784475594758987, 0.08584951609373093, 0.23808865249156952, 0.05443538725376129, 0.10835135728120804, 0.024579178541898727, 0.044564589858055115], [0.009365282952785492, 0.004767491947859526, 0.010557135567069054, 0.007146498188376427, 0.004975426476448774, 0.028111102059483528, 0.015968043357133865, 0.10024602711200714, 0.031366024166345596, 
0.021015694364905357, 0.04274506866931915, 0.044669754803180695, 0.025371169671416283, 0.007556375116109848, 0.031677983701229095, 0.020097509026527405, 0.017054090276360512, 0.08073994517326355, 0.061177607625722885, 0.20144997537136078, 0.06420641392469406, 0.04897910729050636, 0.0679422914981842, 0.05281393975019455], [0.003912751562893391, 0.0026951166801154613, 0.013227077201008797, 0.008033833466470242, 0.006245321594178677, 0.011276381090283394, 0.014170892536640167, 0.22960098087787628, 0.03728120028972626, 0.02717834711074829, 0.04045259207487106, 0.10061716288328171, 0.04794904217123985, 0.011836175806820393, 0.024296920746564865, 0.03268707916140556, 0.01764611341059208, 0.0586848147213459, 0.02360212244093418, 0.14279156923294067, 0.03648471087217331, 0.02604851871728897, 0.021536611020565033, 0.061744652688503265], [0.10498276352882385, 0.10457057505846024, 0.029898496344685555, 0.03387228772044182, 0.02358582615852356, 0.046131812036037445, 0.06580956280231476, 0.019660867750644684, 0.04825381934642792, 0.005922496318817139, 0.021057799458503723, 0.0033565948251634836, 0.006795102264732122, 0.02364816889166832, 0.039947960525751114, 0.01972653716802597, 0.0169533584266901, 0.04488811641931534, 0.060263823717832565, 0.052862975746393204, 0.09198243916034698, 0.033869873732328415, 0.08563446998596191, 0.01632430963218212], [0.0007130173617042601, 0.0007422424387186766, 0.00472958292812109, 0.03684569150209427, 0.00121354463044554, 0.002146094338968396, 0.006243493407964706, 0.30202561616897583, 0.006867404095828533, 0.008846352808177471, 0.011820169165730476, 0.06089875474572182, 0.01856077089905739, 0.0017361992504447699, 0.007322132121771574, 0.016359582543373108, 0.0022059017792344093, 0.02241464890539646, 0.0242229625582695, 0.39480060338974, 0.01926460489630699, 0.012369759380817413, 0.007676566950976849, 0.02997422404587269], [0.08205047249794006, 0.06181202828884125, 0.010174433700740337, 0.00838431902229786, 0.009219583123922348, 
0.018256966024637222, 0.04562335088849068, 0.07644718140363693, 0.04049382358789444, 0.011859841644763947, 0.030275631695985794, 0.020297368988394737, 0.019344191998243332, 0.0297092217952013, 0.01100501324981451, 0.020223820582032204, 0.014142286963760853, 0.03734218701720238, 0.07151999324560165, 0.14945439994335175, 0.12228207290172577, 0.013212896883487701, 0.070156991481781, 0.026711856946349144], [0.0035522417165338993, 0.0009504796471446753, 0.0032442291267216206, 0.0034529140684753656, 0.004835580009967089, 0.003466861555352807, 0.008316785097122192, 0.1492583453655243, 0.0070501659065485, 0.01743565872311592, 0.010648478753864765, 0.021666185930371284, 0.012391136959195137, 0.0012688710121437907, 0.0032413392327725887, 0.010865813121199608, 0.011646541766822338, 0.03986562043428421, 0.04649168625473976, 0.3743551969528198, 0.045279163867235184, 0.11118996143341064, 0.04061553254723549, 0.06891115754842758]], [[0.04458087682723999, 0.04502090439200401, 0.024908168241381645, 0.040026355534791946, 0.0591345839202404, 0.02256053499877453, 0.03338091820478439, 0.08222176879644394, 0.02811622805893421, 0.017334317788481712, 0.0602186881005764, 0.04817547649145126, 0.0386328250169754, 0.04941682144999504, 0.03545157238841057, 0.034417539834976196, 0.05075303092598915, 0.03965950012207031, 0.04714623838663101, 0.05051203444600105, 0.03657782822847366, 0.016581548377871513, 0.048771053552627563, 0.04640112444758415], [0.025114230811595917, 0.02593623846769333, 0.030246537178754807, 0.036154717206954956, 0.06806730479001999, 0.0351722426712513, 0.052376918494701385, 0.1468617469072342, 0.0594983845949173, 0.018588794395327568, 0.08176162093877792, 0.05879097431898117, 0.03378351032733917, 0.03662898391485214, 0.03818671405315399, 0.020393695682287216, 0.04495552182197571, 0.02952110953629017, 0.03311218321323395, 0.04318075254559517, 0.027166789397597313, 0.011559097096323967, 0.027769900858402252, 0.015172014012932777], [0.014317450113594532, 0.019040409475564957, 
0.07549012452363968, 0.08413434773683548, 0.027046501636505127, 0.06011820212006569, 0.0294931773096323, 0.11994527280330658, 0.19032998383045197, 0.040153101086616516, 0.038446664810180664, 0.03871579468250275, 0.03023369610309601, 0.02089611440896988, 0.029162954539060593, 0.0321279801428318, 0.013888594694435596, 0.01567608118057251, 0.00603611720725894, 0.008291718550026417, 0.054828815162181854, 0.029165705665946007, 0.009055917151272297, 0.013405314646661282], [0.008934522047638893, 0.007468793075531721, 0.09097164124250412, 0.025803927332162857, 0.02541370689868927, 0.03605744242668152, 0.027198484167456627, 0.032024286687374115, 0.09623806923627853, 0.07634163647890091, 0.025364819914102554, 0.04390721023082733, 0.1260756254196167, 0.026608329266309738, 0.0586988739669323, 0.031235992908477783, 0.020046332851052284, 0.014390120282769203, 0.008445978164672852, 0.020989341661334038, 0.08675852417945862, 0.05893419682979584, 0.011048218235373497, 0.041044000536203384], [0.0037141013890504837, 0.005164287053048611, 0.07645539194345474, 0.06627499312162399, 0.011027798987925053, 0.002586106304079294, 0.027214938774704933, 0.18046239018440247, 0.12558910250663757, 0.007975558750331402, 0.07077060639858246, 0.02963731251657009, 0.03064759634435177, 0.00376361352391541, 0.15249724686145782, 0.01332042831927538, 0.016642557457089424, 0.014502467587590218, 0.013571178540587425, 0.0216187983751297, 0.051324211061000824, 0.04563493654131889, 0.01904461905360222, 0.010559679009020329], [0.004983898252248764, 0.005206167232245207, 0.04796120896935463, 0.049088314175605774, 0.014323912560939789, 0.02177746407687664, 0.016936155036091805, 0.37485960125923157, 0.06538528949022293, 0.0265215951949358, 0.043479323387145996, 0.021247902885079384, 0.020811058580875397, 0.004345408175140619, 0.0632217675447464, 0.021173963323235512, 0.009372549131512642, 0.022511418908834457, 0.006069323979318142, 0.013522444292902946, 0.0315910205245018, 0.08082686364650726, 
0.019091026857495308, 0.015692366287112236], [0.016644835472106934, 0.026920663192868233, 0.07961174100637436, 0.036168407648801804, 0.02686622552573681, 0.23152390122413635, 0.03464395925402641, 0.03724418580532074, 0.07359985262155533, 0.19635362923145294, 0.03923921659588814, 0.014545846730470657, 0.03281858563423157, 0.01570362038910389, 0.01592411659657955, 0.005911949556320906, 0.012604997493326664, 0.00786609761416912, 0.006940988823771477, 0.00823658611625433, 0.026718776673078537, 0.030548924580216408, 0.014247418381273746, 0.009115469641983509], [0.0029572807252407074, 0.0028015184216201305, 0.08110319823026657, 0.021113434806466103, 0.010574753396213055, 0.030800314620137215, 0.030233168974518776, 0.028955910354852676, 0.0785008892416954, 0.11928186565637589, 0.04792196303606033, 0.033663444221019745, 0.10035081207752228, 0.008610561490058899, 0.09377606213092804, 0.010163992643356323, 0.011270281858742237, 0.027667958289384842, 0.022583695128560066, 0.04640690237283707, 0.06807409971952438, 0.09042535722255707, 0.016322288662195206, 0.01644020713865757], [0.013583126477897167, 0.017523182556033134, 0.04092291742563248, 0.07050066441297531, 0.04047844931483269, 0.011873392388224602, 0.04853345826268196, 0.43524909019470215, 0.06904160976409912, 0.007106147240847349, 0.05787157639861107, 0.029753031209111214, 0.007314445450901985, 0.00870309118181467, 0.04291529580950737, 0.011621486395597458, 0.019300740212202072, 0.018431473523378372, 0.011563420295715332, 0.007174537982791662, 0.01099866908043623, 0.0050201863050460815, 0.009975029155611992, 0.004544922616332769], [0.00293900677934289, 0.0028270904440432787, 0.03531181812286377, 0.014168722555041313, 0.016466598957777023, 0.007233187090605497, 0.03955177217721939, 0.025711361318826675, 0.06726629287004471, 0.03439529612660408, 0.03664523735642433, 0.04068203642964363, 0.029955588281154633, 0.006500928662717342, 0.06510735303163528, 0.03888671100139618, 0.023532550781965256, 0.09558846056461334, 
0.0480324886739254, 0.04190611094236374, 0.07807234674692154, 0.1750023365020752, 0.022391390055418015, 0.05182535573840141], [0.015569387003779411, 0.029690874740481377, 0.12332386523485184, 0.021189097315073013, 0.015085156075656414, 0.15784968435764313, 0.019782686606049538, 0.030723605304956436, 0.21039631962776184, 0.09085191786289215, 0.039719101041555405, 0.022960161790251732, 0.06548880785703659, 0.01926635578274727, 0.05001037195324898, 0.005709374323487282, 0.005801979452371597, 0.002503618597984314, 0.0016621795948594809, 0.001696368446573615, 0.054819636046886444, 0.006337533239275217, 0.004876487422734499, 0.004685435444116592], [0.010238973423838615, 0.006874313578009605, 0.0659499540925026, 0.024114931002259254, 0.023044288158416748, 0.02845175378024578, 0.059416864067316055, 0.08177759498357773, 0.05050795525312424, 0.05701548978686333, 0.07638058811426163, 0.045060571283102036, 0.03496019169688225, 0.008614586666226387, 0.04577925428748131, 0.03272281214594841, 0.02031990885734558, 0.04918329790234566, 0.02445269748568535, 0.024865679442882538, 0.05562365800142288, 0.07997028529644012, 0.03892951086163521, 0.055744852870702744], [0.008951903320848942, 0.0074661653488874435, 0.05346328020095825, 0.01814495399594307, 0.029963834211230278, 0.0174777302891016, 0.047379788011312485, 0.11253282427787781, 0.051538512110710144, 0.015996461734175682, 0.09674129635095596, 0.06231805309653282, 0.03494966775178909, 0.007644488476216793, 0.07482298463582993, 0.02367238886654377, 0.02854740619659424, 0.035218264907598495, 0.027694575488567352, 0.02797817252576351, 0.06249316781759262, 0.05301729589700699, 0.058816298842430115, 0.04317057132720947], [0.007763049099594355, 0.007636801339685917, 0.0864168182015419, 0.013608631677925587, 0.022953303530812263, 0.10612034797668457, 0.04807237163186073, 0.05256548896431923, 0.10312116891145706, 0.04910691827535629, 0.062367942184209824, 0.05191165208816528, 0.0605546198785305, 0.011924576945602894, 0.06391645222902298, 
0.021020432934165, 0.01887945830821991, 0.035204727202653885, 0.02163628861308098, 0.022889522835612297, 0.044115230441093445, 0.03887511417269707, 0.023920057341456413, 0.025418905541300774], [0.008692755363881588, 0.008930105715990067, 0.06153066083788872, 0.014705419540405273, 0.010635473765432835, 0.12266941368579865, 0.023367730900645256, 0.009443553164601326, 0.16173960268497467, 0.14234119653701782, 0.026245327666401863, 0.016385214403271675, 0.11803726106882095, 0.02373361401259899, 0.03943807631731033, 0.007592364680022001, 0.01204339787364006, 0.007314570248126984, 0.005281627178192139, 0.009409484453499317, 0.1062285304069519, 0.03636603057384491, 0.015064822509884834, 0.012803858146071434], [0.004451446700841188, 0.0035005758982151747, 0.06727781891822815, 0.014520678669214249, 0.014604558236896992, 0.013433144427835941, 0.027355222031474113, 0.014210831373929977, 0.09494160860776901, 0.060053642839193344, 0.01810135878622532, 0.05618509650230408, 0.10014272481203079, 0.02108769305050373, 0.058141469955444336, 0.04571294039487839, 0.029828721657395363, 0.0413503497838974, 0.02713419497013092, 0.037324968725442886, 0.10651294142007828, 0.07085996866226196, 0.008626178838312626, 0.06464197486639023], [0.0014672812540084124, 0.0017738272435963154, 0.057968318462371826, 0.005951404571533203, 0.009724240750074387, 0.0037103653885424137, 0.030960069969296455, 0.06436961889266968, 0.11815007030963898, 0.006647112313657999, 0.068691685795784, 0.050586581230163574, 0.05402816832065582, 0.00392128387466073, 0.17448309063911438, 0.0073186783120036125, 0.03790432959794998, 0.020306093618273735, 0.08580624312162399, 0.06203474849462509, 0.06876065582036972, 0.041090674698352814, 0.014939921908080578, 0.009405546821653843], [0.007810702081769705, 0.0062346686609089375, 0.0512857660651207, 0.01304759830236435, 0.0131229842081666, 0.04738316684961319, 0.02865718863904476, 0.1597418189048767, 0.05971341207623482, 0.039629824459552765, 0.027586568146944046, 
0.04736848920583725, 0.038681693375110626, 0.016768429428339005, 0.042928945273160934, 0.01721801795065403, 0.019473861902952194, 0.03413859382271767, 0.030383799225091934, 0.15536099672317505, 0.04084646701812744, 0.059819918125867844, 0.01790499873459339, 0.024892006069421768], [0.012385008856654167, 0.016972342506051064, 0.059056010097265244, 0.02000385709106922, 0.024563053622841835, 0.0384722575545311, 0.03070269152522087, 0.03359071537852287, 0.11383699625730515, 0.10977768152952194, 0.05743314325809479, 0.04905156418681145, 0.07383929938077927, 0.03799730911850929, 0.055955905467271805, 0.010545696131885052, 0.031020602211356163, 0.018462039530277252, 0.027926182374358177, 0.022161854431033134, 0.07860637456178665, 0.04023679718375206, 0.02056119777262211, 0.016841350123286247], [0.0020050781313329935, 0.0013575670309364796, 0.02513495273888111, 0.0049947029910981655, 0.0057456400245428085, 0.005744319409132004, 0.010029125027358532, 0.03254936635494232, 0.024886488914489746, 0.008935119956731796, 0.026914503425359726, 0.053020574152469635, 0.07173819094896317, 0.00837624166160822, 0.08429143577814102, 0.02119811438024044, 0.01063426025211811, 0.03956766426563263, 0.057220228016376495, 0.19695411622524261, 0.06279486417770386, 0.19852840900421143, 0.020031023770570755, 0.027348129078745842], [0.008946917951107025, 0.0057894145138561726, 0.04212081804871559, 0.01573052443563938, 0.021530529484152794, 0.008163471706211567, 0.04520820826292038, 0.03302790969610214, 0.02688729763031006, 0.007613744121044874, 0.059670589864254, 0.04970928654074669, 0.055583298206329346, 0.016980817541480064, 0.12734836339950562, 0.05767938867211342, 0.04267891123890877, 0.03366280719637871, 0.07439769804477692, 0.0986030176281929, 0.05460240691900253, 0.028727944940328598, 0.06473487615585327, 0.020601728931069374], [0.0016605493146926165, 0.0012166677042841911, 0.022699011489748955, 0.007164731156080961, 0.0034226938150823116, 0.0024939069990068674, 0.010598192922770977, 
0.0028189157601445913, 0.022063612937927246, 0.008924136869609356, 0.01487461756914854, 0.011001380160450935, 0.03202628344297409, 0.007649505510926247, 0.07058360427618027, 0.09288109838962555, 0.012186877429485321, 0.052389755845069885, 0.022385526448488235, 0.027987578883767128, 0.16541838645935059, 0.1364770531654358, 0.03409142419695854, 0.23698453605175018], [0.012488095089793205, 0.015050382353365421, 0.07562954723834991, 0.014805690385401249, 0.009082628414034843, 0.007811200805008411, 0.017455872148275375, 0.039936114102602005, 0.08962219953536987, 0.008428140543401241, 0.051178883761167526, 0.020418280735611916, 0.04529570788145065, 0.016245095059275627, 0.18981291353702545, 0.02159518003463745, 0.012248874641954899, 0.02024715393781662, 0.018466589972376823, 0.029478328302502632, 0.15639592707157135, 0.06413593888282776, 0.034307245165109634, 0.029863936826586723], [0.005171943921595812, 0.0022537424229085445, 0.021371597424149513, 0.002928693313151598, 0.006522635463625193, 0.005728626623749733, 0.028372742235660553, 0.011843804270029068, 0.007102147676050663, 0.006340681575238705, 0.022123493254184723, 0.008576623164117336, 0.009932528249919415, 0.004998000338673592, 0.03051433525979519, 0.02127576805651188, 0.01713666133582592, 0.06964189559221268, 0.110556460916996, 0.19851316511631012, 0.057027824223041534, 0.10924734175205231, 0.1515243500471115, 0.09129498153924942]], [[0.018551966175436974, 0.006560661364346743, 0.06533464044332504, 0.018398908898234367, 0.030735531821846962, 0.039231039583683014, 0.1964523047208786, 0.02905448153614998, 0.14427998661994934, 0.0461956262588501, 0.11772020906209946, 0.028891514986753464, 0.039140526205301285, 0.011646986939013004, 0.06151391938328743, 0.04377686604857445, 0.008846893906593323, 0.00636994419619441, 0.030747735872864723, 0.004171022679656744, 0.006705279927700758, 0.008577975444495678, 0.025175059214234352, 0.01192096434533596], [0.008325619623064995, 0.004142462275922298, 0.04761451855301857, 
0.009732209146022797, 0.017229599878191948, 0.03061594069004059, 0.07270532846450806, 0.03369714319705963, 0.1303960680961609, 0.038515929132699966, 0.15216536819934845, 0.049178097397089005, 0.09366385638713837, 0.018248310312628746, 0.13456028699874878, 0.027534693479537964, 0.006334122736006975, 0.009152448736131191, 0.024854538962244987, 0.013392062857747078, 0.014535639435052872, 0.011708911508321762, 0.03293142095208168, 0.018765322864055634], [0.00553148053586483, 0.002366168424487114, 0.08094343543052673, 0.0031532577704638243, 0.011393520049750805, 0.00946017075330019, 0.07223672419786453, 0.019487205892801285, 0.12650303542613983, 0.01990780048072338, 0.4278597831726074, 0.011589928530156612, 0.030219420790672302, 0.0037394955288618803, 0.1450807750225067, 0.002444662619382143, 0.0002839423541445285, 0.000496392953209579, 0.007357165217399597, 0.0025698456447571516, 0.0018126486102119088, 0.0023899299558252096, 0.011716615408658981, 0.001456652651540935], [0.007015898823738098, 0.0011165618197992444, 0.08625157922506332, 0.021082798019051552, 0.012105382978916168, 0.05686955153942108, 0.06966502219438553, 0.05704433470964432, 0.16418756544589996, 0.16534432768821716, 0.09269940853118896, 0.09198559820652008, 0.052995529025793076, 0.0051429090090096, 0.07792968302965164, 0.009965396486222744, 0.000704572768881917, 0.0013680048286914825, 0.0023456010967493057, 0.001659950939938426, 0.0015341747784987092, 0.005331854801625013, 0.005743199028074741, 0.009911119937896729], [0.0030666375532746315, 0.004101530648767948, 0.023323630914092064, 0.003053413238376379, 0.044532645493745804, 0.0219404436647892, 0.1463475525379181, 0.04272088408470154, 0.518138587474823, 0.11322492361068726, 0.027131719514727592, 0.007230817340314388, 0.019792621955275536, 0.004542763344943523, 0.015483002178370953, 0.000979349366389215, 0.0005808864370919764, 0.0001655527885304764, 0.0009158082539215684, 0.00028096369351260364, 0.00039073475636541843, 0.000918062636628747, 
0.0006302841356955469, 0.0005070787156000733], [0.014104710891842842, 0.025524592027068138, 0.10090022534132004, 0.019853906705975533, 0.024263208732008934, 0.05577594414353371, 0.04322138428688049, 0.09080268442630768, 0.11847656220197678, 0.1445816159248352, 0.10155368596315384, 0.06803259998559952, 0.036492474377155304, 0.03942330926656723, 0.054303817451000214, 0.006884158588945866, 0.0062089054845273495, 0.004662442486733198, 0.004198822192847729, 0.006801806390285492, 0.00846706423908472, 0.009227803908288479, 0.008852283470332623, 0.007386038079857826], [0.005500328727066517, 0.00873272493481636, 0.02966134250164032, 0.003043125616386533, 0.036590296775102615, 0.015420191921293736, 0.06398399919271469, 0.03457649052143097, 0.32314160466194153, 0.052118606865406036, 0.26111990213394165, 0.012589006684720516, 0.038524702191352844, 0.010829217731952667, 0.08564264327287674, 0.002933698706328869, 0.002803641837090254, 0.0015674149617552757, 0.003824597457423806, 0.001717067789286375, 0.0015584538923576474, 0.0007186994189396501, 0.003035168396309018, 0.0003670562873594463], [0.005577285308390856, 0.0028077091556042433, 0.045338284224271774, 0.004213751293718815, 0.012562520802021027, 0.003679427085444331, 0.05744296312332153, 0.015976980328559875, 0.15705466270446777, 0.04254636913537979, 0.311769038438797, 0.0155408326536417, 0.05089109390974045, 0.0067130462266504765, 0.23100747168064117, 0.005090885329991579, 0.0010084452806040645, 0.0009351768530905247, 0.009611913934350014, 0.0034611066803336143, 0.003539665136486292, 0.004109010100364685, 0.007660832721740007, 0.001461491920053959], [0.004922098945826292, 0.013633550144731998, 0.03983525559306145, 0.009172389283776283, 0.04671545699238777, 0.005455471575260162, 0.032833606004714966, 0.04493038356304169, 0.11192340403795242, 0.028768151998519897, 0.13320115208625793, 0.023713381960988045, 0.10272832214832306, 0.045915231108665466, 0.22348099946975708, 0.012784288264811039, 0.012900619767606258, 
0.004811821971088648, 0.025143183767795563, 0.02127755619585514, 0.018105220049619675, 0.014243441633880138, 0.013761989772319794, 0.009742964059114456], [0.0018814187496900558, 0.00037508815876208246, 0.013813234865665436, 0.005757618695497513, 0.002626835135743022, 0.0036566252820193768, 0.00786951370537281, 0.0217362642288208, 0.055071666836738586, 0.015932351350784302, 0.04258614033460617, 0.011733937077224255, 0.03240567073225975, 0.003319508396089077, 0.2606014013290405, 0.04336950182914734, 0.018953755497932434, 0.1126050353050232, 0.11315836757421494, 0.08581332117319107, 0.04721056669950485, 0.03851838409900665, 0.029476575553417206, 0.031527262181043625], [0.007182130590081215, 0.004921608604490757, 0.02002805471420288, 0.008147015236318111, 0.023169027641415596, 0.008445775136351585, 0.047311536967754364, 0.022709660232067108, 0.13885028660297394, 0.035979244858026505, 0.08994822949171066, 0.011780675500631332, 0.05836495757102966, 0.0226924829185009, 0.19616913795471191, 0.0240166075527668, 0.041755542159080505, 0.020088963210582733, 0.07562305778265, 0.0370631068944931, 0.0597807839512825, 0.017875252291560173, 0.021384747698903084, 0.006712113507091999], [0.001294654910452664, 0.0004902863875031471, 0.0023296321742236614, 0.0034763214644044638, 0.001618006150238216, 0.0021613663993775845, 0.00272643705829978, 0.01174889039248228, 0.006233376916497946, 0.004237298853695393, 0.003365547629073262, 0.0031326990574598312, 0.007390979211777449, 0.0023011136800050735, 0.050790298730134964, 0.039197225123643875, 0.0449754036962986, 0.25334736704826355, 0.21259696781635284, 0.1862742006778717, 0.06305629760026932, 0.048263341188430786, 0.016259560361504555, 0.03273269534111023], [0.0032920828089118004, 0.001252059475518763, 0.004749705083668232, 0.008850046433508396, 0.004286292474716902, 0.004551946185529232, 0.003907250240445137, 0.011666889302432537, 0.010144270025193691, 0.006946504581719637, 0.008630522526800632, 0.004406830295920372, 
0.010222163051366806, 0.003999955020844936, 0.06092767044901848, 0.04009227827191353, 0.06980330497026443, 0.1817525178194046, 0.15269909799098969, 0.1384209245443344, 0.1101926863193512, 0.07970695197582245, 0.04090064391493797, 0.03859737887978554], [0.002374261384829879, 0.0006775386864319444, 0.013607360422611237, 0.0063567012548446655, 0.0010106919799000025, 0.003185285022482276, 0.0054867323487997055, 0.004741603508591652, 0.009856492280960083, 0.005572330206632614, 0.01599705219268799, 0.008962543681263924, 0.015215874649584293, 0.0038781454786658287, 0.15952932834625244, 0.04561861604452133, 0.019683439284563065, 0.16356652975082397, 0.1599990725517273, 0.06403114646673203, 0.09486081451177597, 0.04061982035636902, 0.084642693400383, 0.07052595168352127], [0.004355795681476593, 0.0010846639052033424, 0.012392436154186726, 0.009266790933907032, 0.0030893629882484674, 0.002642963547259569, 0.002346684457734227, 0.005930383689701557, 0.01086426991969347, 0.005701350513845682, 0.013739265501499176, 0.00611455412581563, 0.017724230885505676, 0.005269773304462433, 0.08113033324480057, 0.05297043174505234, 0.07021599262952805, 0.070933036506176, 0.06481339037418365, 0.08867809176445007, 0.14785541594028473, 0.10392538458108902, 0.12570969760417938, 0.09324564039707184], [0.015870483592152596, 0.0010732628870755434, 0.04071632772684097, 0.06371870636940002, 0.007445416413247585, 0.009981167502701283, 0.008216300047934055, 0.01573660410940647, 0.01937730424106121, 0.02369079925119877, 0.04631359875202179, 0.024898435920476913, 0.034308962523937225, 0.004118075128644705, 0.09031607955694199, 0.04623137786984444, 0.018324794247746468, 0.04680507257580757, 0.055528540164232254, 0.08066355437040329, 0.09603561460971832, 0.08884089440107346, 0.09024003893136978, 0.07154858112335205], [0.013002301566302776, 0.010968155227601528, 0.016708724200725555, 0.030315782874822617, 0.12024584412574768, 0.017408836632966995, 0.023719169199466705, 0.05012722313404083, 
0.06961112469434738, 0.030236491933465004, 0.008955328725278378, 0.011163117364048958, 0.04245253652334213, 0.013790813274681568, 0.02249528467655182, 0.03207927569746971, 0.117847740650177, 0.02614498883485794, 0.05541636049747467, 0.04599833860993385, 0.07522360235452652, 0.08801136165857315, 0.026945890858769417, 0.0511317178606987], [0.028976714238524437, 0.012721680104732513, 0.012564965523779392, 0.042038753628730774, 0.013526716269552708, 0.011761979199945927, 0.004548889584839344, 0.008642555214464664, 0.0036463423166424036, 0.0050341724418103695, 0.002218908164650202, 0.011015359312295914, 0.007687133736908436, 0.008744793944060802, 0.0051252287812530994, 0.03489411249756813, 0.1006874367594719, 0.04517889395356178, 0.03983008489012718, 0.04004789516329765, 0.08838231861591339, 0.12513867020606995, 0.0822032243013382, 0.2653830945491791], [0.05050260201096535, 0.029844338074326515, 0.01596412993967533, 0.030006397515535355, 0.05079904571175575, 0.020683379843831062, 0.031439729034900665, 0.012526326812803745, 0.03410213440656662, 0.009183013811707497, 0.010910469107329845, 0.0074884905479848385, 0.020748501643538475, 0.010613796301186085, 0.02155682072043419, 0.05679755657911301, 0.1436682641506195, 0.07198239862918854, 0.07734571397304535, 0.01635866053402424, 0.0570523776113987, 0.04405917227268219, 0.1049247458577156, 0.07144183665513992], [0.07775741815567017, 0.01045867707580328, 0.03794471174478531, 0.061770979315042496, 0.01737932302057743, 0.018172351643443108, 0.02036537230014801, 0.00940365344285965, 0.013026232831180096, 0.011816933751106262, 0.017321467399597168, 0.010460124351084232, 0.012704421766102314, 0.003985970746725798, 0.030224645510315895, 0.07559867203235626, 0.03257305175065994, 0.04885295405983925, 0.0747009664773941, 0.027976304292678833, 0.048277847468853, 0.10092408210039139, 0.12358730286359787, 0.11471649259328842], [0.023669809103012085, 0.02662781998515129, 0.03476599603891373, 0.06566714495420456, 0.04400831088423729, 
0.03031940571963787, 0.022837648168206215, 0.025301674380898476, 0.01708906888961792, 0.009028634056448936, 0.006205878220498562, 0.011121601797640324, 0.012285460717976093, 0.009474781341850758, 0.011210019700229168, 0.05858035758137703, 0.05306762084364891, 0.032332152128219604, 0.04269055277109146, 0.02266557887196541, 0.04198309779167175, 0.08729401230812073, 0.06929385662078857, 0.2424795776605606], [0.03133795037865639, 0.0033462876453995705, 0.06579920649528503, 0.0654020830988884, 0.008207684382796288, 0.05971665307879448, 0.035355981439352036, 0.03169174864888191, 0.027309969067573547, 0.020215578377246857, 0.011309048160910606, 0.008697438053786755, 0.007511752191931009, 0.0013936751056462526, 0.019475828856229782, 0.05556337535381317, 0.010422070510685444, 0.06959372013807297, 0.0642084926366806, 0.034115344285964966, 0.027106767520308495, 0.07969383895397186, 0.08718673884868622, 0.17533880472183228], [0.1042867973446846, 0.03718514367938042, 0.10169469565153122, 0.07953933626413345, 0.06516615301370621, 0.14032652974128723, 0.05713100731372833, 0.0495947040617466, 0.07711312174797058, 0.05381094664335251, 0.035500284284353256, 0.014745795167982578, 0.013146025128662586, 0.00967664085328579, 0.01409487146884203, 0.015760304406285286, 0.009928204119205475, 0.006564279552549124, 0.006232257466763258, 0.009610814973711967, 0.022463466972112656, 0.022258851677179337, 0.031888216733932495, 0.022281503304839134], [0.08794113248586655, 0.021597901359200478, 0.04789199307560921, 0.0867735743522644, 0.016344094648957253, 0.08761905878782272, 0.025142192840576172, 0.03990126773715019, 0.011530835181474686, 0.019238866865634918, 0.0039023193530738354, 0.0076657915487885475, 0.0032756596338003874, 0.0029437355697155, 0.006334666628390551, 0.048426222056150436, 0.017913704738020897, 0.07748652249574661, 0.0555761493742466, 0.0488959439098835, 0.05267995223402977, 0.09256633371114731, 0.03702333942055702, 0.1013287678360939]], [[0.004523343872278929, 
0.0011668505612760782, 0.003585450118407607, 0.0021088954526931047, 0.0026631057262420654, 0.0015969488304108381, 0.0029438072815537453, 0.003615917172282934, 0.022672031074762344, 0.006328873801976442, 0.013863537460565567, 0.08944883942604065, 0.2798328399658203, 0.026406219229102135, 0.049432411789894104, 0.10573585331439972, 0.02894272841513157, 0.02086096815764904, 0.024904148653149605, 0.023875020444393158, 0.10508861392736435, 0.03237468749284744, 0.021768657490611076, 0.12626025080680847], [0.004567069001495838, 0.0017269050003960729, 0.0052482010796666145, 0.002334248274564743, 0.010853112675249577, 0.003355571534484625, 0.007567542605102062, 0.005715822800993919, 0.01933799870312214, 0.012236983515322208, 0.019558047875761986, 0.11179061979055405, 0.2808234393596649, 0.02682720310986042, 0.052969980984926224, 0.06180183216929436, 0.09217341244220734, 0.026994841173291206, 0.07081331312656403, 0.02125300094485283, 0.05391029268503189, 0.0171782448887825, 0.01385314017534256, 0.07710912823677063], [0.003304621670395136, 0.0010458765318617225, 0.011218028143048286, 0.0034025199711322784, 0.008642012253403664, 0.003830923931673169, 0.00880713015794754, 0.00586329260841012, 0.07494419068098068, 0.014302695170044899, 0.03871666640043259, 0.050915539264678955, 0.11314708739519119, 0.01689780317246914, 0.09111161530017853, 0.07572346925735474, 0.05358438566327095, 0.016662849113345146, 0.048966314643621445, 0.022633060812950134, 0.13887548446655273, 0.05777551606297493, 0.05232907086610794, 0.08729984611272812], [0.002155926311388612, 0.0009714306215755641, 0.012899180874228477, 0.003254172159358859, 0.00813657883554697, 0.01997668854892254, 0.04983595758676529, 0.021556368097662926, 0.05534839257597923, 0.03420862555503845, 0.12408500164747238, 0.12786607444286346, 0.1335647851228714, 0.013231923803687096, 0.06580516695976257, 0.06352056562900543, 0.03638777881860733, 0.024106187745928764, 0.0796518474817276, 0.016379063948988914, 0.039551593363285065, 
0.011513526551425457, 0.02459397166967392, 0.03139927610754967], [0.010108768939971924, 0.00324650970287621, 0.034896593540906906, 0.007786597590893507, 0.009365087375044823, 0.009415588341653347, 0.03567804396152496, 0.02777339518070221, 0.034184448421001434, 0.03140213340520859, 0.08043644577264786, 0.032357003539800644, 0.050204407423734665, 0.0124288871884346, 0.16845321655273438, 0.0794425904750824, 0.036245837807655334, 0.04952579364180565, 0.08075258880853653, 0.04972757026553154, 0.05608817934989929, 0.0168781578540802, 0.047160953283309937, 0.0364411436021328], [0.004176140297204256, 0.0017503307899460196, 0.006500092800706625, 0.005481070838868618, 0.012701260857284069, 0.006557609420269728, 0.007604501210153103, 0.01532872673124075, 0.032528478652238846, 0.03558361157774925, 0.0391651913523674, 0.11518728733062744, 0.18471793830394745, 0.031214764341711998, 0.04152245447039604, 0.07586103677749634, 0.03922101482748985, 0.028911307454109192, 0.034890491515398026, 0.040790338069200516, 0.08180626481771469, 0.038782667368650436, 0.017950499430298805, 0.10176693648099899], [0.015979411080479622, 0.004028433468192816, 0.014940734952688217, 0.009634497575461864, 0.006019369699060917, 0.002113168127834797, 0.009614845737814903, 0.010028508491814137, 0.05333171412348747, 0.01177570503205061, 0.03305840864777565, 0.05154408514499664, 0.09750451892614365, 0.027750372886657715, 0.1311100423336029, 0.08053895086050034, 0.03134973347187042, 0.030330151319503784, 0.0498339906334877, 0.03551802784204483, 0.13173061609268188, 0.05392424762248993, 0.04933797940611839, 0.059002455323934555], [0.014723874628543854, 0.0063371616415679455, 0.023429764434695244, 0.010638375766575336, 0.0056193191558122635, 0.0020006331615149975, 0.013828138820827007, 0.012327677570283413, 0.04108812287449837, 0.02478611096739769, 0.06312498450279236, 0.055653635412454605, 0.09266145527362823, 0.03596233204007149, 0.1417999416589737, 0.05782433599233627, 0.034962717443704605, 
0.03347377851605415, 0.0711183100938797, 0.05059878155589104, 0.08650802075862885, 0.04309463873505592, 0.035382818430662155, 0.04305518418550491], [0.003760743420571089, 0.0008133887895382941, 0.01079124677926302, 0.003255804069340229, 0.001826181192882359, 0.0007995901396498084, 0.0034938156604766846, 0.003429789561778307, 0.03485628962516785, 0.004262630827724934, 0.010949205607175827, 0.029685398563742638, 0.13294516503810883, 0.011027238331735134, 0.09996602684259415, 0.02474294602870941, 0.015528591349720955, 0.014920108951628208, 0.041811127215623856, 0.03240484744310379, 0.3029559850692749, 0.06507040560245514, 0.056172944605350494, 0.09453054517507553], [0.009115881286561489, 0.0035093254409730434, 0.028399961069226265, 0.003759450512006879, 0.004079641308635473, 0.0030887087341398, 0.016783909872174263, 0.010108496993780136, 0.043452195823192596, 0.014319311827421188, 0.07391621172428131, 0.020919514819979668, 0.04294011741876602, 0.021021153777837753, 0.20195844769477844, 0.033777832984924316, 0.029032055288553238, 0.036710165441036224, 0.09167002141475677, 0.044132642447948456, 0.10952680557966232, 0.030792873352766037, 0.09131855517625809, 0.03566668927669525], [0.013173925690352917, 0.006794311106204987, 0.0162519384175539, 0.014272745698690414, 0.00370103120803833, 0.0038890463765710592, 0.012493823654949665, 0.006517832633107901, 0.06051633134484291, 0.0074139744974672794, 0.01947834901511669, 0.015711341053247452, 0.02960844896733761, 0.007369278930127621, 0.051810700446367264, 0.045207761228084564, 0.021002713590860367, 0.021834222599864006, 0.12370442599058151, 0.03887058049440384, 0.3210518956184387, 0.06621237844228745, 0.05445144698023796, 0.03866158053278923], [0.005693309009075165, 0.0017973026260733604, 0.014506706967949867, 0.005113512277603149, 0.003190513700246811, 0.0030853603966534138, 0.005674153100699186, 0.0067596533335745335, 0.023186709731817245, 0.011119384318590164, 0.014443812891840935, 0.03294089436531067, 0.06268075108528137, 
0.017749782651662827, 0.06807713210582733, 0.030341416597366333, 0.018518058583140373, 0.05161463841795921, 0.049830999225378036, 0.08232413977384567, 0.11943158507347107, 0.08101336658000946, 0.0617845356464386, 0.22912222146987915], [0.00568431755527854, 0.0011500397231429815, 0.010972591117024422, 0.004628476221114397, 0.003274402813985944, 0.002547025680541992, 0.002723303157836199, 0.006854281760752201, 0.021809931844472885, 0.004973203409463167, 0.011189110577106476, 0.024296652525663376, 0.06389699131250381, 0.011284613981842995, 0.052328236401081085, 0.02486991323530674, 0.017955975607037544, 0.05324865132570267, 0.0342748761177063, 0.09443770349025726, 0.12006327509880066, 0.06614447385072708, 0.0729510709643364, 0.2884408235549927], [0.0017941773403435946, 0.0002781361690722406, 0.0061125075444579124, 0.000779111753217876, 0.0014746218221262097, 0.0009892649250105023, 0.003322609467431903, 0.0012676267651841044, 0.008190816268324852, 0.0037697593215852976, 0.01566336862742901, 0.040468979626894, 0.12989918887615204, 0.006445553619414568, 0.08742809295654297, 0.017724499106407166, 0.02468414418399334, 0.032540448009967804, 0.08582370728254318, 0.03604098781943321, 0.095657117664814, 0.05316944420337677, 0.055897653102874756, 0.2905781865119934], [0.0017736656591296196, 0.00023600882559549063, 0.010272416286170483, 0.0018140895990654826, 0.004323739558458328, 0.002162522403523326, 0.004818203393369913, 0.002395722083747387, 0.03084166906774044, 0.004860326647758484, 0.012581692077219486, 0.01658402383327484, 0.03184301778674126, 0.0017914216732606292, 0.03620356693863869, 0.010973007418215275, 0.018585918471217155, 0.010475094430148602, 0.056366030126810074, 0.04175892099738121, 0.20509010553359985, 0.15466853976249695, 0.0892128199338913, 0.25036752223968506], [0.005297405179589987, 0.00031071543344296515, 0.016432341188192368, 0.0037488730158656836, 0.0009874328970909119, 0.0018779024248942733, 0.006928798742592335, 0.0035099550150334835, 
0.0203497726470232, 0.003228693036362529, 0.013768395408987999, 0.006384491920471191, 0.0085451016202569, 0.0012518824078142643, 0.03858492523431778, 0.00924923736602068, 0.00482134660705924, 0.048853158950805664, 0.10034151375293732, 0.13758054375648499, 0.1523648500442505, 0.08336532115936279, 0.13450878858566284, 0.19770856201648712], [0.013257487677037716, 0.0012046854244545102, 0.04149679094552994, 0.0054459962993860245, 0.0023054564371705055, 0.004111688584089279, 0.017629822716116905, 0.011025434359908104, 0.02388528361916542, 0.008610020391643047, 0.016745466738939285, 0.00811707228422165, 0.015089810825884342, 0.0018648954574018717, 0.09511469304561615, 0.02046027220785618, 0.008640020154416561, 0.045554377138614655, 0.0782736986875534, 0.11341562122106552, 0.16141772270202637, 0.09145405143499374, 0.10659517347812653, 0.10828443616628647], [0.01639855094254017, 0.0024646897800266743, 0.026431957259774208, 0.008204275742173195, 0.006776092574000359, 0.0058733997866511345, 0.01731278747320175, 0.020596632733941078, 0.036496564745903015, 0.009664667770266533, 0.023887602612376213, 0.012349671684205532, 0.013475994579494, 0.0036782813258469105, 0.04081467539072037, 0.02168167009949684, 0.014814169146120548, 0.03456944227218628, 0.08081598579883575, 0.17534223198890686, 0.1025514155626297, 0.08277512341737747, 0.08907941728830338, 0.15394465625286102], [0.03168730437755585, 0.0030892782378941774, 0.046071913093328476, 0.018153328448534012, 0.004469888750463724, 0.0032388754189014435, 0.012875099666416645, 0.014916147105395794, 0.04040123149752617, 0.006007377058267593, 0.011876898817718029, 0.007469442207366228, 0.009398115798830986, 0.0029530434403568506, 0.07568439096212387, 0.02836771309375763, 0.010147782042622566, 0.027703365311026573, 0.0364680141210556, 0.09995216131210327, 0.15128856897354126, 0.1323041170835495, 0.12299778312444687, 0.10247813165187836], [0.052955057471990585, 0.014188559725880623, 0.07623016089200974, 0.021377475932240486, 
0.005075601860880852, 0.007250795606523752, 0.01791597716510296, 0.028406692668795586, 0.019633708521723747, 0.010628417134284973, 0.012826540507376194, 0.004154270514845848, 0.005276248790323734, 0.006579473149031401, 0.05690603330731392, 0.015961354598402977, 0.009824980050325394, 0.07085557281970978, 0.05072744935750961, 0.20748457312583923, 0.05716593936085701, 0.06728612631559372, 0.09389359503984451, 0.08739534020423889], [0.017172599211335182, 0.0014808096457272768, 0.049047138541936874, 0.014948047697544098, 0.0031205476261675358, 0.004061469808220863, 0.005054566077888012, 0.012878570705652237, 0.06447123736143112, 0.00567220663651824, 0.004470278508961201, 0.00395261961966753, 0.009091926738619804, 0.001566195976920426, 0.05009257793426514, 0.0163270253688097, 0.007160994224250317, 0.0230470672249794, 0.019293159246444702, 0.07791712880134583, 0.2406931221485138, 0.11760083585977554, 0.12224799394607544, 0.1286318600177765], [0.05690193176269531, 0.014382394030690193, 0.13756002485752106, 0.03957198187708855, 0.011402890086174011, 0.0321660079061985, 0.022400660440325737, 0.03472236543893814, 0.0670078918337822, 0.022221611812710762, 0.03802449256181717, 0.0029308537486940622, 0.003294251160696149, 0.003359850961714983, 0.06528116017580032, 0.018711285665631294, 0.013945070095360279, 0.03450501710176468, 0.022089708596467972, 0.06228525564074516, 0.07383942604064941, 0.04535544663667679, 0.15461203455924988, 0.02342836745083332], [0.08987422287464142, 0.02391870692372322, 0.06725283712148666, 0.11012803763151169, 0.008860019035637379, 0.04712531715631485, 0.030655622482299805, 0.05052352324128151, 0.1136554479598999, 0.0177167821675539, 0.015944965183734894, 0.006248014979064465, 0.006571034900844097, 0.002562587847933173, 0.02515166439116001, 0.04042346030473709, 0.006571178324520588, 0.02089238539338112, 0.02537456713616848, 0.0534590408205986, 0.1264767199754715, 0.046580970287323, 0.04385484382510185, 0.020177997648715973], [0.08504929393529892, 
0.021513836458325386, 0.09867586195468903, 0.07971380650997162, 0.009668254293501377, 0.049947094172239304, 0.02106875367462635, 0.07455576211214066, 0.03670813515782356, 0.020897559821605682, 0.014841178432106972, 0.009870014153420925, 0.011267328634858131, 0.012369651347398758, 0.055579762905836105, 0.031875357031822205, 0.006337576545774937, 0.03922467678785324, 0.013375692069530487, 0.08926112204790115, 0.04408794268965721, 0.04789702966809273, 0.06661409884691238, 0.05960012227296829]], [[0.08374729007482529, 0.17560893297195435, 0.09382178634405136, 0.010750237852334976, 0.03726649284362793, 0.029483232647180557, 0.12985238432884216, 0.13290026783943176, 0.09337463974952698, 0.01683669723570347, 0.061209116131067276, 0.010553299449384212, 0.005596889648586512, 0.020687950775027275, 0.02068863995373249, 0.001428784802556038, 0.0035654855892062187, 0.0034238158259540796, 0.010079275816679, 0.009087388403713703, 0.018427129834890366, 0.0026983446441590786, 0.02318711206316948, 0.005724800284951925], [0.0818057730793953, 0.29719847440719604, 0.025054931640625, 0.032411009073257446, 0.058801159262657166, 0.11069270223379135, 0.08158700168132782, 0.04076877608895302, 0.035907305777072906, 0.062387652695178986, 0.040954794734716415, 0.02195793017745018, 0.011457049287855625, 0.07081989198923111, 0.005114687141031027, 0.004279269836843014, 0.005144886206835508, 0.002644843189045787, 0.0031519539188593626, 0.0011151980143040419, 0.0020543306600302458, 0.0008042926201596856, 0.0023441084194928408, 0.0015418173279613256], [0.029181281104683876, 0.013273533433675766, 0.05471539869904518, 0.0298870000988245, 0.06959255039691925, 0.11039358377456665, 0.08368068933486938, 0.24593105912208557, 0.15401028096675873, 0.03786596283316612, 0.04917820170521736, 0.02134246751666069, 0.01669987663626671, 0.018320783972740173, 0.01618099771440029, 0.0032047692220658064, 0.004834068473428488, 0.0029120263643562794, 0.0037186804693192244, 0.00461640814319253, 0.01092776469886303, 
0.003577234921976924, 0.010659871622920036, 0.005295509938150644], [0.0027277593035250902, 0.0008687977679073811, 0.06817516684532166, 0.008362763561308384, 0.002111098961904645, 0.032323677092790604, 0.02952680177986622, 0.7889418005943298, 0.01474746409803629, 0.0022656822111457586, 0.007616002112627029, 0.0003686463460326195, 0.0003443435998633504, 0.00026039956719614565, 0.0046331086196005344, 0.0003558364405762404, 1.4901136637490708e-05, 0.00010447952809045091, 0.0008281477494165301, 0.007676342967897654, 0.005961546208709478, 0.0074219610542058945, 0.013238660991191864, 0.0011245844652876258], [0.009069127961993217, 0.004088579211384058, 0.03821542486548424, 0.13986775279045105, 0.015830736607313156, 0.08978497982025146, 0.28195422887802124, 0.19216743111610413, 0.10861480236053467, 0.053697168827056885, 0.016662949696183205, 0.0073113953694701195, 0.004153975285589695, 0.0006625677924603224, 0.0014956106897443533, 0.002324597677215934, 0.0004668117326218635, 0.003089416539296508, 0.009768298827111721, 0.0011883288389071822, 0.008808380924165249, 0.003216571407392621, 0.003583466401323676, 0.003977488726377487], [0.005869498010724783, 0.0032635731622576714, 0.03214505314826965, 0.009294032119214535, 0.007927126251161098, 0.06323663890361786, 0.05744340643286705, 0.7400039434432983, 0.023654183372855186, 0.026711231097579002, 0.01411521341651678, 0.002040153369307518, 0.0004602092376444489, 0.0002273762074764818, 0.0007350781233981252, 4.869248004979454e-05, 4.868388714385219e-05, 0.0004820475005544722, 0.0006231715669855475, 0.003207596717402339, 0.0016360521549358964, 0.0020381242502480745, 0.004085130989551544, 0.0007036968600004911], [0.009916644543409348, 0.003773616161197424, 0.019954511895775795, 0.04971013963222504, 0.0057680741883814335, 0.24540667235851288, 0.024618370458483696, 0.3468798100948334, 0.046567633748054504, 0.15422214567661285, 0.04214470088481903, 0.02539043128490448, 0.006464939098805189, 0.0023614235688000917, 0.0013675568625330925, 
0.000981334364041686, 0.00011078250099672005, 0.0016294801607728004, 0.00046744663268327713, 0.005424133501946926, 0.0021408952306956053, 0.0023811478167772293, 0.0015984303317964077, 0.000719621661119163], [0.01103768590837717, 0.009809297509491444, 0.038642700761556625, 0.1985556036233902, 0.003918003290891647, 0.25786077976226807, 0.03560097515583038, 0.06272795051336288, 0.10043639689683914, 0.14909881353378296, 0.05604240670800209, 0.024104705080389977, 0.023126354441046715, 0.010118531063199043, 0.004928836598992348, 0.004678471013903618, 0.00012455058458726853, 0.0023641835432499647, 0.000600792292971164, 0.000734959146939218, 0.0022188364528119564, 0.000734129745978862, 0.0013825846835970879, 0.0011525979498401284], [0.018196921795606613, 0.023483173921704292, 0.01699863187968731, 0.019673630595207214, 0.02051762491464615, 0.3553188443183899, 0.1096656545996666, 0.07747220247983932, 0.2799786925315857, 0.01885557547211647, 0.02549150586128235, 0.012008321471512318, 0.005295161623507738, 0.003983472939580679, 0.0020956434309482574, 0.00027123457402922213, 0.0006484971381723881, 0.0017793452134355903, 0.0009657290647737682, 0.00031672450131736696, 0.005026238039135933, 0.0001591620675753802, 0.0009492510580457747, 0.0008487991290166974], [0.0012397010577842593, 0.0007274636882357299, 0.014113835990428925, 0.01634407602250576, 0.0014724889770150185, 0.15327903628349304, 0.006310861092060804, 0.5421842932701111, 0.039174407720565796, 0.04159415513277054, 0.042825810611248016, 0.0941682755947113, 0.02008778415620327, 0.007012398913502693, 0.011893689632415771, 0.001646361779421568, 9.146144293481484e-05, 0.0008378790225833654, 8.100261038634926e-05, 0.0012970390962436795, 0.00035682012094184756, 0.00195605237968266, 0.0004964034887962043, 0.0008086857851594687], [0.001121348119340837, 0.003384856041520834, 0.007736446335911751, 0.0008806705009192228, 0.007216642145067453, 0.05167682468891144, 0.0036013589706271887, 0.02140050008893013, 0.2986809015274048, 
0.0052877990528941154, 0.024694034829735756, 0.06002324819564819, 0.07320532202720642, 0.23500791192054749, 0.1765456348657608, 0.002508715493604541, 0.010486825369298458, 0.009841187857091427, 0.0005961415590718389, 0.0006207191618159413, 0.0025102447252720594, 0.0001938677451107651, 0.0006996692973189056, 0.002079141791909933], [0.0014418251812458038, 0.004098088946193457, 0.05607154220342636, 0.011362393386662006, 0.003450109390541911, 0.005286634899675846, 0.011866359040141106, 0.04261181131005287, 0.08118826150894165, 0.004435242619365454, 0.04343116655945778, 0.03839344531297684, 0.06396228820085526, 0.02917032688856125, 0.39748862385749817, 0.15649768710136414, 0.004833771847188473, 0.0063740164041519165, 0.0058713024482131, 0.0057839821092784405, 0.005981080234050751, 0.0027611630503088236, 0.004811062011867762, 0.012827739119529724], [0.0023879052605479956, 0.006352030672132969, 0.019526708871126175, 0.021848296746611595, 0.002665703883394599, 0.008936039172112942, 0.012677903287112713, 0.037187159061431885, 0.07503823190927505, 0.016912715509533882, 0.05394783243536949, 0.19343554973602295, 0.1417582482099533, 0.038424257189035416, 0.14955289661884308, 0.16892778873443604, 0.008065858855843544, 0.013771294616162777, 0.006078480742871761, 0.006123436149209738, 0.0037959839683026075, 0.0015764172421768308, 0.0017228772630915046, 0.009286369197070599], [0.0021415064111351967, 0.009246519766747952, 0.026505377143621445, 0.008435762487351894, 0.0017741270130500197, 0.009466097690165043, 0.007257342338562012, 0.02337324060499668, 0.31690338253974915, 0.01196921057999134, 0.0597483329474926, 0.23372869193553925, 0.13190126419067383, 0.033622562885284424, 0.07933815568685532, 0.016951780766248703, 0.001792258583009243, 0.012576073408126831, 0.0035918059293180704, 0.003133028745651245, 0.004083495587110519, 0.00013199263776186854, 0.0003361511917319149, 0.0019917809404432774], [0.0009112763218581676, 0.0014057623920962214, 0.002535782288759947, 
0.0032432423904538155, 0.00040413124952465296, 0.004244229290634394, 0.00021920779545325786, 0.0018120968015864491, 0.031846895813941956, 0.005623939912766218, 0.01783553697168827, 0.38956117630004883, 0.2678217887878418, 0.11140771210193634, 0.06243318319320679, 0.05786604434251785, 0.006216341629624367, 0.023793965578079224, 0.0013507273979485035, 0.004214953165501356, 0.0026316766161471605, 0.0002500805421732366, 0.00020925392163917422, 0.002160959644243121], [0.0015765116550028324, 0.0014146745670586824, 0.04120967909693718, 0.00424983212724328, 0.0009013116941787302, 0.0024066376499831676, 0.0014322304632514715, 0.01900508999824524, 0.0362338162958622, 0.0025268583558499813, 0.023075029253959656, 0.05813298374414444, 0.04821456968784332, 0.013527998700737953, 0.43198296427726746, 0.030315730720758438, 0.002773198764771223, 0.02267725020647049, 0.012307741679251194, 0.1528594195842743, 0.04466762766242027, 0.010708022862672806, 0.012568376027047634, 0.025232426822185516], [0.0009743968839757144, 0.0011116362875327468, 0.011956928297877312, 0.04002271220088005, 0.0007461233763024211, 0.012720935977995396, 0.004274914041161537, 0.005399863701313734, 0.05775190889835358, 0.002814975567162037, 0.01105526089668274, 0.10146508365869522, 0.1879170686006546, 0.027889756485819817, 0.10834918916225433, 0.27210456132888794, 0.004856303334236145, 0.046289924532175064, 0.035927388817071915, 0.008642952889204025, 0.029104437679052353, 0.004126336425542831, 0.0022460713516920805, 0.022251319140195847], [0.000262497051153332, 0.00023085260181687772, 0.0076731243170797825, 0.002145569771528244, 0.00013790998491458595, 0.0008335306774824858, 0.00020035495981574059, 0.0024047328624874353, 0.00489093316718936, 0.0003345625882502645, 0.005387772340327501, 0.038559895008802414, 0.061386194080114365, 0.0415344312787056, 0.573042094707489, 0.1487797498703003, 0.0027844959404319525, 0.009793553501367569, 0.00511539913713932, 0.04885558411478996, 0.013842962682247162, 
0.00691854115575552, 0.004969314206391573, 0.019915975630283356], [0.0001568755687912926, 0.00012575587606988847, 0.005819317419081926, 0.004851207602769136, 8.183833415387198e-05, 0.00029005008400417864, 0.00014372625446412712, 0.0005387411802075803, 0.004515539389103651, 0.0002984872553497553, 0.002818700857460499, 0.01898367889225483, 0.05618412420153618, 0.01274492684751749, 0.35025396943092346, 0.4671816825866699, 0.0036187467630952597, 0.016455749049782753, 0.006325882393866777, 0.014134705998003483, 0.012639951892197132, 0.004366230219602585, 0.0024680851493030787, 0.01500190980732441], [0.0002355042815906927, 0.00020133242651354522, 0.0060074208304286, 0.011736803688108921, 0.00010221028060186654, 0.0005508614704012871, 0.0004513958701863885, 0.0002543731243349612, 0.004379059188067913, 0.00035707466304302216, 0.0024845784064382315, 0.008452638052403927, 0.049396779388189316, 0.0110619543120265, 0.21302808821201324, 0.6190535426139832, 0.004981196019798517, 0.022376948967576027, 0.011430701240897179, 0.0022069832775741816, 0.005907760001718998, 0.002947826636955142, 0.0032726761419326067, 0.019122207537293434], [0.0019026404479518533, 0.0016437104204669595, 0.018607784062623978, 0.006216912530362606, 0.0006224646931514144, 0.00033707855618558824, 0.00230801641009748, 0.00015001864812802523, 0.00868947897106409, 0.00017728994134813547, 0.0026306062936782837, 0.002617157530039549, 0.012934863567352295, 0.001952997175976634, 0.1600772738456726, 0.08025768399238586, 0.03798336908221245, 0.11286799609661102, 0.293087363243103, 0.013870091177523136, 0.128456711769104, 0.004234324209392071, 0.03455200046300888, 0.07382215559482574], [0.0002719854237511754, 7.289019413292408e-05, 0.008588257245719433, 0.0045111821964383125, 0.00013658194802701473, 0.00010310867946827784, 0.00015654225717298687, 0.0008484688005410135, 0.0014097102684900165, 0.0012228989508002996, 0.005463066976517439, 0.030630502849817276, 0.03618369624018669, 0.0010635132202878594, 
0.08606866002082825, 0.36630040407180786, 0.007968132384121418, 0.11966390162706375, 0.034830085933208466, 0.10752207785844803, 0.01987573318183422, 0.08665485680103302, 0.010443152859807014, 0.07001057267189026], [0.0018261983059346676, 0.0009016465628519654, 0.008971808478236198, 0.003212741808965802, 0.002427272964268923, 0.0021310467272996902, 0.0006517039146274328, 0.0006301059620454907, 0.00547471409663558, 0.0007696724496781826, 0.005127412732690573, 0.012964142486453056, 0.012851721607148647, 0.0041101668030023575, 0.02364841289818287, 0.020588677376508713, 0.022705011069774628, 0.15696220099925995, 0.10352890938520432, 0.17854514718055725, 0.21910837292671204, 0.11319278925657272, 0.04082055762410164, 0.05884948745369911], [0.0003993179416283965, 0.00012934562982991338, 0.0046849483624100685, 0.0025385108310729265, 0.00016063770453911275, 9.731885802466422e-05, 0.000149663130287081, 0.0004619772080332041, 8.184791659004986e-05, 6.04643537371885e-05, 0.0003918383736163378, 0.0006569155375473201, 0.0008945969166234136, 0.00016832487017381936, 0.006409931927919388, 0.06373520195484161, 0.0005495420191437006, 0.004326747264713049, 0.027310676872730255, 0.5217934250831604, 0.04086872562766075, 0.23091737926006317, 0.05066707730293274, 0.0425456240773201]], [[0.020286450162529945, 0.009666753932833672, 0.030020594596862793, 0.03580186143517494, 0.012790534645318985, 0.07942108064889908, 0.015466433949768543, 0.022492097690701485, 0.06602644920349121, 0.02740425616502762, 0.06445463746786118, 0.0756574496626854, 0.06456422060728073, 0.022760625928640366, 0.0775240957736969, 0.052883487194776535, 0.025874214246869087, 0.04544145241379738, 0.026327330619096756, 0.018092166632413864, 0.06761828809976578, 0.028190210461616516, 0.05739735811948776, 0.053838055580854416], [0.012643632479012012, 0.005458412226289511, 0.02527347207069397, 0.02771047316491604, 0.01024417020380497, 0.04792104661464691, 0.010128960944712162, 0.021465783938765526, 0.05877383053302765, 
0.042791422456502914, 0.06424299627542496, 0.13036634027957916, 0.0711238756775856, 0.016009235754609108, 0.08741084486246109, 0.048499032855033875, 0.03527514263987541, 0.05647141486406326, 0.020783277228474617, 0.016899287700653076, 0.04527990147471428, 0.030438942834734917, 0.039596255868673325, 0.07519221305847168], [0.015421504154801369, 0.0051985839381814, 0.016739685088396072, 0.02543356828391552, 0.017199236899614334, 0.02134472131729126, 0.008483619429171085, 0.05500563979148865, 0.04736480861902237, 0.021200891584157944, 0.052151355892419815, 0.039553917944431305, 0.019880948588252068, 0.013121497817337513, 0.04237214848399162, 0.09525749087333679, 0.08897077292203903, 0.07866933196783066, 0.019921083003282547, 0.056610263884067535, 0.09969756007194519, 0.047321632504463196, 0.05492736026644707, 0.05815231427550316], [0.03267625346779823, 0.0642259493470192, 0.0872795581817627, 0.037227995693683624, 0.013080607168376446, 0.025866789743304253, 0.01891408860683441, 0.02883533015847206, 0.11960220336914062, 0.02770463563501835, 0.0770331621170044, 0.015864774584770203, 0.014227275736629963, 0.02560841105878353, 0.027515120804309845, 0.015833020210266113, 0.010558653622865677, 0.02249186486005783, 0.0381261482834816, 0.03273025155067444, 0.13700474798679352, 0.04063490778207779, 0.07412955909967422, 0.012828649021685123], [0.03988339379429817, 0.015229248441755772, 0.10826783627271652, 0.061845965683460236, 0.038062017410993576, 0.030829312279820442, 0.061482105404138565, 0.04856014624238014, 0.09560692310333252, 0.010653818026185036, 0.045860692858695984, 0.01446184329688549, 0.007753295823931694, 0.010939662344753742, 0.02772045135498047, 0.02937537431716919, 0.04538184031844139, 0.033498767763376236, 0.0691499188542366, 0.03760494291782379, 0.1161460429430008, 0.013811206445097923, 0.023620719090104103, 0.014254415407776833], [0.033417366445064545, 0.02417493239045143, 0.09997984021902084, 0.06438372284173965, 0.04859045147895813, 0.031852904707193375, 
0.03822145611047745, 0.032643549144268036, 0.04925324022769928, 0.024824725463986397, 0.04251262918114662, 0.019937748089432716, 0.024988191202282906, 0.023373691365122795, 0.033738669008016586, 0.023669075220823288, 0.05202613025903702, 0.031222663819789886, 0.05299612507224083, 0.039582379162311554, 0.0850585401058197, 0.04160435497760773, 0.0565694160759449, 0.025378042832016945], [0.023101331666111946, 0.01609194092452526, 0.06916923820972443, 0.034615110605955124, 0.04302709177136421, 0.02742152288556099, 0.03024394065141678, 0.030491068959236145, 0.06505883485078812, 0.02432211861014366, 0.0424879752099514, 0.04079706594347954, 0.03117828071117401, 0.030181430280208588, 0.05374455824494362, 0.04509212076663971, 0.06648588925600052, 0.029064904898405075, 0.03223065659403801, 0.035728227347135544, 0.09921432286500931, 0.04648900032043457, 0.04283789545297623, 0.04092556610703468], [0.03482078015804291, 0.029092473909258842, 0.04807653650641441, 0.06278533488512039, 0.03892235457897186, 0.03296912834048271, 0.02612798474729061, 0.023885535076260567, 0.06694969534873962, 0.027715107426047325, 0.03605486825108528, 0.026495639234781265, 0.032996855676174164, 0.03317035362124443, 0.03429967164993286, 0.058692727237939835, 0.0629209354519844, 0.035383451730012894, 0.039982136338949203, 0.04071073979139328, 0.09734304994344711, 0.04391847923398018, 0.04016204550862312, 0.026524145156145096], [0.03028636798262596, 0.015428020618855953, 0.07390406727790833, 0.06886611133813858, 0.07651876658201218, 0.04137343540787697, 0.05748876556754112, 0.04231096804141998, 0.05297159031033516, 0.01776350848376751, 0.03655180335044861, 0.021556183695793152, 0.01589684933423996, 0.013648388907313347, 0.021038729697465897, 0.047128450125455856, 0.07664764672517776, 0.05008866265416145, 0.0489775612950325, 0.043406736105680466, 0.05211782455444336, 0.025463463738560677, 0.038320142775774, 0.03224596381187439], [0.03787108138203621, 0.02643624320626259, 0.13694912195205688, 
0.08478162437677383, 0.0811815857887268, 0.037996940314769745, 0.050040263682603836, 0.052770763635635376, 0.046262115240097046, 0.020923230797052383, 0.02622491866350174, 0.014904593117535114, 0.013411047868430614, 0.015243918634951115, 0.016135361045598984, 0.04302533343434334, 0.046459704637527466, 0.039725642651319504, 0.0310690775513649, 0.049698226153850555, 0.04907430335879326, 0.01804988645017147, 0.025162700563669205, 0.03660232946276665], [0.03831469267606735, 0.03329760208725929, 0.07932127267122269, 0.08601940423250198, 0.024644872173666954, 0.047068819403648376, 0.04273802787065506, 0.046351633965969086, 0.08389632403850555, 0.021400775760412216, 0.03592408448457718, 0.03876841440796852, 0.027783753350377083, 0.010954853147268295, 0.011871208436787128, 0.031203312799334526, 0.010539975948631763, 0.04823996499180794, 0.0405447743833065, 0.0542544461786747, 0.05159676447510719, 0.03431149572134018, 0.03454611450433731, 0.06640750914812088], [0.03403094410896301, 0.026855556294322014, 0.05799155309796333, 0.09707660973072052, 0.019943546503782272, 0.04408787563443184, 0.031814612448215485, 0.0390176884829998, 0.03889259323477745, 0.027717988938093185, 0.034734684973955154, 0.055874668061733246, 0.04856724664568901, 0.028654688969254494, 0.03571704402565956, 0.06623971462249756, 0.014805138111114502, 0.039137691259384155, 0.039795082062482834, 0.03619818016886711, 0.040666595101356506, 0.028017858043313026, 0.04234709218144417, 0.07181530445814133], [0.005330606363713741, 0.001534702256321907, 0.03366962820291519, 0.035077180713415146, 0.0038783208001405, 0.028861364349722862, 0.0045728194527328014, 0.02312156744301319, 0.05493038892745972, 0.016246555373072624, 0.06413228064775467, 0.1005752831697464, 0.06006577983498573, 0.007928806357085705, 0.061839863657951355, 0.06366421282291412, 0.011017825454473495, 0.05680735036730766, 0.016877250745892525, 0.024195626378059387, 0.06533622741699219, 0.0334959402680397, 0.07042291760444641, 0.15641748905181885], 
[0.006898147985339165, 0.0024212906137108803, 0.030169043689966202, 0.027674488723278046, 0.004905780777335167, 0.042080122977495193, 0.005262836813926697, 0.021730341017246246, 0.043920960277318954, 0.016730090603232384, 0.037169452756643295, 0.11278845369815826, 0.08266827464103699, 0.01613793522119522, 0.06600724905729294, 0.03875038027763367, 0.00949151162058115, 0.042567163705825806, 0.016415966674685478, 0.024245353415608406, 0.05989440530538559, 0.039112675935029984, 0.048855796456336975, 0.20410224795341492], [0.009292550384998322, 0.0035428814589977264, 0.014161564409732819, 0.009771662764251232, 0.001775987446308136, 0.016142569482326508, 0.002849338110536337, 0.025515958666801453, 0.05603763833642006, 0.018821800127625465, 0.0283669400960207, 0.13731040060520172, 0.08238024264574051, 0.01575007289648056, 0.06185974180698395, 0.03751501441001892, 0.0033325038384646177, 0.027566730976104736, 0.0074648731388151646, 0.029966216534376144, 0.05368610844016075, 0.09878476709127426, 0.039177972823381424, 0.21892644464969635], [0.0032917021308094263, 0.004538413602858782, 0.022408848628401756, 0.010801208205521107, 0.0016440000617876649, 0.03353601321578026, 0.002107802079990506, 0.019016195088624954, 0.07568687945604324, 0.016499005258083344, 0.07096640020608902, 0.114971823990345, 0.06960994005203247, 0.029878467321395874, 0.055183108896017075, 0.023664722219109535, 0.0028092425782233477, 0.026912705972790718, 0.008074776269495487, 0.016372643411159515, 0.09859725832939148, 0.08572502434253693, 0.09601571410894394, 0.11168814450502396], [0.006558413151651621, 0.0030347644351422787, 0.02774268202483654, 0.01379322074353695, 0.0036760589573532343, 0.027768146246671677, 0.004637134727090597, 0.025187671184539795, 0.10236978530883789, 0.01627725176513195, 0.07612103968858719, 0.11932746320962906, 0.04585660621523857, 0.021565014496445656, 0.10607399046421051, 0.05185793712735176, 0.011544951237738132, 0.03644530102610588, 0.01607004553079605, 0.017943136394023895, 
0.0813298150897026, 0.047398921102285385, 0.05140206590294838, 0.08601857721805573], [0.006656644865870476, 0.0035362825728952885, 0.021976439282298088, 0.01726137474179268, 0.004859312437474728, 0.03551343083381653, 0.005986788310110569, 0.037590645253658295, 0.0401633158326149, 0.01662428304553032, 0.06369830667972565, 0.11185406893491745, 0.06125650554895401, 0.03466865047812462, 0.08151958137750626, 0.04718159884214401, 0.013555055484175682, 0.03732703626155853, 0.014030433259904385, 0.03199866786599159, 0.061398785561323166, 0.04995675012469292, 0.09482479095458984, 0.10656125843524933], [0.003427832154557109, 0.001482450170442462, 0.01043076254427433, 0.0048051029443740845, 0.0028682739939540625, 0.023690572008490562, 0.0027204821817576885, 0.0180196613073349, 0.04052158072590828, 0.018852047622203827, 0.07403695583343506, 0.17432169616222382, 0.06898446381092072, 0.030208533629775047, 0.12794767320156097, 0.054423652589321136, 0.016592005267739296, 0.024877918884158134, 0.00832420215010643, 0.016560828313231468, 0.06321722269058228, 0.052086811512708664, 0.07648277282714844, 0.08511651307344437], [0.005724661983549595, 0.0026774064172059298, 0.01075491402298212, 0.014665897004306316, 0.003639432368800044, 0.023014863952994347, 0.0026429288554936647, 0.018654389306902885, 0.04144413396716118, 0.023605920374393463, 0.07283885031938553, 0.10882530361413956, 0.07911702245473862, 0.03946935757994652, 0.10343731939792633, 0.09937910735607147, 0.02071348950266838, 0.04587827995419502, 0.012179626151919365, 0.025266101583838463, 0.06577826291322708, 0.05484406277537346, 0.07085563987493515, 0.054593075066804886], [0.006309924181550741, 0.003197312820702791, 0.014921708032488823, 0.00844558421522379, 0.005486293695867062, 0.026794543489813805, 0.0037444059271365404, 0.024654172360897064, 0.05097078159451485, 0.02340429462492466, 0.06082947552204132, 0.12648765742778778, 0.0789097473025322, 0.039366476237773895, 0.11517052352428436, 0.06838546693325043, 
0.02354377508163452, 0.04999100789427757, 0.01371569000184536, 0.023204637691378593, 0.06458387523889542, 0.050085194408893585, 0.05778094753623009, 0.06001650542020798], [0.005574643146246672, 0.0015150867402553558, 0.0076245819218456745, 0.009385601617395878, 0.0017556969542056322, 0.023787055164575577, 0.002398914657533169, 0.04122472181916237, 0.018077710643410683, 0.011634145863354206, 0.04329878091812134, 0.15839996933937073, 0.08242755383253098, 0.03231193497776985, 0.11229316890239716, 0.08937305212020874, 0.007831581868231297, 0.041896723210811615, 0.009744768030941486, 0.030998334288597107, 0.040055982768535614, 0.03489285334944725, 0.051868390291929245, 0.14162863790988922], [0.01335869263857603, 0.003549856599420309, 0.011823054403066635, 0.01433224231004715, 0.0027134434785693884, 0.04511816054582596, 0.0054294453002512455, 0.045349761843681335, 0.04774290323257446, 0.02199961245059967, 0.044811610132455826, 0.16002601385116577, 0.08039162307977676, 0.02511008083820343, 0.07669749855995178, 0.07104966044425964, 0.006616792641580105, 0.04272349923849106, 0.013354896567761898, 0.023559533059597015, 0.037163686007261276, 0.058838557451963425, 0.04163256287574768, 0.1066068634390831], [0.006144022569060326, 0.0012625399976968765, 0.007897753268480301, 0.0114787258207798, 0.0019961907528340816, 0.027624130249023438, 0.00264370976947248, 0.02151138335466385, 0.022038880735635757, 0.0242618340998888, 0.04146777465939522, 0.20136725902557373, 0.09166461229324341, 0.02485097572207451, 0.14235439896583557, 0.08436150848865509, 0.009372579865157604, 0.036040034145116806, 0.010128123685717583, 0.013370494358241558, 0.034304432570934296, 0.038506802171468735, 0.04833298549056053, 0.09701883047819138]], [[0.008036954328417778, 0.0033010696060955524, 0.07266351580619812, 0.004808782134205103, 0.0077685159631073475, 0.004300389904528856, 0.01612572744488716, 0.010241203010082245, 0.040309444069862366, 0.007778226863592863, 0.09022843837738037, 0.10097432136535645, 
0.08811566978693008, 0.04508397355675697, 0.2445368617773056, 0.015767483040690422, 0.05015251412987709, 0.018193529918789864, 0.03741990402340889, 0.02421669475734234, 0.04858213663101196, 0.005541484337300062, 0.02165449783205986, 0.034198686480522156], [0.008961480110883713, 0.009705858305096626, 0.04321083426475525, 0.008883699774742126, 0.0347168929874897, 0.008006451651453972, 0.017758388072252274, 0.016997607424855232, 0.10720159858465195, 0.02943931333720684, 0.14982298016548157, 0.1476784497499466, 0.05096492916345596, 0.06597734987735748, 0.09558116644620895, 0.00984474178403616, 0.08865740150213242, 0.017109647393226624, 0.014876184985041618, 0.02441582642495632, 0.02316485159099102, 0.0019188572186976671, 0.007925907149910927, 0.017179537564516068], [0.011006283573806286, 0.012740411795675755, 0.15352405607700348, 0.021192820742726326, 0.022565482184290886, 0.06782429665327072, 0.24814581871032715, 0.09070909768342972, 0.0990411639213562, 0.029328590258955956, 0.03892156854271889, 0.0271266158670187, 0.0321226604282856, 0.009663904085755348, 0.008049529045820236, 0.001247685868293047, 0.0004067452682647854, 0.000506095471791923, 0.004199610557407141, 0.008784571662545204, 0.015990179032087326, 0.002918175421655178, 0.023023134097456932, 0.07096145302057266], [0.0009738897788338363, 0.0005130546051077545, 0.013512780889868736, 0.0015572096453979611, 0.01169500034302473, 0.3318233788013458, 0.008929268456995487, 0.009098760783672333, 0.5476090908050537, 0.003836859716102481, 0.013398493640124798, 0.005379874259233475, 0.024838274344801903, 0.0006539322203025222, 0.0046787871979177, 0.00039096068940125406, 0.0015732083702459931, 0.00037797761615365744, 0.0008207797072827816, 0.0004895766614936292, 0.012695608660578728, 0.002047948306426406, 0.0023472076281905174, 0.000758106354624033], [0.012095506303012371, 0.011671814136207104, 0.10298703610897064, 0.005147439893335104, 0.054333124309778214, 0.010161836631596088, 0.05965511500835419, 0.06029626727104187, 
0.1597742885351181, 0.06180558353662491, 0.14189104735851288, 0.014137850143015385, 0.04843896999955177, 0.004636138677597046, 0.09697636216878891, 0.0015970548847690225, 0.02129007689654827, 0.0020003761164844036, 0.012943151406943798, 0.006761889439076185, 0.04164748266339302, 0.01043084729462862, 0.039020832628011703, 0.02029993012547493], [0.015555359423160553, 0.020629705861210823, 0.07794710993766785, 0.0083647221326828, 0.025639614090323448, 0.030255086719989777, 0.08689142763614655, 0.47426339983940125, 0.09510892629623413, 0.023263530805706978, 0.060145940631628036, 0.012060469016432762, 0.008355875499546528, 0.007123146206140518, 0.03416162729263306, 0.004090613219887018, 0.0036307002883404493, 0.0013257992686703801, 0.0010117333149537444, 0.0007026572129689157, 0.0019333583768457174, 0.0016632388578727841, 0.003975787665694952, 0.0019002610351890326], [0.0021418321412056684, 0.0035344662610441446, 0.046523816883563995, 0.0015871679643169045, 0.02740459516644478, 0.04945772886276245, 0.03466762229800224, 0.039159391075372696, 0.6115201711654663, 0.05836770310997963, 0.05704531446099281, 0.01319018006324768, 0.02723226323723793, 0.001424625632353127, 0.015871521085500717, 0.00023454830807168037, 0.002851360710337758, 0.00029551630723290145, 0.0005263620405457914, 0.0004399158642627299, 0.004006068222224712, 0.0001652796199778095, 0.0014245175989344716, 0.0009279533987864852], [0.003970519173890352, 0.005485043860971928, 0.025893347337841988, 0.003094522515311837, 0.011115124449133873, 0.005019139964133501, 0.033574726432561874, 0.07139962166547775, 0.05566037446260452, 0.6577118039131165, 0.027012908831238747, 0.02176436223089695, 0.03187369927763939, 0.010483015328645706, 0.011756078340113163, 0.0013304413296282291, 0.0033727032132446766, 0.002823243383318186, 0.0012624531518667936, 0.00472290301695466, 0.0010691717034205794, 0.0003421600558795035, 0.0011842272942885756, 0.008078459650278091], [0.003980828914791346, 0.005888139363378286, 
0.04954370856285095, 0.005966607481241226, 0.018943196162581444, 0.006428719498217106, 0.010325204581022263, 0.029601898044347763, 0.155721977353096, 0.04929368570446968, 0.29511621594429016, 0.09886976331472397, 0.09514185786247253, 0.038472894579172134, 0.08046413213014603, 0.005034272093325853, 0.027309631928801537, 0.00607569795101881, 0.0033547384664416313, 0.00521069997921586, 0.0055685644038021564, 0.0006077535217627883, 0.0009133715066127479, 0.0021664570085704327], [0.004654975142329931, 0.0023037490900605917, 0.007942690514028072, 0.011442484334111214, 0.013073272071778774, 0.08023664355278015, 0.008751637302339077, 0.05713397637009621, 0.06563723087310791, 0.04591411352157593, 0.027116142213344574, 0.5416907072067261, 0.02344391494989395, 0.033559828996658325, 0.020165279507637024, 0.013572447001934052, 0.010888252407312393, 0.017865827307105064, 0.0007869636756367981, 0.007719989400357008, 0.0024413978680968285, 0.0007617191295139492, 0.0005640187300741673, 0.0023326994851231575], [0.0019680019468069077, 0.001335575943812728, 0.014308849349617958, 0.00327040976844728, 0.005324684549123049, 0.008570863865315914, 0.019420621916651726, 0.0099132489413023, 0.042145587503910065, 0.02444325014948845, 0.03100617602467537, 0.03785265237092972, 0.567018985748291, 0.015054863877594471, 0.1450774073600769, 0.02405315265059471, 0.0057717603631317616, 0.0035276864655315876, 0.00820070132613182, 0.0032214527018368244, 0.01145528070628643, 0.00336678558960557, 0.002095536794513464, 0.01159653253853321], [0.0040171826258301735, 0.004928378853946924, 0.023149291053414345, 0.009225641377270222, 0.0042602187022566795, 0.003220566548407078, 0.005282398778945208, 0.01577940583229065, 0.005224692169576883, 0.021043354645371437, 0.019655324518680573, 0.04171639680862427, 0.015897167846560478, 0.4600045084953308, 0.07090859860181808, 0.17642517387866974, 0.012404726818203926, 0.042158909142017365, 0.0050215148366987705, 0.018512867391109467, 0.003436321159824729, 
0.018934007734060287, 0.00716416584327817, 0.011629248037934303], [0.0018267659470438957, 0.0015601451741531491, 0.014148871414363384, 0.003243230516090989, 0.0032041941303759813, 0.001558408373966813, 0.008660702034831047, 0.003999923821538687, 0.004225400276482105, 0.01442993525415659, 0.017249230295419693, 0.009027322754263878, 0.0449400432407856, 0.013562156818807125, 0.6357757449150085, 0.08346112817525864, 0.038817740976810455, 0.018703028559684753, 0.012228314764797688, 0.0017477946821600199, 0.007313170935958624, 0.008591984398663044, 0.027358099818229675, 0.02436661906540394], [0.0014643248869106174, 0.0011476316722109914, 0.013831299729645252, 0.0028912427369505167, 0.003632869105786085, 0.0008806870318949223, 0.00441539054736495, 0.005633274558931589, 0.004506561905145645, 0.004784435499459505, 0.01529216393828392, 0.014808046631515026, 0.00649440661072731, 0.02771538682281971, 0.3399474322795868, 0.31426283717155457, 0.15964347124099731, 0.04300430044531822, 0.015501040033996105, 0.0035632450599223375, 0.001818746910430491, 0.003049653023481369, 0.004212677013128996, 0.007498862221837044], [0.002271113684400916, 0.0007516929763369262, 0.032379500567913055, 0.0038163820281624794, 0.002341807121410966, 0.0003672802704386413, 0.009035488590598106, 0.007768392097204924, 0.011784043163061142, 0.0020780754275619984, 0.02599414996802807, 0.01261590700596571, 0.025254923850297928, 0.00435444014146924, 0.27538275718688965, 0.03736403211951256, 0.1555168181657791, 0.019696302711963654, 0.04888663813471794, 0.03865702450275421, 0.02114083245396614, 0.002363581908866763, 0.08252881467342377, 0.17765000462532043], [0.007133296225219965, 0.0041861385107040405, 0.07768196612596512, 0.004941700492054224, 0.007283532526344061, 0.0007342509343288839, 0.006268578581511974, 0.017396174371242523, 0.010090277530252934, 0.015723584219813347, 0.04020831361413002, 0.01478480827063322, 0.011666987091302872, 0.004878822714090347, 0.13382267951965332, 0.031210882589221, 
0.09926697611808777, 0.28392916917800903, 0.0832749456167221, 0.0247796718031168, 0.027545103803277016, 0.019198795780539513, 0.011078419163823128, 0.06291494518518448], [0.007582934573292732, 0.0016244736034423113, 0.042723484337329865, 0.004387735389173031, 0.006918597500771284, 0.0019583345856517553, 0.007647119462490082, 0.008493030443787575, 0.017511142417788506, 0.007814230397343636, 0.06013968214392662, 0.008817709982395172, 0.030291346833109856, 0.001131427357904613, 0.1105719655752182, 0.023770950734615326, 0.07119835168123245, 0.024695836007595062, 0.31886163353919983, 0.051523976027965546, 0.06385784596204758, 0.07644721865653992, 0.03880002722144127, 0.013230949640274048], [0.01738453283905983, 0.009698018431663513, 0.01524006575345993, 0.012325870804488659, 0.0027030308265239, 0.013474551029503345, 0.0035162854474037886, 0.009085114113986492, 0.0013946079416200519, 0.004766048863530159, 0.006006560288369656, 0.030153878033161163, 0.006778405979275703, 0.0239554550498724, 0.003669323166832328, 0.014440705999732018, 0.0034217725042253733, 0.044232163578271866, 0.02764018625020981, 0.5148992538452148, 0.02645144797861576, 0.13029786944389343, 0.021155240014195442, 0.05730968713760376], [0.002564267721027136, 0.0013812438119202852, 0.05596073716878891, 0.001643509604036808, 0.0017405436374247074, 0.003976929467171431, 0.009344791062176228, 0.00291431718505919, 0.0037889364175498486, 0.0014070431934669614, 0.013712028972804546, 0.010187679901719093, 0.05707438290119171, 0.0012479693396016955, 0.08678945899009705, 0.0016315978718921542, 0.001989637967199087, 0.004220405127853155, 0.025175703689455986, 0.014811470173299313, 0.2440258413553238, 0.015310723334550858, 0.11880581080913544, 0.3202950060367584], [0.004044011235237122, 0.0012212211731821299, 0.002518733963370323, 0.004537811037153006, 0.0004186475707683712, 0.0009390276973135769, 0.0022066973615437746, 0.0010311849182471633, 7.266149623319507e-05, 0.0005041877157054842, 0.000378288677893579, 
0.0008931679767556489, 0.0006019803113304079, 0.003776944475248456, 0.0008271150873042643, 0.015044881962239742, 0.0003414188395254314, 0.008189349435269833, 0.036078598350286484, 0.07099298387765884, 0.011239751242101192, 0.6025274991989136, 0.11067204922437668, 0.12094178795814514], [0.0017676472198218107, 0.0009861503494903445, 0.016941716894507408, 0.004724616650491953, 0.002277504187077284, 0.0034722164273262024, 0.008724220097064972, 0.0029373036231845617, 0.0015355439390987158, 0.0012165382504463196, 0.0034657600335776806, 0.002185810822993517, 0.00875439029186964, 0.0015449802158400416, 0.03477580100297928, 0.009670860134065151, 0.007038849871605635, 0.005012545734643936, 0.025357617065310478, 0.023155029863119125, 0.034472957253456116, 0.04487553611397743, 0.5138096809387207, 0.24129672348499298], [0.0004557558859232813, 0.00027392737683840096, 0.0013783533358946443, 0.0004933194722980261, 0.00016485335072502494, 0.00017826375551521778, 0.0006081328028813004, 0.001186421257443726, 4.188527600490488e-05, 9.787480667000636e-05, 6.072908945498057e-05, 0.0003500001330394298, 9.213147859554738e-05, 0.00021477136760950089, 0.0008729046094231308, 0.000743926502764225, 0.00016407351358793676, 0.0004099069337826222, 0.0001735934056341648, 0.0006827053730376065, 0.0015895258402451873, 0.0023126869928091764, 0.017448239028453827, 0.970005989074707], [0.010877971537411213, 0.0024285640101879835, 0.027432583272457123, 0.008728365413844585, 0.0041395011357963085, 0.002490341430529952, 0.0710277110338211, 0.013291587121784687, 0.01165742613375187, 0.003108826931566, 0.005493442993611097, 0.0020775857847183943, 0.008785270154476166, 0.00038042059168219566, 0.02007380500435829, 0.01384566817432642, 0.004209049511700869, 0.0036786808632314205, 0.07659738510847092, 0.005567301530390978, 0.029818130657076836, 0.05699663236737251, 0.19102662801742554, 0.4262671172618866], [0.014018451794981956, 0.0034301765263080597, 0.018437787890434265, 0.026042863726615906, 
0.0008772645960561931, 0.0011368796695023775, 0.006638020277023315, 0.005291528534144163, 0.0013394681736826897, 0.0016544356476515532, 0.0034078769385814667, 0.004776314366608858, 0.0003182301588822156, 0.001654239953495562, 0.0007043928490020335, 0.04419642314314842, 0.0012042337330058217, 0.04321809113025665, 0.03533879667520523, 0.04147128015756607, 0.012818103656172752, 0.03455127775669098, 0.14049731194972992, 0.5569765567779541]]], [[[0.005623971577733755, 0.00866770651191473, 0.7851794958114624, 0.014153921976685524, 0.003053793916478753, 0.013694223016500473, 0.0052650850266218185, 0.016266826540231705, 0.03819546848535538, 0.03555463254451752, 0.013206122443079948, 0.015319516882300377, 0.005369136575609446, 0.005878434516489506, 0.0064176213927567005, 0.003356808563694358, 0.001384088071063161, 0.0018320229137316346, 0.0004406635998748243, 0.0009350198088213801, 0.009891239926218987, 0.0035967628937214613, 0.0008252968546003103, 0.005892134737223387], [0.01601445861160755, 0.0245953481644392, 0.6453245282173157, 0.02635337971150875, 0.006956256926059723, 0.008641648106276989, 0.004727458581328392, 0.013893000781536102, 0.018475865945219994, 0.03399686515331268, 0.012184408493340015, 0.04058895632624626, 0.030027110129594803, 0.022847319021821022, 0.0072213453240692616, 0.004364700056612492, 0.001569467014633119, 0.0033338565845042467, 0.0014698095619678497, 0.008626156486570835, 0.042821623384952545, 0.022160274907946587, 0.0006355784134939313, 0.0031705223955214024], [0.005813127383589745, 0.019949357956647873, 0.09937547147274017, 0.02116512507200241, 0.020873937755823135, 0.01447196863591671, 0.011203189380466938, 0.03475131839513779, 0.15076977014541626, 0.012117207050323486, 0.016390688717365265, 0.01766042411327362, 0.010147550143301487, 0.021558823063969612, 0.1377585530281067, 0.05053286254405975, 0.09641965478658676, 0.027939992025494576, 0.01288458239287138, 0.021348947659134865, 0.06884332746267319, 0.014775723218917847, 0.03336023911833763, 
0.07988809794187546], [0.0020296962466090918, 0.0005211950046941638, 0.7766743302345276, 0.008561499416828156, 0.0017406452680006623, 0.008822128176689148, 0.001394340069964528, 0.006665925960987806, 0.001590263214893639, 0.0006687415298074484, 0.0013276943936944008, 0.0005792768206447363, 0.001085764029994607, 0.00022399438603315502, 0.0059755477122962475, 0.0026143542490899563, 0.0013760724104940891, 0.005195737350732088, 0.003683663671836257, 0.016864221543073654, 0.09829255193471909, 0.03009536676108837, 0.010395925492048264, 0.01362094096839428], [0.0006023632595315576, 9.038503776537254e-05, 0.9601346254348755, 0.004149949178099632, 1.7325730368611403e-05, 0.020490070804953575, 0.00023670573136769235, 0.003266299143433571, 0.0015970325330272317, 0.00027220408082939684, 5.09785495523829e-05, 0.0005037084338255227, 0.00033473240910097957, 5.586471161223017e-05, 0.000641466467641294, 0.0002892428601626307, 1.0104924967890838e-06, 0.00026558039826340973, 4.217971581965685e-05, 0.0006874793907627463, 0.00224653840996325, 0.001960545079782605, 0.00010573906183708459, 0.00195802072994411], [0.02704840525984764, 0.014989730902016163, 0.1891222447156906, 0.2879146337509155, 0.041702013462781906, 0.07567066699266434, 0.01760159805417061, 0.11181272566318512, 0.005595661699771881, 0.002263688715174794, 0.001265794737264514, 0.003231783863157034, 0.003401203313842416, 0.0007768873474560678, 0.0014434836339205503, 0.007039686664938927, 0.00021034583915024996, 0.0029179127886891365, 0.0019590023439377546, 0.03926478326320648, 0.012518531642854214, 0.08733388781547546, 0.019957128912210464, 0.04495823755860329], [0.019542481750249863, 0.00887828879058361, 0.0186961367726326, 0.047349169850349426, 0.0022744808811694384, 0.4932999014854431, 0.04992074519395828, 0.09518758952617645, 0.24467909336090088, 0.002603675704449415, 0.0028358723502606153, 0.000700329605024308, 0.00032125128200277686, 0.0007891675923019648, 0.001969581237062812, 0.001887463964521885, 
8.345547030330636e-06, 0.0001732188684400171, 3.70691304851789e-05, 0.00023697617871221155, 0.0007273529772646725, 0.00036476211971603334, 0.004299594089388847, 0.003217503195628524], [0.003775665070861578, 0.0018623985815793276, 0.023011744022369385, 0.02698509581387043, 0.0010817910078912973, 0.2693832516670227, 0.287908136844635, 0.07688819617033005, 0.28976374864578247, 0.0037003725301474333, 0.0024829350877553225, 0.00015400606207549572, 5.766174217569642e-05, 0.00018893850210588425, 0.0009924776386469603, 0.0014659338630735874, 6.316005965345539e-06, 5.555300958803855e-05, 7.022159934422234e-06, 1.1855292541440576e-05, 8.741924102650955e-05, 9.301063255406916e-05, 0.004285333212465048, 0.005751173943281174], [0.0038182444404810667, 0.0007726442418061197, 0.04644179344177246, 0.006829683668911457, 0.00020912896434310824, 0.05876010283827782, 0.010358051396906376, 0.20230168104171753, 0.5928921699523926, 0.0056276023387908936, 0.03438391163945198, 0.0014875370543450117, 0.000495246727950871, 0.0002662624465301633, 0.016679910942912102, 0.00487914914265275, 4.497067129705101e-05, 0.0012989522656425834, 0.00011563602311071008, 0.0006342668202705681, 0.002711979206651449, 6.733639747835696e-05, 0.00570277776569128, 0.003220957238227129], [0.022484781220555305, 0.13348956406116486, 0.0011559088015928864, 0.01627950742840767, 0.005120072979480028, 0.021747423335909843, 0.05243365466594696, 0.13752157986164093, 0.585289716720581, 0.010732892900705338, 0.005400918889790773, 0.0010231257183477283, 0.000424553727498278, 0.001691920100711286, 0.000984109123237431, 0.003381801303476095, 6.802116695325822e-05, 0.00013589198351837695, 3.187966285622679e-05, 4.76963869004976e-05, 2.2851052108308068e-06, 5.31060231878655e-06, 0.00025015868595801294, 0.0002972263901028782], [0.009691054932773113, 0.00709520373493433, 0.026904653757810593, 0.021278684958815575, 0.005457510240375996, 0.043972454965114594, 0.03410321846604347, 0.03435768187046051, 0.5033741593360901, 
0.04256933555006981, 0.0648268312215805, 0.030548958107829094, 0.013035707175731659, 0.006822044029831886, 0.036454442888498306, 0.024608375504612923, 0.0038387009408324957, 0.025179583579301834, 0.027206232771277428, 0.011343316175043583, 0.010978206992149353, 0.00149053824134171, 0.005759072955697775, 0.009104063734412193], [0.0409623384475708, 0.061834823340177536, 0.015462066978216171, 0.017878413200378418, 0.02194182574748993, 0.00480596162378788, 0.019269876182079315, 0.013197105377912521, 0.031434282660484314, 0.07096540182828903, 0.6381816267967224, 0.028786776587367058, 0.010363507084548473, 0.007268782239407301, 0.0034085188526660204, 0.0026772082783281803, 0.0006849734927527606, 0.0015968094812706113, 0.003431373741477728, 0.0034046771470457315, 0.0016986231785267591, 0.0004486891266424209, 0.00026369892293587327, 3.26884510286618e-05], [0.04967556148767471, 0.07203447073698044, 0.018505441024899483, 0.019835341721773148, 0.016287971287965775, 0.0073676807805895805, 0.010779955424368382, 0.013058885000646114, 0.03568897023797035, 0.039988528937101364, 0.29403164982795715, 0.13340115547180176, 0.10965951532125473, 0.06751072406768799, 0.029302822425961494, 0.015344520099461079, 0.0017753823194652796, 0.005207604728639126, 0.012423085980117321, 0.029649704694747925, 0.015426691621541977, 0.0023480572272092104, 0.0006091785035096109, 8.710381371201947e-05], [0.005061449483036995, 0.006629016250371933, 0.029845137149095535, 0.008876635693013668, 0.0011528816539794207, 0.003194952616468072, 0.0031722274143248796, 0.005466730333864689, 0.003817455843091011, 0.0011767082614824176, 0.04547208547592163, 0.04017234221100807, 0.4509478807449341, 0.08389590680599213, 0.17091584205627441, 0.03095441684126854, 0.00030438878457061946, 0.0038782560732215643, 0.01855713129043579, 0.05964465066790581, 0.022390006110072136, 0.0029348828829824924, 0.0014394792960956693, 9.958138252841309e-05], [0.00033368656295351684, 0.0007282888982445002, 0.0024653500877320766, 
0.0006442566518671811, 0.0001803103950805962, 0.0020870999433100224, 0.0018439472187310457, 0.0030303162056952715, 0.0026231317315250635, 9.054694237420335e-05, 0.002524655545130372, 0.004124443978071213, 0.04270622879266739, 0.06805037707090378, 0.7908861041069031, 0.037127282470464706, 0.0014013817999511957, 0.0032422924414277077, 0.0071188402362167835, 0.012149294838309288, 0.007370581850409508, 0.001032273517921567, 0.006955716293305159, 0.001283619669266045], [0.00014591531362384558, 5.5513610277557746e-05, 0.004908505827188492, 0.00010907051910180598, 1.340345261269249e-05, 0.000424514728365466, 0.0007762148743495345, 0.0013695526868104935, 0.0003152030985802412, 1.4431269846681971e-05, 0.002064442727714777, 0.00016442383639514446, 0.0024755150079727173, 0.0016573232132941484, 0.9118443727493286, 0.0213424451649189, 0.0019144571851938963, 0.005333054345101118, 0.01786215603351593, 0.008396542631089687, 0.0078008947893977165, 0.0002656039723660797, 0.009958147071301937, 0.0007882321369834244], [0.00019664896535687149, 4.927959162159823e-05, 0.015525665134191513, 0.0002569324860814959, 3.320333235024009e-06, 0.0006480899755842984, 0.0004575767379719764, 0.0037695923820137978, 0.0006770463660359383, 5.548796980292536e-05, 0.00029624433955177665, 0.0014478195225819945, 0.0059144143015146255, 0.0028611328452825546, 0.7032576203346252, 0.07001475244760513, 0.0014136368408799171, 0.039472609758377075, 0.06144315376877785, 0.07727299630641937, 0.009005333296954632, 0.0007763529429212213, 0.0011558461701497436, 0.004028461407870054], [0.00011510286276461557, 6.121608021203429e-05, 0.0009883642196655273, 6.185756501508877e-05, 1.9854855054290965e-05, 1.877883005363401e-05, 4.411306508700363e-05, 0.0003642539959400892, 2.340576065762434e-05, 1.780101956683211e-05, 0.0003109508834313601, 0.00021057362027931958, 0.0006069166120141745, 0.00022643752163276076, 0.04148881137371063, 0.01825110614299774, 0.08685611933469772, 0.17132264375686646, 0.47007495164871216, 
0.20123127102851868, 0.00271681253798306, 0.0005385273834690452, 0.002911288756877184, 0.001538765849545598], [0.000226277596084401, 5.622627941193059e-05, 0.0014469203306362033, 8.82434324012138e-05, 2.1653358999174088e-05, 0.00015366697334684432, 6.638868944719434e-05, 0.00013665833103004843, 0.0002270515833515674, 3.679572182591073e-05, 0.000735993031412363, 0.0002610499213915318, 0.0002406853745924309, 0.0001680807617958635, 0.01917302794754505, 0.005887447856366634, 0.01632574573159218, 0.26826223731040955, 0.49160751700401306, 0.1692240983247757, 0.023655809462070465, 0.00038570634205825627, 0.0011478536762297153, 0.00046491555985994637], [0.00019678223179653287, 5.627446807920933e-05, 0.003487027483060956, 0.000581606465857476, 0.00016202848928514868, 0.0003471333475317806, 0.00012349423195701092, 0.00010633569763740525, 0.0009942748583853245, 0.00018336769426241517, 0.0022731758654117584, 0.00026336792507208884, 0.00021548829681705683, 2.611999116197694e-05, 0.0021633023861795664, 0.0030558661092072725, 0.019338857382535934, 0.20465347170829773, 0.5559292435646057, 0.12985460460186005, 0.06902579963207245, 0.0020539420656859875, 0.0038403202779591084, 0.0010680286213755608], [0.0001664453448029235, 1.0887966709560715e-05, 0.0015892620431259274, 0.0002382162492722273, 8.000755769899115e-05, 0.00031253296765498817, 9.730319106893148e-06, 9.419331036042422e-05, 9.841623977990821e-05, 6.967547051317524e-06, 0.00014819027273915708, 8.864732808433473e-05, 0.0001561782119097188, 1.1892278052982874e-05, 0.0009254863834939897, 0.0007662259740754962, 0.0013374903937801719, 0.026392366737127304, 0.03774780035018921, 0.30402714014053345, 0.6183189749717712, 0.0043085296638309956, 0.002438190160319209, 0.0007263204315677285], [0.041808120906353, 0.014905157499015331, 0.0022226087749004364, 0.004462096840143204, 0.01827537827193737, 0.005288075190037489, 0.0006723879487253726, 0.0002743910299614072, 2.6725716452347115e-05, 2.3448508727597073e-05, 6.906032649567351e-05, 
0.00021113765251357108, 0.0004825725918635726, 0.000886148598510772, 0.00041496066842228174, 0.001003532437607646, 0.0043772319331765175, 0.012192552909255028, 0.062092579901218414, 0.47832590341567993, 0.1775708794593811, 0.1585136502981186, 0.012957265600562096, 0.002944085281342268], [0.0010373771656304598, 0.00014145478780847043, 0.0024137506261467934, 0.0021084733307361603, 0.0012087413342669606, 0.0040133302100002766, 0.0006022357847541571, 0.0002723240468185395, 2.513505933166016e-05, 4.472489763429621e-06, 3.191918494849233e-06, 5.853463881067e-05, 0.0001258420670637861, 0.00021044675668235868, 0.0015714208129793406, 0.003372365375980735, 0.0019417657749727368, 0.008083458058536053, 0.045014817267656326, 0.23477280139923096, 0.3165954351425171, 0.2673605978488922, 0.021630356088280678, 0.0874316394329071], [0.08378318697214127, 0.023809216916561127, 0.016354240477085114, 0.045552223920822144, 0.046722497791051865, 0.03701898083090782, 0.01712283119559288, 0.006180104799568653, 0.0002049457689281553, 5.641934694722295e-05, 4.360152888693847e-05, 0.00010771159577416256, 0.00013430869148578495, 0.0011068691965192556, 0.0024388646706938744, 0.015730759128928185, 0.0034842807799577713, 0.0029630111530423164, 0.010132110677659512, 0.07351479679346085, 0.0888693630695343, 0.31434041261672974, 0.09804417937994003, 0.11228517442941666]], [[0.14985503256320953, 0.12848147749900818, 0.05922376364469528, 0.13078497350215912, 0.05325450003147125, 0.02602526918053627, 0.04742579534649849, 0.05921131372451782, 0.023371117189526558, 0.0426921471953392, 0.020825544372200966, 0.04294537380337715, 0.011178323067724705, 0.026321614161133766, 0.004493385553359985, 0.026600949466228485, 0.02082953043282032, 0.016885433346033096, 0.01629435084760189, 0.030892064794898033, 0.013684898614883423, 0.01852579228579998, 0.009647433646023273, 0.020549967885017395], [0.09903134405612946, 0.14229461550712585, 0.06560297310352325, 0.2333640307188034, 0.04910585284233093, 
0.029640669003129005, 0.024178562685847282, 0.019424760714173317, 0.01405631098896265, 0.03354791924357414, 0.00992346741259098, 0.05027128383517265, 0.019178444519639015, 0.073785699903965, 0.010921729728579521, 0.031994327902793884, 0.014407818205654621, 0.007402242161333561, 0.0029689015354961157, 0.009116525761783123, 0.014397745952010155, 0.03487631306052208, 0.004888987634330988, 0.005619421601295471], [0.009296237491071224, 0.034698087722063065, 0.04335404187440872, 0.03656969219446182, 0.04398101940751076, 0.016115745529532433, 0.10192333161830902, 0.04642646387219429, 0.029620742425322533, 0.17823077738285065, 0.003486522939056158, 0.06212661415338516, 0.03107507713139057, 0.05495719611644745, 0.019686348736286163, 0.013107268139719963, 0.006806260906159878, 0.0008177233394235373, 0.0018026134930551052, 0.00109214021358639, 0.008353530429303646, 0.06827739626169205, 0.009105941280722618, 0.17908921837806702], [0.03702164813876152, 0.019726769998669624, 0.06324336677789688, 0.16046522557735443, 0.1815306693315506, 0.026120014488697052, 0.016733694821596146, 0.008503518998622894, 0.0567922368645668, 0.12091418355703354, 0.021501775830984116, 0.024228211492300034, 0.009000961668789387, 0.009814411401748657, 0.003517451696097851, 0.019893554970622063, 0.08094761520624161, 0.018303200602531433, 0.04209921136498451, 0.012753386050462723, 0.02212122641503811, 0.00818368885666132, 0.008914072066545486, 0.027669962495565414], [0.0456504225730896, 0.02807638607919216, 0.10745556652545929, 0.4771376848220825, 0.019901419058442116, 0.003255866700783372, 0.011650769039988518, 0.052392203360795975, 0.014506214298307896, 0.046504296362400055, 0.019453106448054314, 0.03540119156241417, 0.0035331968683749437, 0.002822163049131632, 0.001528488821350038, 0.024165844544768333, 0.006608934141695499, 0.004552412312477827, 0.006530741695314646, 0.032297637313604355, 0.02984755113720894, 0.01802227832376957, 0.003737033111974597, 0.004968705587089062], [0.06000132113695145, 
0.052676282823085785, 0.05555145815014839, 0.44455981254577637, 0.1150187999010086, 0.018274884670972824, 0.01585984230041504, 0.01274381298571825, 0.0064129955135285854, 0.00234517571516335, 0.020835284143686295, 0.04061604663729668, 0.02439655363559723, 0.0197971910238266, 0.0010365558555349708, 0.0020919693633913994, 0.005905421916395426, 0.0008502603159286082, 0.0035714618861675262, 0.018584104254841805, 0.04229268804192543, 0.0179931428283453, 0.014928298071026802, 0.0036566434428095818], [0.03458043187856674, 0.07013951987028122, 0.0331362746655941, 0.02203143574297428, 0.09560485929250717, 0.2081756442785263, 0.03799518197774887, 0.04432595893740654, 0.07128454744815826, 0.04282955080270767, 0.005264206789433956, 0.023338524624705315, 0.10270416736602783, 0.03291748836636543, 0.004778134170919657, 0.0009555976721458137, 0.0023267895448952913, 0.0008440231904387474, 0.001933304243721068, 0.009945601224899292, 0.041588716208934784, 0.04571754112839699, 0.017972281202673912, 0.04961026832461357], [0.006758521310985088, 0.010111797600984573, 0.0024170703254640102, 0.0033505158498883247, 0.02641221508383751, 0.5587126016616821, 0.3247166872024536, 0.01467534527182579, 0.0026225880719721317, 0.0021045852918177843, 0.0002887472801376134, 0.0004115005722269416, 0.0008242157637141645, 0.015926716849207878, 0.0005813302122987807, 0.00039678963366895914, 0.00015887348854448646, 5.124169911141507e-05, 0.000138060117023997, 0.0001189428658108227, 0.000450782710686326, 0.0036323387175798416, 0.008013173937797546, 0.017125463113188744], [0.005626159254461527, 0.0048544807359576225, 0.010568210855126381, 0.004460809286683798, 0.0022302952129393816, 0.015956571325659752, 0.5456545948982239, 0.19884833693504333, 0.0632840171456337, 0.004107323475182056, 0.0041208635084331036, 0.0001820115139707923, 0.00039698590990155935, 0.0008469945751130581, 0.07104218751192093, 0.014829362742602825, 0.003401634283363819, 0.0002381290978519246, 0.00044512542081065476, 
4.452260327525437e-06, 0.00039127765921875834, 0.0004521265218500048, 0.03133881837129593, 0.016719156876206398], [0.00034162221709266305, 0.00042054650839418173, 0.00020848980057053268, 0.0001514127798145637, 5.323067307472229e-05, 0.0005658823647536337, 0.030240118503570557, 0.9583679437637329, 0.0005211196839809418, 0.003773626871407032, 6.587400275748223e-05, 8.515116496710107e-05, 2.034528051808593e-06, 4.329906005295925e-05, 5.132131991558708e-05, 0.001923184609040618, 1.4892671060806606e-05, 0.00010436232696520165, 8.014441846171394e-06, 7.696102329646237e-06, 8.98855461173298e-08, 1.911985054903198e-05, 5.4310381528921425e-05, 0.002976582385599613], [0.002487603109329939, 0.008678130805492401, 0.001633650390431285, 0.0003539221943356097, 0.004912317730486393, 0.013053178787231445, 0.004534984938800335, 0.005970108322799206, 0.32882651686668396, 0.5893260836601257, 0.009522817097604275, 0.0015814844518899918, 0.004664331674575806, 0.0004378503072075546, 0.0031574342865496874, 0.0009806797606870532, 0.009651098400354385, 0.003255669493228197, 0.0013664651196449995, 4.166821236140095e-05, 5.7277844462078065e-05, 3.155590093228966e-05, 0.0006368437316268682, 0.004838304594159126], [0.00013506552204489708, 0.0002915115328505635, 0.0004702481091953814, 0.0002380457444814965, 0.00035405985545367, 0.0006262295646592975, 0.0005655160639435053, 0.0013441353803500533, 0.003154696198180318, 0.9814015030860901, 0.005691861268132925, 0.0047695813700556755, 8.044812420848757e-05, 8.870028250385076e-05, 9.330841749033425e-06, 0.00012017915287287906, 2.2820147933089174e-05, 0.000205826829187572, 8.893711492419243e-05, 0.0001206482556881383, 1.6450010207336163e-06, 1.126661300077103e-05, 3.028596211152035e-06, 0.00020476839563343674], [0.0002518606197554618, 0.00027963423053734004, 0.004598484840244055, 0.0010714831296354532, 0.00044988677836954594, 4.2136278352700174e-05, 0.00044615482329390943, 0.00011205895862076432, 0.006049527786672115, 0.00416968809440732, 
0.9370068311691284, 0.025907978415489197, 0.015299513004720211, 1.0941442269540858e-05, 0.00032583068241365254, 2.4862136342562735e-05, 0.0002637350407894701, 2.2170044758240692e-05, 0.0025883677881211042, 0.0001647689496167004, 0.0008021284593269229, 8.590010111220181e-06, 0.00010067053517559543, 2.6468169380677864e-06], [0.0008623444009572268, 0.0016391489189118147, 0.0010382682085037231, 0.00965435616672039, 0.0004651540075428784, 0.0003945440985262394, 0.00011810367141151801, 0.00016390238306485116, 0.00015286797133740038, 0.0029972614720463753, 0.018562892451882362, 0.9054226875305176, 0.034570470452308655, 0.014831358566880226, 7.41323601687327e-05, 0.0006465368787758052, 1.7351052520098165e-05, 0.0001890748244477436, 4.0115821320796385e-05, 0.0067174313589930534, 0.0003973423154093325, 0.0010351695818826556, 6.281618425418856e-06, 3.2476573323947378e-06], [1.9955021343776025e-05, 0.00011180240835528821, 7.827204535715282e-05, 3.6748297134181485e-05, 6.414574454538524e-05, 0.00028950042906217277, 5.4172756790649146e-05, 8.662918276058917e-07, 0.00016418083396274596, 2.642612707859371e-05, 0.00021886364265810698, 0.0012102999025955796, 0.9061214923858643, 0.060309261083602905, 0.0283693578094244, 3.757131707970984e-05, 1.281129789276747e-05, 6.467727189374273e-07, 3.676941560115665e-06, 1.1311010894132778e-05, 0.0019921197090297937, 0.0004825759679079056, 0.00037500335020013154, 9.128620149567723e-06], [0.0007922447402961552, 0.00099611422047019, 0.0004955410840921104, 0.001950734993442893, 0.005495027638971806, 0.00740014249458909, 0.002116526709869504, 0.000783985888119787, 0.0006641106447204947, 0.018788091838359833, 0.00025515799643471837, 0.006112577859312296, 0.01398569904267788, 0.7840087413787842, 0.03195780888199806, 0.04062453657388687, 0.0019932736176997423, 0.0007228897302411497, 5.04537092638202e-05, 0.0008567409822717309, 0.0009761948022060096, 0.03322982415556908, 0.0032894897740334272, 0.042454104870557785], [0.00030589240486733615, 
0.00035727964132092893, 0.00042955964454449713, 0.0002895616053137928, 5.381637311074883e-05, 0.00012488516222219914, 0.0005319692427292466, 0.0004414377617649734, 0.0017059975070878863, 0.0004758860741276294, 0.00036191867548041046, 0.00033371159224770963, 0.008711600676178932, 0.01252057310193777, 0.7424606680870056, 0.21222718060016632, 0.011070857755839825, 0.00048118835547938943, 0.00018987496150657535, 8.770351996645331e-05, 0.0014495259383693337, 0.0007889298722147942, 0.0025313945952802896, 0.0020685845520347357], [0.0005933817592449486, 0.00037124031223356724, 0.00023757090093567967, 0.0011938520474359393, 0.00026306736981496215, 0.00017324577493127435, 0.00016941226203925908, 0.0024608143139630556, 0.0006297352956607938, 0.0025234208442270756, 0.0003252882743254304, 0.002598909894004464, 0.0004405477666296065, 0.006005513481795788, 0.010391481220722198, 0.8445419669151306, 0.07705904543399811, 0.0169901754707098, 0.0005943190772086382, 0.001958635402843356, 0.00010390252282377332, 0.0011023088591173291, 0.0005773080629296601, 0.028694866225123405], [0.0004269884084351361, 0.00018323240510653704, 0.0001898624177556485, 0.00011372808512533084, 7.070512947393581e-05, 5.9249814512440935e-06, 1.0911945537372958e-05, 0.00047052293666638434, 0.0077262334525585175, 0.0014973736833781004, 0.001082652946934104, 0.0004079834616277367, 0.00034683867124840617, 1.32683362608077e-05, 0.007108623161911964, 0.018984250724315643, 0.6100618839263916, 0.24278438091278076, 0.10044527053833008, 0.0038390150293707848, 0.0026796271558851004, 0.00015319878002628684, 0.00026835029711946845, 0.0011293541174381971], [0.00023279213928617537, 4.7299781726906076e-05, 6.644662062171847e-05, 0.0004957106430083513, 0.00019686922314576805, 1.2944920854351949e-05, 5.788796897832071e-06, 0.0001410148397553712, 6.700521043967456e-05, 0.00127530621830374, 0.0003300510870758444, 0.00038789736572653055, 7.869974183449813e-07, 1.1651961813186062e-06, 1.4524478046951117e-06, 0.0008392926538363099, 
0.00656794523820281, 0.7488278746604919, 0.15592771768569946, 0.08376990258693695, 0.0002857790095731616, 0.0003766281879507005, 9.964118362404406e-06, 0.00013232951459940523], [0.00011917696974705905, 2.1548890799749643e-05, 0.0011093540815636516, 0.0008143266313709319, 0.0003611621505115181, 2.5805185941862874e-05, 1.3647720152221154e-05, 3.040322781089344e-06, 0.0011278822785243392, 0.00012329001037869602, 0.01341097243130207, 0.00022599668591283262, 0.0003518729645293206, 1.5772640153954853e-06, 0.0002530800993554294, 0.00016919105837587267, 0.014282993040978909, 0.010305403731763363, 0.8640198707580566, 0.01579190045595169, 0.07466241717338562, 0.000461359741166234, 0.0022643504198640585, 7.97597604105249e-05], [1.3962303455627989e-05, 2.3307418359763687e-06, 3.2281703170156106e-05, 0.00018833854119293392, 3.19605169352144e-05, 4.275026185496245e-06, 1.7504377183286124e-06, 1.129997781390557e-05, 2.8515626127045834e-07, 8.653399163449649e-06, 2.364127794862725e-05, 0.00020873536414001137, 1.2899345165351406e-05, 1.3146675883035641e-05, 3.7596933566419466e-07, 2.090384623443242e-05, 3.298365527371061e-06, 0.00032924037077464163, 0.0012397817336022854, 0.9889494180679321, 0.001456203986890614, 0.007362706586718559, 4.330675074015744e-05, 4.124303814023733e-05], [0.0003546889638528228, 0.000341400591423735, 0.0003302588884253055, 0.0009630115237087011, 0.0019946375396102667, 0.0009592982241883874, 2.546799623814877e-05, 1.477440855524037e-05, 5.2657553169410676e-05, 4.326845100877108e-06, 3.606214886531234e-05, 7.401497714454308e-05, 0.005533752962946892, 0.0010485650273039937, 0.001144316280260682, 7.095023465808481e-05, 0.00042079685954377055, 0.00019842319306917489, 0.0010403306223452091, 0.023735910654067993, 0.8175612092018127, 0.12647181749343872, 0.01720144785940647, 0.00042187332292087376], [0.00015785408322699368, 7.943952368805185e-05, 0.000124652506201528, 0.0011180323781445622, 0.0005285352817736566, 0.0028962132055312395, 0.00015370013716164976, 
0.00035677471896633506, 3.5249177017249167e-06, 3.1556262456433615e-06, 4.866671474701434e-07, 5.217963007453363e-06, 9.559449608786963e-06, 0.001684795250184834, 9.475577098783106e-05, 0.0004228993784636259, 6.524077889480395e-06, 6.220408249646425e-05, 1.6172338291653432e-05, 0.004212912172079086, 0.006129696033895016, 0.9506017565727234, 0.014864431694149971, 0.016466744244098663]], [[0.0420386865735054, 0.7883263230323792, 0.005673989653587341, 0.00288626691326499, 0.01620045304298401, 0.002686314983293414, 0.0022077213507145643, 0.002319781109690666, 0.0013288380578160286, 0.001300873002037406, 0.0021091937087476254, 0.004769986029714346, 0.008230580016970634, 0.06770047545433044, 0.00338209280744195, 0.0008275217842310667, 0.006879508029669523, 0.002190890721976757, 0.004805160686373711, 0.01775607280433178, 0.005174641497433186, 0.006553607061505318, 0.0034518027678132057, 0.0011993960943073034], [0.022533675655722618, 0.9443545341491699, 0.0010542507516220212, 0.000416949565988034, 0.0079310592263937, 0.000957149313762784, 0.0005134593811817467, 0.0006980017060413957, 0.0003583071520552039, 0.0005603586905635893, 0.000362198828952387, 0.0007947739213705063, 0.0014550643973052502, 0.014705345965921879, 0.0002889492898248136, 8.153873932315037e-05, 0.001242052298039198, 0.0001392570266034454, 0.00017595815006643534, 0.0003515266871545464, 9.657659393269569e-05, 0.0001995089987758547, 0.0003435488324612379, 0.0003859291027765721], [0.04841303825378418, 0.09790927171707153, 0.0175021942704916, 0.36746758222579956, 0.04212528467178345, 0.014309351332485676, 0.01736072450876236, 0.010171633213758469, 0.23377983272075653, 0.0021504350006580353, 0.027878833934664726, 0.024411587044596672, 0.03269264101982117, 0.005984609480947256, 0.0033139281440526247, 0.0014345033559948206, 0.007153007667511702, 0.002968300599604845, 0.024879854172468185, 0.0035390120465308428, 0.011467460542917252, 0.0006571926642209291, 0.002319513587281108, 0.00011023526167264208], 
[0.012138765305280685, 0.02627749741077423, 0.3910299837589264, 0.025527577847242355, 0.3789580762386322, 0.022305089980363846, 0.09327542781829834, 0.009443857707083225, 0.0014792295405641198, 0.0006035025580786169, 0.0007015218143351376, 0.00031191104790195823, 0.00045242992928251624, 0.00031197501812130213, 0.0004512005834840238, 0.00016309968486893922, 0.0003409779747016728, 0.0005659134476445615, 0.013109634630382061, 0.002712308894842863, 0.0015367609448730946, 0.014836625196039677, 0.003186179092153907, 0.0002805312687996775], [0.0014686365611851215, 0.001925959950312972, 0.004536604508757591, 0.004256227985024452, 0.005859545897692442, 0.9231027960777283, 0.007050682790577412, 0.015138731338083744, 0.01307624764740467, 0.005386472679674625, 0.0004094520991202444, 0.00023828174744267017, 0.001177463331259787, 0.0006125581567175686, 0.0005246877553872764, 6.83097678120248e-05, 6.393255171133205e-05, 0.00014850537991151214, 6.314940401352942e-05, 0.00011257726873736829, 0.002264315728098154, 0.001971521880477667, 0.004336123820394278, 0.006207128055393696], [0.0118123022839427, 0.01604202575981617, 0.05159320309758186, 0.021650390699505806, 0.2768886983394623, 0.032205868512392044, 0.39046213030815125, 0.10219907760620117, 0.010254350490868092, 0.005532353650778532, 0.006741990800946951, 0.002988605061545968, 0.0044192420318722725, 0.002076620003208518, 0.013358267955482006, 0.0018553201807662845, 0.005681580398231745, 0.00015420763520523906, 0.001386704621836543, 0.0005647067446261644, 0.004185063764452934, 0.006416558753699064, 0.01940099708735943, 0.012129801325500011], [0.004696856718510389, 0.005810958798974752, 0.0023388422559946775, 0.0028208636213093996, 0.005733126774430275, 0.0032554087229073048, 0.030152929946780205, 0.9100984930992126, 0.010114669799804688, 0.005465344525873661, 0.00037691855686716735, 0.0022261198610067368, 2.7142017643200234e-05, 0.0007920910138636827, 0.0005937363603152335, 0.0017493355553597212, 0.0004031193384435028, 
0.00012891118240077049, 2.346169640077278e-05, 0.00012324427370913327, 4.562865797197446e-05, 0.0002906565787270665, 0.0004904617089778185, 0.01224176213145256], [0.0009827475296333432, 0.004004760179668665, 0.0007129737641662359, 0.001455113640986383, 0.0010025205556303263, 0.0004663609724957496, 0.0025766631588339806, 0.01096043549478054, 0.95585036277771, 0.011433529667556286, 0.006065524183213711, 0.0013069683918729424, 0.000909488124307245, 8.519444963894784e-05, 0.0001549844746477902, 5.912220149184577e-05, 0.0007095966720953584, 0.00020045466953888535, 0.0002567414485383779, 3.131812991341576e-05, 3.671376543934457e-05, 8.105293090920895e-06, 0.00014676910359412432, 0.0005834887851960957], [0.00395890511572361, 0.006988399662077427, 0.00041745021007955074, 0.0010770449880510569, 0.0006454475224018097, 0.0021838322281837463, 0.0003343596472404897, 0.0014898721128702164, 0.02133617177605629, 0.855859100818634, 0.02565401792526245, 0.043664973229169846, 0.00037235545460134745, 0.0004220547270961106, 2.0155534912191797e-06, 5.7432367611909285e-05, 0.0001815768046071753, 0.030695226043462753, 0.0011991969076916575, 0.0032667433843016624, 1.9609900846262462e-05, 3.256245463489904e-06, 1.9407768832024885e-06, 0.00016901962226256728], [0.00025479448959231377, 7.936869224067777e-05, 0.0007461850182153285, 0.0011916800867766142, 0.0014349347911775112, 0.0001611526677152142, 0.0012019735295325518, 0.00014884640404488891, 0.029289033263921738, 0.00348307634703815, 0.9509161114692688, 0.0033188408706337214, 0.004730304703116417, 1.1418492249504197e-06, 1.6978015992208384e-05, 9.278264769818634e-07, 0.00019869131210725754, 0.0002657029253896326, 0.002132730558514595, 7.433557038893923e-05, 0.000348406785633415, 6.507856653570343e-08, 4.577849267661804e-06, 2.2785229703004006e-07], [0.003935978747904301, 0.0009493736433796585, 0.0003817934775725007, 0.003956696949899197, 0.00013328151544556022, 0.00018726267444435507, 0.00018708399147726595, 0.0003974100109189749, 
7.446116796927527e-05, 0.004446825012564659, 0.003856119466945529, 0.9298545122146606, 0.006153980270028114, 0.01506795920431614, 1.048412286763778e-05, 0.00021056877449154854, 4.8274841901729815e-06, 0.0008535216911695898, 0.00029747566441074014, 0.028239954262971878, 0.00028545953682623804, 0.0005103013245388865, 1.224772177010891e-06, 3.6864200865238672e-06], [0.0020551898051053286, 0.032670263200998306, 0.00018466261099092662, 0.00014305523654911667, 0.0004044832894578576, 0.00043504443601705134, 0.0001868158287834376, 4.68936104880413e-06, 5.4338153859134763e-05, 1.987172936424031e-06, 0.002422003773972392, 0.0006577158928848803, 0.8481961488723755, 0.1044282540678978, 0.005510938353836536, 1.0531987300055334e-06, 7.212372292997316e-05, 1.9279250409454107e-06, 2.8310798370512202e-05, 1.493525633122772e-05, 0.002462130505591631, 1.0841575203812681e-05, 5.312666326062754e-05, 6.970763966052118e-09], [5.9183756093261763e-05, 0.0032169828191399574, 1.2799158639609232e-06, 1.4689037470816402e-06, 5.4523015933227725e-06, 1.7258213119930588e-05, 3.0899777812010143e-06, 1.56409021201398e-06, 2.6588846679942435e-08, 1.0304970601282548e-06, 1.9858141797612916e-08, 5.625765697914176e-05, 1.3258302715257742e-05, 0.9964014291763306, 9.613849397283047e-05, 3.829873094218783e-05, 4.875575427831791e-07, 4.357461023118958e-07, 4.4602290749651274e-09, 1.0920589375018608e-06, 4.195363771941629e-07, 8.403376705246046e-05, 2.831973233696772e-07, 5.172481678528129e-07], [5.44138902114355e-06, 9.950529783964157e-05, 4.722351604868891e-06, 3.2821110380609753e-06, 1.6931513528106734e-05, 1.4461044202107587e-06, 6.924547506059753e-06, 3.700812840179424e-06, 1.412205392625765e-06, 1.4404609949281166e-08, 5.801696261187317e-07, 6.007028474641629e-08, 0.00022442091722041368, 0.0009871574584394693, 0.9947513937950134, 0.0011551798088476062, 0.002389610279351473, 1.24755416663902e-07, 5.662262125838424e-08, 8.217536096033484e-10, 2.1254190869512968e-06, 1.1165957403136417e-06, 
0.00034293989301659167, 1.986437837331323e-06], [3.885061596520245e-05, 0.00026842483202926815, 1.7901875253301114e-05, 4.248061668477021e-05, 1.902180338220205e-05, 1.4251203310777782e-06, 6.3577276705473196e-06, 0.000142886841786094, 4.664021616918035e-05, 1.5890735085122287e-05, 5.891923819945077e-07, 1.4379061212821398e-05, 6.495973821074585e-07, 0.0009521761094219983, 0.0025975967291742563, 0.987122118473053, 0.006365715526044369, 0.0011082128621637821, 1.200510814669542e-05, 7.355555453614215e-07, 9.795751054753055e-08, 8.854873158270493e-06, 3.062134419451468e-05, 0.0011863914551213384], [0.001190529903396964, 0.0035925679840147495, 0.0009101605392061174, 0.0002532019279897213, 0.00024322826357092708, 3.6840850953012705e-05, 0.00016918274923227727, 0.0007996232016012073, 0.008698029443621635, 0.00010082902008434758, 0.0010630807373672724, 1.0556027518759947e-05, 0.00023594038793817163, 4.003741923952475e-05, 0.029232090339064598, 0.05191032588481903, 0.77791827917099, 0.028055960312485695, 0.07741767168045044, 2.1134143025847152e-05, 4.540499867289327e-05, 1.1735111911548302e-05, 0.014771571382880211, 0.0032720111776143312], [8.816229819785804e-05, 3.463311804807745e-05, 0.00012701679952442646, 0.00012033613165840507, 4.89487501909025e-05, 6.512457912322134e-05, 1.4980057585489703e-06, 9.635377500671893e-05, 0.0010456909658387303, 0.0017709678504616022, 0.0001336714340141043, 9.789053729036823e-05, 5.311023414833471e-06, 5.430514192994451e-06, 1.432787121302681e-05, 0.005827333312481642, 0.006101460196077824, 0.959725558757782, 0.01614920049905777, 0.005693711806088686, 0.00014629501674789935, 5.472628618008457e-05, 4.027743125334382e-05, 0.002606132300570607], [1.3905997548135929e-05, 2.1253604245430324e-06, 3.176748941768892e-05, 5.494795914273709e-05, 2.360437429160811e-05, 1.1227484719711356e-06, 4.070554382451519e-07, 2.45057236725188e-07, 1.9520421119523235e-05, 7.379642283922294e-07, 0.0017210929654538631, 5.864671493327478e-06, 0.0001262838632101193, 
2.4142584820197044e-08, 2.8395149911375483e-06, 3.4185984532086877e-06, 0.0026252996176481247, 0.0035573714412748814, 0.9730461835861206, 0.010562034323811531, 0.008016503416001797, 4.60294768345193e-06, 0.00017912423936650157, 9.199383725899679e-07], [7.564003226434579e-06, 1.4625194353357074e-06, 4.311812517698854e-06, 5.19780087415711e-06, 4.1440243876422755e-06, 8.263464224000927e-07, 1.1773902031109174e-07, 1.5087655924617138e-07, 8.973870535555761e-08, 1.2547455980893574e-06, 3.5596804082160816e-06, 5.592896923189983e-05, 2.9357647690630984e-07, 5.340531288311468e-07, 5.188872442829506e-09, 2.442903337396274e-07, 7.482994988095015e-07, 0.0006038413848727942, 0.0016558489296585321, 0.9951997995376587, 0.001960835652425885, 0.00048605859046801925, 1.9844374037347734e-06, 5.3128687795833685e-06], [6.829857011325657e-05, 2.430420499877073e-05, 0.00015961455937940627, 9.38598532229662e-05, 0.00011569417256396264, 0.00014999648556113243, 2.6701934984885156e-05, 5.395631319515815e-07, 1.4529369991578278e-06, 1.204052182401938e-07, 5.8740810345625505e-05, 1.1764419468818232e-05, 0.0038154111243784428, 1.4321878552436829e-05, 2.1488740458153188e-05, 2.022199474538411e-08, 8.298338229906221e-07, 1.2719526694127126e-06, 0.0010182970436289907, 0.02075362764298916, 0.946869969367981, 0.02461128495633602, 0.0021803590934723616, 1.9948183762608096e-06], [0.0005346477264538407, 0.0006106987129896879, 0.00012747581058647484, 3.968595774495043e-05, 0.00012299652735237032, 0.00015818572137504816, 1.7455968190915883e-05, 7.168596312112641e-06, 3.560127481705422e-07, 1.5231341876642546e-06, 3.4317892527724325e-07, 2.945395499409642e-05, 1.3835896425007377e-05, 0.0006831231876276433, 7.1566287260793615e-06, 2.900313347709016e-06, 6.536191108352796e-07, 1.7143449440482073e-05, 5.270838664728217e-05, 0.02351364493370056, 0.007705009542405605, 0.9647759199142456, 0.0007145011913962662, 0.0008633440011180937], [1.4088741409068462e-05, 5.754626545240171e-05, 0.00014272777480073273, 
6.549733370775357e-05, 0.0020564792212098837, 0.00021202709467615932, 0.0004522592935245484, 1.594214882061351e-05, 7.97534448793158e-06, 1.8763341103067432e-08, 2.847594657851005e-07, 3.145248328451089e-08, 1.540075936645735e-05, 1.3040833437116817e-05, 0.0030396936926990747, 1.3248976756585762e-05, 0.00013510037388186902, 1.1869352078974771e-07, 2.3828379198675975e-05, 6.843351911811624e-06, 0.012440632097423077, 0.045726627111434937, 0.9264766573905945, 0.009083875454962254], [1.318823251494905e-05, 1.4090682270762045e-05, 1.01521773103741e-05, 3.537459861036041e-06, 2.3822663933970034e-05, 1.800021891540382e-05, 1.183356380352052e-05, 0.0002492215426173061, 2.006408976740204e-06, 2.6087438527611084e-05, 2.7692903969978033e-08, 7.584629884149763e-07, 4.010876253346396e-08, 6.818716883572051e-06, 6.027806648489786e-06, 0.0004597996885422617, 1.227413576998515e-05, 8.10208439361304e-06, 6.356921744554711e-07, 1.0632087651174515e-05, 1.6827893887239043e-06, 0.0034244118724018335, 0.00030353624606505036, 0.9953933954238892], [0.00881014484912157, 0.02787148766219616, 0.0003432740631978959, 8.421840175287798e-05, 0.0024431312922388315, 0.012239977717399597, 0.00564518291503191, 0.02455325797200203, 0.05122315511107445, 0.00119205960072577, 0.0005510879564099014, 3.64843458555697e-06, 0.00012389826588332653, 3.8048208807595074e-05, 0.0033277245238423347, 0.0006066603236831725, 0.04457412660121918, 0.00018731878662947565, 0.0001920033828355372, 5.88054444961017e-06, 0.0004326167982071638, 6.114253483247012e-05, 0.125427708029747, 0.6900622844696045]], [[0.06071431562304497, 0.09186197072267532, 0.027326863259077072, 0.03987500071525574, 0.058513056486845016, 0.10454054176807404, 0.017195312306284904, 0.03392420709133148, 0.0069125196896493435, 0.06838610768318176, 0.004899505525827408, 0.10454829782247543, 0.010191568173468113, 0.16455335915088654, 0.0011995058739557862, 0.00967990979552269, 0.004054305609315634, 0.021836595609784126, 0.003732877317816019, 
0.05291152745485306, 0.009644529782235622, 0.06490356475114822, 0.002675524214282632, 0.03591898828744888], [0.022702205926179886, 0.053482379764318466, 0.03365161642432213, 0.021556247025728226, 0.02718806453049183, 0.08326871693134308, 0.008721047081053257, 0.08555864542722702, 0.011405428871512413, 0.07746099680662155, 0.003247169777750969, 0.07041469216346741, 0.021555732935667038, 0.2631128430366516, 0.011443068273365498, 0.059689510613679886, 0.004957498051226139, 0.01361045055091381, 0.0007158118532970548, 0.0064584072679281235, 0.0019932740833610296, 0.04078727588057518, 0.005837898701429367, 0.0711810365319252], [0.10052972286939621, 0.10039756447076797, 0.024270614609122276, 0.3169747591018677, 0.023866886273026466, 0.056072164326906204, 0.006859512999653816, 0.044737476855516434, 0.006530684418976307, 0.03464220464229584, 0.013589947484433651, 0.10562429577112198, 0.01787625066936016, 0.007755231577903032, 0.0013099665520712733, 0.011097458191215992, 0.00611081812530756, 0.02499573864042759, 0.007365718949586153, 0.04597334936261177, 0.012925916351377964, 0.02084154449403286, 0.006135826464742422, 0.0035163804423063993], [0.04427196830511093, 0.06556743383407593, 0.7060241103172302, 0.028555655851960182, 0.030913103371858597, 0.011987549252808094, 0.008988801389932632, 0.010921971872448921, 0.0029805537778884172, 0.02846875786781311, 0.005213397089391947, 0.005940203554928303, 0.0038789203390479088, 0.000549189921002835, 0.0020459245424717665, 0.003174206940457225, 0.0011368849081918597, 0.004587030503898859, 0.0035656928084790707, 0.0032323459163308144, 0.0038081309758126736, 0.019572211429476738, 0.0022618239745497704, 0.002353993710130453], [0.02255915105342865, 0.022272992879152298, 0.02237536571919918, 0.07558868080377579, 0.013374868780374527, 0.32276061177253723, 0.0026737311854958534, 0.1526920050382614, 0.004422355908900499, 0.13794708251953125, 0.002745290519669652, 0.03959178552031517, 0.006358186714351177, 0.004539927002042532, 
0.002891751006245613, 0.010305522941052914, 0.00482375780120492, 0.05627061799168587, 0.0014750909758731723, 0.02010085992515087, 0.0019219742389395833, 0.040523216128349304, 0.004773081745952368, 0.027011942118406296], [0.0068154484033584595, 0.00898136105388403, 0.02908591739833355, 0.012518053874373436, 0.4077191948890686, 0.09968707710504532, 0.30238932371139526, 0.031265027821063995, 0.007411961909383535, 0.02006407640874386, 0.0021803039126098156, 0.006524610798805952, 0.0053392443805933, 0.0052172522991895676, 0.003135968931019306, 0.0010192604968324304, 0.0014595311367884278, 0.00044755576527677476, 0.0006563019123859704, 0.001010720618069172, 0.002818359062075615, 0.019783996045589447, 0.007469428703188896, 0.017000101506710052], [0.017138086259365082, 0.020988117903470993, 0.005090906284749508, 0.029194438830018044, 0.015383805148303509, 0.13149920105934143, 0.004372311756014824, 0.5272948741912842, 0.006423089187592268, 0.12168364226818085, 0.005598194897174835, 0.06785149872303009, 0.008624833077192307, 0.009823744185268879, 0.0027431887574493885, 0.002016570884734392, 0.0016842670738697052, 0.0012038928689435124, 5.974349187454209e-05, 0.001698042033240199, 0.00038607799797318876, 0.006893584970384836, 0.0023035332560539246, 0.010044287890195847], [0.0028791693039238453, 0.0035398586187511683, 0.015968849882483482, 0.032519467175006866, 0.006096722092479467, 0.055307649075984955, 0.3456394076347351, 0.04873419925570488, 0.1036636233329773, 0.2672947645187378, 0.001754152704961598, 0.0047635226510465145, 0.0011977842077612877, 0.0016247399616986513, 0.0024316797498613596, 0.022553404793143272, 0.0006237492780201137, 0.002130450215190649, 0.0003766246372833848, 0.0003119121247436851, 0.0009330808534286916, 0.0304581169039011, 0.0066817631013691425, 0.04251532629132271], [0.03127700090408325, 0.045482341200113297, 0.007284923456609249, 0.006843519397079945, 0.027754561975598335, 0.03331432864069939, 0.06581174582242966, 0.2375420778989792, 
0.028950616717338562, 0.34437495470046997, 0.03799382597208023, 0.05615959316492081, 0.001073669409379363, 0.00962059199810028, 0.0014398036291822791, 0.00520313810557127, 0.013114568777382374, 0.03257005661725998, 0.006619045976549387, 0.003009357023984194, 7.708267366979271e-05, 0.00023909234732855111, 0.0006838293629698455, 0.003560276934877038], [0.0006133865099400282, 0.0006990438560023904, 0.0005574385286308825, 0.0010040641063824296, 0.0005860130186192691, 0.0005311873974278569, 0.0013717833207920194, 0.015914956107735634, 0.1670147329568863, 0.7420286536216736, 0.04280791059136391, 0.020956283435225487, 0.00327386986464262, 1.0629002645146102e-05, 0.0001004487494355999, 0.00033522568992339075, 0.0008447060827165842, 0.00041830542613752186, 0.0005582189187407494, 7.970706064952537e-06, 2.4716127882129513e-06, 3.2123464279720793e-06, 3.9240378100657836e-05, 0.00032013244344852865], [0.007507418282330036, 0.006438258569687605, 0.002260475652292371, 0.014787072315812111, 0.0012600990012288094, 0.00304046249948442, 0.0008148047490976751, 0.014523512683808804, 0.019836971536278725, 0.6082401275634766, 0.0032518282532691956, 0.2858707010746002, 0.0048391493037343025, 0.0009562623454257846, 3.225554610253312e-05, 0.004466357175260782, 0.00016710204363334924, 0.008534200489521027, 0.00041664481977932155, 0.009159283712506294, 0.00019667757442221045, 0.0025704570580273867, 1.7294054487138055e-05, 0.0008126269094645977], [0.0035754498094320297, 0.0035679542925208807, 0.0060367463156580925, 0.0025534951128065586, 0.0007550474838353693, 0.00024832686176523566, 0.0009209921699948609, 0.0012390539050102234, 0.005145884118974209, 0.013122785836458206, 0.782822847366333, 0.024448836222290993, 0.1338520050048828, 0.00039414866478182375, 0.009666119702160358, 0.0002751631254795939, 0.0013755145482718945, 0.00035586277954280376, 0.005699541885405779, 0.0009108853992074728, 0.0019582274835556746, 0.00012520141899585724, 0.000928852241486311, 2.11807982850587e-05], 
[0.029364030808210373, 0.09257902204990387, 0.004183641634881496, 0.0136673953384161, 0.0047938707284629345, 0.004368779715150595, 0.0005394347244873643, 0.01713225059211254, 0.00030929691274650395, 0.018706468865275383, 0.005887209437787533, 0.28498896956443787, 0.014690395444631577, 0.4144814908504486, 0.005139824468642473, 0.02210431732237339, 0.000608675938565284, 0.00394013524055481, 0.00013568256690632552, 0.05047163739800453, 0.0007394961430691183, 0.009426881559193134, 0.0006382514256983995, 0.0011029178276658058], [0.0005442866822704673, 0.0026291797403246164, 0.002872392302379012, 0.000599216902628541, 0.0005429817247204483, 0.000861502019688487, 0.00046968169044703245, 0.0025179022923111916, 0.0011233194964006543, 0.0004620984254870564, 0.004606038331985474, 0.0014331320999190211, 0.11280915886163712, 0.03065348044037819, 0.8277568817138672, 0.006151809357106686, 0.00038569539901800454, 6.202953227329999e-05, 2.5778399503906257e-05, 5.0115337216993794e-05, 0.0006272272439673543, 0.0003695639898069203, 0.00234445882961154, 0.00010207715968135744], [0.004537790548056364, 0.020816177129745483, 0.00411357032135129, 0.00998573936522007, 0.001403582515195012, 0.004799173679202795, 0.00274484371766448, 0.011229489929974079, 0.0019995097536593676, 0.002874233992770314, 0.00011108308535767719, 0.002361387014389038, 0.002944100880995393, 0.13861703872680664, 0.05231637880206108, 0.7174533605575562, 0.0010772914392873645, 0.005350705701857805, 8.871252066455781e-05, 0.0008755140588618815, 0.0005551418871618807, 0.008184436708688736, 0.0015047647757455707, 0.0040560029447078705], [0.001238060649484396, 0.0038457605987787247, 0.005594924557954073, 0.0007033711299300194, 3.467387068667449e-05, 0.0001302216696785763, 3.434064274188131e-05, 0.0006927159847691655, 0.0005102003924548626, 0.00011735782754840329, 0.0012750369496643543, 8.663290645927191e-05, 0.003107490949332714, 0.0012559148017317057, 0.9180879592895508, 0.029473595321178436, 0.020731331780552864, 
0.0023563834838569164, 0.001136256381869316, 0.00013037513417657465, 0.0017566134920343757, 0.00024160636530723423, 0.006826847791671753, 0.0006323509733192623], [0.0013294880045577884, 0.0021474126260727644, 0.0038300976157188416, 0.0029752617701888084, 0.00016457254241686314, 0.0004248923796694726, 8.092996722552925e-05, 0.0032084155827760696, 0.0008765487582422793, 0.005550543311983347, 3.5228091292083263e-05, 0.0002711146662477404, 6.15680983173661e-05, 0.0004396380390971899, 0.004727280233055353, 0.7081689238548279, 0.021315021440386772, 0.22643537819385529, 0.0017963498830795288, 0.00285021192394197, 0.00016771542141214013, 0.002276243409141898, 0.00028613960603252053, 0.010581034235656261], [0.0017381039215251803, 0.0013971371809020638, 0.00444241426885128, 0.0016734504606574774, 0.0002024098066613078, 2.4270177163998596e-05, 1.6085557945189066e-05, 0.0002771710860542953, 0.001988066826015711, 0.0006119096651673317, 0.002101635094732046, 0.00034160548239015043, 0.0011684689670801163, 7.025957165751606e-05, 0.010484982281923294, 0.03707924112677574, 0.5944247245788574, 0.1436106413602829, 0.16742950677871704, 0.012525675818324089, 0.013306297361850739, 0.0005613954272121191, 0.0024334690533578396, 0.0020911290775984526], [0.004236523061990738, 0.001984496833756566, 0.00158753152936697, 0.00859800260514021, 0.0002709435939323157, 7.080200157361105e-05, 3.8250932448136155e-06, 0.00018465430184733123, 0.00027918501291424036, 0.0015893523814156651, 0.0005199245060794055, 0.0037784737069159746, 0.00018033181549981236, 0.00020031584426760674, 0.00010090015712194145, 0.029717907309532166, 0.022592635825276375, 0.32764241099357605, 0.038544539362192154, 0.5214751362800598, 0.01778905838727951, 0.01655411161482334, 0.0003386466996744275, 0.0017602101434022188], [0.0013927890686318278, 0.00043687300058081746, 0.0016258974792435765, 0.011013873852789402, 6.811261846451089e-05, 8.251520921476185e-05, 5.79872266825987e-06, 1.60942963702837e-05, 0.00019166745187249035, 
0.00019777670968323946, 0.0029595806263387203, 0.001209968701004982, 0.0031189259607344866, 7.317634299397469e-05, 0.00035334055428393185, 0.002671103924512863, 0.002926348941400647, 0.026049265637993813, 0.09904805570840836, 0.16584265232086182, 0.5987341403961182, 0.077869713306427, 0.003861239179968834, 0.0002510968188289553], [0.007187787909060717, 0.0048330603167414665, 0.001606879523023963, 0.0019292422803118825, 0.0011204307666048408, 0.000924954132642597, 0.0002935364900622517, 0.000213369115954265, 2.105182829836849e-05, 5.965983655187301e-05, 0.0007830715039744973, 0.0016084886156022549, 0.00011379901843611151, 0.003044791053980589, 9.930717351380736e-05, 0.0004123589606024325, 0.0006748396554030478, 0.01634104736149311, 0.025024324655532837, 0.8251428604125977, 0.03944775089621544, 0.06446041166782379, 0.004153053276240826, 0.000503893883433193], [0.007655529771000147, 0.007554641924798489, 0.0030471552163362503, 0.018909303471446037, 0.00222965469583869, 0.005403530318289995, 0.0005946289747953415, 0.002370145870372653, 0.00010176871001021937, 5.786613473901525e-05, 0.0016243568388745189, 0.0018455871613696218, 0.011501938104629517, 0.0018819809192791581, 0.0058778743259608746, 0.0018876349786296487, 0.0020947095472365618, 0.0017540218541398644, 0.008555728010833263, 0.048487935215234756, 0.17607223987579346, 0.14695163071155548, 0.5268601179122925, 0.016680054366588593], [0.0005967204342596233, 0.0006866455078125, 0.0023427463602274656, 0.003466388676315546, 0.0007588334265165031, 0.005466391798108816, 0.00062351900851354, 0.008083157241344452, 0.00023175236128736287, 0.0002015697245951742, 4.8813358262123074e-06, 0.00015550617536064237, 9.219667845172808e-05, 0.0008809419814497232, 0.0003693350590765476, 0.01113972533494234, 4.796434222953394e-05, 0.0006025280454196036, 3.9871982153272256e-05, 0.010869563557207584, 0.004484551027417183, 0.7785983681678772, 0.016574880108237267, 0.1536818891763687], [0.00029981727129779756, 0.0002167394559364766, 
0.003935761749744415, 0.0013044923543930054, 0.000330350041622296, 0.001019610557705164, 0.0041452432051301, 0.009412870742380619, 0.0010671246564015746, 9.513604163657874e-05, 0.00016027047240640968, 9.667380254541058e-06, 0.00014260651369113475, 1.6968479030765593e-05, 0.019835492596030235, 0.0043383254669606686, 0.001776761026121676, 0.00012714482727460563, 0.0007648559403605759, 0.00027011564816348255, 0.001613688305951655, 0.008067009970545769, 0.7338382601737976, 0.20721176266670227]], [[0.11268872022628784, 0.20947006344795227, 0.022961152717471123, 0.011008553206920624, 0.013875480741262436, 0.011341817677021027, 0.03209437057375908, 0.017062608152627945, 0.02484130673110485, 0.1033056378364563, 0.022598227486014366, 0.06825356185436249, 0.016750261187553406, 0.036976464092731476, 0.0031639502849429846, 0.005160665139555931, 0.015456438064575195, 0.035728465765714645, 0.023508083075284958, 0.033239927142858505, 0.015750722959637642, 0.0469236820936203, 0.01056073047220707, 0.10727903991937637], [0.08911127597093582, 0.15500225126743317, 0.012012530118227005, 0.011161348782479763, 0.003694073762744665, 0.00474133063107729, 0.009190103970468044, 0.006998252123594284, 0.002738635055720806, 0.007328738924115896, 0.007450288161635399, 0.0830850750207901, 0.1117204874753952, 0.2917254865169525, 0.01357138529419899, 0.009323786944150925, 0.0035528221633285284, 0.006482876371592283, 0.006413189694285393, 0.05249727889895439, 0.028753018006682396, 0.05705837160348892, 0.00945550948381424, 0.016931958496570587], [0.16321004927158356, 0.08173071593046188, 0.463218629360199, 0.058178987354040146, 0.021540585905313492, 0.019469154998660088, 0.014143344014883041, 0.0282550361007452, 0.04346476122736931, 0.022520912811160088, 0.008700674399733543, 0.004998108837753534, 0.0018333828775212169, 0.0031509632244706154, 0.002926879096776247, 0.0011682460317388177, 0.0009793491335585713, 0.004298200365155935, 0.0017299477476626635, 0.009589393623173237, 0.03155796229839325, 
0.00815650075674057, 0.0028490102849900723, 0.00232917838729918], [0.03570922091603279, 0.025488831102848053, 0.14440956711769104, 0.042739566415548325, 0.13520488142967224, 0.02961556427180767, 0.01738794893026352, 0.005839931312948465, 0.34944167733192444, 0.01415175013244152, 0.03060922399163246, 0.002920550527051091, 0.009137868881225586, 0.0008796719484962523, 0.0026995805092155933, 0.004009663127362728, 0.010915243998169899, 0.010101111605763435, 0.02571677602827549, 0.003359092865139246, 0.08288363367319107, 0.0039871977642178535, 0.010881478898227215, 0.0019100010395050049], [0.016898881644010544, 0.0069262185133993626, 0.7306488156318665, 0.004313356708735228, 0.01836700178682804, 0.0008581439615227282, 0.009501311928033829, 0.012812228873372078, 0.10550382733345032, 0.0046552568674087524, 0.03726653382182121, 0.0006627577822655439, 0.0002333938900846988, 1.3040030353295151e-05, 0.00033744200482033193, 0.0004910464049316943, 0.0027304640971124172, 0.0021170570980757475, 0.0123243797570467, 0.0039052420761436224, 0.026096545159816742, 0.0001948879798874259, 0.0028609614819288254, 0.0002812141610775143], [0.028707845136523247, 0.01741054095327854, 0.1322612166404724, 0.5303527116775513, 0.033344049006700516, 0.018799487501382828, 0.019764596596360207, 0.0007455165614373982, 0.0011940886033698916, 0.008144628256559372, 0.015472663566470146, 0.012902641668915749, 0.00413711229339242, 0.0011159747373312712, 0.000698074116371572, 0.00012810768384952098, 0.0007531860028393567, 0.0043029747903347015, 0.007146070711314678, 0.006909118965268135, 0.06714756041765213, 0.06872677803039551, 0.013354567810893059, 0.006480562034994364], [0.01498075295239687, 0.036709725856781006, 0.36998605728149414, 0.0014074955834075809, 0.15342099964618683, 0.023672452196478844, 0.011873772367835045, 0.00917519349604845, 0.3494739234447479, 0.0007604939164593816, 0.002972907153889537, 7.23247358109802e-05, 0.00027540611336007714, 1.8395388906355947e-05, 0.00010575997293926775, 
1.9485218217596412e-05, 3.903443575836718e-05, 3.221552833565511e-05, 0.00020400491484906524, 8.765978418523446e-05, 0.016807297244668007, 0.0007216723752208054, 0.006990649737417698, 0.00019234963110648096], [0.011016171425580978, 0.016049480065703392, 0.005419441498816013, 0.040792640298604965, 0.01631888560950756, 0.7500472068786621, 0.03781825304031372, 0.012483458034694195, 0.0016836964059621096, 0.0007228306494653225, 0.00015827758761588484, 0.0003907074860762805, 0.0006247684359550476, 0.015143358148634434, 0.00027069286443293095, 0.00020270865934435278, 1.9561204680940136e-05, 4.196699592284858e-05, 1.6107051123981364e-05, 0.000426141225034371, 0.004701059777289629, 0.02684074081480503, 0.03151656314730644, 0.027295328676700592], [0.012092187069356441, 0.015112106688320637, 0.004708799067884684, 0.0009364238940179348, 0.003891595173627138, 0.005908424500375986, 0.8531316518783569, 0.062285181134939194, 0.016671152785420418, 0.0010033282451331615, 0.004576044622808695, 0.00027885290910489857, 0.003443569177761674, 0.0031200749799609184, 0.00542029831558466, 0.0001544786209706217, 0.00025679898681119084, 2.5863739665510366e-06, 2.0577790564857423e-05, 2.7494415917317383e-06, 0.00011142575385747477, 2.781231887638569e-05, 0.0043810224160552025, 0.002462887205183506], [0.00040861425804905593, 0.00013012479757890105, 0.0005867861327715218, 3.190479037584737e-05, 0.00020824087550863624, 0.0023133771028369665, 0.000998700619675219, 0.9818084836006165, 0.002183937467634678, 0.003988654352724552, 7.664732947887387e-06, 2.941545426438097e-05, 8.989414368443249e-08, 8.210736268665642e-05, 1.903924930957146e-05, 0.0006525046192109585, 4.026561782666249e-06, 9.373605280416086e-06, 2.5056005270585047e-08, 2.177393753299839e-06, 5.293976457210192e-08, 7.944336175569333e-06, 1.801778307708446e-05, 0.006508754100650549], [0.0022688989993184805, 0.003212941810488701, 0.0011341022327542305, 0.00012562223128043115, 0.0013907774118706584, 0.0003885884361807257, 
0.0016296874964609742, 0.0029387492686510086, 0.968818187713623, 0.009422508999705315, 0.006234027910977602, 3.302429831819609e-05, 0.0001998850639211014, 5.724845323129557e-06, 0.0001204791697091423, 0.00010617749649100006, 0.001339617883786559, 7.569255831185728e-05, 0.00037079915637150407, 1.8781062181005836e-06, 6.68371285428293e-05, 7.632187930539658e-07, 9.347755258204415e-05, 2.160163057851605e-05], [0.0010372382821515203, 0.0005676397704519331, 0.002641425933688879, 0.0003387675096746534, 0.00030403886921703815, 0.0006045525660738349, 8.638439612695947e-05, 0.011536960490047932, 0.040811486542224884, 0.9281846284866333, 0.0022555983159691095, 0.004754575435072184, 6.7634264269145206e-06, 6.913843390066177e-05, 1.1587118024181109e-05, 0.0021305778063833714, 8.624832116765901e-05, 0.0038842628709971905, 3.353221109136939e-05, 0.0003187129623256624, 3.0390924621315207e-06, 1.0769259461085312e-05, 2.6689667720347643e-06, 0.00031932478304952383], [0.00024338184448424727, 0.00032534130150452256, 0.006640137173235416, 0.00024271152506116778, 0.00019678483658935875, 6.046163889550371e-06, 0.001094931154511869, 2.1991669200360775e-05, 0.028341911733150482, 0.0006314494530670345, 0.9334582090377808, 0.0004252393264323473, 0.012538276612758636, 1.0306978310836712e-06, 0.000846114126034081, 9.060963748197537e-06, 0.00045812115422450006, 3.169268893543631e-05, 0.013865377753973007, 1.6914344087126665e-05, 0.0005285344668664038, 1.5766487138080265e-07, 7.635916699655354e-05, 1.6359942378585401e-07], [0.00039121590089052916, 0.0002591839001979679, 0.00022471156262326986, 0.001146927708759904, 4.9367758037988096e-05, 6.323042180156335e-05, 5.0112197641283274e-05, 0.00024915015092119575, 1.787357723515015e-05, 0.0007114345789887011, 0.0046471040695905685, 0.967279314994812, 0.0037869014777243137, 0.0156633872538805, 2.77989347523544e-05, 8.58264829730615e-05, 4.447466608326067e-07, 6.267879507504404e-05, 1.2144432730565313e-05, 0.005160308443009853, 3.219282007194124e-05, 
7.510402792831883e-05, 9.46059628859075e-07, 2.632466248542187e-06], [0.0005755372112616897, 0.0012316565262153745, 0.00010255716915708035, 0.00018721497326623648, 6.295795901678503e-05, 8.059261017479002e-05, 0.0009627907420508564, 3.064401607844047e-05, 0.00021133928385097533, 7.0536439125135075e-06, 0.004563028924167156, 0.0007376300636678934, 0.9262778162956238, 0.039384886622428894, 0.01936400681734085, 2.7475065508042462e-05, 1.165627509180922e-05, 2.144021209460334e-07, 0.00013869132089894265, 6.803653377573937e-05, 0.005373937543481588, 5.2742088882951066e-05, 0.0005472911288961768, 2.892859640724055e-07], [0.0009013406233862042, 0.0005344762466847897, 0.00010060907516162843, 0.00017621458391658962, 0.00022590610024053603, 0.0006126450607553124, 0.001195422257296741, 0.0038501948583871126, 7.585091952932999e-05, 0.00040870747761800885, 0.00014168804045766592, 0.011229808442294598, 0.010200664401054382, 0.9449086785316467, 0.012001628056168556, 0.008249341510236263, 0.00010310571087757125, 5.2752322517335415e-05, 2.1942549210507423e-05, 0.0012399445986375213, 0.00018427582108415663, 0.0023777198512107134, 0.00021594298596028239, 0.0009911460801959038], [4.803305273526348e-05, 2.2905793230165727e-05, 5.5765565775800496e-05, 1.2517151844804175e-05, 2.4812294213916175e-05, 6.460425993282115e-06, 0.0010251527419313788, 0.0007795262499712408, 0.001057154149748385, 1.3099584975861944e-05, 0.0003897666756529361, 5.9202393458690494e-06, 0.005427564959973097, 0.0014290729304775596, 0.9530384540557861, 0.019097227603197098, 0.015422923490405083, 1.1391791304049548e-05, 0.0006917264545336366, 9.316055184172e-06, 0.00023404674720950425, 2.8857730285380967e-06, 0.0011766875395551324, 1.772984251147136e-05], [2.928731009887997e-05, 1.4245509191823658e-05, 6.006933745084098e-06, 2.6701045499066822e-06, 8.715166586625855e-06, 1.1000855010934174e-05, 4.717499905382283e-06, 0.006387920584529638, 6.425245373975486e-05, 0.007352718152105808, 3.6728649774886435e-06, 
0.0010247434256598353, 4.545822775980923e-06, 0.019248247146606445, 0.008767232298851013, 0.8449709415435791, 0.03601188585162163, 0.05436546355485916, 1.9218556190025993e-05, 0.0004768113431055099, 6.652719548583264e-07, 0.00022427229851018637, 4.865778464591131e-06, 0.020995894446969032], [3.425808245083317e-05, 2.7090994990430772e-05, 0.00015893590170890093, 4.5548381422122475e-06, 2.7089057766715996e-05, 1.5199721019598655e-06, 8.490062100463547e-06, 0.00011148227349622175, 0.01816519722342491, 0.00032538181403651834, 0.00040136263123713434, 5.585464350588154e-06, 9.920414595399052e-05, 1.5949844964779913e-06, 0.02216433547437191, 0.02606404386460781, 0.8760741353034973, 0.025189688429236412, 0.03085457533597946, 2.8125938115408644e-05, 0.00017775157175492495, 1.0674247050701524e-06, 5.4077638196758926e-05, 2.036479600064922e-05], [4.025308044219855e-06, 8.409812721765775e-07, 6.890664735692553e-06, 6.569678134837886e-06, 2.0766624402313028e-06, 3.208335783710936e-07, 1.4675297421717914e-08, 4.013696980109671e-06, 1.0020333320426289e-05, 0.00035368045791983604, 4.6163236220309045e-06, 0.00028704330907203257, 7.136079460678957e-08, 2.6009908538071613e-07, 3.3565723356332455e-07, 0.0003693080216180533, 0.0010404267814010382, 0.9890093207359314, 0.00138044951017946, 0.007455216720700264, 1.4666758033854421e-05, 1.2331428479228634e-05, 4.910766548960055e-08, 3.746055517694913e-05], [0.00012493257236201316, 3.8154132198542356e-05, 0.0001975560444407165, 7.155272032832727e-05, 4.325289773987606e-05, 2.067709829134401e-06, 7.053774425003212e-06, 4.980061021342408e-06, 0.0007193004712462425, 0.0001719709689496085, 0.011706924997270107, 0.0009248732822015882, 0.0009913910180330276, 1.149340050687897e-06, 4.457102477317676e-05, 4.932151205139235e-05, 0.009012388065457344, 0.04506821557879448, 0.9068571329116821, 0.014367643743753433, 0.009545546025037766, 1.8007291146204807e-05, 2.629723348945845e-05, 5.676197361026425e-06], [2.4396442313445732e-05, 
2.8307506454439135e-06, 7.523374370066449e-05, 2.7369400413590483e-05, 4.219443781039445e-06, 1.921965349538368e-06, 4.8717460288116854e-08, 9.482423592999112e-07, 1.5598926950133318e-07, 4.2608999137883075e-06, 3.2331611237168545e-06, 0.0006510906969197094, 8.118377081700601e-07, 1.7904899323184509e-06, 2.418414624116849e-08, 6.0805491557403e-06, 1.245281509909546e-06, 0.005756591912358999, 0.0013257015962153673, 0.9885311126708984, 0.002118622651323676, 0.0014526412123814225, 7.015217988737277e-07, 8.849948244460393e-06], [0.00031561258947476745, 0.0001882429060060531, 0.00013430423859972507, 0.0004902433138340712, 0.0001241808058694005, 2.72670677077258e-05, 3.99538257624954e-05, 3.9512909211225633e-07, 3.7966140098433243e-06, 2.556274125709024e-07, 7.07452927599661e-05, 5.738237814512104e-05, 0.005009201355278492, 4.2625481000868604e-05, 3.0140183298499323e-05, 2.132742110916297e-06, 2.901750303863082e-05, 3.199895581929013e-05, 0.03046327643096447, 0.011792906560003757, 0.9388269186019897, 0.00956038013100624, 0.002750288462266326, 8.817362868285272e-06], [2.630511335155461e-05, 1.0398740414530039e-05, 6.997438322287053e-05, 9.28291046875529e-05, 3.7494795833481476e-05, 0.00024205587396863848, 4.949315552948974e-06, 1.973420694412198e-05, 6.587381307099349e-08, 5.545209091906145e-07, 7.949081748392928e-08, 1.7909247617353685e-05, 4.062244443048257e-06, 0.00033679584157653153, 5.900415999349207e-06, 3.218850906705484e-05, 4.538181315183465e-07, 1.5637044270988554e-05, 1.0559303518675733e-05, 0.03150218725204468, 0.005321340635418892, 0.9464573860168457, 0.0037639536894857883, 0.012027141638100147]], [[0.002455379581078887, 0.01069711335003376, 0.47920843958854675, 0.04864303767681122, 0.02692314237356186, 0.08217724412679672, 0.12726140022277832, 0.04557475075125694, 0.09055604040622711, 0.0038499566726386547, 0.008252017199993134, 0.0011315494775772095, 0.021421901881694794, 0.0021886127069592476, 0.0318712443113327, 0.00038309936644509435, 
0.001578698051162064, 0.0005427002906799316, 0.00247991643846035, 0.0003308449231553823, 0.005394710227847099, 0.0017126320162788033, 0.004264704883098602, 0.0011009202571585774], [0.0018067440250888467, 0.015478136949241161, 0.1379874050617218, 0.0036516068503260612, 0.060737669467926025, 0.3086843192577362, 0.07906272262334824, 0.07756980508565903, 0.25382286310195923, 0.032407473772764206, 0.0032723471522331238, 0.0005079287220723927, 0.007328846957534552, 0.0012509973021224141, 0.00725723709911108, 0.0001679368142504245, 0.0020434351172298193, 0.00017363451479468495, 0.0003184280067216605, 1.7929007299244404e-05, 0.00016423447232227772, 0.0002558958367444575, 0.001947097247466445, 0.00408542063087225], [0.004488380625844002, 0.0062738037668168545, 0.04330393299460411, 0.9111384153366089, 0.0034491640981286764, 0.0009293495095334947, 0.0032612676732242107, 0.003263972932472825, 0.001930905389599502, 0.001243248931132257, 0.0019640473183244467, 0.0025992761366069317, 0.0013068541884422302, 0.0002177929418394342, 0.0013582026585936546, 0.0011306348023936152, 0.0008538399706594646, 0.0005328840925358236, 0.0011238879524171352, 0.0004777976719196886, 0.0008642908651381731, 0.0023571152705699205, 0.005660992115736008, 0.00026990962214767933], [0.0015798731474205852, 0.007277462165802717, 0.1238519623875618, 0.00865323469042778, 0.7481173872947693, 0.04908294975757599, 0.0017979627009481192, 0.006593435537070036, 0.003559292294085026, 0.00013735596439801157, 0.00016497467004228383, 0.000390317989513278, 0.0034108341205865145, 0.00024323916295543313, 0.0027779950760304928, 0.0001188504757010378, 0.000951424241065979, 0.00020552607020363212, 0.0007055862224660814, 0.0011210090015083551, 0.011599461548030376, 0.02034117467701435, 0.005836340133100748, 0.0014823406236246228], [0.002665687119588256, 0.0017027505673468113, 0.017256274819374084, 0.004965798929333687, 0.0038677642587572336, 0.8930054306983948, 0.007348408456891775, 0.017444290220737457, 0.0013071949360892177, 
0.003913783933967352, 0.0003824948216788471, 0.0004852970887441188, 0.003701785346493125, 0.0019042098429054022, 0.0015214636223390698, 6.449077773140743e-05, 2.5749866836122237e-05, 7.798385195201263e-05, 9.123046038439497e-05, 0.0005827395361848176, 0.003458946943283081, 0.022445110604166985, 0.005169570446014404, 0.006611568387597799], [0.00022784496832173318, 0.0001425920781912282, 0.004534967243671417, 0.0006960463360883296, 0.0009359077084809542, 0.010118672624230385, 0.8227341175079346, 0.10652171075344086, 0.0009954161942005157, 0.0030293867457658052, 0.0006800066912546754, 0.00011529698531376198, 2.7876338208443485e-05, 4.5333541493164375e-05, 0.0012918494176119566, 0.0001222683786181733, 1.7265732822124846e-05, 3.2797317999211373e-06, 2.7903413865715265e-05, 6.9283096308936365e-06, 1.3411078725766856e-05, 0.001423112116754055, 0.008547060191631317, 0.037741657346487045], [0.00013269484043121338, 1.6255047739832662e-05, 0.0009945619385689497, 0.0013219056418165565, 4.818522938876413e-05, 0.0016572902677580714, 0.012566950172185898, 0.9432915449142456, 0.0005442688125185668, 0.014292274601757526, 0.0001509634021203965, 0.009997870773077011, 2.6720370442490093e-05, 2.609927651064936e-05, 0.00043624467798508704, 0.00042758320341818035, 3.0568442070944e-06, 2.0982790829293663e-06, 2.666642444637546e-07, 1.9561859971872764e-06, 1.0923166655629757e-06, 0.001069153775461018, 6.750020111212507e-05, 0.012923432514071465], [5.209432129049674e-05, 7.459839980583638e-05, 0.0019096708856523037, 0.0006625264650210738, 0.00045631674584001303, 0.0011112549109384418, 0.002481800736859441, 0.00492413155734539, 0.3607407510280609, 0.6202103495597839, 0.0019818642176687717, 0.00038257797132246196, 0.00043595003080554307, 1.2084191439498682e-05, 0.00044664315646514297, 0.0005074554355815053, 0.0009565365617163479, 0.00020415660401340574, 5.8339534007245675e-05, 5.44302565685939e-07, 3.0284559215942863e-06, 5.876189607079141e-05, 0.0004833057464566082, 0.0018453036900609732], 
[0.0020318739116191864, 0.004302291665226221, 0.01391538791358471, 0.005536223761737347, 0.002241414738819003, 0.0024867975153028965, 0.012608401477336884, 0.005679480265825987, 0.06131444498896599, 0.5361493229866028, 0.26411426067352295, 0.020330660045146942, 0.010177918709814548, 0.002486900892108679, 0.0006267625140026212, 0.0011001031380146742, 0.009245205670595169, 0.03203796595335007, 0.011864363215863705, 0.001459007617086172, 7.582377293147147e-05, 1.947971895788214e-06, 6.141579069662839e-05, 0.00015195885498542339], [4.865538721787743e-05, 7.496172656829003e-06, 7.685676246182993e-05, 3.1649648008169606e-05, 1.5193922990874853e-05, 5.653494099533418e-06, 0.0002303359069628641, 0.00012763385893777013, 0.00021072484378237277, 0.0019027948146685958, 0.9889398217201233, 0.005233149975538254, 0.0021102842874825, 1.0675980774976779e-06, 1.3140595001459587e-05, 5.768329174316023e-07, 3.0443407013081014e-05, 2.7805828722193837e-05, 0.0009449059725739062, 3.2057643693406135e-05, 8.186030754586682e-06, 6.114394324185923e-08, 1.3277276593726128e-06, 9.439718695603005e-08], [0.0017519152024760842, 0.000795002153608948, 0.0002242714399471879, 0.0033964484464377165, 8.67982889758423e-05, 2.9918517611804418e-05, 1.5454583262908272e-05, 8.467052248306572e-05, 1.2983196029381361e-05, 0.0004337042919360101, 0.0019549899734556675, 0.9664211273193359, 0.00663745729252696, 0.0038380951154977083, 2.040871777353459e-06, 1.2994580174563453e-05, 1.1162160262756515e-06, 7.659001130377874e-05, 3.840203498839401e-05, 0.014024467207491398, 9.011686051962897e-05, 7.098715286701918e-05, 1.9267115192178608e-07, 2.203325948357815e-07], [0.011500977911055088, 0.010759809985756874, 7.138620276236907e-05, 0.00047889171401038766, 0.0002189231017837301, 7.029830157989636e-05, 1.804161729523912e-05, 6.145192401163513e-06, 4.295957842259668e-05, 1.6340245565515943e-06, 0.0012178110191598535, 0.0008143791346810758, 0.9683659076690674, 0.004196956753730774, 0.0006040750886313617, 
2.411260993540054e-06, 3.932512481696904e-05, 2.284090214743628e-06, 0.0001563036785228178, 3.490438393782824e-05, 0.0013410538667812943, 4.143390924582491e-06, 5.147304182173684e-05, 1.7684570252640697e-08], [0.0031975337769836187, 0.014876047149300575, 3.5327961086295545e-05, 0.00014948581520002335, 4.920395895169349e-06, 1.02225085356622e-05, 4.3822251427627634e-06, 3.256118134231656e-06, 2.9063036777188245e-07, 4.906488356937189e-06, 5.078941285319161e-07, 0.00010264148295391351, 4.8672634875401855e-05, 0.9799464344978333, 0.0007018555188551545, 0.0007301201694644988, 1.1438205547165126e-06, 9.97874576569302e-06, 1.1033429814233386e-07, 1.128838357544737e-05, 1.9181456991645973e-06, 0.00015269518189597875, 3.493158146739006e-06, 2.870668140531052e-06], [6.345880592562025e-06, 1.9432807675912045e-05, 1.3717236470256466e-05, 8.032934033508354e-07, 7.915547826087277e-07, 4.9252616918238346e-06, 6.224502430995926e-05, 4.229879050399177e-05, 5.835098363604629e-06, 8.382411209595375e-08, 3.041829359062831e-06, 1.271989020779074e-07, 0.000139489202410914, 0.00011487273150123656, 0.9991793036460876, 0.00014370010467246175, 7.772848039167002e-05, 4.209670478871885e-08, 1.6881324427231448e-07, 4.8851711564879e-10, 3.805803601153457e-07, 7.381079285551095e-07, 0.00017206738993991166, 1.174924364022445e-05], [0.00035416713217273355, 0.0016943421214818954, 5.9263009461574256e-05, 4.256018655723892e-05, 1.5495059415115975e-05, 1.3020558071730193e-06, 1.3165193195163738e-05, 0.0003845489409286529, 0.0002386291162110865, 4.869977055932395e-05, 4.554618499241769e-06, 1.267479638045188e-05, 5.525368464986968e-07, 0.00036756627378053963, 0.008878331631422043, 0.9566622972488403, 0.027785858139395714, 0.0005564686143770814, 9.340142241853755e-06, 1.214911776514782e-06, 2.1433629626699258e-07, 7.86618602433009e-06, 0.0003465830232016742, 0.0025143148377537727], [0.0001049725033226423, 0.00018411689961794764, 0.00034292653435841203, 1.878371949715074e-05, 0.0001071486112778075, 
3.944072432204848e-06, 6.658565325778909e-06, 4.8013094783527777e-05, 0.0005622597527690232, 7.642831405973993e-06, 0.0002754714514594525, 2.918108521043905e-06, 5.31517289346084e-05, 1.2313372508288012e-06, 0.021583620458841324, 0.0028300131671130657, 0.9617334008216858, 0.0026482066605240107, 0.007456624880433083, 6.490972282335861e-06, 9.398660768056288e-05, 1.3636733910971088e-06, 0.0016534049063920975, 0.0002737304603215307], [0.0010774345137178898, 0.0014036804204806685, 0.0010055985767394304, 0.00024573810514993966, 0.00013465825759340078, 2.3605653041158803e-05, 2.797083880068385e-06, 1.678660191828385e-05, 0.0002937244425993413, 0.0005376915214583278, 0.0006845776224508882, 8.088665344985202e-05, 1.1750842531910166e-05, 4.092687231604941e-05, 0.00017203895549755543, 0.005878471303731203, 0.04045066237449646, 0.864177405834198, 0.06846658140420914, 0.014295335859060287, 0.0005664670607075095, 6.173652946017683e-05, 0.00014775866293348372, 0.00022366346092894673], [1.7750209053701838e-06, 1.0049634511233307e-06, 3.690063749672845e-06, 1.1670957064779941e-05, 4.952478047925979e-05, 2.586400000836875e-07, 4.308860468427156e-07, 2.796830500528813e-08, 2.955197260234854e-06, 4.589961122292152e-07, 0.000756027759052813, 1.492834144301014e-06, 2.0779416445293464e-05, 2.5612723053569653e-09, 5.218237788540137e-07, 8.432188565166143e-07, 0.0012098838342353702, 0.0007027444080449641, 0.9936448335647583, 0.0019374735420569777, 0.0015590413240715861, 2.766575335044763e-06, 9.168797987513244e-05, 7.303044924356072e-08], [7.777726568747312e-05, 1.1302088751108386e-05, 1.3818849765812047e-05, 0.00035149307223036885, 3.078881491092034e-05, 6.291963472904172e-06, 1.277060505344707e-06, 6.211437835190736e-07, 2.670825836048607e-07, 1.7230817320523784e-05, 3.0404355129576288e-05, 0.0012896520784124732, 1.0595976164040621e-05, 6.266310265345965e-06, 8.404720119870035e-08, 8.702358172740787e-06, 5.114705800224328e-06, 0.0017089162720367312, 0.00669697904959321, 
0.9691537022590637, 0.007462987210601568, 0.013050252571702003, 2.9020920919720083e-05, 3.63925464625936e-05], [5.1779697969323024e-05, 7.097056368365884e-06, 2.3038101062411442e-05, 0.00041052448796108365, 2.854193007806316e-05, 0.00010325796756660566, 1.0210817890765611e-05, 1.9308161824937997e-07, 8.416649279752164e-07, 3.963024255426717e-07, 6.626717367907986e-05, 6.92558251103037e-06, 0.006135249510407448, 5.172972578293411e-06, 4.2878760723397136e-05, 3.658486491531221e-07, 3.4214222068840172e-06, 9.181891073239967e-06, 0.00795045681297779, 0.0027588331140577793, 0.9427505731582642, 0.03211071342229843, 0.007519581355154514, 4.376219749246957e-06], [0.004324847366660833, 0.005786948837339878, 0.004262135364115238, 0.005710388533771038, 0.004484756384044886, 0.006940674036741257, 0.0035176961682736874, 0.0008633933030068874, 6.16010365774855e-05, 6.768589742023323e-07, 3.794174699578434e-05, 4.122816972085275e-05, 0.0017018400831148028, 0.009545406326651573, 0.009747360832989216, 0.000598141981754452, 0.00036073438241146505, 0.0002707544481381774, 0.005547365173697472, 0.055170394480228424, 0.482774019241333, 0.2148953080177307, 0.1773044466972351, 0.006052051670849323], [0.00012133536074543372, 5.425190465757623e-05, 8.508353857905604e-06, 1.7184233001898974e-05, 0.00021293395548127592, 0.00010174328781431541, 0.00022876982984598726, 5.230966053204611e-05, 3.1165286600298714e-06, 4.509156781296042e-08, 4.4880127347823873e-07, 6.498488147599346e-08, 2.00653012143448e-05, 6.800953542551724e-06, 0.001079390523955226, 4.669729241868481e-05, 0.00010661211126716807, 1.2596183296409436e-07, 3.4873570257332176e-05, 9.700568625703454e-06, 0.0011799855856224895, 0.007776898797601461, 0.9794387817382812, 0.009499330073595047], [0.0006168180261738598, 0.0006027090712450445, 0.00013035870506428182, 3.237438795622438e-05, 0.0001038400805555284, 0.0004970093141309917, 0.0009426283650100231, 0.0028937608003616333, 2.8754337108694017e-05, 5.865218918188475e-05, 
4.956803536515508e-07, 3.0425555905821966e-06, 7.536258550544517e-08, 0.00015846786845941097, 5.982965012663044e-05, 0.0007215419318526983, 3.8144164136610925e-05, 1.8671571524464525e-05, 2.350062231926131e-06, 6.895366823300719e-05, 1.426416292815702e-05, 0.002001491840928793, 0.0031590494327247143, 0.9878467917442322], [0.10646221041679382, 0.02241288311779499, 0.0006631187279708683, 9.075352136278525e-05, 0.0016352327074855566, 0.0006229592836461961, 0.0410892628133297, 0.08375873416662216, 0.04682966694235802, 0.00033792437170632184, 0.0007656642119400203, 1.5015630197012797e-06, 7.625289981660899e-06, 1.406222622790665e-06, 0.001328948768787086, 0.0005329736741259694, 0.04036516696214676, 4.475707464735024e-05, 0.0004998120130039752, 2.0795509954041336e-06, 7.558971992693841e-05, 5.5787495512049645e-06, 0.23546960949897766, 0.4169965088367462]], [[0.18145588040351868, 0.16334673762321472, 0.047718193382024765, 0.01914931833744049, 0.2208530604839325, 0.023958882316946983, 0.006851618643850088, 0.015077827498316765, 0.0700262263417244, 0.010021074675023556, 0.07578698545694351, 0.017129074782133102, 0.09152588248252869, 0.008509764447808266, 0.010212996043264866, 0.0004867310053668916, 0.009634158574044704, 0.001292490866035223, 0.0025537805631756783, 0.0035446130204945803, 0.008142085745930672, 0.0012782664271071553, 0.009648753330111504, 0.0017956269439309835], [0.1331053227186203, 0.1091850996017456, 0.04376038908958435, 0.012275551445782185, 0.1666012406349182, 0.03167302906513214, 0.013713551685214043, 0.01879027672111988, 0.038914307951927185, 0.0016420612810179591, 0.045226067304611206, 0.008704190142452717, 0.2540174126625061, 0.020154638215899467, 0.062010519206523895, 0.0003132422862108797, 0.006268672179430723, 0.0002499269612599164, 0.0007496175821870565, 0.0004216564993839711, 0.008249117992818356, 0.002686240477487445, 0.01998368836939335, 0.001304076286032796], [0.26428043842315674, 0.2365707904100418, 0.05873110517859459, 0.023917241021990776, 
0.05098757892847061, 0.12395869195461273, 0.054154157638549805, 0.007049113046377897, 0.005112920422106981, 0.004564769100397825, 0.01606418751180172, 0.010054518468677998, 0.01402272842824459, 0.042470354586839676, 0.006282190326601267, 0.0019090170972049236, 0.006671431940048933, 0.007042343262583017, 0.004984940402209759, 0.010673577897250652, 0.027995727956295013, 0.008937445469200611, 0.011411036364734173, 0.0021536105778068304], [0.05345158278942108, 0.029563307762145996, 0.7800650596618652, 0.02103608101606369, 0.005545391235500574, 0.007644838187843561, 0.0012224685633555055, 0.0016270468477159739, 0.006666179280728102, 0.004039874766021967, 0.022744901478290558, 0.0012386699672788382, 0.00805720780044794, 0.0015269063878804445, 0.0038571134209632874, 0.0006523392512463033, 0.0017544793663546443, 0.0017500292742624879, 0.0009181297500617802, 0.003111919853836298, 0.0408918596804142, 0.0006848397897556424, 0.001776325749233365, 0.00017354940064251423], [0.12245871871709824, 0.07858289778232574, 0.0770772397518158, 0.3349987864494324, 0.12290870398283005, 0.07057393342256546, 0.0043646348640322685, 0.010306901298463345, 0.01392908114939928, 0.0007755736587569118, 0.005969940219074488, 0.001420541200786829, 0.007088279351592064, 0.0004828513483516872, 0.002146676182746887, 0.00161877297796309, 0.0292426198720932, 0.015044976957142353, 0.020518667995929718, 0.01129020843654871, 0.0335875079035759, 0.026504697278141975, 0.00852759089320898, 0.0005801619845442474], [0.01726684719324112, 0.008679079823195934, 0.014835450798273087, 0.00453580915927887, 0.7043405771255493, 0.05500214919447899, 0.0037752962671220303, 0.002004186389967799, 0.00405652541667223, 0.0011477852240204811, 0.001139958156272769, 0.007282763719558716, 0.029778046533465385, 0.0014912310289219022, 7.196550723165274e-05, 5.165155926079024e-06, 0.0001155960708274506, 0.00019191514002159238, 0.0046233669854700565, 0.03601910546422005, 0.029826274141669273, 0.07014822214841843, 
0.0022310614585876465, 0.0014316028682515025], [0.027339207008481026, 0.025179412215948105, 0.003253272268921137, 0.0015124318888410926, 0.0251074880361557, 0.9038639664649963, 0.0023936342913657427, 0.00030433444771915674, 0.0022544432431459427, 0.00022934160369914025, 5.6447195674991235e-05, 0.0001586985745234415, 0.0016292226500809193, 0.0014684359775856137, 1.393813727190718e-05, 1.42811063597037e-06, 1.2322013390075881e-05, 4.107921267859638e-05, 3.864537211484276e-05, 0.00010672151256585494, 0.0018882190342992544, 0.0018231496214866638, 0.0005442265537567437, 0.0007799722370691597], [0.005412152037024498, 0.006922224536538124, 0.007066512946039438, 0.008068210445344448, 0.004327234346419573, 0.016744956374168396, 0.8758552670478821, 0.055758822709321976, 0.001657930202782154, 0.000293685618089512, 0.0006818107212893665, 3.3297397749265656e-05, 5.071879786555655e-05, 0.00010880979971261695, 0.001484012696892023, 0.00015892376541160047, 2.283380126755219e-05, 1.4966841490604565e-06, 6.140156528999796e-06, 3.038285058210022e-06, 1.1464563613117207e-05, 0.00011566934699658304, 0.007567977532744408, 0.007646896876394749], [0.021646371111273766, 0.01837824657559395, 0.002139544812962413, 0.004589335061609745, 0.0019269874319434166, 0.002638069912791252, 0.017815453931689262, 0.8928102850914001, 0.006769211497157812, 0.011733060702681541, 0.000785737473051995, 0.004963865969330072, 6.541314360219985e-05, 0.001161657739430666, 0.0008510378538630903, 0.006373231764882803, 0.0007045645616017282, 0.000886199006345123, 1.094389062927803e-05, 1.5528747098869644e-05, 7.635233032488031e-07, 9.209982090396807e-05, 4.648610047297552e-05, 0.003595929127186537], [0.00013441420742310584, 0.00015969359083101153, 8.517669812135864e-06, 4.937030553264776e-06, 0.0011023671831935644, 0.00018137051665689796, 0.00013574362674262375, 0.002724642166867852, 0.9917531609535217, 0.0025939710903912783, 0.00010707169712986797, 1.369843118936842e-07, 4.5603451326314826e-06, 
1.2132967697198183e-07, 1.567296749271918e-05, 1.1022683793271426e-05, 0.0010278250556439161, 2.134905344064464e-06, 9.864149888016982e-07, 1.045866770965631e-08, 3.638429291186185e-08, 4.356463190191562e-09, 7.37883465262712e-06, 2.4259699785034172e-05], [6.877488340251148e-05, 0.00025811439263634384, 1.8854294467018917e-05, 2.1974028641125187e-06, 3.176116297254339e-05, 4.43696953880135e-05, 7.928362174425274e-05, 0.00020741675689350814, 0.001797354081645608, 0.9888004064559937, 0.0008571389480493963, 0.002645494183525443, 1.0682230822567362e-05, 7.903027290012687e-05, 1.9200078895664774e-06, 4.9413829401601106e-05, 0.00010077113984152675, 0.004805833101272583, 6.125008803792298e-05, 5.5673564929747954e-05, 7.476501195924357e-07, 6.633876523665094e-07, 8.650370375562488e-08, 2.2822056052973494e-05], [0.0010521382791921496, 0.0005444984417408705, 0.001284222467802465, 0.0007650371408089995, 0.0012671462027356029, 4.261531648808159e-05, 0.00028660643147304654, 0.00016136748308781534, 0.01428184099495411, 0.015650106593966484, 0.9594293236732483, 0.000681935518514365, 0.0027448448818176985, 1.5287613450709614e-06, 0.00013265525922179222, 8.026853720366489e-06, 0.0008160446304827929, 4.0140890632756054e-05, 0.000755243469029665, 1.8344253476243466e-05, 3.451469092397019e-05, 1.2707322127880616e-07, 1.7235172435903223e-06, 3.022856986945044e-08], [0.0010488665429875255, 0.001333513529971242, 0.0003741243854165077, 0.0007395148277282715, 0.0006892427918501198, 9.143326315097511e-05, 4.200782768748468e-06, 0.00015228672418743372, 2.264876638946589e-05, 0.004420239012688398, 0.000526548596099019, 0.9455932974815369, 0.00013953520101495087, 0.006553557235747576, 1.8838338746718364e-06, 0.00032945198472589254, 4.868701125815278e-06, 0.002459716284647584, 5.206693003856344e-06, 0.03353774920105934, 5.804645479656756e-05, 0.001910027815029025, 3.364042697739933e-07, 3.7055731354485033e-06], [7.975361768330913e-06, 2.363329258514568e-06, 7.682772775297053e-06, 
6.801968766012578e-07, 0.00011631300003500655, 3.2475443731527776e-05, 7.056421509332722e-07, 1.1767298957465755e-07, 1.4499973076453898e-05, 1.7008765951231908e-07, 0.00010901885252678767, 6.478536670329049e-05, 0.9977426528930664, 0.000994019559584558, 0.0004589904274325818, 2.0308222659082276e-08, 2.294657633683528e-06, 1.3315435865024483e-08, 8.894991196939372e-07, 2.1378996279963758e-06, 0.0004357675788924098, 2.5214985726051964e-06, 3.819736775767524e-06, 2.6398037089592208e-09], [1.8471150724508334e-06, 3.7026015888841357e-06, 1.6885335298866266e-06, 9.109706411436491e-08, 2.4752267790972837e-07, 3.685387491714209e-05, 2.827289790729992e-06, 1.177266426566348e-06, 2.820258160340927e-08, 1.069553377419652e-06, 2.6172978451199924e-08, 0.00012657114712055773, 9.245926048606634e-05, 0.9988940358161926, 0.0003375323722139001, 0.0001586283469805494, 1.3134288678884332e-07, 3.0948465337132802e-06, 4.385371177306752e-09, 2.9451048249029554e-06, 4.214907676214352e-06, 0.00029032526072114706, 1.6523028989468003e-06, 3.895389454555698e-05], [5.258754754322581e-06, 3.23867857332516e-06, 2.9543269192799926e-05, 3.5898513033316704e-06, 6.75584942655405e-07, 9.065601261681877e-06, 2.8344933525659144e-05, 1.7516231309855357e-05, 2.728852632571943e-05, 1.1336600209688186e-06, 2.8340500648482703e-05, 7.443336471624207e-07, 0.0010910930577665567, 0.0014380853390321136, 0.9922789335250854, 0.0028471359983086586, 0.0015163373900577426, 3.5328982903592987e-06, 1.3515571026800899e-06, 7.439840743472814e-08, 2.7651673008222133e-05, 1.989948259506491e-06, 0.0006198842311277986, 1.9196490029571578e-05], [1.119538865168579e-05, 2.307235263288021e-05, 3.636300971265882e-05, 2.2751028154743835e-05, 4.5309334950616176e-07, 3.998277406935813e-06, 4.890572199656162e-06, 0.000744857476092875, 1.3813310033583548e-05, 5.13486702402588e-05, 8.107561484393955e-07, 8.427551620115992e-06, 1.0824550145116518e-06, 0.0006202057120390236, 0.004621061030775309, 0.9847044944763184, 
0.002934178104624152, 0.004397244192659855, 2.5740087039594073e-06, 6.389308509824332e-06, 5.853814286638226e-07, 9.32031762204133e-05, 2.5568911951268092e-05, 0.0016714625526219606], [5.841677648277255e-06, 5.07684262629482e-06, 2.2887719751452096e-05, 4.822540631721495e-06, 2.1144487618585117e-06, 3.3804937515924394e-08, 2.4526570996386e-07, 8.62873548612697e-07, 0.0005499523249454796, 1.161986801889725e-05, 0.000455866742413491, 1.128335682665238e-07, 0.00012755072384607047, 3.405592963190429e-07, 0.003388429759070277, 0.0015287363203242421, 0.9748088121414185, 0.0010674081277102232, 0.017842909321188927, 5.219066224526614e-06, 8.955624798545614e-05, 3.3482741912393976e-08, 8.116196113405749e-05, 4.839769189857179e-07], [1.6755020624259487e-05, 4.392225673655048e-05, 3.4986929676961154e-05, 4.262140646460466e-05, 7.017093139438657e-06, 1.7890259584874002e-07, 2.532057763460216e-08, 6.364600153574429e-07, 6.093687625252642e-05, 0.00017925928113982081, 2.7772761313826777e-05, 2.1106428903294727e-05, 1.1198187621630495e-06, 5.184489850762475e-07, 6.475768827840511e-07, 0.0014277772279456258, 0.030939454212784767, 0.9422135353088379, 0.022114301100373268, 0.002727423794567585, 0.00012909923680126667, 7.295446721400367e-06, 1.228920154972002e-06, 2.433600684526027e-06], [2.181589479732793e-06, 1.6238254829659127e-06, 2.067474997602403e-05, 0.00010321121226297691, 3.693991311592981e-05, 2.4413893129349162e-08, 8.468433065900172e-08, 2.5220986188401184e-08, 3.195557292201556e-05, 2.319361783520435e-06, 0.003109736368060112, 2.1828861918038456e-06, 2.9561233532149345e-05, 5.31844124296299e-10, 1.7156536102902464e-07, 4.435445077888289e-07, 0.004718251060694456, 0.00041956367203965783, 0.9885767102241516, 0.0022219133097678423, 0.0007176861399784684, 1.9813961671388824e-07, 4.674777756008552e-06, 2.1713411069157473e-09], [8.444245759164914e-05, 3.6771001759916544e-05, 7.573676703032106e-05, 0.0011229687370359898, 0.00025572936283424497, 8.131286449497566e-06, 
2.7958499231317546e-06, 1.0644642856050268e-07, 5.122958555148216e-07, 6.658465736109065e-06, 2.53170383075485e-05, 0.002532642101868987, 4.847822856390849e-05, 1.5087046449480113e-05, 4.0679253743292065e-08, 1.544377846585121e-05, 7.25507561583072e-05, 0.00811013299971819, 0.04768238216638565, 0.9311074614524841, 0.007613586727529764, 0.0011775015154853463, 4.73863337902003e-06, 7.700444939473527e-07], [4.981794518243987e-06, 9.80344111667364e-07, 2.999737080244813e-05, 8.510760380886495e-05, 0.00010461667261552066, 1.2112881449866109e-05, 5.172088890503801e-07, 3.820768590401258e-09, 1.2951622352375125e-07, 1.5797239072412594e-09, 3.046288838959299e-06, 4.2974042457899486e-07, 0.00033381374669261277, 1.245729094989656e-06, 9.411613064003177e-06, 4.1612005929891893e-07, 1.8867896869778633e-05, 3.909334282070631e-06, 0.0008786320104263723, 0.0024447001051157713, 0.9895080327987671, 0.0032732037361711264, 0.003285411512479186, 3.931844787530281e-07], [8.558538411307381e-07, 1.1153298373756115e-06, 2.747181724771508e-06, 8.36808521853527e-06, 3.874949015880702e-06, 4.289072967367247e-05, 5.546216016227845e-06, 2.2278204596659634e-06, 9.838292847064167e-09, 3.00032247935178e-08, 8.999224476724521e-09, 1.7877640857477672e-05, 1.977452939172508e-06, 0.00034532317658886313, 6.6381285250827204e-06, 6.135751027613878e-05, 3.6349999277263123e-07, 2.9357479434111156e-05, 7.54540769776213e-06, 0.0009858054108917713, 0.0006919064908288419, 0.994931161403656, 0.0004621342523023486, 0.002390890382230282], [2.8534618650155608e-06, 1.1421834642533213e-06, 5.30084525962593e-06, 2.322654108866118e-05, 4.9582853534957394e-05, 0.00014702827320434153, 0.00014470863970927894, 2.237041826447239e-06, 1.8750278059087577e-06, 8.261128447983879e-10, 1.649752157106832e-08, 1.5173514666955157e-09, 5.188263457966968e-06, 2.5928047762135975e-06, 0.0009067972423508763, 4.144165723118931e-05, 2.2102363800513558e-05, 9.14494293624557e-08, 3.753979171960964e-06, 6.120451985225372e-07, 
0.0009092639666050673, 0.004974626004695892, 0.9793327450752258, 0.013422789983451366]], [[0.06982850283384323, 0.047530777752399445, 0.16880667209625244, 0.0952795073390007, 0.1934870034456253, 0.06472157686948776, 0.037264592945575714, 0.014529094099998474, 0.03174374997615814, 0.016316501423716545, 0.018550807610154152, 0.008904051966965199, 0.014829829335212708, 0.0180568415671587, 0.014189435169100761, 0.0062448387034237385, 0.021737731993198395, 0.00436438200995326, 0.0037006584461778402, 0.003994928207248449, 0.06661148369312286, 0.02940373308956623, 0.023975299671292305, 0.02592799812555313], [0.05251257121562958, 0.0624125599861145, 0.19100892543792725, 0.06002570316195488, 0.1827705055475235, 0.03356444090604782, 0.023987794294953346, 0.00951133668422699, 0.007550915237516165, 0.006018081214278936, 0.012511726468801498, 0.014964824542403221, 0.041286252439022064, 0.06790807098150253, 0.013660265132784843, 0.004114286974072456, 0.004814955871552229, 0.0005089465412311256, 0.0006267048302106559, 0.005407915450632572, 0.06545941531658173, 0.09322957694530487, 0.03363281860947609, 0.012511416338384151], [0.04643569886684418, 0.008537017740309238, 0.2788406312465668, 0.265417218208313, 0.08672820776700974, 0.19581928849220276, 0.005748601630330086, 0.0029555598739534616, 0.005684139207005501, 0.0019854274578392506, 0.007273447699844837, 0.00042856819345615804, 0.0006881441222503781, 0.00043889021617360413, 0.0010044261580333114, 0.001237325370311737, 0.0010438946774229407, 0.0018595712026581168, 0.0005006994470022619, 0.0017926308792084455, 0.02652982622385025, 0.008536767214536667, 0.044787079095840454, 0.005727006122469902], [0.03856119513511658, 0.0033566029742360115, 0.35973817110061646, 0.03921402618288994, 0.00837684515863657, 0.1631442904472351, 0.0013094960013404489, 0.0006515373825095594, 0.006463656667619944, 0.0006149369291961193, 0.003106177318841219, 0.000632988812867552, 0.0028151636943221092, 0.0012982947519049048, 0.0014429528964683414, 
0.00031215063063427806, 0.00019074398733209819, 0.007025499362498522, 0.0020450029987841845, 0.010511034168303013, 0.2852938175201416, 0.025953639298677444, 0.033507008105516434, 0.004434630274772644], [0.07746192067861557, 0.011746595613658428, 0.2981264889240265, 0.31120291352272034, 0.015642981976270676, 0.10560113191604614, 0.01049036905169487, 0.0026897559873759747, 0.003530768910422921, 0.0010124508989974856, 0.009727511554956436, 0.0010657550301402807, 0.002082303399220109, 0.0004704433085862547, 0.0019473530119284987, 0.0026002125814557076, 0.0009665554971434176, 0.01547937747091055, 0.009404044598340988, 0.014780167490243912, 0.06369857490062714, 0.007459279615432024, 0.02962506003677845, 0.0031880487222224474], [0.02565954066812992, 0.014269438572227955, 0.2951106131076813, 0.23015601933002472, 0.1831451803445816, 0.10148661583662033, 0.008680491708219051, 0.0014404600951820612, 0.00045668776147067547, 0.0009385989978909492, 0.006779874209314585, 0.0014728782698512077, 0.0019137050257995725, 0.0005167390336282551, 0.0004991278983652592, 3.757308149943128e-05, 0.00019608487491495907, 0.00029416041797958314, 0.0013928171247243881, 0.008747344836592674, 0.02949560061097145, 0.05692896619439125, 0.02886761911213398, 0.0015138774178922176], [0.017905594781041145, 0.0076125911436975, 0.18779759109020233, 0.08641231805086136, 0.03581802919507027, 0.42650488018989563, 0.012705475091934204, 0.0092921182513237, 0.012937990948557854, 0.0003505097411107272, 0.005547522567212582, 0.00034645755658857524, 0.0022297664545476437, 0.002172952052205801, 0.003478084225207567, 0.0001880150375654921, 5.522620631381869e-05, 0.00012032857921440154, 6.026693881722167e-05, 0.00044146282016299665, 0.03304554149508476, 0.0066780331544578075, 0.14637607336044312, 0.001923184609040618], [0.004184373654425144, 0.0007618449744768441, 0.0043082707561552525, 0.0025190410669893026, 0.0023258395958691835, 0.7118592858314514, 0.23208287358283997, 0.006352333351969719, 0.006077313330024481, 
0.00014382365043275058, 0.00011829030700027943, 6.173001747811213e-05, 0.00015529866504948586, 0.001543805468827486, 0.001768295420333743, 0.0001731569936964661, 3.073469633818604e-05, 9.15704367798753e-06, 1.804353587431251e-06, 2.2641766008746345e-06, 0.00030466754105873406, 0.00023867149138823152, 0.008162214420735836, 0.016814982518553734], [0.008327632211148739, 0.0056134844198822975, 0.01840902678668499, 0.020393839105963707, 0.021085530519485474, 0.10442636162042618, 0.4213714599609375, 0.03791077435016632, 0.25131070613861084, 0.013322371058166027, 0.01565416157245636, 0.0034621688537299633, 0.005096550565212965, 0.008347363211214542, 0.01793130487203598, 0.016879597678780556, 0.0011287372326478362, 6.156968447612599e-05, 2.1754436602350324e-05, 3.445526544965105e-06, 0.0007992621976882219, 0.00026604547747410834, 0.008753479458391666, 0.01942339725792408], [0.0007096265908330679, 0.0009860263671725988, 0.00022548627748619765, 0.002152689965441823, 0.001529561122879386, 0.003652938874438405, 0.04542045667767525, 0.7415778636932373, 0.13411948084831238, 0.050188276916742325, 0.001721168402582407, 0.0007804285269230604, 0.00017160506104119122, 0.0004970598383806646, 0.0012014751555398107, 0.008106482215225697, 0.0004906103713437915, 0.00020158135157544166, 1.1674997949739918e-05, 1.0433451279823203e-05, 1.971907977349474e-06, 1.4495335562969558e-05, 0.00027510893414728343, 0.005953468382358551], [0.0013239796971902251, 0.0003135635342914611, 0.0007824132335372269, 0.000886492314748466, 0.0005261959158815444, 0.0016392478719353676, 0.0056734830141067505, 0.016503039747476578, 0.4177214801311493, 0.49188297986984253, 0.02117876708507538, 0.003435586579144001, 0.000527115014847368, 0.00023856772168073803, 0.0012368547031655908, 0.011003308929502964, 0.008929668925702572, 0.011474128812551498, 0.0016381569439545274, 5.491988849826157e-05, 6.300410313997418e-05, 3.138446118100546e-05, 0.00010178113006986678, 0.002833783393725753], [0.002739348215982318, 
0.0016544199315831065, 0.0014634126564487815, 0.0036458938848227262, 0.0008229153463616967, 0.002968632383272052, 0.006952605675905943, 0.009279941208660603, 0.025685936212539673, 0.6156167387962341, 0.2240898162126541, 0.06427616626024246, 0.00609254278242588, 0.0025925636291503906, 0.00047946220729500055, 0.0055304039269685745, 0.0005847752909176052, 0.013459859415888786, 0.006475296337157488, 0.004339148290455341, 0.000365548359695822, 0.0004485654935706407, 0.00019922426145058125, 0.00023670800146646798], [0.0025432738475501537, 0.0033999530132859945, 0.0027017260435968637, 0.00854889489710331, 0.0006239929352886975, 0.001147898961789906, 0.0033944938331842422, 0.002925598993897438, 0.008319840766489506, 0.1096666157245636, 0.4507863223552704, 0.2879304885864258, 0.0511290542781353, 0.005255617666989565, 0.0010373682016506791, 0.004684977699071169, 0.00033851913758553565, 0.01105642318725586, 0.020540792495012283, 0.019725706428289413, 0.0028358502313494682, 0.0010712710209190845, 0.00026617516414262354, 6.90682718413882e-05], [0.005074977409094572, 0.004145377315580845, 0.008821612223982811, 0.00799476820975542, 0.0006968178786337376, 0.004143642261624336, 0.0009396873065270483, 0.00033398246159777045, 0.0010238515678793192, 0.0007255342788994312, 0.17517736554145813, 0.17367880046367645, 0.48029106855392456, 0.07872765511274338, 0.01004277914762497, 0.007309580687433481, 6.591003329958767e-05, 0.0012460200814530253, 0.0005579824210144579, 0.008689925074577332, 0.023749038577079773, 0.0027536351699382067, 0.003777718637138605, 3.232255403418094e-05], [0.002507115714251995, 0.0026227154303342104, 0.0016621662070974708, 0.0011877448996528983, 0.00019998363859485835, 0.0009844638407230377, 0.0005453397170640528, 0.0004857653984799981, 0.0007378977024927735, 0.0011990078492090106, 0.01083399634808302, 0.05244157090783119, 0.2858605682849884, 0.4482002258300781, 0.08698553591966629, 0.07197312265634537, 0.000725763791706413, 0.0012863262090831995, 
0.00042716952157206833, 0.0035723226610571146, 0.007571374997496605, 0.008517486043274403, 0.008467103354632854, 0.0010052898433059454], [0.0003042828757315874, 0.00023714530107099563, 8.173799142241478e-05, 2.0917274014209397e-05, 2.6203655579593033e-05, 0.00018126395298168063, 7.166185969254002e-05, 0.00010352871322538704, 0.00046872696839272976, 5.642910036840476e-05, 8.531866478733718e-05, 0.0009422944858670235, 0.019179726019501686, 0.7786266207695007, 0.1553068608045578, 0.03663304075598717, 0.0013821388129144907, 0.000613526557572186, 8.413004252361134e-05, 0.0002828763099387288, 0.002787745324894786, 0.0005608565406873822, 0.0010474632726982236, 0.0009155923617072403], [0.00029349574469961226, 0.00012802016863133758, 4.310147414798848e-05, 4.088474452146329e-05, 1.6311041690642014e-05, 6.0466914874268696e-05, 8.827921556076035e-05, 0.00028652019682340324, 0.0008789292769506574, 4.064848326379433e-05, 9.792039782041684e-05, 0.00018162412743549794, 0.0029009163845330477, 0.04684474691748619, 0.195477694272995, 0.7054079174995422, 0.024196507409214973, 0.01600870117545128, 0.0009241614025086164, 0.00037397656706161797, 0.0008283848874270916, 0.0001364434720017016, 0.0017370101995766163, 0.0030073472298681736], [0.00023234331456478685, 0.00024040906282607466, 4.030882701044902e-05, 1.4421668311115354e-05, 6.774184294044971e-05, 3.5817789466818795e-05, 0.00010690187627915293, 0.0015186353120952845, 0.003345271572470665, 0.0018009671475738287, 0.00033462527790106833, 0.0008979289559647441, 0.0010609535966068506, 0.02319057285785675, 0.05015983060002327, 0.11563415080308914, 0.457534521818161, 0.2933502197265625, 0.03833677992224693, 0.009126587770879269, 0.0004213021893519908, 0.00027257262263447046, 0.00016713846707716584, 0.0021100668236613274], [8.863569746608846e-06, 3.975285380874993e-06, 3.373037316123373e-06, 3.800159220190835e-06, 1.524785943729512e-06, 8.763928462940385e-07, 2.6836104893845913e-07, 1.360571422992507e-05, 0.00019536991021595895, 
4.603497927746503e-06, 6.69869186822325e-05, 1.6918565961532295e-06, 5.906274964218028e-06, 2.748649967543315e-05, 0.00205395114608109, 0.014432420954108238, 0.06693229079246521, 0.865720272064209, 0.047507818788290024, 0.002683489117771387, 0.00021849323820788413, 3.7879403862461913e-06, 9.478507126914337e-05, 1.431516921002185e-05], [1.3301662875164766e-05, 1.5212149264698382e-06, 1.3788434443995357e-05, 2.3724518541712314e-05, 2.5553883915563347e-06, 4.904443358100252e-06, 4.5074017407387146e-07, 8.782916438576649e-07, 1.8099062799592502e-05, 1.8895264020102331e-06, 0.00014080105756875128, 1.025260303322284e-06, 7.63605839892989e-07, 4.186929061233968e-07, 4.963867468177341e-05, 0.0005426175193861127, 0.006971760652959347, 0.8199018239974976, 0.1664741337299347, 0.005497889127582312, 0.00029660528525710106, 2.5528161131660454e-06, 3.6492310755420476e-05, 2.2937042558623943e-06], [0.0006013705860823393, 0.00019342127779964358, 0.0019461017800495028, 0.002520558424293995, 0.0006053475080989301, 8.526329474989325e-05, 1.1855718184961006e-05, 8.458375305053778e-06, 0.00013791692617814988, 3.785705121117644e-05, 0.005223517771810293, 0.000295983103569597, 0.0005285091465339065, 3.0855651857564226e-05, 0.00031572944135405123, 0.0027953439857810736, 0.007113146595656872, 0.18858641386032104, 0.5586214065551758, 0.13490994274616241, 0.08889098465442657, 0.0029161435086280107, 0.0035370425321161747, 8.686440560268238e-05], [0.00027449047775007784, 0.0001868074614321813, 6.297724030446261e-05, 0.0001935393229359761, 4.789324157172814e-05, 5.885682185180485e-06, 1.633204647077946e-06, 6.444460723287193e-06, 9.168356740474337e-08, 2.62381877291773e-06, 2.7330836019245908e-05, 4.6529065002687275e-05, 5.433183105196804e-05, 1.3889693946111947e-05, 6.9250295382516924e-06, 8.488005551043898e-05, 3.138457395834848e-05, 0.003163291374221444, 0.008588247932493687, 0.9730702638626099, 0.00210072030313313, 0.011410929262638092, 0.0005793775781057775, 3.96734758396633e-05], 
[0.00598894665017724, 0.0012959876330569386, 0.002313715871423483, 0.0019350014626979828, 0.0008324611699208617, 0.0006120994803495705, 5.715981751563959e-05, 3.977059532189742e-05, 7.488711162295658e-06, 1.2707518180832267e-05, 7.434988219756633e-05, 0.00013709691120311618, 0.001125905429944396, 0.000931222049985081, 0.0020092769991606474, 0.0031542982906103134, 0.002217684406787157, 0.0070303152315318584, 0.015306399203836918, 0.1539754569530487, 0.19713962078094482, 0.48515215516090393, 0.09739765524864197, 0.021253177896142006], [0.002167830942198634, 0.0007900730124674737, 0.00012336275540292263, 0.00036987854400649667, 0.00019498998881317675, 0.0005081890849396586, 3.820969504886307e-05, 9.103766933549196e-05, 6.885187531224801e-07, 3.341011165503005e-07, 1.2154102932981914e-06, 5.308380423230119e-06, 8.237615111283958e-05, 0.0008778555202297866, 0.00044245406752452254, 0.0015440676361322403, 5.211049210629426e-05, 0.0002178448048653081, 0.00016124591638799757, 0.03507748991250992, 0.01878628507256508, 0.5609797835350037, 0.3364003002643585, 0.04108715057373047]], [[0.11210659891366959, 0.1094602420926094, 0.029657645151019096, 0.12283368408679962, 0.05758844316005707, 0.018804678693413734, 0.008887301199138165, 0.0029878844507038593, 0.09262962639331818, 0.0019643260166049004, 0.017497671768069267, 0.009213495068252087, 0.03050955757498741, 0.04572955518960953, 0.022793157026171684, 0.05416158214211464, 0.11231201142072678, 0.03351454436779022, 0.03286006674170494, 0.006780480034649372, 0.06494121253490448, 0.0019892898853868246, 0.008907457813620567, 0.0018694190075621009], [0.14372654259204865, 0.07852347195148468, 0.03457536920905113, 0.20614081621170044, 0.07536960393190384, 0.06013013422489166, 0.023050803691148758, 0.008499382995069027, 0.013133732602000237, 0.0007512872689403594, 0.010130888782441616, 0.01043106522411108, 0.06547533720731735, 0.047773126512765884, 0.019054651260375977, 0.02096417173743248, 0.023702790960669518, 0.00732032535597682, 
0.03451753780245781, 0.012277604080736637, 0.056267883628606796, 0.015290344133973122, 0.030604982748627663, 0.002288093324750662], [0.0016597781796008348, 0.0013666790910065174, 0.0013430645922198892, 0.7805877923965454, 0.01676570437848568, 0.19169916212558746, 5.648788282996975e-05, 0.00026017430354841053, 0.0035325458738952875, 1.1359796189935878e-05, 0.00025012154947035015, 1.1468234333733562e-05, 8.059140236582607e-05, 2.289242547703907e-05, 3.5074928746325895e-05, 0.0005447774310596287, 0.00012396009697113186, 0.0002890396863222122, 2.4733308237046003e-05, 3.302449840703048e-05, 0.0004722554003819823, 1.643392715777736e-05, 0.0008046840666793287, 8.165535291482229e-06], [0.011587731540203094, 0.00426016328856349, 0.016189729794859886, 0.14167538285255432, 0.005884359125047922, 0.646325945854187, 0.008895566686987877, 0.13523060083389282, 0.009451120160520077, 0.003563845530152321, 0.0022911718115210533, 0.001430783187970519, 0.0018662727670744061, 0.0006179875344969332, 0.0006117084994912148, 0.0020503986161202192, 0.0003010584332514554, 0.0011447438737377524, 0.0010882396018132567, 0.0013915650779381394, 0.0007759058498777449, 0.0010800613090395927, 0.0015585650689899921, 0.0007270254427567124], [0.005359927657991648, 0.0054455106146633625, 0.004779947455972433, 0.4808637797832489, 0.007924734614789486, 0.43500855565071106, 0.0013768794015049934, 0.0012711624149233103, 0.039345305413007736, 4.8078669351525605e-05, 0.0010707819601520896, 0.00014316316810436547, 0.00044942559907212853, 6.41041187918745e-05, 0.00017541772103868425, 0.0005014202324673533, 0.00023121059348341078, 0.002582951681688428, 0.0009620141354389489, 0.00041775457793846726, 0.008697458542883396, 8.920463005779311e-05, 0.002956168260425329, 0.00023510350729338825], [0.059300150722265244, 0.020173363387584686, 0.02706495299935341, 0.13691115379333496, 0.043900083750486374, 0.16161932051181793, 0.0686308965086937, 0.009056207723915577, 0.0006607091636396945, 0.0029334730934351683, 
0.0037218695506453514, 0.011522268876433372, 0.04447116702795029, 0.021741017699241638, 0.004295783583074808, 0.003810680005699396, 0.000893719436135143, 0.00352606107480824, 0.016563210636377335, 0.01759278029203415, 0.012899510562419891, 0.2639794945716858, 0.04232887923717499, 0.02240331657230854], [0.0011302087223157287, 0.001192872878164053, 0.002072356641292572, 0.026111610233783722, 0.002171780215576291, 0.8796381950378418, 0.005243915598839521, 0.06852617114782333, 0.006410577800124884, 0.0019274037331342697, 0.0004270878853276372, 0.00041592889465391636, 0.0002129897038685158, 0.0013502718647941947, 8.904968126444146e-05, 0.0004274570383131504, 1.1890027053595986e-05, 6.875683175167069e-05, 3.976322204835014e-06, 9.845026943366975e-05, 0.00010365075286244974, 0.0004082740633748472, 0.00101556780282408, 0.000941612059250474], [0.008389444090425968, 0.022552628070116043, 0.008838667534291744, 0.023977212607860565, 0.008134297095239162, 0.1439555436372757, 0.3447183072566986, 0.15676754713058472, 0.012094522826373577, 0.010124217718839645, 0.003969606012105942, 0.0025940968189388514, 0.008680588565766811, 0.07339151948690414, 0.04788197949528694, 0.00804087333381176, 0.00032168818870559335, 7.20023235771805e-05, 4.135613198741339e-05, 0.0001317110873060301, 0.001240188954398036, 0.0067410278134047985, 0.04330964386463165, 0.0640314444899559], [0.005235401913523674, 0.02245481312274933, 0.006753782741725445, 0.2941668629646301, 0.010957467369735241, 0.037662066519260406, 0.006194614805281162, 0.04280621185898781, 0.5543623566627502, 0.0007499148487113416, 0.0018414049409329891, 0.000479885027743876, 0.0001386465592077002, 0.0009992168052121997, 0.0012686133850365877, 0.008539356291294098, 0.0008264445350505412, 0.00020838677301071584, 2.1196379748289473e-05, 1.1141854884044733e-05, 0.0010305740870535374, 1.6563233657507226e-05, 0.0019314328674227, 0.0013435868313536048], [0.0007683417643420398, 0.0025086181703954935, 0.0009913695976138115, 
0.0029228327330201864, 0.0009613083093427122, 0.03885659575462341, 0.01051001250743866, 0.31499791145324707, 0.6129688024520874, 0.005426015239208937, 0.0025653657503426075, 0.0003838952980004251, 0.00035340822068974376, 6.105755164753646e-05, 0.00015736719069536775, 0.002383929444476962, 0.0005822464008815587, 0.0006756930961273611, 0.00013831285468768328, 4.274667662684806e-05, 3.721610482898541e-05, 1.3969415704195853e-06, 0.0004266776377335191, 0.0012789166066795588], [0.0014596517430618405, 0.002021635416895151, 0.0009372245403937995, 0.004854278638958931, 0.0084072295576334, 0.004323986358940601, 0.001259509241208434, 0.002199642825871706, 0.8329998850822449, 0.08539790660142899, 0.020994344726204872, 0.010165619663894176, 0.0004262366273906082, 0.00019473450083751231, 5.195022458792664e-05, 0.002600317122414708, 0.005748074036091566, 0.013651564717292786, 0.001622718758881092, 0.00023892773606348783, 0.00031671879696659744, 3.3630610687396256e-06, 3.1821688025956973e-05, 9.267224959330633e-05], [0.018945496529340744, 0.009661580435931683, 0.012440218590199947, 0.01122888270765543, 0.010029763914644718, 0.016396909952163696, 0.03284995257854462, 0.010944054462015629, 0.08572956174612045, 0.07310391217470169, 0.5162109732627869, 0.06870843470096588, 0.028491860255599022, 0.001616650610230863, 0.0022571769077330828, 0.0014708524104207754, 0.003254224080592394, 0.010543339885771275, 0.05556795001029968, 0.011149856261909008, 0.015904828906059265, 0.000741579569876194, 0.0022567452397197485, 0.0004952242015860975], [0.06563153117895126, 0.023367082700133324, 0.00955134816467762, 0.019135452806949615, 0.004252164624631405, 0.005037310067564249, 0.002108224667608738, 0.00545408995822072, 0.0047034816816449165, 0.007222811691462994, 0.045223478227853775, 0.6366342306137085, 0.03694848716259003, 0.031271494925022125, 0.0005227451911196113, 0.003942788112908602, 0.00021572483819909394, 0.0022620386444032192, 0.0018884815508499742, 0.06990637630224228, 
0.012847675941884518, 0.01067858375608921, 0.0008900627726688981, 0.00030427187448367476], [0.029317112639546394, 0.019884422421455383, 0.008024568669497967, 0.011528092436492443, 0.008787373080849648, 0.01185574196279049, 0.0029384582303464413, 0.0007243757718242705, 0.0024137627333402634, 4.3325770093360916e-05, 0.014090019278228283, 0.014185430482029915, 0.6359342336654663, 0.14753000438213348, 0.04749198630452156, 0.0016582019161432981, 0.00046825711615383625, 8.059364336077124e-05, 0.0002180199371650815, 0.0008423569961450994, 0.03622577711939812, 0.0013526829425245523, 0.004393315874040127, 1.1854370313812979e-05], [0.019265593960881233, 0.020731158554553986, 0.0032441976945847273, 0.005304524675011635, 0.002698901342228055, 0.003407110460102558, 0.0016924272058531642, 0.0047619701363146305, 0.0008694310672581196, 0.000124023063108325, 0.0005282168858684599, 0.0051174648106098175, 0.017725596204400063, 0.7085875272750854, 0.08818656951189041, 0.10171286016702652, 0.0013826750218868256, 0.00016813141701277345, 2.1767524231108837e-05, 0.0009071537060663104, 0.0015998415183275938, 0.004705728497356176, 0.0066665345802903175, 0.0005904808640480042], [0.001236245036125183, 0.0026752434205263853, 0.0008120179991237819, 0.0003904334153048694, 0.00018799876852426678, 0.00011152461229357868, 0.001849901513196528, 0.0008587975171394646, 0.0003994828730355948, 7.00926102581434e-05, 0.00015626111417077482, 0.00023824589152354747, 0.009088386781513691, 0.03923969343304634, 0.8824511766433716, 0.05132818967103958, 0.004445299040526152, 6.71211673761718e-05, 7.259557605721056e-05, 1.0914928679994773e-05, 0.00022551720030605793, 0.00040175768663175404, 0.0022857878357172012, 0.0013973440509289503], [0.0028925908263772726, 0.008893905207514763, 0.003338613547384739, 0.004438496194779873, 0.0014522225828841329, 0.0008966239402070642, 0.0008078096434473991, 0.001459181890822947, 0.19884605705738068, 0.00011425981210777536, 0.0004889255505986512, 0.0004828167147934437, 
0.001026070094667375, 0.005118540953844786, 0.09847823530435562, 0.4860379099845886, 0.15640483796596527, 0.021383292973041534, 0.0012499531731009483, 8.975568925961852e-05, 0.002312860218808055, 4.1663912270450965e-05, 0.0013815389247611165, 0.0023637712001800537], [0.00030356604838743806, 0.00039881683187559247, 0.0007451035780832171, 0.00010215460497420281, 0.0001801208418328315, 1.0245154044241644e-05, 8.896116924006492e-05, 0.00013889939873479307, 0.002113821217790246, 0.00022188237926457077, 0.0003454814723227173, 0.00025325475144200027, 0.0022603487595915794, 0.00026894398615695536, 0.07457565516233444, 0.06141502782702446, 0.624470591545105, 0.11118900775909424, 0.1146218553185463, 0.0015366157749667764, 0.002312326803803444, 0.00021519805886782706, 0.0004701958387158811, 0.0017619200516492128], [7.396899309242144e-05, 7.737068517599255e-05, 0.00039320229552686214, 0.00010451146226841956, 0.00023755924485158175, 3.9335736801149324e-05, 5.948398666077992e-06, 9.038073767442256e-05, 0.008078230544924736, 0.001449049566872418, 0.0007713070372119546, 0.0005681279581040144, 2.3558388420497067e-05, 1.3029162801103666e-05, 0.00011188196367584169, 0.006169064901769161, 0.057435911148786545, 0.8756561279296875, 0.03263581171631813, 0.014382172375917435, 0.0014945761067792773, 6.0145659517729655e-05, 2.3095988581189886e-05, 0.00010561108501860872], [0.00021136915893293917, 9.381605923408642e-05, 0.000762521056458354, 0.0005290501867420971, 0.001302280928939581, 0.0001614733482711017, 2.1472937078215182e-05, 9.480038897891063e-06, 0.0018748634029179811, 0.0007398871821351349, 0.013031147420406342, 0.0013075076276436448, 0.002166719874367118, 4.118288870813558e-06, 0.0001452979486202821, 0.00011289019312243909, 0.01094029564410448, 0.11608105897903442, 0.7523279786109924, 0.05323183909058571, 0.044008202850818634, 0.000671790970955044, 0.0002511481288820505, 1.373337727272883e-05], [0.0014528672909364104, 0.0003863045130856335, 0.0016698027029633522, 
0.030950861051678658, 0.003130223136395216, 0.0005042662960477173, 9.917373972712085e-06, 4.663924755732296e-06, 0.002266493858769536, 6.171583208924858e-06, 0.0010333003010600805, 0.0006088506197556853, 0.00014001225645188242, 1.1028834705939516e-05, 5.441097073344281e-06, 0.00011631692905211821, 0.00025952563737519085, 0.009062621742486954, 0.013685043901205063, 0.10739163309335709, 0.8247995972633362, 0.0018183779902756214, 0.0006749466410838068, 1.1653560250124428e-05], [0.0009739330853335559, 0.00018723774701356888, 0.0011757576139643788, 0.0020995615050196648, 0.00020407710690051317, 0.002499576425179839, 0.00011863355030072853, 0.00012899009743705392, 7.590675522806123e-06, 3.1908629694044066e-07, 0.00010723240120569244, 6.387459143297747e-05, 0.0011982249561697245, 2.721256169024855e-05, 5.8084311604034156e-05, 4.5436205255100504e-05, 1.0949331226584036e-05, 0.0005340587231330574, 0.010604706592857838, 0.7068493366241455, 0.18702243268489838, 0.05922885239124298, 0.026636898517608643, 0.00021693832240998745], [0.008346728049218655, 0.005515708588063717, 0.005593506153672934, 0.08802006393671036, 0.021083038300275803, 0.018406039103865623, 0.0027556486893445253, 0.0007178249070420861, 0.0010987733257934451, 9.412783583684359e-06, 6.742379628121853e-05, 0.00033092923695221543, 0.0014523975551128387, 0.006281823385506868, 0.0015892733354121447, 0.011497847735881805, 0.001139632542617619, 0.0026032417081296444, 0.0027769196312874556, 0.04391783848404884, 0.21056514978408813, 0.4104138910770416, 0.13474629819393158, 0.021070528775453568], [0.00016367394709959626, 0.0001716834813123569, 0.00043667349382303655, 0.0012839952250942588, 0.00018355487554799765, 0.0011779372580349445, 0.0027564798947423697, 0.0006578153697773814, 2.145608414139133e-05, 4.497566123973229e-07, 1.990234068216523e-06, 7.84037979428831e-07, 6.195234163897112e-05, 0.00017491109611000866, 0.002783700590953231, 0.0007113351020962, 3.091002508881502e-05, 9.397780559083913e-06, 
5.346348189050332e-05, 0.00020538947137538344, 0.004780973773449659, 0.07815276086330414, 0.7497957944869995, 0.15638290345668793]], [[0.007902096956968307, 0.01990666799247265, 0.04123903065919876, 0.0810999944806099, 0.010922491550445557, 0.013305292464792728, 0.04182541370391846, 0.017402026802301407, 0.051778413355350494, 0.28341805934906006, 0.025267062708735466, 0.11523337662220001, 0.08325020223855972, 0.05902991443872452, 0.03536194935441017, 0.05348360538482666, 0.004668163601309061, 0.00312627456150949, 0.0006763480487279594, 0.0011455640196800232, 0.0021604716312140226, 0.02286773920059204, 0.004036224912852049, 0.020893573760986328], [0.0026239375583827496, 0.021566763520240784, 0.02492276392877102, 0.11303319782018661, 0.02572150155901909, 0.02014530636370182, 0.05685357376933098, 0.010161913931369781, 0.018236853182315826, 0.22312819957733154, 0.008577130734920502, 0.09094535559415817, 0.03392842039465904, 0.040367648005485535, 0.026283342391252518, 0.05279112607240677, 0.028212636709213257, 0.007643147837370634, 0.00144764909055084, 0.0006419757264666259, 0.0014875836204737425, 0.04416332393884659, 0.006246172823011875, 0.14087051153182983], [0.02779172547161579, 0.0693679228425026, 0.011586747132241726, 0.05709259584546089, 0.07445548474788666, 0.03633669763803482, 0.11972513794898987, 0.037622611969709396, 0.03683033213019371, 0.04554499313235283, 0.0011240368476137519, 0.01400129497051239, 0.006067576818168163, 0.00957026518881321, 0.0016503460938110948, 0.014757872559130192, 0.007952351123094559, 0.0011416039196774364, 0.0006853991653770208, 0.0021883537992835045, 0.007079773116856813, 0.0645739883184433, 0.02304365672171116, 0.3298093378543854], [0.026003772392868996, 0.032680902630090714, 0.0813373476266861, 0.06062421202659607, 0.01813720539212227, 0.08750908821821213, 0.2276049256324768, 0.19538037478923798, 0.06319401413202286, 0.02867601253092289, 0.011139551177620888, 0.010535269975662231, 0.004592108074575663, 0.004129213746637106, 
0.006299581378698349, 0.005152752622961998, 0.0019513973966240883, 0.0035784731153398752, 0.0004972332390025258, 0.0047720312140882015, 0.009073419496417046, 0.009616567753255367, 0.027116741985082626, 0.08039779961109161], [0.010852617211639881, 0.014119317755103111, 0.03916626051068306, 0.10160759091377258, 0.006030367687344551, 0.04032624140381813, 0.05106769874691963, 0.05913759395480156, 0.2538871169090271, 0.18658334016799927, 0.017986301332712173, 0.021969472989439964, 0.010338523425161839, 0.001020007417537272, 0.002473189728334546, 0.006651073228567839, 0.00026546549634076655, 0.0008628456853330135, 0.00025948273832909763, 0.001339095993898809, 0.008673292584717274, 0.07774285227060318, 0.01940041221678257, 0.06823982298374176], [0.019593240693211555, 0.016034433618187904, 0.03099525161087513, 0.05229698121547699, 0.01205168105661869, 0.03521648421883583, 0.298452764749527, 0.1998118758201599, 0.034985609352588654, 0.02318994142115116, 0.003375233383849263, 0.0030434951186180115, 0.001777180121280253, 0.00317023484967649, 0.008926774375140667, 0.011105096898972988, 0.0008566661854274571, 0.00046177522744983435, 5.998697815812193e-05, 0.0004986059502698481, 0.0030833922792226076, 0.016968445852398872, 0.03803226351737976, 0.1860126554965973], [0.0014251082902774215, 0.0007177750812843442, 0.0012746761785820127, 0.010323661379516125, 0.002439674222841859, 0.0031771576032042503, 0.004194212146103382, 0.028121264651417732, 0.6769945025444031, 0.21725238859653473, 0.002990015083923936, 0.007287519983947277, 0.0021302606910467148, 0.0005445749266073108, 0.0004762088065035641, 0.011273388750851154, 0.0004536752530839294, 7.504343375330791e-05, 2.2124897895992035e-06, 6.589821168745402e-06, 7.737759005976841e-05, 0.0005722618079744279, 0.0007054962334223092, 0.027484899386763573], [0.0015878668054938316, 0.000791181402746588, 0.0016454479191452265, 0.012123005464673042, 0.0008766588289290667, 0.0031846975907683372, 0.030203813686966896, 0.02659197524189949, 
0.19181153178215027, 0.6964216828346252, 0.01622675359249115, 0.005803859326988459, 0.0011736020678654313, 0.0002762911608442664, 0.0002545801398809999, 0.006495936773717403, 0.0005294146249070764, 0.001953256782144308, 0.00012505475024227053, 4.0461382013745606e-05, 3.528888919390738e-05, 6.372587813530117e-05, 7.282687874976546e-05, 0.0017110556364059448], [0.0011273091658949852, 0.0002707928360905498, 0.0003464639594312757, 0.0007964784745126963, 0.0003090773243457079, 0.001784098451025784, 0.0006565973162651062, 0.0023144828155636787, 0.23406489193439484, 0.1759435534477234, 0.5403717756271362, 0.026412423700094223, 0.005946754477918148, 9.384616714669392e-05, 7.209049363154918e-05, 0.0001444575609639287, 0.00020764843793585896, 0.003989268559962511, 0.0030697069596499205, 0.0013157364446669817, 0.0007338931318372488, 1.8436807295074686e-05, 6.259099791350309e-06, 3.9944779928191565e-06], [0.0018641584319993854, 0.00024170611868612468, 0.0011626057093963027, 0.0002689410757739097, 7.361490133916959e-05, 0.0010056975297629833, 6.372838106472045e-05, 0.0012341709807515144, 0.15874774754047394, 0.005590502638369799, 0.7700824737548828, 0.02079339139163494, 0.029840704053640366, 0.00017549932817928493, 0.0004335437261033803, 0.00017100379045587033, 3.9871109038358554e-05, 0.0008896571234799922, 0.0015109573723748326, 0.0035144684370607138, 0.002272827783599496, 6.948385362193221e-06, 1.54709105117945e-05, 2.618666883336118e-07], [0.021999867632985115, 0.009047414176166058, 0.0074811349622905254, 0.0040058717131614685, 0.002883730921894312, 0.008372887037694454, 0.005191359668970108, 0.0059251380153000355, 0.012577536515891552, 0.010476638562977314, 0.03613714873790741, 0.2228340357542038, 0.528896152973175, 0.051740482449531555, 0.007585105951875448, 0.0011946037411689758, 0.00026741132023744285, 0.0007760687149129808, 0.006620144471526146, 0.02355767786502838, 0.02395395189523697, 0.00764746218919754, 0.0006646318361163139, 0.00016349481302313507], 
[0.0022651830222457647, 0.005122258793562651, 0.017445940524339676, 0.0012055638944730163, 0.00021989941888023168, 0.0024633239954710007, 0.0010196546791121364, 0.005069061182439327, 0.003622362855821848, 0.000420404045144096, 0.04087960720062256, 0.03525672107934952, 0.31970277428627014, 0.19327032566070557, 0.3505646884441376, 0.0025507966056466103, 7.985067350091413e-05, 0.00022034216090105474, 0.000419201998738572, 0.0032921701204031706, 0.011159634217619896, 0.0013340875739231706, 0.002314747544005513, 0.00010139494406757876], [0.0005109877674840391, 0.002579138148576021, 0.0028971827123314142, 0.0003788693284150213, 0.00022614281624555588, 0.0003780802944675088, 0.0005706996889784932, 0.0025830818340182304, 0.0002858277002815157, 3.3252967114094645e-05, 0.0005883702542632818, 0.0027806442230939865, 0.02930573560297489, 0.19958899915218353, 0.7357932925224304, 0.010387699119746685, 0.0016452295240014791, 0.00016251714259851724, 7.721222937107086e-05, 0.0001829194079618901, 0.0010350138181820512, 0.0005694123101420701, 0.005457784049212933, 0.0019818823784589767], [0.00023943124688230455, 0.0009416408720426261, 0.0005354899913072586, 6.985344953136519e-05, 1.894338129204698e-05, 5.2490235248114914e-05, 0.00017770093108993024, 0.004593254532665014, 0.0007986443815752864, 2.0213141397107393e-05, 0.00022060364426579326, 0.00014304525393527, 0.0016472677234560251, 0.019579119980335236, 0.8270232081413269, 0.1228145956993103, 0.016282420605421066, 0.002370629459619522, 0.0004196744994260371, 3.013369678228628e-05, 4.131707828491926e-05, 1.1256038305873517e-05, 0.0014715607976540923, 0.0004975736374035478], [0.0039260005578398705, 0.009121245704591274, 0.0013911144342273474, 0.00041003487422131, 0.00027567637152969837, 0.00021318145445547998, 0.00025623722467571497, 0.010191616602241993, 0.005632307846099138, 0.0005708604585379362, 0.000313700147671625, 0.0005863130791112781, 0.000776322849560529, 0.0047126589342951775, 0.042543038725852966, 0.23105590045452118, 
0.4559255540370941, 0.1642817258834839, 0.054771989583969116, 0.0020587502513080835, 0.0003643772506620735, 0.00010004829528043047, 0.002157577546313405, 0.008363707922399044], [0.0006257764180190861, 0.000652134302072227, 0.002610093681141734, 0.0001005811573122628, 3.05746725643985e-05, 4.1411141864955425e-05, 8.486495062243193e-07, 0.000828749849461019, 0.001589562394656241, 0.00014477610238827765, 0.0009852636139839888, 8.634676487417892e-05, 6.166713137645274e-05, 0.00015188503311946988, 0.010676780715584755, 0.011480547487735748, 0.11527349799871445, 0.7653271555900574, 0.06027122214436531, 0.027247322723269463, 0.001062604133039713, 2.2410500605474226e-05, 0.0004400322213768959, 0.00028866095817647874], [0.0007873913273215294, 0.0006777039379812777, 0.004021264147013426, 0.0004928400740027428, 7.516472396673635e-05, 0.00010543345706537366, 1.4609478284910438e-06, 9.720639354782179e-05, 0.002181000541895628, 0.0007477799081243575, 0.005036008544266224, 0.00034459077869541943, 0.00018216970784123987, 1.036264166032197e-05, 0.0004896454629488289, 0.0010136018972843885, 0.005566942971199751, 0.26001864671707153, 0.5115607380867004, 0.18207715451717377, 0.021794067695736885, 0.0019981812220066786, 0.0006607365212403238, 5.991779835312627e-05], [0.0003836602554656565, 0.0002817972854245454, 0.0019228399032726884, 0.00020795843738596886, 0.00024307820422109216, 0.00022006155631970614, 1.57022566327214e-06, 2.3020316803012975e-05, 1.9983390302513726e-05, 9.850451533566229e-06, 0.0007776744314469397, 2.007390867220238e-05, 1.869460174930282e-05, 1.559132033435162e-05, 0.00032083276892080903, 6.201523501658812e-05, 0.0020015472546219826, 0.04510603845119476, 0.1354316622018814, 0.6587300896644592, 0.13881631195545197, 0.00898696668446064, 0.00634722737595439, 5.137166226631962e-05], [0.0002016293874476105, 0.00011788296978920698, 0.0011097942478954792, 0.00026373917353339493, 0.0009548653033562005, 0.00033073918893933296, 1.5343579207183211e-06, 6.614334779442288e-06, 
6.472702352766646e-06, 9.503728506388143e-06, 0.00020392374426592141, 4.414607974467799e-05, 5.208038419368677e-05, 3.1917417800286785e-05, 0.00013711712381336838, 1.75261029653484e-05, 0.0002563856542110443, 0.0009034885442815721, 0.005577882286161184, 0.22034955024719238, 0.42618682980537415, 0.31259527802467346, 0.02995217591524124, 0.0006889693322591484], [0.00020410084107425064, 0.00013513212616089731, 0.0017884453991428018, 0.0002496024826541543, 0.00019614002667367458, 0.0005716820596717298, 3.463156826910563e-05, 4.682890357798897e-05, 1.75991397100006e-06, 3.6799303870793665e-06, 8.31659126561135e-05, 1.4014573935128283e-05, 4.944141983287409e-05, 0.00011556391837075353, 0.000750205887015909, 2.5238481612177566e-05, 1.844026701292023e-05, 0.0001915038301376626, 0.0016061562346294522, 0.05523619428277016, 0.11410069465637207, 0.6962218880653381, 0.12650011479854584, 0.0018555383430793881], [0.004990258254110813, 0.002234508516266942, 0.0028041426558047533, 0.0004147088620811701, 0.0015243046218529344, 0.00525407399982214, 0.0005817884230054915, 0.0015036823460832238, 0.00022643222473561764, 2.5941759304259904e-05, 0.00011737887689378113, 5.913437780691311e-05, 0.0001596727961441502, 0.0004819650494027883, 0.0015743494732305408, 0.00018163237837143242, 0.00023541330301668495, 0.0006425128085538745, 0.0027078287675976753, 0.03788909316062927, 0.16464996337890625, 0.34949198365211487, 0.3860260844230652, 0.036223094910383224], [0.0012059375876560807, 0.0006100065656937659, 0.0013567678397521377, 9.172241698252037e-05, 0.00020367874822113663, 0.0020977999083697796, 0.00029919869848527014, 0.004929620772600174, 0.0002642322506289929, 6.069767550798133e-06, 4.0006103517953306e-05, 4.3693635234376416e-06, 1.3039945770287886e-05, 0.00014087023737374693, 0.003017381066456437, 0.0005390614969655871, 0.00015846006863284856, 0.0002195223787566647, 0.00016723251610528678, 0.0014966214075684547, 0.012587981298565865, 0.023419518023729324, 0.8384620547294617, 
0.10866881906986237], [0.003540937090292573, 0.0013197273947298527, 0.0013353590620681643, 0.0007551646558567882, 0.0004196655936539173, 0.002167940139770508, 0.0024496624246239662, 0.015278695151209831, 0.0025414975825697184, 0.002509078476577997, 1.9533419617800973e-05, 4.470361818675883e-05, 1.3749349818681367e-05, 6.997207674430683e-05, 0.00017662928439676762, 0.0013364834012463689, 0.0003191700379829854, 0.0009122394840233028, 0.0004087313136551529, 0.0006127232336439192, 0.0008581579895690084, 0.0348668172955513, 0.023729000240564346, 0.9043143391609192], [0.021626470610499382, 0.01107238233089447, 0.023907842114567757, 0.0031793660018593073, 0.001926317811012268, 0.00981943029910326, 0.0034518043976277113, 0.08905288577079773, 0.07137927412986755, 0.016826055943965912, 0.0009059783187694848, 0.00014498508244287223, 3.3999891456915066e-05, 0.0001059738642652519, 0.0007105529657565057, 0.004298435989767313, 0.002776443725451827, 0.011389532126486301, 0.0018292444292455912, 0.003563710255548358, 0.003844513325020671, 0.0085079250857234, 0.052232302725315094, 0.6574146151542664]], [[0.07206687331199646, 0.041268110275268555, 0.01935713365674019, 0.03928283229470253, 0.04825347661972046, 0.05296003445982933, 0.05066673457622528, 0.04379667341709137, 0.020773552358150482, 0.04395347461104393, 0.047238271683454514, 0.033678531646728516, 0.04139160364866257, 0.014685450121760368, 0.010426837019622326, 0.022563613951206207, 0.028004847466945648, 0.033147893846035004, 0.0541716106235981, 0.04085066169500351, 0.028287425637245178, 0.06274929642677307, 0.08469128608703613, 0.06573380529880524], [0.16593408584594727, 0.06883805990219116, 0.01520522590726614, 0.024856096133589745, 0.04997219517827034, 0.04446110874414444, 0.0459793321788311, 0.03136298432946205, 0.02110869437456131, 0.10408248752355576, 0.038705483078956604, 0.03253541141748428, 0.03449471294879913, 0.01795712485909462, 0.004595793783664703, 0.015193858183920383, 0.02585374377667904, 0.027653934434056282, 
0.023815017193555832, 0.02247808501124382, 0.01802200824022293, 0.06291646510362625, 0.04700641334056854, 0.056971676647663116], [0.013992362655699253, 0.023142609745264053, 0.01649564504623413, 0.011218922212719917, 0.04320991411805153, 0.035880595445632935, 0.022619500756263733, 0.0093381367623806, 0.05106207728385925, 0.02285773493349552, 0.005997610278427601, 0.024796009063720703, 0.04325738176703453, 0.03452913090586662, 0.01803615503013134, 0.026815801858901978, 0.04908767342567444, 0.06960485875606537, 0.06359932571649551, 0.027967611327767372, 0.08837952464818954, 0.14794890582561493, 0.024168211966753006, 0.12599435448646545], [0.004535824526101351, 0.0016959001077339053, 0.10482797771692276, 0.0012912375386804342, 0.017514687031507492, 0.051416102796792984, 0.03247040882706642, 0.048493217676877975, 0.07898509502410889, 0.06569118797779083, 0.04473135247826576, 0.046614862978458405, 0.011929157190024853, 0.09989877045154572, 0.28137293457984924, 0.009505846537649632, 0.017497379332780838, 0.007718438282608986, 0.007687046192586422, 0.0058504813350737095, 0.029082991182804108, 0.012160963378846645, 0.012335223145782948, 0.006692970637232065], [0.028859464451670647, 0.023376377299427986, 0.06135249137878418, 0.052240390330553055, 0.04170066490769386, 0.0533471442759037, 0.03327919542789459, 0.04250817000865936, 0.030795006081461906, 0.024201232939958572, 0.028169719502329826, 0.02147003263235092, 0.025228125974535942, 0.03325198218226433, 0.07883195579051971, 0.03519414737820625, 0.05103178694844246, 0.0387786328792572, 0.034707456827163696, 0.036663901060819626, 0.04611647129058838, 0.057896681129932404, 0.06588992476463318, 0.055109020322561264], [0.010565096512436867, 0.013678733259439468, 0.006648355629295111, 0.8614897131919861, 0.00708598829805851, 0.008687077090144157, 0.007984668016433716, 0.017959799617528915, 0.006312189158052206, 0.0015221545472741127, 0.011619152501225471, 0.003645417047664523, 0.004991119261831045, 0.002146966988220811, 
0.002189525170251727, 0.004689438734203577, 0.005357585847377777, 0.004337830003350973, 0.0013624663697555661, 0.0034962743520736694, 0.0010953275486826897, 0.0008427583961747587, 0.009930855594575405, 0.0023615711834281683], [0.07218927890062332, 0.059596456587314606, 0.10613672435283661, 0.022205833345651627, 0.039227090775966644, 0.06679456681013107, 0.029149645939469337, 0.020322399213910103, 0.03732537850737572, 0.023672014474868774, 0.048506833612918854, 0.012872420251369476, 0.016636792570352554, 0.017413534224033356, 0.051366716623306274, 0.013553260825574398, 0.05330822244286537, 0.068462073802948, 0.05812760442495346, 0.02274804189801216, 0.04672745242714882, 0.026970600709319115, 0.05983683839440346, 0.026850100606679916], [0.03261418640613556, 0.01937468722462654, 0.02953161671757698, 0.36130180954933167, 0.013890287838876247, 0.10718228667974472, 0.046079982072114944, 0.01565345749258995, 0.008676198311150074, 0.0027409535832703114, 0.013236177153885365, 0.008082005195319653, 0.008121752180159092, 0.0034543946385383606, 0.010758091695606709, 0.03478525951504707, 0.0064580487087368965, 0.03086504340171814, 0.03837352991104126, 0.03114420175552368, 0.02913726679980755, 0.020122652873396873, 0.07690759003162384, 0.051508449018001556], [0.05333467945456505, 0.1050913855433464, 0.014676114544272423, 0.12424155324697495, 0.05241169035434723, 0.05861905217170715, 0.08392475545406342, 0.052505236119031906, 0.05544796958565712, 0.028225865215063095, 0.023439669981598854, 0.026658035814762115, 0.055511750280857086, 0.01692933589220047, 0.007253835443407297, 0.013897066935896873, 0.019701750949025154, 0.018899090588092804, 0.02517560124397278, 0.020665772259235382, 0.029558027163147926, 0.04372088611125946, 0.0332268662750721, 0.036883965134620667], [0.008757124654948711, 0.0031453229021281004, 0.14314378798007965, 0.009299489669501781, 0.03311162441968918, 0.07635083049535751, 0.056163717061281204, 0.10737992823123932, 0.030598346143960953, 0.07229650020599365, 
0.06035096198320389, 0.05640867352485657, 0.02476734295487404, 0.04754040762782097, 0.18818533420562744, 0.007101323455572128, 0.01193174533545971, 0.0013223568676039577, 0.004452615976333618, 0.005263670813292265, 0.009286300279200077, 0.013420728035271168, 0.02100509963929653, 0.008716799318790436], [0.04232185333967209, 0.025210710242390633, 0.04387505725026131, 0.017552165314555168, 0.05422698333859444, 0.019751323387026787, 0.04879128932952881, 0.020207375288009644, 0.01715664751827717, 0.028347861021757126, 0.016539746895432472, 0.02018887922167778, 0.04506273940205574, 0.021714655682444572, 0.03879489004611969, 0.04387471079826355, 0.033946141600608826, 0.014266378246247768, 0.0370560847222805, 0.022607937455177307, 0.024006037041544914, 0.08243286609649658, 0.07650674134492874, 0.20556092262268066], [0.008769345469772816, 0.00777095602825284, 0.14663700759410858, 0.008437642827630043, 0.025453142821788788, 0.023850928992033005, 0.04161386936903, 0.13062725961208344, 0.05281718820333481, 0.07978320121765137, 0.09219550341367722, 0.02622242644429207, 0.01497873105108738, 0.04146804288029671, 0.2132415771484375, 0.019051704555749893, 0.028374575078487396, 0.0021882348228245974, 0.0021545253694057465, 0.0018545157508924603, 0.0027870861813426018, 0.002533185528591275, 0.01846720464527607, 0.008722112514078617], [0.015278278850018978, 0.021326692774891853, 0.13019947707653046, 0.006852725520730019, 0.01916978508234024, 0.012831142172217369, 0.017712760716676712, 0.07288341969251633, 0.10041625052690506, 0.13648246228694916, 0.09145727753639221, 0.03428319841623306, 0.0258010383695364, 0.049115993082523346, 0.16828645765781403, 0.016465533524751663, 0.039924487471580505, 0.008218127302825451, 0.005006757099181414, 0.004047940019518137, 0.004437544383108616, 0.0026946510188281536, 0.009144478477537632, 0.007963546551764011], [0.026521878316998482, 0.023742416873574257, 0.09512131661176682, 0.027700239792466164, 0.008510757237672806, 0.02860337123274803, 
0.03307928889989853, 0.09282150119543076, 0.1239289864897728, 0.22158406674861908, 0.11558422446250916, 0.07609410583972931, 0.026204004883766174, 0.02737300656735897, 0.04228707775473595, 0.006202726624906063, 0.008223241195082664, 0.005743545945733786, 0.0021544615738093853, 0.0024177853483706713, 0.0017061237012967467, 0.0005002174293622375, 0.002036633901298046, 0.001859059790149331], [0.01081791054457426, 0.034649480134248734, 0.033030442893505096, 0.02376542054116726, 0.012876452878117561, 0.04027150943875313, 0.046928685158491135, 0.025877492502331734, 0.22562415897846222, 0.09752530604600906, 0.029077613726258278, 0.13059119880199432, 0.16887779533863068, 0.018786801025271416, 0.019295545294880867, 0.003824261948466301, 0.006639827974140644, 0.02314215525984764, 0.016167649999260902, 0.006188057828694582, 0.015128974802792072, 0.006178105715662241, 0.0010877702152356505, 0.0036472887732088566], [0.0052444953471422195, 0.005534951575100422, 0.04726850986480713, 0.000992775079794228, 0.007817420177161694, 0.02604481391608715, 0.019439352676272392, 0.019130634143948555, 0.1981857419013977, 0.15689238905906677, 0.06843715161085129, 0.10985550284385681, 0.058091968297958374, 0.04463580623269081, 0.11522946506738663, 0.0026194232050329447, 0.007180625572800636, 0.016161540523171425, 0.01583460532128811, 0.009032439440488815, 0.04377429932355881, 0.013196496292948723, 0.0047702970914542675, 0.004629223607480526], [0.0057669817470014095, 0.005524106789380312, 0.06509105116128922, 0.003985232673585415, 0.006477026734501123, 0.046724434942007065, 0.043009065091609955, 0.030668945983052254, 0.0518534816801548, 0.05712824687361717, 0.03451447933912277, 0.0926574245095253, 0.10384081304073334, 0.08760513365268707, 0.29093119502067566, 0.003994195256382227, 0.004683345556259155, 0.008381127379834652, 0.010845448821783066, 0.008450678549706936, 0.015615882351994514, 0.016985177993774414, 0.0030485123861581087, 0.0022180271334946156], [0.005388484802097082, 
0.009102893061935902, 0.0247234795242548, 0.002978609874844551, 0.016956109553575516, 0.16305941343307495, 0.05398041382431984, 0.03257771208882332, 0.07749257981777191, 0.05317515879869461, 0.022666776552796364, 0.08597023040056229, 0.11169717460870743, 0.13652853667736053, 0.12696890532970428, 0.005639808718115091, 0.013704154640436172, 0.012686917558312416, 0.0044979313388466835, 0.002508455188944936, 0.00792353693395853, 0.016892118379473686, 0.0057340944185853004, 0.007146451622247696], [0.006529662758111954, 0.00953720510005951, 0.03386957570910454, 0.0004614427452906966, 0.003443910740315914, 0.027676725760102272, 0.010901895351707935, 0.007606159895658493, 0.02492978796362877, 0.033890437334775925, 0.015337917022407055, 0.020819727331399918, 0.05179866775870323, 0.10838470607995987, 0.5557618141174316, 0.009797343984246254, 0.018584255129098892, 0.02397838979959488, 0.007134431507438421, 0.0023254689294844866, 0.008387243375182152, 0.010394280776381493, 0.0036564290057867765, 0.004792577121406794], [0.003944651689380407, 0.00581276835873723, 0.022269627079367638, 0.00034762744326144457, 0.0031615172047168016, 0.03715548291802406, 0.013296765275299549, 0.012469514273107052, 0.02316916361451149, 0.033550363034009933, 0.007743375841528177, 0.017115090042352676, 0.019627396017313004, 0.08813974261283875, 0.559129536151886, 0.037104491144418716, 0.021097257733345032, 0.03646160289645195, 0.012058530002832413, 0.00294899451546371, 0.00884390901774168, 0.011221029795706272, 0.005620107054710388, 0.017711525782942772], [0.01004563644528389, 0.03603629395365715, 0.023165030404925346, 0.0012617434840649366, 0.007231842260807753, 0.016623470932245255, 0.01251104287803173, 0.01932261511683464, 0.09106682240962982, 0.05288654938340187, 0.016906727105379105, 0.03771892189979553, 0.06403039395809174, 0.160657599568367, 0.26257023215293884, 0.022031763568520546, 0.04347938671708107, 0.046939220279455185, 0.024175483733415604, 0.0071752043440938, 0.024759164080023766, 
0.011651352979242802, 0.002981448546051979, 0.004772071726620197], [0.0005134321982041001, 0.0008251059334725142, 0.029809709638357162, 2.949741428892594e-05, 0.0018763740081340075, 0.0021597035229206085, 0.0008087632013484836, 0.0016638296656310558, 0.019354067742824554, 0.024320580065250397, 0.007503732573240995, 0.020662084221839905, 0.00927395187318325, 0.08845531940460205, 0.73516845703125, 0.005148848053067923, 0.019666464999318123, 0.007560006808489561, 0.00719062052667141, 0.002334903459995985, 0.012768375687301159, 0.001653374289162457, 0.0005824000108987093, 0.0006704636034555733], [0.005934903398156166, 0.005178418941795826, 0.025938451290130615, 0.0003288176958449185, 0.006890402175486088, 0.0016433469718322158, 0.001230493769980967, 0.0006509379600174725, 0.006979806814342737, 0.0071142204105854034, 0.006444485858082771, 0.00988217443227768, 0.01360439881682396, 0.07034579664468765, 0.22326426208019257, 0.04617659002542496, 0.042098358273506165, 0.09220807254314423, 0.1345970630645752, 0.07149099558591843, 0.15863795578479767, 0.044642314314842224, 0.011983445845544338, 0.012734219431877136], [0.0006271424936130643, 0.0006596305756829679, 0.027036838233470917, 3.219357313355431e-05, 0.0014603252056986094, 0.0009936249116435647, 0.0002688374661374837, 0.00033299255301244557, 0.0023111167829483747, 0.00373191200196743, 0.007783032488077879, 0.007840175181627274, 0.0022813905961811543, 0.15195229649543762, 0.6149671077728271, 0.01483306847512722, 0.015077870339155197, 0.022794930264353752, 0.02484038472175598, 0.02525421604514122, 0.060829248279333115, 0.009735112078487873, 0.0036881999112665653, 0.0006683605606667697]], [[0.0024661803618073463, 0.005009554326534271, 0.036934733390808105, 0.03686019778251648, 0.04991574585437775, 0.08722969144582748, 0.06917330622673035, 0.14823463559150696, 0.24586564302444458, 0.03483438491821289, 0.06776566058397293, 0.03351233899593353, 0.07137277722358704, 0.0400986447930336, 0.04296572133898735, 
0.005271535832434893, 0.005718763452023268, 0.001108831143938005, 0.0007808419759385288, 0.0006293868063949049, 0.005572563502937555, 0.0008314457372762263, 0.004626487847417593, 0.0032209441997110844], [0.0014750846894457936, 0.0022250523325055838, 0.019568312913179398, 0.02236020751297474, 0.012935003265738487, 0.030295569449663162, 0.03794288635253906, 0.19406932592391968, 0.2501015067100525, 0.04734467715024948, 0.07041004300117493, 0.06924498826265335, 0.10441011935472488, 0.044328875839710236, 0.06103060021996498, 0.01683979108929634, 0.004800987895578146, 0.002580890664830804, 0.0007806516368873417, 0.0007208760362118483, 0.0024307407438755035, 0.0004359641170594841, 0.00184304965659976, 0.0018247767584398389], [0.018186967819929123, 0.01113509014248848, 0.07532021403312683, 0.04033307731151581, 0.016875367611646652, 0.07206945866346359, 0.03816325590014458, 0.2118077427148819, 0.3009989559650421, 0.06877071410417557, 0.0845852866768837, 0.013383661396801472, 0.015300079248845577, 0.00460493890568614, 0.01278718002140522, 0.0012144176289439201, 0.0009197905310429633, 0.0006822593277320266, 0.0005510238697752357, 0.0008378913043998182, 0.0031442272011190653, 0.0011273614363744855, 0.0038283143658190966, 0.003372637555003166], [0.0036157481372356415, 0.0023434003815054893, 0.02284148335456848, 0.02371269464492798, 0.009133127517998219, 0.037762176245450974, 0.06388125568628311, 0.44211259484291077, 0.24481701850891113, 0.06202351301908493, 0.023106858134269714, 0.012478867545723915, 0.020413542166352272, 0.005372172221541405, 0.012747111730277538, 0.004068089183419943, 0.0007329246145673096, 0.00039210094837471843, 0.0004547188291326165, 0.0005516026285476983, 0.002088801236823201, 0.0007675923989154398, 0.0014847330749034882, 0.0030977933201938868], [0.04315274953842163, 0.017936117947101593, 0.048248495906591415, 0.04159054160118103, 0.015000507235527039, 0.04071972519159317, 0.04214971885085106, 0.2987004220485687, 0.1949082463979721, 0.08469308167695999, 
0.04494456946849823, 0.01724846474826336, 0.019427595660090446, 0.014023873023688793, 0.0258021280169487, 0.01345320139080286, 0.00366726191714406, 0.0042880200780928135, 0.001602783566340804, 0.0038549783639609814, 0.003920415882021189, 0.005617824383080006, 0.006729086861014366, 0.008320101536810398], [0.005173446144908667, 0.007806597277522087, 0.032242219895124435, 0.03413340076804161, 0.03467768803238869, 0.03669813275337219, 0.025318095460534096, 0.11771032959222794, 0.26844581961631775, 0.21598000824451447, 0.15983882546424866, 0.028057027608156204, 0.010706408880650997, 0.009113763459026814, 0.004897512961179018, 0.0019819235894829035, 0.004387174732983112, 0.0012905689654871821, 0.0003042877360712737, 0.00025914094294421375, 0.00044971067109145224, 6.707558350171894e-05, 0.0003445723850745708, 0.00011629856453510001], [0.01516038179397583, 0.01728442870080471, 0.015951385721564293, 0.03179197013378143, 0.029422273859381676, 0.02321499027311802, 0.01870253123342991, 0.02535700611770153, 0.10578314960002899, 0.03995394706726074, 0.2263481467962265, 0.16740083694458008, 0.1355734020471573, 0.06352490931749344, 0.032697878777980804, 0.01570904441177845, 0.018216565251350403, 0.0074609932489693165, 0.0029661927837878466, 0.001641849521547556, 0.0028154761530458927, 0.0004676520184148103, 0.0019707598257809877, 0.0005842869868502021], [0.002828421536833048, 0.00462467921897769, 0.0074426401406526566, 0.021448208019137383, 0.01751714013516903, 0.005907042883336544, 0.012721378356218338, 0.037700995802879333, 0.048162057995796204, 0.020518701523542404, 0.17254236340522766, 0.2943991422653198, 0.2972688674926758, 0.03591212257742882, 0.00935250986367464, 0.0028129552956670523, 0.002735932357609272, 0.001173614989966154, 0.001070080092176795, 0.0017074166098609567, 0.0017318848986178637, 0.00010881889465963468, 0.00025483581703156233, 5.823688843520358e-05], [0.0020923109259456396, 0.008109288290143013, 0.0195314958691597, 0.03783735632896423, 0.05039278790354729, 
0.03263820335268974, 0.03363126143813133, 0.05282092094421387, 0.04038187488913536, 0.009863173589110374, 0.07041360437870026, 0.1319485455751419, 0.23068568110466003, 0.15528297424316406, 0.08269459009170532, 0.015370115637779236, 0.008435803465545177, 0.0016075728926807642, 0.001785498927347362, 0.0017979041440412402, 0.007868685759603977, 0.0012277448549866676, 0.0028661079704761505, 0.0007165573770180345], [0.0064948564395308495, 0.012663905508816242, 0.004274255130439997, 0.009046550840139389, 0.004679229576140642, 0.002523265779018402, 0.013713045977056026, 0.00712250079959631, 0.004382851533591747, 0.0012351104523986578, 0.009588126093149185, 0.03627590835094452, 0.1042063906788826, 0.43505027890205383, 0.23102322220802307, 0.08083613216876984, 0.008563529700040817, 0.004100698512047529, 0.004310911521315575, 0.004654639400541782, 0.004989098757505417, 0.004058859311044216, 0.004967489745467901, 0.0012390543706715107], [0.007908406667411327, 0.03230505809187889, 0.010875548236072063, 0.018216947093605995, 0.025508081540465355, 0.01728088967502117, 0.02989816479384899, 0.03587772697210312, 0.01473616249859333, 0.016709107905626297, 0.024525098502635956, 0.03597418591380119, 0.046752940863370895, 0.2209838479757309, 0.15129169821739197, 0.07761448621749878, 0.05149170011281967, 0.01572711206972599, 0.011690245009958744, 0.010059278458356857, 0.008486774750053883, 0.0356823094189167, 0.053916703909635544, 0.046487558633089066], [0.0017576462123543024, 0.005558904260396957, 0.006291683297604322, 0.004301148466765881, 0.003441320965066552, 0.0014002136886119843, 0.0066313366405665874, 0.013132905587553978, 0.010588756762444973, 0.00397660955786705, 0.018932785838842392, 0.026918405666947365, 0.04810021445155144, 0.04342587664723396, 0.22056487202644348, 0.21113181114196777, 0.07998255640268326, 0.03220393881201744, 0.0322556309401989, 0.019710106775164604, 0.00820248480886221, 0.011075892485678196, 0.07282143831253052, 0.11759337782859802], 
[0.0037748850882053375, 0.006592244375497103, 0.015292149037122726, 0.009930867701768875, 0.007816089317202568, 0.0034108636900782585, 0.007026589009910822, 0.013004172593355179, 0.021670928224921227, 0.01838715560734272, 0.03415841609239578, 0.04082927852869034, 0.02793932519853115, 0.014465732499957085, 0.0516342930495739, 0.11485660821199417, 0.14191362261772156, 0.16092261672019958, 0.07665418833494186, 0.03704299032688141, 0.012879758141934872, 0.018504485487937927, 0.05148422345519066, 0.10980848968029022], [0.0003883703611791134, 0.0004407520464155823, 0.0035907754208892584, 0.003210284747183323, 0.0005049995379522443, 0.0002547242911532521, 0.0004834112769458443, 0.004476006608456373, 0.00844663381576538, 0.002227889373898506, 0.019761918112635612, 0.02211867645382881, 0.029414691030979156, 0.0009743027039803565, 0.016383018344640732, 0.09766773879528046, 0.03585948422551155, 0.27609917521476746, 0.21824459731578827, 0.23324769735336304, 0.01115083321928978, 0.0013549693394452333, 0.004954813979566097, 0.008744284510612488], [0.0016518147895112634, 0.0006979092722758651, 0.0018538956064730883, 0.002280554734170437, 0.0004028423281852156, 0.0002662516199052334, 0.0003881502489093691, 0.0006415981333702803, 0.0005306065431796014, 0.0006942601758055389, 0.00509809423238039, 0.013057215139269829, 0.014037500135600567, 0.00046969024697318673, 0.0006775876972824335, 0.002108632354065776, 0.0012607391690835357, 0.026100171729922295, 0.24254892766475677, 0.6418029069900513, 0.03475376218557358, 0.006188785191625357, 0.0015486511401832104, 0.0009394298540428281], [0.0030341472011059523, 0.0012853245716542006, 0.004197434056550264, 0.006685304455459118, 0.000705288490280509, 0.0009845334570854902, 0.0025253822095692158, 0.0017515873769298196, 0.0009497448336333036, 0.0002737357863225043, 0.0023370920680463314, 0.010354478843510151, 0.04439610615372658, 0.0009143995121121407, 0.003000277327373624, 0.009093180298805237, 0.0005801932420581579, 0.009642509743571281, 
0.17202292382717133, 0.42541036009788513, 0.22460129857063293, 0.04862162843346596, 0.01146350521594286, 0.015169601887464523], [0.0023202768061310053, 0.000879614322911948, 0.0014216109411790967, 0.001543490681797266, 0.0001220453268615529, 0.00045333016896620393, 0.0006754426285624504, 0.0016523216618224978, 4.8051399062387645e-05, 3.0408442398766056e-05, 0.0001375609717797488, 0.0009236467885784805, 0.004233286716043949, 0.0004618630337063223, 0.000991920125670731, 0.0016666098963469267, 3.146098606521264e-05, 0.0009870914509519935, 0.009067563340067863, 0.40873226523399353, 0.0789092555642128, 0.41807547211647034, 0.027610044926404953, 0.03902539983391762], [0.0014718093443661928, 0.0016075046733021736, 0.009011872112751007, 0.007359082344919443, 0.0035896410699933767, 0.01467189658433199, 0.006516201887279749, 0.01186778862029314, 0.0005864131380803883, 0.00017677013238426298, 0.00042505707824602723, 0.0013536675833165646, 0.006050209980458021, 0.0032444519456475973, 0.012063298374414444, 0.005813269410282373, 0.0003793977084569633, 0.0006138768512755632, 0.0010981676168739796, 0.0157685037702322, 0.04768194258213043, 0.20702148973941803, 0.2198503315448761, 0.4217774271965027], [0.00023079551465343684, 0.00016513050650246441, 0.0003023360623046756, 0.00022263842402026057, 7.385219942079857e-05, 0.00031506287632510066, 0.00024065401521511376, 0.0008828685968182981, 1.7888671209220774e-05, 4.178138624411076e-06, 7.491079031751724e-06, 1.5528687072219327e-05, 5.637008143821731e-05, 0.00010253343498334289, 0.0007755614933557808, 0.0005904067074880004, 2.9183982405811548e-05, 4.6094039134914055e-05, 8.771889406489208e-05, 0.001816658303141594, 0.003123614937067032, 0.09879346936941147, 0.12309728562831879, 0.7690026760101318], [0.001179719460196793, 0.001050914521329105, 0.001730037503875792, 0.000881344371009618, 0.0002725455560721457, 0.0013189533492550254, 0.001838234020397067, 0.021371079608798027, 0.001009046332910657, 0.00033899585832841694, 
0.00020368557306937873, 2.0541498088277876e-05, 3.2185198506340384e-05, 6.84290353092365e-05, 0.0012039249995723367, 0.0008628361392766237, 0.00017449818551540375, 9.390543709741905e-05, 6.795053923269734e-05, 0.0003719531814567745, 0.00045324323582462966, 0.008104958571493626, 0.0918978601694107, 0.8654532432556152], [0.003998088650405407, 0.003238637000322342, 0.017423423007130623, 0.0073458473198115826, 0.0023883432149887085, 0.01679988019168377, 0.007825917564332485, 0.06766237318515778, 0.03592248633503914, 0.011845933273434639, 0.0057763303630054, 0.0001731107768137008, 0.00017168401973322034, 7.839276804588735e-05, 0.0017918358789756894, 0.0018820151453837752, 0.0013679629191756248, 0.0010245335288345814, 0.0009167084353975952, 0.001061299117282033, 0.0035800100304186344, 0.00966575089842081, 0.09891130030155182, 0.6991481184959412], [0.5979146957397461, 0.10104461014270782, 0.01643398590385914, 0.00700408685952425, 0.0015770441386848688, 0.0030953004024922848, 0.006828113459050655, 0.015481612645089626, 0.04386575147509575, 0.04803675785660744, 0.016423644497990608, 0.00036100222496315837, 0.0002562501758802682, 0.0003120901237707585, 0.0014357487671077251, 0.0030829019378870726, 0.0030781119130551815, 0.0024139557499438524, 0.0030087882187217474, 0.0024747871793806553, 0.0019655253272503614, 0.006724439561367035, 0.030878035351634026, 0.0863027572631836], [0.47351816296577454, 0.2014944851398468, 0.023000366985797882, 0.01704540103673935, 0.007793421857059002, 0.00400121184065938, 0.005918482784181833, 0.01965995877981186, 0.028214365243911743, 0.050429027527570724, 0.06029970943927765, 0.0033011261839419603, 0.0015381608391180634, 0.0005471977056004107, 0.0004132503818254918, 0.0011197462445124984, 0.0039058320689946413, 0.0036611484829336405, 0.011099105700850487, 0.02505401149392128, 0.01014825887978077, 0.011044977232813835, 0.017418915405869484, 0.019373571500182152], [0.4959709048271179, 0.14317110180854797, 0.02688714861869812, 0.01354831550270319, 
0.0034873054828494787, 0.0008766127284616232, 0.0022876523435115814, 0.006538925692439079, 0.019321642816066742, 0.009334820322692394, 0.11029218882322311, 0.012837065383791924, 0.010350813157856464, 0.0006063086329959333, 0.0004995794151909649, 0.0008499338873662055, 0.0022966070100665092, 0.0036606660578399897, 0.02600557915866375, 0.06590919941663742, 0.02855539321899414, 0.0034459622111171484, 0.00902690552175045, 0.004239290952682495]]], [[[0.009132430888712406, 0.0025977124460041523, 0.3031119406223297, 0.18148647248744965, 0.0061944108456373215, 0.02695254608988762, 0.06363579630851746, 0.01242657471448183, 0.0145955178886652, 0.0020572165958583355, 0.014835568144917488, 0.004605387803167105, 0.0060699209570884705, 0.0008674224372953176, 0.014211053028702736, 0.016525613144040108, 0.001086189178749919, 0.01566658355295658, 0.016939766705036163, 0.033287785947322845, 0.09623672068119049, 0.015799490734934807, 0.05001522973179817, 0.09166266024112701], [0.010000635869801044, 0.0034368305932730436, 0.20716293156147003, 0.21491596102714539, 0.005907813087105751, 0.023644113913178444, 0.054525453597307205, 0.01068185642361641, 0.009101342409849167, 0.001102371490560472, 0.005082080606371164, 0.007133581675589085, 0.005486775655299425, 0.002613230375573039, 0.03017754666507244, 0.05720517784357071, 0.0016974988393485546, 0.014096641913056374, 0.010703494772315025, 0.014031491242349148, 0.03900064900517464, 0.008315631188452244, 0.030924323946237564, 0.23305246233940125], [0.012875408865511417, 0.011853862553834915, 0.14623838663101196, 0.03612544387578964, 0.08559238165616989, 0.023509079590439796, 0.01392842922359705, 0.011102779768407345, 0.08203724026679993, 0.0025967354886233807, 0.2819557785987854, 0.0011974065564572811, 0.0014706106157973409, 0.0011755060404539108, 0.003741499502211809, 0.002421529497951269, 0.009565572254359722, 0.003761260537430644, 0.0035561281256377697, 0.00540890684351325, 0.015536017715930939, 0.0015012499643489718, 
0.23867221176624298, 0.004176481161266565], [0.005742568988353014, 0.004060654900968075, 0.036365438252687454, 0.0020922692492604256, 0.010092262178659439, 0.9059678316116333, 0.00497945724055171, 0.000335871271090582, 0.010604576207697392, 0.0004463450168259442, 0.00217976002022624, 2.240811227238737e-05, 0.00019083057122770697, 4.1973999032052234e-05, 0.00013239416875876486, 2.9074986741761677e-05, 0.00011186760093551129, 0.003810483729466796, 0.00041698524728417397, 0.0003894807887263596, 0.003362454706802964, 0.0007537702331319451, 0.007492339704185724, 0.0003788010508287698], [0.010827740654349327, 0.0027658676262944937, 0.11422731727361679, 0.02156616374850273, 0.004248116631060839, 0.16482749581336975, 0.5252029299736023, 0.06771837174892426, 0.05369732901453972, 0.007348380517214537, 0.007299676537513733, 0.0008074939833022654, 0.0024291262961924076, 0.0007212911732494831, 0.0005673995474353433, 0.00035584840225055814, 3.5952096368419006e-05, 0.00031952085555531085, 0.0007015820010565221, 0.00086215854389593, 0.0029257740825414658, 0.0021449581254273653, 0.006517208646982908, 0.0018822109559550881], [0.011455340310931206, 0.0024535313714295626, 0.048736315220594406, 0.01413415651768446, 0.0076388148590922356, 0.19599361717700958, 0.4149519205093384, 0.17763417959213257, 0.09669892489910126, 0.0023506886791437864, 0.005946548189967871, 0.0009254524484276772, 0.00038321129977703094, 0.0005847912398166955, 0.0005428826552815735, 0.001048786100000143, 0.00017927253793459386, 0.0004920995561406016, 0.00024314493930432945, 0.00019840151071548462, 0.0002953325165435672, 0.00020167315960861742, 0.006755304988473654, 0.010155619122087955], [0.013040662743151188, 0.001276730909012258, 0.007294148672372103, 0.026616062968969345, 0.0017426295671612024, 0.005757872015237808, 0.21938389539718628, 0.5350310802459717, 0.11233679205179214, 0.04674816504120827, 0.007697631139308214, 0.00846642255783081, 0.002034178702160716, 0.00032162535353563726, 0.00018036059918813407, 
0.0026904642581939697, 9.493591642240062e-05, 0.00025694092619232833, 0.0003911616513505578, 0.00025839885347522795, 6.723995466018096e-05, 0.0003425452741794288, 0.0010716812685132027, 0.006898476742208004], [0.01150449924170971, 0.002325949724763632, 0.02179018035531044, 0.007489317562431097, 0.003096159780398011, 0.014852828346192837, 0.018766654655337334, 0.010676358826458454, 0.2138582020998001, 0.5532231330871582, 0.06771933287382126, 0.022170664742588997, 0.005951603874564171, 0.0011869200970977545, 0.0036452063359320164, 0.010904772207140923, 0.0027597586158663034, 0.022587426006793976, 0.0011027454165741801, 0.00017908912559505552, 4.9689155275700614e-05, 0.00036303006345406175, 0.0007228995091281831, 0.0030735053587704897], [0.0020722977351397276, 0.001055150176398456, 0.0030813871417194605, 0.0007693031802773476, 0.003032148350030184, 0.0029644875321537256, 0.003297476563602686, 0.005033712834119797, 0.056144434958696365, 0.16378895938396454, 0.6841731071472168, 0.05588690564036369, 0.010721727274358273, 0.0023469964507967234, 0.000690339831635356, 0.0006430607754737139, 0.002095756819471717, 0.0009631033753976226, 0.0007248549954965711, 0.0002782332303468138, 3.777094025281258e-05, 1.5570711184409447e-05, 0.00017441337695345283, 8.719429388293065e-06], [0.012888933531939983, 0.001224603271111846, 0.0024046902544796467, 0.012026307173073292, 0.0005190164665691555, 0.004380714148283005, 0.018714308738708496, 0.01915469393134117, 0.008726701140403748, 0.02520075812935829, 0.05721156671643257, 0.7459820508956909, 0.01947147771716118, 0.006733565125614405, 0.0007841315236873925, 0.011826186440885067, 0.0005713762366212904, 0.030479365959763527, 0.013177596963942051, 0.007462979294359684, 0.00027511196094565094, 0.00011907213774975389, 0.00011026370339095592, 0.0005544045125134289], [0.007124877534806728, 0.025838494300842285, 0.010759244672954082, 0.005353162065148354, 0.03046669438481331, 0.009496215730905533, 0.002545734168961644, 0.002728713909164071, 
0.01084326021373272, 0.0019875410944223404, 0.2599993050098419, 0.08311090618371964, 0.1478358507156372, 0.22182653844356537, 0.033100344240665436, 0.004388255998492241, 0.015349543653428555, 0.003273516893386841, 0.00858121644705534, 0.03406401723623276, 0.050481971353292465, 0.00230144034139812, 0.028127027675509453, 0.0004161059623584151], [0.0007721673464402556, 0.002310546115040779, 0.0012929519871249795, 0.001832052250392735, 0.001332379993982613, 0.007618816569447517, 0.0014514698414132, 0.0006899756263010204, 0.0009168385295197368, 0.0023480940144509077, 0.017196781933307648, 0.013527309522032738, 0.431437611579895, 0.44182896614074707, 0.04050581529736519, 0.00557728111743927, 0.0005549402558244765, 0.004798098932951689, 0.0031033349223434925, 0.006540796719491482, 0.0018845883896574378, 0.004592697136104107, 0.007470735814422369, 0.00041573907947167754], [0.001422203378751874, 0.0020545830484479666, 0.00181602465454489, 0.0024015665985643864, 0.0006516968715004623, 0.0025338674895465374, 0.013626759871840477, 0.006489488296210766, 0.0005544311716221273, 0.0034082122147083282, 0.0015224323142319918, 0.03199340030550957, 0.22382192313671112, 0.49783286452293396, 0.1439305990934372, 0.023344241082668304, 0.000715283618774265, 0.0009004616877064109, 0.0015519511653110385, 0.0013536454644054174, 0.000534870894625783, 0.012719918973743916, 0.004754221998155117, 0.020065370947122574], [2.4151742763933726e-05, 7.445201481459662e-05, 0.0006059478037059307, 0.0005966894677840173, 3.555799412424676e-05, 0.0002333969168830663, 0.000781634880695492, 0.0011275993892922997, 0.00014297696179710329, 0.0031209359876811504, 4.0028822695603594e-05, 0.00041427763062529266, 0.01124074961990118, 0.021052371710538864, 0.5261058211326599, 0.39947599172592163, 0.0013716928660869598, 0.005450920667499304, 0.0008030778262764215, 0.00013660441618412733, 1.5518677173531614e-05, 0.00424745911732316, 0.000508075812831521, 0.022394057363271713], [0.00016579397197347134, 
0.00048578574205748737, 0.0027177934534847736, 0.0005444217240437865, 0.00013199479144532233, 3.7704747228417546e-05, 0.00031039994792081416, 0.0005849022418260574, 0.00047008637920953333, 0.0006588966934941709, 0.0013421893818303943, 0.00020976088126190007, 0.0006509079830721021, 0.004187818616628647, 0.5394490957260132, 0.3561669886112213, 0.05065886676311493, 0.015125680714845657, 0.014232565648853779, 0.0019726252648979425, 0.00012631707068067044, 0.0003970778197981417, 0.003984934184700251, 0.005387375131249428], [0.000575725978706032, 0.0006355635123327374, 0.002609281800687313, 0.0007294232491403818, 0.0002520096895750612, 0.0004269986238796264, 9.627202234696597e-05, 4.253916995367035e-05, 0.00022232395713217556, 0.0014182644663378596, 0.000906983099412173, 7.361873576883227e-05, 0.0002602278545964509, 8.673092088429257e-05, 0.012219263240695, 0.029439404606819153, 0.03792814910411835, 0.7529200911521912, 0.14365950226783752, 0.01061247382313013, 0.001461536856368184, 0.0016161068342626095, 0.0011052008485421538, 0.0007023151847533882], [0.0018206291133537889, 0.0009079683222807944, 0.006115775089710951, 0.007336124312132597, 0.0008062048582360148, 0.00011261038889642805, 0.0022903403732925653, 0.0007830080576241016, 0.0009736174833960831, 0.0028128100093454123, 0.01615908369421959, 0.0005309262778609991, 0.0016740987775847316, 0.0003301613323856145, 0.004930880386382341, 0.020957784727215767, 0.015554402954876423, 0.038817405700683594, 0.6911436319351196, 0.15495158731937408, 0.02287861704826355, 0.002653711475431919, 0.0052011385560035706, 0.00025752215879037976], [0.005528201349079609, 0.0035448065027594566, 0.007898030802607536, 0.008087006397545338, 0.003317892085760832, 0.002029050374403596, 0.000966729421634227, 0.00018146603542845696, 0.00036539926077239215, 0.00016839346790220588, 0.0050772991962730885, 0.0005809907452203333, 0.0004966650740243495, 0.0002709035761654377, 0.0010040587512776256, 0.0029746468644589186, 0.008431226946413517, 
0.08651839196681976, 0.31607282161712646, 0.27874448895454407, 0.25074124336242676, 0.008038320578634739, 0.008408179506659508, 0.0005539283738471568], [0.004036646336317062, 0.0013842907501384616, 0.0018092889804393053, 0.02034066617488861, 0.0008154388633556664, 0.00028992220177315176, 0.0008406071574427187, 0.00011500852997414768, 5.159737338544801e-05, 0.0003794328076764941, 0.0005376540939323604, 0.001913274871185422, 0.0027278719935566187, 0.0001596565416548401, 0.00043677634675987065, 0.0012318972731009126, 0.0007063778466545045, 0.008067154325544834, 0.12433378398418427, 0.2777981460094452, 0.41498976945877075, 0.13020597398281097, 0.0026154671795666218, 0.004213301464915276], [0.0014069135067984462, 0.0017483000410720706, 0.0030023527797311544, 0.003076394787058234, 0.000633770483545959, 0.002920291619375348, 0.00014929812459740788, 9.737642358231824e-06, 2.7523272365215234e-05, 7.479340274585411e-05, 2.967705404444132e-05, 0.0002251056139357388, 0.000790093676187098, 0.000490441161673516, 0.002723939251154661, 0.00041133450577035546, 0.0003909582446794957, 0.0062985485419631, 0.0031910541001707315, 0.012632177211344242, 0.371417760848999, 0.5626116991043091, 0.0029200618155300617, 0.022817743942141533], [0.001231458387337625, 0.006561398971825838, 0.005171678494662046, 0.0026079611852765083, 0.00846447329968214, 0.008490417152643204, 0.0006927456124685705, 0.0002898061939049512, 0.0002556279650889337, 1.6901021808735095e-05, 0.00032022566301748157, 9.162897185888141e-05, 0.000924588821362704, 0.004547883290797472, 0.00561113515868783, 0.0002866520080715418, 0.0012292590690776706, 0.00013122115342412144, 0.0008268862729892135, 0.009828695096075535, 0.6368071436882019, 0.09282142668962479, 0.19119752943515778, 0.021593280136585236], [0.0020569288171827793, 0.0012998998863622546, 0.002797066932544112, 0.005007332656532526, 0.0005421696696430445, 0.0037600889336317778, 0.009272330440580845, 0.0040798489935696125, 0.00043792222277261317, 
1.0982988897012547e-05, 2.5851744794636033e-05, 0.00010714503878261894, 7.343514153035358e-05, 0.0007349805673584342, 0.002856465522199869, 0.0037403288297355175, 0.00029437741613946855, 0.0010349043877795339, 0.0009100664756260812, 0.001369768986478448, 0.011548617854714394, 0.006164675112813711, 0.03210068121552467, 0.909774124622345], [0.0012309557059779763, 0.00587102398276329, 0.03439398854970932, 0.0021921356674283743, 0.01667013205587864, 0.004222090821713209, 0.002704872516915202, 0.003459082916378975, 0.013572161085903645, 3.6544061003951356e-05, 0.0019322067964822054, 3.900247611454688e-05, 0.00010751801892183721, 0.000679920194670558, 0.026995902881026268, 0.003263687016442418, 0.014676090329885483, 0.00048089231131598353, 0.0005988589255139232, 0.0010303986491635442, 0.0381910614669323, 0.002078443532809615, 0.6690388917922974, 0.15653415024280548], [0.008324800059199333, 0.004187813028693199, 0.05941976234316826, 0.016021963208913803, 0.00823602918535471, 0.04295425862073898, 0.043683283030986786, 0.03676571696996689, 0.21699053049087524, 0.00651324400678277, 0.010064134374260902, 0.00011694525892380625, 0.00042682787170633674, 0.00021345618006307632, 0.006999613251537085, 0.021137695759534836, 0.004988424945622683, 0.03400701284408569, 0.004983356222510338, 0.0011345446109771729, 0.002114461036399007, 0.002253399696201086, 0.19997121393680573, 0.2684915363788605]], [[0.011128873564302921, 0.007963726297020912, 0.04586527869105339, 0.09792263805866241, 0.07054293900728226, 0.023286769166588783, 0.05885719880461693, 0.2816774249076843, 0.22243796288967133, 0.03454528748989105, 0.015728259459137917, 0.020534297451376915, 0.03874538466334343, 0.019813163205981255, 0.008486859500408173, 0.0036617787554860115, 0.0018598840106278658, 0.0003167070390190929, 0.000701952027156949, 0.004259528126567602, 0.0073585608042776585, 0.008843746036291122, 0.006686927750706673, 0.008774865418672562], [0.022156069055199623, 0.02169308438897133, 0.029363270848989487, 
0.05461718142032623, 0.06662385165691376, 0.07533524185419083, 0.07087098807096481, 0.18057256937026978, 0.14343050122261047, 0.08011812716722488, 0.014944169670343399, 0.03194234147667885, 0.10579705238342285, 0.029483506456017494, 0.013377540744841099, 0.008533118292689323, 0.006839872803539038, 0.00229399255476892, 0.0018794884672388434, 0.004674417432397604, 0.006255271844565868, 0.015521660447120667, 0.005112325306981802, 0.008564320392906666], [0.011665409430861473, 0.00366970244795084, 0.02081170491874218, 0.01940920762717724, 0.011850662529468536, 0.03206505998969078, 0.0381590835750103, 0.14109572768211365, 0.5983593463897705, 0.07499571144580841, 0.01297673024237156, 0.0053725712932646275, 0.020989254117012024, 0.000363637664122507, 0.00040264317067340016, 9.184844384435564e-05, 3.113354614470154e-05, 7.87262397352606e-05, 7.329209620365873e-05, 0.0003272167523391545, 0.0008934473735280335, 0.0017303453059867024, 0.0016049991827458143, 0.0029825777746737003], [0.0022554504685103893, 0.0005395737243816257, 0.005412515718489885, 0.009126776829361916, 0.0010369740193709731, 0.01177122164517641, 0.0034461969044059515, 0.926676869392395, 0.015169876627624035, 0.006735348608344793, 0.0005960729904472828, 0.0036845137365162373, 0.0008482584962621331, 0.0008861037786118686, 0.00025476625887677073, 0.00015461361908819526, 1.3743116141995415e-05, 1.6534811948076822e-05, 8.413458090217318e-06, 0.004509621299803257, 0.000333988486090675, 0.0009141005575656891, 0.0003480571904219687, 0.005260363221168518], [0.0033431274350732565, 0.000800754816737026, 0.021470073610544205, 0.02562759444117546, 0.003874543122947216, 0.015732290223240852, 0.19245252013206482, 0.3186083734035492, 0.2520773410797119, 0.12310698628425598, 0.005560015793889761, 0.0028651407919824123, 0.010432593524456024, 0.00034045710344798863, 0.0008396145422011614, 0.00010829237726284191, 2.6859208446694538e-05, 1.8393515347270295e-05, 0.00025064716464839876, 0.001232449198141694, 0.004793236497789621, 
0.012424572370946407, 0.0015205774689093232, 0.0024936150293797255], [0.001304985722526908, 0.0005041907425038517, 0.008171607740223408, 0.026173412799835205, 0.0012597289169207215, 0.014826526865363121, 0.012587538920342922, 0.7817543745040894, 0.05396536365151405, 0.05129026994109154, 0.0028446833603084087, 0.022290321066975594, 0.000250401470111683, 0.005660458467900753, 0.001936550484970212, 0.009820153936743736, 0.00012927775969728827, 0.00018887709302362055, 1.5402127246488817e-05, 0.0003844168095383793, 2.0652114471886307e-05, 0.00025310873752459884, 0.00015835001249797642, 0.004209422972053289], [0.0008859494118951261, 0.00024051066429819912, 0.007983246818184853, 0.013657018542289734, 0.00028572039445862174, 0.0017877360805869102, 0.01072576642036438, 0.04476536810398102, 0.6965017914772034, 0.14851772785186768, 0.03396625444293022, 0.009897705167531967, 0.00988723710179329, 0.001539197051897645, 0.015538817271590233, 0.0019022102933377028, 0.0001755008997861296, 8.822972449706867e-05, 0.00015199581685010344, 0.00011017247015843168, 0.00048534449888393283, 0.00022659948444925249, 0.00034843123285099864, 0.0003314651839900762], [0.015439167618751526, 0.009205988608300686, 0.006175358779728413, 0.03898365795612335, 0.004811569582670927, 0.012536351568996906, 0.004348252899944782, 0.20373867452144623, 0.04724764823913574, 0.08716920018196106, 0.02416497841477394, 0.4386201500892639, 0.0033129598014056683, 0.058640651404857635, 0.0026304509956389666, 0.02699611708521843, 0.0011314480798318982, 0.0024637209717184305, 0.00019405091006774455, 0.005976094864308834, 0.00011667135549942032, 0.00032203702721744776, 0.0002487306483089924, 0.0055260141380131245], [0.00022430458921007812, 0.00019250392506364733, 0.00178890663664788, 0.0013445229269564152, 0.0002834436309058219, 0.0005034722271375358, 0.0009649124694988132, 0.0043402682058513165, 0.046723462641239166, 0.05685051158070564, 0.11502529680728912, 0.027875494211912155, 0.727477490901947, 0.010702500119805336, 
0.0048880972899496555, 0.0001992576289921999, 7.271437789313495e-05, 5.281745325191878e-05, 7.658657705178484e-05, 8.109623740892857e-05, 0.00015844337758608162, 0.00010588771692709997, 6.462103920057416e-05, 3.3865201203298056e-06], [0.0002404522820143029, 0.0004410096153151244, 0.0005799159989692271, 0.004705457482486963, 4.407758024171926e-05, 0.0006670363363809884, 3.544730498106219e-05, 0.004865116439759731, 0.0003304403508082032, 0.004076924175024033, 0.006389749702066183, 0.6636021733283997, 0.0022051134146749973, 0.2760356068611145, 0.005714473780244589, 0.012152129784226418, 9.823316213442013e-05, 0.0052488441579043865, 7.459698099410161e-05, 0.011361065320670605, 0.00014574575470760465, 0.00021557252330239862, 6.84469923726283e-05, 0.0007024158257991076], [0.0006191150168888271, 0.0012237721821293235, 0.00032992727938108146, 0.00010131551971426234, 0.0002822943206410855, 0.0002578691637609154, 0.0018163920613005757, 0.00019257540407124907, 0.001586985308676958, 0.001336276880465448, 0.008276959881186485, 0.0008863226394169033, 0.9740651249885559, 0.0011913293274119496, 0.0029349979013204575, 3.569914770196192e-05, 0.00015974351845216006, 4.771473686560057e-05, 0.0011721710907295346, 0.00013547937851399183, 0.0015246097464114428, 0.0008456458454020321, 0.0009652519365772605, 1.2397517821227666e-05], [0.06360040605068207, 0.1258675754070282, 0.0013416728470474482, 0.001113696489483118, 0.0004858619358856231, 0.007246135734021664, 0.00016874767607077956, 0.0163718331605196, 0.00035336101427674294, 0.003329525701701641, 0.0012721979292109609, 0.02958618849515915, 0.005526995286345482, 0.6303380131721497, 0.026136713102459908, 0.04754793271422386, 0.0014879105146974325, 0.011411992833018303, 0.0002542906440794468, 0.01679532788693905, 0.00017824990209192038, 0.004668638110160828, 0.0013068892294541001, 0.0036098738200962543], [0.0018881208961829543, 0.006009386386722326, 0.0014997198013588786, 0.0003329048049636185, 0.00013150965969543904, 
0.0006883329479023814, 0.001404622453264892, 0.00042022630805149674, 0.0015052888775244355, 0.0003075683198403567, 0.008723296225070953, 5.663911360898055e-05, 0.02818322367966175, 0.0008932061609812081, 0.8058714270591736, 0.003774263197556138, 0.03286707401275635, 0.0029575922526419163, 0.01360955648124218, 0.00023813503503333777, 0.0038929739966988564, 0.001015444635413587, 0.08334912359714508, 0.0003804276930168271], [0.006888140924274921, 0.010531778447329998, 0.0003032872045878321, 0.000899381993804127, 0.00011969159095315263, 0.0011008073342964053, 1.0918563020823058e-05, 0.0005103170406073332, 2.3926129870233126e-05, 0.00033296755282208323, 9.236444748239592e-05, 0.002087539294734597, 1.608864840818569e-05, 0.010709262453019619, 0.003916703164577484, 0.3595886826515198, 0.015718623995780945, 0.5497117638587952, 0.001654940890148282, 0.019760511815547943, 9.492172102909535e-05, 0.0013745814794674516, 0.0009623862570151687, 0.013590381480753422], [0.0039087808690965176, 0.004076724871993065, 0.004108107183128595, 0.0018153281416743994, 0.0005338353221304715, 0.000564896035939455, 0.001379151945002377, 0.00032724725315347314, 0.005117705091834068, 0.0016604650299996138, 0.01744513399899006, 0.0008939547115005553, 0.03905179351568222, 0.0003837611002381891, 0.04137060418725014, 0.008350489661097527, 0.044177308678627014, 0.06310425698757172, 0.4702867865562439, 0.02746107615530491, 0.18863362073898315, 0.006978296209126711, 0.06623219698667526, 0.0021383818238973618], [0.0015063234604895115, 0.0008145806496031582, 0.0028032767586410046, 0.0025383708998560905, 9.374375804327428e-05, 0.00040234107291325927, 1.649778278078884e-05, 0.0010224528377875686, 0.00012902275193482637, 0.00022381900635082275, 0.0006754833739250898, 0.003521848702803254, 0.0001342704490525648, 0.0005325743113644421, 0.0007904856465756893, 0.007535202894359827, 0.0009222137159667909, 0.060245126485824585, 0.008663173764944077, 0.8592261075973511, 0.027352193370461464, 0.003611439373344183, 
0.002908664057031274, 0.014330742880702019], [0.0005841002566739917, 0.0002704797370824963, 0.001953976461663842, 0.0009292360628023744, 0.00037302178679965436, 7.065803947625682e-05, 0.0008854765328578651, 9.599170152796432e-05, 0.0007066160906106234, 0.00045682713971473277, 0.002354179974645376, 0.00028196044149808586, 0.010080578736960888, 3.0214003345463425e-05, 0.000582345703151077, 9.294097253587097e-05, 0.0007776300190016627, 0.0006669044378213584, 0.18895113468170166, 0.06356853246688843, 0.6945905089378357, 0.02307914011180401, 0.008129511959850788, 0.00048796608461998403], [0.005621155723929405, 0.004217216279357672, 0.00927853025496006, 0.013227562420070171, 0.0028758011758327484, 0.0047120037488639355, 0.0007577072829008102, 0.002025516936555505, 0.0001916684996103868, 0.0007688266923651099, 0.0014670102391391993, 0.0303361713886261, 0.0007529736030846834, 0.01883462443947792, 0.0030032466165721416, 0.014983917586505413, 0.0017112161731347442, 0.022914322093129158, 0.014083717949688435, 0.5511660575866699, 0.07538127899169922, 0.08521151542663574, 0.020586026832461357, 0.11589185893535614], [0.00023241508461069316, 0.00013031240087002516, 0.002547590294852853, 0.0015290265437215567, 0.00016084130038507283, 0.00019802107999566942, 0.0007740338915027678, 9.226988913724199e-05, 0.00037239788798615336, 4.301322405808605e-05, 0.0004746554186567664, 5.731981946155429e-05, 0.000825823110062629, 7.40579780540429e-05, 0.007249028887599707, 0.00020525921718217432, 0.0002730460837483406, 0.00016029538528528064, 0.013081556186079979, 0.013153952546417713, 0.8066611289978027, 0.028335971757769585, 0.11063431203365326, 0.01273365132510662], [0.0017117789248004556, 0.0016625206917524338, 0.0005936691886745393, 0.002633824711665511, 0.0005555509706027806, 0.0015158847672864795, 0.00010929113341262564, 0.001981839071959257, 1.5998073649825528e-05, 3.3055193853215314e-06, 5.475667876453372e-06, 0.00027776529896073043, 1.833458100009011e-06, 0.0007579593220725656, 
0.0002132374793291092, 0.0031979111954569817, 0.0001551880268380046, 0.0003441803273744881, 0.00011356819595675915, 0.03658630698919296, 0.004863585811108351, 0.006940391846001148, 0.013131920248270035, 0.9226270318031311], [0.002293857978656888, 0.0018790976610034704, 0.009851682931184769, 0.00492890877649188, 0.002250715857371688, 0.003762606531381607, 0.005338475573807955, 0.009929284453392029, 0.0027317253407090902, 0.00018802215345203876, 0.00040429941145703197, 4.582522888085805e-05, 0.0016696392558515072, 0.00024180450418498367, 0.010218942537903786, 0.0007137598586268723, 0.0009620354976505041, 0.0001412639394402504, 0.002418738091364503, 0.011650660075247288, 0.14577150344848633, 0.07966704666614532, 0.5334101915359497, 0.16952985525131226], [0.00040108172106556594, 0.0002979243581648916, 0.0009374887449666858, 0.003724571317434311, 0.0002327863621758297, 0.002380344085395336, 0.00047523665125481784, 0.015068195760250092, 0.000164158787811175, 0.00011957027163589373, 2.3886042981757782e-05, 0.0002608553331810981, 1.4385371969183325e-06, 0.00018405997252557427, 0.0005780797800980508, 0.0025703683495521545, 0.00022974061721470207, 0.0016391223762184381, 0.00017909117741510272, 0.023441554978489876, 0.001958302455022931, 0.003948192577809095, 0.011118916794657707, 0.9300650358200073], [0.024390514940023422, 0.009545717388391495, 0.008745837956666946, 0.005052374675869942, 0.0327029712498188, 0.007426416035741568, 0.31721362471580505, 0.021841151639819145, 0.055481214076280594, 0.01109254453331232, 0.006696568336337805, 0.00015405558224301785, 0.017636613920331, 9.694324035081081e-06, 0.0006714572664350271, 0.0001789474772522226, 0.007698277942836285, 0.0007127983844839036, 0.05644875019788742, 0.007200514432042837, 0.08023402094841003, 0.04736293852329254, 0.22154416143894196, 0.05995882302522659], [0.3355180025100708, 0.05271759256720543, 0.003805778454989195, 0.009120115078985691, 0.0038179345428943634, 0.009839467704296112, 0.0038908037822693586, 
0.14380788803100586, 0.0059821647591888905, 0.011279897764325142, 0.0005426689749583602, 0.003999358508735895, 2.3621014406671748e-05, 0.00011050467583118007, 3.517642107908614e-05, 0.002885729307308793, 0.0008857053471729159, 0.004553439095616341, 0.0005598911084234715, 0.049636341631412506, 0.0004824165371246636, 0.0035577884409576654, 0.0030314731411635876, 0.3499163091182709]], [[0.0029665909241884947, 0.00478452118113637, 0.25994008779525757, 0.10825471580028534, 0.04044665768742561, 0.02752760425209999, 0.02588590234518051, 0.018822742626070976, 0.055146168917417526, 0.05883479118347168, 0.049312084913253784, 0.008352844044566154, 0.010365425609052181, 0.001972567057237029, 0.01645255833864212, 0.004889453761279583, 0.008349048905074596, 0.024898715317249298, 0.022409342229366302, 0.032007671892642975, 0.0742846205830574, 0.07839826494455338, 0.038131535053253174, 0.027566025033593178], [0.010635577142238617, 0.017712853848934174, 0.1753259003162384, 0.0697706937789917, 0.032885413616895676, 0.029395928606390953, 0.03997050225734711, 0.07592177391052246, 0.02400294877588749, 0.06406508386135101, 0.04544869065284729, 0.06264397501945496, 0.033094607293605804, 0.04517557844519615, 0.012553437612950802, 0.010050122626125813, 0.003720177337527275, 0.02259267494082451, 0.01697605475783348, 0.08928921818733215, 0.017308583483099937, 0.05192362889647484, 0.016710471361875534, 0.03282611444592476], [0.1700727343559265, 0.1230485811829567, 0.023673752322793007, 0.03263239935040474, 0.04554663971066475, 0.02405848354101181, 0.13765233755111694, 0.1527099907398224, 0.07358844578266144, 0.01674048602581024, 0.02915797010064125, 0.01382802426815033, 0.008912441320717335, 0.017084697261452675, 0.003226157743483782, 0.009495502337813377, 0.021877329796552658, 0.009789452888071537, 0.030341874808073044, 0.018986767157912254, 0.012076236307621002, 0.002252779668197036, 0.013387373648583889, 0.009859452955424786], [0.026660172268748283, 0.02080383338034153, 
0.15487346053123474, 0.050326719880104065, 0.015343409962952137, 0.016767434775829315, 0.06256761401891708, 0.02370990440249443, 0.03118737041950226, 0.03174154832959175, 0.04148917272686958, 0.015438210219144821, 0.019826840609312057, 0.0034890274982899427, 0.010163743048906326, 0.0033602432813495398, 0.007167243864387274, 0.05015043541789055, 0.14446485042572021, 0.1052156314253807, 0.08294011652469635, 0.030782153829932213, 0.025615276768803596, 0.025915617123246193], [0.005436756648123264, 0.010130475275218487, 0.07376444339752197, 0.4409787356853485, 0.014094684273004532, 0.04647587239742279, 0.008012856356799603, 0.012163341976702213, 0.032296109944581985, 0.02094130963087082, 0.018585002049803734, 0.01034360658377409, 0.005482403561472893, 0.0014336778549477458, 0.0027588834054768085, 0.013757556676864624, 0.0025323396548628807, 0.019329270347952843, 0.006600272376090288, 0.02854323387145996, 0.1505957543849945, 0.043494801968336105, 0.018291696906089783, 0.013956928625702858], [0.008597731590270996, 0.012735427357256413, 0.12963147461414337, 0.1026519387960434, 0.15900354087352753, 0.05438695847988129, 0.03807681426405907, 0.021853938698768616, 0.088149793446064, 0.01423890981823206, 0.024049991741776466, 0.0018207457615062594, 0.012542357668280602, 0.0009666795958764851, 0.0036817826330661774, 0.0015307065332308412, 0.0053889453411102295, 0.007033515255898237, 0.0217715073376894, 0.025546682998538017, 0.14645616710186005, 0.05350840464234352, 0.055607058107852936, 0.010768864303827286], [0.022376740351319313, 0.02859732136130333, 0.041287291795015335, 0.18852680921554565, 0.048950325697660446, 0.42893171310424805, 0.043512117117643356, 0.04863383248448372, 0.018024519085884094, 0.013150263577699661, 0.002469003666192293, 0.017291121184825897, 0.0026137318927794695, 0.003128557000309229, 0.00037847907515242696, 0.0014111143536865711, 0.00032035625190474093, 0.003001198638230562, 0.00043771122000180185, 0.0055764345452189445, 0.01770182140171528, 
0.023631099611520767, 0.004126282408833504, 0.035922110080718994], [0.005732778459787369, 0.0065043033100664616, 0.0689922645688057, 0.04245160520076752, 0.04871769994497299, 0.08284410834312439, 0.3851868212223053, 0.09501516819000244, 0.17761412262916565, 0.008780824020504951, 0.01805432327091694, 0.0016463586362078786, 0.005865946412086487, 0.0007772872922942042, 0.002656541997566819, 0.000261797133134678, 0.000889830116648227, 0.0009065622580237687, 0.0019761400762945414, 0.0017984895966947079, 0.01443836372345686, 0.002620902843773365, 0.016572201624512672, 0.009695577435195446], [0.02278633415699005, 0.014125143177807331, 0.018703395500779152, 0.04059869423508644, 0.02991749718785286, 0.21256104111671448, 0.06965094059705734, 0.37629449367523193, 0.12270154803991318, 0.017839834094047546, 0.001962812151759863, 0.0031467711087316275, 0.00014965847367420793, 0.005564813036471605, 0.0024578666780143976, 0.01873067393898964, 0.005902225151658058, 0.0058567458763718605, 0.0003458092687651515, 0.00046461689635179937, 0.00041617831448093057, 0.0003843162558041513, 0.0014532480854541063, 0.027985339984297752], [0.014912812039256096, 0.03020455874502659, 0.007922089658677578, 0.008171836845576763, 0.010392887517809868, 0.014639491215348244, 0.04435553774237633, 0.09733191877603531, 0.6662358045578003, 0.01997320167720318, 0.015452547930181026, 0.00328333443030715, 0.008386914618313313, 0.004394760355353355, 0.025169074535369873, 0.008511531166732311, 0.009166479110717773, 0.0030374987982213497, 0.0031972683500498533, 0.00023129017790779471, 0.00045165701885707676, 9.23893167055212e-05, 0.00182111538015306, 0.002664062660187483], [0.1466158628463745, 0.04953150823712349, 0.005820258054882288, 0.01430184580385685, 0.008011339232325554, 0.03437122330069542, 0.03761669620871544, 0.29868146777153015, 0.03238712251186371, 0.09078237414360046, 0.0070593454875051975, 0.13465286791324615, 0.0003832591464743018, 0.031986303627491, 0.0002661083126440644, 0.01748032681643963, 
0.0030893548391759396, 0.054795071482658386, 0.00826308038085699, 0.019410789012908936, 0.0002739243791438639, 0.00019084199448116124, 0.00011418846406741068, 0.003914727363735437], [0.0015966894570738077, 0.0025909661781042814, 0.006197177805006504, 0.0002531821664888412, 0.004406578838825226, 0.001007356564514339, 0.021888794377446175, 0.004874983336776495, 0.014832870103418827, 0.041840266436338425, 0.8255271911621094, 0.009517833590507507, 0.032538529485464096, 0.0021166682709008455, 0.011827239766716957, 6.521799514302984e-05, 0.0015938293654471636, 0.005030154250562191, 0.01022533979266882, 0.0008747388492338359, 0.00014314576401375234, 0.0001015061279758811, 0.0009373857756145298, 1.2274753316887654e-05], [0.0018280809745192528, 0.001612965133972466, 2.0612604203051887e-05, 0.0005507747991941869, 0.0002556104154791683, 0.0009175781742669642, 6.200661300681531e-05, 0.00016661541303619742, 1.8697635823627934e-05, 0.004311793018132448, 8.113398507703096e-05, 0.9401606917381287, 0.0008922219858504832, 0.03949427232146263, 6.374577424139716e-06, 0.0013429793762043118, 2.473786116752308e-05, 0.005374896805733442, 0.00013683938595931977, 0.0021964467596262693, 1.954471372300759e-05, 0.0002922365674749017, 7.169101650106313e-07, 0.0002321697393199429], [0.00027797382790595293, 0.0012789485044777393, 8.351256110472605e-05, 8.059091487666592e-05, 0.00136255391407758, 0.00030076224356889725, 0.0012098412262275815, 0.0004088033747393638, 0.000396381743485108, 0.00122586521320045, 0.02117007225751877, 0.04680904000997543, 0.8678692579269409, 0.053209006786346436, 0.0025444268248975277, 4.400705802254379e-05, 9.050888911588117e-05, 0.0001519117649877444, 0.00032041827216744423, 0.0004803133197128773, 0.0001471416326239705, 0.0003099280584137887, 0.00021829424076713622, 1.042520580085693e-05], [0.004070378839969635, 0.005058200564235449, 5.411457459558733e-05, 3.0701077776029706e-05, 0.000286577211227268, 0.000637914752587676, 0.0008535412489436567, 0.002651744754984975, 
6.248629506444559e-05, 0.0007376551511697471, 0.0002823452523443848, 0.009011002257466316, 0.003200582694262266, 0.9632304310798645, 0.0029743313789367676, 0.003664062824100256, 0.00042588304495438933, 0.0005572647205553949, 9.318043157691136e-05, 0.0005394790787249804, 1.1753710168704856e-05, 0.00031943729845806956, 0.00023714125563856214, 0.0010097865015268326], [0.001377485110424459, 0.0020908997394144535, 0.0006244443939067423, 6.522714829770848e-05, 0.0003504706546664238, 0.00014980934793129563, 0.001050305087119341, 0.00016350865189451724, 0.0004758947470691055, 0.0010325489565730095, 0.007447462994605303, 0.0009090491803362966, 0.05578034371137619, 0.04165637493133545, 0.7997760772705078, 0.00679695513099432, 0.03788358345627785, 0.00634099543094635, 0.01063615083694458, 0.0007872144342400134, 0.0008879068191163242, 0.0030700210481882095, 0.01848200522363186, 0.002165395300835371], [0.0027872510254383087, 0.00335258268751204, 0.004199558403342962, 0.003044853452593088, 0.0002540459099691361, 0.0021177218295633793, 0.00021811251644976437, 0.0012685329420492053, 0.0022180858068168163, 0.017827924340963364, 0.002892253687605262, 0.0017509720055386424, 0.0007440036861225963, 0.03823430463671684, 0.04001811146736145, 0.7265042662620544, 0.012900574132800102, 0.09916018694639206, 0.0019630801398307085, 0.004620910622179508, 0.001726873917505145, 0.014225740917026997, 0.0074470797553658485, 0.010522978380322456], [0.003335570450872183, 0.0032251733355224133, 0.004997864365577698, 0.000497686502058059, 0.0010271953651681542, 0.0002005763672059402, 0.00037152328877709806, 0.0003316097427159548, 0.012341641820967197, 0.009858496487140656, 0.0175629872828722, 0.00014154863310977817, 0.0030868996400386095, 0.001168050803244114, 0.14539016783237457, 0.04439511522650719, 0.44199079275131226, 0.17584100365638733, 0.11495789885520935, 0.004083592910319567, 0.005624445155262947, 0.0022741095162928104, 0.007080611772835255, 0.0002153989189537242], [0.016897857189178467, 
0.01447618193924427, 0.007941008545458317, 0.011247839778661728, 0.00270167738199234, 0.002217547269538045, 0.0007577959331683815, 0.0010352963581681252, 0.004861121065914631, 0.03923775255680084, 0.009021072648465633, 0.024275153875350952, 0.002727788407355547, 0.004280640743672848, 0.007770068012177944, 0.07017677277326584, 0.07512158900499344, 0.5386325716972351, 0.058636635541915894, 0.05006036162376404, 0.02806916832923889, 0.021832741796970367, 0.0022766063921153545, 0.0057447366416454315], [0.0007165081333369017, 0.0009451212827116251, 0.0038422096986323595, 0.0025520939379930496, 0.0027089957147836685, 0.00011227714276174083, 0.0007715580286458135, 0.00010834328713826835, 0.008821849711239338, 0.005421653389930725, 0.02560904063284397, 0.006978195160627365, 0.06086114048957825, 9.74960858002305e-05, 0.0041579012759029865, 0.000314426317345351, 0.027047034353017807, 0.04790539667010307, 0.5237711071968079, 0.06624434143304825, 0.20435698330402374, 0.004960722289979458, 0.0014335185987874866, 0.0002620469022076577], [0.003173458855599165, 0.0022596903145313263, 0.0021860019769519567, 0.005945921875536442, 0.0018444540910422802, 0.0006396571989171207, 0.0001760303566697985, 8.181668090401217e-05, 0.00010009534162236378, 0.00037928138044662774, 0.0006488687358796597, 0.010309289209544659, 0.0018486841581761837, 0.0018983051413670182, 0.0010753913084045053, 0.0042224605567753315, 0.013343852013349533, 0.07452542334794998, 0.09666818380355835, 0.36136433482170105, 0.3173987567424774, 0.08112940937280655, 0.0039771199226379395, 0.01480349712073803], [0.003278509248048067, 0.009524605236947536, 0.002407173393294215, 0.004864404443651438, 0.001484143314883113, 0.0006549846730194986, 0.001063886913470924, 0.00010659831605153158, 0.00027390566538088024, 0.00014280926552601159, 0.0023367018438875675, 0.008957195095717907, 0.10050787031650543, 0.00568406144157052, 0.02123112790286541, 0.0012964850757271051, 0.003484225133433938, 0.003098229179158807, 
0.10252750664949417, 0.06705804914236069, 0.5270959138870239, 0.0873623639345169, 0.0320173054933548, 0.013541920110583305], [0.02781430073082447, 0.02139180712401867, 0.00299276364967227, 0.015313168987631798, 0.0035874913446605206, 0.00723611656576395, 0.004399839323014021, 0.010161960497498512, 0.00012673439050558954, 0.00023127651365939528, 0.0002120180579368025, 0.023099567741155624, 0.0010003936477005482, 0.07473614811897278, 0.0003244304680265486, 0.00524562131613493, 0.0007490687421523035, 0.004225463140755892, 0.009426255710422993, 0.3231394588947296, 0.03715446963906288, 0.04588450491428375, 0.01357248891144991, 0.3679746389389038], [0.00045850846800021827, 0.0013877113815397024, 0.009201602078974247, 0.00025657398509792984, 0.00315217231400311, 0.0011046413565054536, 0.009434389881789684, 0.0010117096826434135, 0.00023801130009815097, 9.729260636959225e-05, 0.003877262119203806, 8.228721708292142e-05, 0.011257058009505272, 0.004495309665799141, 0.039101939648389816, 6.644334644079208e-05, 0.0009850572096183896, 0.0002222750918008387, 0.003267676569521427, 0.0029881505761295557, 0.011026715859770775, 0.04306342080235481, 0.8212345838546753, 0.0319892056286335]], [[0.031642377376556396, 0.014293412677943707, 0.01093975082039833, 0.08357249200344086, 0.007380096707493067, 0.014902829192578793, 0.013320432044565678, 0.012817160226404667, 0.005381127819418907, 0.0234242994338274, 0.013332466594874859, 0.013919404707849026, 0.03815595060586929, 0.02126426436007023, 0.01953076384961605, 0.13501319289207458, 0.02349694073200226, 0.05540013685822487, 0.05722492188215256, 0.15648964047431946, 0.060972828418016434, 0.09836657345294952, 0.03588106110692024, 0.05327795445919037], [0.023083306849002838, 0.01883138343691826, 0.006099745165556669, 0.02380456030368805, 0.006425308529287577, 0.0037863189354538918, 0.0036583752371370792, 0.00944606028497219, 0.0018152045086026192, 0.01296367309987545, 0.0130561962723732, 0.04805540665984154, 0.09581635892391205, 
0.09840374439954758, 0.02098015695810318, 0.11360781639814377, 0.02714318037033081, 0.03300921246409416, 0.046750057488679886, 0.26741263270378113, 0.040932297706604004, 0.05984136089682579, 0.009671168401837349, 0.015406393446028233], [0.050593387335538864, 0.03987037390470505, 0.04566948860883713, 0.06413289904594421, 0.011638439260423183, 0.01791083626449108, 0.00612330948933959, 0.046653907746076584, 0.010180297307670116, 0.012432812713086605, 0.017540937289595604, 0.026261869817972183, 0.014483561739325523, 0.0326976552605629, 0.017542103305459023, 0.041179537773132324, 0.01291476096957922, 0.01556483656167984, 0.01423549558967352, 0.16990500688552856, 0.06435941159725189, 0.049471884965896606, 0.08610688149929047, 0.13253027200698853], [0.023164696991443634, 0.008519203402101994, 0.18138016760349274, 0.034773021936416626, 0.07806610316038132, 0.02594495192170143, 0.03261231258511543, 0.017902975901961327, 0.02493482455611229, 0.01684747263789177, 0.012821970507502556, 0.003084822790697217, 0.007707576267421246, 0.010458819568157196, 0.021292729303240776, 0.030206793919205666, 0.041624922305345535, 0.04480567201972008, 0.05543454363942146, 0.0703951045870781, 0.07819203287363052, 0.05205778032541275, 0.06554044038057327, 0.062231115996837616], [0.015997543931007385, 0.0013711476931348443, 0.7443658709526062, 0.02649604342877865, 0.012307984754443169, 0.013265649788081646, 0.052403002977371216, 0.0034848202485591173, 0.015692614018917084, 0.0034236188512295485, 0.0017386636463925242, 0.0002728183171711862, 0.0005067125312052667, 0.00021034492237959057, 0.0016202620463445783, 0.0037255329079926014, 0.0018106505740433931, 0.0151091692969203, 0.05881823971867561, 0.005832751281559467, 0.011239428073167801, 0.003211386501789093, 0.0028060891199856997, 0.00428968807682395], [0.03245095908641815, 0.011128406040370464, 0.3251183032989502, 0.25475436449050903, 0.016407795250415802, 0.042323485016822815, 0.012446372769773006, 0.007106063421815634, 0.0037057616282254457, 
0.001935117645189166, 0.0027509452775120735, 0.004254752304404974, 0.001477905549108982, 0.0004851807316299528, 0.0012561854673549533, 0.004661972634494305, 0.0012365768197923899, 0.016757052391767502, 0.026556221768260002, 0.054884299635887146, 0.06381893903017044, 0.04818882420659065, 0.02004314586520195, 0.04625137522816658], [0.012549638748168945, 0.00692335981875658, 0.2696229815483093, 0.1529698669910431, 0.057652220129966736, 0.16914938390254974, 0.045162174850702286, 0.038181088864803314, 0.007146203890442848, 0.0017288887174800038, 0.004298639018088579, 0.0021164705976843834, 0.0008997126715257764, 0.0004300149448681623, 0.0007887822575867176, 0.000825126888230443, 0.00038040068466216326, 0.006746354047209024, 0.005283207166939974, 0.024498289451003075, 0.024251066148281097, 0.025020912289619446, 0.06327081471681595, 0.08010432124137878], [0.013269652612507343, 0.007761416491121054, 0.08000171184539795, 0.11129080504179001, 0.027469798922538757, 0.36952582001686096, 0.08368133753538132, 0.01627935655415058, 0.02079853229224682, 0.0020806354004889727, 0.005617233458906412, 0.001633756677620113, 0.0026293445844203234, 0.0025615484919399023, 0.009140031412243843, 0.0013320676516741514, 0.00031982839573174715, 0.00258832098916173, 0.001697836327366531, 0.004041868727654219, 0.03964385762810707, 0.01528975460678339, 0.11473940312862396, 0.06660609692335129], [0.004397053271532059, 0.004627837799489498, 0.016974985599517822, 0.006610050331801176, 0.008537419140338898, 0.4343659281730652, 0.17115764319896698, 0.25376033782958984, 0.07156214118003845, 0.0018630133708938956, 0.0009757563238963485, 0.0005823065876029432, 0.0004854793369304389, 0.00115415477193892, 0.0043209390714764595, 0.0002670914400368929, 9.29937741602771e-05, 0.00034982673241756856, 3.781902705668472e-05, 5.487998714670539e-05, 0.00021696495241485536, 0.00037815459654666483, 0.004413580987602472, 0.01281359326094389], [0.009949375875294209, 0.007053017616271973, 0.005114790517836809, 
0.003481317777186632, 0.003863723250105977, 0.03196093067526817, 0.030876627191901207, 0.7628135085105896, 0.05908510461449623, 0.03329070657491684, 0.0025161802768707275, 0.004703994374722242, 0.004679253790527582, 0.016603728756308556, 0.00573675986379385, 0.002898696344345808, 0.0008287169621326029, 0.0007232907810248435, 0.0001199037506012246, 0.0009297216311097145, 8.399530634051189e-05, 0.0006843364099040627, 0.0014043526025488973, 0.010597987100481987], [0.0719311311841011, 0.03876572847366333, 0.010135271586477757, 0.012454882264137268, 0.02611171454191208, 0.05299904942512512, 0.22590932250022888, 0.14415931701660156, 0.19626742601394653, 0.10294746607542038, 0.009660156443715096, 0.016951967030763626, 0.012574768625199795, 0.02870224043726921, 0.005084797274321318, 0.016315966844558716, 0.009546696208417416, 0.004802846349775791, 0.007640021853148937, 0.00116172363050282, 0.0004665028827730566, 0.0005875984788872302, 0.0007158793159760535, 0.004107439890503883], [0.03171377629041672, 0.00935867615044117, 0.001691819867119193, 0.001883804565295577, 0.005426645278930664, 0.0030791484750807285, 0.024195773527026176, 0.09015525132417679, 0.17861410975456238, 0.42034706473350525, 0.04733557626605034, 0.030965493991971016, 0.04622761532664299, 0.05902708321809769, 0.005687203258275986, 0.009709280915558338, 0.013205230236053467, 0.007705580443143845, 0.007259812206029892, 0.0048631057143211365, 0.000268049567239359, 0.0002779899805318564, 0.00030662561766803265, 0.0006952404510229826], [0.007634544279426336, 0.0044856867752969265, 0.005385902244597673, 0.0008686049259267747, 0.00570023013278842, 0.0010336657287552953, 0.011662452481687069, 0.006957307457923889, 0.08925680071115494, 0.19836533069610596, 0.47074779868125916, 0.07021001726388931, 0.023085685446858406, 0.002007837174460292, 0.007654709741473198, 0.0005231052055023611, 0.01340576820075512, 0.016730912029743195, 0.05766928941011429, 0.004640496335923672, 0.0012019411660730839, 0.00019429487292654812, 
0.00047811560216359794, 9.947916259989142e-05], [0.0021886725444346666, 0.0016775853000581264, 0.00024395955551881343, 0.00030887385946698487, 0.0014788672560825944, 0.00021076659322716296, 0.0012960511958226562, 0.0012863223673775792, 0.005089669954031706, 0.04475417360663414, 0.04501942917704582, 0.4489365816116333, 0.3143833875656128, 0.11498915404081345, 0.002134887268766761, 0.00022450958203990012, 0.0005043946439400315, 0.0017813221784308553, 0.0036320865619927645, 0.007183015812188387, 0.001956729916855693, 0.0006613909499719739, 2.688013410079293e-05, 3.137341627734713e-05], [0.005769871175289154, 0.016254868358373642, 0.0001464606903027743, 0.0011113060172647238, 0.0009997963206842542, 0.000515830353833735, 0.0015612897695973516, 0.001018636510707438, 0.0008798455237410963, 0.0023514381609857082, 0.02192680351436138, 0.12253491580486298, 0.2923191487789154, 0.4392300546169281, 0.0621761791408062, 0.007194628939032555, 0.0018878206610679626, 0.0008169560460373759, 0.005669665988534689, 0.00596061022952199, 0.005086214747279882, 0.0019234479404985905, 0.0020688946824520826, 0.0005953384097665548], [0.0006620009080506861, 0.00106589135248214, 6.11620198469609e-05, 0.00012009525380562991, 9.925595804816112e-05, 0.0001867699174908921, 0.00012558753951452672, 0.00012226215039845556, 0.0001714541285764426, 0.0004932364681735635, 0.002523351926356554, 0.0026608379557728767, 0.03766229748725891, 0.22446659207344055, 0.6998604536056519, 0.014453066512942314, 0.0016135798068717122, 0.0009610268753021955, 0.0005453744670376182, 0.0008889143355190754, 0.0021710789296776056, 0.0019238811219111085, 0.006157858297228813, 0.0010039182379841805], [0.008292334154248238, 0.002657782519236207, 0.0008214289555326104, 0.0008237494621425867, 0.0002699033939279616, 0.0005639125010930002, 0.005322882905602455, 0.0003940909809898585, 0.00130353937856853, 0.00128037272952497, 0.0010518768103793263, 0.0004913764423690736, 0.018992459401488304, 0.04934530705213547, 0.6340115666389465, 
0.23604939877986908, 0.009622432291507721, 0.0027749217115342617, 0.014993748627603054, 0.0005094807129353285, 0.0017564542358741164, 0.001509986468590796, 0.004543245770037174, 0.0026177517138421535], [0.005996192805469036, 0.003978345077484846, 0.0003681066446006298, 0.0010042747016996145, 4.8714839067542925e-05, 0.00011705401266226545, 0.00013203025446273386, 0.00034261069959029555, 0.0002359792561037466, 0.0031898592133075, 0.0005505916196852922, 0.0016801235033199191, 0.0036476633977144957, 0.0400373674929142, 0.26538583636283875, 0.6276670098304749, 0.011801017448306084, 0.005785416811704636, 0.0045173619873821735, 0.0018455768004059792, 0.00051171361701563, 0.004918586928397417, 0.0032952430192381144, 0.012943360954523087], [0.0023347048554569483, 0.0016309043858200312, 0.0004963057581335306, 0.0014969680923968554, 6.62104575894773e-05, 7.619890675414354e-05, 7.500060019083321e-05, 0.00013899295299779624, 0.00016220318502746522, 0.001701689907349646, 0.001774500822648406, 0.0007827843655832112, 0.0011766731040552258, 0.006408470682799816, 0.15778854489326477, 0.7011811137199402, 0.03157217428088188, 0.03314634785056114, 0.016806919127702713, 0.004525630734860897, 0.0015525657217949629, 0.004445030819624662, 0.017846208065748215, 0.012813952751457691], [0.0050726840272545815, 0.0015528578078374267, 0.002668096451088786, 0.0022639944218099117, 0.00022518141486216336, 0.0001553743495605886, 7.606286817463115e-05, 3.040972660528496e-05, 0.0012063919566571712, 0.009250150062143803, 0.027076439931988716, 0.0016114244936034083, 0.0011081276461482048, 0.0015352407936006784, 0.28111907839775085, 0.10259189456701279, 0.09809407591819763, 0.2623680531978607, 0.11988680064678192, 0.01004042848944664, 0.021326174959540367, 0.0065014963038265705, 0.03942300006747246, 0.004816514905542135], [0.004425828345119953, 0.0017011346062645316, 0.002250120509415865, 0.0013986715348437428, 0.00041963986586779356, 8.469136082567275e-05, 4.296341285225935e-05, 3.087987715844065e-05, 
0.0005806135013699532, 0.0015041372971609235, 0.031196648254990578, 0.0013742512091994286, 0.0013465241063386202, 0.00054370571160689, 0.10723866522312164, 0.04347708076238632, 0.24150219559669495, 0.19688928127288818, 0.19479969143867493, 0.026731880381703377, 0.08187410980463028, 0.006517790723592043, 0.05226689204573631, 0.0018026070902124047], [0.003970554564148188, 0.0018391332123428583, 0.0017953274073079228, 0.003675727639347315, 0.00044982729014009237, 4.797224028152414e-05, 3.134966755169444e-05, 6.92599787726067e-05, 5.029428211855702e-05, 0.0008072088239714503, 0.016000716015696526, 0.007275401148945093, 0.011088725179433823, 0.0037487272638827562, 0.009672119282186031, 0.011284369975328445, 0.018464617431163788, 0.02512519061565399, 0.10330337285995483, 0.5959445834159851, 0.13696523010730743, 0.026358919218182564, 0.02065066248178482, 0.001380657427944243], [0.007578122429549694, 0.0031155471224337816, 0.001100136199966073, 0.009857721626758575, 0.0035161643754690886, 0.00045567337656393647, 0.0008319832268171012, 3.3691045246087015e-05, 2.3132650312618352e-05, 5.307583705871366e-05, 0.0008095527300611138, 0.0011710815597325563, 0.00839213002473116, 0.0035806894302368164, 0.0011868266155943274, 0.005548663437366486, 0.003930707927793264, 0.003244546242058277, 0.1736914962530136, 0.11948510259389877, 0.5536173582077026, 0.06001950800418854, 0.032873865216970444, 0.005883250385522842], [0.003249815898016095, 0.0008964001899585128, 0.0002865942951757461, 0.002135201822966337, 0.000990850618109107, 0.00019978173077106476, 0.00019378509023226798, 5.3024145017843693e-05, 5.067627625976456e-06, 1.0927457879006397e-05, 0.00019605066336225718, 9.130741818808019e-05, 0.003548272652551532, 0.003934361506253481, 0.001145642250776291, 0.001483946107327938, 0.0008070656913332641, 0.0007745824404992163, 0.01760844513773918, 0.17727909982204437, 0.36893579363822937, 0.12439661473035812, 0.26488247513771057, 0.026895003393292427]], [[0.08878692984580994, 
0.07610277831554413, 0.058851927518844604, 0.06332860141992569, 0.04851418361067772, 0.1481909453868866, 0.13637831807136536, 0.028708748519420624, 0.059126175940036774, 0.06508942693471909, 0.03217645734548569, 0.018383387476205826, 0.03701462969183922, 0.01782081462442875, 0.005769457668066025, 0.007033308502286673, 0.005266368389129639, 0.018247090280056, 0.01948297768831253, 0.005141974426805973, 0.013491659425199032, 0.027596522122621536, 0.010682196356356144, 0.008815166540443897], [0.13937810063362122, 0.08965142071247101, 0.0392070971429348, 0.07352638244628906, 0.015558654442429543, 0.11346258223056793, 0.057156164199113846, 0.03788391128182411, 0.045680053532123566, 0.0366324745118618, 0.03300571069121361, 0.061537280678749084, 0.054960984736680984, 0.037001028656959534, 0.015587667934596539, 0.027507422491908073, 0.007828430272638798, 0.032470233738422394, 0.02302934229373932, 0.011785013601183891, 0.010027339681982994, 0.0089862160384655, 0.007519181817770004, 0.020617280155420303], [0.06602973490953445, 0.038143791258335114, 0.026364766061306, 0.06492812186479568, 0.013089247047901154, 0.23084837198257446, 0.049598291516304016, 0.12459281086921692, 0.07715670019388199, 0.05239570885896683, 0.011165195144712925, 0.04206352308392525, 0.033608511090278625, 0.05270214006304741, 0.0018095189006999135, 0.004422684665769339, 0.0004842648340854794, 0.004566999152302742, 0.0024718584027141333, 0.01304751355201006, 0.00838028360158205, 0.013586796820163727, 0.010679141618311405, 0.057863932102918625], [0.061777468770742416, 0.03474647179245949, 0.0023806917015463114, 0.034647248685359955, 0.006735939532518387, 0.6745942831039429, 0.04012516140937805, 0.024341454729437828, 0.014435016550123692, 0.022363824769854546, 0.0030773833859711885, 0.007948040962219238, 0.03218739852309227, 0.009587208740413189, 0.00027048977790400386, 0.0029503460973501205, 0.0002878825762309134, 0.005804389715194702, 0.0017471638275310397, 0.004041558131575584, 0.002370490925386548, 
0.003996651619672775, 0.0019686350133270025, 0.007614810485392809], [0.01653911918401718, 0.0074277338571846485, 0.027923915535211563, 0.04322699457406998, 0.012162303552031517, 0.10047155618667603, 0.15358413755893707, 0.38926053047180176, 0.041551679372787476, 0.0463452972471714, 0.06268614530563354, 0.03728532791137695, 0.01348738931119442, 0.006197828333824873, 0.005938894115388393, 0.008915391750633717, 0.0014990707859396935, 0.002579670399427414, 0.004282182082533836, 0.005419525783509016, 0.0010635398793965578, 0.0023324734065681696, 0.005149028263986111, 0.004670219495892525], [0.01568109355866909, 0.005882841534912586, 0.01104552298784256, 0.03859782591462135, 0.00910852663218975, 0.11997678130865097, 0.1701788455247879, 0.48289862275123596, 0.014428222551941872, 0.09688123315572739, 0.002192385960370302, 0.015320664271712303, 0.002407890046015382, 0.0011806883849203587, 0.000384659186238423, 0.0025570683646947145, 0.0002961005375254899, 0.0017446905840188265, 0.000863662688061595, 0.0008552009821869433, 5.2074246923439205e-05, 0.001400995533913374, 0.00014899394591338933, 0.005915373098105192], [0.017217425629496574, 0.004645811393857002, 0.010450170375406742, 0.03852593153715134, 0.011261722072958946, 0.06322058290243149, 0.05136782303452492, 0.26791098713874817, 0.2883110046386719, 0.17712931334972382, 0.02003994956612587, 0.026442021131515503, 0.007635296322405338, 0.002444778336212039, 0.0007121339440345764, 0.0055120293982326984, 0.0005428792792372406, 0.001982675865292549, 0.00034275167854502797, 0.00071391009259969, 0.00017111330816987902, 0.0005217660800553858, 0.0004911470459774137, 0.0024068045895546675], [0.024601584300398827, 0.00965956225991249, 0.006337359081953764, 0.03456303849816322, 0.007160828448832035, 0.05131218582391739, 0.014365240931510925, 0.217637836933136, 0.14164987206459045, 0.29014110565185547, 0.03195953369140625, 0.10742470622062683, 0.012008817866444588, 0.012686088681221008, 0.0011787917464971542, 0.010120407678186893, 
0.0007323689642362297, 0.0114842364564538, 0.0008748255204409361, 0.010078785941004753, 0.0003903746255673468, 0.0006425637402571738, 0.00039710302371531725, 0.002592813689261675], [0.015948962420225143, 0.006763281300663948, 0.010679344646632671, 0.0011053768685087562, 0.0005748890107497573, 0.0023013681638985872, 0.00645288173109293, 0.005558884236961603, 0.08538392931222916, 0.006789645180106163, 0.6536943316459656, 0.11042706668376923, 0.056804876774549484, 0.010519679635763168, 0.011634604074060917, 0.0004104567342437804, 0.0008358569466508925, 0.0020745040383189917, 0.007081199437379837, 0.0008838066132739186, 0.003002246841788292, 5.654274355038069e-05, 0.0009688063291832805, 4.752865788759664e-05], [0.00020056984794791788, 0.00010392563126515597, 0.00011761108908103779, 0.0009032402304001153, 1.410365598530916e-06, 0.00022843752230983227, 7.191530130512547e-06, 0.0030944831669330597, 0.0002403860562480986, 0.0007659259135834873, 0.0008068412425927818, 0.9487196803092957, 0.0013198493979871273, 0.03751242533326149, 0.00042490530177019536, 0.0017901280662044883, 1.4598307416235912e-06, 0.000423591147409752, 6.994488558120793e-06, 0.002344063948839903, 3.224200918339193e-05, 2.842098183464259e-05, 8.284374416689388e-06, 0.0009180090273730457], [0.022393910214304924, 0.012416575103998184, 0.005456477403640747, 0.000428900180850178, 0.0016214889474213123, 0.0009818450780585408, 0.004835307598114014, 0.0006997043383307755, 0.025759601965546608, 0.0036712270230054855, 0.08040249347686768, 0.05169054493308067, 0.4809640347957611, 0.17595918476581573, 0.07188340276479721, 0.0014360116329044104, 0.00615772744640708, 0.001303258934058249, 0.015152733772993088, 0.002044485881924629, 0.030929885804653168, 0.0008985213353298604, 0.0026405698154121637, 0.00027216042508371174], [9.474289254285395e-05, 0.00012050831719534472, 2.807560667861253e-05, 0.0002294863952556625, 4.452359917195281e-06, 0.00027829466853290796, 2.0695051716757007e-06, 9.826620225794613e-05, 
0.00010136684431927279, 0.000985468621365726, 0.00019306234025862068, 0.019225213676691055, 0.015413191169500351, 0.9566982984542847, 0.0011138715781271458, 0.0032130724284797907, 4.9222539928450715e-06, 0.000220990608795546, 2.616254278109409e-06, 0.0010091480799019337, 0.0002278551837662235, 0.0004424451326485723, 5.567252082983032e-05, 0.000237049869610928], [0.001103463931940496, 0.0024551134556531906, 0.005255029536783695, 0.0020456979982554913, 0.0003514211275614798, 0.0010752440430223942, 0.0005902306293137372, 0.0029003059025853872, 0.004228347912430763, 0.00342663936316967, 0.009574984200298786, 0.02389085479080677, 0.11794218420982361, 0.46948522329330444, 0.28812721371650696, 0.02977067604660988, 0.0030800001695752144, 0.0009094687411561608, 0.000660507008433342, 0.001959641696885228, 0.008363629691302776, 0.006687905173748732, 0.011295679956674576, 0.004820647183805704], [0.00012707459973171353, 0.0001673858059803024, 0.00044467984116636217, 0.0008950784686021507, 5.68018585909158e-05, 7.614982314407825e-05, 8.806881851342041e-06, 0.0018798249075189233, 0.0004600298998411745, 0.0032896632328629494, 0.0015979782911017537, 0.027277300134301186, 0.0037940347101539373, 0.5434854626655579, 0.1041409820318222, 0.2503272294998169, 0.003133951686322689, 0.0035505921114236116, 0.00012616136518772691, 0.023967264220118523, 0.0017382372170686722, 0.004023328889161348, 0.0049718995578587055, 0.020460220053792], [0.00265827146358788, 0.002497543813660741, 0.0033021681010723114, 0.002908579306676984, 0.0005390410078689456, 0.0005282476777210832, 0.0004258949193172157, 0.0034810558427125216, 0.00882177334278822, 0.00407829275354743, 0.050084032118320465, 0.014998279511928558, 0.02579370141029358, 0.029600264504551888, 0.1955108493566513, 0.1750033050775528, 0.08552516996860504, 0.052911024540662766, 0.04754249006509781, 0.08054438978433609, 0.05804411694407463, 0.008428558707237244, 0.12131842970848083, 0.025454459711909294], [0.005425731185823679, 
0.0037465046625584364, 0.0009706166456453502, 0.004162498749792576, 0.000799874949734658, 0.005949366372078657, 0.0003929936792701483, 0.0007809916278347373, 0.0006775757065042853, 0.0012252123560756445, 0.00232327776029706, 0.003660851391032338, 0.006658901926130056, 0.0028302425052970648, 0.009737402200698853, 0.0380893275141716, 0.02351650595664978, 0.4199078679084778, 0.11402511596679688, 0.29999640583992004, 0.029140794649720192, 0.007021394092589617, 0.006256614811718464, 0.012703821994364262], [0.003191739786416292, 0.002230945974588394, 0.0020808205008506775, 0.003374251304194331, 0.002210293896496296, 0.0015570666873827577, 0.0006902394234202802, 0.0013649601023644209, 0.0018317148787900805, 0.0006305762217380106, 0.0427980050444603, 0.0009100540191866457, 0.006151808425784111, 0.00019305119349155575, 0.012587510980665684, 0.013640238903462887, 0.07459545135498047, 0.07401203364133835, 0.2753751575946808, 0.3381909430027008, 0.10107265412807465, 0.0035111031029373407, 0.037135567516088486, 0.0006638256018050015], [0.013921056874096394, 0.011321182362735271, 0.0034801331348717213, 0.0215341467410326, 0.003843765240162611, 0.009757226333022118, 0.004810738377273083, 0.005873178597539663, 0.0004400731122586876, 0.00356457382440567, 0.0015924072358757257, 0.005797926802188158, 0.003251266200095415, 0.001927941688336432, 0.0008638473809696734, 0.00806199386715889, 0.0022910817060619593, 0.028769591823220253, 0.06897006928920746, 0.6607210040092468, 0.05162888392806053, 0.06641032546758652, 0.005830179899930954, 0.015337400138378143], [0.008896348997950554, 0.008800620213150978, 0.005795782897621393, 0.028737086802721024, 0.010172858834266663, 0.006496467627584934, 0.003445243928581476, 0.004025659523904324, 0.00640113465487957, 0.0021838475950062275, 0.0025532168801873922, 0.0012680309591814876, 0.006073427386581898, 0.0012472213711589575, 0.0036996083799749613, 0.01756151206791401, 0.01305407751351595, 0.013705173507332802, 0.03099282644689083, 
0.1809815764427185, 0.43082618713378906, 0.10261315107345581, 0.06753288954496384, 0.042936187237501144], [0.0028810661751776934, 0.002918061800301075, 0.0015815917868167162, 0.040644001215696335, 0.002688000909984112, 0.005862659774720669, 0.00088456179946661, 0.020549587905406952, 0.0007866108790040016, 0.002829732606187463, 0.0002494120562914759, 0.004038470331579447, 0.0011789867421612144, 0.005564851686358452, 0.0016818898729979992, 0.047269921749830246, 0.0014881688402965665, 0.006367514841258526, 0.0015036029508337379, 0.27654504776000977, 0.027954334393143654, 0.11198333650827408, 0.02109355293214321, 0.4114550054073334], [0.002325055655092001, 0.0038559988606721163, 0.003788273548707366, 0.004220214206725359, 0.0018478977726772428, 0.0009216173202730715, 0.0005717056919820607, 0.0015721487579867244, 0.003221297636628151, 0.0002645330678205937, 0.002088115783408284, 0.0003280949604231864, 0.002392555121332407, 0.0017873686738312244, 0.008408932946622372, 0.0045018126256763935, 0.007696605287492275, 0.0014748231042176485, 0.0048148781061172485, 0.01959996111690998, 0.36041319370269775, 0.03455701842904091, 0.4322754144668579, 0.09707251191139221], [0.0001761027378961444, 0.0002142872690455988, 0.0002828611177392304, 0.006186600774526596, 3.0097644412308e-05, 0.0008069606265053153, 2.3971804694156162e-05, 0.011190207675099373, 0.00024289365683216602, 0.0007860944606363773, 3.552967245923355e-05, 0.009528339840471745, 9.366661834064871e-05, 0.006913818884640932, 0.00033341487869620323, 0.00859801284968853, 2.0906745703541674e-05, 0.0004730039509013295, 1.0065444257634226e-05, 0.013995764777064323, 0.0007057931507006288, 0.003996667452156544, 0.0019211308099329472, 0.9334337711334229], [0.024433700367808342, 0.014868955127894878, 0.04194646328687668, 0.0027006000746041536, 0.040756408125162125, 0.0019211630569770932, 0.021426957100629807, 0.00943207647651434, 0.20052167773246765, 0.008350955322384834, 0.03822394087910652, 0.002308944473043084, 
0.0096101900562644, 0.004706921521574259, 0.03561553731560707, 0.00310120009817183, 0.14531700313091278, 0.003516050986945629, 0.036297768354415894, 0.0080997534096241, 0.154599130153656, 0.006037478800863028, 0.11964689940214157, 0.06656023114919662], [0.00553830387070775, 0.0025866138748824596, 0.004209347069263458, 0.04613151401281357, 0.002416615141555667, 0.030030924826860428, 0.000267207418801263, 0.12154247611761093, 0.04773388430476189, 0.11048003286123276, 0.004585532005876303, 0.026528945192694664, 0.0017363326624035835, 0.03901282325387001, 0.000785917742177844, 0.033784035593271255, 0.0005909335450269282, 0.021257301792502403, 9.257539932150394e-05, 0.14764787256717682, 0.006680501624941826, 0.009901667013764381, 0.010227666236460209, 0.3262309432029724]], [[0.007699246052652597, 0.009071916341781616, 0.02662002108991146, 0.01013907603919506, 0.018596382811665535, 0.04647544398903847, 0.03868357092142105, 0.022899599745869637, 0.07231646031141281, 0.4619995057582855, 0.02553735487163067, 0.11433771252632141, 0.011098656803369522, 0.038783807307481766, 0.015332769602537155, 0.007571618538349867, 0.005531965289264917, 0.011888613924384117, 0.003034157445654273, 0.002843276597559452, 0.004185025580227375, 0.026676280423998833, 0.002612137235701084, 0.01606547087430954], [0.017266560345888138, 0.019123170524835587, 0.048003293573856354, 0.020700858905911446, 0.043374236673116684, 0.07154321670532227, 0.022888142615556717, 0.040335334837436676, 0.023956555873155594, 0.21769945323467255, 0.02816055528819561, 0.04683871939778328, 0.00607340270653367, 0.02544417604804039, 0.02031255140900612, 0.027124416083097458, 0.0332835428416729, 0.05691072717308998, 0.013019458390772343, 0.029086008667945862, 0.010597571730613708, 0.07615053653717041, 0.01477083656936884, 0.08733662217855453], [0.020360002294182777, 0.04331127181649208, 0.052673038095235825, 0.05381306633353233, 0.1291247010231018, 0.14401064813137054, 0.025214431807398796, 0.14214368164539337, 
0.01784200593829155, 0.012959666550159454, 0.12949888408184052, 0.015139563009142876, 0.01775880716741085, 0.0073476266115903854, 0.0037799749989062548, 0.0011833187891170382, 0.0027846985030919313, 0.0076736705377697945, 0.00363140064291656, 0.013878144323825836, 0.006263560149818659, 0.004129444248974323, 0.12089011818170547, 0.024588271975517273], [0.004370485432446003, 0.006850299891084433, 0.053236812353134155, 0.027610888704657555, 0.2631996273994446, 0.06294828653335571, 0.19511055946350098, 0.009025073610246181, 0.012719436548650265, 0.05324118584394455, 0.02239859290421009, 0.004203413613140583, 0.0331367626786232, 0.0017622129525989294, 0.0023480202071368694, 0.0005390365840867162, 0.002416180446743965, 0.0015485403127968311, 0.009740966372191906, 0.0020519529934972525, 0.00964556448161602, 0.12276039272546768, 0.05884227529168129, 0.04029335826635361], [0.011806495487689972, 0.014937659725546837, 0.11055830121040344, 0.016684355214238167, 0.036191340535879135, 0.28148797154426575, 0.029579635709524155, 0.09063669294118881, 0.08788487315177917, 0.06414412707090378, 0.043660201132297516, 0.012764355167746544, 0.0013382176402956247, 0.0025343666784465313, 0.007957681082189083, 0.00048630748642608523, 0.006366891786456108, 0.021078212186694145, 0.002400654135271907, 0.008099525235593319, 0.01572439633309841, 0.031977616250514984, 0.054198380559682846, 0.0475018136203289], [0.007804238237440586, 0.008333188481628895, 0.021742796525359154, 0.023157477378845215, 0.02754487842321396, 0.06572926789522171, 0.4018305838108063, 0.05008791387081146, 0.2717149257659912, 0.027062056586146355, 0.020218368619680405, 0.008882878348231316, 0.00875394232571125, 0.0025719006080180407, 0.00451510027050972, 0.0004435619048308581, 0.0012310851598158479, 0.000564787071198225, 0.001019465853460133, 0.00027934706304222345, 0.007268332410603762, 0.007191479206085205, 0.013414252549409866, 0.018638189882040024], [0.00945345964282751, 0.011971613392233849, 0.06737032532691956, 
0.03228021040558815, 0.0033517710398882627, 0.12113914638757706, 0.02031639777123928, 0.46334442496299744, 0.10101694613695145, 0.04278915748000145, 0.055757999420166016, 0.03800942376255989, 0.0005602744640782475, 0.003298933384940028, 0.0028869726229459047, 0.0011645054910331964, 0.00023670349037274718, 0.00417741946876049, 0.00018601611373014748, 0.002148842439055443, 0.000542837253306061, 0.0008465162245556712, 0.0045044030994176865, 0.01264564972370863], [0.0070052905939519405, 0.002991555957123637, 0.007805574219673872, 0.009654812514781952, 0.009762333706021309, 0.008820727467536926, 0.09214138239622116, 0.011659289710223675, 0.5485008955001831, 0.2529311180114746, 0.010083158500492573, 0.004467747174203396, 0.004568254109472036, 0.0005181765300221741, 0.0016973107121884823, 0.0036021186970174313, 0.007903038524091244, 0.0021758980583399534, 0.0032735182903707027, 9.960238094208762e-05, 0.0006464698235504329, 0.0018448897171765566, 0.0011047602165490389, 0.006742060650140047], [0.010201402008533478, 0.009083963930606842, 0.006243064068257809, 0.00938315037637949, 0.009449861012399197, 0.057855140417814255, 0.011589162051677704, 0.5577582716941833, 0.08766045421361923, 0.04379614070057869, 0.04363153129816055, 0.12863220274448395, 0.0006337680970318615, 0.012181092984974384, 0.0005425353883765638, 0.0008102395804598927, 0.0005387031123973429, 0.003070499049499631, 0.00010220581316389143, 0.0015214974991977215, 0.00016338579007424414, 7.041088974801823e-05, 0.0007393794367089868, 0.00434192456305027], [0.030071863904595375, 0.03504890203475952, 0.022690970450639725, 0.014264550991356373, 0.005275232717394829, 0.014416753314435482, 0.09067761898040771, 0.015982696786522865, 0.036876972764730453, 0.007608881685882807, 0.525459885597229, 0.027857091277837753, 0.04582194238901138, 0.004725358448922634, 0.009708588942885399, 0.002228983910754323, 0.006118521559983492, 0.009865384548902512, 0.07339318841695786, 0.00504663260653615, 0.005265556741505861, 
0.0003304884012322873, 0.010998466052114964, 0.00026549093308858573], [0.016560176387429237, 0.022361358627676964, 0.004006010014563799, 0.02049054391682148, 0.0013881674967706203, 0.025039400905370712, 0.0003128210664726794, 0.06885021179914474, 0.0013440840411931276, 0.006811057683080435, 0.01653767190873623, 0.5468015670776367, 0.0025110947899520397, 0.1752999722957611, 0.002040134510025382, 0.019322112202644348, 0.00024349603336304426, 0.022520406171679497, 0.00024065416073426604, 0.04428131878376007, 0.0003335609508212656, 0.00017667895008344203, 0.0004748372593894601, 0.002052581636235118], [0.0013135538902133703, 0.001315771834924817, 0.00040577564504928887, 0.0015121110482141376, 0.0010268333135172725, 8.772493310971186e-05, 0.0020089547615498304, 4.2509695049375296e-05, 0.0005705132498405874, 0.0010178647935390472, 0.005356093402951956, 0.0022324612364172935, 0.9274458885192871, 0.016028525307774544, 0.010158753953874111, 0.005747731775045395, 0.0020327954553067684, 9.237850463250652e-05, 0.01451788004487753, 0.00031840556766837835, 0.0031581383664160967, 0.0019484664080664515, 0.001617531175725162, 4.322271706769243e-05], [0.0023525510914623737, 0.0042591579258441925, 0.0006134640425443649, 0.0007723754970356822, 0.00022707527386955917, 0.0014427906135097146, 7.57196539780125e-05, 0.0006414182134903967, 1.3863018466508947e-05, 0.001234040129929781, 6.489654333563522e-05, 0.019836939871311188, 0.00048153093666769564, 0.8843311667442322, 0.00647324975579977, 0.0469183474779129, 0.0002716589660849422, 0.002511984435841441, 0.0002050708862952888, 0.010112977586686611, 0.0002649608941283077, 0.011546426452696323, 0.0001815678842831403, 0.005166829563677311], [0.0003601062635425478, 0.00046108945389278233, 0.000740146089810878, 0.0002442820114083588, 0.0002522426366340369, 5.6754517572699115e-05, 0.0011698377784341574, 1.678438093222212e-05, 0.0003278182412032038, 0.0009755255887284875, 0.001132065081037581, 6.827645120210946e-05, 0.07705118507146835, 
0.00803819578140974, 0.750119149684906, 0.08310116082429886, 0.026534637436270714, 0.0003422359877731651, 0.01992705836892128, 0.00010219242540188134, 0.0028482810594141483, 0.0174991674721241, 0.008335085585713387, 0.00029674306279048324], [0.0023536570370197296, 0.0031618166249245405, 0.0009189993725158274, 0.0004621722036972642, 0.0004019555635750294, 0.00030078133568167686, 0.00025898710009641945, 0.0005983037408441305, 3.568453394109383e-05, 0.002284437417984009, 0.000126005252241157, 0.0010977044003084302, 0.0009801742853596807, 0.07540037482976913, 0.03790485858917236, 0.7685033082962036, 0.03409759700298309, 0.015192295424640179, 0.013134175911545753, 0.01325372327119112, 0.00025373659445904195, 0.013335189782083035, 0.0014378344640135765, 0.014506159350275993], [0.0008533812942914665, 0.001223221537657082, 0.008426403626799583, 0.0006176985334604979, 0.0022269045002758503, 0.0002876155776903033, 0.0051305158995091915, 4.8296325985575095e-05, 0.0006623010849580169, 0.003843009239062667, 0.006996531505137682, 6.454718095483258e-05, 0.040795642882585526, 0.000732356624212116, 0.1411864161491394, 0.023702550679445267, 0.19209863245487213, 0.012056293897330761, 0.4862177073955536, 0.0022569934371858835, 0.0072298659943044186, 0.02967796102166176, 0.03210042417049408, 0.0015645526582375169], [0.0020060893148183823, 0.0034629832953214645, 0.02342543937265873, 0.0010458007454872131, 0.0014163122978061438, 0.0015179278561845422, 0.00023325755319092423, 0.00038387352833524346, 0.0004944648244418204, 0.00919767189770937, 0.0034830032382160425, 0.0017646498745307326, 0.000268862146185711, 0.001804493134841323, 0.027259204536676407, 0.0172983780503273, 0.1197015643119812, 0.5357766151428223, 0.0764574259519577, 0.10668555647134781, 0.010354568250477314, 0.037607964128255844, 0.006680443417280912, 0.011673547327518463], [0.0061464449390769005, 0.00730367936193943, 0.010166744701564312, 0.0038158250972628593, 0.01028510369360447, 0.0012524948688223958, 
0.006515732500702143, 0.00012643911759369075, 0.006709706038236618, 0.004301864188164473, 0.03784283250570297, 0.0012520075542852283, 0.06608155369758606, 0.000414891546824947, 0.0159525815397501, 0.001070622238330543, 0.08901768177747726, 0.019809439778327942, 0.475310742855072, 0.011501714587211609, 0.17278414964675903, 0.025415394455194473, 0.026093751192092896, 0.000828535296022892], [0.0032074928749352694, 0.013125522993505001, 0.06452742964029312, 0.009708443656563759, 0.004303966648876667, 0.00808185525238514, 0.00037172241718508303, 0.0008900326793082058, 0.00034976517781615257, 0.0026828080881386995, 0.011934399604797363, 0.0034907555673271418, 0.0011230773525312543, 0.0018297533970326185, 0.008167730644345284, 0.0018595971632748842, 0.006276251282542944, 0.1684899926185608, 0.047027163207530975, 0.49169179797172546, 0.05800448730587959, 0.06967001408338547, 0.018072646111249924, 0.005113314371556044], [0.001335245673544705, 0.002424979815259576, 0.008403275161981583, 0.004435363691300154, 0.00940913986414671, 0.001290146610699594, 0.005750718060880899, 2.1874619051232003e-05, 0.00035342248156666756, 0.0008622051100246608, 0.0017952879425138235, 8.277579036075622e-05, 0.014079388231039047, 0.0001507794950157404, 0.003729480318725109, 0.0004298045241739601, 0.01232845988124609, 0.0051511432975530624, 0.28716471791267395, 0.011850278824567795, 0.23148511350154877, 0.36037442088127136, 0.03439046069979668, 0.0027014538645744324], [0.005569650325924158, 0.016866151243448257, 0.011138636618852615, 0.021947739645838737, 0.03165106847882271, 0.01843407191336155, 0.0026218306738883257, 0.018808338791131973, 0.00012206401879666373, 0.00015163350326474756, 0.00034921453334391117, 0.002136211609467864, 0.0006975280703045428, 0.02131580002605915, 0.0014628912322223186, 0.002766698831692338, 0.0017747774254530668, 0.003660279791802168, 0.0026596221141517162, 0.25674042105674744, 0.059358034282922745, 0.1766441911458969, 0.07414322346448898, 0.26897993683815], 
[0.008801544085144997, 0.01986278034746647, 0.015675663948059082, 0.0105460025370121, 0.008814089000225067, 0.011536319740116596, 0.026295483112335205, 0.004324935842305422, 0.0002712290734052658, 5.500005136127584e-05, 0.0007848363602533937, 7.021978672128171e-05, 0.0023814160376787186, 0.000983723090030253, 0.0053569115698337555, 0.0026607841718941927, 0.006564129143953323, 0.0037920677568763494, 0.07379290461540222, 0.04940911754965782, 0.0828692764043808, 0.11288020759820938, 0.49788591265678406, 0.05438540503382683], [0.004231716506183147, 0.007692749612033367, 0.005225365050137043, 0.010647140443325043, 0.002167649334296584, 0.013331321999430656, 0.00041546329157426953, 0.07498715817928314, 0.00014316203305497766, 0.0002305109373992309, 9.54280694713816e-05, 0.0007150436285883188, 1.0919986380031332e-05, 0.0027370834723114967, 0.0005427590222097933, 0.013077978976070881, 0.0007127383723855019, 0.01192791759967804, 0.0002234878920717165, 0.05640564486384392, 0.000538012885954231, 0.0027403784915804863, 0.009976428002119064, 0.7812238931655884], [0.0034558200277388096, 0.0033853440545499325, 0.008545942604541779, 0.006699495483189821, 0.014235646463930607, 0.0004819195019081235, 0.02945566549897194, 0.0008928699535317719, 0.0017448101425543427, 0.0009126083459705114, 0.0004720586584880948, 1.049219281412661e-05, 0.0033747325651347637, 9.535723802400753e-05, 0.0026607955805957317, 0.008844044990837574, 0.07341694831848145, 0.0009056358831003308, 0.11853407323360443, 0.003120737848803401, 0.01907976344227791, 0.09571326524019241, 0.2939288020133972, 0.3100332021713257]], [[0.005684775300323963, 0.01472481619566679, 0.06558426469564438, 0.018588688224554062, 0.03280321881175041, 0.02202576957643032, 0.03969661518931389, 0.02362506464123726, 0.16786536574363708, 0.013377484865486622, 0.12697267532348633, 0.025099724531173706, 0.051087480038404465, 0.01957419514656067, 0.09888307750225067, 0.005834072362631559, 0.02599046379327774, 0.010429673828184605, 
0.02209330163896084, 0.01287082489579916, 0.11077766865491867, 0.009644796140491962, 0.0643484815955162, 0.012417479418218136], [0.01222902350127697, 0.018053384497761726, 0.05097102373838425, 0.03692380711436272, 0.014094025827944279, 0.021511917933821678, 0.015159917064011097, 0.029870033264160156, 0.16973121464252472, 0.02303154021501541, 0.07519976049661636, 0.035366736352443695, 0.023252379149198532, 0.03518615663051605, 0.07459419220685959, 0.04369715601205826, 0.024703366681933403, 0.0373002253472805, 0.021395236253738403, 0.02432125061750412, 0.07538335025310516, 0.01464608684182167, 0.07318665832281113, 0.05019152909517288], [0.07118590176105499, 0.052682142704725266, 0.005347730126231909, 0.06637260317802429, 0.11676599085330963, 0.012474406510591507, 0.020702432841062546, 0.07414627820253372, 0.04969874396920204, 0.41245532035827637, 0.008756699971854687, 0.02407902106642723, 0.007011010777205229, 0.0014757574535906315, 0.0002047082525677979, 0.0020292263943701982, 0.005170137621462345, 0.0005403040559031069, 0.0010755527764558792, 0.001510834670625627, 0.002080292208120227, 0.037082020193338394, 0.0039031975902616978, 0.02324969321489334], [0.015340150333940983, 0.010577320121228695, 0.1290462613105774, 0.04520520195364952, 0.10002783685922623, 0.05156383290886879, 0.05860447883605957, 0.16132263839244843, 0.13205134868621826, 0.021576959639787674, 0.05240069329738617, 0.008741876110434532, 0.005033882334828377, 0.004577578045427799, 0.011993280611932278, 0.003359528025612235, 0.0029890439473092556, 0.003615192836150527, 0.01225286815315485, 0.015458209440112114, 0.013781155459582806, 0.014809413813054562, 0.09051331877708435, 0.03515804186463356], [0.051400136202573776, 0.029206350445747375, 0.03951418399810791, 0.07425066828727722, 0.019976578652858734, 0.4139920473098755, 0.06783927232027054, 0.029709069058299065, 0.030114131048321724, 0.020055988803505898, 0.019467033445835114, 0.005551246460527182, 0.004080026410520077, 0.0051758429035544395, 
0.005604386795312166, 0.0036367354914546013, 0.0019701288547366858, 0.015150584280490875, 0.00515405461192131, 0.004485820885747671, 0.017200466245412827, 0.02388738840818405, 0.08099174499511719, 0.03158609941601753], [0.0026657087728381157, 0.0025487898383289576, 0.08247027546167374, 0.02158011682331562, 0.041218921542167664, 0.030291719362139702, 0.23513314127922058, 0.04895709455013275, 0.24494917690753937, 0.016430484130978584, 0.15961995720863342, 0.0013666304294019938, 0.0059368181973695755, 0.00027214884175918996, 0.0051195938140153885, 0.00020818047050852329, 0.0005690669640898705, 0.000160439100000076, 0.0022366743069142103, 0.0003367721801623702, 0.00754655571654439, 0.0033690680284053087, 0.08426085114479065, 0.0027518663555383682], [0.03601624071598053, 0.020268229767680168, 0.05092068016529083, 0.04396930709481239, 0.015398462302982807, 0.28597792983055115, 0.03296159580349922, 0.322474867105484, 0.05893927440047264, 0.042732805013656616, 0.011411740444600582, 0.017957258969545364, 0.000480727874673903, 0.005054306238889694, 0.0015213085571303964, 0.00477127218618989, 0.000354566058376804, 0.003595333779230714, 0.0002103921287925914, 0.0012032658560201526, 0.001117102918215096, 0.002850764663890004, 0.008458949625492096, 0.031353600323200226], [0.00709577975794673, 0.005627197213470936, 0.011314788833260536, 0.003350295824930072, 0.005572971422225237, 0.005655636079609394, 0.052924856543540955, 0.040130365639925, 0.5662976503372192, 0.1844034641981125, 0.022765297442674637, 0.02231656014919281, 0.032810281962156296, 0.01104219350963831, 0.011748870834708214, 0.004310702905058861, 0.002391293877735734, 0.0003964125644415617, 0.0008104875450953841, 8.756914030527696e-05, 0.00037138329935260117, 0.0013149201404303312, 0.0014448516303673387, 0.0058163003996014595], [0.02356554940342903, 0.01304711401462555, 0.011922473087906837, 0.02136993780732155, 0.006648112554103136, 0.01337091252207756, 0.006739902310073376, 0.31830716133117676, 0.17185480892658234, 
0.280747652053833, 0.0377090685069561, 0.0763741061091423, 0.0020486272405833006, 0.004827563650906086, 0.001404007081873715, 0.0038012072909623384, 0.0010260797571390867, 0.0014425154076889157, 0.00024252657021861523, 0.0011654727859422565, 0.0001527049607830122, 0.00024102417228277773, 0.0003371778584551066, 0.001654197578318417], [0.006107051391154528, 0.009307284839451313, 0.003035531844943762, 0.0076368581503629684, 0.02375510334968567, 0.0007343819597736001, 0.006416504271328449, 0.03093373216688633, 0.32999950647354126, 0.08835441619157791, 0.2173861563205719, 0.1785847246646881, 0.011543406173586845, 0.0034248053561896086, 0.0024511250667274, 0.0027504966128617525, 0.06381407380104065, 0.0020005949772894382, 0.002883787965402007, 0.001968069700524211, 0.004257077816873789, 0.0003598331240937114, 0.0012307388242334127, 0.0010647318558767438], [0.011527528055012226, 0.013004143722355366, 0.0015768579905852675, 0.021161416545510292, 0.012023553252220154, 0.004517478868365288, 0.0012721142265945673, 0.02733222395181656, 0.010147335939109325, 0.09826304018497467, 0.0038109635934233665, 0.6689208745956421, 0.00458506727591157, 0.01537580881267786, 9.958396549336612e-05, 0.011948698200285435, 0.005671040154993534, 0.022987941280007362, 0.004245147109031677, 0.05165925994515419, 0.0026181554421782494, 0.003147657262161374, 9.233351738657802e-05, 0.00401174183934927], [0.00159889692440629, 0.005797912832349539, 0.011502611450850964, 0.000913503929041326, 0.006353658623993397, 0.0004239886184222996, 0.005982266739010811, 0.0037257985677570105, 0.017086012288928032, 0.0038504833355545998, 0.15136735141277313, 0.045010779052972794, 0.4875141978263855, 0.03153933957219124, 0.11126285791397095, 0.001366431126371026, 0.01878434233367443, 0.00161548622418195, 0.05693574249744415, 0.022058244794607162, 0.009518579579889774, 0.0011203595204278827, 0.004340200684964657, 0.0003309193707536906], [0.002333475975319743, 0.010551140643656254, 0.0020260775927454233, 
0.0025347319897264242, 0.002265785587951541, 0.006160641089081764, 0.0014413978205993772, 0.0187260452657938, 0.0005937221576459706, 0.005634048487991095, 0.0016924645751714706, 0.3815319538116455, 0.01056890469044447, 0.4562602639198303, 0.0034226926509290934, 0.011406106874346733, 0.0011298053432255983, 0.00883357785642147, 0.002199852839112282, 0.06035744771361351, 0.001414358033798635, 0.0035388502292335033, 0.000295661564450711, 0.00508089130744338], [3.9558206481160596e-05, 0.00032308814115822315, 0.0021851430647075176, 6.0525646404130384e-05, 1.0898766959144268e-05, 0.0002613689284771681, 0.0006906805792823434, 0.0003998648899141699, 0.001843768171966076, 7.707306940574199e-05, 0.0007596592186018825, 0.003997680731117725, 0.01413453184068203, 0.09743623435497284, 0.8651785850524902, 0.004947993904352188, 0.00032818858744576573, 0.0015908819623291492, 0.002343558706343174, 0.0008239183807745576, 0.0020842640660703182, 8.442537364317104e-05, 0.0001512980234110728, 0.00024686090182513], [0.023873867467045784, 0.053011830896139145, 0.0012121995678171515, 0.006992341950535774, 0.005206138361245394, 0.002982261124998331, 0.0017040171660482883, 0.01804586499929428, 0.001933952560648322, 0.04066821187734604, 0.0005678492016158998, 0.10987479239702225, 0.004285240545868874, 0.2454785257577896, 0.0062620192766189575, 0.28297120332717896, 0.02310752682387829, 0.02637704834342003, 0.003765091532841325, 0.021214401349425316, 0.001822445192374289, 0.032075028866529465, 0.0007305240724235773, 0.08583758026361465], [0.0013604172272607684, 0.003301011398434639, 0.0029092745389789343, 0.0004355513083282858, 0.00027661517378874123, 0.00019484762742649764, 0.00039721516077406704, 0.0007922661025077105, 0.007593484129756689, 0.0009148241952061653, 0.014138452708721161, 0.009580260142683983, 0.010010063648223877, 0.049133844673633575, 0.7031949758529663, 0.06750909984111786, 0.04651271179318428, 0.023124821484088898, 0.019782546907663345, 0.006605020258575678, 
0.010386434383690357, 0.0010987865971401334, 0.011010687798261642, 0.009736835956573486], [0.010802480392158031, 0.010540951043367386, 0.0021773185580968857, 0.004959970247000456, 0.00016360824520234019, 0.00609763665124774, 0.0003126431838609278, 0.0008333768928423524, 0.0010730416979640722, 0.0021736244671046734, 0.0024556044954806566, 0.0077631729654967785, 0.0005087574827484787, 0.040954120457172394, 0.019781548529863358, 0.16739456355571747, 0.0064675770699977875, 0.6511555910110474, 0.008301128633320332, 0.02347307652235031, 0.005058684386312962, 0.0030922573059797287, 0.007213321980088949, 0.017245950177311897], [0.007361438125371933, 0.010864358395338058, 0.012861652299761772, 0.019529491662979126, 0.004186810925602913, 0.0012524094199761748, 0.0018069393699988723, 0.0008794405730441213, 0.010538998059928417, 0.0075856526382267475, 0.30081960558891296, 0.0055845072492957115, 0.023509182035923004, 0.002727494342252612, 0.058060359209775925, 0.034220773726701736, 0.07177417725324631, 0.05829275771975517, 0.10313371568918228, 0.02509506605565548, 0.05810011550784111, 0.010535142384469509, 0.16706101596355438, 0.004218902438879013], [0.017107820138335228, 0.028877267614006996, 0.0036757574416697025, 0.016319457441568375, 0.0009601793717592955, 0.010425696149468422, 0.00020896110800094903, 0.0006020637229084969, 0.00016054412117227912, 0.0011886453721672297, 0.004798779729753733, 0.01637374795973301, 0.0007972611347213387, 0.0233113095164299, 0.00390639528632164, 0.10634998232126236, 0.0054987152107059956, 0.5743861794471741, 0.00906798429787159, 0.11024433374404907, 0.01675250381231308, 0.013051803223788738, 0.0173372533172369, 0.018597422167658806], [0.00539555074647069, 0.016148541122674942, 0.0040655555203557014, 0.007879447191953659, 0.002025796100497246, 0.0021891130600124598, 0.0018383198184892535, 0.00015245650138240308, 0.0009254501783289015, 0.0012310333549976349, 0.018893515691161156, 0.012428310699760914, 0.12494166195392609, 0.03485812991857529, 
0.04957544058561325, 0.018357165157794952, 0.028065498918294907, 0.048361893743276596, 0.12063179910182953, 0.04940929636359215, 0.30768367648124695, 0.0847010537981987, 0.05226953327655792, 0.007971787825226784], [0.017093271017074585, 0.024244826287031174, 0.003608489641919732, 0.03572425618767738, 0.008333753794431686, 0.01070804987102747, 0.0004649843613151461, 0.0023389034904539585, 7.770668889861554e-05, 0.00026265004999004304, 0.002398628043010831, 0.004152446985244751, 0.00278199533931911, 0.007903358899056911, 0.0025379080325365067, 0.008144154213368893, 0.00888581108301878, 0.04375183582305908, 0.020180119201540947, 0.6362481713294983, 0.060496505349874496, 0.05394000560045242, 0.03547609969973564, 0.010246098972856998], [0.005234045442193747, 0.009972590953111649, 0.0016112832818180323, 0.01854049786925316, 0.03851606324315071, 0.0030259143095463514, 0.003050298197194934, 0.0012843067524954677, 0.0005375007749535143, 0.0001618798851268366, 0.00428745336830616, 0.0017693137051537633, 0.00404635863378644, 0.001905025215819478, 0.003972693346440792, 0.0037296146620064974, 0.07881950587034225, 0.006636959034949541, 0.028639383614063263, 0.05116940662264824, 0.28244420886039734, 0.08589516580104828, 0.31479132175445557, 0.049959082156419754], [0.01148428488522768, 0.008838219568133354, 0.004077851306647062, 0.08465363085269928, 0.02042427659034729, 0.04344630241394043, 0.003431117394939065, 0.01802736520767212, 0.0008305470691993833, 0.0011105735320597887, 0.00018292589811608195, 0.005022455006837845, 0.0002829942968674004, 0.004188072867691517, 0.0004312261880841106, 0.030118757858872414, 0.0070127518847584724, 0.048871591687202454, 0.0131154153496027, 0.17232443392276764, 0.04387517273426056, 0.08081972599029541, 0.015172009356319904, 0.3822582960128784], [0.003125513903796673, 0.0019182654796168208, 0.03678448498249054, 0.009442277252674103, 0.015378501266241074, 0.008554365485906601, 0.028507597744464874, 0.011430458165705204, 0.010993627831339836, 
0.00012208927364554256, 0.004777370486408472, 3.0910541681805626e-05, 0.0005386985139921308, 0.0001660689595155418, 0.021530862897634506, 0.0011536708334460855, 0.0067020258866250515, 0.0017347530229017138, 0.02411728724837303, 0.009776294231414795, 0.03162342682480812, 0.007080434821546078, 0.7156160473823547, 0.04889494553208351]], [[0.013323506340384483, 0.018008049577474594, 0.015502882190048695, 0.006188483443111181, 0.01810794696211815, 0.0333915613591671, 0.03571784868836403, 0.09052061289548874, 0.05885383114218712, 0.12319158762693405, 0.034361355006694794, 0.09731556475162506, 0.09673422574996948, 0.20379194617271423, 0.04913105070590973, 0.018781937658786774, 0.020503859966993332, 0.013575269840657711, 0.008921781554818153, 0.012039871886372566, 0.004789168015122414, 0.011634393595159054, 0.005249501205980778, 0.010363680310547352], [0.013570796698331833, 0.016071893274784088, 0.012053108774125576, 0.0036323906388133764, 0.010557296685874462, 0.008638323284685612, 0.006161098834127188, 0.05718375742435455, 0.07576677948236465, 0.16498233377933502, 0.054884254932403564, 0.044784966856241226, 0.06987954676151276, 0.20447617769241333, 0.08691811561584473, 0.06067011132836342, 0.034277837723493576, 0.011200251057744026, 0.006008438766002655, 0.020223025232553482, 0.009208687581121922, 0.01787460781633854, 0.006888206582516432, 0.004088059067726135], [0.004173034802079201, 0.007480265572667122, 0.04480831325054169, 0.6070606708526611, 0.0130770867690444, 0.060373250395059586, 0.04449619725346565, 0.016929948702454567, 0.09608697146177292, 0.004933323245495558, 0.047671135514974594, 0.008679470047354698, 0.004827200435101986, 0.0018982634646818042, 0.0008000798989087343, 0.0006625893875025213, 0.0001285246544284746, 0.0001893688749987632, 0.00010934586316579953, 0.0002613053657114506, 0.009342706762254238, 0.0007008857792243361, 0.01945258118212223, 0.005857502575963736], [0.004348098766058683, 0.004682144150137901, 0.022092167288064957, 0.0333266519010067, 
0.003843904472887516, 0.05875246599316597, 0.08432045578956604, 0.36105459928512573, 0.07563315331935883, 0.102415531873703, 0.012332563288509846, 0.020867714658379555, 0.02663385309278965, 0.03894303739070892, 0.005000225268304348, 0.0015594173455610871, 0.00016246503219008446, 0.00048380764201283455, 0.000520893547218293, 0.007816351018846035, 0.006785357370972633, 0.04496181011199951, 0.020098837092518806, 0.06336449086666107], [0.001788038876838982, 0.0014959904365241528, 0.010276531800627708, 0.002330151619389653, 0.010635151527822018, 0.0384785532951355, 0.014099945314228535, 0.5733451843261719, 0.11911546438932419, 0.1585225909948349, 0.03244573622941971, 0.00634304853156209, 0.0034445880446583033, 0.006394379772245884, 0.0014957513194531202, 0.0001955903135240078, 0.0006502823671326041, 0.0003149851690977812, 9.468065400142223e-05, 0.003254385432228446, 0.0016004132339730859, 0.008107885718345642, 0.004139748401939869, 0.001430889475159347], [0.0024110055528581142, 0.0017450954765081406, 0.00574399484321475, 0.006339045241475105, 0.0027980103623121977, 0.01596604846417904, 0.02718466706573963, 0.3289998471736908, 0.11418911814689636, 0.41931551694869995, 0.021712815389037132, 0.0194831732660532, 0.01234927773475647, 0.00854238960891962, 0.0015015548560768366, 0.001558566465973854, 0.0007938037742860615, 0.001567880972288549, 0.0007449675467796624, 0.002261021640151739, 0.0002837859792634845, 0.0017247709911316633, 0.0005538457189686596, 0.00222975155338645], [0.005091778002679348, 0.0027980487793684006, 0.007837912999093533, 0.0015892288647592068, 0.0017109920736402273, 0.0028040495235472918, 0.0031602561939507723, 0.29334139823913574, 0.08444929122924805, 0.5347273945808411, 0.03623050078749657, 0.015370538458228111, 0.0021029352210462093, 0.00599065562710166, 0.0009661510703153908, 0.0001821869664127007, 0.0001537478092359379, 0.00010084384121000767, 1.7156708054244518e-05, 0.0005956932436674833, 4.823424023925327e-05, 0.0003376381646376103, 
0.00021127013314981014, 0.00018208388064522296], [0.008912756107747555, 0.0065200901590287685, 0.005676736123859882, 0.0030417111702263355, 0.0023151796776801348, 0.005060167983174324, 0.02508704923093319, 0.0396910160779953, 0.12475491315126419, 0.4063546061515808, 0.04134761169552803, 0.14683479070663452, 0.11403117328882217, 0.055433254688978195, 0.003169798757880926, 0.002494214801117778, 0.0014094491489231586, 0.0025398083962500095, 0.0027066559996455908, 0.0007206922746263444, 0.00027390182367525995, 0.0005678755696862936, 0.00024339595984201878, 0.0008132871589623392], [0.0023294654674828053, 0.004448415711522102, 0.005871869623661041, 0.003284494625404477, 0.005721433088183403, 0.0019329910865053535, 0.0014882198302075267, 0.005424698814749718, 0.4019600450992584, 0.034215301275253296, 0.3444038927555084, 0.1280641406774521, 0.014728185720741749, 0.03424374759197235, 0.004472784698009491, 0.001348308753222227, 0.0023011781740933657, 0.00035999537794850767, 0.00011073868517996743, 0.0002306133246747777, 0.002641309518367052, 4.3784239096567035e-05, 0.00034628884168341756, 2.8053731512045488e-05], [0.03694244846701622, 0.030209816992282867, 0.0027583306655287743, 0.0008063883287832141, 0.0008147243061102927, 0.0011473331833258271, 0.009931232780218124, 0.0049881101585924625, 0.013408373109996319, 0.11313755065202713, 0.01792711578309536, 0.18118533492088318, 0.3470342457294464, 0.20859892666339874, 0.00924891047179699, 0.0007338228169828653, 0.0004708456981461495, 0.0026034703478217125, 0.007277261465787888, 0.0060004922561347485, 0.0016053578583523631, 0.002594136632978916, 0.00024059342104010284, 0.0003352661442477256], [0.024835893884301186, 0.07269327342510223, 0.004790609702467918, 0.002049660077318549, 0.0017318647587671876, 0.0018566532526165247, 0.0006782921263948083, 0.0014582262374460697, 0.025646688416600227, 0.004371246322989464, 0.0327579490840435, 0.07752305269241333, 0.06465371698141098, 0.6140205264091492, 0.045333076268434525, 
0.010248535312712193, 0.0015017178375273943, 0.0002661199832800776, 0.00020784874504897743, 0.0008236331050284207, 0.00846653152257204, 0.0005906415753997862, 0.003033358370885253, 0.0004608099116012454], [0.0038781268522143364, 0.007300902158021927, 0.00045781212975271046, 0.0003539184690453112, 9.487092029303312e-05, 6.360700353980064e-05, 0.0005910725449211895, 0.0002982726146001369, 0.0010181930847465992, 0.0027924058958888054, 0.0013478354085236788, 0.026341339573264122, 0.21276597678661346, 0.6107548475265503, 0.0929490253329277, 0.026413938030600548, 0.0008845299016684294, 0.00031256466172635555, 0.0016211953479796648, 0.0013166568242013454, 0.002610762370750308, 0.0036396505311131477, 0.0006371473427861929, 0.0015553488628938794], [0.008167661726474762, 0.009916060604155064, 0.000876892008818686, 0.0006619929918088019, 0.0004462750512175262, 7.605463179061189e-05, 0.00023041099484544247, 0.0021888844203203917, 0.00598370935767889, 0.007923249155282974, 0.0020772558636963367, 0.018298614770174026, 0.036582689732313156, 0.5614917278289795, 0.10035479813814163, 0.21033352613449097, 0.010307252407073975, 0.0006575345760211349, 0.0008551353821530938, 0.004606620408594608, 0.006541598588228226, 0.007087182253599167, 0.0012726233107969165, 0.003062210278585553], [0.002240139292553067, 0.0019793654792010784, 0.0006257767090573907, 0.0002650214883033186, 0.00039914617082104087, 0.00014362685033120215, 0.0003606000682339072, 0.0028331545181572437, 0.002315083984285593, 0.07040148973464966, 0.0015778349479660392, 0.008954501710832119, 0.035237327218055725, 0.28155338764190674, 0.20866759121418, 0.20202264189720154, 0.06749492883682251, 0.023905685171484947, 0.018126370385289192, 0.0199379101395607, 0.0019539606291800737, 0.038917236030101776, 0.0011229579104110599, 0.008964263834059238], [0.004916503094136715, 0.0032446261029690504, 0.0047355759888887405, 0.0034112909343093634, 0.006795849185436964, 0.00041638565016910434, 0.0005961843999102712, 0.0008656664285808802, 
0.012605596333742142, 0.013585160486400127, 0.016581691801548004, 0.007988505065441132, 0.014709233306348324, 0.03530315309762955, 0.10643693059682846, 0.2425488978624344, 0.24213330447673798, 0.046840421855449677, 0.03276187926530838, 0.02940031886100769, 0.0888877734541893, 0.046090878546237946, 0.02327890507876873, 0.015865258872509003], [0.0004330424126237631, 0.0003413913364056498, 0.0012215384049341083, 0.0018160956678912044, 0.00045315895113162696, 9.788705210667104e-05, 0.0002789293648675084, 0.0013459778856486082, 0.0015921180602163076, 0.004248825367540121, 0.0013718365225940943, 0.0025889223907142878, 0.017418332397937775, 0.008611065335571766, 0.00855324324220419, 0.0077190101146698, 0.004604745656251907, 0.01401823665946722, 0.026201006025075912, 0.4285084903240204, 0.29063841700553894, 0.15784703195095062, 0.007545188069343567, 0.01254556979984045], [0.0005044421995989978, 0.00032299821032211185, 0.0025128007400780916, 0.00047889843699522316, 0.00601534266024828, 0.0005180391017347574, 0.00018764298874884844, 0.002382430015131831, 0.004596828483045101, 0.005067448131740093, 0.008412988856434822, 0.0011442602844908834, 0.0024213686119765043, 0.0018293196335434914, 0.003925487864762545, 0.000761401723138988, 0.017429756000638008, 0.01020016148686409, 0.006269870325922966, 0.26496145129203796, 0.5078091621398926, 0.13958105444908142, 0.011610294692218304, 0.0010565478587523103], [0.0008626359049230814, 0.0006670505972579122, 0.001262528938241303, 0.0036137597635388374, 0.0014471819158643484, 0.0014306252123788, 0.0007627068553119898, 0.0005490148905664682, 0.00016835113638080657, 0.0006727299187332392, 0.0007860346231609583, 0.0007660119445063174, 0.006361052859574556, 0.0010136812925338745, 0.0015765530988574028, 0.0010756496340036392, 0.0016122939996421337, 0.015312994830310345, 0.0349554680287838, 0.320154070854187, 0.24752770364284515, 0.3418474495410919, 0.009258040226995945, 0.0063165295869112015], [0.0016651154728606343, 0.0010443136561661959, 
0.004093860276043415, 0.0029776408337056637, 0.002690681256353855, 0.001115497201681137, 0.00022838071163278073, 0.001137292361818254, 0.0002364653628319502, 0.0004219801048748195, 0.000673064321745187, 0.00018597730377223343, 0.0005919402465224266, 0.00043112278217449784, 0.0021282187663018703, 0.0006509521044790745, 0.0011030277237296104, 0.0020693736150860786, 0.0017096324590966105, 0.18931162357330322, 0.31481048464775085, 0.4151371419429779, 0.0452921986579895, 0.01029401458799839], [0.003870630171149969, 0.00422675209119916, 0.00448259711265564, 0.007759689353406429, 0.0033302828669548035, 0.007860447280108929, 0.004820889327675104, 0.0017366368556395173, 0.00045611406676471233, 0.00043659083894453943, 0.00044676210382021964, 0.0008593209204263985, 0.00848530512303114, 0.0036009540781378746, 0.010408923029899597, 0.008126976899802685, 0.0035035875625908375, 0.00897509790956974, 0.018888117745518684, 0.031421512365341187, 0.12148062139749527, 0.4108230769634247, 0.10550929605960846, 0.22848984599113464], [0.0010434804717078805, 0.0013764126924797893, 0.008900023996829987, 0.020429519936442375, 0.013046910054981709, 0.005676416680216789, 0.0014904913259670138, 0.0021365699358284473, 0.004821800626814365, 8.067772432696074e-05, 0.0011747336247935891, 0.00014931659097783267, 0.00016469370166305453, 0.0003000342403538525, 0.006383563857525587, 0.010280991904437542, 0.007967148907482624, 0.0012268598657101393, 0.0007260330603457987, 0.004861475434154272, 0.35320326685905457, 0.03833532705903053, 0.37507790327072144, 0.14114642143249512], [0.01919432356953621, 0.0069546448066830635, 0.007842479273676872, 0.006549366749823093, 0.004003255628049374, 0.012749058194458485, 0.059302330017089844, 0.06552526354789734, 0.005573753267526627, 0.007636649534106255, 0.0004298650019336492, 0.0008226807112805545, 0.0024563930928707123, 0.0010046518873423338, 0.002580634318292141, 0.0022614661138504744, 0.0011180249275639653, 0.0036214771680533886, 0.006824989803135395, 
0.014182022772729397, 0.007030506618320942, 0.10607470571994781, 0.04444324970245361, 0.611818253993988], [0.07780151069164276, 0.029060915112495422, 0.0676988959312439, 0.03498876839876175, 0.013038110919296741, 0.019905829802155495, 0.005964890122413635, 0.05154098942875862, 0.32642990350723267, 0.008591307327151299, 0.012486270628869534, 0.0018478967249393463, 0.000340746424626559, 0.002003788948059082, 0.0024678893387317657, 0.018144063651561737, 0.004087383858859539, 0.0011114015942439437, 0.0003551334666553885, 0.003003346733748913, 0.03311392292380333, 0.00522098271176219, 0.13873128592967987, 0.14206480979919434], [0.15824422240257263, 0.024314848706126213, 0.05185280367732048, 0.023784587159752846, 0.002560819499194622, 0.0054093278013169765, 0.034090038388967514, 0.1001492440700531, 0.12243875861167908, 0.10314315557479858, 0.005712383892387152, 0.004138929303735495, 0.0017613907111808658, 0.001341676339507103, 0.0016175595810636878, 0.006678048986941576, 0.0010172044858336449, 0.0026778460014611483, 0.0032343603670597076, 0.010247757658362389, 0.007808469235897064, 0.03534719720482826, 0.023580260574817657, 0.26884910464286804]], [[0.0043054320849478245, 0.006085729226469994, 0.04262187331914902, 0.011382547207176685, 0.015722133219242096, 0.019727474078536034, 0.017360195517539978, 0.0726717934012413, 0.1852513551712036, 0.08872703462839127, 0.14349055290222168, 0.1296887993812561, 0.0781102329492569, 0.08510662615299225, 0.0491960234940052, 0.008050658740103245, 0.008706099353730679, 0.010028611868619919, 0.00283333333209157, 0.006790719926357269, 0.003936159424483776, 0.0016856415895745158, 0.005361688323318958, 0.003159207059070468], [0.008285163901746273, 0.005037176422774792, 0.01680990681052208, 0.006126034073531628, 0.005000161472707987, 0.014234591275453568, 0.011389978229999542, 0.012720324099063873, 0.02305375412106514, 0.05976168438792229, 0.06724905222654343, 0.20304904878139496, 0.19922974705696106, 0.23050501942634583, 0.07098717987537384, 
0.013254113495349884, 0.004507638048380613, 0.014737287536263466, 0.006084183230996132, 0.008309072814881802, 0.003956436179578304, 0.005468044430017471, 0.004855224397033453, 0.005389085039496422], [0.02093740925192833, 0.0217941552400589, 0.10079359263181686, 0.015779344365000725, 0.12920907139778137, 0.016913967207074165, 0.021152423694729805, 0.014822756871581078, 0.41413891315460205, 0.013382039964199066, 0.05347372964024544, 0.0020574908703565598, 0.002600351581349969, 0.0004989749868400395, 0.00314294989220798, 0.0002134500682586804, 0.017231425270438194, 0.0015683824894949794, 0.0028095238376408815, 0.0022205279674381018, 0.07876957207918167, 0.004199547693133354, 0.056330904364585876, 0.005959600210189819], [0.022926069796085358, 0.02026854082942009, 0.07192889600992203, 0.05246168375015259, 0.066399484872818, 0.0408734455704689, 0.009820051491260529, 0.07744959741830826, 0.15109054744243622, 0.10814055055379868, 0.020121091976761818, 0.010333586484193802, 0.021520480513572693, 0.003201110288500786, 0.01740669272840023, 0.011103508993983269, 0.07895175367593765, 0.05996650084853172, 0.008200963959097862, 0.0322580486536026, 0.03692079335451126, 0.03574910759925842, 0.014617936685681343, 0.02828957326710224], [0.051226504147052765, 0.022282464429736137, 0.1770179569721222, 0.10576769709587097, 0.014626715332269669, 0.11635778844356537, 0.018957247957587242, 0.028667420148849487, 0.04402186721563339, 0.0882660523056984, 0.004231898579746485, 0.0036352374590933323, 0.009081513620913029, 0.0075361719354987144, 0.062550850212574, 0.010854336433112621, 0.005997753236442804, 0.04917265847325325, 0.006344829685986042, 0.013434624299407005, 0.020567432045936584, 0.08550103008747101, 0.012753572314977646, 0.04114628955721855], [0.038716066628694534, 0.046729933470487595, 0.21979647874832153, 0.06201617419719696, 0.13534516096115112, 0.12646912038326263, 0.03634520247578621, 0.0574721023440361, 0.12898266315460205, 0.023287855088710785, 0.029585594311356544, 
0.005018630996346474, 0.006992565467953682, 0.001061003771610558, 0.0029586877208203077, 0.00015750362945254892, 0.0037523629143834114, 0.00287470780313015, 0.00217633880674839, 0.005875179544091225, 0.027697527781128883, 0.00874305423349142, 0.023728037253022194, 0.004218171816319227], [0.01694279909133911, 0.0261093620210886, 0.043576449155807495, 0.06665007770061493, 0.22966216504573822, 0.1189354658126831, 0.08010795712471008, 0.05906100571155548, 0.1905246376991272, 0.03161616995930672, 0.007007627282291651, 0.010277966968715191, 0.01983424462378025, 0.010688798502087593, 0.00315406103618443, 0.0002249486424261704, 0.001298408256843686, 0.00021396375086624175, 0.0006320113316178322, 0.0019758485723286867, 0.027326466515660286, 0.01632598228752613, 0.0175046194344759, 0.020348958671092987], [0.0028482102788984776, 0.0009117849986068904, 0.0063890558667480946, 0.022213416174054146, 0.011937067843973637, 0.8109197616577148, 0.026455862447619438, 0.05079935863614082, 0.009551279246807098, 0.006424579303711653, 0.00032321360777132213, 0.005305714905261993, 0.0058725434355437756, 0.002393560716882348, 0.00037073128623887897, 2.2871337932883762e-05, 1.422481636836892e-05, 5.033136403653771e-05, 9.609821972844657e-06, 0.0002122131991200149, 0.0005440693930722773, 0.0031245944555848837, 0.0013890013797208667, 0.031916867941617966], [0.01029051374644041, 0.013575423508882523, 0.03301126882433891, 0.02330635115504265, 0.04350970312952995, 0.053041353821754456, 0.07361503690481186, 0.23414446413516998, 0.40071436762809753, 0.007317614741623402, 0.006126627326011658, 0.0023048524744808674, 0.0018240917706862092, 0.0016537263290956616, 0.0035957572981715202, 0.00071027094963938, 0.002473334316164255, 0.00015865570458117872, 0.00019976799376308918, 0.00012279656948521733, 0.0023176223039627075, 0.0011118014808744192, 0.016851291060447693, 0.06802331656217575], [0.004045362584292889, 0.003305216087028384, 0.001098418259061873, 0.00790945254266262, 0.0016580235678702593, 
0.029348069801926613, 0.017720187082886696, 0.8398678302764893, 0.03298085927963257, 0.01703134924173355, 0.0006782846758142114, 0.00762815261259675, 0.0006405095919035375, 0.017280854284763336, 0.0003912732645403594, 0.003921550698578358, 0.00012834843073505908, 0.0003131902776658535, 4.5544129534391686e-05, 0.0004541492380667478, 3.583596117096022e-05, 0.00029506601276807487, 0.0002218525332864374, 0.013000648468732834], [0.01253324095159769, 0.012935509905219078, 0.02565326914191246, 0.0037676554638892412, 0.019664129242300987, 0.022857915610074997, 0.011834479868412018, 0.1450975239276886, 0.5129311084747314, 0.058322276920080185, 0.11965445429086685, 0.01637357473373413, 0.0017813886515796185, 0.002437052084133029, 0.003394330618903041, 0.0008008825243450701, 0.012290451675653458, 0.006457680836319923, 0.0006541670300066471, 0.0015404215082526207, 0.0007603922276757658, 0.00011887826985912398, 0.004894735291600227, 0.003244508756324649], [0.0024442262947559357, 0.0006947971996851265, 0.015054063871502876, 0.004814179148525, 0.0006273420294746757, 0.01532459445297718, 0.001002687611617148, 0.007530678994953632, 0.15877757966518402, 0.5330561995506287, 0.15828628838062286, 0.03267255797982216, 0.003061311785131693, 0.0008686791406944394, 0.0040793633088469505, 0.0015199396293610334, 0.0007476450991816819, 0.05755620449781418, 0.0003949106321670115, 0.0008774946327321231, 0.00015029238420538604, 0.00019166718993801624, 0.00014985899906605482, 0.0001173276687040925], [0.005255311261862516, 0.0020370427519083023, 0.005420004948973656, 0.008208448998630047, 0.0008897424559108913, 0.0022136776242405176, 0.0013905062805861235, 0.005068257916718721, 0.00518797105178237, 0.11845748871564865, 0.1939002126455307, 0.4176584780216217, 0.03318488970398903, 0.017078351229429245, 0.0035904233809560537, 0.011546154506504536, 0.002032686024904251, 0.11679679900407791, 0.009966439567506313, 0.03801706060767174, 0.0005338588962331414, 0.0010041790083050728, 0.0003117546148132533, 
0.0002503079595044255], [0.0001233479124493897, 0.00017980234406422824, 0.001184015185572207, 0.000849563570227474, 0.00016126803529914469, 0.002868997398763895, 0.00035350507823750377, 0.0011903084814548492, 0.0017036012141034007, 0.00865304097533226, 0.059618499130010605, 0.7800637483596802, 0.08871494233608246, 0.04627356678247452, 0.004340542946010828, 0.0001771434472175315, 1.7616623154026456e-05, 0.0017759983893483877, 8.381497173104435e-05, 0.0014222485478967428, 9.888794011203572e-05, 9.754674101714045e-05, 3.246323103667237e-05, 1.5451778381248005e-05], [0.000740107789169997, 0.0015078146243467927, 0.002246793592348695, 0.0014599565183743834, 0.0010556703200563788, 0.0035315891727805138, 0.001165280002169311, 0.001140955020673573, 0.002640438498929143, 0.0025282336864620447, 0.022777916863560677, 0.17765438556671143, 0.346420556306839, 0.25953808426856995, 0.13411852717399597, 0.005627450533211231, 0.001085717580281198, 0.002819359302520752, 0.0009701368398964405, 0.007840263657271862, 0.006461723707616329, 0.0064753200858831406, 0.005686524324119091, 0.004507238045334816], [0.00012229369895067066, 0.0004106431151740253, 9.625325037632138e-05, 0.0006800959818065166, 0.00047759729204699397, 0.001217528828419745, 0.0001815920404624194, 0.00401238864287734, 0.00023646195768378675, 0.0018600717885419726, 0.0003028397914022207, 0.03771531209349632, 0.13418719172477722, 0.5177545547485352, 0.09159950166940689, 0.14158597588539124, 0.007190448697656393, 0.008863000199198723, 0.0004966052947565913, 0.020745258778333664, 0.0005516282399185002, 0.009869670495390892, 0.00040122735663317144, 0.019441893324255943], [0.00033291021827608347, 0.00024026106984820217, 0.00010004807700170204, 0.0003135943552479148, 5.9290319768479094e-05, 0.0007189670577645302, 0.00010157535143662244, 0.0006837916444055736, 7.519090286223218e-05, 0.001351153594441712, 2.1794972781208344e-05, 0.0008971802308224142, 0.005989918019622564, 0.14682556688785553, 0.1848669797182083, 
0.5583904981613159, 0.0076870606280863285, 0.03659920021891594, 0.0009160715853795409, 0.004213015083223581, 0.00017355509044136852, 0.010736054740846157, 0.000327078509144485, 0.038379278033971786], [0.0014012325555086136, 0.0036730067804455757, 0.00027439038967713714, 0.00026360375341027975, 0.0019827294163405895, 0.00029182338039390743, 0.000182350559043698, 0.0033461209386587143, 0.0010388526134192944, 0.006474341731518507, 0.0008956584497354925, 0.001664783339947462, 0.0033330044243484735, 0.027988281100988388, 0.025551388040184975, 0.3266497254371643, 0.5139458179473877, 0.059865552932024, 0.006285691633820534, 0.007523949258029461, 0.00043660044320859015, 0.0023983055725693703, 0.0008334096637554467, 0.0036993669345974922], [0.00048246115329675376, 0.0014369020937010646, 0.0001894187298603356, 0.00043509050738066435, 0.0022927375975996256, 5.3830361139262095e-05, 8.502782293362543e-05, 0.00043682276736944914, 0.0005876136710867286, 0.004866925999522209, 0.0005055826040916145, 0.0016641117399558425, 0.004473926965147257, 0.019887523725628853, 0.025906754657626152, 0.37212711572647095, 0.5056316256523132, 0.030773300677537918, 0.00984650943428278, 0.010230328887701035, 0.001378790009766817, 0.003769501345232129, 0.0004941718652844429, 0.002443863544613123], [0.001192555413581431, 0.000784764182753861, 0.0011540876002982259, 0.005688278470188379, 0.003728330135345459, 0.002092042937874794, 0.00022515907767228782, 0.0022077420726418495, 0.0004898930783383548, 0.019053973257541656, 0.0012666091788560152, 0.015100609511137009, 0.008820387534797192, 0.004394343122839928, 0.007198874372988939, 0.1226269006729126, 0.1255449503660202, 0.5410088300704956, 0.017747143283486366, 0.09837588667869568, 0.0026739665772765875, 0.012072335928678513, 0.0003864463360514492, 0.006165973376482725], [0.0020192237570881844, 0.002046496607363224, 0.0015959099400788546, 0.002189961727708578, 0.0031741363927721977, 6.132155249360949e-05, 9.672918531578034e-05, 6.291209137998521e-05, 
0.0001781835308065638, 0.00039000247488729656, 0.00201587681658566, 0.0008836330380290747, 0.0015814885264262557, 0.00013990348088555038, 0.00283190724439919, 0.017071884125471115, 0.35637253522872925, 0.09970518946647644, 0.22476540505886078, 0.11657395958900452, 0.13342037796974182, 0.024192171171307564, 0.006358026992529631, 0.0022727425675839186], [0.004434277303516865, 0.002932976698502898, 0.00025528663536533713, 0.007351420354098082, 0.001363115618005395, 0.000554105150513351, 0.0004650278715416789, 0.00031585394754074514, 2.9339389584492892e-05, 0.0008324044174514711, 0.0002877181686926633, 0.00751276733353734, 0.007695821579545736, 0.01655864156782627, 0.0008669817470945418, 0.04077618196606636, 0.005766971968114376, 0.017947331070899963, 0.04916153848171234, 0.5595883131027222, 0.05659075081348419, 0.20693784952163696, 0.0026335411239415407, 0.00914191734045744], [0.01460312306880951, 0.01896030083298683, 0.008417497389018536, 0.006123954430222511, 0.015409070067107677, 0.003557354211807251, 0.003453706158325076, 0.0010145717533305287, 0.0002112588845193386, 0.00011663118493743241, 0.0014188364148139954, 0.0013355029514059424, 0.00804096832871437, 0.0030720988288521767, 0.0035741578321903944, 0.0007026895182207227, 0.014871872961521149, 0.004529799334704876, 0.02918878011405468, 0.21349196135997772, 0.3864479660987854, 0.08296621590852737, 0.1480177789926529, 0.030474010854959488], [0.0018443934386596084, 0.0010348226642236114, 0.0019273203797638416, 0.019938381388783455, 0.0008937644888646901, 0.006614921148866415, 0.0007305808248929679, 0.00021345233835745603, 3.1782245059730485e-05, 0.00010356766142649576, 2.4865224986569956e-05, 9.96951712295413e-05, 0.0026220292784273624, 0.0008534971857443452, 0.003996891900897026, 0.0037714613135904074, 0.0007577429059892893, 0.004145400132983923, 0.003269095439463854, 0.0417664535343647, 0.10757026076316833, 0.7023134231567383, 0.019667640328407288, 0.0758085548877716]], [[0.009634776972234249, 
0.013663498684763908, 0.05319693312048912, 0.08506418019533157, 0.009071454405784607, 0.15605813264846802, 0.11740870028734207, 0.02850761078298092, 0.16622011363506317, 0.10036447644233704, 0.07549041509628296, 0.05237676948308945, 0.012933672405779362, 0.0067668878473341465, 0.03514070436358452, 0.005243081133812666, 0.0009477115818299353, 0.007994448766112328, 0.004356930498033762, 0.0021098575089126825, 0.006265533156692982, 0.007327336817979813, 0.015490728430449963, 0.02836608700454235], [0.01138448715209961, 0.010605008341372013, 0.056850332766771317, 0.07826363295316696, 0.00744218286126852, 0.14288772642612457, 0.06825055181980133, 0.016554895788431168, 0.1629686802625656, 0.1228065937757492, 0.03611215949058533, 0.0403488464653492, 0.02729477360844612, 0.016808854416012764, 0.07113982737064362, 0.021057888865470886, 0.002388161141425371, 0.02316102385520935, 0.008176847361028194, 0.005245546344667673, 0.012225938029587269, 0.02300328202545643, 0.009911962784826756, 0.025110751390457153], [0.007468232419341803, 0.03671928495168686, 0.027501486241817474, 0.0017493749037384987, 0.00036444319994188845, 0.0016629825113341212, 0.0022603515535593033, 0.008499054238200188, 0.004404257517307997, 0.012216257862746716, 0.33944353461265564, 0.01852230913937092, 0.0033910172060132027, 0.028319666162133217, 0.006188743282109499, 0.006443541031330824, 0.001185969333164394, 0.006131590809673071, 0.004347100853919983, 0.0066164713352918625, 0.009073738940060139, 0.01762951724231243, 0.43394219875335693, 0.01591886207461357], [0.03665563091635704, 0.03588101640343666, 0.40715935826301575, 0.010031729005277157, 0.003172523807734251, 0.019523123279213905, 0.031751301139593124, 0.03617257997393608, 0.020609071478247643, 0.03038790449500084, 0.05779455229640007, 0.03881539776921272, 0.009508982300758362, 0.08136867731809616, 0.030478347092866898, 0.013600742444396019, 0.00360116851516068, 0.007974264211952686, 0.017576077952980995, 0.0187078807502985, 0.016507970169186592, 
0.02566857449710369, 0.02905591018497944, 0.017997177317738533], [0.006827156525105238, 0.00715598976239562, 0.002224258380010724, 0.02070140838623047, 0.028242092579603195, 0.13869526982307434, 0.013455288484692574, 0.0034508313983678818, 0.05768093839287758, 0.1268574744462967, 0.022305738180875778, 0.040228113532066345, 0.17165525257587433, 0.03539653494954109, 0.04072139784693718, 0.03136470541357994, 0.026548760011792183, 0.15545986592769623, 0.0061476281844079494, 0.005354142747819424, 0.009250246919691563, 0.0266339723020792, 0.00783957913517952, 0.01580340415239334], [0.020626850426197052, 0.04351891204714775, 0.06356551498174667, 0.05675165355205536, 0.009495514445006847, 0.04582732915878296, 0.05471203476190567, 0.027733545750379562, 0.07134493440389633, 0.09046062082052231, 0.07363077998161316, 0.034374505281448364, 0.0327044315636158, 0.032168805599212646, 0.12061094492673874, 0.02786978706717491, 0.006435252260416746, 0.025529632344841957, 0.016935203224420547, 0.020082682371139526, 0.017302697524428368, 0.03930599242448807, 0.038940828293561935, 0.03007146716117859], [0.010677548125386238, 0.01297603640705347, 0.04635697603225708, 0.049481604248285294, 0.009871610440313816, 0.08377724140882492, 0.02969934791326523, 0.024202220141887665, 0.0676482617855072, 0.19105598330497742, 0.045876968652009964, 0.06142096966505051, 0.03774651139974594, 0.04782476648688316, 0.05020486190915108, 0.02216990478336811, 0.0038089167792350054, 0.04408112168312073, 0.007714809384196997, 0.012118866667151451, 0.01821492612361908, 0.06862875819206238, 0.022736577317118645, 0.03170511871576309], [0.028638776391744614, 0.020180126652121544, 0.08102419227361679, 0.1558067798614502, 0.013278882019221783, 0.10995030403137207, 0.07604995369911194, 0.011265202425420284, 0.17056863009929657, 0.06204503774642944, 0.026335975155234337, 0.04293478652834892, 0.021070625633001328, 0.01425879541784525, 0.05331593379378319, 0.017390914261341095, 0.0020060152746737003, 
0.011741789989173412, 0.005904919933527708, 0.0034962629433721304, 0.02106720581650734, 0.017533782869577408, 0.007687292993068695, 0.026447905227541924], [0.027512747794389725, 0.03311576694250107, 0.023762041702866554, 0.04706849530339241, 0.05365455895662308, 0.0537191778421402, 0.07658340781927109, 0.02681020274758339, 0.0603315494954586, 0.03797827288508415, 0.025693604722619057, 0.027208132669329643, 0.03948306292295456, 0.018149359151721, 0.08741848915815353, 0.03910420835018158, 0.04482285678386688, 0.05264567956328392, 0.05095366761088371, 0.031864315271377563, 0.03830660507082939, 0.03345698118209839, 0.02642764151096344, 0.04392917826771736], [0.006948319263756275, 0.006616191938519478, 0.029463855549693108, 0.044057488441467285, 0.018428701907396317, 0.054886315017938614, 0.08562584966421127, 0.033127665519714355, 0.02391413040459156, 0.06378604471683502, 0.022828280925750732, 0.04190140217542648, 0.04984261840581894, 0.03134102001786232, 0.16674289107322693, 0.025118080899119377, 0.012130244635045528, 0.03389877825975418, 0.054911620914936066, 0.048289429396390915, 0.025123391300439835, 0.055847764015197754, 0.017602024599909782, 0.0475679486989975], [0.027369527146220207, 0.04507310315966606, 0.03935698792338371, 0.06263985484838486, 0.014708898030221462, 0.031483471393585205, 0.04132605344057083, 0.011173810809850693, 0.08598408848047256, 0.04042218253016472, 0.04168985038995743, 0.05422355234622955, 0.04292064160108566, 0.022535644471645355, 0.08586709201335907, 0.05921204015612602, 0.014508657157421112, 0.05658947676420212, 0.026353497058153152, 0.013303740881383419, 0.039396535605192184, 0.033694736659526825, 0.033778343349695206, 0.07638812065124512], [0.0030271108262240887, 0.00363339576870203, 0.5006741881370544, 0.038575589656829834, 0.0016197394579648972, 0.007383363321423531, 0.05326259881258011, 0.012266234494745731, 0.01688011735677719, 0.01498504914343357, 0.01690557226538658, 0.012925616465508938, 0.0049446658231318, 
0.013371306471526623, 0.1603703498840332, 0.008535810746252537, 0.000833014608360827, 0.0035696292761713266, 0.02584908716380596, 0.02009143866598606, 0.013979855924844742, 0.02678815647959709, 0.0121218366548419, 0.02740630879998207], [0.019168274477124214, 0.012673980556428432, 0.060237545520067215, 0.030783653259277344, 0.007264941930770874, 0.020803650841116905, 0.011691317893564701, 0.00894775241613388, 0.03311815857887268, 0.047257959842681885, 0.021762700751423836, 0.05320208892226219, 0.034395307302474976, 0.08038376271724701, 0.084568552672863, 0.0819266140460968, 0.01789996400475502, 0.05883284658193588, 0.0260122362524271, 0.029661299660801888, 0.08463416993618011, 0.09085951000452042, 0.020150674507021904, 0.06376297771930695], [0.0034574512392282486, 0.004534490872174501, 0.4328833222389221, 0.05114798620343208, 0.0032736770808696747, 0.009044305421411991, 0.10684306919574738, 0.00960601307451725, 0.0430765300989151, 0.015734722837805748, 0.01645761728286743, 0.06332006305456161, 0.0054705399088561535, 0.015423327684402466, 0.08074831962585449, 0.0055910381488502026, 0.0008436432690359652, 0.0028866827487945557, 0.024221239611506462, 0.0066381702199578285, 0.016542870551347733, 0.013231181539595127, 0.005643480457365513, 0.06338023394346237], [0.017513994127511978, 0.019580567255616188, 0.030285608023405075, 0.01777956821024418, 0.005863716825842857, 0.01960965432226658, 0.01763402298092842, 0.005411628168076277, 0.06954431533813477, 0.03568517044186592, 0.054030708968639374, 0.08816919475793839, 0.06035082787275314, 0.05506506562232971, 0.07523047178983688, 0.07337013632059097, 0.015918320044875145, 0.09920945018529892, 0.02745615690946579, 0.01371461246162653, 0.028040366247296333, 0.03252910077571869, 0.036715321242809296, 0.10129205137491226], [0.01844772696495056, 0.011695832945406437, 0.06074465438723564, 0.009857253171503544, 0.009578258730471134, 0.06713453680276871, 0.0788431242108345, 0.032032161951065063, 0.03684372082352638, 
0.058340493589639664, 0.07207685708999634, 0.06117810308933258, 0.048199985176324844, 0.08638468384742737, 0.05760035663843155, 0.019675279036164284, 0.014787339605391026, 0.036059074103832245, 0.055038969963788986, 0.03794366866350174, 0.019914530217647552, 0.033023901283741, 0.03758912533521652, 0.037010353058576584], [0.006544741801917553, 0.005803416948765516, 0.0028459173627197742, 0.011273724026978016, 0.020741382613778114, 0.08756251633167267, 0.012822270393371582, 0.0025615589693188667, 0.056272123008966446, 0.09784352034330368, 0.02954545058310032, 0.051851850003004074, 0.13996772468090057, 0.05688467249274254, 0.05744209140539169, 0.04339519515633583, 0.042464837431907654, 0.17742741107940674, 0.011986021883785725, 0.006718106102198362, 0.012248323298990726, 0.0261733066290617, 0.012013610452413559, 0.02761027216911316], [0.017300957813858986, 0.03367926552891731, 0.036592330783605576, 0.02416018396615982, 0.011830897070467472, 0.02774261124432087, 0.021115723997354507, 0.012791774235665798, 0.034859731793403625, 0.040404971688985825, 0.048272695392370224, 0.01992461085319519, 0.02674449048936367, 0.057517264038324356, 0.11228836327791214, 0.0561043843626976, 0.03500324487686157, 0.06388707458972931, 0.042949166148900986, 0.05194753408432007, 0.045351848006248474, 0.06213096156716347, 0.06868492066860199, 0.048715006560087204], [0.009963047690689564, 0.00965914037078619, 0.02332191914319992, 0.013317708857357502, 0.004801774397492409, 0.0474957674741745, 0.01857570931315422, 0.009688420221209526, 0.05367584526538849, 0.09772808104753494, 0.05067206546664238, 0.07815373688936234, 0.048410430550575256, 0.09469843655824661, 0.06545160710811615, 0.04705238714814186, 0.010222517885267735, 0.08044122159481049, 0.016157304868102074, 0.015551429241895676, 0.04260047897696495, 0.06443816423416138, 0.036411963403224945, 0.061510831117630005], [0.03126252070069313, 0.020819932222366333, 0.09786204248666763, 0.02180689573287964, 0.00559731712564826, 
0.04776964709162712, 0.029873816296458244, 0.008150676265358925, 0.06531527638435364, 0.0375894159078598, 0.03976799175143242, 0.07422943413257599, 0.02785240299999714, 0.0771007090806961, 0.0765165314078331, 0.05813127011060715, 0.010495917871594429, 0.036690134555101395, 0.022295579314231873, 0.011825586669147015, 0.06872309744358063, 0.03829217702150345, 0.023348281159996986, 0.06868330389261246], [0.017931679263710976, 0.02082997001707554, 0.013592890463769436, 0.00595585722476244, 0.011833704076707363, 0.01987910270690918, 0.009994877502322197, 0.008252882398664951, 0.022516515105962753, 0.03274918347597122, 0.04795476049184799, 0.027187757194042206, 0.028664283454418182, 0.05567461624741554, 0.05841263383626938, 0.07799123227596283, 0.08513118326663971, 0.1158405989408493, 0.04494904354214668, 0.041472721844911575, 0.05583946779370308, 0.05449356883764267, 0.08339592814445496, 0.059455517679452896], [0.004176610615104437, 0.004470194224268198, 0.009172826074063778, 0.002845326205715537, 0.004196343943476677, 0.019424328580498695, 0.008118782192468643, 0.010976830497384071, 0.004386488813906908, 0.03847615793347359, 0.03579086810350418, 0.01945209875702858, 0.03709090128540993, 0.0850062444806099, 0.08303123712539673, 0.040637820959091187, 0.03293966129422188, 0.10853230208158493, 0.06381111592054367, 0.13392740488052368, 0.03255620226264, 0.10856903344392776, 0.07175955921411514, 0.04065168648958206], [0.01783626154065132, 0.026741476729512215, 0.035102106630802155, 0.013020716607570648, 0.0076055158860981464, 0.023435642942786217, 0.016107307747006416, 0.0056090159341692924, 0.03412587568163872, 0.022036850452423096, 0.042067404836416245, 0.029653489589691162, 0.03279690444469452, 0.03593013063073158, 0.07754811644554138, 0.08030376583337784, 0.026646027341485023, 0.14977431297302246, 0.041567761451005936, 0.03156376630067825, 0.05625858157873154, 0.046250324696302414, 0.0693768560886383, 0.07864174246788025], [0.0014242156175896525, 0.0018071531085297465, 
0.38155266642570496, 0.0026183146983385086, 0.0005366720142774284, 0.001142557361163199, 0.005320638883858919, 0.004382590297609568, 0.0017408606363460422, 0.0037883655168116093, 0.011238360777497292, 0.002594140823930502, 0.002146426122635603, 0.02828398160636425, 0.13962553441524506, 0.01728997752070427, 0.0035071689635515213, 0.011426037177443504, 0.06106191873550415, 0.15371482074260712, 0.026340054348111153, 0.06308940798044205, 0.048264916986227036, 0.02710319496691227]], [[0.00045475777005776763, 0.0005392450839281082, 0.011391515843570232, 0.0012460522120818496, 0.0008968800539150834, 0.0018892899388447404, 0.0022814737167209387, 0.011805410496890545, 0.011661452241241932, 0.011717280372977257, 0.17997154593467712, 0.025979893282055855, 0.011776641011238098, 0.19720090925693512, 0.4530434012413025, 0.02574603632092476, 0.00320154195651412, 0.002854548394680023, 0.003930491860955954, 0.00677447859197855, 0.00394865358248353, 0.0020129310432821512, 0.02805178426206112, 0.0016238169046118855], [0.000379967677872628, 0.00042404085979796946, 0.010459593497216702, 0.0009129087557084858, 0.00037292364868335426, 0.0007076776237227023, 0.000699683150742203, 0.008919207379221916, 0.00511597516015172, 0.009110324084758759, 0.07994474470615387, 0.02427995577454567, 0.007660939358174801, 0.23694391548633575, 0.5422272682189941, 0.022152911871671677, 0.0018570291576907039, 0.0020449580624699593, 0.0024922573938965797, 0.015310120768845081, 0.005125564057379961, 0.0029519740492105484, 0.018452012911438942, 0.0014539946569129825], [0.002716467250138521, 0.001708358060568571, 0.1564943939447403, 0.02003067173063755, 0.017008502036333084, 0.03411902114748955, 0.052994996309280396, 0.12188499420881271, 0.11811618506908417, 0.011597088538110256, 0.20998582243919373, 0.025631068274378777, 0.007975665852427483, 0.019123338162899017, 0.09432456642389297, 0.01168769970536232, 0.005700765177607536, 0.0077717541716992855, 0.006427551154047251, 0.012574559077620506, 
0.004852576646953821, 0.0008908095769584179, 0.04181889072060585, 0.014564274810254574], [0.009203944355249405, 0.006260496098548174, 0.07266512513160706, 0.017780043184757233, 0.013011287897825241, 0.05749967321753502, 0.06811904907226562, 0.12794610857963562, 0.1272541731595993, 0.06294267624616623, 0.12383047491312027, 0.05584387108683586, 0.016916994005441666, 0.05330246686935425, 0.09654690325260162, 0.018669692799448967, 0.005514976568520069, 0.01010302733629942, 0.009632270783185959, 0.01176263578236103, 0.005545976106077433, 0.003448466071859002, 0.014956342987716198, 0.01124331820756197], [0.005064563360065222, 0.0032889836002141237, 0.06657988578081131, 0.005417375359684229, 0.004022302571684122, 0.004701568279415369, 0.010960759595036507, 0.05853160098195076, 0.069691963493824, 0.08916337788105011, 0.19908899068832397, 0.10115103423595428, 0.021834926679730415, 0.13703852891921997, 0.15427836775779724, 0.01313983928412199, 0.004636705853044987, 0.004238456953316927, 0.006535952910780907, 0.013480445370078087, 0.005582781974226236, 0.004432480316609144, 0.013174464926123619, 0.003964665811508894], [0.0038292461540549994, 0.003231657203286886, 0.03177547827363014, 0.0037257669027894735, 0.00821635127067566, 0.06708142161369324, 0.026782531291246414, 0.2614153325557709, 0.2735939621925354, 0.008274518884718418, 0.2577211856842041, 0.009464782662689686, 0.0008761683711782098, 0.007320926059037447, 0.0231307465583086, 0.002267410047352314, 0.001196197816170752, 0.0034799245186150074, 0.000991675304248929, 0.0018055125838145614, 0.00045799685176461935, 3.417681000428274e-05, 0.0032374823931604624, 8.962667197920382e-05], [0.007033525966107845, 0.011576304212212563, 0.013788470067083836, 0.0010150427697226405, 0.0015835158992558718, 0.0016700953710824251, 0.0027315246406942606, 0.018163420259952545, 0.019670790061354637, 0.08085625618696213, 0.0976361483335495, 0.11511768400669098, 0.03149374946951866, 0.322711318731308, 0.23195451498031616, 
0.026618212461471558, 0.0038527853321284056, 0.002133950823917985, 0.0028137436602264643, 0.0033578339498490095, 0.0005785958492197096, 0.0011102943681180477, 0.0019623911939561367, 0.000569770869333297], [0.00255717895925045, 0.0023232297971844673, 0.0423334576189518, 0.004224496893584728, 0.008241782896220684, 0.005132556427270174, 0.012125419452786446, 0.051634907722473145, 0.07063593715429306, 0.028231598436832428, 0.3404170572757721, 0.10301190614700317, 0.014484427869319916, 0.06600606441497803, 0.16639453172683716, 0.025083746761083603, 0.013512706384062767, 0.010033278726041317, 0.01146559976041317, 0.01227901317179203, 0.002144776051864028, 0.0005225111381150782, 0.006160618271678686, 0.0010432270355522633], [0.0006914559635333717, 0.0008582459413446486, 0.014017489738762379, 0.0007130759186111391, 0.0016421717591583729, 0.0007274546660482883, 0.003207982052117586, 0.0045150876976549625, 0.004405812826007605, 0.011076019145548344, 0.0887947678565979, 0.06232154741883278, 0.03518366813659668, 0.37397000193595886, 0.3527105152606964, 0.012912735342979431, 0.003368205390870571, 0.0018476609839126468, 0.0075867571868002415, 0.009208748117089272, 0.0016933567821979523, 0.0019134391332045197, 0.00575142540037632, 0.0008823815151117742], [0.01615557074546814, 0.019647827371954918, 0.022371456027030945, 0.0038414080627262592, 0.006148407235741615, 0.005085720214992762, 0.009474430233240128, 0.012156643904745579, 0.012348330579698086, 0.06551972776651382, 0.05688095837831497, 0.030832689255475998, 0.026702163740992546, 0.393511563539505, 0.13447074592113495, 0.025018228217959404, 0.009929420426487923, 0.008806884288787842, 0.03308578580617905, 0.04032173752784729, 0.015811748802661896, 0.03357211872935295, 0.015707258135080338, 0.0025992265436798334], [0.0028825150802731514, 0.0035973808262497187, 0.02950226329267025, 0.008306854404509068, 0.007477340288460255, 0.0035468898713588715, 0.0070793782360851765, 0.006206913851201534, 0.005167393479496241, 
0.005681034177541733, 0.027478782460093498, 0.03452429547905922, 0.08861824870109558, 0.1654369831085205, 0.22808945178985596, 0.05331571400165558, 0.029380546882748604, 0.026907049119472504, 0.043335821479558945, 0.07332009822130203, 0.030030246824026108, 0.023797476664185524, 0.045796968042850494, 0.05052029713988304], [0.001256331568583846, 0.0017740422626957297, 0.0013386360369622707, 0.000242883907048963, 0.00018698061467148364, 2.777675399556756e-05, 0.000270103249931708, 9.936097922036424e-05, 0.00014148815535008907, 0.02853262983262539, 0.0008711742120794952, 0.012628489173948765, 0.1718393713235855, 0.37157005071640015, 0.12966714799404144, 0.017637435346841812, 0.005620281212031841, 0.001030980609357357, 0.025355270132422447, 0.014369955286383629, 0.005998966749757528, 0.18426118791103363, 0.0030072396621108055, 0.022272180765867233], [0.009363126009702682, 0.013153091073036194, 0.005394411738961935, 0.0024963640607893467, 0.0021858662366867065, 0.00029123600688762963, 0.0018561345059424639, 0.00040086027001962066, 0.0008486073929816484, 0.006951355375349522, 0.002254656283184886, 0.01197607908397913, 0.10278864949941635, 0.12272900342941284, 0.06392492353916168, 0.03556089475750923, 0.022818563506007195, 0.01353990938514471, 0.09904692322015762, 0.03564412146806717, 0.03280947729945183, 0.14497295022010803, 0.03724616765975952, 0.2317466139793396], [0.000641919206827879, 0.0009944358607754111, 0.0008718185708858073, 0.0003055291308555752, 0.00033287706901319325, 3.328429374960251e-05, 0.0002903610293287784, 2.122330988640897e-05, 4.682856524595991e-05, 0.009218045510351658, 0.00043193131568841636, 0.008627885952591896, 0.14203426241874695, 0.054936591535806656, 0.02210487239062786, 0.0076469420455396175, 0.009299292229115963, 0.003435677383095026, 0.05758517235517502, 0.008293086662888527, 0.011848249472677708, 0.43702927231788635, 0.009191951714456081, 0.21477849781513214], [0.0015648017870262265, 0.0007830065442249179, 0.01609262451529503, 
0.015729451552033424, 0.007197363302111626, 0.0008223560289479792, 0.002730007516220212, 0.000516677217092365, 0.000741245283279568, 0.0017875464400276542, 0.00508248433470726, 0.004545846953988075, 0.01707698404788971, 0.005486220121383667, 0.01420997641980648, 0.010756048373878002, 0.03148059546947479, 0.027026118710637093, 0.09312469512224197, 0.08369550108909607, 0.13432857394218445, 0.1072278767824173, 0.12251909077167511, 0.2954748868942261], [0.0022121635265648365, 0.001892946078442037, 0.007572364527732134, 0.006032951641827822, 0.004293389152735472, 0.0006635914323851466, 0.001971452496945858, 0.00032518155057914555, 0.0003319759853184223, 0.007450744975358248, 0.002997630275785923, 0.008330565877258778, 0.026893096044659615, 0.012860219925642014, 0.013268264010548592, 0.008638528175652027, 0.022700341418385506, 0.013670692220330238, 0.08843280375003815, 0.047907207161188126, 0.09132370352745056, 0.3532435894012451, 0.060149531811475754, 0.21683718264102936], [0.004243243485689163, 0.0031238107476383448, 0.010579810477793217, 0.00791500136256218, 0.006757189519703388, 0.0008027831790968776, 0.0026800634805113077, 0.0006211638683453202, 0.0006054157274775207, 0.002287538256496191, 0.0019475530134513974, 0.007702616974711418, 0.029134754091501236, 0.007546776439994574, 0.004509374964982271, 0.0030145009513944387, 0.014932959340512753, 0.007952114567160606, 0.05151776224374771, 0.06031886115670204, 0.18029795587062836, 0.27456796169281006, 0.06276890635490417, 0.25417184829711914], [0.010397704318165779, 0.010565045289695263, 0.04677946865558624, 0.025793271139264107, 0.12909993529319763, 0.05891943722963333, 0.07266838848590851, 0.014060978777706623, 0.005935687571763992, 0.000487162615172565, 0.0057934122160077095, 0.001888609491288662, 0.009684424847364426, 0.0019358476856723428, 0.0036503963638097048, 0.0011884969426319003, 0.0234498530626297, 0.018111607059836388, 0.048217397183179855, 0.05136638134717941, 0.08090199530124664, 0.02154530957341194, 
0.19901850819587708, 0.15854057669639587], [0.007276770193129778, 0.016683632507920265, 0.0096178213134408, 0.0038327074144035578, 0.012883502058684826, 0.0015241262735798955, 0.006539557129144669, 0.0014677410945296288, 0.0005816163611598313, 0.0013600910315290093, 0.0008722182246856391, 0.005119961686432362, 0.05317530035972595, 0.010621320456266403, 0.007464257068932056, 0.004364188760519028, 0.02451547048985958, 0.004959017038345337, 0.031802963465452194, 0.019426479935646057, 0.027143457904458046, 0.09404812753200531, 0.061098020523786545, 0.5936216711997986], [0.0015937548596411943, 0.0017148578772321343, 0.024565985426306725, 0.015803713351488113, 0.04096681997179985, 0.007449297234416008, 0.032112568616867065, 0.007845424115657806, 0.006312922108918428, 0.0005583127494901419, 0.0031315700616687536, 0.0019414788112044334, 0.004058116115629673, 0.00081512430915609, 0.003400580957531929, 0.0046667843125760555, 0.04121137782931328, 0.0200587697327137, 0.044699527323246, 0.017410924658179283, 0.03851185739040375, 0.00979041401296854, 0.12132438272237778, 0.5500555038452148], [0.0016617262735962868, 0.0012772692134603858, 0.019461622461676598, 0.014968442730605602, 0.035286907106637955, 0.00687662186101079, 0.03605877235531807, 0.006212402600795031, 0.004710935056209564, 0.0007294472306966782, 0.0017847990384325385, 0.0017252133693546057, 0.003783758031204343, 0.0010470431298017502, 0.0020326953381299973, 0.0029391497373580933, 0.016939476132392883, 0.009715664200484753, 0.03000967763364315, 0.014515192247927189, 0.02646051160991192, 0.012137054465711117, 0.07879135757684708, 0.670874297618866], [0.026284025982022285, 0.014391519129276276, 0.043042805045843124, 0.07042823731899261, 0.06985072046518326, 0.05007807910442352, 0.09632628411054611, 0.04377845674753189, 0.03226802125573158, 0.00438779266551137, 0.004222824703902006, 0.0009837239049375057, 0.0012335969367995858, 0.0005921213887631893, 0.0010098336497321725, 0.004652820527553558, 0.02375533990561962, 
0.035155944526195526, 0.0588577538728714, 0.043112918734550476, 0.061929333955049515, 0.018736666068434715, 0.07779994606971741, 0.21712124347686768], [0.002142291283234954, 0.0010785666527226567, 0.06419593840837479, 0.04854796454310417, 0.0446387343108654, 0.028103657066822052, 0.07326719164848328, 0.014915626496076584, 0.01323198527097702, 0.0014480574754998088, 0.006379883270710707, 0.002620161045342684, 0.005200799088925123, 0.00025222942349500954, 0.0013703559525310993, 0.0023429563734680414, 0.023087099194526672, 0.045914310961961746, 0.04949241131544113, 0.02434178814291954, 0.026131387799978256, 0.006886293180286884, 0.04743586853146553, 0.4669744074344635], [0.02318374253809452, 0.011322458274662495, 0.02152951993048191, 0.016329726204276085, 0.013802312314510345, 0.005930097308009863, 0.04985307157039642, 0.004186280537396669, 0.004786998499184847, 0.05840057134628296, 0.0008688617963343859, 0.005467844195663929, 0.03517528250813484, 0.0007513358141295612, 0.0005584360915236175, 0.0010729384375736117, 0.01344385463744402, 0.006555152125656605, 0.09203135967254639, 0.012071790173649788, 0.01543420273810625, 0.14730946719646454, 0.00512262899428606, 0.45481210947036743]], [[0.13930176198482513, 0.03949093446135521, 0.05802241712808609, 0.08940353244543076, 0.020479470491409302, 0.04564790427684784, 0.012412328273057938, 0.03206614777445793, 0.013891497626900673, 0.008074542507529259, 0.013562404550611973, 0.02672845497727394, 0.002143092453479767, 0.0023143081925809383, 0.0006190554122440517, 0.0012561633484438062, 0.0018378890817984939, 0.031293291598558426, 0.014390012249350548, 0.1761254221200943, 0.16489185392856598, 0.044294122606515884, 0.0207300316542387, 0.041023340076208115], [0.06453584134578705, 0.0348065122961998, 0.06141658127307892, 0.13134074211120605, 0.0284498929977417, 0.04177197813987732, 0.04981774836778641, 0.04717491939663887, 0.05641203746199608, 0.006555191706866026, 0.021337056532502174, 0.014129508286714554, 0.005349853541702032, 
0.00827631726861, 0.011538339778780937, 0.009907579980790615, 0.00950423814356327, 0.019490627571940422, 0.027972782030701637, 0.05301758274435997, 0.14192113280296326, 0.018440118059515953, 0.07637065649032593, 0.060462746769189835], [0.008500703610479832, 0.005976158659905195, 0.04829787090420723, 0.011417316272854805, 0.04178498685359955, 0.2354743629693985, 0.013334246352314949, 0.003083930118009448, 0.24280036985874176, 0.3112172484397888, 0.03043907694518566, 0.005203102715313435, 0.01194420363754034, 0.004138248506933451, 0.0039055882953107357, 8.12631260487251e-05, 5.981262438581325e-05, 0.0004997053183615208, 0.00012345575669314712, 0.00029957323567941785, 0.004002101719379425, 0.0032256986014544964, 0.007266739849001169, 0.006924258545041084], [0.006662188097834587, 0.0022675180807709694, 0.006201609969139099, 0.0007911332650110126, 0.007404362317174673, 0.9451061487197876, 0.0019891925621777773, 0.00593430595472455, 0.004231947008520365, 0.0032021882943809032, 0.0008511350606568158, 0.000457221147371456, 0.00011775334132835269, 0.0003664021787699312, 0.00011424599506426603, 2.345737630093936e-05, 7.902140350779518e-05, 0.004600907675921917, 3.0864059226587415e-05, 0.0020989482291042805, 0.0005907363956794143, 0.0007994050392881036, 0.001974024809896946, 0.0041053262539207935], [0.005444988142699003, 0.004426186438649893, 0.024851683527231216, 0.01338035985827446, 0.023822445422410965, 0.023645002394914627, 0.5535364747047424, 0.17222358286380768, 0.04101523011922836, 0.0313786119222641, 0.0024297547060996294, 0.0008837362984195352, 0.000978405587375164, 0.0003273168986197561, 0.0012071267701685429, 0.0003049425140488893, 0.0003003137244377285, 0.00014199521683622152, 0.0011140013812109828, 0.00262083625420928, 0.005552958231419325, 0.04087429121136665, 0.011262495070695877, 0.038277409970760345], [0.0033138019498437643, 0.003942601848393679, 0.011827531270682812, 0.011874646879732609, 0.003982359077781439, 0.1426730453968048, 0.03699534013867378, 
0.5937643647193909, 0.006751682609319687, 0.040595944970846176, 0.0022100061178207397, 0.03779895231127739, 0.0001546627754578367, 0.004024169407784939, 0.0009010162320919335, 0.0005843464750796556, 3.986428419011645e-05, 0.00041262683225795627, 2.1068393834866583e-05, 0.0005744536756537855, 6.170880806166679e-05, 0.0026622929144650698, 0.0007184518035501242, 0.09411504119634628], [0.0006454493850469589, 0.0004093740426469594, 0.00048485351726412773, 0.00012826950114686042, 0.00023112082271836698, 0.0001992359320865944, 0.0007656703819520772, 0.0014428014401346445, 0.9892786145210266, 0.00484788604080677, 0.0004405889194458723, 6.515389395644888e-05, 0.0006080709281377494, 4.4849017285741866e-05, 9.28613735595718e-05, 5.590870841842843e-06, 2.098972436215263e-05, 1.253123627975583e-06, 5.413811322796391e-06, 8.434209348706645e-07, 8.415842603426427e-05, 8.492495908285491e-06, 0.00010567142453510314, 8.276064181700349e-05], [0.0048453486524522305, 0.0012007784098386765, 0.0007380428141914308, 0.001771052018739283, 0.00044084549881517887, 0.010238959453999996, 0.0005736697930842638, 0.014864546246826649, 0.0649065375328064, 0.8549669981002808, 0.0033844441641122103, 0.018259700387716293, 6.412939546862617e-05, 0.004488222301006317, 0.00017705005302559584, 0.005889184307307005, 0.0001921061339089647, 0.011680078692734241, 5.147097181179561e-05, 0.0003746422007679939, 5.88309922022745e-05, 0.00016165623674169183, 2.2868396627018228e-05, 0.0006487921345978975], [0.011400828137993813, 0.0030442550778388977, 0.00587640842422843, 0.003037232905626297, 0.001414690399542451, 0.0018793317722156644, 0.005593485198915005, 0.0032138412352651358, 0.25256964564323425, 0.006005534436553717, 0.6785050630569458, 0.011033318936824799, 0.0069617400877177715, 0.0005654082051478326, 0.0013679719995707273, 0.0001223970903083682, 0.0009606059757061303, 0.000783297698944807, 0.002413412556052208, 0.0003078838635701686, 0.0026808930560946465, 4.111627276870422e-06, 0.00025621167151257396, 
2.3848892851674464e-06], [0.003284144913777709, 0.002127761719748378, 0.0001131048338720575, 0.0009067434002645314, 3.7408946809591725e-05, 0.001143255620263517, 9.286079148296267e-06, 0.002163119614124298, 0.00022879136668052524, 0.0004170096945017576, 0.0016425540670752525, 0.9713624119758606, 3.2314717827830464e-05, 0.009159225039184093, 7.546973392891232e-06, 0.000576679827645421, 2.5072076823562384e-05, 0.004134649410843849, 2.0586569007718936e-05, 0.0025048658717423677, 3.59842051693704e-05, 4.561560217553051e-06, 1.2999465752727701e-06, 6.152066634967923e-05], [0.0011547575704753399, 0.0010883004870265722, 0.0006287310970947146, 0.00011806951806647703, 0.001497699529863894, 9.195123129757121e-05, 0.0017245520139113069, 2.5175253540510312e-05, 0.011959312483668327, 7.91777711128816e-05, 0.004360050894320011, 0.0004002484492957592, 0.927492618560791, 0.001297857379540801, 0.007669698912650347, 9.854532436293084e-06, 0.000566542730666697, 5.753132427344099e-06, 0.005063917953521013, 5.505376975634135e-05, 0.034220654517412186, 8.727081876713783e-05, 0.0004018655454274267, 9.440670964977471e-07], [0.0014982545981183648, 0.0018051696242764592, 2.4659368136781268e-05, 6.588870019186288e-05, 6.537719309562817e-05, 0.0006285866838879883, 4.267041276762029e-06, 6.452568050008267e-05, 8.47478659125045e-05, 0.0001884265075204894, 3.270435627200641e-05, 0.014014728367328644, 0.0005064454162493348, 0.973084032535553, 0.0007275301613844931, 0.004238339606672525, 5.970467464067042e-05, 0.0006253838073462248, 9.779042557056528e-06, 0.0012410050258040428, 0.0004985241102986038, 0.00030213649733923376, 2.878807208617218e-05, 0.0002008128649322316], [0.00020718701125588268, 0.0010211779735982418, 0.0004944722168147564, 2.1089523215778172e-05, 0.00010496922914171591, 5.397147106123157e-05, 0.000981867196969688, 7.59468020987697e-05, 0.0007823538035154343, 3.5689413380168844e-06, 0.0015146925579756498, 3.488703441689722e-05, 0.034074440598487854, 0.0040138536132872105, 
0.9428919553756714, 0.00031414447585120797, 0.0013891549315303564, 1.5497918184337323e-06, 0.00020353881700430065, 1.9607111880759476e-06, 0.0010109725408256054, 5.737797255278565e-05, 0.01071600429713726, 2.894510362239089e-05], [0.0001539300719741732, 0.0004441512282937765, 2.1153469788259827e-05, 5.390339356381446e-05, 1.1403281860111747e-05, 2.9613313017762266e-05, 7.678358997509349e-06, 0.0017381315119564533, 0.0001486924447817728, 0.00017429859144613147, 3.842080332105979e-05, 8.917442755773664e-05, 5.917262342336471e-07, 0.014704621396958828, 0.002694911789149046, 0.9709981083869934, 0.006004462018609047, 0.0022315005771815777, 1.729582618281711e-05, 4.799047746928409e-05, 2.34049434766348e-06, 2.219333327957429e-05, 0.0001112688914872706, 0.0002541717258282006], [0.0005445992574095726, 0.0006883411551825702, 0.0004998915828764439, 0.00039633820415474474, 0.0011266213841736317, 0.00017389804997947067, 0.00040597841143608093, 0.00010269950871588662, 0.014717621728777885, 0.00037789775524288416, 0.006544200703501701, 1.2734069059661124e-05, 0.0013304786989465356, 0.00019943766528740525, 0.04011918231844902, 0.03932566940784454, 0.8456553816795349, 0.011270823888480663, 0.025015488266944885, 5.9515394241316244e-05, 0.0007799380691722035, 2.2310507119982503e-05, 0.010558973997831345, 7.197792729130015e-05], [0.00025385103072039783, 0.0001069560821633786, 3.099281821050681e-05, 6.594930164283141e-05, 0.00017301812476944178, 0.00021125967032276094, 9.43696761623869e-07, 1.3285452041600365e-05, 3.2152649509953335e-05, 0.000366258027497679, 8.299069304484874e-05, 4.1851220885291696e-05, 1.5541652373940451e-06, 1.5052465641929302e-05, 5.414889528765343e-06, 0.003798122052103281, 0.012568887323141098, 0.9723410606384277, 0.0010996636701747775, 0.008478539995849133, 9.930554369930178e-05, 9.798465180210769e-05, 5.311637505656108e-05, 6.181683420436457e-05], [0.001827774802222848, 0.0008879292872734368, 0.000878850172739476, 0.003946749493479729, 0.012208668515086174, 
0.00018790965259540826, 0.000978094874881208, 8.803201490081847e-05, 0.001472638687118888, 0.0011564911110326648, 0.0027294622268527746, 7.61369155952707e-05, 0.0024125156924128532, 7.496370017179288e-06, 0.00012895507097709924, 0.0008588240016251802, 0.10718031227588654, 0.04243946447968483, 0.5383836030960083, 0.07125183194875717, 0.18512268364429474, 0.018454425036907196, 0.007164567243307829, 0.0001565931597724557], [0.0022971266880631447, 0.0023797843605279922, 0.0027676064055413008, 0.00843892339617014, 0.008962470106780529, 0.003530247835442424, 0.00034064723877236247, 0.00019170911400578916, 7.117666973499581e-05, 0.0015859125414863229, 0.0006573577993549407, 0.007780902087688446, 0.0007081666844896972, 0.0004682939616031945, 1.931321094161831e-05, 0.00021847648895345628, 0.00036916270619258285, 0.02696722373366356, 0.01162977609783411, 0.6891229748725891, 0.10513629764318466, 0.12267828732728958, 0.0009798984974622726, 0.0026981926057487726], [0.0004098855424672365, 0.00027686188695952296, 0.0003870846121571958, 0.0015562836779281497, 0.00134277471806854, 3.424773967708461e-05, 0.00018190339324064553, 4.07210563935223e-06, 0.001080439775250852, 2.91613869194407e-05, 8.541428542230278e-05, 1.906659235828556e-05, 0.0058044809848070145, 1.413358131685527e-05, 6.325068534351885e-05, 8.009193152247462e-06, 0.0001474281889386475, 3.153154830215499e-05, 0.003438267158344388, 0.0009384767035953701, 0.9599880576133728, 0.018674807623028755, 0.005312993656843901, 0.00017144852608907968], [0.0006756273796781898, 0.0006439946591854095, 0.0002547148906160146, 0.003916015382856131, 0.00019867850642185658, 0.0009172233985736966, 3.580210614018142e-05, 0.00012272500316612422, 4.622762844519457e-06, 0.00015749457816127688, 4.55092003903701e-06, 0.0013894011499360204, 1.537647403893061e-05, 0.005896333605051041, 0.0001135251295636408, 0.0020026187412440777, 1.0910917808359955e-05, 0.001367090386338532, 5.3336843848228455e-05, 0.014760979451239109, 0.03193492814898491, 
0.8567774891853333, 0.0012961787870153785, 0.07745035737752914], [0.0009921075543388724, 0.0009380790288560092, 0.0031468914821743965, 0.0011266631772741675, 0.0009619634365662932, 0.0016633995110169053, 0.002167955506592989, 0.0001399095926899463, 0.0011579814599826932, 6.172347184474347e-06, 0.00010893095168285072, 7.447565621987451e-06, 0.0010228067403659225, 0.0005576788098551333, 0.012825974263250828, 6.22431471128948e-05, 0.00018277870549354702, 3.3381747925886884e-05, 0.0004512109444476664, 0.0003731571778189391, 0.48018404841423035, 0.01940349116921425, 0.45739325881004333, 0.015092450194060802], [9.799934196053073e-05, 0.00020082498667761683, 0.00038213207153603435, 0.0003939012822229415, 3.898449722328223e-05, 0.00350753590464592, 0.00013389825471676886, 0.0017135088564828038, 6.68643624521792e-05, 3.0670569685753435e-05, 3.867626674036728e-06, 0.0002585445181466639, 1.5438131413247902e-06, 0.0017411914886906743, 0.00021579985332209617, 0.0004095069889444858, 4.497204372455599e-06, 7.92273785918951e-05, 1.0412286428618245e-06, 7.81149065005593e-05, 0.0001462678046664223, 0.00128938106354326, 0.0024645011872053146, 0.9867401719093323], [0.0016507487744092941, 0.0013727074256166816, 0.04591354727745056, 0.0021957517601549625, 0.0066556986421346664, 0.0016700313426554203, 0.2263377159833908, 0.013209737837314606, 0.2678860127925873, 0.00033678163890726864, 0.0037480290047824383, 1.0599411325529218e-05, 0.007416205480694771, 4.3340620322851464e-05, 0.06096404790878296, 0.00037845049519091845, 0.009949276223778725, 5.1475228246999905e-05, 0.008257650770246983, 8.288153912872076e-05, 0.03239460662007332, 0.0017201557056978345, 0.2920744717121124, 0.01568004861474037], [0.0033565526828169823, 0.0010285003809258342, 0.0023725703358650208, 0.002092445734888315, 0.0005413415492512286, 0.015452449209988117, 0.00034270514152012765, 0.07192496210336685, 0.012700412422418594, 0.011782096698880196, 0.00013391261745709926, 0.0010888312244787812, 3.451917791608139e-06, 
0.0011316946474835277, 0.00010541921074036509, 0.03289508447051048, 0.0012495802948251367, 0.03467119485139847, 2.277418752782978e-05, 0.005475026089698076, 0.00017155066598206758, 0.0010269087506458163, 0.0021815586369484663, 0.7982490062713623]]], [[[0.019881073385477066, 0.004943607375025749, 0.4184548556804657, 0.01045581791549921, 0.002075456315651536, 0.0343557633459568, 0.048332586884498596, 0.014426699839532375, 0.14406974613666534, 0.0036563007161021233, 0.023508338257670403, 0.008469097316265106, 0.014627613127231598, 0.0033486043103039265, 0.009498322382569313, 0.0006219372153282166, 0.0006184009835124016, 0.0033652468118816614, 0.008666254580020905, 0.005487739574164152, 0.11060306429862976, 0.006174437701702118, 0.061661068350076675, 0.042698025703430176], [0.013609882444143295, 0.0034520081244409084, 0.189138263463974, 0.010562298819422722, 0.006063918583095074, 0.020666304975748062, 0.06801896542310715, 0.009871577844023705, 0.04364645853638649, 0.0016100360080599785, 0.01797954924404621, 0.004186575300991535, 0.01022765040397644, 0.002086021937429905, 0.010567445307970047, 0.00141320435795933, 0.004178452305495739, 0.006758223753422499, 0.04958391189575195, 0.01705102249979973, 0.2571120858192444, 0.009684747084975243, 0.17278917133808136, 0.06974228471517563], [0.017931092530488968, 0.008835348300635815, 0.05903646722435951, 0.014203757047653198, 0.013473229482769966, 0.022574981674551964, 0.04184771701693535, 0.20257705450057983, 0.2995569109916687, 0.006698968354612589, 0.08281169831752777, 0.025749269872903824, 0.0109785171225667, 0.004180763382464647, 0.013923434540629387, 0.0012898005079478025, 0.005403261166065931, 0.0020631642546504736, 0.00426892377436161, 0.022688882425427437, 0.04342031106352806, 0.004433850292116404, 0.043264247477054596, 0.048788461834192276], [0.0012552287662401795, 0.0012578285532072186, 0.012613347731530666, 0.15928533673286438, 0.00516737112775445, 0.04148438572883606, 0.1532706320285797, 0.00563314463943243, 
0.007363566663116217, 0.011751417070627213, 0.0071308123879134655, 0.016238410025835037, 0.37798017263412476, 0.009139818139374256, 0.008598224259912968, 0.09207554161548615, 0.001097964239306748, 0.01235707476735115, 0.022985726594924927, 0.0027284969110041857, 0.004180058371275663, 0.012896871194243431, 0.008569302037358284, 0.024939261376857758], [0.051651421934366226, 0.031996969133615494, 0.25619739294052124, 0.007079883478581905, 0.010261334478855133, 0.08075278997421265, 0.10693520307540894, 0.12333234399557114, 0.027216708287596703, 0.01107801217585802, 0.013828528113663197, 0.006616093683987856, 0.0041747502982616425, 0.007506275549530983, 0.01677112840116024, 0.0008055752259679139, 0.003601688425987959, 0.010863615199923515, 0.023382479324936867, 0.08082277327775955, 0.023050332441926003, 0.0199571680277586, 0.04962893947958946, 0.032488591969013214], [0.007796285208314657, 0.0028727836906909943, 0.17713846266269684, 0.01313562411814928, 0.004266149364411831, 0.13568849861621857, 0.18079963326454163, 0.1421009600162506, 0.15045787394046783, 0.049076952040195465, 0.036630675196647644, 0.0296257883310318, 0.026522399857640266, 0.006329588126391172, 0.009531374089419842, 0.0008135517709888518, 0.00035976155777461827, 0.0036688209511339664, 0.0020124262664467096, 0.002013646299019456, 0.0009107889491133392, 0.002701927674934268, 0.005264004692435265, 0.010282051749527454], [0.019208746030926704, 0.007126846816390753, 0.19753196835517883, 0.0005513439537025988, 0.0036164121702313423, 0.033575210720300674, 0.014442810788750648, 0.31926462054252625, 0.33068305253982544, 0.014980986714363098, 0.03771710395812988, 0.005984459538012743, 0.00019026026711799204, 0.0022296744864434004, 0.0022046419326215982, 2.3388591216644272e-05, 0.000406170089263469, 0.0012016692198812962, 0.00028215444763191044, 0.0031755988020449877, 0.001327495090663433, 0.0006367161986418068, 0.0023906866554170847, 0.0012480518780648708], [0.010988208465278149, 0.006453624926507473, 
0.04814468324184418, 0.0060347807593643665, 0.01165576372295618, 0.006287321448326111, 0.01480704452842474, 0.013984563760459423, 0.6549962162971497, 0.060363754630088806, 0.03690367937088013, 0.06428009271621704, 0.024503527209162712, 0.01876104809343815, 0.00719526968896389, 0.0007757340790703893, 0.0013903715880587697, 0.0004077540652360767, 0.0007652504718862474, 0.00020346262317616493, 0.00435783201828599, 0.0023084753192961216, 0.001638896530494094, 0.002792613347992301], [0.019224805757403374, 0.008092065341770649, 0.026134807616472244, 0.0025418451987206936, 0.0033112792298197746, 0.01060313917696476, 0.002328697359189391, 0.06781300902366638, 0.5828004479408264, 0.042971838265657425, 0.0797511413693428, 0.11517059803009033, 0.0017463115509599447, 0.009455770254135132, 0.01012937817722559, 0.0011417546775192022, 0.0015389305772259831, 0.0018514108378440142, 0.0003047730715479702, 0.0022384924814105034, 0.0057381195947527885, 0.0012722618412226439, 0.0013152190949767828, 0.002523774979636073], [0.044781506061553955, 0.036757439374923706, 0.005701499991118908, 0.022716520354151726, 0.001034466433338821, 0.02683790773153305, 0.0034293527714908123, 0.018121568486094475, 0.1664525717496872, 0.011969794519245625, 0.02640678733587265, 0.24035635590553284, 0.19475488364696503, 0.13562749326229095, 0.013669077306985855, 0.024971485137939453, 0.000844152644276619, 0.008551876991987228, 0.0008476028451696038, 0.004636112600564957, 0.004655761644244194, 0.000667159678414464, 0.0011510930489748716, 0.005057485308498144], [0.05701106786727905, 0.033717162907123566, 0.08472732454538345, 0.005061004310846329, 0.0048034582287073135, 0.023117652162909508, 0.0018321748357266188, 0.11590989679098129, 0.07903172820806503, 0.018742838874459267, 0.11310338973999023, 0.25816428661346436, 0.0013631859328597784, 0.02295496128499508, 0.027104433625936508, 0.00361433532088995, 0.004737792070955038, 0.00740152969956398, 0.0011313859140500426, 0.02921513468027115, 0.019208716228604317, 
0.005747000686824322, 0.01570310816168785, 0.06659632176160812], [0.0001708488998701796, 0.0003076220164075494, 3.619664494181052e-05, 0.003161297645419836, 6.0120892158010975e-05, 0.0002372527087572962, 0.0005635506240651011, 8.993493247544393e-05, 0.0030379844829440117, 0.0005658043664880097, 0.0021199118345975876, 0.022404277697205544, 0.874381959438324, 0.03300470486283302, 0.005127068608999252, 0.04918646067380905, 0.00012411363422870636, 0.0006253106985241175, 0.0015093209221959114, 0.0003054601838812232, 0.0017073367489501834, 0.00016320311988238245, 0.000256827799603343, 0.0008533855434507132], [0.0016628324519842863, 0.0037539068143814802, 0.006707064341753721, 0.00808988232165575, 0.00020400734501890838, 0.0021204063668847084, 0.003143040230497718, 0.005666619632393122, 0.009021175093948841, 0.00516633503139019, 0.03437494859099388, 0.10430494695901871, 0.09445860236883163, 0.11460649967193604, 0.39729708433151245, 0.09716301411390305, 0.00099789013620466, 0.01080156397074461, 0.01554829441010952, 0.02701089344918728, 0.02039976790547371, 0.003957673907279968, 0.012520176358520985, 0.02102336846292019], [0.0008295879233628511, 0.0008953830692917109, 0.00027777699870057404, 0.00926094688475132, 0.00022916658781468868, 0.0007175002247095108, 0.006055368576198816, 0.00031907603261061013, 0.0017892604228109121, 0.0005906313890591264, 0.00849920604377985, 0.015853043645620346, 0.6632227301597595, 0.012678463943302631, 0.10199599713087082, 0.06919489800930023, 0.0017849511932581663, 0.003970711957663298, 0.056606873869895935, 0.00478969095274806, 0.018469197675585747, 0.0015162978088483214, 0.011424618773162365, 0.00902867503464222], [0.0004875172453466803, 0.0011073598871007562, 0.0005650985985994339, 0.0008407611749134958, 0.0001320053415838629, 0.00017452346219215542, 0.0002999090065713972, 0.002111380686983466, 0.0006070459494367242, 0.00017223697795998305, 0.007924476638436317, 0.0016128295101225376, 0.001760918297804892, 0.0012448024936020374, 
0.07911416888237, 0.00767369382083416, 0.0035878049675375223, 0.005963717587292194, 0.0349162295460701, 0.31631651520729065, 0.37859034538269043, 0.009031559340655804, 0.10002937912940979, 0.045735638588666916], [0.0002630715898703784, 0.0010675856610760093, 0.0004236501990817487, 0.03810707479715347, 0.002044808119535446, 0.0014357909094542265, 0.018174398690462112, 0.0004918805207125843, 0.0001808080996852368, 0.0011577418772503734, 0.002048756694421172, 0.002293315250426531, 0.3119078278541565, 0.008099162019789219, 0.028932249173521996, 0.27301156520843506, 0.006493071559816599, 0.01750408671796322, 0.22269389033317566, 0.016250599175691605, 0.01150817796587944, 0.01462104544043541, 0.013643700629472733, 0.007645765785127878], [0.005793123506009579, 0.00816405564546585, 0.010098936036229134, 0.00106205849442631, 0.0020070690661668777, 0.0019422871991991997, 0.005865901708602905, 0.004788143560290337, 0.0002139526477549225, 0.0004631498595699668, 0.0013481192290782928, 0.00031261990079656243, 0.0003296411596238613, 0.001165769062936306, 0.019091719761490822, 0.001122134504839778, 0.009782946668565273, 0.011650200001895428, 0.1422576904296875, 0.45696085691452026, 0.1163138598203659, 0.041267622262239456, 0.12836354970932007, 0.029634416103363037], [0.011783850379288197, 0.010663853026926517, 0.05362605303525925, 0.009245323948562145, 0.012688630260527134, 0.02676558308303356, 0.029352011159062386, 0.02491229586303234, 0.006411372683942318, 0.0043987976387143135, 0.019685355946421623, 0.005163111723959446, 0.008637171238660812, 0.008017405867576599, 0.03535323590040207, 0.005573717877268791, 0.021911898627877235, 0.05996986851096153, 0.1064349040389061, 0.18925833702087402, 0.12594786286354065, 0.0332241989672184, 0.1420002430677414, 0.0489749014377594], [0.01072631310671568, 0.008769480511546135, 0.020298222079873085, 0.0003184432571288198, 0.0020628501661121845, 0.0018302003154531121, 0.0027570901438593864, 0.008230681531131268, 0.0021842338610440493, 
0.0004641809209715575, 0.005148135591298342, 0.00018620672926772386, 5.421250898507424e-05, 0.0009240649524144828, 0.008334076032042503, 0.00014004443073645234, 0.006738211028277874, 0.008335371501743793, 0.04166193678975105, 0.2532450258731842, 0.3830585181713104, 0.020479841157794, 0.2013404667377472, 0.012712112627923489], [0.004826436750590801, 0.00749714020639658, 0.006618823856115341, 0.0026623005978763103, 0.012042568065226078, 0.001150486757978797, 0.010926388204097748, 0.0007932361331768334, 0.0025129325222223997, 0.001998291350901127, 0.004683435428887606, 0.0011255793506279588, 0.004221299197524786, 0.0036143322940915823, 0.014786082319915295, 0.0012133074924349785, 0.018145300447940826, 0.003129514865577221, 0.09718029946088791, 0.01198839396238327, 0.38583463430404663, 0.08964654803276062, 0.26150333881378174, 0.05189932882785797], [0.0002661417529452592, 0.0002722910139709711, 0.0004501163202803582, 2.1706748157157563e-05, 4.207923120702617e-05, 2.0545128791127354e-05, 2.2025147700333036e-05, 5.272766065900214e-05, 0.00020654761465266347, 1.585428799444344e-05, 0.0002115843235515058, 5.256159965938423e-06, 1.3594809615824488e-06, 1.9890625480911694e-05, 0.0008420141530223191, 1.4563121112587396e-05, 0.000383574835723266, 0.00021856614330317825, 0.0017320741899311543, 0.007143924944102764, 0.8583312034606934, 0.0062454924918711185, 0.11565396189689636, 0.007826501503586769], [0.026225430890917778, 0.05040296912193298, 0.010091429576277733, 0.009941425174474716, 0.0017855536425486207, 0.011153324507176876, 0.002376021584495902, 0.006644361186772585, 0.011501806788146496, 0.0007182011613622308, 0.00733142951503396, 0.0031008776277303696, 0.00772064970806241, 0.01472758874297142, 0.014700021594762802, 0.005951692350208759, 0.005150541663169861, 0.019079847261309624, 0.009887054562568665, 0.0826927125453949, 0.32821446657180786, 0.009953184053301811, 0.23619571328163147, 0.12445367872714996], [0.0022056903690099716, 0.0016723492881283164, 
0.021224696189165115, 0.0001228504115715623, 0.00020343929645605385, 0.0007226894958876073, 0.00012609375698957592, 0.003484548069536686, 0.003322270466014743, 0.00013409738312475383, 0.001198122976347804, 9.851360664470121e-05, 2.2635526875092182e-06, 7.159564120229334e-05, 0.0010596929350867867, 1.556097595312167e-05, 0.00044630846241489053, 0.0007625381113030016, 0.0006373647483997047, 0.02671213634312153, 0.4787088632583618, 0.009298663586378098, 0.2359265685081482, 0.21184302866458893], [0.00353870983235538, 0.0062141986563801765, 0.006109766662120819, 0.01932753250002861, 0.006921886466443539, 0.007834067568182945, 0.017243975773453712, 0.004260269459336996, 0.02335192635655403, 0.0015175595181062818, 0.004752134904265404, 0.0022007895167917013, 0.06566236168146133, 0.0068142651580274105, 0.006600585300475359, 0.009590771049261093, 0.008120439015328884, 0.010459288954734802, 0.03350088745355606, 0.023210890591144562, 0.33650973439216614, 0.016730330884456635, 0.2013566493988037, 0.1781710684299469]], [[0.048338014632463455, 0.03277881070971489, 0.0682804062962532, 0.05091836676001549, 0.03885103762149811, 0.11145161837339401, 0.07199421525001526, 0.09898052364587784, 0.17824573814868927, 0.042033616453409195, 0.09246447682380676, 0.012608595192432404, 0.008821632713079453, 0.005236830096691847, 0.013232759200036526, 0.018578628078103065, 0.014176525175571442, 0.013587637804448605, 0.008167053572833538, 0.011650429107248783, 0.0173820648342371, 0.011714029125869274, 0.02316046506166458, 0.007346419617533684], [0.05514170974493027, 0.022311965003609657, 0.04027523100376129, 0.045643098652362823, 0.03543233126401901, 0.059769559651613235, 0.041447002440690994, 0.05821620672941208, 0.11095540970563889, 0.04763070121407509, 0.06123202294111252, 0.03392468020319939, 0.01745922863483429, 0.016825437545776367, 0.01805664785206318, 0.02845917083323002, 0.026464445516467094, 0.03207579255104065, 0.02792332135140896, 0.038276299834251404, 0.08227863162755966, 
0.03223331272602081, 0.039013203233480453, 0.02895454503595829], [0.01832721382379532, 0.0063684540800750256, 0.044155653566122055, 0.02281567081809044, 0.014765726402401924, 0.03855925798416138, 0.059980764985084534, 0.2987450361251831, 0.36276015639305115, 0.03768167272210121, 0.05537047237157822, 0.004033038392663002, 0.0016553901368752122, 0.0006422238657251, 0.0016782539896667004, 0.0037125651724636555, 0.002914806827902794, 0.001453483011573553, 0.0019748203922063112, 0.007397947832942009, 0.003403944196179509, 0.0037868269719183445, 0.003709772601723671, 0.004106798674911261], [0.004011150915175676, 0.0044591110199689865, 0.056088242679834366, 0.010401604697108269, 0.00392127176746726, 0.008323890157043934, 0.025292644277215004, 0.033130984753370285, 0.21484830975532532, 0.12154295295476913, 0.046204447746276855, 0.08003167808055878, 0.07060546427965164, 0.025298351421952248, 0.08112812787294388, 0.010153081268072128, 0.0025777590926736593, 0.003559345379471779, 0.016170769929885864, 0.012979342602193356, 0.0420355349779129, 0.049185991287231445, 0.016632268205285072, 0.06141768395900726], [0.006608365103602409, 0.005881150718778372, 0.10222361236810684, 0.006451115943491459, 0.005369276739656925, 0.01108497567474842, 0.047336798161268234, 0.0382218100130558, 0.42087990045547485, 0.07350991666316986, 0.04863511770963669, 0.04199335724115372, 0.03026905283331871, 0.03808959200978279, 0.06794723868370056, 0.006325597874820232, 0.0017380894860252738, 0.0029929648153483868, 0.007961318828165531, 0.0034698641393333673, 0.009289875626564026, 0.00808543711900711, 0.007807251997292042, 0.00782827939838171], [0.004935511387884617, 0.0032414966262876987, 0.02916231006383896, 0.011967229656875134, 0.0075362673960626125, 0.03737121820449829, 0.02731594257056713, 0.11613459140062332, 0.5138084888458252, 0.06710246950387955, 0.09019284695386887, 0.028699766844511032, 0.013417616486549377, 0.006319084204733372, 0.013337451033294201, 0.007440966088324785, 
0.0020174116361886263, 0.004173384513705969, 0.002126971958205104, 0.003964000381529331, 0.0029559952672570944, 0.0024630120024085045, 0.0026574935764074326, 0.0016584310214966536], [0.015035024844110012, 0.003537554293870926, 0.06405086070299149, 0.008753681555390358, 0.0062441276386380196, 0.02719431184232235, 0.03939962759613991, 0.10443838685750961, 0.4919649064540863, 0.049634382128715515, 0.1116214394569397, 0.035328663885593414, 0.0064726886339485645, 0.007346155121922493, 0.012312917970120907, 0.0032164151780307293, 0.0015676093753427267, 0.0015091145178303123, 0.00197822623886168, 0.0014682561159133911, 0.0017041524406522512, 0.001248587854206562, 0.0025335291866213083, 0.0014393687015399337], [0.006599353160709143, 0.012611552141606808, 0.026442663744091988, 0.04928253963589668, 0.013129997998476028, 0.01780802756547928, 0.04206087067723274, 0.01248527318239212, 0.08843068033456802, 0.09338648617267609, 0.16243381798267365, 0.19248270988464355, 0.08679069578647614, 0.04213471710681915, 0.054583657532930374, 0.052985526621341705, 0.008740384131669998, 0.011355499736964703, 0.009469258598983288, 0.000943297054618597, 0.002190887928009033, 0.003861677600070834, 0.00413529621437192, 0.005655061453580856], [0.005610068328678608, 0.004743647295981646, 0.015062494203448296, 0.010430149734020233, 0.00847281701862812, 0.015573985874652863, 0.027927838265895844, 0.041249729692935944, 0.10642439126968384, 0.1192433089017868, 0.2887028455734253, 0.16099229454994202, 0.07383166253566742, 0.013519088737666607, 0.06870436668395996, 0.010286489501595497, 0.00434951763600111, 0.004520139191299677, 0.0045061856508255005, 0.002858045045286417, 0.0013340383302420378, 0.004851922858506441, 0.003548793029040098, 0.003256122348830104], [0.003168831579387188, 0.008638164028525352, 0.004018976353108883, 0.013776767067611217, 0.0015179611509665847, 0.002701187739148736, 0.0028914392460137606, 0.0014903696719557047, 0.008312379010021687, 0.04908212274312973, 0.012444966472685337, 
0.30941951274871826, 0.05042266473174095, 0.3360762894153595, 0.019560931250452995, 0.04132338613271713, 0.0020290291868150234, 0.005244853440672159, 0.004370006732642651, 0.001574046560563147, 0.00557099562138319, 0.017534712329506874, 0.003639592556282878, 0.09519088268280029], [0.018303362652659416, 0.014631111174821854, 0.02147618681192398, 0.03621858358383179, 0.061028894037008286, 0.027743211016058922, 0.026184048503637314, 0.027203300967812538, 0.030541863292455673, 0.10820669680833817, 0.08473269641399384, 0.08094222098588943, 0.13647297024726868, 0.015400869771838188, 0.04528549686074257, 0.02997232973575592, 0.04681727662682533, 0.013927212916314602, 0.00701448880136013, 0.0074025229550898075, 0.00782169122248888, 0.05955428257584572, 0.029627395793795586, 0.0634913295507431], [0.0010874747531488538, 0.002277818275615573, 0.0017187120392918587, 0.0029791847337037325, 0.0005530154448933899, 0.0004424526705406606, 0.0007323749596253037, 0.00039645162178203464, 0.0029550467152148485, 0.02914118766784668, 0.004111196845769882, 0.3050056993961334, 0.1903924196958542, 0.18304765224456787, 0.02925686165690422, 0.01695321872830391, 0.0011993463849648833, 0.00239546038210392, 0.00395404826849699, 0.001817727112211287, 0.015483787283301353, 0.04043592885136604, 0.004677083808928728, 0.15898580849170685], [0.0006975418073125184, 0.001422880799509585, 0.005661225877702236, 0.0020118318498134613, 0.0004861743072979152, 0.00021805190772283822, 0.0011078818934038281, 0.0006554374122060835, 0.0013742947485297918, 0.005088325589895248, 0.002135366667062044, 0.019851069897413254, 0.09811925143003464, 0.033235955983400345, 0.14290599524974823, 0.011806574650108814, 0.004081250634044409, 0.0044463458471000195, 0.04343738406896591, 0.031117456033825874, 0.16666938364505768, 0.1346733421087265, 0.03384983912110329, 0.25494712591171265], [0.0005165397888049483, 0.0013392759719863534, 0.0004061987856402993, 0.0009640479111112654, 7.30629762983881e-05, 2.9694580007344484e-05, 
5.832681927131489e-05, 3.952782572014257e-05, 0.0003019586147274822, 0.0008335595484822989, 0.0002163048047805205, 0.03990168869495392, 0.011608374305069447, 0.13699549436569214, 0.0047285654582083225, 0.007937861606478691, 0.0008248365484178066, 0.002502624411135912, 0.004989554639905691, 0.005184648558497429, 0.1800728440284729, 0.026923958212137222, 0.007998406887054443, 0.5655527114868164], [0.0006614304729737341, 0.0009946146747097373, 0.0031574831809848547, 0.0014282866613939404, 0.0006050717202015221, 5.2867653721477836e-05, 0.0004230451013427228, 0.0004541248199529946, 0.0024157799780368805, 0.0024056490510702133, 0.004216826520860195, 0.01589256152510643, 0.014972160570323467, 0.006366419605910778, 0.03636571019887924, 0.004831856582313776, 0.007858012802898884, 0.0011578421108424664, 0.01234491728246212, 0.01792629063129425, 0.33268874883651733, 0.047093406319618225, 0.06280004233121872, 0.42288681864738464], [0.0020637924317270517, 0.005122003145515919, 0.008330139331519604, 0.002881180727854371, 0.0008321632631123066, 0.0005918068345636129, 0.0024635253939777613, 0.001599400769919157, 0.00518937548622489, 0.015524622984230518, 0.0031123412773013115, 0.02739102579653263, 0.04334324970841408, 0.06127425283193588, 0.05342298746109009, 0.008846462704241276, 0.0032656663097441196, 0.00635623699054122, 0.05282898619771004, 0.043489307165145874, 0.3233993649482727, 0.1573188304901123, 0.027790257707238197, 0.14356297254562378], [0.01134486123919487, 0.012578233145177364, 0.08726249635219574, 0.004529392346739769, 0.005926514510065317, 0.002103372011333704, 0.020365513861179352, 0.009005527943372726, 0.03491144999861717, 0.011352497152984142, 0.007550016976892948, 0.009538741782307625, 0.01972503960132599, 0.03749774396419525, 0.10024040192365646, 0.0068826861679553986, 0.009894282557070255, 0.006441814359277487, 0.07298973202705383, 0.04149041697382927, 0.30198225378990173, 0.0636766329407692, 0.06787886470556259, 0.05483159050345421], [0.01636282354593277, 
0.019549531862139702, 0.026563147082924843, 0.017807377502322197, 0.014852337539196014, 0.011973336338996887, 0.01075297873467207, 0.041245874017477036, 0.0247456356883049, 0.012931805104017258, 0.007687937468290329, 0.005687241908162832, 0.010965188033878803, 0.01424581091850996, 0.016957595944404602, 0.017561759799718857, 0.020427672192454338, 0.025869490578770638, 0.037526924163103104, 0.2304878532886505, 0.28051385283470154, 0.06865095347166061, 0.040656089782714844, 0.02597687393426895], [0.03560702130198479, 0.01319943368434906, 0.07932274788618088, 0.012460506521165371, 0.013682031072676182, 0.009477243758738041, 0.025187194347381592, 0.048841193318367004, 0.023917999118566513, 0.0049353959038853645, 0.003691227175295353, 0.0026292053516954184, 0.0022867934312671423, 0.0042809671722352505, 0.008727882988750935, 0.0048105730675160885, 0.015056949108839035, 0.0076707531698048115, 0.045614197850227356, 0.10349805653095245, 0.3540416359901428, 0.047019604593515396, 0.06613069772720337, 0.06791071593761444], [0.007674859836697578, 0.019131416454911232, 0.03328872472047806, 0.04582054167985916, 0.024414217099547386, 0.006810206454247236, 0.0314902625977993, 0.005101368762552738, 0.004706544801592827, 0.007621129043400288, 0.002679663011804223, 0.005544146988540888, 0.015157226473093033, 0.006887955125421286, 0.020288318395614624, 0.036137066781520844, 0.04093242809176445, 0.027222607284784317, 0.09770945459604263, 0.021227775141596794, 0.1520049124956131, 0.08195893466472626, 0.06739065796136856, 0.2387995570898056], [0.008969198912382126, 0.005406960379332304, 0.07036426663398743, 0.0070423465222120285, 0.02318664640188217, 0.00835131574422121, 0.04983873292803764, 0.036860059946775436, 0.012276710011065006, 0.00549501134082675, 0.002503779251128435, 0.0010551010491326451, 0.0027881311252713203, 0.000500800961162895, 0.01355099305510521, 0.0022265464067459106, 0.02545531652867794, 0.008191600441932678, 0.09132403880357742, 0.09646525233983994, 
0.21390089392662048, 0.08684982359409332, 0.08420388400554657, 0.14319251477718353], [0.008855712600052357, 0.014345875009894371, 0.02744276635348797, 0.025791430845856667, 0.009600582532584667, 0.01035625021904707, 0.026152074337005615, 0.00612005265429616, 0.007075977977365255, 0.013845800422132015, 0.0012664339737966657, 0.0067625814117491245, 0.0030906128231436014, 0.014494822360575199, 0.0035812505520880222, 0.017309503629803658, 0.008822609670460224, 0.010530318133533001, 0.034097496420145035, 0.012079977430403233, 0.05629425495862961, 0.05982597917318344, 0.023014184087514877, 0.5992435216903687], [0.017001153901219368, 0.008487739600241184, 0.17570902407169342, 0.013445720076560974, 0.07749814540147781, 0.02372821792960167, 0.14692135155200958, 0.03495509549975395, 0.04614511877298355, 0.020766599103808403, 0.010373423807322979, 0.0018413407960906625, 0.00704952934756875, 0.0005108210607431829, 0.00903778150677681, 0.0027765552513301373, 0.04222257062792778, 0.006183512508869171, 0.03319339081645012, 0.011502066627144814, 0.04490777105093002, 0.059278883039951324, 0.08644455671310425, 0.12001968175172806], [0.03521139174699783, 0.016307421028614044, 0.14723405241966248, 0.012843099422752857, 0.022320061922073364, 0.025502439588308334, 0.12276306748390198, 0.017224546521902084, 0.042145367711782455, 0.044988613575696945, 0.0036075518000870943, 0.011091026477515697, 0.005712335463613272, 0.006714814342558384, 0.0035845160018652678, 0.0035124493297189474, 0.007342902012169361, 0.006092245224863291, 0.04427371919155121, 0.0065823267214000225, 0.05862134322524071, 0.05808323249220848, 0.029388803988695145, 0.26885271072387695]], [[0.05880116671323776, 0.05395838990807533, 0.06199415773153305, 0.05929533764719963, 0.03798104450106621, 0.014325137250125408, 0.006048514507710934, 0.04016499221324921, 0.03354911878705025, 0.02684624306857586, 0.015989087522029877, 0.04478638246655464, 0.014264996163547039, 0.025180252268910408, 0.03975331038236618, 
0.07470760494470596, 0.060487065464258194, 0.01846013218164444, 0.00987135898321867, 0.03203030303120613, 0.03998611867427826, 0.03469281271100044, 0.0510309673845768, 0.14579547941684723], [0.026207031682133675, 0.024194642901420593, 0.03819757327437401, 0.03078390099108219, 0.040768057107925415, 0.01472409162670374, 0.011826983653008938, 0.026718920096755028, 0.06306087225675583, 0.03562479838728905, 0.03751302883028984, 0.10592607408761978, 0.06331663578748703, 0.058305539190769196, 0.08894119411706924, 0.09339089691638947, 0.07008850574493408, 0.015470017679035664, 0.015154477208852768, 0.015674322843551636, 0.02796551212668419, 0.014060338959097862, 0.02940642461180687, 0.05268013849854469], [0.008194787427783012, 0.017019832506775856, 0.10547508299350739, 0.023253703489899635, 0.07118814438581467, 0.04193822667002678, 0.05746816098690033, 0.008756548166275024, 0.07504921406507492, 0.06697011739015579, 0.042271021753549576, 0.027382345870137215, 0.09654130786657333, 0.0286164041608572, 0.08059622347354889, 0.006234019063413143, 0.03771095722913742, 0.0316949337720871, 0.019449302926659584, 0.003196472767740488, 0.017704177647829056, 0.03861239179968834, 0.037561360746622086, 0.05711522698402405], [0.019834816455841064, 0.016706964001059532, 0.029700160026550293, 0.014634719118475914, 0.02750110812485218, 0.01555626280605793, 0.03759649395942688, 0.013295226730406284, 0.03003031760454178, 0.05513175576925278, 0.05146203190088272, 0.02096763253211975, 0.10835204273462296, 0.04243059456348419, 0.1050003245472908, 0.033867247402668, 0.04876459389925003, 0.027900053188204765, 0.05606972053647041, 0.02192607708275318, 0.036635953933000565, 0.08269978314638138, 0.07185886800289154, 0.032077252864837646], [0.04341038689017296, 0.019136548042297363, 0.03185676783323288, 0.033492885529994965, 0.017308764159679413, 0.03536931425333023, 0.008639143779873848, 0.05206209421157837, 0.018652211874723434, 0.01300684455782175, 0.05836741253733635, 0.04627922922372818, 
0.022901501506567, 0.03430720418691635, 0.042066268622875214, 0.05332156643271446, 0.02438455820083618, 0.040976546704769135, 0.017150137573480606, 0.13443490862846375, 0.054412584751844406, 0.029104454442858696, 0.10809757560491562, 0.0612611398100853], [0.08598366379737854, 0.06950937956571579, 0.08373668789863586, 0.07940995693206787, 0.037134867161512375, 0.03749116137623787, 0.07298212498426437, 0.18929792940616608, 0.08103679120540619, 0.03296736255288124, 0.029213042929768562, 0.012618916109204292, 0.009213370271027088, 0.008648489601910114, 0.006422703620046377, 0.016849907115101814, 0.008786873891949654, 0.004747224971652031, 0.011206373572349548, 0.03429139032959938, 0.01716040074825287, 0.018990451470017433, 0.025423133745789528, 0.026877840980887413], [0.03873506188392639, 0.0490078441798687, 0.18672259151935577, 0.14210468530654907, 0.05639944225549698, 0.11277605593204498, 0.03044210374355316, 0.028056029230356216, 0.03100612387061119, 0.019537348300218582, 0.025615006685256958, 0.004461017437279224, 0.006146891042590141, 0.0064237178303301334, 0.032186683267354965, 0.017789697274565697, 0.01731436885893345, 0.03569108620285988, 0.00622418150305748, 0.010443158447742462, 0.013075708411633968, 0.029736561700701714, 0.06810437887907028, 0.03200019523501396], [0.025592371821403503, 0.019969483837485313, 0.09447839111089706, 0.06915228813886642, 0.03768029808998108, 0.18029573559761047, 0.024663900956511497, 0.014968130737543106, 0.058107439428567886, 0.02584218606352806, 0.020915433764457703, 0.025514664128422737, 0.012078240513801575, 0.027853747829794884, 0.03407389670610428, 0.036407556384801865, 0.017832722514867783, 0.07798892259597778, 0.009115062654018402, 0.008914715610444546, 0.03784490004181862, 0.033288147300481796, 0.03747720643877983, 0.0699445828795433], [0.0288193728774786, 0.035982437431812286, 0.15281297266483307, 0.03429968282580376, 0.0756339505314827, 0.059039756655693054, 0.044657152146101, 0.020911874249577522, 0.25703728199005127, 
0.044460784643888474, 0.06694146245718002, 0.004233578220009804, 0.009126854129135609, 0.00797815341502428, 0.03826155886054039, 0.003957219887524843, 0.021272366866469383, 0.010953705757856369, 0.0057030534371733665, 0.0020399882923811674, 0.017048928886651993, 0.01992231048643589, 0.03255198895931244, 0.006353511940687895], [0.031844478100538254, 0.025880729779601097, 0.04432259500026703, 0.12577137351036072, 0.020061753690242767, 0.02086593210697174, 0.061570651829242706, 0.23911356925964355, 0.06600803881883621, 0.03364908695220947, 0.06511609256267548, 0.07291047275066376, 0.02087554521858692, 0.018901929259300232, 0.009051662869751453, 0.04986414313316345, 0.004957739729434252, 0.003680473193526268, 0.007292383350431919, 0.02873973920941353, 0.00842541828751564, 0.005240139551460743, 0.013511426746845245, 0.022344673052430153], [0.004371701739728451, 0.006693649105727673, 0.08216851204633713, 0.023433763533830643, 0.07887368649244308, 0.057699378579854965, 0.06075192987918854, 0.012982320040464401, 0.15112794935703278, 0.08011745661497116, 0.0882851630449295, 0.04362617805600166, 0.07738353312015533, 0.031076205894351006, 0.11539194732904434, 0.008295743726193905, 0.02565322257578373, 0.011710030026733875, 0.00692937383428216, 0.0008585082832723856, 0.0037492881529033184, 0.006409469526261091, 0.013544340617954731, 0.008866679854691029], [6.271764868870378e-05, 5.194969708099961e-05, 0.0002860281674657017, 0.0002782277297228575, 0.0016202761325985193, 0.0011510051554068923, 0.02033136412501335, 0.0016936842584982514, 0.009045866318047047, 0.05644296482205391, 0.0161279309540987, 0.08557259291410446, 0.7853318452835083, 0.01594085432589054, 0.003225558204576373, 0.0003416785621084273, 0.00025766444741748273, 0.0001421525957994163, 0.0007759400177747011, 7.240185368573293e-05, 5.7785971876000986e-05, 0.0006831157370470464, 8.74341421877034e-05, 0.0004189494939055294], [0.0019002481130883098, 0.0028525341767817736, 0.013301840052008629, 0.01225961372256279, 
0.011915740557014942, 0.013668344356119633, 0.01676437444984913, 0.027264224365353584, 0.06335390359163284, 0.046833060681819916, 0.14498649537563324, 0.23429065942764282, 0.24586349725723267, 0.05317752808332443, 0.07197447121143341, 0.013572010211646557, 0.005673538893461227, 0.005869857966899872, 0.0037431365344673395, 0.0029932670295238495, 0.0018257454503327608, 0.001674455706961453, 0.0025291028432548046, 0.0017123236320912838], [0.0006628252449445426, 0.0005645381170324981, 0.0020889306906610727, 0.006225408520549536, 0.029510105028748512, 0.006877882871776819, 0.03660329058766365, 0.01255046483129263, 0.009707457385957241, 0.024390211328864098, 0.06988532841205597, 0.22138452529907227, 0.466068834066391, 0.061585623770952225, 0.014679187908768654, 0.009555160067975521, 0.012790649197995663, 0.0030782639514654875, 0.004679018631577492, 0.0010108979186043143, 0.00033925872412510216, 0.0007642587297596037, 0.0015978224109858274, 0.003400090616196394], [0.0005121644935570657, 0.000724844285286963, 0.0020645190961658955, 0.0014941433910280466, 0.005121528171002865, 0.0025925757363438606, 0.004037210717797279, 0.0008751892601139843, 0.024502795189619064, 0.025957705453038216, 0.030253566801548004, 0.07250382751226425, 0.6796492338180542, 0.037717655301094055, 0.08506888151168823, 0.004887772258371115, 0.007651892956346273, 0.002540356246754527, 0.003626377321779728, 0.0005253274575807154, 0.003413443686440587, 0.0021381094120442867, 0.0011991671053692698, 0.0009416104876436293], [0.005064330529421568, 0.004031313117593527, 0.004073029384016991, 0.004783046897500753, 0.010955114848911762, 0.008374642580747604, 0.013578515499830246, 0.007576989941298962, 0.018543561920523643, 0.04203122854232788, 0.03767899423837662, 0.05957665666937828, 0.335042268037796, 0.08050082623958588, 0.12021470069885254, 0.052518099546432495, 0.038058191537857056, 0.022732965648174286, 0.042357753962278366, 0.019340990111231804, 0.023043977096676826, 0.027589600533246994, 
0.013991029001772404, 0.008342180401086807], [0.007570538204163313, 0.004072991199791431, 0.003475035773590207, 0.007149725221097469, 0.007427212316542864, 0.00834951177239418, 0.003304458688944578, 0.009142777882516384, 0.0074775321409106255, 0.006373817566782236, 0.04210514575242996, 0.060237735509872437, 0.11009098589420319, 0.08104647696018219, 0.13160742819309235, 0.0909775048494339, 0.04483649507164955, 0.04342660307884216, 0.0397411584854126, 0.1274474412202835, 0.07354423403739929, 0.013401811011135578, 0.06148124858736992, 0.015712136402726173], [0.012418028898537159, 0.015136243775486946, 0.010380956344306469, 0.0046424116007983685, 0.007809521164745092, 0.01057168748229742, 0.01740885153412819, 0.02988741360604763, 0.06554196774959564, 0.040698252618312836, 0.03011602722108364, 0.0440727174282074, 0.17417390644550323, 0.06581937521696091, 0.16484950482845306, 0.027791503816843033, 0.016634242609143257, 0.014015594497323036, 0.037928465753793716, 0.07318461686372757, 0.07847640663385391, 0.024290427565574646, 0.02413230389356613, 0.010019570589065552], [0.0026214662939310074, 0.005052119493484497, 0.00666065001860261, 0.007115138228982687, 0.005045785568654537, 0.006550144869834185, 0.0025991464499384165, 0.0009954111883416772, 0.007533858995884657, 0.006366079207509756, 0.010471699759364128, 0.007345478981733322, 0.07993495464324951, 0.024169467389583588, 0.49401238560676575, 0.058940768241882324, 0.03246215730905533, 0.061420176178216934, 0.02255874313414097, 0.014740047976374626, 0.07385467737913132, 0.019920729100704193, 0.04124647006392479, 0.008382434956729412], [0.0008626087219454348, 0.0012958458391949534, 0.002340473933145404, 0.0023160860873758793, 0.0013197580119594932, 0.0036058383993804455, 0.0010167331201955676, 0.00021272001322358847, 0.003807729110121727, 0.0030268896371126175, 0.0032055932097136974, 0.01855618506669998, 0.08014211803674698, 0.049326639622449875, 0.2857204079627991, 0.06426795572042465, 0.018300950527191162, 
0.12032505124807358, 0.04170748591423035, 0.015725573524832726, 0.23033083975315094, 0.019894255325198174, 0.015908479690551758, 0.016783732920885086], [0.000313937955070287, 0.0008630482479929924, 0.000981000019237399, 0.00045797982602380216, 0.0008935919613577425, 0.0004747865896206349, 0.00031475277501158416, 2.825329647748731e-05, 0.003048563841730356, 0.0015655560418963432, 0.002542113186791539, 0.001537157455459237, 0.048253383487463, 0.010910199955105782, 0.5919156074523926, 0.010956442914903164, 0.028276439756155014, 0.046567756682634354, 0.034495532512664795, 0.0033046621829271317, 0.1819782704114914, 0.014729665592312813, 0.013857550919055939, 0.00173366058152169], [0.005354301538318396, 0.006328483112156391, 0.004150853026658297, 0.01939014159142971, 0.0017262930050492287, 0.0018345440039411187, 0.0031969775445759296, 0.00327263749204576, 0.004994702525436878, 0.0037365194875746965, 0.010906247422099113, 0.024906471371650696, 0.09615252912044525, 0.030953623354434967, 0.12243387848138809, 0.18954843282699585, 0.01266114879399538, 0.018939794972538948, 0.04923596978187561, 0.11684022843837738, 0.20296929776668549, 0.011581122875213623, 0.0367790050804615, 0.022106751799583435], [0.0005353611777536571, 0.000924881431274116, 0.0026960684917867184, 0.0029979965183883905, 0.0013111454900354147, 0.001064829993993044, 0.0006046579219400883, 6.850545469205827e-05, 0.0022425621282309294, 0.001340004033409059, 0.004469546023756266, 0.006514550652354956, 0.08588272333145142, 0.019244346767663956, 0.41356751322746277, 0.026752673089504242, 0.022487064823508263, 0.03583858162164688, 0.03849200904369354, 0.007677167188376188, 0.24035154283046722, 0.015320039354264736, 0.05162389948964119, 0.017992308363318443], [0.00016512807633262128, 0.0001260903081856668, 0.00012355083890724927, 0.000506167474668473, 0.00015856936806812882, 0.00015516695566475391, 0.0010395573917776346, 5.029584281146526e-05, 0.00037313534994609654, 0.0019583709072321653, 0.0017079797107726336, 
0.009294028393924236, 0.7288402318954468, 0.026646889746189117, 0.02803516574203968, 0.01014180202037096, 0.0018105951603502035, 0.00518818711861968, 0.041927557438611984, 0.012178033590316772, 0.08093652129173279, 0.026316490024328232, 0.009992312639951706, 0.01232815533876419]], [[0.018407970666885376, 0.006206104997545481, 0.026788976043462753, 0.02432723343372345, 0.025413671508431435, 0.020938627421855927, 0.03823814168572426, 0.23573653399944305, 0.16017431020736694, 0.019007563591003418, 0.21951553225517273, 0.051397498697042465, 0.01338744256645441, 0.015180660411715508, 0.012906663119792938, 0.007484646514058113, 0.012153241783380508, 0.00629710778594017, 0.006371843162924051, 0.028037581592798233, 0.01531251147389412, 0.005133472848683596, 0.023275671526789665, 0.008307050913572311], [0.024098489433526993, 0.013201265595853329, 0.04923061281442642, 0.021196242421865463, 0.023288514465093613, 0.026677465066313744, 0.03401343896985054, 0.09257907420396805, 0.08594011515378952, 0.027110505849123, 0.06052226945757866, 0.04746600612998009, 0.018309731036424637, 0.018622763454914093, 0.019666295498609543, 0.013554858975112438, 0.022163409739732742, 0.024080874398350716, 0.02902705781161785, 0.06718818098306656, 0.10106948763132095, 0.028786586597561836, 0.07284682244062424, 0.0793599784374237], [0.008436407893896103, 0.005359513685107231, 0.015810532495379448, 0.008274038322269917, 0.039581019431352615, 0.007012685760855675, 0.016458990052342415, 0.04110356792807579, 0.4152454733848572, 0.1048041507601738, 0.07731516659259796, 0.04575035348534584, 0.04199666902422905, 0.028157919645309448, 0.01078837551176548, 0.005240896251052618, 0.015833672136068344, 0.0033815347123891115, 0.0026356095913797617, 0.007235650904476643, 0.03585176169872284, 0.029922546818852425, 0.016993820667266846, 0.016809560358524323], [0.003999368753284216, 0.003624614328145981, 0.021695047616958618, 0.01164148561656475, 0.010541516356170177, 0.015459239482879639, 0.03715149685740471, 
0.177895650267601, 0.08321873098611832, 0.09907159954309464, 0.11261724680662155, 0.09551283717155457, 0.05366745963692665, 0.05389596149325371, 0.021666085347533226, 0.008480146527290344, 0.005036771297454834, 0.009374210610985756, 0.012027285993099213, 0.06266023218631744, 0.0192432664334774, 0.04040956869721413, 0.022898459807038307, 0.018211735412478447], [0.005135776940733194, 0.0036205588839948177, 0.02265569195151329, 0.009128349833190441, 0.012782509438693523, 0.010079865343868732, 0.027815327048301697, 0.06410275399684906, 0.4650479853153229, 0.020986691117286682, 0.0664725974202156, 0.010738339275121689, 0.004043100867420435, 0.007353837601840496, 0.003874784102663398, 0.004191836807876825, 0.007613744121044874, 0.009246991015970707, 0.010138622485101223, 0.020118458196520805, 0.15607401728630066, 0.011180263012647629, 0.034804292023181915, 0.012793628498911858], [0.02230915240943432, 0.017049958929419518, 0.036542247980833054, 0.03189893811941147, 0.040377743542194366, 0.035941705107688904, 0.042547814548015594, 0.14254803955554962, 0.04867713153362274, 0.1082799881696701, 0.0708497166633606, 0.07022546976804733, 0.04130009189248085, 0.07700594514608383, 0.03456239402294159, 0.01672891341149807, 0.02259881980717182, 0.016344038769602776, 0.011404848657548428, 0.031067978590726852, 0.009496732614934444, 0.03172018751502037, 0.018952276557683945, 0.021569903939962387], [0.00674690306186676, 0.00287937861867249, 0.02784929797053337, 0.017539264634251595, 0.03880864381790161, 0.01754574291408062, 0.0560913048684597, 0.08264001458883286, 0.20588815212249756, 0.0699830874800682, 0.21184466779232025, 0.08213096112012863, 0.05931095778942108, 0.019219204783439636, 0.020835068076848984, 0.00947937648743391, 0.02082529477775097, 0.0068136402405798435, 0.0062679145485162735, 0.008531956002116203, 0.007604923564940691, 0.006947563029825687, 0.00924730859696865, 0.004969351459294558], [0.010288911871612072, 0.008668516762554646, 0.016325591132044792, 
0.015109003521502018, 0.008370931260287762, 0.04965434595942497, 0.017836667597293854, 0.17020687460899353, 0.027338583022356033, 0.11658606678247452, 0.04134047403931618, 0.14922115206718445, 0.017367707565426826, 0.06736524403095245, 0.042624905705451965, 0.02237316407263279, 0.006664477754384279, 0.037041522562503815, 0.010077486746013165, 0.07830522954463959, 0.00652270158752799, 0.05033767595887184, 0.007472475990653038, 0.022900108247995377], [0.014878377318382263, 0.012225472368299961, 0.01831054501235485, 0.03473815694451332, 0.020843634381890297, 0.012598451226949692, 0.00944769848138094, 0.03644736111164093, 0.3573208749294281, 0.0359426848590374, 0.07164012640714645, 0.10110317170619965, 0.04220696911215782, 0.01716642826795578, 0.036798812448978424, 0.032904159277677536, 0.020030474290251732, 0.00886519905179739, 0.004250203724950552, 0.009525921195745468, 0.057113662362098694, 0.010676326230168343, 0.019638793542981148, 0.01532643660902977], [0.009657507762312889, 0.014256044290959835, 0.014402241446077824, 0.014933415688574314, 0.01257121842354536, 0.014374345541000366, 0.020767340436577797, 0.0540192648768425, 0.009304077364504337, 0.022444967180490494, 0.025329822674393654, 0.0575505830347538, 0.032354529947042465, 0.06324519962072372, 0.10995765775442123, 0.049542490392923355, 0.02606588415801525, 0.06415794044733047, 0.09601552784442902, 0.1497516930103302, 0.02843262441456318, 0.04930846020579338, 0.02732987143099308, 0.034227292984724045], [0.024879222735762596, 0.034037791192531586, 0.017428183928132057, 0.013110851868987083, 0.048560284078121185, 0.016626451164484024, 0.022302042692899704, 0.07061029970645905, 0.1364831030368805, 0.09278610348701477, 0.08658786863088608, 0.05598263442516327, 0.037276871502399445, 0.06403091549873352, 0.05923411622643471, 0.020414896309375763, 0.039800975471735, 0.016391338780522346, 0.01526401937007904, 0.028673911467194557, 0.02689918503165245, 0.04109934717416763, 0.019611097872257233, 0.011908456683158875], 
[0.002494288608431816, 0.004137901123613119, 0.002397682052105665, 0.005167901981621981, 0.007318977732211351, 0.003385592717677355, 0.006652946583926678, 0.033569373190402985, 0.004196068737655878, 0.028153540566563606, 0.008380956016480923, 0.12368141114711761, 0.0639224424958229, 0.12834268808364868, 0.059500373899936676, 0.03072297014296055, 0.012252254411578178, 0.038849856704473495, 0.05757638439536095, 0.18465301394462585, 0.025477103888988495, 0.09205850958824158, 0.012545577250421047, 0.06456213444471359], [0.004881202708929777, 0.009543935768306255, 0.01788690872490406, 0.02065086178481579, 0.017939290031790733, 0.004570760764181614, 0.011618112213909626, 0.018116671591997147, 0.031433653086423874, 0.037457991391420364, 0.02718953974545002, 0.0799744501709938, 0.1993260681629181, 0.022638417780399323, 0.11956329643726349, 0.05219407007098198, 0.025157935917377472, 0.007815031334757805, 0.021864961832761765, 0.06429576128721237, 0.055731359869241714, 0.06361569464206696, 0.043524038046598434, 0.04301004484295845], [0.0005189875373616815, 0.0012509258231148124, 0.0059945364482700825, 0.0013243909925222397, 0.008601467125117779, 0.002416494069620967, 0.012690065428614616, 0.005509156733751297, 0.004845550749450922, 0.02188553474843502, 0.007825234904885292, 0.04081536829471588, 0.14335112273693085, 0.05113031715154648, 0.06917136907577515, 0.008359556086361408, 0.024998629465699196, 0.038756027817726135, 0.13072192668914795, 0.07066329568624496, 0.07701697945594788, 0.10463377833366394, 0.032108161598443985, 0.1354110836982727], [0.0001446372625650838, 0.00045278010657057166, 0.0020794114097952843, 0.0005917689995840192, 0.0014019593363627791, 0.00010386246140114963, 0.0002658125595189631, 0.0001321820600423962, 0.02373651973903179, 0.0009912345558404922, 0.0015733817126601934, 0.0011672358959913254, 0.007034498266875744, 0.001393197919242084, 0.011978335678577423, 0.003140590386465192, 0.0059805978089571, 0.0014611509395763278, 0.004236545413732529, 
0.0027292505837976933, 0.8485751152038574, 0.00990302860736847, 0.04815397411584854, 0.02277284488081932], [0.0016504123341292143, 0.003321531694382429, 0.023346394300460815, 0.007790622301399708, 0.004346159752458334, 0.007622384931892157, 0.02078227512538433, 0.009180807508528233, 0.015393407084047794, 0.021251484751701355, 0.011796805076301098, 0.018325135111808777, 0.06573443114757538, 0.02334842085838318, 0.03264224901795387, 0.014367637224495411, 0.006782298441976309, 0.03353618085384369, 0.0845261961221695, 0.08081359416246414, 0.2121482789516449, 0.11194340139627457, 0.0778745487332344, 0.11147534847259521], [0.0006884552421979606, 0.0008728856919333339, 0.009630708955228329, 0.002323357155546546, 0.002313490491360426, 0.0011495535727590322, 0.003529226640239358, 0.0008554834639653563, 0.05437607318162918, 0.0012683592503890395, 0.0036150827072560787, 0.0004454570880625397, 0.0012112578842788935, 0.0006479276344180107, 0.0018490944057703018, 0.0018492097733542323, 0.004136895295232534, 0.0042999922297894955, 0.010954737663269043, 0.003918816801160574, 0.7928006649017334, 0.007286339998245239, 0.07259871810674667, 0.01737808622419834], [0.011556406505405903, 0.019007844850420952, 0.048338182270526886, 0.01755087450146675, 0.030121508985757828, 0.011314889416098595, 0.017844224348664284, 0.004099957644939423, 0.015169271267950535, 0.03024682030081749, 0.003379521891474724, 0.0065505304373800755, 0.054794006049633026, 0.026705440133810043, 0.02466406300663948, 0.017257962375879288, 0.039139289408922195, 0.03572164103388786, 0.04424675926566124, 0.019571499899029732, 0.18003569543361664, 0.12130527943372726, 0.06958645582199097, 0.1517917811870575], [0.002235370222479105, 0.0017857536440715194, 0.06084267050027847, 0.010977723635733128, 0.017389891669154167, 0.008204846642911434, 0.0341368094086647, 0.0029611587524414062, 0.05539456382393837, 0.015392184257507324, 0.016247760504484177, 0.0042176092974841595, 0.03789599984884262, 0.006310731638222933, 
0.020178645849227905, 0.009545207023620605, 0.03061497025191784, 0.02262081205844879, 0.0543145015835762, 0.012590534053742886, 0.3664953410625458, 0.04195939004421234, 0.11183565855026245, 0.05585182085633278], [0.004806755110621452, 0.0060837119817733765, 0.034132227301597595, 0.011286498978734016, 0.0035365417134016752, 0.026696855202317238, 0.010189813561737537, 0.008938661776483059, 0.004992614034563303, 0.023219145834445953, 0.0036519139539450407, 0.007721059489995241, 0.006993260234594345, 0.01724282279610634, 0.024596504867076874, 0.014010857790708542, 0.0058328863233327866, 0.08196007460355759, 0.037436582148075104, 0.0790652185678482, 0.10167311131954193, 0.20716217160224915, 0.07313787192106247, 0.20563285052776337], [0.0016829121159389615, 0.0015223358059301972, 0.008362206630408764, 0.0073834932409226894, 0.0024691587314009666, 0.0012805350124835968, 0.0013507460243999958, 0.0001443958026356995, 0.011936451308429241, 0.0005236234865151346, 0.0006920325686223805, 0.00021703910897485912, 0.0008454248309135437, 0.0003454094403423369, 0.001864466816186905, 0.00436702836304903, 0.006609654985368252, 0.004327822010964155, 0.006584423594176769, 0.0013098148629069328, 0.7733825445175171, 0.007947574369609356, 0.10726796090602875, 0.04758292809128761], [0.005679211113601923, 0.006863818038254976, 0.029271027073264122, 0.010142263025045395, 0.009605311788618565, 0.008222454227507114, 0.02202760800719261, 0.01046907901763916, 0.008326690644025803, 0.008043703623116016, 0.00792890414595604, 0.0031009658705443144, 0.009577282704412937, 0.012618489563465118, 0.029878120869398117, 0.015491751953959465, 0.020179476588964462, 0.039960287511348724, 0.13484340906143188, 0.09121454507112503, 0.20035189390182495, 0.08316786587238312, 0.1621841937303543, 0.07085156440734863], [0.007104775402694941, 0.007936849258840084, 0.021017134189605713, 0.007857050746679306, 0.020504020154476166, 0.005377752240747213, 0.018653295934200287, 0.00400411756709218, 0.0950826033949852, 
0.010119827464222908, 0.008365565910935402, 0.0015722300158813596, 0.005739040207117796, 0.00452152406796813, 0.006824946962296963, 0.005225921515375376, 0.022607695311307907, 0.010482486337423325, 0.026781810447573662, 0.007618089206516743, 0.5231311917304993, 0.03486131131649017, 0.1031871810555458, 0.04142361506819725], [0.002436436479911208, 0.002452310174703598, 0.00705031119287014, 0.0041838171891868114, 0.008706661872565746, 0.0046066646464169025, 0.02712525613605976, 0.016108868643641472, 0.006692798808217049, 0.027268214151263237, 0.0033906162716448307, 0.012767443433403969, 0.024268975481390953, 0.029680265113711357, 0.008518215268850327, 0.00872805155813694, 0.010091503150761127, 0.0361299142241478, 0.1420353502035141, 0.09491954743862152, 0.12889385223388672, 0.18847055733203888, 0.03658732771873474, 0.16888704895973206]], [[0.004319996107369661, 0.008847944438457489, 0.02501206286251545, 0.009851417504251003, 0.013048444874584675, 0.006755975540727377, 0.009111471474170685, 0.0020441499073058367, 0.009913544170558453, 0.12600639462471008, 0.02352343499660492, 0.04854081943631172, 0.04591471329331398, 0.07465161383152008, 0.08108214288949966, 0.029128435999155045, 0.02588794380426407, 0.021754419431090355, 0.023380419239401817, 0.008686021901667118, 0.040469251573085785, 0.2595198452472687, 0.03797098249197006, 0.06457856297492981], [0.009632655419409275, 0.0137168662622571, 0.013582812622189522, 0.007560295052826405, 0.007269983179867268, 0.0065157609060406685, 0.00752238417044282, 0.004973928444087505, 0.004639133810997009, 0.14166800677776337, 0.04593278467655182, 0.09277329593896866, 0.04669235274195671, 0.09158730506896973, 0.06619162112474442, 0.0426773726940155, 0.017071079462766647, 0.032916560769081116, 0.029528770595788956, 0.020886896178126335, 0.016655797138810158, 0.2164493054151535, 0.024791870266199112, 0.03876319155097008], [0.13620580732822418, 0.08881780505180359, 0.19150494039058685, 0.04845847561955452, 0.01579449512064457, 
0.03805790841579437, 0.03924664109945297, 0.028244849294424057, 0.02290218323469162, 0.009751473553478718, 0.02983127348124981, 0.007757307030260563, 0.014679993502795696, 0.010896236635744572, 0.015794767066836357, 0.010015376843512058, 0.010279114358127117, 0.016808347776532173, 0.028085991740226746, 0.02594250626862049, 0.040560413151979446, 0.0419180728495121, 0.07852831482887268, 0.04991767555475235], [0.011137869209051132, 0.017513081431388855, 0.037422046065330505, 0.026391679421067238, 0.009514226578176022, 0.009780628606677055, 0.004733819980174303, 0.006044603418558836, 0.002393794246017933, 0.06920523941516876, 0.015059935860335827, 0.05256525054574013, 0.031738702207803726, 0.028553705662488937, 0.02755512297153473, 0.06600948423147202, 0.01128199603408575, 0.034810472279787064, 0.012861127965152264, 0.029056726023554802, 0.013225553557276726, 0.3192526400089264, 0.026326859369874, 0.13756538927555084], [0.004901644308120012, 0.00706104002892971, 0.020705586299300194, 0.04341662675142288, 0.017844852060079575, 0.03444678336381912, 0.004051819909363985, 0.04121226444840431, 0.008177876472473145, 0.040583640336990356, 0.002665581414476037, 0.06011265888810158, 0.013334492221474648, 0.052983079105615616, 0.03892425075173378, 0.06935003399848938, 0.019943388178944588, 0.08164903521537781, 0.0068768905475735664, 0.10542906075716019, 0.0319533534348011, 0.10246583819389343, 0.01575298234820366, 0.17615722119808197], [0.0228744950145483, 0.016826514154672623, 0.0978715717792511, 0.03693953901529312, 0.02462887205183506, 0.03630630671977997, 0.09937667101621628, 0.007410518359392881, 0.023531131446361542, 0.1278418004512787, 0.02583717554807663, 0.011335453949868679, 0.029659513384103775, 0.009194300509989262, 0.01714175008237362, 0.009268750436604023, 0.005059416405856609, 0.005806542467325926, 0.018793415278196335, 0.004911178257316351, 0.014306007884442806, 0.2706291079521179, 0.04213809221982956, 0.04231187701225281], [0.03258303925395012, 
0.01572730392217636, 0.0674353837966919, 0.11092405021190643, 0.045574039220809937, 0.2637718617916107, 0.05916658788919449, 0.035021211951971054, 0.0437682643532753, 0.06411730498075485, 0.0029770240653306246, 0.029558787122368813, 0.006907360162585974, 0.007302396930754185, 0.00911164190620184, 0.01086510345339775, 0.00379189383238554, 0.012368876487016678, 0.0035627628676593304, 0.005248865112662315, 0.0058745513670146465, 0.042025692760944366, 0.009348117746412754, 0.11296785622835159], [0.009753878228366375, 0.006997250951826572, 0.18903392553329468, 0.05431243032217026, 0.053700558841228485, 0.08655928075313568, 0.12617191672325134, 0.020405080169439316, 0.13126927614212036, 0.027710191905498505, 0.005840125028043985, 0.007369538303464651, 0.06871404498815536, 0.004628523252904415, 0.00818804930895567, 0.0041756643913686275, 0.012842285446822643, 0.00932249054312706, 0.021633781492710114, 0.00844446662813425, 0.06580054014921188, 0.050111688673496246, 0.011999299749732018, 0.015015766955912113], [0.04713154211640358, 0.020695069804787636, 0.15136626362800598, 0.26705214381217957, 0.015221168287098408, 0.1995050311088562, 0.01325896941125393, 0.06705226749181747, 0.06810403615236282, 0.011600046418607235, 0.004565550480037928, 0.01691342517733574, 0.001873841043561697, 0.011683119460940361, 0.0024703103117644787, 0.02526376023888588, 0.0017563591245561838, 0.00934173259884119, 0.000854038808029145, 0.00406400253996253, 0.004937205463647842, 0.005436329636722803, 0.005035480950027704, 0.044818371534347534], [0.016103100031614304, 0.005458638537675142, 0.08227100968360901, 0.01775524951517582, 0.01405167393386364, 0.024840470403432846, 0.08647804707288742, 0.10412407666444778, 0.5420838594436646, 0.01478485856205225, 0.01917801797389984, 0.013658805750310421, 0.014797331765294075, 0.005630579777061939, 0.004320026841014624, 0.0028408956713974476, 0.001729991054162383, 0.000824872637167573, 0.0032498242799192667, 0.0036293307784944773, 0.011874455027282238, 
0.0018514246912673116, 0.004745866172015667, 0.0037176574114710093], [0.03697577863931656, 0.027315037325024605, 0.02139251120388508, 0.03329479694366455, 0.02055799774825573, 0.05506949499249458, 0.028056582435965538, 0.3334822356700897, 0.013941447250545025, 0.055562861263751984, 0.0047402940690517426, 0.12874069809913635, 0.001217928365804255, 0.05466553941369057, 0.0041296593844890594, 0.03030196763575077, 0.008887337520718575, 0.006146272178739309, 0.008011633530259132, 0.07098305225372314, 0.002960137790068984, 0.009784051217138767, 0.0016317309346050024, 0.04215095937252045], [0.00045413090265356004, 0.00046218023635447025, 0.039517782628536224, 0.0029358668252825737, 0.004902200773358345, 0.0027624457143247128, 0.023649055510759354, 0.0005626050406135619, 0.06259201467037201, 0.25141215324401855, 0.19738437235355377, 0.11695695668458939, 0.23387283086776733, 0.017864365130662918, 0.030216578394174576, 0.0021899831481277943, 0.0014149562921375036, 0.0004471209249459207, 0.001499982550740242, 2.9528109735110775e-05, 0.00035489434958435595, 0.006369621492922306, 0.0009213325683958828, 0.0012270576553419232], [0.0009618261829018593, 0.0009649444255046546, 0.0006655006436631083, 0.0007846188964322209, 0.0005262216436676681, 0.0026747656520456076, 0.003523084335029125, 0.04873888939619064, 0.0016774075338616967, 0.01920173689723015, 0.0029758771415799856, 0.7553648948669434, 0.004450441338121891, 0.09993887692689896, 0.003235874231904745, 0.0067008561454713345, 0.0003790586779359728, 0.005490786395967007, 0.002937190467491746, 0.02725241146981716, 0.0003050428058486432, 0.0013317515840753913, 0.00011236413411097601, 0.00980573520064354], [0.00032432845910079777, 0.0002325698296772316, 0.0014740958577021956, 0.0006398678524419665, 0.004865576978772879, 0.001322177704423666, 0.019600918516516685, 0.0011572662042453885, 0.039118144661188126, 0.13116420805454254, 0.033764876425266266, 0.0839439108967781, 0.6363641619682312, 0.014837165363132954, 0.011567272245883942, 
0.0015725713456049562, 0.0022262728307396173, 0.0015700694639235735, 0.006202773191034794, 0.00028887487133033574, 0.0012421433348208666, 0.005796689540147781, 0.0003257194475736469, 0.0003984816139563918], [0.003466655034571886, 0.002738774288445711, 0.002651065355166793, 0.0025140747893601656, 0.0031136032193899155, 0.004761596210300922, 0.009431449696421623, 0.012032457627356052, 0.003684854134917259, 0.14475151896476746, 0.02062690630555153, 0.42200958728790283, 0.06625314056873322, 0.1521308571100235, 0.018412744626402855, 0.013162217102944851, 0.003657217836007476, 0.015800829976797104, 0.0184944998472929, 0.01748211309313774, 0.0034180039074271917, 0.046138741075992584, 0.0018842780264094472, 0.011382880620658398], [0.0020312212873250246, 0.005704091861844063, 0.0005582061712630093, 0.0032480594236403704, 0.006228924263268709, 0.0016882832860574126, 0.004122009966522455, 0.0029390540439635515, 0.0031711210031062365, 0.06350546330213547, 0.023880530148744583, 0.10973997414112091, 0.44790104031562805, 0.041452132165431976, 0.062322504818439484, 0.03927105292677879, 0.02327214926481247, 0.025234488770365715, 0.027699986472725868, 0.021494727581739426, 0.01110902614891529, 0.05022471770644188, 0.00793137215077877, 0.015269720926880836], [0.0009945865022018552, 0.0021737113129347563, 0.0005766873946413398, 0.0031274231150746346, 0.005509461276233196, 0.0033342717215418816, 0.0009306885185651481, 0.012673105113208294, 0.0011323600774630904, 0.03772477060556412, 0.001845934777520597, 0.11891093105077744, 0.03180491551756859, 0.1424086093902588, 0.047700606286525726, 0.07314875721931458, 0.037381455302238464, 0.12215641140937805, 0.016111569479107857, 0.18150299787521362, 0.022181732580065727, 0.07397205382585526, 0.006325124762952328, 0.056371938437223434], [0.01422570925205946, 0.026251036673784256, 0.002132292604073882, 0.003909275867044926, 0.015823235735297203, 0.005876423325389624, 0.03422872722148895, 0.002478371374309063, 0.0066094789654016495, 
0.0782686099410057, 0.07180408388376236, 0.03727223724126816, 0.1890375316143036, 0.030543221160769463, 0.12216649949550629, 0.02384321577847004, 0.05341969430446625, 0.026028743013739586, 0.10905123502016068, 0.007976454682648182, 0.011395116336643696, 0.0712018758058548, 0.04139639064669609, 0.015060566365718842], [0.004553653299808502, 0.007339204661548138, 0.0019881408661603928, 0.01133254636079073, 0.017626110464334488, 0.014496142975986004, 0.005985577125102282, 0.0037570015992969275, 0.0035736598074436188, 0.037171896547079086, 0.004451741464436054, 0.14744466543197632, 0.06439566612243652, 0.07136176526546478, 0.0805707722902298, 0.06099981814622879, 0.051973842084407806, 0.16334564983844757, 0.03836395591497421, 0.02294997312128544, 0.019367488101124763, 0.04996743053197861, 0.01320699043571949, 0.10377628356218338], [0.0006489446968771517, 0.001673180260695517, 0.0009338571107946336, 0.0013296243268996477, 0.008579373359680176, 0.0009805324953049421, 0.0027934396639466286, 0.0004453823494259268, 0.0013740018475800753, 0.004061133600771427, 0.0015575287397950888, 0.009660652838647366, 0.269553005695343, 0.0149168586358428, 0.02723405510187149, 0.007734269369393587, 0.12286948412656784, 0.07053444534540176, 0.1838161051273346, 0.0336555540561676, 0.17636139690876007, 0.04474649578332901, 0.008074641227722168, 0.006466034799814224], [0.003921037539839745, 0.009770727716386318, 0.002594177145510912, 0.009421924129128456, 0.003743327222764492, 0.002119298791512847, 0.00021525619376916438, 0.00032161796116270125, 0.000265152077190578, 0.0006923554465174675, 0.0012780207907781005, 0.019849685952067375, 0.01245883945375681, 0.037524402141571045, 0.036242712289094925, 0.0708928033709526, 0.07758115231990814, 0.4227614998817444, 0.04725657030940056, 0.04260764271020889, 0.10952848196029663, 0.020205175504088402, 0.020597560331225395, 0.048150576651096344], [0.011189429089426994, 0.013408699072897434, 0.011620131321251392, 0.006729819346219301, 0.008000529371201992, 
0.002852073637768626, 0.008191552013158798, 0.008459868840873241, 0.011788317933678627, 0.0015287898713722825, 0.008127822540700436, 0.011298495344817638, 0.026483779773116112, 0.0154955442994833, 0.03128078952431679, 0.011643126606941223, 0.034437209367752075, 0.02135460078716278, 0.10752706229686737, 0.10770580172538757, 0.4391883313655853, 0.011117277666926384, 0.0733482614159584, 0.017222566530108452], [0.007649291772395372, 0.015917915850877762, 0.003044575685635209, 0.0070872437208890915, 0.004037665668874979, 0.002949059708043933, 0.0006464788457378745, 0.004637872334569693, 6.513569678645581e-05, 0.0026027632411569357, 0.0005040975520387292, 0.023561500012874603, 0.0005681065958924592, 0.044905032962560654, 0.012218995951116085, 0.03986204043030739, 0.04072960093617439, 0.04797196760773659, 0.043115101754665375, 0.34922799468040466, 0.04410931095480919, 0.08725601434707642, 0.0219864659011364, 0.19534580409526825], [0.0008365894900634885, 0.0019100270001217723, 0.014453789219260216, 0.0025972675066441298, 0.004284343216568232, 0.0005207445938140154, 0.0027592256665229797, 4.0639060898683965e-05, 0.0011306756641715765, 0.006595959421247244, 0.02214321307837963, 0.008320432156324387, 0.28907614946365356, 0.013417736627161503, 0.11257019639015198, 0.005435377825051546, 0.024567676708102226, 0.0076909190975129604, 0.04402664303779602, 0.0013172916369512677, 0.08760593831539154, 0.14164306223392487, 0.18456101417541504, 0.022495074197649956]], [[0.016802551224827766, 0.00990119855850935, 0.10250148177146912, 0.007799600716680288, 0.020896919071674347, 0.01759188622236252, 0.04227614030241966, 0.02680494822561741, 0.04598623514175415, 0.026040667667984962, 0.03763779625296593, 0.0076379417441785336, 0.013766065239906311, 0.0290997177362442, 0.202989861369133, 0.01003565825521946, 0.025650041177868843, 0.015952082350850105, 0.0666389912366867, 0.044000279158353806, 0.09623338282108307, 0.034185655415058136, 0.08461232483386993, 0.014958661049604416], 
[0.03460273519158363, 0.0257955901324749, 0.05812413990497589, 0.015150928869843483, 0.03503428027033806, 0.034299369901418686, 0.06355460733175278, 0.030026838183403015, 0.02669326215982437, 0.059491418302059174, 0.027420390397310257, 0.011474707163870335, 0.014897341839969158, 0.021630389615893364, 0.055235881358385086, 0.01479699183255434, 0.03970569744706154, 0.038687027990818024, 0.10482971370220184, 0.04660719633102417, 0.0638367235660553, 0.09874485433101654, 0.044978052377700806, 0.03438194468617439], [0.003752291901037097, 0.004194451496005058, 0.06497298181056976, 0.0048798201605677605, 0.004193030297756195, 0.0030500185675919056, 0.012099165469408035, 0.007794367615133524, 0.05412837117910385, 0.006625864189118147, 0.05343232303857803, 0.009369156323373318, 0.03638343885540962, 0.020424485206604004, 0.3859502971172333, 0.008664222434163094, 0.012544268742203712, 0.007475386839359999, 0.031697314232587814, 0.01819111593067646, 0.12074988335371017, 0.013190231285989285, 0.10530856251716614, 0.010928944684565067], [0.001327036996372044, 0.0015367817832157016, 0.058297380805015564, 0.007783769629895687, 0.006322943139821291, 0.004562144633382559, 0.013186643831431866, 0.019333798438310623, 0.10000099241733551, 0.013993658125400543, 0.0379549115896225, 0.026231268420815468, 0.07868746668100357, 0.05186332389712334, 0.34273484349250793, 0.01072006393224001, 0.01194040384143591, 0.005812855437397957, 0.018575483933091164, 0.02669825591146946, 0.10101979225873947, 0.009558373130857944, 0.03649754077196121, 0.015360210090875626], [0.009553952142596245, 0.011394929140806198, 0.07256808131933212, 0.021738989278674126, 0.03504614904522896, 0.02926911786198616, 0.01925879344344139, 0.041230857372283936, 0.06423652917146683, 0.04472750052809715, 0.026979006826877594, 0.044597841799259186, 0.05011513829231262, 0.06156497821211815, 0.12572044134140015, 0.02142227068543434, 0.03380874544382095, 0.01749596744775772, 0.018417824059724808, 0.04877576604485512, 
0.06579189002513885, 0.034217771142721176, 0.05079220235347748, 0.05127524584531784], [0.017647406086325645, 0.01892755925655365, 0.07900446653366089, 0.005749281961470842, 0.02465994842350483, 0.010737626813352108, 0.03543318063020706, 0.0280922781676054, 0.07738294452428818, 0.03445536643266678, 0.04908537119626999, 0.006250082980841398, 0.011950470507144928, 0.015726497396826744, 0.1851484775543213, 0.009894092567265034, 0.03532857075333595, 0.010045135393738747, 0.05868364870548248, 0.04044162854552269, 0.11988470703363419, 0.04731021821498871, 0.0703720673918724, 0.007789026480168104], [0.0032577686943113804, 0.00410390505567193, 0.08695650100708008, 0.02821720764040947, 0.008846994489431381, 0.009737097658216953, 0.009674911387264729, 0.006010545417666435, 0.09777380526065826, 0.013059570454061031, 0.026616597548127174, 0.019288713112473488, 0.05261716991662979, 0.02908588945865631, 0.41203033924102783, 0.01499175000935793, 0.009829501621425152, 0.003865166800096631, 0.005738670006394386, 0.00539257051423192, 0.06916589289903641, 0.010287551209330559, 0.048054177314043045, 0.02539774589240551], [0.014589222148060799, 0.009732356294989586, 0.02830514870584011, 0.022284550592303276, 0.026648564264178276, 0.02086549811065197, 0.030734114348888397, 0.02861342765390873, 0.03185335919260979, 0.06905710697174072, 0.046939462423324585, 0.07462655752897263, 0.07467946410179138, 0.07942432165145874, 0.07822758704423904, 0.03137771412730217, 0.030260995030403137, 0.018566081300377846, 0.033704664558172226, 0.04187176376581192, 0.03819293528795242, 0.048817865550518036, 0.059569478034973145, 0.06105773523449898], [0.017746970057487488, 0.02450338751077652, 0.06789755076169968, 0.010571606457233429, 0.016692163422703743, 0.021897248923778534, 0.03516799956560135, 0.00766532588750124, 0.07963965833187103, 0.03486351668834686, 0.14409823715686798, 0.00784324761480093, 0.03149668499827385, 0.01608845591545105, 0.1085183247923851, 0.010198653675615788, 0.020626312121748924, 
0.021373869851231575, 0.02667406015098095, 0.006008667405694723, 0.05935205519199371, 0.03546791523694992, 0.18677011132240295, 0.008837837725877762], [0.006356716621667147, 0.011742953211069107, 0.029302751645445824, 0.12468595057725906, 0.04073518142104149, 0.022673295810818672, 0.015229383483529091, 0.15212106704711914, 0.04546855762600899, 0.009195446036756039, 0.004967516288161278, 0.12595906853675842, 0.09420756995677948, 0.06790883839130402, 0.01446991041302681, 0.02127997763454914, 0.015023048035800457, 0.003004849422723055, 0.0032467914279550314, 0.04275454953312874, 0.011329425498843193, 0.0027649630792438984, 0.006860567722469568, 0.12871159613132477], [0.029908331111073494, 0.030847439542412758, 0.07782541215419769, 0.017377547919750214, 0.021416042000055313, 0.03269731253385544, 0.030649112537503242, 0.04392502084374428, 0.1332271695137024, 0.062050554901361465, 0.11066179722547531, 0.021817484870553017, 0.040428582578897476, 0.03205212205648422, 0.08464623242616653, 0.01583479344844818, 0.018095504492521286, 0.01402581948786974, 0.01637423224747181, 0.018628152087330818, 0.035930048674345016, 0.027849087491631508, 0.0658043846487999, 0.01792793907225132], [0.0022879934404045343, 0.0044553265906870365, 0.012490866705775261, 0.04968203976750374, 0.018250644207000732, 0.011088847182691097, 0.013066316023468971, 0.08127477765083313, 0.023002495989203453, 0.024595079943537712, 0.005143933929502964, 0.24324250221252441, 0.21865352988243103, 0.13107797503471375, 0.00825112871825695, 0.013266554102301598, 0.005269614048302174, 0.0016684276051819324, 0.002315797144547105, 0.02094270847737789, 0.003336963476613164, 0.0028549707494676113, 0.0026626852340996265, 0.10111880302429199], [0.0009104011696763337, 0.0023652324452996254, 0.009110702201724052, 0.07057370245456696, 0.0070973047986626625, 0.008745568804442883, 0.0046835290268063545, 0.03737850859761238, 0.025275662541389465, 0.020349211990833282, 0.002999075222760439, 0.43803340196609497, 
0.18233446776866913, 0.09702587872743607, 0.002800807822495699, 0.008264693431556225, 0.0018400037661194801, 0.0005880141980014741, 0.00026589370099827647, 0.0024606771767139435, 0.0005415186169557273, 0.0010918641928583384, 0.0004145796992816031, 0.07484925538301468], [0.025626564398407936, 0.014617021195590496, 0.029449205845594406, 0.01090006809681654, 0.029176248237490654, 0.03287489712238312, 0.03337057679891586, 0.03970439359545708, 0.009725471958518028, 0.06682603061199188, 0.02995423786342144, 0.12703609466552734, 0.10206883400678635, 0.13808180391788483, 0.04458374157547951, 0.025545308366417885, 0.03393848240375519, 0.02176060527563095, 0.028937259688973427, 0.03836212307214737, 0.006870886776596308, 0.02663516253232956, 0.021285323426127434, 0.06266963481903076], [0.00405987398698926, 0.003799490397796035, 0.02106349729001522, 0.004321799613535404, 0.014653063379228115, 0.011936246417462826, 0.008369805291295052, 0.025797907263040543, 0.045433349907398224, 0.07172500342130661, 0.11231592297554016, 0.13401645421981812, 0.1712266206741333, 0.1594580113887787, 0.08853765577077866, 0.0110731590539217, 0.01916368305683136, 0.005900848191231489, 0.004791008774191141, 0.013249638490378857, 0.008057529106736183, 0.01455276645720005, 0.029025819152593613, 0.01747075654566288], [0.0014620748115703464, 0.0021828608587384224, 0.05899056792259216, 0.008080813102424145, 0.01077973935753107, 0.011560877785086632, 0.016143685206770897, 0.05397701635956764, 0.11423742026090622, 0.04834837093949318, 0.037376519292593, 0.07998879998922348, 0.1484455019235611, 0.10796458274126053, 0.1479080468416214, 0.007989531382918358, 0.010630050674080849, 0.005331122316420078, 0.009717305190861225, 0.031210558488965034, 0.033501263707876205, 0.01247315015643835, 0.015503483824431896, 0.026196584105491638], [0.01062224805355072, 0.011291736736893654, 0.04237626865506172, 0.011945155449211597, 0.026718564331531525, 0.03638945147395134, 0.010677478276193142, 0.03650656342506409, 
0.02630430832505226, 0.10019399970769882, 0.048954226076602936, 0.09343775361776352, 0.07712411880493164, 0.1044258177280426, 0.09118808805942535, 0.025193991139531136, 0.029099859297275543, 0.02365284413099289, 0.010513238608837128, 0.041301481425762177, 0.016562502831220627, 0.04759803041815758, 0.03754889592528343, 0.04037339612841606], [0.02081959880888462, 0.037134941667318344, 0.06103391945362091, 0.007042900659143925, 0.03313417732715607, 0.01648656092584133, 0.021253596991300583, 0.027634957805275917, 0.06614743173122406, 0.12883234024047852, 0.1030455231666565, 0.021892229095101357, 0.025934509932994843, 0.03257528692483902, 0.09920854866504669, 0.017345190048217773, 0.04923318699002266, 0.013659361749887466, 0.024386154487729073, 0.024048691615462303, 0.029407622292637825, 0.07808970659971237, 0.05008767172694206, 0.011565959081053734], [0.002589118666946888, 0.0029265356715768576, 0.03864956647157669, 0.007575585972517729, 0.004920803010463715, 0.007724477909505367, 0.0024641244672238827, 0.003092467784881592, 0.032598040997982025, 0.0348467156291008, 0.08384352922439575, 0.035009365528821945, 0.09506528824567795, 0.07434951514005661, 0.4810183644294739, 0.016688954085111618, 0.008442722260951996, 0.0032314190175384283, 0.001407488132826984, 0.0023445601109415293, 0.00689974520355463, 0.009379898197948933, 0.0370585098862648, 0.007873187772929668], [0.011029050685465336, 0.006946741137653589, 0.014784514904022217, 0.009018130600452423, 0.014827827922999859, 0.018649570643901825, 0.01243594940751791, 0.019989121705293655, 0.014368544332683086, 0.11373593658208847, 0.10044585913419724, 0.1280105710029602, 0.100049689412117, 0.1325032114982605, 0.09552376717329025, 0.03941786289215088, 0.02500098943710327, 0.015149401500821114, 0.013844280503690243, 0.0234680213034153, 0.00607824232429266, 0.0317874476313591, 0.03193364292383194, 0.021001651883125305], [0.004644445143640041, 0.005174445919692516, 0.015417278744280338, 0.002026755828410387, 
0.004846465308219194, 0.00626257574185729, 0.003783119609579444, 0.0014753780560567975, 0.010513991117477417, 0.03367742523550987, 0.367012083530426, 0.017667599022388458, 0.046650759875774384, 0.0390218086540699, 0.24286964535713196, 0.02012801356613636, 0.019600631669163704, 0.014881442300975323, 0.007069645449519157, 0.00215162243694067, 0.005377994384616613, 0.014380007982254028, 0.11342580616474152, 0.0019410577369853854], [0.0016910071717575192, 0.0034145198296755552, 0.017120568081736565, 0.06278184801340103, 0.01744367554783821, 0.00844349805265665, 0.004633874632418156, 0.05138305202126503, 0.017148854210972786, 0.006041232496500015, 0.009687277488410473, 0.21503718197345734, 0.21928103268146515, 0.13562066853046417, 0.06529155373573303, 0.03595762699842453, 0.017253423109650612, 0.0027624869253486395, 0.002249425044283271, 0.02764304354786873, 0.004677198827266693, 0.0013734496897086501, 0.007629588712006807, 0.06543393433094025], [0.008617659099400043, 0.008026999421417713, 0.02738870494067669, 0.012633527629077435, 0.01136032771319151, 0.008969114162027836, 0.0064962757751345634, 0.010923953726887703, 0.013288857415318489, 0.020058605819940567, 0.09631981700658798, 0.05956853926181793, 0.09132811427116394, 0.0735042616724968, 0.22794441878795624, 0.06395365297794342, 0.04343913868069649, 0.029944417998194695, 0.021367527544498444, 0.027582794427871704, 0.018833601847290993, 0.01826525293290615, 0.07649867981672287, 0.023685792461037636], [0.00015503127360716462, 0.000539578206371516, 0.001978781772777438, 0.03168248385190964, 0.0029458566568791866, 0.0006988136447034776, 0.0008459860109724104, 0.010147017426788807, 0.0011194840772077441, 0.0012523119803518057, 0.0007388820522464812, 0.3337886929512024, 0.3387242555618286, 0.11261522769927979, 0.0112457862123847, 0.026045309379696846, 0.004014861304312944, 0.0008195140981115401, 0.0009451567311771214, 0.015817873179912567, 0.0009227714617736638, 0.00038189932820387185, 0.0007291169022209942, 
0.10184524208307266]], [[0.007776106707751751, 0.007139397785067558, 0.07094690203666687, 0.04827521741390228, 0.014788289554417133, 0.04904450476169586, 0.021012194454669952, 0.04560686647891998, 0.08715822547674179, 0.022974392399191856, 0.26347681879997253, 0.04778613522648811, 0.005387287586927414, 0.004581392742693424, 0.011289565823972225, 0.019247131422162056, 0.00612108176574111, 0.03696819394826889, 0.00805863831192255, 0.02094871737062931, 0.031364768743515015, 0.017277032136917114, 0.10837720334529877, 0.044393859803676605], [0.01618134044110775, 0.011683906428515911, 0.08492981642484665, 0.07142505049705505, 0.019025860354304314, 0.05482396483421326, 0.03204803541302681, 0.08393329381942749, 0.04164641723036766, 0.01132470928132534, 0.061056144535541534, 0.02390417270362377, 0.00415490847080946, 0.005418827291578054, 0.014480777084827423, 0.031906552612781525, 0.01165292039513588, 0.08941151201725006, 0.02744988352060318, 0.07907713204622269, 0.05844331532716751, 0.019083533436059952, 0.07750386744737625, 0.06943406164646149], [0.02109300158917904, 0.020756525918841362, 0.049137182533741, 0.027974490076303482, 0.009535628370940685, 0.03428049013018608, 0.027521852403879166, 0.024427777156233788, 0.16370052099227905, 0.07531607151031494, 0.033313632011413574, 0.06627083569765091, 0.03110560216009617, 0.0412328727543354, 0.05430717393755913, 0.021956194192171097, 0.004284511785954237, 0.020951425656676292, 0.013746929354965687, 0.013472471386194229, 0.057370491325855255, 0.04398302361369133, 0.02661052905023098, 0.11765071749687195], [0.013919277116656303, 0.012100204825401306, 0.017775965854525566, 0.031766436994075775, 0.06022458150982857, 0.12166444957256317, 0.04482997953891754, 0.07718008756637573, 0.10491663962602615, 0.08023475855588913, 0.020658813416957855, 0.07732497155666351, 0.0371645987033844, 0.05644052103161812, 0.030410317704081535, 0.029455291107296944, 0.021645231172442436, 0.022313376888632774, 0.012713721953332424, 0.02648582123219967, 
0.01939689926803112, 0.02587679959833622, 0.009060370735824108, 0.04644077643752098], [0.007574934978038073, 0.005997462663799524, 0.03886979818344116, 0.024900449439883232, 0.050306014716625214, 0.02977672964334488, 0.04920937865972519, 0.08369448781013489, 0.06990866363048553, 0.1441900134086609, 0.05201791599392891, 0.10237029194831848, 0.02277831919491291, 0.06340031325817108, 0.024087045341730118, 0.016225622966885567, 0.03175436332821846, 0.03696160390973091, 0.03416869416832924, 0.03470736742019653, 0.013593790121376514, 0.028900574892759323, 0.007156469393521547, 0.027449704706668854], [0.0188266783952713, 0.024788610637187958, 0.041504159569740295, 0.02646070532500744, 0.030954411253333092, 0.033865202218294144, 0.040335483849048615, 0.09218785911798477, 0.11567080765962601, 0.07408198714256287, 0.06401143223047256, 0.07732252776622772, 0.08072592318058014, 0.060492709279060364, 0.026517033576965332, 0.018522735685110092, 0.016393953934311867, 0.016717426478862762, 0.018448898568749428, 0.030381353572010994, 0.024346783757209778, 0.026752416044473648, 0.019097231328487396, 0.02159358374774456], [0.0027685125824064016, 0.0034589432179927826, 0.009257923811674118, 0.003159091342240572, 0.010641125030815601, 0.007008053828030825, 0.014759177342057228, 0.018149934709072113, 0.23900385200977325, 0.2403440773487091, 0.10064616054296494, 0.08557571470737457, 0.1643395721912384, 0.04536000266671181, 0.01935882307589054, 0.002454544650390744, 0.0036713769659399986, 0.0014567070174962282, 0.0026552234776318073, 0.0022780767176300287, 0.005877834744751453, 0.010136671364307404, 0.004189528524875641, 0.0034491962287575006], [0.011480643413960934, 0.0044020055793225765, 0.004293904639780521, 0.004696325398981571, 0.014715967699885368, 0.028973286971449852, 0.013177813030779362, 0.029680605977773666, 0.03044186905026436, 0.5250466465950012, 0.013969463296234608, 0.21848806738853455, 0.0025872341357171535, 0.03235267475247383, 0.001939703244715929, 0.002233010483905673, 
0.0028337608091533184, 0.007464367430657148, 0.0016978259664028883, 0.0033807174768298864, 0.0013593090698122978, 0.013915074057877064, 0.0008942090207710862, 0.029975520446896553], [0.0035177026875317097, 0.006071246694773436, 0.0380704365670681, 0.011766720563173294, 0.0062440913170576096, 0.03090403415262699, 0.023077504709362984, 0.01195544470101595, 0.3318335711956024, 0.08899954706430435, 0.15155673027038574, 0.05212448909878731, 0.082685686647892, 0.027911527082324028, 0.07038112729787827, 0.007432193960994482, 0.001923597534187138, 0.01176002062857151, 0.004119067918509245, 0.0016353758983314037, 0.012899359688162804, 0.0060881017707288265, 0.012258345261216164, 0.0047841668128967285], [0.012656974606215954, 0.01529429480433464, 0.008665764704346657, 0.018483076244592667, 0.024514107033610344, 0.008630593307316303, 0.005675173364579678, 0.033338870853185654, 0.010378465056419373, 0.016625409945845604, 0.06193993240594864, 0.2592688500881195, 0.06848093867301941, 0.2195819467306137, 0.027466347441077232, 0.044798802584409714, 0.033574432134628296, 0.020532624796032906, 0.007319148164242506, 0.044696077704429626, 0.00982674304395914, 0.007955429144203663, 0.019698960706591606, 0.020597077906131744], [0.005609571468085051, 0.01070496253669262, 0.020326677709817886, 0.007429653778672218, 0.007247691974043846, 0.0026026396080851555, 0.0068158116191625595, 0.003046131692826748, 0.05565642565488815, 0.026267699897289276, 0.04862280562520027, 0.021983126178383827, 0.3956640362739563, 0.02716045454144478, 0.21564844250679016, 0.012776491232216358, 0.013192659243941307, 0.002636376768350601, 0.009868440218269825, 0.00408589281141758, 0.03832561895251274, 0.014831745065748692, 0.040298279374837875, 0.009198358282446861], [0.005535749718546867, 0.007167233154177666, 0.015027707442641258, 0.013319316320121288, 0.013681392185389996, 0.007323064375668764, 0.00588195538148284, 0.02828460931777954, 0.008305735886096954, 0.013671760447323322, 0.015150162391364574, 
0.12484196573495865, 0.05267185717821121, 0.1477130800485611, 0.07046450674533844, 0.07490851730108261, 0.03219921514391899, 0.019147709012031555, 0.02268942818045616, 0.13351070880889893, 0.04194030910730362, 0.028826210647821426, 0.02429511398077011, 0.09344272315502167], [0.0009894417598843575, 0.001463310793042183, 0.04265854135155678, 0.008354552090168, 0.0035320704337209463, 0.0005815940676257014, 0.004602773580700159, 0.0028781616128981113, 0.013315192423760891, 0.007234211545437574, 0.03349752724170685, 0.027461759746074677, 0.12247080355882645, 0.03552453592419624, 0.328978031873703, 0.0223353561013937, 0.01080064382404089, 0.003233078634366393, 0.030547933652997017, 0.02428494393825531, 0.09906622022390366, 0.03579078987240791, 0.08987738937139511, 0.05052116513252258], [0.0010359887965023518, 0.0016457008896395564, 0.010570527985692024, 0.029247378930449486, 0.005114913452416658, 0.0015126117505133152, 0.0006975028081797063, 0.018902184441685677, 0.0002676411240827292, 0.0011527234455570579, 0.0008314763545058668, 0.02140299789607525, 0.00222645397298038, 0.02880493365228176, 0.01688367873430252, 0.12006426602602005, 0.018209388479590416, 0.038385383784770966, 0.012125077657401562, 0.3780563175678253, 0.02224601060152054, 0.02283095195889473, 0.01016050111502409, 0.23762531578540802], [0.002168836537748575, 0.0037478189915418625, 0.04857263341546059, 0.03162679076194763, 0.004729498643428087, 0.001616648631170392, 0.0024110116064548492, 0.0037644903641194105, 0.0040121800266206264, 0.0019938182085752487, 0.007779193110764027, 0.0045622275210917, 0.0054969796910882, 0.00463171536102891, 0.08814150840044022, 0.0669635534286499, 0.023472437635064125, 0.023868173360824585, 0.047449853271245956, 0.06603793799877167, 0.23476415872573853, 0.05219319835305214, 0.1439322531223297, 0.12606307864189148], [0.00966714695096016, 0.010048530995845795, 0.03241245821118355, 0.032518088817596436, 0.031833332031965256, 0.03070555068552494, 0.021205613389611244, 
0.02197251282632351, 0.01499954517930746, 0.020215904340147972, 0.009471539407968521, 0.04017825052142143, 0.010231892578303814, 0.048831209540367126, 0.044896893203258514, 0.05977218225598335, 0.0323435440659523, 0.0433892123401165, 0.04225356504321098, 0.06515948474407196, 0.05619325116276741, 0.07148997485637665, 0.029362967237830162, 0.22084732353687286], [0.006917897146195173, 0.006999897304922342, 0.06311433762311935, 0.027839289978146553, 0.029115885496139526, 0.0119396997615695, 0.022093823179602623, 0.028048181906342506, 0.01945224218070507, 0.03366141766309738, 0.016162969172000885, 0.026166558265686035, 0.010353261604905128, 0.030679523944854736, 0.04539743438363075, 0.03180338814854622, 0.05178380757570267, 0.05431337282061577, 0.09197630733251572, 0.09423226863145828, 0.08244756609201431, 0.08578041940927505, 0.03119809366762638, 0.09852232784032822], [0.01614074595272541, 0.02195735275745392, 0.03261832147836685, 0.02772720530629158, 0.03622548282146454, 0.01168686430901289, 0.015623155981302261, 0.020921986550092697, 0.0064277444034814835, 0.010040869005024433, 0.003997722640633583, 0.010982646606862545, 0.028918880969285965, 0.055212121456861496, 0.04525710269808769, 0.05005660280585289, 0.07812096178531647, 0.030449647456407547, 0.08926880359649658, 0.12413249909877777, 0.08861919492483139, 0.07176049053668976, 0.031233368441462517, 0.09262016415596008], [0.007431797217577696, 0.007900135591626167, 0.05052073672413826, 0.014269152656197548, 0.020136769860982895, 0.009055362083017826, 0.02042384073138237, 0.01875675469636917, 0.05817420035600662, 0.06353256851434708, 0.03901512920856476, 0.03145278990268707, 0.044709742069244385, 0.049713097512722015, 0.061625637114048004, 0.015271762385964394, 0.02469879947602749, 0.01259327307343483, 0.04445904493331909, 0.039854682981967926, 0.13716478645801544, 0.10019537806510925, 0.05790562927722931, 0.07113897800445557], [0.01766776666045189, 0.007280869875103235, 0.012048882432281971, 0.015427345409989357, 
0.01984047330915928, 0.027399161830544472, 0.014529110863804817, 0.03524802625179291, 0.006865139119327068, 0.10164444148540497, 0.003952043130993843, 0.06255479902029037, 0.0007170886383391917, 0.019056210294365883, 0.003061775816604495, 0.008903877809643745, 0.009661194868385792, 0.022405659779906273, 0.012392951175570488, 0.0404619537293911, 0.015963982790708542, 0.11059372127056122, 0.008023944683372974, 0.42429956793785095], [0.0073294732719659805, 0.007662674877792597, 0.11538580805063248, 0.025151679292321205, 0.00784928910434246, 0.02631462924182415, 0.02558598667383194, 0.011093047447502613, 0.07835555821657181, 0.014072997495532036, 0.02667275443673134, 0.005663194693624973, 0.005934509914368391, 0.005818965844810009, 0.05660340189933777, 0.011440152302384377, 0.005466467700898647, 0.03449935466051102, 0.034554969519376755, 0.016887422651052475, 0.2175094038248062, 0.05568687617778778, 0.11671534925699234, 0.08774600178003311], [0.024540472775697708, 0.021213240921497345, 0.02661614492535591, 0.04297887906432152, 0.03756212070584297, 0.01551822479814291, 0.015125943347811699, 0.041762545704841614, 0.013272546231746674, 0.012739025056362152, 0.03957941755652428, 0.07120908796787262, 0.016312913969159126, 0.06922796368598938, 0.02653368189930916, 0.05167905241250992, 0.04704386740922928, 0.04230954498052597, 0.026578649878501892, 0.10372970253229141, 0.046340301632881165, 0.030577857047319412, 0.07848482578992844, 0.09906400740146637], [0.009498877450823784, 0.012275727465748787, 0.06958416104316711, 0.018217163160443306, 0.009238678961992264, 0.006465250160545111, 0.02128303237259388, 0.009957689791917801, 0.052239254117012024, 0.015361826866865158, 0.0226901862770319, 0.007489518262445927, 0.028122277930378914, 0.006242214702069759, 0.09485635906457901, 0.015396546572446823, 0.01328637357801199, 0.01233269926160574, 0.04967956244945526, 0.024599658325314522, 0.20982560515403748, 0.07322806119918823, 0.12047579139471054, 0.09765347093343735], 
[0.011533087119460106, 0.00698850629851222, 0.0254516638815403, 0.01707134209573269, 0.019994664937257767, 0.03984508290886879, 0.04058246314525604, 0.1310279369354248, 0.015714196488261223, 0.01439660880714655, 0.01554171834141016, 0.03679986670613289, 0.0019718538969755173, 0.01987542025744915, 0.008769955486059189, 0.01957053877413273, 0.013266503810882568, 0.051293738186359406, 0.043215878307819366, 0.20656085014343262, 0.04192136228084564, 0.04606224596500397, 0.02656414732336998, 0.14598026871681213]], [[0.010258806869387627, 0.010846924968063831, 0.03847846761345863, 0.00563077162951231, 0.023008236661553383, 0.005097625777125359, 0.04961662366986275, 0.014752811752259731, 0.02315492369234562, 0.01588149555027485, 0.016941800713539124, 0.005454156547784805, 0.10433301329612732, 0.013487554155290127, 0.10991498827934265, 0.006703569553792477, 0.04160807281732559, 0.014299017377197742, 0.11366044729948044, 0.054633647203445435, 0.15831631422042847, 0.059138085693120956, 0.07403537631034851, 0.03074727952480316], [0.004759819246828556, 0.005137534812092781, 0.041395626962184906, 0.0028542252257466316, 0.029115712270140648, 0.0037413411773741245, 0.050990741699934006, 0.03454635664820671, 0.027435507625341415, 0.026874158531427383, 0.024913927540183067, 0.011961814947426319, 0.14252887666225433, 0.020678095519542694, 0.10473879426717758, 0.0035614483058452606, 0.05385536700487137, 0.011185901239514351, 0.09287693351507187, 0.05696802958846092, 0.10356605798006058, 0.07169558852910995, 0.044712942093610764, 0.029905222356319427], [0.039016321301460266, 0.01454964280128479, 0.04664524272084236, 0.018548423424363136, 0.12150077521800995, 0.009831199422478676, 0.034127481281757355, 0.16059446334838867, 0.0473470464348793, 0.029820937663316727, 0.012377790175378323, 0.02795601636171341, 0.011868839152157307, 0.037175796926021576, 0.003401604015380144, 0.0010393676348030567, 0.02835630252957344, 0.002336528617888689, 0.009208104573190212, 0.05404935032129288, 
0.054550834000110626, 0.07049746066331863, 0.019677983596920967, 0.14552243053913116], [0.007750331424176693, 0.005169033072888851, 0.04205375909805298, 0.03093746304512024, 0.043229155242443085, 0.005355120170861483, 0.01924743503332138, 0.05409101024270058, 0.027121176943182945, 0.00776032917201519, 0.020233498886227608, 0.026409203186631203, 0.09532907605171204, 0.01699179597198963, 0.2551102340221405, 0.02338556945323944, 0.07623885571956635, 0.008170154877007008, 0.035326357930898666, 0.09980573505163193, 0.05375710129737854, 0.007482933346182108, 0.02331445924937725, 0.01573018543422222], [0.006214428227394819, 0.007786046713590622, 0.043969497084617615, 0.17613936960697174, 0.006258904002606869, 0.010903585702180862, 0.01773407869040966, 0.016681984066963196, 0.06197798624634743, 0.0056330133229494095, 0.011870671063661575, 0.13682816922664642, 0.20474018156528473, 0.08685725182294846, 0.08159349113702774, 0.06276433914899826, 0.0047506485134363174, 0.005112847778946161, 0.006053614430129528, 0.008548582904040813, 0.010429148562252522, 0.0015985185746103525, 0.004204005468636751, 0.02134965918958187], [0.008600858971476555, 0.007537766359746456, 0.04535260796546936, 0.03669024631381035, 0.11263060569763184, 0.01614385098218918, 0.10451968014240265, 0.11975309997797012, 0.029092388227581978, 0.03147063031792641, 0.04539884999394417, 0.00802733562886715, 0.035077545791864395, 0.03621787950396538, 0.0108562046661973, 0.008268583565950394, 0.031536996364593506, 0.0063272882252931595, 0.043151188641786575, 0.08984734117984772, 0.019784415140748024, 0.048376116901636124, 0.08256599307060242, 0.022772474214434624], [0.07042960077524185, 0.04114528000354767, 0.03854721412062645, 0.08718221634626389, 0.02344302460551262, 0.18356528878211975, 0.02214822918176651, 0.0748760774731636, 0.04925134778022766, 0.006207357160747051, 0.002234611427411437, 0.14845909178256989, 0.0015507062198594213, 0.04329194128513336, 0.00266653997823596, 0.011691471561789513, 
0.002966536208987236, 0.007982621900737286, 0.0011205892078578472, 0.004998169373720884, 0.004449400119483471, 0.0018733169417828321, 0.002026877598837018, 0.16789253056049347], [0.0024635076988488436, 0.0018667440162971616, 0.02444947324693203, 0.0008882411057129502, 0.01827947422862053, 0.01579619199037552, 0.6771681904792786, 0.008860143832862377, 0.092338427901268, 0.003995210397988558, 0.018195806071162224, 0.0003542797057889402, 0.026827262714505196, 0.0003888154460582882, 0.009908162988722324, 0.0001656158856349066, 0.003263382473960519, 0.0015616631135344505, 0.0525255911052227, 0.0017456619534641504, 0.015258429571986198, 0.002727237995713949, 0.020189223811030388, 0.0007831440889276564], [0.06997160613536835, 0.0615265928208828, 0.043953679502010345, 0.12755654752254486, 0.021914375945925713, 0.09750842303037643, 0.02686314843595028, 0.36993616819381714, 0.09974393248558044, 0.009495089761912823, 0.01255734171718359, 0.012859388254582882, 0.00031829721410758793, 0.018098052591085434, 0.0008576384861953557, 0.009558168239891529, 0.0012358158128336072, 0.0008582618902437389, 0.0002742204815149307, 0.002985199447721243, 0.0006744134589098394, 0.0009088788647204638, 0.0026400326751172543, 0.007704779971390963], [0.009538492187857628, 0.008959932252764702, 0.028339002281427383, 0.011376174166798592, 0.044280726462602615, 0.021067697554826736, 0.25173893570899963, 0.14751173555850983, 0.16771027445793152, 0.07129377871751785, 0.10495249927043915, 0.009405497461557388, 0.032613061368465424, 0.0034415735863149166, 0.007232805714011192, 0.0033268253318965435, 0.006692437455058098, 0.0029187523759901524, 0.019387152045965195, 0.010266026481986046, 0.0059052822180092335, 0.012653677724301815, 0.01637907326221466, 0.0030085647013038397], [0.003142759669572115, 0.002750352257862687, 0.009618046693503857, 0.016509246081113815, 0.010385999456048012, 0.00229652994312346, 0.002034289762377739, 0.5759153366088867, 0.007165208458900452, 0.019571639597415924, 
0.0013318525161594152, 0.2394864559173584, 0.000704340054653585, 0.06557264924049377, 0.0012305635027587414, 0.0038732532411813736, 0.00193214847240597, 0.0007401082548312843, 0.0002889248135033995, 0.016087554395198822, 0.00021223169460427016, 0.001564398524351418, 9.96996823232621e-05, 0.017486369237303734], [0.0015484205214306712, 0.0017266402719542384, 0.01744483970105648, 0.00038921867962926626, 0.07743290066719055, 0.0030518516432493925, 0.07540247589349747, 0.13202893733978271, 0.06960519403219223, 0.0255285557359457, 0.33592724800109863, 0.014771977439522743, 0.09099224209785461, 0.004164915066212416, 0.10356175154447556, 0.0003201027284376323, 0.019622109830379486, 0.0006587289390154183, 0.010445397347211838, 0.004328747745603323, 0.0007974680047482252, 0.0009482241002842784, 0.009072771295905113, 0.0002292672434123233], [0.0010739152785390615, 0.0015347334556281567, 0.0007798729347996414, 0.00214506802149117, 0.0014809136046096683, 0.0011184249306097627, 0.0014043671544641256, 0.0566389262676239, 0.010998820886015892, 0.006319927051663399, 0.0018768624868243933, 0.8023082613945007, 0.028825776651501656, 0.061259083449840546, 0.002978944219648838, 0.010448366403579712, 0.0008277110173366964, 0.0011465477291494608, 0.00038910936564207077, 0.003603215329349041, 0.0003192793810740113, 0.00016332516679540277, 2.2311740394798107e-05, 0.002336170757189393], [0.00022067528334446251, 0.00017924030544236302, 0.0018548258813098073, 5.745398811995983e-05, 0.004581739194691181, 0.00013752061931882054, 0.010077341459691525, 0.04214577004313469, 0.05790119990706444, 0.003389249090105295, 0.03233225271105766, 0.15189126133918762, 0.49143287539482117, 0.014974789693951607, 0.17334143817424774, 0.0001361667673336342, 0.0046448479406535625, 0.00010611881589284167, 0.0034954682923853397, 0.0038172348868101835, 0.0024860703852027655, 9.791443881113082e-05, 0.0004432548303157091, 0.0002553242666181177], [0.0010215503862127662, 0.0017331173876300454, 0.00262626470066607, 
0.00040455959970131516, 0.0033646412193775177, 0.0001853752473834902, 0.0029866904951632023, 0.004541637841612101, 0.0016423204215243459, 0.007335829082876444, 0.0030639353208243847, 0.41658732295036316, 0.10812083631753922, 0.3325902223587036, 0.07842870056629181, 0.003466794965788722, 0.006660176906734705, 0.0007313869427889585, 0.006153590977191925, 0.0030156567227095366, 0.001512146438471973, 0.0019646163564175367, 0.0006018795538693666, 0.011260720901191235], [8.088747563306242e-05, 0.00017176283290609717, 0.0006075851269997656, 0.0002334480086574331, 0.0007193080964498222, 4.6896930143702775e-05, 0.0007865416700951755, 0.0007180083775892854, 0.0012390476185828447, 0.0005610657390207052, 0.0013056938769295812, 0.00894954428076744, 0.35453638434410095, 0.0057898773811757565, 0.5838589072227478, 0.004595257807523012, 0.011712976731359959, 0.0009408018086105585, 0.011401977390050888, 0.004808748606592417, 0.0056151943281292915, 0.0002770610444713384, 0.0006262167589738965, 0.00041690215584822], [0.00033429701579734683, 0.0009767541196197271, 0.0018288003047928214, 0.003078675363212824, 0.00016433850396424532, 0.0001959124783752486, 0.0008772002765908837, 0.00031703259446658194, 0.001282692071981728, 0.0010315364925190806, 0.00041850778507068753, 0.06127696856856346, 0.3289264738559723, 0.10249282419681549, 0.4028262197971344, 0.06939821690320969, 0.0018175856675952673, 0.0029978498350828886, 0.0068337577395141125, 0.0020877837669104338, 0.004237203858792782, 0.0006469031795859337, 0.00040028526564128697, 0.005552185233682394], [0.0013413127744570374, 0.0038812116254121065, 0.005439338274300098, 0.0034343809820711613, 0.006750501226633787, 0.0010672955540940166, 0.0031716793309897184, 0.00515733053907752, 0.0018182964995503426, 0.010945419780910015, 0.013497460633516312, 0.011195885017514229, 0.14288383722305298, 0.04716560244560242, 0.34353870153427124, 0.06197324022650719, 0.09113503247499466, 0.03250120207667351, 0.07969705015420914, 0.05310032516717911, 
0.013888695277273655, 0.02928422950208187, 0.02773072011768818, 0.009401270188391209], [0.0035380159970372915, 0.008303824812173843, 0.0027498588897287846, 0.0047791218385100365, 0.000979823525995016, 0.0037548583932220936, 0.0006504419725388288, 0.0009180328925140202, 0.000781947048380971, 0.001096438616514206, 0.00043268303852528334, 0.19260576367378235, 0.02337903343141079, 0.13186480104923248, 0.2793983519077301, 0.14782360196113586, 0.01448750775307417, 0.07401915639638901, 0.012735153548419476, 0.00898073986172676, 0.00985298678278923, 0.0017826792318373919, 0.0010677684331312776, 0.07401740550994873], [8.998931298265234e-05, 0.00015416859241668135, 0.0007103607058525085, 3.706021379912272e-05, 0.0007411781116388738, 0.00017024902626872063, 0.0066412524320185184, 4.3981519411318004e-05, 0.00033042323775589466, 0.0002969362831208855, 0.0013450447004288435, 0.0001880963973235339, 0.16923367977142334, 0.0004365683998912573, 0.21171222627162933, 0.0009618153562769294, 0.015782859176397324, 0.015492602251470089, 0.5107719898223877, 0.005477784667164087, 0.04298898205161095, 0.0032186529133468866, 0.01279544085264206, 0.00037856705603189766], [0.012927855364978313, 0.018955089151859283, 0.008937759324908257, 0.024597465991973877, 0.0014137366088107228, 0.0037676943466067314, 0.00034766923636198044, 0.000369903544196859, 0.0001298616552958265, 0.0004763985925819725, 0.0007027378887869418, 0.004357371479272842, 0.0036843123380094767, 0.01601335033774376, 0.18114091455936432, 0.3468828499317169, 0.030551277101039886, 0.11807678639888763, 0.02957761287689209, 0.049995213747024536, 0.060810115188360214, 0.015475251711905003, 0.025284256786108017, 0.04552458971738815], [0.002935125958174467, 0.0030319998040795326, 0.00967713538557291, 0.0061828275211155415, 0.00677385414019227, 0.0012989406241104007, 0.009230966679751873, 0.0009034126996994019, 0.0011883542174473405, 0.00819423608481884, 0.01085341814905405, 0.0027145398780703545, 0.07433345913887024, 
0.0024878536351025105, 0.07347653806209564, 0.02480214089155197, 0.03343502804636955, 0.030477453023195267, 0.23862075805664062, 0.05202465131878853, 0.14309048652648926, 0.16395622491836548, 0.08730448782444, 0.013006171211600304], [0.0032287349458783865, 0.0027032047510147095, 0.01606835424900055, 0.020267073065042496, 0.005021610762923956, 0.000827273353934288, 0.00023056811187416315, 0.009955884888768196, 0.00013731593207921833, 0.0016555717447772622, 0.00045334859169088304, 0.035449933260679245, 0.0036871200427412987, 0.13080842792987823, 0.07031483203172684, 0.03154545649886131, 0.025027820840477943, 0.016370026394724846, 0.009130689315497875, 0.3009348511695862, 0.03997928649187088, 0.04112556204199791, 0.008615617640316486, 0.2264614999294281], [0.0011421559611335397, 0.0007756974082440138, 0.013397196307778358, 0.0002168914652429521, 0.010169398039579391, 0.0005652437685057521, 0.006617826875299215, 0.000802132417447865, 0.00018988465308211744, 0.000834047154057771, 0.004574621096253395, 0.00020913152548018843, 0.03916839882731438, 0.0018803843995556235, 0.29287195205688477, 0.0006636774633079767, 0.047827962785959244, 0.004999982193112373, 0.18529045581817627, 0.042356766760349274, 0.06937973201274872, 0.042306087911129, 0.22803041338920593, 0.005729921627789736]], [[0.03540727123618126, 0.029956607148051262, 0.06694845855236053, 0.08110020309686661, 0.04830385372042656, 0.04687412083148956, 0.010815180838108063, 0.01743338629603386, 0.0217489805072546, 0.014024356380105019, 0.01042906567454338, 0.0071354941464960575, 0.006746556144207716, 0.020986266434192657, 0.02573203854262829, 0.04862275719642639, 0.04227074235677719, 0.03766150400042534, 0.014936763793230057, 0.05042039230465889, 0.11976241320371628, 0.07324156910181046, 0.10486793518066406, 0.06457406282424927], [0.014087316580116749, 0.023799320682883263, 0.024543073028326035, 0.04483942314982414, 0.0368962399661541, 0.026505718007683754, 0.004246165044605732, 0.011514861136674881, 
0.017081368714571, 0.008661209605634212, 0.01521233655512333, 0.007488170173019171, 0.010875040665268898, 0.023628326132893562, 0.08467002213001251, 0.06803329288959503, 0.09148704260587692, 0.06757410615682602, 0.01534404419362545, 0.055504582822322845, 0.15526266396045685, 0.045426130294799805, 0.10580357909202576, 0.04151586443185806], [0.011235632002353668, 0.021366458386182785, 0.04328165575861931, 0.023647502064704895, 0.07482379674911499, 0.01419123075902462, 0.01415619719773531, 0.017831604927778244, 0.08365219086408615, 0.027816014364361763, 0.03692391514778137, 0.005723021924495697, 0.006487517151981592, 0.007604518905282021, 0.020916303619742393, 0.010905076749622822, 0.0505475252866745, 0.010687756352126598, 0.010624479502439499, 0.015925783663988113, 0.16500166058540344, 0.09900901466608047, 0.18870805203914642, 0.03893318399786949], [0.05522066354751587, 0.03727762773633003, 0.08181304484605789, 0.04550352320075035, 0.020235762000083923, 0.09818002581596375, 0.02313370443880558, 0.021023645997047424, 0.07232332974672318, 0.017683647572994232, 0.018276367336511612, 0.10539089888334274, 0.006364606786519289, 0.06294620782136917, 0.04192778095602989, 0.018638119101524353, 0.008341774344444275, 0.03440813720226288, 0.012692192569375038, 0.02135845459997654, 0.06309659034013748, 0.013193551450967789, 0.03188944607973099, 0.08908085525035858], [0.01360626146197319, 0.03629617020487785, 0.046796150505542755, 0.06531810015439987, 0.02113695628941059, 0.03072466515004635, 0.022882521152496338, 0.019469887018203735, 0.01052586268633604, 0.008774957619607449, 0.004038037732243538, 0.030752340331673622, 0.012111913412809372, 0.06839822232723236, 0.03232608735561371, 0.08891049772500992, 0.030991677194833755, 0.07280144840478897, 0.07747256755828857, 0.09213972091674805, 0.0726260170340538, 0.02224177122116089, 0.03112640045583248, 0.08853181451559067], [0.06600929796695709, 0.06134674325585365, 0.0336899533867836, 0.2088628113269806, 0.02742115966975689, 
0.016282113268971443, 0.004701007157564163, 0.120395727455616, 0.01226102840155363, 0.03342864662408829, 0.016236064955592155, 0.004705819766968489, 0.0034812677185982466, 0.005890188738703728, 0.0035247246269136667, 0.04425084590911865, 0.015062431804835796, 0.005645020864903927, 0.002471993677318096, 0.08880916982889175, 0.021188581362366676, 0.08470715582370758, 0.05743454024195671, 0.06219365820288658], [0.03192972019314766, 0.03912578150629997, 0.04316847398877144, 0.03827566280961037, 0.17213977873325348, 0.0008307953830808401, 0.009611106477677822, 0.025340503081679344, 0.009763128124177456, 0.018386974930763245, 0.010467524640262127, 0.0006405872409231961, 0.0043693482875823975, 0.004007742740213871, 0.004631910473108292, 0.010675753466784954, 0.1618974208831787, 0.0007125965785235167, 0.009703557938337326, 0.025997785851359367, 0.04576429724693298, 0.12077493965625763, 0.1853363811969757, 0.026448192074894905], [0.01023032981902361, 0.01118253730237484, 0.309129536151886, 0.05069110915064812, 0.005449294112622738, 0.10739384591579437, 0.008588275872170925, 0.023563891649246216, 0.08255875110626221, 0.018344616517424583, 0.043279848992824554, 0.018407706171274185, 0.0012640617787837982, 0.004093483090400696, 0.0476953461766243, 0.009179245680570602, 0.002570721786469221, 0.02120448276400566, 0.0018956507556140423, 0.008205901831388474, 0.035154104232788086, 0.01356441155076027, 0.08331479877233505, 0.08303800970315933], [0.005220211576670408, 0.01614118553698063, 0.10893556475639343, 0.03221810609102249, 0.06663580238819122, 0.033228807151317596, 0.06412092596292496, 0.05867548659443855, 0.4745330214500427, 0.03255031257867813, 0.03308425843715668, 0.012145640328526497, 0.004495329223573208, 0.004325805231928825, 0.009054239839315414, 0.0036245144437998533, 0.007186459377408028, 0.0020059754606336355, 0.0016490682028234005, 0.0011456089559942484, 0.011053116992115974, 0.0049763270653784275, 0.00877409428358078, 0.004220122937113047], [0.059892527759075165, 
0.032196879386901855, 0.12448164820671082, 0.03353731334209442, 0.007030339911580086, 0.21850116550922394, 0.033586665987968445, 0.22016386687755585, 0.06039196625351906, 0.009501414373517036, 0.012270016595721245, 0.08664744347333908, 0.002284223446622491, 0.019640697166323662, 0.009204821661114693, 0.005616732407361269, 0.0010396561119705439, 0.01382420863956213, 0.002553818514570594, 0.021101461723446846, 0.0023673309478908777, 0.001285254373215139, 0.003018961288034916, 0.01986161433160305], [0.005442453548312187, 0.006172669120132923, 0.06709261983633041, 0.003695558989420533, 0.06509576737880707, 0.04202815145254135, 0.14462217688560486, 0.003287531668320298, 0.2881309390068054, 0.006631958298385143, 0.11804132908582687, 0.0022468888200819492, 0.04996141791343689, 0.004833100363612175, 0.09445996582508087, 0.0028848876245319843, 0.030272696167230606, 0.012653612531721592, 0.019602522253990173, 0.00039853897760622203, 0.008009896613657475, 0.002061903476715088, 0.021763507276773453, 0.0006099702441133559], [0.2035265564918518, 0.001369207981042564, 0.00028278588433749974, 0.0003338667447678745, 0.001154970726929605, 0.021828148514032364, 0.006972486153244972, 0.002839189488440752, 0.008449362590909004, 0.0062533188611269, 0.00036661792546510696, 0.4882485568523407, 0.004368700087070465, 0.25357216596603394, 4.19121679442469e-05, 4.248786353855394e-05, 6.116942586231744e-06, 0.00010446996020618826, 2.1799245587317273e-05, 3.074007327086292e-05, 1.256368250324158e-06, 1.4866104720567819e-05, 4.359700938039168e-07, 0.0001699845161056146], [0.12484978139400482, 0.01762847602367401, 0.009536809287965298, 0.005904982797801495, 0.022760560736060143, 0.08051791042089462, 0.12596289813518524, 0.010755263268947601, 0.0454789437353611, 0.014729526825249195, 0.05389333888888359, 0.1798226237297058, 0.0774327740073204, 0.20975211262702942, 0.0076783387921750546, 0.00290543120354414, 0.0019320448627695441, 0.0029586360324174166, 0.0036341554950922728, 0.000505843257997185, 
0.00015386551967822015, 0.0002921113045886159, 0.0004276060499250889, 0.00048604109906591475], [0.020708220079541206, 0.0007245591259561479, 0.00016205813153646886, 0.0009953195694833994, 0.0011175668332725763, 0.03475736081600189, 0.004426873289048672, 0.0008286942029371858, 0.0022367776837199926, 0.004826091229915619, 0.0007270669448189437, 0.8466315269470215, 0.0065890406258404255, 0.07112263143062592, 0.00031779592973180115, 0.0010621582623571157, 3.942244075005874e-05, 0.0014336546882987022, 0.00015351625916082412, 8.687775698490441e-05, 1.414272264810279e-05, 7.140973320929334e-05, 4.8343890739488415e-06, 0.0009624367812648416], [0.0013694021617993712, 0.0053864819929003716, 0.000601820764131844, 0.0017047100700438023, 0.016815582290291786, 0.007336392533034086, 0.005425186362117529, 0.0002634789270814508, 0.007352028973400593, 0.002220664406195283, 0.01018099021166563, 0.08588489890098572, 0.13529422879219055, 0.4297686219215393, 0.08648664504289627, 0.019367050379514694, 0.04643943905830383, 0.0801142081618309, 0.04376199468970299, 0.0016935502644628286, 0.007619552314281464, 0.0016914374427869916, 0.0019219908863306046, 0.0012996657751500607], [0.03514588996767998, 0.023487625643610954, 0.003924927208572626, 0.011729661375284195, 0.005220240913331509, 0.02803559973835945, 0.0036837009247392416, 0.004581288900226355, 0.00411561131477356, 0.007264215033501387, 0.007670140825212002, 0.23155587911605835, 0.015818240121006966, 0.2828192114830017, 0.05154046043753624, 0.04729093983769417, 0.010966692119836807, 0.08057154715061188, 0.024188831448554993, 0.03942335769534111, 0.014478878118097782, 0.00684257410466671, 0.006456207018345594, 0.05318830907344818], [0.002093485090881586, 0.01127657387405634, 0.001523591228760779, 0.006704210769385099, 0.0026582027785480022, 0.003226851811632514, 0.001422842382453382, 0.0008103725267574191, 0.0007343110628426075, 0.0016304505988955498, 0.001736002042889595, 0.033577144145965576, 0.045690830796957016, 0.2365579754114151, 
0.07913626730442047, 0.1007821261882782, 0.03226805850863457, 0.16579031944274902, 0.10438065975904465, 0.07025936990976334, 0.051742106676101685, 0.01085618231445551, 0.01182923186570406, 0.023312797769904137], [0.019288938492536545, 0.027364199981093407, 0.003534802235662937, 0.054356515407562256, 0.006407143548130989, 0.004395663272589445, 0.0008002313552424312, 0.012898801825940609, 0.0035231963265687227, 0.016963373869657516, 0.020038804039359093, 0.030385565012693405, 0.037882234901189804, 0.10063277930021286, 0.032256439328193665, 0.18021312355995178, 0.02755070850253105, 0.03206392377614975, 0.008328222669661045, 0.1583137959241867, 0.038484491407871246, 0.07926380634307861, 0.03978365659713745, 0.0652695819735527], [0.0018334517953917384, 0.009191828779876232, 0.0006744982674717903, 0.004134261980652809, 0.008725347928702831, 6.935091369086877e-05, 0.00027243138174526393, 0.0004009853000752628, 0.0004205071600154042, 0.003706397023051977, 0.0049946922808885574, 0.0027764104306697845, 0.04317610710859299, 0.03739427402615547, 0.07381410896778107, 0.053897127509117126, 0.2980220913887024, 0.007298193406313658, 0.03634670004248619, 0.042645905166864395, 0.11282212287187576, 0.11746631562709808, 0.11718504875898361, 0.022731781005859375], [0.0025976714678108692, 0.004789800848811865, 0.002775483066216111, 0.007311849854886532, 0.0003012324159499258, 0.005631753243505955, 0.00014885047858115286, 0.0007633062195964158, 0.0010490037966519594, 0.0035125650465488434, 0.008342460729181767, 0.08074366301298141, 0.008498973213136196, 0.04748719558119774, 0.25617507100105286, 0.0542936697602272, 0.004504827782511711, 0.13588006794452667, 0.007196374237537384, 0.057221513241529465, 0.08792462199926376, 0.030618304386734962, 0.04459691420197487, 0.1476348489522934], [0.0002778592170216143, 0.0036880539264529943, 0.0003208577400073409, 0.001385473646223545, 0.0005335019086487591, 0.0001512352901045233, 5.7654753618407995e-05, 0.00017829578428063542, 0.0008734619477763772, 
0.002210042206570506, 0.0013178245862945914, 0.016973722726106644, 0.026505891233682632, 0.05300917848944664, 0.22035318613052368, 0.026729771867394447, 0.019387392327189445, 0.031063083559274673, 0.015721892938017845, 0.03716350719332695, 0.4277622103691101, 0.06839282065629959, 0.01994798704981804, 0.025995081290602684], [0.010183405131101608, 0.017853369936347008, 0.00832604244351387, 0.0060553178191185, 0.0006964594940654933, 0.008110057562589645, 0.0007120242225937545, 0.005756947211921215, 0.0021399897523224354, 0.002130570588633418, 0.003105791285634041, 0.06499199569225311, 0.008556743152439594, 0.08207199722528458, 0.12773236632347107, 0.02223331294953823, 0.004269532859325409, 0.09851589053869247, 0.0200145673006773, 0.28148460388183594, 0.08971554785966873, 0.016622917726635933, 0.02453581616282463, 0.0941847413778305], [0.0004739287542179227, 0.0018771589966490865, 0.001064723008312285, 0.00044826234807260334, 0.0019653320778161287, 0.0005072712665423751, 0.0007041652570478618, 3.5508539440343156e-05, 0.0012535881251096725, 0.0003488771035335958, 0.0021088134963065386, 0.0003761408443097025, 0.042449068278074265, 0.011676350608468056, 0.22454817593097687, 0.007756461389362812, 0.04674091562628746, 0.07641377300024033, 0.11332513391971588, 0.00811771024018526, 0.3667961657047272, 0.025981392711400986, 0.0631062388420105, 0.0019248025491833687], [0.09063845127820969, 0.0015551097458228469, 2.4992588805616833e-05, 9.400198905495927e-05, 8.336609607795253e-05, 0.00018988580268342048, 2.4508954084012657e-05, 8.056204387685284e-05, 4.900400745100342e-05, 0.0009271932649426162, 2.5439507226110436e-05, 0.05333951115608215, 0.007403047289699316, 0.8295702934265137, 0.000554086291231215, 0.00030336601776070893, 5.980403511784971e-05, 0.0010111125884577632, 0.00025444108177907765, 0.0046035354025661945, 0.0006642754306085408, 0.0037932402919977903, 3.583551733754575e-05, 0.004714973736554384]], [[0.0021136461291462183, 0.002988284220919013, 0.032925352454185486, 
0.022873414680361748, 0.007756990846246481, 0.0028202396351844072, 0.003961903974413872, 0.004156001377850771, 0.018992707133293152, 0.017114678397774696, 0.09364162385463715, 0.021960750222206116, 0.09346505254507065, 0.02572663500905037, 0.20365332067012787, 0.03471294417977333, 0.015118729323148727, 0.005207811947911978, 0.014162290841341019, 0.019866278395056725, 0.09335251152515411, 0.03167426958680153, 0.1940552145242691, 0.037699371576309204], [0.004150604363530874, 0.00540083646774292, 0.03168042376637459, 0.01523976493626833, 0.0033863778226077557, 0.003612963017076254, 0.00216039945371449, 0.002309757051989436, 0.010030004195868969, 0.012075409293174744, 0.05464637279510498, 0.008665064349770546, 0.028937475755810738, 0.012041805312037468, 0.17644168436527252, 0.03757474571466446, 0.012134668417274952, 0.013765186071395874, 0.01409020833671093, 0.023534651845693588, 0.1378127783536911, 0.04150449112057686, 0.30315732955932617, 0.0456470288336277], [0.031543366611003876, 0.022446973249316216, 0.04466523230075836, 0.045476749539375305, 0.1046493798494339, 0.04129577800631523, 0.030514556914567947, 0.23876164853572845, 0.06730510294437408, 0.07422970980405807, 0.03437727317214012, 0.038215991109609604, 0.005438406951725483, 0.04889579862356186, 0.008485004305839539, 0.012955860234797001, 0.0238680187612772, 0.0035407058894634247, 0.005583848338574171, 0.03294616565108299, 0.010760230012238026, 0.02182379551231861, 0.026817748323082924, 0.025402570143342018], [0.007580237928777933, 0.006456418894231319, 0.13886581361293793, 0.03641406446695328, 0.03675216808915138, 0.016284247860312462, 0.034295253455638885, 0.017942169681191444, 0.024346793070435524, 0.026687750592827797, 0.08414284884929657, 0.02826463244855404, 0.24852901697158813, 0.025498565286397934, 0.06682208180427551, 0.02002994902431965, 0.014386506751179695, 0.008578785695135593, 0.01854141242802143, 0.010941174812614918, 0.019054580479860306, 0.023506468161940575, 0.05538921430706978, 
0.030689852312207222], [0.09036575257778168, 0.040403105318546295, 0.02651963196694851, 0.04001658782362938, 0.1414063423871994, 0.1041075736284256, 0.04488556832075119, 0.12214567512273788, 0.016601046547293663, 0.025419706478714943, 0.0039741965010762215, 0.04169802367687225, 0.00159139942843467, 0.014241543598473072, 0.002276528626680374, 0.019044261425733566, 0.04858070984482765, 0.05043482035398483, 0.01284183282405138, 0.03937778249382973, 0.0071028308011591434, 0.017455516383051872, 0.006111228838562965, 0.08339832723140717], [0.04265666753053665, 0.01916866935789585, 0.13033214211463928, 0.06325098872184753, 0.08273515850305557, 0.01111103966832161, 0.05449717491865158, 0.018348582088947296, 0.08559895306825638, 0.11805381625890732, 0.16767916083335876, 0.02255568839609623, 0.035701874643564224, 0.005597521085292101, 0.008043980225920677, 0.013591292314231396, 0.012281935662031174, 0.0007924338569864631, 0.003171282121911645, 0.001237905235029757, 0.005122269503772259, 0.02546021342277527, 0.04793955758213997, 0.025071706622838974], [0.052979476749897, 0.021819930523633957, 0.039100874215364456, 0.09437921643257141, 0.04486098513007164, 0.12232274562120438, 0.029241913929581642, 0.18777483701705933, 0.07173532992601395, 0.03076677955687046, 0.05007406324148178, 0.09121440351009369, 0.011305263265967369, 0.037740595638751984, 0.0034136937465518713, 0.0464450977742672, 0.009363563731312752, 0.011192007921636105, 0.001884580822661519, 0.01075300294905901, 0.0017762825591489673, 0.0030837547965347767, 0.008451717905700207, 0.01831991598010063], [0.01809617131948471, 0.01758408732712269, 0.046983007341623306, 0.020785044878721237, 0.025492260232567787, 0.024572528898715973, 0.11827555298805237, 0.01414166297763586, 0.1272071748971939, 0.00809897668659687, 0.1893625110387802, 0.005404463969171047, 0.16651944816112518, 0.004615538753569126, 0.039034515619277954, 0.01035357266664505, 0.01716216653585434, 0.015296288765966892, 0.055481210350990295, 
0.0047714198008179665, 0.020776746794581413, 0.0033124592155218124, 0.043560873717069626, 0.003112317994236946], [0.13339824974536896, 0.05702386423945427, 0.02928660809993744, 0.014490542002022266, 0.019522711634635925, 0.120264932513237, 0.1862880438566208, 0.0581732876598835, 0.039071619510650635, 0.13720059394836426, 0.028699588030576706, 0.09925900399684906, 0.0036751290317624807, 0.03517846390604973, 0.0018173534190282226, 0.008368426002562046, 0.0016804076731204987, 0.004969585686922073, 0.00432357843965292, 0.0008300545159727335, 0.00020694978593382984, 0.004754228517413139, 0.001104383496567607, 0.01041238009929657], [0.011297888122498989, 0.010235181078314781, 0.011160019785165787, 0.01449589803814888, 0.010010254569351673, 0.01956671103835106, 0.012843924574553967, 0.008543608710169792, 0.03900843486189842, 0.02296292595565319, 0.48715847730636597, 0.022365573793649673, 0.18801386654376984, 0.016178611665964127, 0.022384928539395332, 0.01798255927860737, 0.007018213625997305, 0.0046722921542823315, 0.004311813041567802, 0.0030027288012206554, 0.0024882035795599222, 0.004580818582326174, 0.057101137936115265, 0.0026158166583627462], [0.0577114075422287, 0.07110509276390076, 0.005019864533096552, 0.027177462354302406, 0.02197405882179737, 0.05743851140141487, 0.004293438978493214, 0.0198308527469635, 0.008210803382098675, 0.013754274696111679, 0.0018840611446648836, 0.11978702992200851, 0.0016444469802081585, 0.06576340645551682, 0.005624646786600351, 0.17465461790561676, 0.04216117039322853, 0.14996586740016937, 0.010060467757284641, 0.05463603138923645, 0.015004276297986507, 0.01448958832770586, 0.004339604638516903, 0.05346907302737236], [0.00042760532232932746, 0.0009305818239226937, 0.004282685462385416, 0.000984028447419405, 0.00039731847937218845, 0.0005517972749657929, 0.0008728149114176631, 0.0002962338039651513, 0.004402742721140385, 0.0016940570203587413, 0.032500941306352615, 0.008011803030967712, 0.7919414639472961, 0.006298186723142862, 
0.12886668741703033, 0.0036606010980904102, 0.001129015814512968, 0.0016307588666677475, 0.0025523474905639887, 0.0004497110203374177, 0.0019194779451936483, 0.0012688511051237583, 0.004191335756331682, 0.0007389396778307855], [0.002198418602347374, 0.010037152096629143, 0.005256396718323231, 0.0027071277145296335, 0.0015555149875581264, 0.0052245487459003925, 0.0006493334076367319, 0.0027660431805998087, 0.003001241711899638, 0.026647688820958138, 0.009447921067476273, 0.0807022750377655, 0.17924153804779053, 0.4837985932826996, 0.06320872902870178, 0.05721621215343475, 0.004208456724882126, 0.021443258970975876, 0.001591197680681944, 0.010332216508686543, 0.0016712034121155739, 0.015516079030930996, 0.004352613817900419, 0.007226287387311459], [0.00010455989831825718, 0.00028545979876071215, 0.004280135501176119, 0.0017564401496201754, 0.0007122869719751179, 0.0003560276818461716, 0.0002623899490572512, 0.001323278876952827, 0.004482691176235676, 0.005200853571295738, 0.03438282385468483, 0.009172976948320866, 0.07947783917188644, 0.020085658878087997, 0.6423658132553101, 0.007965038530528545, 0.00735240476205945, 0.00640290230512619, 0.006378654856234789, 0.025911645963788033, 0.048895299434661865, 0.01696598343551159, 0.06982756406068802, 0.006051261443644762], [0.0011234243866056204, 0.006941861938685179, 0.0006707608699798584, 0.0012802818091586232, 0.003253392642363906, 0.00023747573141008615, 9.110040264204144e-05, 0.013697902671992779, 0.0016080222558230162, 0.0015607834793627262, 0.00026293963310308754, 0.0006915091071277857, 0.0006222991505637765, 0.008355814963579178, 0.011351196095347404, 0.020834824070334435, 0.04377075284719467, 0.011112842708826065, 0.0050630937330424786, 0.7730787992477417, 0.075536347925663, 0.012431232258677483, 0.004079942591488361, 0.002343336585909128], [0.0014045252464711666, 0.0037750534247606993, 0.014942878857254982, 0.008144676685333252, 0.0036769567523151636, 0.0010990055743604898, 0.0020398239139467478, 
0.002011647680774331, 0.00704388041049242, 0.003578857285901904, 0.039144884794950485, 0.006209002807736397, 0.2947479486465454, 0.010151314549148083, 0.2730383574962616, 0.023562956601381302, 0.027213478460907936, 0.01475454680621624, 0.02639785036444664, 0.028126560151576996, 0.10301335155963898, 0.016205286607146263, 0.08058922737836838, 0.009127928875386715], [0.010480429045855999, 0.02252437360584736, 0.004000888671725988, 0.00608865637332201, 0.01617387682199478, 0.003647314151749015, 0.0009218297782354057, 0.014195119962096214, 0.002039954997599125, 0.00127443578094244, 0.0002204522752435878, 0.002205274533480406, 0.0001297790731769055, 0.0015758485533297062, 0.0036413988564163446, 0.016353944316506386, 0.10015721619129181, 0.18300668895244598, 0.018960319459438324, 0.3507699966430664, 0.1538945585489273, 0.02400972880423069, 0.007643831428140402, 0.056084081530570984], [0.03563595935702324, 0.03948412835597992, 0.030267011374235153, 0.024844888597726822, 0.008293152786791325, 0.0015117926523089409, 0.0044434829615056515, 0.0023027772549539804, 0.019494790583848953, 0.05761249363422394, 0.08267589658498764, 0.014213799498975277, 0.017252560704946518, 0.00555072259157896, 0.04693342000246048, 0.029004113748669624, 0.020673375576734543, 0.0018245537066832185, 0.008263903670012951, 0.0068425871431827545, 0.08825671672821045, 0.14846059679985046, 0.2361537665128708, 0.07000350207090378], [0.008224776946008205, 0.015176767483353615, 0.008874750696122646, 0.025765851140022278, 0.004679599776864052, 0.007092641666531563, 0.0006399952690117061, 0.0065911915153265, 0.005380129907280207, 0.003326338715851307, 0.006622407119721174, 0.012989661656320095, 0.003245168598368764, 0.009663080796599388, 0.020750368013978004, 0.0640367791056633, 0.0381123311817646, 0.09339485317468643, 0.008551406674087048, 0.16256985068321228, 0.23549042642116547, 0.035378266125917435, 0.11092531681060791, 0.11251804232597351], [0.0003272095345892012, 0.0011933858040720224, 
0.002842842834070325, 0.001357415458187461, 0.0007441428606398404, 0.0002488830068614334, 0.0005814445903524756, 0.00014347593241836876, 0.0020184023305773735, 0.00019913449068553746, 0.004775781650096178, 0.0001461820356780663, 0.016629420220851898, 0.0003406460164114833, 0.051161766052246094, 0.002074373420327902, 0.013728860765695572, 0.01265005860477686, 0.040781524032354355, 0.016409769654273987, 0.682011067867279, 0.00886754784733057, 0.13704444468021393, 0.00372213963419199], [0.008589601144194603, 0.015487483702600002, 0.01956143230199814, 0.003976322244852781, 0.000870455929543823, 0.002353980438783765, 0.0009665254619903862, 0.0018898257985711098, 0.0013524387031793594, 0.0037756257224828005, 0.0033618167508393526, 0.00426032580435276, 0.0002772275765892118, 0.003242162289097905, 0.02015715278685093, 0.0052601853385567665, 0.005604222882539034, 0.020671233534812927, 0.01648329198360443, 0.042087946087121964, 0.2173278033733368, 0.12511716783046722, 0.13145893812179565, 0.34586676955223083], [0.0018693250603973866, 0.004567363299429417, 0.004914074670523405, 0.003718300722539425, 0.0032209958881139755, 0.0028413713444024324, 0.0005837274948135018, 0.0006967476801946759, 0.0020612140651792288, 0.0017503626877442002, 0.02819785289466381, 0.001061515067704022, 0.008657192811369896, 0.001812056521885097, 0.013362628407776356, 0.005693132523447275, 0.01895073615014553, 0.012725528329610825, 0.005542645696550608, 0.018699368461966515, 0.08847678452730179, 0.029704848304390907, 0.7177144289016724, 0.02317783422768116], [0.0029617231339216232, 0.0054650986567139626, 0.00992700457572937, 0.005065597128123045, 0.0014031685423105955, 0.001605594763532281, 9.819849947234616e-05, 0.002141564851626754, 0.0005937755922786891, 0.00040085488581098616, 0.00038080158992670476, 0.0014688485534861684, 1.6241809134953655e-05, 0.0003795753582380712, 0.0035043770913034678, 0.010899141430854797, 0.012991710565984249, 0.03458402678370476, 0.0028831155505031347, 0.09550722688436508, 
0.21690967679023743, 0.02774973027408123, 0.10526891052722931, 0.4577939808368683], [0.00015188301040325314, 0.00038852629950270057, 0.05285520851612091, 0.0006843184819445014, 0.000507568649481982, 0.00020150089403614402, 0.0007043493678793311, 0.00026480579981580377, 0.002738820854574442, 0.0002907540765590966, 0.032051704823970795, 0.0001992179313674569, 0.06140914186835289, 0.00010692991781979799, 0.11069408059120178, 0.00042267446406185627, 0.0025103692896664143, 0.0020746001973748207, 0.007117744535207748, 0.0025572648737579584, 0.09379583597183228, 0.009889806620776653, 0.6031408905982971, 0.015242046676576138]], [[0.042859889566898346, 0.006282312795519829, 0.06361617147922516, 0.09092382341623306, 0.08636524528265, 0.007466480601578951, 0.010711900889873505, 0.1503555029630661, 0.04068189114332199, 0.02075786143541336, 0.012053587473928928, 0.004063676111400127, 0.004482952877879143, 0.007880549877882004, 0.000998673029243946, 0.011740699410438538, 0.057593803852796555, 0.006628901232033968, 0.006772052962332964, 0.1019187867641449, 0.07989028096199036, 0.06534553319215775, 0.06630006432533264, 0.05430936813354492], [0.013743222691118717, 0.006788535974919796, 0.029733039438724518, 0.06954419612884521, 0.045283135026693344, 0.0028333987575024366, 0.0020695021376013756, 0.04296314716339111, 0.008323443122208118, 0.004675297997891903, 0.00469454750418663, 0.0017511429032310843, 0.005060224328190088, 0.0056679705157876015, 0.002060617320239544, 0.03374075889587402, 0.09786165505647659, 0.011915555223822594, 0.011767679825425148, 0.2563285231590271, 0.17232856154441833, 0.05857367068529129, 0.07128635793924332, 0.04100582376122475], [0.051721036434173584, 0.03946864232420921, 0.07870172709226608, 0.059956032782793045, 0.06234998628497124, 0.06339273601770401, 0.013814685866236687, 0.06993904709815979, 0.051706477999687195, 0.0652926117181778, 0.13851980865001678, 0.04534152150154114, 0.01503698993474245, 0.0697786957025528, 0.015931682661175728, 
0.007123459130525589, 0.01812547817826271, 0.011196715757250786, 0.0016859682509675622, 0.012174761854112148, 0.004194979555904865, 0.02659946121275425, 0.04000192880630493, 0.03794560953974724], [0.07088688760995865, 0.04791327565908432, 0.06341381371021271, 0.010049799457192421, 0.0458182767033577, 0.1299223005771637, 0.029866686090826988, 0.04336928203701973, 0.029742015525698662, 0.012842228636145592, 0.10541492700576782, 0.009700610302388668, 0.011320400983095169, 0.026971204206347466, 0.05950367823243141, 0.020693320780992508, 0.04649635776877403, 0.06764979660511017, 0.02124502696096897, 0.021867642179131508, 0.007245184388011694, 0.008812503889203072, 0.09321791678667068, 0.01603684388101101], [0.02630346082150936, 0.006311408244073391, 0.01646382547914982, 0.0006225623073987663, 0.008888212032616138, 0.01865369826555252, 0.7499819993972778, 0.016889045014977455, 0.03299817815423012, 0.006662603933364153, 0.005267977714538574, 0.004477351903915405, 0.0007246741442941129, 0.003100430592894554, 0.006100157275795937, 0.00021370234026107937, 0.003943035379052162, 0.004732129629701376, 0.07232755422592163, 0.002927028341218829, 0.003610983258113265, 0.0021665722597390413, 0.0023801338393241167, 0.004253260791301727], [0.09390994161367416, 0.022832542657852173, 0.03468043729662895, 0.015782905742526054, 0.05389072373509407, 0.015112880617380142, 0.06958504021167755, 0.27451464533805847, 0.07445745915174484, 0.029268907383084297, 0.050841256976127625, 0.015873467549681664, 0.005963586270809174, 0.027392668649554253, 0.004581579007208347, 0.009125999175012112, 0.022841302677989006, 0.006944030988961458, 0.02241477370262146, 0.06609327346086502, 0.018191542476415634, 0.015508390963077545, 0.02773444913327694, 0.02245822735130787], [0.03538723662495613, 0.009636970236897469, 0.019418831914663315, 0.0012744563864544034, 0.01819508522748947, 0.03473653644323349, 0.5064100623130798, 0.08054253458976746, 0.06884411722421646, 0.059737782925367355, 0.05381322279572487, 
0.030074311420321465, 0.0017851406009867787, 0.011168813332915306, 0.004544610623270273, 0.00028333894442766905, 0.0030421323608607054, 0.003956617321819067, 0.019229114055633545, 0.003516447963193059, 0.002128450432792306, 0.010080480948090553, 0.007096513640135527, 0.015097110532224178], [0.02931246906518936, 0.016461394727230072, 0.06102097034454346, 0.014299397356808186, 0.05629749223589897, 0.23966678977012634, 0.08285748213529587, 0.05272764340043068, 0.06432721763849258, 0.048104144632816315, 0.09782811999320984, 0.04090860113501549, 0.023148128762841225, 0.02681775763630867, 0.04041312634944916, 0.011730257421731949, 0.026035074144601822, 0.027886420488357544, 0.010726071894168854, 0.005229114554822445, 0.0024937307462096214, 0.003922092728316784, 0.011319422163069248, 0.006467131897807121], [0.029598116874694824, 0.06364427506923676, 0.037030525505542755, 0.021006153896450996, 0.0271145086735487, 0.07831902801990509, 0.04272470623254776, 0.04266934469342232, 0.0442361943423748, 0.10237792134284973, 0.03060721606016159, 0.04281429573893547, 0.045005664229393005, 0.1612820327281952, 0.08533600717782974, 0.04329927638173103, 0.017172766849398613, 0.03158118948340416, 0.016740137711167336, 0.009169184602797031, 0.004230019170790911, 0.012193933129310608, 0.0038805189542472363, 0.007966986857354641], [0.007666470482945442, 0.004831704311072826, 0.003451006021350622, 0.009366610087454319, 0.05132278800010681, 0.006779216229915619, 0.041484784334897995, 0.051698699593544006, 0.04461972415447235, 0.09313912689685822, 0.241216778755188, 0.13701069355010986, 0.07658208906650543, 0.006077161058783531, 0.005430185701698065, 0.008979156613349915, 0.029125072062015533, 0.005921595264226198, 0.019525043666362762, 0.019840171560645103, 0.015769395977258682, 0.038656849414110184, 0.050114188343286514, 0.031391434371471405], [0.011180308647453785, 0.026844829320907593, 0.016160136088728905, 0.03182080015540123, 0.01914365030825138, 0.029641486704349518, 
0.004709629341959953, 0.08340806514024734, 0.03423907980322838, 0.06027597561478615, 0.1600273996591568, 0.07084192335605621, 0.11090777814388275, 0.08057132363319397, 0.024301830679178238, 0.03104194439947605, 0.018683457747101784, 0.03221190720796585, 0.0036363438703119755, 0.05325109139084816, 0.011064568534493446, 0.03580522537231445, 0.028792692348361015, 0.02143852226436138], [0.0022211940959095955, 0.006049131043255329, 0.002718428848311305, 0.010635893791913986, 0.0258618351072073, 0.00905491691082716, 0.0012500927550718188, 0.02118590660393238, 0.00850294902920723, 0.015739377588033676, 0.29356276988983154, 0.055152345448732376, 0.20949116349220276, 0.006859992630779743, 0.018189582973718643, 0.025130512192845345, 0.036879781633615494, 0.018786855041980743, 0.0026952438056468964, 0.046288322657346725, 0.00907444953918457, 0.02953243814408779, 0.1268467903137207, 0.018289994448423386], [0.0020520102698355913, 0.023960111662745476, 0.008478586561977863, 0.003926775883883238, 0.0011953430948778987, 0.011426416225731373, 0.0004992563626728952, 0.0021054677199572325, 0.0015654634917154908, 0.005884817335754633, 0.29175880551338196, 0.037171460688114166, 0.061235107481479645, 0.07433067262172699, 0.24933667480945587, 0.032229866832494736, 0.007725434377789497, 0.08144359290599823, 0.0028571661096066236, 0.01360065583139658, 0.0037000542506575584, 0.009167155250906944, 0.06825178116559982, 0.006097313482314348], [0.0006396645330823958, 0.0013952829176560044, 0.0019776190165430307, 0.0013644041027873755, 0.0013016838347539306, 0.0008114614756777883, 0.0003613459994085133, 0.005064092576503754, 0.0021424044389277697, 0.029535740613937378, 0.09056422114372253, 0.2632073163986206, 0.04428000748157501, 0.0034199238289147615, 0.016640538349747658, 0.0028741657733917236, 0.00313587230630219, 0.007000225596129894, 0.0011111012427136302, 0.03807097673416138, 0.01955367811024189, 0.1997663974761963, 0.043365392833948135, 0.22241643071174622], [0.0004036028985865414, 
0.006900359410792589, 0.0035878741182386875, 0.004006055183708668, 0.0005462322733364999, 0.0031288473401218653, 1.8963231923407875e-05, 0.00025084675871767104, 0.0005805757828056812, 0.0030568353831768036, 0.01788618229329586, 0.08634162694215775, 0.030409177765250206, 0.007265838328748941, 0.3596791923046112, 0.0778975635766983, 0.006842981558293104, 0.07080423086881638, 0.0006605645758099854, 0.013856678269803524, 0.024888677522540092, 0.0553600899875164, 0.029890313744544983, 0.19573675096035004], [0.010177470743656158, 0.02144208736717701, 0.01836332678794861, 0.004316180013120174, 0.003732992336153984, 0.017518596723675728, 0.0014460081001743674, 0.002538552973419428, 0.002644766354933381, 0.0020457159262150526, 0.11460280418395996, 0.008873079903423786, 0.012318284250795841, 0.020561987534165382, 0.21206092834472656, 0.048129744827747345, 0.028052231296896935, 0.14735820889472961, 0.02178761549293995, 0.028350481763482094, 0.01651761867105961, 0.009284119121730328, 0.2294539213180542, 0.018423307687044144], [0.03047974593937397, 0.03180569037795067, 0.026101967319846153, 0.0025338383857160807, 0.005059561692178249, 0.016897501423954964, 0.06300143897533417, 0.004075576551258564, 0.009414706379175186, 0.0032852438744157553, 0.003514579962939024, 0.010494058020412922, 0.002807580167427659, 0.011107765138149261, 0.11342202872037888, 0.0076728262938559055, 0.021253138780593872, 0.10026367008686066, 0.29254892468452454, 0.041796743869781494, 0.09383451193571091, 0.022565679624676704, 0.015495308674871922, 0.07056796550750732], [0.016350748017430305, 0.019229162484407425, 0.009912988170981407, 0.01569514535367489, 0.011131460778415203, 0.003967576194554567, 0.003984518349170685, 0.01404054369777441, 0.00544624263420701, 0.006020871456712484, 0.0087291169911623, 0.022525833919644356, 0.00880990456789732, 0.037564076483249664, 0.018559634685516357, 0.05242867395281792, 0.034021928906440735, 0.031805843114852905, 0.044195856899023056, 0.241265207529068, 
0.16001352667808533, 0.04666180536150932, 0.04718152806162834, 0.1404578685760498], [0.014723292551934719, 0.015715166926383972, 0.012632733210921288, 0.003165224799886346, 0.004900297150015831, 0.009267483837902546, 0.030438296496868134, 0.005767431575804949, 0.006220610346645117, 0.010935725644230843, 0.009519262239336967, 0.029239024966955185, 0.0030411637853831053, 0.009746743366122246, 0.029126351699233055, 0.003644416341558099, 0.009256266988813877, 0.03786783665418625, 0.09953506290912628, 0.053777821362018585, 0.12445413321256638, 0.11938408017158508, 0.04117912799119949, 0.3164624273777008], [0.011819284409284592, 0.021158341318368912, 0.03024132363498211, 0.022169001400470734, 0.020391497761011124, 0.028947247192263603, 0.004445194732397795, 0.00563783710822463, 0.005154303275048733, 0.006394409574568272, 0.020828569307923317, 0.022685352712869644, 0.019522221758961678, 0.014155433513224125, 0.08969850093126297, 0.04540261626243591, 0.06636687368154526, 0.10749764740467072, 0.032113414257764816, 0.06815369427204132, 0.10261211544275284, 0.04764244332909584, 0.09694243222475052, 0.11002027988433838], [0.032437458634376526, 0.06353173404932022, 0.01607484370470047, 0.02923651598393917, 0.008369638584554195, 0.00700168963521719, 0.0028242687694728374, 0.005072926636785269, 0.0023241895250976086, 0.004408924840390682, 0.0005451919278129935, 0.002469704719260335, 0.002679356373846531, 0.007597628515213728, 0.018276160582900047, 0.038769714534282684, 0.02008899487555027, 0.045393358916044235, 0.03705905005335808, 0.14401422441005707, 0.21784864366054535, 0.13253989815711975, 0.013539996929466724, 0.1478959023952484], [0.00879936944693327, 0.006711674388498068, 0.0035597379319369793, 0.015038007870316505, 0.04699502885341644, 0.002339928410947323, 0.015865394845604897, 0.019395099952816963, 0.010748598724603653, 0.014503528364002705, 0.0230557918548584, 0.01797143742442131, 0.010958071798086166, 0.0015998798189684749, 0.0026878013741225004, 0.007405989337712526, 
0.04741865023970604, 0.00724219623953104, 0.034897565841674805, 0.10261973738670349, 0.15387555956840515, 0.12026935815811157, 0.14830945432186127, 0.17773213982582092], [0.01719605177640915, 0.026573682203888893, 0.012842271476984024, 0.02187386155128479, 0.008227882906794548, 0.004905550740659237, 0.0013469599653035402, 0.024046555161476135, 0.0028081329073756933, 0.0044912430457770824, 0.0029812573920935392, 0.0016943826340138912, 0.0018574161222204566, 0.0020630883518606424, 0.003803182626143098, 0.013652720488607883, 0.013651341199874878, 0.02805575169622898, 0.0071317898109555244, 0.328235924243927, 0.09239614009857178, 0.17437636852264404, 0.04164992272853851, 0.16413851082324982], [0.007971057668328285, 0.0068504223600029945, 0.0025415930431336164, 0.014560086652636528, 0.05089288204908371, 0.0013929217820987105, 0.0007907213876023889, 0.016336046159267426, 0.0019495898159220815, 0.0028411608655005693, 0.007192324381321669, 0.0007183065172284842, 0.0025400435552001, 0.00010664766887202859, 0.000497274158988148, 0.008922556415200233, 0.053378038108348846, 0.006912578828632832, 0.004357917234301567, 0.1871107965707779, 0.06150132417678833, 0.16622920334339142, 0.29907557368278503, 0.09533096849918365]], [[0.021704290062189102, 0.0233236663043499, 0.0772220715880394, 0.025060709565877914, 0.025949804112315178, 0.0198043379932642, 0.040470004081726074, 0.019073903560638428, 0.03957590460777283, 0.051320020109415054, 0.02810097485780716, 0.01302286982536316, 0.049577437341213226, 0.009791610762476921, 0.034093767404556274, 0.023012077435851097, 0.03967295214533806, 0.02091308683156967, 0.03914649039506912, 0.024995647370815277, 0.1082378700375557, 0.10789842903614044, 0.08503371477127075, 0.0729985237121582], [0.0057062553241848946, 0.011572014540433884, 0.025156723335385323, 0.007913703098893166, 0.008233794011175632, 0.0022472285199910402, 0.00730216084048152, 0.009370568208396435, 0.007043912541121244, 0.04114571586251259, 0.004434988368302584, 
0.004223243333399296, 0.031034937128424644, 0.0079448027536273, 0.04260452836751938, 0.022129172459244728, 0.02675493061542511, 0.009921291843056679, 0.03044048510491848, 0.06981151551008224, 0.16764256358146667, 0.3106946647167206, 0.0653371661901474, 0.08133362233638763], [0.012342390604317188, 0.009088404476642609, 0.006467051804065704, 0.05398313328623772, 0.018699947744607925, 0.029970407485961914, 0.01290225051343441, 0.6879133582115173, 0.01704181544482708, 0.00734704127535224, 0.02176443673670292, 0.0035308918450027704, 0.0004656361124943942, 0.003372725797817111, 0.00018418591935187578, 0.002743400866165757, 0.0026843734085559845, 0.007588669657707214, 0.00114404724445194, 0.07469536364078522, 0.0024748777505010366, 0.0033311331644654274, 0.01440601795911789, 0.005858392920345068], [0.032395608723163605, 0.01898287981748581, 0.08238934725522995, 0.0351528525352478, 0.018628524616360664, 0.058224279433488846, 0.053877949714660645, 0.020267026498913765, 0.031556304544210434, 0.1645449846982956, 0.02999786287546158, 0.013747231103479862, 0.04657864570617676, 0.017830071970820427, 0.006492555607110262, 0.021976802498102188, 0.006244645453989506, 0.03231344744563103, 0.013311096467077732, 0.01276534516364336, 0.018239067867398262, 0.17930616438388824, 0.03795376047492027, 0.04722357541322708], [0.012396235950291157, 0.013868963345885277, 0.1215081438422203, 0.031153913587331772, 0.02059590257704258, 0.021976102143526077, 0.01705247536301613, 0.2975456416606903, 0.05826593562960625, 0.030460042878985405, 0.030984262004494667, 0.005835263058543205, 0.0016551206354051828, 0.018985699862241745, 0.02268279902637005, 0.013720790855586529, 0.009073646739125252, 0.0224748682230711, 0.006514494773000479, 0.11414534598588943, 0.03815973177552223, 0.027038449421525, 0.04372388496994972, 0.020182345062494278], [0.05757546052336693, 0.024288026615977287, 0.04718494787812233, 0.17680954933166504, 0.020594069734215736, 0.10147521644830704, 0.07146133482456207, 
0.06353648006916046, 0.10396017879247665, 0.1019776314496994, 0.043933965265750885, 0.006565334741026163, 0.016809623688459396, 0.002342029707506299, 0.0005691932747140527, 0.013680808246135712, 0.0019766108598560095, 0.010310531593859196, 0.003552175359800458, 0.006275212857872248, 0.012700132094323635, 0.04248099401593208, 0.04958698898553848, 0.02035341039299965], [0.015592630952596664, 0.014174874871969223, 0.0572371706366539, 0.048568956553936005, 0.016884595155715942, 0.04135000705718994, 0.012253835797309875, 0.5926113724708557, 0.027436207979917526, 0.01168343797326088, 0.048917800188064575, 0.02597946859896183, 0.0005260768230073154, 0.02264218032360077, 0.006578949745744467, 0.011004614643752575, 0.004100647289305925, 0.0064973896369338036, 0.0010948353447020054, 0.02111884579062462, 0.0009124837815761566, 0.0013444095384329557, 0.006335427053272724, 0.005153808277100325], [0.01672264188528061, 0.004019968677312136, 0.010720392689108849, 0.0202296432107687, 0.022266829386353493, 0.02911563031375408, 0.06651382893323898, 0.017669524997472763, 0.5959060788154602, 0.020854361355304718, 0.0870412066578865, 0.01089314091950655, 0.04995420202612877, 0.0018404180882498622, 0.0014269810635596514, 0.002862216904759407, 0.010393895208835602, 0.002210721606388688, 0.006074634380638599, 0.0006145533407106996, 0.013523734174668789, 0.0016684021102264524, 0.00639099907130003, 0.001085819792933762], [0.034792449325323105, 0.032382261008024216, 0.012110300362110138, 0.04008970409631729, 0.017375150695443153, 0.0715121328830719, 0.012733113951981068, 0.2708757221698761, 0.01392008364200592, 0.038891103118658066, 0.05396268889307976, 0.2517509162425995, 0.0007617373485118151, 0.08592008054256439, 0.0018394856015220284, 0.02766435407102108, 0.0037350147031247616, 0.012276554480195045, 0.0009060453739948571, 0.0074926516972482204, 0.00014449478476308286, 0.001422496628947556, 0.0007513007149100304, 0.006690213922411203], [0.017267273738980293, 0.018413804471492767, 
0.044635266065597534, 0.018890783190727234, 0.06413257122039795, 0.03690663352608681, 0.03064383752644062, 0.01297676656395197, 0.10026510059833527, 0.11474602669477463, 0.18807926774024963, 0.010659721679985523, 0.20698192715644836, 0.007909155450761318, 0.03006492182612419, 0.0074835242703557014, 0.028391249477863312, 0.004910387564450502, 0.00624418817460537, 0.002049465896561742, 0.0029436415061354637, 0.024873819202184677, 0.018126286566257477, 0.0024044853635132313], [0.007083490956574678, 0.004329956602305174, 0.00040653892210684717, 0.0159407090395689, 0.0004711308574769646, 0.009214530698955059, 0.0002326323592569679, 0.007534967269748449, 4.839120083488524e-05, 0.000927784654777497, 0.0002495161024853587, 0.6930438280105591, 4.878683466813527e-05, 0.12515297532081604, 0.00017240179295185953, 0.05050680413842201, 0.00034050826798193157, 0.007286827079951763, 0.0001944263931363821, 0.009290007874369621, 3.1347095500677824e-05, 0.00038115191273391247, 7.426422234857455e-05, 0.06703704595565796], [0.0022105397656559944, 0.004564755130559206, 0.034645069390535355, 0.0026511463802307844, 0.006675149779766798, 0.010144881904125214, 0.016050921753048897, 0.0001945834228536114, 0.004770100116729736, 0.021916503086686134, 0.006613461300730705, 0.0030757961794734, 0.5254086256027222, 0.009479749016463757, 0.18766777217388153, 0.007410045713186264, 0.013362967409193516, 0.008045446127653122, 0.03035787120461464, 0.0007926516700536013, 0.010681310668587685, 0.06274155527353287, 0.018039951100945473, 0.012499132193624973], [0.004798348993062973, 0.022126706317067146, 0.003924276679754257, 0.00824575126171112, 0.012319901026785374, 0.0022015359718352556, 0.0007995623745955527, 0.008305400609970093, 0.00027157366275787354, 0.020662177354097366, 0.00875264871865511, 0.18696631491184235, 0.0005381878581829369, 0.29470402002334595, 0.08957555145025253, 0.07014895230531693, 0.027037713676691055, 0.007427870761603117, 0.002844019327312708, 0.029936863109469414, 
0.0005179405561648309, 0.03731447458267212, 0.004065635148435831, 0.15651459991931915], [8.642303146189079e-05, 0.0005005362909287214, 0.0014285520883277059, 7.259969424922019e-05, 0.0016664776485413313, 7.344167534029111e-05, 0.001194652752019465, 7.23005214240402e-05, 0.005566929467022419, 0.04121650382876396, 0.0008967461180873215, 0.0010157240321859717, 0.8156993389129639, 0.004148620180785656, 0.0806037187576294, 0.00032779359025880694, 0.0027037777472287416, 0.00015295484627131373, 0.0018853676738217473, 0.00013745595060754567, 0.004368285182863474, 0.033916059881448746, 0.0015586670488119125, 0.0007071804720908403], [0.0008543253061361611, 0.0070920679718256, 0.0011337966425344348, 0.0016113455640152097, 0.0028800859581679106, 0.0003160774358548224, 0.00024341754033230245, 0.028748100623488426, 0.00026956317014992237, 0.0032184922602027655, 0.000700612785294652, 0.006164837162941694, 0.0009268497815355659, 0.08670444041490555, 0.048924557864665985, 0.02030816860496998, 0.013954225927591324, 0.008010380901396275, 0.003997765947133303, 0.7046725749969482, 0.00874373596161604, 0.0238895732909441, 0.006166706793010235, 0.02046814188361168], [0.005363665986806154, 0.012651532888412476, 0.005482334177941084, 0.005145810544490814, 0.004371770191937685, 0.0014073143247514963, 0.0015279968501999974, 0.0012823338620364666, 0.00837081577628851, 0.03386329859495163, 0.025365116074681282, 0.011723698116838932, 0.2588985562324524, 0.018892668187618256, 0.21109309792518616, 0.019524287432432175, 0.01836223341524601, 0.008533746004104614, 0.009981256909668446, 0.011912677437067032, 0.06872071325778961, 0.14563079178333282, 0.07956460118293762, 0.032329726964235306], [0.0011171542573720217, 0.004385726992040873, 0.010346460156142712, 0.0026656012050807476, 0.0023896812926977873, 0.00046295017818920314, 0.0005604016478173435, 0.025816891342401505, 0.00247544189915061, 0.004036662168800831, 0.0023854428436607122, 0.0013598429504781961, 0.0006757316878065467, 
0.013388417661190033, 0.07530802488327026, 0.009564388543367386, 0.009539819322526455, 0.011715899221599102, 0.007119722198694944, 0.5008080005645752, 0.17310664057731628, 0.055598385632038116, 0.05148536339402199, 0.033687274903059006], [0.009485116228461266, 0.014977843500673771, 0.00676610367372632, 0.01612807996571064, 0.007104421500116587, 0.0026825331151485443, 0.004267412703484297, 0.006691553629934788, 0.003853593487292528, 0.015240894630551338, 0.0037489323876798153, 0.0009574603755027056, 0.0106708575040102, 0.001671296777203679, 0.006384116131812334, 0.013017524965107441, 0.015590585768222809, 0.01156421285122633, 0.02529810555279255, 0.09515238553285599, 0.23266001045703888, 0.27214449644088745, 0.16270297765731812, 0.0612395778298378], [0.0019136742921546102, 0.0077281431294977665, 0.006512163206934929, 0.005145123228430748, 0.003933256957679987, 0.0005720091285184026, 0.00041291903471574187, 0.03898221626877785, 0.0006507826619781554, 0.0009933991823345423, 0.0028679186943918467, 0.003339543007314205, 0.00021315498452167958, 0.018551718443632126, 0.0635393038392067, 0.01264908816665411, 0.025190988555550575, 0.008147290907800198, 0.007723154965788126, 0.6246691346168518, 0.05560608208179474, 0.013652213849127293, 0.05176501348614693, 0.04524173215031624], [0.0017303203931078315, 0.0018365649739280343, 0.0016093183076009154, 0.002830990357324481, 0.006037358660250902, 0.0003675214829854667, 0.0024579844903200865, 0.001170797855593264, 0.01739119179546833, 0.0019475733861327171, 0.007791437674313784, 0.001250581000931561, 0.025693532079458237, 0.0012766682775691152, 0.013804888352751732, 0.001814993447624147, 0.040760744363069534, 0.0015092339599505067, 0.02750495634973049, 0.010065369307994843, 0.7020551562309265, 0.018813621252775192, 0.09917768836021423, 0.011101479642093182], [0.0024703217204660177, 0.010278788395226002, 0.0015336504438892007, 0.005795478820800781, 0.006313040852546692, 0.0005672965198755264, 0.0004960777005180717, 
0.03132742643356323, 0.00037599928327836096, 0.0010961750522255898, 0.00220714183524251, 0.0016481638886034489, 8.317745960084721e-05, 0.004548843018710613, 0.006447071209549904, 0.01054264698177576, 0.033762942999601364, 0.00905518140643835, 0.010400882922112942, 0.6160504221916199, 0.08249720931053162, 0.033573031425476074, 0.05183568596839905, 0.0770934447646141], [0.006325852125883102, 0.015659483149647713, 0.030795611441135406, 0.01407458633184433, 0.058101069182157516, 0.0050321524031460285, 0.005206608679145575, 0.009874006733298302, 0.007359153591096401, 0.012598150409758091, 0.029609566554427147, 0.0005449445452541113, 0.008038126863539219, 0.001707566436380148, 0.025041859596967697, 0.004817666485905647, 0.09499915689229965, 0.005876859650015831, 0.01609647646546364, 0.049502499401569366, 0.062365904450416565, 0.16657042503356934, 0.3442108631134033, 0.025591399520635605], [0.0026973052881658077, 0.003697987413033843, 0.0005064199795015156, 0.01156531274318695, 0.0004366814100649208, 0.001066907192580402, 0.00010993124305969104, 0.01143745705485344, 1.641756171011366e-05, 0.0002649075468070805, 6.268157449085265e-05, 0.005990037228912115, 7.068516424624249e-06, 0.0064705731347203255, 0.0001311416708631441, 0.013194380328059196, 0.0008351169526576996, 0.006401998922228813, 0.0008270232938230038, 0.346452534198761, 0.003728601848706603, 0.010001540184020996, 0.0050940741784870625, 0.5690038800239563], [0.0011479798704385757, 0.0020133075304329395, 0.04336053505539894, 0.0017372446600347757, 0.0026701909955590963, 0.0024975345004349947, 0.006160227116197348, 0.00029103446286171675, 0.0015074779512360692, 0.004290579352527857, 0.0012736058561131358, 3.43105748470407e-05, 0.04741547256708145, 0.0002896787482313812, 0.03711638227105141, 0.0013498112093657255, 0.008381741121411324, 0.005063009448349476, 0.027809815481305122, 0.006796441040933132, 0.14233152568340302, 0.350315660238266, 0.2613556385040283, 0.044790737330913544]]], [[[0.038433387875556946, 
0.04183465614914894, 0.05290510505437851, 0.0879923552274704, 0.04568900913000107, 0.057382579892873764, 0.012037496082484722, 0.03288382664322853, 0.032084789127111435, 0.012935281731188297, 0.04292121157050133, 0.050409965217113495, 0.025489047169685364, 0.04274347424507141, 0.038659121841192245, 0.06606238335371017, 0.034908875823020935, 0.04499329999089241, 0.009262355975806713, 0.029171911999583244, 0.038327645510435104, 0.012875696644186974, 0.0759091004729271, 0.07408737391233444], [0.02453790418803692, 0.029762128368020058, 0.03713354095816612, 0.0518503300845623, 0.03514872118830681, 0.039724092930555344, 0.016425572335720062, 0.0395524725317955, 0.02982456237077713, 0.01934569515287876, 0.06797908991575241, 0.0527755506336689, 0.021149111911654472, 0.05854812636971474, 0.0407092310488224, 0.05434582754969597, 0.039336908608675, 0.056697484105825424, 0.01982031762599945, 0.04616842791438103, 0.041916538029909134, 0.02244546264410019, 0.0942845344543457, 0.06051837280392647], [0.015007571317255497, 0.014682694338262081, 0.042281314730644226, 0.0449143722653389, 0.04215385392308235, 0.02682274580001831, 0.022545045241713524, 0.05007977411150932, 0.024020014330744743, 0.0260476004332304, 0.07778126001358032, 0.07456664741039276, 0.02480851672589779, 0.04276205599308014, 0.03855908289551735, 0.058938417583703995, 0.06490394473075867, 0.04694969952106476, 0.02828521654009819, 0.045438747853040695, 0.033057939261198044, 0.027682794257998466, 0.08478358387947083, 0.04292706400156021], [0.02757500857114792, 0.028935810551047325, 0.03515055775642395, 0.02009367197751999, 0.03392984718084335, 0.027089709416031837, 0.04072395712137222, 0.053884293884038925, 0.018622778356075287, 0.014060262590646744, 0.04980131611227989, 0.03172421082854271, 0.03047914244234562, 0.04552707076072693, 0.07268799096345901, 0.02689342014491558, 0.05481394752860069, 0.0435403548181057, 0.05384722724556923, 0.07603389024734497, 0.03427693620324135, 0.02468477189540863, 0.09970526397228241, 
0.055918607860803604], [0.052018824964761734, 0.028740348294377327, 0.024672096595168114, 0.10123956203460693, 0.013940262608230114, 0.039414405822753906, 0.03215842321515083, 0.04564125835895538, 0.04193270206451416, 0.029171882197260857, 0.03708963096141815, 0.23869064450263977, 0.04203221946954727, 0.029071733355522156, 0.03477151691913605, 0.07880429923534393, 0.008534164167940617, 0.01730586588382721, 0.01085745170712471, 0.01189304981380701, 0.009239346720278263, 0.00866546668112278, 0.015185242518782616, 0.04892963916063309], [0.05556102097034454, 0.05006476864218712, 0.06027531623840332, 0.14169663190841675, 0.04096636921167374, 0.12336868792772293, 0.038591787219047546, 0.06802666187286377, 0.06513998657464981, 0.0151539146900177, 0.039442338049411774, 0.041506458073854446, 0.010480005294084549, 0.03055463545024395, 0.025152716785669327, 0.04835569113492966, 0.016837088391184807, 0.03663529455661774, 0.009265662170946598, 0.014504489488899708, 0.01494104415178299, 0.005639547482132912, 0.024301229044795036, 0.02353869378566742], [0.06050976738333702, 0.038252975791692734, 0.035857632756233215, 0.06786417961120605, 0.026014329865574837, 0.038928765803575516, 0.021842190995812416, 0.07334554940462112, 0.023953303694725037, 0.015093664638698101, 0.07327987253665924, 0.14812226593494415, 0.02027655765414238, 0.03585830330848694, 0.027239300310611725, 0.06745007634162903, 0.023907264694571495, 0.03271662816405296, 0.011632570996880531, 0.037143126130104065, 0.01041498128324747, 0.009485376998782158, 0.035028211772441864, 0.06578314304351807], [0.08539144694805145, 0.019975122064352036, 0.03677566349506378, 0.08511751890182495, 0.022451043128967285, 0.06915702670812607, 0.031046004965901375, 0.0916074886918068, 0.03676028177142143, 0.013997889123857021, 0.012889303267002106, 0.1035023108124733, 0.017355704680085182, 0.013598499819636345, 0.007930116727948189, 0.058734580874443054, 0.014477954246103764, 0.059406179934740067, 0.017503933981060982, 
0.045667052268981934, 0.027903320267796516, 0.013406183570623398, 0.012102117761969566, 0.10324320942163467], [0.02537948451936245, 0.009284360334277153, 0.07247073948383331, 0.07164701074361801, 0.03433500602841377, 0.0727045014500618, 0.08499003201723099, 0.036015283316373825, 0.1256108283996582, 0.052272047847509384, 0.03424787521362305, 0.12462019175291061, 0.055390506982803345, 0.019305016845464706, 0.06136380881071091, 0.03398917615413666, 0.01801452785730362, 0.009704777039587498, 0.013931059278547764, 0.004216340836137533, 0.009404806420207024, 0.006816569250077009, 0.0066266292706131935, 0.017659354954957962], [0.08206586539745331, 0.055205345153808594, 0.03673727437853813, 0.11418673396110535, 0.0318877138197422, 0.07043495029211044, 0.020885521546006203, 0.058259136974811554, 0.06740080565214157, 0.03271922841668129, 0.0548287034034729, 0.046662166714668274, 0.031220348551869392, 0.0497782900929451, 0.013554072007536888, 0.06853403896093369, 0.016384171321988106, 0.040817588567733765, 0.011393841356039047, 0.02284623496234417, 0.016920387744903564, 0.01552668772637844, 0.021925194188952446, 0.01982566900551319], [0.021607892587780952, 0.011293296702206135, 0.03194357827305794, 0.036171119660139084, 0.008977734483778477, 0.02077142894268036, 0.022699737921357155, 0.006948837079107761, 0.026762474328279495, 0.05143404379487038, 0.10979651659727097, 0.14700213074684143, 0.10951672494411469, 0.03108023665845394, 0.211570143699646, 0.04368278756737709, 0.011649076826870441, 0.020078260451555252, 0.01696811243891716, 0.0035280894953757524, 0.005182291846722364, 0.014204458333551884, 0.01857861876487732, 0.01855248585343361], [0.12510421872138977, 0.06854083389043808, 0.033969953656196594, 0.10298159718513489, 0.037442516535520554, 0.056041549891233444, 0.02844693697988987, 0.05353311821818352, 0.012165311723947525, 0.0060079218819737434, 0.05796497315168381, 0.009036737494170666, 0.00942592415958643, 0.02162758633494377, 0.011490345001220703, 
0.09962324798107147, 0.026394495740532875, 0.047377828508615494, 0.021579818800091743, 0.04090457037091255, 0.01197036262601614, 0.009148264303803444, 0.09233889728784561, 0.016882918775081635], [0.021346788853406906, 0.02885730005800724, 0.026468873023986816, 0.04609828442335129, 0.014557869173586369, 0.013178031891584396, 0.01835048943758011, 0.021460678428411484, 0.06299518048763275, 0.05782066285610199, 0.1155785396695137, 0.0991629958152771, 0.052137140184640884, 0.06834640353918076, 0.06524544954299927, 0.07297597825527191, 0.020253093913197517, 0.018857469782233238, 0.028049852699041367, 0.022885914891958237, 0.021977456286549568, 0.035173606127500534, 0.03799619898200035, 0.03022577613592148], [0.04353281855583191, 0.02512495405972004, 0.01115590613335371, 0.01140135619789362, 0.012433561496436596, 0.019398633390665054, 0.047323260456323624, 0.04040198400616646, 0.017459958791732788, 0.12054954469203949, 0.1212330311536789, 0.04605783522129059, 0.05087607726454735, 0.07943911850452423, 0.021971428766846657, 0.03224531561136246, 0.014891267754137516, 0.03321641683578491, 0.09213170409202576, 0.044754426926374435, 0.0056901900097727776, 0.07831190526485443, 0.017292240634560585, 0.01310708187520504], [0.007455596700310707, 0.010478267446160316, 0.01004902645945549, 0.015950195491313934, 0.023872172459959984, 0.0032766875810921192, 0.006545320153236389, 0.011920681223273277, 0.004228045232594013, 0.007923494093120098, 0.13669264316558838, 0.010296379216015339, 0.011664552614092827, 0.031544122844934464, 0.03658350184559822, 0.048692163079977036, 0.09546738117933273, 0.03174659609794617, 0.04892204701900482, 0.07954538613557816, 0.021272100508213043, 0.03208592161536217, 0.2957998812198639, 0.017987743020057678], [0.020181117579340935, 0.025432366877794266, 0.02293555624783039, 0.012621928937733173, 0.022611968219280243, 0.014942633919417858, 0.026794396340847015, 0.035293322056531906, 0.011491994373500347, 0.019012678414583206, 0.11560843884944916, 
0.024445349350571632, 0.03769669309258461, 0.0640062540769577, 0.08831078559160233, 0.023904070258140564, 0.042524874210357666, 0.04120345413684845, 0.057865384966135025, 0.07677698135375977, 0.017494607716798782, 0.03290868550539017, 0.13566194474697113, 0.03027450107038021], [0.03406285122036934, 0.027411796152591705, 0.015623618848621845, 0.06644850224256516, 0.014735586009919643, 0.017706383019685745, 0.02267177402973175, 0.030446263030171394, 0.022486234083771706, 0.031306520104408264, 0.043016158044338226, 0.15798769891262054, 0.039791420102119446, 0.03339458256959915, 0.063582643866539, 0.10198284685611725, 0.01893674023449421, 0.026179056614637375, 0.027846578508615494, 0.031060699373483658, 0.024032769724726677, 0.028540849685668945, 0.041750021278858185, 0.0789983719587326], [0.050101615488529205, 0.04634338244795799, 0.037556108087301254, 0.09863229840993881, 0.025131037458777428, 0.031276948750019073, 0.013095846399664879, 0.023248782381415367, 0.007167624309659004, 0.009212649427354336, 0.03052023984491825, 0.055749304592609406, 0.006943920161575079, 0.02267777919769287, 0.07216703146696091, 0.1016327440738678, 0.030605213716626167, 0.06241066753864288, 0.021819429472088814, 0.03573860228061676, 0.0242617130279541, 0.018266795203089714, 0.08207348734140396, 0.09336688369512558], [0.0335894376039505, 0.021187566220760345, 0.014582541771233082, 0.03211946785449982, 0.012911939062178135, 0.007834927178919315, 0.00697628827765584, 0.019807035103440285, 0.004450698383152485, 0.009186509065330029, 0.05424804612994194, 0.10971754789352417, 0.013694699853658676, 0.017971090972423553, 0.04157194867730141, 0.0834714025259018, 0.0322827585041523, 0.05271642282605171, 0.026803534477949142, 0.08490557223558426, 0.025841783732175827, 0.031531888991594315, 0.08759802579879761, 0.17499884963035583], [0.03509126231074333, 0.00837201252579689, 0.008049857802689075, 0.0394476093351841, 0.0078645134344697, 0.006119498983025551, 0.005399741232395172, 0.00865986105054617, 
0.0033452571369707584, 0.00579210976138711, 0.0051179551519453526, 0.09378658980131149, 0.014332994818687439, 0.009408257901668549, 0.018081646412611008, 0.0995158925652504, 0.019923575222492218, 0.06887614727020264, 0.0342339426279068, 0.05988972261548042, 0.06137799099087715, 0.037181489169597626, 0.026652777567505836, 0.32347923517227173], [0.010063642635941505, 0.0032683417666703463, 0.011119760572910309, 0.02576131373643875, 0.02086157165467739, 0.004574920516461134, 0.007101705763489008, 0.005455845966935158, 0.004027243237942457, 0.005581103730946779, 0.004573382902890444, 0.06758899241685867, 0.012649234384298325, 0.00580932991579175, 0.0994807779788971, 0.05128628388047218, 0.07351568341255188, 0.0222244281321764, 0.03616711124777794, 0.03007746860384941, 0.09711413830518723, 0.031943317502737045, 0.04294665530323982, 0.3268077075481415], [0.03315950557589531, 0.030378276482224464, 0.018058206886053085, 0.06927073746919632, 0.01713789626955986, 0.012272507883608341, 0.004392516799271107, 0.010312149301171303, 0.009910940192639828, 0.009298848919570446, 0.025988250970840454, 0.03972099348902702, 0.022020477801561356, 0.03455158695578575, 0.037823501974344254, 0.11618933826684952, 0.0369933620095253, 0.08091684430837631, 0.023620786145329475, 0.051482174545526505, 0.07111680507659912, 0.03462284803390503, 0.10222519189119339, 0.10853633284568787], [0.011501268483698368, 0.007589440792798996, 0.009996285662055016, 0.026708703488111496, 0.015742314979434013, 0.005680350586771965, 0.004540352616459131, 0.0025374970864504576, 0.004567746538668871, 0.012088514864444733, 0.017284443601965904, 0.06796057522296906, 0.025824978947639465, 0.01171166356652975, 0.2271391898393631, 0.05951724946498871, 0.05478040128946304, 0.04038093611598015, 0.024288518354296684, 0.015419913455843925, 0.059732161462306976, 0.048314958810806274, 0.07692625373601913, 0.16976630687713623], [0.028319278731942177, 0.019580740481615067, 0.008553486317396164, 0.033527158200740814, 
0.0182870514690876, 0.006416920106858015, 0.0054757180623710155, 0.008974305354058743, 0.001136724022217095, 0.0029714948032051325, 0.012924108654260635, 0.014219624921679497, 0.006428959313780069, 0.01644524745643139, 0.021285058930516243, 0.10236747562885284, 0.05857974290847778, 0.08198270201683044, 0.044679924845695496, 0.0874703973531723, 0.052520040422677994, 0.035911738872528076, 0.21600259840488434, 0.11593957990407944]], [[0.04249584674835205, 0.031660839915275574, 0.054013822227716446, 0.07620903849601746, 0.027012621983885765, 0.04289643093943596, 0.028217192739248276, 0.028618253767490387, 0.027916794642806053, 0.06822327524423599, 0.0036987289786338806, 0.0958256721496582, 0.02873007021844387, 0.031210174784064293, 0.02288837358355522, 0.08381431549787521, 0.020695818588137627, 0.05906542390584946, 0.022172322496771812, 0.023647576570510864, 0.034164927899837494, 0.05780690908432007, 0.006970811169594526, 0.08204471319913864], [0.05019734799861908, 0.043765559792518616, 0.05530419200658798, 0.055210184305906296, 0.031663089990615845, 0.04835769161581993, 0.04090561717748642, 0.052235089242458344, 0.022519251331686974, 0.034717001020908356, 0.013430478051304817, 0.05158042162656784, 0.02425886131823063, 0.03677418455481529, 0.03679104149341583, 0.06503748148679733, 0.03211154416203499, 0.06278326362371445, 0.04573283717036247, 0.05836515128612518, 0.02990885265171528, 0.03894836828112602, 0.015032694675028324, 0.05436989292502403], [0.05317751318216324, 0.06678517162799835, 0.021179266273975372, 0.02391956001520157, 0.13657613098621368, 0.10622584074735641, 0.04397590085864067, 0.060670435428619385, 0.15570412576198578, 0.14403797686100006, 0.013818769715726376, 0.032817624509334564, 0.0075223688036203384, 0.013428145088255405, 0.0017851360607892275, 0.007408312987536192, 0.022536974400281906, 0.01986892707645893, 0.006118181627243757, 0.005627491977065802, 0.010250277817249298, 0.029478827491402626, 0.00659931218251586, 0.010487787425518036], 
[0.07874332368373871, 0.10307619720697403, 0.026476433500647545, 0.028526196256279945, 0.010954974219202995, 0.035072218626737595, 0.041149429976940155, 0.05303596332669258, 0.0188668854534626, 0.02759126015007496, 0.017199357971549034, 0.02730926126241684, 0.03381282463669777, 0.047256406396627426, 0.05891800671815872, 0.04399774223566055, 0.010329248383641243, 0.050660375505685806, 0.06627420336008072, 0.07001485675573349, 0.03646437078714371, 0.035220105201005936, 0.052547503262758255, 0.026502888649702072], [0.03358155116438866, 0.05691727250814438, 0.0462995246052742, 0.03578784689307213, 0.014100943692028522, 0.029299091547727585, 0.022327281534671783, 0.03094031848013401, 0.011713356710970402, 0.05056552216410637, 0.009392431937158108, 0.08195710927248001, 0.07305105030536652, 0.07313474267721176, 0.09077153354883194, 0.046992331743240356, 0.01356168370693922, 0.04487696662545204, 0.02819991298019886, 0.038775451481342316, 0.017412977293133736, 0.04161752015352249, 0.022326882928609848, 0.08639664947986603], [0.012924039736390114, 0.02513110265135765, 0.06523506343364716, 0.02998489886522293, 0.08657333999872208, 0.07435134798288345, 0.11972079426050186, 0.06719162315130234, 0.1631525605916977, 0.07714424282312393, 0.016071144491434097, 0.03252715989947319, 0.04239245504140854, 0.01372119877487421, 0.011161667294800282, 0.01443537324666977, 0.021875575184822083, 0.0371912457048893, 0.02591518685221672, 0.01153385266661644, 0.01448606327176094, 0.019868938252329826, 0.006298162043094635, 0.011112930253148079], [0.016019798815250397, 0.02330908179283142, 0.06703366339206696, 0.020670020952820778, 0.3368544280529022, 0.08426913619041443, 0.08289878070354462, 0.04774363711476326, 0.08735538274049759, 0.022864297032356262, 0.0170254185795784, 0.0061533888801932335, 0.007147592958062887, 0.0038784556090831757, 0.0036744019016623497, 0.00739250099286437, 0.08491537719964981, 0.017026660963892937, 0.01806006208062172, 0.005795182194560766, 0.008137887343764305, 
0.010357270017266273, 0.01784524694085121, 0.0035723415203392506], [0.01803879253566265, 0.034235890954732895, 0.061466384679079056, 0.03770490735769272, 0.08319775760173798, 0.09234274178743362, 0.060074582695961, 0.08033871650695801, 0.1360975056886673, 0.10997392237186432, 0.020227015018463135, 0.03349102661013603, 0.028561437502503395, 0.02389082871377468, 0.00462804501876235, 0.017862658947706223, 0.019076989963650703, 0.04719923809170723, 0.016835635527968407, 0.013768588192760944, 0.014099164865911007, 0.0279941875487566, 0.007067924831062555, 0.01182608213275671], [0.041960615664720535, 0.048400651663541794, 0.11718027293682098, 0.046889424324035645, 0.09957780689001083, 0.18237486481666565, 0.025446366518735886, 0.07954929769039154, 0.05993971228599548, 0.1635473668575287, 0.009214088320732117, 0.032247237861156464, 0.005678392481058836, 0.007080935873091221, 0.0028925088699907064, 0.010099477134644985, 0.012557472102344036, 0.017521293833851814, 0.001793155213817954, 0.004347013775259256, 0.0012346256989985704, 0.019955791532993317, 0.002016063081100583, 0.008495531044900417], [0.07644039392471313, 0.03302749618887901, 0.07590791583061218, 0.04333088919520378, 0.0823131874203682, 0.05334041267633438, 0.0436866395175457, 0.04594820737838745, 0.09579189866781235, 0.034044165164232254, 0.08607013523578644, 0.03729567676782608, 0.0994587242603302, 0.026136012747883797, 0.0348595567047596, 0.027982132509350777, 0.0400991328060627, 0.009231418371200562, 0.009321450255811214, 0.007859922014176846, 0.007202763110399246, 0.007217543665319681, 0.014189491979777813, 0.009244848974049091], [0.004993354436010122, 0.014327428303658962, 0.11328468471765518, 0.013575730845332146, 0.04140152037143707, 0.01578342355787754, 0.01884959079325199, 0.007264920976012945, 0.03275405988097191, 0.020959284156560898, 0.024918831884860992, 0.08492927253246307, 0.09663143754005432, 0.1080106720328331, 0.2849775552749634, 0.02164611965417862, 0.04146788641810417, 0.0070949033834040165, 
0.009687078185379505, 0.0027595101855695248, 0.004416820593178272, 0.006309805437922478, 0.004178180359303951, 0.01977800391614437], [0.07913578301668167, 0.050526782870292664, 0.028114158660173416, 0.040289707481861115, 0.014210410416126251, 0.011983279138803482, 0.008756151422858238, 0.0050375028513371944, 0.00379951111972332, 0.0085841603577137, 0.04855971038341522, 0.048318758606910706, 0.03731384128332138, 0.11856330186128616, 0.32862308621406555, 0.06783673912286758, 0.018854491412639618, 0.004644942935556173, 0.008188934065401554, 0.004139733500778675, 0.00259777856990695, 0.005160707980394363, 0.034218680113554, 0.022541841492056847], [0.1805901825428009, 0.020707610994577408, 0.02396503835916519, 0.006417575292289257, 0.009593632072210312, 0.008394182659685612, 0.005308043211698532, 0.033108070492744446, 0.009974492713809013, 0.0042706504464149475, 0.23704928159713745, 0.00835676584392786, 0.013124971650540829, 0.022248080000281334, 0.06430362910032272, 0.009711864404380322, 0.02903592959046364, 0.002929197857156396, 0.010631727054715157, 0.06130755692720413, 0.02204253152012825, 0.007080730516463518, 0.20368389785289764, 0.00616435008123517], [0.013307802379131317, 0.02025175467133522, 0.05154961347579956, 0.01443421933799982, 0.011634445749223232, 0.009635509923100471, 0.018368249759078026, 0.01320159062743187, 0.014250644482672215, 0.003817040706053376, 0.13279679417610168, 0.024350708350539207, 0.033236730843782425, 0.0912819430232048, 0.2962729334831238, 0.020484600216150284, 0.02046206220984459, 0.00582391070201993, 0.03654071316123009, 0.021167442202568054, 0.016927633434534073, 0.0038160141557455063, 0.11269273608922958, 0.013694864697754383], [0.029784586280584335, 0.043542053550481796, 0.004683761857450008, 0.025417812168598175, 0.015410060063004494, 0.006392465904355049, 0.011952115222811699, 0.004652069881558418, 0.005350378807634115, 0.012823463417589664, 0.011675295419991016, 0.08051648736000061, 0.024864720180630684, 0.1525198221206665, 
0.04980921372771263, 0.08482684940099716, 0.05833293870091438, 0.013538489118218422, 0.07669351994991302, 0.026255369186401367, 0.05247364193201065, 0.04096939414739609, 0.032842133194208145, 0.13467341661453247], [0.042898524552583694, 0.03202761337161064, 0.006583633832633495, 0.008072343654930592, 0.0021378262899816036, 0.006717498414218426, 0.027096716687083244, 0.020567147061228752, 0.0026578172110021114, 0.0021502571180462837, 0.02984018623828888, 0.006368034984916449, 0.01788255013525486, 0.03338218852877617, 0.1350485384464264, 0.021897874772548676, 0.006709657143801451, 0.016936346888542175, 0.19999782741069794, 0.13443177938461304, 0.04439249262213707, 0.00966772809624672, 0.18040207028388977, 0.012133387848734856], [0.017620081081986427, 0.03290070593357086, 0.011003485880792141, 0.024647526443004608, 0.006123825907707214, 0.008233848959207535, 0.010711810551583767, 0.008143564686179161, 0.0031776006799191236, 0.01699722930788994, 0.005408968310803175, 0.05811062827706337, 0.06126909703016281, 0.09142837673425674, 0.1476653516292572, 0.06645923852920532, 0.014880720525979996, 0.034955184906721115, 0.049394089728593826, 0.046485889703035355, 0.03658623993396759, 0.04624263569712639, 0.03898105025291443, 0.16257287561893463], [0.042675845324993134, 0.03494768589735031, 0.017587583512067795, 0.022135788574814796, 0.05192575976252556, 0.05569393187761307, 0.0808505266904831, 0.07667329162359238, 0.027900321409106255, 0.029676461592316628, 0.014243981800973415, 0.019781148061156273, 0.022760622203350067, 0.01601097732782364, 0.016983961686491966, 0.019403262063860893, 0.0359511561691761, 0.08107110857963562, 0.0910993367433548, 0.07668791711330414, 0.05131987854838371, 0.04687478020787239, 0.034905415028333664, 0.03283925727009773], [0.014982725493609905, 0.018600845709443092, 0.016567157581448555, 0.024342410266399384, 0.1420617401599884, 0.027490252628922462, 0.07489792257547379, 0.016457851976156235, 0.012889614328742027, 0.007313932757824659, 
0.00933042261749506, 0.009107018820941448, 0.012532481923699379, 0.010665356181561947, 0.025890573859214783, 0.031463902443647385, 0.1696905791759491, 0.03910861164331436, 0.14326900243759155, 0.024892667308449745, 0.05257606878876686, 0.023878589272499084, 0.061767760664224625, 0.03022257797420025], [0.012563243508338928, 0.02290443703532219, 0.019862236455082893, 0.028003768995404243, 0.032050564885139465, 0.022083785384893417, 0.04821416363120079, 0.03260159492492676, 0.026938321068882942, 0.02787345089018345, 0.018850678578019142, 0.039601411670446396, 0.05444124713540077, 0.05680706351995468, 0.04041863977909088, 0.04406857118010521, 0.03704638406634331, 0.061447639018297195, 0.09646109491586685, 0.057463809847831726, 0.08086485415697098, 0.0430510975420475, 0.02687898278236389, 0.06950289756059647], [0.016983818262815475, 0.02664332464337349, 0.018238645046949387, 0.034143995493650436, 0.038385868072509766, 0.03882782161235809, 0.009711535647511482, 0.013963142409920692, 0.004123352002352476, 0.053350985050201416, 0.0012216028990224004, 0.041797734797000885, 0.005708286073058844, 0.012014021165668964, 0.01708417572081089, 0.045875828713178635, 0.03761788085103035, 0.10486147552728653, 0.017692571505904198, 0.027211882174015045, 0.02705829031765461, 0.1620563417673111, 0.010643345303833485, 0.2347840815782547], [0.037761982530355453, 0.02162407711148262, 0.023029200732707977, 0.030205918475985527, 0.037023257464170456, 0.0197892002761364, 0.024061327800154686, 0.0191760566085577, 0.014428915455937386, 0.01133142039179802, 0.018514294177293777, 0.031117092818021774, 0.09527626633644104, 0.03783489763736725, 0.1277463436126709, 0.07834924012422562, 0.0771045908331871, 0.03551270440220833, 0.045123662799596786, 0.039350476115942, 0.050650715827941895, 0.02150684967637062, 0.03212409093976021, 0.0713573470711708], [0.003130316035822034, 0.009889038279652596, 0.01502725388854742, 0.012808425351977348, 0.01709035038948059, 0.007352799642831087, 0.00983762089163065, 
0.0017723854398354888, 0.0035952148027718067, 0.010876821354031563, 0.001071428065188229, 0.08825332671403885, 0.04671673849225044, 0.07130128145217896, 0.2254471480846405, 0.07283990830183029, 0.04719280079007149, 0.04087791219353676, 0.04157242551445961, 0.006970960646867752, 0.029633669182658195, 0.029519475996494293, 0.0038532784674316645, 0.2033693939447403], [0.13005225360393524, 0.022265534847974777, 0.005888450425118208, 0.014984015375375748, 0.0045318081974983215, 0.0037527577951550484, 0.004264052025973797, 0.0024443715810775757, 0.0005646580830216408, 0.004076873883605003, 0.012990075163543224, 0.030645716935396194, 0.01841093599796295, 0.058351851999759674, 0.4167317748069763, 0.056607600301504135, 0.01763024739921093, 0.006685169879347086, 0.015251360833644867, 0.010777798481285572, 0.007603948470205069, 0.013644766993820667, 0.06810739636421204, 0.07373663038015366]], [[0.02462169900536537, 0.01886291801929474, 0.043713610619306564, 0.03295610100030899, 0.021672677248716354, 0.0188464168459177, 0.0071797496639192104, 0.03615543618798256, 0.09093998372554779, 0.0179157517850399, 0.0230553075671196, 0.007005664519965649, 0.04800724238157272, 0.0072725145146250725, 0.03586731478571892, 0.018612373620271683, 0.021738708019256592, 0.026152826845645905, 0.009577475488185883, 0.05399328097701073, 0.34202995896339417, 0.02888905443251133, 0.04781324416399002, 0.01712067984044552], [0.02504800446331501, 0.02095261588692665, 0.033041562885046005, 0.03331539034843445, 0.020287610590457916, 0.019576529040932655, 0.028137067332863808, 0.0410480760037899, 0.054761871695518494, 0.040807146579027176, 0.02408541925251484, 0.010668735951185226, 0.05724484473466873, 0.007438927423208952, 0.02712762914597988, 0.02153252810239792, 0.02503262460231781, 0.03041432611644268, 0.042565830051898956, 0.0700751468539238, 0.2285769134759903, 0.07394269108772278, 0.040603406727313995, 0.02371508628129959], [0.008029816672205925, 0.007529743481427431, 0.034140147268772125, 
0.028082525357604027, 0.03110077790915966, 0.017614291980862617, 0.005146279465407133, 0.04301757365465164, 0.33628472685813904, 0.030675671994686127, 0.153474822640419, 0.035500720143318176, 0.028323454782366753, 0.033143769949674606, 0.02275005728006363, 0.01706075109541416, 0.014971661381423473, 0.008531337603926659, 0.0012000147253274918, 0.015217266976833344, 0.04026510566473007, 0.011842912063002586, 0.0635145902633667, 0.01258193701505661], [0.0016701745335012674, 0.0014209412038326263, 0.02757103368639946, 0.004568610340356827, 0.03665262833237648, 0.005923383869230747, 0.3698309659957886, 0.010379468090832233, 0.12425214797258377, 0.007620836142450571, 0.01535100769251585, 0.0034499166067689657, 0.0367719940841198, 0.008848464116454124, 0.01903228834271431, 0.0033960125874727964, 0.02191445603966713, 0.00588342547416687, 0.2142130732536316, 0.0077970316633582115, 0.05839109793305397, 0.006588964257389307, 0.005321971140801907, 0.00315005867742002], [0.00014289790124166757, 8.900818647816777e-05, 0.0020788589026778936, 0.0011585751781240106, 0.006687304005026817, 0.0033659820910543203, 0.516063392162323, 0.001238869153894484, 0.002944100648164749, 0.0002292950957780704, 0.000704650825355202, 0.0010072842705994844, 0.0003848130872938782, 0.000847014831379056, 0.002828867407515645, 0.0014991533244028687, 0.010792911052703857, 0.004927773028612137, 0.4398808777332306, 0.0009294701158069074, 0.0009846081957221031, 0.00018048756464850157, 0.00015003060980234295, 0.0008838233770802617], [0.009543726220726967, 0.005051007494330406, 0.06498772650957108, 0.020794706419110298, 0.061625074595212936, 0.018258456140756607, 0.07169828563928604, 0.034515541046857834, 0.26532912254333496, 0.018610116094350815, 0.02627730555832386, 0.009876220487058163, 0.09381340444087982, 0.015512063167989254, 0.03326866775751114, 0.011799508705735207, 0.0387873649597168, 0.011682789772748947, 0.036336831748485565, 0.01876908726990223, 0.10287392884492874, 0.012973408214747906, 
0.009414478205144405, 0.008201248943805695], [0.0418986938893795, 0.02183806151151657, 0.014266313053667545, 0.009683571755886078, 0.048490606248378754, 0.01670221798121929, 0.04638371244072914, 0.24726156890392303, 0.0864700973033905, 0.11623642593622208, 0.03687899187207222, 0.016881274059414864, 0.03163524344563484, 0.006738521158695221, 0.007198092993348837, 0.00476369634270668, 0.026919540017843246, 0.0059156776405870914, 0.013305263593792915, 0.08488854020833969, 0.022220898419618607, 0.07407993823289871, 0.009313568472862244, 0.01002939511090517], [0.013077206909656525, 0.01841646619141102, 0.021644912660121918, 0.09254217892885208, 0.025220166891813278, 0.03168942779302597, 0.044030290096998215, 0.012688055634498596, 0.22395674884319305, 0.04381967708468437, 0.08326885849237442, 0.032703232020139694, 0.13428030908107758, 0.032079312950372696, 0.010342626832425594, 0.05441420525312424, 0.011990484781563282, 0.011718235909938812, 0.015148065984249115, 0.00438434025272727, 0.030909767374396324, 0.015009863302111626, 0.023724637925624847, 0.012940945103764534], [0.01111113466322422, 0.0052984319627285, 0.024343159049749374, 0.030138570815324783, 0.027810268104076385, 0.050173234194517136, 0.011081482283771038, 0.025103017687797546, 0.6071833372116089, 0.016620825976133347, 0.07732585072517395, 0.030924588441848755, 0.01501277182251215, 0.020845282822847366, 0.003198879072442651, 0.010910611599683762, 0.0057007367722690105, 0.005721624940633774, 0.0008449516026303172, 0.0019911127164959908, 0.008403324522078037, 0.001362473121844232, 0.0062974588945508, 0.002596959937363863], [0.0023525909055024385, 0.006320231594145298, 0.043020691722631454, 0.05060604214668274, 0.011053246445953846, 0.00458364374935627, 0.0030071537476032972, 0.006435462273657322, 0.19739696383476257, 0.045926228165626526, 0.1442742645740509, 0.019644780084490776, 0.26806917786598206, 0.03278299793601036, 0.013882538303732872, 0.03507773205637932, 0.004539555869996548, 0.003684081370010972, 
0.001340076676569879, 0.004662921652197838, 0.029937321320176125, 0.02369852550327778, 0.038171492516994476, 0.009532270953059196], [0.0005882413825020194, 0.0010555617045611143, 0.0387028269469738, 0.0077195256017148495, 0.01860736683011055, 0.008976045064628124, 0.0014858284266665578, 0.0011947897728532553, 0.0927366316318512, 0.010303517803549767, 0.28480973839759827, 0.032785799354314804, 0.08270585536956787, 0.03862423077225685, 0.18995334208011627, 0.007220678962767124, 0.018100133165717125, 0.009510902687907219, 0.0009278027573600411, 0.0008795844623818994, 0.021740421652793884, 0.004108353052288294, 0.1177595853805542, 0.009503327310085297], [0.0011430132435634732, 0.0034725635778158903, 0.01789856143295765, 0.03641463443636894, 0.005812505725771189, 0.000634564203210175, 0.0021413788199424744, 0.0050646155141294, 0.07568546384572983, 0.013487213291227818, 0.02467365749180317, 0.0033009429462254047, 0.37785130739212036, 0.006856189575046301, 0.011486886069178581, 0.026036549359560013, 0.004848510026931763, 0.0014407645212486386, 0.006674507632851601, 0.020797867327928543, 0.2664334177970886, 0.037875425070524216, 0.038673967123031616, 0.011295545846223831], [0.0020181091967970133, 0.006373101379722357, 0.02911558747291565, 0.011715099215507507, 0.0203179232776165, 0.011342553421854973, 0.01835539937019348, 0.006727338768541813, 0.0275847427546978, 0.022346651181578636, 0.21781325340270996, 0.036387041211128235, 0.035422515124082565, 0.017795929685235023, 0.05942718684673309, 0.019739389419555664, 0.03514343127608299, 0.017342902719974518, 0.023613063618540764, 0.015569150447845459, 0.026208976283669472, 0.026049280539155006, 0.2669489085674286, 0.04664240777492523], [0.00039855114300735295, 0.0021551030222326517, 0.019265906885266304, 0.010160134173929691, 0.002414856804534793, 0.0005545725580304861, 0.0004969750880263746, 0.0020645272452384233, 0.04002534970641136, 0.0029500790406018496, 0.02301042154431343, 0.0016292660729959607, 0.21069958806037903, 
0.001850239234045148, 0.05459299683570862, 0.007170674856752157, 0.004804076161235571, 0.003084691008552909, 0.0033131279051303864, 0.01458146795630455, 0.4715658724308014, 0.009338540025055408, 0.10670052468776703, 0.0071724397130310535], [0.0001924668758874759, 0.0008582810405641794, 0.0066020069643855095, 0.0010811786632984877, 0.0007963533280417323, 0.0009004500461742282, 0.00016529551066923887, 0.0001882581418612972, 0.0033047455362975597, 0.0006906508933752775, 0.018190359696745872, 0.0011057055089622736, 0.0006040785810910165, 0.0002879881067201495, 0.0428297184407711, 0.001444710767827928, 0.006142196711152792, 0.0067014568485319614, 0.0021423054859042168, 0.0029806471429765224, 0.19561642408370972, 0.008612952195107937, 0.6818765997886658, 0.01668516732752323], [0.00019334237731527537, 0.00037465282366611063, 0.00741259939968586, 0.0009258873178623617, 0.0032755834981799126, 0.0005301363416947424, 0.10560929775238037, 0.0007780796731822193, 0.0028804372996091843, 0.0005901906406506896, 0.0018725816626101732, 0.0004882304056081921, 0.005980458110570908, 0.0010383299086242914, 0.03793039172887802, 0.0015046042390167713, 0.013104463927447796, 0.0037736985832452774, 0.7471193671226501, 0.0053823357447981834, 0.0483427420258522, 0.0028140246868133545, 0.005575883202254772, 0.0025027571246027946], [9.908462379826233e-05, 7.578729855595157e-05, 0.0012351353652775288, 0.001028357190079987, 0.002618124010041356, 0.0017284578643739223, 0.19690518081188202, 0.00045442962436936796, 0.0004631512856576592, 8.183322643162683e-05, 0.0002106379542965442, 0.0005632165702991188, 0.00012218316260259598, 0.00032679346622899175, 0.0034762092400342226, 0.002138067502528429, 0.011796511709690094, 0.0069698188453912735, 0.7631443738937378, 0.0014237426221370697, 0.0020699326414614916, 0.0002487713354639709, 0.00032345380168408155, 0.0024967200588434935], [0.007198461331427097, 0.005351320840418339, 0.02505887858569622, 0.06114060431718826, 0.025785841047763824, 
0.003489506198093295, 0.007941817864775658, 0.007056300528347492, 0.019818836823105812, 0.006267360877245665, 0.004850719124078751, 0.011357764713466167, 0.05934133753180504, 0.006241450551897287, 0.027840662747621536, 0.08416616916656494, 0.04590394347906113, 0.009248136542737484, 0.03873637691140175, 0.036924563348293304, 0.3430878520011902, 0.03127317875623703, 0.03902439773082733, 0.09289449453353882], [0.03444593772292137, 0.022036392241716385, 0.00575067475438118, 0.00874460767954588, 0.009212058037519455, 0.003909852355718613, 0.0034825210459530354, 0.05512068420648575, 0.004804224241524935, 0.024218715727329254, 0.0031952778808772564, 0.006329005118459463, 0.0129753602668643, 0.0008900582324713469, 0.008825668133795261, 0.007521355990320444, 0.023844854906201363, 0.011391707696020603, 0.014624842442572117, 0.2668209671974182, 0.16457240283489227, 0.1958668977022171, 0.03348958492279053, 0.07792635262012482], [0.012055601924657822, 0.021468807011842728, 0.011872755363583565, 0.08993258327245712, 0.00559795368462801, 0.008451626636087894, 0.003655450651422143, 0.0026545156724750996, 0.013789522461593151, 0.009628134779632092, 0.011343402788043022, 0.017770668491721153, 0.05162951350212097, 0.0051052505150437355, 0.017626700922846794, 0.11213050782680511, 0.012809054926037788, 0.02489333041012287, 0.01685100421309471, 0.013276916928589344, 0.22806720435619354, 0.04057873785495758, 0.1414594203233719, 0.12735137343406677], [0.060870520770549774, 0.020201317965984344, 0.016217775642871857, 0.0668175220489502, 0.007140820845961571, 0.022891022264957428, 0.0027221590280532837, 0.022807905450463295, 0.034758374094963074, 0.006929936818778515, 0.0026232681702822447, 0.010467380285263062, 0.006300975568592548, 0.001208108034916222, 0.0030090545769780874, 0.03409142419695854, 0.007182532921433449, 0.04346632584929466, 0.00468543590977788, 0.04567250609397888, 0.38673433661460876, 0.022886687889695168, 0.04304235801100731, 0.12727221846580505], [0.0028494184371083975, 
0.007527183275669813, 0.036226753145456314, 0.05793242156505585, 0.0057168821804225445, 0.0030955730471760035, 0.0006543145864270627, 0.0028034879360347986, 0.033308807760477066, 0.017516333609819412, 0.03140060231089592, 0.014195962809026241, 0.10309451818466187, 0.008347469381988049, 0.03185323253273964, 0.06413343548774719, 0.008583114482462406, 0.011845313012599945, 0.0017688983352854848, 0.013696987181901932, 0.2006637454032898, 0.07003369182348251, 0.1771489828824997, 0.09560286998748779], [0.00531899556517601, 0.00396511796861887, 0.03491930663585663, 0.026821492239832878, 0.009643152356147766, 0.009483261965215206, 0.004357850644737482, 0.0051401215605437756, 0.01699434034526348, 0.009271005168557167, 0.0178383756428957, 0.012635039165616035, 0.0303749181330204, 0.0037741579581052065, 0.07350562512874603, 0.02031133882701397, 0.020573675632476807, 0.059335947036743164, 0.012946484610438347, 0.021101264283061028, 0.27998843789100647, 0.042568810284137726, 0.14735932648181915, 0.13177193701267242], [0.0013178755762055516, 0.002343775937333703, 0.005491797812283039, 0.00959777645766735, 0.0007458992768079042, 0.00029965947032906115, 0.0004736982809845358, 0.0028397757560014725, 0.00366968777962029, 0.003695620456710458, 0.0005853187758475542, 0.0004816422879230231, 0.05433512479066849, 0.000377866585040465, 0.00470565864816308, 0.006763736251741648, 0.0019128229469060898, 0.0041965763084590435, 0.006521447561681271, 0.05676863342523575, 0.6885151863098145, 0.08426922559738159, 0.01602848432958126, 0.04406280443072319]], [[0.032944489270448685, 0.02229538932442665, 0.022867832332849503, 0.03778048977255821, 0.03007870353758335, 0.04138912260532379, 0.025314899161458015, 0.04256277158856392, 0.04170431196689606, 0.03915306180715561, 0.03488868847489357, 0.08504946529865265, 0.055940527468919754, 0.1562100350856781, 0.02758907340466976, 0.03183644264936447, 0.02034926787018776, 0.03476913273334503, 0.020136326551437378, 0.03758639842271805, 0.03532163426280022, 
0.025035185739398003, 0.020107451826334, 0.07908939570188522], [0.0254196934401989, 0.019546115770936012, 0.029149776324629784, 0.039961207658052444, 0.029247421771287918, 0.052394166588783264, 0.027100957930088043, 0.03272029012441635, 0.07064449042081833, 0.03180692717432976, 0.03094499185681343, 0.04081980511546135, 0.06330835074186325, 0.084371417760849, 0.044943373650312424, 0.040812063962221146, 0.022608255967497826, 0.03809429332613945, 0.0259696077555418, 0.040139563381671906, 0.09147463738918304, 0.02938893437385559, 0.021862691268324852, 0.06727102398872375], [0.01028116513043642, 0.011005591601133347, 0.024532627314329147, 0.0299916360527277, 0.022788669914007187, 0.01797953061759472, 0.01366912480443716, 0.02404072694480419, 0.05384565144777298, 0.018264099955558777, 0.09425924718379974, 0.058878831565380096, 0.21216318011283875, 0.11719533801078796, 0.08637341856956482, 0.02702604979276657, 0.02445848099887371, 0.01574917696416378, 0.014274044893682003, 0.020937826484441757, 0.037873174995183945, 0.00869604293256998, 0.03924514353275299, 0.016471244394779205], [0.008309615775942802, 0.004843702539801598, 0.01637743040919304, 0.013553502969443798, 0.03390525281429291, 0.024401821196079254, 0.016234109178185463, 0.06712280213832855, 0.08273720741271973, 0.01969584822654724, 0.015521646477282047, 0.06252551823854446, 0.24635237455368042, 0.11380660533905029, 0.02322368137538433, 0.02638382837176323, 0.018156128004193306, 0.014198643155395985, 0.011452638544142246, 0.07747172564268112, 0.05798026919364929, 0.007459691260010004, 0.009102080017328262, 0.029183849692344666], [0.03852110728621483, 0.0142647260800004, 0.033668797463178635, 0.029013561084866524, 0.020429793745279312, 0.017224475741386414, 0.052656713873147964, 0.056640222668647766, 0.05433760583400726, 0.012023097835481167, 0.019527001306414604, 0.056695736944675446, 0.14060531556606293, 0.0476573184132576, 0.0672801285982132, 0.059663690626621246, 0.019207358360290527, 0.01305948756635189, 
0.044667430222034454, 0.0720784068107605, 0.07365665584802628, 0.008144734427332878, 0.01697392761707306, 0.03200269863009453], [0.026577485725283623, 0.019513418897986412, 0.03499932959675789, 0.052401188760995865, 0.02022610604763031, 0.026656201109290123, 0.04210612177848816, 0.03857093304395676, 0.049406226724386215, 0.027746470645070076, 0.0966871827840805, 0.08084385842084885, 0.1122761219739914, 0.10041294991970062, 0.047514066100120544, 0.04583340510725975, 0.016270458698272705, 0.01287109311670065, 0.0237334743142128, 0.018022935837507248, 0.02570047415792942, 0.011231654323637486, 0.03534418344497681, 0.035054609179496765], [0.05639560520648956, 0.041728585958480835, 0.029408114030957222, 0.09665026515722275, 0.028619125485420227, 0.038149602711200714, 0.04275677725672722, 0.03950527310371399, 0.06932224333286285, 0.0201003085821867, 0.07209112495183945, 0.06518742442131042, 0.05270911008119583, 0.06740104407072067, 0.03967542201280594, 0.047520726919174194, 0.022422175854444504, 0.02439415268599987, 0.02696070447564125, 0.019218893721699715, 0.03403863683342934, 0.00823740940541029, 0.03223852440714836, 0.025268740952014923], [0.005202196072787046, 0.0024743760004639626, 0.011741983704268932, 0.019769130274653435, 0.024021413177251816, 0.012343931011855602, 0.016894884407520294, 0.05961858481168747, 0.052525755017995834, 0.044752296060323715, 0.03153875470161438, 0.0876980721950531, 0.18285274505615234, 0.15055373311042786, 0.0474848635494709, 0.0268955547362566, 0.012909350916743279, 0.009362195618450642, 0.01346651092171669, 0.06414948403835297, 0.047248248010873795, 0.02208702452480793, 0.020651107653975487, 0.03375786915421486], [0.0139686344191432, 0.013526364229619503, 0.01981440931558609, 0.0409102737903595, 0.03183189406991005, 0.03365200757980347, 0.03699147328734398, 0.045715585350990295, 0.10364473611116409, 0.01965285651385784, 0.06634320318698883, 0.04017876833677292, 0.15098363161087036, 0.04438721388578415, 0.06294561177492142, 
0.027544591575860977, 0.018918076530098915, 0.01603446900844574, 0.023405103012919426, 0.03209822624921799, 0.07551847398281097, 0.012141031213104725, 0.05491232872009277, 0.014880988746881485], [0.010163814760744572, 0.007580229546874762, 0.02156871184706688, 0.026985084637999535, 0.035803865641355515, 0.009240960702300072, 0.01240516733378172, 0.05844603106379509, 0.058983076363801956, 0.016755158081650734, 0.021513652056455612, 0.09870800375938416, 0.2586447298526764, 0.07283629477024078, 0.039162635803222656, 0.03170987218618393, 0.03042827732861042, 0.010197525843977928, 0.01196683757007122, 0.049582578241825104, 0.046656254678964615, 0.011342472396790981, 0.012854175642132759, 0.0464647002518177], [0.011208467185497284, 0.010043198242783546, 0.04480033740401268, 0.04590313509106636, 0.03122778981924057, 0.020780198276042938, 0.02859569899737835, 0.015192700549960136, 0.179676353931427, 0.014643401838839054, 0.0736273005604744, 0.031006982550024986, 0.11578643321990967, 0.0521869994699955, 0.0908946543931961, 0.0219865795224905, 0.02522839605808258, 0.007630875799804926, 0.018590781837701797, 0.007904304191470146, 0.08597129583358765, 0.0075895413756370544, 0.045933596789836884, 0.013591044582426548], [0.013079743832349777, 0.010559359565377235, 0.010772266425192356, 0.016272183507680893, 0.021887673065066338, 0.020232822746038437, 0.009970483370125294, 0.08560465276241302, 0.02473730780184269, 0.03684082627296448, 0.013711650855839252, 0.11613879352807999, 0.08202889561653137, 0.12755295634269714, 0.014244459569454193, 0.03618704900145531, 0.012287539429962635, 0.03296304866671562, 0.01057827565819025, 0.13334323465824127, 0.032788343727588654, 0.027480345219373703, 0.008137533441185951, 0.1026005670428276], [0.00708283856511116, 0.0094269048422575, 0.018107816576957703, 0.0220810454338789, 0.03847699984908104, 0.018748151138424873, 0.016949433833360672, 0.05261852592229843, 0.10566214472055435, 0.09632931649684906, 0.03757256269454956, 0.06970778852701187, 
0.05171975865960121, 0.07192915678024292, 0.020845942199230194, 0.015056031756103039, 0.018480483442544937, 0.022903162986040115, 0.01423572190105915, 0.05668700858950615, 0.06700699776411057, 0.07940282672643661, 0.02210944890975952, 0.06685996800661087], [0.009122112765908241, 0.005502874031662941, 0.018814677372574806, 0.01026823092252016, 0.026608040556311607, 0.01896780915558338, 0.01200166530907154, 0.07603423297405243, 0.03667335584759712, 0.029120495542883873, 0.006342652719467878, 0.07950206845998764, 0.10133972018957138, 0.043782852590084076, 0.02589895948767662, 0.03189948573708534, 0.01941153034567833, 0.03657916933298111, 0.01863659732043743, 0.19090604782104492, 0.065777987241745, 0.03172335401177406, 0.005022393073886633, 0.10006365925073624], [0.008317690342664719, 0.010960713028907776, 0.023533860221505165, 0.013797380030155182, 0.03600030764937401, 0.008662118576467037, 0.010235439985990524, 0.017203690484166145, 0.09800467640161514, 0.012241002172231674, 0.057785168290138245, 0.024806244298815727, 0.08956471085548401, 0.03728405758738518, 0.10144059360027313, 0.014070026576519012, 0.04984379559755325, 0.01661006733775139, 0.019491096958518028, 0.03549163416028023, 0.18105502426624298, 0.020560678094625473, 0.08882660418748856, 0.02421344816684723], [0.00431159557774663, 0.0032452649902552366, 0.014670592732727528, 0.007019818760454655, 0.02018316276371479, 0.009479277767241001, 0.007400323636829853, 0.04167531430721283, 0.030138494446873665, 0.0399358831346035, 0.006893608253449202, 0.12360712140798569, 0.17642842233181, 0.13415558636188507, 0.01883949711918831, 0.023339970037341118, 0.016784964129328728, 0.019797272980213165, 0.010916220024228096, 0.10803970694541931, 0.03544994816184044, 0.028398271650075912, 0.004350626841187477, 0.11493907868862152], [0.029365869238972664, 0.013356336392462254, 0.036461859941482544, 0.0201790202409029, 0.026514513418078423, 0.013486087322235107, 0.04874565824866295, 0.05087386444211006, 0.05221368372440338, 
0.019692135974764824, 0.01498066820204258, 0.06127229332923889, 0.09083745628595352, 0.03538865968585014, 0.07804445922374725, 0.04627387225627899, 0.027044646441936493, 0.01338385883718729, 0.057246606796979904, 0.09098125249147415, 0.0903363972902298, 0.018254250288009644, 0.019490372389554977, 0.04557618498802185], [0.015094676986336708, 0.016519589349627495, 0.038109466433525085, 0.04724888131022453, 0.01373670157045126, 0.019099459052085876, 0.024350186809897423, 0.036556486040353775, 0.020458834245800972, 0.04714753478765488, 0.027588875964283943, 0.09173210710287094, 0.05764615163207054, 0.08873030543327332, 0.04049019142985344, 0.12508849799633026, 0.011996024288237095, 0.018748387694358826, 0.02613198384642601, 0.0446164496243, 0.020590294152498245, 0.04299992695450783, 0.017590485513210297, 0.10772857069969177], [0.05528395622968674, 0.04615342244505882, 0.033736031502485275, 0.06451737880706787, 0.03029528446495533, 0.03137711063027382, 0.03875717520713806, 0.03997163474559784, 0.03481089696288109, 0.03369880095124245, 0.0278888251632452, 0.05929651856422424, 0.025900904089212418, 0.05002806335687637, 0.044371116906404495, 0.07229841500520706, 0.026871725916862488, 0.033697206526994705, 0.041469551622867584, 0.04444288834929466, 0.038391102105379105, 0.03017723746597767, 0.02784373052418232, 0.06872106343507767], [0.004246586933732033, 0.0022858239244669676, 0.011357338167726994, 0.00985873956233263, 0.020711848512291908, 0.006586204748600721, 0.0118032805621624, 0.051465313881635666, 0.017964456230401993, 0.06842435896396637, 0.011423644609749317, 0.10022473335266113, 0.125716432929039, 0.12214123457670212, 0.05091587454080582, 0.031754299998283386, 0.0144615164026618, 0.009280862286686897, 0.016199810430407524, 0.11848773807287216, 0.03279080614447594, 0.06901491433382034, 0.013037887401878834, 0.07984622567892075], [0.011896139942109585, 0.010953031480312347, 0.02020518109202385, 0.01665276288986206, 0.03891967982053757, 0.013541470281779766, 
0.025581028312444687, 0.056050803512334824, 0.026957357302308083, 0.03391709178686142, 0.01716487482190132, 0.07026807963848114, 0.10430150479078293, 0.047480251640081406, 0.09306753426790237, 0.0390130840241909, 0.028876611962914467, 0.0154819805175066, 0.033993277698755264, 0.11317586898803711, 0.04933025687932968, 0.04337448254227638, 0.02926582843065262, 0.06053180992603302], [0.008349798619747162, 0.005920650903135538, 0.02337474375963211, 0.015036328695714474, 0.03333229944109917, 0.0057432386092841625, 0.011020115576684475, 0.04348502308130264, 0.02465561032295227, 0.017695963382720947, 0.01004133652895689, 0.10379020869731903, 0.19138014316558838, 0.07284268736839294, 0.06523088365793228, 0.04181862249970436, 0.041225366294384, 0.011378430761396885, 0.019545510411262512, 0.08985525369644165, 0.0407964251935482, 0.020395519211888313, 0.009895628318190575, 0.09319014102220535], [0.021616162732243538, 0.016645396128296852, 0.04123492166399956, 0.03046972118318081, 0.03916260972619057, 0.01781095750629902, 0.026326734572649002, 0.03205359727144241, 0.06830903887748718, 0.017282642424106598, 0.033455878496170044, 0.05027718469500542, 0.09565568715333939, 0.07120852917432785, 0.09178202599287033, 0.044207628816366196, 0.03621377423405647, 0.014034459367394447, 0.03137850761413574, 0.0427858792245388, 0.09015391767024994, 0.01775999180972576, 0.03263728693127632, 0.03753750026226044], [0.00806674174964428, 0.0067879739217460155, 0.01109236292541027, 0.008632341399788857, 0.016350675374269485, 0.008783378638327122, 0.0077270339243113995, 0.055245291441679, 0.012335730716586113, 0.022216446697711945, 0.007753262761980295, 0.13027286529541016, 0.10655676573514938, 0.10471559315919876, 0.024921581149101257, 0.04275452718138695, 0.014962738379836082, 0.02358129993081093, 0.015365572646260262, 0.19285888969898224, 0.03004465252161026, 0.027075765654444695, 0.0075881402008235455, 0.1143103837966919]], [[0.030626261606812477, 0.017685027793049812, 0.04299888014793396, 
0.035111818462610245, 0.04898705333471298, 0.11903877556324005, 0.03882491588592529, 0.023584537208080292, 0.13530568778514862, 0.03635459020733833, 0.04350211098790169, 0.03168905898928642, 0.030826356261968613, 0.014241496101021767, 0.02924834005534649, 0.017980678007006645, 0.04574718326330185, 0.060658048838377, 0.018700415268540382, 0.014594863168895245, 0.053974926471710205, 0.029663478955626488, 0.03659233823418617, 0.04406319186091423], [0.03449219837784767, 0.01669217459857464, 0.03709929436445236, 0.016406472772359848, 0.035156749188899994, 0.03301098197698593, 0.041395824402570724, 0.04658142849802971, 0.1483384221792221, 0.044336553663015366, 0.049838095903396606, 0.05233006551861763, 0.03705047443509102, 0.0256703682243824, 0.0272268895059824, 0.015140701085329056, 0.03584505617618561, 0.025010939687490463, 0.031818147748708725, 0.05080196261405945, 0.08408506214618683, 0.040165577083826065, 0.030260726809501648, 0.04124582186341286], [0.032855235040187836, 0.014809802174568176, 0.03297434374690056, 0.014788641594350338, 0.024580666795372963, 0.038201283663511276, 0.02271018549799919, 0.012121319770812988, 0.33408820629119873, 0.02283186838030815, 0.0889371931552887, 0.04317102208733559, 0.04725516587495804, 0.04665541276335716, 0.04375872015953064, 0.012191284447908401, 0.029315628111362457, 0.019962219521403313, 0.007462620735168457, 0.005141190253198147, 0.054986268281936646, 0.008182133547961712, 0.02853322960436344, 0.014486375264823437], [0.018078980967402458, 0.013843261636793613, 0.02034233883023262, 0.02535369247198105, 0.052995361387729645, 0.02409178763628006, 0.03603473678231239, 0.03712254390120506, 0.10833602398633957, 0.057534702122211456, 0.05147344991564751, 0.08675161004066467, 0.08653102070093155, 0.047439370304346085, 0.02058483101427555, 0.024981681257486343, 0.0412735790014267, 0.013904612511396408, 0.020453035831451416, 0.04593459889292717, 0.05152057856321335, 0.044237032532691956, 0.020446427166461945, 0.05073479562997818], 
[0.05943101644515991, 0.02956731803715229, 0.018406571820378304, 0.03650551289319992, 0.008621356450021267, 0.08140058070421219, 0.02611350268125534, 0.06539522856473923, 0.01908753626048565, 0.024994470179080963, 0.016667818650603294, 0.07823462784290314, 0.00814476702362299, 0.012012184597551823, 0.011548892594873905, 0.03546954691410065, 0.005685454234480858, 0.12678614258766174, 0.0314534530043602, 0.0997328832745552, 0.02416754513978958, 0.05123152211308479, 0.011099950410425663, 0.11824213713407516], [0.042018093168735504, 0.019496383145451546, 0.00864467117935419, 0.09325237572193146, 0.004225838929414749, 0.23313839733600616, 0.007563173770904541, 0.00786188431084156, 0.022086985409259796, 0.008044764399528503, 0.013173184357583523, 0.01035460364073515, 0.0017781774513423443, 0.0021994805429130793, 0.0037725295405834913, 0.02957915887236595, 0.002673375653102994, 0.4167137145996094, 0.005669873673468828, 0.004170933738350868, 0.010463714599609375, 0.009650100953876972, 0.019019197672605515, 0.024449395015835762], [0.14749334752559662, 0.09769975394010544, 0.029439561069011688, 0.12054624408483505, 0.009085137397050858, 0.05763211101293564, 0.03644566237926483, 0.011105349287390709, 0.017892153933644295, 0.007755234371870756, 0.012123160064220428, 0.050423119217157364, 0.01054765097796917, 0.02445138804614544, 0.016854848712682724, 0.043080009520053864, 0.007140056230127811, 0.03439902886748314, 0.017774349078536034, 0.005557455588132143, 0.016535049304366112, 0.00979616492986679, 0.0374850369989872, 0.17873811721801758], [0.008114530704915524, 0.00528399832546711, 0.006888020318001509, 0.008322736248373985, 0.0208334568887949, 0.22538775205612183, 0.018239423632621765, 0.02515021152794361, 0.0033555077388882637, 0.05184527486562729, 0.026142966002225876, 0.26274701952934265, 0.01704391837120056, 0.015461748465895653, 0.013493670150637627, 0.014090251177549362, 0.01600124128162861, 0.09976141899824142, 0.008621524088084698, 0.017176369205117226, 
0.0038188761100172997, 0.020517565310001373, 0.023642191663384438, 0.08806031197309494], [0.018168503418564796, 0.02913067303597927, 0.033580828458070755, 0.06676708906888962, 0.04545794427394867, 0.026047764346003532, 0.014163888059556484, 0.009153353050351143, 0.1430545598268509, 0.031368400901556015, 0.0638512670993805, 0.04229551926255226, 0.20868778228759766, 0.08209971338510513, 0.03660990297794342, 0.05763757973909378, 0.03579148277640343, 0.00690868403762579, 0.0044022914953529835, 0.0033292267471551895, 0.01225423626601696, 0.00760396383702755, 0.015466460026800632, 0.006168805994093418], [0.01561666838824749, 0.007042068988084793, 0.021129749715328217, 0.042504459619522095, 0.01291023101657629, 0.02924501709640026, 0.0443117655813694, 0.18357053399085999, 0.026313964277505875, 0.20099318027496338, 0.010153714567422867, 0.20386992394924164, 0.005812869407236576, 0.016010694205760956, 0.0030367260333150625, 0.021306006237864494, 0.002288182731717825, 0.0017256223363801837, 0.0039156051352620125, 0.021289832890033722, 0.0016482042847201228, 0.05533137544989586, 0.001131757046096027, 0.06884191930294037], [0.004440511576831341, 0.003325960598886013, 0.05803772062063217, 0.002116836840286851, 0.054791729897260666, 0.019596800208091736, 0.025611670687794685, 0.011280979961156845, 0.23125217854976654, 0.02103445865213871, 0.18442583084106445, 0.013080035336315632, 0.07570832967758179, 0.01569521054625511, 0.0923476293683052, 0.0013741691363975406, 0.0783419981598854, 0.014659173786640167, 0.012076071463525295, 0.004375465214252472, 0.035842377692461014, 0.005656400695443153, 0.030360080301761627, 0.004568278323858976], [0.017716696485877037, 0.009028253145515919, 0.022375132888555527, 0.02416667900979519, 0.04262635111808777, 0.030849790200591087, 0.026377061381936073, 0.06543069332838058, 0.12315772473812103, 0.17353755235671997, 0.040832459926605225, 0.12665687501430511, 0.018393464386463165, 0.021511318162083626, 0.013713176362216473, 0.019548602402210236, 
0.01776982471346855, 0.005006550345569849, 0.006616758182644844, 0.03060336224734783, 0.010316469706594944, 0.09475167840719223, 0.004008726216852665, 0.0550047792494297], [0.005409925244748592, 0.0023836405016481876, 0.13789771497249603, 0.0036154617555439472, 0.011239212937653065, 0.0028826817870140076, 0.015527642332017422, 0.03344924747943878, 0.4918177127838135, 0.027120405808091164, 0.043947841972112656, 0.02775508351624012, 0.07624951004981995, 0.05050324276089668, 0.03899790346622467, 0.001279162708669901, 0.005613216198980808, 0.0002602313179522753, 0.0013804328627884388, 0.005166350863873959, 0.008743558079004288, 0.004401462618261576, 0.0015571240801364183, 0.0028011437971144915], [0.004807267338037491, 0.0012177706230431795, 0.03840586170554161, 0.006091118790209293, 0.027958208695054054, 0.008345302194356918, 0.03860527276992798, 0.07286994159221649, 0.19431206583976746, 0.08813002705574036, 0.03349554166197777, 0.21507224440574646, 0.11250109225511551, 0.0336843803524971, 0.016962451860308647, 0.007077437825500965, 0.012927164323627949, 0.000999542186036706, 0.006973525509238243, 0.03348587453365326, 0.008807841688394547, 0.023280659690499306, 0.0008666579960845411, 0.013122713193297386], [0.006140843965113163, 0.002757062204182148, 0.0475037582218647, 0.0021049506030976772, 0.016331961378455162, 0.006693897303193808, 0.015840180218219757, 0.004689068999141455, 0.08905747532844543, 0.008340595290064812, 0.13403409719467163, 0.058926135301589966, 0.17730620503425598, 0.07067214697599411, 0.1553105264902115, 0.003835026640444994, 0.04388577863574028, 0.014567829668521881, 0.018652111291885376, 0.013159174472093582, 0.06267561763525009, 0.0064517236314713955, 0.028271982446312904, 0.012791895307600498], [0.008566192351281643, 0.007695761509239674, 0.01191109698265791, 0.02969416230916977, 0.030952543020248413, 0.009077334776520729, 0.019214587286114693, 0.030645135790109634, 0.0376817062497139, 0.054924286901950836, 0.030226850882172585, 
0.20709815621376038, 0.04826827347278595, 0.034251533448696136, 0.016749326139688492, 0.05894162505865097, 0.02956259436905384, 0.013616562820971012, 0.02103927731513977, 0.08237133175134659, 0.04020635411143303, 0.06192634627223015, 0.013131396844983101, 0.10224752873182297], [0.024792952463030815, 0.018299974501132965, 0.00722537050023675, 0.009575778618454933, 0.003509070258587599, 0.018280018121004105, 0.011714980937540531, 0.028401853516697884, 0.004569306969642639, 0.008618517778813839, 0.01431566383689642, 0.050740357488393784, 0.005434630438685417, 0.008919982239603996, 0.016640938818454742, 0.027550049126148224, 0.00547634856775403, 0.19380156695842743, 0.07375022023916245, 0.24442769587039948, 0.047809336334466934, 0.04657864570617676, 0.01874397322535515, 0.11082267016172409], [0.008790343068540096, 0.007300646509975195, 0.0018080166773870587, 0.01536334678530693, 0.001281478675082326, 0.045231424272060394, 0.0019745470490306616, 0.0014996398240327835, 0.0011724471114575863, 0.0027675610035657883, 0.006812268868088722, 0.01026835571974516, 0.0013776031555607915, 0.0013111525913700461, 0.007428103592246771, 0.031142961233854294, 0.0024811876937747, 0.7467920184135437, 0.01567736081779003, 0.009420140646398067, 0.009287087246775627, 0.010919870808720589, 0.027024084702134132, 0.032868314534425735], [0.036560457199811935, 0.0573650486767292, 0.006765843369066715, 0.02234889566898346, 0.004204979632049799, 0.011942420154809952, 0.009666107594966888, 0.0032677394337952137, 0.001305788173340261, 0.0030082648154348135, 0.009841760620474815, 0.05447224900126457, 0.008117695339024067, 0.018221529200673103, 0.04355790466070175, 0.05940181016921997, 0.01185092143714428, 0.1129957064986229, 0.06618262082338333, 0.02885347045958042, 0.03318934515118599, 0.017307063564658165, 0.09540297836065292, 0.28416943550109863], [0.0016477038152515888, 0.002972857328131795, 0.0015805161092430353, 0.0017097393283620477, 0.011284001171588898, 0.023792171850800514, 
0.003865918843075633, 0.0081010228022933, 0.0003480327141005546, 0.018818939104676247, 0.01771528832614422, 0.2376617193222046, 0.017083339393138885, 0.014201708137989044, 0.033971965312957764, 0.018562257289886475, 0.03657805547118187, 0.1733374297618866, 0.028384318575263023, 0.11168072372674942, 0.01164444163441658, 0.0357435904443264, 0.05940709263086319, 0.12990713119506836], [0.010974000208079815, 0.047951988875865936, 0.003805771004408598, 0.016225820407271385, 0.00718429870903492, 0.00342579185962677, 0.0015220731729641557, 0.0022343152668327093, 0.0017053037881851196, 0.0026908356230705976, 0.023441148921847343, 0.029660658910870552, 0.0321798101067543, 0.037345707416534424, 0.09485270082950592, 0.17893575131893158, 0.03798174113035202, 0.05951991677284241, 0.03265639394521713, 0.09693878889083862, 0.08536448329687119, 0.019060153514146805, 0.13671045005321503, 0.03763215243816376], [0.014076060615479946, 0.01347261667251587, 0.0044748191721737385, 0.019380871206521988, 0.0064260084182024, 0.00625463156029582, 0.013563733547925949, 0.047638457268476486, 0.0016013083513826132, 0.05658908933401108, 0.00598119618371129, 0.19775618612766266, 0.003194056451320648, 0.020397337153553963, 0.007238741964101791, 0.06254435330629349, 0.00487746624276042, 0.007576586212962866, 0.022596077993512154, 0.13080251216888428, 0.006815354805439711, 0.12141533195972443, 0.006222238298505545, 0.21910494565963745], [0.010509815067052841, 0.01206112839281559, 0.013395196758210659, 0.00730053661391139, 0.022696038708090782, 0.01219918578863144, 0.0058557214215397835, 0.00308894831687212, 0.010057004168629646, 0.004565948620438576, 0.057666294276714325, 0.016882769763469696, 0.022886699065566063, 0.014239751733839512, 0.14158640801906586, 0.019165504723787308, 0.10477368533611298, 0.15124467015266418, 0.04362354055047035, 0.026015911251306534, 0.12013614177703857, 0.013601227663457394, 0.1303223818540573, 0.03612557426095009], [0.024316977709531784, 0.01567942090332508, 
0.0016586477868258953, 0.028297962620854378, 0.0036481134593486786, 0.0023961812257766724, 0.0028148419223725796, 0.00785007979720831, 0.0014221465680748224, 0.01823546178638935, 0.004448692314326763, 0.13648535311222076, 0.0017152626533061266, 0.01366274245083332, 0.0046664997935295105, 0.11425664275884628, 0.004637653473764658, 0.01209563110023737, 0.018140029162168503, 0.11832781881093979, 0.016926638782024384, 0.15121421217918396, 0.007940667681396008, 0.28916242718696594]], [[0.022283364087343216, 0.01987706683576107, 0.13688543438911438, 0.0170705895870924, 0.009609689936041832, 0.01320437341928482, 0.02554916962981224, 0.032525379210710526, 0.026269376277923584, 0.03264385089278221, 0.02960650995373726, 0.04576319456100464, 0.026104461401700974, 0.023789582774043083, 0.14668245613574982, 0.021229533478617668, 0.012200405821204185, 0.03859441727399826, 0.050528042018413544, 0.07776554673910141, 0.04140152409672737, 0.06332091987133026, 0.02297268621623516, 0.06412245333194733], [0.02401648834347725, 0.01763112284243107, 0.10451192408800125, 0.02370426058769226, 0.02019343711435795, 0.006239666603505611, 0.06394795328378677, 0.05217116326093674, 0.04960138723254204, 0.05823347344994545, 0.051745664328336716, 0.053185924887657166, 0.059927769005298615, 0.04605472460389137, 0.08069000393152237, 0.036459602415561676, 0.01953789032995701, 0.00750775309279561, 0.060913581401109695, 0.05987561121582985, 0.02178882621228695, 0.04382087290287018, 0.013949189335107803, 0.02429177053272724], [0.12859967350959778, 0.09909870475530624, 0.0311446413397789, 0.07539629936218262, 0.039948832243680954, 0.016666993498802185, 0.04109601303935051, 0.02396422065794468, 0.048518940806388855, 0.11446655541658401, 0.0300547257065773, 0.014550931751728058, 0.01497584581375122, 0.016196193173527718, 0.0056151398457586765, 0.028191080316901207, 0.018765835091471672, 0.006785929203033447, 0.02402500808238983, 0.01378585398197174, 0.025493400171399117, 0.1023583710193634, 
0.02176603116095066, 0.05853480100631714], [0.018275929614901543, 0.01726064458489418, 0.049060553312301636, 0.0072413235902786255, 0.0053748274222016335, 0.004022788722068071, 0.006059000734239817, 0.017791924998164177, 0.013336150906980038, 0.0711180567741394, 0.023837225511670113, 0.0768384113907814, 0.0546194352209568, 0.07962857931852341, 0.16705894470214844, 0.03194183111190796, 0.012039042077958584, 0.019466005265712738, 0.016918957233428955, 0.07376863807439804, 0.030025748535990715, 0.12454110383987427, 0.02183511108160019, 0.05793985724449158], [0.062139689922332764, 0.08919626474380493, 0.05914667621254921, 0.1155586913228035, 0.06566313654184341, 0.03250247612595558, 0.03537534177303314, 0.01838594861328602, 0.05730520561337471, 0.059418223798274994, 0.038429614156484604, 0.028763145208358765, 0.03759589046239853, 0.05437218025326729, 0.028121450915932655, 0.05569712817668915, 0.03710417449474335, 0.012403571046888828, 0.018978042528033257, 0.009693839587271214, 0.01705176569521427, 0.029115958139300346, 0.016794562339782715, 0.021187031641602516], [0.046297214925289154, 0.02570895291864872, 0.10164881497621536, 0.010020649991929531, 0.06553123891353607, 0.021104369312524796, 0.062236521393060684, 0.03585411235690117, 0.05836378037929535, 0.12074483186006546, 0.07890674471855164, 0.007018575444817543, 0.03521474823355675, 0.027470501139760017, 0.025133859366178513, 0.008449617773294449, 0.04362192749977112, 0.012954470701515675, 0.03745103254914284, 0.022015446797013283, 0.01728162355720997, 0.09499151259660721, 0.026428265497088432, 0.015551166608929634], [0.05844856798648834, 0.044679053127765656, 0.008466890081763268, 0.00925036333501339, 0.039706259965896606, 0.46207091212272644, 0.05524855852127075, 0.005582831799983978, 0.017606576904654503, 0.004051060415804386, 0.004357055760920048, 0.0022662992123514414, 0.0025997066404670477, 0.00372039875946939, 0.0027969505172222853, 0.0036002506967633963, 0.016986127942800522, 0.22179915010929108, 
0.013847480528056622, 0.0016202001133933663, 0.004773971624672413, 0.0027183545753359795, 0.007197007071226835, 0.0066059730015695095], [0.00814903061836958, 0.005534191615879536, 0.01164786797016859, 0.01147562637925148, 0.0038497881032526493, 0.18368948996067047, 0.009838595055043697, 0.026134680956602097, 0.005460991524159908, 0.004143815487623215, 0.002563738962635398, 0.030588706955313683, 0.001861434429883957, 0.006938982754945755, 0.015399460680782795, 0.010769344866275787, 0.003950456622987986, 0.5517449975013733, 0.010274240747094154, 0.03570997342467308, 0.010101414285600185, 0.007422023452818394, 0.006586792413145304, 0.036164309829473495], [0.05999431014060974, 0.03977862000465393, 0.190945103764534, 0.04217289760708809, 0.10862357169389725, 0.044661860913038254, 0.027344103902578354, 0.025376493111252785, 0.08017496019601822, 0.0371110625565052, 0.07525865733623505, 0.006051904056221247, 0.029315173625946045, 0.013810054399073124, 0.027043761685490608, 0.023779217153787613, 0.055949967354536057, 0.0087658716365695, 0.007768026553094387, 0.011211586184799671, 0.014003569260239601, 0.018657242879271507, 0.04564756527543068, 0.006554549094289541], [0.005548534449189901, 0.009625539183616638, 0.04675672575831413, 0.0053973449394106865, 0.02322383224964142, 0.00324700097553432, 0.02844332531094551, 0.19319964945316315, 0.04867725074291229, 0.07422695308923721, 0.03184402734041214, 0.01853647641837597, 0.017776018008589745, 0.03885143622756004, 0.03500010445713997, 0.00467300321906805, 0.0205089058727026, 0.004836963023990393, 0.03046225570142269, 0.1774609088897705, 0.052769921720027924, 0.10116098821163177, 0.015021305531263351, 0.012751596048474312], [0.06701412796974182, 0.04335736483335495, 0.08819062262773514, 0.03054654970765114, 0.012382852844893932, 0.28594616055488586, 0.01735313981771469, 0.010341550223529339, 0.04433434456586838, 0.03412908688187599, 0.05886949598789215, 0.10336127132177353, 0.04790536314249039, 0.05504264310002327, 
0.03899676725268364, 0.01328186970204115, 0.004306517541408539, 0.019933922216296196, 0.0033443451393395662, 0.0013170058373361826, 0.001312296255491674, 0.003254852956160903, 0.006652043201029301, 0.008825824595987797], [0.00549015449360013, 0.004615834914147854, 0.13109484314918518, 0.0011633237591013312, 0.006601781118661165, 0.0031115952879190445, 0.02625402808189392, 0.06794073432683945, 0.03614512085914612, 0.10627484321594238, 0.10793552547693253, 0.035130925476551056, 0.058270636945962906, 0.05743149295449257, 0.16356146335601807, 0.00174007099121809, 0.0075407144613564014, 0.0033935708925127983, 0.019945522770285606, 0.059105996042490005, 0.008118784986436367, 0.07067400217056274, 0.01247870922088623, 0.005980407819151878], [0.012457754462957382, 0.009979627095162868, 0.016717640683054924, 0.0695638433098793, 0.001331391278654337, 0.011250360868871212, 0.006792054511606693, 0.1819581836462021, 0.033501800149679184, 0.004396948963403702, 0.023627042770385742, 0.47641822695732117, 0.015134031884372234, 0.04527318477630615, 0.024955328553915024, 0.027448872104287148, 0.0004658191173803061, 0.000644085870590061, 0.0013258883263915777, 0.02927469089627266, 0.001851994195021689, 0.00042714871233329177, 0.0012249780120328069, 0.003979061264544725], [0.0005032207118347287, 0.0002924345317296684, 0.008569600991904736, 0.005590256303548813, 9.962098556570709e-05, 0.0017179130809381604, 0.00162586010992527, 0.012491429224610329, 0.007768670562654734, 0.0020760181359946728, 0.008429016917943954, 0.8929917216300964, 0.010955534875392914, 0.018104225397109985, 0.022071003913879395, 0.004198362119495869, 2.9730370442848653e-05, 0.00012462316954042763, 0.000192109466297552, 0.0016451970441266894, 6.02312502451241e-05, 5.4063129937276244e-05, 4.2394349293317646e-05, 0.0003667583514470607], [0.02032800391316414, 0.012327241711318493, 0.05779829993844032, 0.04018259793519974, 0.006052273325622082, 0.0013098561903461814, 0.014342229813337326, 0.02908947505056858, 
0.01569165103137493, 0.018181325867772102, 0.04386347532272339, 0.3490985035896301, 0.08407354354858398, 0.05963212251663208, 0.13591977953910828, 0.03206922858953476, 0.004377736244350672, 0.0002308035036548972, 0.011870604939758778, 0.020736945793032646, 0.006177390459924936, 0.006650520488619804, 0.008069843985140324, 0.021926509216427803], [0.002760515781119466, 0.003389182034879923, 0.01634804531931877, 0.0043792445212602615, 0.0007519684149883687, 0.0012636272003874183, 0.002030427334830165, 0.01512625627219677, 0.004142228979617357, 0.03700155019760132, 0.008506279438734055, 0.34451061487197876, 0.03733355551958084, 0.13038358092308044, 0.17921403050422668, 0.032353032380342484, 0.0020071701146662235, 0.007715356070548296, 0.006524096708744764, 0.07817849516868591, 0.0071490127593278885, 0.03877583518624306, 0.0030316109769046307, 0.03712433949112892], [0.03645440191030502, 0.06433719396591187, 0.038047198206186295, 0.04003767669200897, 0.04176730662584305, 0.008052275516092777, 0.023467471823096275, 0.01287318766117096, 0.02170393243432045, 0.03925333917140961, 0.034199684858322144, 0.06376560032367706, 0.06279248744249344, 0.14471641182899475, 0.09681062400341034, 0.06509711593389511, 0.053364284336566925, 0.007231141906231642, 0.033885613083839417, 0.019995318725705147, 0.018995137885212898, 0.026342246681451797, 0.020596781745553017, 0.026213547214865685], [0.020075805485248566, 0.017078209668397903, 0.064155712723732, 0.0038066317792981863, 0.030063385143876076, 0.004651955794543028, 0.02056184783577919, 0.02635154128074646, 0.018082065507769585, 0.07031328976154327, 0.08319075405597687, 0.019516559317708015, 0.04851997271180153, 0.10264966636896133, 0.10093174129724503, 0.012631471268832684, 0.05030339956283569, 0.00720156729221344, 0.03539837524294853, 0.06609956920146942, 0.022974951192736626, 0.08856403082609177, 0.05880254879593849, 0.02807495929300785], [0.032037846744060516, 0.032581064850091934, 0.006107593420892954, 0.003949045203626156, 
0.011927534826099873, 0.09949993342161179, 0.023619093000888824, 0.004645383916795254, 0.005008199717849493, 0.002724433084949851, 0.003484179498627782, 0.019613822922110558, 0.0056494600139558315, 0.02141384594142437, 0.028151707723736763, 0.01166456937789917, 0.024528132751584053, 0.5111977458000183, 0.0512048676609993, 0.013411776162683964, 0.019356293603777885, 0.005880304612219334, 0.017297491431236267, 0.045045655220746994], [0.001416828716173768, 0.0011888755252584815, 0.0018028286285698414, 0.0014648522483184934, 0.0003697731881402433, 0.012022975832223892, 0.0008814858738332987, 0.007486305199563503, 0.0002798144123516977, 0.0006850937497802079, 0.0004492170410230756, 0.060752466320991516, 0.0008670933311805129, 0.010819066315889359, 0.0398561954498291, 0.009543126448988914, 0.0021643126383423805, 0.5702142119407654, 0.011683505028486252, 0.14002814888954163, 0.014547569677233696, 0.00565339857712388, 0.006178776267915964, 0.09964410960674286], [0.020995037630200386, 0.015998749062418938, 0.01626346819102764, 0.002017454942688346, 0.015306866727769375, 0.0008760729688219726, 0.0035064329858869314, 0.0027421684935688972, 0.0014939074171707034, 0.005678815767168999, 0.006512301973998547, 0.0052805677987635136, 0.014827500097453594, 0.01643393747508526, 0.10501637309789658, 0.018949296325445175, 0.10213803499937057, 0.018634894862771034, 0.06479654461145401, 0.11453355848789215, 0.11546153575181961, 0.08639872074127197, 0.14207801222801208, 0.10405971109867096], [0.0014531693886965513, 0.0038560994435101748, 0.004520625341683626, 0.001291568041779101, 0.0026743365451693535, 0.0002254965656902641, 0.002273005899041891, 0.021842556074261665, 0.001703548594377935, 0.007722657639533281, 0.0021646295208483934, 0.00906699150800705, 0.0039610713720321655, 0.023123478516936302, 0.039534781128168106, 0.005907649639993906, 0.013554916717112064, 0.008176741190254688, 0.04370216652750969, 0.4845501482486725, 0.13692276179790497, 0.10923007875680923, 0.017911652103066444, 
0.054629795253276825], [0.05935734137892723, 0.033575110137462616, 0.036979831755161285, 0.008821647614240646, 0.007632414344698191, 0.0029770690016448498, 0.013886330649256706, 0.004436337389051914, 0.007204028312116861, 0.022570133209228516, 0.02608525939285755, 0.04915028437972069, 0.06462998688220978, 0.055952709168195724, 0.15404915809631348, 0.021225910633802414, 0.020178191363811493, 0.011374829337000847, 0.08720003068447113, 0.02955366112291813, 0.04215913638472557, 0.06715232133865356, 0.04822036996483803, 0.12562783062458038], [0.0005595156690105796, 0.0007775825215503573, 0.012792794033885002, 4.6043140173424035e-05, 0.00098694721236825, 1.4396731785382144e-05, 0.0008854230400174856, 0.001889862702228129, 0.0002923838619608432, 0.01332594733685255, 0.0039274729788303375, 0.003545196261256933, 0.010534883476793766, 0.02226339653134346, 0.2516253888607025, 0.0006097570294514298, 0.009981311857700348, 0.001403300673700869, 0.03397854045033455, 0.16787201166152954, 0.031617093831300735, 0.36940085887908936, 0.02645929716527462, 0.03521062806248665]], [[0.004506949335336685, 0.015277273021638393, 0.13172923028469086, 0.10973981022834778, 0.016620656475424767, 0.060261860489845276, 0.025188516825437546, 0.046213842928409576, 0.12580284476280212, 0.020396439358592033, 0.054546862840652466, 0.014460810460150242, 0.06421411782503128, 0.017269305884838104, 0.09694614261388779, 0.03494418039917946, 0.01004817895591259, 0.035481687635183334, 0.010187692008912563, 0.019602682441473007, 0.03494780883193016, 0.010059667751193047, 0.034527309238910675, 0.00702607911080122], [0.018578901886940002, 0.02200961858034134, 0.07658436894416809, 0.06778775155544281, 0.029287604615092278, 0.057155340909957886, 0.08050432801246643, 0.057556625455617905, 0.05481982231140137, 0.02074204571545124, 0.03593545779585838, 0.04240147024393082, 0.038501426577568054, 0.034369029104709625, 0.08890063315629959, 0.03350318595767021, 0.023945219814777374, 0.043225426226854324, 
0.04997677728533745, 0.0352800227701664, 0.02900974079966545, 0.012853591702878475, 0.026330558583140373, 0.020741045475006104], [0.013578456826508045, 0.024034013971686363, 0.030763207003474236, 0.09546472877264023, 0.034339237958192825, 0.04495493695139885, 0.02061079815030098, 0.025451498106122017, 0.14696598052978516, 0.050007447600364685, 0.07122815400362015, 0.04534274712204933, 0.0832163468003273, 0.05122986063361168, 0.03567483648657799, 0.05455739423632622, 0.025369206443428993, 0.016089729964733124, 0.009543337859213352, 0.011595791205763817, 0.03678631782531738, 0.0173022523522377, 0.03770790249109268, 0.018185874447226524], [0.013711275532841682, 0.023558897897601128, 0.05380477011203766, 0.04456362873315811, 0.01937447115778923, 0.035926587879657745, 0.0351802296936512, 0.028481168672442436, 0.09919623285531998, 0.02646564319729805, 0.03791402280330658, 0.09106123447418213, 0.06287387013435364, 0.14476725459098816, 0.12578435242176056, 0.02652639150619507, 0.01620202139019966, 0.024158241227269173, 0.018014581874012947, 0.012344635091722012, 0.0256545040756464, 0.006715596187859774, 0.013572991825640202, 0.014147412031888962], [0.003914376255124807, 0.014498166739940643, 0.10300914198160172, 0.0834418535232544, 0.01640818826854229, 0.03741319850087166, 0.011364701204001904, 0.046300217509269714, 0.09237891435623169, 0.02283691242337227, 0.04175824299454689, 0.020934930071234703, 0.1529802680015564, 0.02582804299890995, 0.1283411979675293, 0.040919676423072815, 0.012007320299744606, 0.024616463109850883, 0.007377276197075844, 0.029619310051202774, 0.03228866308927536, 0.012803045101463795, 0.02839081734418869, 0.010569079779088497], [0.0009419364505447447, 0.0046731652691960335, 0.08899398893117905, 0.06013857573270798, 0.013748890720307827, 0.03508530929684639, 0.009551584720611572, 0.06421743333339691, 0.3941954970359802, 0.02507217414677143, 0.08442659676074982, 0.0016346701886504889, 0.10055150091648102, 0.0026475924532860518, 0.035250477492809296, 
0.009342947974801064, 0.005282361060380936, 0.004714690614491701, 0.0012244486715644598, 0.0068445466458797455, 0.018940281122922897, 0.004675483331084251, 0.02718258649110794, 0.0006632668082602322], [0.004508517682552338, 0.02322409115731716, 0.046206362545490265, 0.07955126464366913, 0.0162424985319376, 0.014656045474112034, 0.001688258838839829, 0.040997881442308426, 0.09591726213693619, 0.029986059293150902, 0.06696046888828278, 0.024569030851125717, 0.10975154489278793, 0.08392351865768433, 0.08961193263530731, 0.04825969785451889, 0.018787844106554985, 0.01493887696415186, 0.001583786797709763, 0.040247924625873566, 0.055897168815135956, 0.021021192893385887, 0.05648601055145264, 0.014982708729803562], [0.005965463817119598, 0.012055407278239727, 0.10199107974767685, 0.08324366807937622, 0.030226102098822594, 0.08207402378320694, 0.034379228949546814, 0.03880356252193451, 0.13288968801498413, 0.022876594215631485, 0.0651879534125328, 0.0173135157674551, 0.06914277374744415, 0.018219860270619392, 0.08397936820983887, 0.026303213089704514, 0.02079787291586399, 0.03832737356424332, 0.014496182091534138, 0.013165561482310295, 0.030569393187761307, 0.009116998873651028, 0.04227353632450104, 0.006601485423743725], [0.0029945007991045713, 0.015468989498913288, 0.07423291355371475, 0.1002797782421112, 0.025836030021309853, 0.06740305572748184, 0.014336623251438141, 0.0444638729095459, 0.18191412091255188, 0.058726683259010315, 0.06868503242731094, 0.009861785918474197, 0.11581110954284668, 0.006689806003123522, 0.05274435877799988, 0.027544310316443443, 0.013921844772994518, 0.020687254145741463, 0.004489895887672901, 0.010705684311687946, 0.022528748959302902, 0.019108526408672333, 0.03572739660739899, 0.005837710574269295], [0.006435132585465908, 0.014195311814546585, 0.03023446537554264, 0.034012336283922195, 0.028152521699666977, 0.018046477809548378, 0.05166032910346985, 0.03151834383606911, 0.03869733214378357, 0.019539253786206245, 0.01887233927845955, 
0.11457540839910507, 0.1462915688753128, 0.20654378831386566, 0.09508101642131805, 0.023693354800343513, 0.027073154225945473, 0.014423931948840618, 0.030952583998441696, 0.015546616166830063, 0.012023803777992725, 0.005324299447238445, 0.005188530310988426, 0.011918182484805584], [0.006253486033529043, 0.007667102385312319, 0.03612732142210007, 0.058113861829042435, 0.012066074647009373, 0.10572962462902069, 0.18465924263000488, 0.027840623632073402, 0.13390831649303436, 0.019050542265176773, 0.052835509181022644, 0.01580522209405899, 0.07600926607847214, 0.005620869342237711, 0.048113659024238586, 0.020356999710202217, 0.007567527238279581, 0.030740510672330856, 0.08452939242124557, 0.011141189374029636, 0.02920733578503132, 0.005001608282327652, 0.017819246277213097, 0.0038354217540472746], [0.027106650173664093, 0.015119715593755245, 0.027521837502717972, 0.00661395164206624, 0.030840622261166573, 0.011372504755854607, 0.25098225474357605, 0.04848821088671684, 0.042209457606077194, 0.013504967093467712, 0.016322601586580276, 0.07158886641263962, 0.03761241212487221, 0.1560799777507782, 0.039792001247406006, 0.0038569257594645023, 0.03403136506676674, 0.009759287349879742, 0.11305373907089233, 0.015116652473807335, 0.017066849395632744, 0.002619536127895117, 0.004940851591527462, 0.004398690070956945], [0.002313849749043584, 0.004104798659682274, 0.00998240802437067, 0.03079000860452652, 0.007198772393167019, 0.0052464487962424755, 0.05912478640675545, 0.004195366520434618, 0.027578797191381454, 0.007224421948194504, 0.010877430438995361, 0.011394038796424866, 0.15906786918640137, 0.03364025056362152, 0.10278035700321198, 0.06638745963573456, 0.020233934745192528, 0.020090876147150993, 0.23003800213336945, 0.021045740693807602, 0.123573899269104, 0.013127986341714859, 0.017776304855942726, 0.012206190265715122], [0.029381029307842255, 0.00725781312212348, 0.0027169017121195793, 0.0008467240841127932, 0.0009705211850814521, 0.001069069025106728, 
0.10530625283718109, 0.0052479589357972145, 0.002537058899179101, 0.0017401399090886116, 0.0010216145310550928, 0.42105570435523987, 0.009506180882453918, 0.2091958224773407, 0.031010355800390244, 0.0011243977351114154, 0.0013970434665679932, 0.00269713974557817, 0.15122275054454803, 0.005702367518097162, 0.003094328800216317, 0.00030081806471571326, 0.00022969530255068094, 0.00536827277392149], [0.018795963376760483, 0.009948099963366985, 0.008801599033176899, 0.013736177235841751, 0.012757975608110428, 0.006517065688967705, 0.05252055823802948, 0.0061625768430531025, 0.013767179101705551, 0.012922958470880985, 0.01735002174973488, 0.030927488580346107, 0.03710734471678734, 0.06727156043052673, 0.04776537045836449, 0.04541603475809097, 0.03687075152993202, 0.03228914737701416, 0.2713063955307007, 0.03590826317667961, 0.12342812120914459, 0.029458891600370407, 0.03590761870145798, 0.033062759786844254], [0.015561857260763645, 0.011801918968558311, 0.02024816907942295, 0.016877103596925735, 0.005157060455530882, 0.004809448961168528, 0.022308776155114174, 0.007828816771507263, 0.011526801623404026, 0.005041381809860468, 0.011962002143263817, 0.17335860431194305, 0.027703529223799706, 0.2910388708114624, 0.16652603447437286, 0.02332579717040062, 0.009613439440727234, 0.02114025503396988, 0.06081757694482803, 0.023377256467938423, 0.029719054698944092, 0.004122802522033453, 0.009362993761897087, 0.026770466938614845], [0.013306910172104836, 0.01709786243736744, 0.0470888651907444, 0.04066668078303337, 0.010299875400960445, 0.01334542129188776, 0.007797187194228172, 0.02529584988951683, 0.017367878928780556, 0.01239361148327589, 0.02738172933459282, 0.04925408959388733, 0.06424295902252197, 0.06017186492681503, 0.1363232284784317, 0.060389790683984756, 0.016274040564894676, 0.042822014540433884, 0.02525065280497074, 0.10533668845891953, 0.07307472825050354, 0.02819785661995411, 0.05309927463531494, 0.05352092161774635], [0.011283619329333305, 0.009565346874296665, 
0.04689816012978554, 0.040889937430620193, 0.015626851469278336, 0.011605684645473957, 0.005897423252463341, 0.04293457418680191, 0.03283533826470375, 0.01264639850705862, 0.08921928703784943, 0.017654990777373314, 0.026111416518688202, 0.01806623488664627, 0.06400712579488754, 0.03311789408326149, 0.02499052882194519, 0.027563806623220444, 0.012582842260599136, 0.11576449126005173, 0.11335700750350952, 0.028066709637641907, 0.17400984466075897, 0.025304457172751427], [0.01696745678782463, 0.01708906702697277, 0.00758353341370821, 0.009491320699453354, 0.0042933388613164425, 0.0010627037845551968, 0.0004144549020566046, 0.008746503852307796, 0.0024297686759382486, 0.005381275434046984, 0.014438354410231113, 0.11932375282049179, 0.010411771945655346, 0.32666659355163574, 0.05915239080786705, 0.028874298557639122, 0.016113679856061935, 0.013076670467853546, 0.004145005717873573, 0.12223875522613525, 0.05006212741136551, 0.021387256681919098, 0.04305025935173035, 0.09759962558746338], [0.03265024721622467, 0.014818885363638401, 0.01801614835858345, 0.019833868369460106, 0.010260224342346191, 0.006207054480910301, 0.008005714975297451, 0.012050793506205082, 0.004720540717244148, 0.006026261951774359, 0.019691260531544685, 0.12728968262672424, 0.01161247305572033, 0.13401709496974945, 0.08588208258152008, 0.03590861335396767, 0.02725200727581978, 0.0489344447851181, 0.0503707192838192, 0.08425556123256683, 0.06369594484567642, 0.01840912736952305, 0.0647507831454277, 0.09534046798944473], [0.02721601538360119, 0.016071951016783714, 0.017362669110298157, 0.025599127635359764, 0.008824765682220459, 0.004258900880813599, 0.0015333584742620587, 0.011079952120780945, 0.003992341924458742, 0.007160874083638191, 0.019489986822009087, 0.07222779095172882, 0.010242861695587635, 0.04539204016327858, 0.055962007492780685, 0.052175287157297134, 0.027117222547531128, 0.03788512572646141, 0.014175688847899437, 0.13180352747440338, 0.10081496089696884, 0.04043617844581604, 
0.10639171302318573, 0.1627856343984604], [0.0063827200792729855, 0.0055517167784273624, 0.009892228990793228, 0.01519018318504095, 0.008275847882032394, 0.0016595367342233658, 0.005207477603107691, 0.006567788776010275, 0.0019192448817193508, 0.002300033112987876, 0.0074106426909565926, 0.1461556851863861, 0.025160841643810272, 0.3323500156402588, 0.09660089015960693, 0.04259183257818222, 0.030709881335496902, 0.019891245290637016, 0.044835835695266724, 0.07448925077915192, 0.03317919000983238, 0.007425328716635704, 0.01445814035832882, 0.0617944560945034], [0.021349970251321793, 0.011706876568496227, 0.033576007932424545, 0.06619646400213242, 0.01753983460366726, 0.036592211574316025, 0.03555241599678993, 0.018534967675805092, 0.02502559870481491, 0.01236711349338293, 0.03386189788579941, 0.053653307259082794, 0.02768503688275814, 0.021422456949949265, 0.07038372755050659, 0.06174696609377861, 0.02591819502413273, 0.0470627136528492, 0.07775446027517319, 0.057739123702049255, 0.09579788148403168, 0.020108630880713463, 0.06025020033121109, 0.06817404180765152], [0.07305452972650528, 0.01310284249484539, 0.01605875790119171, 0.006892835721373558, 0.01125484798103571, 0.003111150348559022, 0.013359432108700275, 0.01583322137594223, 0.0037314314395189285, 0.0020219760481268167, 0.009296106174588203, 0.1932850480079651, 0.0073435562662780285, 0.27603158354759216, 0.04157313331961632, 0.009635752998292446, 0.03188466653227806, 0.01594170182943344, 0.05122596025466919, 0.07789260894060135, 0.04684996232390404, 0.0038125081919133663, 0.02310006134212017, 0.05370623245835304]], [[0.052982281893491745, 0.059921760112047195, 0.06350628286600113, 0.04573923721909523, 0.048429884016513824, 0.04159886762499809, 0.03162418678402901, 0.028125667944550514, 0.041072774678468704, 0.018846420571208, 0.05238667130470276, 0.012238649651408195, 0.028253670781850815, 0.04668566957116127, 0.05372358486056328, 0.02335730381309986, 0.04300008341670036, 0.03821615129709244, 
0.027064451947808266, 0.026370838284492493, 0.04713625833392143, 0.0221721101552248, 0.12046465277671814, 0.02708260342478752], [0.02903800643980503, 0.033901240676641464, 0.041051704436540604, 0.03322024270892143, 0.05403006076812744, 0.019980333745479584, 0.031279612332582474, 0.0360649898648262, 0.038324445486068726, 0.017473621293902397, 0.048445943742990494, 0.029257627204060555, 0.04677233472466469, 0.06705394387245178, 0.04715050756931305, 0.026808101683855057, 0.057251788675785065, 0.0361102931201458, 0.04544245824217796, 0.05283869430422783, 0.06679841876029968, 0.025503385812044144, 0.08042282611131668, 0.035779424011707306], [0.02610950358211994, 0.03272230550646782, 0.0577545091509819, 0.03053671307861805, 0.035327039659023285, 0.05961684510111809, 0.056616462767124176, 0.047479480504989624, 0.04789520800113678, 0.1937939077615738, 0.03604942560195923, 0.03780990466475487, 0.014223979786038399, 0.0377168171107769, 0.028392059728503227, 0.014478602446615696, 0.01610766164958477, 0.021891262382268906, 0.025501536205410957, 0.014411448501050472, 0.017867011949419975, 0.08449459075927734, 0.026673883199691772, 0.03652986139059067], [0.01162797212600708, 0.013239226303994656, 0.06608761101961136, 0.04615245759487152, 0.03468005359172821, 0.011977280490100384, 0.018215268850326538, 0.07086692005395889, 0.04360583424568176, 0.04118916019797325, 0.023185214027762413, 0.06692575663328171, 0.020184261724352837, 0.2529420256614685, 0.05421177297830582, 0.04450966790318489, 0.02675379253923893, 0.01007938850671053, 0.01331518217921257, 0.04358166828751564, 0.024819744750857353, 0.017319543287158012, 0.013937938958406448, 0.03059219755232334], [0.06935977190732956, 0.056029029190540314, 0.07048313319683075, 0.061346154659986496, 0.04096360132098198, 0.07965034246444702, 0.05044131726026535, 0.0783768743276596, 0.07542571425437927, 0.029515903443098068, 0.02741992473602295, 0.09721831977367401, 0.03141702339053154, 0.03770901635289192, 0.017403529956936836, 
0.035371944308280945, 0.016153210774064064, 0.02684018760919571, 0.01229945383965969, 0.019253892824053764, 0.016438771039247513, 0.010885843075811863, 0.008032314479351044, 0.031964752823114395], [0.09541843831539154, 0.10927268862724304, 0.03736822307109833, 0.03527915105223656, 0.058342475444078445, 0.09686443209648132, 0.0596800297498703, 0.04291556030511856, 0.07704739272594452, 0.07302680611610413, 0.043059539049863815, 0.018321141600608826, 0.024243921041488647, 0.055953480303287506, 0.010714888572692871, 0.014250876381993294, 0.02220579795539379, 0.035672303289175034, 0.014755372889339924, 0.009683164767920971, 0.02011954039335251, 0.01695379801094532, 0.022451212629675865, 0.006399845704436302], [0.03421459719538689, 0.022159431129693985, 0.06422688812017441, 0.05711595341563225, 0.09002448618412018, 0.05980518087744713, 0.08013750612735748, 0.06514684110879898, 0.09848354756832123, 0.04135001450777054, 0.0575128048658371, 0.04420342296361923, 0.02400495670735836, 0.030790643766522408, 0.029972413554787636, 0.030605990439653397, 0.0420900359749794, 0.015016058459877968, 0.018349071964621544, 0.01689457707107067, 0.023206181824207306, 0.01649428717792034, 0.017611032351851463, 0.020583992823958397], [0.04243594408035278, 0.044129375368356705, 0.029907869175076485, 0.03625703975558281, 0.1980670541524887, 0.10336955636739731, 0.03672231361269951, 0.04521796107292175, 0.0740177184343338, 0.023134609684348106, 0.08216112107038498, 0.006869656965136528, 0.013410053215920925, 0.012339239940047264, 0.013464881107211113, 0.009878850542008877, 0.08140227198600769, 0.018385177478194237, 0.007933588698506355, 0.009805901907384396, 0.0185548048466444, 0.015309701673686504, 0.07030647248029709, 0.006918772589415312], [0.022440452128648758, 0.04282110184431076, 0.03351591154932976, 0.04425903782248497, 0.05259022116661072, 0.04938172921538353, 0.039218295365571976, 0.05023812875151634, 0.10699140280485153, 0.13625968992710114, 0.045890677720308304, 0.19690139591693878, 
0.016431882977485657, 0.06646103411912918, 0.011928086169064045, 0.021691691130399704, 0.013665390200912952, 0.007391073275357485, 0.005049354862421751, 0.0036783479154109955, 0.004592106677591801, 0.014331956394016743, 0.0026394566521048546, 0.011631632223725319], [0.04275604337453842, 0.03349980711936951, 0.03105047345161438, 0.023234104737639427, 0.02738480269908905, 0.0447021909058094, 0.07355479896068573, 0.10755697637796402, 0.058652039617300034, 0.06688135117292404, 0.06698111444711685, 0.07310270518064499, 0.04593173414468765, 0.09592261165380478, 0.01695716753602028, 0.016017599031329155, 0.013007362373173237, 0.02961900644004345, 0.031858813017606735, 0.03348783403635025, 0.01303702499717474, 0.021270183846354485, 0.01602781191468239, 0.017506353557109833], [0.012571119703352451, 0.014965401031076908, 0.03631008788943291, 0.06778539717197418, 0.021656811237335205, 0.01199366245418787, 0.022162888199090958, 0.02892572432756424, 0.024780213832855225, 0.12651526927947998, 0.01860637776553631, 0.17690686881542206, 0.013322265818715096, 0.13016772270202637, 0.027282049879431725, 0.11257359385490417, 0.017473457381129265, 0.006890156306326389, 0.015183577314019203, 0.017962763085961342, 0.0091363824903965, 0.04968669265508652, 0.002744099125266075, 0.03439748287200928], [0.006521178875118494, 0.004594570491462946, 0.011309915222227573, 0.025134654715657234, 0.015289644710719585, 0.0015981670003384352, 0.007674130145460367, 0.010321054607629776, 0.0030310663860291243, 0.024238867685198784, 0.014570526778697968, 0.046085041016340256, 0.017284344881772995, 0.21484637260437012, 0.053151510655879974, 0.13548430800437927, 0.04945669695734978, 0.014760085381567478, 0.06019848212599754, 0.07185889035463333, 0.02695557288825512, 0.06544595956802368, 0.03522301837801933, 0.08496589958667755], [0.011724651791155338, 0.009718050248920918, 0.08566070348024368, 0.025504441931843758, 0.003976060077548027, 0.010480196215212345, 0.014245289377868176, 0.06358569115400314, 
0.010157420299947262, 0.02120303176343441, 0.01420644111931324, 0.10784203559160233, 0.01567906141281128, 0.0819312334060669, 0.07261032611131668, 0.05018319934606552, 0.005583775695413351, 0.022540302947163582, 0.04049833118915558, 0.16340523958206177, 0.01572192646563053, 0.024946138262748718, 0.00879376195371151, 0.11980259418487549], [0.002294770907610655, 0.001515305251814425, 0.012087126262485981, 0.014314238913357258, 0.0041715288534760475, 0.0006274236948229373, 0.0023106548469513655, 0.04265623539686203, 0.004536217078566551, 0.0016268593026325107, 0.02551736682653427, 0.05046894773840904, 0.02056284062564373, 0.280599445104599, 0.033049076795578, 0.03147272765636444, 0.011360319331288338, 0.00896850973367691, 0.019933955743908882, 0.33291301131248474, 0.026882996782660484, 0.005249227397143841, 0.025014575570821762, 0.04186664894223213], [0.0022504692897200584, 0.0014719032915309072, 0.01670653373003006, 0.029964035376906395, 0.0018056826665997505, 0.000495993357617408, 0.0022435090504586697, 0.009714603424072266, 0.0020492211915552616, 0.008372297510504723, 0.010471080429852009, 0.07422219961881638, 0.007614506408572197, 0.07058413326740265, 0.0673908144235611, 0.12194675207138062, 0.00686738733202219, 0.00714095588773489, 0.030346190556883812, 0.12177974730730057, 0.027297595515847206, 0.055662162601947784, 0.022907176986336708, 0.3006950914859772], [0.005262759979814291, 0.004985329695045948, 0.03192563354969025, 0.026202034205198288, 0.01727186143398285, 0.0031133322045207024, 0.004537099506705999, 0.037479858845472336, 0.015543239191174507, 0.005862529389560223, 0.029558340087532997, 0.026140380650758743, 0.022371497005224228, 0.09486551582813263, 0.07261373847723007, 0.043674349784851074, 0.04287869110703468, 0.01534239575266838, 0.025928420946002007, 0.21941743791103363, 0.09553316235542297, 0.020055048167705536, 0.07944102585315704, 0.0599963404238224], [0.05016009137034416, 0.031191932037472725, 0.05684749782085419, 0.07214336842298508, 
0.023015985265374184, 0.02864723652601242, 0.025215495377779007, 0.051689811050891876, 0.024753985926508904, 0.011014269664883614, 0.01621112786233425, 0.08109830319881439, 0.027987821027636528, 0.02431739866733551, 0.022866997867822647, 0.07532408833503723, 0.021075092256069183, 0.03882800415158272, 0.027983764186501503, 0.07823330909013748, 0.03830325976014137, 0.02159678190946579, 0.016070805490016937, 0.13542354106903076], [0.05702706426382065, 0.049452587962150574, 0.021291667595505714, 0.04509078338742256, 0.02314239926636219, 0.023583324626088142, 0.018853316083550453, 0.016957733780145645, 0.017637597396969795, 0.00646559800952673, 0.03418959304690361, 0.010472716763615608, 0.038241416215896606, 0.015497233718633652, 0.01963874138891697, 0.03350267931818962, 0.03784480318427086, 0.07900375872850418, 0.0501316636800766, 0.07599679380655289, 0.09473675489425659, 0.03152553364634514, 0.15464209020137787, 0.045074090361595154], [0.017933227121829987, 0.00846034474670887, 0.02847692184150219, 0.0639355331659317, 0.03682323917746544, 0.009556747041642666, 0.023556798696517944, 0.016570748761296272, 0.017353443428874016, 0.0038096397183835506, 0.03169485181570053, 0.025553593412041664, 0.024990463629364967, 0.009171589277684689, 0.03644265606999397, 0.06880838423967361, 0.07016152143478394, 0.022599363699555397, 0.05405501276254654, 0.0797891914844513, 0.09738043695688248, 0.02536729909479618, 0.07727309316396713, 0.15023593604564667], [0.019572781398892403, 0.019395440816879272, 0.013645462691783905, 0.028411252424120903, 0.07908622175455093, 0.025081492960453033, 0.013101449236273766, 0.011475078761577606, 0.013932384550571442, 0.00345045980066061, 0.0559120699763298, 0.0038491999730467796, 0.01630462519824505, 0.004800492897629738, 0.02130063809454441, 0.016881048679351807, 0.127282977104187, 0.03122526779770851, 0.023763995617628098, 0.03547047823667526, 0.051613353192806244, 0.024470357224345207, 0.328365296125412, 0.03160824999213219], [0.014000911265611649, 
0.018908437341451645, 0.02334628254175186, 0.05240732431411743, 0.035365451127290726, 0.011758721433579922, 0.009090968407690525, 0.010140336118638515, 0.019842064008116722, 0.0060938019305467606, 0.04094669595360756, 0.028028154745697975, 0.017646318301558495, 0.008286907337605953, 0.033760108053684235, 0.043698329478502274, 0.0683029368519783, 0.02966850809752941, 0.030646584928035736, 0.046424467116594315, 0.08667832612991333, 0.04051034897565842, 0.14190562069416046, 0.18254241347312927], [0.05406995862722397, 0.037412602454423904, 0.02799246273934841, 0.029802029952406883, 0.025686120614409447, 0.040003497153520584, 0.052406180649995804, 0.037101589143276215, 0.02797471359372139, 0.020832214504480362, 0.04052535071969032, 0.01623990572988987, 0.04122837632894516, 0.017294002696871758, 0.021041110157966614, 0.01841026172041893, 0.02460860088467598, 0.06805269420146942, 0.07700223475694656, 0.05892409384250641, 0.05146709457039833, 0.0502692349255085, 0.09743846952915192, 0.06421714276075363], [0.01417381688952446, 0.010975479148328304, 0.03649815544486046, 0.08993519097566605, 0.020457010716199875, 0.008431882597506046, 0.01409293431788683, 0.01593133807182312, 0.012274067848920822, 0.021333690732717514, 0.012963901273906231, 0.04287996515631676, 0.013199004344642162, 0.02059229463338852, 0.03422919660806656, 0.13059666752815247, 0.03601180762052536, 0.0198784489184618, 0.04438414424657822, 0.06432123482227325, 0.067062146961689, 0.07989221811294556, 0.028470395132899284, 0.16141504049301147], [0.011495930142700672, 0.007327307015657425, 0.009918434545397758, 0.021092433482408524, 0.011364388279616833, 0.002704128623008728, 0.006148599088191986, 0.005767283495515585, 0.002368559595197439, 0.0030407931189984083, 0.006737562827765942, 0.0036306458059698343, 0.016828222200274467, 0.01399671845138073, 0.016334014013409615, 0.03618795424699783, 0.042046695947647095, 0.04939533397555351, 0.10414416342973709, 0.11682283878326416, 0.15066292881965637, 
0.054771073162555695, 0.19148263335227966, 0.11573150753974915]], [[0.01803731732070446, 0.01143220067024231, 0.046672191470861435, 0.052026450634002686, 0.049461837857961655, 0.033908531069755554, 0.026229679584503174, 0.040167197585105896, 0.04705752804875374, 0.06802769005298615, 0.026856577023863792, 0.1300242841243744, 0.09524588286876678, 0.05837442725896835, 0.056905217468738556, 0.051439523696899414, 0.0375138595700264, 0.016914285719394684, 0.013552220538258553, 0.01929319277405739, 0.01890927366912365, 0.0224495567381382, 0.012767958454787731, 0.04673311859369278], [0.03221478313207626, 0.019664855673909187, 0.043186288326978683, 0.04504461959004402, 0.04767422378063202, 0.03556329384446144, 0.035773955285549164, 0.02851244993507862, 0.04449979588389397, 0.039865367114543915, 0.03529872000217438, 0.060370393097400665, 0.07645265758037567, 0.046846769750118256, 0.04607318714261055, 0.04792553558945656, 0.04583321884274483, 0.03495778888463974, 0.03694446012377739, 0.02418019436299801, 0.04696546122431755, 0.03255009278655052, 0.036163799464702606, 0.05743814632296562], [0.036559756845235825, 0.028263462707400322, 0.07689645886421204, 0.026754483580589294, 0.015406082384288311, 0.05414793640375137, 0.10417850315570831, 0.14560189843177795, 0.05198782682418823, 0.027835723012685776, 0.044133108109235764, 0.03284141421318054, 0.05617118254303932, 0.019546013325452805, 0.026187554001808167, 0.015238544903695583, 0.01498399768024683, 0.049832239747047424, 0.055035315454006195, 0.06181327998638153, 0.01809442974627018, 0.013047948479652405, 0.014085263945162296, 0.011357598938047886], [0.014471212401986122, 0.01041460782289505, 0.038132548332214355, 0.015040573664009571, 0.06900349259376526, 0.026236258447170258, 0.03831888362765312, 0.038857005536556244, 0.06121828407049179, 0.042731016874313354, 0.07647868245840073, 0.027602769434452057, 0.07601989805698395, 0.02684025838971138, 0.05699446052312851, 0.011266241781413555, 0.07313501834869385, 
0.027520498260855675, 0.03394509479403496, 0.04036691039800644, 0.05042418837547302, 0.04212507978081703, 0.06694154441356659, 0.03591548651456833], [0.035815075039863586, 0.027540862560272217, 0.04961506649851799, 0.02457703836262226, 0.04209510609507561, 0.06044638156890869, 0.023320285603404045, 0.016371533274650574, 0.05216364935040474, 0.09895773231983185, 0.03713369742035866, 0.06420039385557175, 0.07163769751787186, 0.04397084191441536, 0.06658484041690826, 0.018421005457639694, 0.03535786271095276, 0.022305132821202278, 0.014453329145908356, 0.01218993030488491, 0.030085820704698563, 0.06751076877117157, 0.02803177200257778, 0.05721417814493179], [0.02660234272480011, 0.020562149584293365, 0.05101357400417328, 0.03734853118658066, 0.025321638211607933, 0.06893979758024216, 0.049529626965522766, 0.04886138439178467, 0.05310779809951782, 0.09260162711143494, 0.018393624573946, 0.14034967124462128, 0.123841792345047, 0.06105639785528183, 0.04295118898153305, 0.026355383917689323, 0.012152832932770252, 0.020626161247491837, 0.015342473983764648, 0.013024304062128067, 0.007901263423264027, 0.017981823533773422, 0.0060158115811645985, 0.020118629559874535], [0.046049814671278, 0.0321110375225544, 0.08643683046102524, 0.059960003942251205, 0.03464411199092865, 0.08345381170511246, 0.04125162214040756, 0.037159912288188934, 0.04940418899059296, 0.11016654968261719, 0.01273986417800188, 0.089786097407341, 0.04748522490262985, 0.03290961682796478, 0.03761104494333267, 0.03455604985356331, 0.01823911815881729, 0.017307903617620468, 0.01646154560148716, 0.011900489218533039, 0.013053341768682003, 0.04473917558789253, 0.007014482747763395, 0.03555818647146225], [0.007740366738289595, 0.010480412282049656, 0.05806044489145279, 0.04648641124367714, 0.03343481943011284, 0.014701606705784798, 0.021739376708865166, 0.020771076902747154, 0.05527608096599579, 0.06291593611240387, 0.014034599997103214, 0.06849788874387741, 0.11307891458272934, 0.0590740367770195, 
0.08777985721826553, 0.0772283524274826, 0.045724961906671524, 0.010123233310878277, 0.022744910791516304, 0.023885492235422134, 0.05146445706486702, 0.042266473174095154, 0.011727160774171352, 0.04076322913169861], [0.06552886962890625, 0.0397811233997345, 0.03854408115148544, 0.027905261144042015, 0.013873595744371414, 0.08432642370462418, 0.05133204907178879, 0.09426887333393097, 0.10694260150194168, 0.06465030461549759, 0.02087397314608097, 0.13849477469921112, 0.03432399779558182, 0.055985040962696075, 0.008012504316866398, 0.022418417036533356, 0.00849268026649952, 0.03833397850394249, 0.02150508388876915, 0.025072131305933, 0.010135801509022713, 0.012574462220072746, 0.003466647118330002, 0.013157309964299202], [0.0037663874682039022, 0.0044183917343616486, 0.026486633345484734, 0.009098977781832218, 0.03517797589302063, 0.005469786003232002, 0.019306303933262825, 0.005605829879641533, 0.023959346115589142, 0.05150223150849342, 0.015036983415484428, 0.02084423042833805, 0.4405560791492462, 0.06335724145174026, 0.09916092455387115, 0.0194209273904562, 0.031582869589328766, 0.0036378109361976385, 0.014874482527375221, 0.0075781517662107944, 0.013509009964764118, 0.05074520781636238, 0.009552989155054092, 0.025351302698254585], [0.03782561421394348, 0.02206498198211193, 0.023989945650100708, 0.0224009919911623, 0.035016562789678574, 0.05044262111186981, 0.0609857551753521, 0.05943677946925163, 0.04035400599241257, 0.02922690473496914, 0.062453750520944595, 0.05556272715330124, 0.1770469695329666, 0.10812783241271973, 0.016517959535121918, 0.023364195600152016, 0.024934658780694008, 0.041750919073820114, 0.04578656330704689, 0.02937459386885166, 0.0052039227448403835, 0.010103771463036537, 0.007836339063942432, 0.01019163616001606], [0.0028036704752594233, 0.0036512541119009256, 0.015804210677742958, 0.014945093542337418, 0.06662678718566895, 0.002920543309301138, 0.010104626417160034, 0.002528001554310322, 0.014793673530220985, 0.014658820815384388, 
0.029233131557703018, 0.010521849617362022, 0.18644244968891144, 0.03881613537669182, 0.17926613986492157, 0.0351853221654892, 0.0919068232178688, 0.005781975109130144, 0.023078888654708862, 0.010132022202014923, 0.052576784044504166, 0.04374117776751518, 0.07466547191143036, 0.06981514394283295], [0.008595158345997334, 0.005429253913462162, 0.010124360211193562, 0.004063830710947514, 0.13455840945243835, 0.006551838479936123, 0.012904276140034199, 0.00895720161497593, 0.04295080900192261, 0.049787960946559906, 0.08079706132411957, 0.02189476042985916, 0.1828344613313675, 0.07175572216510773, 0.023745883256196976, 0.0046927141956985, 0.10970345139503479, 0.007856079377233982, 0.016631988808512688, 0.01598658785223961, 0.026220008730888367, 0.07329543679952621, 0.0348796471953392, 0.04578312486410141], [0.00178168760612607, 0.002133617177605629, 0.012478312477469444, 0.006311688106507063, 0.06650982797145844, 0.0025263666175305843, 0.006343204062432051, 0.0034472632687538862, 0.024854669347405434, 0.013853414915502071, 0.10708259046077728, 0.008135488256812096, 0.1423802673816681, 0.02042144536972046, 0.1052904948592186, 0.012681744061410427, 0.1461378037929535, 0.004974297247827053, 0.019177652895450592, 0.017606569454073906, 0.06852323561906815, 0.05036570131778717, 0.1233552098274231, 0.033627524971961975], [0.004926084075123072, 0.004605602938681841, 0.026157191023230553, 0.004517358727753162, 0.022739361971616745, 0.0059084827080369, 0.017252452671527863, 0.014995967969298363, 0.021479040384292603, 0.006049127783626318, 0.27388715744018555, 0.0047536795027554035, 0.06955970823764801, 0.011015716008841991, 0.04013654962182045, 0.004022004548460245, 0.04881446436047554, 0.01841108873486519, 0.04910937324166298, 0.06070515140891075, 0.06252086907625198, 0.030991550534963608, 0.17423303425312042, 0.023208964616060257], [0.002428155392408371, 0.0017865010304376483, 0.010779830627143383, 0.004778822418302298, 0.058316994458436966, 0.0029770361725240946, 
0.004626944661140442, 0.0035903523676097393, 0.023289470002055168, 0.011974714696407318, 0.06919407844543457, 0.005946747492998838, 0.049818214029073715, 0.010652243159711361, 0.06294592469930649, 0.005574611946940422, 0.1320439726114273, 0.007871516048908234, 0.01635419949889183, 0.01725207082927227, 0.16359461843967438, 0.06194797903299332, 0.21614274382591248, 0.05611235275864601], [0.025662308558821678, 0.022088780999183655, 0.029272282496094704, 0.023249628022313118, 0.048490576446056366, 0.02942492999136448, 0.010298891924321651, 0.008028805255889893, 0.03265764191746712, 0.05138570815324783, 0.03501726686954498, 0.029344825074076653, 0.05104082077741623, 0.02431645803153515, 0.07944445312023163, 0.01883404515683651, 0.06297566741704941, 0.021851489320397377, 0.014676439575850964, 0.014979875646531582, 0.08815353363752365, 0.10250349342823029, 0.07688268274068832, 0.09941934794187546], [0.04484262689948082, 0.048267215490341187, 0.033690646290779114, 0.055007655173540115, 0.028303513303399086, 0.028325265273451805, 0.03413119167089462, 0.017989620566368103, 0.034545619040727615, 0.026270978152751923, 0.01085167471319437, 0.05315662920475006, 0.04178372025489807, 0.036285899579524994, 0.05160956084728241, 0.05537353456020355, 0.03155217319726944, 0.04424191638827324, 0.059172775596380234, 0.026160340756177902, 0.0838882103562355, 0.037496328353881836, 0.03280925005674362, 0.08424367755651474], [0.03571454808115959, 0.028626523911952972, 0.06570550799369812, 0.0828583613038063, 0.03774361312389374, 0.028988199308514595, 0.014760083518922329, 0.01360884215682745, 0.025340501219034195, 0.04034921154379845, 0.008808442391455173, 0.029527384787797928, 0.025284817442297935, 0.01486253272742033, 0.06561776250600815, 0.06167883053421974, 0.03878038376569748, 0.01934937573969364, 0.021975819021463394, 0.01696365512907505, 0.08299530297517776, 0.08948039263486862, 0.03493049740791321, 0.11604945361614227], [0.0033411041367799044, 0.004812881350517273, 
0.03267526626586914, 0.03163490816950798, 0.03360965847969055, 0.0028958090115338564, 0.005491297226399183, 0.004403320141136646, 0.02636805549263954, 0.02049030177295208, 0.007613976486027241, 0.016750292852520943, 0.06003478541970253, 0.022631121799349785, 0.11454962939023972, 0.07084326446056366, 0.08466418832540512, 0.005884817335754633, 0.0178997665643692, 0.01842561736702919, 0.23566842079162598, 0.0620243065059185, 0.03785379230976105, 0.07943344861268997], [0.05161009728908539, 0.04421568661928177, 0.05413404107093811, 0.037140484899282455, 0.01560199074447155, 0.018155094236135483, 0.018139444291591644, 0.031582776457071304, 0.05496715381741524, 0.014549658633768559, 0.013345417566597462, 0.02456166222691536, 0.011654992587864399, 0.011487412266433239, 0.029644690454006195, 0.03924576938152313, 0.024003757163882256, 0.04401719570159912, 0.04245021194219589, 0.05441281571984291, 0.21422307193279266, 0.036247942596673965, 0.04394787177443504, 0.07066082209348679], [0.006360655650496483, 0.008808942511677742, 0.03211776167154312, 0.013528977520763874, 0.03646684065461159, 0.0032961315009742975, 0.012574893422424793, 0.0047256979160010815, 0.016128748655319214, 0.032215800136327744, 0.0066286600194871426, 0.012829614803195, 0.23061785101890564, 0.04197238013148308, 0.17586414515972137, 0.03264341503381729, 0.048377055674791336, 0.004769697319716215, 0.019690129905939102, 0.012956345453858376, 0.06033645197749138, 0.09041890501976013, 0.024688992649316788, 0.07198194414377213], [0.10611774027347565, 0.0699993297457695, 0.03513976186513901, 0.043593451380729675, 0.026412954553961754, 0.037584442645311356, 0.03521699458360672, 0.04114225506782532, 0.018482623621821404, 0.010677443817257881, 0.020470168441534042, 0.030095316469669342, 0.04993167147040367, 0.04192231222987175, 0.03270837664604187, 0.0510188527405262, 0.02534531056880951, 0.08655878901481628, 0.055303506553173065, 0.048832397907972336, 0.032776061445474625, 0.014935465529561043, 0.02886047214269638, 
0.05687430128455162], [0.0021971275564283133, 0.0045999325811862946, 0.012516153044998646, 0.010538476519286633, 0.021245179697871208, 0.0010155874770134687, 0.0025857179425656796, 0.0008942877757363021, 0.00435472559183836, 0.004610804840922356, 0.007944867014884949, 0.003829988418146968, 0.09081319719552994, 0.010895299725234509, 0.3947904109954834, 0.024030257016420364, 0.04769634082913399, 0.0034143426455557346, 0.010463897138834, 0.007652864791452885, 0.09516409039497375, 0.03415430337190628, 0.09888572245836258, 0.10570638626813889]], [[0.021480221301317215, 0.0179589930921793, 0.038062550127506256, 0.062103092670440674, 0.015046291053295135, 0.014690379612147808, 0.027978645637631416, 0.015114683657884598, 0.06862073391675949, 0.0274185910820961, 0.010797635652124882, 0.04666737839579582, 0.13984940946102142, 0.038739778101444244, 0.02811145968735218, 0.04556034877896309, 0.012877325527369976, 0.03975922614336014, 0.039902929216623306, 0.02201980911195278, 0.13998688757419586, 0.03671564534306526, 0.021142790094017982, 0.06939513981342316], [0.025752505287528038, 0.02259455993771553, 0.028019379824399948, 0.0529329814016819, 0.010403426364064217, 0.015930309891700745, 0.029145684093236923, 0.024493657052516937, 0.03340946137905121, 0.037877075374126434, 0.012533197179436684, 0.05678562819957733, 0.19703075289726257, 0.06599666178226471, 0.032816678285598755, 0.06901280581951141, 0.009575795382261276, 0.035477787256240845, 0.038641154766082764, 0.0411243662238121, 0.05017128959298134, 0.05062222480773926, 0.013029924593865871, 0.04662270098924637], [0.02694140374660492, 0.03394395858049393, 0.08897430449724197, 0.04415620118379593, 0.010272374376654625, 0.02991049364209175, 0.012288345023989677, 0.017399923875927925, 0.1751497983932495, 0.013983252458274364, 0.01694711670279503, 0.009716334752738476, 0.06751897931098938, 0.018230721354484558, 0.04395582526922226, 0.006872765254229307, 0.0070529598742723465, 0.02347654663026333, 0.008739925920963287, 
0.011356689967215061, 0.2575874328613281, 0.012169712223112583, 0.04079899191856384, 0.022556012496352196], [0.008963635191321373, 0.009683610871434212, 0.012359589338302612, 0.006746338680386543, 0.008394245058298111, 0.007733129896223545, 0.01664842665195465, 0.007592856418341398, 0.023419544100761414, 0.06354732066392899, 0.006883079651743174, 0.00978813972324133, 0.5463482141494751, 0.0552339144051075, 0.030011583119630814, 0.00966519583016634, 0.00985807552933693, 0.010309450328350067, 0.018709883093833923, 0.016711391508579254, 0.026256825774908066, 0.08215682208538055, 0.006475583650171757, 0.006503107491880655], [0.04762519896030426, 0.03330674767494202, 0.014795145019888878, 0.025711150839924812, 0.047017525881528854, 0.03270304203033447, 0.042149629443883896, 0.01757708191871643, 0.06471195071935654, 0.03330307453870773, 0.01345274318009615, 0.012078057043254375, 0.09277768433094025, 0.02865956537425518, 0.01366298645734787, 0.03142477199435234, 0.04484085738658905, 0.05796067789196968, 0.05661282315850258, 0.03635973110795021, 0.12499293684959412, 0.05631684139370918, 0.036104168742895126, 0.035855576395988464], [0.02380272187292576, 0.015112917870283127, 0.019099680706858635, 0.04438474029302597, 0.024693429470062256, 0.009051215834915638, 0.014178491197526455, 0.0034940317273139954, 0.1337491273880005, 0.004595061298459768, 0.0027445326559245586, 0.0024432153441011906, 0.09437058866024017, 0.010419538244605064, 0.012022542767226696, 0.016666026785969734, 0.021143129095435143, 0.017460081726312637, 0.021627109497785568, 0.007454634178429842, 0.4640478193759918, 0.009081924334168434, 0.01597181335091591, 0.012385652400553226], [0.02217680774629116, 0.0230729840695858, 0.01981549710035324, 0.047968875616788864, 0.0347944013774395, 0.01452319510281086, 0.03435971215367317, 0.010180161334574223, 0.06440506875514984, 0.012298393994569778, 0.007312893867492676, 0.00971359945833683, 0.05368928983807564, 0.013887728564441204, 0.00985471811145544, 
0.03363799676299095, 0.042266953736543655, 0.09025471657514572, 0.07680661976337433, 0.02613462693989277, 0.2618491053581238, 0.0298544242978096, 0.03719467669725418, 0.023947589099407196], [0.08850529789924622, 0.051373839378356934, 0.03427805006504059, 0.09403219819068909, 0.011028929613530636, 0.01649521477520466, 0.035179443657398224, 0.01767405867576599, 0.0355241522192955, 0.020523468032479286, 0.010102621279656887, 0.10636528581380844, 0.07215116918087006, 0.05172886326909065, 0.01643892005085945, 0.12034953385591507, 0.008803363889455795, 0.019554313272237778, 0.02635074593126774, 0.020876115188002586, 0.032495614141225815, 0.014872072264552116, 0.013909522444009781, 0.08138717710971832], [0.06723613291978836, 0.03153563663363457, 0.15032754838466644, 0.07036352902650833, 0.029553623870015144, 0.04587500914931297, 0.09434113651514053, 0.025472888723015785, 0.08159755915403366, 0.021239668130874634, 0.030187664553523064, 0.01053835079073906, 0.14995788037776947, 0.029926160350441933, 0.034166350960731506, 0.021131260320544243, 0.013018508441746235, 0.012435954064130783, 0.018714435398578644, 0.005256440490484238, 0.017029646784067154, 0.006784842815250158, 0.019840436056256294, 0.013469339348375797], [0.009672129526734352, 0.007944716140627861, 0.03711364045739174, 0.014665316790342331, 0.03916337341070175, 0.012653493322432041, 0.08053995668888092, 0.15351970493793488, 0.056487515568733215, 0.10582288354635239, 0.012071873992681503, 0.04242509976029396, 0.04148556664586067, 0.033364810049533844, 0.008931318297982216, 0.009842537343502045, 0.02431521937251091, 0.016707925125956535, 0.041952550411224365, 0.08192180842161179, 0.03903339058160782, 0.09799186885356903, 0.008843602612614632, 0.02352968044579029], [0.016505056992173195, 0.007747819181531668, 0.13320666551589966, 0.018229829147458076, 0.007293428760021925, 0.017682742327451706, 0.031225016340613365, 0.028874851763248444, 0.11201919615268707, 0.02394804172217846, 0.04186123237013817, 
0.021559692919254303, 0.37650632858276367, 0.02590928040444851, 0.09532852470874786, 0.00273138121701777, 0.0030013006180524826, 0.001287775463424623, 0.0031205909326672554, 0.0025756233371794224, 0.00871514156460762, 0.003505520988255739, 0.010915511287748814, 0.006249386351555586], [0.008449326269328594, 0.0054804184474051, 0.017252806574106216, 0.0008132708026096225, 0.007994696497917175, 0.009829865768551826, 0.031226947903633118, 0.03625909611582756, 0.06211615353822708, 0.16678135097026825, 0.01370005402714014, 0.01207918580621481, 0.335286021232605, 0.10956192761659622, 0.018155310302972794, 0.0025452564004808664, 0.006449016742408276, 0.00280668749473989, 0.022205108776688576, 0.019978061318397522, 0.008598526939749718, 0.09969425946474075, 0.0015069304499775171, 0.0012296534841880202], [0.00033007521415129304, 0.00022988859564065933, 0.012880770489573479, 0.004932557698339224, 0.00027882494032382965, 0.0006926929345354438, 0.0020513932686299086, 0.004810464568436146, 0.005624051205813885, 0.022782256826758385, 0.01679326221346855, 0.7409986853599548, 0.09715357422828674, 0.042291272431612015, 0.02879517339169979, 0.00569978216663003, 0.00016096909530460835, 0.00034868810325860977, 0.0002644979686010629, 0.00043826102046296, 0.00015858326514717191, 0.0011118727270513773, 0.0004327438655309379, 0.010739694349467754], [0.0003855243558064103, 0.00015835383965168148, 0.005269045941531658, 0.0010356189450249076, 0.00023046454589348286, 0.0005859335069544613, 0.0053397067822515965, 0.0023429831489920616, 0.0034761265851557255, 0.03614020720124245, 0.005719443783164024, 0.07271380722522736, 0.7883030772209167, 0.044361039996147156, 0.024575350806117058, 0.002904822351410985, 0.00015636274474672973, 0.00015509710647165775, 0.0010120572987943888, 0.0004106637788936496, 0.00010028185351984575, 0.0033989183139055967, 0.00011766342504415661, 0.0011075008660554886], [0.0019915930461138487, 0.0018894418608397245, 0.03708465397357941, 0.005129463970661163, 
0.0006108079105615616, 0.002569831907749176, 0.0038709109649062157, 0.014496472664177418, 0.024234801530838013, 0.03330273553729057, 0.017349708825349808, 0.11469310522079468, 0.49419301748275757, 0.08381547033786774, 0.13546603918075562, 0.003201280487701297, 0.00048425025306642056, 0.0012304234551265836, 0.001404267968609929, 0.004090128932148218, 0.003853735513985157, 0.006023446097970009, 0.002161344513297081, 0.006853074301034212], [0.0029853135347366333, 0.002573254518210888, 0.0020746118389070034, 0.002111996291205287, 0.002687611151486635, 0.0023946138098835945, 0.007088405545800924, 0.010592414066195488, 0.004742330405861139, 0.14676371216773987, 0.009391316212713718, 0.08384667336940765, 0.35726699233055115, 0.14297038316726685, 0.02086632326245308, 0.018229039385914803, 0.004105984698981047, 0.004241479095071554, 0.010326260700821877, 0.029586685821413994, 0.003340240800753236, 0.12232749164104462, 0.0019331590738147497, 0.0075536915101110935], [0.029124055057764053, 0.022213784977793694, 0.008167619816958904, 0.011761653237044811, 0.030402878299355507, 0.01989644765853882, 0.03239160776138306, 0.017626779153943062, 0.023621652275323868, 0.05457116663455963, 0.023340096697211266, 0.04412613809108734, 0.1140669658780098, 0.06444942951202393, 0.03007623739540577, 0.05027161166071892, 0.0466340072453022, 0.04603464901447296, 0.06971391290426254, 0.053711965680122375, 0.04590911045670509, 0.08298461884260178, 0.03091743402183056, 0.047986093908548355], [0.06159401312470436, 0.04214540496468544, 0.014018919318914413, 0.024977529421448708, 0.018214823678135872, 0.014512632973492146, 0.01426271814852953, 0.009253025986254215, 0.025814861059188843, 0.010670960880815983, 0.01258639432489872, 0.023155272006988525, 0.07452473044395447, 0.08265849947929382, 0.05832888185977936, 0.06622074544429779, 0.039894647896289825, 0.03346718102693558, 0.06460689753293991, 0.05294889211654663, 0.1484832763671875, 0.028096988797187805, 0.038272880017757416, 0.04128977283835411], 
[0.02202724479138851, 0.025728199630975723, 0.004793001338839531, 0.01725764013826847, 0.020684629678726196, 0.00866029318422079, 0.013823019340634346, 0.010635981336236, 0.010299485176801682, 0.01751704514026642, 0.010366562753915787, 0.04033217951655388, 0.026199493557214737, 0.04675903543829918, 0.016807304695248604, 0.09904365986585617, 0.056844085454940796, 0.10495702177286148, 0.10636841505765915, 0.09380848705768585, 0.10292190313339233, 0.06575474143028259, 0.03841268643736839, 0.03999780863523483], [0.04571326822042465, 0.03427454084157944, 0.004984436556696892, 0.026981763541698456, 0.004646801855415106, 0.004322696011513472, 0.006163258571177721, 0.012929164804518223, 0.004660347942262888, 0.011809738352894783, 0.007623673416674137, 0.2346329391002655, 0.014902738854289055, 0.09372446686029434, 0.014066585339605808, 0.19303655624389648, 0.008796711452305317, 0.018837928771972656, 0.021520791575312614, 0.07690443098545074, 0.019612673670053482, 0.020158424973487854, 0.012231198139488697, 0.10746482759714127], [0.04286424443125725, 0.037178125232458115, 0.008673273026943207, 0.017222747206687927, 0.04251855984330177, 0.012304660864174366, 0.009622753597795963, 0.008351312950253487, 0.012423374690115452, 0.010978901758790016, 0.01718929037451744, 0.011446716263890266, 0.014391870237886906, 0.0335911326110363, 0.02496558241546154, 0.0979684367775917, 0.11438577622175217, 0.07825261354446411, 0.05750637501478195, 0.0646059513092041, 0.1384851485490799, 0.038080163300037384, 0.07362972944974899, 0.03336318954825401], [0.007400561589747667, 0.0076973154209554195, 0.003775114193558693, 0.0066348835825920105, 0.021633943542838097, 0.002843782538548112, 0.008752552792429924, 0.0449068546295166, 0.009177811443805695, 0.021356340497732162, 0.003382875816896558, 0.021835697814822197, 0.005998903885483742, 0.021239139139652252, 0.004303917288780212, 0.02028944529592991, 0.03990417718887329, 0.030848247930407524, 0.045270610600709915, 0.3450118601322174, 
0.1503203958272934, 0.11914447695016861, 0.017290519550442696, 0.04098062589764595], [0.04427260160446167, 0.03232557699084282, 0.03567715734243393, 0.019691620022058487, 0.019617674872279167, 0.012873565778136253, 0.0214005708694458, 0.02226409874856472, 0.05820152908563614, 0.014982763677835464, 0.015801075845956802, 0.011960218660533428, 0.09166860580444336, 0.043425023555755615, 0.052728764712810516, 0.018075307831168175, 0.028020787984132767, 0.018555257469415665, 0.03951171040534973, 0.05683332681655884, 0.2291627824306488, 0.03318234160542488, 0.05300898849964142, 0.026758583262562752], [0.003805659245699644, 0.0042762900702655315, 0.0005303279031068087, 0.0003845526371151209, 0.007550887297838926, 0.001104603405110538, 0.0023343523498624563, 0.0023954175412654877, 0.006781384348869324, 0.023340128362178802, 0.0011532035423442721, 0.0020762127824127674, 0.03820465877652168, 0.04224620386958122, 0.004532010294497013, 0.008464948274195194, 0.03345699980854988, 0.013339613564312458, 0.06606438755989075, 0.10591210424900055, 0.2759900689125061, 0.34635674953460693, 0.005707076285034418, 0.003992067649960518]], [[0.04063957557082176, 0.02002030983567238, 0.10256063938140869, 0.03572436794638634, 0.024852942675352097, 0.021021943539381027, 0.025860700756311417, 0.1475141942501068, 0.11768823117017746, 0.020194731652736664, 0.0946071520447731, 0.024155905470252037, 0.022202273830771446, 0.021947957575321198, 0.03696414828300476, 0.018927518278360367, 0.014804272912442684, 0.006770345848053694, 0.012443953193724155, 0.09672663360834122, 0.029647760093212128, 0.011621690355241299, 0.04034038260579109, 0.012762448750436306], [0.02854849398136139, 0.011298132129013538, 0.10232333093881607, 0.046386655420064926, 0.020328395068645477, 0.025618208572268486, 0.03462395444512367, 0.1428537219762802, 0.09224308282136917, 0.022841889411211014, 0.07259751111268997, 0.035630807280540466, 0.04303549602627754, 0.018563739955425262, 0.047145579010248184, 0.026633862406015396, 
0.011827568523585796, 0.01147397793829441, 0.01879998855292797, 0.10170266777276993, 0.02465100586414337, 0.012728194706141949, 0.030773285776376724, 0.017370479181408882], [0.005718283820897341, 0.008057528175413609, 0.0711125060915947, 0.011697005480527878, 0.020831042900681496, 0.010183557868003845, 0.019999776035547256, 0.16341529786586761, 0.05869261920452118, 0.055851083248853683, 0.06796832382678986, 0.03289087116718292, 0.03889653831720352, 0.017111532390117645, 0.04439890384674072, 0.008948341012001038, 0.013919522985816002, 0.01631505787372589, 0.016975045204162598, 0.156027153134346, 0.035557277500629425, 0.051266226917505264, 0.05107693746685982, 0.023089559748768806], [0.0214459877461195, 0.022026289254426956, 0.058553654700517654, 0.01053437776863575, 0.03803769499063492, 0.01569536328315735, 0.06090030446648598, 0.09174066036939621, 0.1050259917974472, 0.061849258840084076, 0.0931539535522461, 0.010384819470345974, 0.04609024152159691, 0.020389238372445107, 0.032476864755153656, 0.006806765217334032, 0.025849271565675735, 0.01059926487505436, 0.03746607154607773, 0.07240093499422073, 0.054146189242601395, 0.05397634208202362, 0.04338282346725464, 0.007067753933370113], [0.008994110859930515, 0.007453701458871365, 0.09133796393871307, 0.010681034065783024, 0.009560499340295792, 0.008667992427945137, 0.015642492100596428, 0.15920686721801758, 0.07896789908409119, 0.010759866796433926, 0.08671081811189651, 0.005336480680853128, 0.03659193590283394, 0.02240212820470333, 0.10433869808912277, 0.008646960370242596, 0.013733165338635445, 0.013355313800275326, 0.015284779481589794, 0.19286945462226868, 0.045479245483875275, 0.011454050429165363, 0.04018053784966469, 0.00234396499581635], [0.029694076627492905, 0.016109677031636238, 0.06723406910896301, 0.05048700049519539, 0.03914940729737282, 0.017037320882081985, 0.02868696302175522, 0.12868155539035797, 0.17370754480361938, 0.030165070667862892, 0.12327329814434052, 0.028212182223796844, 
0.023318162187933922, 0.019466208294034004, 0.02961375191807747, 0.02698354423046112, 0.017425982281565666, 0.003188443835824728, 0.008300725370645523, 0.05823042616248131, 0.021765144541859627, 0.010564313270151615, 0.03814755007624626, 0.010557673871517181], [0.017075100913643837, 0.007852437905967236, 0.10460519790649414, 0.018660830333828926, 0.006233210675418377, 0.025195186957716942, 0.012098989449441433, 0.13552746176719666, 0.2602052092552185, 0.02658328413963318, 0.02603035978972912, 0.11053728312253952, 0.06852002441883087, 0.0376725010573864, 0.033915456384420395, 0.01042198110371828, 0.0028310578782111406, 0.004866322968155146, 0.0033691844437271357, 0.029945772141218185, 0.02092585898935795, 0.0062409802339971066, 0.00974525697529316, 0.020941007882356644], [0.014243013225495815, 0.007134859915822744, 0.11438843607902527, 0.01340622827410698, 0.03684883564710617, 0.03532414138317108, 0.04182550311088562, 0.0229740459471941, 0.35142597556114197, 0.07344783842563629, 0.07658259570598602, 0.03204410895705223, 0.022445807233452797, 0.019601788371801376, 0.03137144073843956, 0.010458260774612427, 0.019249722361564636, 0.0069154598750174046, 0.01184009201824665, 0.0073149013333022594, 0.017956718802452087, 0.016743237152695656, 0.009808243252336979, 0.006648677866905928], [0.054288484156131744, 0.052984289824962616, 0.0396922267973423, 0.028436832129955292, 0.06778035312891006, 0.07859791070222855, 0.07696273922920227, 0.040481165051460266, 0.06213392689824104, 0.05012872442603111, 0.0668720155954361, 0.04453685134649277, 0.01586000621318817, 0.04069795832037926, 0.04289389029145241, 0.03131668642163277, 0.04942622408270836, 0.023112980648875237, 0.02908407524228096, 0.016925426200032234, 0.011732730083167553, 0.019892724230885506, 0.026644989848136902, 0.029516737908124924], [0.04281940311193466, 0.015918299555778503, 0.0880337506532669, 0.03073701076209545, 0.00331553490832448, 0.020547593012452126, 0.00848415307700634, 0.04668676108121872, 
0.12401781976222992, 0.032628219574689865, 0.03663099557161331, 0.06359698623418808, 0.14217106997966766, 0.09039243310689926, 0.10928746312856674, 0.033799197524785995, 0.0031559488270431757, 0.010389229282736778, 0.0061538987793028355, 0.023145044222474098, 0.029259158298373222, 0.01253471802920103, 0.011226283386349678, 0.015069060027599335], [0.009555971249938011, 0.005960524547845125, 0.042493078857660294, 0.03863881528377533, 0.019420230761170387, 0.01776796206831932, 0.019871843978762627, 0.16319584846496582, 0.05795031785964966, 0.01112756971269846, 0.061876215040683746, 0.038296304643154144, 0.09827237576246262, 0.0203603133559227, 0.03414374962449074, 0.0428980328142643, 0.017079075798392296, 0.02379327453672886, 0.019126122817397118, 0.17997805774211884, 0.03557037562131882, 0.006583559326827526, 0.02629968337714672, 0.009740740992128849], [0.04860888794064522, 0.054526638239622116, 0.0412696897983551, 0.03009292669594288, 0.021761439740657806, 0.017358342185616493, 0.012294158339500427, 0.044605810195207596, 0.01115050632506609, 0.03488782048225403, 0.025845207273960114, 0.024439994245767593, 0.03338175639510155, 0.18785981833934784, 0.04527536779642105, 0.03831326216459274, 0.02732550911605358, 0.027126874774694443, 0.018444694578647614, 0.06956563144922256, 0.032459523528814316, 0.0677606537938118, 0.04012284427881241, 0.045522600412368774], [0.0014646692434325814, 0.0016779029974713922, 0.09848576039075851, 0.0031320415437221527, 0.0012814137153327465, 0.004804127849638462, 0.008776499889791012, 0.04435316100716591, 0.027611853554844856, 0.023512613028287888, 0.030931124463677406, 0.11122999340295792, 0.21867980062961578, 0.09241699427366257, 0.19136403501033783, 0.003532304661348462, 0.0011565914610400796, 0.014365948736667633, 0.010262757539749146, 0.029548445716500282, 0.012850606814026833, 0.011094133369624615, 0.012205555103719234, 0.04526166990399361], [0.004123490769416094, 0.0020505469292402267, 0.0759660005569458, 0.004670759197324514, 
0.004630284383893013, 0.002506515709683299, 0.009366062469780445, 0.03965351730585098, 0.030559327453374863, 0.026107627898454666, 0.020141873508691788, 0.019305851310491562, 0.17487002909183502, 0.2720872461795807, 0.1913021355867386, 0.0056775761768221855, 0.005691418889909983, 0.010162770748138428, 0.014931841753423214, 0.0369185172021389, 0.015234727412462234, 0.020084701478481293, 0.00755126029253006, 0.006405833177268505], [0.0019818341825157404, 0.001134231104515493, 0.11373331397771835, 0.006210274528712034, 0.001221145037561655, 0.0030144467018544674, 0.002652839757502079, 0.14269016683101654, 0.01107621006667614, 0.012759811244904995, 0.03317292779684067, 0.02286067046225071, 0.05830300971865654, 0.04269421845674515, 0.11206185072660446, 0.005456704180687666, 0.0012332850601524115, 0.01824607327580452, 0.005482714157551527, 0.2961105406284332, 0.0211084745824337, 0.024301789700984955, 0.036107324063777924, 0.026386167854070663], [0.010381572879850864, 0.011751257814466953, 0.0738457664847374, 0.00938869547098875, 0.024757370352745056, 0.009899305179715157, 0.030295446515083313, 0.06259681284427643, 0.0661345049738884, 0.050697289407253265, 0.10725732147693634, 0.005981667898595333, 0.0609765462577343, 0.031349070370197296, 0.07065843790769577, 0.007966497913002968, 0.02696327492594719, 0.020409971475601196, 0.037707217037677765, 0.08787079900503159, 0.06559577584266663, 0.07227475196123123, 0.049912456423044205, 0.005328228231519461], [0.006632746662944555, 0.006119784899055958, 0.06333757936954498, 0.010343696922063828, 0.00906576868146658, 0.005766516551375389, 0.010139279067516327, 0.13375011086463928, 0.033160753548145294, 0.006905264221131802, 0.060269106179475784, 0.003065511817112565, 0.025056472048163414, 0.022458698600530624, 0.09893514961004257, 0.008724315091967583, 0.017206642776727676, 0.02860725298523903, 0.020297983661293983, 0.29337745904922485, 0.06410837173461914, 0.015499671921133995, 0.05445997044444084, 0.0027118439320474863], 
[0.03296901285648346, 0.029229460284113884, 0.03024337626993656, 0.04544159397482872, 0.05271167680621147, 0.008342466317117214, 0.019735833629965782, 0.06704907864332199, 0.037777405232191086, 0.028908349573612213, 0.032753050327301025, 0.020989524200558662, 0.027695516124367714, 0.03234262019395828, 0.03790014237165451, 0.03568897768855095, 0.0443921834230423, 0.01560207735747099, 0.025277188047766685, 0.13800622522830963, 0.07405119389295578, 0.053200457245111465, 0.06501723825931549, 0.04467533901333809], [0.031014973297715187, 0.020396392792463303, 0.06182320415973663, 0.026388898491859436, 0.0072255684062838554, 0.018143504858016968, 0.00898380484431982, 0.08774282783269882, 0.07420466095209122, 0.02186107076704502, 0.011078082025051117, 0.09257815033197403, 0.0934228003025055, 0.08622333407402039, 0.06435813754796982, 0.020264748483896255, 0.006361552979797125, 0.017304809764027596, 0.008423415943980217, 0.06452161818742752, 0.061825819313526154, 0.020352039486169815, 0.01960870251059532, 0.07589206844568253], [0.023214738816022873, 0.016540158540010452, 0.07950068265199661, 0.020704660564661026, 0.040915317833423615, 0.022508174180984497, 0.022636273875832558, 0.017502574250102043, 0.1000252515077591, 0.06217624247074127, 0.047024451196193695, 0.03851187974214554, 0.0403173454105854, 0.04722047224640846, 0.07789101451635361, 0.024020016193389893, 0.04423723742365837, 0.02674071304500103, 0.025489483028650284, 0.02675255574285984, 0.069788359105587, 0.06388862431049347, 0.029682127758860588, 0.032711587846279144], [0.0758061558008194, 0.14621227979660034, 0.01048221904784441, 0.020884333178400993, 0.029584819450974464, 0.0186594370752573, 0.014818156138062477, 0.01402949821203947, 0.005241369362920523, 0.0128538329154253, 0.008710291236639023, 0.022092310711741447, 0.007869784720242023, 0.029686463996767998, 0.03883559629321098, 0.021000821143388748, 0.04525044560432434, 0.0422329343855381, 0.028887726366519928, 0.03825413063168526, 0.040749598294496536, 
0.05437474697828293, 0.06534969806671143, 0.20813336968421936], [0.04931079223752022, 0.0240755844861269, 0.05969120189547539, 0.02874932438135147, 0.002576362807303667, 0.011553122662007809, 0.0034476250875741243, 0.039411358535289764, 0.028589917346835136, 0.014477847144007683, 0.019757091999053955, 0.05077125504612923, 0.09319806098937988, 0.06115952879190445, 0.1552036553621292, 0.03583723306655884, 0.004152916371822357, 0.0235711969435215, 0.008118110708892345, 0.09220907837152481, 0.07946330308914185, 0.024985190480947495, 0.031274665147066116, 0.05841560661792755], [0.02281673066318035, 0.029189012944698334, 0.014820773154497147, 0.029706168919801712, 0.01876254193484783, 0.011607016436755657, 0.009855027310550213, 0.07678607851266861, 0.009326386265456676, 0.003889230079948902, 0.019889099523425102, 0.012234743684530258, 0.02735454961657524, 0.012319444678723812, 0.024441994726657867, 0.02839917689561844, 0.028903469443321228, 0.056132763624191284, 0.025883087888360023, 0.28678178787231445, 0.10355614125728607, 0.015996402129530907, 0.08963671326637268, 0.04171153903007507], [0.04528297111392021, 0.11932183057069778, 0.006976876873522997, 0.01367294229567051, 0.010799610987305641, 0.004599056672304869, 0.0027989475056529045, 0.012164794839918613, 0.0009924384066835046, 0.01253837626427412, 0.0047018518671393394, 0.023602284491062164, 0.015197631902992725, 0.04961495101451874, 0.023546528071165085, 0.015565261244773865, 0.01902693510055542, 0.021701306104660034, 0.011333346366882324, 0.09605982899665833, 0.03662371635437012, 0.1143244132399559, 0.05971517786383629, 0.2798389792442322]], [[0.01684599742293358, 0.012233881279826164, 0.10796629637479782, 0.03879198804497719, 0.05312265455722809, 0.04015496373176575, 0.04081796854734421, 0.03463421389460564, 0.08877316117286682, 0.04940122738480568, 0.09783563762903214, 0.06202371045947075, 0.05627850070595741, 0.06945410370826721, 0.03597855567932129, 0.01642146334052086, 0.030245916917920113, 
0.022935571148991585, 0.015641523525118828, 0.01456503476947546, 0.023264944553375244, 0.0208437442779541, 0.027441198006272316, 0.024327756837010384], [0.01804145611822605, 0.013465965166687965, 0.04796084016561508, 0.013573898002505302, 0.061983127146959305, 0.02114456705749035, 0.02842358686029911, 0.02214726060628891, 0.024476122111082077, 0.0448199063539505, 0.0745520144701004, 0.03712372109293938, 0.04222969710826874, 0.05451282113790512, 0.05398653447628021, 0.016809159889817238, 0.07986665517091751, 0.04731028899550438, 0.03995371237397194, 0.028358953073620796, 0.04342592507600784, 0.06033128499984741, 0.0753381997346878, 0.05016424506902695], [0.03334927186369896, 0.028889434412121773, 0.021663513034582138, 0.052407585084438324, 0.03703794628381729, 0.11276907473802567, 0.014943249523639679, 0.043028462678194046, 0.42373499274253845, 0.07881402224302292, 0.06438733637332916, 0.014469173736870289, 0.006884121801704168, 0.005579269025474787, 0.0018367655575275421, 0.005225511733442545, 0.006560576148331165, 0.013186288997530937, 0.0009236137848347425, 0.0020794502925127745, 0.011194335296750069, 0.011195399798452854, 0.005015500821173191, 0.004825016483664513], [0.03291086480021477, 0.033816706389188766, 0.06546365469694138, 0.07844161987304688, 0.02176552265882492, 0.07509801536798477, 0.03330346196889877, 0.048144515603780746, 0.08186416327953339, 0.06319695711135864, 0.03952433913946152, 0.06453762948513031, 0.05579458922147751, 0.033677808940410614, 0.031451188027858734, 0.042192984372377396, 0.013488059863448143, 0.04594520479440689, 0.014426767826080322, 0.01934981904923916, 0.027980972081422806, 0.029983162879943848, 0.014759624376893044, 0.03288237750530243], [0.02481783740222454, 0.02205015905201435, 0.03294314071536064, 0.027838030830025673, 0.017982183024287224, 0.04764040559530258, 0.10413394868373871, 0.03167642652988434, 0.0451488234102726, 0.05817480385303497, 0.03915588557720184, 0.08354610949754715, 0.05037940293550491, 
0.029097547754645348, 0.05568448454141617, 0.037604328244924545, 0.016434509307146072, 0.04238935932517052, 0.08024710416793823, 0.022662105038762093, 0.03211996704339981, 0.03773142024874687, 0.01840631291270256, 0.04213574528694153], [0.017314450815320015, 0.01297001726925373, 0.11178126186132431, 0.07864715158939362, 0.04496460780501366, 0.08671633154153824, 0.031955357640981674, 0.08652090281248093, 0.17652033269405365, 0.05987909808754921, 0.06222593039274216, 0.019049223512411118, 0.020149121060967445, 0.02446880377829075, 0.011104163713753223, 0.016368551179766655, 0.011414660140872002, 0.03248447924852371, 0.007483420893549919, 0.0164844561368227, 0.027525635436177254, 0.019821925088763237, 0.015318277291953564, 0.00883184652775526], [0.013810385018587112, 0.009543037973344326, 0.04849296063184738, 0.06733471900224686, 0.06015632674098015, 0.0348641499876976, 0.022448118776082993, 0.12263928353786469, 0.2713400423526764, 0.059624508023262024, 0.07756249606609344, 0.013855398632586002, 0.04727352410554886, 0.02635822258889675, 0.00584904570132494, 0.0115166325122118, 0.01624264381825924, 0.011932166293263435, 0.003921453841030598, 0.01972026936709881, 0.024619800969958305, 0.012661176733672619, 0.013146799057722092, 0.00508687412366271], [0.05694754794239998, 0.0399722158908844, 0.06362023204565048, 0.06531097739934921, 0.02527039498090744, 0.10406091064214706, 0.05352185666561127, 0.0327727273106575, 0.04840404540300369, 0.05634076148271561, 0.03543365001678467, 0.08177068829536438, 0.02304803766310215, 0.02170492522418499, 0.01940947398543358, 0.06194104999303818, 0.01711335778236389, 0.05296261981129646, 0.01803979091346264, 0.01097021996974945, 0.014377924613654613, 0.03073180466890335, 0.010968098416924477, 0.05530662462115288], [0.01714406907558441, 0.017896583303809166, 0.13263815641403198, 0.12141629308462143, 0.025510158389806747, 0.07907608896493912, 0.018311532214283943, 0.0445459708571434, 0.21304729580879211, 0.04151131585240364, 
0.16226984560489655, 0.029961397871375084, 0.009839167818427086, 0.013127077370882034, 0.007478964515030384, 0.008081922307610512, 0.0046682823449373245, 0.010148045606911182, 0.0014940439723432064, 0.0028930609114468098, 0.009507284499704838, 0.006279136519879103, 0.01692992076277733, 0.006224237848073244], [0.004035799764096737, 0.007472009398043156, 0.08212033659219742, 0.02500602789223194, 0.006282015237957239, 0.023024799302220345, 0.02842574566602707, 0.027940385043621063, 0.29798194766044617, 0.043657705187797546, 0.12407143414020538, 0.03644530102610588, 0.11811365187168121, 0.030591195449233055, 0.07988087087869644, 0.00320573803037405, 0.0026936319191008806, 0.01372763141989708, 0.00800881627947092, 0.00733026722446084, 0.012559068389236927, 0.006755223032087088, 0.007065953221172094, 0.0036044970620423555], [0.007829924114048481, 0.02088828571140766, 0.14485181868076324, 0.09320440143346786, 0.028894953429698944, 0.06795519590377808, 0.03160176798701286, 0.006964530795812607, 0.19424229860305786, 0.013072120025753975, 0.028626548126339912, 0.05580122023820877, 0.01141411904245615, 0.02404092438519001, 0.13790486752986908, 0.031684618443250656, 0.019520949572324753, 0.01997409574687481, 0.01235401164740324, 0.001954685663804412, 0.022942187264561653, 0.0038108734879642725, 0.007713007275015116, 0.012752596288919449], [0.0014212594833225012, 0.0026174227241426706, 0.08133192360401154, 0.015111387707293034, 0.007820318453013897, 0.006998103111982346, 0.008381780236959457, 0.005361299496144056, 0.11351064592599869, 0.037372734397649765, 0.24782313406467438, 0.13664160668849945, 0.11731649935245514, 0.06878440082073212, 0.11478132754564285, 0.0015551102114841342, 0.0032367664389312267, 0.002609299262985587, 0.0018778677331283689, 0.0014304714277386665, 0.00418479647487402, 0.002783670322969556, 0.01393126044422388, 0.003116917796432972], [0.006889669690281153, 0.014102387242019176, 0.021561603993177414, 0.008992059156298637, 0.044253427535295486, 
0.020528415217995644, 0.03924160823225975, 0.008356962352991104, 0.06692781299352646, 0.04306046664714813, 0.11796055734157562, 0.024100393056869507, 0.050619762390851974, 0.020802896469831467, 0.16361981630325317, 0.013807930983603, 0.08219397068023682, 0.018034106120467186, 0.04711681604385376, 0.010151191614568233, 0.052232857793569565, 0.040184661746025085, 0.06827189028263092, 0.01698867790400982], [0.000735185167286545, 0.002097794786095619, 0.046576909720897675, 0.012844149023294449, 0.013182222843170166, 0.0038630706258118153, 0.008645739406347275, 0.0032709878869354725, 0.086195208132267, 0.02205909602344036, 0.24033671617507935, 0.14796650409698486, 0.039886992424726486, 0.0793859213590622, 0.2325107604265213, 0.0030875871889293194, 0.013516890816390514, 0.0030481487046927214, 0.00486747408285737, 0.0017832565354183316, 0.007299837656319141, 0.003628223203122616, 0.01733209565281868, 0.0058792466297745705], [0.006051494739949703, 0.014388163574039936, 0.0038700951263308525, 0.0029153688810765743, 0.09302938729524612, 0.0041689518839120865, 0.01607322506606579, 0.00918173510581255, 0.04950160160660744, 0.04898570850491524, 0.10934608429670334, 0.02608925849199295, 0.021369699388742447, 0.016915371641516685, 0.05300714448094368, 0.004225563257932663, 0.19322584569454193, 0.009998292662203312, 0.036456480622291565, 0.017306407913565636, 0.07812377065420151, 0.05705321207642555, 0.11139661073684692, 0.01732044294476509], [0.0037234441842883825, 0.006065255030989647, 0.04327483847737312, 0.013258897699415684, 0.008043341338634491, 0.005822771694511175, 0.015303199179470539, 0.008794605731964111, 0.012193184345960617, 0.022327939048409462, 0.054486021399497986, 0.11491198092699051, 0.07433763146400452, 0.06058105453848839, 0.2732198238372803, 0.01778618060052395, 0.0183357372879982, 0.018325461074709892, 0.04184237867593765, 0.02434312179684639, 0.02718629315495491, 0.028622107580304146, 0.049819108098745346, 0.057395584881305695], [0.02049504779279232, 
0.020017186179757118, 0.008749944157898426, 0.007864853367209435, 0.01650519110262394, 0.010129289701581001, 0.05900924280285835, 0.009718171320855618, 0.006537649780511856, 0.024126261472702026, 0.010636932216584682, 0.0738966092467308, 0.027685556560754776, 0.02533833310008049, 0.08511612564325333, 0.03980007395148277, 0.040824249386787415, 0.03175541013479233, 0.22212719917297363, 0.034938473254442215, 0.043052881956100464, 0.060040220618247986, 0.028283407911658287, 0.09335170686244965], [0.040412046015262604, 0.02603767067193985, 0.04658589884638786, 0.029784563928842545, 0.051553718745708466, 0.019836438819766045, 0.027343938127160072, 0.022196929901838303, 0.009542498737573624, 0.016709525138139725, 0.01132035069167614, 0.02214963175356388, 0.0202474407851696, 0.060303494334220886, 0.053655337542295456, 0.04923722892999649, 0.06880933791399002, 0.057495731860399246, 0.07791067659854889, 0.060467980802059174, 0.04939349740743637, 0.05363965034484863, 0.04433819651603699, 0.08102823793888092], [0.03380516543984413, 0.01812577247619629, 0.01729021966457367, 0.022543596103787422, 0.06114260479807854, 0.007775880862027407, 0.0204361230134964, 0.03168854862451553, 0.01354733295738697, 0.02218654192984104, 0.017756378278136253, 0.025431925430893898, 0.06234830617904663, 0.07953054457902908, 0.025593627244234085, 0.03950519487261772, 0.09789370745420456, 0.02390705980360508, 0.05131729692220688, 0.08920396864414215, 0.05972367525100708, 0.05118035525083542, 0.06064052879810333, 0.0674256682395935], [0.0399329848587513, 0.02366967499256134, 0.0073775239288806915, 0.007350971456617117, 0.010396230034530163, 0.005724740214645863, 0.017695190384984016, 0.003358560148626566, 0.0007992577739059925, 0.007452836260199547, 0.0038373905699700117, 0.053381551057100296, 0.014360944740474224, 0.02317204512655735, 0.04615607485175133, 0.09608644247055054, 0.05414639413356781, 0.03702188655734062, 0.11996921896934509, 0.02635917067527771, 0.017810489982366562, 0.05455821752548218, 
0.027827268466353416, 0.30155491828918457], [0.10225911438465118, 0.03660808503627777, 0.010020875371992588, 0.0117837218567729, 0.013936707749962807, 0.005645412020385265, 0.013701778836548328, 0.007843516767024994, 0.000940669619012624, 0.009955305606126785, 0.006666088942438364, 0.0376058891415596, 0.006305535789579153, 0.021358896046876907, 0.010133703239262104, 0.034734781831502914, 0.028020339086651802, 0.026332635432481766, 0.053899772465229034, 0.03474622592329979, 0.024313101544976234, 0.07750007510185242, 0.08656897395849228, 0.33911874890327454], [0.012747708708047867, 0.015348945744335651, 0.028040776029229164, 0.007618908304721117, 0.004255075938999653, 0.005439308937638998, 0.025128040462732315, 0.009407893754541874, 0.011719216592609882, 0.014715958386659622, 0.027698297053575516, 0.0289152879267931, 0.15963514149188995, 0.04355834797024727, 0.25398674607276917, 0.011028594337403774, 0.01022297888994217, 0.032727666199207306, 0.0984216034412384, 0.042470306158065796, 0.03332417830824852, 0.03530490770936012, 0.04276426509022713, 0.04551994800567627], [0.06188567355275154, 0.047604143619537354, 0.02844288945198059, 0.03181562200188637, 0.016884563490748405, 0.021147828549146652, 0.0278251264244318, 0.004713769070804119, 0.003897220129147172, 0.009138807654380798, 0.0032733085099607706, 0.06009498983621597, 0.006269896402955055, 0.024829663336277008, 0.0485498383641243, 0.09833535552024841, 0.028619827702641487, 0.060120657086372375, 0.0867634266614914, 0.014734995551407337, 0.02872687578201294, 0.03575126454234123, 0.019295327365398407, 0.23127888143062592], [0.019414151087403297, 0.013430886901915073, 0.034257806837558746, 0.008097900077700615, 0.00271963351406157, 0.0034864265471696854, 0.007646519225090742, 0.004721622448414564, 0.0037860777229070663, 0.0197627954185009, 0.045260265469551086, 0.11442151665687561, 0.17114883661270142, 0.12444033473730087, 0.12609447538852692, 0.008686922490596771, 0.004210256971418858, 0.01645340770483017, 
0.02074527181684971, 0.02055932767689228, 0.013460970483720303, 0.031048418954014778, 0.09409793466329575, 0.09204825013875961]]]], \"left_text\": [\"\", \" \", \"CCCCC\", \"[\", \"C\", \"@@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\", \"\", \" \", \"CCCCC\", \"[\", \"C\", \"@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\"], \"right_text\": [\"\", \" \", \"CCCCC\", \"[\", \"C\", \"@@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\", \"\", \" \", \"CCCCC\", \"[\", \"C\", \"@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\"]}}, \"default_filter\": \"all\"}" + "window.params = {\"attention\": {\"all\": {\"attn\": [[[[0.010725097730755806, 0.044450610876083374, 0.031308986246585846, 0.011544237844645977, 0.03997823968529701, 0.028261356055736542, 0.16280296444892883, 0.07003579288721085, 0.03949745371937752, 0.03670930862426758, 0.06174522638320923, 0.07385388761758804, 0.04142548516392708, 0.049353428184986115, 0.024993613362312317, 0.0659124031662941, 0.028954755514860153, 0.06914366781711578, 0.03148248419165611, 0.01896122470498085, 0.02835828624665737, 0.030501505360007286], [0.05164869502186775, 0.03496671840548515, 0.051947273313999176, 0.061695292592048645, 0.028228551149368286, 0.042747415602207184, 0.014519782736897469, 0.07709211856126785, 0.027241094037890434, 0.06255251169204712, 0.04949159547686577, 0.09194865077733994, 0.06052496284246445, 0.04580366238951683, 0.029187645763158798, 0.03795681521296501, 0.02276264689862728, 0.021973375231027603, 0.06338861584663391, 0.022431977093219757, 0.059357281774282455, 0.042533375322818756], [0.022974463179707527, 0.06773396581411362, 0.02379499189555645, 0.02620391920208931, 0.023058289662003517, 0.056391093879938126, 0.010174427181482315, 0.043018076568841934, 0.23801200091838837, 0.0544205866754055, 0.040574539452791214, 0.08613168448209763, 0.09898834675550461, 0.059978365898132324, 0.028902025893330574, 0.008275064639747143, 0.02322394587099552, 0.009376958943903446, 0.011755838990211487, 0.030446434393525124, 
0.018973717465996742, 0.017591308802366257], [0.018829679116606712, 0.4520346224308014, 0.19066374003887177, 0.005308852065354586, 0.015889883041381836, 0.017854103818535805, 0.03593301773071289, 0.02460055984556675, 0.04304985702037811, 0.02240193821489811, 0.014844532124698162, 0.018936848267912865, 0.03594487905502319, 0.02778254821896553, 0.00550069147720933, 0.008362362161278725, 0.007012759335339069, 0.02014237828552723, 0.013444249518215656, 0.01165826991200447, 0.006557739805430174, 0.0032464666292071342], [0.009094779379665852, 0.055022045969963074, 0.007260314654558897, 0.04851672053337097, 0.009913027286529541, 0.02687196247279644, 0.0035347489174455404, 0.034838709980249405, 0.31493353843688965, 0.03887704759836197, 0.040871232748031616, 0.1412729173898697, 0.09488477557897568, 0.01861119270324707, 0.04224088042974472, 0.0054791891016066074, 0.011154761537909508, 0.003452860051766038, 0.010535932146012783, 0.04855502396821976, 0.01571005769073963, 0.018368219956755638], [0.015064490959048271, 0.05014730617403984, 0.019928770139813423, 0.02875988371670246, 0.06064516305923462, 0.03157109394669533, 0.0373343862593174, 0.04386947676539421, 0.26020297408103943, 0.033825404942035675, 0.0349414087831974, 0.05066126957535744, 0.06173202395439148, 0.01583869941532612, 0.029598552733659744, 0.04385101795196533, 0.015654923394322395, 0.05599072948098183, 0.01574692316353321, 0.05480306223034859, 0.021092642098665237, 0.018739823251962662], [0.01062595471739769, 0.026634231209754944, 0.008771426975727081, 0.04958968982100487, 0.0820012018084526, 0.02306719683110714, 0.010144928470253944, 0.03139542415738106, 0.2566063702106476, 0.04309887811541557, 0.049342382699251175, 0.1228787750005722, 0.07513722777366638, 0.020883293822407722, 0.045774899423122406, 0.034990329295396805, 0.010113226249814034, 0.011465135961771011, 0.008132644928991795, 0.023213936015963554, 0.026101423427462578, 0.030031396076083183], [0.01637379638850689, 0.08757597208023071, 
0.01745939441025257, 0.024957973510026932, 0.029543789103627205, 0.03719503805041313, 0.009811216033995152, 0.0325777642428875, 0.3772699534893036, 0.019757796078920364, 0.03794034942984581, 0.09721541404724121, 0.04169934615492821, 0.02502037025988102, 0.014853449538350105, 0.012793424539268017, 0.01283275056630373, 0.008462268859148026, 0.008280379697680473, 0.055332817137241364, 0.010720946826040745, 0.022325698286294937], [0.009851394221186638, 0.07919318228960037, 0.09411094337701797, 0.02053714729845524, 0.106007881462574, 0.0404462106525898, 0.21440768241882324, 0.07653539627790451, 0.0063881524838507175, 0.034374821931123734, 0.02435418963432312, 0.017421100288629532, 0.04904542118310928, 0.02061351202428341, 0.010921265929937363, 0.032690271735191345, 0.024416519328951836, 0.07301092892885208, 0.03973999246954918, 0.0024788088630884886, 0.01624978706240654, 0.007205365225672722], [0.02766154147684574, 0.0542987659573555, 0.018864348530769348, 0.03180095553398132, 0.03706910088658333, 0.03720522299408913, 0.02997112274169922, 0.03221507370471954, 0.1282157152891159, 0.04114370793104172, 0.056514427065849304, 0.07532170414924622, 0.0484025664627552, 0.02506835199892521, 0.05621118098497391, 0.03790033236145973, 0.03452954441308975, 0.05690404027700424, 0.026700936257839203, 0.04875996708869934, 0.05964239686727524, 0.0355989895761013], [0.004888728726655245, 0.04576728865504265, 0.0432082824409008, 0.006339548621326685, 0.06289941072463989, 0.01351873017847538, 0.20923444628715515, 0.09639312326908112, 0.020444776862859726, 0.012894312851130962, 0.019197702407836914, 0.018027042970061302, 0.02457376942038536, 0.04265251010656357, 0.015389897860586643, 0.05505816265940666, 0.01692850887775421, 0.11626619845628738, 0.1140972152352333, 0.018474183976650238, 0.01960407756268978, 0.024142012000083923], [0.004050685092806816, 0.07205736637115479, 0.06024571508169174, 0.005027564708143473, 0.03249995782971382, 0.012685425579547882, 0.2246880978345871, 
0.04708097130060196, 0.024754511192440987, 0.009045117534697056, 0.014304259791970253, 0.008727834559977055, 0.02059200033545494, 0.036426886916160583, 0.014976452104747295, 0.052063073962926865, 0.020411789417266846, 0.13101568818092346, 0.12496240437030792, 0.04665457829833031, 0.01933552324771881, 0.01839418150484562], [0.013188271783292294, 0.053477197885513306, 0.03736694157123566, 0.02293097972869873, 0.03320344537496567, 0.060755655169487, 0.16976846754550934, 0.0699412003159523, 0.024840906262397766, 0.0296306349337101, 0.05091992765665054, 0.04509265348315239, 0.027647461742162704, 0.03157919645309448, 0.009382682852447033, 0.049919649958610535, 0.031023379415273666, 0.12561380863189697, 0.06897394359111786, 0.012773055583238602, 0.018245389685034752, 0.01372510101646185], [0.007471046410501003, 0.10222043097019196, 0.06737156212329865, 0.013374284841120243, 0.03296836465597153, 0.026308199390769005, 0.1969906985759735, 0.06487219035625458, 0.07365942001342773, 0.03094419650733471, 0.055117666721343994, 0.043282244354486465, 0.07768530398607254, 0.035934604704380035, 0.008577783592045307, 0.021904921159148216, 0.011436695232987404, 0.08266708999872208, 0.023226067423820496, 0.012875530868768692, 0.007393890991806984, 0.0037178019993007183], [0.00860872957855463, 0.11692919582128525, 0.04955852031707764, 0.002543873619288206, 0.02180904895067215, 0.017617886886000633, 0.41949549317359924, 0.04458661377429962, 0.011178803630173206, 0.016925139352679253, 0.014163138344883919, 0.009397958405315876, 0.026273595169186592, 0.01763998717069626, 0.0016652062768116593, 0.013822132721543312, 0.014080964960157871, 0.14294539391994476, 0.03647303581237793, 0.005498059093952179, 0.006372989621013403, 0.002414182759821415], [0.0017296497244387865, 0.07202073186635971, 0.023392800241708755, 0.0029604339506477118, 0.014703032560646534, 0.012240873649716377, 0.47455543279647827, 0.04585998132824898, 0.008007730357348919, 0.012442218139767647, 0.02208787389099598, 
0.012955429963767529, 0.030625827610492706, 0.012263339944183826, 0.003054679138585925, 0.011845762841403484, 0.01237314473837614, 0.18052281439304352, 0.03495920076966286, 0.004985250066965818, 0.003934734966605902, 0.0024789904709905386], [0.011932234279811382, 0.07646188139915466, 0.06594429910182953, 0.025003356859087944, 0.07163123786449432, 0.022240281105041504, 0.16617561876773834, 0.07963185012340546, 0.026579398661851883, 0.039288412779569626, 0.03356767073273659, 0.02952936291694641, 0.055739905685186386, 0.017436493188142776, 0.012685388326644897, 0.08351175487041473, 0.008818223141133785, 0.10110066831111908, 0.034810297191143036, 0.009808466769754887, 0.017907580360770226, 0.010195505805313587], [0.007673058193176985, 0.11187965422868729, 0.07296419888734818, 0.023838436231017113, 0.026982033625245094, 0.02408788353204727, 0.04266893118619919, 0.048558492213487625, 0.0516047440469265, 0.06085199862718582, 0.06729251146316528, 0.11459873616695404, 0.08905244618654251, 0.053939495235681534, 0.02681978978216648, 0.04261821508407593, 0.0118433041498065, 0.03216124325990677, 0.022703392431139946, 0.02696617692708969, 0.0181278008967638, 0.022767448797822], [0.01470466610044241, 0.07749783992767334, 0.05642857402563095, 0.014315829612314701, 0.03110230527818203, 0.02854265831410885, 0.2327050417661667, 0.10245781391859055, 0.01611466519534588, 0.022405575960874557, 0.04093881696462631, 0.05449497327208519, 0.05185632407665253, 0.04114251956343651, 0.0065468656830489635, 0.02773624286055565, 0.01599421165883541, 0.08287783712148666, 0.03856218606233597, 0.014447640627622604, 0.011855133809149265, 0.01727226749062538], [0.01609407179057598, 0.15453289449214935, 0.3498595356941223, 0.014480067417025566, 0.05223733186721802, 0.02531849965453148, 0.04153064265847206, 0.034125927835702896, 0.003549781162291765, 0.0287138931453228, 0.015420458279550076, 0.012653176672756672, 0.04249155893921852, 0.047333747148513794, 0.012675223872065544, 0.03651881590485573, 
0.01865164004266262, 0.023686150088906288, 0.03669244050979614, 0.005526492837816477, 0.014328590594232082, 0.013579132966697216], [0.02994912676513195, 0.08739110082387924, 0.07218900322914124, 0.015495916828513145, 0.03694024309515953, 0.03137354925274849, 0.045377809554338455, 0.07090887427330017, 0.04339449480175972, 0.07470721751451492, 0.02830488234758377, 0.044791486114263535, 0.04456380382180214, 0.04740777239203453, 0.016603730618953705, 0.03309858962893486, 0.015877587720751762, 0.050036050379276276, 0.08186298608779907, 0.04211025685071945, 0.05681407451629639, 0.03080139495432377], [0.0033501910511404276, 0.03820521757006645, 0.08381029963493347, 0.0020786805544048548, 0.01813349686563015, 0.009041826240718365, 0.2287285029888153, 0.05329595506191254, 0.0039456868544220924, 0.006964010186493397, 0.009830381721258163, 0.00511246407404542, 0.009729834273457527, 0.0440862700343132, 0.006943500135093927, 0.03588137775659561, 0.014005162753164768, 0.17224082350730896, 0.19504599273204803, 0.02738638035953045, 0.015787767246365547, 0.01639614999294281]], [[0.01116255670785904, 0.14695540070533752, 0.049156948924064636, 0.021459022536873817, 0.06777159124612808, 0.32045668363571167, 0.04675313085317612, 0.10742155462503433, 0.025597726926207542, 0.026574982330203056, 0.01757034659385681, 0.05716675892472267, 0.013473332859575748, 0.020717140287160873, 0.001389852026477456, 0.025196874514222145, 0.013103287667036057, 0.0029268821235746145, 0.008023425005376339, 0.004223880358040333, 0.005511156748980284, 0.007387529592961073], [0.08731070160865784, 0.07338704913854599, 0.029602766036987305, 0.06458131968975067, 0.13923496007919312, 0.04492747038602829, 0.03147488459944725, 0.1653331071138382, 0.012368805706501007, 0.013416948728263378, 0.03432378172874451, 0.027672776952385902, 0.01511906273663044, 0.005021458957344294, 0.034692537039518356, 0.09174502640962601, 0.0033465533051639795, 0.021222813054919243, 0.029617229476571083, 0.007548446301370859, 
0.012495971284806728, 0.055556412786245346], [0.014389035291969776, 0.16890032589435577, 0.0675182044506073, 0.04181249067187309, 0.05371396988630295, 0.2429913878440857, 0.0912296324968338, 0.052301403135061264, 0.05068321153521538, 0.014335798099637032, 0.016292890533804893, 0.03682660311460495, 0.017945682629942894, 0.008482021279633045, 0.007309828884899616, 0.04326007142663002, 0.018692966550588608, 0.0111699178814888, 0.008931117132306099, 0.013478398323059082, 0.008601841516792774, 0.011133097112178802], [0.0026377190370112658, 0.10373885929584503, 0.8297300934791565, 0.002946455031633377, 0.0039009852334856987, 0.011458647437393665, 0.000573576136957854, 0.010267645120620728, 0.002790190512314439, 0.004860774613916874, 0.001416934304870665, 0.00837091077119112, 0.0022464401554316282, 0.0018320124363526702, 0.0004582357360050082, 0.002157562645152211, 0.00422273576259613, 0.0007882479694671929, 0.0003783543361350894, 0.0012250604340806603, 0.00272859213873744, 0.0012699858052656054], [0.026116693392395973, 0.24480591714382172, 0.3489236831665039, 0.03155920282006264, 0.06449368596076965, 0.018956594169139862, 0.0036175402346998453, 0.030698776245117188, 0.022364329546689987, 0.09742812812328339, 0.03942934423685074, 0.031678229570388794, 0.003490644507110119, 0.005473351571708918, 0.0035760255996137857, 0.002368506044149399, 0.0022072147112339735, 0.000539734901394695, 0.0004867489042226225, 0.0024628250394016504, 0.006465704180300236, 0.01285721454769373], [0.010319016873836517, 0.00498763145878911, 0.8220669627189636, 0.009186772629618645, 0.0026781477499753237, 0.011004867032170296, 0.0005290909321047366, 0.004333742428570986, 0.002796711167320609, 0.10934711992740631, 0.009735649451613426, 0.0024653093423694372, 0.001468307338654995, 0.0024779224768280983, 0.0035662297159433365, 0.0001765223132679239, 0.0012314615305513144, 0.00022485925001092255, 9.783787390915677e-05, 0.0004403907514642924, 0.0006245917174965143, 0.00024095227126963437], 
[0.012176057323813438, 0.2971210181713104, 0.01831839606165886, 0.06939271092414856, 0.3526405394077301, 0.011478910222649574, 0.009982173331081867, 0.02292967215180397, 0.12419607490301132, 0.01035318709909916, 0.011335906572639942, 0.008812930434942245, 0.006061275489628315, 0.002666809828951955, 0.0029634609818458557, 0.010258774273097515, 0.0008749116095714271, 0.001052527572028339, 0.004625425208359957, 0.004482197109609842, 0.0014504559803754091, 0.016826646402478218], [0.0013629556633532047, 0.04135720059275627, 0.003457761835306883, 0.016249118372797966, 0.7913835644721985, 0.01629607006907463, 0.012622271664440632, 0.043689288198947906, 0.014360365457832813, 0.004149383399635553, 0.013426282443106174, 0.024684783071279526, 0.0013775239931419492, 0.0015309631126001477, 0.0010584272677078843, 0.004857291933149099, 0.0015408832114189863, 0.0011609604116529226, 0.0006860423600301147, 0.0016134824836626649, 0.0008977308752946556, 0.0022376368287950754], [0.005680794361978769, 0.025128737092018127, 0.00395358307287097, 0.029007982462644577, 0.05051514133810997, 0.2003343552350998, 0.4323864281177521, 0.04999730736017227, 0.04851000756025314, 0.013499942608177662, 0.047011569142341614, 0.03209390118718147, 0.0039043822325766087, 0.01350314263254404, 0.002613969147205353, 0.007974332198500633, 0.001126125454902649, 0.011470959521830082, 0.00534758111461997, 0.004600263200700283, 0.0006906448397785425, 0.01064883079379797], [0.0005798712372779846, 0.00412228237837553, 3.673116225400008e-05, 0.007613219786435366, 0.01579832099378109, 0.0062704444862902164, 0.8830613493919373, 0.011897686868906021, 0.00826891977339983, 0.013014494441449642, 0.007353407330811024, 0.013982969336211681, 0.0077491262927651405, 0.003025457262992859, 0.0049269432201981544, 0.004963109735399485, 0.0016824838239699602, 0.0014665371272712946, 0.002776842564344406, 0.00013986529665999115, 0.0007151043391786516, 0.0005548816989175975], [0.006812549661844969, 0.0068672155030071735, 
0.003651347942650318, 0.005952429957687855, 0.011258875951170921, 0.17493699491024017, 0.019191179424524307, 0.46238407492637634, 0.021742625162005424, 0.11564698815345764, 0.04647209867835045, 0.06544255465269089, 0.009718524292111397, 0.02062537521123886, 0.0012984579661861062, 0.0031514205038547516, 0.011118395254015923, 0.006588733289390802, 0.0018332510953769088, 0.0010271532228216529, 0.0011519982945173979, 0.003127763979136944], [0.0011946918675675988, 0.0010449385736137629, 8.911087206797674e-05, 0.0012740249512717128, 0.022797027602791786, 0.0035961084067821503, 0.01281479187309742, 0.007075126748532057, 0.6431381106376648, 0.02397930435836315, 0.11653441935777664, 0.10541274398565292, 0.03143429383635521, 0.005960672628134489, 0.0017006580019369721, 0.006981890182942152, 0.0019482108764350414, 0.005351261235773563, 0.002158127957955003, 0.0020442032255232334, 0.0007596552604809403, 0.0027105892077088356], [0.004881392233073711, 0.0014974985970184207, 0.0009783018613234162, 0.002700685989111662, 0.0032693466637283564, 0.0035371938720345497, 0.003128951182588935, 0.010886968113481998, 0.0011543873697519302, 0.9017311334609985, 0.018125519156455994, 0.03110104613006115, 0.0030926289036870003, 0.0030041553545743227, 0.0038263730239123106, 0.00263373670168221, 0.001306927646510303, 0.000949203735217452, 0.0008177023846656084, 8.754341251915321e-05, 0.00104931287933141, 0.0002398150973021984], [0.00046056555584073067, 0.001276991912163794, 0.0004346126224845648, 0.002810514299198985, 0.04388699680566788, 0.0010095187462866306, 0.02057035267353058, 0.002196827670559287, 0.029554149135947227, 0.0131527129560709, 0.8041626811027527, 0.024149630218744278, 0.034604620188474655, 0.0013459081528708339, 0.0023584889713674784, 0.0016351599479094148, 0.0014787174295634031, 0.008099180646240711, 0.0021702898666262627, 0.0025839414447546005, 0.001519609009847045, 0.0005384513060562313], [0.00040720406104810536, 0.0063338312320411205, 0.00765209412202239, 
0.0011535895755514503, 0.0025958081241697073, 0.003938556648790836, 0.003363602561876178, 0.0032325093634426594, 0.0037953201681375504, 0.005606205202639103, 0.005752358119934797, 0.8328233361244202, 0.02649868279695511, 0.04756776988506317, 0.0032598015386611223, 0.005290332715958357, 0.029889937490224838, 0.005070098210126162, 0.0011678831651806831, 0.0012711809249594808, 0.001766282832249999, 0.0015636233147233725], [0.00051274080760777, 0.0004576780484057963, 8.849997539073229e-05, 0.0007572381873615086, 0.002668333239853382, 0.0002701786579564214, 0.00105538300704211, 0.00028180141816847026, 0.01790034770965576, 0.004365772474557161, 0.050683192908763885, 0.06639394909143448, 0.7987503409385681, 0.0023033402394503355, 0.01782173477113247, 0.012880749069154263, 0.006849197670817375, 0.006156720221042633, 0.0017843744717538357, 0.0033849303144961596, 0.002281742636114359, 0.0023517829831689596], [0.004735897295176983, 0.001584414392709732, 0.001356323016807437, 0.003391423961147666, 0.004553558304905891, 0.00814280565828085, 0.014905475080013275, 0.002743762219324708, 0.00782869104295969, 0.0872642770409584, 0.023136213421821594, 0.01654127612709999, 0.0381944477558136, 0.7124295234680176, 0.03279414027929306, 0.006533768493682146, 0.012287233956158161, 0.0147285470739007, 0.001446243142709136, 0.0025831859093159437, 0.0008992765215225518, 0.0019194923806935549], [0.006391146220266819, 0.0023495787754654884, 0.0008278288878500462, 0.002872674260288477, 0.007809468079358339, 6.437332922359928e-05, 0.0010217288509011269, 0.0005413549952208996, 0.022253088653087616, 0.0023109125904738903, 0.00649446714669466, 0.008894854225218296, 0.03331983834505081, 0.006810321006923914, 0.8037846684455872, 0.04380650818347931, 0.004525093361735344, 0.01104204636067152, 0.0030312174931168556, 0.012682262808084488, 0.004273686558008194, 0.014892923645675182], [0.004295565653592348, 0.016245614737272263, 0.003083127085119486, 0.0035857302136719227, 0.008161468431353569, 
0.007503497414290905, 0.004238214809447527, 0.026294752955436707, 0.004426210653036833, 0.030002914369106293, 0.007244081702083349, 0.026163524016737938, 0.01844431832432747, 0.027106381952762604, 0.007014281582087278, 0.6126822829246521, 0.013481037691235542, 0.13459870219230652, 0.010635744780302048, 0.007783413864672184, 0.02091904543340206, 0.0060901218093931675], [0.0012116836151108146, 0.0013815233251079917, 0.00027696313918568194, 0.0046654148027300835, 0.003912737593054771, 0.0005728991818614304, 0.024816088378429413, 0.0005413529579527676, 0.06414620578289032, 0.011505297385156155, 0.005856450647115707, 0.007674662861973047, 0.028167741373181343, 0.013746283017098904, 0.06654629111289978, 0.02261945977807045, 0.38674792647361755, 0.168565034866333, 0.06694173067808151, 0.10773593187332153, 0.0081607885658741, 0.004207654390484095], [0.0002191028033848852, 0.000687718391418457, 1.9119626813335344e-05, 0.0002526450844015926, 0.0016817155992612243, 0.0005986873293295503, 0.004379762336611748, 0.0006691893795505166, 0.0005488485330715775, 0.0010638388339430094, 0.0011080080876126885, 0.0034043523482978344, 0.009558101184666157, 0.0021896150428801775, 0.002493089297786355, 0.015879923477768898, 0.007215125020593405, 0.926349937915802, 0.009953436441719532, 0.006543837487697601, 0.0038885152898728848, 0.001295640366151929], [0.00034856339334510267, 0.00017268324154429138, 3.840460703941062e-05, 0.00043451000237837434, 0.00043112185085192323, 0.0016019028844311833, 0.00200991565361619, 0.0018701424123719335, 0.0020794346928596497, 0.007605171762406826, 0.010172230191528797, 0.004796032328158617, 0.010847196914255619, 0.005440668202936649, 0.0009605030645616353, 0.0038668811321258545, 0.026379674673080444, 0.041560135781764984, 0.8440771698951721, 0.0071649556048214436, 0.01718325912952423, 0.01095941849052906]], [[0.013923639431595802, 0.23299147188663483, 0.02453227899968624, 0.022396354004740715, 0.03272945061326027, 0.07408526539802551, 0.08649103343486786, 
0.02762576751410961, 0.07209296524524689, 0.06861258298158646, 0.019408684223890305, 0.02658635564148426, 0.03697457164525986, 0.04856693744659424, 0.024181049317121506, 0.01552753895521164, 0.039591915905475616, 0.027013128623366356, 0.016085727140307426, 0.029432931914925575, 0.03839113935828209, 0.022759323939681053], [0.00029728299705311656, 0.33009058237075806, 0.01430921908468008, 0.01781180128455162, 0.004076649434864521, 0.5570430159568787, 0.017908308655023575, 0.0024822489358484745, 0.000883369822986424, 0.0006385919987224042, 0.0036411252804100513, 0.0022895054426044226, 0.00711445976048708, 0.01498893741518259, 0.0003330715117044747, 0.0008021139656193554, 0.007900252006947994, 0.014343355782330036, 0.0009308425942435861, 0.0003837816184386611, 0.0004935091710649431, 0.0012380362022668123], [0.007060659117996693, 0.3639615774154663, 0.021323291584849358, 0.012213470414280891, 0.029673002660274506, 0.025795772671699524, 0.15172472596168518, 0.04546496644616127, 0.05477266013622284, 0.015553313307464123, 0.011475654318928719, 0.04261820390820503, 0.03620321303606033, 0.018697192892432213, 0.006417142227292061, 0.022421732544898987, 0.010381451807916164, 0.07281085103750229, 0.007872399874031544, 0.017766064032912254, 0.012705294415354729, 0.013087327592074871], [0.0016615665517747402, 0.7259650230407715, 0.016964925453066826, 0.02034761756658554, 0.02160748653113842, 0.07265781611204147, 0.050180837512016296, 0.00619790842756629, 0.007498157676309347, 0.01095385942608118, 0.0039032241329550743, 0.006062038242816925, 0.016578223556280136, 0.004130688030272722, 0.003262692131102085, 0.0021676896139979362, 0.005203586537390947, 0.012403124943375587, 0.0018377373926341534, 0.0014776044990867376, 0.006887445226311684, 0.0020507657900452614], [0.000977415475063026, 0.7485002279281616, 0.01139331515878439, 0.03344201296567917, 0.023571163415908813, 0.043589308857917786, 0.037728648632764816, 0.013993937522172928, 0.011725923046469688, 0.017590006813406944, 
0.0017914535710588098, 0.004155951552093029, 0.007295477204024792, 0.006461329758167267, 0.00044543988769873977, 0.0020674439147114754, 0.0032661170698702335, 0.019879106432199478, 0.001643767929635942, 0.0015395591035485268, 0.008279107511043549, 0.0006633187877014279], [0.0011743487557396293, 0.5509132742881775, 0.016800129786133766, 0.08566515892744064, 0.04247977212071419, 0.049610551446676254, 0.07969245314598083, 0.061733730137348175, 0.01712576113641262, 0.006378935184329748, 0.0062236846424639225, 0.009799170307815075, 0.009899972938001156, 0.027513867244124413, 0.0002241612965008244, 0.0017786809476092458, 0.006234285421669483, 0.020771745592355728, 0.0012848793994635344, 0.0009806157322600484, 0.001922182971611619, 0.001792593626305461], [0.0015468065394088626, 0.5932926535606384, 0.01686687208712101, 0.015805162489414215, 0.01183061208575964, 0.18623174726963043, 0.02236630953848362, 0.01915895752608776, 0.030262265354394913, 0.018664877861738205, 0.004059377126395702, 0.00868175644427538, 0.026268325746059418, 0.012188993394374847, 0.0016945069655776024, 0.0031332781072705984, 0.007125940639525652, 0.008395669981837273, 0.0019282599678263068, 0.0037288879975676537, 0.004919004626572132, 0.0018496649572625756], [0.005187311675399542, 0.6534914970397949, 0.03152652084827423, 0.01133844256401062, 0.010784137062728405, 0.022258685901761055, 0.10471067577600479, 0.01659877970814705, 0.022553090006113052, 0.006258267909288406, 0.00804190058261156, 0.015684261918067932, 0.026327596977353096, 0.01183800958096981, 0.010118167847394943, 0.0098258126527071, 0.001969647593796253, 0.013633488677442074, 0.003008143976330757, 0.006123185623437166, 0.003118697786703706, 0.005603555124253035], [0.002268025418743491, 0.48455101251602173, 0.011093873530626297, 0.01435806229710579, 0.04388667643070221, 0.11800672858953476, 0.0902198776602745, 0.02603810466825962, 0.021535953506827354, 0.013882790692150593, 0.01760442741215229, 0.02040073275566101, 0.016091179102659225, 
0.025220924988389015, 0.0071047586388885975, 0.010181478224694729, 0.010927059687674046, 0.04501467943191528, 0.004742252640426159, 0.008460001088678837, 0.003196348901838064, 0.005215085577219725], [0.0009487414499744773, 0.6567457914352417, 0.008219863288104534, 0.00942500401288271, 0.020306682214140892, 0.018508654087781906, 0.07565513998270035, 0.06815782934427261, 0.02167046070098877, 0.004998121410608292, 0.007657167501747608, 0.021278031170368195, 0.009041784331202507, 0.013671726919710636, 0.00030099612195044756, 0.006320230662822723, 0.0012193412985652685, 0.049182627350091934, 0.002736451104283333, 0.001059823203831911, 0.0011406952980905771, 0.0017547542229294777], [0.002305437810719013, 0.26463812589645386, 0.0041520800441503525, 0.007424148730933666, 0.01841220259666443, 0.47070685029029846, 0.029716409742832184, 0.04108604043722153, 0.03801180422306061, 0.01746380515396595, 0.016844024881720543, 0.026362493634223938, 0.012680980376899242, 0.00952695682644844, 0.0010973131284117699, 0.003235691459849477, 0.015943355858325958, 0.009026256389915943, 0.003879058174788952, 0.001507466658949852, 0.0019021857297047973, 0.004077378660440445], [0.0030290274880826473, 0.3803667426109314, 0.007266612257808447, 0.030968116596341133, 0.06486566364765167, 0.10898351669311523, 0.06834092736244202, 0.08483054488897324, 0.05868074670433998, 0.043525416404008865, 0.040652137249708176, 0.02758782170712948, 0.013315673917531967, 0.021378496661782265, 0.003241160651668906, 0.002654074924066663, 0.005513565614819527, 0.021309219300746918, 0.0055077713914215565, 0.002505829092115164, 0.002039380371570587, 0.003437451086938381], [0.00437307870015502, 0.3513452410697937, 0.012114935554564, 0.03268679976463318, 0.041678767651319504, 0.010305166244506836, 0.07502154260873795, 0.06741967797279358, 0.02908286079764366, 0.022105565294623375, 0.051853056997060776, 0.05025869980454445, 0.05959396809339523, 0.08469244092702866, 0.008486796170473099, 0.010814689099788666, 
0.006042202468961477, 0.04234957695007324, 0.017516810446977615, 0.008542018011212349, 0.004775497131049633, 0.008940580300986767], [0.007747923489660025, 0.24862708151340485, 0.024891521781682968, 0.02679303102195263, 0.05266522988677025, 0.04099895432591438, 0.08355307579040527, 0.06149030476808548, 0.04146280139684677, 0.09624288976192474, 0.028893573209643364, 0.04623180627822876, 0.04918086156249046, 0.032743439078330994, 0.0073463767766952515, 0.014942911453545094, 0.012190092355012894, 0.04962581396102905, 0.010020636953413486, 0.012303678318858147, 0.04363405331969261, 0.008414061740040779], [0.011794344522058964, 0.16593994200229645, 0.007681188639253378, 0.02239007130265236, 0.0565912164747715, 0.06017925217747688, 0.13824959099292755, 0.08380406349897385, 0.059148237109184265, 0.031070059165358543, 0.029534421861171722, 0.07391919195652008, 0.07223758101463318, 0.01847890019416809, 0.013275615870952606, 0.022884242236614227, 0.010169586166739464, 0.05636238306760788, 0.017237067222595215, 0.014925232157111168, 0.014888422563672066, 0.019239334389567375], [0.02427951619029045, 0.017370939254760742, 0.006637170445173979, 0.005498973187059164, 0.040236663073301315, 0.010798057541251183, 0.05667589604854584, 0.07709871977567673, 0.13294903934001923, 0.08050905168056488, 0.02364744246006012, 0.04306286945939064, 0.025552287697792053, 0.03202931582927704, 0.04290685057640076, 0.04245941713452339, 0.02289239689707756, 0.08134102821350098, 0.05693833529949188, 0.10455963015556335, 0.0445636585354805, 0.027992649003863335], [0.006842182949185371, 0.31814897060394287, 0.010848202742636204, 0.049889519810676575, 0.03864341601729393, 0.017349958419799805, 0.09325146675109863, 0.09606991708278656, 0.05957484990358353, 0.015639901161193848, 0.03884454816579819, 0.045422911643981934, 0.03457815572619438, 0.026058919727802277, 0.03509215638041496, 0.015501405112445354, 0.021110977977514267, 0.03068825602531433, 0.01956140622496605, 0.009911718778312206, 
0.0060499380342662334, 0.010921180248260498], [0.015100013464689255, 0.03888079524040222, 0.01634230464696884, 0.007630934473127127, 0.018440749496221542, 0.004220679402351379, 0.053610607981681824, 0.03992219269275665, 0.15522490441799164, 0.07848816365003586, 0.022436311468482018, 0.06010838970541954, 0.06393028795719147, 0.05238800868391991, 0.0360272042453289, 0.07803697884082794, 0.013676609843969345, 0.037482328712940216, 0.018724657595157623, 0.13835272192955017, 0.03597911819815636, 0.0149959372356534], [0.032698340713977814, 0.02621285617351532, 0.0328311026096344, 0.019204093143343925, 0.02268883027136326, 0.018516888841986656, 0.06716569513082504, 0.02025652304291725, 0.030263900756835938, 0.017443932592868805, 0.06812495738267899, 0.06379532814025879, 0.03142308071255684, 0.06287936121225357, 0.09324978291988373, 0.06349725276231766, 0.050687093287706375, 0.09361761063337326, 0.03159037604928017, 0.06841100007295609, 0.03125353157520294, 0.05418846011161804], [0.006520026363432407, 0.018282677978277206, 0.007365503814071417, 0.007088277488946915, 0.020822791382670403, 0.051250457763671875, 0.035371921956539154, 0.014591190032660961, 0.02315528132021427, 0.032946933060884476, 0.03502660617232323, 0.019164914265275, 0.018522964790463448, 0.06419973075389862, 0.026091938838362694, 0.038533225655555725, 0.06869057565927505, 0.28495079278945923, 0.036177538335323334, 0.11683070659637451, 0.04909157007932663, 0.025324439629912376], [0.010150518268346786, 0.06151583790779114, 0.01643427275121212, 0.0216384194791317, 0.018305091187357903, 0.013892755843698978, 0.13249054551124573, 0.0519568994641304, 0.033760540187358856, 0.009810411371290684, 0.05293898284435272, 0.05469326674938202, 0.042820531874895096, 0.03199596330523491, 0.01622900739312172, 0.0507962629199028, 0.018464047461748123, 0.20884904265403748, 0.06361758708953857, 0.03928030654788017, 0.013922990299761295, 0.03643682599067688], [0.013324225321412086, 0.1960597038269043, 0.011220538057386875, 
0.03387570008635521, 0.038841363042593, 0.018715910613536835, 0.0929412841796875, 0.08504172414541245, 0.0733971819281578, 0.03220164403319359, 0.0406428724527359, 0.06658219546079636, 0.040015459060668945, 0.04753270372748375, 0.012220063246786594, 0.02351788803935051, 0.018006794154644012, 0.055923473089933395, 0.024649186059832573, 0.02647610753774643, 0.026198269799351692, 0.022615674883127213]], [[0.044319599866867065, 0.09851907938718796, 0.018516631796956062, 0.06793386489152908, 0.11951562762260437, 0.042199794203042984, 0.03331516683101654, 0.029918072745203972, 0.01859157159924507, 0.08978284150362015, 0.1175948977470398, 0.06909756362438202, 0.06360214948654175, 0.02412101998925209, 0.01756753958761692, 0.02492891065776348, 0.027895215898752213, 0.010131679475307465, 0.012820744886994362, 0.015599898993968964, 0.029919544234871864, 0.024108534678816795], [0.029496872797608376, 0.026011567562818527, 0.044550687074661255, 0.03481239825487137, 0.049388621002435684, 0.031432975083589554, 0.033834464848041534, 0.11201661825180054, 0.03202054649591446, 0.04295166954398155, 0.09489782899618149, 0.0637747272849083, 0.07849516719579697, 0.03828766196966171, 0.029889550060033798, 0.03467376157641411, 0.026943296194076538, 0.01965983770787716, 0.049469780176877975, 0.016179034486413002, 0.05419604107737541, 0.05701689422130585], [0.0005963122239336371, 0.4809424579143524, 0.00016687385505065322, 0.005284683778882027, 0.44703975319862366, 0.0006458673160523176, 0.024504922330379486, 0.005054238252341747, 0.0011325166560709476, 0.014458255842328072, 0.0030409067403525114, 0.0008472286863252521, 0.0031349333003163338, 0.0001346414937870577, 0.0001105954943341203, 0.0026132629718631506, 0.0007314787362702191, 0.0013587045250460505, 0.001303937053307891, 0.00032143545104190707, 0.006376366596668959, 0.00020070193568244576], [0.00840800628066063, 0.13538989424705505, 0.02818630263209343, 0.012071614153683186, 0.01778356358408928, 0.02480779029428959, 
0.041753657162189484, 0.07629936933517456, 0.00710098072886467, 0.06624968349933624, 0.030641524121165276, 0.018317412585020065, 0.15920603275299072, 0.07395519316196442, 0.027168285101652145, 0.02880672551691532, 0.04403999447822571, 0.04423493519425392, 0.054024383425712585, 0.01490383967757225, 0.0634661614894867, 0.023184729740023613], [0.004068636801093817, 0.03729153424501419, 0.008356429636478424, 0.01665922813117504, 0.03988762944936752, 0.00753835029900074, 0.3933577835559845, 0.06413723528385162, 0.013553992845118046, 0.039507485926151276, 0.1209908276796341, 0.05083337798714638, 0.05957714468240738, 0.037269625812768936, 0.0008157825213856995, 0.016576673835515976, 0.004194541834294796, 0.011023813858628273, 0.020406050607562065, 0.0036588688381016254, 0.02975665219128132, 0.020538330078125], [0.019637180492281914, 0.0845654085278511, 0.05448896437883377, 0.035745762288570404, 0.11288218200206757, 0.019999878481030464, 0.07529330253601074, 0.04067825898528099, 0.04102021083235741, 0.04458967596292496, 0.04633312299847603, 0.03924546390771866, 0.09046577662229538, 0.053328439593315125, 0.015931835398077965, 0.031429994851350784, 0.01859154738485813, 0.03019961155951023, 0.046032052487134933, 0.020841585472226143, 0.054843027144670486, 0.023856736719608307], [0.019150545820593834, 0.05682281777262688, 0.012959271669387817, 0.05906156077980995, 0.10181394964456558, 0.02968735620379448, 0.09310321509838104, 0.08774121105670929, 0.05066661909222603, 0.0735894963145256, 0.06396139413118362, 0.04581458121538162, 0.0574333593249321, 0.05054081603884697, 0.013460012152791023, 0.03411901742219925, 0.013805976137518883, 0.04029630869626999, 0.01912641152739525, 0.018658410757780075, 0.040873389691114426, 0.017314165830612183], [0.042314544320106506, 0.009647833183407784, 0.18769855797290802, 0.024929171428084373, 0.008116287179291248, 0.061791613698005676, 0.010277893394231796, 0.02350512333214283, 0.033651579171419144, 0.022364865988492966, 0.029501426964998245, 
0.030708232894539833, 0.05997876077890396, 0.11094105243682861, 0.050956841558218, 0.020612549036741257, 0.059608668088912964, 0.015687160193920135, 0.04776452109217644, 0.04450017586350441, 0.04773655906319618, 0.057706646621227264], [0.024826474487781525, 0.01032626535743475, 0.011195574887096882, 0.00256452988833189, 0.012523039244115353, 0.05154917761683464, 0.032279860228300095, 0.26570478081703186, 0.01670851930975914, 0.025361193343997, 0.08026949316263199, 0.01550050638616085, 0.046776022762060165, 0.02299521490931511, 0.008671375922858715, 0.02815253660082817, 0.07307900488376617, 0.10547397285699844, 0.053416658192873, 0.039637915790081024, 0.02974667027592659, 0.04324124753475189], [0.03339916467666626, 0.04824478551745415, 0.019646864384412766, 0.008812060579657555, 0.02728124149143696, 0.04288726672530174, 0.03746426850557327, 0.14608478546142578, 0.027280883863568306, 0.019453015178442, 0.06021349877119064, 0.04426594451069832, 0.08321661502122879, 0.043033622205257416, 0.01665392518043518, 0.04503832384943962, 0.037714939564466476, 0.06541802734136581, 0.10362450778484344, 0.019801033660769463, 0.029834674671292305, 0.0406305268406868], [0.02333650551736355, 0.08147714287042618, 0.01113126240670681, 0.015328431501984596, 0.09140722453594208, 0.023526523262262344, 0.026922641322016716, 0.15745161473751068, 0.017216693609952927, 0.10010908544063568, 0.1360519528388977, 0.06984372437000275, 0.10470809787511826, 0.029947586357593536, 0.005717556923627853, 0.01454149093478918, 0.01668575033545494, 0.015645842999219894, 0.015140276402235031, 0.007290016859769821, 0.02191806212067604, 0.014602556824684143], [0.014481477439403534, 0.09821345657110214, 0.010621516965329647, 0.021375631913542747, 0.09784576296806335, 0.020271535962820053, 0.03329864889383316, 0.10022217035293579, 0.027100108563899994, 0.1072956994175911, 0.131325826048851, 0.051358047872781754, 0.13362175226211548, 0.028315618634223938, 0.008668272756040096, 0.013459008187055588, 
0.02010715939104557, 0.01200148556381464, 0.016685573384165764, 0.013811308890581131, 0.024153737351298332, 0.01576615683734417], [0.016942890360951424, 0.051081378012895584, 0.021766630932688713, 0.017219679430127144, 0.03974820300936699, 0.030160807073116302, 0.04062028229236603, 0.13116878271102905, 0.022407300770282745, 0.06075626611709595, 0.06365270167589188, 0.08047259598970413, 0.07087381184101105, 0.03498416766524315, 0.019612004980444908, 0.03452526777982712, 0.017199423164129257, 0.05624285340309143, 0.05324085056781769, 0.016445910558104515, 0.08681459724903107, 0.034063633531332016], [0.015287905931472778, 0.17504894733428955, 0.010512475855648518, 0.020714567974209785, 0.03888639807701111, 0.0206308476626873, 0.03718007355928421, 0.09944794327020645, 0.029474448412656784, 0.04628289118409157, 0.04989926144480705, 0.027878113090991974, 0.0680544450879097, 0.02384091727435589, 0.050635844469070435, 0.05279335007071495, 0.03826276957988739, 0.03789215162396431, 0.0793100893497467, 0.02739321067929268, 0.029257865622639656, 0.02131548523902893], [0.014416727237403393, 0.10398036241531372, 0.031949277967214584, 0.008823259733617306, 0.010758891701698303, 0.025015367195010185, 0.03165984898805618, 0.11292218416929245, 0.015871897339820862, 0.07380128651857376, 0.027987129986286163, 0.03301011770963669, 0.06326467543840408, 0.0838833898305893, 0.0325130932033062, 0.03290111944079399, 0.03715701773762703, 0.0754493772983551, 0.07195986807346344, 0.028635511174798012, 0.051298968493938446, 0.03274056315422058], [0.011048665270209312, 0.13845422863960266, 0.011300439015030861, 0.01841699704527855, 0.027566149830818176, 0.0201104748994112, 0.06258334219455719, 0.10351689159870148, 0.01755026914179325, 0.10368962585926056, 0.04148653894662857, 0.023940106853842735, 0.07693363726139069, 0.030906228348612785, 0.029674889519810677, 0.045050352811813354, 0.03031640499830246, 0.05894951522350311, 0.061847664415836334, 0.01378115639090538, 0.053985532373189926, 
0.018890956416726112], [0.015181468799710274, 0.11433294415473938, 0.03920880705118179, 0.016379453241825104, 0.06438272446393967, 0.023427193984389305, 0.04967670887708664, 0.04079630225896835, 0.03031829558312893, 0.07924448698759079, 0.06776516884565353, 0.07241404056549072, 0.059849224984645844, 0.059099189937114716, 0.011694848537445068, 0.04016326740384102, 0.014329209923744202, 0.03327061980962753, 0.04209033027291298, 0.03091389499604702, 0.06382547318935394, 0.03163645789027214], [0.022801300510764122, 0.07944119721651077, 0.03780463710427284, 0.050424497574567795, 0.02013830840587616, 0.02237224392592907, 0.025266144424676895, 0.03191712871193886, 0.04128464683890343, 0.07986476272344589, 0.06451281160116196, 0.05530686676502228, 0.1114579290151596, 0.07764972746372223, 0.06491940468549728, 0.03155754506587982, 0.028252195566892624, 0.018674619495868683, 0.038814663887023926, 0.031374868005514145, 0.03943679854273796, 0.026727745309472084], [0.05373051390051842, 0.028384951874613762, 0.05398143455386162, 0.0255647674202919, 0.011727879755198956, 0.09006695449352264, 0.009470128454267979, 0.05039949342608452, 0.0307560246437788, 0.04408525675535202, 0.04026908800005913, 0.06274783611297607, 0.038656070828437805, 0.051993478089571, 0.0657457783818245, 0.03170882910490036, 0.050475068390369415, 0.031100617721676826, 0.05781302973628044, 0.07039579749107361, 0.04652142897248268, 0.05440564453601837], [0.012892568483948708, 0.11768434941768646, 0.011868758127093315, 0.009641855023801327, 0.04068073257803917, 0.014325001277029514, 0.05711853876709938, 0.07339772582054138, 0.02004111185669899, 0.06863755732774734, 0.0583646185696125, 0.0335916206240654, 0.06970848888158798, 0.01233680173754692, 0.0223773792386055, 0.051581695675849915, 0.02314448356628418, 0.08085653185844421, 0.10680101066827774, 0.03639944642782211, 0.04570353031158447, 0.032846175134181976], [0.026612520217895508, 0.06736916303634644, 0.024226713925600052, 0.006236144341528416, 
0.01810421608388424, 0.08425506204366684, 0.030971361324191093, 0.11223010718822479, 0.015122748911380768, 0.039660681039094925, 0.042035192251205444, 0.037604205310344696, 0.07356179505586624, 0.0712791308760643, 0.01647806540131569, 0.046229712665081024, 0.0515267439186573, 0.07280274480581284, 0.059123098850250244, 0.03221956267952919, 0.03252328559756279, 0.03982776030898094], [0.01746155135333538, 0.13648980855941772, 0.005892861168831587, 0.024300891906023026, 0.10716167092323303, 0.02409122698009014, 0.03199455514550209, 0.05790916457772255, 0.01721438392996788, 0.09639959037303925, 0.1727735847234726, 0.05267021059989929, 0.11112122982740402, 0.021540384739637375, 0.014476194977760315, 0.015477415174245834, 0.019502734765410423, 0.014985579997301102, 0.008406948298215866, 0.012291857041418552, 0.02367512881755829, 0.014163067564368248]], [[0.05713723599910736, 0.03743421286344528, 0.0790083110332489, 0.04927892982959747, 0.03669419139623642, 0.056796830147504807, 0.0361802913248539, 0.051239609718322754, 0.0694408193230629, 0.042386554181575775, 0.03317369148135185, 0.04630400240421295, 0.047352761030197144, 0.0765952318906784, 0.03127661719918251, 0.04020662233233452, 0.03151925653219223, 0.03140345960855484, 0.022950943559408188, 0.04591388627886772, 0.03436105325818062, 0.043345384299755096], [0.0034625523258000612, 0.0015383053105324507, 0.9029327034950256, 0.010305419564247131, 0.0014487250009551644, 0.005078006070107222, 0.0033752599265426397, 0.018584895879030228, 0.0067493063397705555, 0.007845914922654629, 0.0035417599137872458, 0.0029945867136120796, 0.0009649309795349836, 0.009655541740357876, 0.001739660743623972, 0.0015446428442373872, 0.0024942646268755198, 0.0013613867340609431, 0.005950590129941702, 0.005301920231431723, 0.0011783881345763803, 0.0019511455902829766], [0.016674472019076347, 0.028225772082805634, 0.09862430393695831, 0.030123906210064888, 0.038399457931518555, 0.017589189112186432, 0.01701698824763298, 0.02851659059524536, 
0.029086174443364143, 0.07192570716142654, 0.06722477078437805, 0.18724937736988068, 0.02327837236225605, 0.06138160079717636, 0.024855097755789757, 0.05037480965256691, 0.014319525100290775, 0.019155491143465042, 0.020904647186398506, 0.060670170933008194, 0.020480118691921234, 0.07392347604036331], [0.007654025685042143, 0.00691634975373745, 0.6483871340751648, 0.020955486223101616, 0.008802478201687336, 0.051815394312143326, 0.00889363419264555, 0.07603596895933151, 0.007632515858858824, 0.025325432419776917, 0.007923295721411705, 0.006545624230057001, 0.0029782545752823353, 0.0358496718108654, 0.004048222675919533, 0.01816403493285179, 0.0158963892608881, 0.0064004831947386265, 0.008074750192463398, 0.02216695062816143, 0.000937779201194644, 0.008596162311732769], [0.02781195566058159, 0.008758456446230412, 0.24941112101078033, 0.03048894554376602, 0.011692860163748264, 0.09466356039047241, 0.06181110814213753, 0.09706678241491318, 0.04126911610364914, 0.04386250302195549, 0.028264764696359634, 0.01984218880534172, 0.011113384738564491, 0.03941256180405617, 0.01040167547762394, 0.010496026836335659, 0.031750600785017014, 0.04427566006779671, 0.018429195508360863, 0.08616668730974197, 0.00595144322142005, 0.027059420943260193], [0.0271547120064497, 0.0075480081140995026, 0.2986930012702942, 0.018315965309739113, 0.00635001715272665, 0.03258243575692177, 0.06697558611631393, 0.14728336036205292, 0.045831307768821716, 0.045802220702171326, 0.009980544447898865, 0.023783139884471893, 0.009169291704893112, 0.05751838535070419, 0.004669615533202887, 0.018380841240286827, 0.012978477403521538, 0.04471856355667114, 0.0166587196290493, 0.05599023029208183, 0.013178568333387375, 0.036437053233385086], [0.0032799318432807922, 0.004394857678562403, 0.7333399057388306, 0.013934542424976826, 0.0030131186358630657, 0.043588992208242416, 0.00924756471067667, 0.019662154838442802, 0.03386123850941658, 0.016452493146061897, 0.010411684401333332, 0.008859659545123577, 
0.004898350220173597, 0.014619407244026661, 0.0033786771818995476, 0.003922825213521719, 0.013367688283324242, 0.004712420515716076, 0.004302648361772299, 0.04283025115728378, 0.0008352307486347854, 0.007086376193910837], [0.02912077121436596, 0.016673941165208817, 0.013173338025808334, 0.021122140809893608, 0.031113525852560997, 0.04936766251921654, 0.08600129932165146, 0.05349491909146309, 0.046538472175598145, 0.03475308045744896, 0.04068705067038536, 0.10655204206705093, 0.02612229250371456, 0.06701141595840454, 0.023516036570072174, 0.057905636727809906, 0.02779691480100155, 0.02783116325736046, 0.027253983542323112, 0.09458298236131668, 0.021734096109867096, 0.0976472944021225], [0.010065714828670025, 0.028116511180996895, 0.004766506142914295, 0.038267090916633606, 0.06088399142026901, 0.030976489186286926, 0.0253528393805027, 0.047552648931741714, 0.020318876951932907, 0.03717535734176636, 0.04156452789902687, 0.055577829480171204, 0.020803088322281837, 0.04692276939749718, 0.06656275689601898, 0.04874415695667267, 0.022702187299728394, 0.08514688909053802, 0.09751739352941513, 0.07827159762382507, 0.0894232913851738, 0.043287526816129684], [0.013887369073927402, 0.019529392942786217, 0.005887498147785664, 0.04241899400949478, 0.017042793333530426, 0.02522652968764305, 0.06714186072349548, 0.0631711483001709, 0.05440368130803108, 0.03642672300338745, 0.03677729144692421, 0.15783870220184326, 0.024696236476302147, 0.06047182157635689, 0.015597371384501457, 0.03205291926860809, 0.011481999419629574, 0.028726184740662575, 0.034525904804468155, 0.15706034004688263, 0.03556861728429794, 0.060066670179367065], [0.014610446989536285, 0.014365995302796364, 0.12860415875911713, 0.028540050610899925, 0.0112964091822505, 0.11886468529701233, 0.04079375043511391, 0.011052196845412254, 0.06047334894537926, 0.020152844488620758, 0.03073352389037609, 0.0371675044298172, 0.01798980124294758, 0.15198302268981934, 0.021481547504663467, 0.010957179591059685, 
0.03402629494667053, 0.01586216874420643, 0.009474542923271656, 0.13835430145263672, 0.005251960828900337, 0.07796437293291092], [0.02538822777569294, 0.027313482016324997, 0.03028969094157219, 0.038120087236166, 0.024771109223365784, 0.06168728321790695, 0.03385778144001961, 0.03664296492934227, 0.10674621909856796, 0.025048350915312767, 0.05380680039525032, 0.055948156863451004, 0.046181876212358475, 0.06714994460344315, 0.024838672950863838, 0.01920885033905506, 0.02449580281972885, 0.06467760354280472, 0.022546563297510147, 0.09145183861255646, 0.03265746310353279, 0.08717122673988342], [0.04809226468205452, 0.02444831281900406, 0.003920365124940872, 0.024512358009815216, 0.027687938883900642, 0.021696332842111588, 0.026804635301232338, 0.13222649693489075, 0.044004421681165695, 0.030390888452529907, 0.03485910966992378, 0.07545144110918045, 0.025728344917297363, 0.02824990451335907, 0.021807022392749786, 0.05779605731368065, 0.012244991958141327, 0.04249391332268715, 0.12578842043876648, 0.04923996329307556, 0.10616212338209152, 0.03639466315507889], [0.0175325945019722, 0.04274608567357063, 0.0375017374753952, 0.017175262793898582, 0.047037266194820404, 0.021424157544970512, 0.032135672867298126, 0.039514295756816864, 0.0813383162021637, 0.06905990839004517, 0.04828827455639839, 0.06209232658147812, 0.06403732299804688, 0.03534365072846413, 0.018393507227301598, 0.037594541907310486, 0.02425168827176094, 0.05077157914638519, 0.05257915332913399, 0.09163001924753189, 0.07613684982061386, 0.03341573476791382], [0.015084280632436275, 0.025294844061136246, 0.0053480532951653, 0.013814017176628113, 0.031271856278181076, 0.07315725833177567, 0.015597470104694366, 0.1304759830236435, 0.018897024914622307, 0.06397093087434769, 0.053373876959085464, 0.04461309686303139, 0.01824629306793213, 0.059176694601774216, 0.016545597463846207, 0.09923474490642548, 0.03246792033314705, 0.05035577714443207, 0.07167217880487442, 0.08888251334428787, 0.020829545333981514, 
0.05169007182121277], [0.06070182844996452, 0.024255145341157913, 0.012134959921240807, 0.037988610565662384, 0.05141916498541832, 0.03705316781997681, 0.05736543983221054, 0.10194229334592819, 0.051976412534713745, 0.03489990532398224, 0.031532060354948044, 0.02900603413581848, 0.031642600893974304, 0.018610455095767975, 0.035408392548561096, 0.05872146412730217, 0.04229849949479103, 0.08227461576461792, 0.06656154245138168, 0.0477311909198761, 0.043779581785202026, 0.042696598917245865], [0.01810210756957531, 0.02118845097720623, 0.0026246451307088137, 0.011071898974478245, 0.02422482892870903, 0.011418966576457024, 0.04910094290971756, 0.0628628209233284, 0.05383412167429924, 0.03719790652394295, 0.035605549812316895, 0.07212259620428085, 0.04925196245312691, 0.027357613667845726, 0.01718990132212639, 0.06929861754179001, 0.010682320222258568, 0.06274384260177612, 0.06893628090620041, 0.08248571306467056, 0.07817178219556808, 0.13452723622322083], [0.021800905466079712, 0.03453108295798302, 0.012987653724849224, 0.04026251658797264, 0.02160702832043171, 0.02484028786420822, 0.039488788694143295, 0.11651251465082169, 0.13231222331523895, 0.03314922749996185, 0.026288114488124847, 0.026075739413499832, 0.06853382289409637, 0.023586325347423553, 0.0342724435031414, 0.019586365669965744, 0.02556133270263672, 0.03945504501461983, 0.06903897970914841, 0.11233384907245636, 0.046998318284749985, 0.030777405947446823], [0.05084146931767464, 0.030659645795822144, 0.003597438335418701, 0.020480122417211533, 0.03971977159380913, 0.02464214339852333, 0.008541095070540905, 0.16398495435714722, 0.029073873534798622, 0.02342403121292591, 0.031289178878068924, 0.05178266391158104, 0.0234431941062212, 0.04006326198577881, 0.020759738981723785, 0.05949893221259117, 0.015391381457448006, 0.06407704949378967, 0.11337850987911224, 0.04380347579717636, 0.0633808821439743, 0.07816718518733978], [0.010488816536962986, 0.02734740637242794, 0.0017938800156116486, 0.019199125468730927, 
0.037903472781181335, 0.012455021031200886, 0.02003980427980423, 0.06019638851284981, 0.014822986908257008, 0.060602303594350815, 0.016674771904945374, 0.024005437269806862, 0.03195478394627571, 0.01338235940784216, 0.03905390202999115, 0.039828527718782425, 0.012619826011359692, 0.02807641588151455, 0.15621615946292877, 0.015531531535089016, 0.34127092361450195, 0.016536196693778038], [0.03760408237576485, 0.025820130482316017, 0.004078955389559269, 0.05227538198232651, 0.01704113557934761, 0.018608741462230682, 0.023028500378131866, 0.25404849648475647, 0.01732654683291912, 0.030103031545877457, 0.014429614879190922, 0.05704968422651291, 0.019034739583730698, 0.07357406616210938, 0.016991352662444115, 0.04268065094947815, 0.004917635582387447, 0.026276463642716408, 0.06562317162752151, 0.09043226391077042, 0.03640491142868996, 0.07265043258666992], [0.03167328983545303, 0.05285872146487236, 0.020335860550403595, 0.030049104243516922, 0.046916503459215164, 0.02045929804444313, 0.012246142141520977, 0.09419137984514236, 0.09065507352352142, 0.03275243192911148, 0.026815250515937805, 0.05490834265947342, 0.05717957019805908, 0.08591058105230331, 0.016334321349859238, 0.03905673325061798, 0.009410850703716278, 0.053886059671640396, 0.032010290771722794, 0.04163375496864319, 0.10510065406560898, 0.04561567306518555]], [[0.016017060726881027, 0.023404402658343315, 0.04686204344034195, 0.03441617637872696, 0.04644289240241051, 0.02263500727713108, 0.15231041610240936, 0.035954710096120834, 0.0397832952439785, 0.04045478254556656, 0.042587295174598694, 0.03434862196445465, 0.02850678749382496, 0.07427335530519485, 0.04692506790161133, 0.03176609426736832, 0.06909215450286865, 0.053579043596982956, 0.05473320186138153, 0.03969589248299599, 0.04759032279253006, 0.018621345981955528], [0.0024644797667860985, 0.04021146520972252, 0.011120661161839962, 0.007006288506090641, 0.028842970728874207, 0.011404428631067276, 0.034910641610622406, 0.019925439730286598, 
0.015613238327205181, 0.2858489453792572, 0.1816646307706833, 0.07076107710599899, 0.06901683658361435, 0.015903491526842117, 0.028137914836406708, 0.00957538839429617, 0.009630483575165272, 0.061690252274274826, 0.017737148329615593, 0.008451656438410282, 0.03519627824425697, 0.03488636761903763], [0.004709288477897644, 0.039413321763277054, 0.002133650006726384, 0.002951381029561162, 0.029387518763542175, 0.004834587685763836, 0.20601682364940643, 0.020905734971165657, 0.005783909931778908, 0.08260295540094376, 0.09890053421258926, 0.10076425224542618, 0.19450241327285767, 0.010199250653386116, 0.015541047789156437, 0.0340602733194828, 0.0100247235968709, 0.06766506284475327, 0.034706905484199524, 0.0031738250982016325, 0.018563799560070038, 0.013158692978322506], [0.0027696280740201473, 0.0234938096255064, 0.09276699274778366, 0.01459351647645235, 0.30464968085289, 0.0102217523381114, 0.27060917019844055, 0.006340506952255964, 0.035105932503938675, 0.03617721423506737, 0.0512121208012104, 0.04991770535707474, 0.00695883808657527, 0.05393989384174347, 0.0032900955993682146, 0.0025547624099999666, 0.0022692307829856873, 0.005159652326256037, 0.0014698466984555125, 0.01491424161940813, 0.0036938025150448084, 0.007891659624874592], [0.018780961632728577, 0.04262502118945122, 0.016029082238674164, 0.009922035969793797, 0.03964453563094139, 0.03408103436231613, 0.021683963015675545, 0.0603099949657917, 0.040529437363147736, 0.12270759046077728, 0.06540945172309875, 0.10260333120822906, 0.06115131452679634, 0.0268784761428833, 0.037042923271656036, 0.10158082842826843, 0.014396488666534424, 0.027619069442152977, 0.03793460503220558, 0.012287653982639313, 0.06162872537970543, 0.04515348747372627], [0.00043264878331683576, 0.006490766070783138, 0.0021082020830363035, 0.004076455254107714, 0.008465733379125595, 0.0011042170226573944, 0.8742868900299072, 0.007014959119260311, 0.00245438190177083, 0.02418612316250801, 0.00923920702189207, 0.01902001164853573, 
0.01178956963121891, 0.006006965879350901, 0.003006708575412631, 0.0019904139917343855, 0.0013406402431428432, 0.008348400704562664, 0.004998155869543552, 0.0005467527080327272, 0.0017455428605899215, 0.0013471997808665037], [0.009292813017964363, 0.0740131139755249, 0.017268285155296326, 0.01386464387178421, 0.09596521407365799, 0.02244354598224163, 0.04000532254576683, 0.033116672188043594, 0.0581820011138916, 0.036898136138916016, 0.1469181627035141, 0.06088695675134659, 0.02239977940917015, 0.050484731793403625, 0.013150086626410484, 0.028048571199178696, 0.021290287375450134, 0.04414152354001999, 0.033439114689826965, 0.07446116954088211, 0.041377175599336624, 0.06235264986753464], [0.01681485027074814, 0.021984966471791267, 0.02111062966287136, 0.01357338111847639, 0.0177683737128973, 0.02251383475959301, 0.026584813371300697, 0.05496750399470329, 0.5126034617424011, 0.033150751143693924, 0.06970679759979248, 0.0779985561966896, 0.016980890184640884, 0.021724244579672813, 0.02327062003314495, 0.008999714627861977, 0.008459340780973434, 0.00485597038641572, 0.007204647641628981, 0.003937169443815947, 0.003602389246225357, 0.012187080457806587], [0.00052865338511765, 0.011805355548858643, 0.001201199134811759, 0.0011567252222448587, 0.05032078176736832, 0.0041323271580040455, 0.70219886302948, 0.00469663692638278, 0.008052684366703033, 0.0367281436920166, 0.08018941432237625, 0.028979429975152016, 0.025858048349618912, 0.003699545981362462, 0.0019294737139716744, 0.0012985779903829098, 0.005056277383118868, 0.024369362741708755, 0.00197753612883389, 0.0011359554482623935, 0.0013872954295948148, 0.003297732677310705], [0.0009370328043587506, 0.005066505633294582, 0.001530481968075037, 0.0017400109209120274, 0.010772445239126682, 0.011862015351653099, 0.007599689532071352, 0.0025326248724013567, 0.03442266210913658, 0.019794225692749023, 0.855798065662384, 0.012098168954253197, 0.010618641972541809, 0.0038807939272373915, 0.0023113335482776165, 
0.0013925066450610757, 0.0019187740981578827, 0.008251049555838108, 0.00034559250343590975, 0.0023009011056274176, 0.0016063996590673923, 0.0032201323192566633], [0.0007270439527928829, 0.01090867631137371, 0.0021132221445441246, 0.004375293850898743, 0.018779253587126732, 0.0018390483455732465, 0.2997105121612549, 0.0068708439357578754, 0.009435257874429226, 0.06445653736591339, 0.08170778304338455, 0.2835131287574768, 0.11689851433038712, 0.006942512467503548, 0.027895718812942505, 0.017612671479582787, 0.006505626253783703, 0.031495388597249985, 0.002062693005427718, 0.0006168470135889947, 0.00319970422424376, 0.002333797048777342], [0.00018558076408226043, 0.003725186688825488, 0.0003553938295226544, 0.001425166497938335, 0.002729996107518673, 0.0007158031221479177, 0.01876852847635746, 0.0018634309526532888, 0.004267259035259485, 0.02768203616142273, 0.017274023965001106, 0.011490199714899063, 0.8636203408241272, 0.0028628630097955465, 0.011228918097913265, 0.005568814929574728, 0.0067243617959320545, 0.014189573004841805, 0.0014845597324892879, 0.001006177975796163, 0.0021961997263133526, 0.0006356271915137768], [0.0018340732203796506, 0.028116757050156593, 0.005170765332877636, 0.004325126297771931, 0.028176268562674522, 0.009619360789656639, 0.19280613958835602, 0.01943253166973591, 0.055378612130880356, 0.12504863739013672, 0.286171555519104, 0.04037560150027275, 0.06939458847045898, 0.03691839426755905, 0.013973234221339226, 0.006673059891909361, 0.010991732589900494, 0.050210777670145035, 0.0048224604688584805, 0.001905331970192492, 0.003970850259065628, 0.004684126935899258], [0.0022673248313367367, 0.02714668959379196, 0.001380078843794763, 0.0007482499349862337, 0.0070077795535326, 0.00420480826869607, 0.010566924698650837, 0.005086314398795366, 0.005368518177419901, 0.06608375161886215, 0.014170471578836441, 0.03259667754173279, 0.5354710221290588, 0.012850090861320496, 0.15633350610733032, 0.0438859723508358, 0.006801033392548561, 
0.03418329358100891, 0.01671585999429226, 0.004956412594765425, 0.006438975688070059, 0.005736284889280796], [0.007525790948420763, 0.037725552916526794, 0.026251349598169327, 0.014657621271908283, 0.03513272479176521, 0.013310249894857407, 0.04827725887298584, 0.018211711198091507, 0.05441401153802872, 0.03339482471346855, 0.06434016674757004, 0.07511767745018005, 0.15798909962177277, 0.1088474690914154, 0.06834572553634644, 0.1255606710910797, 0.018249351531267166, 0.02774401195347309, 0.03290877118706703, 0.012741802260279655, 0.008980467915534973, 0.010273762047290802], [0.001238653319887817, 0.01898844540119171, 0.0020738081075251102, 0.002013623947277665, 0.003755028825253248, 0.005575800780206919, 0.005672382656484842, 0.003286871826276183, 0.0038977351505309343, 0.020788822323083878, 0.004938884638249874, 0.004822350572794676, 0.056486520916223526, 0.01337192952632904, 0.014746059663593769, 0.016359729692339897, 0.675742506980896, 0.029164999723434448, 0.03393780440092087, 0.02042294293642044, 0.054318927228450775, 0.008396107703447342], [0.0035916350316256285, 0.04740116745233536, 0.0019760201685130596, 0.010872069746255875, 0.01049019955098629, 0.004734881222248077, 0.024647828191518784, 0.01829886995255947, 0.005962039344012737, 0.05183311551809311, 0.019791383296251297, 0.04065525904297829, 0.4046951234340668, 0.010888899676501751, 0.027046047151088715, 0.055472150444984436, 0.0067057302221655846, 0.19694028794765472, 0.02125473879277706, 0.0104361018165946, 0.019150178879499435, 0.007156255189329386], [0.003624357981607318, 0.03592382371425629, 0.0004873141006100923, 0.003084789030253887, 0.004956027027219534, 0.004003203008323908, 0.0015793682541698217, 0.0050308480858802795, 0.018720468506217003, 0.005926699377596378, 0.005112026818096638, 0.0056740716099739075, 0.023581936955451965, 0.004239714704453945, 0.0036380644887685776, 0.015153962187469006, 0.01106118131428957, 0.01593082770705223, 0.48403069376945496, 0.24371160566806793, 
0.0394979752600193, 0.06503105163574219], [0.018133381381630898, 0.07280623912811279, 0.019644923508167267, 0.019793739542365074, 0.01636788807809353, 0.034912943840026855, 0.013622689992189407, 0.06430836021900177, 0.006632820703089237, 0.03373131528496742, 0.012444800697267056, 0.048236943781375885, 0.05798180773854256, 0.023178255185484886, 0.04036986827850342, 0.035996247082948685, 0.03815017268061638, 0.06566929817199707, 0.0987803041934967, 0.1061805933713913, 0.0400177463889122, 0.13303960859775543], [0.004563498310744762, 0.03924867883324623, 0.0018728094873949885, 0.006236334331333637, 0.037185825407505035, 0.01055212877690792, 0.022125184535980225, 0.013105116784572601, 0.01225385069847107, 0.027265001088380814, 0.038951486349105835, 0.03309674188494682, 0.0807812511920929, 0.006756136193871498, 0.007218540646135807, 0.04602302238345146, 0.021294234320521355, 0.09962553530931473, 0.0657593235373497, 0.057157132774591446, 0.32351964712142944, 0.04540856555104256], [0.03569166362285614, 0.04437841475009918, 0.014478819444775581, 0.010964990593492985, 0.022160183638334274, 0.04459526762366295, 0.007250635884702206, 0.016951367259025574, 0.03230423480272293, 0.045541077852249146, 0.04142236337065697, 0.015620710328221321, 0.025025255978107452, 0.025883886963129044, 0.007349886000156403, 0.014736524783074856, 0.022967487573623657, 0.052045952528715134, 0.03968232497572899, 0.08391217887401581, 0.05298156291246414, 0.3440552353858948], [0.0037893145345151424, 0.026982828974723816, 0.007702368311583996, 0.0090220021083951, 0.01639479771256447, 0.007372014690190554, 0.03225883096456528, 0.014285187236964703, 0.006711805704981089, 0.052766911685466766, 0.014403765089809895, 0.03018340654671192, 0.1227409690618515, 0.030798621475696564, 0.04593759402632713, 0.07463337481021881, 0.042576514184474945, 0.10352396965026855, 0.11577676981687546, 0.06608220934867859, 0.12473092973232269, 0.05132574960589409]], [[0.029143471270799637, 0.05036020278930664, 
0.0536494255065918, 0.02694389782845974, 0.03870147839188576, 0.031127726659178734, 0.024857662618160248, 0.08168580383062363, 0.058104224503040314, 0.09280010312795639, 0.024825444445014, 0.02724635601043701, 0.06069564074277878, 0.042000457644462585, 0.04545510560274124, 0.05215657129883766, 0.030921664088964462, 0.01674582064151764, 0.09404516220092773, 0.03072257898747921, 0.05793311446905136, 0.029878078028559685], [0.019550954923033714, 0.015614797361195087, 0.013795308768749237, 0.024504605680704117, 0.028398143127560616, 0.007558729499578476, 0.009706133976578712, 0.28133144974708557, 0.01337718591094017, 0.08868913352489471, 0.010211172513663769, 0.009824239648878574, 0.05993562191724777, 0.042231716215610504, 0.0726746991276741, 0.14731988310813904, 0.0069967880845069885, 0.040889494121074677, 0.05886533856391907, 0.007843296974897385, 0.029112478718161583, 0.011568904854357243], [0.0002104057202814147, 0.11609163135290146, 0.006535328924655914, 0.06130760535597801, 0.0032811888959258795, 0.5472725629806519, 0.019502250477671623, 0.0004368451773189008, 0.03969961404800415, 0.00045836201752536, 0.002355293370783329, 0.0010132327442988753, 0.0005109247285872698, 0.004110300447791815, 6.853816739749163e-05, 0.0033880225382745266, 0.16674482822418213, 0.0191187746822834, 0.00030257244361564517, 0.006080730352550745, 0.0002732568245846778, 0.001237696036696434], [0.0034637521021068096, 0.33532848954200745, 0.07960943132638931, 0.04915830120444298, 0.06880797445774078, 0.052641332149505615, 0.13853836059570312, 0.0009191675926558673, 0.13030342757701874, 0.0001358972949674353, 0.004518782254308462, 0.002813873579725623, 0.0009079873561859131, 0.017431313171982765, 0.0010052842553704977, 0.019309647381305695, 0.03430420160293579, 0.039082661271095276, 0.000948116765357554, 0.017900777980685234, 0.00012252383749000728, 0.0027486851904541254], [0.0004527211422100663, 0.007744454778730869, 0.0031400523148477077, 0.08445131033658981, 0.0010864239884540439, 
0.8520883917808533, 0.006594388745725155, 0.0053800432942807674, 0.004469708073884249, 0.0007888352265581489, 0.0016511910362169147, 0.0017430607695132494, 0.0010740432189777493, 0.0038849185220897198, 0.0015841949498280883, 0.0013177235377952456, 0.014884456060826778, 0.004139220807701349, 0.0007857186137698591, 0.0016850365791469812, 0.00020203088934067637, 0.0008521086419932544], [0.030952144414186478, 0.07701060175895691, 0.04073096066713333, 0.02633458748459816, 0.11936692148447037, 0.029737524688243866, 0.09813585877418518, 0.0178420040756464, 0.17136207222938538, 0.02417251095175743, 0.038602668792009354, 0.028551487252116203, 0.05561205372214317, 0.017941750586032867, 0.030417880043387413, 0.041864633560180664, 0.03487817570567131, 0.03173051029443741, 0.022455710917711258, 0.0322851799428463, 0.016902832314372063, 0.013111944310367107], [0.010562998242676258, 0.1267758458852768, 0.03104633465409279, 0.07874736189842224, 0.03133813291788101, 0.2475057989358902, 0.05273997783660889, 0.055254943668842316, 0.18073949217796326, 0.004407168831676245, 0.016214273869991302, 0.018179992213845253, 0.006748576182872057, 0.014173293486237526, 0.004783701151609421, 0.015019843354821205, 0.031139571219682693, 0.021938800811767578, 0.007311510853469372, 0.03946123644709587, 0.001069247373379767, 0.004841931164264679], [0.042222023010253906, 0.06856481730937958, 0.0442601814866066, 0.0542212538421154, 0.049493562430143356, 0.031984321773052216, 0.0564548559486866, 0.0954192504286766, 0.038479384034872055, 0.09063933789730072, 0.02669532038271427, 0.03432794287800789, 0.05324472486972809, 0.031984373927116394, 0.05647388845682144, 0.05423369258642197, 0.017868604511022568, 0.035810161381959915, 0.03134667128324509, 0.032185148447752, 0.030796896666288376, 0.02329362742602825], [0.03501082584261894, 0.043737512081861496, 0.08185584098100662, 0.04827761650085449, 0.04135390371084213, 0.06848961859941483, 0.033876776695251465, 0.08193208277225494, 0.11097626388072968, 
0.037598319351673126, 0.022014202550053596, 0.03951968625187874, 0.03780600428581238, 0.14191758632659912, 0.016534898430109024, 0.07641245424747467, 0.025776518508791924, 0.019636614248156548, 0.011234457604587078, 0.012932909652590752, 0.003798550460487604, 0.009307408705353737], [0.00514502078294754, 0.1346195638179779, 0.03242792561650276, 0.008782451041042805, 0.12573187053203583, 0.006352486554533243, 0.05059665068984032, 0.004781288094818592, 0.32378700375556946, 0.007004710845649242, 0.029167959466576576, 0.008542284369468689, 0.02943499945104122, 0.028294621035456657, 0.01556458044797182, 0.03450131416320801, 0.07543018460273743, 0.016252247616648674, 0.021995751187205315, 0.032590679824352264, 0.0044989995658397675, 0.004497339483350515], [0.014044774696230888, 0.021158559247851372, 0.014984839595854282, 0.01672704890370369, 0.010644824244081974, 0.01949365623295307, 0.006399982608854771, 0.05700355023145676, 0.0221160389482975, 0.18354220688343048, 0.027847103774547577, 0.13310952484607697, 0.1776820421218872, 0.04710087925195694, 0.11995221674442291, 0.035368822515010834, 0.026575477793812752, 0.006136778276413679, 0.019592098891735077, 0.01000723335891962, 0.022733720019459724, 0.007778691127896309], [0.004584868438541889, 0.024771226570010185, 0.00838638748973608, 0.01184894796460867, 0.009967385791242123, 0.010793047957122326, 0.005079901311546564, 0.04114702716469765, 0.04945949837565422, 0.19659046828746796, 0.031473129987716675, 0.027178354561328888, 0.31221631169319153, 0.03698722645640373, 0.07543465495109558, 0.0395999476313591, 0.034046247601509094, 0.008865793235599995, 0.023418758064508438, 0.007300259545445442, 0.03596004471182823, 0.004890530835837126], [0.012238270603120327, 0.024650663137435913, 0.014430510811507702, 0.004045496694743633, 0.04099453613162041, 0.010283920913934708, 0.009354839101433754, 0.13029998540878296, 0.019472608342766762, 0.0557594820857048, 0.01974644884467125, 0.07352227717638016, 0.06539227068424225, 
0.20223376154899597, 0.012198768556118011, 0.192719504237175, 0.013452792540192604, 0.02161654457449913, 0.042771827429533005, 0.005512961186468601, 0.012621036730706692, 0.016681505367159843], [0.009186283685266972, 0.014472720213234425, 0.008131800219416618, 0.0008084503351710737, 0.01746688038110733, 0.0013727537589147687, 0.020245131105184555, 0.015325738117098808, 0.046612247824668884, 0.006164246704429388, 0.07670165598392487, 0.07074545323848724, 0.09681436419487, 0.02671261690557003, 0.054807644337415695, 0.08021469414234161, 0.0694066733121872, 0.09775518625974655, 0.062159549444913864, 0.03649162873625755, 0.011293374933302402, 0.177110955119133], [0.0019045991357415915, 0.17647388577461243, 0.03529907017946243, 0.0064253960736095905, 0.05298719182610512, 0.010107019916176796, 0.034981194883584976, 0.002929616952314973, 0.03369787335395813, 0.0010100336512550712, 0.007564529310911894, 0.0079020531848073, 0.004428654909133911, 0.23094423115253448, 0.0010133270407095551, 0.24530775845050812, 0.03310380503535271, 0.09745218604803085, 0.002735376823693514, 0.007392208091914654, 0.0011352660367265344, 0.005204641725867987], [0.005326703656464815, 0.07565712928771973, 0.006575542036443949, 0.004322637803852558, 0.011770241893827915, 0.003295272123068571, 0.04518554359674454, 0.019087370485067368, 0.06281053274869919, 0.03201277181506157, 0.017618199810385704, 0.030568189918994904, 0.09074422717094421, 0.04493614658713341, 0.08129871636629105, 0.05054015293717384, 0.14878757297992706, 0.09895866364240646, 0.07435595244169235, 0.014718201942741871, 0.0645623430609703, 0.01686778850853443], [0.004740421660244465, 0.02050795778632164, 0.00898131262511015, 0.011705023236572742, 0.028974276036024094, 0.006196097936481237, 0.018822820857167244, 0.006447020918130875, 0.029568282887339592, 0.052158892154693604, 0.020055711269378662, 0.016028141602873802, 0.03918905183672905, 0.0307594146579504, 0.014360780827701092, 0.1651276797056198, 0.015176255255937576, 
0.34322330355644226, 0.010160282254219055, 0.07129085063934326, 0.06352492421865463, 0.02300133742392063], [0.005289445631206036, 0.03742027282714844, 0.006903265602886677, 0.006847254931926727, 0.019385926425457, 0.0031005116179585457, 0.028943119570612907, 0.011150093749165535, 0.32079970836639404, 0.004808525089174509, 0.02837330289185047, 0.016814718022942543, 0.019537873566150665, 0.00964415818452835, 0.022282803431153297, 0.026968635618686676, 0.0445069894194603, 0.06586174666881561, 0.14558307826519012, 0.11976136267185211, 0.02129290997982025, 0.034724291414022446], [0.018712742254137993, 0.015076217241585255, 0.021984852850437164, 0.033937275409698486, 0.011082833632826805, 0.041501376777887344, 0.015732496976852417, 0.03451262041926384, 0.010865786112844944, 0.016365166753530502, 0.027723737061023712, 0.04719866067171097, 0.013359983451664448, 0.03372485190629959, 0.010263189673423767, 0.04299307242035866, 0.01996411569416523, 0.12282141298055649, 0.025388214737176895, 0.25753816962242126, 0.01563122309744358, 0.16362199187278748], [0.004475231748074293, 0.054991573095321655, 0.02934795804321766, 0.01657518930733204, 0.028920836746692657, 0.017081011086702347, 0.03385685756802559, 0.001803064253181219, 0.16277946531772614, 0.010441059246659279, 0.04253124073147774, 0.030350640416145325, 0.023029491305351257, 0.04304058849811554, 0.009979184716939926, 0.04944576323032379, 0.1557292640209198, 0.03574969619512558, 0.029910216107964516, 0.04393264278769493, 0.14063189923763275, 0.035397183150053024], [0.00622208509594202, 0.05036059021949768, 0.026890328153967857, 0.008631139062345028, 0.03357899561524391, 0.0074757360853254795, 0.04237821325659752, 0.004804566036909819, 0.06666874885559082, 0.0035995026119053364, 0.021757405251264572, 0.019927101209759712, 0.0071107735857367516, 0.026408420875668526, 0.0033772638998925686, 0.04288206994533539, 0.030648482963442802, 0.10618426650762558, 0.0284406878054142, 0.28844380378723145, 0.009479429572820663, 
0.16473042964935303], [0.010286848060786724, 0.03883901983499527, 0.028850315138697624, 0.04008384793996811, 0.020787296816706657, 0.017796820029616356, 0.015011263079941273, 0.03120843507349491, 0.0725170150399208, 0.04582451283931732, 0.013905278407037258, 0.020219430327415466, 0.04794222116470337, 0.023737607523798943, 0.052754856646060944, 0.041888464242219925, 0.01634695939719677, 0.02028031088411808, 0.08935809880495071, 0.03826427459716797, 0.27292731404304504, 0.041169799864292145]], [[0.016557106748223305, 0.03793696314096451, 0.03606581315398216, 0.018243545666337013, 0.0360286645591259, 0.030837323516607285, 0.014857188798487186, 0.08542055636644363, 0.17859582602977753, 0.06155312433838844, 0.0388786718249321, 0.1251208484172821, 0.01472950167953968, 0.07082455605268478, 0.02169589139521122, 0.02882835827767849, 0.0054822759702801704, 0.02088630013167858, 0.01871098019182682, 0.011997539550065994, 0.01519257016479969, 0.11155637353658676], [0.007754781749099493, 0.05171728506684303, 0.07763878256082535, 0.055189453065395355, 0.041364800184965134, 0.0366835743188858, 0.04307993873953819, 0.06501481682062149, 0.2707221806049347, 0.05712417513132095, 0.03215555474162102, 0.07294899970293045, 0.026435771957039833, 0.021925557404756546, 0.007789826951920986, 0.010247226804494858, 0.0069285971112549305, 0.01610087789595127, 0.043562304228544235, 0.024929577484726906, 0.020235948264598846, 0.010449947789311409], [0.004186724312603474, 0.07221990823745728, 0.020063387230038643, 0.014760260470211506, 0.024274172261357307, 0.03670089319348335, 0.06914516538381577, 0.07232202589511871, 0.11920884996652603, 0.033642448484897614, 0.33583322167396545, 0.0260474756360054, 0.016101129353046417, 0.0170363150537014, 0.01611396111547947, 0.0024081645533442497, 0.019334005191922188, 0.03967661038041115, 0.02030593901872635, 0.010089676827192307, 0.014100472442805767, 0.016429239884018898], [0.0028982621151953936, 0.029667457565665245, 0.20736144483089447, 
0.004136258736252785, 0.011669241823256016, 0.15919657051563263, 0.20853860676288605, 0.016711456701159477, 0.014601103961467743, 0.1414535492658615, 0.026301927864551544, 0.05579737201333046, 0.012575929053127766, 0.06936467438936234, 0.0005933766951784492, 0.003007607301697135, 0.0011187524069100618, 0.020109284669160843, 0.0014877161011099815, 0.0012245842954143882, 0.004672015085816383, 0.007512866519391537], [0.00018612061103340238, 0.0033368077129125595, 0.009002690203487873, 0.00024214471341110766, 0.0007399967289529741, 0.0037763253785669804, 0.97138512134552, 0.004052404779940844, 0.0003580565098673105, 0.0015497240237891674, 0.0018342513358220458, 0.0010488296393305063, 0.00020043569384142756, 0.0005251372931525111, 2.9634949896717444e-05, 0.00016226638399530202, 4.3291896872688085e-05, 0.0010084803216159344, 0.00012201734352856874, 8.433883340330794e-05, 0.00012602200149558485, 0.00018585241923574358], [0.002446170663461089, 0.00799096655100584, 0.0027359507512301207, 0.0053168549202382565, 0.01413760520517826, 0.020445549860596657, 0.017223335802555084, 0.6930344700813293, 0.13748778402805328, 0.008686777204275131, 0.015842391178011894, 0.02180561237037182, 0.0020220219157636166, 0.00466243177652359, 0.0020129787735641003, 0.0024982343893498182, 0.0006423763115890324, 0.010908760130405426, 0.010851573199033737, 0.005240178667008877, 0.009358329698443413, 0.004649623762816191], [0.0002519859990570694, 0.0017640521982684731, 0.00312470062635839, 0.0007079445640556514, 0.00036837917286902666, 0.0018149636453017592, 0.001386338146403432, 0.00892709568142891, 0.9662333726882935, 0.00311684631742537, 0.0026848928537219763, 0.003230136353522539, 0.000227341428399086, 0.001276862807571888, 0.0002623855252750218, 4.349434675532393e-05, 0.00016491275164298713, 0.00042986744665540755, 0.00038407999090850353, 0.0017729277024045587, 0.0004579929227475077, 0.0013692841166630387], [0.002132730558514595, 0.014433142729103565, 0.011485201306641102, 
0.0038569977041333914, 0.024602245539426804, 0.07053327560424805, 0.02442813292145729, 0.03005921095609665, 0.04412701353430748, 0.3525448739528656, 0.1484043151140213, 0.06375160813331604, 0.003981144167482853, 0.09797845780849457, 0.00227533676661551, 0.02092168666422367, 0.0052926200442016125, 0.04624604806303978, 0.0028542601503431797, 0.002664370695129037, 0.013277736492455006, 0.014149526134133339], [0.0002569362404756248, 0.00097745843231678, 0.0007531936862505972, 0.0007143148104660213, 0.005051865708082914, 0.0015196672175079584, 0.002947990782558918, 0.001759322127327323, 0.0003453815297689289, 0.0016118955099955201, 0.9625982046127319, 0.00645245099440217, 0.0014437229838222265, 0.0013845445355400443, 0.004169049672782421, 0.001096645137295127, 0.0015011028153821826, 0.0021398321259766817, 0.0008980859420262277, 0.00034106860402971506, 0.0005444536218419671, 0.0014928908785805106], [0.0044159796088933945, 0.030479807406663895, 0.01582658477127552, 0.0553269237279892, 0.03748726472258568, 0.010430014692246914, 0.014799906872212887, 0.03346635028719902, 0.2988467514514923, 0.03288761526346207, 0.01999932900071144, 0.1336517632007599, 0.09590646624565125, 0.02107165940105915, 0.03881838917732239, 0.018001090735197067, 0.004789511673152447, 0.012582505121827126, 0.03322204202413559, 0.06959715485572815, 0.009664161130785942, 0.0087286913767457], [0.0008052856428548694, 0.0025653415359556675, 0.0016576909692957997, 0.00035385560477152467, 0.004271267913281918, 0.0010559590300545096, 0.004864747170358896, 0.00046480982564389706, 0.001921066315844655, 0.004044020548462868, 0.027526207268238068, 0.027397289872169495, 0.877800464630127, 0.009912568144500256, 0.0017752994317561388, 0.022590821608901024, 0.0025329627096652985, 0.0025993252638727427, 0.001757497084327042, 0.0022577785421162844, 0.0005631350795738399, 0.0012825160520151258], [0.003502198029309511, 0.009330752305686474, 0.0028857968281954527, 0.0021931040100753307, 0.008111175149679184, 
0.011782187968492508, 0.008605601266026497, 0.0017490809550508857, 0.001685888972133398, 0.0215989388525486, 0.4329277575016022, 0.03346274048089981, 0.049465667456388474, 0.3204512894153595, 0.008645758964121342, 0.018420882523059845, 0.00984406191855669, 0.034474946558475494, 0.0018524532206356525, 0.0032930427696555853, 0.00655962061136961, 0.00915707927197218], [0.003743603825569153, 0.013091593980789185, 0.007674822583794594, 0.005227446556091309, 0.012086867354810238, 0.004560347180813551, 0.0010574187617748976, 0.03832302987575531, 0.007927660830318928, 0.003760385559871793, 0.022534441202878952, 0.1723935455083847, 0.03674418479204178, 0.05885731801390648, 0.25409403443336487, 0.20546399056911469, 0.00726423179730773, 0.01620708964765072, 0.021807072684168816, 0.012657991610467434, 0.0038036759942770004, 0.09071926027536392], [0.004755374509841204, 0.016036055982112885, 0.005820384249091148, 0.004087568260729313, 0.0021911270450800657, 0.03194321691989899, 0.042285047471523285, 0.02548145316541195, 0.020778067409992218, 0.011654243804514408, 0.01578264869749546, 0.10954823344945908, 0.03959432616829872, 0.02924754098057747, 0.00912676565349102, 0.4164195656776428, 0.037548843771219254, 0.03866283968091011, 0.010652882978320122, 0.029086166992783546, 0.0055482531897723675, 0.09374938160181046], [0.001450881827622652, 0.0130781140178442, 0.011021699756383896, 0.00013553762983065099, 0.002128490712493658, 0.013952711597084999, 0.13576054573059082, 0.00730531569570303, 0.0006441958830691874, 0.014598535373806953, 0.010489648208022118, 0.0058399406261742115, 0.018671508878469467, 0.05898912623524666, 0.00036349266883917153, 0.020695187151432037, 0.4932819604873657, 0.14378492534160614, 0.020153891295194626, 0.013178078457713127, 0.008333004079759121, 0.006143191829323769], [0.004479327239096165, 0.024753017351031303, 0.005410254001617432, 0.005093385465443134, 0.015377136878669262, 0.01812615990638733, 0.006116021890193224, 0.024121366441249847, 
0.00897304154932499, 0.02089272066950798, 0.03085356019437313, 0.025551917031407356, 0.004110922105610371, 0.07055142521858215, 0.027930065989494324, 0.05836101621389389, 0.03522123768925667, 0.44008028507232666, 0.044042930006980896, 0.037913717329502106, 0.019450457766652107, 0.07259003072977066], [0.0013862476916983724, 0.01027671154588461, 0.0033649089746177197, 0.0030144888442009687, 0.0054818713106215, 0.004415408242493868, 0.009301201440393925, 0.010105432942509651, 0.003927050158381462, 0.0065728649497032166, 0.00723046250641346, 0.014647279866039753, 0.015513568185269833, 0.008773424662649632, 0.00252559338696301, 0.0333799384534359, 0.020183192566037178, 0.05366772040724754, 0.7388510704040527, 0.02270735241472721, 0.009179181419312954, 0.015494934283196926], [0.0025241775438189507, 0.002765907673165202, 0.002064001513645053, 0.0020401333458721638, 0.00046877286513336003, 0.001799851655960083, 0.001633303938433528, 0.003907047677785158, 0.008195849135518074, 0.003413547994568944, 0.004042670596390963, 0.005079279188066721, 0.00038489754660986364, 0.01208552811294794, 0.0034092520363628864, 0.0032493174076080322, 0.0568876676261425, 0.02446068823337555, 0.023152323439717293, 0.7673993706703186, 0.011199592612683773, 0.05983677878975868], [0.006560447160154581, 0.016554489731788635, 0.006743272300809622, 0.006014005746692419, 0.008839053101837635, 0.024693742394447327, 0.0031835290137678385, 0.06180217117071152, 0.012137360870838165, 0.006081282626837492, 0.014396201819181442, 0.01888604834675789, 0.001451478572562337, 0.08512359112501144, 0.00587713485583663, 0.01964722014963627, 0.018147822469472885, 0.22913993895053864, 0.059822771698236465, 0.0371311753988266, 0.3196370005607605, 0.038130275905132294], [0.009527474641799927, 0.002965739695355296, 0.000823814538307488, 0.0035464363172650337, 0.0014043800765648484, 0.001093782833777368, 0.0004671530914492905, 0.0029831095598638058, 0.0007480653584934771, 0.00047076272312551737, 0.006215997040271759, 
0.009117056615650654, 0.0014808226842433214, 0.0049064732156693935, 0.014760901220142841, 0.009624743834137917, 0.01455955021083355, 0.001571389497257769, 0.05448504164814949, 0.024310600012540817, 0.0026642242446541786, 0.832272469997406], [0.011139853857457638, 0.04226098954677582, 0.018066253513097763, 0.03791547939181328, 0.004656538367271423, 0.02384926937520504, 0.01311035081744194, 0.0023130988702178, 0.02317756600677967, 0.019325057044625282, 0.006915978621691465, 0.02009221725165844, 0.0076919980347156525, 0.0276048481464386, 0.021409720182418823, 0.009772608056664467, 0.031836915761232376, 0.027317030355334282, 0.013797452673316002, 0.5531247854232788, 0.019665364176034927, 0.06495669484138489], [0.029229681938886642, 0.05755335092544556, 0.006298809312283993, 0.03806443139910698, 0.024860333651304245, 0.06167598068714142, 0.006791263353079557, 0.019817933440208435, 0.007050271145999432, 0.03920575603842735, 0.022372351959347725, 0.02970399707555771, 0.009340149350464344, 0.06453703343868256, 0.034455783665180206, 0.07353749126195908, 0.0534902848303318, 0.07778041064739227, 0.028647059574723244, 0.03217903524637222, 0.1776823103427887, 0.10572631657123566]], [[0.03128042817115784, 0.02010219171643257, 0.005628648679703474, 0.5658754706382751, 0.023082125931978226, 0.017598869279026985, 0.007095955777913332, 0.029819229617714882, 0.07674591988325119, 0.012462498620152473, 0.020789310336112976, 0.015597762539982796, 0.01074731070548296, 0.007791279349476099, 0.004333416000008583, 0.030609043315052986, 0.013226063922047615, 0.008969606831669807, 0.024599000811576843, 0.04481429234147072, 0.019860515370965004, 0.008971142582595348], [0.018945757299661636, 0.060868389904499054, 0.026640238240361214, 0.019798284396529198, 0.013182662427425385, 0.014072143472731113, 0.06209097430109978, 0.07279711216688156, 0.026831787079572678, 0.0396684855222702, 0.00900362990796566, 0.010835745371878147, 0.06265104562044144, 0.022960912436246872, 0.03143143281340599, 
0.026624346151947975, 0.04521825537085533, 0.06507253646850586, 0.17648950219154358, 0.06612879782915115, 0.10524589568376541, 0.023442134261131287], [0.006783016491681337, 0.0524035207927227, 0.003827876877039671, 0.05990653857588768, 0.061077117919921875, 0.012743664905428886, 0.1573084145784378, 0.05839857459068298, 0.04258688539266586, 0.0070524560287594795, 0.017583267763257027, 0.05428192391991615, 0.11926029622554779, 0.009492908604443073, 0.001927449950017035, 0.10794674605131149, 0.006824611686170101, 0.09786179661750793, 0.04473146051168442, 0.04232993349432945, 0.008768593892455101, 0.026902955025434494], [0.0126913757994771, 0.06676661223173141, 0.009464502334594727, 0.03623052313923836, 0.024166960269212723, 0.011812985874712467, 0.0572214238345623, 0.08811100572347641, 0.04660181328654289, 0.03261711448431015, 0.013623594306409359, 0.010903016664087772, 0.1412852257490158, 0.00844014436006546, 0.0193523820489645, 0.03745196759700775, 0.02125285379588604, 0.05471205711364746, 0.14875544607639313, 0.035639744251966476, 0.10494425147771835, 0.01795497164130211], [0.010139246471226215, 0.0700998529791832, 0.01741206645965576, 0.1151433065533638, 0.02752516232430935, 0.010920407250523567, 0.06970428675413132, 0.04824412986636162, 0.04030488803982735, 0.01555580459535122, 0.017973503097891808, 0.02488323114812374, 0.14896507561206818, 0.022355882450938225, 0.01613541878759861, 0.029551248997449875, 0.02358330599963665, 0.059532083570957184, 0.12159525603055954, 0.0456719733774662, 0.025307094678282738, 0.0393967367708683], [0.008124899119138718, 0.15869255363941193, 0.03772880882024765, 0.05105935037136078, 0.026211053133010864, 0.04974908381700516, 0.04473334178328514, 0.030599970370531082, 0.03085976652801037, 0.03128112480044365, 0.025657925754785538, 0.03286278247833252, 0.07430118322372437, 0.018478835001587868, 0.10273534059524536, 0.026848122477531433, 0.034195780754089355, 0.0650409385561943, 0.05955810844898224, 0.024763697758316994, 
0.04549452289938927, 0.02102285996079445], [0.014528338797390461, 0.07889122515916824, 0.01570979878306389, 0.06464342027902603, 0.030127720907330513, 0.016746601089835167, 0.019774101674556732, 0.1212383583188057, 0.031243639066815376, 0.03238693252205849, 0.01342763938009739, 0.01478488091379404, 0.08787490427494049, 0.019703151658177376, 0.03659681975841522, 0.027980033308267593, 0.03290669992566109, 0.03197915107011795, 0.15132088959217072, 0.03160535544157028, 0.09857925027608871, 0.0279510710388422], [0.008417917415499687, 0.11397695541381836, 0.007765315938740969, 0.3341418504714966, 0.029300056397914886, 0.013757930137217045, 0.037366271018981934, 0.04627940431237221, 0.03801373392343521, 0.022238144651055336, 0.01612916961312294, 0.013828142546117306, 0.0609419047832489, 0.010234280489385128, 0.018237050622701645, 0.03690917417407036, 0.01519598439335823, 0.02243448980152607, 0.06640166789293289, 0.027314992621541023, 0.04337261617183685, 0.017742974683642387], [0.004574609454721212, 0.06742195039987564, 0.004413984250277281, 0.011254019103944302, 0.10341715067625046, 0.015084484592080116, 0.04100000485777855, 0.14765571057796478, 0.039463505148887634, 0.029419846832752228, 0.036808133125305176, 0.08133389800786972, 0.08430036902427673, 0.011397753842175007, 0.00599642051383853, 0.11305613070726395, 0.008401123806834221, 0.06086206063628197, 0.036843013018369675, 0.01976875588297844, 0.018848512321710587, 0.0586785152554512], [0.019135607406497, 0.06866958737373352, 0.025233445689082146, 0.056920718401670456, 0.03768100589513779, 0.071917325258255, 0.09188557416200638, 0.08633643388748169, 0.03220148757100105, 0.024372432380914688, 0.02329358085989952, 0.020251959562301636, 0.0839778408408165, 0.03629070892930031, 0.02064560167491436, 0.026847874745726585, 0.06522566825151443, 0.056014515459537506, 0.07157750427722931, 0.028206584975123405, 0.03402010723948479, 0.019294489175081253], [0.01539286132901907, 0.009584493935108185, 0.0015659944619983435, 
0.7385498285293579, 0.03212882578372955, 0.004451141692698002, 0.007258470170199871, 0.02758946642279625, 0.04370366036891937, 0.004740052856504917, 0.00860536377876997, 0.016422228887677193, 0.00920711737126112, 0.0037119353655725718, 0.0008984751766547561, 0.02562597021460533, 0.0019312965450808406, 0.005826584994792938, 0.008091664873063564, 0.022476762533187866, 0.006255670916289091, 0.005982182454317808], [0.029038051143288612, 0.029521428048610687, 0.0017964544240385294, 0.13586735725402832, 0.09508810192346573, 0.023442454636096954, 0.02192814089357853, 0.26199591159820557, 0.04439922422170639, 0.017055761069059372, 0.01387752965092659, 0.024831296876072884, 0.04635358601808548, 0.005590237211436033, 0.0008998893317766488, 0.10829883068799973, 0.005270791240036488, 0.03430449590086937, 0.029664138332009315, 0.02670074999332428, 0.02942357398569584, 0.014651918783783913], [0.019258324056863785, 0.09264322370290756, 0.0363616980612278, 0.06400260329246521, 0.027096956968307495, 0.02350165694952011, 0.05663524940609932, 0.056364476680755615, 0.041857458651065826, 0.03373098000884056, 0.016430070623755455, 0.04002829268574715, 0.08223708719015121, 0.036512892693281174, 0.030063824728131294, 0.031360190361738205, 0.02844884991645813, 0.06762552261352539, 0.07226131856441498, 0.06741981208324432, 0.05444881319999695, 0.021710598841309547], [0.003695604158565402, 0.01556165050715208, 0.01189408265054226, 2.31323665502714e-05, 0.036436956375837326, 0.0061102500185370445, 0.1676054447889328, 0.04792675003409386, 0.024431584402918816, 0.011562250554561615, 0.017822880297899246, 0.09423353523015976, 0.07813440263271332, 0.02612118609249592, 0.0012461780570447445, 0.06771153211593628, 0.0038056625053286552, 0.26143530011177063, 0.02227725274860859, 0.052037544548511505, 0.005562709644436836, 0.044364042580127716], [0.010361343622207642, 0.08544665575027466, 0.014740029349923134, 0.009245716966688633, 0.027449723333120346, 0.020233340561389923, 0.05897356569766998, 
0.052033454179763794, 0.043713245540857315, 0.03201623633503914, 0.03286222368478775, 0.0391731858253479, 0.11886430531740189, 0.01716001331806183, 0.021121639758348465, 0.06691885739564896, 0.022259535267949104, 0.08736857026815414, 0.08974155783653259, 0.047943901270627975, 0.04695688933134079, 0.05541599541902542], [0.011853671632707119, 0.025427736341953278, 0.02711568772792816, 0.0001657021202845499, 0.021990224719047546, 0.009248750284314156, 0.08070557564496994, 0.056015484035015106, 0.0388968400657177, 0.04033072665333748, 0.017893366515636444, 0.05895650386810303, 0.11845901608467102, 0.057878557592630386, 0.006146419793367386, 0.04900892823934555, 0.024198994040489197, 0.13564637303352356, 0.05157982558012009, 0.10429955273866653, 0.024624330922961235, 0.03955771401524544], [0.010857552289962769, 0.09506196528673172, 0.028872493654489517, 0.03258352354168892, 0.03895627707242966, 0.05391825735569, 0.0806431695818901, 0.020594019442796707, 0.028712963685393333, 0.06612957268953323, 0.028821781277656555, 0.030996620655059814, 0.1298258751630783, 0.027693798765540123, 0.04562115669250488, 0.02652376890182495, 0.03304196149110794, 0.08811737596988678, 0.021911488845944405, 0.03029807098209858, 0.04846009239554405, 0.03235810995101929], [0.010979783721268177, 0.060771144926548004, 0.021068181842565536, 0.001932342303916812, 0.011153067462146282, 0.01992044225335121, 0.020925303921103477, 0.05862129479646683, 0.022697923704981804, 0.04305902495980263, 0.008468843065202236, 0.013045644387602806, 0.16043299436569214, 0.03887986019253731, 0.01822695881128311, 0.0235366839915514, 0.044481489807367325, 0.053218018263578415, 0.1735123246908188, 0.05315621197223663, 0.09973454475402832, 0.04217798262834549], [0.015961436554789543, 0.10986216366291046, 0.020963428542017937, 0.013085025362670422, 0.02169218473136425, 0.03295053914189339, 0.05285457894206047, 0.04426371306180954, 0.04608170688152313, 0.03723835200071335, 0.028107624500989914, 0.029721589758992195, 
0.0960269570350647, 0.04100700095295906, 0.019435785710811615, 0.03529132157564163, 0.041530538350343704, 0.052038125693798065, 0.07664681226015091, 0.0629400908946991, 0.0528208464384079, 0.06948021799325943], [0.005801186431199312, 0.0806170254945755, 0.011911598965525627, 0.003582179080694914, 0.03399882838129997, 0.06629455834627151, 0.05246217921376228, 0.0384347066283226, 0.02903074026107788, 0.038928937166929245, 0.02751590497791767, 0.03788416087627411, 0.10193681716918945, 0.03482978790998459, 0.00819341465830803, 0.16004683077335358, 0.02669631317257881, 0.10985502600669861, 0.0356278233230114, 0.02189222350716591, 0.023324372246861458, 0.05113540589809418], [0.013851703144609928, 0.08641703426837921, 0.054143860936164856, 0.01036197692155838, 0.014202946797013283, 0.04610705003142357, 0.0529806986451149, 0.03183794394135475, 0.03141845762729645, 0.05194687098264694, 0.020707257091999054, 0.016258908435702324, 0.08110703527927399, 0.050164565443992615, 0.03770758584141731, 0.019807904958724976, 0.1102258637547493, 0.047256454825401306, 0.08055076003074646, 0.05005163326859474, 0.06557326763868332, 0.02732015773653984], [0.031675662845373154, 0.03036344237625599, 0.007841981947422028, 0.1101112887263298, 0.06081277132034302, 0.02893604151904583, 0.02825121581554413, 0.049651727080345154, 0.09740498661994934, 0.03325561434030533, 0.020075807347893715, 0.032992880791425705, 0.046027351170778275, 0.020801451057195663, 0.002935728756710887, 0.14844967424869537, 0.016898803412914276, 0.056517984718084335, 0.0236224252730608, 0.07842576503753662, 0.050445131957530975, 0.02450229600071907]], [[0.02007477357983589, 0.10973131656646729, 0.014431480318307877, 0.02637208066880703, 0.07030871510505676, 0.025642359629273415, 0.03937350586056709, 0.040774039924144745, 0.03297891840338707, 0.08007509261369705, 0.03476530313491821, 0.045302435755729675, 0.07297080010175705, 0.0574524849653244, 0.034324973821640015, 0.07125888764858246, 0.026795603334903717, 
0.05632532760500908, 0.039137911051511765, 0.023246372118592262, 0.040702927857637405, 0.03795464709401131], [0.02322327345609665, 0.04394044727087021, 0.011154208332300186, 0.006027974188327789, 0.1259593963623047, 0.012774982489645481, 0.013929811306297779, 0.17707523703575134, 0.021107124164700508, 0.026411976665258408, 0.02429838478565216, 0.091716468334198, 0.05044832080602646, 0.027065031230449677, 0.015252513810992241, 0.034761153161525726, 0.017183322459459305, 0.039025306701660156, 0.09273161739110947, 0.03608255088329315, 0.04623536020517349, 0.06359550356864929], [0.007155860774219036, 0.25683119893074036, 0.005443492438644171, 0.008323564194142818, 0.02065693773329258, 0.004447202663868666, 0.26574423909187317, 0.06021784245967865, 0.02023230493068695, 0.0033883454743772745, 0.025604622438549995, 0.046457987278699875, 0.021744845435023308, 0.002303494606167078, 0.002205955097451806, 0.0069655622355639935, 0.008529066108167171, 0.10947253555059433, 0.02187941037118435, 0.014721217565238476, 0.009126855060458183, 0.07854744046926498], [0.0034330321941524744, 0.06787588447332382, 0.0051643988117575645, 0.0018050593789666891, 0.018863113597035408, 0.43255624175071716, 0.011686026118695736, 0.02563210390508175, 0.004549573175609112, 0.025995496660470963, 0.009918037801980972, 0.028262969106435776, 0.05936214700341225, 0.009757296182215214, 0.011476458050310612, 0.01641913317143917, 0.06704313308000565, 0.024537239223718643, 0.02509678155183792, 0.0081189488992095, 0.026780936866998672, 0.11566606909036636], [0.010019777342677116, 0.019582953304052353, 0.012291020713746548, 0.015684446319937706, 0.02903611585497856, 0.026532623916864395, 0.06140240654349327, 0.0870339646935463, 0.039559684693813324, 0.0266664270311594, 0.1325610727071762, 0.11844246834516525, 0.023992808535695076, 0.040339455008506775, 0.019363852217793465, 0.01708380877971649, 0.02937842532992363, 0.022625096142292023, 0.048323776572942734, 0.030707770958542824, 0.031143778935074806, 
0.15822823345661163], [0.013639464974403381, 0.0325775109231472, 0.008881156332790852, 0.014838519506156445, 0.22213439643383026, 0.03615918010473251, 0.02174600400030613, 0.14820867776870728, 0.010880953632295132, 0.038888879120349884, 0.011684599332511425, 0.048647284507751465, 0.028196392580866814, 0.015737881883978844, 0.020228203386068344, 0.05352216213941574, 0.020933721214532852, 0.027652772143483162, 0.05504428222775459, 0.015096044167876244, 0.05882851406931877, 0.09647336602210999], [0.008715055882930756, 0.006733766756951809, 0.011015103198587894, 0.005294707138091326, 0.07294665277004242, 0.01435218844562769, 0.04895658418536186, 0.03063887171447277, 0.3405950963497162, 0.008006908930838108, 0.04765332490205765, 0.0480584017932415, 0.012011474929749966, 0.019625868648290634, 0.012805333361029625, 0.012927546165883541, 0.016973743215203285, 0.03199157491326332, 0.01399829238653183, 0.19226397573947906, 0.011287740431725979, 0.03314780443906784], [0.009083484299480915, 0.01957077719271183, 0.0029977073427289724, 0.007940789684653282, 0.01886921189725399, 0.23661646246910095, 0.010186931118369102, 0.03516088426113129, 0.015944598242640495, 0.06924959272146225, 0.0225889440625906, 0.04314655065536499, 0.09165549278259277, 0.01214469876140356, 0.017282741144299507, 0.018997594714164734, 0.14927856624126434, 0.01859171874821186, 0.02768601104617119, 0.023596201092004776, 0.06744659692049026, 0.08196453750133514], [0.007548385299742222, 0.026069633662700653, 0.004296502564102411, 0.011053205467760563, 0.03689555451273918, 0.028172891587018967, 0.04981587827205658, 0.07070323824882507, 0.02449161559343338, 0.0790027603507042, 0.09624654054641724, 0.09634377062320709, 0.08742444217205048, 0.019003191962838173, 0.02015867829322815, 0.02908705174922943, 0.04323359206318855, 0.014272059313952923, 0.028258776292204857, 0.01806369610130787, 0.04897783324122429, 0.16088071465492249], [0.02114708162844181, 0.03609545901417732, 0.0215897299349308, 0.016126569360494614, 
0.05713486671447754, 0.050844185054302216, 0.05287426710128784, 0.2283252328634262, 0.027771374210715294, 0.017760051414370537, 0.0332188606262207, 0.05804457888007164, 0.02651611901819706, 0.012451388873159885, 0.013877016492187977, 0.01656993478536606, 0.03021564893424511, 0.028550541028380394, 0.0752817690372467, 0.022802798077464104, 0.024837400764226913, 0.12796512246131897], [0.0040077706798911095, 0.053878605365753174, 0.0012165444204583764, 0.007435247767716646, 0.08368198573589325, 0.013751571998000145, 0.01258178986608982, 0.17252066731452942, 0.029403483495116234, 0.17885304987430573, 0.016607709228992462, 0.056499283760786057, 0.09801597893238068, 0.013032414019107819, 0.005827019456773996, 0.043784063309431076, 0.016697803512215614, 0.009400629438459873, 0.0427401140332222, 0.01072402112185955, 0.09654644131660461, 0.03279373049736023], [0.002050441689789295, 0.01880069263279438, 0.0012358642416074872, 0.004787589889019728, 0.024813907220959663, 0.05824318155646324, 0.008608078584074974, 0.13464628159999847, 0.011128943413496017, 0.2807219922542572, 0.019693739712238312, 0.045421190559864044, 0.07753657549619675, 0.019544051960110664, 0.003847273997962475, 0.025893379002809525, 0.028788356110453606, 0.015752678737044334, 0.04056733101606369, 0.0159294456243515, 0.11939926445484161, 0.04258965700864792], [0.013871773146092892, 0.04440072923898697, 0.00535044027492404, 0.006827111821621656, 0.036335643380880356, 0.01106716226786375, 0.015312476083636284, 0.22785624861717224, 0.02395777963101864, 0.0985255315899849, 0.056523121893405914, 0.07462727278470993, 0.06966115534305573, 0.02739017643034458, 0.008422667160630226, 0.016809536144137383, 0.016860760748386383, 0.024831430986523628, 0.06502360850572586, 0.037580423057079315, 0.07760581374168396, 0.04115918651223183], [0.010521259158849716, 0.02039646916091442, 0.009913671761751175, 0.00544960331171751, 0.027160106226801872, 0.02633795142173767, 0.13212600350379944, 0.04517878592014313, 
0.06965186446905136, 0.009972691535949707, 0.019387179985642433, 0.055869486182928085, 0.03273571655154228, 0.00971553847193718, 0.009984329342842102, 0.012469930574297905, 0.026906874030828476, 0.28052690625190735, 0.019416099414229393, 0.1366497129201889, 0.012975502759218216, 0.026654329150915146], [0.010046492330729961, 0.07935920357704163, 0.0046629165299236774, 0.00316513329744339, 0.012617372907698154, 0.15561875700950623, 0.015193001367151737, 0.03451080247759819, 0.011344731785356998, 0.054774317890405655, 0.03517569229006767, 0.04646169766783714, 0.17901279032230377, 0.023767149075865746, 0.02229747176170349, 0.024008696898818016, 0.08828837424516678, 0.029323730617761612, 0.030974529683589935, 0.01531601045280695, 0.05368947237730026, 0.07039166986942291], [0.011193809099495411, 0.05600832775235176, 0.014588676393032074, 0.009548869915306568, 0.04510309547185898, 0.007778944913297892, 0.021831819787621498, 0.1388675421476364, 0.02294491045176983, 0.1265471875667572, 0.02268604375422001, 0.05900533124804497, 0.06528840214014053, 0.05571232736110687, 0.016997765749692917, 0.032424721866846085, 0.00802691001445055, 0.06882108747959137, 0.06817977130413055, 0.048972614109516144, 0.07326631993055344, 0.026205575093626976], [0.016575179994106293, 0.008615048602223396, 0.007425938732922077, 0.021482979878783226, 0.015182031318545341, 0.02668209746479988, 0.017041366547346115, 0.13715629279613495, 0.023426420986652374, 0.12154577672481537, 0.022882560268044472, 0.05917859822511673, 0.04604661464691162, 0.043257683515548706, 0.047634709626436234, 0.07346691191196442, 0.016144808381795883, 0.060435570776462555, 0.04564365744590759, 0.04113977029919624, 0.10300105810165405, 0.04603501781821251], [0.02067345380783081, 0.016636716201901436, 0.017651798203587532, 0.014355553314089775, 0.02331945113837719, 0.014093754813075066, 0.030639784410595894, 0.03464754670858383, 0.1665874868631363, 0.05343034863471985, 0.03456101566553116, 0.04525705799460411, 
0.041718631982803345, 0.039440080523490906, 0.05227605253458023, 0.03351598605513573, 0.014770242385566235, 0.06646610796451569, 0.0212104469537735, 0.1956366002559662, 0.03887419030070305, 0.024237707257270813], [0.01370809506624937, 0.032716166228055954, 0.0066733285784721375, 0.009359738789498806, 0.01528701651841402, 0.032819610089063644, 0.008362873457372189, 0.0436541922390461, 0.027042420580983162, 0.10409272462129593, 0.025753743946552277, 0.07359790056943893, 0.17553214728832245, 0.043613068759441376, 0.03452088683843613, 0.04933540150523186, 0.028110602870583534, 0.06281568109989166, 0.028337888419628143, 0.05680515244603157, 0.09676212072372437, 0.03109937161207199], [0.011452432721853256, 0.027167467400431633, 0.0086747445166111, 0.014109921641647816, 0.019989874213933945, 0.01955188810825348, 0.02508220076560974, 0.0768999457359314, 0.024764368310570717, 0.1016068160533905, 0.021131867542862892, 0.05557584762573242, 0.08415643125772476, 0.036978501826524734, 0.02972852997481823, 0.08169102668762207, 0.008538158610463142, 0.1331741362810135, 0.03376426175236702, 0.042867787182331085, 0.10046496242284775, 0.042628899216651917], [0.02336576208472252, 0.035497330129146576, 0.032872602343559265, 0.026968156918883324, 0.01548690814524889, 0.11631681770086288, 0.04058573395013809, 0.05072050914168358, 0.04799361154437065, 0.08279551565647125, 0.03896769881248474, 0.02382255345582962, 0.08059199899435043, 0.038195542991161346, 0.04656041041016579, 0.015668611973524094, 0.0534127801656723, 0.05442912504076958, 0.047116994857788086, 0.0430842861533165, 0.04350345581769943, 0.04204362630844116], [0.004078339319676161, 0.056272391229867935, 0.0031543003860861063, 0.005365198012441397, 0.01187441311776638, 0.008618823252618313, 0.006502838805317879, 0.0701218992471695, 0.023021679371595383, 0.31115177273750305, 0.017394227907061577, 0.04674198105931282, 0.11551027745008469, 0.04334929585456848, 0.009152103215456009, 0.028171803802251816, 0.009871399030089378, 
0.039385754615068436, 0.025995656847953796, 0.03226156905293465, 0.12023784220218658, 0.011766426265239716]], [[0.01543679740279913, 0.04791839048266411, 0.02519438974559307, 0.031826410442590714, 0.04599609225988388, 0.1608148217201233, 0.014320427551865578, 0.040764015167951584, 0.1023162454366684, 0.18703588843345642, 0.03094797022640705, 0.030083416029810905, 0.02206401154398918, 0.036619633436203, 0.01934843137860298, 0.046188537031412125, 0.049716781824827194, 0.010497757233679295, 0.011034269817173481, 0.022380024194717407, 0.029292581602931023, 0.020203227177262306], [0.005614531226456165, 0.08810937404632568, 0.035720065236091614, 0.008302354253828526, 0.01586304046213627, 0.3382079601287842, 0.038811322301626205, 0.011117305606603622, 0.14601804316043854, 0.15873588621616364, 0.019730541855096817, 0.03770396113395691, 0.014889148995280266, 0.015757432207465172, 0.0015647915424779058, 0.013217354193329811, 0.001676098327152431, 0.004341647028923035, 0.0038845576345920563, 0.009109769016504288, 0.012092593125998974, 0.01953211985528469], [0.001993848942220211, 0.14310240745544434, 0.01757228747010231, 0.0005989689379930496, 0.010825506411492825, 0.004048187285661697, 0.11210156232118607, 0.005585382226854563, 0.015860673040151596, 0.32125231623649597, 0.02230614423751831, 0.014925247058272362, 0.13661128282546997, 0.035984769463539124, 0.0009597638272680342, 0.005319478455930948, 0.010637166909873486, 0.06543032079935074, 0.012960074469447136, 0.013838067650794983, 0.031551942229270935, 0.01653457246720791], [0.00034744941513054073, 0.0035535788629204035, 0.0024607328232377768, 0.0003820857673417777, 0.003741169348359108, 0.9533566236495972, 0.0003263726830482483, 0.010926039889454842, 0.0041579594835639, 0.010419444181025028, 0.0009240839281119406, 0.0009146227966994047, 0.0005504732835106552, 0.0016213800990954041, 2.9685808840440586e-05, 0.00033507030457258224, 0.0002271654229843989, 0.0001486945548094809, 0.0003341106348671019, 0.00018775076023302972, 
0.0021363634150475264, 0.0029190450441092253], [0.011633733287453651, 0.14316987991333008, 0.08716358989477158, 0.0041342065669596195, 0.03396492823958397, 0.18663758039474487, 0.08842425048351288, 0.015059467405080795, 0.06959260255098343, 0.007689206395298243, 0.024520501494407654, 0.03187081217765808, 0.03165558725595474, 0.019847309216856956, 0.0008486200240440667, 0.006887973751872778, 0.003388522192835808, 0.040651917457580566, 0.027410555630922318, 0.0523843914270401, 0.016548551619052887, 0.09651593863964081], [0.0030038063414394855, 0.0033581543248146772, 0.0031503138598054647, 0.0015074752736836672, 0.002383367856964469, 0.006865772418677807, 0.0004239602421876043, 0.8866103291511536, 0.014184702187776566, 0.00849236361682415, 0.0181695818901062, 0.03824398294091225, 0.0017668859800323844, 0.0084627540782094, 0.0004790556849911809, 0.0008526276214979589, 0.00021183431090321392, 9.06179120647721e-05, 0.00025855167768895626, 0.00021632201969623566, 0.0001458072365494445, 0.001121728797443211], [0.0018525022314861417, 0.01931842416524887, 0.008565805852413177, 0.004374497104436159, 0.011806328780949116, 0.8256440758705139, 0.002894813660532236, 0.003922970499843359, 0.08081146329641342, 0.006288414821028709, 0.002328556962311268, 0.003459007479250431, 0.001481477404013276, 0.00388019229285419, 0.0005530562484636903, 0.0008330954588018358, 0.00028587342239916325, 0.0009318395750597119, 0.0014776000753045082, 0.005434548016637564, 0.0058560860343277454, 0.007999379187822342], [0.003446399699896574, 0.11304251849651337, 0.005384915042668581, 0.0032082919497042894, 0.018758540973067284, 0.0027210053522139788, 0.02715817466378212, 0.027214739471673965, 0.0641491562128067, 0.5654354095458984, 0.028878772631287575, 0.02494504489004612, 0.051634471863508224, 0.009465712122619152, 0.002939472207799554, 0.006582576781511307, 0.009584043174982071, 0.009523145854473114, 0.002977263182401657, 0.0035225695464760065, 0.011805810034275055, 0.007622011471539736], 
[0.009003428742289543, 0.013641602359712124, 0.0011565249878913164, 0.0033559587318450212, 0.013080148957669735, 0.0027125426568090916, 0.0013596398057416081, 0.7510108947753906, 0.019545594230294228, 0.008400481194257736, 0.06617627292871475, 0.06629443168640137, 0.014675072394311428, 0.0031923954375088215, 0.006030526012182236, 0.01213593315333128, 0.0029198757838457823, 0.0003444083558861166, 0.0013627071166411042, 0.00029949223971925676, 0.000666641688439995, 0.002635406097397208], [0.0001869204716058448, 0.00887386780232191, 0.00023122662969399244, 0.0001759371516527608, 0.0019296942045912147, 8.73616590979509e-05, 0.0020936736837029457, 0.002472063060849905, 0.037864722311496735, 0.02548767253756523, 0.010407810099422932, 0.8813859224319458, 0.022169504314661026, 0.0005214819684624672, 0.000581622589379549, 0.002551016630604863, 0.0004505268589127809, 0.00034514805884100497, 0.0003243703977204859, 0.0011916662333533168, 0.000374001421732828, 0.00029386673122644424], [0.0005143504240550101, 0.005828134249895811, 0.002315637655556202, 0.0002808849676512182, 0.004470053594559431, 0.0030516015831381083, 0.0023536207154393196, 0.012396170757710934, 0.02915140986442566, 0.8259055614471436, 0.026534967124462128, 0.01811653934419155, 0.043898507952690125, 0.017528893426060677, 0.00021613159333355725, 0.0025733639486134052, 0.0014661697205156088, 0.000789932906627655, 0.00032290391391143203, 0.00026714816340245306, 0.0005527178291231394, 0.001465306617319584], [0.000706421909853816, 0.0032604148145765066, 0.001570480060763657, 0.0005094478256069124, 0.0014158814447000623, 0.0006599103799089789, 0.0008508163155056536, 0.010202428326010704, 0.004152338951826096, 0.03804174065589905, 0.35342055559158325, 0.08582323044538498, 0.09835483133792877, 0.3802869915962219, 0.0035965843126177788, 0.0037212399765849113, 0.006709197070449591, 0.0018277325434610248, 0.0010740171419456601, 0.00033775781048461795, 0.0005235475255176425, 0.0029543645214289427], [0.002583252964541316, 
0.014272630214691162, 0.0006918379804119468, 0.0009666545083746314, 0.006366475019603968, 0.0003669759025797248, 0.014840143732726574, 0.0036796487402170897, 0.007766172755509615, 0.014401094987988472, 0.046934306621551514, 0.6867232322692871, 0.050558723509311676, 0.0023881217930465937, 0.038675565272569656, 0.06401816010475159, 0.01467783935368061, 0.014983744360506535, 0.0052422587759792805, 0.001961945090442896, 0.003877759212628007, 0.004023375455290079], [0.0025619221851229668, 0.01607627607882023, 0.0021102908067405224, 0.0006230572471395135, 0.0039774770848453045, 0.0006297205691225827, 0.02351241186261177, 0.003791202325373888, 0.006600674241781235, 0.014832616783678532, 0.0373929925262928, 0.10907188057899475, 0.43608972430229187, 0.015915244817733765, 0.012827740050852299, 0.19318993389606476, 0.033866509795188904, 0.056134581565856934, 0.014917552471160889, 0.0023541771806776524, 0.0032539749518036842, 0.01026999019086361], [0.0021240676287561655, 0.04453533887863159, 0.0028878147713840008, 0.0015449856873601675, 0.015776289626955986, 0.020555684342980385, 0.0014524278230965137, 0.012114536948502064, 0.010359111241996288, 0.08650927990674973, 0.06989840418100357, 0.08680284023284912, 0.15828904509544373, 0.1497723013162613, 0.006891998928040266, 0.025461379438638687, 0.23846398293972015, 0.0042830281890928745, 0.005157838575541973, 0.001980555010959506, 0.03589797392487526, 0.01924123242497444], [0.0018176065059378743, 0.01111691165715456, 0.002867328468710184, 0.0012917628046125174, 0.00502740079537034, 0.003218225436285138, 0.008274144493043423, 0.0019258566899225116, 0.001520004472695291, 0.008240359835326672, 0.04004586115479469, 0.004785037133842707, 0.08117401599884033, 0.04803086072206497, 0.020400838926434517, 0.05757036805152893, 0.06915110349655151, 0.44721755385398865, 0.0421181283891201, 0.02147912234067917, 0.06601111590862274, 0.056716375052928925], [0.005811970680952072, 0.022753756493330002, 0.0023374578449875116, 0.0057319276966154575, 
0.006998644210398197, 0.0014659229200333357, 0.00942047219723463, 0.014918262138962746, 0.010636122897267342, 0.008181951940059662, 0.09930400550365448, 0.12951955199241638, 0.1338665932416916, 0.020510688424110413, 0.04489901289343834, 0.3861146867275238, 0.02406277507543564, 0.01967395283281803, 0.02548319287598133, 0.003386344527825713, 0.006744782906025648, 0.01817786693572998], [0.002870743628591299, 0.009209898300468922, 0.002721858909353614, 0.005162632092833519, 0.00412188982591033, 0.0012472779490053654, 0.016619395464658737, 0.0003785800072364509, 0.005409778095781803, 0.0007711737998761237, 0.012655707076191902, 0.004762656986713409, 0.010236059315502644, 0.0069876061752438545, 0.020641343668103218, 0.031680818647146225, 0.02913970872759819, 0.20242105424404144, 0.029671521857380867, 0.5196501016616821, 0.033622369170188904, 0.050017744302749634], [0.0005194219411350787, 0.025412626564502716, 0.000209745965548791, 0.000645218591671437, 0.002625860972329974, 0.0007325515034608543, 0.0037483072374016047, 0.0005360287032090127, 0.0006289142766036093, 0.0020773992873728275, 0.009924820624291897, 0.00227437075227499, 0.05813034623861313, 0.0016979260835796595, 0.009720927104353905, 0.02984527125954628, 0.22442767024040222, 0.43830931186676025, 0.019504578784108162, 0.006108148954808712, 0.1469235122203827, 0.01599709689617157], [0.01166468020528555, 0.017916280776262283, 0.0024648939725011587, 0.0031649970915168524, 0.015012807212769985, 0.002295067999511957, 0.017168382182717323, 0.013039263896644115, 0.011868586763739586, 0.002898751525208354, 0.02956697717308998, 0.014257545582950115, 0.056766606867313385, 0.012678063474595547, 0.02056221291422844, 0.1597217172384262, 0.1545214056968689, 0.056078750640153885, 0.10760073363780975, 0.030563466250896454, 0.02068423479795456, 0.23950456082820892], [0.0009428209159523249, 0.03685563802719116, 0.0007628729217685759, 0.0005431047175079584, 0.003698615822941065, 0.0005287142121233046, 0.02379913069307804, 
0.0005346379475668073, 0.011644888669252396, 0.012927587144076824, 0.015355107374489307, 0.01766127347946167, 0.085024893283844, 0.0025910804979503155, 0.00565442256629467, 0.03906632214784622, 0.03023679181933403, 0.15520082414150238, 0.04596657305955887, 0.35249483585357666, 0.07683961093425751, 0.0816703513264656], [0.0024386390578001738, 0.021357053890824318, 0.003850360168144107, 0.0036510091740638018, 0.0071516260504722595, 0.005556690972298384, 0.013906543143093586, 0.005171631462872028, 0.006440889090299606, 0.02138693258166313, 0.02117105945944786, 0.006744172424077988, 0.05336899682879448, 0.01771148294210434, 0.008592941798269749, 0.10046879202127457, 0.1254725158214569, 0.28643494844436646, 0.042436715215444565, 0.03322026878595352, 0.1528632640838623, 0.06060352548956871]], [[0.014449577778577805, 0.05864037945866585, 0.017519205808639526, 0.1657136082649231, 0.02559766359627247, 0.013832737691700459, 0.04306158795952797, 0.014087500050663948, 0.12491122633218765, 0.004531483165919781, 0.015056734904646873, 0.030090460553765297, 0.008545810356736183, 0.04384072870016098, 0.10546603798866272, 0.028020845726132393, 0.08557014167308807, 0.012575827538967133, 0.06643664836883545, 0.08789144456386566, 0.007678775116801262, 0.026481546461582184], [0.00823169481009245, 0.02376195415854454, 0.004569241311401129, 0.017083050683140755, 0.012528661638498306, 0.007348800543695688, 0.01224350742995739, 0.010713933035731316, 0.058929938822984695, 0.015381338074803352, 0.013608760200440884, 0.15804600715637207, 0.029390458017587662, 0.021758608520030975, 0.06772896647453308, 0.035358577966690063, 0.16881781816482544, 0.049989912658929825, 0.053373441100120544, 0.12597493827342987, 0.03398614376783371, 0.07117428630590439], [0.00461431173607707, 0.00825697835534811, 0.002818675711750984, 0.7317230701446533, 0.012127124704420567, 0.0010373006807640195, 0.028319716453552246, 0.0010384637862443924, 0.09487435966730118, 0.0010398238664492965, 0.00800935085862875, 
0.007763321045786142, 0.002615241799503565, 0.0029005995020270348, 0.059781141579151154, 0.005736897699534893, 0.0022023445926606655, 0.0006313416524790227, 0.002418606309220195, 0.019211068749427795, 0.0004349650698713958, 0.002445280086249113], [0.013883777894079685, 0.05066467449069023, 0.11255086213350296, 0.19996196031570435, 0.07002928853034973, 0.07873699814081192, 0.016911040991544724, 0.01201770268380642, 0.09327813237905502, 0.0027078725397586823, 0.02011396363377571, 0.01868026703596115, 0.0039305011741817, 0.012022526003420353, 0.04348602518439293, 0.0930108055472374, 0.044956739991903305, 0.021603619679808617, 0.013253609649837017, 0.06239091604948044, 0.002536471001803875, 0.01327231153845787], [0.00613583717495203, 0.033949654549360275, 0.015226886607706547, 0.06766780465841293, 0.02491523139178753, 0.6271477341651917, 0.012703260406851768, 0.02208280935883522, 0.03483053296804428, 0.0006840622518211603, 0.014522528275847435, 0.03457758575677872, 0.0009304714039899409, 0.004349594935774803, 0.019977916032075882, 0.033628467470407486, 0.019111933186650276, 0.0025269894395023584, 0.006952098570764065, 0.008275587111711502, 0.0003161146305501461, 0.009486867114901543], [0.027976948767900467, 0.03665341064333916, 0.0847906544804573, 0.08010265231132507, 0.12955744564533234, 0.051250144839286804, 0.026565654203295708, 0.08223965018987656, 0.05999463051557541, 0.002325167413800955, 0.06458375602960587, 0.029994294047355652, 0.022945407778024673, 0.019489942118525505, 0.037604376673698425, 0.05025550350546837, 0.09502436220645905, 0.023807113990187645, 0.01705108769237995, 0.037792470306158066, 0.00277348468080163, 0.017221732065081596], [0.02415785752236843, 0.04932719096541405, 0.038456447422504425, 0.010572419501841068, 0.19549259543418884, 0.025555336847901344, 0.041441213339567184, 0.32678115367889404, 0.05022644251585007, 0.0005892587942071259, 0.06445581465959549, 0.01778181456029415, 0.0017366654938086867, 0.00947639625519514, 0.002599162980914116, 
0.02084875851869583, 0.043725352734327316, 0.010426484048366547, 0.008826757781207561, 0.0430988073348999, 0.0003091779362875968, 0.014114871621131897], [0.004993361420929432, 0.06420232355594635, 0.0029562069103121758, 0.5146846771240234, 0.0028353044763207436, 0.0062411692924797535, 0.18690522015094757, 0.006912156939506531, 0.03709306940436363, 0.004207589663565159, 0.004043755121529102, 0.024237114936113358, 0.0028595745097845793, 0.008063139393925667, 0.08644519746303558, 0.004098591860383749, 0.005118107423186302, 0.0007337582064792514, 0.01505891140550375, 0.010894447565078735, 0.0017273941775783896, 0.005688895937055349], [0.01508337166160345, 0.05068157613277435, 0.011522377841174603, 0.24476052820682526, 0.033507902175188065, 0.06791547685861588, 0.017353305593132973, 0.014945282600820065, 0.16722290217876434, 0.10909810662269592, 0.025937017053365707, 0.0518173947930336, 0.0018149238312616944, 0.028658129274845123, 0.04607250913977623, 0.02307615429162979, 0.006278018467128277, 0.00279233674518764, 0.004559720866382122, 0.058233436197042465, 0.0015339173842221498, 0.01713562197983265], [0.004667724948376417, 0.05820919945836067, 0.009987184777855873, 0.06022833287715912, 0.016940543428063393, 0.05246672406792641, 0.015574140474200249, 0.003932681865990162, 0.19080358743667603, 0.1438673585653305, 0.034498970955610275, 0.06598901748657227, 0.019049422815442085, 0.02198757976293564, 0.10324863344430923, 0.026602905243635178, 0.020254183560609818, 0.011874400079250336, 0.04130728542804718, 0.013344168663024902, 0.07348847389221191, 0.011677511967718601], [0.002194721018895507, 0.03041170723736286, 0.0013707616599276662, 0.006690926384180784, 0.004843344911932945, 0.016469845548272133, 0.01634703390300274, 0.0153758954256773, 0.025158101692795753, 0.004501510411500931, 0.012903798371553421, 0.6940547823905945, 0.019437439739704132, 0.02393544651567936, 0.009547381661832333, 0.008898751810193062, 0.04055846855044365, 0.0053089689463377, 0.015135323628783226, 
0.014982878230512142, 0.0024784374982118607, 0.02939435839653015], [0.005877747666090727, 0.047791991382837296, 0.013365602120757103, 0.001930107711814344, 0.09862678498029709, 0.057389676570892334, 0.02234053984284401, 0.11402251571416855, 0.0196670088917017, 0.005524645559489727, 0.10982080549001694, 0.031278371810913086, 0.04021031782031059, 0.03788122907280922, 0.002641482511535287, 0.06478291749954224, 0.19733837246894836, 0.043727558106184006, 0.01394093781709671, 0.04418530687689781, 0.003232861403375864, 0.024423103779554367], [0.0072434707544744015, 0.020496468991041183, 0.0041371979750692844, 0.017755934968590736, 0.02609245851635933, 0.016773482784628868, 0.020314112305641174, 0.013424267992377281, 0.03423724323511124, 0.016717812046408653, 0.032690998166799545, 0.28160393238067627, 0.01621905155479908, 0.16259633004665375, 0.0213799849152565, 0.03736875206232071, 0.0642315223813057, 0.02132384665310383, 0.015325608663260937, 0.04709532484412193, 0.011127691715955734, 0.11184458434581757], [0.0045566619373857975, 0.0355788916349411, 0.011064590886235237, 0.007896729744970798, 0.02484738640487194, 0.004055280704051256, 0.02502983994781971, 0.007625918835401535, 0.037000447511672974, 0.13654616475105286, 0.03731639310717583, 0.05480320751667023, 0.32993388175964355, 0.04026622697710991, 0.03292800486087799, 0.03437751159071922, 0.018703456968069077, 0.033524274826049805, 0.018226701766252518, 0.03622596338391304, 0.05672919377684593, 0.012763247825205326], [0.004021737724542618, 0.03834307938814163, 0.006587846204638481, 0.017685988917946815, 0.005820282269269228, 0.015904437750577927, 0.012154865078628063, 0.008570846170186996, 0.031094256788492203, 0.04440386965870857, 0.013719366863369942, 0.12605638802051544, 0.02643289603292942, 0.024171601980924606, 0.040540434420108795, 0.288754940032959, 0.02938024327158928, 0.08462309092283249, 0.05296279489994049, 0.05721147730946541, 0.032173041254282, 0.03938654437661171], [0.01277772057801485, 
0.05176530405879021, 0.00949155818670988, 0.013724088668823242, 0.01554315909743309, 0.007228931877762079, 0.025050358846783638, 0.02406236156821251, 0.07428587973117828, 0.015251831151545048, 0.05124201625585556, 0.23352353274822235, 0.020974991843104362, 0.03312591835856438, 0.03675786778330803, 0.04138202965259552, 0.08354629576206207, 0.017369352281093597, 0.052159227430820465, 0.06736822426319122, 0.02952871285378933, 0.08384058624505997], [0.010371063835918903, 0.02904753014445305, 0.00705332774668932, 0.034544702619314194, 0.012855138629674911, 0.0037271862383931875, 0.013142794370651245, 0.006359316874295473, 0.0323704369366169, 0.01651620678603649, 0.011937392875552177, 0.06415347009897232, 0.058846428990364075, 0.011577237397432327, 0.11789894849061966, 0.11305441707372665, 0.035233497619628906, 0.2602047622203827, 0.04264337196946144, 0.04581957310438156, 0.03299451991915703, 0.0396486297249794], [0.00571734644472599, 0.10796701908111572, 0.0191606767475605, 0.051588620990514755, 0.020896051079034805, 0.012935107573866844, 0.0420231930911541, 0.019934535026550293, 0.06157149001955986, 0.009831592440605164, 0.019122010096907616, 0.04168684035539627, 0.010704140178859234, 0.02347303181886673, 0.0737568587064743, 0.03989388421177864, 0.08907604217529297, 0.01593872159719467, 0.2438889443874359, 0.043064676225185394, 0.021091068163514137, 0.02667810767889023], [0.0068422057665884495, 0.086736299097538, 0.008950886316597462, 0.010272478684782982, 0.01812647469341755, 0.029577728360891342, 0.06318898499011993, 0.011826138943433762, 0.021439312025904655, 0.014654708094894886, 0.0076813302002847195, 0.020268164575099945, 0.011797228828072548, 0.020945604890584946, 0.012650209479033947, 0.03550329431891441, 0.07918451726436615, 0.13389359414577484, 0.06660192459821701, 0.2561497390270233, 0.038263075053691864, 0.04544614627957344], [0.016314316540956497, 0.041384242475032806, 0.05386761948466301, 0.04742828756570816, 0.08108483254909515, 0.11191894859075546, 
0.014356742613017559, 0.05177289620041847, 0.04436599090695381, 0.018182791769504547, 0.0343814380466938, 0.042152874171733856, 0.00798249151557684, 0.0768669918179512, 0.019710155203938484, 0.07627063989639282, 0.0570070706307888, 0.022992940619587898, 0.03906514495611191, 0.05110059678554535, 0.038596875965595245, 0.053196121007204056], [0.007489809300750494, 0.08756764233112335, 0.00471588084474206, 0.03956376388669014, 0.01045366283506155, 0.02794107422232628, 0.04496971517801285, 0.003035922534763813, 0.02782939001917839, 0.05454573035240173, 0.0067758699879050255, 0.034732889384031296, 0.042579926550388336, 0.011684064753353596, 0.05164632573723793, 0.036696381866931915, 0.0315919853746891, 0.12744906544685364, 0.060195207595825195, 0.072421595454216, 0.15462668240070343, 0.06148740276694298], [0.008834018371999264, 0.11283634603023529, 0.013621116988360882, 0.03499528765678406, 0.015994461253285408, 0.024099793285131454, 0.14211730659008026, 0.03480235114693642, 0.023775072768330574, 0.021771764382719994, 0.014513126574456692, 0.02583397924900055, 0.0280339065939188, 0.025004098191857338, 0.016374340280890465, 0.03313968703150749, 0.05431871488690376, 0.031746864318847656, 0.10682249069213867, 0.09754637628793716, 0.07942192256450653, 0.05439696088433266]]], [[[0.010423000901937485, 0.05823008343577385, 0.056662365794181824, 0.07480587065219879, 0.03237948194146156, 0.014107605442404747, 0.18239979445934296, 0.06413872539997101, 0.0372978113591671, 0.04819721356034279, 0.01405648235231638, 0.016491960734128952, 0.041901715099811554, 0.020209435373544693, 0.06615014374256134, 0.01875452697277069, 0.01129524502903223, 0.07668457925319672, 0.04494372382760048, 0.037151869386434555, 0.053979694843292236, 0.019738636910915375], [0.0035328443627804518, 0.06948087364435196, 0.026220431551337242, 0.016875697299838066, 0.05486816540360451, 0.006558590568602085, 0.021731873974204063, 0.24535907804965973, 0.0013078009942546487, 0.024147290736436844, 
0.0011194964172318578, 0.002014218596741557, 0.02064935863018036, 0.00684019410982728, 0.010160245932638645, 0.014789585955440998, 0.00492453807964921, 0.028828173875808716, 0.3870546817779541, 0.003378272755071521, 0.04321170598268509, 0.006946933921426535], [0.005185967311263084, 0.13370080292224884, 0.0357854887843132, 0.012662936933338642, 0.02883608266711235, 0.013081852346658707, 0.06383457034826279, 0.12397052347660065, 0.011422774754464626, 0.08084747195243835, 0.009161009453237057, 0.010135114192962646, 0.0705634132027626, 0.020796090364456177, 0.006057329010218382, 0.01653306931257248, 0.007277853786945343, 0.07427456974983215, 0.15654434263706207, 0.02732761576771736, 0.07538289576768875, 0.01661822944879532], [0.002759370720013976, 0.05789823457598686, 0.05153141915798187, 0.01412033662199974, 0.018023226410150528, 0.006644108798354864, 0.7314119338989258, 0.02536422573029995, 0.006013429723680019, 0.013826151378452778, 0.01049307081848383, 0.005557764787226915, 0.010800880379974842, 0.002952597802504897, 0.009767054580152035, 0.0019166473066434264, 0.0035616792738437653, 0.0026248767971992493, 0.015173877589404583, 0.0024734355974942446, 0.0052005755715072155, 0.0018851166823878884], [0.0022269687615334988, 0.048448268324136734, 0.011132686398923397, 0.009416241198778152, 0.022253861650824547, 0.01511881873011589, 0.1397334486246109, 0.04281716048717499, 0.043130531907081604, 0.05449837073683739, 0.007589535787701607, 0.009474809281527996, 0.05561580881476402, 0.022353501990437508, 0.012041112408041954, 0.03190184757113457, 0.01545011717826128, 0.2623571455478668, 0.06527643650770187, 0.04971998184919357, 0.0664074569940567, 0.01303590927273035], [0.005332274828106165, 0.052423104643821716, 0.038123227655887604, 0.03794030472636223, 0.05105151608586311, 0.026937415823340416, 0.146132692694664, 0.19179309904575348, 0.012825622223317623, 0.03778877109289169, 0.004733850248157978, 0.007438245695084333, 0.042643867433071136, 0.008393688127398491, 
0.013724042102694511, 0.013437577523291111, 0.030079571530222893, 0.080556720495224, 0.13719777762889862, 0.009987104684114456, 0.0444195419549942, 0.00703999912366271], [0.005257181357592344, 0.10712902992963791, 0.02265985682606697, 0.03724539279937744, 0.03305819630622864, 0.02009042166173458, 0.09471829980611801, 0.09496449679136276, 0.04812527075409889, 0.05473332107067108, 0.02246469259262085, 0.009497758001089096, 0.08236652612686157, 0.019275106489658356, 0.025391338393092155, 0.01161386538296938, 0.016640091314911842, 0.059572864323854446, 0.11799981445074081, 0.037898678332567215, 0.06664206087589264, 0.012655620463192463], [0.014271245338022709, 0.16579672694206238, 0.06438703089952469, 0.016627606004476547, 0.03278709203004837, 0.02702549286186695, 0.0680830180644989, 0.0598825141787529, 0.013415095396339893, 0.07163003087043762, 0.04757927730679512, 0.022494275122880936, 0.09899163246154785, 0.0696907564997673, 0.021231571212410927, 0.02377285622060299, 0.012079373002052307, 0.028124751523137093, 0.046738151460886, 0.018933631479740143, 0.050326667726039886, 0.026131192222237587], [0.031845614314079285, 0.04229168966412544, 0.050615064799785614, 0.044583242386579514, 0.0415424220263958, 0.04863758757710457, 0.04743769019842148, 0.04315594583749771, 0.03814857825636864, 0.04512801393866539, 0.01905977353453636, 0.0195885319262743, 0.03527183458209038, 0.062135931104421616, 0.053156688809394836, 0.049168363213539124, 0.03790588304400444, 0.1053786650300026, 0.04463145509362221, 0.06573596596717834, 0.039623502641916275, 0.034957580268383026], [0.009501929394900799, 0.05632413178682327, 0.022608688101172447, 0.03705929219722748, 0.08346796780824661, 0.04127821698784828, 0.03333383426070213, 0.0989372730255127, 0.006970413029193878, 0.026349592953920364, 0.005458455998450518, 0.017053933814167976, 0.04426632821559906, 0.020134897902607918, 0.01520609576255083, 0.07131650298833847, 0.03309335559606552, 0.10721704363822937, 0.18602029979228973, 
0.007246682420372963, 0.042555585503578186, 0.03459940105676651], [0.019425280392169952, 0.05696721374988556, 0.043712060898542404, 0.04228902608156204, 0.05529041588306427, 0.051284484565258026, 0.07103635370731354, 0.0823211744427681, 0.024893632158637047, 0.06238248571753502, 0.013670377433300018, 0.017631730064749718, 0.04259737953543663, 0.030171873047947884, 0.022320646792650223, 0.034638792276382446, 0.029812654480338097, 0.10124486684799194, 0.061973270028829575, 0.03444165736436844, 0.07350602000951767, 0.028388576582074165], [0.013767178170382977, 0.052002519369125366, 0.046599842607975006, 0.016712285578250885, 0.038938771933317184, 0.07022091001272202, 0.045292969793081284, 0.06264927983283997, 0.029378434643149376, 0.15585193037986755, 0.027198180556297302, 0.007963588461279869, 0.04093673452734947, 0.03426465019583702, 0.012578175403177738, 0.018360255286097527, 0.021465247496962547, 0.10079900920391083, 0.041361253708601, 0.05088808760046959, 0.0985049456357956, 0.014265711419284344], [0.0057641672901809216, 0.036417171359062195, 0.022781478241086006, 0.017100084573030472, 0.016223201528191566, 0.026525020599365234, 0.03624381497502327, 0.09269217401742935, 0.008743047714233398, 0.0533314011991024, 0.0045102727599442005, 0.010956082493066788, 0.03868201747536659, 0.03154764696955681, 0.022557202726602554, 0.04395093768835068, 0.028152598068118095, 0.21170206367969513, 0.1731724888086319, 0.017870506271719933, 0.077970489859581, 0.023106086999177933], [0.005689963232725859, 0.029857555404305458, 0.021815728396177292, 0.009511809796094894, 0.011499562300741673, 0.01806890033185482, 0.02409878373146057, 0.07751820236444473, 0.034803904592990875, 0.10322417318820953, 0.00847064983099699, 0.012544902041554451, 0.07909475266933441, 0.04299372062087059, 0.010738343000411987, 0.024303248152136803, 0.013303212821483612, 0.2454020231962204, 0.08084846287965775, 0.06106586754322052, 0.07085280865430832, 0.014293397776782513], [0.010668044909834862, 
0.044659506529569626, 0.04431943967938423, 0.037016887217760086, 0.033073052763938904, 0.020748956128954887, 0.12314750254154205, 0.15160472691059113, 0.020712008699774742, 0.04549062252044678, 0.009382596239447594, 0.020707417279481888, 0.034585170447826385, 0.02220958285033703, 0.04694157838821411, 0.024222325533628464, 0.015421200543642044, 0.06709662824869156, 0.16461677849292755, 0.01241208054125309, 0.03677457198500633, 0.014189316891133785], [0.008098684251308441, 0.023743517696857452, 0.01697515696287155, 0.017008529976010323, 0.02210077829658985, 0.022900890558958054, 0.06231391429901123, 0.059996914118528366, 0.04982364922761917, 0.0858631432056427, 0.008215739391744137, 0.01195239182561636, 0.033836569637060165, 0.03779588267207146, 0.03265852853655815, 0.05061393231153488, 0.02128692716360092, 0.2521471083164215, 0.06745748966932297, 0.0403212271630764, 0.059465665370225906, 0.015423214063048363], [0.005671046674251556, 0.020853083580732346, 0.028796540573239326, 0.0239881481975317, 0.027528494596481323, 0.02443898655474186, 0.10257216542959213, 0.13085433840751648, 0.016816403716802597, 0.035746458917856216, 0.005580191034823656, 0.007984805852174759, 0.03425786271691322, 0.015814824029803276, 0.024489399045705795, 0.022386759519577026, 0.03250206261873245, 0.19348613917827606, 0.17595571279525757, 0.015139223076403141, 0.04529000073671341, 0.009847354143857956], [0.006332847755402327, 0.02851611189544201, 0.026623979210853577, 0.03868816792964935, 0.024990225210785866, 0.035448841750621796, 0.04784619063138962, 0.14959582686424255, 0.05182577297091484, 0.04169492796063423, 0.011805906891822815, 0.014467361383140087, 0.03707750886678696, 0.029624400660395622, 0.03402411565184593, 0.0279046930372715, 0.023474829271435738, 0.09785974770784378, 0.16840776801109314, 0.049183715134859085, 0.038865551352500916, 0.01574145257472992], [0.008596290834248066, 0.05509926751255989, 0.03913477435708046, 0.01909860596060753, 0.014804583974182606, 
0.012287608347833157, 0.026651252061128616, 0.17444658279418945, 0.009439215995371342, 0.056200169026851654, 0.004893248435109854, 0.012426117435097694, 0.04264748841524124, 0.052268173545598984, 0.029632385820150375, 0.026814235374331474, 0.010093478485941887, 0.14194704592227936, 0.1727328896522522, 0.01446308009326458, 0.058571137487888336, 0.017752395942807198], [0.017911575734615326, 0.027472438290715218, 0.04661084711551666, 0.04123260825872421, 0.020297499373555183, 0.03222508355975151, 0.04386654868721962, 0.10335648059844971, 0.025227677077054977, 0.08874932676553726, 0.0046510170213878155, 0.01119187194854021, 0.02296563610434532, 0.055150356143713, 0.0288427472114563, 0.03413599729537964, 0.01466313749551773, 0.24987944960594177, 0.05174734815955162, 0.03926653787493706, 0.02947583794593811, 0.011080042459070683], [0.005260263103991747, 0.03403157740831375, 0.029857177287340164, 0.0222536101937294, 0.042243242263793945, 0.04744652286171913, 0.054726384580135345, 0.1426677107810974, 0.0026315716095268726, 0.030943678691983223, 0.00191501306835562, 0.007783954031765461, 0.02041816897690296, 0.016818905249238014, 0.012730657123029232, 0.05231302231550217, 0.022952904924750328, 0.21515677869319916, 0.18441331386566162, 0.0037976575549691916, 0.03177757188677788, 0.017860399559140205], [0.00861462950706482, 0.025771113112568855, 0.03682689741253853, 0.042114078998565674, 0.023207342252135277, 0.053220853209495544, 0.04776031896471977, 0.08657364547252655, 0.029204111546278, 0.07304958999156952, 0.0027000398840755224, 0.010858737863600254, 0.019785890355706215, 0.03696664422750473, 0.018320372328162193, 0.03586430847644806, 0.02721077762544155, 0.2513158321380615, 0.0526525154709816, 0.040809664875268936, 0.06344591081142426, 0.013726679608225822]], [[0.035661887377500534, 0.07009018957614899, 0.024592800065875053, 0.0385720320045948, 0.0829671174287796, 0.2011120617389679, 0.22732862830162048, 0.05805294215679169, 0.12103229761123657, 0.03411639481782913, 
0.005408575292676687, 0.01396071445196867, 0.017299791797995567, 0.016410376876592636, 0.0023481224197894335, 0.009089889004826546, 0.003312045009806752, 0.01157374121248722, 0.002510643796995282, 0.01788182556629181, 0.0031039738096296787, 0.0035739911254495382], [0.007400056347250938, 0.2679597735404968, 0.08267441391944885, 0.06492801010608673, 0.3895619511604309, 0.040728405117988586, 0.08453832566738129, 0.006012503523379564, 0.02581673488020897, 0.009416681714355946, 0.005941908806562424, 0.0015082902973517776, 0.005958725698292255, 5.999541099299677e-05, 0.0007444817456416786, 0.0006428378983400762, 0.0002964819432236254, 0.00117309985216707, 0.000705485581420362, 0.00095078517915681, 0.0020118891261518, 0.0009692307794466615], [0.008281758055090904, 0.22606751322746277, 0.013911792077124119, 0.012408032082021236, 0.13482524454593658, 0.06289001554250717, 0.43129080533981323, 0.05355694144964218, 0.015002673491835594, 0.011857404373586178, 0.005361698567867279, 0.00428838562220335, 0.00717646349221468, 0.00026762590277940035, 0.00036807969445362687, 0.001990256365388632, 0.0005744201480410993, 0.005000871140509844, 0.0014861634699627757, 0.0012938914587721229, 0.0011265644570812583, 0.0009733386687003076], [0.0007917017210274935, 0.1418670266866684, 0.00420072628185153, 0.0014463122934103012, 0.05778712406754494, 0.024964401498436928, 0.7225316762924194, 0.007039590273052454, 0.003560283686965704, 0.0034438560251146555, 0.0013521085493266582, 0.0010685776360332966, 0.002469173399731517, 0.00010393714183010161, 3.861675941152498e-05, 0.0022939080372452736, 0.00025913375429809093, 0.010376942344009876, 0.0017507316078990698, 0.00896233506500721, 0.0015789587050676346, 0.0021128952503204346], [0.02077733539044857, 0.08727052807807922, 0.07830306142568588, 0.07307202368974686, 0.1307792067527771, 0.13787560164928436, 0.31113335490226746, 0.029333142563700676, 0.05869141221046448, 0.02820092812180519, 0.014402837492525578, 0.005809486843645573, 
0.006825264543294907, 0.001490076188929379, 0.0032059948425740004, 0.0010568186407908797, 0.001067714998498559, 0.00571109214797616, 0.0013433033600449562, 0.001131302211433649, 0.0015074551338329911, 0.001012016087770462], [0.006069143768399954, 0.11956427991390228, 0.027151595801115036, 0.08912109583616257, 0.1760622262954712, 0.09523286670446396, 0.320695698261261, 0.09194955974817276, 0.05081118270754814, 0.009182855486869812, 0.0015920276055112481, 0.0013715833192691207, 0.0033201042097061872, 0.0002694656141102314, 0.0004301543813198805, 0.002010651398450136, 0.0003081158793065697, 0.001082445029169321, 0.000786275661084801, 0.0021933135576546192, 0.00031881220638751984, 0.00047653677756898105], [0.004607369191944599, 0.054787527769804, 0.007862976752221584, 0.022420872002840042, 0.3062852621078491, 0.043519508093595505, 0.09931991249322891, 0.05137393996119499, 0.26021796464920044, 0.06599485874176025, 0.00781579501926899, 0.006045090034604073, 0.0293040182441473, 0.0007386530051007867, 0.0027493720408529043, 0.01028246060013771, 0.001288647879846394, 0.011410265229642391, 0.003004200290888548, 0.0058144559152424335, 0.004245572257786989, 0.0009111635736189783], [0.0017225844785571098, 0.009574305266141891, 0.002222433453425765, 0.004808119032531977, 0.058736950159072876, 0.037010397762060165, 0.5622885823249817, 0.05209289863705635, 0.07264982163906097, 0.1765473335981369, 0.003634874941781163, 0.006047451868653297, 0.0025088279508054256, 0.0007382524199783802, 0.00013070683053229004, 0.0011426155688241124, 9.512279939372092e-05, 0.006128691602498293, 0.00020326739468146116, 0.001285131904296577, 0.0003756414807867259, 5.60047737963032e-05], [0.00028386234771460295, 0.013707038015127182, 0.001044981530867517, 0.0006322096451185644, 0.011133178137242794, 0.004989492241293192, 0.7118676900863647, 0.005756336729973555, 0.015602233819663525, 0.20412515103816986, 0.018733490258455276, 0.0020129457116127014, 0.004558956250548363, 5.9204005083302036e-05, 
4.331664604251273e-05, 0.0001415966107742861, 5.351835352485068e-05, 0.003962217830121517, 9.52531408984214e-05, 7.756871491437778e-05, 0.0010941034415736794, 2.5780562282307073e-05], [0.003895029192790389, 0.005963197443634272, 0.0011354876914992929, 0.0022863512858748436, 0.02347632683813572, 0.010814563371241093, 0.027761215344071388, 0.18059709668159485, 0.05729315057396889, 0.4182877242565155, 0.06416837126016617, 0.1288972645998001, 0.04822147265076637, 0.002100638346746564, 0.0010429186513647437, 0.005289793014526367, 0.0015995193971320987, 0.01093058567494154, 0.0021386132575571537, 0.00045692716958001256, 0.0030483563896268606, 0.000595402903854847], [0.0018562821205705404, 0.0023719044402241707, 0.0005130801582708955, 0.00040169787826016545, 0.01187380775809288, 0.0029001240618526936, 0.020031295716762543, 0.024052735418081284, 0.04598516598343849, 0.1813863068819046, 0.05044257268309593, 0.13810335099697113, 0.20321504771709442, 0.024100368842482567, 0.005022276192903519, 0.08962377905845642, 0.024866094812750816, 0.14121600985527039, 0.01124342530965805, 0.003434012411162257, 0.013038169592618942, 0.004322483204305172], [0.0009967132937163115, 0.0031436055433005095, 0.0005826909909956157, 0.0008242406765930355, 0.008016470819711685, 0.011438274756073952, 0.052085474133491516, 0.012369189411401749, 0.012138957157731056, 0.146365687251091, 0.061619214713573456, 0.0684385672211647, 0.16826866567134857, 0.10138126462697983, 0.004130235407501459, 0.041139449924230576, 0.024361280724406242, 0.25060930848121643, 0.0035065559204667807, 0.009015774354338646, 0.013068966567516327, 0.006499351002275944], [0.0018667803378775716, 0.0078091369941830635, 0.0010990473674610257, 0.0009805801091715693, 0.0035566186998039484, 0.000907981360796839, 0.0023155524395406246, 0.003940999042242765, 0.005857523530721664, 0.11284606158733368, 0.29094555974006653, 0.07927948981523514, 0.3328554332256317, 0.007384313270449638, 0.04122983664274216, 0.007522697560489178, 
0.00827720481902361, 0.015004507265985012, 0.015118151903152466, 0.0010014602448791265, 0.05517784133553505, 0.005023077595978975], [0.0010919078486040235, 0.0004410312103573233, 0.00015633647853974253, 0.0001993161567952484, 0.0016596452333033085, 0.001198286539874971, 0.0003660032816696912, 0.013929828070104122, 0.0008880298119038343, 0.02203562669456005, 0.01750941388309002, 0.17787404358386993, 0.13157117366790771, 0.04387945309281349, 0.015245085582137108, 0.44671326875686646, 0.034300729632377625, 0.07243656367063522, 0.006747436709702015, 0.001493648742325604, 0.005443235859274864, 0.004819913301616907], [0.0003648935235105455, 0.0009761314722709358, 0.00029197329422459006, 2.1987476429785602e-05, 0.0029994521755725145, 0.0006090706447139382, 0.0034792511723935604, 0.0022429407108575106, 0.0021601051557809114, 0.008745330385863781, 0.018491094931960106, 0.02387053519487381, 0.11630566418170929, 0.017715072259306908, 0.0032912518363445997, 0.508516788482666, 0.080609031021595, 0.11098768562078476, 0.059140246361494064, 0.013192946091294289, 0.018530620262026787, 0.00745787238702178], [0.0008420158992521465, 0.0002554004022385925, 0.00039918150287121534, 0.0002879718958865851, 0.00033871084451675415, 0.00039179902523756027, 0.00021531866514123976, 0.0004815468564629555, 0.00019708022591657937, 0.0027129214722663164, 0.003867780789732933, 0.010163719765841961, 0.01852056011557579, 0.08484233915805817, 0.07674223929643631, 0.0745357945561409, 0.12314697355031967, 0.5467919111251831, 0.01565047726035118, 0.003051696578040719, 0.026732193306088448, 0.009832444600760937], [0.0011035485658794641, 0.0029343036003410816, 0.002659143880009651, 0.0009878367418423295, 0.0017452975735068321, 0.0005022218683734536, 0.0014584008604288101, 0.0015511972596868873, 0.0028476680163294077, 0.001925410469993949, 0.00774918869137764, 0.00593181885778904, 0.02460184134542942, 0.0060796672478318214, 0.07819047570228577, 0.25894880294799805, 0.1450989991426468, 0.12830908596515656, 
0.25113752484321594, 0.01984156295657158, 0.03588639572262764, 0.02050960622727871], [0.00017133026267401874, 0.0007669451297260821, 0.0001031814026646316, 0.00019312732911203057, 0.0005085544544272125, 0.00013707297330256552, 0.0007468166295439005, 0.0017001180676743388, 0.0017600986175239086, 0.0064428444020450115, 0.0008338841143995523, 0.0017463283147662878, 0.018499748781323433, 0.002829961245879531, 0.010646519251167774, 0.15939246118068695, 0.026535620912909508, 0.21995088458061218, 0.2222219854593277, 0.19026759266853333, 0.1266261786222458, 0.00791865773499012], [0.0001856769376900047, 0.0003704149858094752, 0.0003292981127742678, 7.320084114326164e-05, 0.0003883269091602415, 9.489774674875662e-05, 0.0007756571285426617, 0.0002770689607132226, 0.00019068902474828064, 0.0033269445411860943, 0.0029324672650545835, 0.0018166927620768547, 0.00252719153650105, 0.0043355864472687244, 0.004158121533691883, 0.01638875901699066, 0.03818681463599205, 0.41572219133377075, 0.06014708802103996, 0.05018983781337738, 0.3741198182106018, 0.023463161662220955], [3.0632592824986205e-05, 0.0012309179874137044, 0.0002080040576402098, 0.00013735293759964406, 0.00034063184284605086, 0.00040963938226923347, 0.007967590354382992, 8.067012822721153e-05, 0.00010182732512475923, 0.000979188596829772, 0.00040709745371714234, 0.00035571929765865207, 0.000654082337860018, 0.00041325518395751715, 0.00022708301548846066, 0.00341570982709527, 0.002487924648448825, 0.900093138217926, 0.0017551305936649442, 0.006314437836408615, 0.056712400168180466, 0.01567750982940197], [0.0009974004933610559, 0.006630444433540106, 0.001142858061939478, 0.00072091119363904, 0.003327829297631979, 0.000907150621060282, 0.0017086153384298086, 0.003847694955766201, 0.0003652208542916924, 0.0026519475504755974, 0.006641386076807976, 0.003088846802711487, 0.012866538017988205, 0.0009496051352471113, 0.004027971997857094, 0.01502851489931345, 0.033123575150966644, 0.04560495540499687, 0.27421560883522034, 
0.017597461119294167, 0.3795982003211975, 0.18495729565620422], [0.00018269501742906868, 0.0007093515014275908, 0.0002281815541209653, 8.504172728862613e-05, 0.0004728540952783078, 0.000385933555662632, 0.00032077261130325496, 0.00020091267651878297, 9.913843314279802e-06, 0.0003205635875929147, 0.0004750340594910085, 0.0007848492823541164, 0.002896128222346306, 0.011200865730643272, 0.0010564649710431695, 0.01989404670894146, 0.038792215287685394, 0.1331602931022644, 0.019702311605215073, 0.05447190999984741, 0.342109352350235, 0.3725402355194092]], [[0.11397084593772888, 0.016176162287592888, 0.015417175367474556, 0.03852323442697525, 0.10543173551559448, 0.028855666518211365, 0.1308836191892624, 0.017210541293025017, 0.03988839313387871, 0.04183831438422203, 0.06318012624979019, 0.03874044492840767, 0.06016330420970917, 0.016852159053087234, 0.0834287628531456, 0.061192095279693604, 0.04694833979010582, 0.050667017698287964, 0.010695748031139374, 0.006531639024615288, 0.006308861076831818, 0.007095783017575741], [0.021059343591332436, 0.034759677946567535, 0.0723346546292305, 0.045006684958934784, 0.0572221502661705, 0.06201355159282684, 0.05212365835905075, 0.09467989951372147, 0.1387796700000763, 0.07893639802932739, 0.03922272101044655, 0.0639561116695404, 0.011257817037403584, 0.03613802790641785, 0.013184048235416412, 0.01832587458193302, 0.010019311681389809, 0.03435862064361572, 0.029070153832435608, 0.040484469383955, 0.029974624514579773, 0.017092565074563026], [0.027503598481416702, 0.27708250284194946, 0.07629796862602234, 0.0736055001616478, 0.1430443674325943, 0.03657195717096329, 0.03346192464232445, 0.007941055111587048, 0.1350172758102417, 0.016489870846271515, 0.05270841717720032, 0.006635894998908043, 0.028398819267749786, 0.014354489743709564, 0.001167511334642768, 0.003002065233886242, 0.009105837903916836, 0.004159026779234409, 0.005360301584005356, 0.027240153402090073, 0.00868308823555708, 0.012168378569185734], [0.0064364029094576836, 
0.08246719092130661, 0.010342682711780071, 0.08126190304756165, 0.21706253290176392, 0.023900361731648445, 0.04680066928267479, 0.022052856162190437, 0.051500603556632996, 0.08774882555007935, 0.016754422336816788, 0.009817595593631268, 0.04807720705866814, 0.0033741742372512817, 0.041163112968206406, 0.033979497849941254, 0.024618852883577347, 0.06787513941526413, 0.023622244596481323, 0.027146974578499794, 0.06624260544776917, 0.007754159159958363], [0.06913656741380692, 0.06476599723100662, 0.12285171449184418, 0.1334141045808792, 0.1125735267996788, 0.1265418380498886, 0.10544891655445099, 0.05212342366576195, 0.05035509169101715, 0.005779908504337072, 0.010095024481415749, 0.013817102648317814, 0.00552999647334218, 0.05050060898065567, 0.004165665712207556, 0.014732937328517437, 0.009895257651805878, 0.015982866287231445, 0.004253947176039219, 0.016188865527510643, 0.002199435606598854, 0.009647196158766747], [0.0002480823895893991, 0.015473664738237858, 0.0046492526307702065, 0.020388368517160416, 0.7739072442054749, 0.015526114031672478, 0.061816878616809845, 0.0009141654591076076, 0.06513810157775879, 0.0015247196424752474, 0.0012099561281502247, 0.0005992514197714627, 0.008668920025229454, 0.0009337947703897953, 0.009547464549541473, 0.0019815319683402777, 0.005418085493147373, 0.005051872693002224, 0.0004851664125453681, 0.0051190597005188465, 0.0009298358345404267, 0.0004684626474045217], [0.0730518326163292, 0.052894119173288345, 0.02839665487408638, 0.030447591096162796, 0.04147837683558464, 0.055292095988988876, 0.10239638388156891, 0.30787670612335205, 0.10757318884134293, 0.03096902370452881, 0.05944116786122322, 0.013361357152462006, 0.010820003226399422, 0.01498229056596756, 0.0061110868118703365, 0.0069457064382731915, 0.007806848268955946, 0.006764095276594162, 0.024355560541152954, 0.008727316744625568, 0.004838220309466124, 0.005470390897244215], [0.004693153779953718, 0.009433245286345482, 0.004048990085721016, 0.00450577586889267, 
0.017504118382930756, 0.01812824234366417, 0.7020161747932434, 0.03219420462846756, 0.0738183781504631, 0.032950446009635925, 0.021762648597359657, 0.0010491413995623589, 0.009376605041325092, 0.0005810403963550925, 0.016712578013539314, 0.0014939496759325266, 0.02787085622549057, 0.013465439900755882, 0.0067035481333732605, 0.000687415711581707, 0.0009569067624397576, 4.715073009720072e-05], [0.0005373454769141972, 0.010308790020644665, 0.004542177077382803, 0.0074329013004899025, 0.0035884270910173655, 0.18373402953147888, 0.011210915632545948, 0.6321284174919128, 0.020474817603826523, 0.10934210568666458, 0.0018388757016509771, 0.0019729123450815678, 0.00023667982895858586, 0.0023962692357599735, 0.0002428289590170607, 0.0007601345423609018, 0.002112314570695162, 0.0016552945598959923, 0.0030676471069455147, 0.0014766090316697955, 0.0008741855272091925, 6.633662997046486e-05], [0.0010883909417316318, 0.0024854992516338825, 0.0018009329214692116, 0.0009613387519493699, 0.01206015981733799, 0.0031447280198335648, 0.021611014381051064, 0.004468376748263836, 0.891141414642334, 0.018461061641573906, 0.02458908036351204, 0.0015597647288814187, 0.0064567457884550095, 0.0002457729133311659, 0.00141552509739995, 6.942022446310148e-05, 0.0020007644779980183, 0.001074383151717484, 0.001697090337984264, 0.00290643610060215, 0.0007314570830203593, 3.0804978450760245e-05], [0.002678587334230542, 0.00044453743612393737, 0.0013816538266837597, 0.0006256055785343051, 0.0006082956679165363, 0.00784230139106512, 0.003242919687181711, 0.008116725832223892, 0.031812652945518494, 0.4181078374385834, 0.15113510191440582, 0.3582967221736908, 0.003896297886967659, 0.00279837753623724, 0.00039807247230783105, 0.0005616224952973425, 0.001871513552032411, 0.000770159182138741, 0.002308977534994483, 0.000787619617767632, 0.001459070248529315, 0.0008553244988434017], [0.00021056873083580285, 0.00038392990245483816, 0.000158268740051426, 0.00017681090685073286, 0.0004031990247312933, 
0.00014516922237817198, 0.001625239965505898, 3.94336020690389e-05, 0.06622940301895142, 0.0028336322866380215, 0.8039715886116028, 0.0035201667342334986, 0.11688835173845291, 7.433004066115245e-05, 0.000765503675211221, 8.565673851990141e-06, 0.0015099351294338703, 2.4904569727368653e-05, 0.00037063719355501235, 8.333926962222904e-05, 0.00040845750481821597, 0.000168608472449705], [0.0002285603404743597, 0.0008797285263426602, 0.013629915192723274, 0.004024289082735777, 0.001354174455627799, 0.004044495988637209, 0.0009311970788985491, 0.005198760889470577, 0.0017116949893534184, 0.09948556125164032, 0.002732345135882497, 0.5466575026512146, 0.0028455089777708054, 0.29176899790763855, 0.0018811533227562904, 0.006885032635182142, 0.00012568103556986898, 0.0041968640871346, 0.00028232423937879503, 0.002994154579937458, 0.00555139034986496, 0.0025906667578965425], [0.0003878469578921795, 0.0032205735333263874, 0.0008253372507169843, 0.001012772903777659, 0.0042263213545084, 0.00020687063806690276, 0.004076069686561823, 0.00019611275638453662, 0.010459805838763714, 0.0018439762061461806, 0.06724131107330322, 0.004366658627986908, 0.8807443976402283, 0.002298184670507908, 0.009009318426251411, 0.000933199655264616, 0.003908544313162565, 0.00046618570922873914, 0.0006358278333209455, 0.00022478221217170358, 0.002863753354176879, 0.0008522244170308113], [0.0009306028950959444, 0.005543526727706194, 0.0013861096231266856, 0.005853143520653248, 0.0014987204922363162, 0.011241862550377846, 0.0015210021520033479, 0.005877702962607145, 0.0003915141860488802, 0.044059790670871735, 0.0034192746970802546, 0.07165028154850006, 0.013903462328016758, 0.37600404024124146, 0.015519929118454456, 0.3541335463523865, 0.027148883789777756, 0.034486643970012665, 0.0013833779376000166, 0.006222906988114119, 0.005562907084822655, 0.012260796502232552], [0.00857832282781601, 0.0040339138358831406, 0.0024118460714817047, 0.004121213220059872, 0.007622133009135723, 0.0010536775225773454, 
0.04205942898988724, 0.0014860035153105855, 0.03214822709560394, 0.0015661593060940504, 0.031949132680892944, 0.007445168681442738, 0.060598596930503845, 0.0036026097368448973, 0.5738435387611389, 0.028203386813402176, 0.10057620704174042, 0.06274976581335068, 0.010345819406211376, 0.0015381677076220512, 0.01092944759875536, 0.0031372052617371082], [0.00014272549015004188, 0.0006669512367807329, 0.00034356306423433125, 0.002141856588423252, 0.00138522544875741, 0.0010763842146843672, 0.002072072820737958, 0.0010981751838698983, 0.00017896649660542607, 0.0015071589732542634, 0.00020146321912761778, 0.008246543817222118, 0.0012872022343799472, 0.015187312848865986, 0.02892954647541046, 0.8186100125312805, 0.009348966181278229, 0.09929687529802322, 0.0009340143296867609, 0.004617653787136078, 0.000598541519138962, 0.0021288294810801744], [0.010063149966299534, 0.05745486915111542, 0.002895340323448181, 0.004189391620457172, 0.002877228194847703, 0.0006542058545164764, 0.053861748427152634, 0.002254069782793522, 0.031311601400375366, 0.0031038213055580854, 0.05192689597606659, 0.00041614819201640785, 0.05513432249426842, 0.0008649463416077197, 0.05896231532096863, 0.004844082053750753, 0.269494891166687, 0.02690374106168747, 0.345802903175354, 0.004050699062645435, 0.011727184988558292, 0.0012064349139109254], [0.0003597117029130459, 0.0005291553679853678, 0.000680810771882534, 0.00036929515772499144, 0.00026055442867800593, 0.0012629296397790313, 0.0043936013244092464, 0.007022923324257135, 0.0002607719216030091, 0.006497155874967575, 0.00039733972516842186, 0.0026242597959935665, 7.665240264032036e-05, 0.0012920754961669445, 0.0055312057957053185, 0.03907036408782005, 0.010949709452688694, 0.8770007491111755, 0.006219316739588976, 0.03247475251555443, 0.002083210274577141, 0.0006434884271584451], [5.308826075633988e-05, 0.006684496533125639, 0.000999173615127802, 0.0009318612865172327, 0.00024064358149189502, 0.0014267951482906938, 0.0013526234542950988, 
0.0009291826863773167, 0.004332754295319319, 0.005423843394964933, 0.004104606807231903, 0.00016345662879757583, 0.0015204944647848606, 0.000144080157042481, 0.0013452514540404081, 0.00017302072956226766, 0.05100010335445404, 0.0015942518366500735, 0.7889230251312256, 0.0018262210069224238, 0.12622660398483276, 0.000604446220677346], [0.00020327015954535455, 0.0010253817308694124, 0.0020389279816299677, 0.0004044970264658332, 0.0009511448442935944, 0.00040903608896769583, 0.0021230452693998814, 0.0009500515880063176, 0.005398179870098829, 0.002440311247482896, 0.0028721527196466923, 0.0033481114078313112, 0.0007660541450604796, 0.0027132080867886543, 0.0007235652301460505, 0.0021559898741543293, 0.0015439348062500358, 0.04731747508049011, 0.0021101185120642185, 0.9120434522628784, 0.0025280918926000595, 0.005934032611548901], [0.0014104729052633047, 0.001031697727739811, 0.0028574501629918814, 0.0007801381289027631, 0.0016922076465561986, 0.00032255196128971875, 0.0024564601480960846, 0.00030104382312856615, 0.02590767852962017, 0.003536858828738332, 0.15678814053535461, 0.03620132431387901, 0.04815256968140602, 0.0005735408049076796, 0.013949219137430191, 0.0014060215326026082, 0.010993827134370804, 0.004935094155371189, 0.05787460133433342, 0.00479322113096714, 0.5819053649902344, 0.042130496352910995]], [[0.03747326508164406, 0.1507377326488495, 0.1406870037317276, 0.07205749303102493, 0.11000805348157883, 0.06049054116010666, 0.07932378351688385, 0.021523935720324516, 0.08203516155481339, 0.086004838347435, 0.010882203467190266, 0.01129684504121542, 0.040034789592027664, 0.045806314796209335, 0.00880469847470522, 0.011560598388314247, 0.008709253743290901, 0.006096282042562962, 0.0020873653702437878, 0.004161057528108358, 0.008861898444592953, 0.0013569629518315196], [0.006295626051723957, 0.20190443098545074, 0.010265704244375229, 0.06344931572675705, 0.022804655134677887, 0.09607941657304764, 0.09735569357872009, 0.3488394320011139, 0.08716194331645966, 
0.015304978005588055, 0.015752572566270828, 0.005585703533142805, 0.011735142208635807, 0.002024246845394373, 0.0003631995932664722, 0.0009614919545128942, 0.0021726132836192846, 0.0017600214341655374, 0.002401490230113268, 0.005385835189372301, 0.001198204467073083, 0.0011983237927779555], [0.022812169045209885, 0.1557191163301468, 0.07832018285989761, 0.06427881866693497, 0.1158197894692421, 0.0025910960976034403, 0.32869023084640503, 0.053403571248054504, 0.07276621460914612, 0.0024762593675404787, 0.018094658851623535, 0.06652683019638062, 0.0052869501523673534, 0.003970705438405275, 0.0005424118135124445, 0.002018809085711837, 0.00033216006704606116, 0.0023043204564601183, 0.0002972820366267115, 0.002292934339493513, 0.0004627286980394274, 0.00099279941059649], [0.001183408428914845, 0.6890122890472412, 0.06854124367237091, 0.0024923093151301146, 0.034388720989227295, 0.017912885174155235, 0.14223138988018036, 0.010112247429788113, 0.001877460046671331, 0.007226394023746252, 0.00016130946460179985, 0.0006438821437768638, 0.005371570121496916, 0.012883315794169903, 3.945868229493499e-05, 0.0009432642254978418, 3.770114199141972e-05, 0.0037780706770718098, 0.000506810552906245, 8.60349100548774e-05, 0.0004597347870003432, 0.00011060963151976466], [0.008850826881825924, 0.3223136067390442, 0.05912816524505615, 0.014977649785578251, 0.014651021920144558, 0.12161041796207428, 0.06524758040904999, 0.09091822803020477, 0.033649053424596786, 0.18461261689662933, 0.004946019500494003, 0.0032820426858961582, 0.028166651725769043, 0.03365064412355423, 0.0010604646522551775, 0.0016616102075204253, 0.0013525411486625671, 0.001796863041818142, 0.004126126877963543, 0.0010069687850773335, 0.0022617948707193136, 0.0007290809298865497], [0.002207772573456168, 0.23497828841209412, 0.032295916229486465, 0.03196302801370621, 0.024690700694918633, 0.01770840957760811, 0.5458930134773254, 0.065483458340168, 0.016816187649965286, 0.019210709258913994, 0.0009511562529951334, 
0.0013889438705518842, 0.0011547701433300972, 0.0034897439181804657, 0.00015881612489465624, 0.0001402194466209039, 0.00011980000999756157, 0.001076740911230445, 0.00012567343947011977, 4.1636893001850694e-05, 9.860986028797925e-05, 6.49248522677226e-06], [0.006229968275874853, 0.03650801628828049, 0.008082031272351742, 0.024414096027612686, 0.039482150226831436, 0.0030262216459959745, 0.01593410037457943, 0.8421264886856079, 0.0077561973594129086, 0.0032562294509261847, 0.0036718640476465225, 0.0031089892145246267, 0.0011373711749911308, 0.0008175743860192597, 0.0001553078618599102, 0.0018212408758699894, 0.00021224425290711224, 0.0002272904384881258, 0.0011388896964490414, 0.00034905868233181536, 0.00027379358652979136, 0.00027085552574135363], [0.0006481547025032341, 0.04382069408893585, 0.0033055508974939585, 0.00029944241396151483, 0.010145529173314571, 0.22905336320400238, 0.09739338606595993, 0.13123832643032074, 0.022739533334970474, 0.34482496976852417, 0.005611425265669823, 0.011303124949336052, 0.07475843280553818, 0.006017580162733793, 7.387021469185129e-05, 0.0034561394713819027, 0.004860393237322569, 0.005752681288868189, 0.0037816644180566072, 0.00012935060658492148, 0.0007195365033112466, 6.695292540825903e-05], [8.296796295326203e-05, 0.0023386047687381506, 0.00046124972868710756, 3.8530732126673684e-05, 0.0010312623344361782, 0.0021980905439704657, 0.004088541027158499, 0.006444940809160471, 0.01075834035873413, 0.9519911408424377, 0.005854144226759672, 0.0016194183845072985, 0.010224143043160439, 0.0015151818515732884, 2.4837934688548557e-05, 0.00014075903163757175, 8.617698040325195e-05, 0.0004409453540574759, 6.588397081941366e-05, 2.343117375858128e-05, 0.0005652570980601013, 6.072639280318981e-06], [0.0030253957957029343, 0.0026121637783944607, 0.002226809039711952, 0.06059698015451431, 0.011910846456885338, 0.009269166737794876, 0.020410871133208275, 0.18132972717285156, 0.1244673952460289, 0.05996957793831825, 0.27298909425735474, 
0.176126629114151, 0.030940016731619835, 0.004529010038822889, 0.01044619269669056, 0.004515123553574085, 0.011448471806943417, 0.001686583156697452, 0.0026608151383697987, 0.004526956472545862, 0.0023168325424194336, 0.00199534697458148], [0.0008146126638166606, 0.0005745506496168673, 0.0014193553943186998, 0.004470736254006624, 0.0176235418766737, 0.0006014446844346821, 0.006893231067806482, 0.11971182376146317, 0.1312328726053238, 0.013467391021549702, 0.04593977704644203, 0.6389314532279968, 0.0033485577441751957, 0.002767558442428708, 0.005194341763854027, 0.0019406921928748488, 0.0007207950693555176, 0.0005242483457550406, 0.0026461584493517876, 0.0004232733335811645, 0.00047363084740936756, 0.00027993498952127993], [0.0027181324549019337, 0.004260217305272818, 0.0026429584249854088, 0.0009281090460717678, 0.013068454340100288, 0.012431896291673183, 0.0081978440284729, 0.08235005289316177, 0.042703695595264435, 0.35958102345466614, 0.07196057587862015, 0.09065281599760056, 0.24698291718959808, 0.02288055047392845, 0.0015453363303095102, 0.021898038685321808, 0.007154097780585289, 0.0015297882491722703, 0.0032073105685412884, 0.00029487733263522387, 0.0025557868648320436, 0.0004555836785584688], [0.002193039981648326, 0.004700912162661552, 0.001945940195582807, 0.021981501951813698, 0.003939563408493996, 0.02606985718011856, 0.004972692113369703, 0.015765057876706123, 0.03079547919332981, 0.048869989812374115, 0.20695222914218903, 0.04287583380937576, 0.09112732857465744, 0.31847789883613586, 0.07397453486919403, 0.00527587253600359, 0.07424594461917877, 0.005866817198693752, 0.004543722607195377, 0.006998740136623383, 0.0031988373957574368, 0.0052282121032476425], [0.0025584392715245485, 0.0005693081766366959, 0.0009606700041331351, 0.00017382002261001617, 0.0069155278615653515, 0.00034490591497160494, 0.00883424561470747, 0.06171165406703949, 0.01740107871592045, 0.002708879066631198, 0.1508903205394745, 0.6918450593948364, 0.006287808995693922, 
0.003975533880293369, 0.00233125570230186, 0.019152790307998657, 0.0074073695577681065, 0.007871526293456554, 0.0058451080694794655, 0.0011699098395183682, 0.0003087840450461954, 0.0007360989693552256], [0.0016150689916685224, 0.0017091674963012338, 0.0038858165498822927, 0.0013887743698433042, 0.004822613671422005, 0.0010122919920831919, 0.01395806297659874, 0.002631101058796048, 0.0013424105709418654, 0.00520243588835001, 0.006622177083045244, 0.04622844234108925, 0.057283997535705566, 0.045122165232896805, 0.012609770521521568, 0.6762216687202454, 0.01005561463534832, 0.08557172119617462, 0.017103325575590134, 0.0009789131581783295, 0.0016993676545098424, 0.0029350852128118277], [0.013883470557630062, 0.002491959370672703, 0.0020345957018435, 0.0014165870379656553, 0.0015655445167794824, 0.004536676686257124, 0.003310875967144966, 0.019813675433397293, 0.006406598724424839, 0.004686644766479731, 0.08311145752668381, 0.06446288526058197, 0.01251078862696886, 0.06147575378417969, 0.022261178120970726, 0.05193774774670601, 0.49917155504226685, 0.03623285889625549, 0.05859764292836189, 0.02553948387503624, 0.004256380721926689, 0.02029554732143879], [0.004103098064661026, 0.004590251948684454, 0.004463018849492073, 0.0047890241257846355, 0.007821108214557171, 0.0014706488000229, 0.021425411105155945, 0.017333192750811577, 0.009554306045174599, 0.011098952032625675, 0.021342502906918526, 0.017878606915473938, 0.0015030616195872426, 0.015456591732800007, 0.10977379232645035, 0.05016174912452698, 0.035677459090948105, 0.5468922257423401, 0.08993703871965408, 0.009455214254558086, 0.012953094206750393, 0.002319675637409091], [0.003451311495155096, 0.0006500583840534091, 0.0010884840739890933, 0.0018972369143739343, 0.0012017586268484592, 0.0006957136210985482, 0.0014903105329722166, 0.010394648648798466, 0.0014793109148740768, 0.0008870555902831256, 0.01002854947000742, 0.010586725547909737, 0.0026209023781120777, 0.004355400335043669, 0.016621025279164314, 
0.1835862696170807, 0.08088422566652298, 0.030055157840251923, 0.5139216184616089, 0.05185685679316521, 0.008411507122218609, 0.06383592635393143], [0.0005143543821759522, 0.0014018838992342353, 0.0002481896663084626, 9.986674558604136e-05, 0.0011177108390256763, 0.004759103525429964, 0.006067608017474413, 0.004582819528877735, 0.001749625662341714, 0.0017773230792954564, 0.017471613362431526, 0.012837901711463928, 0.008587395772337914, 0.0007866424857638776, 0.001049034297466278, 0.028184255585074425, 0.36664023995399475, 0.13306781649589539, 0.1792873740196228, 0.16755402088165283, 0.016673751175403595, 0.04554147273302078], [0.0031487667001783848, 0.001633250038139522, 0.0010726520558819175, 0.0010232150088995695, 0.005525404121726751, 0.001504038111306727, 0.005294125992804766, 0.009428951889276505, 0.006454676855355501, 0.003602387150749564, 0.054434843361377716, 0.03725465387105942, 0.002317959675565362, 0.002369162393733859, 0.004178812261670828, 0.0184633769094944, 0.015441669151186943, 0.19634689390659332, 0.03821151703596115, 0.11539940536022186, 0.1688486933708191, 0.30804550647735596], [0.0011103623546659946, 0.0004691367212217301, 0.0006294627673923969, 0.0051086968742311, 0.0008307708194479346, 0.0008093225187622011, 0.0008599404827691615, 0.0008271051337942481, 0.0018759340746328235, 0.00033012329367920756, 0.0036794987972825766, 0.002012376906350255, 0.0013072739820927382, 0.00045326503459364176, 0.009989502839744091, 0.0038574349600821733, 0.005783493164926767, 0.00801576767116785, 0.06154589354991913, 0.09118779003620148, 0.033840835094451904, 0.7654759883880615], [0.006665577180683613, 0.002016470767557621, 0.002978854812681675, 0.0063490355387330055, 0.01269215065985918, 0.0049843392334878445, 0.012892745435237885, 0.011512828059494495, 0.014062857255339622, 0.0006073986296541989, 0.01173081062734127, 0.02183849923312664, 0.0015487218042835593, 0.001978685613721609, 0.00876487884670496, 0.013082706369459629, 0.021793503314256668, 
0.02559702843427658, 0.08961991220712662, 0.2693146765232086, 0.024351615458726883, 0.43561670184135437]], [[0.03311186656355858, 0.05904286354780197, 0.0315420962870121, 0.035618048161268234, 0.05227838456630707, 0.1261812001466751, 0.14498229324817657, 0.07361025363206863, 0.11746962368488312, 0.16305391490459442, 0.046412624418735504, 0.029683345928788185, 0.027804946526885033, 0.03150520101189613, 0.005131686571985483, 0.005425342824310064, 0.00418223487213254, 0.007657513953745365, 0.0008549391641281545, 0.0018397850217297673, 0.001571069355122745, 0.0010408350499346852], [0.18850786983966827, 0.03617675602436066, 0.022872356697916985, 0.11275171488523483, 0.022162728011608124, 0.4304397702217102, 0.02727646566927433, 0.01827589049935341, 0.11641533672809601, 0.012429793365299702, 0.006150512490421534, 0.0014785660896450281, 0.00103353476151824, 0.0005359497154131532, 0.0007110073347575963, 0.00016822529141791165, 0.0001503143139416352, 0.0002835434570442885, 0.00024715965264476836, 0.0009548826492391527, 0.00045758430496789515, 0.0005200697341933846], [0.046328309923410416, 0.07220663130283356, 0.006260451395064592, 0.04947219416499138, 0.08809584379196167, 0.04352357238531113, 0.04481404647231102, 0.3228920102119446, 0.02841348573565483, 0.006621469743549824, 0.04761982709169388, 0.0576750747859478, 0.01137502584606409, 0.0010426724329590797, 0.003264343598857522, 0.022563684731721878, 0.007252235896885395, 0.011321988888084888, 0.064890556037426, 0.014019038528203964, 0.016853444278240204, 0.03349402919411659], [0.023513492196798325, 0.08690749108791351, 0.006416460499167442, 0.4490601122379303, 0.01888086460530758, 0.22696922719478607, 0.03137247636914253, 0.007537613622844219, 0.11011640727519989, 0.00240749167278409, 0.0012074651895090938, 0.0004585320712067187, 0.0018249193672090769, 0.00041767681250348687, 0.007754043210297823, 0.0015351835172623396, 0.0017469181912019849, 0.0015447000041604042, 0.0009540610481053591, 0.01755560375750065, 
0.0013029492693021894, 0.000516284431796521], [0.021333860233426094, 0.06792417913675308, 0.0246681310236454, 0.06710666418075562, 0.038564201444387436, 0.24454307556152344, 0.18365049362182617, 0.02478284388780594, 0.09098659455776215, 0.03696819022297859, 0.012287878431379795, 0.0030359188094735146, 0.009804473258554935, 0.004520181566476822, 0.009844902902841568, 0.00789918377995491, 0.014147806912660599, 0.036278001964092255, 0.008191240951418877, 0.0651058629155159, 0.02204308845102787, 0.006313260179013014], [0.0005636970163322985, 0.005981654394418001, 0.0013768619392067194, 0.006690666079521179, 0.008589702658355236, 0.01178914587944746, 0.009604092687368393, 0.9402579069137573, 0.009040762670338154, 0.0011145672760903835, 0.001838677329942584, 0.002281513763591647, 0.00010869642574107274, 3.5654382372740656e-05, 3.102465780102648e-05, 7.000281766522676e-05, 3.6937370168743655e-05, 0.00023899797815829515, 0.00013075917377136648, 7.476162136299536e-05, 5.832016540807672e-05, 8.55458274600096e-05], [0.019133418798446655, 0.0058224862441420555, 0.0010833846172317863, 0.05166725814342499, 0.04914100095629692, 0.19644872844219208, 0.014505680650472641, 0.25951290130615234, 0.31342482566833496, 0.007242798339575529, 0.012342341244220734, 0.005781259387731552, 0.0015986569924280047, 4.017467290395871e-05, 0.004280635621398687, 0.002982429228723049, 0.01304507814347744, 0.0033439393155276775, 0.021576205268502235, 0.010329637676477432, 0.003952380735427141, 0.0027447703760117292], [0.000708713021595031, 0.0022016370203346014, 0.009225575253367424, 0.0008392942836508155, 0.0035325533244758844, 0.02491210587322712, 0.013242264278233051, 0.005499572493135929, 0.04017501696944237, 0.8747637271881104, 0.01224265992641449, 0.001465889043174684, 0.0064377691596746445, 0.004154685419052839, 0.00016158731887117028, 0.00013650153414346278, 7.687905599595979e-05, 0.00012406827590893954, 1.0667306014511269e-05, 1.6336152839357965e-05, 6.886413757456467e-05, 
3.6671383440989302e-06], [0.00023463858815375715, 0.005217744968831539, 0.008430913090705872, 0.0004053729062434286, 0.017611347138881683, 0.013385247439146042, 0.10238783806562424, 0.028216863051056862, 0.026206910610198975, 0.6826679706573486, 0.09185850620269775, 0.004587181378155947, 0.015904905274510384, 0.0016029056860134006, 0.00011045743303839117, 0.00011980645649600774, 0.00031269228202290833, 0.00034214777406305075, 2.383703031227924e-05, 3.8856313040014356e-05, 0.00031972985016182065, 1.419272393832216e-05], [0.0006165215745568275, 0.0007102385861799121, 0.0005951063940301538, 0.0005887517472729087, 0.012763737700879574, 0.009878188371658325, 0.0326484814286232, 0.5179324150085449, 0.0985165387392044, 0.06851252913475037, 0.10407991707324982, 0.14728961884975433, 0.003265273990109563, 0.0003758552484214306, 0.00014237783034332097, 0.0005060768453404307, 0.0005903338897041976, 0.0004710485809482634, 0.0003079396265093237, 4.784562406712212e-05, 8.939839608501643e-05, 7.190441829152405e-05], [0.0009762287954799831, 0.0014656831044703722, 0.0032527539879083633, 0.0010253083892166615, 0.009990796446800232, 0.004526669625192881, 0.025231830775737762, 0.03813016787171364, 0.03419450297951698, 0.05539262667298317, 0.24887889623641968, 0.44245028495788574, 0.10762985795736313, 0.010806970298290253, 0.00157807522919029, 0.0057232859544456005, 0.004864795133471489, 0.002156774513423443, 0.0007766906055621803, 0.0002561073051765561, 0.000394264527130872, 0.00029751809779554605], [0.00027191793196834624, 0.0041273972019553185, 0.009450781159102917, 0.0014196158153936267, 0.003441236913204193, 0.0066535803489387035, 0.007794332690536976, 0.005827350541949272, 0.0028654844500124454, 0.093973807990551, 0.05084821954369545, 0.06506571173667908, 0.2540501058101654, 0.4726938307285309, 0.0028667866718024015, 0.006996389012783766, 0.006252189166843891, 0.0038107733707875013, 0.00025205465499311686, 0.0003412941296119243, 0.0005294305155985057, 0.00046767666935920715], 
[0.014927023090422153, 0.003392481245100498, 0.022937411442399025, 0.0015203471994027495, 0.0078020826913416386, 0.0010116243502125144, 0.03809857740998268, 0.007017000112682581, 0.004142446909099817, 0.020532216876745224, 0.18450306355953217, 0.12423037737607956, 0.10672712326049805, 0.4058847725391388, 0.041508883237838745, 0.00513869896531105, 0.002658215584233403, 0.00311537878587842, 0.0018239059718325734, 9.678473725216463e-05, 0.001297245966270566, 0.001634286600165069], [0.0006444840109907091, 0.003200160339474678, 0.00021150082466192544, 0.001893478911370039, 0.0014552093343809247, 0.004449497442692518, 0.0021091417875140905, 0.04101799800992012, 0.00040775100933387876, 0.0015816806117072701, 0.02027086913585663, 0.20712095499038696, 0.10072015225887299, 0.010428794659674168, 0.06052854657173157, 0.3363155722618103, 0.14320123195648193, 0.016790905967354774, 0.04002188518643379, 0.0007563474355265498, 0.0008874337072484195, 0.005986371077597141], [0.0007313241949304938, 0.000993120833300054, 0.00023088262241799384, 0.004795969929546118, 0.00167083740234375, 0.001391368336044252, 0.005820180755108595, 0.0004789874656125903, 0.005798371974378824, 0.002178717404603958, 0.0030022989958524704, 0.004544991999864578, 0.07634252309799194, 0.02534697949886322, 0.2672726809978485, 0.2951834499835968, 0.25321435928344727, 0.019908985123038292, 0.012388347648084164, 0.0158707182854414, 0.002005374990403652, 0.0008294606814160943], [2.0260882592992857e-05, 0.0007889693952165544, 0.000268715841230005, 0.00034306125598959625, 0.0001373518316540867, 0.0006411502254195511, 0.0001553915935801342, 0.00010790053784148768, 1.6632471670163795e-05, 8.023536065593362e-05, 9.916099952533841e-05, 0.0001283057645196095, 0.002567062620073557, 0.011777649633586407, 0.027347931638360023, 0.028058581054210663, 0.6648162603378296, 0.1871861070394516, 0.05623795837163925, 0.0057080830447375774, 0.010053519159555435, 0.0034595790784806013], [0.00011176758562214673, 0.0011676333379000425, 
0.00046575229498557746, 0.0009303570259362459, 0.0010523807723075151, 0.001341950031928718, 0.001088993507437408, 0.01287264283746481, 0.0014016971690580249, 0.0003368357429280877, 0.0012179433833807707, 0.003606041194871068, 0.0034549918491393328, 0.002732648979872465, 0.031852200627326965, 0.07713720202445984, 0.20889052748680115, 0.11555938422679901, 0.51649010181427, 0.009793618693947792, 0.004872702993452549, 0.0036226396914571524], [2.968918670376297e-05, 5.854173286934383e-05, 3.481938620097935e-05, 0.0003053621912840754, 0.00013879859761800617, 0.00027437382959760725, 2.9013497623964213e-05, 0.0006634793244302273, 0.0002559652493800968, 0.00010552747698966414, 0.00038057740312069654, 0.0016048630932345986, 0.0007694315281696618, 0.0003509803500492126, 0.011165089905261993, 0.06163420528173447, 0.39001893997192383, 0.04668959230184555, 0.3410089910030365, 0.12205900996923447, 0.008103171363472939, 0.014319483190774918], [0.0028475553262978792, 0.00212167133577168, 0.018442509695887566, 0.0009523513144813478, 0.0028435890562832355, 0.0003787502064369619, 0.006530162878334522, 0.0002950232883449644, 0.0018417364917695522, 0.009270310401916504, 0.011239076033234596, 0.00370580842718482, 0.022915581241250038, 0.09914470463991165, 0.024026118218898773, 0.0317259319126606, 0.05962614715099335, 0.2748030722141266, 0.11202570050954819, 0.05562003329396248, 0.2368180900812149, 0.022826094180345535], [0.00021661254868377, 0.0023599877022206783, 0.004002743866294622, 0.0007162531255744398, 0.004524008836597204, 0.0008578822598792613, 0.0013306409819051623, 0.0041643050499260426, 0.000342967570759356, 0.004284953232854605, 0.007160731591284275, 0.011806715279817581, 0.0032678088173270226, 0.0028434719424694777, 0.0011234245030209422, 0.003773834789171815, 0.0125155970454216, 0.1245700791478157, 0.03500164672732353, 0.04706358537077904, 0.4134024977684021, 0.31467029452323914], [0.001971097895875573, 0.006865146104246378, 0.004394039046019316, 0.0019775719847530127, 
0.018206128850579262, 0.004639299586415291, 0.017181584611535072, 0.004755365662276745, 0.014537408947944641, 0.0030281315557658672, 0.009381050243973732, 0.008755532093346119, 0.0105644715949893, 0.005084783770143986, 0.006107879802584648, 0.02206510491669178, 0.06610733270645142, 0.06314196437597275, 0.390546053647995, 0.08947554230690002, 0.16444405913352966, 0.08677050471305847], [0.0007198539678938687, 0.007740961853414774, 0.009156002663075924, 0.0011560741113498807, 0.00405648211017251, 0.0005124675808474422, 0.0034939725883305073, 0.0003481600433588028, 0.00016044928634073585, 0.0012210343265905976, 0.005296197719871998, 0.014993500895798206, 0.024772867560386658, 0.041646651923656464, 0.005028672516345978, 0.03456709533929825, 0.032071731984615326, 0.09791199862957001, 0.014909186400473118, 0.15027596056461334, 0.16407744586467743, 0.3858832120895386]], [[0.002304844791069627, 0.01705913431942463, 0.007377782370895147, 0.022422371432185173, 0.024162910878658295, 0.03997775539755821, 0.01399573776870966, 0.04188060387969017, 0.04592420905828476, 0.04871811717748642, 0.01180607546120882, 0.04940987378358841, 0.03573472052812576, 0.06114523112773895, 0.041165731847286224, 0.12812890112400055, 0.09552598744630814, 0.10074043273925781, 0.055081021040678024, 0.073433056473732, 0.05068211629986763, 0.03332347795367241], [0.00599302351474762, 0.03738049790263176, 0.009004565887153149, 0.02711007371544838, 0.016772866249084473, 0.05442216992378235, 0.030270110815763474, 0.04948306828737259, 0.025801701471209526, 0.08555948734283447, 0.04354745149612427, 0.045435626059770584, 0.10212294012308121, 0.07623185217380524, 0.05087150260806084, 0.027723077684640884, 0.08685281127691269, 0.03660239279270172, 0.06938613206148148, 0.014337223023176193, 0.06928645074367523, 0.03580498322844505], [0.012398374266922474, 0.03333786502480507, 0.008741469122469425, 0.02510097436606884, 0.02246885374188423, 0.08793855458498001, 0.035007018595933914, 0.019423244521021843, 
0.11561836302280426, 0.07990943640470505, 0.034930381923913956, 0.05801196023821831, 0.04846778139472008, 0.05004461109638214, 0.02979258820414543, 0.04899205267429352, 0.07141163945198059, 0.042628880590200424, 0.025657033547759056, 0.10169371217489243, 0.029562270268797874, 0.01886293664574623], [0.004313538782298565, 0.027867795899510384, 0.007902628742158413, 0.02289879322052002, 0.012396014295518398, 0.030853524804115295, 0.026109714061021805, 0.13181036710739136, 0.01924688182771206, 0.03339223563671112, 0.03609587624669075, 0.04265812784433365, 0.05931313708424568, 0.044313572347164154, 0.07519733905792236, 0.01774015836417675, 0.08583640307188034, 0.02104807272553444, 0.2403302937746048, 0.011662270873785019, 0.028584960848093033, 0.020428206771612167], [0.005240592639893293, 0.020909132435917854, 0.006608241703361273, 0.036220893263816833, 0.014611943624913692, 0.02149975672364235, 0.04966576024889946, 0.09805157780647278, 0.02040058933198452, 0.06686447560787201, 0.030269967392086983, 0.0677654817700386, 0.0380321741104126, 0.0528792180120945, 0.05397079512476921, 0.07384694367647171, 0.042663853615522385, 0.08047103136777878, 0.12377067655324936, 0.02673410065472126, 0.04006818309426308, 0.029454560950398445], [0.0035557944793254137, 0.0061449757777154446, 0.00375168863683939, 0.028563227504491806, 0.012061049230396748, 0.022494303062558174, 0.016882238909602165, 0.03540993854403496, 0.022505970671772957, 0.038614481687545776, 0.0630355104804039, 0.1286373883485794, 0.025069164112210274, 0.09457286447286606, 0.1234709769487381, 0.04467262700200081, 0.11161866039037704, 0.04511430487036705, 0.06641238182783127, 0.01760236918926239, 0.04300783947110176, 0.04680224135518074], [0.008451179601252079, 0.022695235908031464, 0.010333895683288574, 0.03202851116657257, 0.02481292001903057, 0.033403072506189346, 0.01890491507947445, 0.03273705020546913, 0.03558150306344032, 0.06742732971906662, 0.055403001606464386, 0.061822518706321716, 0.06328385323286057, 
0.148258239030838, 0.08992239832878113, 0.031143663451075554, 0.09199895709753036, 0.020345743745565414, 0.07764165848493576, 0.016515333205461502, 0.04126259312033653, 0.016026455909013748], [0.0024996658321470022, 0.03336632624268532, 0.014778420329093933, 0.027647506445646286, 0.027305392548441887, 0.0385904461145401, 0.03688231483101845, 0.08104259520769119, 0.015059029683470726, 0.039651818573474884, 0.01908041350543499, 0.025016190484166145, 0.03990371897816658, 0.06970306485891342, 0.044755883514881134, 0.06076686084270477, 0.0830538421869278, 0.09456168115139008, 0.14489132165908813, 0.029237400740385056, 0.056112200021743774, 0.016093969345092773], [0.00022855361748952419, 0.006800399161875248, 0.001921183429658413, 0.010935774073004723, 0.019793475046753883, 0.00819762609899044, 0.009717077948153019, 0.023285789415240288, 0.027292922139167786, 0.01589038223028183, 0.024439625442028046, 0.07142998278141022, 0.03237546235322952, 0.0962187796831131, 0.05082801356911659, 0.09141860902309418, 0.07875781506299973, 0.09088511019945145, 0.1555277407169342, 0.07858805358409882, 0.03205135464668274, 0.07341630756855011], [0.0004564712580759078, 0.016093309968709946, 0.0063149514608085155, 0.011294836178421974, 0.010505435988307, 0.008097393438220024, 0.024693753570318222, 0.03536618873476982, 0.006100817117840052, 0.04878571256995201, 0.01170057337731123, 0.01329239085316658, 0.04806585982441902, 0.05602327734231949, 0.04144272208213806, 0.05525871738791466, 0.04402926191687584, 0.19975358247756958, 0.14637421071529388, 0.030433043837547302, 0.16811202466487885, 0.017805391922593117], [0.0007918645278550684, 0.008632645942270756, 0.0029772857669740915, 0.019100947305560112, 0.012054748833179474, 0.009141900576651096, 0.008024387061595917, 0.024956362321972847, 0.010404881089925766, 0.02698253095149994, 0.015979928895831108, 0.042109888046979904, 0.02208581008017063, 0.07251081615686417, 0.11345109343528748, 0.09246983379125595, 0.10225488245487213, 
0.09775248169898987, 0.16008129715919495, 0.0417676605284214, 0.07765735685825348, 0.03881143778562546], [0.0009258422069251537, 0.017353927716612816, 0.004144147504121065, 0.012552672065794468, 0.020521963015198708, 0.010875954292714596, 0.010972263291478157, 0.037731099873781204, 0.010444889776408672, 0.02421843633055687, 0.013357912190258503, 0.033909451216459274, 0.03306996822357178, 0.05150516331195831, 0.03986028954386711, 0.0770173892378807, 0.07016933709383011, 0.11861710995435715, 0.19975252449512482, 0.054919712245464325, 0.0900895968079567, 0.06799036264419556], [0.00048475415678694844, 0.013257146812975407, 0.008171833120286465, 0.006518997251987457, 0.008638212457299232, 0.004368143621832132, 0.01965636946260929, 0.03196465224027634, 0.003836162155494094, 0.03714881092309952, 0.013442711904644966, 0.010928956791758537, 0.037590380758047104, 0.026095276698470116, 0.01978752762079239, 0.039382580667734146, 0.01606995426118374, 0.17425373196601868, 0.1673925518989563, 0.04221334680914879, 0.2608450949192047, 0.05795278400182724], [0.0011352207511663437, 0.012026785872876644, 0.010839462280273438, 0.0037356684915721416, 0.011444753035902977, 0.012754536233842373, 0.01181405782699585, 0.006800692994147539, 0.02072717621922493, 0.012109844945371151, 0.009180638939142227, 0.010331050492823124, 0.013205241411924362, 0.015783479437232018, 0.004618849139660597, 0.08428619801998138, 0.021605124697089195, 0.11735552549362183, 0.05457310751080513, 0.4378946125507355, 0.06619264930486679, 0.06158527359366417], [0.0005329149425961077, 0.023607786744832993, 0.023895137012004852, 0.006483413279056549, 0.014807010069489479, 0.010549500584602356, 0.029877301305532455, 0.05632871016860008, 0.006452623754739761, 0.017230462282896042, 0.00898054614663124, 0.009713682346045971, 0.027662741020321846, 0.013803319074213505, 0.009975079447031021, 0.024104079231619835, 0.01512109860777855, 0.1428258866071701, 0.19247229397296906, 0.08806271106004715, 0.15651056170463562, 
0.12100307643413544], [0.001313000568188727, 0.010088649578392506, 0.017940297722816467, 0.008482731878757477, 0.021865075454115868, 0.013728762045502663, 0.02183958888053894, 0.02301391214132309, 0.011757629923522472, 0.01070465799421072, 0.015138234943151474, 0.023469092324376106, 0.011445415206253529, 0.023494603112339973, 0.012037212029099464, 0.06920725107192993, 0.023074662312865257, 0.14664098620414734, 0.07630594819784164, 0.14163663983345032, 0.11009497195482254, 0.20672067999839783], [0.0021092116367071867, 0.008483970537781715, 0.010361473076045513, 0.009880241006612778, 0.020322198048233986, 0.0067055607214570045, 0.011618911288678646, 0.015545789152383804, 0.007725940085947514, 0.007126733660697937, 0.028451021760702133, 0.03321008384227753, 0.01258731447160244, 0.017841516062617302, 0.017856862396001816, 0.0366717129945755, 0.015867119655013084, 0.05755671486258507, 0.06755285710096359, 0.0727764144539833, 0.09142032265663147, 0.44832804799079895], [0.023683423176407814, 0.04786846041679382, 0.08724408596754074, 0.014561162330210209, 0.07630626857280731, 0.055005043745040894, 0.021106403321027756, 0.012525000609457493, 0.018303845077753067, 0.01032339595258236, 0.02624676004052162, 0.01304433960467577, 0.017025304958224297, 0.026186656206846237, 0.006455244962126017, 0.04570451378822327, 0.022209346294403076, 0.03875311091542244, 0.03572281822562218, 0.1395590901374817, 0.06670942902565002, 0.19545623660087585], [0.004564111586660147, 0.06685170531272888, 0.06556394696235657, 0.014946048147976398, 0.05879037082195282, 0.024851756170392036, 0.03559919446706772, 0.0429331511259079, 0.010516013950109482, 0.012350868433713913, 0.01783978007733822, 0.009930189698934555, 0.03125042840838432, 0.010924865491688251, 0.009384074248373508, 0.02350565418601036, 0.012862840667366982, 0.07556240260601044, 0.06745085120201111, 0.1102416068315506, 0.10933646559715271, 0.18474361300468445], [0.003003686433658004, 0.061597246676683426, 0.02505563199520111, 
0.018121402710676193, 0.14851193130016327, 0.01765773817896843, 0.017746927216649055, 0.03856499865651131, 0.02398175373673439, 0.017994074150919914, 0.028048628941178322, 0.03721873462200165, 0.06606195122003555, 0.03126804903149605, 0.024289678782224655, 0.04661313444375992, 0.013398678973317146, 0.028156006708741188, 0.04183124378323555, 0.052758343517780304, 0.04465966671705246, 0.21346057951450348], [0.003516237949952483, 0.11787670850753784, 0.06055079773068428, 0.01972367987036705, 0.08152124285697937, 0.021289069205522537, 0.060282282531261444, 0.038279350847005844, 0.012980399653315544, 0.04674242436885834, 0.01540432684123516, 0.007412012666463852, 0.08731511980295181, 0.011591652408242226, 0.01020891685038805, 0.019432056695222855, 0.004873633850365877, 0.08260519802570343, 0.026750531047582626, 0.06221825256943703, 0.13395968079566956, 0.07546636462211609], [0.010575352236628532, 0.10095532983541489, 0.03884313628077507, 0.027007678523659706, 0.1010202020406723, 0.0424620695412159, 0.03971375152468681, 0.038884397596120834, 0.029306357726454735, 0.044679488986730576, 0.021961882710456848, 0.03297651931643486, 0.053084228187799454, 0.02288784272968769, 0.016299670562148094, 0.043982114642858505, 0.014877327717840672, 0.054891377687454224, 0.019054951146245003, 0.08001241832971573, 0.051165804266929626, 0.11535803973674774]], [[0.08129899948835373, 0.07805877178907394, 0.12307145446538925, 0.057764045894145966, 0.05416440963745117, 0.06956829875707626, 0.3057602345943451, 0.02386408858001232, 0.01879369281232357, 0.03531401976943016, 0.014893699437379837, 0.03575442358851433, 0.012653907760977745, 0.016149314120411873, 0.027868879958987236, 0.018718227744102478, 0.004753659013658762, 0.012930386699736118, 0.000869524257723242, 0.0010068670380860567, 0.0025183672551065683, 0.004224741365760565], [0.026188481599092484, 0.286648690700531, 0.0732831358909607, 0.020132606849074364, 0.029401499778032303, 0.09216196835041046, 0.12195242196321487, 
0.11792167276144028, 0.13051235675811768, 0.050604768097400665, 0.010342448949813843, 0.002838853280991316, 0.015828749164938927, 0.0021046919282525778, 0.0004308725765440613, 0.000706518127117306, 0.001050603692419827, 0.002500933362171054, 0.0019658743403851986, 0.0009567429078742862, 0.012031941674649715, 0.00043426311458460987], [0.00023672726820223033, 0.8916022777557373, 0.002121083904057741, 0.00012860576680395752, 0.01268272940069437, 0.0005358022172003984, 0.014920267276465893, 0.0023453787434846163, 0.0032698381692171097, 0.032134149223566055, 0.0018886453472077847, 0.00027506871265359223, 0.01628199592232704, 0.00015006559260655195, 3.41730446962174e-05, 0.00011542856373125687, 0.00041205176967196167, 0.0004669100162573159, 0.0003933918196707964, 0.00026834692107513547, 0.019623370841145515, 0.00011370307038305327], [0.0006523833726532757, 0.002074877265840769, 0.9346182346343994, 0.02684023045003414, 0.004790040664374828, 0.003234311006963253, 0.0015149560058489442, 0.00013542186934500933, 0.0005577776464633644, 0.0014551769709214568, 0.001154789119027555, 0.0020644015166908503, 0.00040672655450180173, 0.014959488995373249, 0.0013818880543112755, 0.0006884626345708966, 7.366320642177016e-05, 0.00013752402446698397, 1.7513890270492993e-05, 0.0003854803799185902, 0.0008582015871070325, 0.0019983798265457153], [0.0047970907762646675, 0.38446107506752014, 0.1561627984046936, 0.07281234860420227, 0.09716762602329254, 0.015357461757957935, 0.06624269485473633, 0.025596817955374718, 0.0739075317978859, 0.0025361496955156326, 0.0019243054557591677, 0.004721245728433132, 0.02421133778989315, 0.005968290846794844, 0.0007644532015547156, 0.005615083500742912, 0.0032959359232336283, 0.0021515630651265383, 0.002683604834601283, 0.030647018924355507, 0.007875941693782806, 0.011099675670266151], [0.0006053355755284429, 0.05318509787321091, 0.020857414230704308, 0.021732458844780922, 0.7844116687774658, 0.04996754229068756, 0.017845699563622475, 0.0024767068680375814, 
0.038021888583898544, 0.0008260513423010707, 8.094357326626778e-05, 0.0002068676403723657, 0.0034641646780073643, 0.0017930103931576014, 0.0003623144584707916, 0.0021531907841563225, 0.00027603888884186745, 0.00040838567656464875, 0.00010815932182595134, 0.0008104252628982067, 0.00015455963148269802, 0.00025217756046913564], [0.0048890672624111176, 0.08844109624624252, 0.2018674910068512, 0.05182960629463196, 0.022185038775205612, 0.30924931168556213, 0.1317836344242096, 0.03008183091878891, 0.11968939006328583, 0.013196294195950031, 0.0034108201507478952, 0.000247644551564008, 0.00658583315089345, 0.0068824742920696735, 0.0005141959409229457, 0.0003545143117662519, 0.00025464888312853873, 0.0007247405592352152, 0.0007566437707282603, 0.005999848712235689, 0.00039662409108132124, 0.0006591786514036357], [0.11510580033063889, 0.009028125554323196, 0.028020761907100677, 0.03145639970898628, 0.011474001221358776, 0.016198953613638878, 0.7344873547554016, 0.007692678831517696, 0.010929133743047714, 0.0066091688349843025, 0.02015955187380314, 0.0014187564374879003, 0.0007866094820201397, 0.00021506960911210626, 0.0027264931704849005, 0.0013309140922501683, 0.00112833920866251, 0.0003642957308329642, 0.0005593568203039467, 0.00010493888112250715, 0.00011019368685083464, 9.309659799328074e-05], [0.0028089305851608515, 0.007521670777350664, 0.0005207279464229941, 0.0002890318864956498, 0.0015995687572285533, 0.010324081405997276, 0.004842815455049276, 0.9361923933029175, 0.02790900133550167, 0.0018402918940410018, 0.0009759651147760451, 0.0024659419432282448, 0.0002790455473586917, 4.4232459913473576e-05, 7.413002094835974e-06, 8.687552326591685e-05, 0.00016377838619519025, 0.0006409377092495561, 0.0012865668395534158, 2.2801819795859046e-05, 0.0001107869393308647, 6.721695535816252e-05], [0.003841865574941039, 0.005582909565418959, 0.00093187385937199, 0.0027477082330733538, 0.011476601473987103, 0.006257209461182356, 0.020876044407486916, 0.01452860701829195, 
0.905669629573822, 0.013323130086064339, 0.0030207100789994, 0.0008968955953605473, 0.005476845894008875, 0.00035182959982194006, 0.000783681811299175, 0.0002477334055583924, 0.001847706618718803, 0.0007230546325445175, 0.0005990764475427568, 0.0006500058225356042, 0.00014788506086915731, 1.907187470351346e-05], [0.015983713790774345, 0.0023470032028853893, 0.0004526182892732322, 0.00017137806571554393, 0.0002851566532626748, 0.0027535269036889076, 0.030767256394028664, 0.007554044481366873, 0.0035393396392464638, 0.8545519709587097, 0.058733418583869934, 0.015011734329164028, 0.002969448920339346, 0.000275201047770679, 2.5707138775032945e-05, 0.00029072625329717994, 8.752149733481929e-05, 0.003130936063826084, 0.0005440195673145354, 8.799142233328894e-05, 0.0003562484052963555, 8.109505870379508e-05], [0.002081414218991995, 0.00034119567135348916, 0.0003688165161293, 4.7127745347097516e-05, 1.6956444596871734e-05, 0.00016296149988193065, 0.005606601480394602, 0.00011952892236877233, 0.002047531306743622, 0.0007861151243560016, 0.9821573495864868, 0.003224150976166129, 0.001437523402273655, 0.00012669590068981051, 0.0003578613104764372, 7.81703602115158e-06, 0.00047740127774886787, 0.00027618720196187496, 3.956793443649076e-05, 0.00014116382226347923, 2.873562516469974e-05, 0.00014726626977790147], [0.013065854087471962, 0.005266242194920778, 0.009941451251506805, 0.009083614684641361, 0.0060933977365493774, 0.0051767583936452866, 0.001759619452059269, 0.01836184225976467, 0.0025085851084440947, 0.04193227365612984, 0.006078243721276522, 0.8215495944023132, 0.0173267163336277, 0.005612384993582964, 0.01662854291498661, 0.005529748275876045, 0.0009311058674938977, 0.004495640750974417, 0.0005185414920561016, 0.00010761272278614342, 0.007358618546277285, 0.0006735712522640824], [0.0001116415805881843, 0.005728668533265591, 0.0008443612023256719, 0.00012064864858984947, 0.0022081951610744, 0.00042138557182624936, 0.0002492062048986554, 0.00046385489986278117, 
0.003405319293960929, 0.014458832331001759, 0.008362079039216042, 0.0028083559591323137, 0.9372393488883972, 0.010711165145039558, 0.0010961811058223248, 0.0015709656290709972, 0.001166546018794179, 0.00016759354912210256, 0.0005801029619760811, 0.00043411110527813435, 0.007660755421966314, 0.00019072365830652416], [7.392750558210537e-05, 0.00022486223315354437, 0.003977532964199781, 0.002706709085032344, 0.0005241475882939994, 0.043867383152246475, 8.034618804231286e-05, 0.00010268341429764405, 0.00021173023560550064, 0.004120788536965847, 0.0029330251272767782, 0.026294251903891563, 0.005156650207936764, 0.8552954792976379, 0.020937250927090645, 0.004993813578039408, 0.025652587413787842, 0.0006366328452713788, 1.5269106370396912e-05, 8.370655996259302e-05, 0.0003244773542974144, 0.0017867798451334238], [0.0011947014136239886, 0.012852972373366356, 0.013942435383796692, 0.00040816140244714916, 0.01014051865786314, 0.0004401255864650011, 0.006534951273351908, 0.00135746318846941, 0.0009215069585479796, 0.00017480444512329996, 0.002526758937165141, 0.013460730202496052, 0.05463274568319321, 0.06836964190006256, 0.21865269541740417, 0.3188335597515106, 0.1699303388595581, 0.06719695031642914, 0.021292444318532944, 0.0021495993714779615, 0.0056442636996507645, 0.009342653676867485], [0.0005730044795200229, 0.0014713432174175978, 0.00047479599015787244, 0.0036722086369991302, 0.00605316087603569, 0.04541633278131485, 0.001447701477445662, 0.002245421754196286, 0.0007348365033976734, 0.002509685466066003, 8.01403948571533e-05, 0.004739086609333754, 0.00907242763787508, 0.03148307278752327, 0.009000309742987156, 0.8389676213264465, 0.02267123945057392, 0.014731417410075665, 0.003139026928693056, 0.00020253402180969715, 0.0002877965453080833, 0.0010268946643918753], [0.013833478093147278, 0.031901102513074875, 0.17645163834095, 0.0011327684624120593, 0.009075978770852089, 0.00012997798330616206, 0.12149067968130112, 0.009688439778983593, 0.025676926597952843, 
0.0007031070999801159, 0.027233287692070007, 0.0003311351465526968, 0.047002822160720825, 0.09059220552444458, 0.012268397957086563, 0.015025882050395012, 0.08053602278232574, 0.06439998745918274, 0.20124080777168274, 0.058172713965177536, 0.002804469782859087, 0.010308081284165382], [0.01078721322119236, 0.0004976961645297706, 0.0003589688567444682, 0.001292158500291407, 0.0007386088254861534, 0.007306284736841917, 0.0021139641758054495, 0.010457738302648067, 0.0009287401917390525, 0.01152596902102232, 0.004229273181408644, 0.01813727617263794, 0.0008278197492472827, 0.0029350677505135536, 0.03801577538251877, 0.09168343991041183, 0.038831062614917755, 0.7051061987876892, 0.04277902841567993, 0.002133155707269907, 0.008593208156526089, 0.0007213283097371459], [0.00046152263530530035, 9.23393017728813e-05, 4.8951271310215816e-05, 2.584589992693509e-06, 2.2255022486206144e-05, 2.0020172087242827e-05, 0.0001962275564437732, 0.005926910322159529, 0.00022708546021021903, 8.794229506747797e-05, 0.0001586286089150235, 8.706665539648384e-05, 4.914140663458966e-05, 2.8559588827192783e-05, 2.5434571853111265e-06, 0.00024322461104020476, 0.0009605244849808514, 0.0008436741190962493, 0.9891042709350586, 0.00015849038027226925, 0.0006218653288669884, 0.0006562093622051179], [0.0030296670738607645, 0.002341165207326412, 0.0017790996935218573, 0.0031568214762955904, 0.0039855074137449265, 0.004022711887955666, 0.006049789488315582, 0.0030538891442120075, 0.023478439077734947, 0.01018783263862133, 0.011396140791475773, 0.004343053791671991, 0.005056365393102169, 0.005826432257890701, 0.01019594818353653, 0.003030159743502736, 0.08111327141523361, 0.09977424144744873, 0.01738523505628109, 0.6707897186279297, 0.01943334937095642, 0.010571174323558807], [0.029226768761873245, 0.0009960659081116319, 0.003863618243485689, 0.0002794221800286323, 0.00035878244671039283, 0.00021977376309223473, 0.001722928718663752, 0.002870005089789629, 0.0005062529817223549, 0.004426007624715567, 
0.10985658317804337, 0.12874166667461395, 0.012580716982483864, 0.00569315766915679, 0.0024364388082176447, 0.010184288956224918, 0.002168069826439023, 0.042498186230659485, 0.06517817080020905, 0.0037838879507035017, 0.49561160802841187, 0.07679766416549683]], [[0.04180895537137985, 0.12311653047800064, 0.12527123093605042, 0.10466508567333221, 0.04069690778851509, 0.07760681211948395, 0.05245032161474228, 0.038987770676612854, 0.027655908837914467, 0.08547985553741455, 0.02418631874024868, 0.023165617138147354, 0.02382906898856163, 0.05977277085185051, 0.027960792183876038, 0.017377689480781555, 0.01833837851881981, 0.015585298649966717, 0.007616050075739622, 0.018448516726493835, 0.0295540913939476, 0.016425974667072296], [0.011494185775518417, 0.02484252117574215, 0.21490466594696045, 0.01685681752860546, 0.044329963624477386, 0.028979938477277756, 0.050474077463150024, 0.25819286704063416, 0.09194336831569672, 0.002586606191471219, 0.017390862107276917, 0.012878519482910633, 0.03302367031574249, 0.03382755443453789, 0.0031961011700332165, 0.015718452632427216, 0.007161974906921387, 0.010587343014776707, 0.05366528406739235, 0.046255942434072495, 0.0032927689608186483, 0.018396547064185143], [0.013128953985869884, 0.14580613374710083, 0.09083428233861923, 0.07595629245042801, 0.13412179052829742, 0.024699820205569267, 0.04950014129281044, 0.13138635456562042, 0.015990668907761574, 0.004662915598601103, 0.013946324586868286, 0.016522128134965897, 0.03407147526741028, 0.030658669769763947, 0.029665157198905945, 0.04858360067009926, 0.019854681566357613, 0.024005506187677383, 0.06243731081485748, 0.00839702133089304, 0.007462093140929937, 0.018308650702238083], [0.0007618964300490916, 0.9783523082733154, 0.0014711498515680432, 0.007497102953493595, 0.004117170814424753, 0.0012872847728431225, 0.0016897672321647406, 0.0005823676474392414, 0.00022503355285152793, 0.00012840847193729132, 0.0008127561304718256, 0.00015295430785045028, 0.0010900524212047458, 
9.254331234842539e-05, 0.00015522984904237092, 0.00038249537465162575, 0.00022940630151424557, 6.962218321859837e-05, 0.0005853007896803319, 9.262979801860638e-06, 0.0001904088130686432, 0.00011759095650631934], [0.03254719451069832, 0.19370970129966736, 0.12473279982805252, 0.07860350608825684, 0.10339149832725525, 0.1672661453485489, 0.05822743847966194, 0.04444506764411926, 0.02841889299452305, 0.01622207649052143, 0.03632786124944687, 0.013085964135825634, 0.008014303632080555, 0.04203183576464653, 0.0018729616422206163, 0.006827124860137701, 0.013991163112223148, 0.002591415075585246, 0.00932060182094574, 0.0024289328139275312, 0.007395145948976278, 0.008548327721655369], [0.04050321504473686, 0.0028966423124074936, 0.08152954280376434, 0.041726160794496536, 0.06587352603673935, 0.07473564147949219, 0.06735248863697052, 0.03320593014359474, 0.29107430577278137, 0.0067059737630188465, 0.02705952525138855, 0.03675135597586632, 0.012643178924918175, 0.027371946722269058, 0.005983702372759581, 0.01395257469266653, 0.043997906148433685, 0.02344369702041149, 0.007337232585996389, 0.06733804196119308, 0.0035168773028999567, 0.02500055730342865], [0.01889905147254467, 0.010006011463701725, 0.0861431211233139, 0.007130220998078585, 0.04302341863512993, 0.07427286356687546, 0.04079825431108475, 0.09177489578723907, 0.32919350266456604, 0.008487327955663204, 0.04705720394849777, 0.021466046571731567, 0.035629063844680786, 0.02220870368182659, 0.00285529438406229, 0.01743246242403984, 0.010796139016747475, 0.01865347847342491, 0.041661061346530914, 0.04751083254814148, 0.00467672199010849, 0.020324476063251495], [0.013886232860386372, 0.2568277418613434, 0.03671523556113243, 0.0708695650100708, 0.05626486614346504, 0.24420468509197235, 0.047539692372083664, 0.09491024911403656, 0.011044195853173733, 0.03785226121544838, 0.009845288470387459, 0.006332985125482082, 0.013355116359889507, 0.028471730649471283, 0.005364518612623215, 0.009821655228734016, 0.006851169280707836, 
0.005569820757955313, 0.02071657031774521, 0.004078791476786137, 0.011131946928799152, 0.00834574457257986], [0.021976439282298088, 0.05568351969122887, 0.05066072940826416, 0.0427524708211422, 0.056971676647663116, 0.0679110586643219, 0.04680219292640686, 0.07857653498649597, 0.22042763233184814, 0.021791016682982445, 0.050207991153001785, 0.019672667607665062, 0.05412192642688751, 0.03491045907139778, 0.00894506648182869, 0.01452529989182949, 0.019056087359786034, 0.017171353101730347, 0.04604622349143028, 0.03377282992005348, 0.020458733662962914, 0.017558103427290916], [0.0022424182388931513, 0.0005118322442285717, 0.0987359955906868, 0.0009611693676561117, 0.04671753570437431, 0.034592822194099426, 0.02218269370496273, 0.5190151929855347, 0.030765840783715248, 0.005057870410382748, 0.009173926897346973, 0.014839235693216324, 0.03686261177062988, 0.09696508198976517, 0.00031519183539785445, 0.014724357053637505, 0.0036944595631211996, 0.00839958619326353, 0.03514562174677849, 0.01260855421423912, 0.0009774925420060754, 0.0055105010978877544], [0.025364872068166733, 0.024314358830451965, 0.04639049619436264, 0.0162638109177351, 0.1251375526189804, 0.059207733720541, 0.09541112184524536, 0.04649491608142853, 0.06583933532238007, 0.04009944573044777, 0.13351963460445404, 0.03936482593417168, 0.10463155061006546, 0.044088296592235565, 0.007863420061767101, 0.019314778968691826, 0.03673304244875908, 0.010325353592634201, 0.0213021170347929, 0.004336225800216198, 0.02368716523051262, 0.010309929959475994], [0.03464221581816673, 0.02561965584754944, 0.032277435064315796, 0.019269049167633057, 0.05717790871858597, 0.05940942466259003, 0.04423440620303154, 0.11335636675357819, 0.038280241191387177, 0.27030545473098755, 0.054971132427453995, 0.056556303054094315, 0.021150168031454086, 0.03467477858066559, 0.01128518208861351, 0.014525649137794971, 0.01055429968982935, 0.02134215459227562, 0.015273798257112503, 0.008699040859937668, 0.04607197269797325, 
0.01032331958413124], [0.0072745936922729015, 0.03734653815627098, 0.06097635254263878, 0.0022356503177434206, 0.04036225005984306, 0.015193076804280281, 0.02527131326496601, 0.11112415790557861, 0.03852817416191101, 0.02005557343363762, 0.09899009019136429, 0.022413043305277824, 0.26515668630599976, 0.04121214151382446, 0.0056593450717628, 0.031489718705415726, 0.017751285806298256, 0.015283219516277313, 0.09905190765857697, 0.009714542888104916, 0.02363637462258339, 0.011273990385234356], [0.00030710833379998803, 1.86266715900274e-05, 0.016079269349575043, 0.0001809492241591215, 0.005379094742238522, 0.0027022261638194323, 0.010010587982833385, 0.44840115308761597, 0.02747405506670475, 0.0034499100875109434, 0.003371716011315584, 0.010547117330133915, 0.06884531676769257, 0.0645841732621193, 0.0011059216922149062, 0.02649271860718727, 0.0031436453573405743, 0.11260165274143219, 0.08424276113510132, 0.10318951308727264, 0.002170593710616231, 0.005701835732907057], [0.0009241977240890265, 0.05420372635126114, 0.006001047324389219, 0.0004282105655875057, 0.043825414031744, 0.0036814496852457523, 0.016289176419377327, 0.027963949367403984, 0.009979098103940487, 0.003732410492375493, 0.039613183587789536, 0.004040407482534647, 0.6595102548599243, 0.007948655635118484, 0.0007917991606518626, 0.021582838147878647, 0.006451521068811417, 0.007809185888618231, 0.07665444165468216, 0.0008925959700718522, 0.005340115167200565, 0.0023363379295915365], [0.006381851155310869, 0.0008684624335728586, 0.033186301589012146, 0.0009845624445006251, 0.015211151912808418, 0.009927812032401562, 0.016079455614089966, 0.23521389067173004, 0.0625818520784378, 0.054946016520261765, 0.0739336684346199, 0.0672062411904335, 0.09905725717544556, 0.07595089823007584, 0.007094517350196838, 0.026824751868844032, 0.0070352703332901, 0.06502356380224228, 0.06069661304354668, 0.04174044355750084, 0.01818390004336834, 0.021871525794267654], [0.025294268503785133, 0.013204729184508324, 
0.01918610744178295, 0.008424995467066765, 0.016701482236385345, 0.01553210150450468, 0.027230676263570786, 0.011212692596018314, 0.10487822443246841, 0.031644538044929504, 0.1663050800561905, 0.039148855954408646, 0.09815680980682373, 0.019638430327177048, 0.032183557748794556, 0.03454320877790451, 0.0815514624118805, 0.03832419216632843, 0.05036933720111847, 0.06324312090873718, 0.06021861359477043, 0.04300747811794281], [0.013021372258663177, 0.004956350661814213, 0.061464037746191025, 0.0023197359405457973, 0.017814207822084427, 0.010589739307761192, 0.02938413806259632, 0.1231912150979042, 0.06323589384555817, 0.01894887536764145, 0.03226007521152496, 0.03496831655502319, 0.09377294033765793, 0.07119622081518173, 0.006428772583603859, 0.05649174749851227, 0.015252135694026947, 0.07466993480920792, 0.07152334600687027, 0.1530149281024933, 0.018768060952425003, 0.026727942749857903], [0.0030552067328244448, 0.01816687174141407, 0.006124014966189861, 0.0014188073109835386, 0.008674717508256435, 0.003320525400340557, 0.015048906207084656, 0.04884771630167961, 0.025034237653017044, 0.01656087301671505, 0.01480086613446474, 0.007816041819751263, 0.24508386850357056, 0.011536724865436554, 0.016712503507733345, 0.04040471091866493, 0.017372936010360718, 0.10199625790119171, 0.23788785934448242, 0.07870199531316757, 0.05939895659685135, 0.022035449743270874], [0.00815497525036335, 0.00808499101549387, 0.01092944573611021, 0.01672467403113842, 0.01690947636961937, 0.010749631561338902, 0.04079953208565712, 0.03982192277908325, 0.1350175440311432, 0.008363420143723488, 0.011325445026159286, 0.021379895508289337, 0.053528450429439545, 0.014248833060264587, 0.020655343309044838, 0.036691538989543915, 0.018160026520490646, 0.19780801236629486, 0.077072873711586, 0.18908436596393585, 0.021714158356189728, 0.04277551919221878], [0.002095340983942151, 0.00408938666805625, 0.017908314242959023, 0.0009042864548973739, 0.03402994945645332, 0.011386726051568985, 
0.021567555144429207, 0.09016076475381851, 0.015798373147845268, 0.004585400223731995, 0.02137313410639763, 0.012941754423081875, 0.22868365049362183, 0.050570208579301834, 0.0033007939346134663, 0.06863043457269669, 0.021946853026747704, 0.039649154990911484, 0.29785430431365967, 0.022661570459604263, 0.008286289870738983, 0.021575717255473137], [0.027957633137702942, 0.012140949256718159, 0.018155183643102646, 0.012592148967087269, 0.0351455882191658, 0.012176170013844967, 0.048857856541872025, 0.03936358913779259, 0.033658649772405624, 0.04489747807383537, 0.03784184902906418, 0.050950974225997925, 0.054993998259305954, 0.02211751602590084, 0.04140699282288551, 0.07736971229314804, 0.0392720103263855, 0.16433551907539368, 0.04953713342547417, 0.07422393560409546, 0.06368114799261093, 0.03932389244437218]], [[0.01485772430896759, 0.004430547822266817, 0.0035366888623684645, 0.01992594636976719, 0.03332258388400078, 0.012795297428965569, 0.004601253662258387, 0.0444762259721756, 0.09023185074329376, 0.06634209305047989, 0.05606991425156593, 0.08926723152399063, 0.06904479116201401, 0.036026351153850555, 0.14151634275913239, 0.09986390173435211, 0.0718778744339943, 0.02766488678753376, 0.04846889153122902, 0.019006483256816864, 0.03146737441420555, 0.015205786563456059], [0.5571335554122925, 0.02898446097970009, 0.10741859674453735, 0.10351040214300156, 0.03706449642777443, 0.0379459522664547, 0.0313616581261158, 0.01808936707675457, 0.011235098354518414, 0.011260027065873146, 0.0025736214593052864, 0.003560007316991687, 0.020598599687218666, 0.01928006485104561, 0.0022580481600016356, 0.0038402914069592953, 0.0008686133660376072, 0.0006271901656873524, 0.000532696139998734, 0.00015624568914063275, 0.0009594495058991015, 0.0007414743886329234], [0.0011139794951304793, 0.01787780225276947, 0.0010991123272106051, 0.010754281654953957, 0.44702449440956116, 0.014073921367526054, 0.020146513357758522, 0.19612950086593628, 0.018284980207681656, 0.009559977799654007, 
0.0014220918528735638, 0.013175302185118198, 0.04704899713397026, 0.002541032386943698, 0.005016987212002277, 0.1305890679359436, 0.02573293447494507, 0.017291860654950142, 0.013355874456465244, 0.00483011594042182, 0.0019301238935440779, 0.001001024735160172], [0.01328317727893591, 0.13359922170639038, 0.036942873150110245, 0.024056311696767807, 0.06469012796878815, 0.027695417404174805, 0.12644559144973755, 0.22837398946285248, 0.0646643340587616, 0.09342879056930542, 0.0677422508597374, 0.025160597637295723, 0.0271214060485363, 0.009507008828222752, 0.004868641030043364, 0.005849641747772694, 0.005744419526308775, 0.004694761708378792, 0.014039454981684685, 0.004171565640717745, 0.014938312582671642, 0.002981992904096842], [0.005373063497245312, 0.07870662212371826, 0.020949069410562515, 0.01905907317996025, 0.03991789370775223, 0.027861958369612694, 0.36617130041122437, 0.029656432569026947, 0.039083223789930344, 0.04527385160326958, 0.029733778908848763, 0.01849048212170601, 0.012204641476273537, 0.009647181257605553, 0.039692364633083344, 0.038934048265218735, 0.041794341057538986, 0.06563632935285568, 0.02137327380478382, 0.030773617327213287, 0.014084274880588055, 0.005583162885159254], [0.3901051878929138, 0.01710943691432476, 0.05143723264336586, 0.031132757663726807, 0.016686497256159782, 0.026633666828274727, 0.07175202667713165, 0.2052270472049713, 0.028580034151673317, 0.08004400134086609, 0.02822880819439888, 0.021809248253703117, 0.002322092652320862, 0.004931941628456116, 0.0031649486627429724, 0.006729105953127146, 0.0018529657972976565, 0.0026244891341775656, 0.006011885590851307, 0.001437157392501831, 0.0009659408824518323, 0.0012135664001107216], [0.019957298412919044, 0.0032357070595026016, 0.004920545034110546, 0.04671192914247513, 0.01002526842057705, 0.015538005158305168, 0.014784811064600945, 0.10059526562690735, 0.7101726531982422, 0.01472434215247631, 0.011704564094543457, 0.026587653905153275, 0.001237001153640449, 
0.0015360815450549126, 0.005241598002612591, 0.00045209407107904553, 0.0012405625311657786, 0.001137457904405892, 0.005577048286795616, 0.002555015031248331, 0.0013631952460855246, 0.0007018736796453595], [0.009161998517811298, 0.03156232833862305, 0.03002680279314518, 0.017849303781986237, 0.0036378821823745966, 0.013949494808912277, 0.041473258286714554, 0.058442119508981705, 0.009246106259524822, 0.5578168630599976, 0.019242193549871445, 0.05981377139687538, 0.0187832061201334, 0.07186141610145569, 0.005347694735974073, 0.005005099344998598, 0.0007058670744299889, 0.017726577818393707, 0.0029881654772907495, 0.004977329634130001, 0.016240134835243225, 0.0041425153613090515], [0.0030287273693829775, 0.018773522228002548, 0.009314149618148804, 0.006268225144594908, 0.004337642807513475, 0.00236050458624959, 0.0059031895361840725, 0.009863526560366154, 0.0284696277230978, 0.017584677785634995, 0.8130898475646973, 0.013147926889359951, 0.01323515921831131, 0.011720698326826096, 0.015264471061527729, 0.0006836798274889588, 0.001127210445702076, 0.000413750036386773, 0.013018360361456871, 0.00346927042119205, 0.002794202184304595, 0.006131565198302269], [0.001250660396181047, 0.0032622283324599266, 0.007477434352040291, 0.010279340669512749, 0.0020330145489424467, 0.003990151919424534, 0.003559121862053871, 0.02878318913280964, 0.024958401918411255, 0.011635582894086838, 0.011372768320143223, 0.8216440677642822, 0.012486004270613194, 0.035327643156051636, 0.0059461770579218864, 0.004051509778946638, 0.00015854541561566293, 0.0040342905558645725, 0.0005855226772837341, 0.0032627449836581945, 0.0006891735247336328, 0.0032123918645083904], [5.159510692465119e-05, 0.00010154957271879539, 4.094401447218843e-05, 0.000647381239105016, 0.0011844736291095614, 0.0003519611491356045, 3.397659020265564e-05, 7.232546340674162e-05, 0.003132701152935624, 0.00021502554591279477, 0.0009721926180645823, 0.0014417916536331177, 0.8944113254547119, 0.005313945934176445, 
0.016377059742808342, 0.043609730899333954, 0.023878732696175575, 0.0005850914749316871, 0.0029464627150446177, 0.0015911057125777006, 0.0008696206496097147, 0.0021710742730647326], [4.104737672605552e-05, 0.0013274046359583735, 0.00023813810548745096, 0.0017245971830561757, 0.0011874536285176873, 0.002097858116030693, 0.0002832287864293903, 0.0006183524965308607, 0.0030204185750335455, 0.003195403143763542, 0.0017276359722018242, 0.01100325956940651, 0.09686824679374695, 0.5835545063018799, 0.10611520707607269, 0.06363320350646973, 0.06831996142864227, 0.023967934772372246, 0.0010526648256927729, 0.020342545583844185, 0.0035605437587946653, 0.006120312958955765], [0.05815259367227554, 0.0022737395484000444, 0.016458455473184586, 0.015382053330540657, 0.015248353593051434, 0.005510674323886633, 0.018084639683365822, 0.0011470622848719358, 0.0005921524716541171, 0.0030555541161447763, 0.0046515208669006824, 0.004995550494641066, 0.023849627003073692, 0.045457664877176285, 0.6853578090667725, 0.052563365548849106, 0.023544834926724434, 0.0179104283452034, 0.0012386299204081297, 6.477151327999309e-05, 0.003937570843845606, 0.0005230593378655612], [0.0008687535300850868, 0.0010201714467257261, 0.00022362433082889766, 0.0009394034859724343, 0.00420627323910594, 0.0025534245651215315, 0.003231588751077652, 0.00958702526986599, 0.00014916594955138862, 0.002860047621652484, 4.440640623215586e-05, 0.0019292989745736122, 0.010328088887035847, 0.0027881066780537367, 0.002988762455061078, 0.8439813256263733, 0.033124763518571854, 0.06957575678825378, 0.007411264348775148, 0.0013155502965673804, 0.00041482728556729853, 0.00045838748337700963], [0.0008656681748107076, 0.009737885557115078, 0.0017801770009100437, 0.002297141822054982, 0.009185461327433586, 0.005518611054867506, 0.02814667485654354, 0.01934548281133175, 0.022063616663217545, 0.0033004696015268564, 0.015782706439495087, 0.0033520832657814026, 0.006686090957373381, 0.0033980002626776695, 0.008492915891110897, 
0.017678899690508842, 0.727209746837616, 0.032734885811805725, 0.0330057367682457, 0.042318280786275864, 0.006188522558659315, 0.0009109217207878828], [0.00019959686324000359, 0.00045827298890799284, 0.00019867239461746067, 0.000537541345693171, 0.0008079130202531815, 0.0011353653389960527, 0.0011573724914342165, 0.002296432387083769, 0.0014433881733566523, 0.011983959004282951, 0.0007158173830248415, 0.004558372776955366, 0.00026940193492919207, 0.001288622384890914, 0.035243093967437744, 0.02917674370110035, 0.05151151865720749, 0.7359353303909302, 0.01528371125459671, 0.056605298072099686, 0.04778970405459404, 0.001403918256983161], [0.020301492884755135, 0.0031226619612425566, 0.005920977797359228, 0.002134899841621518, 0.002287784591317177, 0.0018728040158748627, 0.010495109483599663, 0.015444410964846611, 0.00346140144392848, 0.004040065221488476, 0.012462936341762543, 0.0009775657672435045, 0.0014687426155433059, 0.0003483458131086081, 0.0011945718433707952, 0.010187586769461632, 0.01834707148373127, 0.011093076318502426, 0.8526204824447632, 0.010072245262563229, 0.005190589930862188, 0.006955308839678764], [0.016390446573495865, 0.01877780072391033, 0.025742197409272194, 0.025280840694904327, 0.011589392088353634, 0.010066160000860691, 0.04626508429646492, 0.04956896975636482, 0.0036814496852457523, 0.01552086416631937, 0.002204903634265065, 0.01679825969040394, 0.0034684892743825912, 0.019676728174090385, 0.007854212075471878, 0.015340058133006096, 0.019058967009186745, 0.2834128141403198, 0.03997692093253136, 0.3146476447582245, 0.01948150433599949, 0.03519628196954727], [0.00029471496236510575, 0.0017475676722824574, 0.002938175108283758, 0.0017477453220635653, 0.00034543746733106673, 0.0008884276612661779, 0.0028082341887056828, 0.0009307822911068797, 0.0014267545193433762, 0.006368997972458601, 0.0029363627545535564, 0.002921278355643153, 0.002599440747871995, 0.003652876242995262, 0.004646616522222757, 0.0002254494174849242, 0.0015456870896741748, 
0.015074295923113823, 0.012792614288628101, 0.004912982229143381, 0.9178733825683594, 0.011322138831019402], [0.0003734003403224051, 0.003894263878464699, 0.006163983140140772, 0.0025192508473992348, 0.0007768357754684985, 0.0005198319558985531, 0.000136541246320121, 0.0004508561105467379, 0.00044199853437021375, 0.00022266749874688685, 0.0015536114806309342, 0.0029400198254734278, 0.01912667416036129, 0.015767302364110947, 0.001189050148241222, 0.0015056065749377012, 0.00033486809115856886, 0.0005814511678181589, 0.004692417569458485, 0.031013628467917442, 0.0015701533993706107, 0.9042255282402039], [0.006108187139034271, 0.08208519220352173, 0.19153568148612976, 0.053761765360832214, 0.05113760009407997, 0.04585392400622368, 0.07527175545692444, 0.004436314571648836, 0.006793812848627567, 0.0024537229910492897, 0.003706258488819003, 0.05537253990769386, 0.08834604918956757, 0.10479402542114258, 0.018683606758713722, 0.026868335902690887, 0.008844263851642609, 0.028961090371012688, 0.002985073020681739, 0.025690261274576187, 0.05382872745394707, 0.06248186528682709], [0.00020807133114431053, 0.0013864102074876428, 0.0005151366931386292, 0.0021249433048069477, 0.00964995939284563, 0.003918695729225874, 0.000273280922556296, 5.848549699294381e-05, 0.0003665168769657612, 0.00022401660680770874, 0.00014908128650858998, 0.00039104901952669024, 0.12486762553453445, 0.07070166617631912, 0.024748727679252625, 0.3048690855503082, 0.21293914318084717, 0.03961203992366791, 0.012151086702942848, 0.016193736344575882, 0.03456772118806839, 0.14008347690105438]], [[0.030811307951807976, 0.2094816267490387, 0.038013529032468796, 0.047935858368873596, 0.03650781512260437, 0.16765926778316498, 0.035574957728385925, 0.024796441197395325, 0.11797298491001129, 0.028282299637794495, 0.019812898710370064, 0.01974860578775406, 0.02701042778789997, 0.0064042010344564915, 0.02080652490258217, 0.015439063310623169, 0.023410674184560776, 0.03390929475426674, 0.01802314817905426, 
0.05397031828761101, 0.013843199238181114, 0.010585546493530273], [0.06718022376298904, 0.16465841233730316, 0.05185703933238983, 0.03159036859869957, 0.04195414483547211, 0.2717953026294708, 0.03726985678076744, 0.10031315684318542, 0.021863173693418503, 0.025509560480713844, 0.05536609888076782, 0.006595326121896505, 0.011250454932451248, 0.0056419591419398785, 0.001806949614547193, 0.008949533104896545, 0.006840815301984549, 0.0291606355458498, 0.008551346138119698, 0.01973738707602024, 0.0068540372885763645, 0.02525421231985092], [0.008777687326073647, 0.43090856075286865, 0.016620289534330368, 0.042603638023138046, 0.012890567071735859, 0.30913063883781433, 0.011054408736526966, 0.006288092117756605, 0.0572294145822525, 0.002259667729958892, 0.011058554984629154, 0.00500568887218833, 0.02966698259115219, 0.003896938404068351, 0.0015049561625346541, 0.003753107041120529, 0.007579749450087547, 0.003648806130513549, 0.0024714041501283646, 0.02051161229610443, 0.0016072618309408426, 0.011532067321240902], [0.0062376162968575954, 0.01983867771923542, 0.006771633867174387, 0.006076847203075886, 0.038656845688819885, 0.26952722668647766, 0.0058710309676826, 0.5727423429489136, 0.003721389686688781, 0.033539559692144394, 0.007194908335804939, 0.002603859407827258, 0.004421084653586149, 0.0009154678555205464, 0.0003046969068236649, 0.007319050375372171, 0.0027538021095097065, 0.0011675828136503696, 0.0028078306932002306, 0.00108553864993155, 0.005380203016102314, 0.0010628337040543556], [0.002097139600664377, 0.0412042960524559, 0.006196481641381979, 0.09541843831539154, 0.014767157845199108, 0.7556268572807312, 0.0022562474478036165, 0.005563853308558464, 0.012129664421081543, 0.001090552774257958, 0.0046667903661727905, 0.0024122463073581457, 0.023471560329198837, 0.0056370641104876995, 0.0029395234305411577, 0.0033171807881444693, 0.010025366209447384, 0.00048006518045440316, 0.0016582731623202562, 0.00354724726639688, 0.0008789263665676117, 0.0046151308342814445], 
[0.04058851674199104, 0.03340412676334381, 0.009222574532032013, 0.014667129144072533, 0.03177063912153244, 0.22057954967021942, 0.03776299208402634, 0.298299640417099, 0.04144962877035141, 0.18497036397457123, 0.01990669220685959, 0.010195904411375523, 0.018734734505414963, 0.0017249463126063347, 0.0033680719789117575, 0.007060009520500898, 0.010715004988014698, 0.004890375770628452, 0.0029475418850779533, 0.002611555391922593, 0.004065395332872868, 0.00106469274032861], [0.03192518278956413, 0.06891829520463943, 0.030676014721393585, 0.024150170385837555, 0.022247811779379845, 0.08925700187683105, 0.010538691654801369, 0.5064025521278381, 0.06114431843161583, 0.01592370867729187, 0.036709751933813095, 0.0047413501888513565, 0.02568991668522358, 0.007045200560241938, 0.0016258106334134936, 0.01643107831478119, 0.0015389577019959688, 0.003981749527156353, 0.022081879898905754, 0.013121425174176693, 0.001937376568093896, 0.003911742474883795], [0.021122166886925697, 0.025810282677412033, 0.021156463772058487, 0.024927346035838127, 0.031021324917674065, 0.005586961749941111, 0.11417962610721588, 0.22221025824546814, 0.05238612741231918, 0.19716526567935944, 0.06539212912321091, 0.04657113924622536, 0.031027497723698616, 0.02366977371275425, 0.030612492933869362, 0.01582876220345497, 0.013186557218432426, 0.005410278681665659, 0.03939445689320564, 0.003350612474605441, 0.007740832399576902, 0.002249589189887047], [0.002015121281147003, 0.0015988515224307775, 0.00014907050353940576, 8.591100777266547e-05, 0.0008877465152181685, 0.010375958867371082, 0.0026906849816441536, 0.7196249961853027, 0.007080413401126862, 0.2457866221666336, 0.005985047668218613, 0.0018284786492586136, 0.00040823782910592854, 2.9903541872045025e-05, 2.6132663606404094e-06, 0.0001094539838959463, 0.00023419808712787926, 0.00017964281141757965, 0.0002713394642341882, 0.00010388651571702212, 0.0005230691749602556, 2.8748794647981413e-05], [0.046065788716077805, 0.009893080219626427, 
0.052976250648498535, 0.0034315132070332766, 0.008590285666286945, 0.000746270758099854, 0.023764025419950485, 0.0037454809062182903, 0.09338352084159851, 0.011045284569263458, 0.28839361667633057, 0.05787191540002823, 0.02543748915195465, 0.2512614130973816, 0.019451046362519264, 0.009122827090322971, 0.005897865630686283, 0.013396757654845715, 0.0175882987678051, 0.009774893522262573, 0.00252787908539176, 0.045634523034095764], [0.0011200046865269542, 0.013841567561030388, 0.002369471127167344, 0.0055209011770784855, 0.015359621495008469, 0.01799483224749565, 0.0067624435760080814, 0.0866793692111969, 0.12757578492164612, 0.4869551658630371, 0.03209764510393143, 0.1048290953040123, 0.019712287932634354, 0.006967428606003523, 0.002187101636081934, 0.0023730576504021883, 0.009689949452877045, 0.0013309032656252384, 0.02016245573759079, 0.013385234400629997, 0.019733833149075508, 0.003351885126903653], [0.0034307462628930807, 0.028781132772564888, 0.020564014092087746, 0.020493915304541588, 0.019097765907645226, 0.0230387095361948, 0.03674451261758804, 0.032471247017383575, 0.038889817893505096, 0.01035275962203741, 0.42476794123649597, 0.03201933205127716, 0.11204086989164352, 0.04233945533633232, 0.00917668268084526, 0.003155478509142995, 0.035014357417821884, 0.0027628603857010603, 0.03522659093141556, 0.010331716388463974, 0.02851463295519352, 0.030785363167524338], [0.0014261811738833785, 0.003109410172328353, 0.006097138859331608, 0.0015657702460885048, 0.006263792049139738, 0.0005418515647761524, 0.003336752997711301, 0.0022675199434161186, 0.00486657815054059, 0.007737944833934307, 0.05559944733977318, 0.6343432664871216, 0.012002025730907917, 0.19756552577018738, 0.008100501261651516, 0.006939236540347338, 0.0034583299420773983, 0.009536274708807468, 0.0028796589467674494, 0.006156282965093851, 0.00386203289963305, 0.022344449535012245], [0.001707899966277182, 0.010054023005068302, 0.007438037544488907, 0.000795630388893187, 0.005120148416608572, 
0.0005110235651955009, 0.006704504601657391, 0.003568559419363737, 0.00679920194670558, 0.0016506453976035118, 0.025382772088050842, 0.014817208983004093, 0.7743276953697205, 0.03566361591219902, 0.014651069417595863, 0.05143282562494278, 0.011516012251377106, 0.006042153108865023, 0.010636948049068451, 0.0028498449828475714, 0.004969928413629532, 0.0033603354822844267], [0.009633740410208702, 0.008562671020627022, 0.003972851205617189, 0.0051757171750068665, 0.014954640530049801, 0.03710607811808586, 0.0027725251857191324, 0.008152434602379799, 0.002100011333823204, 0.008168975822627544, 0.007213469594717026, 0.009482361376285553, 0.021140694618225098, 0.21147988736629486, 0.007547437679022551, 0.5641522407531738, 0.04419858753681183, 0.01700993813574314, 0.006905713118612766, 0.0017724685603752732, 0.002518292283639312, 0.005979298613965511], [0.002102944068610668, 0.0059522842057049274, 0.02555018663406372, 0.0024700548965483904, 0.009642564691603184, 0.0005441461107693613, 0.008261526934802532, 0.0036688100080937147, 0.004813939332962036, 0.00021351674513425678, 0.0032992030028253794, 0.00743892602622509, 0.042186345905065536, 0.058221522718667984, 0.16526994109153748, 0.3341432809829712, 0.11437427997589111, 0.05862677842378616, 0.13037428259849548, 0.014623861759901047, 0.0022357965353876352, 0.005985744763165712], [0.004310793709009886, 0.0011495646322146058, 0.0013485639356076717, 0.0003208401321899146, 0.0028774288948625326, 0.0012231182772666216, 0.0031850673258304596, 0.004849150311201811, 0.0006661387742497027, 0.002007112605497241, 0.0004321573651395738, 0.0015654704766348004, 0.0012903164606541395, 0.002327950671315193, 0.003376331180334091, 0.4627113938331604, 0.02303473651409149, 0.4685044586658478, 0.007575330790132284, 0.004689800553023815, 0.0019091550493612885, 0.0006450567161664367], [0.002085121814161539, 0.002326471731066704, 0.014114796184003353, 0.0003560144978109747, 0.0019449136452749372, 1.7527332602185197e-05, 0.00580643443390727, 
0.005613032728433609, 0.0021744673140347004, 0.00015806661394890398, 0.0014007913414388895, 0.00038362358463928103, 0.0019505864474922419, 0.008083457127213478, 0.004104133229702711, 0.02727678418159485, 0.009333738125860691, 0.04325390234589577, 0.8237431645393372, 0.038104988634586334, 0.0021880916319787502, 0.005580040160566568], [0.001848672516644001, 0.001675443141721189, 0.003563184989616275, 0.0007615704671479762, 0.0023499031085520983, 0.0003414922393858433, 0.005103014875203371, 0.004409702494740486, 0.0010922265937551856, 0.0038887332193553448, 0.0009105110075324774, 0.0011634717229753733, 0.0005607764469459653, 0.0033398347441107035, 0.0039950027130544186, 0.024831555783748627, 0.007957030087709427, 0.7758834958076477, 0.025278229266405106, 0.11555349826812744, 0.012434947304427624, 0.00305776740424335], [0.0014428289141505957, 0.004641806706786156, 0.004451781045645475, 0.0005259652971290052, 0.003970365040004253, 0.000666131149046123, 0.012048830278217793, 0.03216039389371872, 0.006806123536080122, 0.00279663666151464, 0.012833887711167336, 0.003716308157891035, 0.0015656136674806476, 0.001226484659127891, 0.0002622822066769004, 0.003121632616966963, 0.01821896992623806, 0.032220788300037384, 0.6275293827056885, 0.041682008653879166, 0.15331895649433136, 0.03479282557964325], [0.0031093116849660873, 0.0006586582167074084, 0.026974670588970184, 0.00028004709747619927, 0.001186621026135981, 9.139384928857908e-06, 0.0026001022197306156, 0.00013891082198824733, 0.0004949747817590833, 0.00014117831597104669, 0.00502057047560811, 0.0013303530868142843, 0.0006222636438906193, 0.044345591217279434, 0.0019578333012759686, 0.002019681967794895, 0.000871333060786128, 0.05050279200077057, 0.014069082215428352, 0.2597055435180664, 0.013163463212549686, 0.5707978010177612], [0.00016339124704245478, 0.0021120368037372828, 0.004281424917280674, 0.0012664607493206859, 0.006530775222927332, 0.00013200065586715937, 0.0025473462883383036, 0.0010329225333407521, 
0.00029107535374350846, 0.0003564675571396947, 0.0007687772740609944, 0.0008792674052529037, 0.0020903560798615217, 0.0011887374566867948, 0.002779744565486908, 0.0016528840642422438, 0.005459207110106945, 0.014180160127580166, 0.03273480013012886, 0.019502142444252968, 0.869425356388092, 0.030624601989984512]], [[0.002494835527613759, 0.28605273365974426, 0.1283876746892929, 0.010446767322719097, 0.06815892457962036, 0.01078891847282648, 0.06015839800238609, 0.01628766395151615, 0.0881987139582634, 0.01669633761048317, 0.034428179264068604, 0.01849648542702198, 0.09507814794778824, 0.02906814217567444, 0.013929999433457851, 0.020076030865311623, 0.006892542354762554, 0.05048959702253342, 0.010328609496355057, 0.008848381228744984, 0.014026837423443794, 0.010666022077202797], [0.0007292843074537814, 0.7231244444847107, 0.11220662295818329, 0.007406198885291815, 0.0492292083799839, 0.00527479313313961, 0.04632263630628586, 0.017204638570547104, 0.004488951526582241, 0.005433525890111923, 0.0045566256158053875, 0.004435847979038954, 0.011281231418251991, 0.0022930919658392668, 0.0010726703330874443, 0.0007161904941312969, 0.0002787189732771367, 0.0013307100161910057, 0.0009966195793822408, 0.0001757146092131734, 0.0007834644056856632, 0.000658838078379631], [5.44216345588211e-06, 0.9557223320007324, 0.002880169078707695, 0.0001906533434521407, 0.035067521035671234, 0.0001233172370120883, 0.0020560147240757942, 0.0003331484040245414, 0.00021855450177099556, 0.00032777892192825675, 0.0001923499658005312, 0.00011949069448746741, 0.002320766681805253, 2.2346897821989842e-05, 5.194501864025369e-05, 1.440716368961148e-05, 9.670538929640315e-06, 0.00017941200349014252, 2.4640454284963198e-05, 2.1235373424133286e-06, 0.00010912358266068622, 2.863865483959671e-05], [0.0004807469667866826, 0.8107981085777283, 0.09939132630825043, 0.0005020912503823638, 0.03935994207859039, 0.0006700649973936379, 0.004961061757057905, 0.0025044113863259554, 0.0012244185199961066, 
0.0016843938501551747, 0.008776843547821045, 0.005965074989944696, 0.01738562621176243, 0.0011406302219256759, 0.00019752747903112322, 0.0011107678292319179, 0.0001474603486713022, 0.0009577093878760934, 0.00043474367703311145, 4.34920730185695e-05, 0.0007305820472538471, 0.0015329656889662147], [0.0015203679213300347, 0.6382585167884827, 0.09586609154939651, 0.007963597774505615, 0.02681935764849186, 0.006076372694224119, 0.10355149954557419, 0.015127011574804783, 0.03019717149436474, 0.009883202612400055, 0.014415882527828217, 0.010097440332174301, 0.017214052379131317, 0.008338133804500103, 0.0036789097357541323, 0.003976091742515564, 0.0006396602839231491, 0.0030690578278154135, 0.0015183421783149242, 0.0004350838717073202, 0.0007821526960469782, 0.000572097604162991], [0.0006571871344931424, 0.1419086456298828, 0.717033326625824, 0.0005999338463880122, 0.014680254273116589, 0.003239504061639309, 0.015428523533046246, 0.0039379592053592205, 0.05445004627108574, 0.00251905620098114, 0.016563814133405685, 0.003949859645217657, 0.013111842796206474, 0.008627875708043575, 0.00026075573987327516, 0.0007002846105024219, 0.00018585110956337303, 0.0011025085113942623, 0.00021242642833385617, 0.0002452813496347517, 0.00021240617206785828, 0.00037259000237099826], [0.0034642464015632868, 0.1821579486131668, 0.11111944168806076, 0.007843797095119953, 0.16146473586559296, 0.01963081769645214, 0.04510698467493057, 0.0711190477013588, 0.24867793917655945, 0.019260907545685768, 0.03238630294799805, 0.015430301427841187, 0.03633851557970047, 0.011580344289541245, 0.0073754494078457355, 0.00636634137481451, 0.002946197986602783, 0.006266807671636343, 0.006924859248101711, 0.0007069653947837651, 0.0019450526451691985, 0.0018869831692427397], [0.0014879869995638728, 0.09835367649793625, 0.03869542106986046, 0.009879388846457005, 0.10299400985240936, 0.026627041399478912, 0.059972621500492096, 0.03859569877386093, 0.15575481951236725, 0.08372023701667786, 0.05922790244221687, 
0.030337078496813774, 0.14025261998176575, 0.019125867635011673, 0.035411056131124496, 0.02662486955523491, 0.01015233714133501, 0.026760054752230644, 0.013621069490909576, 0.0010925180977210402, 0.016557158902287483, 0.004756552167236805], [0.002872416051104665, 0.46293020248413086, 0.042253103107213974, 0.007462117355316877, 0.03655238822102547, 0.01172453910112381, 0.04749474301934242, 0.01683644764125347, 0.06032737344503403, 0.06074434146285057, 0.029603714123368263, 0.023740306496620178, 0.08855602890253067, 0.009358246810734272, 0.011505325324833393, 0.017534414306282997, 0.0033943001180887222, 0.031216295436024666, 0.005641045048832893, 0.003251709509640932, 0.019153442233800888, 0.00784747488796711], [0.0017228515353053808, 0.014941684901714325, 0.04132453352212906, 0.005799222271889448, 0.008286401629447937, 0.0075135789811611176, 0.044210486114025116, 0.030096255242824554, 0.03617079555988312, 0.08676350116729736, 0.1346706598997116, 0.1528611183166504, 0.09821438789367676, 0.1361648440361023, 0.03824760764837265, 0.043951477855443954, 0.008513424545526505, 0.05442557483911514, 0.017610356211662292, 0.00552754569798708, 0.014922382310032845, 0.01806127093732357], [0.0025453909765928984, 0.11837738007307053, 0.01917262375354767, 0.014078771695494652, 0.007420591078698635, 0.025163356214761734, 0.039802901446819305, 0.01385475229471922, 0.04843604192137718, 0.11928588151931763, 0.029434174299240112, 0.027667585760354996, 0.21689319610595703, 0.0454796627163887, 0.050552938133478165, 0.05293592810630798, 0.04574858769774437, 0.04557495564222336, 0.009320860728621483, 0.021676545962691307, 0.03447682783007622, 0.012101025320589542], [0.0022856732830405235, 0.09764517098665237, 0.04140253737568855, 0.00404627388343215, 0.027010126039385796, 0.013877315446734428, 0.0709504634141922, 0.01292745303362608, 0.18623891472816467, 0.0426524356007576, 0.03664948791265488, 0.02279536798596382, 0.17494530975818634, 0.04221127927303314, 0.025314265862107277, 
0.0444197952747345, 0.018404055386781693, 0.09187329560518265, 0.008486605249345303, 0.009011838585138321, 0.0166940875351429, 0.010158383287489414], [0.0010561238741502166, 0.053173523396253586, 0.010893408209085464, 0.018686575815081596, 0.015586595982313156, 0.025470269843935966, 0.021379657089710236, 0.025797219946980476, 0.013067848049104214, 0.20795252919197083, 0.06042848899960518, 0.09324328601360321, 0.14196562767028809, 0.0251922570168972, 0.053627047687768936, 0.04580835625529289, 0.04220307618379593, 0.027559850364923477, 0.02059887908399105, 0.013552566058933735, 0.0467696338891983, 0.035987235605716705], [0.002103470265865326, 0.00894297007471323, 0.009746315889060497, 0.00619810214266181, 0.005473450757563114, 0.02430529147386551, 0.00768394535407424, 0.010708206333220005, 0.013045660220086575, 0.10511250793933868, 0.09495440870523453, 0.050187379121780396, 0.05465016886591911, 0.03402228280901909, 0.04936802387237549, 0.07200310379266739, 0.0991450771689415, 0.09356812387704849, 0.04162217676639557, 0.07464548200368881, 0.0930352583527565, 0.049478620290756226], [0.0008649870869703591, 0.008769562467932701, 0.025045569986104965, 0.0011605944018810987, 0.01635785959661007, 0.004173160530626774, 0.003171147545799613, 0.005230173002928495, 0.002972786547616124, 0.02661762572824955, 0.11094515025615692, 0.10315113514661789, 0.07115334272384644, 0.02578471601009369, 0.007211456075310707, 0.08751396834850311, 0.03592329099774361, 0.08430834859609604, 0.032160934060811996, 0.015626734122633934, 0.08953423798084259, 0.24232330918312073], [0.003110125893726945, 0.005362918134778738, 0.009505528025329113, 0.005721488036215305, 0.017006341367959976, 0.014262886717915535, 0.014657150022685528, 0.010342412628233433, 0.038069043308496475, 0.033365681767463684, 0.10045761615037918, 0.06918155401945114, 0.04282968118786812, 0.02603183127939701, 0.035571031272411346, 0.11889772862195969, 0.05858472362160683, 0.12290162593126297, 0.04730404540896416, 
0.07382465153932571, 0.057454660534858704, 0.09555719792842865], [0.006852916907519102, 0.002274412428960204, 0.027914507314562798, 0.008983991108834743, 0.005984255578368902, 0.02476893737912178, 0.01138595212250948, 0.007115775719285011, 0.03546316549181938, 0.018474550917744637, 0.05702652782201767, 0.028581155464053154, 0.022977914661169052, 0.04385427013039589, 0.022488275542855263, 0.05223529413342476, 0.10003846883773804, 0.06618158519268036, 0.04372373968362808, 0.23096586763858795, 0.06849543750286102, 0.11421304196119308], [0.0067407432943582535, 0.04576007276773453, 0.10011651366949081, 0.013491078279912472, 0.07230303436517715, 0.010808981023728848, 0.03316750004887581, 0.017775943502783775, 0.006860476452857256, 0.01898515596985817, 0.029700251296162605, 0.03802395239472389, 0.027718521654605865, 0.02915438637137413, 0.020067526027560234, 0.046483468264341354, 0.016564708203077316, 0.10995440185070038, 0.0585174523293972, 0.05652711167931557, 0.08712119609117508, 0.15415750443935394], [0.0031535490415990353, 0.009183545596897602, 0.010075949132442474, 0.012129195965826511, 0.013722660951316357, 0.011340435594320297, 0.013884782791137695, 0.004205582197755575, 0.0032907447312027216, 0.028964707627892494, 0.009343220852315426, 0.00861704908311367, 0.0348978228867054, 0.009152829647064209, 0.03491704538464546, 0.03126903623342514, 0.05330047383904457, 0.12284925580024719, 0.05600735917687416, 0.05801893770694733, 0.3401336073875427, 0.13154210150241852], [0.02704527974128723, 0.15343812108039856, 0.07468616217374802, 0.010895120911300182, 0.03972261771559715, 0.006970252376049757, 0.031784649938344955, 0.010163738392293453, 0.009833728894591331, 0.016426293179392815, 0.007615876849740744, 0.010196520946919918, 0.04319773614406586, 0.008673066273331642, 0.007283429149538279, 0.022515006363391876, 0.007571742404252291, 0.08132969588041306, 0.03154721483588219, 0.08616021275520325, 0.18938642740249634, 0.12355708330869675], [0.0315171480178833, 
0.012309006415307522, 0.07180914282798767, 0.029036957770586014, 0.013824287801980972, 0.019286135211586952, 0.03998105600476265, 0.013490481302142143, 0.0035315139684826136, 0.023038510233163834, 0.017505036666989326, 0.02406899444758892, 0.023571303114295006, 0.04027837887406349, 0.03554198145866394, 0.029959045350551605, 0.03995192050933838, 0.14577101171016693, 0.0680718943476677, 0.051420245319604874, 0.10656016319990158, 0.15947578847408295], [0.05736831575632095, 0.26941072940826416, 0.13054145872592926, 0.01688646897673607, 0.02338382788002491, 0.019602635875344276, 0.0840395838022232, 0.00944727472960949, 0.029458187520503998, 0.01055783499032259, 0.004521015100181103, 0.0027512891683727503, 0.03788728266954422, 0.010811793617904186, 0.006766305770725012, 0.014784122817218304, 0.015439855866134167, 0.09042651206254959, 0.015662819147109985, 0.08168642222881317, 0.04333541914820671, 0.025230785831809044]], [[0.05356153845787048, 0.11797928065061569, 0.1210939809679985, 0.069657102227211, 0.10803013294935226, 0.031215904280543327, 0.10050524771213531, 0.0610126368701458, 0.04752206429839134, 0.013794825412333012, 0.01245047152042389, 0.011662309058010578, 0.009343534708023071, 0.013389619998633862, 0.004566307179629803, 0.006557816173881292, 0.0023470018059015274, 0.03903425857424736, 0.024745311588048935, 0.06336984038352966, 0.030645834282040596, 0.057515088468790054], [0.015669699758291245, 0.0355989933013916, 0.0332103930413723, 0.029586512595415115, 0.015330524183809757, 0.01919892616569996, 0.0316474512219429, 0.026312420144677162, 0.004193542990833521, 0.03674110397696495, 0.005370615050196648, 0.00994548387825489, 0.01563606783747673, 0.04967503622174263, 0.019481683149933815, 0.03339414298534393, 0.03023349493741989, 0.19717945158481598, 0.08380253612995148, 0.05468875542283058, 0.18899430334568024, 0.06410887092351913], [0.05590151250362396, 0.06174429878592491, 0.020176231861114502, 0.21026894450187683, 0.0843614786863327, 0.08263631165027618, 
0.06182882562279701, 0.16168470680713654, 0.015835464000701904, 0.06768937408924103, 0.006519329268485308, 0.007204012479633093, 0.01620597206056118, 0.007214197888970375, 0.03690044954419136, 0.010215776972472668, 0.011187488213181496, 0.021868091076612473, 0.0191777516156435, 0.008929584175348282, 0.02698606438934803, 0.00546412356197834], [0.007795408368110657, 0.24444584548473358, 0.26850196719169617, 0.03688254579901695, 0.0332847461104393, 0.050623808056116104, 0.047534119337797165, 0.06332939863204956, 0.014787659049034119, 0.01972666196525097, 0.005354208406060934, 0.0028889495879411697, 0.04625824838876724, 0.010918368585407734, 0.005089180544018745, 0.0055063506588339806, 0.004324929788708687, 0.02706199139356613, 0.046932656317949295, 0.014352578669786453, 0.03429752215743065, 0.010102849453687668], [0.06304790079593658, 0.023168791085481644, 0.6410360932350159, 0.08768138289451599, 0.019818106666207314, 0.016576474532485008, 0.009535349905490875, 0.009379290044307709, 0.014771368354558945, 0.01071882713586092, 0.014096272177994251, 0.011009830981492996, 0.00640769861638546, 0.00705836433917284, 0.02432234399020672, 0.0037710487376898527, 0.0020798328332602978, 0.005446195602416992, 0.004178465809673071, 0.006849739700555801, 0.00903300754725933, 0.010013571940362453], [0.054896946996450424, 0.14677490293979645, 0.17189349234104156, 0.15717415511608124, 0.04127545654773712, 0.03216307982802391, 0.028076056391000748, 0.07234374433755875, 0.07895307242870331, 0.029741743579506874, 0.015317358076572418, 0.02623855322599411, 0.03756844252347946, 0.015133170410990715, 0.022796273231506348, 0.0050418623723089695, 0.00231267255730927, 0.012879918329417706, 0.011429823935031891, 0.014014439657330513, 0.015010979026556015, 0.0089637516066432], [0.032943245023489, 0.06695745885372162, 0.2752309739589691, 0.08726347237825394, 0.055674828588962555, 0.01466186624020338, 0.011350964196026325, 0.02742576412856579, 0.11920768022537231, 0.026793519034981728, 
0.023130472749471664, 0.02638661488890648, 0.03827610984444618, 0.01544792391359806, 0.031139332801103592, 0.009245269000530243, 0.004910482559353113, 0.018260810524225235, 0.00962041039019823, 0.05138202756643295, 0.032584358006715775, 0.02210645191371441], [0.12464553862810135, 0.05704229697585106, 0.2330327033996582, 0.2045159637928009, 0.04317595809698105, 0.047431688755750656, 0.024714993312954903, 0.04655680060386658, 0.02593277208507061, 0.0438668355345726, 0.05270400270819664, 0.03808783367276192, 0.009576707147061825, 0.006929908413439989, 0.02459833212196827, 0.0027853879146277905, 0.0007031751447357237, 0.0011131918290629983, 0.0009404051816090941, 0.0019495489541441202, 0.002308435505256057, 0.0073875500820577145], [0.01653572916984558, 0.20216260850429535, 0.03087720088660717, 0.01897149533033371, 0.06599145382642746, 0.2040482461452484, 0.18294532597064972, 0.07337229698896408, 0.08410678058862686, 0.012475716881453991, 0.018231874331831932, 0.01836921088397503, 0.019411567598581314, 0.011782856658101082, 0.002341317245736718, 0.002486037788912654, 0.007454965263605118, 0.005244161933660507, 0.004559207707643509, 0.010479235090315342, 0.0026948240119963884, 0.005457908846437931], [0.04399631917476654, 0.1547544300556183, 0.015752727165818214, 0.1473311334848404, 0.03402726724743843, 0.1567992866039276, 0.0624915175139904, 0.2402414083480835, 0.013351884670555592, 0.05388052389025688, 0.016354626044631004, 0.02770816721022129, 0.011846502311527729, 0.004260305780917406, 0.00687694875523448, 0.0024204759392887354, 0.0010113344760611653, 0.001227460103109479, 0.0018719477811828256, 0.000641013088170439, 0.0016052585560828447, 0.001549565582536161], [0.006945964880287647, 0.03880944103002548, 0.034373391419649124, 0.008152598515152931, 0.03161434456706047, 0.018810082226991653, 0.046090155839920044, 0.056863464415073395, 0.5894598960876465, 0.01499989815056324, 0.02998737432062626, 0.029763326048851013, 0.032154906541109085, 0.026575937867164612, 
0.004366313107311726, 0.004068345762789249, 0.004073374904692173, 0.002315172692760825, 0.004091484937816858, 0.010972435586154461, 0.002022542292252183, 0.0034896372817456722], [0.02493215724825859, 0.07607394456863403, 0.03565076366066933, 0.030221709981560707, 0.03889068588614464, 0.06687188893556595, 0.0598410926759243, 0.16505560278892517, 0.06642623245716095, 0.12626825273036957, 0.03929528966546059, 0.07751865684986115, 0.03128733113408089, 0.07498843222856522, 0.01108010858297348, 0.020503686740994453, 0.007504845503717661, 0.012596293352544308, 0.006014492828398943, 0.017389677464962006, 0.005517198238521814, 0.006071740295737982], [0.009196310304105282, 0.1026238426566124, 0.05927152931690216, 0.01746273785829544, 0.03661720082163811, 0.02503366209566593, 0.04475291073322296, 0.16117696464061737, 0.2244383990764618, 0.05622407793998718, 0.08147028088569641, 0.05241705849766731, 0.06182527542114258, 0.022766409441828728, 0.005374558735638857, 0.004158619325608015, 0.0035146174486726522, 0.0055688959546387196, 0.008787295781075954, 0.00840890221297741, 0.004894474986940622, 0.004015936981886625], [0.00789001677185297, 0.06985512375831604, 0.00542630348354578, 0.03596341237425804, 0.02228546142578125, 0.03857285529375076, 0.04229388386011124, 0.45670491456985474, 0.03587660565972328, 0.13275304436683655, 0.018979312852025032, 0.05276801064610481, 0.033486295491456985, 0.01693544164299965, 0.007454880513250828, 0.0056181554682552814, 0.0012480973964557052, 0.005409815814346075, 0.00458529544994235, 0.002746082842350006, 0.0022834232077002525, 0.0008636421989649534], [0.0003932160325348377, 0.11823853850364685, 0.028353950008749962, 0.001998082734644413, 0.012035515159368515, 0.01896717958152294, 0.014773193746805191, 0.06538243591785431, 0.09169034659862518, 0.030195975676178932, 0.028017349541187286, 0.010263603180646896, 0.497148722410202, 0.026485078036785126, 0.0024539234582334757, 0.0050995685160160065, 0.007225411012768745, 0.004682108294218779, 
0.026319362223148346, 0.004141538869589567, 0.004954367410391569, 0.0011804949026554823], [0.010516791604459286, 0.030809413641691208, 0.0111172404140234, 0.03906556963920593, 0.021142585203051567, 0.041308626532554626, 0.01751839928328991, 0.14176799356937408, 0.02493121102452278, 0.17591522634029388, 0.07054167985916138, 0.10653648525476456, 0.08731517195701599, 0.09634996950626373, 0.05979107692837715, 0.02404123917222023, 0.007114229258149862, 0.012717454694211483, 0.007139840163290501, 0.004222249146550894, 0.007745224051177502, 0.002392352093011141], [0.0005662715411745012, 0.031103478744626045, 0.007640391122549772, 0.0036219176836311817, 0.0035141860134899616, 0.004711849149316549, 0.0019853466656059027, 0.01655079796910286, 0.09316755831241608, 0.02166753076016903, 0.046198271214962006, 0.024002373218536377, 0.6714308261871338, 0.016252178698778152, 0.0312964953482151, 0.0027775662019848824, 0.007558345794677734, 0.001670596655458212, 0.007533241529017687, 0.002079240046441555, 0.004105002153664827, 0.0005665497155860066], [0.004170159809291363, 0.04047980532050133, 0.017543809488415718, 0.012099876068532467, 0.006761856377124786, 0.019861964508891106, 0.00547229265794158, 0.037602778524160385, 0.009828080423176289, 0.10067544877529144, 0.027076788246631622, 0.09997691959142685, 0.24588826298713684, 0.1691802740097046, 0.05500468239188194, 0.06802546232938766, 0.018851319327950478, 0.020834416151046753, 0.009418236091732979, 0.011858385056257248, 0.012900297529995441, 0.006488949526101351], [0.00229707476682961, 0.015392767265439034, 0.006238026078790426, 0.005818312056362629, 0.00706927664577961, 0.013081743381917477, 0.005346864927560091, 0.00803870614618063, 0.022600283846259117, 0.05925488471984863, 0.14047597348690033, 0.035658907145261765, 0.29903414845466614, 0.05266771465539932, 0.13939547538757324, 0.023015327751636505, 0.1091647818684578, 0.006663125474005938, 0.013953274115920067, 0.006100906524807215, 0.021172083914279938, 
0.007560379337519407], [0.0009527478250674903, 0.015432587824761868, 0.002426525577902794, 0.000969179323874414, 0.0024717359337955713, 0.012409716844558716, 0.00965337734669447, 0.010347514413297176, 0.004389687906950712, 0.03775399923324585, 0.0036881885025650263, 0.022857630625367165, 0.061512503772974014, 0.2190985232591629, 0.01140532921999693, 0.12065310776233673, 0.0704205185174942, 0.24675120413303375, 0.02464578114449978, 0.06506261974573135, 0.046218425035476685, 0.010879105888307095], [0.001914327498525381, 0.04614122584462166, 0.0031096392776817083, 0.014524488709867, 0.004893022123724222, 0.03236721083521843, 0.009673744440078735, 0.01766437292098999, 0.0029707762878388166, 0.07902193814516068, 0.017110437154769897, 0.00912326667457819, 0.1563202291727066, 0.02174559235572815, 0.07213683426380157, 0.0287782009691, 0.2555788457393646, 0.026754945516586304, 0.09153561294078827, 0.007707860320806503, 0.0921032652258873, 0.008824205957353115], [0.0005734937149100006, 0.0027725433465093374, 0.004417249467223883, 0.000696789298672229, 0.0014093907084316015, 0.0009405870805494487, 0.002263774396851659, 0.004297187551856041, 0.0026981201954185963, 0.012360365130007267, 0.0024423745926469564, 0.008806233294308186, 0.009764806367456913, 0.23231396079063416, 0.005604300647974014, 0.08459808677434921, 0.03123982436954975, 0.2198840081691742, 0.04465552791953087, 0.260442316532135, 0.04155554249882698, 0.026263484731316566]]], [[[0.00480968551710248, 0.01883416809141636, 0.00379594205878675, 0.008108502253890038, 0.004907900467514992, 0.02203552983701229, 0.011176141910254955, 0.42068934440612793, 0.34966689348220825, 0.1310131847858429, 0.008399664424359798, 0.003153453813865781, 0.003406534204259515, 0.00032334012212231755, 0.00041981899994425476, 0.00039575452683493495, 0.0012535685673356056, 0.00047109395381994545, 0.0030717062763869762, 0.002355628414079547, 0.0016428069211542606, 6.934010161785409e-05], [0.0721978172659874, 0.1355370283126831, 
0.13319501280784607, 0.10105975717306137, 0.05488850176334381, 0.04243851825594902, 0.015074437484145164, 0.16418957710266113, 0.1913691759109497, 0.018496882170438766, 0.004043524153530598, 0.006956816650927067, 0.04808907210826874, 0.002534226281568408, 0.00011677568545565009, 0.00011800710490206257, 0.0004650317132472992, 6.877443956909701e-05, 0.0006959103629924357, 0.0006927898502908647, 0.006128131877630949, 0.00164414057508111], [0.004219291731715202, 0.020201364532113075, 0.02011749893426895, 0.02283613383769989, 0.3832474648952484, 0.019738858565688133, 0.17903141677379608, 0.16754625737667084, 0.1414833515882492, 0.022143932059407234, 0.003867601277306676, 0.006037119310349226, 0.0029705355409532785, 0.0005835212650708854, 7.262292638188228e-05, 0.0003011847729794681, 0.00012029395293211564, 0.00024360945099033415, 0.00011190879013156518, 0.0004193441418465227, 0.003117603249847889, 0.0015890663489699364], [0.00015306116256397218, 0.0015258663333952427, 0.005554634612053633, 0.00036798723158426583, 0.0033369597513228655, 0.01124514825642109, 0.9101220965385437, 0.03146518021821976, 0.03287471830844879, 0.0017053090268746018, 0.00015055100084282458, 3.2810825359774753e-05, 4.65678094769828e-05, 0.0006301727262325585, 6.300621407717699e-06, 1.1240494131925516e-05, 4.180422911304049e-05, 0.0004958657082170248, 3.419137647142634e-05, 0.00010174352792091668, 7.834115967852995e-05, 1.9414759663050063e-05], [0.017373165115714073, 0.009179589338600636, 0.014199693687260151, 0.001918067573569715, 0.0040848711505532265, 0.29865455627441406, 0.2201843410730362, 0.21084515750408173, 0.13099780678749084, 0.021805932745337486, 0.007254989352077246, 0.0007364129414781928, 0.00029967393493279815, 0.0014296042500063777, 0.0007058990304358304, 0.0011078424286097288, 0.03941747546195984, 0.0036253216676414013, 0.00925598107278347, 0.005771308671683073, 0.000745070748962462, 0.00040725053986534476], [0.00011760245251934975, 0.0005983650335110724, 0.0004046711837872863, 
0.00025291129713878036, 0.004001634195446968, 0.005693345796316862, 0.30742722749710083, 0.6382213830947876, 0.029083317145705223, 0.01228273008018732, 7.844366336939856e-05, 5.73603501834441e-05, 1.1845718290715013e-05, 3.1225805287249386e-05, 2.175803047066438e-06, 6.817630492150784e-05, 5.503034117282368e-05, 0.001158390543423593, 0.00018079944129567593, 0.00014550382911693305, 0.00012419256381690502, 3.7546099065366434e-06], [0.0018105555791407824, 0.01114294771105051, 0.00244245701469481, 0.0019252431811764836, 0.002464402699843049, 0.033927615731954575, 0.05787143111228943, 0.27692243456840515, 0.5162837505340576, 0.08444811403751373, 0.0053254179656505585, 0.0006026779301464558, 0.00034295712248422205, 4.1075374610954896e-05, 2.7768322979682125e-05, 1.3671665328729432e-05, 0.00033285608515143394, 0.00031126063549891114, 0.000665996631141752, 0.0021706910338252783, 0.0008787614060565829, 4.806789002032019e-05], [0.00015098755829967558, 0.002685034880414605, 0.0013121356023475528, 5.4161275329533964e-05, 8.837045606924221e-05, 0.0019750918727368116, 0.005031232256442308, 0.058871179819107056, 0.07386298477649689, 0.8167774081230164, 0.02063160203397274, 0.006393893156200647, 0.003010415006428957, 0.0012906494084745646, 1.5778477973071858e-05, 1.9546594558050856e-05, 3.564694998203777e-05, 0.001301179057918489, 0.0008308451506309211, 0.0029824492521584034, 0.0025502953212708235, 0.00012905651237815619], [0.001096156775020063, 0.007028361316770315, 0.002093323040753603, 0.001724870060570538, 0.0007770525407977402, 0.010878871195018291, 0.022188235074281693, 0.014888707548379898, 0.14390504360198975, 0.2624419033527374, 0.3376091718673706, 0.021471375599503517, 0.1449270248413086, 0.0073155914433300495, 0.0027915860991925, 0.0001059617061400786, 0.0014323259238153696, 0.0016608007717877626, 0.004688204266130924, 0.004006562754511833, 0.006240003742277622, 0.0007289357599802315], [0.002255955943837762, 0.008411313407123089, 0.005024828482419252, 
0.0075343577191233635, 0.007208680734038353, 0.0005682847695425153, 0.0007832451956346631, 0.018709838390350342, 0.004874277859926224, 0.06815894693136215, 0.05089271813631058, 0.6432019472122192, 0.1268744170665741, 0.023586208000779152, 0.004242329858243465, 0.006448263768106699, 0.00012491097731981426, 0.0017854379257187247, 0.0015566637739539146, 0.0037292989436537027, 0.008490289561450481, 0.005537786986678839], [0.0003464612236712128, 0.005112233571708202, 0.0015780266840010881, 0.0127993980422616, 0.01246586162596941, 0.0008482890552841127, 0.0009906264021992683, 0.004163519944995642, 0.015642978250980377, 0.02138499543070793, 0.045925114303827286, 0.07821346074342728, 0.7071614861488342, 0.025843188166618347, 0.0377495177090168, 0.009642365388572216, 0.001280782395042479, 0.0003166984242852777, 0.0018176500452682376, 0.0012361503904685378, 0.008310562931001186, 0.007170540280640125], [0.00017765660595614463, 0.00276433234103024, 0.0007314748363569379, 0.006820394191890955, 0.007656783331185579, 0.004431308247148991, 0.0005776239559054375, 0.0019766506738960743, 0.000801076996140182, 0.01691167987883091, 0.006704692728817463, 0.07316724956035614, 0.1225859671831131, 0.21732912957668304, 0.2925836741924286, 0.22015643119812012, 0.012387307360768318, 0.004013600293546915, 0.0005784555687569082, 0.0011439005611464381, 0.0017864832188934088, 0.0047140964306890965], [0.009650821797549725, 0.0003176843165419996, 0.0008953476208262146, 0.008071072399616241, 0.007909586653113365, 0.005632985383272171, 0.0014768036780878901, 0.00017074731294997036, 4.723018719232641e-05, 0.00032011381699703634, 0.0007606763974763453, 0.0015197909669950604, 0.023897849023342133, 0.022645510733127594, 0.7141939401626587, 0.09215421229600906, 0.10558242350816727, 0.002753461478278041, 0.0014847812708467245, 7.886816092650406e-06, 0.00030039047123864293, 0.00020672053506132215], [0.0058562904596328735, 0.00028524803929030895, 0.000914866104722023, 0.0020728560630232096, 
0.008050553500652313, 0.0023444918915629387, 0.005169952753931284, 0.011599414981901646, 0.0009040915756486356, 0.0021044069435447454, 0.001915378961712122, 0.015033580362796783, 0.0012605342781171203, 0.020170165225863457, 0.03493741527199745, 0.737962543964386, 0.042761512100696564, 0.08789732307195663, 0.011586579494178295, 0.0012184163788333535, 0.00031449648668058217, 0.005639802664518356], [0.0001989235752262175, 0.001988078933209181, 0.0025002569891512394, 0.0007098678615875542, 0.002829840872436762, 0.002405900275334716, 0.09473021328449249, 0.02744399942457676, 0.03683190420269966, 0.004372953902930021, 0.0010407583322376013, 0.0004158668452873826, 0.000998065690509975, 0.008525610901415348, 0.0036622241605073214, 0.008580915629863739, 0.06447740644216537, 0.6508929133415222, 0.0629526823759079, 0.01792081445455551, 0.005869407672435045, 0.0006512777763418853], [0.0008571513462811708, 0.0007214961806312203, 0.0010814472334459424, 0.0001484592503402382, 0.00018922696472145617, 0.0005548577755689621, 0.0007262553554028273, 0.006221705581992865, 0.0013030171394348145, 0.004393564537167549, 0.0007397359586320817, 0.0010874684667214751, 0.00014477739750873297, 0.004364400636404753, 0.0023886747658252716, 0.0359463170170784, 0.03895695507526398, 0.45626455545425415, 0.26482856273651123, 0.16685707867145538, 0.0069122943095862865, 0.005312045104801655], [4.3423890019766986e-05, 0.0003991842386312783, 0.00014624831965193152, 8.176598203135654e-05, 0.00015781636466272175, 0.00015093911497388035, 0.006003262009471655, 0.009766374714672565, 0.003978618420660496, 0.0028320092242211103, 0.0005377610796131194, 0.00010456064046593383, 8.314014849020168e-05, 0.00013667249004356563, 0.00022471048578154296, 0.0016599234659224749, 0.006253818515688181, 0.3689538836479187, 0.532704770565033, 0.05341393128037453, 0.011676330119371414, 0.0006909089861437678], [0.0001929229183588177, 0.0033115162514150143, 0.002327944850549102, 0.00020965724252164364, 0.0003229589492548257, 
0.00011948666360694915, 0.0003169644915033132, 0.0017660080920904875, 0.0017385779647156596, 0.00481355469673872, 0.00490644620731473, 0.003699731780216098, 0.0005419945227913558, 0.001375270076096058, 0.00020962585404049605, 0.0008008105214685202, 0.0008353455341421068, 0.029156917706131935, 0.03630455210804939, 0.8400743007659912, 0.03471500426530838, 0.032260503619909286], [4.056276520714164e-05, 0.0033576120622456074, 0.002219606889411807, 0.0001133741025114432, 4.9647136620478705e-05, 1.83451429620618e-05, 0.0001421120105078444, 0.0002475180081091821, 0.0037260788958519697, 0.004489109385758638, 0.01295619085431099, 0.003205914283171296, 0.019003529101610184, 0.0009509496157988906, 0.0003163363435305655, 4.138117583352141e-05, 0.00020701761241070926, 0.005900193005800247, 0.0769444927573204, 0.21463628113269806, 0.6006302237510681, 0.050803616642951965], [9.056159615283832e-05, 0.0023050676099956036, 0.0022502238862216473, 0.0005247564986348152, 0.00018220157653559, 5.5740038078511134e-05, 4.47793718194589e-05, 4.334998448030092e-05, 0.0001588193408679217, 0.0004964717081747949, 0.0023717908188700676, 0.0025994705501943827, 0.013945521786808968, 0.032123345881700516, 0.000993551453575492, 0.0003617046168074012, 0.00010332364036003128, 0.0007897447212599218, 0.001728419796563685, 0.046184659004211426, 0.01538697350770235, 0.8772594928741455], [0.004331768490374088, 0.050863612443208694, 0.04833770543336868, 0.04515419527888298, 0.051955439150333405, 0.0014118874678388238, 0.000689449196215719, 0.0005509410984814167, 0.0002476200752425939, 0.0017065382562577724, 0.003292627166956663, 0.018275899812579155, 0.13601720333099365, 0.03822903707623482, 0.043278761208057404, 0.019896024838089943, 0.0020761715713888407, 0.003529979847371578, 0.0056356461718678474, 0.00788496807217598, 0.23416413366794586, 0.2824704349040985], [0.006256712134927511, 0.009122810326516628, 0.017211079597473145, 0.06639979034662247, 0.06973244994878769, 0.012755036354064941, 
0.003612096421420574, 0.0008135208045132458, 0.00021555826242547482, 0.0005037674563936889, 0.0005610336665995419, 0.001791083486750722, 0.006039433181285858, 0.059362515807151794, 0.10931264609098434, 0.19922873377799988, 0.019360017031431198, 0.00690077617764473, 0.004764126613736153, 0.005418671295046806, 0.011576681397855282, 0.38906148076057434]], [[0.1440458595752716, 0.05216291919350624, 0.06282584369182587, 0.04486299306154251, 0.06087218225002289, 0.07345092296600342, 0.023729609325528145, 0.024642104282975197, 0.03694041818380356, 0.005939193069934845, 0.04055299609899521, 0.03821944445371628, 0.027057811617851257, 0.08774717152118683, 0.008770685642957687, 0.010300921276211739, 0.014526751823723316, 0.0029255289118736982, 0.004448711406439543, 0.02170676551759243, 0.00701714213937521, 0.20725402235984802], [0.1806352585554123, 0.024802297353744507, 0.02447948046028614, 0.15100757777690887, 0.18322302401065826, 0.004903215449303389, 0.023560447618365288, 0.1015317440032959, 0.09148772060871124, 0.007695106789469719, 0.0045906007289886475, 0.027063673362135887, 0.007200397085398436, 0.0022694552317261696, 0.00368445529602468, 0.00958437379449606, 0.0033560050651431084, 0.003185875015333295, 0.011942953802645206, 0.038401082158088684, 0.03676459938287735, 0.058630723506212234], [0.013316369615495205, 0.004280854482203722, 0.007217555306851864, 0.005962970666587353, 0.705883800983429, 0.000505814969073981, 0.0041205380111932755, 0.021191636100411415, 0.14174485206604004, 0.002519311150535941, 0.001934648142196238, 0.031092273071408272, 0.004310168791562319, 0.00045171796227805316, 0.00013028485409449786, 0.0023753962013870478, 0.000184652948519215, 0.000543852336704731, 0.0007106609409675002, 0.013871383853256702, 0.01194313820451498, 0.02570822462439537], [0.019044045358896255, 0.2244100719690323, 0.13366776704788208, 0.007094989065080881, 0.03913450613617897, 0.009150615893304348, 0.006635020021349192, 0.004798794165253639, 0.006866904906928539, 
0.0064241173677146435, 0.03273088112473488, 0.054187797009944916, 0.04637179151177406, 0.0269860178232193, 0.001045356271788478, 0.0024911346845328808, 0.0026481561362743378, 0.000947150809224695, 0.002628705929964781, 0.015491284430027008, 0.04303959012031555, 0.31420519948005676], [0.05338757485151291, 0.04540449008345604, 0.0789630115032196, 0.021981332451105118, 0.007632318418473005, 0.07586908340454102, 0.18154241144657135, 0.0012512532994151115, 0.013645591214299202, 0.011660808697342873, 0.15429635345935822, 0.05586475506424904, 0.009895917028188705, 0.009247382171452045, 0.07910595834255219, 0.0017287340015172958, 0.023187408223748207, 0.01536251325160265, 0.0018484867177903652, 0.034501492977142334, 0.011058224365115166, 0.11256484687328339], [0.022046705707907677, 0.007088003680109978, 0.034077238291502, 0.07138154655694962, 0.6770245432853699, 0.0012274414766579866, 0.0005383410607464612, 0.10857561230659485, 0.0035948664881289005, 0.0003689512668643147, 0.0003428381460253149, 0.026720767840743065, 0.002944376552477479, 0.007568894885480404, 0.00017391100118402392, 0.006056781858205795, 2.0980842236895114e-05, 1.4509738321066834e-05, 0.0002922929124906659, 0.00032242247834801674, 0.0021849304903298616, 0.027434077113866806], [0.08326002955436707, 0.003935555927455425, 0.004010144155472517, 0.046142272651195526, 0.5360662341117859, 0.005591536872088909, 0.02669776976108551, 0.10857665538787842, 0.12350264191627502, 0.00309126079082489, 0.0009080189047381282, 0.00953033845871687, 0.013322638347744942, 0.0009731560130603611, 0.0031747170723974705, 0.012691563926637173, 0.004408489912748337, 0.0014028402511030436, 0.002963058650493622, 0.002458117436617613, 0.0036766999401152134, 0.0036162217147648335], [0.03256465494632721, 0.00957504566758871, 0.024304939433932304, 0.015313531272113323, 0.00127762695774436, 0.811870276927948, 0.03835438936948776, 0.02372673898935318, 0.000484064978081733, 0.0034041632898151875, 0.0016083205118775368, 0.0004082271771039814, 
0.00014966524031478912, 0.022348757833242416, 0.0024636252783238888, 0.004389979410916567, 0.004932318814098835, 0.0018003986915573478, 0.0003738194645848125, 0.00012781747500412166, 6.323972775135189e-05, 0.000458490161690861], [0.23522216081619263, 0.000595421704929322, 0.028561435639858246, 0.004139232914894819, 0.014149622060358524, 0.006259867921471596, 0.6708477735519409, 0.0022054053843021393, 0.005481945350766182, 0.0005292995483614504, 0.023086106404662132, 0.0003532262926455587, 2.600083826109767e-05, 0.0007096925401128829, 0.004992796573787928, 0.00042893123463727534, 0.001352099236100912, 0.0007740010623820126, 0.00018132245168089867, 4.3878444557776675e-05, 1.331157636741409e-05, 4.65072771476116e-05], [0.015177889727056026, 0.0009914968395605683, 0.0011440407251939178, 0.05934510752558708, 0.00876906793564558, 0.004250410012900829, 0.02355053275823593, 0.8206573128700256, 0.012113298289477825, 0.0041158925741910934, 0.0028693072963505983, 0.004717222414910793, 0.00027365912683308125, 0.0020314804278314114, 0.0031923786737024784, 0.009695935994386673, 0.0006205584504641593, 0.007163772825151682, 0.01715814135968685, 0.0014954475918784738, 0.0003327231388539076, 0.0003343917487654835], [0.01467772014439106, 0.00020251756359357387, 0.0009080743766389787, 0.0010000838665291667, 0.00937188882380724, 0.0008215112611651421, 0.012942089699208736, 0.03728114441037178, 0.7841634154319763, 0.004076110199093819, 0.04014294221997261, 0.018761813640594482, 0.0022209021262824535, 0.0010808674851432443, 0.001262302161194384, 0.0025734477676451206, 0.009914199821650982, 0.008387994021177292, 0.027011899277567863, 0.020108146592974663, 0.0007984476396813989, 0.0022923562210053205], [0.02312477119266987, 0.005217873957008123, 0.020990729331970215, 0.001583385863341391, 0.0012169283581897616, 0.025970324873924255, 0.028924135491251945, 0.02459523268043995, 0.013486513867974281, 0.13617531955242157, 0.2672552764415741, 0.13029812276363373, 0.002313199220225215, 
0.06253396719694138, 0.0016761459410190582, 0.00565724354237318, 0.003702147863805294, 0.1728184074163437, 0.006958420854061842, 0.043311554938554764, 0.005030322354286909, 0.01715986803174019], [0.00453499611467123, 0.004806562792509794, 0.019586050882935524, 0.0014640305889770389, 0.0041375672444701195, 0.0005660771275870502, 0.0031919232569634914, 0.001446415320970118, 0.06083939969539642, 0.010736900381743908, 0.7494024634361267, 0.026313887909054756, 0.01802777871489525, 0.006627349182963371, 0.01785883679986, 0.0007997491047717631, 0.0018865711754187942, 0.0016124506946653128, 0.01383251789957285, 0.026514830067753792, 0.015258245170116425, 0.010555370710790157], [0.004558902233839035, 0.003372569801285863, 0.016738468781113625, 0.0016553313471376896, 0.02664082683622837, 0.0008427758584730327, 0.00046169606503099203, 0.013649297878146172, 0.0068405126221477985, 0.03562479466199875, 0.010806513018906116, 0.565509021282196, 0.03786320984363556, 0.08179052174091339, 0.0016604604898020625, 0.051790520548820496, 0.00040749332401901484, 0.010765660554170609, 0.002759030554443598, 0.03953069448471069, 0.02162761427462101, 0.06510400027036667], [0.0009661685326136649, 0.0015958414878696203, 0.0016837972216308117, 0.00027285917894914746, 0.020759811624884605, 8.01631758804433e-05, 9.222677908837795e-05, 0.0030339902732521296, 0.005035766866058111, 0.0025072372518479824, 0.010233752429485321, 0.04947112873196602, 0.7844552993774414, 0.028678203001618385, 0.0016727576730772853, 0.022016558796167374, 0.004617343656718731, 0.0007145234267227352, 0.016632622107863426, 0.0034356554970145226, 0.023768184706568718, 0.018276171758770943], [0.013995055109262466, 0.007770455442368984, 0.009608772583305836, 0.009384339675307274, 0.0011936507653445005, 0.08146025240421295, 0.0017384883249178529, 0.0016975824255496264, 0.0007733607199043036, 0.014976740814745426, 0.014623255468904972, 0.061795394867658615, 0.04279448091983795, 0.3636137545108795, 0.07669872045516968, 
0.052493397146463394, 0.08653777837753296, 0.040056854486465454, 0.0020850582513958216, 0.02010795660316944, 0.0035315053537487984, 0.09306307137012482], [0.007679261267185211, 0.005488010589033365, 0.033692169934511185, 0.020977886393666267, 0.0736425369977951, 0.0027350897435098886, 0.0030940354336053133, 0.0036364055704325438, 0.005079126916825771, 0.0008564281743019819, 0.00664245942607522, 0.01035609096288681, 0.10986088216304779, 0.08677030354738235, 0.40730494260787964, 0.13205723464488983, 0.029981572180986404, 0.0035314038395881653, 0.026685304939746857, 0.002255816478282213, 0.009812070988118649, 0.01786108873784542], [0.003285949816927314, 0.0002823400136549026, 0.0003295230562798679, 0.00406806031242013, 0.0023439188953489065, 0.005333783105015755, 0.0011558163678273559, 0.010446960106492043, 0.0018616323359310627, 0.004889529664069414, 3.654306783573702e-05, 0.0014982435386627913, 0.006667155772447586, 0.0064559741877019405, 0.02181730791926384, 0.534471333026886, 0.14963862299919128, 0.22026462852954865, 0.012308964505791664, 0.009263608604669571, 0.002662160200998187, 0.0009180012857541442], [0.0007400100002996624, 0.000223686482058838, 0.00017461771494708955, 0.0006436100229620934, 0.00036959623685106635, 0.0048305802047252655, 0.001495992299169302, 0.0009545384673401713, 0.0020722963381558657, 0.00039242871571332216, 0.001178888836875558, 0.00010006807860918343, 0.004072446841746569, 0.005125357303768396, 0.017539316788315773, 0.025259049609303474, 0.8841072916984558, 0.013853860087692738, 0.03401143476366997, 0.0016509650740772486, 0.0005511092604137957, 0.0006530443788506091], [0.029337365180253983, 0.0001396146253682673, 0.004435136914253235, 0.0015548793599009514, 0.004583044443279505, 0.0018646117532625794, 0.0333515889942646, 0.0035295896232128143, 0.0023211168590933084, 0.0016543548554182053, 0.0008836050401441753, 0.001472337986342609, 1.6698728359187953e-05, 0.003708493895828724, 0.003519160207360983, 0.04807049408555031, 
0.00995080079883337, 0.8162785172462463, 0.005995303392410278, 0.02502170391380787, 0.0004061105428263545, 0.0019054778385907412], [0.000954394752625376, 0.0007998295477591455, 0.0002427270810585469, 0.013035304844379425, 0.0002528954646550119, 0.00035447979462333024, 0.009977748617529869, 0.003371019847691059, 0.0013987821293994784, 0.00037887951475568116, 0.003941726870834827, 0.0005058543756604195, 0.0003487438661977649, 0.00022498416365124285, 0.03954179957509041, 0.003729705000296235, 0.012482372112572193, 0.08206024765968323, 0.7868645787239075, 0.01798754557967186, 0.0181803610175848, 0.003366129007190466], [0.0009693196625448763, 6.33684903732501e-05, 0.0009259347571060061, 0.0002185149787692353, 0.00047286911285482347, 0.00019404500199016184, 0.00037413393147289753, 0.0001156888174591586, 0.0008105459273792803, 0.00019713150686584413, 0.001277424395084381, 0.005974177736788988, 5.076629531686194e-05, 0.004747309722006321, 0.00029904264374636114, 0.0037603569217026234, 0.0028005389031022787, 0.11627089977264404, 0.0036964488681405783, 0.5835981369018555, 0.0031649938318878412, 0.27001845836639404]], [[0.04543902352452278, 0.02265528403222561, 0.025915678590536118, 0.01522547286003828, 0.05274838209152222, 0.05059851333498955, 0.642859697341919, 0.042176615446805954, 0.011372390203177929, 0.0048017618246376514, 0.004549265839159489, 0.002643670653924346, 0.0024067761842161417, 0.008829286321997643, 0.012243672274053097, 0.013713881373405457, 0.006730715278536081, 0.031940724700689316, 0.0010799746960401535, 0.0007938790949992836, 0.0007702553411945701, 0.0005051431362517178], [0.060590825974941254, 0.08823433518409729, 0.036191247403621674, 0.01004221010953188, 0.025040002539753914, 0.005991595331579447, 0.01309259980916977, 0.08384755998849869, 0.19719864428043365, 0.08533234894275665, 0.04531346634030342, 0.04399625584483147, 0.01449151337146759, 0.005914213135838509, 0.004650937393307686, 0.003580755088478327, 0.0040979706682264805, 0.004835319239646196, 
0.0700913518667221, 0.041942380368709564, 0.08177506923675537, 0.07374931126832962], [0.01697179488837719, 0.36712396144866943, 0.09153284132480621, 0.01469462737441063, 0.02390231378376484, 0.02617691271007061, 0.04603162407875061, 0.01683649607002735, 0.03637267276644707, 0.0562797375023365, 0.03712032362818718, 0.0067505124025046825, 0.023104950785636902, 0.03499605879187584, 0.006559481844305992, 0.009259686805307865, 0.009206431917846203, 0.02153734304010868, 0.022958926856517792, 0.013103409670293331, 0.06478086858987808, 0.05469902232289314], [0.011162280105054379, 0.08538792282342911, 0.5747055411338806, 0.014981505461037159, 0.032560355961322784, 0.00434214249253273, 0.0154647808521986, 0.00016239455726463348, 0.0012400817358866334, 0.003572902176529169, 0.005383852869272232, 0.009088220074772835, 0.02635837532579899, 0.13660815358161926, 0.02649218589067459, 0.00647008465602994, 0.00042602818575687706, 0.004696860909461975, 0.00010094998287968338, 0.0021701210644096136, 0.0046104383654892445, 0.034014880657196045], [0.01986950822174549, 0.0034436245914548635, 0.0042571621015667915, 0.7997251152992249, 0.02579180896282196, 0.017739132046699524, 0.002854724880307913, 0.020867563784122467, 0.0013926272513344884, 0.00016889587277546525, 0.0010396679863333702, 0.025705737993121147, 0.010808749124407768, 0.0020894529297947884, 0.0269668847322464, 0.0030340261291712523, 0.009172601625323296, 0.00022939190967008471, 0.0016209111781790853, 0.0005133680533617735, 0.0008870568126440048, 0.021821996197104454], [0.00032731221290305257, 0.008331645280122757, 0.006589038763195276, 0.011700292117893696, 0.8814737796783447, 0.017279233783483505, 0.04037627950310707, 0.0035309402737766504, 0.0012368045281618834, 0.0001320513110840693, 3.378136534593068e-05, 0.00010039957851404324, 0.008877715095877647, 0.005226220935583115, 0.0030930410139262676, 0.009000709280371666, 0.0009820129489526153, 0.0015369853936135769, 4.363015978015028e-05, 5.7636070778244175e-06, 
4.1381819755770266e-05, 8.107948815450072e-05], [0.002078633289784193, 0.012640096247196198, 0.003335709450766444, 0.013199826702475548, 0.00538221700116992, 0.7508124709129333, 0.0013504824601113796, 0.15885789692401886, 0.006461186334490776, 0.008237541653215885, 0.000414988084230572, 0.0001690259377937764, 0.0002836206112988293, 0.0017432230524718761, 0.0005786925321444869, 0.002124498598277569, 0.014406726695597172, 0.001065857824869454, 0.014775561168789864, 0.0007419321336783469, 0.0003430603537708521, 0.0009967077057808638], [0.3043042719364166, 0.004914440680295229, 0.0088972682133317, 0.008703735657036304, 0.04284341633319855, 0.003409985452890396, 0.4302988052368164, 0.0163190308958292, 0.061018723994493484, 0.001037807553075254, 0.08140718191862106, 0.0014164680615067482, 0.0016008166130632162, 5.393481478677131e-05, 0.007579436060041189, 0.001015159417875111, 0.011552775278687477, 0.0005059725372120738, 0.010952169075608253, 0.001016915193758905, 0.0009169027907773852, 0.00023478048387914896], [8.753579459153116e-05, 2.3814594896975905e-05, 1.6483449144288898e-05, 9.473311365582049e-05, 4.204025026410818e-05, 0.000246875366428867, 0.00041498965583741665, 0.9935476183891296, 0.0007239853730425239, 0.001172867021523416, 1.1143647498101927e-05, 0.00011422995885368437, 6.526963147734932e-07, 1.312343442805286e-06, 1.4504169030260528e-06, 0.00017790058336686343, 2.739109368121717e-05, 0.0005846362910233438, 0.002482563955709338, 0.0001344069605693221, 9.100816532736644e-05, 2.306751412106678e-06], [0.0313456691801548, 0.00020766412490047514, 0.0001695210812613368, 4.4762567995348945e-05, 0.0009849151829257607, 0.00031579865026287735, 0.0016841405304148793, 0.006877397187054157, 0.8592115640640259, 0.005327567923814058, 0.07372675836086273, 0.004580584354698658, 0.0004270907666068524, 3.0399920433410443e-05, 0.00015133150736801326, 0.00010227490565739572, 0.0013506230898201466, 0.00028086802922189236, 0.007003760896623135, 0.0056732273660600185, 
0.00018663989612832665, 0.00031757025863043964], [0.0019344311440363526, 0.004997559357434511, 0.0012904334580525756, 0.00018444033048581332, 0.0009200565400533378, 0.0038816314190626144, 0.00207946146838367, 0.07531144469976425, 0.04285169392824173, 0.7816334962844849, 0.0381304994225502, 0.010260785929858685, 0.003986930940300226, 0.0029180599376559258, 0.00016250048065558076, 0.003206902649253607, 0.0003482100728433579, 0.004073861986398697, 0.015224123373627663, 0.0021594902500510216, 0.00420505041256547, 0.00023884793336037546], [8.651307871332392e-05, 0.0024592680856585503, 0.0019058353500440717, 0.00015492085367441177, 6.67759231873788e-05, 6.37059347354807e-05, 0.0018619770416989923, 2.8267559173400514e-05, 0.0035818193573504686, 0.0009256240446120501, 0.9772348999977112, 0.0020871914457529783, 0.006406864617019892, 0.0009151491103693843, 0.0003466400958131999, 3.6915148484695237e-06, 7.891467248555273e-05, 1.6350992154912092e-05, 0.00010814949928317219, 0.0011505929287523031, 0.00014331321290228516, 0.00037354390951804817], [0.0007553757168352604, 6.583011418115348e-05, 0.000253684091148898, 0.0005120299756526947, 8.256009459728375e-05, 2.0010358639410697e-05, 1.1993609405180905e-05, 0.0004280484572518617, 8.597984560765326e-05, 0.0022024456411600113, 0.001972684171050787, 0.9831679463386536, 0.0036645568907260895, 0.003367677563801408, 0.0005286262603476644, 0.00016086343384813517, 3.517895493132528e-06, 3.0346596759045497e-05, 1.4499029020953458e-05, 0.0002320698695257306, 0.0002011305041378364, 0.00223804684355855], [3.965440555475652e-05, 9.449161734664813e-05, 4.1669216443551704e-05, 0.00014976267993915826, 0.0003621909418143332, 5.2924251576769166e-06, 1.5589132090099156e-05, 3.67252214346081e-05, 0.00038746107020415366, 0.0014341400237753987, 0.0008606034098193049, 0.004341690801084042, 0.9812278151512146, 0.0008749545668251812, 0.004736994858831167, 0.0006657195626758039, 0.0001074050014722161, 1.9655646610772237e-05, 0.0003853309899568558, 
1.0076673788717017e-05, 0.00413266196846962, 7.016893505351618e-05], [0.0010284304153174162, 0.0015997405862435699, 0.0012082657776772976, 0.00022881879704073071, 0.0010559451766312122, 0.0012473991373553872, 4.724410973722115e-05, 1.2439753845683299e-05, 5.4165328037925065e-05, 0.0024510740768164396, 0.0018676697509363294, 0.004061630927026272, 0.00753973750397563, 0.9144372344017029, 0.014172936789691448, 0.020496651530265808, 0.0014298100722953677, 0.002262415364384651, 6.648365524597466e-05, 0.0010212380439043045, 0.00033838741364888847, 0.023372303694486618], [0.001726087648421526, 0.00010966951958835125, 0.00020970245532225817, 0.0010577430948615074, 0.003829078283160925, 0.0001366912038065493, 0.00157083326485008, 1.813210292311851e-05, 5.834433250129223e-05, 1.6474676158395596e-05, 0.0005551919457502663, 0.0014205946354195476, 0.005886691156774759, 0.0011120407143607736, 0.947726845741272, 0.010113313794136047, 0.02274146117269993, 0.0012123349588364363, 0.00022828640067018569, 1.4419229955819901e-05, 0.0001430041011190042, 0.0001131309472839348], [2.0293065972509794e-05, 3.150824340991676e-05, 2.4788814698695205e-05, 3.943269985029474e-05, 0.001653328537940979, 0.0007212511845864356, 0.00023608906485605985, 0.0005757592734880745, 2.251287014587433e-06, 4.015326703665778e-05, 4.1487567159492755e-07, 5.3194991778582335e-05, 0.00024676413158886135, 0.004183581564575434, 0.0024032285436987877, 0.9549957513809204, 0.002455946058034897, 0.03213353827595711, 0.00016917107859626412, 1.8177883021053276e-06, 7.867249223636463e-06, 3.937695055356016e-06], [6.312919867923483e-05, 0.00012108725786674768, 6.692601800750708e-06, 6.191105785546824e-05, 0.0003293260815553367, 0.0008201113087125123, 3.519860183587298e-05, 0.0035548226442188025, 0.002779029542580247, 0.0003226615663152188, 0.00023804270313121378, 1.9990080545539968e-05, 0.00020654544641729444, 0.00016771859372965991, 0.0015650567365810275, 0.008737897500395775, 0.6026870608329773, 0.0019919630140066147, 
0.3746550381183624, 0.0011818531202152371, 0.00036847052979283035, 8.645030175102875e-05], [0.15799327194690704, 0.00012622016947716475, 0.00020118296379223466, 8.253607666119933e-05, 0.0031590862199664116, 0.0007479175110347569, 0.0010407675290480256, 0.019174495711922646, 0.0022816092241555452, 0.0035312199033796787, 0.0010789226507768035, 0.005431697238236666, 6.393502553692088e-05, 0.0006536704604513943, 0.00518103176727891, 0.36637744307518005, 0.0748019814491272, 0.2586632966995239, 0.08455157279968262, 0.011263924650847912, 0.0025771099608391523, 0.0010171396424993873], [6.355484219966456e-05, 2.2374475520337e-05, 6.2294298004417215e-06, 5.334781235433184e-06, 2.2296478618955007e-06, 1.0495949936739635e-05, 9.87816929409746e-06, 0.0023689311929047108, 0.0004705935134552419, 0.0011026370339095592, 8.092766802292317e-05, 7.583084880025126e-06, 6.047342139936518e-06, 6.616780865442706e-07, 8.59451392898336e-06, 0.00019624030392151326, 0.0006197018083184958, 0.00020108894386794418, 0.9793790578842163, 0.0006238433998078108, 0.014781223610043526, 3.291169196018018e-05], [0.0038624941371381283, 0.00012443882587831467, 0.00013414431305136532, 2.3837699700379744e-05, 4.493477172218263e-05, 5.9898717154283077e-05, 0.00013848382513970137, 0.000491913640871644, 0.006845866329967976, 0.0014975843951106071, 0.019470151513814926, 0.009426452219486237, 1.72966429090593e-05, 0.00024964322801679373, 5.969905760139227e-05, 0.00011098376126028597, 0.00103876949287951, 0.0027311858721077442, 0.0028232831973582506, 0.9046223163604736, 0.0014245460042729974, 0.04480208456516266], [5.588294516201131e-05, 0.00040526650263927877, 0.0002497182576917112, 6.605299859074876e-05, 7.342208118643612e-05, 1.5186097925834474e-06, 5.887174847885035e-05, 6.51477457722649e-05, 0.00024323526304215193, 0.0025647091679275036, 0.0009066663333214819, 0.0015042880550026894, 0.007197451312094927, 0.00016360002337023616, 0.0006155160954222083, 0.00017410017608199269, 2.8931175620527938e-05, 
0.0002808038261719048, 0.0030971914529800415, 0.0011514317011460662, 0.9793865084648132, 0.001709757256321609]], [[0.34861090779304504, 0.02294115163385868, 0.026836905628442764, 0.004151721950620413, 0.1473511904478073, 0.028590364381670952, 0.05914048105478287, 0.059819817543029785, 0.060248807072639465, 0.012754657305777073, 0.0030275594908744097, 0.0029455055482685566, 0.0018973437836393714, 0.011692304164171219, 0.0008199821459129453, 0.03552927449345589, 0.02455749548971653, 0.04043569415807724, 0.014056390151381493, 0.07857473939657211, 0.006299111060798168, 0.009718632325530052], [0.23334071040153503, 0.1314959079027176, 0.08518955856561661, 0.036225661635398865, 0.12570001184940338, 0.024942751973867416, 0.09685947000980377, 0.026231860741972923, 0.04050527885556221, 0.0396723747253418, 0.037129200994968414, 0.034027691930532455, 0.02608318254351616, 0.003348385216668248, 0.007638436276465654, 0.006723914295434952, 0.006565089337527752, 0.0034440821036696434, 0.0024093352258205414, 0.004421172197908163, 0.019776979461312294, 0.008269033394753933], [0.0030701379291713238, 0.09096737951040268, 0.03873591125011444, 0.8303072452545166, 0.00928733590990305, 0.008070147596299648, 0.0032486640848219395, 0.0004064233507961035, 0.002695787698030472, 0.0002233956183772534, 0.003186122514307499, 0.0031291493214666843, 0.0008165419567376375, 0.0005678352317772806, 0.0006343546556308866, 0.00028369069332256913, 0.0003227439883630723, 9.787098861124832e-06, 1.3609283996629529e-05, 0.0002610907831694931, 0.00018067576456815004, 0.003582050558179617], [0.005644379183650017, 0.0031707952730357647, 0.0048032524064183235, 0.003241431899368763, 0.9442934393882751, 0.0006195177556946874, 0.01255773939192295, 0.0011626537889242172, 0.0008444308768957853, 4.6108249080134556e-05, 4.620654362952337e-05, 0.0005542171420529485, 0.0021237728651612997, 0.005656250286847353, 0.0005308903055265546, 0.013778443448245525, 0.00013052404392510653, 0.0004863472131546587, 
1.1375853318895679e-05, 5.612200402538292e-05, 2.551750912971329e-05, 0.00021651813585776836], [0.0063992151990532875, 0.0004931256407871842, 0.000857722305227071, 0.013878283090889454, 0.0017580422572791576, 0.9520428776741028, 0.007284738589078188, 0.0008478928357362747, 0.0003840876743197441, 0.00011337531032040715, 0.00021070889488328248, 2.201535062340554e-05, 2.482756281096954e-05, 0.0008885552524589002, 0.0041129798628389835, 0.0004919184721074998, 0.009655138477683067, 0.00018501278827898204, 0.00015475315740332007, 3.101087349932641e-05, 2.229147867183201e-05, 0.00014157067926134914], [0.005921379663050175, 0.002309576142579317, 0.0016992200398817658, 0.0015635588206350803, 0.00455239275470376, 0.012004473246634007, 0.9504520297050476, 0.0009054269175976515, 0.003786487737670541, 0.00039761466905474663, 0.0007216015364974737, 0.0001683746959315613, 3.532140181050636e-05, 2.3111602786229923e-05, 0.003002566983923316, 0.0005981111316941679, 0.0023596337996423244, 0.008992222137749195, 0.0001897486945381388, 0.00024662440409883857, 6.88170621288009e-05, 1.7183119780384004e-06], [0.003194680204614997, 7.725439354544505e-05, 3.267054125899449e-05, 0.0004530976584646851, 0.00014501357509288937, 0.0015609686961397529, 0.0002462295815348625, 0.9842128753662109, 0.006555814296007156, 0.0007904171943664551, 0.0002650116221047938, 0.0002864715352188796, 2.2026279111742042e-05, 6.957827167752839e-07, 5.09566962136887e-06, 0.00029980417457409203, 8.537283429177478e-05, 2.5305447707069106e-05, 0.0012169289402663708, 0.0004657188546843827, 5.03902010677848e-05, 8.127827641146723e-06], [0.04537517577409744, 0.0008950083865784109, 0.0007584382547065616, 0.00012457587581593543, 0.08908846229314804, 0.002240164438262582, 0.03635029122233391, 0.12612570822238922, 0.6519505381584167, 0.022386102005839348, 0.007132383994758129, 0.0004110320005565882, 0.004375863820314407, 0.0005182099994271994, 2.945731466752477e-05, 0.0003713010810315609, 0.0009523567277938128, 
0.003106968943029642, 0.005777812097221613, 0.001562907942570746, 0.000366466905688867, 0.00010073090379592031], [8.138342673191801e-05, 0.00044052564771845937, 9.748015145305544e-05, 1.5256621736625675e-05, 7.026001549093053e-05, 8.782048826105893e-05, 0.0003505049680825323, 0.01225026324391365, 0.0021409057080745697, 0.9687776565551758, 0.003206807654350996, 0.003515428863465786, 0.0003211090515833348, 2.7863006835104898e-05, 1.4118198123469483e-05, 1.9751405488932505e-05, 3.8105740713945124e-06, 0.0008346149697899818, 0.0004518133355304599, 0.0004010314296465367, 0.0068802800960838795, 1.1266812180110719e-05], [0.00016280345153063536, 0.0014988112961873412, 0.0007458087056875229, 0.0005538575351238251, 0.00012936828716192394, 0.0001851122797233984, 0.002482869429513812, 0.00015730844461359084, 0.051106564700603485, 0.00494797620922327, 0.9189803600311279, 0.004248715937137604, 0.009185553528368473, 0.0003658455389086157, 0.0003162224602419883, 5.98819042352261e-06, 0.0002520863781683147, 3.210339491488412e-05, 0.0002125101600540802, 0.0029924355912953615, 0.00034481287002563477, 0.0010927652474492788], [8.611716475570574e-05, 4.475896275835112e-05, 7.598617230542004e-05, 3.68687033187598e-05, 6.574500002898276e-05, 7.499694675061619e-06, 5.482157939695753e-05, 0.0003474233963061124, 4.156034265179187e-05, 0.002947969129309058, 0.001901748008094728, 0.9898111820220947, 0.001353326253592968, 0.001100377063266933, 0.0001761964667821303, 0.00022733944933861494, 2.638389844378253e-07, 8.181902376236394e-05, 5.88472539675422e-06, 0.0004193976637907326, 0.0005724122747778893, 0.0006413449882529676], [0.0009653830202296376, 0.00016531121218577027, 0.0001438236067770049, 0.0003126661467831582, 0.0007969440193846822, 3.829256456810981e-05, 3.129425385850482e-05, 0.00013589196896646172, 0.0005584380123764277, 0.0005834032199345529, 0.005370591767132282, 0.020270049571990967, 0.940138041973114, 0.02185506373643875, 0.0026959944516420364, 0.0004185372090432793, 
4.980641097063199e-05, 2.451330601616064e-06, 0.0001211456983583048, 1.8986407667398453e-05, 0.001106344279833138, 0.00422146450728178], [6.887098425067961e-05, 0.0020331810228526592, 0.0025518699549138546, 0.002916335593909025, 0.008751695044338703, 0.00195556809194386, 0.000361765967682004, 1.1812149750767276e-05, 4.0974027797346935e-05, 0.00025478395400568843, 0.010034695267677307, 0.01392014604061842, 0.017514100298285484, 0.8618741035461426, 0.06559410691261292, 0.0020243776962161064, 0.0009662856464274228, 5.1200491725467145e-05, 3.0084336231084308e-06, 4.0316092054126784e-05, 2.153754212486092e-05, 0.009009143337607384], [0.0006525893695652485, 0.001974020851776004, 0.002601620275527239, 0.016708247363567352, 0.0013382488396018744, 0.001708822208456695, 0.0032390474807471037, 5.6539411161793396e-05, 7.393099804176018e-05, 0.00010458425094839185, 0.0033995171543210745, 0.003465014975517988, 0.024162208661437035, 0.010039188899099827, 0.9262436628341675, 0.0010736131807789207, 0.002621365012601018, 6.008816490066238e-05, 4.795271888724528e-05, 3.879070845869137e-06, 0.00021589623065665364, 0.0002100313431583345], [0.00025861660833470523, 6.910376396263018e-05, 0.00020745539222843945, 0.00028050810215063393, 0.003265006234869361, 2.8768437914550304e-05, 0.00010117377678398043, 0.0003594366426113993, 2.312454398634145e-06, 9.538006452203263e-06, 3.06119432025298e-06, 0.00012338349188212305, 0.00032258706050924957, 0.011202842928469181, 0.003959876950830221, 0.9746096134185791, 0.0009170915000140667, 0.0038707831408828497, 0.0003031451196875423, 9.86544182524085e-06, 1.8459075363352895e-05, 7.735300459899008e-05], [0.0006179845659062266, 0.00012064705515513197, 0.00014389852003660053, 0.0005067692836746573, 0.000496886670589447, 0.00745469331741333, 0.0020293574780225754, 0.00022815537522546947, 0.0006417690310627222, 1.3566220331995282e-05, 0.0005421447567641735, 1.1397524758649524e-05, 8.006005373317748e-05, 0.0012784089194610715, 0.00751751521602273, 
0.005603861063718796, 0.9412963390350342, 0.016819722950458527, 0.013584799133241177, 0.0007529346621595323, 3.28870810335502e-05, 0.00022623782570008188], [8.292401616927236e-06, 6.993325314397225e-06, 1.418856845702976e-05, 1.8715937812885386e-06, 3.5359473258722574e-05, 1.1457414984761272e-05, 0.0003502687031868845, 0.0002655293792486191, 6.947563633730169e-06, 4.433616777532734e-05, 7.836960094209644e-07, 1.4439617189054843e-05, 6.935719909506588e-08, 3.3221920148207573e-06, 6.371659401338547e-05, 0.006794395390897989, 0.0003442394663579762, 0.9903551340103149, 0.001175168203189969, 0.0004611056938301772, 4.197323869448155e-05, 1.9930243411181436e-07], [0.00022035562142264098, 0.0006093504489399493, 8.964464359451085e-05, 0.00012328443699516356, 4.2905548980343156e-06, 0.00012157453602412716, 0.0001323799806414172, 0.0026848677080124617, 0.0018601977499201894, 0.0016211029142141342, 0.0058855158276855946, 9.878653509076685e-05, 0.0002404003171250224, 3.1293975553126074e-06, 5.735221930081025e-05, 0.0004956115735694766, 0.013026759959757328, 0.004045308567583561, 0.9316272139549255, 0.013277072459459305, 0.02337406575679779, 0.0004017435130663216], [0.0009716919739730656, 0.00035332818515598774, 0.0005719835171476007, 6.397306151484372e-06, 0.00324304704554379, 0.00011640154843917117, 0.0012851342326030135, 0.0021582820918411016, 0.0002634418196976185, 0.0026424399111419916, 0.0012634207960218191, 0.0020032052416354418, 5.27498523297254e-05, 0.002299040788784623, 1.0677343198040035e-05, 0.014682773500680923, 0.0012116729049012065, 0.7672922015190125, 0.011729402467608452, 0.16993680596351624, 0.0013156398199498653, 0.01659010723233223], [2.2621681637247093e-05, 0.00027127450448460877, 8.821595110930502e-05, 2.461815029164427e-06, 3.820373422058765e-06, 3.511853776672069e-07, 1.9609722585300915e-05, 3.16732948704157e-05, 2.9449027351802215e-05, 0.015393882989883423, 0.0001732090167934075, 0.000878028804436326, 0.0022972687147557735, 8.19678189145634e-06, 
2.696280716918409e-05, 5.200298346608179e-06, 1.3525393569580046e-06, 0.0005979145062156022, 0.002058164682239294, 0.00012503660400398076, 0.9778380990028381, 0.00012720238009933382], [0.00011506243026815355, 0.001444746507331729, 0.0026887482963502407, 0.00027165014762431383, 0.0009482965106144547, 2.2677584638586268e-05, 4.108840221306309e-05, 2.7385607609176077e-05, 0.00029382220236584544, 0.00030094265821389854, 0.0045329248532652855, 0.0024486409965902567, 0.005619558971375227, 0.021083367988467216, 9.623099322197959e-05, 0.00018201986677013338, 7.846111839171499e-05, 9.944707562681288e-05, 0.00021998271404299885, 0.013519424945116043, 0.0028953200671821833, 0.9430702328681946], [0.024662991985678673, 0.021209539845585823, 0.06236123666167259, 0.0038606696762144566, 0.04436253383755684, 0.002277157735079527, 0.007791712414473295, 0.00015142328629735857, 0.00013618604862131178, 0.0035683708265423775, 0.001950214384123683, 0.07944479584693909, 0.060871437191963196, 0.07947682589292526, 0.05805259943008423, 0.0017727294471114874, 0.0003380078705959022, 0.004934409633278847, 0.00036636838922277093, 0.0011750842677429318, 0.32212162017822266, 0.21911413967609406]], [[0.0044110482558608055, 0.0167427659034729, 0.0023298519663512707, 0.0031124786473810673, 0.006240712013095617, 0.011949621140956879, 0.018861154094338417, 0.026360414922237396, 0.03525872156023979, 0.059544309973716736, 0.19843022525310516, 0.22937092185020447, 0.2530171573162079, 0.028278373181819916, 0.013314591720700264, 0.031388141214847565, 0.019435295835137367, 0.01989269256591797, 0.006990990601480007, 0.005757163278758526, 0.004380334634333849, 0.004933010321110487], [0.0007063857628963888, 0.17142142355442047, 0.0071572125889360905, 0.024209506809711456, 0.00767725333571434, 0.03159880265593529, 0.05492241680622101, 0.033654943108558655, 0.14610867202281952, 0.15174150466918945, 0.032688938081264496, 0.015825999900698662, 0.2702268362045288, 0.00499830674380064, 0.015865925699472427, 
0.002311925869435072, 0.004800389055162668, 0.0017450416926294565, 0.0021987855434417725, 0.006933912634849548, 0.01231380458921194, 0.0008920360123738647], [0.0009596514282748103, 0.08031441271305084, 0.004887889605015516, 0.013089843094348907, 0.007462597917765379, 0.024642903357744217, 0.08364046365022659, 0.061248984187841415, 0.25094467401504517, 0.1558031588792801, 0.08637683093547821, 0.03401613608002663, 0.16872969269752502, 0.003951646853238344, 0.006127195432782173, 0.0015964633785188198, 0.0034174735192209482, 0.0014248887309804559, 0.0019070154521614313, 0.0038012200966477394, 0.004803343676030636, 0.0008534290827810764], [0.004679026082158089, 0.03190907835960388, 0.004577190615236759, 0.028764724731445312, 0.011344380676746368, 0.03666210174560547, 0.14357194304466248, 0.11328726261854172, 0.11485612392425537, 0.14819031953811646, 0.10593213140964508, 0.09449009597301483, 0.04960091412067413, 0.011371036991477013, 0.020657239481806755, 0.005955561995506287, 0.011992290616035461, 0.013453607447445393, 0.011941289529204369, 0.012776722200214863, 0.0159536674618721, 0.00803329050540924], [0.008286611177027225, 0.04159409925341606, 0.01352725736796856, 0.01247215922921896, 0.016638901084661484, 0.029369400814175606, 0.1249268501996994, 0.059874895960092545, 0.09310033917427063, 0.16089151799678802, 0.1446092277765274, 0.04910274222493172, 0.1197877824306488, 0.023541761562228203, 0.011804805137217045, 0.009159701876342297, 0.013153918087482452, 0.020171957090497017, 0.01752522774040699, 0.008916651830077171, 0.016617074608802795, 0.004927156493067741], [0.0008310533594340086, 0.006291957106441259, 0.0009826861787587404, 0.004271467216312885, 0.0007063343073241413, 0.003229408757761121, 0.005536946002393961, 0.025466520339250565, 0.01846175082027912, 0.0230376198887825, 0.2848023772239685, 0.45724767446517944, 0.13519223034381866, 0.01549304835498333, 0.00767927523702383, 0.0016422139015048742, 0.0014035813510417938, 0.0006149780238047242, 
0.0016015000874176621, 0.0008951760828495026, 0.0009246981353498995, 0.0036874304059892893], [0.004498325753957033, 0.030556391924619675, 0.008696378208696842, 0.01712021976709366, 0.012089678086340427, 0.02292731963098049, 0.028913620859384537, 0.026604825630784035, 0.06250527501106262, 0.08163979649543762, 0.17630033195018768, 0.2107991725206375, 0.16891078650951385, 0.02231557108461857, 0.040452949702739716, 0.014968951232731342, 0.017616981640458107, 0.007487861905246973, 0.006145262159407139, 0.01601141132414341, 0.01206660084426403, 0.011372190900146961], [0.01259735506027937, 0.029349124059081078, 0.006037588231265545, 0.013868821784853935, 0.010247734375298023, 0.028030620887875557, 0.012284520082175732, 0.032689739018678665, 0.017139747738838196, 0.045688167214393616, 0.1166701391339302, 0.13632832467556, 0.2530088424682617, 0.08418760448694229, 0.04244079813361168, 0.04843765124678612, 0.04000808298587799, 0.015516542829573154, 0.023723650723695755, 0.006819931790232658, 0.008322985842823982, 0.016602007672190666], [0.005655260290950537, 0.001258254749700427, 0.0005214944831095636, 0.0025282169226557016, 0.0005894021014682949, 0.002912493422627449, 0.0007556749624200165, 0.009842490777373314, 0.00239315303042531, 0.007289869710803032, 0.10407599061727524, 0.5209177136421204, 0.05381909757852554, 0.07210559397935867, 0.03992818668484688, 0.03156915679574013, 0.028748705983161926, 0.012146820314228535, 0.021218180656433105, 0.006854376755654812, 0.007504359353333712, 0.06736551970243454], [0.005136948078870773, 0.019736751914024353, 0.004065875895321369, 0.005782173480838537, 0.004123158752918243, 0.004503152333199978, 0.00288115325383842, 0.0197182297706604, 0.013061546720564365, 0.02957509458065033, 0.09966348856687546, 0.23827151954174042, 0.08949936926364899, 0.04765109717845917, 0.04099450260400772, 0.06170497462153435, 0.02633105032145977, 0.021587789058685303, 0.06917981803417206, 0.03818969428539276, 0.05925872549414635, 0.09908381849527359], 
[0.00022529780108015984, 0.004572212230414152, 0.00045459700049832463, 0.002252912614494562, 0.0011438489891588688, 0.003342646174132824, 0.005402215290814638, 0.026233471930027008, 0.01202717050909996, 0.019796201959252357, 0.16568510234355927, 0.36273565888404846, 0.09945081919431686, 0.04056056961417198, 0.0285326074808836, 0.02931758388876915, 0.028373297303915024, 0.032579489052295685, 0.0546475313603878, 0.01487115677446127, 0.016731424257159233, 0.051064133644104004], [0.00024579884484410286, 0.00607285974547267, 0.0004123352118767798, 0.00180328160058707, 0.0014521675184369087, 0.0029195824172347784, 0.007537625264376402, 0.028304148465394974, 0.012811338528990746, 0.026127856224775314, 0.10740020126104355, 0.19830860197544098, 0.07407770305871964, 0.02346190996468067, 0.021916329860687256, 0.04245225340127945, 0.030797667801380157, 0.10233598947525024, 0.1457749307155609, 0.03226831555366516, 0.055559657514095306, 0.07795946300029755], [0.002546858275309205, 0.0068810866214334965, 0.0035053149331361055, 0.002135833725333214, 0.003968475852161646, 0.001788873691111803, 0.002928958274424076, 0.01800374500453472, 0.005408979952335358, 0.010137644596397877, 0.021082084625959396, 0.03713342547416687, 0.01572556421160698, 0.015211155638098717, 0.009232963435351849, 0.04272109642624855, 0.020194221287965775, 0.09965752065181732, 0.34521159529685974, 0.06675135344266891, 0.14009930193424225, 0.12967397272586823], [0.017311813309788704, 0.009649314917623997, 0.009250009432435036, 0.0036165695637464523, 0.010260794311761856, 0.003098649438470602, 0.00648414297029376, 0.007250858470797539, 0.006915680132806301, 0.006227027624845505, 0.01884530484676361, 0.027224380522966385, 0.012021023780107498, 0.015694163739681244, 0.00907963141798973, 0.05186194181442261, 0.029013898223638535, 0.12751443684101105, 0.16004978120326996, 0.10638632625341415, 0.08478695154190063, 0.27745723724365234], [0.0032679110299795866, 0.011582087725400925, 0.013934272341430187, 
0.00864668283611536, 0.013329096138477325, 0.005568866152316332, 0.011188366450369358, 0.008400444872677326, 0.004097770433872938, 0.012410960160195827, 0.009983827359974384, 0.016344768926501274, 0.006049014162272215, 0.011368777602910995, 0.0153255145996809, 0.02582441456615925, 0.02450958453118801, 0.09378113597631454, 0.07847579568624496, 0.13702523708343506, 0.2095610797405243, 0.27932432293891907], [0.03743952885270119, 0.03496428579092026, 0.07994982600212097, 0.02122117020189762, 0.05675341561436653, 0.019726522266864777, 0.02793373540043831, 0.007451597601175308, 0.013554940931499004, 0.014229383319616318, 0.01049931813031435, 0.006561717949807644, 0.012187904678285122, 0.012589645572006702, 0.013384703546762466, 0.034162361174821854, 0.03731052204966545, 0.06224583089351654, 0.05909918248653412, 0.11113820225000381, 0.19233082234859467, 0.13526543974876404], [0.01102815568447113, 0.00874564703553915, 0.017572183161973953, 0.01541934348642826, 0.011609064415097237, 0.0062355236150324345, 0.004537015687674284, 0.007209928706288338, 0.002308301627635956, 0.004223715513944626, 0.02622629702091217, 0.07480230927467346, 0.015062941238284111, 0.019979558885097504, 0.013345609419047832, 0.021946720778942108, 0.009039357304573059, 0.012514094822108746, 0.021082181483507156, 0.03279092535376549, 0.05854027345776558, 0.6057808995246887], [0.0166622381657362, 0.01571471057832241, 0.03968474641442299, 0.02607293240725994, 0.06598871201276779, 0.01633286103606224, 0.007042448502033949, 0.0027772204484790564, 0.0029138848185539246, 0.002703011967241764, 0.002249258104711771, 0.0045651625841856, 0.00362491887062788, 0.0055305915884673595, 0.019581614062190056, 0.05557093769311905, 0.040440842509269714, 0.029731974005699158, 0.0261455699801445, 0.1235632598400116, 0.1478586494922638, 0.345244437456131], [0.0778312012553215, 0.03117392770946026, 0.13595916330814362, 0.0685829371213913, 0.11962637305259705, 0.0716443806886673, 0.011973035521805286, 0.007273272145539522, 
0.0011549674673005939, 0.004321664106100798, 0.0013348672073334455, 0.00124038674402982, 0.0043409173376858234, 0.01604936644434929, 0.019885700196027756, 0.04159479960799217, 0.04145580530166626, 0.023607302457094193, 0.031298648566007614, 0.02356753684580326, 0.10617759078741074, 0.15990613400936127], [0.24761398136615753, 0.005093734245747328, 0.02713126689195633, 0.03386814519762993, 0.02479744330048561, 0.03059713914990425, 0.004660670179873705, 0.007672627456486225, 0.001386075047776103, 0.002102577593177557, 0.0024330527521669865, 0.004921266343444586, 0.0008602248854003847, 0.010728344321250916, 0.01722954586148262, 0.030041133984923363, 0.033241525292396545, 0.019894592463970184, 0.022941015660762787, 0.030985454097390175, 0.07934102416038513, 0.3624591827392578], [0.07979476451873779, 0.04821021482348442, 0.05408142879605293, 0.051647599786520004, 0.046000659465789795, 0.031694427132606506, 0.014908875338733196, 0.03419815003871918, 0.009807968512177467, 0.022922053933143616, 0.007691939361393452, 0.008813048712909222, 0.0060117668472230434, 0.008915443904697895, 0.014823753386735916, 0.02177397906780243, 0.01078562531620264, 0.011820664629340172, 0.03705397620797157, 0.04426174983382225, 0.26139727234840393, 0.1733846664428711], [0.04281037673354149, 0.040524449199438095, 0.03090498223900795, 0.056880805641412735, 0.03157810866832733, 0.08086927980184555, 0.10652995854616165, 0.11482033133506775, 0.05043644458055496, 0.04364360123872757, 0.03496047481894493, 0.023774713277816772, 0.012986495159566402, 0.012715304270386696, 0.01253749430179596, 0.00961393490433693, 0.012901097536087036, 0.018285097554326057, 0.025802886113524437, 0.03114504925906658, 0.10093564540147781, 0.10534343868494034]], [[0.2757936120033264, 0.048418644815683365, 0.08424628525972366, 0.05557674542069435, 0.07875151932239532, 0.03597874939441681, 0.15598058700561523, 0.1228349581360817, 0.010189495049417019, 0.04969767481088638, 0.015244116075336933, 0.015599348582327366, 
0.006014774087816477, 0.009962745010852814, 0.0033212548587471247, 0.006729365326464176, 0.003174643265083432, 0.010607335716485977, 0.0027321602683514357, 0.0012001992436125875, 0.004564065486192703, 0.0033817437943071127], [0.02780275046825409, 0.13364048302173615, 0.19596344232559204, 0.0605822391808033, 0.023974254727363586, 0.03983229771256447, 0.12352042645215988, 0.026574360206723213, 0.02335835061967373, 0.029117310419678688, 0.08075226843357086, 0.06125849485397339, 0.06884928047657013, 0.02022862620651722, 0.02020297944545746, 0.013192176818847656, 0.009938674978911877, 0.02210319973528385, 0.0024324539117515087, 0.0052749696187675, 0.00571666331961751, 0.005684257484972477], [0.09622889757156372, 0.2167840451002121, 0.11317151039838791, 0.08297175914049149, 0.0766100361943245, 0.04069994017481804, 0.08570332825183868, 0.14752157032489777, 0.0026553133502602577, 0.027739187702536583, 0.01648099161684513, 0.018515296280384064, 0.013403158634901047, 0.007804789114743471, 0.001635249238461256, 0.008945753797888756, 0.00276000308804214, 0.006246563512831926, 0.007900824770331383, 0.0005746171809732914, 0.018938269466161728, 0.006708931643515825], [0.02095114439725876, 0.0214063860476017, 0.0966327115893364, 0.06455720961093903, 0.17436477541923523, 0.023683208972215652, 0.5357488393783569, 0.004079753998667002, 0.008107461035251617, 0.0013733747182413936, 0.0027728425338864326, 0.0009472946985624731, 0.003424981376156211, 0.0016902079805731773, 0.011696211993694305, 0.0019290451891720295, 0.009295639581978321, 0.011437853798270226, 0.0009699770016595721, 0.0019660289399325848, 0.0012640844797715545, 0.0017009855946525931], [0.07083606719970703, 0.06580201536417007, 0.08137533068656921, 0.1440202295780182, 0.03711467608809471, 0.04653146490454674, 0.07238659262657166, 0.3297636806964874, 0.008916892111301422, 0.021714767441153526, 0.006220896262675524, 0.0066415066830813885, 0.0016156486235558987, 0.013174746185541153, 0.003230274422094226, 
0.014167175628244877, 0.005358358845114708, 0.007739354390650988, 0.03229125589132309, 0.0028224156703799963, 0.016585102304816246, 0.01169151533395052], [0.014522652141749859, 0.01019839383661747, 0.02710796147584915, 0.028046922758221626, 0.03497770428657532, 0.025306671857833862, 0.7940109372138977, 0.005875582806766033, 0.015309503301978111, 0.003817377844825387, 0.011497932486236095, 0.0016445540823042393, 0.0018695699982345104, 0.00027755252085626125, 0.006845163647085428, 0.00048751026042737067, 0.009782305918633938, 0.005500687286257744, 0.0008389209979213774, 0.00042176657007075846, 0.0014431398594751954, 0.0002172561507904902], [0.06333833187818527, 0.013690064661204815, 0.013119657523930073, 0.012974069453775883, 0.017480380833148956, 0.03430594876408577, 0.020760543644428253, 0.6403366923332214, 0.009001613594591618, 0.06458691507577896, 0.012128316797316074, 0.029108460992574692, 0.001623416319489479, 0.006621899548918009, 0.0005457547376863658, 0.02507038414478302, 0.0023899900261312723, 0.005691409111022949, 0.009950706735253334, 0.0032205949537456036, 0.006886506453156471, 0.007168298587203026], [0.0014804115053266287, 0.002088971436023712, 0.004226377233862877, 0.0036765318363904953, 0.012898390181362629, 0.006801176350563765, 0.2057342380285263, 0.0018803899874910712, 0.7051516771316528, 0.010215822607278824, 0.019784370437264442, 0.00033297474146820605, 0.0054193176329135895, 0.00022997880296315998, 0.004026590380817652, 7.376004941761494e-05, 0.010011349804699421, 0.0013894279254600406, 0.0011990189086645842, 0.0015964476624503732, 0.0015972068067640066, 0.00018553163681644946], [0.0014094754587858915, 0.006539949681609869, 0.005048539489507675, 0.0007061885553412139, 0.0010863045463338494, 0.0024674709420651197, 0.0020113978534936905, 0.054308004677295685, 0.05047593265771866, 0.4696263074874878, 0.02908717840909958, 0.22210751473903656, 0.018605411052703857, 0.017643513157963753, 0.0021607927046716213, 0.012389487586915493, 
0.0015915252733975649, 0.013550025410950184, 0.008819748647511005, 0.028263328596949577, 0.04950346797704697, 0.002598388819023967], [0.010278266854584217, 0.00733824260532856, 0.03192343935370445, 0.015181555412709713, 0.015339161269366741, 0.013644998893141747, 0.06083621457219124, 0.006122955121099949, 0.4252411425113678, 0.030709248036146164, 0.26094895601272583, 0.016009142622351646, 0.05877259746193886, 0.009724695235490799, 0.01666950061917305, 0.0011176669504493475, 0.005104908253997564, 0.001037019770592451, 0.0021357377991080284, 0.004715043120086193, 0.003612434957176447, 0.003537115640938282], [0.005380899179726839, 0.006073659285902977, 0.015798937529325485, 0.004069850780069828, 0.0038446588441729546, 0.004073528107255697, 0.008281417191028595, 0.03134360909461975, 0.0021221644710749388, 0.16048882901668549, 0.02481614239513874, 0.5543269515037537, 0.010775557719171047, 0.10146449506282806, 0.0030845922883599997, 0.025338411331176758, 0.000787271885201335, 0.01874113827943802, 0.0004870383709203452, 0.0028237556107342243, 0.00859684869647026, 0.007280200254172087], [0.007353213150054216, 0.006572891026735306, 0.010243273340165615, 0.006457468960434198, 0.008790976367890835, 0.001906168065033853, 0.008025355637073517, 0.003087374148890376, 0.03038387931883335, 0.013327849097549915, 0.07385037839412689, 0.016750382259488106, 0.6510971188545227, 0.04026533663272858, 0.06595339626073837, 0.011267326772212982, 0.017659049481153488, 0.0022904325742274523, 0.008393822237849236, 0.0007539482903666794, 0.01402236707508564, 0.0015480640577152371], [0.0013711584033444524, 0.0029133029747754335, 0.009369160048663616, 0.014970424585044384, 0.0032281819730997086, 0.003273281967267394, 0.0017566792666912079, 0.014149785041809082, 0.0006621361244469881, 0.009751259349286556, 0.004918830003589392, 0.24238066375255585, 0.020187104120850563, 0.395119845867157, 0.03792301192879677, 0.20049640536308289, 0.0016937537584453821, 0.023482678458094597, 0.0007569440058432519, 
0.002778905676677823, 0.0005272445851005614, 0.008289194665849209], [0.008511683903634548, 0.05264299735426903, 0.054388612508773804, 0.001093920785933733, 0.03651784360408783, 0.005137376952916384, 0.038248829543590546, 0.0017526240553706884, 0.0013774331891909242, 0.017960576340556145, 0.0060064829885959625, 0.005712820217013359, 0.389632910490036, 0.026321066543459892, 0.025917401537299156, 0.027874575927853584, 0.06803271174430847, 0.08930390328168869, 0.01563834771513939, 0.0010001123882830143, 0.12252168357372284, 0.004405957646667957], [0.003487891051918268, 0.0031554587185382843, 0.005308148451149464, 0.008790374733507633, 0.038847438991069794, 0.003303613979369402, 0.007124222815036774, 0.011054451577365398, 0.0007646627491340041, 0.003511629765853286, 0.0007212608470581472, 0.007204850669950247, 0.009080706164240837, 0.05203799530863762, 0.03969385474920273, 0.44503387808799744, 0.027344847097992897, 0.2619381844997406, 0.031157029792666435, 0.014957912266254425, 0.008007156662642956, 0.017474517226219177], [0.0034779023844748735, 0.023248394951224327, 0.01810104213654995, 0.013576571829617023, 0.030292220413684845, 0.0036356300115585327, 0.03929601609706879, 0.0017207672353833914, 0.00681394012644887, 0.0010728990891948342, 0.0026772846467792988, 0.00016713124932721257, 0.018173374235630035, 0.0031059784814715385, 0.06335271894931793, 0.00894126482307911, 0.3397490978240967, 0.06683743745088577, 0.3021208345890045, 0.0053941295482218266, 0.04401590675115585, 0.004229459445923567], [0.001451236312277615, 0.0022298377007246017, 0.0037480955943465233, 0.001671996433287859, 0.005475657992064953, 0.0023156567476689816, 0.007545242551714182, 0.005390701349824667, 0.0005815362092107534, 0.0049811117351055145, 0.0011475298088043928, 0.008569483645260334, 0.0011804635869339108, 0.005275318399071693, 0.008739101700484753, 0.07437710464000702, 0.015639012679457664, 0.7651823163032532, 0.018882490694522858, 0.04841846227645874, 0.007655997760593891, 
0.009541701525449753], [0.0004549560253508389, 0.002376204589381814, 0.000247303512878716, 0.00031626768759451807, 0.0005726368399336934, 0.0003220604849047959, 0.0002862492110580206, 0.0010668754111975431, 0.0007481171051040292, 0.0008484928985126317, 0.0017602958250790834, 0.0002904975553974509, 0.005588183179497719, 0.0003721211978700012, 0.0016701078275218606, 0.004036647733300924, 0.029845645651221275, 0.010029624216258526, 0.8327178955078125, 0.004419195931404829, 0.09702225774526596, 0.005008204840123653], [7.62503404985182e-05, 0.0011596197728067636, 0.002170642837882042, 0.0005454849451780319, 0.0019129030406475067, 0.0005904970457777381, 0.006133467424660921, 0.00045292533468455076, 0.002275955630466342, 0.003830237779766321, 0.002312996191903949, 0.003209116170182824, 0.001305549987591803, 0.00507991062477231, 0.002973003778606653, 0.003522323677316308, 0.004098959732800722, 0.46114420890808105, 0.005281659308820963, 0.42255252599716187, 0.007483588997274637, 0.061888255178928375], [9.429028978047427e-06, 0.002340038539841771, 8.456299110548571e-05, 4.6878565626684576e-05, 0.00016705608868505806, 1.5025844732008409e-05, 7.375286077149212e-05, 0.00010490386193851009, 0.0006962513434700668, 0.0011089628096669912, 0.0002874682249967009, 0.000384504470275715, 0.010139023885130882, 0.00012929509102832526, 0.001940517220646143, 0.00048616674030199647, 0.001103458576835692, 0.005169052630662918, 0.040847260504961014, 0.006937317084521055, 0.9271278977394104, 0.0008010548772290349], [0.003553831484168768, 0.00933706946671009, 0.0418483167886734, 0.02764306589961052, 0.03359074890613556, 0.013026307336986065, 0.011960607953369617, 0.004683362320065498, 0.0063263000920414925, 0.002037992002442479, 0.0038715917617082596, 0.010082203894853592, 0.005922973155975342, 0.041798368096351624, 0.010667134076356888, 0.013558330945670605, 0.002727522049099207, 0.020115984603762627, 0.007080344017595053, 0.18950849771499634, 0.004907709546387196, 0.5357517004013062], 
[0.0025095611345022917, 0.10369903594255447, 0.04787447676062584, 0.004183527547866106, 0.03689098358154297, 0.001566977589391172, 0.023152654990553856, 0.001378776622004807, 0.0012283653486520052, 0.0054991040378808975, 0.0027156714349985123, 0.0030244493391364813, 0.04439517855644226, 0.0157898161560297, 0.021259447559714317, 0.004363188985735178, 0.0071001132018864155, 0.03259649500250816, 0.010435877367854118, 0.005594021175056696, 0.6075114011764526, 0.017230814322829247]], [[0.14716461300849915, 0.029200099408626556, 0.06113290414214134, 0.06937361508607864, 0.16992923617362976, 0.14826327562332153, 0.047386880964040756, 0.05241766944527626, 0.023430511355400085, 0.020586341619491577, 0.010828484781086445, 0.06612106412649155, 0.017715714871883392, 0.0341794453561306, 0.03256400674581528, 0.03018599934875965, 0.008929756470024586, 0.005372851621359587, 0.0007354066474363208, 0.0031848980579525232, 0.0054028998129069805, 0.015894273295998573], [0.08460115641355515, 0.16495458781719208, 0.2955043911933899, 0.040211211889982224, 0.015761034563183784, 0.021369056776165962, 0.012780343182384968, 0.007949123159050941, 0.013373794965445995, 0.026365725323557854, 0.010561684146523476, 0.0023209648206830025, 0.008204960264265537, 0.0139618543908, 0.009312472306191921, 0.007453028578311205, 0.012401201762259007, 0.0067330640740692616, 0.046217162162065506, 0.00970506202429533, 0.13623462617397308, 0.054023489356040955], [0.041340406984090805, 0.6257864236831665, 0.12870551645755768, 0.03250662982463837, 0.0052743935957551, 0.00870589166879654, 0.0070306770503520966, 0.008836059831082821, 0.007740632630884647, 0.013201220892369747, 0.008597140200436115, 0.0011276800651103258, 0.016391277313232422, 0.007072219625115395, 0.0017704651691019535, 0.0021789276506751776, 0.002668068278580904, 0.0010345001937821507, 0.027659762650728226, 0.0026283825282007456, 0.034406568855047226, 0.015337185934185982], [0.016812080517411232, 0.11476957052946091, 0.45653069019317627, 
0.08976822346448898, 0.07668585330247879, 0.018982602283358574, 0.013936079107224941, 0.0031013323459774256, 0.002425069222226739, 0.015511849895119667, 0.009160211309790611, 0.012348583899438381, 0.02257494069635868, 0.030165433883666992, 0.006817093584686518, 0.014778654091060162, 0.0015567553928121924, 0.0035127217415720224, 0.0024081014562398195, 0.0031441114842891693, 0.025766078382730484, 0.059243883937597275], [0.06832564622163773, 0.05379423499107361, 0.08696908503770828, 0.3544462323188782, 0.04853719845414162, 0.09140145033597946, 0.008199475705623627, 0.007679418195039034, 0.004667759872972965, 0.0007219198741950095, 0.005878104362636805, 0.011387072503566742, 0.06107962131500244, 0.028932329267263412, 0.02764109894633293, 0.019734149798750877, 0.04692656919360161, 0.0012240061769261956, 0.008240005932748318, 0.0028736412059515715, 0.006433533970266581, 0.05490739643573761], [0.005237597972154617, 0.03573343902826309, 0.2629840672016144, 0.07468868046998978, 0.39052343368530273, 0.01531069353222847, 0.1538318693637848, 0.003109064418822527, 0.003175367135554552, 0.006049713119864464, 0.0010567255085334182, 0.000733479973860085, 0.004967091139405966, 0.0056744953617453575, 0.006211970932781696, 0.017063625156879425, 0.0008555318927392364, 0.003804844804108143, 0.0005876368377357721, 0.0001760823797667399, 0.00575683731585741, 0.0024676683824509382], [0.010679186321794987, 0.005276177544146776, 0.06726197898387909, 0.041028182953596115, 0.007407524157315493, 0.805940568447113, 0.002263123169541359, 0.01645652763545513, 0.002410750836133957, 0.0012755297357216477, 0.0003388605546206236, 0.00039437247323803604, 0.0002582780143711716, 0.02144699916243553, 0.0007066485704854131, 0.002568980446085334, 0.00808822549879551, 0.0006883906899020076, 0.001608754973858595, 0.001465272274799645, 0.0005214340053498745, 0.0019143001409247518], [0.00027120334561914206, 0.0003744521818589419, 0.00209184642881155, 0.00783549714833498, 0.40607210993766785, 
0.010105512104928493, 0.5048008561134338, 0.0054057505913078785, 0.03732583299279213, 0.00041972583858296275, 0.004740943666547537, 0.000900651270058006, 0.0003952096158172935, 5.347483966033906e-05, 0.008825444616377354, 0.002331298775970936, 0.003796837292611599, 0.003782734740525484, 0.00023193337256088853, 0.00014216125418897718, 7.311326044145972e-05, 2.3425231120199896e-05], [0.0010541295632719994, 0.0013020685873925686, 0.00038052984746173024, 0.0030048335902392864, 0.0013633802300319076, 0.0021714456379413605, 0.001856630900874734, 0.9571625590324402, 0.0011461444664746523, 0.02488475665450096, 9.046222839970142e-05, 0.0006451125373132527, 6.784495781175792e-05, 0.00014264181663747877, 2.8407195713953115e-05, 0.003166112583130598, 4.5259312173584476e-05, 0.0002719905460253358, 0.0008037805091589689, 6.470607331721112e-05, 0.000334277719957754, 1.2878032066510059e-05], [0.005063208285719156, 0.001882398035377264, 0.010385118424892426, 0.004482433665543795, 0.14159205555915833, 0.006037842482328415, 0.38485923409461975, 0.009061366319656372, 0.2771797180175781, 0.01317201554775238, 0.06557933986186981, 0.0019998105708509684, 0.0026625199243426323, 0.0009527459624223411, 0.011477978900074959, 0.004641332197934389, 0.012096823193132877, 0.035171400755643845, 0.005788401700556278, 0.003140021115541458, 0.002245731186121702, 0.0005285457009449601], [0.0018325380515307188, 0.0012551330728456378, 0.002442982280626893, 0.0008769225678406656, 0.0011599462013691664, 0.005954191088676453, 0.0028096239548176527, 0.2536346912384033, 0.021358348429203033, 0.655007004737854, 0.004094877280294895, 0.011164668016135693, 0.00034272385528311133, 0.003447936149314046, 0.00014828321582172066, 0.002872183220461011, 0.00015871385403443128, 0.005276915151625872, 0.007225895766168833, 0.008977263234555721, 0.009585359133780003, 0.00037387022166512907], [0.0005233067786321044, 0.010669717565178871, 0.003371279686689377, 0.006440449506044388, 0.0035085193812847137, 
0.002717492403462529, 0.058717645704746246, 0.006366114132106304, 0.49012741446495056, 0.024090705439448357, 0.29704123735427856, 0.00892728753387928, 0.02330445684492588, 0.0011550234630703926, 0.007835740223526955, 0.00024489453062415123, 0.003734409576281905, 0.002470343839377165, 0.005457544699311256, 0.03634796291589737, 0.005621200427412987, 0.0013273677323013544], [0.00045480323024094105, 0.003631328232586384, 0.009667680598795414, 0.007798410020768642, 0.0016574953915551305, 0.0003623933589551598, 0.0015975688584148884, 0.025539543479681015, 0.001971995923668146, 0.16559015214443207, 0.007838800549507141, 0.5802575349807739, 0.04410411790013313, 0.055801596492528915, 0.01565566100180149, 0.01527861412614584, 9.604280785424635e-05, 0.007168901152908802, 0.0011690460378304124, 0.0066222501918673515, 0.04418624937534332, 0.0035497653298079967], [0.0009680798393674195, 0.0014680036110803485, 0.001064424286596477, 0.008945499546825886, 0.005927962716668844, 0.0013070330023765564, 0.004383846651762724, 0.002380519174039364, 0.0049779280088841915, 0.009933028370141983, 0.08218927681446075, 0.017406461760401726, 0.779880702495575, 0.007519841659814119, 0.03285926207900047, 0.002322055632248521, 0.0040423148311674595, 0.0006958866724744439, 0.012207899242639542, 0.0005619195871986449, 0.017066432163119316, 0.0018916348926723003], [0.0002945025626104325, 0.010294959880411625, 0.005486441310495138, 0.009966176934540272, 0.007206284441053867, 0.01624871790409088, 0.004728613421320915, 0.002282207366079092, 0.0008340853382833302, 0.0206705741584301, 0.0026737339794635773, 0.020863762125372887, 0.06198233738541603, 0.6113293766975403, 0.05142178386449814, 0.09949637204408646, 0.010361370630562305, 0.0427645705640316, 0.0016726773465052247, 0.0071147591806948185, 0.0032480803783982992, 0.009058579802513123], [0.00028094660956412554, 0.00011572756920941174, 0.00011588617053348571, 0.0006489026709459722, 0.005954608786851168, 0.0005954154185019433, 0.007612916175276041, 
7.84289113653358e-06, 0.0003596855094656348, 2.2551141228177585e-05, 0.0013360166922211647, 0.0008801223011687398, 0.027237214148044586, 0.0029183723963797092, 0.6595015525817871, 0.015371864661574364, 0.24986158311367035, 0.025093162432312965, 0.0014734516153112054, 9.587448585079983e-05, 0.00034740875707939267, 0.00016887986566871405], [0.00010281401046086103, 0.0003084783675149083, 0.00022889366664458066, 0.0004658237739931792, 0.0008591726655140519, 0.0016737532569095492, 0.0010483753867447376, 0.0015685827238485217, 2.4030219719861634e-05, 0.0026771242264658213, 1.1448951227066573e-05, 0.0005399114452302456, 0.00048165745101869106, 0.020377013832330704, 0.009245243854820728, 0.8370846509933472, 0.008437166921794415, 0.11064010113477707, 0.0031856782734394073, 0.00011526005255291238, 0.0008007797296158969, 0.00012404931476339698], [0.00029834831366315484, 0.00022906869708094746, 8.462797268293798e-05, 0.000185975237400271, 0.0001132557590608485, 0.0016475495649501681, 0.00028599172946996987, 9.23280167626217e-05, 0.0005413411417976022, 3.4660766687011346e-05, 0.0001669271441642195, 3.066302451770753e-05, 0.0003777179808821529, 0.0016918211476877332, 0.0201280377805233, 0.00573883019387722, 0.8879752159118652, 0.014581114985048771, 0.06353887170553207, 0.001890993327833712, 0.00022541280486620963, 0.00014124078734312207], [4.376746801426634e-06, 2.407386318736826e-06, 2.232252518297173e-05, 2.0715428036055528e-05, 0.0009278089855797589, 0.0002712682180572301, 0.0012273438042029738, 0.0004708873457275331, 4.933790842187591e-05, 0.0002920328115578741, 1.3317240700416733e-05, 0.00037284567952156067, 2.365914724578033e-06, 0.00028109870618209243, 0.005049473140388727, 0.068656325340271, 0.0030494867824018, 0.9173521995544434, 0.0011403911048546433, 0.0006939341546967626, 9.11869210540317e-05, 8.861741662258282e-06], [8.63802051753737e-05, 0.0005836630589328706, 2.4966959244920872e-05, 0.00014902916154824197, 5.995805622660555e-05, 7.278730481630191e-05, 
0.00035537697840481997, 0.0021374893840402365, 0.00014030374586582184, 0.000913094962015748, 0.00013272941578179598, 7.2940074460348114e-06, 0.0001545182167319581, 3.726777504198253e-05, 0.00029206500039435923, 0.0016609304584562778, 0.0028286927845329046, 0.006394504569470882, 0.9741031527519226, 0.0003430839569773525, 0.009461592882871628, 6.097141158534214e-05], [0.0005787270492874086, 0.0006865042960271239, 0.007169890217483044, 0.0009111377294175327, 0.008053451776504517, 0.0008418304496444762, 0.009780783206224442, 0.0007045858656056225, 0.0031298997346311808, 0.002399763558059931, 0.0040456331335008144, 0.003943629562854767, 0.00032620213460177183, 0.0069872415624558926, 0.00792851485311985, 0.022914640605449677, 0.00973726436495781, 0.6138812303543091, 0.01171254925429821, 0.22014059126377106, 0.01249866932630539, 0.05162729695439339], [0.00012022055307170376, 0.0011542461579665542, 0.0008003888069652021, 0.0004675414820667356, 0.00014154693053569645, 5.864448394277133e-05, 0.00043498107697814703, 0.000352345232386142, 0.00032824810477904975, 0.004366881214082241, 0.000730538391508162, 0.0005886211292818189, 0.0015889337519183755, 0.00029491379973478615, 0.0015507686184719205, 0.00053321075392887, 0.00033486797474324703, 0.009932049550116062, 0.0680762529373169, 0.005310076288878918, 0.8995264172554016, 0.0033083599992096424]], [[0.010750791057944298, 0.29123905301094055, 0.08923650532960892, 0.10095120221376419, 0.027561575174331665, 0.05764489993453026, 0.021351724863052368, 0.13645410537719727, 0.013027245178818703, 0.16043776273727417, 0.004865737631917, 0.013775564730167389, 0.03465108200907707, 0.01550363376736641, 0.0012009458150714636, 0.0023258982691913843, 0.0003962703631259501, 0.0006081777391955256, 0.0008329310221597552, 0.0007585666608065367, 0.014367838390171528, 0.002058502519503236], [0.0003944916243199259, 0.7358107566833496, 0.007644988596439362, 0.030485399067401886, 0.0021736857015639544, 0.0006970988470129669, 0.0008629861404187977, 
0.005296036135405302, 0.00047788445954211056, 0.03480987995862961, 0.00012338761007413268, 7.235730299726129e-05, 0.01954871043562889, 0.00022020757023710757, 0.0007567502907477319, 0.0006708676228299737, 0.0001409184478688985, 0.00030737370252609253, 0.0017938808305189013, 0.0009562516352161765, 0.15634793043136597, 0.00040829335921444], [0.028758497908711433, 0.31101906299591064, 0.11878939718008041, 0.09852050244808197, 0.005923965945839882, 0.013236298225820065, 0.08519008755683899, 0.051277074962854385, 0.045313138514757156, 0.15630336105823517, 0.014888747595250607, 0.01569398306310177, 0.008257325738668442, 0.0038222954608500004, 0.0005284266080707312, 0.00016389692609664053, 0.00022213805641513318, 0.0012126285582780838, 0.0005498992395587265, 0.0016680466942489147, 0.035615891218185425, 0.0030452280770987272], [0.015581213869154453, 0.2918006181716919, 0.3106229603290558, 0.01846718229353428, 0.0039519439451396465, 0.0742291659116745, 0.10079354792833328, 0.010739633813500404, 0.0074760159477591515, 0.06319267302751541, 0.012153316289186478, 0.007417421322315931, 0.025255290791392326, 0.020969852805137634, 0.0005025856662541628, 0.0005716083105653524, 0.0004203318967483938, 0.004239837173372507, 0.0006590135744772851, 0.001104087452404201, 0.022527512162923813, 0.00732422387227416], [0.008662655018270016, 0.11846129596233368, 0.34108150005340576, 0.0583677738904953, 0.00633824011310935, 0.30410730838775635, 0.06876129657030106, 0.009504060260951519, 0.004750819876790047, 0.0187577735632658, 0.003616300178691745, 0.004672475159168243, 0.0066895498894155025, 0.02254384011030197, 0.0026256106793880463, 0.002081190701574087, 0.0015159574104472995, 0.0054914867505431175, 0.00022641247778665274, 0.0011716583976522088, 0.0028370770160108805, 0.007735690101981163], [0.015430457890033722, 0.0759458839893341, 0.02276749536395073, 0.042217452079057693, 0.006771250162273645, 0.013181626796722412, 0.0076325456611812115, 0.7617606520652771, 0.0018070732476189733, 
0.027063744142651558, 0.001661121379584074, 0.015575176104903221, 0.0021485737524926662, 0.0023206931073218584, 4.3312713387422264e-05, 0.00037829234497621655, 1.2052320016664453e-05, 5.727264942834154e-05, 0.00010746121552074328, 5.426914867712185e-05, 0.0016258797841146588, 0.0014377225888893008], [0.010110599920153618, 0.13872027397155762, 0.06341226398944855, 0.023281114175915718, 0.09718286991119385, 0.5501782298088074, 0.047417961061000824, 0.053275514394044876, 0.003977657295763493, 0.004984660539776087, 0.0009432855877093971, 0.00022729908232577145, 0.0012026031035929918, 0.00038390845293179154, 0.00013260387640912086, 0.0014465939020738006, 0.0013031736016273499, 0.00023098224482964724, 0.00014527217717841268, 3.56448108504992e-05, 0.000955231545958668, 0.0004521957889664918], [0.005462281405925751, 0.014020869508385658, 0.011845891363918781, 0.0025018402375280857, 0.0012606453383341432, 0.01981428451836109, 0.010090525262057781, 0.818187415599823, 0.002313615521416068, 0.09524387121200562, 0.0015910750953480601, 0.0016806945204734802, 0.003484526416286826, 0.006278125569224358, 0.00019824669288937002, 0.0018327025463804603, 7.006955274846405e-05, 0.0016461770283058286, 0.0019230575999245048, 7.207550515886396e-05, 0.00039795355405658484, 8.411578164668754e-05], [0.043169986456632614, 0.07489904761314392, 0.02136938087642193, 0.005400495138019323, 0.04815703257918358, 0.11590716987848282, 0.10607986152172089, 0.29350799322128296, 0.08122272044420242, 0.0973556637763977, 0.03545030206441879, 0.01128329336643219, 0.027685759589076042, 0.0037169659044593573, 0.0016774630639702082, 0.007672353647649288, 0.010953540913760662, 0.003572591347619891, 0.006847613491117954, 0.0006156249437481165, 0.003119552740827203, 0.0003355468506924808], [0.0330628864467144, 0.010448133572936058, 0.023907264694571495, 0.008341378532350063, 0.0052074529230594635, 0.007529302034527063, 0.03199280425906181, 0.7900381088256836, 0.04313751310110092, 0.01729745604097843, 
0.005756879225373268, 0.003054644213989377, 0.003716090926900506, 0.0015388475731015205, 0.0015017602127045393, 0.0021744382102042437, 0.00036451604682952166, 0.0012577746529132128, 0.008044647052884102, 0.0007617371738888323, 0.0006749026360921562, 0.00019147468265146017], [0.05745357275009155, 0.009211055003106594, 0.01599728874862194, 0.008159126155078411, 0.004994607530534267, 0.002109949942678213, 0.034007951617240906, 0.16418179869651794, 0.2680346369743347, 0.087002232670784, 0.14404550194740295, 0.06408394128084183, 0.028275128453969955, 0.018045131117105484, 0.0064899190329015255, 0.002721614670008421, 0.003011680906638503, 0.007729522418230772, 0.053456373512744904, 0.007732374127954245, 0.011468403041362762, 0.0017881887033581734], [0.0063355350866913795, 0.00364849716424942, 0.004801975563168526, 0.0014675908023491502, 0.0006899124709889293, 0.0015573876444250345, 0.0032517211511731148, 0.058948978781700134, 0.05400266498327255, 0.29815152287483215, 0.10268783569335938, 0.2363196164369583, 0.08895136415958405, 0.07867825776338577, 0.005901458207517862, 0.0030205207876861095, 0.0016437876038253307, 0.00827864371240139, 0.021897707134485245, 0.010077781043946743, 0.008308257907629013, 0.0013789298245683312], [0.0029494480695575476, 0.023145277053117752, 0.005236457102000713, 0.0014179990394040942, 0.0014795877505093813, 0.0001716359838610515, 0.002310878364369273, 0.007165895309299231, 0.008437233977019787, 0.03782255947589874, 0.06070615351200104, 0.012089134193956852, 0.7312730550765991, 0.014307178556919098, 0.012242513708770275, 0.003127772593870759, 0.0015312345931306481, 0.0013565992703661323, 0.04407807067036629, 0.001250955043360591, 0.026972388848662376, 0.0009280036319978535], [0.06530864536762238, 0.0007868342217989266, 0.005104249343276024, 0.003217339050024748, 0.002009785268455744, 0.0002503730065654963, 0.0036973191890865564, 0.012519166804850101, 0.007946407422423363, 0.012814422138035297, 0.061192505061626434, 0.5968055725097656, 
0.022395052015781403, 0.1110779196023941, 0.019496535882353783, 0.00906350463628769, 0.0012596318265423179, 0.012174983508884907, 0.011791929602622986, 0.010760181583464146, 0.006756529677659273, 0.023571258410811424], [0.006542916409671307, 0.0026175633538514376, 0.003847435349598527, 0.0005298226024024189, 0.0018148035742342472, 0.00025495095178484917, 0.0029662633314728737, 0.0010144890984520316, 0.0021201535128057003, 0.006209060549736023, 0.11146806180477142, 0.10258320719003677, 0.40740370750427246, 0.13971686363220215, 0.03817920386791229, 0.021552609279751778, 0.010803707875311375, 0.024361278861761093, 0.05672444403171539, 0.004091351758688688, 0.03218109533190727, 0.023016933351755142], [0.003573015332221985, 0.00431928550824523, 0.005743533372879028, 0.008068522438406944, 0.0027138267178088427, 0.007343216799199581, 0.001587025704793632, 0.0039232955314219, 0.00048784681712277234, 0.008160005323588848, 0.008019328117370605, 0.06865354627370834, 0.041520241647958755, 0.35433831810951233, 0.09207499027252197, 0.1867281198501587, 0.028487635776400566, 0.08088063448667526, 0.01852073147892952, 0.011445987969636917, 0.00505683571100235, 0.058353982865810394], [0.010710954666137695, 0.008028171956539154, 0.0048471251502633095, 0.010354924015700817, 0.01795349270105362, 0.003472780343145132, 0.0030890952330082655, 0.00794023834168911, 0.0013573385076597333, 0.003211577655747533, 0.02552347257733345, 0.04473506659269333, 0.11942640691995621, 0.08039113134145737, 0.15452717244625092, 0.23196181654930115, 0.07255131751298904, 0.027353163808584213, 0.12047886848449707, 0.003874663496389985, 0.015746720135211945, 0.03246442228555679], [0.0012384293368086219, 0.005308203399181366, 0.002759843599051237, 0.0036847006995230913, 0.02216608263552189, 0.011961941607296467, 0.004635178949683905, 0.002489886712282896, 0.0005210934905335307, 0.0006693563773296773, 0.0007247587200254202, 0.0009803520515561104, 0.002990281442180276, 0.007847635075449944, 0.020303966477513313, 
0.4513709545135498, 0.19401764869689941, 0.1993468850851059, 0.04381170868873596, 0.010206478647887707, 0.002396388677880168, 0.010568305850028992], [0.0003970024117734283, 0.0005821652594022453, 0.00016156065976247191, 0.0003784839645959437, 0.0006560615147463977, 7.078750059008598e-05, 0.0004555876075755805, 0.0016760611906647682, 0.0001320469455095008, 0.0008890515891835093, 0.001283568679355085, 0.0004331569070927799, 0.008887112140655518, 0.006336492020636797, 0.022625207901000977, 0.08777309209108353, 0.021823152899742126, 0.10994242876768112, 0.7187818884849548, 0.005530532915145159, 0.008486173115670681, 0.002698407741263509], [0.0019079175544902682, 0.00248710997402668, 0.0010327327763661742, 0.0008478299132548273, 0.008149671368300915, 0.0010621220571920276, 0.0027743082027882338, 0.0017321082996204495, 0.001386149087920785, 0.004553437698632479, 0.0019481971394270658, 0.004368220455944538, 0.0027290198486298323, 0.006109640002250671, 0.009719080291688442, 0.1213686540722847, 0.07155399024486542, 0.4317028820514679, 0.12138555943965912, 0.1520041674375534, 0.02725091576576233, 0.023926254361867905], [0.003961359150707722, 0.004572119563817978, 0.01646830514073372, 0.002220581052824855, 0.004065011162310839, 0.00022420164896175265, 0.008088315837085247, 0.001026697107590735, 0.0014969900948926806, 0.0006564375362358987, 0.0033856460358947515, 0.0002313972363481298, 0.007884071208536625, 0.005105162039399147, 0.032674334943294525, 0.02786886692047119, 0.013252252712845802, 0.09445087611675262, 0.6167775392532349, 0.056720659136772156, 0.07055750489234924, 0.028311625123023987], [0.002664405619725585, 0.001140554086305201, 0.003274842631071806, 0.0013253630604594946, 0.0009919790318235755, 6.279080116655678e-05, 0.00169842888135463, 0.00021590096002910286, 0.0007308580097742379, 0.0011591733200475574, 0.0033988605719059706, 0.006427601911127567, 0.0011966773308813572, 0.019464122131466866, 0.003733852645382285, 0.0077318050898611546, 0.0031415335834026337, 
0.12698894739151, 0.036334164440631866, 0.36871790885925293, 0.07384201139211655, 0.3357582986354828]], [[0.32811111211776733, 0.04105464741587639, 0.017021294683218002, 0.05007265508174896, 0.019609082490205765, 0.019383125007152557, 0.08162632584571838, 0.21569766104221344, 0.03453828766942024, 0.05682748928666115, 0.008895095437765121, 0.03358347713947296, 0.005036372225731611, 0.0007358305738307536, 0.011406964622437954, 0.0042864661663770676, 0.0021279132924973965, 0.01689928211271763, 0.016049306839704514, 0.006415276322513819, 0.02971150353550911, 0.0009109582751989365], [0.03535657003521919, 0.06534785777330399, 0.014123653061687946, 0.3819751739501953, 0.13701984286308289, 0.018174679949879646, 0.010073719546198845, 0.1675337851047516, 0.02104642055928707, 0.023580338805913925, 0.005079958122223616, 0.03377722576260567, 0.04032281041145325, 0.0015473793027922511, 0.0013626681175082922, 0.007934799417853355, 0.0014093288918957114, 0.0005036696093156934, 0.003779428545385599, 0.0008673664997331798, 0.02232132852077484, 0.006861996371299028], [0.0021697308402508497, 0.02912885881960392, 0.0020075570791959763, 0.15351974964141846, 0.7538934350013733, 0.008029233664274216, 0.004144683945924044, 0.012323307804763317, 0.010493668727576733, 0.0016096207546070218, 0.00015105464262887836, 0.004046534188091755, 0.007953688502311707, 0.0003150246338918805, 0.0013598536606878042, 0.003505572210997343, 0.0014177007833495736, 8.590704237576574e-05, 5.008261723560281e-05, 0.0006144235376268625, 0.002563114045187831, 0.0006171875284053385], [0.009733074344694614, 0.024523833766579628, 0.010241004638373852, 0.048035189509391785, 0.013552986085414886, 0.6757500767707825, 0.014968490228056908, 0.13684161007404327, 0.009750870987772942, 0.020430058240890503, 0.005025146994739771, 0.0006328550516627729, 0.0022335497196763754, 0.0024024993181228638, 0.0024342178367078304, 0.002114254981279373, 0.006566783878952265, 0.0007753132958896458, 0.0070585040375590324, 
0.0004623699060175568, 0.004636615049093962, 0.0018307099817320704], [0.08377187699079514, 0.013941450975835323, 0.02704404480755329, 0.003287150524556637, 0.0033786653075367212, 0.035394296050071716, 0.64286869764328, 0.05322687700390816, 0.019601143896579742, 0.03348410129547119, 0.01461188867688179, 0.0021255360916256905, 0.0004040712083224207, 0.0018339419038966298, 0.003242149716243148, 0.0019547424744814634, 0.008809681981801987, 0.022304514423012733, 0.011840851046144962, 0.012447504326701164, 0.003028797684237361, 0.0013980185613036156], [0.003148450283333659, 0.0005057180533185601, 9.951068204827607e-05, 0.0037952593993395567, 0.0061728935688734055, 0.0011763254879042506, 0.00062658911338076, 0.9643767476081848, 0.0020778959151357412, 0.0013004064094275236, 9.719732588564511e-06, 0.00019764393800869584, 2.6630274078343064e-05, 1.322757361776894e-05, 1.5343450286309235e-05, 0.01187361404299736, 9.313752525486052e-05, 0.0002796564076561481, 0.002855657832697034, 0.001112233498133719, 0.000198316658497788, 4.504205935518257e-05], [0.0003192335134372115, 0.0003710434539243579, 4.237892062519677e-05, 0.00033588040969334543, 0.0018402603454887867, 9.54494607867673e-05, 0.0008023888221941888, 0.008141404949128628, 0.9711332321166992, 0.002518518129363656, 0.004110974259674549, 0.0007344440091401339, 0.0001657726097619161, 7.69769940234255e-06, 1.5722951502539217e-05, 1.3381336430029478e-05, 0.0003553515998646617, 6.430316716432571e-05, 0.0006408959743566811, 0.007831099443137646, 0.00040081178303807974, 5.9659811086021364e-05], [0.004777196329087019, 0.0014290842227637768, 0.004724963568150997, 0.00016191505710594356, 5.297239113133401e-05, 0.004089992493391037, 0.008872306905686855, 0.047677818685770035, 0.0033322498202323914, 0.8701696991920471, 0.016119493171572685, 0.03197072073817253, 0.00011477500083856285, 0.0005876723444089293, 6.055368794477545e-05, 6.469548679888248e-05, 1.6877664165804163e-05, 0.002561821835115552, 0.0005153919919393957, 
0.000723587058018893, 0.0019089095294475555, 6.734410999342799e-05], [0.0014902011025696993, 0.005735126323997974, 0.004699942655861378, 0.0013094667810946703, 0.00045003672130405903, 0.0005804229876957834, 0.0009720654925331473, 0.0008700613398104906, 0.020301848649978638, 0.03987552598118782, 0.7801620364189148, 0.0067265452817082405, 0.09488826990127563, 0.010315093211829662, 0.006995248142629862, 0.00010578719229670241, 0.0005713499849662185, 0.00017174231470562518, 0.00941331498324871, 0.0016082595102488995, 0.009538387879729271, 0.003219242673367262], [0.0006561825866810977, 0.0005365749238990247, 0.0045445943251252174, 0.0008146469481289387, 0.001524818711914122, 0.0001364605559501797, 0.0001996564824366942, 0.006192202214151621, 0.0005577059928327799, 0.020744064822793007, 0.0010860732290893793, 0.9324439764022827, 0.0018858890980482101, 0.013990087434649467, 0.000589711416978389, 0.004632120486348867, 3.571166962501593e-05, 0.002387886168435216, 4.166306825936772e-05, 0.0031253802590072155, 0.0021008988842368126, 0.0017737987218424678], [0.01095922663807869, 0.0027915313839912415, 0.001979063730686903, 0.03177174553275108, 0.009285571984946728, 0.0010506634134799242, 0.0006202101358212531, 0.002179992850869894, 0.009633663110435009, 0.00028788563213311136, 0.006867523770779371, 0.0035844468511641026, 0.8054414391517639, 0.004375510383397341, 0.06126059591770172, 0.015071919187903404, 0.009911756962537766, 6.702774408040568e-05, 0.017757149413228035, 0.0003452473320066929, 0.0027239362243562937, 0.0020339018665254116], [0.0008633278193883598, 0.0005144051974639297, 0.013233874924480915, 0.005258501973003149, 0.001777776749804616, 0.011828369460999966, 0.00034135719761252403, 0.0002716313465498388, 7.384116179309785e-05, 0.002186145866289735, 0.0006365873850882053, 0.024209655821323395, 0.0028078060131520033, 0.8345346450805664, 0.06502009183168411, 0.025948336347937584, 0.0025185972917824984, 0.0028924299404025078, 2.2134316168376245e-05, 
0.0010158581426367164, 9.89073232631199e-05, 0.003945659846067429], [0.009775991551578045, 0.00040147340041585267, 0.004063891246914864, 0.00395588343963027, 0.0036410270258784294, 0.006214762572199106, 0.001587734674103558, 6.358775135595351e-05, 0.00010948067938443273, 0.0004401069600135088, 0.0009445915347896516, 0.0011634008260443807, 0.017403410747647285, 0.00815183948725462, 0.8447822332382202, 0.0410364493727684, 0.052174992859363556, 0.0015056910924613476, 0.0012037336127832532, 6.127453616500134e-06, 0.0012971757678315043, 7.651487248949707e-05], [0.0022346279583871365, 0.0002312197902938351, 0.0004362422914709896, 0.007909134961664677, 0.011434918269515038, 0.002138162264600396, 0.00042522678268142045, 0.005329220090061426, 0.00010774182737804949, 0.00010441958875162527, 4.964624167769216e-06, 0.0006019663996994495, 0.0011495311046019197, 0.004154348745942116, 0.007867998443543911, 0.9461049437522888, 0.006524681579321623, 0.001929143792949617, 0.0008296258165501058, 0.00019119463104289025, 3.295030910521746e-05, 0.00025780117721296847], [0.0008704860229045153, 0.0012716335477307439, 0.0011853431351482868, 0.004251593723893166, 0.02931329235434532, 0.005941805895417929, 0.001117532141506672, 0.004654752556234598, 0.025843195617198944, 0.001393746817484498, 0.0038091165479272604, 0.0003375255037099123, 0.008822653442621231, 0.00749135622754693, 0.04397048056125641, 0.04426601156592369, 0.7354809641838074, 0.0050926594994962215, 0.06354758143424988, 0.0024176074657589197, 0.007634707260876894, 0.0012860152637585998], [0.0022876670118421316, 0.00028663905686698854, 0.0020263539627194405, 0.00030018738470971584, 0.0009570252732373774, 0.000173329419340007, 0.004759882111102343, 0.00885853637009859, 0.00013822874461766332, 0.008684905245900154, 9.101524483412504e-05, 0.0033063848968595266, 1.723699642752763e-05, 0.0009826146997511387, 0.003062913194298744, 0.04501784220337868, 0.0004718767886515707, 0.8703591227531433, 0.017934149131178856, 
0.01913517154753208, 0.010430816560983658, 0.0007180777611210942], [7.456832099705935e-05, 0.00017019153165165335, 2.667674380063545e-05, 0.00014533186913467944, 0.00043572892900556326, 7.86722739576362e-06, 7.951394218252972e-05, 0.005170623306185007, 0.0015405568992719054, 0.00027884443989023566, 0.0001773395051714033, 1.6519623386557214e-05, 0.0002161782031180337, 8.483679266646504e-06, 0.0001338082511210814, 0.003886121790856123, 0.0005632844986394048, 0.0018946698401123285, 0.9736438989639282, 0.004076292272657156, 0.007174432277679443, 0.00027907188632525504], [8.341840839420911e-06, 5.933310967520811e-05, 0.000143799145007506, 1.7916900105774403e-05, 0.0006692925235256553, 6.326568495751417e-07, 5.259389945422299e-05, 0.0005070970510132611, 0.0024814594071358442, 0.001182642998173833, 0.0002574232348706573, 0.006176181137561798, 1.8321736206416972e-05, 0.0002504101721569896, 1.089947090804344e-05, 0.0002449792227707803, 1.3315307114680763e-05, 0.004898402374237776, 0.00047535367775708437, 0.9770424365997314, 0.0017710048705339432, 0.0037181125953793526], [0.0012395860394462943, 0.0021661545615643263, 0.0018904125317931175, 0.00023988420434761792, 0.00025605526752769947, 3.1944200600264594e-05, 0.0031761028803884983, 0.00262732757255435, 0.007929932326078415, 0.016861505806446075, 0.04641098529100418, 0.010057208128273487, 0.006324970629066229, 7.346749043790624e-05, 0.0010926557006314397, 7.2900002123788e-05, 0.00011449186422396451, 0.004495067987591028, 0.14949925243854523, 0.002405450213700533, 0.7419611215591431, 0.001073457533493638], [0.00014243388432078063, 0.0007619172101840377, 0.002375217154622078, 0.00046554836444556713, 0.0008382101077586412, 9.235663128492888e-06, 2.1782512703794055e-05, 0.00013566245615947992, 0.00022274574439506978, 0.0006957019213587046, 0.0018310417653992772, 0.0046547781676054, 0.003558946307748556, 0.03459839150309563, 0.00030596539727412164, 0.0015745528507977724, 1.4840068615740165e-05, 0.00032704119803383946, 
0.0011146850883960724, 0.08127113431692123, 0.0030299387872219086, 0.8620502352714539], [0.0036192089319229126, 0.013959708623588085, 0.13210426270961761, 0.005699304398149252, 0.008272991515696049, 0.0023545643780380487, 0.0020297409500926733, 0.000624055159278214, 0.000217385109863244, 0.012923896312713623, 0.005982271395623684, 0.2126888930797577, 0.014979338273406029, 0.05765066295862198, 0.024081703275442123, 0.0026258446741849184, 0.0008737666648812592, 0.010964000597596169, 0.000502359529491514, 0.004488460719585419, 0.4105748236179352, 0.07278284430503845], [0.12526145577430725, 0.008716905489563942, 0.03734520822763443, 0.16291052103042603, 0.028053542599081993, 0.010896786116063595, 0.004268038552254438, 0.002455470385029912, 1.597488153493032e-05, 0.00018421334971208125, 0.00010944777022814378, 0.0006686443812213838, 0.010764162056148052, 0.04311797767877579, 0.05437501519918442, 0.3749723732471466, 0.0017946036532521248, 0.0027784432750195265, 0.007936987094581127, 0.0003680419467855245, 0.0028001507744193077, 0.1202060729265213]], [[0.004635021090507507, 0.24705900251865387, 0.05346502363681793, 0.0056474958546459675, 0.014055266045033932, 0.008401220664381981, 0.041404981166124344, 0.22297219932079315, 0.04040724039077759, 0.1467750519514084, 0.014805984683334827, 0.009402524679899216, 0.07217945903539658, 0.028094103559851646, 0.004415946546941996, 0.007834001444280148, 0.003906069090589881, 0.0356418751180172, 0.012739305384457111, 0.009777519851922989, 0.013709193095564842, 0.00267145037651062], [0.00611835764721036, 0.021757273003458977, 0.9167709350585938, 0.008834940381348133, 0.014856455847620964, 0.0016349053476005793, 0.004879124462604523, 0.001034347340464592, 0.00026995447115041316, 0.0005395560874603689, 0.0021928439382463694, 0.006410342175513506, 0.0006172610446810722, 0.004163757897913456, 0.0005065790028311312, 0.0010499428026378155, 4.134298796998337e-05, 0.00047431670827791095, 3.754648787435144e-05, 0.00031640433007851243, 
0.0003612172731664032, 0.0071325707249343395], [0.06282370537519455, 0.1531633883714676, 0.08925896883010864, 0.02939201146364212, 0.08373561501502991, 0.047422222793102264, 0.022562582045793533, 0.07210684567689896, 0.013939647935330868, 0.014161414466798306, 0.04131746664643288, 0.053017016500234604, 0.07784885913133621, 0.011507341638207436, 0.005372434854507446, 0.0774473324418068, 0.015134396962821484, 0.005543690640479326, 0.02405388467013836, 0.00981047097593546, 0.03776923567056656, 0.05261150747537613], [0.00015744038682896644, 0.0019003520719707012, 0.0008351475116796792, 0.002883315086364746, 0.9392665028572083, 0.0026170415803790092, 0.04404456913471222, 0.0011918040690943599, 0.00033471386996097863, 0.0009729847079142928, 4.457051545614377e-05, 8.502834680257365e-05, 0.00034086042433045805, 0.0004958515055477619, 0.00024265650426968932, 0.0036763232201337814, 5.6079206842696294e-05, 0.0005126438336446881, 2.346229666727595e-05, 7.2530606303189415e-06, 0.0002574055106379092, 5.4046842706156895e-05], [0.029930662363767624, 0.022740913555026054, 0.0032874594908207655, 0.016077106818556786, 0.01305078249424696, 0.6281920671463013, 0.03639771044254303, 0.12294354289770126, 0.006847466807812452, 0.006870459299534559, 0.0026824339292943478, 0.0021568622905761003, 0.0024951270315796137, 0.002891840413212776, 0.002703294390812516, 0.012959853745996952, 0.04696273431181908, 0.020626530051231384, 0.01533240731805563, 0.0016818601870909333, 0.0013979452196508646, 0.0017709382809698582], [0.002979523502290249, 0.0045725759118795395, 0.009016112424433231, 0.0009752805344760418, 0.07802224904298782, 0.003309598658233881, 0.8773244619369507, 0.005472294054925442, 0.01163660641759634, 0.001055937958881259, 0.0005765201058238745, 0.00019776183762587607, 2.2156060367706232e-05, 0.00022856240684632212, 0.0002349263959331438, 0.0010249485494568944, 0.0001355414860881865, 0.00250519928522408, 0.0003192507429048419, 0.00018891002400778234, 0.00018492291565053165, 
1.670759593253024e-05], [0.06645728647708893, 0.026810957118868828, 0.007896806113421917, 0.0055507454089820385, 0.03294006362557411, 0.13784010708332062, 0.05166694149374962, 0.44810938835144043, 0.06553427875041962, 0.0177119392901659, 0.020463095977902412, 0.016345972195267677, 0.002336283680051565, 0.0022531042341142893, 0.00032046897104009986, 0.028256049379706383, 0.010592211969196796, 0.018753651529550552, 0.014572829008102417, 0.018036162480711937, 0.002085393061861396, 0.005466310307383537], [0.0004226614546496421, 0.013822514563798904, 0.0009240741492249072, 0.0013838282320648432, 0.005219125188887119, 0.0018394163344055414, 0.16614565253257751, 0.11516868323087692, 0.5264026522636414, 0.08440906554460526, 0.0188046395778656, 0.0005073579959571362, 0.003977186046540737, 0.0001544699043733999, 0.0003880222502630204, 0.0001570657768752426, 0.0036560201551765203, 0.01790935918688774, 0.02997533231973648, 0.0047214338555932045, 0.00396798737347126, 4.3576925236266106e-05], [0.005066320300102234, 0.011444928124547005, 0.004542102571576834, 0.0009756892686709762, 0.002643020125105977, 0.0027002147398889065, 0.033218614757061005, 0.10427755117416382, 0.015862569212913513, 0.6760784983634949, 0.02278846502304077, 0.0368853397667408, 0.002911612158641219, 0.003205185756087303, 0.00029276107670739293, 0.0012347479350864887, 0.00013321569713298231, 0.01986563578248024, 0.0078953318297863, 0.0046132407151162624, 0.042411286383867264, 0.0009536721045151353], [0.0016060457564890385, 0.033354807645082474, 0.0870702788233757, 0.023634327575564384, 0.0024058325216174126, 0.0006996453157626092, 0.015156414359807968, 0.004811752587556839, 0.11416864395141602, 0.011547209694981575, 0.5578418374061584, 0.03642210364341736, 0.03252067789435387, 0.00860521849244833, 0.013315930962562561, 0.00023499761300627142, 0.0009957810398191214, 0.0011668041115626693, 0.011501305736601353, 0.017754485830664635, 0.005305266473442316, 0.01988065242767334], [0.0017707296647131443, 
0.020120816305279732, 0.05200902000069618, 0.0022648528683930635, 0.0017147562466561794, 0.0007893921574577689, 0.00627195043489337, 0.014740370213985443, 0.006926660425961018, 0.08437603712081909, 0.054059870541095734, 0.5992767214775085, 0.03822823241353035, 0.04185926169157028, 0.002206765580922365, 0.004167424514889717, 0.00017414891044609249, 0.016752075403928757, 0.0018741340609267354, 0.01794380694627762, 0.02355976216495037, 0.008913278579711914], [0.00033133241231553257, 0.004451011307537556, 0.008154508657753468, 0.0015642506768926978, 0.0013687785249203444, 0.00012671462900470942, 0.0004789243685081601, 0.0002562529407441616, 0.002879864303395152, 0.005873676855117083, 0.03759167343378067, 0.010298958979547024, 0.8427020311355591, 0.0307832770049572, 0.01089137326925993, 0.0035900152288377285, 0.001366278389468789, 0.00024202150234486908, 0.0024393564090132713, 0.0006269782315939665, 0.027030279859900475, 0.006952530238777399], [0.00157332478556782, 0.0020068958401679993, 0.012256619520485401, 0.01075138058513403, 0.0043641263619065285, 0.006537904497236013, 0.0007807519868947566, 0.0007738819695077837, 0.0001592474291101098, 0.0006351316696964204, 0.0024782426189631224, 0.1784539818763733, 0.016681723296642303, 0.6840471625328064, 0.027036532759666443, 0.019892174750566483, 0.0010642333654686809, 0.0015466210898011923, 1.700729990261607e-05, 0.0007480861386284232, 5.503516877070069e-05, 0.02813989482820034], [0.00628926744684577, 0.00880281813442707, 0.0018639297923073173, 0.0009614253649488091, 0.006911750882863998, 0.005311125889420509, 0.002545603783801198, 0.000753805332351476, 0.0017534063663333654, 0.002194314729422331, 0.011736784130334854, 0.016365278512239456, 0.6003066897392273, 0.018324507400393486, 0.08693546801805496, 0.12636035680770874, 0.08825639635324478, 0.004166516941040754, 0.003379901871085167, 0.0007080191280692816, 0.004504290875047445, 0.001568404259160161], [0.0006809699698351324, 0.001210272777825594, 0.0004421327030286193, 
0.004075417295098305, 0.07195364683866501, 0.005764768458902836, 0.017836948856711388, 0.0063585857860744, 0.0007953798049129546, 0.003797919023782015, 0.0001830078399507329, 0.0007482520304620266, 0.011078836396336555, 0.020691340789198875, 0.019334211945533752, 0.765095055103302, 0.026395462453365326, 0.03640148043632507, 0.004383946303278208, 0.00027627183590084314, 0.0016332294326275587, 0.0008628877112641931], [0.00392924714833498, 0.003933853469789028, 0.0013142566895112395, 0.001614227774553001, 0.0031006564386188984, 0.006833639927208424, 0.004085005261003971, 0.008311444893479347, 0.011214288882911205, 0.0009607350802980363, 0.0009785684524104, 0.00018460594583302736, 0.006328865885734558, 0.0030466055031865835, 0.020234178751707077, 0.022098977118730545, 0.7184523940086365, 0.07160316407680511, 0.1001313254237175, 0.009571743197739124, 0.0016381070017814636, 0.00043420129804871976], [0.0010730659123510122, 0.0022827214561402798, 0.004752886015921831, 0.00018361372349318117, 0.005463439505547285, 0.0013447669334709644, 0.018174203112721443, 0.019026929512619972, 0.0022481651976704597, 0.007388685829937458, 0.00023915062774904072, 0.0017395683098584414, 8.427929424215108e-05, 0.0032464272808283567, 0.0006014771643094718, 0.04870206490159035, 0.0015358475502580404, 0.8566842079162598, 0.009840603917837143, 0.012693892233073711, 0.0025109960697591305, 0.0001829695829655975], [0.0009977277368307114, 0.002833782462403178, 0.0003937868168577552, 0.00011173594975844026, 0.0002903660060837865, 0.00032362283673137426, 0.00033032469218596816, 0.006059604696929455, 0.013156161643564701, 0.0022918933536857367, 0.012357006780803204, 0.0019784620963037014, 0.005669245962053537, 0.00044207661994732916, 0.00011256430298089981, 0.003787113819271326, 0.015425610356032848, 0.007570043206214905, 0.7541738748550415, 0.12261254340410233, 0.03163067251443863, 0.01745196431875229], [1.8843447833205573e-05, 0.0014808853156864643, 0.0006802146090194583, 0.00012199421325931326, 
6.141073390608653e-05, 0.00011493854253785685, 0.0005712303100153804, 0.007698581553995609, 0.0033302863594144583, 0.00985262356698513, 0.0006253255414776504, 0.002117984462529421, 0.0002698514726944268, 0.0009259484359063208, 8.033099584281445e-05, 0.00033428167807869613, 0.0008749706321395934, 0.3928810656070709, 0.01575314998626709, 0.5549786686897278, 0.005769978743046522, 0.0014574530068784952], [0.00010542760719545186, 0.0009460980072617531, 0.0004946712870150805, 9.714558837004006e-05, 3.475500125205144e-05, 1.1825778756247018e-06, 4.266604810254648e-05, 0.0002455242502037436, 0.00011902765254490077, 0.004673604387789965, 0.001422739471308887, 0.0022161530796438456, 0.003563627600669861, 8.872545004123822e-05, 0.00010181721881963313, 3.094140629400499e-05, 4.065478151460411e-06, 9.048938954947516e-05, 0.023139208555221558, 0.0008371506701223552, 0.9578408002853394, 0.003904256969690323], [0.00011218923464184627, 0.003006349317729473, 0.01418394222855568, 0.003468708833679557, 0.00030695690657012165, 4.352636096882634e-05, 2.4767530703684315e-05, 0.00015313216135837138, 0.00015961022290866822, 0.00013592281902674586, 0.001178519451059401, 0.004037793725728989, 0.0014398633502423763, 0.009013334289193153, 0.00034680491080507636, 0.00013446244702208787, 4.211966006550938e-05, 4.0216931665781885e-05, 0.0008875842904672027, 0.01302229892462492, 0.0028415198903530836, 0.9454203844070435], [0.002429688349366188, 0.03758931905031204, 0.25840485095977783, 0.005892001558095217, 0.008190581575036049, 0.0007176147773861885, 0.0005697562010027468, 0.0003964920178987086, 0.0003440504369791597, 0.0034220307134091854, 0.004824842792004347, 0.02463134378194809, 0.05155113711953163, 0.05182220786809921, 0.01170615665614605, 0.007514322642236948, 0.0007257128017954528, 0.0012209488777443767, 0.003373855957761407, 0.005841685924679041, 0.4461471736431122, 0.07268419861793518]], [[0.019591735675930977, 0.028682734817266464, 0.039929378777742386, 0.024996032938361168, 
0.02406466379761696, 0.06604952365159988, 0.054931141436100006, 0.047169867902994156, 0.08835051208734512, 0.042608339339494705, 0.08274534344673157, 0.04491446912288666, 0.031796880066394806, 0.05440562218427658, 0.05717974528670311, 0.040813323110342026, 0.06332924216985703, 0.035808950662612915, 0.04943666607141495, 0.04600885510444641, 0.01916688121855259, 0.0380200520157814], [0.03050154820084572, 0.05732402950525284, 0.016286568716168404, 0.041498828679323196, 0.055198390036821365, 0.029511459171772003, 0.09751898050308228, 0.037906888872385025, 0.014801044017076492, 0.035326581448316574, 0.013479121960699558, 0.011478251777589321, 0.029274342581629753, 0.013562624342739582, 0.04125916585326195, 0.036981917917728424, 0.05687369406223297, 0.14009352028369904, 0.088694266974926, 0.02660074271261692, 0.08567187190055847, 0.04015618935227394], [0.011814258992671967, 0.07951321452856064, 0.10244712978601456, 0.048519767820835114, 0.19110046327114105, 0.04680996760725975, 0.06927435100078583, 0.03834313899278641, 0.05381153151392937, 0.02756350301206112, 0.023435093462467194, 0.019020559266209602, 0.01858353242278099, 0.024053223431110382, 0.019223248586058617, 0.05273646488785744, 0.027214961126446724, 0.02945535257458687, 0.026812905445694923, 0.03620513156056404, 0.026423348113894463, 0.027638843283057213], [0.005325342994183302, 0.104951411485672, 0.15711140632629395, 0.06547228246927261, 0.23359665274620056, 0.03126410394906998, 0.11671672761440277, 0.036609623581171036, 0.025043373927474022, 0.01081337034702301, 0.04373501241207123, 0.040643949061632156, 0.03214778006076813, 0.016150958836078644, 0.005531645379960537, 0.0062932223081588745, 0.00421812804415822, 0.011325179599225521, 0.010545788332819939, 0.007108831312507391, 0.009658563882112503, 0.0257366131991148], [0.00972742959856987, 0.06819378584623337, 0.13298773765563965, 0.05985315516591072, 0.02740628272294998, 0.022790491580963135, 0.034841012209653854, 0.016554538160562515, 0.06280101090669632, 
0.04190325364470482, 0.042210500687360764, 0.029710398986935616, 0.016108369454741478, 0.01879320666193962, 0.045757077634334564, 0.018785065039992332, 0.017403727397322655, 0.021961156278848648, 0.03487221151590347, 0.13338212668895721, 0.07924784719944, 0.06470972299575806], [0.002264689188450575, 0.09153062850236893, 0.15636645257472992, 0.03342783823609352, 0.381689190864563, 0.024112187325954437, 0.154897540807724, 0.04004082456231117, 0.043248120695352554, 0.0062379795126616955, 0.0148383229970932, 0.01569698564708233, 0.008779125288128853, 0.0065926299430429935, 0.0007907395483925939, 0.0019665106665343046, 0.000538458174560219, 0.002066565677523613, 0.0013144687982276082, 0.003127302974462509, 0.0026337832678109407, 0.00783962570130825], [0.00341933686286211, 0.13838054239749908, 0.2945275604724884, 0.09555654227733612, 0.29589468240737915, 0.023034008219838142, 0.06153295189142227, 0.010986818931996822, 0.038062795996665955, 0.005337015725672245, 0.0017901849932968616, 0.0019243760034441948, 0.006904438138008118, 0.004270092584192753, 0.0027608219534158707, 0.0017637767596170306, 0.0009059156873263419, 0.0011141921859234571, 0.000624096835963428, 0.004146486986428499, 0.005048119928687811, 0.00201511662453413], [0.0016368733486160636, 0.12722189724445343, 0.28617823123931885, 0.04706413671374321, 0.08610743284225464, 0.03432963043451309, 0.2519434988498688, 0.0056212665513157845, 0.12478401511907578, 0.016832171007990837, 0.0038616617675870657, 0.0021306483540683985, 0.0030126594938337803, 0.0033594612032175064, 0.0015483149327337742, 0.0007506690453737974, 0.00046418647980317473, 0.000549567979760468, 6.452742672991008e-05, 0.001757911522872746, 0.0005271864356473088, 0.0002540400018915534], [0.016054987907409668, 0.1467195600271225, 0.22189103066921234, 0.051591210067272186, 0.2604811489582062, 0.07573921233415604, 0.15292349457740784, 0.004748410079628229, 0.03327000513672829, 0.011521547101438046, 0.006293710321187973, 0.0042919074185192585, 
0.0063787708058953285, 0.003486776491627097, 0.0008751204004511237, 0.0011026415741071105, 0.000582612119615078, 0.0003238821227569133, 3.547813685145229e-05, 0.0004941528895869851, 0.0004691473732236773, 0.0007252305513247848], [0.004892671946436167, 0.018854854628443718, 0.0054095047526061535, 0.009133385494351387, 0.054034553468227386, 0.05778445675969124, 0.2821880877017975, 0.36477231979370117, 0.06851616501808167, 0.025354262441396713, 0.02720906026661396, 0.03561440855264664, 0.011775628663599491, 0.014759823679924011, 0.0006350252078846097, 0.0027344098780304193, 0.002423633122816682, 0.006842788774520159, 0.002774295164272189, 0.0007719383575022221, 0.0012244551908224821, 0.00229424936696887], [0.0021717119961977005, 0.036981817334890366, 0.10885120928287506, 0.008453885093331337, 0.11818327754735947, 0.04635453596711159, 0.2094826102256775, 0.057409483939409256, 0.29966697096824646, 0.03565544635057449, 0.022664548829197884, 0.00861902441829443, 0.009140237234532833, 0.018709270283579826, 0.0015031658113002777, 0.005769841372966766, 0.0032534091733396053, 0.0033472548238933086, 0.000592717609833926, 0.0020127035677433014, 0.0005651911487802863, 0.000611684110481292], [0.0020788710098713636, 0.028724636882543564, 0.0411260761320591, 0.0030676275491714478, 0.020854197442531586, 0.031971968710422516, 0.2290925681591034, 0.06929561495780945, 0.3264944851398468, 0.10201739519834518, 0.06262209266424179, 0.02324575185775757, 0.01518921460956335, 0.026259109377861023, 0.0013126230333000422, 0.00395925622433424, 0.003302966244518757, 0.005764728412032127, 0.0007011499837972224, 0.0018859574338421226, 0.0005338808987289667, 0.0004998738877475262], [0.009125567972660065, 0.015073365531861782, 0.006723049096763134, 0.005779407452791929, 0.011992359533905983, 0.033201493322849274, 0.192193865776062, 0.2932174503803253, 0.09516958147287369, 0.04690181091427803, 0.07110047340393066, 0.05714651942253113, 0.03516817092895508, 0.02439768798649311, 0.002418263116851449, 
0.004450883716344833, 0.00985480286180973, 0.045119620859622955, 0.024232815951108932, 0.005978161934763193, 0.004381580278277397, 0.006373108364641666], [0.0056913550943136215, 0.005803166422992945, 0.0022186213172972202, 0.0019640913233160973, 0.006734295282512903, 0.028528904542326927, 0.024144772440195084, 0.3597160279750824, 0.06590214371681213, 0.0533493347465992, 0.1292591243982315, 0.20088715851306915, 0.04542103409767151, 0.03200654685497284, 0.00091977184638381, 0.0036660393234342337, 0.0037310596089810133, 0.008685066364705563, 0.010806812904775143, 0.0020431443117558956, 0.0018222150392830372, 0.006699309218674898], [0.001415681908838451, 0.0010505251120775938, 0.0005002353573217988, 0.0007725005852989852, 0.0015893300296738744, 0.008072842843830585, 0.010884004645049572, 0.05912879854440689, 0.010995451360940933, 0.013919373974204063, 0.20992158353328705, 0.5221662521362305, 0.07051197439432144, 0.04866613820195198, 0.0022107672411948442, 0.003856704104691744, 0.005283246282488108, 0.011249606497585773, 0.008526910096406937, 0.0009503668989054859, 0.0012466938933357596, 0.007080945186316967], [0.0016583299729973078, 0.0011338854674249887, 0.0006796043599024415, 0.0011766606476157904, 0.0010950772557407618, 0.004892059601843357, 0.010403129272162914, 0.049134768545627594, 0.0342043973505497, 0.11689753085374832, 0.21517100930213928, 0.34241750836372375, 0.06879207491874695, 0.06366264075040817, 0.018730754032731056, 0.007308773696422577, 0.009985741227865219, 0.022033026441931725, 0.01059691235423088, 0.006248414050787687, 0.007580430246889591, 0.006197213660925627], [0.0007009223336353898, 0.0004040120111312717, 0.00034145975951105356, 0.00027287850389257073, 0.0009951683459803462, 0.0018246799008920789, 0.004297698847949505, 0.019900420680642128, 0.0044728731736540794, 0.006963769439607859, 0.1705508828163147, 0.5943649411201477, 0.07881621271371841, 0.08134566247463226, 0.003933954052627087, 0.004535601008683443, 0.002706495113670826, 
0.008671429008245468, 0.004715018905699253, 0.0010012099519371986, 0.001151366624981165, 0.008033355697989464], [0.003679189831018448, 0.002629284281283617, 0.0016202646074816585, 0.002962524304166436, 0.0036252387799322605, 0.010408873669803143, 0.012049158103764057, 0.04631718248128891, 0.01800062693655491, 0.04690256714820862, 0.07418885827064514, 0.26520249247550964, 0.2511037588119507, 0.1442842036485672, 0.02804884873330593, 0.012120590545237064, 0.02012055739760399, 0.027028359472751617, 0.009530114941298962, 0.0032428433187305927, 0.009981827810406685, 0.006952732801437378], [0.0017005658010020852, 0.0010331524536013603, 0.0013731805374845862, 0.0015940676676109433, 0.0019672145135700703, 0.006734512280672789, 0.008906415663659573, 0.010484350845217705, 0.013779271394014359, 0.06485333293676376, 0.08418704569339752, 0.19339674711227417, 0.154580757021904, 0.21298988163471222, 0.07141825556755066, 0.0354953333735466, 0.05543180927634239, 0.05431549251079559, 0.009595397859811783, 0.005471574142575264, 0.007359258830547333, 0.0033323431853204966], [0.028145892545580864, 0.007137395907193422, 0.004881908651441336, 0.006392383016645908, 0.014235883951187134, 0.02126285620033741, 0.017573699355125427, 0.015667088329792023, 0.013450264930725098, 0.0690319836139679, 0.09965575486421585, 0.15289728343486786, 0.10935016721487045, 0.11820190399885178, 0.05218389630317688, 0.0813429206609726, 0.08286643773317337, 0.050384216010570526, 0.01301882416009903, 0.00786722544580698, 0.014685395173728466, 0.01976664550602436], [0.0018305876292288303, 0.00048144382890313864, 0.00015517523570451885, 0.0009266665438190103, 0.0011003561085090041, 0.004116968717426062, 0.011418849229812622, 0.04932102933526039, 0.004678643308579922, 0.019151045009493828, 0.05157614126801491, 0.10221334546804428, 0.02826448529958725, 0.05881859362125397, 0.0193121749907732, 0.043828971683979034, 0.09746972471475601, 0.27212801575660706, 0.19342145323753357, 0.006892898119986057, 
0.01637578383088112, 0.01651769131422043], [0.003779658116400242, 0.0025065012741833925, 0.003674836829304695, 0.0016767500201240182, 0.003295489586889744, 0.00296697486191988, 0.011539969593286514, 0.016042588278651237, 0.02058008313179016, 0.03302851691842079, 0.06323497742414474, 0.03901771083474159, 0.019419198855757713, 0.06079189106822014, 0.049518000334501266, 0.1314821094274521, 0.09475348889827728, 0.19381964206695557, 0.14641247689723969, 0.05694359168410301, 0.020688047632575035, 0.024827469140291214]], [[0.027435345575213432, 0.24697722494602203, 0.03954026848077774, 0.03270808234810829, 0.04179668799042702, 0.04420297220349312, 0.08473354578018188, 0.09593744575977325, 0.023311348631978035, 0.03993004187941551, 0.022552717477083206, 0.023855706676840782, 0.12087360769510269, 0.02169254794716835, 0.005022514145821333, 0.015122704207897186, 0.009832564741373062, 0.025475576519966125, 0.014855879358947277, 0.006245864555239677, 0.03201498091220856, 0.025882430374622345], [0.007882432080805302, 0.250928670167923, 0.31201091408729553, 0.008265189826488495, 0.017104577273130417, 0.006826863158494234, 0.26670458912849426, 0.03850933909416199, 0.00850711204111576, 0.028134524822235107, 0.018074410036206245, 0.0037238437216728926, 0.01626906730234623, 0.0032752547413110733, 0.0006977158482186496, 0.0006241753580980003, 0.0002364196552662179, 0.0024888277985155582, 0.0014370887074619532, 0.000708161445800215, 0.004945310298353434, 0.0026456215418875217], [0.006633547134697437, 0.8075401782989502, 0.026555260643363, 0.012670202180743217, 0.0025576173793524504, 0.006525890436023474, 0.002047726884484291, 0.04315725341439247, 0.005291572771966457, 0.015887774527072906, 0.007419385015964508, 0.00928849633783102, 0.008459473960101604, 0.0021794706117361784, 0.0001751432428136468, 0.0016173081239685416, 0.00032313851988874376, 0.0006092464900575578, 0.0068478952161967754, 0.003270030952990055, 0.01982773467898369, 0.01111553329974413], [0.011662920005619526, 
0.022474296391010284, 0.2320466786623001, 0.009867199696600437, 0.15998326241970062, 0.011043944396078587, 0.26905593276023865, 0.03418850526213646, 0.013568037189543247, 0.03919167444109917, 0.022601231932640076, 0.045184310525655746, 0.022963279858231544, 0.010173018090426922, 0.0019292066572234035, 0.014990849420428276, 0.00046939379535615444, 0.030491184443235397, 0.0017445319099351764, 0.005693709012120962, 0.021019890904426575, 0.019657008349895477], [0.018845243379473686, 0.04221174865961075, 0.010792500339448452, 0.1525384932756424, 0.028304288163781166, 0.2849675416946411, 0.09284459054470062, 0.12770269811153412, 0.07345247268676758, 0.015012885443866253, 0.012207115069031715, 0.009952710941433907, 0.01908513531088829, 0.009341234341263771, 0.002300586784258485, 0.002542532980442047, 0.007094505708664656, 0.010320700705051422, 0.007391999009996653, 0.011907733976840973, 0.02563854493200779, 0.03554476052522659], [0.0018620798364281654, 0.00719993794336915, 0.02802225761115551, 0.001596158486790955, 0.05766072869300842, 0.0020573255605995655, 0.8494631052017212, 0.001901893294416368, 0.02720583975315094, 0.003357214154675603, 0.009675583802163601, 0.002134960377588868, 0.0014008850557729602, 0.0003502012405078858, 0.00031070111435838044, 0.0005566655308939517, 7.763965550111607e-05, 0.0021490883082151413, 0.0001128104267991148, 0.0008928478928282857, 0.0008018311345949769, 0.0012102429755032063], [0.010474367067217827, 0.00898654107004404, 0.012034095823764801, 0.08284707367420197, 0.009032693691551685, 0.1115269735455513, 0.005218434613198042, 0.657943606376648, 0.05389393866062164, 0.024816876277327538, 0.006826159544289112, 0.0071954745799303055, 0.00033990712836384773, 0.000942343263886869, 0.0008973510703071952, 0.000752201012801379, 0.0003285682469140738, 0.00013264940935187042, 0.0008984625455923378, 0.0012579858303070068, 0.0009047082276083529, 0.0027495992835611105], [0.001858195522800088, 0.002413311507552862, 0.001775994896888733, 
0.002896524965763092, 0.02192586101591587, 0.01756308227777481, 0.8688444495201111, 0.0047394693829119205, 0.049672890454530716, 0.0031187429558485746, 0.014884602278470993, 0.0006345895235426724, 0.002519373083487153, 8.364165114471689e-05, 0.0007373635307885706, 0.00024503807071596384, 0.0016964318929240108, 0.003374118125066161, 0.00021260709036141634, 0.0002316012541996315, 0.0003813971416093409, 0.00019074160081800073], [0.005758275743573904, 0.015212997794151306, 0.03797626495361328, 0.009028935804963112, 0.014812194742262363, 0.08215422183275223, 0.18590007722377777, 0.15261390805244446, 0.1506870537996292, 0.24551743268966675, 0.06386729329824448, 0.022636862471699715, 0.002075031166896224, 0.001055161003023386, 0.0002870932512450963, 0.0010525388643145561, 0.0005322954966686666, 0.0018173307180404663, 0.00034439691808074713, 0.0020869506988674402, 0.003893710905686021, 0.000689996057190001], [0.0054536620154976845, 0.025771265849471092, 0.015788959339261055, 0.006074761506170034, 0.02884710021317005, 0.004915212281048298, 0.06515083461999893, 0.13081495463848114, 0.08454187959432602, 0.19597913324832916, 0.1884688138961792, 0.026794541627168655, 0.18005061149597168, 0.005608671810477972, 0.012495575472712517, 0.0021058020647615194, 0.0024275025352835655, 0.005594156216830015, 0.008066478185355663, 0.0009854966774582863, 0.0032450275029987097, 0.0008195384289138019], [0.001255819108337164, 0.030069177970290184, 0.00457319151610136, 0.0022142312955111265, 0.0015703999670222402, 0.013329599983990192, 0.006216906942427158, 0.15066784620285034, 0.04521862417459488, 0.44143185019493103, 0.025417422875761986, 0.14914605021476746, 0.034645240753889084, 0.022915055975317955, 0.0003691103484015912, 0.004515869077295065, 0.001797678298316896, 0.013949860818684101, 0.005563904996961355, 0.01585102267563343, 0.025059441104531288, 0.004221684765070677], [0.0014611295191571116, 0.03874845802783966, 0.0019193203188478947, 0.0057005854323506355, 0.002391333458945155, 
0.015103579498827457, 0.01700466126203537, 0.016522925347089767, 0.06805597245693207, 0.03834724798798561, 0.10262748599052429, 0.03650703281164169, 0.5768957138061523, 0.014419260434806347, 0.0023388864938169718, 0.0015747868455946445, 0.01078084297478199, 0.014687354676425457, 0.008070508949458599, 0.006175187416374683, 0.015002419240772724, 0.005665460601449013], [0.02747010625898838, 0.007803051266819239, 0.006937755737453699, 0.011148764751851559, 0.02276272140443325, 0.008557875640690327, 0.002154915826395154, 0.05403323844075203, 0.004246879834681749, 0.07366035878658295, 0.02070014737546444, 0.38181227445602417, 0.16856680810451508, 0.09739688783884048, 0.006220363546162844, 0.05549173429608345, 0.0022270295303314924, 0.009033157490193844, 0.0035384336952120066, 0.00432415260002017, 0.012560833245515823, 0.01935243234038353], [0.010019483044743538, 0.009451218880712986, 0.0016886526718735695, 0.0020005626138299704, 0.0045614782720804214, 0.0024824037682265043, 0.0011444251285865903, 0.005751050543040037, 0.005201250314712524, 0.015127630904316902, 0.01598960906267166, 0.08030666410923004, 0.5538780689239502, 0.038286615163087845, 0.016700504347682, 0.07324909418821335, 0.02407882548868656, 0.04173418506979942, 0.04482041671872139, 0.010567051358520985, 0.03002827987074852, 0.012932556681334972], [0.007436708081513643, 0.0011615862604230642, 0.004850517492741346, 0.0017892169998958707, 0.011418398469686508, 0.002569305244833231, 0.004919396713376045, 0.012863818556070328, 0.000867458526045084, 0.032054707407951355, 0.004567855969071388, 0.04103902354836464, 0.01677883043885231, 0.058707430958747864, 0.03073531948029995, 0.32112428545951843, 0.03815199434757233, 0.29565563797950745, 0.056043997406959534, 0.010333550162613392, 0.027308769524097443, 0.019622156396508217], [0.006150547880679369, 0.0057354941964149475, 0.004469027277082205, 0.0029946244321763515, 0.00798426941037178, 0.002859539119526744, 0.024616576731204987, 0.0007751746452413499, 
0.0022009252570569515, 0.0017007878050208092, 0.005989440251141787, 0.003564720507711172, 0.04604039713740349, 0.037140004336833954, 0.07027499377727509, 0.04856637120246887, 0.24365341663360596, 0.3740677535533905, 0.061765700578689575, 0.01565651036798954, 0.020016666501760483, 0.013777006417512894], [0.005425728857517242, 0.003342077834531665, 0.006659792270511389, 0.0009104536147788167, 0.012557929381728172, 0.002163361757993698, 0.035459164530038834, 0.003767757909372449, 0.0019918715115636587, 0.01687667891383171, 0.003964193165302277, 0.010239149443805218, 0.0042653470300138, 0.014745749533176422, 0.012347294017672539, 0.15445008873939514, 0.029068637639284134, 0.5532642006874084, 0.050874922424554825, 0.030632788315415382, 0.026881352066993713, 0.020111527293920517], [0.01474917121231556, 0.010537921451032162, 0.003269646782428026, 0.01224602572619915, 0.0046447524800896645, 0.0066806115210056305, 0.002818767447024584, 0.015063962899148464, 0.006366767454892397, 0.004423844162374735, 0.003387484233826399, 0.001638318644836545, 0.004213198088109493, 0.004920140374451876, 0.04582549259066582, 0.03283129259943962, 0.14253054559230804, 0.05613042786717415, 0.5417959690093994, 0.03801661357283592, 0.017070915549993515, 0.03083810955286026], [0.0016951484140008688, 0.0016929004341363907, 0.0007832910632714629, 0.0006063411710783839, 0.003700296161696315, 0.0018548377556726336, 0.00566420704126358, 0.0017023594118654728, 0.0006572136771865189, 0.0060426220297813416, 0.0020712458062916994, 0.002653653034940362, 0.0026297522708773613, 0.0021869956981390715, 0.013561091385781765, 0.060264017432928085, 0.09043793380260468, 0.5993951559066772, 0.1028558537364006, 0.034627724438905716, 0.047123368829488754, 0.017793990671634674], [0.00896301120519638, 0.031953297555446625, 0.023507047444581985, 0.008738920092582703, 0.014331783168017864, 0.014226125553250313, 0.01909146085381508, 0.013419240713119507, 0.009500440210103989, 0.04930558055639267, 0.03425983712077141, 
0.015274844132363796, 0.01122303493320942, 0.003334861248731613, 0.007303719874471426, 0.022494660690426826, 0.02237832546234131, 0.06148830056190491, 0.12716716527938843, 0.05828271061182022, 0.4008457064628601, 0.04290985316038132], [0.012639995664358139, 0.037419483065605164, 0.014932171441614628, 0.007315070368349552, 0.026371153071522713, 0.003631266998127103, 0.007922530174255371, 0.03371603041887283, 0.004618747625499964, 0.05763415992259979, 0.028313489630818367, 0.029290081933140755, 0.05823977664113045, 0.023057933896780014, 0.025803817436099052, 0.03049183078110218, 0.01151258498430252, 0.07987182587385178, 0.1208532378077507, 0.06573359668254852, 0.15845148265361786, 0.16217976808547974], [0.0016394497361034155, 0.02792862430214882, 0.00213988171890378, 0.00523881521075964, 0.0031771305948495865, 0.003122318536043167, 0.001379886525683105, 0.0020589230116456747, 0.0012491834349930286, 0.005655214656144381, 0.002132187597453594, 0.0070074982941150665, 0.02911718748509884, 0.006326914299279451, 0.0022486632224172354, 0.008061478845775127, 0.0072683366015553474, 0.030431469902396202, 0.025652293115854263, 0.06386661529541016, 0.6353752613067627, 0.1289227157831192]]], [[[0.013329816050827503, 0.11532910168170929, 0.0447722002863884, 0.049210160970687866, 0.009630358777940273, 0.009098123759031296, 0.00796535424888134, 0.00819187331944704, 0.007352723274379969, 0.03061041608452797, 0.004423067439347506, 0.00776562700048089, 0.053945526480674744, 0.06812480837106705, 0.05678943544626236, 0.012567473575472832, 0.015669818967580795, 0.017308488488197327, 0.028044559061527252, 0.04302607476711273, 0.21712008118629456, 0.17972491681575775], [0.03535490110516548, 0.06205933168530464, 0.024663856253027916, 0.06627167016267776, 0.15048103034496307, 0.057766061276197433, 0.09747258573770523, 0.132368266582489, 0.11193396896123886, 0.1298731416463852, 0.03421070799231529, 0.013453124091029167, 0.0059248642064630985, 0.0021051864605396986, 0.0052260300144553185, 
0.0016714413650333881, 0.0019403310725465417, 0.001038848888128996, 0.0024923542514443398, 0.0075366259552538395, 0.04696502164006233, 0.009190713986754417], [0.021269541233778, 0.04026561602950096, 0.043649494647979736, 0.09023090451955795, 0.016626952216029167, 0.0614122711122036, 0.01077402662485838, 0.07393039762973785, 0.07552448660135269, 0.04361557215452194, 0.022769996896386147, 0.06122054532170296, 0.016469797119498253, 0.02532939799129963, 0.03795145824551582, 0.006828246172517538, 0.027428660541772842, 0.005769520066678524, 0.04299549758434296, 0.10544238239526749, 0.054311178624629974, 0.11618407070636749], [0.024163395166397095, 0.14088653028011322, 0.04554757475852966, 0.01810387894511223, 0.013570177368819714, 0.03835327550768852, 0.4002905786037445, 0.08713934570550919, 0.07696585357189178, 0.023855460807681084, 0.030299363657832146, 0.011970996856689453, 0.02285624109208584, 0.016815535724163055, 0.0077739194966852665, 0.0009373273933306336, 0.005308128893375397, 0.013765321113169193, 0.004792500287294388, 0.004985583946108818, 0.0051670256070792675, 0.00645196670666337], [0.08247729390859604, 0.058592669665813446, 0.027144214138388634, 0.10301053524017334, 0.045533716678619385, 0.14930963516235352, 0.08298523724079132, 0.10481557995080948, 0.08484368026256561, 0.03847572207450867, 0.05412648990750313, 0.005816461984068155, 0.013240927830338478, 0.0027654480654746294, 0.04152781143784523, 0.005799456033855677, 0.03239091858267784, 0.0049498421140015125, 0.015954162925481796, 0.013250189833343029, 0.024740703403949738, 0.008249208331108093], [0.01861039735376835, 0.05617773160338402, 0.014687979593873024, 0.00892089493572712, 0.0022736373357474804, 0.007475759834051132, 0.7413445711135864, 0.06269175559282303, 0.010801651515066624, 0.00929829478263855, 0.004785843193531036, 0.004663604311645031, 0.015384405851364136, 0.00469361012801528, 0.004593182355165482, 0.00011929162428714335, 0.0018881994765251875, 0.024257026612758636, 0.003090441459789872, 
0.0011562147410586476, 0.0017248920630663633, 0.0013605840504169464], [0.07706607133150101, 0.08311748504638672, 0.03806496411561966, 0.04631725326180458, 0.14665766060352325, 0.0403272807598114, 0.12557953596115112, 0.14505361020565033, 0.035881780087947845, 0.06742193549871445, 0.019364111125469208, 0.008775152266025543, 0.015046888031065464, 0.004524232819676399, 0.006779480259865522, 0.007649664301425219, 0.005474007688462734, 0.00639393599703908, 0.010234189219772816, 0.008959823288023472, 0.08110026270151138, 0.02021067775785923], [0.06625517457723618, 0.03810954838991165, 0.028810279443860054, 0.0717230886220932, 0.15899249911308289, 0.06507547199726105, 0.052031662315130234, 0.1387510895729065, 0.11430027335882187, 0.06304547935724258, 0.06623901426792145, 0.024915004149079323, 0.02849707379937172, 0.012582080438733101, 0.027475761249661446, 0.006099043879657984, 0.010773347690701485, 0.0011542629217728972, 0.006205203477293253, 0.0037687006406486034, 0.006776466500014067, 0.008419433608651161], [0.0108672259375453, 0.08775586634874344, 0.03495035693049431, 0.010776709765195847, 0.010318874381482601, 0.06546897441148758, 0.32353711128234863, 0.05532706901431084, 0.05003165081143379, 0.1387052834033966, 0.025506870821118355, 0.019625894725322723, 0.042086198925971985, 0.044640231877565384, 0.006310942117124796, 0.0025894471909850836, 0.011011438444256783, 0.038819268345832825, 0.003622821532189846, 0.003705881303176284, 0.008982175961136818, 0.005359726492315531], [0.0158266332000494, 0.11006417870521545, 0.015639346092939377, 0.018686331808567047, 0.015556531958281994, 0.050132859498262405, 0.046595003455877304, 0.03241162747144699, 0.07271543890237808, 0.3287586271762848, 0.028614813461899757, 0.03169675171375275, 0.09094233810901642, 0.013207526877522469, 0.012820245698094368, 0.005483018700033426, 0.011933280155062675, 0.004390294663608074, 0.004931379109621048, 0.022197319194674492, 0.049933839589357376, 0.017462583258748055], [0.008046927861869335, 
0.017282094806432724, 0.014085621573030949, 0.03025956265628338, 0.029192587360739708, 0.010094312950968742, 0.002985633909702301, 0.013015596196055412, 0.04067784175276756, 0.21495313942432404, 0.03163984417915344, 0.15355561673641205, 0.12071576714515686, 0.1017708107829094, 0.03233538195490837, 0.026818791404366493, 0.012023909017443657, 0.004363886080682278, 0.006734039168804884, 0.018452266231179237, 0.06368516385555267, 0.047311291098594666], [0.01240499783307314, 0.028720097616314888, 0.02167985774576664, 0.02610538713634014, 0.02883104979991913, 0.00679260678589344, 0.035276859998703, 0.00894259475171566, 0.02904474548995495, 0.13179823756217957, 0.028125692158937454, 0.1119576245546341, 0.1329512894153595, 0.14023976027965546, 0.0354180634021759, 0.03701714053750038, 0.009516800753772259, 0.07494394481182098, 0.006474588066339493, 0.018207870423793793, 0.04253052547574043, 0.03302032873034477], [0.003578467294573784, 0.019804228097200394, 0.010486312210559845, 0.0060030254535377026, 0.01835053786635399, 0.0033680046908557415, 0.005858109798282385, 0.004382243379950523, 0.0677819550037384, 0.04076113551855087, 0.0390935055911541, 0.3041069209575653, 0.17563293874263763, 0.17303067445755005, 0.015112298540771008, 0.033047307282686234, 0.004264926537871361, 0.010954725556075573, 0.0030144695192575455, 0.0264287069439888, 0.0043605901300907135, 0.030578887090086937], [0.0015313368057832122, 0.008352359756827354, 0.011609972454607487, 0.009500904008746147, 0.008570794016122818, 0.0032857004553079605, 0.0007186689763329923, 0.0027450460474938154, 0.014084002934396267, 0.019746288657188416, 0.010625314898788929, 0.19626574218273163, 0.08407515287399292, 0.2615124583244324, 0.02298019826412201, 0.06738676875829697, 0.01101003773510456, 0.012404728680849075, 0.010846082121133804, 0.06613308191299438, 0.018872283399105072, 0.15774314105510712], [0.009745185263454914, 0.047490209341049194, 0.02951585128903389, 0.007732356432825327, 0.006898820400238037, 
0.008565668947994709, 0.01728956028819084, 0.010617706924676895, 0.031481094658374786, 0.013641269877552986, 0.024029091000556946, 0.05846797302365303, 0.1774759441614151, 0.3220119774341583, 0.020731836557388306, 0.027316613122820854, 0.017776984721422195, 0.07981818169355392, 0.01696532778441906, 0.02744058519601822, 0.006820394191890955, 0.03816738352179527], [0.02009492740035057, 0.013248546980321407, 0.014457330107688904, 0.02772066555917263, 0.023933369666337967, 0.011991392821073532, 0.009467591531574726, 0.007513267919421196, 0.020483365282416344, 0.019731717184185982, 0.045216839760541916, 0.013641993515193462, 0.08804580569267273, 0.04188140109181404, 0.11848796159029007, 0.19999292492866516, 0.057951994240283966, 0.08107927441596985, 0.057886525988578796, 0.05124657228589058, 0.048843588680028915, 0.02708299085497856], [0.0023138399701565504, 0.01985454000532627, 0.008770515210926533, 0.0021574487909674644, 0.0011174640385434031, 0.0006771843763999641, 0.0940241813659668, 0.005605330225080252, 0.0028470654506236315, 0.005201152991503477, 0.004744488745927811, 0.017394980415701866, 0.14025239646434784, 0.07537514716386795, 0.014581728726625443, 0.0036140582524240017, 0.004409268032759428, 0.5673868656158447, 0.016103234142065048, 0.004975971765816212, 0.00388646824285388, 0.004706707317382097], [0.003933952189981937, 0.011427193880081177, 0.006819675210863352, 0.005142119713127613, 0.010069216601550579, 0.0012150590773671865, 0.0036837442312389612, 0.002927291439846158, 0.0014335057931020856, 0.011720304377377033, 0.006074029486626387, 0.02643711119890213, 0.10256257653236389, 0.03831561654806137, 0.030502645298838615, 0.2809465527534485, 0.013600893318653107, 0.17345154285430908, 0.07251938432455063, 0.02746090479195118, 0.1166364848613739, 0.05312023684382439], [0.002425484824925661, 0.00611210847273469, 0.00962191354483366, 0.012237587943673134, 0.008399538695812225, 0.0008314026636071503, 0.001552077941596508, 0.003380819223821163, 
0.004212414380162954, 0.003728122217580676, 0.010052308440208435, 0.049649376422166824, 0.11684587597846985, 0.13445299863815308, 0.11901519447565079, 0.16857418417930603, 0.019878456369042397, 0.057536154985427856, 0.13426649570465088, 0.04105884209275246, 0.023436062037944794, 0.0727325826883316], [0.00037357822293415666, 0.011683231219649315, 0.007810074836015701, 0.0024789164308458567, 0.0006174463778734207, 0.0020616967231035233, 0.0038481729570776224, 0.0013364655897021294, 0.0009578316239640117, 0.007645429577678442, 0.0017760396003723145, 0.009744497016072273, 0.03191082924604416, 0.06878264248371124, 0.015218951739370823, 0.03731731325387955, 0.023107541725039482, 0.5671175122261047, 0.06840543448925018, 0.028428589925169945, 0.06702478975057602, 0.04235312342643738], [0.0010264451848343015, 0.016303274780511856, 0.008565264753997326, 0.009078336879611015, 0.004404651466757059, 0.002545578870922327, 0.000664740102365613, 0.0018768399022519588, 0.0018660146743059158, 0.021600741893053055, 0.004172877874225378, 0.01845400221645832, 0.051153652369976044, 0.031188059598207474, 0.03845953196287155, 0.10704054683446884, 0.01888677477836609, 0.021488362923264503, 0.0842084288597107, 0.09627176076173782, 0.3155233860015869, 0.1452207714319229], [0.00024275276518892497, 0.004416278097778559, 0.006568916607648134, 0.012387854978442192, 0.0017089411849156022, 0.00040106516098603606, 0.00018287419516127557, 0.0005546218017116189, 0.0008215706329792738, 0.0023035211488604546, 0.0008158140699379146, 0.029278162866830826, 0.011541127227246761, 0.048838645219802856, 0.03743743896484375, 0.052850428968667984, 0.009642433375120163, 0.06811880320310593, 0.07954518496990204, 0.1148756667971611, 0.1978471577167511, 0.3196207880973816]], [[0.027224190533161163, 0.07976085692644119, 0.0541442446410656, 0.017071794718503952, 0.01880619488656521, 0.03758401423692703, 0.042744047939777374, 0.09495820850133896, 0.0669282004237175, 0.0822884663939476, 0.05903572961688042, 
0.07045638561248779, 0.0694318562746048, 0.10678926855325699, 0.013907063752412796, 0.014120466075837612, 0.017430981621146202, 0.019788922742009163, 0.0365811325609684, 0.0345415361225605, 0.019774017855525017, 0.01663234643638134], [0.025864925235509872, 0.05062786117196083, 0.03215295076370239, 0.016410982236266136, 0.013786666095256805, 0.1377604752779007, 0.09512532502412796, 0.17734771966934204, 0.2089209109544754, 0.06970807909965515, 0.039823319762945175, 0.04805050417780876, 0.012089362367987633, 0.025615675374865532, 0.0038174588698893785, 0.003013155423104763, 0.006609831936657429, 0.006940844003111124, 0.003899669973179698, 0.01336274016648531, 0.0019009371753782034, 0.0071706827729940414], [0.011992360465228558, 0.04116611182689667, 0.05647065117955208, 0.01263574231415987, 0.01583274081349373, 0.10664138942956924, 0.049655865877866745, 0.04958367720246315, 0.25023043155670166, 0.05309099704027176, 0.03158072754740715, 0.07490893453359604, 0.017285553738474846, 0.1058981642127037, 0.008202550932765007, 0.008045314811170101, 0.01157412864267826, 0.021364906802773476, 0.003462516935542226, 0.0487673357129097, 0.005091531667858362, 0.016518378630280495], [0.021084340289235115, 0.0889970138669014, 0.051358044147491455, 0.012583089992403984, 0.028267668560147285, 0.0471222959458828, 0.07104809582233429, 0.32718271017074585, 0.04793157801032066, 0.0549321323633194, 0.03803415969014168, 0.05414627492427826, 0.024475090205669403, 0.05973923206329346, 0.004542752169072628, 0.009253040887415409, 0.005195866338908672, 0.011619731783866882, 0.009887626394629478, 0.015081675723195076, 0.005398519337177277, 0.012119061313569546], [0.11109789460897446, 0.0736202821135521, 0.09646977484226227, 0.046279001981019974, 0.025841085240244865, 0.032156627625226974, 0.09459850937128067, 0.18796545267105103, 0.03849802911281586, 0.04433860629796982, 0.050190702080726624, 0.023021992295980453, 0.019979143515229225, 0.03212689235806465, 0.0171065554022789, 0.007321105804294348, 
0.009433631785213947, 0.01803619973361492, 0.03482715040445328, 0.01112183928489685, 0.018233010545372963, 0.0077364337630569935], [0.008419889025390148, 0.02320699766278267, 0.055418647825717926, 0.009559418074786663, 0.022879622876644135, 0.129610076546669, 0.04287903383374214, 0.09196890890598297, 0.10937129706144333, 0.0949132964015007, 0.06455326825380325, 0.12651219964027405, 0.009196938015520573, 0.09808826446533203, 0.004779291804879904, 0.011517931707203388, 0.014326633885502815, 0.01312338374555111, 0.00487959710881114, 0.03207986056804657, 0.006382576655596495, 0.026332814246416092], [0.012471058405935764, 0.01638936437666416, 0.023256970569491386, 0.02821999229490757, 0.02163812331855297, 0.08226073533296585, 0.0520733967423439, 0.1395103484392166, 0.16343912482261658, 0.11251598596572876, 0.07980085164308548, 0.0831393450498581, 0.03514093533158302, 0.07663507759571075, 0.01809040457010269, 0.004522961564362049, 0.013843166641891003, 0.006922018714249134, 0.008326671086251736, 0.010945362038910389, 0.0048469118773937225, 0.006011195946484804], [0.016978571191430092, 0.026600904762744904, 0.02610667422413826, 0.00845765508711338, 0.016451837494969368, 0.021026533097028732, 0.021841617301106453, 0.07851675152778625, 0.04452582448720932, 0.07113588601350784, 0.12963661551475525, 0.17856061458587646, 0.059166841208934784, 0.17711521685123444, 0.015095391310751438, 0.036742083728313446, 0.008897609077394009, 0.01391436718404293, 0.015850337222218513, 0.012755864299833775, 0.004366107285022736, 0.016256624832749367], [0.002628189977258444, 0.010839552618563175, 0.003945675678551197, 0.010471976362168789, 0.004172449000179768, 0.03732378035783768, 0.03782461956143379, 0.01674746535718441, 0.1745922565460205, 0.0810830146074295, 0.09018813818693161, 0.15251842141151428, 0.14394733309745789, 0.02413349598646164, 0.038520559668540955, 0.013114940375089645, 0.07233086228370667, 0.03189822658896446, 0.017350856214761734, 0.023399828001856804, 0.005322280339896679, 
0.007646109908819199], [0.0059545873664319515, 0.011462703347206116, 0.008881546556949615, 0.012282107025384903, 0.0051070391200482845, 0.026876598596572876, 0.01569279655814171, 0.02486608549952507, 0.04816454276442528, 0.08499667793512344, 0.08613928407430649, 0.26530686020851135, 0.1560366004705429, 0.13062113523483276, 0.022205302491784096, 0.009883815422654152, 0.015911299735307693, 0.016764357686042786, 0.009887498803436756, 0.02009332925081253, 0.003339158371090889, 0.01952667534351349], [0.0020578012336045504, 0.006215489003807306, 0.008468342013657093, 0.004570078570395708, 0.006138230208307505, 0.006518903654068708, 0.007129787467420101, 0.003830431494861841, 0.0371137298643589, 0.018007611855864525, 0.03536124899983406, 0.11180218309164047, 0.11426565796136856, 0.11517223715782166, 0.045119836926460266, 0.05397350713610649, 0.06392333656549454, 0.07657178491353989, 0.06765852123498917, 0.11824461817741394, 0.033312465995550156, 0.0645442008972168], [0.0028227055445313454, 0.007230573333799839, 0.010531773790717125, 0.00496258120983839, 0.0074689011089503765, 0.009966365061700344, 0.005579963326454163, 0.003036883193999529, 0.017958864569664, 0.008265296928584576, 0.008689366281032562, 0.06811104714870453, 0.01955106481909752, 0.0815005674958229, 0.020719630643725395, 0.07584665715694427, 0.056521937251091, 0.13256900012493134, 0.03689943626523018, 0.21309562027454376, 0.020865170285105705, 0.1878066062927246], [0.0032444556709378958, 0.014982543885707855, 0.00558377243578434, 0.0032007843255996704, 0.0110445786267519, 0.007431712932884693, 0.007097897585481405, 0.007813294418156147, 0.03304334357380867, 0.01275238674134016, 0.013931682333350182, 0.04519020766019821, 0.03074255958199501, 0.030332038179039955, 0.017717260867357254, 0.09254255890846252, 0.11328203231096268, 0.09506743401288986, 0.25192007422447205, 0.10380659997463226, 0.03902945667505264, 0.060243405401706696], [0.003581963712349534, 0.010570394806563854, 0.011846366338431835, 
0.003039678791537881, 0.006773123051971197, 0.01024623028934002, 0.005813485477119684, 0.0028217590879648924, 0.019562579691410065, 0.011105978861451149, 0.008686419576406479, 0.043086569756269455, 0.016372274607419968, 0.05444017052650452, 0.012440674938261509, 0.083851657807827, 0.06445392221212387, 0.2367386817932129, 0.05699765309691429, 0.2010897397994995, 0.04025959596037865, 0.09622105956077576], [0.00413127988576889, 0.049016304314136505, 0.023698272183537483, 0.004594247788190842, 0.0171611737459898, 0.006505616474896669, 0.018208075314760208, 0.03562803938984871, 0.011508351191878319, 0.013822917826473713, 0.02057575061917305, 0.04467954486608505, 0.045121051371097565, 0.052712760865688324, 0.010465129278600216, 0.06577494740486145, 0.0288251806050539, 0.10300682485103607, 0.21134649217128754, 0.0772753581404686, 0.06810400635004044, 0.08783867955207825], [0.02837335504591465, 0.03711417689919472, 0.08876051008701324, 0.022802546620368958, 0.019743183627724648, 0.0071112713776528835, 0.016864696517586708, 0.01650129444897175, 0.0014082561247050762, 0.009742887690663338, 0.007123137824237347, 0.010107058100402355, 0.008692460134625435, 0.02489292249083519, 0.014204457402229309, 0.02791478857398033, 0.014033699408173561, 0.10809716582298279, 0.14466938376426697, 0.05049535259604454, 0.2279251366853714, 0.11342217773199081], [0.0018898388370871544, 0.025456393137574196, 0.05580620467662811, 0.003571987384930253, 0.020641833543777466, 0.012484016828238964, 0.011059381999075413, 0.004507414996623993, 0.006752396002411842, 0.014396626502275467, 0.007429134100675583, 0.019049353897571564, 0.010821823962032795, 0.05588474124670029, 0.00488731125369668, 0.031045865267515182, 0.0351497158408165, 0.06690862774848938, 0.07032337784767151, 0.1258489191532135, 0.23910032212734222, 0.1769847422838211], [0.004853586200624704, 0.031914517283439636, 0.05833367630839348, 0.010479033924639225, 0.028827931731939316, 0.0223674476146698, 0.018520113080739975, 
0.016730617731809616, 0.004817193374037743, 0.0181556586176157, 0.00775168277323246, 0.026943592354655266, 0.0202474482357502, 0.10928761214017868, 0.009105991572141647, 0.017583759501576424, 0.023096995428204536, 0.06251035630702972, 0.0651761144399643, 0.07367094606161118, 0.14831510186195374, 0.2213105410337448], [0.003606783924624324, 0.0316176563501358, 0.04392627999186516, 0.0020871968008577824, 0.015261088497936726, 0.002242293208837509, 0.005135294049978256, 0.0025034896098077297, 0.0008695329888723791, 0.00376355042681098, 0.003129430580884218, 0.0045029520988464355, 0.01061532087624073, 0.020739883184432983, 0.0032060728408396244, 0.025825846940279007, 0.013013235293328762, 0.03480467200279236, 0.16535550355911255, 0.04508579149842262, 0.4165380299091339, 0.14617004990577698], [0.006209098733961582, 0.1277577131986618, 0.03767745569348335, 0.013847813941538334, 0.014169939793646336, 0.07586611807346344, 0.0722920373082161, 0.01773994415998459, 0.02109101228415966, 0.023972000926733017, 0.007199655286967754, 0.030093282461166382, 0.030051838606595993, 0.03308150917291641, 0.007751886732876301, 0.013866579160094261, 0.05470852181315422, 0.12466506659984589, 0.029271256178617477, 0.09213875234127045, 0.044259220361709595, 0.12228938937187195], [0.03160737827420235, 0.24355271458625793, 0.09134609997272491, 0.01924837753176689, 0.015421466901898384, 0.024906519800424576, 0.03741057589650154, 0.027913851663470268, 0.021638767793774605, 0.026489127427339554, 0.019882818683981895, 0.03435174375772476, 0.07670563459396362, 0.044646911323070526, 0.008773631416261196, 0.0057657621800899506, 0.014675445854663849, 0.015552964061498642, 0.03248156979680061, 0.0544821172952652, 0.06060877442359924, 0.09253780543804169], [0.01309046521782875, 0.09952002018690109, 0.10922721773386002, 0.008637293241918087, 0.022895384579896927, 0.04048741236329079, 0.01923629641532898, 0.005287813488394022, 0.012523608282208443, 0.0064201620407402515, 0.002230329206213355, 
0.01119685173034668, 0.005381290800869465, 0.05552436411380768, 0.0037754869554191828, 0.020832812413573265, 0.022605935111641884, 0.0672585740685463, 0.01335853710770607, 0.16884218156337738, 0.07044295221567154, 0.22122512757778168]], [[0.013889333233237267, 0.024310674518346786, 0.015822581946849823, 0.004652051255106926, 0.0034509592223912477, 0.015959125012159348, 0.0049957833252847195, 0.24597524106502533, 0.08051042258739471, 0.1957736611366272, 0.0345037505030632, 0.011956032365560532, 0.02124916948378086, 0.021772874519228935, 0.004938834346830845, 0.0042424495331943035, 0.008550966158509254, 0.005471791140735149, 0.10275126248598099, 0.04580143466591835, 0.12186305969953537, 0.011558621190488338], [0.03332085534930229, 0.3685886263847351, 0.016378583386540413, 0.005971084348857403, 0.0012233993038535118, 0.17125999927520752, 0.028292618691921234, 0.18316805362701416, 0.08752048760652542, 0.05582023411989212, 0.004120252560824156, 0.0068380143493413925, 0.02644810453057289, 0.0009195520542562008, 0.00015734824410174042, 2.2107564291218296e-05, 0.0007469879928976297, 8.083357533905655e-05, 0.0009847470792010427, 0.0017591204959899187, 0.004881869535893202, 0.001497064484283328], [0.02636588364839554, 0.04639305919408798, 0.19748859107494354, 0.004983284045010805, 0.005461658351123333, 0.41018205881118774, 0.06746701151132584, 0.06883512437343597, 0.09489501267671585, 0.00793270394206047, 0.008530987426638603, 0.011209880001842976, 0.007036254741251469, 0.03162617236375809, 0.0003839124692603946, 0.00018299525254406035, 0.0047507272101938725, 0.00048120724386535585, 0.0006905588670633733, 0.0009890181245282292, 0.0005529368063434958, 0.00356089323759079], [0.0076461187563836575, 0.02211490087211132, 0.00838121771812439, 0.004232148639857769, 0.00417422316968441, 0.12832030653953552, 0.03139204531908035, 0.651923656463623, 0.06801528483629227, 0.02486296370625496, 0.0034371935762465, 0.004703977610915899, 0.002324180444702506, 0.003088249359279871, 
0.001400690176524222, 0.0011336985044181347, 0.004705144092440605, 0.0025663210544735193, 0.020639877766370773, 0.0028191448654979467, 0.0016614478081464767, 0.0004572076431941241], [0.006788517348468304, 0.019868474453687668, 0.013623065315186977, 0.050097085535526276, 0.07683750241994858, 0.03280224651098251, 0.04842178523540497, 0.30990728735923767, 0.09425288438796997, 0.0648345872759819, 0.03323366120457649, 0.022760316729545593, 0.00815257802605629, 0.008909141644835472, 0.03675909712910652, 0.055139560252428055, 0.013713493011891842, 0.018824255093932152, 0.05713377892971039, 0.011733782477676868, 0.013665198348462582, 0.0025417362339794636], [0.004130590707063675, 0.008482594043016434, 0.007279593963176012, 0.0020815632306039333, 0.0009620613418519497, 0.034717004746198654, 0.014436732977628708, 0.6691219210624695, 0.14954078197479248, 0.05079814791679382, 0.016926249489188194, 0.019262520596385002, 0.0015820361441001296, 0.0035821935161948204, 0.0006810796330682933, 0.0003203656815458089, 0.0011748253600671887, 0.0010743285529315472, 0.007970765233039856, 0.0031130905263125896, 0.0013948236592113972, 0.0013668054016306996], [0.029553715139627457, 0.02440962940454483, 0.012560379691421986, 0.012035978958010674, 0.015558449551463127, 0.024723973125219345, 0.023755434900522232, 0.21082018315792084, 0.1860254853963852, 0.3870457112789154, 0.025178398936986923, 0.008189431391656399, 0.010255817323923111, 0.0019414238631725311, 0.0017670763190835714, 0.0011148822959512472, 0.0014685791684314609, 0.0012221033684909344, 0.0036704731173813343, 0.005016693379729986, 0.012824170291423798, 0.0008620654116384685], [0.0007498882478103042, 0.004303903318941593, 0.000836235354654491, 0.0006419371929951012, 0.00031931776902638376, 0.000990460510365665, 0.0007992546888999641, 0.10706619918346405, 0.042309656739234924, 0.7742831707000732, 0.022998660802841187, 0.021793298423290253, 0.004944006912410259, 0.0011549625778570771, 0.00046541483607143164, 0.00036085493047721684, 
0.00013245544687379152, 0.00033766956767067313, 0.001785685308277607, 0.00242023728787899, 0.011019779369235039, 0.0002870217140298337], [0.010081635788083076, 0.006212344393134117, 0.0030213629361242056, 0.0024302604142576456, 0.0009148595854640007, 0.0006548618548549712, 0.0019478998146951199, 0.009308109991252422, 0.012585212476551533, 0.16455595195293427, 0.32552409172058105, 0.21421611309051514, 0.18534083664417267, 0.031106941401958466, 0.011159342713654041, 0.0011167848715558648, 0.001000502030365169, 0.0007071496802382171, 0.0034855289850383997, 0.0010050134733319283, 0.0067084734328091145, 0.006916641257703304], [0.030307576060295105, 0.020149579271674156, 0.0009856617543846369, 0.010165879502892494, 0.0004127054417040199, 0.0011792400619015098, 0.0006991982227191329, 0.016328340396285057, 0.07929829508066177, 0.1691557765007019, 0.024979684501886368, 0.2846132218837738, 0.2786753177642822, 0.013346417807042599, 0.037535905838012695, 0.001771488576196134, 0.0018959037261083722, 0.0006960752070881426, 0.003143094712868333, 0.01613430865108967, 0.006327144801616669, 0.0021992323454469442], [0.0018283659592270851, 0.006441562436521053, 0.006362327840179205, 0.0032542094122618437, 0.0017664505867287517, 0.001565900631248951, 0.0006110327667556703, 0.0023235357366502285, 0.004191476386040449, 0.019412856549024582, 0.03520503640174866, 0.07362096756696701, 0.34703969955444336, 0.33209842443466187, 0.03814489021897316, 0.016643185168504715, 0.01801823265850544, 0.004432047251611948, 0.02286759950220585, 0.006845253054052591, 0.025603851303458214, 0.03172311559319496], [0.0006497327703982592, 0.0032409541308879852, 0.0024517306592315435, 0.0007398778107017279, 0.001774869509972632, 0.0014488259330391884, 0.0011356197064742446, 0.0010844232747331262, 0.0005263227503746748, 0.010214547626674175, 0.02962348610162735, 0.039961911737918854, 0.21552611887454987, 0.3044285774230957, 0.019557319581508636, 0.07450375705957413, 0.03574607893824577, 0.05061343312263489, 
0.04759567230939865, 0.011192061938345432, 0.03004932403564453, 0.11793529242277145], [0.003939492162317038, 0.005186048336327076, 0.0028129376005381346, 0.00047821577754803, 0.0011534192599356174, 0.0066472869366407394, 0.003391837002709508, 0.003660728922113776, 0.002599797211587429, 0.023237992078065872, 0.03698565065860748, 0.01136291678994894, 0.12165060639381409, 0.1453050673007965, 0.0126552265137434, 0.038784269243478775, 0.12416373938322067, 0.06729494035243988, 0.27289915084838867, 0.018611498177051544, 0.051009707152843475, 0.04616944491863251], [0.015550065785646439, 0.0033941976726055145, 0.0516139380633831, 0.0013842741027474403, 0.006015194579958916, 0.03050239570438862, 0.008321072906255722, 0.007394178304821253, 0.007370842155069113, 0.00869288481771946, 0.024659045040607452, 0.009686343371868134, 0.015659037977457047, 0.4006548225879669, 0.006922056898474693, 0.05484640225768089, 0.15007027983665466, 0.07635603845119476, 0.0518912598490715, 0.015959810465574265, 0.010322051122784615, 0.042733751237392426], [0.0014935133513063192, 0.004127115942537785, 0.003041086019948125, 0.0021027824841439724, 0.0052307965233922005, 0.01097947545349598, 0.003336509456858039, 0.024942224845290184, 0.003569636959582567, 0.0044708955101668835, 0.0025510303676128387, 0.0017183299642056227, 0.008214323781430721, 0.013063210994005203, 0.012707704678177834, 0.03780611976981163, 0.11310282349586487, 0.06512824445962906, 0.6444877982139587, 0.013957219198346138, 0.020305601879954338, 0.003663544077426195], [0.00026654236717149615, 0.0012676987098529935, 0.0008438777877017856, 0.0027181629557162523, 0.006285006180405617, 0.0003428158815950155, 0.0006039492436684668, 0.006052898708730936, 0.0005374694592319429, 0.003365809563547373, 0.00312202051281929, 0.0031045430805534124, 0.003305646823719144, 0.0030069751664996147, 0.016868840903043747, 0.12308444082736969, 0.013578404672443867, 0.14050203561782837, 0.5003294944763184, 0.051216769963502884, 0.10305144637823105, 
0.016545236110687256], [0.00042641686741262674, 0.002447257749736309, 0.0038685176987200975, 0.0005537137622013688, 0.0007149241282604635, 0.0030899334233254194, 0.0017128023318946362, 0.010905580595135689, 0.002898385049775243, 0.004185476806014776, 0.009283619001507759, 0.003295479342341423, 0.0018944261828437448, 0.004415654577314854, 0.0021660111378878355, 0.003691440913826227, 0.020952586084604263, 0.043153274804353714, 0.6852627396583557, 0.05481585115194321, 0.08964934200048447, 0.05061660334467888], [0.0011002655373886228, 0.0029483058024197817, 0.0009983563795685768, 0.0017371447756886482, 0.003309000749140978, 0.0007353770197369158, 0.00043428890057839453, 0.0011390306754037738, 0.0003419536806177348, 0.01523875817656517, 0.002158441348001361, 0.0007853733259253204, 0.002411877503618598, 0.00040742315468378365, 0.0015283834654837847, 0.006205799989402294, 0.002763866912573576, 0.018219416961073875, 0.03403664752840996, 0.10809848457574844, 0.7484382390975952, 0.04696362838149071], [3.900891533703543e-05, 0.0016800134908407927, 0.0001720614091027528, 0.0004941789084114134, 0.00024220322666224092, 6.426161417039111e-05, 3.044537515961565e-05, 0.0007592158508487046, 0.00010837960144272074, 0.004137334413826466, 0.0005138221313245595, 0.0003520304162520915, 0.0018947436474263668, 0.00011433463805587962, 0.0008401465020142496, 0.0008604293689131737, 0.0007645919686183333, 0.002074305433779955, 0.060891859233379364, 0.02666434273123741, 0.8877707719802856, 0.00953155942261219], [0.030869409441947937, 0.02037700265645981, 0.005079362541437149, 0.012989310547709465, 0.0026108429301530123, 0.001225972198881209, 0.0012554284185171127, 0.0012895985273644328, 0.0003618541231844574, 0.008926505222916603, 0.01420968770980835, 0.00855319481343031, 0.03727669641375542, 0.009564511477947235, 0.021613353863358498, 0.004676229786127806, 0.006140925455838442, 0.010715650394558907, 0.02969781495630741, 0.03897075727581978, 0.24935956299304962, 0.48423632979393005], 
[0.0265885591506958, 0.23712484538555145, 0.009366214275360107, 0.03398608788847923, 0.0025218925438821316, 0.015784189105033875, 0.004194266628473997, 0.018547268584370613, 0.014833593741059303, 0.01165932510048151, 0.002808973425999284, 0.012079027481377125, 0.05839111655950546, 0.00446748360991478, 0.0247700996696949, 0.002263204660266638, 0.016434067860245705, 0.007020219229161739, 0.08164442330598831, 0.2125350832939148, 0.15148305892944336, 0.05149703100323677], [0.010691378265619278, 0.021421628072857857, 0.04253455623984337, 0.006654064171016216, 0.009182385168969631, 0.02161269076168537, 0.0046091508120298386, 0.0017019894439727068, 0.0004983704420737922, 0.0013803282054141164, 0.002961230929940939, 0.0014717766316607594, 0.008122924715280533, 0.06420918554067612, 0.006567050237208605, 0.021202098578214645, 0.04501955211162567, 0.03487745672464371, 0.04383077099919319, 0.04873768612742424, 0.08416750282049179, 0.5185463428497314]], [[0.05310942232608795, 0.1348220705986023, 0.014972653239965439, 0.053206298500299454, 0.022870590910315514, 0.1357005536556244, 0.012159416452050209, 0.20890739560127258, 0.03444475680589676, 0.037445809692144394, 0.012217259034514427, 0.026253482326865196, 0.02480938471853733, 0.008434928022325039, 0.021159853786230087, 0.021463511511683464, 0.028547437861561775, 0.004002728499472141, 0.06423930823802948, 0.023050539195537567, 0.04138515144586563, 0.0167975015938282], [0.10308168828487396, 0.09997699409723282, 0.053777918219566345, 0.045120719820261, 0.04750707373023033, 0.04435744136571884, 0.05501793324947357, 0.04894644394516945, 0.014875761233270168, 0.08383245766162872, 0.020654190331697464, 0.04284926503896713, 0.014437686651945114, 0.01401417888700962, 0.01647479459643364, 0.02714076079428196, 0.018850041553378105, 0.0377190001308918, 0.029118744656443596, 0.022113235667347908, 0.13384215533733368, 0.026291394606232643], [0.005201671738177538, 0.2088826298713684, 0.010550945065915585, 0.010404527187347412, 
0.014596695080399513, 0.05397045984864235, 0.020167509093880653, 0.08468928188085556, 0.03648602217435837, 0.08003710210323334, 0.019331717863678932, 0.020188095048069954, 0.1327054351568222, 0.006403221748769283, 0.0042143408209085464, 0.01426133792847395, 0.03367102891206741, 0.02741200290620327, 0.067594975233078, 0.036980900913476944, 0.09881555289030075, 0.01343461126089096], [0.026013720780611038, 0.124604232609272, 0.09978049993515015, 0.043511997908353806, 0.06382285803556442, 0.04285633936524391, 0.05996227636933327, 0.0490751713514328, 0.10360530763864517, 0.031188253313302994, 0.04923752695322037, 0.04939796030521393, 0.031101014465093613, 0.025368226692080498, 0.012697448953986168, 0.017139438539743423, 0.008728942833840847, 0.01203949749469757, 0.011554454453289509, 0.0559249222278595, 0.03771117702126503, 0.04467867687344551], [0.054504189640283585, 0.1251368373632431, 0.13039107620716095, 0.019180577248334885, 0.04452313110232353, 0.035094305872917175, 0.0831463560461998, 0.06476180255413055, 0.016948755830526352, 0.04727930948138237, 0.008864641189575195, 0.01807231456041336, 0.010504575446248055, 0.06219421699643135, 0.004481426440179348, 0.03329682722687721, 0.004871019162237644, 0.030530136078596115, 0.016790026798844337, 0.034168437123298645, 0.09881709516048431, 0.056443002074956894], [0.010935979895293713, 0.13709574937820435, 0.06686083227396011, 0.031255364418029785, 0.1430632323026657, 0.06393157690763474, 0.24971042573451996, 0.0580558255314827, 0.09425404667854309, 0.023633483797311783, 0.025243915617465973, 0.004743678495287895, 0.033228423446416855, 0.007121070753782988, 0.003776353318244219, 0.0067634074948728085, 0.005777636077255011, 0.00548594631254673, 0.005274617113173008, 0.006207454949617386, 0.014867769554257393, 0.002713315887376666], [0.013047688640654087, 0.16356341540813446, 0.020635494962334633, 0.028323540464043617, 0.01648377999663353, 0.2451157420873642, 0.0604371577501297, 0.1602398008108139, 0.04527917876839638, 
0.037346526980400085, 0.01029165554791689, 0.019699370488524437, 0.03510252758860588, 0.017322508618235588, 0.009108630940318108, 0.014890170656144619, 0.019330861046910286, 0.011729697696864605, 0.020798487588763237, 0.018117612227797508, 0.019946733489632607, 0.013189406134188175], [0.03803807124495506, 0.05296729877591133, 0.10079770535230637, 0.0297976303845644, 0.0977151095867157, 0.013954720459878445, 0.05818924307823181, 0.013446220196783543, 0.3923022747039795, 0.019233301281929016, 0.051468200981616974, 0.0018058590358123183, 0.039040349423885345, 0.004016534890979528, 0.024623362347483635, 0.00214465893805027, 0.010929122567176819, 0.0009271741728298366, 0.0088606933131814, 0.0118239875882864, 0.024968188256025314, 0.0029502480756491423], [0.004302759654819965, 0.04846099019050598, 0.0017953821225091815, 0.004523179959505796, 0.006757632363587618, 0.11898816376924515, 0.006198470946401358, 0.691307783126831, 0.018345454707741737, 0.03318379819393158, 0.0035194542724639177, 0.0030156662687659264, 0.011832290329039097, 0.0014316969318315387, 0.0004920806386508048, 0.005465487949550152, 0.006352199707180262, 0.003732155542820692, 0.018817022442817688, 0.005396767053753138, 0.005217834375798702, 0.0008637873688712716], [0.016230851411819458, 0.02346467785537243, 0.014995042234659195, 0.007220591884106398, 0.01057481113821268, 0.028795631602406502, 0.21918489038944244, 0.03208623453974724, 0.29023000597953796, 0.08873945474624634, 0.07354021072387695, 0.048332247883081436, 0.04875728115439415, 0.009890235029160976, 0.007474597543478012, 0.005593019537627697, 0.0167164895683527, 0.03563349321484566, 0.0070075374096632, 0.009042586199939251, 0.003958214074373245, 0.00253180880099535], [0.0013802197063341737, 0.045497339218854904, 0.0014958071988075972, 0.0019328398630023003, 0.0013030597474426031, 0.04599804803729057, 0.0017614028183743358, 0.4750140309333801, 0.07061789184808731, 0.17974671721458435, 0.009144118055701256, 0.029673784971237183, 
0.0388905331492424, 0.0033926714677363634, 0.000766835524700582, 0.006606546230614185, 0.00449671596288681, 0.005398771725594997, 0.03287923336029053, 0.02954951487481594, 0.012285740114748478, 0.0021681685466319323], [0.0010042574722319841, 0.02185620181262493, 0.0023663819301873446, 0.002207065699622035, 0.0028821753803640604, 0.009547639638185501, 0.005632468499243259, 0.006249378900974989, 0.46745020151138306, 0.0331832617521286, 0.1402471512556076, 0.011625013314187527, 0.10447607189416885, 0.0015622314531356096, 0.005997032392770052, 0.0017454181797802448, 0.04929162189364433, 0.002959485864266753, 0.05667153000831604, 0.04494859650731087, 0.025135459378361702, 0.002961267251521349], [0.0041319276206195354, 0.009988305158913136, 0.001655775704421103, 0.009246055036783218, 0.0019254367798566818, 0.006355068646371365, 0.0009000013815239072, 0.019387105479836464, 0.004056631587445736, 0.0375538095831871, 0.017761457711458206, 0.7689065933227539, 0.014836568385362625, 0.007627115119248629, 0.009271074086427689, 0.01781311258673668, 0.0020979319233447313, 0.006672979798167944, 0.0053980182856321335, 0.019558019936084747, 0.006281584035605192, 0.028575366362929344], [0.00019935713498853147, 0.004922952502965927, 0.000477236055303365, 0.0011488820891827345, 0.0014066238654777408, 0.0011631948873400688, 0.00034465527278371155, 0.001210101880133152, 0.012236570008099079, 0.006974854040890932, 0.02043837681412697, 0.002881919499486685, 0.8099002242088318, 0.0029152068309485912, 0.012199115939438343, 0.0031240233220160007, 0.04806842654943466, 0.001017145230434835, 0.047954391688108444, 0.0052900840528309345, 0.013931741937994957, 0.002194973872974515], [0.011663202196359634, 0.01966938190162182, 0.01105324737727642, 0.0360584557056427, 0.015302056446671486, 0.020694101229310036, 0.0033229782711714506, 0.02392120473086834, 0.018765030428767204, 0.02232804335653782, 0.024380862712860107, 0.19736985862255096, 0.04426422715187073, 0.09504427760839462, 0.043987855315208435, 
0.10771849006414413, 0.017292069271206856, 0.02936392091214657, 0.01658349297940731, 0.1139245554804802, 0.012094405479729176, 0.11519831418991089], [0.016236301511526108, 0.004102243576198816, 0.020084990188479424, 0.004882314708083868, 0.0341668501496315, 0.0025413178373128176, 0.03988299518823624, 0.001445916248485446, 0.02722821943461895, 0.00931577943265438, 0.02808111533522606, 0.004396017640829086, 0.03425800800323486, 0.061321794986724854, 0.10371734201908112, 0.07541707903146744, 0.16334491968154907, 0.10672284662723541, 0.1200563982129097, 0.04504089802503586, 0.07091706991195679, 0.026839667931199074], [0.005207501817494631, 0.01962292566895485, 0.0037106508389115334, 0.016740281134843826, 0.024015581235289574, 0.03821024298667908, 0.004795283079147339, 0.08161075413227081, 0.005930824670940638, 0.013352588750422001, 0.005230376496911049, 0.04147205874323845, 0.01410337258130312, 0.0348806269466877, 0.011385263875126839, 0.4407646059989929, 0.017302662134170532, 0.07066602259874344, 0.03848038241267204, 0.07327024638652802, 0.007734264712780714, 0.03151344880461693], [0.0006436582771129906, 0.003961893729865551, 0.0010401929030194879, 0.0013662304263561964, 0.0015424893936142325, 0.0029321485199034214, 0.0017668299842625856, 0.0004819755267817527, 0.016799572855234146, 0.0012617232277989388, 0.00607364671304822, 0.0013328429777175188, 0.07421419024467468, 0.0063791130669415, 0.04604626074433327, 0.014083622954785824, 0.495615690946579, 0.02027941681444645, 0.22622790932655334, 0.04178613796830177, 0.023099342361092567, 0.013065041974186897], [0.015185861848294735, 0.007897170260548592, 0.008405840024352074, 0.0168925691395998, 0.011793205514550209, 0.007972890511155128, 0.0016455411678180099, 0.012068303301930428, 0.005820004735141993, 0.010269507765769958, 0.0030269380658864975, 0.02140204794704914, 0.008385111577808857, 0.03498563542962074, 0.0692068338394165, 0.19190755486488342, 0.046289220452308655, 0.1259486973285675, 0.08310786634683609, 
0.22060510516166687, 0.03833993524312973, 0.05884421244263649], [0.000696788658387959, 0.016820784658193588, 0.0006123929633758962, 0.00108251569326967, 0.001314462278969586, 0.00461602071300149, 0.0010579536901786923, 0.0024597691372036934, 0.005949906073510647, 0.0024956136476248503, 0.0021033640950918198, 0.0002868453739210963, 0.05107981339097023, 0.0005493721109814942, 0.0036190038081258535, 0.0035167743917554617, 0.20921501517295837, 0.0139852873980999, 0.5577502846717834, 0.016207680106163025, 0.10188432782888412, 0.0026959723327308893], [0.01525637786835432, 0.01225972082465887, 0.008335014805197716, 0.004008774179965258, 0.0026666377671062946, 0.0038871753495186567, 0.005528980866074562, 0.006242379080504179, 0.0037729586474597454, 0.014174346812069416, 0.0044085378758609295, 0.07387752085924149, 0.0061907474882900715, 0.024107536301016808, 0.008278445340692997, 0.04995125159621239, 0.01391818467527628, 0.41661450266838074, 0.03812016546726227, 0.15753807127475739, 0.02403520792722702, 0.10682747513055801], [0.00013937009498476982, 0.04127083718776703, 0.0008179721189662814, 0.00041451939614489675, 0.00026193694793619215, 0.001987214433029294, 0.0002692131674848497, 0.001165015622973442, 0.006305762100964785, 0.009434210136532784, 0.002322638873010874, 0.0017220351146534085, 0.10933394730091095, 0.0007667355239391327, 0.0015873287338763475, 0.001445943140424788, 0.061193786561489105, 0.005673724692314863, 0.2751707434654236, 0.04293359816074371, 0.42648208141326904, 0.009301500394940376]], [[0.2822325825691223, 0.12519122660160065, 0.042887136340141296, 0.11708007007837296, 0.03998766466975212, 0.12998060882091522, 0.023512771353125572, 0.008755924180150032, 0.01217581331729889, 0.00832386501133442, 0.005239577032625675, 0.01699448563158512, 0.022402891889214516, 0.015544791705906391, 0.04121703281998634, 0.005114803556352854, 0.02217494510114193, 0.0025804874021559954, 0.0021307964343577623, 0.008812612853944302, 0.011898321099579334, 
0.05576159805059433], [0.09039568156003952, 0.06412247568368912, 0.05620339885354042, 0.05213481932878494, 0.022210581228137016, 0.03259110450744629, 0.008051144890487194, 0.03974035009741783, 0.05036379396915436, 0.020760469138622284, 0.029742686077952385, 0.031238362193107605, 0.01648813858628273, 0.004889580886811018, 0.017431939020752907, 0.006472737528383732, 0.06477569788694382, 0.007940195500850677, 0.07757168263196945, 0.06374094635248184, 0.07034347206354141, 0.1727907508611679], [0.11726350337266922, 0.02486586943268776, 0.04439239203929901, 0.05991422384977341, 0.027492573484778404, 0.06326665729284286, 0.02800041437149048, 0.07565823942422867, 0.08292129635810852, 0.0104284156113863, 0.01827540062367916, 0.032467592507600784, 0.0036342113744467497, 0.00556714553385973, 0.016848016530275345, 0.005890402942895889, 0.07574228197336197, 0.024455444887280464, 0.08968688547611237, 0.09289082139730453, 0.03650681674480438, 0.06383141130208969], [0.02398708276450634, 0.08952313661575317, 0.04570017009973526, 0.01096056867390871, 0.015167814679443836, 0.04508693888783455, 0.01760626956820488, 0.0016935404855757952, 0.007411513943225145, 0.022657444700598717, 0.01677352748811245, 0.04698380082845688, 0.016319507732987404, 0.007691584061831236, 0.005001412704586983, 0.008981631137430668, 0.027175430208444595, 0.050867944955825806, 0.014073357917368412, 0.1158614531159401, 0.13166958093643188, 0.27880626916885376], [0.1018771082162857, 0.054894085973501205, 0.06841243803501129, 0.01864277385175228, 0.02056063897907734, 0.01654248684644699, 0.01726212352514267, 0.0026729879900813103, 0.002321894746273756, 0.005313843954354525, 0.040930505841970444, 0.08733204007148743, 0.024875575676560402, 0.03972169756889343, 0.028814591467380524, 0.011742005124688148, 0.0179708544164896, 0.019633090123534203, 0.02394600212574005, 0.0405433215200901, 0.03760532662272453, 0.31838458776474], [0.27889660000801086, 0.01545778289437294, 0.012830357067286968, 0.05049550160765648, 
0.09866495430469513, 0.2825336158275604, 0.056101568043231964, 0.017624501138925552, 0.022976957261562347, 0.0031173613388091326, 0.005031283479183912, 0.03644345700740814, 0.006271242164075375, 0.005282181780785322, 0.008890182711184025, 0.0064013744704425335, 0.04608504846692085, 0.0036485723685473204, 0.0016362008173018694, 0.011513172648847103, 0.0014015481574460864, 0.028696473687887192], [0.0889194905757904, 0.07369919866323471, 0.05422310158610344, 0.29554200172424316, 0.19823046028614044, 0.12897484004497528, 0.03311465308070183, 0.012842555530369282, 0.014393421821296215, 0.0011061906116083264, 0.00292765349149704, 0.004733944311738014, 0.009002922102808952, 0.005319722928106785, 0.027598787099123, 0.006640784442424774, 0.012704752385616302, 0.0013859517639502883, 0.000993837951682508, 0.002711143111810088, 0.001550173037685454, 0.023384369909763336], [0.016308505088090897, 0.09141673147678375, 0.010777379386126995, 0.05183994770050049, 0.12216949462890625, 0.6191883683204651, 0.036737073212862015, 0.022948572412133217, 0.0036542252637445927, 0.0036436221562325954, 0.00046256950008682907, 0.0003325961297377944, 0.0010133370524272323, 0.0008723590290173888, 0.001857372815720737, 0.006332061253488064, 0.004824914038181305, 0.0006768028833903372, 0.0010123624233528972, 0.000373270915588364, 0.0015247896080836654, 0.002033612923696637], [0.021338511258363724, 0.001875351881608367, 0.0033189819660037756, 0.0037625227123498917, 0.00810437835752964, 0.02605423703789711, 0.729677677154541, 0.08995331823825836, 0.032017406076192856, 0.002230494748800993, 0.0011400951771065593, 0.0005430784076452255, 0.0002586783666629344, 0.0012494269758462906, 0.0022653713822364807, 0.004276297055184841, 0.018100528046488762, 0.030568046495318413, 0.018678339198231697, 0.004111673217266798, 0.00018221761274617165, 0.0002934566291514784], [0.07836302369832993, 0.00015645322855561972, 0.0004694931267295033, 0.00062597292708233, 0.003558453870937228, 0.011698946356773376, 
0.19736044108867645, 0.5630931258201599, 0.04362969100475311, 0.007864333689212799, 0.024797962978482246, 0.01092729065567255, 0.0008696999284438789, 0.001049255020916462, 0.00023804407101124525, 0.0015413587680086493, 0.004182844888418913, 0.021496981382369995, 0.02429291047155857, 0.0023152881767600775, 8.518546383129433e-05, 0.001383282127790153], [0.017083194106817245, 0.002392067573964596, 0.0009540338069200516, 0.002255034167319536, 0.009510509669780731, 0.006671373266726732, 0.008009465411305428, 0.167305588722229, 0.6573893427848816, 0.018535776063799858, 0.02459060214459896, 0.011931275017559528, 0.007367977872490883, 0.0009599532349966466, 0.0021228701807558537, 0.0020728495437651873, 0.008530309423804283, 0.0027605355717241764, 0.03271007910370827, 0.014128436334431171, 0.0016492133727297187, 0.0010696263052523136], [0.005528116133064032, 0.007198686711490154, 0.0031445820350199938, 0.0021399222314357758, 0.00047867096145637333, 0.01767408289015293, 0.0071522630751132965, 0.06037617474794388, 0.10065029561519623, 0.37868231534957886, 0.16216585040092468, 0.11972836405038834, 0.0178899634629488, 0.009006354957818985, 0.0030887683387845755, 0.0014173458330333233, 0.00815904326736927, 0.017331639304757118, 0.01773841306567192, 0.044332630932331085, 0.00867537409067154, 0.007441102061420679], [0.003073370084166527, 0.006352569907903671, 0.012744521722197533, 0.0010514890309423208, 0.00163645192515105, 0.0012873796513304114, 0.004066957160830498, 0.002650537760928273, 0.02497495897114277, 0.11264529079198837, 0.4637230932712555, 0.05361034348607063, 0.08861023932695389, 0.022733589634299278, 0.0075514717027544975, 0.0063100531697273254, 0.012416357174515724, 0.01659090258181095, 0.03266160562634468, 0.03257524222135544, 0.08172556757926941, 0.011008019559085369], [0.01267341896891594, 0.011015678755939007, 0.04714898020029068, 0.018659714609384537, 0.007948564365506172, 0.004342333413660526, 0.005574199371039867, 0.019503796473145485, 0.025510774925351143, 
0.09207309037446976, 0.0822770968079567, 0.2630923092365265, 0.036346081644296646, 0.14184504747390747, 0.031871456652879715, 0.012882271781563759, 0.004489844664931297, 0.019419802352786064, 0.012130219489336014, 0.040770068764686584, 0.07799213379621506, 0.0324331559240818], [0.002686644671484828, 0.00803389959037304, 0.01496668066829443, 0.0030889238696545362, 0.010402721352875233, 0.003740966320037842, 0.0033975085243582726, 0.0007451754063367844, 0.004200395196676254, 0.02521429769694805, 0.03803069517016411, 0.13346335291862488, 0.16323573887348175, 0.08565916866064072, 0.015157230198383331, 0.06090223044157028, 0.01932060904800892, 0.054904766380786896, 0.01481808815151453, 0.046527739614248276, 0.18053732812404633, 0.11096581816673279], [0.005677541717886925, 0.001717607257887721, 0.0020948979072272778, 0.006084005814045668, 0.005925928242504597, 0.0015842228895053267, 0.0008860653615556657, 0.0007576172356493771, 0.0004149937303736806, 0.004993502516299486, 0.013522130437195301, 0.16401515901088715, 0.11229320615530014, 0.2824847996234894, 0.1641259342432022, 0.11682575196027756, 0.009355338290333748, 0.021354302763938904, 0.008607150055468082, 0.010422276332974434, 0.010153583250939846, 0.05670400708913803], [0.009965412318706512, 0.001070581842213869, 0.001617630012333393, 0.005057987291365862, 0.011590664274990559, 0.0235748328268528, 0.004757912363857031, 0.0005485534202307463, 0.0031493548303842545, 0.0013048832770437002, 0.01012533251196146, 0.019765792414546013, 0.044091012328863144, 0.03364003822207451, 0.13259123265743256, 0.14224675297737122, 0.494108110666275, 0.022564947605133057, 0.009289389476180077, 0.012746193446218967, 0.002842160640284419, 0.013351215049624443], [0.0017052610637620091, 0.0008550009224563837, 0.001840447774156928, 0.03047000989317894, 0.009421685710549355, 0.008386148139834404, 0.0029164564330130816, 0.004956172779202461, 0.001029013772495091, 0.00035914636100642383, 0.0011242826003581285, 0.002377505414187908, 
0.007954031229019165, 0.050733648240566254, 0.30107900500297546, 0.42811939120292664, 0.07982731610536575, 0.04204607754945755, 0.015350870788097382, 0.00332350330427289, 0.0008326105307787657, 0.005292404908686876], [0.0006303279660642147, 0.0023032925091683865, 0.0005058026290498674, 0.0037294160574674606, 0.02246973291039467, 0.014504419639706612, 0.00102397205773741, 0.0010974392062053084, 0.0018186343368142843, 0.0005090332124382257, 0.001192143652588129, 0.000519061868544668, 0.014142341911792755, 0.002626078901812434, 0.03410877287387848, 0.3089299201965332, 0.4398525655269623, 0.01567925326526165, 0.11474967747926712, 0.004553935024887323, 0.00985216349363327, 0.005202059168368578], [0.00035407886025495827, 6.535424472531304e-05, 0.0001746070629451424, 0.0006465907208621502, 0.00024914374807849526, 0.00044080178486183286, 0.006281605456024408, 0.009301397018134594, 0.0012122340267524123, 0.00024269895220641047, 0.00011915254435734823, 0.0003877313865814358, 5.1684677600860596e-05, 0.002063428983092308, 0.005023709964007139, 0.020348776131868362, 0.019036728888750076, 0.6811339259147644, 0.20375560224056244, 0.04698821157217026, 0.0008852879400365055, 0.0012371839256957173], [0.0008364887908101082, 0.00013730808859691024, 0.00026861950755119324, 3.270126035204157e-05, 0.0005373143358156085, 0.00030752355814911425, 0.004092332907021046, 0.004788788501173258, 0.0008808189886622131, 0.0004721125296782702, 0.006053353194147348, 0.0009636193281039596, 0.00035501320962794125, 0.00030443715513683856, 0.00011291661212453619, 0.007796401623636484, 0.00821683369576931, 0.1720670610666275, 0.7571839094161987, 0.01829834282398224, 0.0045729270204901695, 0.011721148155629635], [0.001334105501882732, 0.001421088818460703, 0.001790117472410202, 0.0009394153603352606, 0.00030762891401536763, 0.00031716973171569407, 0.0007022629724815488, 0.005253891460597515, 0.010180171579122543, 0.0012762933038175106, 0.008107486180961132, 0.018422380089759827, 0.002400952624157071, 
0.006352836731821299, 0.00578388711437583, 0.0030591131653636694, 0.013050566427409649, 0.08480604737997055, 0.26709938049316406, 0.4741295278072357, 0.0365770198404789, 0.05668850988149643]], [[0.03999787196516991, 0.13633936643600464, 0.13917548954486847, 0.05749603360891342, 0.05875691771507263, 0.051014360040426254, 0.012422792613506317, 0.025810951367020607, 0.015280197374522686, 0.004410990979522467, 0.0033218595199286938, 0.005844538565725088, 0.013040170073509216, 0.04572293162345886, 0.017798684537410736, 0.038477573543787, 0.027789639309048653, 0.020188868045806885, 0.04143977165222168, 0.08902537822723389, 0.07098504155874252, 0.0856606736779213], [0.11921419948339462, 0.02355477213859558, 0.0933554396033287, 0.07802135497331619, 0.11111357808113098, 0.15978024899959564, 0.1510603129863739, 0.1892486959695816, 0.014039800502359867, 0.010096955113112926, 0.009749578312039375, 0.002005455782637, 0.0006381708662956953, 0.005944457370787859, 0.003905054647475481, 0.00876191072165966, 0.005088548641651869, 0.0021565924398601055, 0.003382065799087286, 0.002052068244665861, 0.003104611998423934, 0.003726072609424591], [0.02873813919723034, 0.008199452422559261, 0.0581994391977787, 0.021361490711569786, 0.1282757669687271, 0.032532699406147, 0.08279058337211609, 0.07729537785053253, 0.021665681153535843, 0.021569345146417618, 0.07606270909309387, 0.03133494779467583, 0.003067159792408347, 0.04294995218515396, 0.008577180095016956, 0.12141382694244385, 0.033793967217206955, 0.05677007883787155, 0.05357655882835388, 0.02056729421019554, 0.025024278089404106, 0.0462341383099556], [0.005298456642776728, 0.31294214725494385, 0.05099542811512947, 0.02071276120841503, 0.08663376420736313, 0.04068746790289879, 0.08590728789567947, 0.01446249894797802, 0.02042919211089611, 0.0480210967361927, 0.018828852102160454, 0.014519577845931053, 0.016885971650481224, 0.010242459364235401, 0.004897757433354855, 0.033912286162376404, 0.015300787054002285, 0.04165439307689667, 
0.009520936757326126, 0.028035903349518776, 0.09691519290208817, 0.02319568768143654], [0.051399316638708115, 0.10134744644165039, 0.1339493989944458, 0.038566362112760544, 0.06991154700517654, 0.050861407071352005, 0.12046512961387634, 0.07890050858259201, 0.013300820253789425, 0.020965229719877243, 0.03146025910973549, 0.01616312935948372, 0.00795214157551527, 0.03341425582766533, 0.008206801488995552, 0.03459560498595238, 0.009765363298356533, 0.04345010593533516, 0.02416963130235672, 0.020640285685658455, 0.03769872710108757, 0.052816614508628845], [0.03074025921523571, 0.2495996206998825, 0.04384952783584595, 0.2657131552696228, 0.05263487622141838, 0.07226650416851044, 0.05452629178762436, 0.017420828342437744, 0.022277936339378357, 0.016219450160861015, 0.012880057096481323, 0.015461058355867863, 0.026729943230748177, 0.008643822744488716, 0.02828214317560196, 0.00540115823969245, 0.010103701613843441, 0.004721149802207947, 0.0032940555829554796, 0.013527346774935722, 0.013892457820475101, 0.03181466832756996], [0.0886710062623024, 0.18424734473228455, 0.06591016054153442, 0.09396620094776154, 0.05254710093140602, 0.1065930500626564, 0.024804729968309402, 0.09957823157310486, 0.010867265984416008, 0.010703024454414845, 0.007149841636419296, 0.005971965845674276, 0.037284620106220245, 0.018378885462880135, 0.015050175599753857, 0.02684059552848339, 0.01589384116232395, 0.008473332040011883, 0.04984516277909279, 0.0131990322843194, 0.03213206306099892, 0.03189229592680931], [0.17006038129329681, 0.09598390758037567, 0.06842314451932907, 0.10811103135347366, 0.025760438293218613, 0.18442745506763458, 0.03533710911870003, 0.129045307636261, 0.03229370340704918, 0.0397929884493351, 0.007738678716123104, 0.0032919393852353096, 0.03872378543019295, 0.016173016279935837, 0.01828768290579319, 0.002589513547718525, 0.003759975777938962, 0.0012341307010501623, 0.004220856819301844, 0.0032280755694955587, 0.007732089143246412, 0.003784722415730357], 
[0.041170015931129456, 0.1641829013824463, 0.0514712929725647, 0.08994370698928833, 0.03251234069466591, 0.3027729094028473, 0.05343855544924736, 0.05861964449286461, 0.05335240811109543, 0.030644459649920464, 0.021125217899680138, 0.009498181752860546, 0.04038940742611885, 0.02088487707078457, 0.007311908062547445, 0.0052909450605511665, 0.007939696311950684, 0.0007495827740058303, 0.0007736149127595127, 0.002011024858802557, 0.0019056606106460094, 0.00401162076741457], [0.1807015836238861, 0.011137936264276505, 0.019160443916916847, 0.008325118571519852, 0.02533840760588646, 0.10677351802587509, 0.35074636340141296, 0.10721901804208755, 0.03737333044409752, 0.06034132093191147, 0.05895484983921051, 0.010832867585122585, 0.0021482857409864664, 0.0064216600731015205, 0.000737943162675947, 0.003332480788230896, 0.001962200039997697, 0.004207657650113106, 0.0010183332487940788, 0.000689883076120168, 0.0008110897615551949, 0.0017656440613791347], [0.06158625707030296, 0.008680044673383236, 0.03519434854388237, 0.0177833940833807, 0.024469831958413124, 0.0482257604598999, 0.08503898233175278, 0.1677950918674469, 0.07551420480012894, 0.06804568320512772, 0.053908295929431915, 0.033021558076143265, 0.02357024885714054, 0.07212331891059875, 0.012188125401735306, 0.046533599495887756, 0.02128451317548752, 0.04878285154700279, 0.05251014232635498, 0.017850805073976517, 0.012356264516711235, 0.013536770828068256], [0.012146024033427238, 0.0274797510355711, 0.018446845933794975, 0.018388163298368454, 0.015428858809173107, 0.06262984871864319, 0.05062039941549301, 0.13429273664951324, 0.08170091360807419, 0.22831860184669495, 0.04712942615151405, 0.04686319828033447, 0.06597419083118439, 0.03329865261912346, 0.013313562609255314, 0.022179797291755676, 0.026117267087101936, 0.036136481910943985, 0.03003809228539467, 0.012532584369182587, 0.012462942861020565, 0.004501676186919212], [0.007000788580626249, 0.011110203340649605, 0.01332679484039545, 0.014581963419914246, 
0.005316345952451229, 0.02388429082930088, 0.023862646892666817, 0.03248567134141922, 0.15557947754859924, 0.30398574471473694, 0.132398322224617, 0.0530916303396225, 0.10648566484451294, 0.04011742025613785, 0.028620963916182518, 0.007941178046166897, 0.01419869065284729, 0.007044382859021425, 0.0028920769691467285, 0.008815746754407883, 0.004789292812347412, 0.002470721723511815], [0.0013366767670959234, 0.002608912531286478, 0.005865528713911772, 0.0023466700222343206, 0.0035891933366656303, 0.0026315529830753803, 0.005482606589794159, 0.012209036387503147, 0.10123398154973984, 0.12753410637378693, 0.23896224796772003, 0.1767703890800476, 0.09427078068256378, 0.07876976579427719, 0.015160622075200081, 0.02142944186925888, 0.017717914655804634, 0.024332741275429726, 0.013343472965061665, 0.029895003885030746, 0.012510998174548149, 0.011998281814157963], [0.00018904171884059906, 0.013851407915353775, 0.0035983340349048376, 0.001519187819212675, 0.0011880460660904646, 0.0012532976688817143, 0.0008125862805172801, 0.0017804349772632122, 0.021883785724639893, 0.038999564945697784, 0.011500506661832333, 0.024357417598366737, 0.6794815063476562, 0.08032473176717758, 0.014405528083443642, 0.014263911172747612, 0.01076631247997284, 0.008446337655186653, 0.00594039773568511, 0.026187047362327576, 0.03368992730975151, 0.005560738034546375], [0.004214724525809288, 0.009449340403079987, 0.01764342561364174, 0.006341645959764719, 0.008949420414865017, 0.003373797982931137, 0.004364225547760725, 0.017019689083099365, 0.01112251915037632, 0.033902399241924286, 0.05939311534166336, 0.1075752004981041, 0.21798689663410187, 0.23535360395908356, 0.03187654912471771, 0.06403500586748123, 0.01677064597606659, 0.026626629754900932, 0.04240012541413307, 0.019489029422402382, 0.029154570773243904, 0.03295738622546196], [0.00031488112290389836, 0.0037548255641013384, 0.001231553265824914, 0.004478266462683678, 0.00044985298882238567, 0.0005336561007425189, 0.00021374689822550863, 
0.0005958583788014948, 0.009749229066073895, 0.008582275360822678, 0.005613719113171101, 0.034729305654764175, 0.7111873626708984, 0.06481549888849258, 0.07360197603702545, 0.005169469863176346, 0.010350532829761505, 0.0036290325224399567, 0.006413915194571018, 0.0290662944316864, 0.01276366040110588, 0.01275516115128994], [0.0001564932317705825, 0.0011308749672025442, 0.0006062138127163053, 0.0011056943330913782, 0.00019019933824893087, 0.00017971749184653163, 1.8499427824281156e-05, 0.0004105975094716996, 0.0012296299682930112, 0.0014726866502314806, 0.0011909313034266233, 0.01604086346924305, 0.6796392798423767, 0.07137461751699448, 0.05308975651860237, 0.02307272143661976, 0.015759747475385666, 0.004730944987386465, 0.06405448913574219, 0.02784370258450508, 0.020877884700894356, 0.015824533998966217], [0.0003120468172710389, 0.000329299975419417, 0.0005880086100660264, 0.00168377417139709, 0.0002060971746686846, 0.00013234143261797726, 3.092927727266215e-05, 0.0007057294133119285, 0.0036599705927073956, 0.0038930284790694714, 0.0016226450679823756, 0.012208083644509315, 0.6020154356956482, 0.07431188970804214, 0.16447558999061584, 0.013122373260557652, 0.01165603008121252, 0.004478863440454006, 0.04905034974217415, 0.0277261920273304, 0.020940130576491356, 0.006851105950772762], [0.0009515918209217489, 0.006168926600366831, 0.004523825831711292, 0.004291617311537266, 0.0018394176149740815, 0.002368772868067026, 0.0006260741502046585, 0.0017014509066939354, 0.009437035769224167, 0.005624907091259956, 0.00533437030389905, 0.024674054235219955, 0.24862483143806458, 0.08944667130708694, 0.04555438831448555, 0.08237340301275253, 0.060921818017959595, 0.043959345668554306, 0.10718940198421478, 0.13425438106060028, 0.06928807497024536, 0.05084555223584175], [0.009104182943701744, 0.0003731527249328792, 0.0037936880253255367, 0.0008182828314602375, 0.004442253150045872, 0.0021395727526396513, 0.007574884686619043, 0.006354537792503834, 0.00832727923989296, 
0.008489013649523258, 0.012122727930545807, 0.014942306093871593, 0.004711809568107128, 0.024760505184531212, 0.006091086659580469, 0.10089290142059326, 0.026716232299804688, 0.3345773220062256, 0.2843339741230011, 0.06940167397260666, 0.04078805074095726, 0.029244689270853996], [0.0002974004310090095, 0.00012995673750992864, 0.0013644680147990584, 0.0002384464314673096, 0.0008454260532744229, 0.0001149473391706124, 0.0005105127929709852, 0.0010227300226688385, 0.0006452680099755526, 0.0007083697128109634, 0.0014110086485743523, 0.004363375250250101, 0.0018935146508738399, 0.012606200762093067, 0.003662578761577606, 0.06202584505081177, 0.013364705257117748, 0.28776469826698303, 0.4527113437652588, 0.08560911566019058, 0.04212610796093941, 0.02658390998840332]], [[0.004968677181750536, 0.040945250540971756, 0.0036739930510520935, 0.016916919499635696, 0.04697772487998009, 0.008766950108110905, 0.012496487237513065, 0.14009933173656464, 0.11674445867538452, 0.06302979588508606, 0.03852527588605881, 0.033096734434366226, 0.024230197072029114, 0.005214687902480364, 0.009043782949447632, 0.026475483551621437, 0.007641279604285955, 0.012997717596590519, 0.08748069405555725, 0.18091410398483276, 0.07785625755786896, 0.04190414771437645], [0.008745179511606693, 0.028146905824542046, 0.006058879196643829, 0.00787487905472517, 0.11123515665531158, 0.017687037587165833, 0.018315056338906288, 0.06540282070636749, 0.04163898527622223, 0.03377329185605049, 0.026598017662763596, 0.07072515040636063, 0.07897026836872101, 0.0442059226334095, 0.012225408107042313, 0.16351301968097687, 0.028432128950953484, 0.035071104764938354, 0.06515970826148987, 0.027903003618121147, 0.04153516888618469, 0.0667828842997551], [0.030052706599235535, 0.01846538856625557, 0.008983110077679157, 0.051598869264125824, 0.2477734535932541, 0.04323885217308998, 0.0425751768052578, 0.075035959482193, 0.05466070771217346, 0.02953382395207882, 0.027544131502509117, 0.03506001830101013, 0.025457847863435745, 
0.02799176424741745, 0.04598955437541008, 0.10683530569076538, 0.03339606523513794, 0.016064828261733055, 0.024484237655997276, 0.01442151889204979, 0.02376800775527954, 0.0170687697827816], [0.02289312146604061, 0.02623225748538971, 0.010867725126445293, 0.026231123134493828, 0.057992786169052124, 0.02420707233250141, 0.026702944189310074, 0.06663139164447784, 0.03889290243387222, 0.01925436034798622, 0.027523037046194077, 0.06967966258525848, 0.040607452392578125, 0.0445622056722641, 0.033981479704380035, 0.1032436192035675, 0.04797236993908882, 0.046138547360897064, 0.11677595227956772, 0.04465143010020256, 0.03549366816878319, 0.06946490705013275], [0.05167795717716217, 0.01618902198970318, 0.05290801078081131, 0.045392509549856186, 0.022126667201519012, 0.05084610357880592, 0.06678931415081024, 0.011826897971332073, 0.02766847237944603, 0.031862590461969376, 0.053828444331884384, 0.06002126634120941, 0.010212033987045288, 0.040971048176288605, 0.06430786848068237, 0.03692477568984032, 0.07616470009088516, 0.11985574662685394, 0.022572068497538567, 0.044534794986248016, 0.020474202930927277, 0.07284548878669739], [0.017708538100123405, 0.0534040741622448, 0.1051381528377533, 0.011371416039764881, 0.024122413247823715, 0.02008678950369358, 0.06035376712679863, 0.010433878749608994, 0.015973666682839394, 0.01774793490767479, 0.04752284288406372, 0.049050770699977875, 0.05190841853618622, 0.08200719952583313, 0.02359006367623806, 0.06310250610113144, 0.02636655420064926, 0.1751980483531952, 0.018720898777246475, 0.02018967643380165, 0.020539766177535057, 0.08546262234449387], [0.021481337025761604, 0.05386662110686302, 0.03785444796085358, 0.042601000517606735, 0.039499327540397644, 0.03967165946960449, 0.024614546447992325, 0.04552619159221649, 0.04692533612251282, 0.06473013013601303, 0.056201331317424774, 0.04882865399122238, 0.06901610642671585, 0.04449725151062012, 0.04933413118124008, 0.03261619061231613, 0.0336456336081028, 0.026471905410289764, 
0.0403003953397274, 0.05469071865081787, 0.07784760743379593, 0.049779441207647324], [0.006490893196314573, 0.09766529500484467, 0.034978292882442474, 0.06314463168382645, 0.03299699351191521, 0.038408126682043076, 0.03517940640449524, 0.07737032324075699, 0.04692130163311958, 0.06248481199145317, 0.0377533994615078, 0.031509410589933395, 0.024611320346593857, 0.01253514178097248, 0.023256655782461166, 0.01706216111779213, 0.013701499439775944, 0.027999291196465492, 0.05044665187597275, 0.1352275311946869, 0.0665602758526802, 0.06369654089212418], [0.0031172928865998983, 0.052729927003383636, 0.01260797306895256, 0.01073879562318325, 0.02399338409304619, 0.004858596716076136, 0.013234332203865051, 0.0612574964761734, 0.05843706056475639, 0.023722583428025246, 0.08744517713785172, 0.0932704508304596, 0.1126384511590004, 0.01825174130499363, 0.014129250310361385, 0.05189301073551178, 0.005367399659007788, 0.018171051517128944, 0.06812470406293869, 0.12267771363258362, 0.04358324781060219, 0.09975039958953857], [0.009866696782410145, 0.02656625397503376, 0.003149157389998436, 0.0064754122868180275, 0.01871504820883274, 0.013263600878417492, 0.005473238416016102, 0.1035161167383194, 0.02864569053053856, 0.0901091992855072, 0.014232669025659561, 0.03241217881441116, 0.02250676415860653, 0.019330350682139397, 0.00768064521253109, 0.031111281365156174, 0.022828394547104836, 0.01852481998503208, 0.1822468787431717, 0.07563585042953491, 0.20365247130393982, 0.06405722349882126], [0.0024635542649775743, 0.05552801862359047, 0.004343550186604261, 0.007927401922643185, 0.05787081643939018, 0.005208796355873346, 0.01586936041712761, 0.1312408447265625, 0.05247008055448532, 0.05152970924973488, 0.04327263683080673, 0.04812074825167656, 0.07046014815568924, 0.015247712843120098, 0.006948410999029875, 0.0635833665728569, 0.004887830466032028, 0.018599385395646095, 0.10378634929656982, 0.08281992375850677, 0.09163819998502731, 0.06618313491344452], [0.001991687808185816, 
0.1206386610865593, 0.00731792813166976, 0.006385676562786102, 0.0258328877389431, 0.004104522988200188, 0.004765221383422613, 0.13853782415390015, 0.027090586721897125, 0.03421990945935249, 0.023472437635064125, 0.03825563192367554, 0.14272786676883698, 0.023725593462586403, 0.006259567569941282, 0.04079573228955269, 0.004227152094244957, 0.011256557889282703, 0.11811231076717377, 0.05341993644833565, 0.11040548235177994, 0.05645688995718956], [0.0036420084070414305, 0.09990554302930832, 0.006858844310045242, 0.0032612334471195936, 0.015261911787092686, 0.007312741596251726, 0.006066611036658287, 0.042315781116485596, 0.015450653620064259, 0.03816709294915199, 0.012616374529898167, 0.04241587966680527, 0.29637274146080017, 0.04166606068611145, 0.007813488133251667, 0.0471089668571949, 0.0139264315366745, 0.029484109953045845, 0.08287850767374039, 0.029382778331637383, 0.10402638465166092, 0.05406584218144417], [0.018461748957633972, 0.06761181354522705, 0.006485629826784134, 0.1352333128452301, 0.04237111657857895, 0.02615305222570896, 0.003451118478551507, 0.17339111864566803, 0.024667128920555115, 0.040656670928001404, 0.014706983231008053, 0.015338449738919735, 0.0431857705116272, 0.010863223113119602, 0.06835354119539261, 0.02653890661895275, 0.01880447193980217, 0.0025772841181606054, 0.09514341503381729, 0.044962503015995026, 0.10460419207811356, 0.016438594087958336], [0.022999973967671394, 0.04396217688918114, 0.009682106785476208, 0.07627753168344498, 0.03621842339634895, 0.01964651420712471, 0.010511326603591442, 0.0951315313577652, 0.03712042421102524, 0.021013526245951653, 0.018780162557959557, 0.03118833526968956, 0.03830130398273468, 0.015348159708082676, 0.049289770424366, 0.044429294764995575, 0.025441553443670273, 0.013113446533679962, 0.15552166104316711, 0.12386338412761688, 0.06725487112998962, 0.0449044369161129], [0.061390217393636703, 0.023409752175211906, 0.014118160121142864, 0.11664387583732605, 0.013820447959005833, 0.04333237558603287, 
0.009304632432758808, 0.06634677946567535, 0.02172471582889557, 0.031916651874780655, 0.019392667338252068, 0.029937012121081352, 0.012412833981215954, 0.018747661262750626, 0.10311390459537506, 0.019394783303141594, 0.07247672975063324, 0.012473469600081444, 0.13329240679740906, 0.07410438358783722, 0.06584939360618591, 0.03679713234305382], [0.024160051718354225, 0.13000968098640442, 0.08341880142688751, 0.05851416289806366, 0.0326974056661129, 0.02599189803004265, 0.02607724256813526, 0.02438277378678322, 0.02387791872024536, 0.020812978968024254, 0.03483130410313606, 0.033078163862228394, 0.09324789047241211, 0.03998280689120293, 0.061239905655384064, 0.04878482222557068, 0.02324247919023037, 0.032369621098041534, 0.032115936279296875, 0.05610805004835129, 0.04292188584804535, 0.05213424563407898], [0.029819928109645844, 0.11006604880094528, 0.018131231889128685, 0.05933527648448944, 0.012517414055764675, 0.030651988461613655, 0.0036082782316952944, 0.10833783447742462, 0.020167773589491844, 0.06399201601743698, 0.015294192358851433, 0.020257659256458282, 0.09076672047376633, 0.020780278369784355, 0.04179183021187782, 0.009686066769063473, 0.019818954169750214, 0.0026566069573163986, 0.07549519091844559, 0.04488319158554077, 0.1826501190662384, 0.019291328266263008], [0.012193100526928902, 0.10588841140270233, 0.01394807081669569, 0.11410462856292725, 0.023300832137465477, 0.028496434912085533, 0.01240374892950058, 0.17692020535469055, 0.03355710953474045, 0.04839204251766205, 0.016029762104153633, 0.01427012775093317, 0.04127311334013939, 0.007762848865240812, 0.035902123898267746, 0.008676053956151009, 0.0119856558740139, 0.004306779243052006, 0.08819851279258728, 0.09670879691839218, 0.08387592434883118, 0.021805765107274055], [0.002644906286150217, 0.0572403259575367, 0.0032951608300209045, 0.013297829777002335, 0.011503130197525024, 0.0028298236429691315, 0.0012802951969206333, 0.1432207077741623, 0.04057375341653824, 0.04420192539691925, 
0.023268133401870728, 0.02481035329401493, 0.13604436814785004, 0.00975587498396635, 0.01528729498386383, 0.019707849249243736, 0.004074277821928263, 0.0019595513585954905, 0.1525489091873169, 0.12703381478786469, 0.13133804500102997, 0.03408379480242729], [0.017182037234306335, 0.019167376682162285, 0.0016713981749489903, 0.015818312764167786, 0.032315682619810104, 0.01633022539317608, 0.003036660375073552, 0.15787597000598907, 0.04404456540942192, 0.09556423872709274, 0.006739548407495022, 0.019784854725003242, 0.031909193843603134, 0.01981479302048683, 0.016639431938529015, 0.031858768314123154, 0.030037662014365196, 0.004982573911547661, 0.17420156300067902, 0.0715017318725586, 0.17021256685256958, 0.01931089721620083], [0.0037636710330843925, 0.07091870903968811, 0.0019294553203508258, 0.008229296654462814, 0.035637639462947845, 0.003516856813803315, 0.001857399009168148, 0.17453792691230774, 0.028205638751387596, 0.047728825360536575, 0.016610249876976013, 0.025067033246159554, 0.20298731327056885, 0.017190715298056602, 0.010472831316292286, 0.04783643037080765, 0.0053144218400120735, 0.0027489413041621447, 0.12259841710329056, 0.029323630034923553, 0.12250101566314697, 0.02102360688149929]], [[0.12757429480552673, 0.09719819575548172, 0.04133240506052971, 0.032768648117780685, 0.028539566323161125, 0.04749145358800888, 0.16707691550254822, 0.015541781671345234, 0.21075758337974548, 0.02521410956978798, 0.02782500348985195, 0.016341445967555046, 0.030513431876897812, 0.011508272960782051, 0.014894921332597733, 0.00619524484500289, 0.029754571616649628, 0.02794620394706726, 0.005373951513320208, 0.019764361903071404, 0.0044114068150520325, 0.011976221576333046], [0.17571987211704254, 0.2085377275943756, 0.12894324958324432, 0.21089938282966614, 0.003668938297778368, 0.029185006394982338, 0.013079775497317314, 0.008391310460865498, 0.00971123855561018, 0.015050049871206284, 0.01994205079972744, 0.038910750299692154, 0.02995520457625389, 0.004400452133268118, 
0.04345770925283432, 0.001211238093674183, 0.004112578462809324, 0.0014633007813245058, 0.0015822371933609247, 0.004719909746199846, 0.005086562596261501, 0.04197147116065025], [0.0016747728222981095, 0.03168536722660065, 0.03905782103538513, 0.7235816121101379, 0.017882874235510826, 0.039459627121686935, 0.003051471896469593, 0.03640015795826912, 0.00575432600453496, 0.0010168416192755103, 0.0008052748162299395, 0.0026257596909999847, 0.00910282600671053, 0.015375814400613308, 0.04885217174887657, 0.0020949281752109528, 0.007515889126807451, 0.0006455205148085952, 0.005248870700597763, 0.0010085710091516376, 0.0015629309928044677, 0.005596550181508064], [0.00037727708695456386, 0.0014186600456014276, 0.00313441245816648, 0.0007104542455635965, 0.9535367488861084, 0.0005505126900970936, 0.01133729424327612, 0.0009894907707348466, 0.007038183975964785, 0.000352736737113446, 0.0004230250488035381, 6.622356886509806e-05, 0.0003695646591950208, 0.0013446720549836755, 0.00015376985538750887, 0.013814187608659267, 0.00032904965337365866, 0.0015854116063565016, 0.00027769154985435307, 0.0010734001407399774, 0.0007269433117471635, 0.000390387955121696], [0.024114081636071205, 0.08271344751119614, 0.028766803443431854, 0.04345482960343361, 0.1720530092716217, 0.14968188107013702, 0.11624464392662048, 0.0882851853966713, 0.018221896141767502, 0.09255648404359818, 0.0061993906274437904, 0.00304717430844903, 0.005571523681282997, 0.007743338122963905, 0.0048653483390808105, 0.059437185525894165, 0.010704790242016315, 0.01950768567621708, 0.008185726590454578, 0.005754632875323296, 0.04931395873427391, 0.0035770428366959095], [0.013277528807520866, 0.0036739669740200043, 0.01478103082627058, 0.0010590680176392198, 0.01646004244685173, 0.015740584582090378, 0.8516192436218262, 0.012989549897611141, 0.04542621225118637, 0.0019321341533213854, 0.005791675765067339, 0.00020097331434953958, 0.00021162473422009498, 0.00063754350412637, 0.0002168232895201072, 0.0005246505606919527, 
0.0024160477332770824, 0.008776596747338772, 0.0029346556402742863, 0.0008150177309289575, 0.00032712778192944825, 0.0001879217743407935], [0.03139342740178108, 0.007111086044460535, 0.001854176283814013, 0.07305244356393814, 0.0035024897661060095, 0.05020836368203163, 0.015805669128894806, 0.7431949377059937, 0.012529566884040833, 0.007865030318498611, 0.0019310053903609514, 0.003126197960227728, 0.0013552679447457194, 0.0004077281919308007, 0.007289526052772999, 0.0013260788982734084, 0.0047904388047754765, 0.0018865462625399232, 0.0280695091933012, 0.0013601552927866578, 0.0014693404082208872, 0.0004709529457613826], [0.29616400599479675, 0.004590731579810381, 0.0004941381048411131, 0.0053558372892439365, 0.0032878294587135315, 0.009785384871065617, 0.06382837891578674, 0.004571492783725262, 0.5210373997688293, 0.0061888969503343105, 0.026003744453191757, 0.0009389917831867933, 0.0067754765041172504, 7.070878200465813e-05, 0.007303288672119379, 0.0001696285034995526, 0.03280389681458473, 0.001539328834041953, 0.006735799368470907, 0.0018796318909153342, 0.00024350553576368839, 0.00023198295093607157], [0.002668574918061495, 0.004550672601908445, 0.0007431553676724434, 0.0020784775260835886, 0.0002361397200729698, 0.025389058515429497, 0.0033983630128204823, 0.007021632045507431, 0.006157165393233299, 0.8870530724525452, 0.0054862783290445805, 0.03977984935045242, 0.0014411024749279022, 0.000557228340767324, 0.0006083825137466192, 0.00030559097649529576, 0.0007805772474966943, 0.0022034880239516497, 0.00021226191893219948, 0.0013823637273162603, 0.0076355538330972195, 0.00031118610058911145], [0.014199473895132542, 0.05543927848339081, 0.011305772699415684, 0.0015216952888295054, 0.00026391312712803483, 0.00028546730754897, 0.008046670816838741, 0.0003159825864713639, 0.02786152996122837, 0.007913103327155113, 0.6273343563079834, 0.015579239465296268, 0.17314079403877258, 0.003930013161152601, 0.0028459609020501375, 0.0002481649862602353, 0.0006610595155507326, 
0.0014442475512623787, 0.001529937842860818, 0.008038176223635674, 0.005679253023117781, 0.032415833324193954], [0.003878358518704772, 0.013413973152637482, 0.0022470278199762106, 0.029606487601995468, 0.0002803007373586297, 0.00211618235334754, 0.0011872631730511785, 0.0012263547396287322, 0.0009981790790334344, 0.02467162348330021, 0.007179337553679943, 0.6897154450416565, 0.13429544866085052, 0.005065991543233395, 0.05650006979703903, 0.002072093542665243, 0.0010899947956204414, 0.008274931460618973, 0.0004018530307803303, 0.0025417713914066553, 0.004871888551861048, 0.008365510031580925], [0.004091503098607063, 0.004135000053793192, 0.0025699944235384464, 0.026017285883426666, 0.005119776353240013, 0.0003378770488779992, 0.002101193182170391, 0.0005945760058239102, 0.020708199590444565, 0.001391708035953343, 0.03446534276008606, 0.027677446603775024, 0.6817039847373962, 0.02295205183327198, 0.1385064721107483, 0.00611764146015048, 0.003907559439539909, 0.0018130890093743801, 0.0035418346524238586, 0.002728385617956519, 0.0024748980067670345, 0.007044205907732248], [0.03340433910489082, 0.002247173571959138, 0.0007492025033570826, 0.01100108027458191, 0.0003813113726209849, 0.011829103343188763, 0.0009945114143192768, 0.0004013901634607464, 0.0013466946547850966, 0.007675123400986195, 0.004732539411634207, 0.625091552734375, 0.18730735778808594, 0.023722151294350624, 0.04156605899333954, 0.004099168814718723, 0.0074445875361561775, 0.0059083024971187115, 8.360787614947185e-05, 0.00497621251270175, 0.0002359493519179523, 0.024802539497613907], [0.0025355974212288857, 0.004341382533311844, 0.010083962231874466, 0.05882599577307701, 0.02312266081571579, 0.007043078076094389, 0.0022286863531917334, 0.005900159478187561, 0.011914695613086224, 0.001375246443785727, 0.004753556568175554, 0.00542708532884717, 0.10095790773630142, 0.08588278293609619, 0.43508660793304443, 0.035909540951251984, 0.1481708288192749, 0.0044241491705179214, 0.03651506453752518, 
0.004707275424152613, 0.005234185606241226, 0.005559598561376333], [0.0010517152259126306, 0.0005083185387775302, 0.001631470280699432, 0.000920793623663485, 0.24843762814998627, 0.0005568441119976342, 0.0038225774187594652, 0.003260215977206826, 0.0049349162727594376, 0.0013621088583022356, 0.002329813549295068, 0.0023609416093677282, 0.005079321097582579, 0.04336204007267952, 0.0026187028270214796, 0.5897389054298401, 0.004446649923920631, 0.0470617413520813, 0.007428490556776524, 0.01861964538693428, 0.004225368611514568, 0.006241742987185717], [0.007873776368796825, 0.015377591364085674, 0.01334191020578146, 0.002723389072343707, 0.057266660034656525, 0.0013709316262975335, 0.05249759927392006, 0.0015954604605212808, 0.06039433553814888, 0.015759894624352455, 0.0468900166451931, 0.002671209629625082, 0.023097490891814232, 0.016553467139601707, 0.014638626016676426, 0.1593729555606842, 0.06377727538347244, 0.21981070935726166, 0.07537634670734406, 0.06967680156230927, 0.07032080739736557, 0.009612737223505974], [0.00262373685836792, 0.0009846148313954473, 0.0065110912546515465, 0.00027519717696122825, 0.0043894448317587376, 0.002571355551481247, 0.057756856083869934, 0.009641701355576515, 0.00938035361468792, 0.003196605248376727, 0.006742444355040789, 0.005192221142351627, 0.00040435956907458603, 0.010029966942965984, 0.0005227726069279015, 0.01724419929087162, 0.005343148950487375, 0.7511274814605713, 0.027752798050642014, 0.06924214214086533, 0.0018517159624025226, 0.007215858902782202], [0.003918484319001436, 0.001450302661396563, 0.0006958426092751324, 0.0015880733262747526, 0.00011585249012568966, 0.0004996701027266681, 0.0005903345881961286, 0.0069730570539832115, 0.0032729501836001873, 0.0007113813189789653, 0.009015917778015137, 0.0010515034664422274, 0.004147696308791637, 0.0003975703730247915, 0.006171500310301781, 0.0007744657341390848, 0.022183436900377274, 0.005877651274204254, 0.9030944108963013, 0.01249650213867426, 0.011155789718031883, 
0.0038175892550498247], [0.03534265235066414, 0.00298696244135499, 0.0008374211029149592, 0.00045729969860985875, 0.0005780942155979574, 0.0005397353670559824, 0.003355934051796794, 0.0007146616699174047, 0.012661720626056194, 0.00486555602401495, 0.03574497252702713, 0.05006442964076996, 0.010061160661280155, 0.001142184599302709, 0.005923380609601736, 0.004602161236107349, 0.019815709441900253, 0.18624991178512573, 0.04014834389090538, 0.5128813982009888, 0.005010002292692661, 0.06601624935865402], [0.017541414126753807, 0.05041312053799629, 0.008465800434350967, 0.008299698121845722, 0.002149360254406929, 0.007392676081508398, 0.0027099759317934513, 0.0012024459429085255, 0.0075778355821967125, 0.09180012345314026, 0.04734744504094124, 0.06237753853201866, 0.05013266205787659, 0.0021083687897771597, 0.011153425090014935, 0.005164097994565964, 0.01910894550383091, 0.009396672248840332, 0.0170902069658041, 0.026897819712758064, 0.51310795545578, 0.03856245055794716], [0.004003522917628288, 0.029539773240685463, 0.013258080929517746, 0.004624798893928528, 0.00024260592181235552, 0.00011289273970760405, 0.00022598440409637988, 0.0001215714801219292, 0.000537005253136158, 0.0010848731035366654, 0.013422925025224686, 0.011182501912117004, 0.04003743827342987, 0.009872217662632465, 0.004956763703376055, 0.0005236141732893884, 0.0002975494717247784, 0.0016682158457115293, 0.0013882833300158381, 0.02647642232477665, 0.010225889272987843, 0.8261970281600952], [0.0051943464204669, 0.08088953793048859, 0.031297098845243454, 0.14231008291244507, 0.027176648378372192, 0.0063520013354718685, 0.0017791162244975567, 0.0006328733288682997, 0.0018670103745535016, 0.002450331347063184, 0.004881577100604773, 0.035185739398002625, 0.1393279731273651, 0.011736269108951092, 0.19946357607841492, 0.027412142604589462, 0.04494767636060715, 0.009490776807069778, 0.01073061116039753, 0.010938155464828014, 0.09089136123657227, 0.1150452047586441]], [[0.026088079437613487, 
0.13332924246788025, 0.14531421661376953, 0.05320208892226219, 0.01825951598584652, 0.04677853733301163, 0.052487172186374664, 0.015249156393110752, 0.01703646220266819, 0.016599487513303757, 0.027003584429621696, 0.01233028806746006, 0.036930881440639496, 0.10542915761470795, 0.06370677053928375, 0.009203781373798847, 0.04300697147846222, 0.012532801367342472, 0.030455635860562325, 0.025907844305038452, 0.027887744829058647, 0.08126059174537659], [0.13957750797271729, 0.0257126372307539, 0.032974425703287125, 0.02012859284877777, 0.05004062131047249, 0.023036876693367958, 0.08259249478578568, 0.04085918515920639, 0.08915688842535019, 0.006947243120521307, 0.02605609968304634, 0.03632046654820442, 0.013256766833364964, 0.020838283002376556, 0.0179127249866724, 0.019755076617002487, 0.046613603830337524, 0.09454546123743057, 0.07592307031154633, 0.07685944437980652, 0.005291069392114878, 0.055601391941308975], [0.1397392302751541, 0.021819235756993294, 0.030422763898968697, 0.030885787680745125, 0.16866324841976166, 0.016305606812238693, 0.07318098098039627, 0.017753778025507927, 0.07280157506465912, 0.0074209352023899555, 0.020091531798243523, 0.02703140117228031, 0.018271252512931824, 0.019656116142868996, 0.01951947808265686, 0.031537458300590515, 0.0343555323779583, 0.06547488272190094, 0.03878092020750046, 0.07870689779520035, 0.01935104839503765, 0.04823030158877373], [0.02323341742157936, 0.21654464304447174, 0.07968699187040329, 0.06481420993804932, 0.014687861315906048, 0.06616118550300598, 0.035495974123477936, 0.016377681866288185, 0.0034775116946548223, 0.016780495643615723, 0.005429160315543413, 0.03322714939713478, 0.016655253246426582, 0.027241341769695282, 0.025409987196326256, 0.020269937813282013, 0.026344748213887215, 0.03140150383114815, 0.02271106280386448, 0.042541295289993286, 0.04127064347267151, 0.17023800313472748], [0.01829446852207184, 0.12797127664089203, 0.047758325934410095, 0.014941983856260777, 0.008544756099581718, 
0.017557619139552116, 0.09684910625219345, 0.006721531506627798, 0.033622030168771744, 0.026688650250434875, 0.045136041939258575, 0.04388010501861572, 0.07036922872066498, 0.027101460844278336, 0.026944924145936966, 0.007278886158019304, 0.02956691011786461, 0.059687063097953796, 0.030955659225583076, 0.09373218566179276, 0.07919812947511673, 0.0871996060013771], [0.07450966536998749, 0.033289097249507904, 0.05246718227863312, 0.3036018908023834, 0.11434927582740784, 0.07089871913194656, 0.01800466515123844, 0.2351159304380417, 0.00741927744820714, 0.004942907486110926, 0.0017415942857041955, 0.024272913113236427, 0.003523425431922078, 0.010593654587864876, 0.005059714894741774, 0.017365114763379097, 0.0019322298467159271, 0.002242780290544033, 0.0008379106875509024, 0.0016219300450757146, 0.0034168390557169914, 0.012793360278010368], [0.07155625522136688, 0.034835539758205414, 0.06445662677288055, 0.0966513454914093, 0.33021849393844604, 0.10134802758693695, 0.03907431289553642, 0.04282241314649582, 0.034930165857076645, 0.011137011460959911, 0.004316318314522505, 0.007271469570696354, 0.03079182840883732, 0.025881100445985794, 0.017988257110118866, 0.016341187059879303, 0.04059115797281265, 0.005064511206001043, 0.006136379670351744, 0.004285968374460936, 0.008126135915517807, 0.0061753885820508], [0.032228920608758926, 0.07878830283880234, 0.018719229847192764, 0.028265012428164482, 0.006257816683501005, 0.20858389139175415, 0.037715911865234375, 0.3703138828277588, 0.008695675991475582, 0.09329105913639069, 0.014439953491091728, 0.010430452413856983, 0.0038955772761255503, 0.02188362553715706, 0.005879588425159454, 0.018250029534101486, 0.003585869213566184, 0.008856801316142082, 0.006358375307172537, 0.011248503811657429, 0.0035957281943410635, 0.008715823292732239], [0.013453672640025616, 0.012412379495799541, 0.012504983693361282, 0.0130152041092515, 0.026090005412697792, 0.0314200222492218, 0.4484260678291321, 0.025515398010611534, 0.2626686096191406, 
0.018214575946331024, 0.0654667466878891, 0.0007620599935762584, 0.008364947512745857, 0.002559855580329895, 0.007224262226372957, 0.0017435428453609347, 0.011595100164413452, 0.01782085746526718, 0.004114770796149969, 0.011149306781589985, 0.004213511012494564, 0.0012639712076634169], [0.07728053629398346, 0.015092091634869576, 0.009417897090315819, 0.005330249201506376, 0.003800363978371024, 0.07428492605686188, 0.020658301189541817, 0.2815369665622711, 0.0314677357673645, 0.011751430109143257, 0.04861675947904587, 0.36618149280548096, 0.011257309466600418, 0.00509823951870203, 0.001657885150052607, 0.005196044687181711, 0.006537836976349354, 0.003078186186030507, 0.008747692219913006, 0.001334727043285966, 0.0005265452200546861, 0.011146828532218933], [0.033814191818237305, 0.004416723735630512, 0.01274664606899023, 0.004048605915158987, 0.01909925416111946, 0.005700108129531145, 0.1065511628985405, 0.030615240335464478, 0.55421382188797, 0.005916024092584848, 0.051033228635787964, 0.01191625650972128, 0.01880079321563244, 0.01954137720167637, 0.012835059314966202, 0.0037781845312565565, 0.02390584535896778, 0.01305784098803997, 0.045688752084970474, 0.017955884337425232, 0.0016022390918806195, 0.0027626792434602976], [0.00829662848263979, 0.01131648663431406, 0.007574259769171476, 0.006348902825266123, 0.00827816966921091, 0.04905553534626961, 0.029319776222109795, 0.083323635160923, 0.011322731152176857, 0.4911911189556122, 0.07358941435813904, 0.049628499895334244, 0.003452748991549015, 0.02293993905186653, 0.00288070784881711, 0.051417019218206406, 0.0017347303219139576, 0.03508749231696129, 0.005776724312454462, 0.02014184556901455, 0.01071194838732481, 0.01661166176199913], [0.004482050891965628, 0.009640553034842014, 0.03363777697086334, 0.006881164386868477, 0.003999802283942699, 0.0079196160659194, 0.022716311737895012, 0.0022509435657411814, 0.2674255073070526, 0.02080528251826763, 0.37957248091697693, 0.04496142268180847, 0.06879933923482895, 
0.03411707282066345, 0.042258068919181824, 0.001677598338574171, 0.028514059260487556, 0.0014149121707305312, 0.00771332485601306, 0.004918206017464399, 0.0028711589984595776, 0.0034233105834573507], [0.028978105634450912, 0.010993113741278648, 0.016742300242185593, 0.029774634167551994, 0.014162999577820301, 0.023482004180550575, 0.009048471227288246, 0.025288773700594902, 0.012715190649032593, 0.025271283462643623, 0.042249858379364014, 0.2790028750896454, 0.01700456440448761, 0.12765412032604218, 0.04412904754281044, 0.06746469438076019, 0.019438551738858223, 0.03515281155705452, 0.01717509515583515, 0.05850822851061821, 0.012652857229113579, 0.08311032503843307], [0.011995582841336727, 0.02799237333238125, 0.017934443429112434, 0.07600586116313934, 0.024589724838733673, 0.012463653460144997, 0.006071018520742655, 0.009662245400249958, 0.0115820886567235, 0.016888931393623352, 0.012333128601312637, 0.02526567317545414, 0.1711876392364502, 0.07997892796993256, 0.2605026364326477, 0.03855009377002716, 0.07091892510652542, 0.006060980260372162, 0.036414727568626404, 0.0093545438721776, 0.05607692897319794, 0.018169865012168884], [0.012417569756507874, 0.008734364993870258, 0.0025922979693859816, 0.008668859489262104, 0.004169947002083063, 0.009355923160910606, 0.009895621798932552, 0.01645653136074543, 0.008890199474990368, 0.04643315449357033, 0.03126871958374977, 0.12581785023212433, 0.07141384482383728, 0.08833316713571548, 0.08927198499441147, 0.08450477570295334, 0.034408167004585266, 0.13171745836734772, 0.06270132958889008, 0.051857996731996536, 0.05876098573207855, 0.042329251766204834], [0.013877221383154392, 0.021738778799772263, 0.03404518589377403, 0.12175362557172775, 0.04161929711699486, 0.017934301868081093, 0.006853482685983181, 0.011275394819676876, 0.021730560809373856, 0.01000715047121048, 0.02515491470694542, 0.035792142152786255, 0.16652721166610718, 0.057132817804813385, 0.21192817389965057, 0.03773995116353035, 0.07731527835130692, 
0.00393599784001708, 0.01909656822681427, 0.003791454713791609, 0.04378394037485123, 0.016966570168733597], [0.014710779301822186, 0.0029705108609050512, 0.003131211968138814, 0.020369568839669228, 0.029648609459400177, 0.06146248057484627, 0.002101257210597396, 0.05498478561639786, 0.003020825097337365, 0.020879752933979034, 0.002648982685059309, 0.0714859887957573, 0.023117220029234886, 0.13472889363765717, 0.04638973996043205, 0.3433951139450073, 0.06543026119470596, 0.04111533984541893, 0.023641658946871758, 0.011314879171550274, 0.008324499242007732, 0.015127674676477909], [0.005323444958776236, 0.0033881873823702335, 0.0019870742689818144, 0.006383049767464399, 0.004570712801069021, 0.0019867955707013607, 0.0018168577225878835, 0.004312724806368351, 0.017336128279566765, 0.0029448089189827442, 0.015806732699275017, 0.002233984647318721, 0.1110713854432106, 0.02750687673687935, 0.205160990357399, 0.022976864129304886, 0.17003682255744934, 0.006981974933296442, 0.348910391330719, 0.008414149284362793, 0.027201544493436813, 0.0036485064774751663], [0.005367961712181568, 0.0012401934945955873, 0.0010585228446871042, 0.008257255889475346, 0.008623854257166386, 0.011057076044380665, 0.015367680229246616, 0.04368487000465393, 0.014847274869680405, 0.008489668369293213, 0.007578197866678238, 0.005705815274268389, 0.0015544936759397388, 0.009397887624800205, 0.0109296515583992, 0.04349426180124283, 0.013297022320330143, 0.5314074754714966, 0.02430998533964157, 0.1999029964208603, 0.01080668717622757, 0.023621272295713425], [0.008900880813598633, 0.005158300045877695, 0.0019387727370485663, 0.0008363588713109493, 0.0002776100591290742, 0.002056012861430645, 0.0030548961367458105, 0.0050390553660690784, 0.011262602172791958, 0.0025178606156259775, 0.0710555836558342, 0.10472027212381363, 0.04732845351099968, 0.003476591780781746, 0.01738128997385502, 0.004100933205336332, 0.05348260700702667, 0.012690169736742973, 0.5841453075408936, 0.009523509070277214, 
0.010433973744511604, 0.04061891511082649], [0.009444582276046276, 0.0009324858547188342, 0.004253394436091185, 0.0015176505548879504, 0.002857096027582884, 0.002531174337491393, 0.009208230301737785, 0.004871439188718796, 0.004045125562697649, 0.00102803239133209, 0.002949455985799432, 0.01397649385035038, 0.0004520739894360304, 0.02385672926902771, 0.0038616014644503593, 0.01742701604962349, 0.007378571666777134, 0.33704864978790283, 0.050620947033166885, 0.3985181450843811, 0.0021663159132003784, 0.10105477273464203]], [[0.009445312432944775, 0.23885716497898102, 0.06457574665546417, 0.003628992009907961, 0.004160961601883173, 0.016594771295785904, 0.018216697499155998, 0.035321228206157684, 0.024142654612660408, 0.23024173080921173, 0.01944439299404621, 0.004622146021574736, 0.012466915883123875, 0.023551559075713158, 0.0014638694701716304, 0.0010546616977080703, 0.004332289565354586, 0.005461337976157665, 0.012971341609954834, 0.01686195097863674, 0.24252881109714508, 0.010055403225123882], [0.005951381288468838, 0.3235990107059479, 0.06795253604650497, 0.0060164486058056355, 0.006900945212692022, 0.010668283328413963, 0.06165245920419693, 0.08104309439659119, 0.018482400104403496, 0.21265120804309845, 0.028270097449421883, 0.008558868430554867, 0.011694862507283688, 0.010961023159325123, 0.0008149328059516847, 0.0006978298770263791, 0.000913088908419013, 0.003287471132352948, 0.00553968595340848, 0.004908935632556677, 0.12408991158008575, 0.005345530342310667], [0.013281341642141342, 0.047973185777664185, 0.35251694917678833, 0.013239394873380661, 0.040752001106739044, 0.16464363038539886, 0.13940517604351044, 0.0026361176278442144, 0.04050895571708679, 0.002401210367679596, 0.03532935306429863, 0.011271589435636997, 0.0012288622092455626, 0.043180156499147415, 0.0021141518373042345, 0.006649633403867483, 0.03057321533560753, 0.005838334560394287, 0.0014380768407136202, 0.010755404829978943, 0.0009370073094032705, 0.03332626819610596], [0.038200508803129196, 
0.17923147976398468, 0.13977760076522827, 0.022028598934412003, 0.01007112767547369, 0.18300361931324005, 0.1464538872241974, 0.02805003710091114, 0.04680386930704117, 0.062415771186351776, 0.03622734546661377, 0.009529907256364822, 0.011101572774350643, 0.029910946264863014, 0.0038443682715296745, 0.0019753507804125547, 0.018293900415301323, 0.006362699903547764, 0.003060358576476574, 0.007573273964226246, 0.008140947669744492, 0.007942724972963333], [0.029722148552536964, 0.02535923197865486, 0.16242718696594238, 0.0841241329908371, 0.02604353241622448, 0.10506471991539001, 0.32667669653892517, 0.005254154559224844, 0.03656866401433945, 0.005923866294324398, 0.029707837849855423, 0.00863654911518097, 0.00282853189855814, 0.039182309061288834, 0.04132843762636185, 0.0122738191857934, 0.027437185868620872, 0.012176016345620155, 0.0016206569271162152, 0.007734706625342369, 0.0014475274365395308, 0.008462170138955116], [0.022106623277068138, 0.08671478182077408, 0.02551870420575142, 0.006742788478732109, 0.0026141139678657055, 0.038615304976701736, 0.6359610557556152, 0.04371066391468048, 0.009412371553480625, 0.06779231876134872, 0.01841534487903118, 0.019450794905424118, 0.009588046930730343, 0.0036637417506426573, 0.0005865710554644465, 0.0002549758064560592, 0.0008253782289102674, 0.004968410357832909, 0.00034263511770404875, 0.00024004258739296347, 0.001670665922574699, 0.0008047028095461428], [0.006204057484865189, 0.18253540992736816, 0.03613778203725815, 0.008326939307153225, 0.01933642104268074, 0.024571705609560013, 0.2477254569530487, 0.10497413575649261, 0.09018474072217941, 0.04813642427325249, 0.04482286423444748, 0.055230967700481415, 0.05425351858139038, 0.0078107318840920925, 0.004614084027707577, 0.0026217650156468153, 0.0045104497112333775, 0.011435111984610558, 0.010287660174071789, 0.0077036102302372456, 0.02174493484199047, 0.006831323727965355], [0.010463827289640903, 0.14803464710712433, 0.014997795224189758, 0.006734034046530724, 
0.001990547403693199, 0.02772287279367447, 0.022340649738907814, 0.27558866143226624, 0.032241977751255035, 0.3710726499557495, 0.011250372044742107, 0.010200629942119122, 0.006411856506019831, 0.010111648589372635, 0.0017358451150357723, 0.0007206528098322451, 0.0012285817647352815, 0.0025568860583007336, 0.0036424645222723484, 0.007383857853710651, 0.030303018167614937, 0.0032665589824318886], [0.06782718747854233, 0.05780371278524399, 0.06302593648433685, 0.018908845260739326, 0.013100779615342617, 0.05662940815091133, 0.30399587750434875, 0.011884873732924461, 0.018510447815060616, 0.11114086955785751, 0.10427780449390411, 0.03686262294650078, 0.05109608918428421, 0.02226664125919342, 0.007453073747456074, 0.0026130599435418844, 0.01588779129087925, 0.0148220369592309, 0.0026983842253684998, 0.0018297497881576419, 0.010669432580471039, 0.006695355288684368], [0.0350869856774807, 0.10777661949396133, 0.03332865983247757, 0.05361432209610939, 0.014165016822516918, 0.061452049762010574, 0.026192843914031982, 0.04318959638476372, 0.028789890930056572, 0.13697801530361176, 0.09764908254146576, 0.2975413501262665, 0.012595356442034245, 0.016200358048081398, 0.00729083176702261, 0.005542288534343243, 0.0025361746083945036, 0.001365605858154595, 0.0008926375885494053, 0.002705436898395419, 0.0019552467856556177, 0.01315159723162651], [0.027202947065234184, 0.07222861051559448, 0.0384557731449604, 0.006253632716834545, 0.015219057910144329, 0.04189681634306908, 0.03573914244771004, 0.0167678352445364, 0.14915631711483002, 0.07769709080457687, 0.10937240719795227, 0.15597611665725708, 0.0995599627494812, 0.03721991926431656, 0.006386470515280962, 0.0059897564351558685, 0.03154950961470604, 0.011927351355552673, 0.014773370698094368, 0.012836256995797157, 0.01631801947951317, 0.017473606392741203], [0.03744703158736229, 0.021137919276952744, 0.007658321410417557, 0.0012099568266421556, 0.0023543599527329206, 0.019355395808815956, 0.012008636258542538, 
0.024558885022997856, 0.005524234380573034, 0.5918417572975159, 0.03068670816719532, 0.11440060287714005, 0.02702667936682701, 0.025143127888441086, 0.0006409030174836516, 0.003441636683419347, 0.0040138158947229385, 0.02708042971789837, 0.0025933983270078897, 0.004278556443750858, 0.027250947430729866, 0.010346733964979649], [0.015155898407101631, 0.04998653754591942, 0.012529328465461731, 0.004798592533916235, 0.009165295399725437, 0.02427065372467041, 0.01706133596599102, 0.01462259329855442, 0.10201980173587799, 0.044858500361442566, 0.0458265095949173, 0.09514632076025009, 0.36293983459472656, 0.025333737954497337, 0.015657300129532814, 0.007520987186580896, 0.04595852270722389, 0.01404099352657795, 0.031569838523864746, 0.031531330198049545, 0.017935195937752724, 0.012070818804204464], [0.02516181394457817, 0.00799109973013401, 0.060688287019729614, 0.015300082042813301, 0.033974774181842804, 0.19575603306293488, 0.01163039542734623, 0.001776978257112205, 0.016845686361193657, 0.0023927800357341766, 0.013849422335624695, 0.05665026232600212, 0.0029893412720412016, 0.09681282937526703, 0.010228888131678104, 0.11397206783294678, 0.1598680466413498, 0.026657819747924805, 0.003112225793302059, 0.047761477530002594, 0.0006313455523923039, 0.09594835340976715], [0.054896030575037, 0.028623750433325768, 0.02075200341641903, 0.017639553174376488, 0.014335615560412407, 0.047095946967601776, 0.015570470131933689, 0.024465927854180336, 0.044028934091329575, 0.05978230759501457, 0.03118492290377617, 0.015650130808353424, 0.14731046557426453, 0.050388552248477936, 0.036571793258190155, 0.024083340540528297, 0.1261277198791504, 0.02887658029794693, 0.0870644822716713, 0.0394887812435627, 0.06488523632287979, 0.021177448332309723], [0.02549557015299797, 0.004774193279445171, 0.013992256484925747, 0.037121377885341644, 0.02333027683198452, 0.02784842438995838, 0.011019851081073284, 0.011841829866170883, 0.008105291984975338, 0.009603489190340042, 0.005764023866504431, 
0.02568797394633293, 0.010165328159928322, 0.06997300684452057, 0.08207017183303833, 0.3175916075706482, 0.05693507939577103, 0.11781471222639084, 0.025716954842209816, 0.07122018188238144, 0.01144121028482914, 0.03248723968863487], [0.06861810386180878, 0.03654937446117401, 0.016183655709028244, 0.008067886345088482, 0.008096110075712204, 0.0240708589553833, 0.10460711270570755, 0.013173811137676239, 0.013442397117614746, 0.06200706586241722, 0.029980437830090523, 0.030894391238689423, 0.33940058946609497, 0.01953922025859356, 0.013710054568946362, 0.005766693037003279, 0.04621630534529686, 0.05580206215381622, 0.038477689027786255, 0.0036294525489211082, 0.05507352203130722, 0.006693258881568909], [0.007536378689110279, 0.010296058841049671, 0.004439019598066807, 0.01071125641465187, 0.02413143590092659, 0.0097269956022501, 0.015697354450821877, 0.01601886935532093, 0.010296466760337353, 0.007625198923051357, 0.005378033500164747, 0.2133496254682541, 0.043484166264534, 0.01597544178366661, 0.029554512351751328, 0.12675178050994873, 0.019190441817045212, 0.18276633322238922, 0.025409094989299774, 0.1279393434524536, 0.012623819522559643, 0.08109838515520096], [0.0060087027959525585, 0.013870801776647568, 0.0027703354135155678, 0.003332695458084345, 0.002797796856611967, 0.00258063618093729, 0.000791724945884198, 0.01099094096571207, 0.006408683955669403, 0.031101452186703682, 0.008606351912021637, 0.005975265521556139, 0.07477792352437973, 0.007695211097598076, 0.019790690392255783, 0.007553756237030029, 0.026629570871591568, 0.0075328294187784195, 0.21519608795642853, 0.023000972345471382, 0.501840353012085, 0.020747292786836624], [0.07957886159420013, 0.008168661035597324, 0.01628943346440792, 0.04920845478773117, 0.03936188668012619, 0.021791400387883186, 0.013289381749927998, 0.004500573500990868, 0.0014541959390044212, 0.0113749410957098, 0.008759859949350357, 0.07919219881296158, 0.018118727952241898, 0.0363948717713356, 0.03267369791865349, 
0.10298124700784683, 0.03224586322903633, 0.13562597334384918, 0.010036138817667961, 0.04009687155485153, 0.013059785589575768, 0.24579693377017975], [0.03249753266572952, 0.07660327851772308, 0.02909873053431511, 0.11194178462028503, 0.03723711147904396, 0.018633246421813965, 0.0026196795515716076, 0.008590701967477798, 0.027259012684226036, 0.015747681260108948, 0.05905380845069885, 0.052912384271621704, 0.02535001002252102, 0.017398089170455933, 0.10637946426868439, 0.025075804442167282, 0.0329631045460701, 0.0021024225279688835, 0.04654518514871597, 0.05140383169054985, 0.052104402333498, 0.168482705950737], [0.07232773303985596, 0.008070911280810833, 0.04635213315486908, 0.014440218918025494, 0.027325736358761787, 0.05900224670767784, 0.007845633663237095, 0.0011470705503597856, 0.005499559920281172, 0.0021118600852787495, 0.00925343856215477, 0.06102115660905838, 0.0036729229614138603, 0.07280813902616501, 0.007952921092510223, 0.04334496706724167, 0.06876204162836075, 0.060915641486644745, 0.0047203111462295055, 0.074592225253582, 0.0019654349889606237, 0.3468676507472992]], [[0.14124369621276855, 0.1526389718055725, 0.011791038326919079, 0.1737171709537506, 0.06438577175140381, 0.055074840784072876, 0.08365628868341446, 0.0704089105129242, 0.0028994653839617968, 0.0900249034166336, 0.00514615885913372, 0.01192212849855423, 0.01705346815288067, 0.0033173675183206797, 0.03806779906153679, 0.01451671402901411, 0.0052569108083844185, 0.006031245458871126, 0.0038385491352528334, 0.0014970193151384592, 0.038996804505586624, 0.008514785207808018], [0.10452631115913391, 0.5933671593666077, 0.01772848516702652, 0.0794355496764183, 0.024528296664357185, 0.025343798100948334, 0.01242615096271038, 0.04232998192310333, 0.0018011060310527682, 0.035635173320770264, 0.003278900869190693, 0.0072334944270551205, 0.0029371255077421665, 0.00037207858986221254, 0.0037724606227129698, 0.0054082805290818214, 0.0011535431258380413, 0.0007087296689860523, 0.0030143181793391705, 
0.0018716620979830623, 0.024469289928674698, 0.008658160455524921], [0.06627653539180756, 0.12192322313785553, 0.007455863058567047, 0.0782867893576622, 0.14467814564704895, 0.03498511761426926, 0.07125486433506012, 0.05624127388000488, 0.0163484588265419, 0.0677252933382988, 0.008325227536261082, 0.012539589777588844, 0.006488324608653784, 0.0010716556571424007, 0.017194701358675957, 0.037588655948638916, 0.010826288722455502, 0.04030172899365425, 0.02442508563399315, 0.024650678038597107, 0.13929937779903412, 0.012113134376704693], [0.038418080657720566, 0.2076994776725769, 0.028050178661942482, 0.15998457372188568, 0.0795985534787178, 0.01512873824685812, 0.038140278309583664, 0.01629958674311638, 0.006498523987829685, 0.06761903315782547, 0.007081724237650633, 0.030437937006354332, 0.03236021474003792, 0.00420767767354846, 0.05576274171471596, 0.015587535686790943, 0.004951434675604105, 0.008995896205306053, 0.0052613429725170135, 0.009896576404571533, 0.14677393436431885, 0.02124600298702717], [0.027089690789580345, 0.11209366470575333, 0.07869180291891098, 0.1097879409790039, 0.05301012098789215, 0.028067946434020996, 0.010346013121306896, 0.008404070511460304, 0.004230527672916651, 0.030311308801174164, 0.0132968220859766, 0.044999588280916214, 0.0222170352935791, 0.015584544278681278, 0.05795614793896675, 0.029849765822291374, 0.012644173577427864, 0.008720152080059052, 0.012058538384735584, 0.03142448142170906, 0.17674176394939423, 0.11247396469116211], [0.07108854502439499, 0.3280356824398041, 0.27432695031166077, 0.09359961003065109, 0.04570433124899864, 0.027210647240281105, 0.025061506778001785, 0.01767110638320446, 0.006972460076212883, 0.012573705054819584, 0.014039089903235435, 0.010817664675414562, 0.009518264792859554, 0.00828483048826456, 0.009364672005176544, 0.0021966658532619476, 0.0019715852104127407, 0.0007294956012628973, 0.0014329436235129833, 0.002228009281679988, 0.01376294158399105, 0.023409269750118256], [0.07766236364841461, 
0.04927550256252289, 0.021106572821736336, 0.07151000946760178, 0.3357281982898712, 0.18523497879505157, 0.08423584699630737, 0.10301753133535385, 0.007966608740389347, 0.008991066366434097, 0.002551018027588725, 0.004442533478140831, 0.0047761076129972935, 0.003595588030293584, 0.005558142438530922, 0.00816136784851551, 0.006029572803527117, 0.0035030636936426163, 0.0017293007113039494, 0.0018659038469195366, 0.004925936460494995, 0.008132847025990486], [0.04256307706236839, 0.03626929968595505, 0.02047334611415863, 0.20327700674533844, 0.3737586736679077, 0.06391631811857224, 0.07507330924272537, 0.04882887005805969, 0.0007869375986047089, 0.012561783194541931, 0.002474044682458043, 0.0016142032109200954, 0.007258686237037182, 0.0033538993448019028, 0.05346309766173363, 0.02510511688888073, 0.013542793691158295, 0.0011031059548258781, 0.003773423144593835, 6.0113034123787656e-05, 0.007476360071450472, 0.0032665878534317017], [0.01065609697252512, 0.007976428605616093, 0.015910619869828224, 0.005416685249656439, 0.023976361379027367, 0.27475112676620483, 0.14561153948307037, 0.4592108130455017, 0.010570546612143517, 0.023469161242246628, 0.002447434701025486, 0.0010707392357289791, 0.0009589499095454812, 0.004663944710046053, 0.0003765026922337711, 0.0036409804597496986, 0.0031869420781731606, 0.0032240336295217276, 0.0016335389809682965, 0.00029625146999023855, 0.0003945018397644162, 0.00055675208568573], [0.11157557368278503, 0.09932545572519302, 0.002016610000282526, 0.02905862033367157, 0.01449279673397541, 0.047426920384168625, 0.11688203364610672, 0.07257825881242752, 0.09406156092882156, 0.32906997203826904, 0.027137096971273422, 0.012628153897821903, 0.01973152533173561, 0.00038047973066568375, 0.006665591150522232, 0.004591153468936682, 0.005464289337396622, 0.0017795724561437964, 0.0009210757561959326, 0.0009845481254160404, 0.002593460027128458, 0.0006352466298267245], [0.013949506916105747, 0.008391755633056164, 0.00117237470112741, 
0.01036654133349657, 0.004879894200712442, 0.03877687081694603, 0.05046249181032181, 0.27764278650283813, 0.02398005686700344, 0.5088316202163696, 0.014175254851579666, 0.00978111196309328, 0.0060905152931809425, 0.0007265589665621519, 0.0026377965696156025, 0.005925478879362345, 0.004210920073091984, 0.009582576341927052, 0.004267512820661068, 0.0011137904366478324, 0.002703531179577112, 0.00033100342261604965], [0.00481086689978838, 0.008952479809522629, 0.01741226576268673, 0.0028566550463438034, 0.004262844566255808, 0.009459084831178188, 0.09500133991241455, 0.04508864879608154, 0.1531057059764862, 0.11205817013978958, 0.30766889452934265, 0.032363440841436386, 0.05584772303700447, 0.023680567741394043, 0.005349867511540651, 0.0063181351870298386, 0.013140713796019554, 0.04753277450799942, 0.033923257142305374, 0.00949108600616455, 0.007570100948214531, 0.004105494357645512], [0.008106403052806854, 0.005304665770381689, 0.003330829320475459, 0.008715931326150894, 0.0022459279280155897, 0.008590412326157093, 0.002676632720977068, 0.03635780140757561, 0.008242796175181866, 0.3714430630207062, 0.16405685245990753, 0.1346537172794342, 0.12404816597700119, 0.028888264670968056, 0.011066468432545662, 0.025193680077791214, 0.00843314453959465, 0.007173704914748669, 0.011951728723943233, 0.0035456493496894836, 0.019044626504182816, 0.006929420400410891], [0.0030468646436929703, 0.008953990414738655, 0.004065214656293392, 0.013509656302630901, 0.016655225306749344, 0.005317145958542824, 0.010883754119277, 0.010736385360360146, 0.020511845126748085, 0.11823444813489914, 0.15534044802188873, 0.07746351510286331, 0.25863537192344666, 0.047447096556425095, 0.04045785963535309, 0.04218589514493942, 0.015848293900489807, 0.031790636479854584, 0.014262123964726925, 0.01319140288978815, 0.07942664623260498, 0.012036129832267761], [0.0017139154952019453, 0.004023031797260046, 0.0013224371941760182, 0.015973377972841263, 0.0038266468327492476, 0.0020849897991865873, 
0.001597228809259832, 0.0037118587642908096, 0.0021224122028797865, 0.058763567358255386, 0.01962493173778057, 0.1056489422917366, 0.34289902448654175, 0.08482126891613007, 0.12726271152496338, 0.05396636202931404, 0.016560900956392288, 0.024987351149320602, 0.012987000867724419, 0.010936693288385868, 0.08467692881822586, 0.020488357171416283], [0.0013500110944733024, 0.0010004275245591998, 0.0014082693960517645, 0.0070548406802117825, 0.00595642626285553, 0.0008306623203679919, 0.0008200438460335135, 0.0002750241837929934, 0.0006559959147125483, 0.0030927802436053753, 0.01982581987977028, 0.041783884167671204, 0.19440893828868866, 0.11926166713237762, 0.33411315083503723, 0.09646793454885483, 0.04248953238129616, 0.016473542898893356, 0.022185787558555603, 0.006877454463392496, 0.040543172508478165, 0.04312464967370033], [0.00484177703037858, 0.007603900041431189, 0.010259916074573994, 0.025166118517518044, 0.004690891597419977, 0.004253597930073738, 0.0036463364958763123, 0.006495928857475519, 0.0016583054093644023, 0.015531624667346478, 0.019427789375185966, 0.05478878319263458, 0.14259980618953705, 0.3069729804992676, 0.13215036690235138, 0.08711887896060944, 0.02299235388636589, 0.045446932315826416, 0.019722627475857735, 0.010681414976716042, 0.027136139571666718, 0.046813491731882095], [0.0009787828894332051, 0.0002454046916682273, 0.0007617257651872933, 0.0032316262368112803, 0.01855137012898922, 0.0043253772892057896, 0.004390457645058632, 0.003825811669230461, 0.0009448893251828849, 0.0008448063745163381, 0.005313803441822529, 0.0027890552300959826, 0.0318961925804615, 0.09869036823511124, 0.08264968544244766, 0.24180173873901367, 0.21810057759284973, 0.12672483921051025, 0.12678153812885284, 0.003522429382428527, 0.007880952209234238, 0.0157486479729414], [0.0003669844300020486, 0.00025729803019203246, 0.00024636476882733405, 0.008072174154222012, 0.005795304197818041, 0.0011629932560026646, 0.0005713978316634893, 0.002530331490561366, 
3.673757237265818e-05, 0.0038253094535320997, 0.0006823940784670413, 0.0016796004492789507, 0.017362413927912712, 0.01890011504292488, 0.16430026292800903, 0.41277092695236206, 0.08798278868198395, 0.08126363903284073, 0.14396239817142487, 0.0009503241162747145, 0.039810605347156525, 0.007469736970961094], [0.000590944429859519, 0.0005747202085331082, 0.002160640899091959, 0.0008907351293601096, 0.004307083319872618, 0.004192338325083256, 0.015522552654147148, 0.0104148481041193, 0.0022978263441473246, 0.0021257938351482153, 0.004000382032245398, 0.000575518177356571, 0.0027498314157128334, 0.016073573380708694, 0.004422937985509634, 0.033082809299230576, 0.06638066470623016, 0.42423978447914124, 0.3658433258533478, 0.010448165237903595, 0.017805462703108788, 0.011300009675323963], [0.018797876313328743, 0.02368423156440258, 0.000648855057079345, 0.03635898604989052, 0.005827650893479586, 0.006316662300378084, 0.010976247489452362, 0.017603781074285507, 0.0034856931306421757, 0.12839682400226593, 0.0065505667589604855, 0.017321180552244186, 0.017017588019371033, 0.0011431730818003416, 0.06393348425626755, 0.08059985190629959, 0.04868106544017792, 0.1662629246711731, 0.10111147910356522, 0.036802131682634354, 0.18689779937267303, 0.02158202789723873], [0.0009744171984493732, 0.003739460837095976, 0.0026481659151613712, 0.0016888603568077087, 0.0018501332961022854, 0.0006078178412280977, 0.004344704560935497, 0.003371531842276454, 0.0020793594885617495, 0.006883196532726288, 0.008478150703012943, 0.003420888213440776, 0.005593236070126295, 0.0035941745154559612, 0.006270274054259062, 0.012933230958878994, 0.012513641268014908, 0.2896246612071991, 0.37429577112197876, 0.06326089054346085, 0.16567249596118927, 0.026154955849051476]], [[0.04139662906527519, 0.10330997407436371, 0.014012265019118786, 0.035441700369119644, 0.047019410878419876, 0.06231179088354111, 0.05923938751220703, 0.10622579604387283, 0.3962542414665222, 0.04628715664148331, 0.018178114667534828, 
0.009003905579447746, 0.022737866267561913, 0.00392058864235878, 0.004488170612603426, 0.001650506630539894, 0.010038685984909534, 0.002944430336356163, 0.003305198159068823, 0.0068605937995016575, 0.00292545766569674, 0.0024481292348355055], [0.01840071566402912, 0.7347918152809143, 0.07751951366662979, 0.024748072028160095, 0.025181202217936516, 0.013277295976877213, 0.019195230677723885, 0.025944700464606285, 0.006421326193958521, 0.014327348209917545, 0.00547789316624403, 0.0039014448411762714, 0.007862821221351624, 0.0015871630748733878, 0.002934765536338091, 0.0008883369737304747, 0.0006862038862891495, 0.0014090441400185227, 0.0015713619068264961, 0.0011885283747687936, 0.006704253144562244, 0.00598107697442174], [0.05738770589232445, 0.12814025580883026, 0.05436641722917557, 0.04406197741627693, 0.04906059801578522, 0.09674911201000214, 0.0328134186565876, 0.2588595449924469, 0.01971348561346531, 0.027731968089938164, 0.015909964218735695, 0.028275486081838608, 0.019620859995484352, 0.013236464001238346, 0.004909320268779993, 0.00790654681622982, 0.00443696416914463, 0.005249169655144215, 0.0190387275069952, 0.017443589866161346, 0.019047200679779053, 0.07604114711284637], [0.016987936571240425, 0.047244783490896225, 0.0024197206366807222, 0.013787428848445415, 0.5821899771690369, 0.007361093536019325, 0.022645359858870506, 0.04190012067556381, 0.1919340342283249, 0.004631306976079941, 0.005673513747751713, 0.004149970598518848, 0.014834605157375336, 0.0011872195173054934, 0.0017776943277567625, 0.013665545731782913, 0.001013291534036398, 0.0013169186422601342, 0.0019481300842016935, 0.015863774344325066, 0.0024653554428368807, 0.005002121441066265], [0.06719258427619934, 0.14051125943660736, 0.03127438202500343, 0.08278533816337585, 0.09302259981632233, 0.13962185382843018, 0.04950334504246712, 0.17000305652618408, 0.07383276522159576, 0.04046555981040001, 0.005793205462396145, 0.0057489327155053616, 0.0055357725359499454, 0.003341253148391843, 
0.005261509213596582, 0.0054826862178742886, 0.006258328445255756, 0.0070002079010009766, 0.009091674350202084, 0.017480991780757904, 0.03246569260954857, 0.008326981216669083], [0.01550667081028223, 0.15383389592170715, 0.039047449827194214, 0.003228072775527835, 0.0046385955065488815, 0.016764026135206223, 0.16091278195381165, 0.08084679394960403, 0.3388795554637909, 0.011178363114595413, 0.1311335414648056, 0.007680950686335564, 0.004585803020745516, 0.003072375664487481, 0.0007830631802789867, 0.0008160682627931237, 0.0019046410452574492, 0.0016493318835273385, 0.005035445559769869, 0.012271665968000889, 0.0010689912596717477, 0.005161978770047426], [0.009846518747508526, 0.04293104633688927, 0.023517923429608345, 0.008922090753912926, 0.01099223643541336, 0.06675416976213455, 0.015272422693669796, 0.4115636944770813, 0.11681907624006271, 0.14024311304092407, 0.05917852371931076, 0.012386803515255451, 0.005832445342093706, 0.004981025122106075, 0.0013896600576117635, 0.006331165321171284, 0.0071742902509868145, 0.00349231599830091, 0.02162269875407219, 0.010808629915118217, 0.016109934076666832, 0.0038302617613226175], [0.008521776646375656, 0.2195788472890854, 0.0021307910792529583, 0.025338061153888702, 0.026035116985440254, 0.0019255104707553983, 0.06361333280801773, 0.01869248040020466, 0.36218732595443726, 0.008894775994122028, 0.009055803529918194, 0.0004215993685647845, 0.24372869729995728, 0.00014142385043669492, 0.0033604742493480444, 0.00011126869503641501, 0.0009866288164630532, 0.0005991364014334977, 0.0019139543874189258, 0.000739178853109479, 0.00188768794760108, 0.0001362623879685998], [0.015565918758511543, 0.014288471080362797, 0.012210337445139885, 0.004495266824960709, 0.0013933537993580103, 0.10783505439758301, 0.020718974992632866, 0.044191863387823105, 0.07135556638240814, 0.29385611414909363, 0.06993170082569122, 0.2906568944454193, 0.00789108406752348, 0.013063684105873108, 0.0019561133813112974, 0.0024005987215787172, 
0.006090391892939806, 0.007107931654900312, 0.0009526173234917223, 0.007196248508989811, 0.003973105922341347, 0.00286867911927402], [0.002170340623706579, 0.10697256773710251, 0.0909995287656784, 0.002769474172964692, 0.0007413172861561179, 0.0006641370709985495, 0.006058779079467058, 0.003175381338223815, 0.005063515622168779, 0.014145474880933762, 0.45873475074768066, 0.01164967380464077, 0.2297375649213791, 0.029594121500849724, 0.0051074824295938015, 0.00034987888648174703, 0.0009318343945778906, 0.0013836639700457454, 0.004933271557092667, 0.0006148060201667249, 0.006811879575252533, 0.017390616238117218], [0.026786096394062042, 0.04237347096204758, 0.06919021159410477, 0.01760762557387352, 0.0036714363377541304, 0.03803646191954613, 0.013561967760324478, 0.04554720222949982, 0.013530608266592026, 0.10625395178794861, 0.060787633061409, 0.25864729285240173, 0.0706639289855957, 0.14468024671077728, 0.018310436978936195, 0.003437021980062127, 0.009990483522415161, 0.011396372690796852, 0.004279034212231636, 0.006654938217252493, 0.012811285443603992, 0.02178233675658703], [0.009599503129720688, 0.010839559137821198, 0.021364795044064522, 0.003246521344408393, 0.0029187051113694906, 0.002757206792011857, 0.0014129451010376215, 0.0011961464770138264, 0.09274738281965256, 0.0034291911870241165, 0.10709737986326218, 0.020224308595061302, 0.5860618352890015, 0.06192929297685623, 0.02135145291686058, 0.0009998987661674619, 0.022936435416340828, 0.00032371751149185, 0.008340710774064064, 0.002866384107619524, 0.005750374868512154, 0.012606251984834671], [0.022478841245174408, 0.01895943470299244, 0.01159160677343607, 0.035312362015247345, 0.04130459204316139, 0.04191446304321289, 0.009747146628797054, 0.017018595710396767, 0.008665090426802635, 0.02310045436024666, 0.005426890682429075, 0.09864058345556259, 0.0702202320098877, 0.23638546466827393, 0.1019197478890419, 0.08888047188520432, 0.03201703727245331, 0.07875007390975952, 0.005790138151496649, 
0.014870903454720974, 0.00427014147862792, 0.032735735177993774], [0.011235269717872143, 0.007685374468564987, 0.01774652674794197, 0.004248377401381731, 0.0069435350596904755, 0.008831819519400597, 0.012130453251302242, 0.0036916984245181084, 0.00781275425106287, 0.011102908290922642, 0.025358233600854874, 0.04681605100631714, 0.15787987411022186, 0.14887773990631104, 0.058798424899578094, 0.04336719959974289, 0.11803697049617767, 0.06618843972682953, 0.06816856563091278, 0.03454490005970001, 0.030904114246368408, 0.10963084548711777], [0.004970102570950985, 0.005392569117248058, 0.0007467869436368346, 0.003075956366956234, 0.052897848188877106, 0.0033075748942792416, 0.004598958417773247, 0.02350994199514389, 0.017010413110256195, 0.005782602354884148, 0.008063753135502338, 0.025192465633153915, 0.035541877150535583, 0.014852889813482761, 0.010056518018245697, 0.4673703610897064, 0.014212911948561668, 0.07693842053413391, 0.05201271176338196, 0.11322760581970215, 0.00792493112385273, 0.05331289768218994], [0.011321539990603924, 0.015113909728825092, 0.007391935680061579, 0.010289754718542099, 0.025706790387630463, 0.00961728673428297, 0.007978498935699463, 0.005993143189698458, 0.03319593146443367, 0.018583079800009727, 0.006359788589179516, 0.002647266024723649, 0.044537048786878586, 0.011063490994274616, 0.02530008554458618, 0.03232913836836815, 0.2186436653137207, 0.06751832365989685, 0.2342902272939682, 0.07249192893505096, 0.12292686849832535, 0.01670033112168312], [0.0011113443179056048, 0.01309673860669136, 0.0054848333820700645, 0.00042284978553652763, 0.000581687199883163, 0.0022809316869825125, 0.0017216767882928252, 0.013002789579331875, 0.007759569212794304, 0.011533664539456367, 0.011471063829958439, 0.021828290075063705, 0.0031966965179890394, 0.014934686943888664, 0.001301589421927929, 0.0273063275963068, 0.008253856562077999, 0.11027460545301437, 0.0656115710735321, 0.55361008644104, 0.017857080325484276, 0.10735809057950974], 
[0.000445283338194713, 0.0020784016232937574, 0.0027096664998680353, 0.00047211089986376464, 0.0005868573789484799, 0.0012388643808662891, 0.0002943623112514615, 0.0009103187476284802, 0.0034557932522147894, 0.00572675745934248, 0.010757790878415108, 0.0013238771352916956, 0.012024437077343464, 0.003513116156682372, 0.003428512951359153, 0.010622183792293072, 0.07779954373836517, 0.01790524274110794, 0.542478084564209, 0.060543693602085114, 0.21707506477832794, 0.02461002580821514], [0.0016809894004836679, 0.12676921486854553, 0.003445712849497795, 0.009176280349493027, 0.00795148778706789, 0.0009562738123349845, 0.005885845981538296, 0.010015539824962616, 0.0022245431318879128, 0.007540078368037939, 0.0015588403912261128, 0.0034823105670511723, 0.057035893201828, 0.002317616017535329, 0.008620033040642738, 0.01623612828552723, 0.0060842810198664665, 0.21778851747512817, 0.10344532877206802, 0.25057780742645264, 0.09426835924386978, 0.06293892860412598], [0.01724298484623432, 0.02168414555490017, 0.025743745267391205, 0.0069158561527729034, 0.002464632736518979, 0.024912165477871895, 0.014561566524207592, 0.0010097300400957465, 0.005037968046963215, 0.037749890238046646, 0.018814465031027794, 0.05185501649975777, 0.02865910716354847, 0.013419357128441334, 0.01864992082118988, 0.007629449479281902, 0.10678765177726746, 0.06170795112848282, 0.02981334738433361, 0.03755231946706772, 0.40905988216400146, 0.05872875452041626], [0.0021277142222970724, 0.15885940194129944, 0.032537251710891724, 0.00631107809022069, 0.001171170617453754, 0.0005598663701675832, 0.0006831266800872982, 0.002143454272300005, 0.00011658171570161358, 0.001134676393121481, 0.0027469333726912737, 0.0031245811842381954, 0.043914083391427994, 0.011209075339138508, 0.004241346847265959, 0.0009171313722617924, 0.0010419910540804267, 0.008227113634347916, 0.028972061350941658, 0.011134368367493153, 0.048654280602931976, 0.6301726698875427], [0.039783135056495667, 0.016376925632357597, 
0.09349071979522705, 0.021529007703065872, 0.013595595955848694, 0.045854780822992325, 0.008098823949694633, 0.0005992769729346037, 0.003459802595898509, 0.004074267111718655, 0.004036322236061096, 0.01694204844534397, 0.007195691112428904, 0.03745277225971222, 0.037222981452941895, 0.008921206928789616, 0.20352564752101898, 0.027546579018235207, 0.021363088861107826, 0.057469140738248825, 0.11828161776065826, 0.2131805419921875]]], [[[0.007192554883658886, 0.21694214642047882, 0.02117740549147129, 0.005117054563015699, 0.003947066143155098, 0.011807439848780632, 0.06527281552553177, 0.119078628718853, 0.0265911016613245, 0.04824630916118622, 0.0028822754975408316, 0.013187212869524956, 0.050594013184309006, 0.039081066846847534, 0.0036895235534757376, 0.0021544075571000576, 0.019316641613841057, 0.09681005030870438, 0.1278018057346344, 0.06508836895227432, 0.03778235986828804, 0.01623968780040741], [0.087215855717659, 0.12174129486083984, 0.046100832521915436, 0.013732202351093292, 0.042701490223407745, 0.019055891782045364, 0.06328929215669632, 0.3415326178073883, 0.0325474813580513, 0.025608858093619347, 0.0067065698094666, 0.04072347655892372, 0.007466932293027639, 0.02030501887202263, 0.0027921919245272875, 0.0036609629169106483, 0.0050252145156264305, 0.012340494431555271, 0.061054687947034836, 0.011450893245637417, 0.01675599254667759, 0.018191775307059288], [0.02466166578233242, 0.11243816465139389, 0.02055383287370205, 0.020356880500912666, 0.021282125264406204, 0.019828280434012413, 0.2066253274679184, 0.02040758542716503, 0.018159594386816025, 0.03947510942816734, 0.0060438113287091255, 0.0949782282114029, 0.14684589207172394, 0.05897998437285423, 0.010832357220351696, 0.009394770488142967, 0.011649888008832932, 0.1050063893198967, 0.00958692841231823, 0.01613536663353443, 0.017892396077513695, 0.008865440264344215], [0.024800755083560944, 0.18245458602905273, 0.035485535860061646, 0.07336210459470749, 0.006032364908605814, 0.07420411705970764, 
0.04850999638438225, 0.020787697285413742, 0.025151005014777184, 0.08641631156206131, 0.009054381400346756, 0.042033880949020386, 0.2061077207326889, 0.06436086446046829, 0.04450797662138939, 0.0034212288446724415, 0.018905391916632652, 0.009197655133903027, 0.0027789073064923286, 0.00623877951875329, 0.013754535466432571, 0.0024341605603694916], [0.039032768458127975, 0.17997176945209503, 0.06586642563343048, 0.047314200550317764, 0.016655337065458298, 0.06337258219718933, 0.06849585473537445, 0.05720812454819679, 0.028554949909448624, 0.1051601842045784, 0.02183620259165764, 0.04849093779921532, 0.05964202806353569, 0.07190309464931488, 0.02134588360786438, 0.0090622054412961, 0.023391686379909515, 0.016631176695227623, 0.015919173136353493, 0.010505501180887222, 0.016008462756872177, 0.013631545938551426], [0.06427113711833954, 0.18218529224395752, 0.03444754332304001, 0.1560508906841278, 0.029732735827565193, 0.07634858787059784, 0.029033904895186424, 0.1395590752363205, 0.04592883959412575, 0.14886361360549927, 0.006778401788324118, 0.02584964968264103, 0.012808669358491898, 0.007622232660651207, 0.015469509176909924, 0.0027281325310468674, 0.0042556640692055225, 0.0007216112571768463, 0.0029843696393072605, 0.003334843786433339, 0.009180421940982342, 0.0018448926275596023], [0.09256277233362198, 0.07316960394382477, 0.02073987014591694, 0.06394423544406891, 0.05170251801609993, 0.17164671421051025, 0.14109428226947784, 0.019132131710648537, 0.03522532805800438, 0.09882412105798721, 0.024265117943286896, 0.0338168703019619, 0.04401172325015068, 0.01232925895601511, 0.024110857397317886, 0.022167416289448738, 0.023303115740418434, 0.023813769221305847, 0.0012829502811655402, 0.009403458796441555, 0.009546186774969101, 0.003907655831426382], [0.024345949292182922, 0.14425984025001526, 0.023194335401058197, 0.005597584880888462, 0.011541069485247135, 0.018332593142986298, 0.007748694159090519, 0.6151163578033447, 0.03137563541531563, 0.048103950917720795, 
0.005762364715337753, 0.008966946974396706, 0.002793673425912857, 0.009687871672213078, 0.0010243572760373354, 0.0006745533319190145, 0.0018337633227929473, 0.00048576408880762756, 0.02593245357275009, 0.0040280879475176334, 0.004762148018926382, 0.0044320570304989815], [0.04633856192231178, 0.08400874584913254, 0.022596031427383423, 0.014290343970060349, 0.02626809850335121, 0.043030884116888046, 0.11516667902469635, 0.11619932949542999, 0.04788805916905403, 0.22553770244121552, 0.032741107046604156, 0.014494108967483044, 0.02010370045900345, 0.010657355189323425, 0.0053123850375413895, 0.004781222902238369, 0.016907215118408203, 0.040879026055336, 0.026113586500287056, 0.027599770575761795, 0.04006841033697128, 0.019017688930034637], [0.03102157823741436, 0.06253468990325928, 0.007799937389791012, 0.05491503328084946, 0.036948949098587036, 0.04030109569430351, 0.00857644435018301, 0.029101930558681488, 0.10376787185668945, 0.026280121877789497, 0.006904151756316423, 0.2515150308609009, 0.26603829860687256, 0.009808819741010666, 0.025022050365805626, 0.007951987907290459, 0.005509675480425358, 0.0009624912636354566, 0.002246982418000698, 0.012909476645290852, 0.008573312312364578, 0.0013101489748805761], [0.006589728407561779, 0.04970596730709076, 0.027564965188503265, 0.005319906398653984, 0.007682932540774345, 0.004908125847578049, 0.04198037087917328, 0.06882839649915695, 0.020253965631127357, 0.02428556978702545, 0.013238683342933655, 0.09474530816078186, 0.11164216697216034, 0.12314321100711823, 0.010091274045407772, 0.00716705247759819, 0.008711280301213264, 0.07954788953065872, 0.18654772639274597, 0.040973249822854996, 0.020200874656438828, 0.04687139391899109], [0.01564459688961506, 0.030866973102092743, 0.030846191570162773, 0.020838087424635887, 0.016812846064567566, 0.03675638139247894, 0.14155006408691406, 0.01796896755695343, 0.03613848611712456, 0.08859828114509583, 0.02599284052848816, 0.014004560187458992, 0.05483095720410347, 0.03885086998343468, 
0.015068239532411098, 0.00664939358830452, 0.04437503591179848, 0.17341117560863495, 0.027069391682744026, 0.09691652655601501, 0.031586162745952606, 0.03522395342588425], [0.0091013852506876, 0.0672113299369812, 0.01913577690720558, 0.009380445815622807, 0.005812232848256826, 0.006993408780544996, 0.011918949894607067, 0.060386355966329575, 0.03997410833835602, 0.0186520516872406, 0.012145970948040485, 0.09928666800260544, 0.1914949119091034, 0.047168027609586716, 0.019743183627724648, 0.004623637534677982, 0.013362577185034752, 0.02527894824743271, 0.17023052275180817, 0.07863905280828476, 0.04217897728085518, 0.04728153720498085], [0.005516094155609608, 0.04362241551280022, 0.010101253166794777, 0.010337918996810913, 0.004087591543793678, 0.006989981513470411, 0.05959995090961456, 0.00503710750490427, 0.006593475583940744, 0.023410316556692123, 0.006728507112711668, 0.06044052913784981, 0.3823208808898926, 0.06337642669677734, 0.02251887135207653, 0.008634262718260288, 0.020730547606945038, 0.17194540798664093, 0.016902854666113853, 0.02318578027188778, 0.03186136484146118, 0.01605847291648388], [0.005976908840239048, 0.054547205567359924, 0.018963847309350967, 0.03727828338742256, 0.001690128818154335, 0.022524727508425713, 0.01781061291694641, 0.006024552974849939, 0.012470290064811707, 0.037386707961559296, 0.009869172237813473, 0.027479644864797592, 0.40405532717704773, 0.09814700484275818, 0.10326056182384491, 0.004677699413150549, 0.042853035032749176, 0.02848743088543415, 0.010514010675251484, 0.020190659910440445, 0.02731897681951523, 0.008473154157400131], [0.012516372837126255, 0.03613774850964546, 0.03687179461121559, 0.020450159907341003, 0.010545953176915646, 0.02063104882836342, 0.04074610769748688, 0.012035136111080647, 0.008813529275357723, 0.022372299805283546, 0.021010592579841614, 0.026164082810282707, 0.09396253526210785, 0.12277457863092422, 0.046752527356147766, 0.03982652723789215, 0.09719131141901016, 0.13192667067050934, 
0.07227469980716705, 0.035509951412677765, 0.028331002220511436, 0.06315537542104721], [0.025237098336219788, 0.06907933950424194, 0.03005044348537922, 0.11005954444408417, 0.024547334760427475, 0.026783563196659088, 0.018013939261436462, 0.029625291004776955, 0.028036007657647133, 0.04120749607682228, 0.012330000288784504, 0.0251272302120924, 0.07968375086784363, 0.03595699742436409, 0.1163334771990776, 0.023684769868850708, 0.06976230442523956, 0.02156572788953781, 0.07146522402763367, 0.049077533185482025, 0.054701708257198334, 0.03767119720578194], [0.012765739113092422, 0.01314904261380434, 0.010872581042349339, 0.02024068310856819, 0.013737129047513008, 0.0144925182685256, 0.037380944937467575, 0.0032069773878902197, 0.008599283173680305, 0.01764088310301304, 0.024281345307826996, 0.013788329437375069, 0.08052611351013184, 0.028611792251467705, 0.08051536977291107, 0.06988096237182617, 0.0815809965133667, 0.2642337381839752, 0.02795224077999592, 0.06913620978593826, 0.0478815995156765, 0.059525422751903534], [0.0034696278162300587, 0.03600180894136429, 0.010297795757651329, 0.0017688545631244779, 0.004111113958060741, 0.0017847104463726282, 0.004757181741297245, 0.06750616431236267, 0.01015018206089735, 0.014817649498581886, 0.0064494856633245945, 0.008391019888222218, 0.020074816420674324, 0.02394033409655094, 0.004645355045795441, 0.0029533898923546076, 0.010928311385214329, 0.021680966019630432, 0.5712337493896484, 0.058454036712646484, 0.044941917061805725, 0.0716414526104927], [0.010419447906315327, 0.011333952657878399, 0.005330985877662897, 0.0025089969858527184, 0.009902817197144032, 0.005681733135133982, 0.05336311087012291, 0.009163873270154, 0.011337904259562492, 0.014254998415708542, 0.011465998366475105, 0.003407042706385255, 0.014927092008292675, 0.007450385484844446, 0.0049841199070215225, 0.009667002595961094, 0.040834520012140274, 0.45882728695869446, 0.08152133226394653, 0.117707759141922, 0.04782036319375038, 0.06808934360742569], 
[0.014864468015730381, 0.031361792236566544, 0.012743384577333927, 0.010494420304894447, 0.013424529694020748, 0.004338096361607313, 0.00483671622350812, 0.034159231930971146, 0.03276157006621361, 0.002066913293674588, 0.0039254482835531235, 0.03407648950815201, 0.039460886269807816, 0.03952345252037048, 0.02486063912510872, 0.019294315949082375, 0.025740670040249825, 0.023951208218932152, 0.3843800723552704, 0.14931227266788483, 0.018697259947657585, 0.07572605460882187], [0.0005148100899532437, 0.001848850050009787, 0.0009661249932833016, 0.0002519235131330788, 0.0009559074533171952, 0.00023188105842564255, 0.015695326030254364, 0.0004089929279871285, 0.0015106346691027284, 0.0005730890552513301, 0.0008106788736768067, 0.0016721688443794847, 0.017619723454117775, 0.009820105507969856, 0.0016566741978749633, 0.002100760582834482, 0.009454675950109959, 0.7727891206741333, 0.037380896508693695, 0.08701454848051071, 0.008766992017626762, 0.027956100180745125]], [[0.026830419898033142, 0.12234314531087875, 0.019995469599962234, 0.020565791055560112, 0.1092422604560852, 0.046029750257730484, 0.036160457879304886, 0.08165235072374344, 0.05459677428007126, 0.15276296436786652, 0.03686368837952614, 0.026606814935803413, 0.12355325371026993, 0.017663871869444847, 0.010471031069755554, 0.02550850436091423, 0.008981945924460888, 0.005516305100172758, 0.012446640059351921, 0.014680733904242516, 0.03476700559258461, 0.012760695070028305], [0.013014205731451511, 0.2069377452135086, 0.007113661617040634, 0.01047456543892622, 0.049770016223192215, 0.04227830469608307, 0.14647731184959412, 0.058653488755226135, 0.15462924540042877, 0.09246089309453964, 0.003886020742356777, 0.022334707900881767, 0.08972759544849396, 0.00522567518055439, 0.003494064789265394, 0.007057057227939367, 0.0030184646602720022, 0.024196796119213104, 0.009669446386396885, 0.02859807200729847, 0.018184462562203407, 0.0027982855681329966], [0.024666089564561844, 0.04307851567864418, 0.015286171808838844, 
0.07435254007577896, 0.0766768753528595, 0.21275217831134796, 0.010698674246668816, 0.10781200975179672, 0.037281837314367294, 0.023526398465037346, 0.02131602354347706, 0.02774481289088726, 0.06765198707580566, 0.01476162951439619, 0.05234450846910477, 0.016406159847974777, 0.09364975243806839, 0.0034741321578621864, 0.04382726550102234, 0.009201333858072758, 0.012023321352899075, 0.011467796750366688], [0.10884175449609756, 0.027504602447152138, 0.019428187981247902, 0.04296339675784111, 0.3335638642311096, 0.17534367740154266, 0.02086588181555271, 0.01979345642030239, 0.07790867239236832, 0.005228047259151936, 0.010139512829482555, 0.011608809232711792, 0.024130554869771004, 0.023831533268094063, 0.012599386274814606, 0.05591350793838501, 0.017683880403637886, 0.0017137299291789532, 0.0025934374425560236, 0.005364464595913887, 0.00121244415640831, 0.001767209148965776], [0.03284476324915886, 0.08993563801050186, 0.08899695426225662, 0.017156923189759254, 0.05041836202144623, 0.1361413449048996, 0.02188241109251976, 0.08025253564119339, 0.0725947842001915, 0.15945060551166534, 0.054228525608778, 0.03383117541670799, 0.03600364923477173, 0.02391406148672104, 0.006584599614143372, 0.004465269390493631, 0.010145572014153004, 0.0015056623378768563, 0.0049051446840167046, 0.017332371324300766, 0.035303156822919846, 0.02210664376616478], [0.004318004008382559, 0.03447574004530907, 0.019628576934337616, 0.03495785966515541, 0.033436112105846405, 0.03924282640218735, 0.07005192339420319, 0.015602321363985538, 0.6424157023429871, 0.016029834747314453, 0.02898864448070526, 0.01004217378795147, 0.009204640984535217, 0.004290918819606304, 0.008479591459035873, 0.0017392354784533381, 0.0019267204916104674, 0.0008971933857537806, 0.00020715390564873815, 0.02235754393041134, 0.00041949129081331193, 0.0012877662666141987], [0.017267616465687752, 0.03383488208055496, 0.06057317554950714, 0.0230744406580925, 0.012592745013535023, 0.32979831099510193, 0.006453642155975103, 
0.26958996057510376, 0.010056251659989357, 0.049120936542749405, 0.07612667232751846, 0.02608628198504448, 0.012573197484016418, 0.020633472129702568, 0.011186561547219753, 0.0015807619784027338, 0.022723861038684845, 0.00026137029635719955, 0.005082405637949705, 0.0005005886778235435, 0.004428598564118147, 0.006454338785260916], [0.009864031337201595, 0.03697071969509125, 0.002392697613686323, 0.022699840366840363, 0.11349719762802124, 0.005398873705416918, 0.07559539377689362, 0.013344728387892246, 0.5850761532783508, 0.017664551734924316, 0.04532346501946449, 0.006779585033655167, 0.035208653658628464, 0.001253351685591042, 0.004386991262435913, 0.008777325972914696, 0.00045422467519529164, 0.0007160156383179128, 0.0002470240870025009, 0.013374102301895618, 0.00041244993917644024, 0.0005625116755254567], [0.004489596001803875, 0.027660027146339417, 0.030068280175328255, 0.016239404678344727, 0.010776517912745476, 0.08113506436347961, 0.10631862282752991, 0.03262755274772644, 0.11025291681289673, 0.26540669798851013, 0.0199835654348135, 0.07301908731460571, 0.053146686404943466, 0.037356726825237274, 0.01065562292933464, 0.007418282330036163, 0.010485964827239513, 0.03862486779689789, 0.0028056390583515167, 0.038403235375881195, 0.01530003733932972, 0.007825597189366817], [0.0014915465144440532, 0.03922053053975105, 0.010016605257987976, 0.025255173444747925, 0.01344760600477457, 0.003643561154603958, 0.06572879105806351, 0.010137343779206276, 0.08868769556283951, 0.13544869422912598, 0.024214087054133415, 0.019688645377755165, 0.44834545254707336, 0.01896418072283268, 0.03376245126128197, 0.002041177125647664, 0.0021954893600195646, 0.016961565241217613, 0.004671779926866293, 0.014099819585680962, 0.019273141399025917, 0.00270467484369874], [0.002844211645424366, 0.023631205782294273, 0.007845521904528141, 0.0024477774277329445, 0.002788066864013672, 0.0033436338417232037, 0.0038135696668177843, 0.017655406147241592, 0.0011597994016483426, 0.4813028872013092, 
0.008090916089713573, 0.030747652053833008, 0.14290474355220795, 0.019184736534953117, 0.0037914670538157225, 0.007282296661287546, 0.002409138949587941, 0.007931755855679512, 0.013719158247113228, 0.001839236356317997, 0.2063790112733841, 0.00888777244836092], [0.0014053876511752605, 0.020220285281538963, 0.011589210480451584, 0.0310983806848526, 0.004329939838498831, 0.004434788133949041, 0.02892095223069191, 0.011585038155317307, 0.003027890343219042, 0.07196450978517532, 0.013565588742494583, 0.020413709804415703, 0.4263724982738495, 0.03474919870495796, 0.11651647835969925, 0.004639248363673687, 0.020657261833548546, 0.04433056339621544, 0.05225347727537155, 0.0018924630712717772, 0.07074984163045883, 0.005283285863697529], [0.00026655365945771337, 0.0035475115291774273, 0.0008583664312027395, 0.0010326894698664546, 0.0012416329700499773, 0.0007617807132191956, 0.023356107994914055, 0.001830029534175992, 0.0026784969959408045, 0.006418939679861069, 0.0002029547467827797, 0.00724742142483592, 0.039068784564733505, 0.01863090880215168, 0.004059718456119299, 0.013136244378983974, 0.003372709732502699, 0.8167233467102051, 0.02668091468513012, 0.02068948931992054, 0.004481191281229258, 0.0037142678629606962], [0.0030643250793218613, 0.0060959020629525185, 0.006525792181491852, 0.025475960224866867, 0.010822154581546783, 0.039199307560920715, 0.00451018288731575, 0.013339993543922901, 0.005007535684853792, 0.004904928617179394, 0.006892306264489889, 0.012807668186724186, 0.07661803066730499, 0.030766701325774193, 0.10114124417304993, 0.011335615068674088, 0.3425459563732147, 0.02477397210896015, 0.23528221249580383, 0.009300864301621914, 0.01062816008925438, 0.018961101770401], [0.07859453558921814, 0.013658465817570686, 0.01753697544336319, 0.02767210081219673, 0.1152622178196907, 0.04501432552933693, 0.011585703119635582, 0.009324807673692703, 0.007576699834316969, 0.00373931135982275, 0.0030022880528122187, 0.015059469267725945, 0.02783881314098835, 
0.08991105109453201, 0.03159945085644722, 0.3223400413990021, 0.06827343255281448, 0.041981689631938934, 0.03490432724356651, 0.014091837219893932, 0.006182088050991297, 0.014850353822112083], [0.027059653773903847, 0.0353269949555397, 0.03898926079273224, 0.033217135816812515, 0.06419259309768677, 0.04554210230708122, 0.01098601333796978, 0.04079718515276909, 0.010447981767356396, 0.019447386264801025, 0.05070199817419052, 0.017216170206665993, 0.05634069815278053, 0.0252242349088192, 0.05140721797943115, 0.02501712739467621, 0.14240573346614838, 0.011675878427922726, 0.15289779007434845, 0.013019446283578873, 0.0644015297293663, 0.06368575990200043], [0.003759104060009122, 0.013992724008858204, 0.04867419973015785, 0.021475857123732567, 0.013151217252016068, 0.016747206449508667, 0.024748681113123894, 0.004312619566917419, 0.07426024228334427, 0.007287092041224241, 0.004236086271703243, 0.018862880766391754, 0.004255824256688356, 0.04006703570485115, 0.022575149312615395, 0.024575700983405113, 0.01650983653962612, 0.0646706223487854, 0.0043061221949756145, 0.4932115972042084, 0.003260355442762375, 0.07505979388952255], [0.014950045384466648, 0.01140603143721819, 0.05961305648088455, 0.01610664464533329, 0.004353567026555538, 0.04439177364110947, 0.0016411672113463283, 0.027393419295549393, 0.0005855225608684123, 0.0037343211006373167, 0.013408827595412731, 0.007244282867759466, 0.005084891337901354, 0.04705299437046051, 0.044380996376276016, 0.007876639254391193, 0.28341782093048096, 0.007473444100469351, 0.22455909848213196, 0.0030711404979228973, 0.022091951221227646, 0.15016232430934906], [0.010336373932659626, 0.021202830597758293, 0.005363184493035078, 0.00794446375221014, 0.01688551902770996, 0.002683974104002118, 0.03611796721816063, 0.004014943726360798, 0.02727191150188446, 0.0035182852298021317, 0.0017225112533196807, 0.007243942003697157, 0.006398777011781931, 0.006623703986406326, 0.006599380634725094, 0.15154483914375305, 0.0059873852878808975, 
0.20327457785606384, 0.014043424278497696, 0.4071583151817322, 0.004099941812455654, 0.04996379464864731], [0.008731049485504627, 0.016192056238651276, 0.025298262014985085, 0.02822226658463478, 0.011974694207310677, 0.033445633947849274, 0.025229215621948242, 0.007062586024403572, 0.006747832987457514, 0.024798665195703506, 0.004079889506101608, 0.012390444055199623, 0.02203773520886898, 0.03962637856602669, 0.05193233862519264, 0.03672056272625923, 0.14583690464496613, 0.21490253508090973, 0.07821787893772125, 0.05195895954966545, 0.09211879968643188, 0.062475282698869705], [0.007885018363595009, 0.06885085254907608, 0.011768508702516556, 0.006510366220027208, 0.012362745590507984, 0.0011730219703167677, 0.04534003511071205, 0.004545075353235006, 0.02483294904232025, 0.06839655339717865, 0.0026353683788329363, 0.012534473091363907, 0.0783974677324295, 0.020575912669301033, 0.008484442718327045, 0.010421175509691238, 0.0018171569099649787, 0.19761961698532104, 0.030113859102129936, 0.20902447402477264, 0.12149538099765778, 0.0552155077457428], [0.009813892655074596, 0.03100442886352539, 0.013564400374889374, 0.007959370501339436, 0.00801424402743578, 0.009735705330967903, 0.004926213063299656, 0.008311440236866474, 0.000982780591584742, 0.01491556502878666, 0.0020316578447818756, 0.0038447463884949684, 0.03337261453270912, 0.016931787133216858, 0.017704954370856285, 0.02365952357649803, 0.08192635327577591, 0.049834877252578735, 0.31283843517303467, 0.01197319570928812, 0.2779395580291748, 0.05871420353651047]], [[0.0022836467251181602, 0.09594250470399857, 0.04672391712665558, 0.003665732219815254, 0.005202189087867737, 0.005962392780929804, 0.08796589821577072, 0.0576767735183239, 0.0897630900144577, 0.02650454267859459, 0.030538996681571007, 0.05715492367744446, 0.1534041166305542, 0.0985114648938179, 0.008904526941478252, 0.00712989317253232, 0.00510883005335927, 0.060784611850976944, 0.0437849797308445, 0.06958132237195969, 0.014952881261706352, 
0.028452781960368156], [0.0250866562128067, 0.05857633426785469, 0.04073449596762657, 0.012255718000233173, 0.014256537891924381, 0.04039168730378151, 0.01138121448457241, 0.0960521399974823, 0.006089692935347557, 0.0366649329662323, 0.018813084810972214, 0.029127389192581177, 0.08187223225831985, 0.05607429891824722, 0.015181971713900566, 0.02325272560119629, 0.07698609679937363, 0.014092082157731056, 0.19807282090187073, 0.011848712339997292, 0.08669563382863998, 0.04649357125163078], [0.00500477384775877, 0.07026899605989456, 0.0165601447224617, 0.00907563604414463, 0.009204883128404617, 0.009839221835136414, 0.027910714969038963, 0.11059948056936264, 0.04032571241259575, 0.05520887300372124, 0.02748076431453228, 0.061359018087387085, 0.2650737166404724, 0.034819889813661575, 0.018277404829859734, 0.010525889694690704, 0.017582304775714874, 0.02684461511671543, 0.08645827323198318, 0.03263666480779648, 0.046579305082559586, 0.018363788723945618], [0.022905809804797173, 0.041128989309072495, 0.02994602546095848, 0.010307159274816513, 0.00576028972864151, 0.03600656986236572, 0.027850313112139702, 0.07479208707809448, 0.0061509511433541775, 0.023593109101057053, 0.024196313694119453, 0.031010646373033524, 0.12087418884038925, 0.055776447057724, 0.01623925380408764, 0.00916184950619936, 0.09514403343200684, 0.060567211359739304, 0.18158316612243652, 0.012973221950232983, 0.052192892879247665, 0.06183944270014763], [0.015473957173526287, 0.09415993839502335, 0.031601689755916595, 0.011932499706745148, 0.012759423814713955, 0.01479211077094078, 0.11415406316518784, 0.03986100107431412, 0.03555877506732941, 0.05648500844836235, 0.02449972927570343, 0.03119923546910286, 0.05860896408557892, 0.02013840340077877, 0.017957603558897972, 0.013182819820940495, 0.021943844854831696, 0.15542137622833252, 0.045176077634096146, 0.05712828412652016, 0.0772400051355362, 0.050725165754556656], [0.04328317195177078, 0.05725022405385971, 0.09884607791900635, 0.05946816876530647, 
0.02020920254290104, 0.030030950903892517, 0.01601017825305462, 0.042188260704278946, 0.020450318232178688, 0.02615729719400406, 0.029933879151940346, 0.06726676225662231, 0.05081909894943237, 0.10583983361721039, 0.05522662401199341, 0.02107393741607666, 0.03574984148144722, 0.01469096913933754, 0.05917353928089142, 0.025696909055113792, 0.047728221863508224, 0.0729065090417862], [0.011640719138085842, 0.11275999248027802, 0.05865217000246048, 0.012464893981814384, 0.025042256340384483, 0.033266931772232056, 0.1117648035287857, 0.059171389788389206, 0.02979353629052639, 0.07694246619939804, 0.03242809325456619, 0.06359434872865677, 0.08331014961004257, 0.035198308527469635, 0.012730574235320091, 0.01369407307356596, 0.017690960317850113, 0.04650972783565521, 0.03153030574321747, 0.027790389955043793, 0.06133149936795235, 0.04269251972436905], [0.022987470030784607, 0.1755257099866867, 0.06359460204839706, 0.025191159918904305, 0.009643998928368092, 0.09602886438369751, 0.019507430493831635, 0.14715765416622162, 0.017642119899392128, 0.058837633579969406, 0.017309149727225304, 0.023947149515151978, 0.022368324920535088, 0.03036423586308956, 0.012121033854782581, 0.009101318195462227, 0.03315700590610504, 0.0049505471251904964, 0.07693656533956528, 0.010888525284826756, 0.0948934257030487, 0.02784609980881214], [0.060765184462070465, 0.10587610304355621, 0.08119436353445053, 0.0270925872027874, 0.026500707492232323, 0.04189428687095642, 0.13293838500976562, 0.04232120141386986, 0.021716102957725525, 0.06369255483150482, 0.046704795211553574, 0.06305704265832901, 0.052121601998806, 0.022788600996136665, 0.012051126919686794, 0.013625754043459892, 0.010511035099625587, 0.032563332468271255, 0.015184624120593071, 0.02014700137078762, 0.06189639866352081, 0.04535730555653572], [0.02365829423069954, 0.07542961090803146, 0.056637439876794815, 0.015374058857560158, 0.020056266337633133, 0.14023680984973907, 0.01605214551091194, 0.15197083353996277, 0.023884853348135948, 
0.09203484654426575, 0.033569615334272385, 0.0288908239454031, 0.07129494845867157, 0.05015518143773079, 0.010629700496792793, 0.0059849475510418415, 0.05051693692803383, 0.00422921497374773, 0.04080533608794212, 0.0066720591858029366, 0.07032019644975662, 0.011595958843827248], [0.00693726958706975, 0.06017671525478363, 0.02268875762820244, 0.0064131417311728, 0.01113155484199524, 0.024440396577119827, 0.06712611764669418, 0.13665002584457397, 0.2141595184803009, 0.05889498069882393, 0.048467691987752914, 0.06741362065076828, 0.1386408805847168, 0.035751741379499435, 0.007465804927051067, 0.0034853052347898483, 0.009298768825829029, 0.013345438055694103, 0.021585123613476753, 0.026489203795790672, 0.012526949867606163, 0.006910892203450203], [0.003848403226584196, 0.08167819678783417, 0.03879782184958458, 0.005762017332017422, 0.017011510208249092, 0.013388827443122864, 0.1305379867553711, 0.05909764766693115, 0.11684197187423706, 0.11149654537439346, 0.08209406584501266, 0.06671538949012756, 0.1450926959514618, 0.03727738931775093, 0.00672325911000371, 0.010482270270586014, 0.0024374322965741158, 0.017705846577882767, 0.00847349688410759, 0.021281739696860313, 0.015751944854855537, 0.0075035071931779385], [0.007183029782027006, 0.11541110277175903, 0.05544233322143555, 0.003301888471469283, 0.008300076238811016, 0.054649390280246735, 0.04325057938694954, 0.15145929157733917, 0.06296990066766739, 0.08434399962425232, 0.03803056851029396, 0.0533788688480854, 0.18740320205688477, 0.054516568779945374, 0.003706843126565218, 0.0029547172598540783, 0.014393380843102932, 0.0057903160341084, 0.02391948737204075, 0.007205503527075052, 0.017284687608480453, 0.005104230251163244], [0.002258048392832279, 0.055540781468153, 0.024140257388353348, 0.003108639968559146, 0.010237974114716053, 0.01006346382200718, 0.056870535016059875, 0.07592152059078217, 0.09033332020044327, 0.05632012337446213, 0.054739098995923996, 0.061393335461616516, 0.3365449905395508, 
0.040334850549697876, 0.006437717936933041, 0.005488797556608915, 0.006802279967814684, 0.028195273131132126, 0.02308877557516098, 0.026344187557697296, 0.01925746724009514, 0.006578631699085236], [0.013467997312545776, 0.026828588917851448, 0.026422979310154915, 0.004390763584524393, 0.0044675846584141254, 0.02895769663155079, 0.040082938969135284, 0.05921367183327675, 0.01506359688937664, 0.0256893839687109, 0.05504859611392021, 0.06028778851032257, 0.19971492886543274, 0.0747130736708641, 0.009659959003329277, 0.005959075875580311, 0.06279067695140839, 0.0748877301812172, 0.11756981164216995, 0.018278442323207855, 0.027491474524140358, 0.0490131713449955], [0.006793740205466747, 0.04332856461405754, 0.012020334601402283, 0.004525192081928253, 0.013067394495010376, 0.01076830830425024, 0.1384900063276291, 0.032182008028030396, 0.05601724609732628, 0.0637175664305687, 0.04145526513457298, 0.04538614675402641, 0.14858146011829376, 0.015195336192846298, 0.009827393107116222, 0.010168752633035183, 0.014653980731964111, 0.1931108683347702, 0.026625970378518105, 0.05346278101205826, 0.04239201545715332, 0.018229622393846512], [0.009910643100738525, 0.011911972425878048, 0.05943729355931282, 0.009552434086799622, 0.016409622505307198, 0.011367742903530598, 0.014128957875072956, 0.016027113422751427, 0.05753064155578613, 0.01475439965724945, 0.08984216302633286, 0.11908409744501114, 0.10749520361423492, 0.19690749049186707, 0.023754216730594635, 0.013120528310537338, 0.024750936776399612, 0.030243968591094017, 0.03330177441239357, 0.06106406822800636, 0.01447407528758049, 0.06493069231510162], [0.002873645629733801, 0.027040719985961914, 0.02593529410660267, 0.0031616021879017353, 0.016844889149069786, 0.012847146019339561, 0.08257611840963364, 0.0214983019977808, 0.057210296392440796, 0.05691104009747505, 0.09599797427654266, 0.11797363311052322, 0.1536739468574524, 0.04788660258054733, 0.008016988635063171, 0.011174303479492664, 0.012816919945180416, 
0.09069471061229706, 0.021055342629551888, 0.06384596973657608, 0.030212555080652237, 0.03975202143192291], [0.003901024581864476, 0.04491043463349342, 0.03615347295999527, 0.004192301072180271, 0.004929563496261835, 0.02583666332066059, 0.02725599706172943, 0.07900620251893997, 0.043206606060266495, 0.030986150726675987, 0.0879565179347992, 0.09461632370948792, 0.15646642446517944, 0.08074016869068146, 0.007730333600193262, 0.00631602993234992, 0.02492739073932171, 0.026812391355633736, 0.10904134064912796, 0.03709962218999863, 0.02787199430167675, 0.04004315659403801], [0.01798691228032112, 0.015901675447821617, 0.03496798500418663, 0.00437829177826643, 0.017119275406003, 0.013842172920703888, 0.09458360821008682, 0.013024638406932354, 0.021810200065374374, 0.027157751843333244, 0.11839177459478378, 0.14085212349891663, 0.12914133071899414, 0.03933389112353325, 0.006186306476593018, 0.011733446270227432, 0.013524536974728107, 0.11990941315889359, 0.015714021399617195, 0.04221653565764427, 0.026113269850611687, 0.0761108323931694], [0.00784076377749443, 0.02785372920334339, 0.03262931481003761, 0.0036317049525678158, 0.013102618977427483, 0.05514393746852875, 0.0074495901353657246, 0.07390665262937546, 0.01633937656879425, 0.05074921250343323, 0.056263845413923264, 0.02963704615831375, 0.1117592304944992, 0.08912288397550583, 0.008107253350317478, 0.01229534950107336, 0.11443492770195007, 0.017992397770285606, 0.13294297456741333, 0.020827963948249817, 0.08312948793172836, 0.03483985736966133], [0.002521845046430826, 0.008034632541239262, 0.012292624451220036, 0.001479002763517201, 0.007954040542244911, 0.003996504005044699, 0.031465623527765274, 0.025728430598974228, 0.04453736171126366, 0.014019659720361233, 0.08962266147136688, 0.10321947187185287, 0.32986965775489807, 0.07741944491863251, 0.006230665370821953, 0.007393725216388702, 0.013777968473732471, 0.08623503148555756, 0.042740534991025925, 0.051475029438734055, 0.008558688685297966, 
0.031427379697561264]], [[0.0037698305677622557, 0.07636323571205139, 0.042724307626485825, 0.002766511868685484, 0.013945093378424644, 0.010402954183518887, 0.014419066719710827, 0.037901122123003006, 0.1638367772102356, 0.0366385281085968, 0.08798696100711823, 0.061365626752376556, 0.05452529713511467, 0.025181787088513374, 0.0025447297375649214, 0.004884072579443455, 0.0057838549837470055, 0.010019105859100819, 0.026698991656303406, 0.16799888014793396, 0.028740040957927704, 0.1215033307671547], [0.05991021916270256, 0.09732361882925034, 0.010638219304382801, 0.022635484114289284, 0.08246967196464539, 0.02362678386271, 0.026411011815071106, 0.17901071906089783, 0.018275298178195953, 0.10536357015371323, 0.028206458315253258, 0.023183366283774376, 0.018155638128519058, 0.00586669472977519, 0.008843723684549332, 0.019250011071562767, 0.009842080064117908, 0.008644657209515572, 0.05099531263113022, 0.010256575420498848, 0.16697412729263306, 0.024116775020956993], [0.00816754437983036, 0.09076808393001556, 0.08276306092739105, 0.01343468390405178, 0.05563250184059143, 0.0337538979947567, 0.0211922749876976, 0.07292517274618149, 0.10936431586742401, 0.04709893465042114, 0.10449670255184174, 0.07225364446640015, 0.01797712780535221, 0.019345270469784737, 0.00584282586351037, 0.01244287472218275, 0.007053177338093519, 0.005045235622674227, 0.020672108978033066, 0.06965892761945724, 0.022115591913461685, 0.10799605399370193], [0.023412028327584267, 0.03495578467845917, 0.02591637521982193, 0.02291865646839142, 0.02623775042593479, 0.025628773495554924, 0.03521033003926277, 0.09318812936544418, 0.08803553134202957, 0.02571181207895279, 0.0413290411233902, 0.07942168414592743, 0.07178934663534164, 0.03863755613565445, 0.021555138751864433, 0.01264562364667654, 0.03190776705741882, 0.030554750934243202, 0.08746267855167389, 0.1092238649725914, 0.022523527964949608, 0.05173378810286522], [0.01642417348921299, 0.11902938038110733, 0.0962936207652092, 0.018405383452773094, 
0.01827012188732624, 0.0502220019698143, 0.06014096736907959, 0.052805617451667786, 0.05127813294529915, 0.045002687722444534, 0.013420728035271168, 0.023902656510472298, 0.01708001084625721, 0.03712591156363487, 0.015176287852227688, 0.011560500599443913, 0.05600855499505997, 0.07895669341087341, 0.061534326523542404, 0.07764565199613571, 0.04610646516084671, 0.03361016511917114], [0.049701299518346786, 0.18369808793067932, 0.2662368714809418, 0.02917492762207985, 0.02979276143014431, 0.03298933804035187, 0.03765570744872093, 0.044861678034067154, 0.03236439824104309, 0.05468835309147835, 0.03794001042842865, 0.02384844794869423, 0.0061789993196725845, 0.03596069663763046, 0.006151401903480291, 0.00864407978951931, 0.006443946156650782, 0.007115006912499666, 0.010292571038007736, 0.017527326941490173, 0.03307674080133438, 0.04565746337175369], [0.008063009940087795, 0.18410666286945343, 0.020552577450871468, 0.006306731142103672, 0.020260706543922424, 0.04976686090230942, 0.007454734295606613, 0.14827439188957214, 0.10241605341434479, 0.0655265524983406, 0.0374072901904583, 0.039407309144735336, 0.0376589372754097, 0.006408306770026684, 0.0027698581106960773, 0.004461729433387518, 0.017248811200261116, 0.0033688361290842295, 0.03634457290172577, 0.07962214946746826, 0.07403313368558884, 0.04854084178805351], [0.00762375071644783, 0.15220841765403748, 0.00398594792932272, 0.004306930582970381, 0.010325994342565536, 0.010268031619489193, 0.0065398504957556725, 0.4167143404483795, 0.011729477904736996, 0.1446690708398819, 0.0032255989499390125, 0.009088247083127499, 0.028892382979393005, 0.0024027100298553705, 0.0012962031178176403, 0.0021548159420490265, 0.0018077048007398844, 0.0012043851893395185, 0.048005156219005585, 0.004518370609730482, 0.12649604678153992, 0.002536492422223091], [0.029195424169301987, 0.12666665017604828, 0.08694930374622345, 0.023257073014974594, 0.022700365632772446, 0.03161526843905449, 0.02057146094739437, 0.03765285015106201, 
0.24553678929805756, 0.03883000463247299, 0.04393509775400162, 0.038450516760349274, 0.017542727291584015, 0.03377028554677963, 0.005493228789418936, 0.006953477393835783, 0.003921670373529196, 0.0035133836790919304, 0.0028058220632374287, 0.09497492760419846, 0.013197719119489193, 0.07246605306863785], [0.009751202538609505, 0.11606108397245407, 0.01078201737254858, 0.005283655133098364, 0.010048635303974152, 0.07947264611721039, 0.018588386476039886, 0.32356375455856323, 0.01708691380918026, 0.2254871279001236, 0.013583497144281864, 0.02429596520960331, 0.0246573593467474, 0.0025126230902969837, 0.0016550426371395588, 0.003777129575610161, 0.01589176431298256, 0.0018729245057329535, 0.03662523254752159, 0.005192159209400415, 0.04960770159959793, 0.004203155171126127], [0.011755217798054218, 0.02769528701901436, 0.027323972433805466, 0.003058559028431773, 0.05969049781560898, 0.013077951967716217, 0.02160482481122017, 0.1258595883846283, 0.228751540184021, 0.055289506912231445, 0.16329297423362732, 0.09313969314098358, 0.02796369418501854, 0.007708385121077299, 0.0012844788143411279, 0.011021344922482967, 0.0013596460921689868, 0.0016221902333199978, 0.00885070487856865, 0.06515025347471237, 0.0053760455921292305, 0.039123646914958954], [0.004422611091285944, 0.02310442365705967, 0.08163461089134216, 0.0057561760768294334, 0.038702454417943954, 0.015004710294306278, 0.015585527755320072, 0.035722509026527405, 0.09117431193590164, 0.06105421483516693, 0.3978099822998047, 0.07979632169008255, 0.05200653523206711, 0.02204298973083496, 0.003105717245489359, 0.010923146270215511, 0.0014147834153845906, 0.0009333043126389384, 0.003991092089563608, 0.015091785229742527, 0.004418449942022562, 0.0363042838871479], [0.015021550469100475, 0.06194452568888664, 0.05392453819513321, 0.002158351708203554, 0.050006963312625885, 0.03880118578672409, 0.04903022199869156, 0.0373527854681015, 0.04350319877266884, 0.10692887753248215, 0.2702861428260803, 0.08463197201490402, 
0.0885496735572815, 0.015425420366227627, 0.00148486637044698, 0.016896799206733704, 0.0059459093026816845, 0.004688462242484093, 0.006405813619494438, 0.008448552340269089, 0.008745690807700157, 0.029818516224622726], [0.0032928939908742905, 0.013820863328874111, 0.048063974827528, 0.005448904354125261, 0.03972414508461952, 0.011773678474128246, 0.024730805307626724, 0.02518215961754322, 0.07458092272281647, 0.028087545186281204, 0.23729275166988373, 0.0781751498579979, 0.04659770056605339, 0.04123183712363243, 0.009147850796580315, 0.02482626587152481, 0.00874354038387537, 0.01631486974656582, 0.03052520379424095, 0.08023253828287125, 0.011984018608927727, 0.14022232592105865], [0.005834388546645641, 0.009439251385629177, 0.009133086539804935, 0.007636907044798136, 0.011225148104131222, 0.01539701409637928, 0.02289082668721676, 0.04944629222154617, 0.05131933093070984, 0.021900439634919167, 0.05679907649755478, 0.09260929375886917, 0.2238820344209671, 0.03557448461651802, 0.02036290057003498, 0.014315617270767689, 0.04548044130206108, 0.047529496252536774, 0.11409109830856323, 0.08879531174898148, 0.01675380766391754, 0.039583720266819], [0.003839339828118682, 0.023824866861104965, 0.020296599715948105, 0.0093866977840662, 0.006107169669121504, 0.011993909254670143, 0.0289019625633955, 0.030751846730709076, 0.05999641492962837, 0.033553291112184525, 0.02342003956437111, 0.020335931330919266, 0.05864832177758217, 0.04471014067530632, 0.027556991204619408, 0.009469023905694485, 0.04638703912496567, 0.13927437365055084, 0.12323760986328125, 0.16503281891345978, 0.06946086883544922, 0.04381481185555458], [0.010303065180778503, 0.015564720146358013, 0.06938604265451431, 0.01032545231282711, 0.011978477239608765, 0.007898804731667042, 0.02186845801770687, 0.011064445599913597, 0.02233671210706234, 0.03426293656229973, 0.09776140749454498, 0.025352507829666138, 0.03787805140018463, 0.0970529168844223, 0.022681208327412605, 0.025106685236096382, 0.030499860644340515, 
0.09234623610973358, 0.06946049630641937, 0.0886489599943161, 0.06284060329198837, 0.13538196682929993], [0.0020787494722753763, 0.017592186108231544, 0.006570742931216955, 0.0016386433271691203, 0.006559570319950581, 0.007192827295511961, 0.0035722735337913036, 0.023774534463882446, 0.03558770567178726, 0.03309331461787224, 0.07427767664194107, 0.031629446893930435, 0.09623534232378006, 0.015562187880277634, 0.005358236841857433, 0.010934080928564072, 0.03655540943145752, 0.0273322481662035, 0.13267628848552704, 0.18723571300506592, 0.11328325420618057, 0.1312595009803772], [0.001142666325904429, 0.014401676133275032, 0.0016841033939272165, 0.0016225662548094988, 0.0036906676832586527, 0.0018804685678333044, 0.0023906987626105547, 0.062039751559495926, 0.006555049680173397, 0.07132092118263245, 0.014999683015048504, 0.015645667910575867, 0.21144723892211914, 0.008962525986135006, 0.004583935718983412, 0.004927567671984434, 0.008878764696419239, 0.012816025875508785, 0.2877787947654724, 0.02518945373594761, 0.22028475999832153, 0.017757050693035126], [0.007141471840441227, 0.014187241904437542, 0.02661629393696785, 0.005601246375590563, 0.004330281168222427, 0.00422939145937562, 0.004771297797560692, 0.003907614387571812, 0.12044288963079453, 0.007869232445955276, 0.04845189303159714, 0.023870287463068962, 0.021708881482481956, 0.048444293439388275, 0.006055481731891632, 0.006160680670291185, 0.007035680115222931, 0.012830200605094433, 0.004828016739338636, 0.36900249123573303, 0.009519852697849274, 0.24299533665180206], [0.0030799706000834703, 0.030422333627939224, 0.0019866686780005693, 0.0017322447383776307, 0.003797183744609356, 0.014727737754583359, 0.006649959832429886, 0.07344694435596466, 0.0047836932353675365, 0.13255877792835236, 0.010230840183794498, 0.017398975789546967, 0.08650811016559601, 0.0032971929758787155, 0.0033853710629045963, 0.007739955093711615, 0.059936102479696274, 0.02156846970319748, 0.25974977016448975, 0.014160805381834507, 
0.23397284746170044, 0.008866124786436558], [0.0028286054730415344, 0.0025516769383102655, 0.014808772131800652, 0.002154372166842222, 0.011994725093245506, 0.0010788746876642108, 0.0038355709984898567, 0.001873169676400721, 0.05668467655777931, 0.0033920712303370237, 0.1323896199464798, 0.026187077164649963, 0.0158243365585804, 0.030822250992059708, 0.006341527681797743, 0.013472983613610268, 0.00403177784755826, 0.01699524000287056, 0.011894122697412968, 0.2967626750469208, 0.004620593506842852, 0.33945518732070923]], [[0.01842007227241993, 0.053853828459978104, 0.02571035549044609, 0.016864551231265068, 0.013942470774054527, 0.04256708547472954, 0.014920257963240147, 0.026308182626962662, 0.05918794497847557, 0.029379986226558685, 0.04621856287121773, 0.03216542676091194, 0.056579090654850006, 0.04037986323237419, 0.02167700231075287, 0.012575428001582623, 0.12581248581409454, 0.019268542528152466, 0.09994781762361526, 0.1120539978146553, 0.03658699616789818, 0.09558004885911942], [0.008400481194257736, 0.07292553782463074, 0.017333587631583214, 0.07547533512115479, 0.016260622069239616, 0.00571797601878643, 0.014009697362780571, 0.01818123646080494, 0.012023426592350006, 0.34946757555007935, 0.01066497527062893, 0.021302305161952972, 0.07894759625196457, 0.017160264775156975, 0.059569600969552994, 0.010293957777321339, 0.005465067457407713, 0.00617948267608881, 0.011553691700100899, 0.017642581835389137, 0.15575957298278809, 0.0156654454767704], [0.00774208502843976, 0.04063931852579117, 0.01751181110739708, 0.02428843080997467, 0.017098823562264442, 0.0060385833494365215, 0.03598013147711754, 0.00507534621283412, 0.0857740193605423, 0.021643197163939476, 0.0680602565407753, 0.09142429381608963, 0.16963247954845428, 0.027044011279940605, 0.029564466327428818, 0.009325500577688217, 0.007390085607767105, 0.02170742116868496, 0.013470512814819813, 0.18087293207645416, 0.018901841714978218, 0.10081447660923004], [0.019330035895109177, 0.09975230693817139, 
0.031284816563129425, 0.2396642416715622, 0.017276667058467865, 0.04201769456267357, 0.02929067611694336, 0.03732100501656532, 0.014354486018419266, 0.09941945225000381, 0.011629512533545494, 0.0263828057795763, 0.05595371127128601, 0.029392391443252563, 0.11048489063978195, 0.004207654390484095, 0.022643936797976494, 0.0043798331171274185, 0.021241268143057823, 0.012734263204038143, 0.043291252106428146, 0.0279470793902874], [0.0217567328363657, 0.16656364500522614, 0.027728531509637833, 0.08269675821065903, 0.007718891371041536, 0.027332739904522896, 0.04419608786702156, 0.06280950456857681, 0.07826454192399979, 0.09478561580181122, 0.03188089281320572, 0.056676845997571945, 0.07861550897359848, 0.01783057674765587, 0.04554835706949234, 0.004827759228646755, 0.011457390151917934, 0.00866392720490694, 0.02154238522052765, 0.05176936462521553, 0.028096044436097145, 0.029237966984510422], [0.029377134516835213, 0.049069594591856, 0.02182662859559059, 0.008069795556366444, 0.07601384818553925, 0.029953785240650177, 0.30860140919685364, 0.02912725694477558, 0.10110802948474884, 0.011139055714011192, 0.1646956354379654, 0.018729446455836296, 0.048436783254146576, 0.010682178661227226, 0.004811637103557587, 0.01605776511132717, 0.008045002818107605, 0.020642321556806564, 0.0077801658771932125, 0.017064616084098816, 0.005007726605981588, 0.01376011036336422], [0.007789764553308487, 0.06742727756500244, 0.032974883913993835, 0.009481685236096382, 0.05727893486618996, 0.12037798762321472, 0.010288483463227749, 0.06030627340078354, 0.07260898500680923, 0.05665078014135361, 0.03477943688631058, 0.029761912301182747, 0.1336860954761505, 0.07812279462814331, 0.010415174998342991, 0.0160922072827816, 0.10953430086374283, 0.006039103027433157, 0.028654273599386215, 0.016978658735752106, 0.025194982066750526, 0.015556086786091328], [0.009019949473440647, 0.06296560913324356, 0.015583320520818233, 0.04801841080188751, 0.010253466665744781, 0.013492697849869728, 
0.008087817579507828, 0.048071153461933136, 0.00752518093213439, 0.587822437286377, 0.03625085577368736, 0.005234504118561745, 0.04345020651817322, 0.004647060763090849, 0.020867787301540375, 0.000808994984254241, 0.004376341588795185, 0.0001299329160246998, 0.004843085538595915, 0.0007553952746093273, 0.06431060284376144, 0.0034852409735322], [0.013081138953566551, 0.0776035264134407, 0.05113331601023674, 0.0018157415324822068, 0.04655340686440468, 0.011396696791052818, 0.07130177319049835, 0.03675509989261627, 0.12291961163282394, 0.019263768568634987, 0.09155934303998947, 0.08444000780582428, 0.17879490554332733, 0.0746283009648323, 0.0026438564527779818, 0.014375866390764713, 0.006496865767985582, 0.026478223502635956, 0.01158966962248087, 0.029268696904182434, 0.013657830655574799, 0.014242351986467838], [0.01630992814898491, 0.029414566233754158, 0.023326832801103592, 0.04808306321501732, 0.04465743154287338, 0.00641900347545743, 0.05358055233955383, 0.01520226988941431, 0.04936370626091957, 0.11476069688796997, 0.1373547613620758, 0.03912271559238434, 0.19933003187179565, 0.06044486165046692, 0.04566481336951256, 0.00569231016561389, 0.005570978857576847, 0.01029937993735075, 0.009945944882929325, 0.013426337391138077, 0.02658468671143055, 0.045445144176483154], [0.009418069384992123, 0.08447913825511932, 0.02346985600888729, 0.01489685196429491, 0.01313639897853136, 0.00918454211205244, 0.013590112328529358, 0.04663711413741112, 0.029733898118138313, 0.16305150091648102, 0.01807127334177494, 0.12569749355316162, 0.192289337515831, 0.03922785446047783, 0.016996003687381744, 0.017790498211979866, 0.007446705363690853, 0.012131767347455025, 0.04923347011208534, 0.03978907689452171, 0.047096312046051025, 0.02663275972008705], [0.0033125935588032007, 0.011566856876015663, 0.023981118574738503, 0.0003379981208126992, 0.03191056847572327, 0.0024127743672579527, 0.15493683516979218, 0.017164716497063637, 0.16677841544151306, 0.00569110456854105, 
0.20399239659309387, 0.06690779328346252, 0.10606781393289566, 0.0638241246342659, 0.0010950146242976189, 0.016827231273055077, 0.0018010541098192334, 0.07522623986005783, 0.009255855344235897, 0.024850891903042793, 0.0032666914630681276, 0.008791862055659294], [0.0008084097062237561, 0.01965087652206421, 0.0050815497525036335, 0.001332865096628666, 0.009661262854933739, 0.001326260156929493, 0.11428959667682648, 0.007450988981872797, 0.018209582194685936, 0.1279168426990509, 0.014512421563267708, 0.057722680270671844, 0.24882850050926208, 0.03322049230337143, 0.005569027736783028, 0.021545208990573883, 0.002084747888147831, 0.22144369781017303, 0.01636209525167942, 0.030257612466812134, 0.030163243412971497, 0.012562035582959652], [0.00343812326900661, 0.015416311100125313, 0.01611630991101265, 0.004278510343283415, 0.028787264600396156, 0.0016660703113302588, 0.09545734524726868, 0.0013397090369835496, 0.048885393887758255, 0.0028596054762601852, 0.08744515478610992, 0.047601230442523956, 0.17009806632995605, 0.03919666260480881, 0.012272721156477928, 0.021036362275481224, 0.004201109521090984, 0.14842985570430756, 0.009228261187672615, 0.14760808646678925, 0.007939177565276623, 0.08669876307249069], [0.012830229476094246, 0.043869711458683014, 0.02542373165488243, 0.13019759953022003, 0.026093710213899612, 0.026659121736884117, 0.022686971351504326, 0.015103779733181, 0.00422759260982275, 0.04362986981868744, 0.008199472911655903, 0.02382313832640648, 0.0651540458202362, 0.04971017688512802, 0.1579047292470932, 0.03057972900569439, 0.06680597364902496, 0.03081490285694599, 0.0676608681678772, 0.018395278602838516, 0.05835169926285744, 0.07187769562005997], [0.022341610863804817, 0.06473085284233093, 0.0408744290471077, 0.034165866672992706, 0.014526271261274815, 0.022680550813674927, 0.022100742906332016, 0.022277794778347015, 0.05759499967098236, 0.019947806373238564, 0.0496177077293396, 0.033357735723257065, 0.05115745589137077, 0.053189557045698166, 
0.04345373064279556, 0.02014923468232155, 0.04825205355882645, 0.03797995671629906, 0.06621017307043076, 0.12005966156721115, 0.030568145215511322, 0.12476368248462677], [0.025285061448812485, 0.015241889283061028, 0.020146777853369713, 0.010543258860707283, 0.14176836609840393, 0.027074186131358147, 0.07015186548233032, 0.0047167325392365456, 0.015955111011862755, 0.0013096717884764075, 0.052338141947984695, 0.016524530947208405, 0.02905607968568802, 0.02060665376484394, 0.013459096662700176, 0.180189847946167, 0.05498030409216881, 0.13327787816524506, 0.025171328336000443, 0.04830280318856239, 0.00439621414989233, 0.08950411528348923], [0.003520584898069501, 0.010699323378503323, 0.022552842274308205, 0.003339767921715975, 0.03527562692761421, 0.028250519186258316, 0.0013689377810806036, 0.003603774355724454, 0.0068186805583536625, 0.0019265537848696113, 0.01655816286802292, 0.008194176480174065, 0.05065133422613144, 0.1039084643125534, 0.011808172799646854, 0.031543292105197906, 0.45223575830459595, 0.01846926100552082, 0.07675839960575104, 0.022416742518544197, 0.012150834314525127, 0.07794871926307678], [0.017018482089042664, 0.02735641412436962, 0.022099459543824196, 0.06371409446001053, 0.031017782166600227, 0.023296672850847244, 0.0028086898382753134, 0.007576672825962305, 0.0015611869748681784, 0.04213104397058487, 0.013946060091257095, 0.007633460219949484, 0.04171512648463249, 0.018636470660567284, 0.09530280530452728, 0.04349563643336296, 0.24149326980113983, 0.007280391175299883, 0.07205650955438614, 0.012041415087878704, 0.10945960879325867, 0.09835877269506454], [0.014066697098314762, 0.013896038755774498, 0.044384557753801346, 0.001414450816810131, 0.05018102377653122, 0.005124139133840799, 0.010096026584506035, 0.0027304282411932945, 0.01298748143017292, 0.0005487269372679293, 0.056771669536828995, 0.0277378149330616, 0.06077173352241516, 0.1377979815006256, 0.006364051252603531, 0.04532153159379959, 0.07303924858570099, 0.12889501452445984, 
0.07810138911008835, 0.05083507299423218, 0.009397272020578384, 0.16953761875629425], [0.00592151191085577, 0.0339151956140995, 0.01791679672896862, 0.15739963948726654, 0.013315006159245968, 0.002093740738928318, 0.005345107987523079, 0.002613391261547804, 0.0016686957096680999, 0.07412756979465485, 0.01135775912553072, 0.005669725593179464, 0.03961481526494026, 0.03072541020810604, 0.1754818707704544, 0.009176741354167461, 0.008339070715010166, 0.012527484446763992, 0.02976347506046295, 0.016544323414564133, 0.13323044776916504, 0.2132522165775299], [0.0070648761466145515, 0.006868419703096151, 0.015411579981446266, 0.0020592007786035538, 0.008049841970205307, 0.002046948065981269, 0.0022440070752054453, 0.0006796899251639843, 0.012801578268408775, 0.0003547684755176306, 0.015909692272543907, 0.039030201733112335, 0.03473689407110214, 0.06677696853876114, 0.009437327273190022, 0.021341240033507347, 0.04325563833117485, 0.09115143865346909, 0.06976111978292465, 0.24209825694561005, 0.007239641156047583, 0.30168065428733826]], [[0.01439367700368166, 0.052227821201086044, 0.03380272909998894, 0.01013240497559309, 0.05587315931916237, 0.048899780958890915, 0.026424584910273552, 0.05237205699086189, 0.06851033866405487, 0.03344567120075226, 0.10600987821817398, 0.06530863046646118, 0.1071271002292633, 0.024039965122938156, 0.010245559737086296, 0.04745618253946304, 0.016749998554587364, 0.02791917324066162, 0.023028094321489334, 0.06343734264373779, 0.02146138995885849, 0.0911344662308693], [0.04733441770076752, 0.046525828540325165, 0.014234562404453754, 0.007098353933542967, 0.14749178290367126, 0.023811528459191322, 0.025055214762687683, 0.06240304931998253, 0.023287290707230568, 0.022014062851667404, 0.08005297183990479, 0.03603056073188782, 0.04674834385514259, 0.005754699930548668, 0.003336474997922778, 0.04186388850212097, 0.006747701205313206, 0.014307850040495396, 0.016278700903058052, 0.023418182507157326, 0.019013624638319016, 0.2871909737586975], 
[0.025289317592978477, 0.0713641494512558, 0.042691055685281754, 0.019939737394452095, 0.03628848120570183, 0.036326441913843155, 0.0262429341673851, 0.0824970230460167, 0.03136259317398071, 0.04590116813778877, 0.10496247559785843, 0.04492803290486336, 0.07403265684843063, 0.048996228724718094, 0.020418209955096245, 0.022114014253020287, 0.02400445193052292, 0.019874000921845436, 0.05840156599879265, 0.024717217311263084, 0.05039089918136597, 0.08925727009773254], [0.029512790963053703, 0.06532258540391922, 0.08642250299453735, 0.02682112716138363, 0.1180163025856018, 0.04444359987974167, 0.06935657560825348, 0.060742806643247604, 0.019867492839694023, 0.06174382567405701, 0.05900604650378227, 0.01814715936779976, 0.03972455486655235, 0.03206202760338783, 0.018285181373357773, 0.09286215156316757, 0.016611695289611816, 0.03649340569972992, 0.02978934533894062, 0.013323326595127583, 0.03985508903861046, 0.02159038931131363], [0.02299458160996437, 0.09203627705574036, 0.16217532753944397, 0.02681269310414791, 0.024657486006617546, 0.0402691476047039, 0.01759224198758602, 0.043954119086265564, 0.016373855993151665, 0.030685799196362495, 0.06027797609567642, 0.060404177755117416, 0.06275050342082977, 0.06609497964382172, 0.02701452001929283, 0.09006105363368988, 0.0236277487128973, 0.013970350846648216, 0.027404243126511574, 0.016970710828900337, 0.02508496679365635, 0.04878721758723259], [0.06077537313103676, 0.06454914063215256, 0.047322481870651245, 0.02496781386435032, 0.05872441083192825, 0.02065553143620491, 0.03542918711900711, 0.07306750863790512, 0.04336342588067055, 0.04768471419811249, 0.06915903836488724, 0.05483987182378769, 0.04684355854988098, 0.0471518449485302, 0.022778647020459175, 0.04830740764737129, 0.01323690265417099, 0.03324094042181969, 0.03179851174354553, 0.03216065093874931, 0.05162527784705162, 0.07231784611940384], [0.029522353783249855, 0.08557303249835968, 0.04074537009000778, 0.031104478985071182, 0.05849062651395798, 
0.06913622468709946, 0.0480923131108284, 0.14638090133666992, 0.025856370106339455, 0.1040448248386383, 0.025238383561372757, 0.03399641811847687, 0.06167088449001312, 0.015978630632162094, 0.01912154257297516, 0.02728603221476078, 0.029813043773174286, 0.024478966370224953, 0.040890172123909, 0.01232211198657751, 0.05732365697622299, 0.012933755293488503], [0.04181387647986412, 0.06791171431541443, 0.07704848051071167, 0.04568985477089882, 0.06413589417934418, 0.08037692308425903, 0.0363088957965374, 0.17020824551582336, 0.010836235247552395, 0.06142796203494072, 0.03755173459649086, 0.049454785883426666, 0.037679772824048996, 0.014365115202963352, 0.022360993549227715, 0.06376916170120239, 0.013313054107129574, 0.008863339200615883, 0.03677716478705406, 0.004930684342980385, 0.03547815605998039, 0.019697928801178932], [0.03354491665959358, 0.05679574981331825, 0.016548866406083107, 0.03820617124438286, 0.0323205292224884, 0.041405126452445984, 0.0351063534617424, 0.04996878653764725, 0.06117492914199829, 0.03601815551519394, 0.013631787151098251, 0.07465989142656326, 0.045403435826301575, 0.029763251543045044, 0.05092659965157509, 0.027519475668668747, 0.07310737669467926, 0.055347342044115067, 0.0841701328754425, 0.051304806023836136, 0.07360708713531494, 0.019469305872917175], [0.024198785424232483, 0.05089139565825462, 0.02173851989209652, 0.02327481284737587, 0.009542765095829964, 0.01915653422474861, 0.024307169020175934, 0.10241527110338211, 0.003671533428132534, 0.18325506150722504, 0.007114951964467764, 0.008783194236457348, 0.04972430318593979, 0.0140975471585989, 0.020780248567461967, 0.01625843159854412, 0.016912922263145447, 0.01862012967467308, 0.08186372369527817, 0.0054578352719545364, 0.28870517015457153, 0.009229736402630806], [0.01994173787534237, 0.056654900312423706, 0.030734872445464134, 0.012616581283509731, 0.0331357978284359, 0.04245489463210106, 0.04090544581413269, 0.027768924832344055, 0.05509829893708229, 0.015726109966635704, 
0.035310037434101105, 0.057305771857500076, 0.1458745151758194, 0.037755005061626434, 0.016180304810404778, 0.03479306772351265, 0.046492621302604675, 0.1015264093875885, 0.03419741988182068, 0.07568629086017609, 0.022831493988633156, 0.05700945481657982], [0.0072959233075380325, 0.037207815796136856, 0.05379916727542877, 0.013564934022724628, 0.017840778455138206, 0.05910497531294823, 0.023403385654091835, 0.06563498824834824, 0.06547709554433823, 0.07729913294315338, 0.05267815291881561, 0.11689779162406921, 0.10350769758224487, 0.04868499934673309, 0.01505502313375473, 0.01873253658413887, 0.02234291099011898, 0.022067224606871605, 0.0397384874522686, 0.059365175664424896, 0.0423748679459095, 0.03792685270309448], [0.008702297694981098, 0.04873323068022728, 0.026742931455373764, 0.01029634103178978, 0.01409219577908516, 0.08163401484489441, 0.021128704771399498, 0.07234492897987366, 0.02720760926604271, 0.08006966859102249, 0.017644301056861877, 0.01819506846368313, 0.26216205954551697, 0.03245415538549423, 0.01265405211597681, 0.013361678458750248, 0.05246761441230774, 0.029090944677591324, 0.05093547701835632, 0.02711457945406437, 0.081700399518013, 0.011267730966210365], [0.013526272028684616, 0.046190809458494186, 0.052071139216423035, 0.026453347876667976, 0.0199698805809021, 0.022108430042862892, 0.030324924737215042, 0.055790647864341736, 0.023616457358002663, 0.087751604616642, 0.06376022100448608, 0.013359414413571358, 0.0833025798201561, 0.0824018195271492, 0.0400865413248539, 0.01930626668035984, 0.020922699943184853, 0.03629041463136673, 0.07565411180257797, 0.03273116797208786, 0.11375603079795837, 0.04062528908252716], [0.01196390949189663, 0.051491037011146545, 0.057696472853422165, 0.026618141680955887, 0.05526195466518402, 0.05731111019849777, 0.056997254490852356, 0.04352360591292381, 0.028708523139357567, 0.08117925375699997, 0.044830407947301865, 0.013690219260752201, 0.09575480222702026, 0.044661179184913635, 0.030635327100753784, 
0.04672211408615112, 0.04015805199742317, 0.06159255653619766, 0.044080112129449844, 0.027653539553284645, 0.06570485979318619, 0.01376548781991005], [0.006098317448049784, 0.05962783470749855, 0.08118540793657303, 0.01695999689400196, 0.017322832718491554, 0.025629887357354164, 0.009904932230710983, 0.02511702850461006, 0.021710120141506195, 0.03130766376852989, 0.05716383829712868, 0.06147954612970352, 0.09706781804561615, 0.08903643488883972, 0.036058660596609116, 0.07321751862764359, 0.0330309234559536, 0.028777796775102615, 0.05043593421578407, 0.05301283299922943, 0.041066475212574005, 0.08478815853595734], [0.015795739367604256, 0.022830260917544365, 0.01743755303323269, 0.01359870657324791, 0.02796531282365322, 0.012536526657640934, 0.01996523328125477, 0.018751604482531548, 0.09968086332082748, 0.02797701023519039, 0.09615517407655716, 0.04213837906718254, 0.0835542231798172, 0.06353965401649475, 0.02544325590133667, 0.02330666594207287, 0.021446269005537033, 0.06322696059942245, 0.025352463126182556, 0.14645664393901825, 0.0345919094979763, 0.09824953228235245], [0.008127245120704174, 0.04358277469873428, 0.020863784477114677, 0.020654473453760147, 0.023963019251823425, 0.06382866948843002, 0.023158516734838486, 0.061228130012750626, 0.06735417246818542, 0.12304767221212387, 0.035265952348709106, 0.03267325833439827, 0.14400450885295868, 0.022869719192385674, 0.02539157122373581, 0.010724133811891079, 0.057066988199949265, 0.033330611884593964, 0.041235148906707764, 0.05479113385081291, 0.07072054594755173, 0.016117967665195465], [0.020255180075764656, 0.02973365969955921, 0.018513290211558342, 0.028003480285406113, 0.0516047477722168, 0.04849258065223694, 0.030899154022336006, 0.05013732612133026, 0.05599880591034889, 0.04934260621666908, 0.095308318734169, 0.07331382483243942, 0.07467621564865112, 0.020486222580075264, 0.029322825372219086, 0.03403566777706146, 0.0355122834444046, 0.04300456494092941, 0.041784461587667465, 0.055599670857191086, 
0.03750569000840187, 0.07646936923265457], [0.013629213906824589, 0.03135602921247482, 0.008348179049789906, 0.022442607209086418, 0.018058333545923233, 0.018541084602475166, 0.017236270010471344, 0.02122587338089943, 0.04934361204504967, 0.037656519562006, 0.009966646321117878, 0.04603738710284233, 0.05719748139381409, 0.035701993852853775, 0.05113443732261658, 0.020369939506053925, 0.09630967676639557, 0.09884760528802872, 0.10134416073560715, 0.0934721902012825, 0.12996017932891846, 0.02182060293853283], [0.023235151544213295, 0.05898517370223999, 0.03376876935362816, 0.026660777628421783, 0.019794831052422523, 0.02324080839753151, 0.013102292083203793, 0.06608037650585175, 0.005053048487752676, 0.16123782098293304, 0.015346892178058624, 0.011035383678972721, 0.09885423630475998, 0.02318119816482067, 0.02415643073618412, 0.02869727462530136, 0.017268961295485497, 0.013514258898794651, 0.061933718621730804, 0.01025434210896492, 0.24189455807209015, 0.02270379848778248], [0.010916686616837978, 0.03686893358826637, 0.02323172055184841, 0.007330424152314663, 0.01733529195189476, 0.007799684070050716, 0.021128911525011063, 0.008602002635598183, 0.05611014366149902, 0.014616391621530056, 0.0427272692322731, 0.03851577267050743, 0.09711048752069473, 0.06745872646570206, 0.015205818228423595, 0.024367447942495346, 0.02097162976861, 0.14327150583267212, 0.031009411439299583, 0.16318379342556, 0.034083835780620575, 0.11815405637025833]], [[0.018984146416187286, 0.1817621886730194, 0.03465213626623154, 0.018178654834628105, 0.030749065801501274, 0.0162587221711874, 0.06310736387968063, 0.08570394665002823, 0.05127996951341629, 0.05532168224453926, 0.06843040138483047, 0.020872587338089943, 0.01025866437703371, 0.00858079083263874, 0.00606887973845005, 0.006277559790760279, 0.009964500553905964, 0.023911599069833755, 0.09228038787841797, 0.06360092014074326, 0.07643518596887589, 0.057320643216371536], [0.01457487978041172, 0.08200187981128693, 0.022043699398636818, 
0.011882826685905457, 0.045445483177900314, 0.07091967761516571, 0.10301806777715683, 0.04723265394568443, 0.06066694110631943, 0.13299228250980377, 0.046244874596595764, 0.040349818766117096, 0.02231195755302906, 0.006561273243278265, 0.00401791138574481, 0.011799363419413567, 0.02753770537674427, 0.035690560936927795, 0.04785265773534775, 0.06299125403165817, 0.062056850641965866, 0.04180744290351868], [0.024597419425845146, 0.012977040372788906, 0.024491798132658005, 0.009403358213603497, 0.04833414405584335, 0.10419166833162308, 0.05730174109339714, 0.011402186006307602, 0.08506734669208527, 0.023499609902501106, 0.071152463555336, 0.08873539417982101, 0.02650049328804016, 0.04622659832239151, 0.007657850626856089, 0.024971233680844307, 0.06912107765674591, 0.043614547699689865, 0.01499010156840086, 0.09414560347795486, 0.009718840010464191, 0.10189949721097946], [0.06678234785795212, 0.021183717995882034, 0.022533578798174858, 0.02312496304512024, 0.01725674793124199, 0.07079783827066422, 0.05040868744254112, 0.03215186297893524, 0.05007200315594673, 0.024744300171732903, 0.019209617748856544, 0.2061455398797989, 0.02696084976196289, 0.07171621918678284, 0.026906266808509827, 0.025486886501312256, 0.08059000223875046, 0.05852091312408447, 0.022400522604584694, 0.035037048161029816, 0.013134093955159187, 0.03483595326542854], [0.030655434355139732, 0.09424030035734177, 0.035202693194150925, 0.017631324008107185, 0.02697465941309929, 0.10768456757068634, 0.09526421874761581, 0.06193413957953453, 0.03912988305091858, 0.061192672699689865, 0.08041461557149887, 0.062371380627155304, 0.028826622292399406, 0.024980325251817703, 0.006251066457480192, 0.01309359259903431, 0.03770604357123375, 0.02861904725432396, 0.043210506439208984, 0.02222028747200966, 0.02618563361465931, 0.05621101334691048], [0.05512962117791176, 0.0421464778482914, 0.039080191403627396, 0.05215369164943695, 0.06203983351588249, 0.05501599609851837, 0.027220306918025017, 0.043666526675224304, 
0.122386135160923, 0.06520779430866241, 0.12255053967237473, 0.03685058280825615, 0.032067567110061646, 0.016284113749861717, 0.023412933573126793, 0.036029063165187836, 0.017452297732234, 0.008709615096449852, 0.025766370818018913, 0.049713656306266785, 0.028434572741389275, 0.038682080805301666], [0.020626841112971306, 0.03987259790301323, 0.035673387348651886, 0.022795790806412697, 0.08845286071300507, 0.0270039401948452, 0.06175930052995682, 0.03589048609137535, 0.06128879263997078, 0.019067002460360527, 0.05008450523018837, 0.0836934745311737, 0.04509369656443596, 0.057031456381082535, 0.022904878482222557, 0.08049717545509338, 0.03515416011214256, 0.047470975667238235, 0.018703749403357506, 0.06399691104888916, 0.015549502335488796, 0.06738848239183426], [0.008766167797148228, 0.3104436695575714, 0.008183561265468597, 0.011812361888587475, 0.010998014360666275, 0.0035186472814530134, 0.008724953979253769, 0.042733222246170044, 0.009409314952790737, 0.34025055170059204, 0.042013272643089294, 0.006567948963493109, 0.010339556261897087, 0.0006991291884332895, 0.0017908187583088875, 0.0021486342884600163, 0.0006840370479039848, 0.0009206897229887545, 0.013133561238646507, 0.004374836105853319, 0.15528568625450134, 0.0072013987228274345], [0.03545093908905983, 0.0785035789012909, 0.027757132425904274, 0.03084109164774418, 0.07505194842815399, 0.018869444727897644, 0.08301562070846558, 0.05599387362599373, 0.060866985470056534, 0.05293472856283188, 0.05663507804274559, 0.05591987445950508, 0.06529636681079865, 0.03645731136202812, 0.014334367588162422, 0.07100079953670502, 0.008361185900866985, 0.033367425203323364, 0.015022731386125088, 0.04940558224916458, 0.02198261208832264, 0.05293138697743416], [0.012125995010137558, 0.051423244178295135, 0.01967853680253029, 0.031213512644171715, 0.0843246653676033, 0.07934874296188354, 0.03067409060895443, 0.02167584002017975, 0.10989146679639816, 0.05036008358001709, 0.03392864391207695, 0.25879496335983276, 
0.017346573993563652, 0.026379089802503586, 0.012769227847456932, 0.018008146435022354, 0.047686733305454254, 0.006821005139499903, 0.005615358706563711, 0.03721281886100769, 0.01225665770471096, 0.032464515417814255], [0.011215528473258018, 0.003191569820046425, 0.04375632479786873, 0.0010737170232459903, 0.03641170635819435, 0.0027511161752045155, 0.30635666847229004, 0.010261124931275845, 0.14764687418937683, 0.0021474184468388557, 0.1966588944196701, 0.007087023463100195, 0.0005168095231056213, 0.005183384288102388, 0.0002426446444587782, 0.0020927591249346733, 0.0005171916563995183, 0.02802327647805214, 0.004441986791789532, 0.11091699451208115, 0.000794149877037853, 0.07871285825967789], [0.005816065706312656, 0.007435481995344162, 0.037498101592063904, 0.015608460642397404, 0.053911950439214706, 0.023862704634666443, 0.16006352007389069, 0.02201126515865326, 0.08952238410711288, 0.013174445368349552, 0.1176847368478775, 0.008556018583476543, 0.010477950796484947, 0.013750075362622738, 0.012228474020957947, 0.009443306364119053, 0.021286319941282272, 0.088921919465065, 0.05998906493186951, 0.14941196143627167, 0.014149274677038193, 0.0651964545249939], [0.010141346603631973, 0.006489025894552469, 0.04046088457107544, 0.004374850075691938, 0.044391851872205734, 0.016363603994250298, 0.18752343952655792, 0.04074889421463013, 0.12861594557762146, 0.005405368749052286, 0.10500040650367737, 0.03567769005894661, 0.008176654577255249, 0.029067041352391243, 0.0032835910096764565, 0.006620281375944614, 0.012268760241568089, 0.08183931559324265, 0.04403812810778618, 0.11757174879312515, 0.004237732850015163, 0.06770344823598862], [0.016187025234103203, 0.0036549146752804518, 0.023546384647488594, 0.006985412910580635, 0.03947856277227402, 0.04099281132221222, 0.08177501708269119, 0.010412280447781086, 0.06537233293056488, 0.003755039069801569, 0.05005064979195595, 0.06413117796182632, 0.01405246090143919, 0.08300561457872391, 0.011169841513037682, 0.027342790737748146, 
0.06954213976860046, 0.13029012084007263, 0.02444959245622158, 0.11586938053369522, 0.003673007944598794, 0.11426351964473724], [0.056703947484493256, 0.011245288886129856, 0.029921775683760643, 0.015412013046443462, 0.01867002807557583, 0.03524777293205261, 0.08220458775758743, 0.02829851396381855, 0.043820563703775406, 0.004527358803898096, 0.014126072637736797, 0.10547646135091782, 0.01434341911226511, 0.09195612370967865, 0.02355308271944523, 0.027970150113105774, 0.07718128710985184, 0.1694677323102951, 0.04268267750740051, 0.05473776534199715, 0.006359547842293978, 0.04609384760260582], [0.03451067954301834, 0.05864392966032028, 0.04268018528819084, 0.01148151233792305, 0.01657501794397831, 0.07867510616779327, 0.046001870185136795, 0.04210558161139488, 0.021798845380544662, 0.03302786126732826, 0.03590328246355057, 0.03691982850432396, 0.039645079523324966, 0.0582725964486599, 0.011428965255618095, 0.01393798366189003, 0.09038656949996948, 0.0770178735256195, 0.12282387167215347, 0.030007801949977875, 0.041077256202697754, 0.05707842484116554], [0.08343138545751572, 0.011519033461809158, 0.046790435910224915, 0.03695052117109299, 0.07349532842636108, 0.028033960610628128, 0.043894827365875244, 0.025862520560622215, 0.09439351409673691, 0.001686332980170846, 0.037418678402900696, 0.02232595533132553, 0.006811514031141996, 0.03263969346880913, 0.03370477259159088, 0.0672992616891861, 0.039956510066986084, 0.06290752440690994, 0.06829287111759186, 0.11163970082998276, 0.005161593668162823, 0.06578411906957626], [0.020825933665037155, 0.021457619965076447, 0.03992236405611038, 0.0145771075040102, 0.051977552473545074, 0.010627084411680698, 0.056619223207235336, 0.017727097496390343, 0.024718910455703735, 0.0013792820973321795, 0.013064327649772167, 0.03174709528684616, 0.017529569566249847, 0.0929664671421051, 0.030820755288004875, 0.0754125639796257, 0.05948702245950699, 0.2103114128112793, 0.05223064869642258, 0.072234608232975, 0.007131559308618307, 
0.07723193615674973], [0.018580608069896698, 0.22158795595169067, 0.023806359618902206, 0.015500897541642189, 0.014727182686328888, 0.0046545108780264854, 0.04034508392214775, 0.05378106236457825, 0.015533576719462872, 0.0726037546992302, 0.04549049958586693, 0.005425234325230122, 0.01146725844591856, 0.0023081174585968256, 0.005674322601407766, 0.0059956144541502, 0.003696310566738248, 0.023547615855932236, 0.1413353830575943, 0.027846133336424828, 0.21936771273612976, 0.026724798604846], [0.01965293101966381, 0.027564339339733124, 0.02720058523118496, 0.022949883714318275, 0.04495649039745331, 0.007702112663537264, 0.07770296186208725, 0.018334712833166122, 0.05062803626060486, 0.005432192236185074, 0.0161821860820055, 0.03230347856879234, 0.02318878285586834, 0.08124127984046936, 0.026529239490628242, 0.05565033107995987, 0.017833156511187553, 0.15490570664405823, 0.0243095513433218, 0.15632835030555725, 0.012222050689160824, 0.09718164056539536], [0.014905220828950405, 0.44198018312454224, 0.035905517637729645, 0.014217477291822433, 0.03350696712732315, 0.01860842853784561, 0.043413951992988586, 0.029850460588932037, 0.03209790587425232, 0.0215626060962677, 0.012158663012087345, 0.039987027645111084, 0.0038473212625831366, 0.00921502336859703, 0.0037664237897843122, 0.0061159031465649605, 0.02324170246720314, 0.02457277849316597, 0.0536622628569603, 0.04563402011990547, 0.043942954391241074, 0.04780719429254532], [0.0060071395710110664, 0.00079776142956689, 0.027420947328209877, 0.0024444321170449257, 0.019317561760544777, 0.003649538615718484, 0.15116436779499054, 0.0020652750972658396, 0.05879681557416916, 0.0002808616845868528, 0.028100546449422836, 0.005172847770154476, 0.0006178147159516811, 0.03160979226231575, 0.0028428828809410334, 0.0032348711974918842, 0.008823980577290058, 0.2062903195619583, 0.010293794795870781, 0.28873133659362793, 0.0008378822822123766, 0.14149928092956543]], [[0.004396993201225996, 0.01741177774965763, 0.007594200782477856, 
0.0023426164407283068, 0.0075682648457586765, 0.0033785768318921328, 0.01722475327551365, 0.010229740291833878, 0.16596868634223938, 0.005169935058802366, 0.06102145090699196, 0.06585635244846344, 0.014937733300030231, 0.026459049433469772, 0.0019283192232251167, 0.006334079895168543, 0.0040445891208946705, 0.02610265277326107, 0.017318371683359146, 0.2824198007583618, 0.0050878822803497314, 0.2472042590379715], [0.026224777102470398, 0.10466384887695312, 0.10631824284791946, 0.01724378578364849, 0.06438039243221283, 0.07147833704948425, 0.03979531303048134, 0.08319974690675735, 0.056763358414173126, 0.03048735111951828, 0.03704291954636574, 0.03642238676548004, 0.015868673101067543, 0.07254847139120102, 0.0057162572629749775, 0.05450233072042465, 0.01482392381876707, 0.012589886784553528, 0.03771733120083809, 0.028134355321526527, 0.020484555512666702, 0.06359373033046722], [0.0036196745932102203, 0.29560694098472595, 0.19458889961242676, 0.0021286134142428637, 0.006636769976466894, 0.005665985867381096, 0.006828135810792446, 0.0325765460729599, 0.012915563769638538, 0.07863412797451019, 0.03850918263196945, 0.019153567031025887, 0.048412516713142395, 0.11997489631175995, 0.0012766619911417365, 0.00453140726312995, 0.0015343097038567066, 0.0030659546609967947, 0.022810906171798706, 0.007288205437362194, 0.056954968720674515, 0.03728616237640381], [0.016462553292512894, 0.05277998372912407, 0.039464592933654785, 0.026240365579724312, 0.019625520333647728, 0.04651549831032753, 0.07336554676294327, 0.037271320819854736, 0.18075545132160187, 0.07004966586828232, 0.09281530976295471, 0.062273550778627396, 0.06378467381000519, 0.023485716432332993, 0.015194966457784176, 0.007387725170701742, 0.01720562018454075, 0.01797887496650219, 0.009644020348787308, 0.08236302435398102, 0.016798583790659904, 0.02853747271001339], [0.013532249256968498, 0.059148844331502914, 0.03517007827758789, 0.035575397312641144, 0.030789542943239212, 0.03467652574181557, 0.021214457228779793, 
0.07630172371864319, 0.08523456007242203, 0.06360282003879547, 0.03842940181493759, 0.028866136446595192, 0.03994122892618179, 0.03307713568210602, 0.026175422593951225, 0.05727364495396614, 0.03277541697025299, 0.01871851459145546, 0.07967637479305267, 0.08683816343545914, 0.053331803530454636, 0.049650583416223526], [0.05270485579967499, 0.052653294056653976, 0.12176937609910965, 0.015268625691533089, 0.010594199411571026, 0.02221905253827572, 0.017424706369638443, 0.031089715659618378, 0.05685154348611832, 0.01209652703255415, 0.04121954366564751, 0.05903007462620735, 0.017767194658517838, 0.11177287995815277, 0.008852283470332623, 0.002744894241914153, 0.017348578199744225, 0.01138604711741209, 0.03310273587703705, 0.0798230692744255, 0.01615462638437748, 0.20812630653381348], [0.002563739661127329, 0.10446758568286896, 0.025993695482611656, 0.0077999732457101345, 0.061413563787937164, 0.005282431375235319, 0.03826845809817314, 0.12468364089727402, 0.1324204057455063, 0.07097375392913818, 0.026828724890947342, 0.017938334494829178, 0.014351310208439827, 0.037799250334501266, 0.003440419677644968, 0.05724015086889267, 0.002222720766440034, 0.024791184812784195, 0.032436154782772064, 0.12095707654953003, 0.04262428730726242, 0.04550303518772125], [0.04413841664791107, 0.08663968741893768, 0.1787228286266327, 0.04775639995932579, 0.03141843155026436, 0.06677263230085373, 0.01980152726173401, 0.060465794056653976, 0.04384801164269447, 0.047908708453178406, 0.03783860057592392, 0.05766749754548073, 0.049304213374853134, 0.09463655948638916, 0.018648119643330574, 0.008417798206210136, 0.01018142607063055, 0.004376183729618788, 0.009373139590024948, 0.014440140686929226, 0.023828765377402306, 0.043815188109874725], [0.012618999928236008, 0.11092249304056168, 0.08382588624954224, 0.03134460002183914, 0.03742936626076698, 0.037017662078142166, 0.10977581143379211, 0.08690754324197769, 0.15020950138568878, 0.07359867542982101, 0.057258397340774536, 0.027166392654180527, 
0.016100432723760605, 0.039537254720926285, 0.004778469447046518, 0.02144739218056202, 0.003138891654089093, 0.012130971066653728, 0.0050184703432023525, 0.041056547313928604, 0.016270741820335388, 0.022445516660809517], [0.005160437431186438, 0.009401579387485981, 0.002667661290615797, 0.009727392345666885, 0.009386644698679447, 0.017509793862700462, 0.05384024977684021, 0.058623578399419785, 0.43841949105262756, 0.028085872530937195, 0.06436076015233994, 0.07006682455539703, 0.00922305602580309, 0.0026498502120375633, 0.00524543272331357, 0.014335338026285172, 0.004035326652228832, 0.01983405277132988, 0.01097983680665493, 0.14844997227191925, 0.004279229789972305, 0.013717643916606903], [5.145856266608462e-05, 9.207760012941435e-05, 2.9924338377895765e-05, 5.1403727411525324e-05, 5.734144724556245e-05, 0.00015391122724395245, 0.0007090555736795068, 0.0006407625041902065, 0.634567379951477, 0.00016201693506445736, 0.07348614931106567, 0.046918414533138275, 0.0003015216498170048, 4.661306593334302e-05, 1.998547486437019e-05, 4.077502671862021e-05, 5.950110062258318e-05, 0.0002120000426657498, 0.0002794301835820079, 0.22718891501426697, 1.3441228475130629e-05, 0.014917895197868347], [0.0016536328475922346, 0.001926796161569655, 0.0011610070941969752, 0.0011667972430586815, 0.0016468216199427843, 0.0016308417543768883, 0.017873940989375114, 0.009662671014666557, 0.28765925765037537, 0.0012878733687102795, 0.2026176005601883, 0.06600665301084518, 0.002324117813259363, 0.0021993648260831833, 0.0005784199456684291, 0.002294610720127821, 0.0010904576629400253, 0.009825252927839756, 0.006922577042132616, 0.27035561203956604, 0.00045962620060890913, 0.10965611040592194], [0.00015820981934666634, 0.002345900982618332, 0.00011182064190506935, 0.0004220743430778384, 0.0012520255986601114, 0.0015503950417041779, 0.012013346888124943, 0.004464813973754644, 0.6310707926750183, 0.0019496160093694925, 0.051807623356580734, 0.086586594581604, 0.0033757879864424467, 
0.0002822004025802016, 0.0002448851882945746, 0.0014817335177212954, 0.0006200054194778204, 0.00507176760584116, 0.002427879022434354, 0.18193697929382324, 0.00023743028577882797, 0.01058819331228733], [0.0031090842094272375, 0.05098465457558632, 0.01663747802376747, 0.0018684103852137923, 0.0047891028225421906, 0.002665016334503889, 0.013950744643807411, 0.01426019985228777, 0.06053047999739647, 0.039960332214832306, 0.2281942069530487, 0.09628324955701828, 0.08852658420801163, 0.03790033608675003, 0.0033607485238462687, 0.005268154200166464, 0.003713650396093726, 0.025910869240760803, 0.044908709824085236, 0.07549446076154709, 0.04129304364323616, 0.14039045572280884], [0.006587926298379898, 0.00888830330222845, 0.0033090058714151382, 0.010689808055758476, 0.0077930171974003315, 0.008862320333719254, 0.040792811661958694, 0.013666925951838493, 0.20269423723220825, 0.019968414679169655, 0.167788565158844, 0.16183266043663025, 0.0691450834274292, 0.005713120102882385, 0.01633176952600479, 0.005498424172401428, 0.0110785448923707, 0.03196241706609726, 0.013181665912270546, 0.15388359129428864, 0.007193753961473703, 0.03313762694597244], [0.005763264372944832, 0.034593112766742706, 0.009062188677489758, 0.011656539514660835, 0.027753397822380066, 0.00847809948027134, 0.010263124480843544, 0.04239165037870407, 0.020833147689700127, 0.04949427768588066, 0.05440352112054825, 0.0243623498827219, 0.050132062286138535, 0.024437343701720238, 0.024414125829935074, 0.13069990277290344, 0.027769576758146286, 0.03344457596540451, 0.17331109941005707, 0.04562011733651161, 0.12862053513526917, 0.062496013939380646], [0.026231486350297928, 0.0019219601526856422, 0.004315122961997986, 0.004086341243237257, 0.0022546183317899704, 0.0024242873769253492, 0.0053843422792851925, 0.0033114601392298937, 0.018313873559236526, 0.000829763594083488, 0.06011917442083359, 0.0789029598236084, 0.014810285530984402, 0.022481942549347878, 0.017829956486821175, 0.002018376486375928, 
0.031349748373031616, 0.04800209030508995, 0.11137242615222931, 0.16019311547279358, 0.0056071896106004715, 0.3782394230365753], [0.0012172494316473603, 0.007500451058149338, 0.0012451005168259144, 0.002714274451136589, 0.0123168108984828, 0.00035220920108258724, 0.007020745892077684, 0.014577900990843773, 0.03204009309411049, 0.01139454822987318, 0.05701587349176407, 0.04003937542438507, 0.022326635196805, 0.01257238443940878, 0.01137523539364338, 0.06908971816301346, 0.003633434185758233, 0.09760922938585281, 0.15845218300819397, 0.22962917387485504, 0.05006199702620506, 0.15781539678573608], [0.010946230962872505, 0.002826689975336194, 0.00626347353681922, 0.0038561690598726273, 0.0037565110251307487, 0.0014175904216244817, 0.001310388557612896, 0.0027990529779344797, 0.011442274786531925, 0.001433864119462669, 0.07732640951871872, 0.1116989403963089, 0.030131474137306213, 0.04230547323822975, 0.018519693985581398, 0.007754032034426928, 0.0116136334836483, 0.016620738431811333, 0.07241002470254898, 0.06721954047679901, 0.011284485459327698, 0.48706328868865967], [0.009134666994214058, 0.018239008262753487, 0.01629829593002796, 0.006792381405830383, 0.012235039845108986, 0.00289451377466321, 0.021547043696045876, 0.008730698376893997, 0.07131953537464142, 0.012791370041668415, 0.108086958527565, 0.03722091019153595, 0.01225010771304369, 0.03262592479586601, 0.005653668660670519, 0.037263479083776474, 0.005768800154328346, 0.07155375927686691, 0.02609565109014511, 0.24649634957313538, 0.02361275814473629, 0.213389053940773], [0.010770179331302643, 0.002562048612162471, 0.0011524348519742489, 0.009505918249487877, 0.00900576263666153, 0.007210278883576393, 0.005736927036195993, 0.007005926687270403, 0.06636146456003189, 0.0038734215777367353, 0.06412050127983093, 0.1377917230129242, 0.019182320684194565, 0.0043615917675197124, 0.027823224663734436, 0.047706861048936844, 0.03488945588469505, 0.0467832088470459, 0.12626251578330994, 0.22614465653896332, 
0.008322644047439098, 0.13342687487602234], [0.00014260444731917232, 5.052987398812547e-05, 5.734648584621027e-05, 1.5264136891346425e-05, 3.284290141891688e-05, 1.984896334761288e-05, 0.00012224167585372925, 5.2148236136417836e-05, 0.03452874720096588, 2.6299892851966433e-05, 0.0928981751203537, 0.03062593564391136, 0.00026006283587776124, 0.00033253998844884336, 3.07504742522724e-05, 0.00013749965000897646, 0.00023749553656671196, 0.0018239343771710992, 0.0029622858855873346, 0.354905903339386, 6.041512460797094e-05, 0.4806770086288452]], [[0.048999566584825516, 0.05541568994522095, 0.017472585663199425, 0.020641567185521126, 0.0859452337026596, 0.11713935434818268, 0.1288807988166809, 0.09245608001947403, 0.11723838746547699, 0.15076994895935059, 0.024586178362369537, 0.032769475132226944, 0.04281384125351906, 0.0068509820848703384, 0.004370041191577911, 0.007890959270298481, 0.012289520353078842, 0.004344927612692118, 0.004918169695883989, 0.011128277517855167, 0.010362583212554455, 0.0027158332522958517], [0.042205773293972015, 0.12417610734701157, 0.12503744661808014, 0.031176133081316948, 0.026754964143037796, 0.053041040897369385, 0.12134096026420593, 0.07771230489015579, 0.061792053282260895, 0.06561348587274551, 0.025739220902323723, 0.02839692309498787, 0.030877867713570595, 0.02252790704369545, 0.005102730356156826, 0.0025435788556933403, 0.008977395482361317, 0.010721182450652122, 0.011127637699246407, 0.06344173848628998, 0.011245939880609512, 0.05044752359390259], [0.08803386241197586, 0.09925299137830734, 0.06529153883457184, 0.09043311327695847, 0.05810955539345741, 0.03052397258579731, 0.10449260473251343, 0.054493971168994904, 0.043933603912591934, 0.13322626054286957, 0.036518748849630356, 0.03160106763243675, 0.023239925503730774, 0.017894940450787544, 0.008956875652074814, 0.005357516463845968, 0.008859987370669842, 0.009042695164680481, 0.00453425757586956, 0.02651485614478588, 0.03209330141544342, 0.02759440802037716], [0.060316842049360275, 
0.029370104894042015, 0.0078091975301504135, 0.029850205406546593, 0.07538234442472458, 0.12728995084762573, 0.25275200605392456, 0.08791318535804749, 0.039541564881801605, 0.04956841841340065, 0.02279408648610115, 0.01482086069881916, 0.024911170825362206, 0.008325967006385326, 0.011749744415283203, 0.017800232395529747, 0.049878861755132675, 0.03795352950692177, 0.021557999774813652, 0.011779982596635818, 0.013434254564344883, 0.005199414677917957], [0.08811809122562408, 0.0378413163125515, 0.02293698862195015, 0.029305579140782356, 0.049272339791059494, 0.0873531922698021, 0.04930954426527023, 0.1450878083705902, 0.052879609167575836, 0.07396149635314941, 0.049228765070438385, 0.03109675832092762, 0.018605586141347885, 0.01616741716861725, 0.016840558499097824, 0.018970057368278503, 0.03037901781499386, 0.014532117173075676, 0.04924078285694122, 0.021891288459300995, 0.06912394613027573, 0.027857674285769463], [0.11948785185813904, 0.017447520047426224, 0.005387474317103624, 0.012153241783380508, 0.03149716928601265, 0.03009929694235325, 0.48607486486434937, 0.06634210795164108, 0.0795266255736351, 0.034742508083581924, 0.025752266868948936, 0.02315395325422287, 0.008814089000225067, 0.0029075194615870714, 0.0047524417750537395, 0.009652188047766685, 0.004700535908341408, 0.01896379142999649, 0.0040687755681574345, 0.009455394931137562, 0.0030744292307645082, 0.0019459790783002973], [0.007966544479131699, 0.03439444303512573, 0.019642559811472893, 0.030351003631949425, 0.03324460610747337, 0.17594149708747864, 0.058136142790317535, 0.08749846369028091, 0.059708088636398315, 0.172623410820961, 0.037320543080568314, 0.03555731102824211, 0.13513101637363434, 0.024026470258831978, 0.012174686416983604, 0.013681402429938316, 0.02770957164466381, 0.005446270573884249, 0.0035858284682035446, 0.008434415794909, 0.013192749582231045, 0.00423298217356205], [0.024837816134095192, 0.019557101652026176, 0.009192646481096745, 0.012082884088158607, 0.05419690161943436, 
0.04848628491163254, 0.15823721885681152, 0.18939554691314697, 0.09486435353755951, 0.13386482000350952, 0.025162506848573685, 0.035070959478616714, 0.1123770922422409, 0.010995729826390743, 0.009602892212569714, 0.009989630430936813, 0.008947133086621761, 0.008349926210939884, 0.014276613481342793, 0.012899642810225487, 0.005424858070909977, 0.0021875181701034307], [0.008494113571941853, 0.014188054017722607, 0.019379572942852974, 0.0032865519169718027, 0.010838679037988186, 0.022892599925398827, 0.019486142322421074, 0.022090015932917595, 0.023875443264842033, 0.20499096810817719, 0.09678854793310165, 0.12162429839372635, 0.3013843894004822, 0.04251140356063843, 0.005360601935535669, 0.0040920451283454895, 0.014956880360841751, 0.010763057507574558, 0.00751438969746232, 0.010757836513221264, 0.01583777740597725, 0.01888662949204445], [0.07726649940013885, 0.03546029329299927, 0.01849370449781418, 0.06427135318517685, 0.056295156478881836, 0.03075413592159748, 0.23274581134319305, 0.08897899836301804, 0.05018097162246704, 0.16432251036167145, 0.02756468765437603, 0.03004137985408306, 0.05789732560515404, 0.007297954987734556, 0.012421717867255211, 0.0093277832493186, 0.0026878931093961, 0.009088603779673576, 0.003796419594436884, 0.005934244953095913, 0.012653536163270473, 0.002518964232876897], [0.040065519511699677, 0.03665321320295334, 0.029594894498586655, 0.021964766085147858, 0.0298776812851429, 0.01977517269551754, 0.03826047107577324, 0.02754976600408554, 0.03151674196124077, 0.2314242273569107, 0.11853887140750885, 0.11946946382522583, 0.07722166925668716, 0.02052018605172634, 0.006652131676673889, 0.005784140434116125, 0.009071417152881622, 0.010732917115092278, 0.004808078519999981, 0.019183184951543808, 0.03560372069478035, 0.0657317116856575], [0.014145473018288612, 0.010777419432997704, 0.015118081122636795, 0.001978342654183507, 0.004385554697364569, 0.005057454574853182, 0.04980512335896492, 0.007134065963327885, 0.03383123502135277, 
0.02285168506205082, 0.18342959880828857, 0.13664788007736206, 0.08511685580015182, 0.02799280360341072, 0.0028872238472104073, 0.003938514739274979, 0.015950016677379608, 0.08193609863519669, 0.025762176141142845, 0.1005340963602066, 0.008385414257645607, 0.1623348891735077], [0.006005741655826569, 0.0076476577669382095, 0.0045893145725131035, 0.0034483731724321842, 0.0010464948136359453, 0.002485554199665785, 0.014423335902392864, 0.006041311658918858, 0.022164586931467056, 0.03683553263545036, 0.05884801223874092, 0.029852546751499176, 0.047923143953084946, 0.006648550275713205, 0.0035129471216350794, 0.0005716878222301602, 0.008485740050673485, 0.03331426531076431, 0.019045265391469002, 0.34184524416923523, 0.016481952741742134, 0.3287827670574188], [0.07982683926820755, 0.010128633119165897, 0.011491836979985237, 0.00828440859913826, 0.00848626159131527, 0.007319043390452862, 0.015455121174454689, 0.015215410850942135, 0.02335226535797119, 0.05959111079573631, 0.17258082330226898, 0.09884318709373474, 0.06234714016318321, 0.016705354675650597, 0.00646474352106452, 0.0028873279225081205, 0.020009838044643402, 0.01640130765736103, 0.016030577942728996, 0.0727684274315834, 0.03119034133851528, 0.24461998045444489], [0.05481511354446411, 0.01850220374763012, 0.007150184828788042, 0.0177531149238348, 0.0238550566136837, 0.0419553741812706, 0.06601200252771378, 0.03715435042977333, 0.030533721670508385, 0.02789798006415367, 0.05924905464053154, 0.02968953177332878, 0.04543670266866684, 0.015391875058412552, 0.019878337159752846, 0.015891103073954582, 0.10787779092788696, 0.10170169919729233, 0.09234368801116943, 0.07267561554908752, 0.03402388095855713, 0.08021155744791031], [0.04274002090096474, 0.04652969166636467, 0.024804897606372833, 0.04197582229971886, 0.017745813354849815, 0.030691519379615784, 0.031850676983594894, 0.034332964569330215, 0.02330535277724266, 0.040048979222774506, 0.022454947233200073, 0.021095719188451767, 0.01972164958715439, 
0.027398405596613884, 0.04683258384466171, 0.015996770933270454, 0.07243666797876358, 0.06716374307870865, 0.1249670758843422, 0.06735149025917053, 0.10652563720941544, 0.07402951270341873], [0.2410115897655487, 0.007673645857721567, 0.007837682031095028, 0.004641967359930277, 0.007303079590201378, 0.008183117024600506, 0.045598916709423065, 0.012973684817552567, 0.023306790739297867, 0.01284661516547203, 0.07681288570165634, 0.03854503855109215, 0.006483536679297686, 0.006137382704764605, 0.005554059986025095, 0.008104183711111546, 0.015884457156062126, 0.07179585099220276, 0.040813181549310684, 0.0992666482925415, 0.023012394085526466, 0.2362133413553238], [0.015272291377186775, 0.021677428856492043, 0.028972674161195755, 0.022124523296952248, 0.010111128911376, 0.027089448645710945, 0.008715154603123665, 0.02154102921485901, 0.017616400495171547, 0.0548793189227581, 0.08580490201711655, 0.03404994681477547, 0.0721881240606308, 0.039978962391614914, 0.02246268093585968, 0.011946980841457844, 0.05065077170729637, 0.01992782950401306, 0.03413291648030281, 0.060888733714818954, 0.09536699950695038, 0.244601771235466], [0.08426885306835175, 0.019379625096917152, 0.015362747944891453, 0.010569842532277107, 0.017626894637942314, 0.010575463995337486, 0.031496092677116394, 0.04137442633509636, 0.027288751676678658, 0.03255179151892662, 0.06549276411533356, 0.02550344355404377, 0.0375109538435936, 0.010350678116083145, 0.010015228763222694, 0.006318010855466127, 0.015387685969471931, 0.029775287955999374, 0.09582662582397461, 0.12329793721437454, 0.040069907903671265, 0.2499569058418274], [0.05147751420736313, 0.026190802454948425, 0.019553564488887787, 0.00794832594692707, 0.012546613812446594, 0.03308728709816933, 0.0062270499765872955, 0.013404070399701595, 0.012399903498589993, 0.09561887383460999, 0.09504444897174835, 0.05091249197721481, 0.06120510399341583, 0.022596407681703568, 0.010520447976887226, 0.0037455155979841948, 0.0661240890622139, 0.017072677612304688, 
0.03299485892057419, 0.028041481971740723, 0.1379493772983551, 0.1953391134738922], [0.13275845348834991, 0.07056494802236557, 0.050126392394304276, 0.07301811128854752, 0.029886994510889053, 0.05183490738272667, 0.07954657077789307, 0.09557035565376282, 0.03163886442780495, 0.06689612567424774, 0.021612634882330894, 0.01268075779080391, 0.02074837125837803, 0.009721857495605946, 0.015947403386235237, 0.005595088470727205, 0.010561300441622734, 0.019578082486987114, 0.035997334867715836, 0.04556220397353172, 0.061467379331588745, 0.058685969561338425], [0.2087833136320114, 0.029155995696783066, 0.006443016231060028, 0.01103169284760952, 0.02444244734942913, 0.024731386452913284, 0.013695928268134594, 0.012421252205967903, 0.020056111738085747, 0.049279406666755676, 0.08623851090669632, 0.02951209619641304, 0.006167024374008179, 0.0029569973703473806, 0.003030969761312008, 0.002860917942598462, 0.04301286116242409, 0.014458796940743923, 0.014806999824941158, 0.042794402688741684, 0.11201685667037964, 0.24210304021835327]], [[0.004565183073282242, 0.01300352904945612, 0.026174133643507957, 0.007049968931823969, 0.00935682374984026, 0.011693540960550308, 0.06786773353815079, 0.010431820526719093, 0.06322897970676422, 0.021942196413874626, 0.057416174560785294, 0.017719339579343796, 0.06816468387842178, 0.0761120617389679, 0.012978041544556618, 0.005454051308333874, 0.016826670616865158, 0.09163359552621841, 0.03862842544913292, 0.17634464800357819, 0.0235173050314188, 0.1798911690711975], [0.03293656185269356, 0.01515344250947237, 0.08451282978057861, 0.029456432908773422, 0.026511605829000473, 0.07294659316539764, 0.05761033669114113, 0.1186656728386879, 0.1711113154888153, 0.0834699347615242, 0.04539155587553978, 0.08262091875076294, 0.011793522164225578, 0.029716731980443, 0.008617623709142208, 0.005699750501662493, 0.004456246737390757, 0.003713775658980012, 0.015603961423039436, 0.04811343550682068, 0.011798241175711155, 0.040099628269672394], 
[0.011068887077271938, 0.01969834603369236, 0.01954593136906624, 0.009298007003962994, 0.007076773792505264, 0.03077925369143486, 0.11419381946325302, 0.006479125935584307, 0.029635651037096977, 0.013494843617081642, 0.049707721918821335, 0.06916762888431549, 0.10905357450246811, 0.06627857685089111, 0.020205505192279816, 0.005798425991088152, 0.033329084515571594, 0.16311442852020264, 0.030623283237218857, 0.07412183284759521, 0.011894535273313522, 0.10543468594551086], [0.01861737295985222, 0.02686811052262783, 0.014051511883735657, 0.010084041394293308, 0.011633999645709991, 0.04577554762363434, 0.2607536315917969, 0.013138756155967712, 0.03488823026418686, 0.02823619917035103, 0.024329137057065964, 0.03350699692964554, 0.06883412599563599, 0.030854010954499245, 0.012570234015583992, 0.005074800457805395, 0.043811529874801636, 0.20185016095638275, 0.02558644860982895, 0.04345568269491196, 0.018000993877649307, 0.02807845175266266], [0.01637323386967182, 0.04360633343458176, 0.016345512121915817, 0.018134266138076782, 0.01111713144928217, 0.08276436477899551, 0.10470268130302429, 0.0897623598575592, 0.06001977622509003, 0.03391679748892784, 0.04201188310980797, 0.05535775050520897, 0.1128857284784317, 0.021035360172390938, 0.024452276527881622, 0.008691243827342987, 0.03735591098666191, 0.05968291312456131, 0.06614061444997787, 0.05594439432024956, 0.016107317060232162, 0.023592231795191765], [0.0546906515955925, 0.03779533505439758, 0.05366169288754463, 0.025012735277414322, 0.05516120046377182, 0.06710360944271088, 0.17705783247947693, 0.08647898584604263, 0.1341673731803894, 0.08594264835119247, 0.026750465855002403, 0.0595138743519783, 0.019552309066057205, 0.020617656409740448, 0.00637618824839592, 0.022036489099264145, 0.004526391625404358, 0.015768790617585182, 0.007310203276574612, 0.018811199814081192, 0.012143825180828571, 0.009520478546619415], [0.009676974266767502, 0.010282049886882305, 0.01804584264755249, 0.05099697783589363, 0.005956151057034731, 
0.011254957877099514, 0.023705903440713882, 0.015351585112512112, 0.17683145403862, 0.04002010077238083, 0.09283117949962616, 0.039013586938381195, 0.061881180852651596, 0.03559630364179611, 0.0450451634824276, 0.0017729520332068205, 0.007013975642621517, 0.007327865809202194, 0.00915262009948492, 0.18941082060337067, 0.00990671943873167, 0.13892574608325958], [0.02106623351573944, 0.01953873410820961, 0.04151751473546028, 0.041398368775844574, 0.0936986580491066, 0.05635412409901619, 0.03293774649500847, 0.3367268145084381, 0.03361805900931358, 0.10531201958656311, 0.018826186656951904, 0.009989175945520401, 0.007300408557057381, 0.009668960236012936, 0.0161098912358284, 0.02893124707043171, 0.00931403785943985, 0.005538196302950382, 0.04541236162185669, 0.0102209048345685, 0.05156393721699715, 0.004956412594765425], [0.02915875054895878, 0.02130584605038166, 0.06789249181747437, 0.08196675032377243, 0.020790843293070793, 0.043644893914461136, 0.1295575499534607, 0.01852499321103096, 0.035783782601356506, 0.037316903471946716, 0.04787995293736458, 0.09417807310819626, 0.0363035649061203, 0.08596552163362503, 0.062470003962516785, 0.012438193894922733, 0.022495878860354424, 0.036010902374982834, 0.013722209259867668, 0.027435248717665672, 0.008856269530951977, 0.06630126386880875], [0.04045477509498596, 0.01062537170946598, 0.042029622942209244, 0.17672494053840637, 0.13579273223876953, 0.06680052727460861, 0.022772133350372314, 0.10991623252630234, 0.0273649450391531, 0.020310182124376297, 0.059692107141017914, 0.05618785321712494, 0.01441970095038414, 0.02079382725059986, 0.09629550576210022, 0.054984234273433685, 0.014383436180651188, 0.0024328548461198807, 0.011798851191997528, 0.004885203205049038, 0.004898907616734505, 0.006436005234718323], [0.013081076554954052, 0.021992018446326256, 0.05623634532094002, 0.02830136939883232, 0.015570493414998055, 0.09160986542701721, 0.0610564760863781, 0.04358411207795143, 0.03106727823615074, 0.0444394089281559, 
0.13294723629951477, 0.03571004420518875, 0.07145022600889206, 0.051743876188993454, 0.032679520547389984, 0.008829676546156406, 0.044600579887628555, 0.03007260337471962, 0.05524116009473801, 0.041701916605234146, 0.026592975482344627, 0.061491694301366806], [0.00498776463791728, 0.011084857396781445, 0.047051459550857544, 0.002591546159237623, 0.0034094727598130703, 0.017257601022720337, 0.056979719549417496, 0.005811573471873999, 0.035145748406648636, 0.00862228125333786, 0.10267340391874313, 0.08775630593299866, 0.12451038509607315, 0.1303395926952362, 0.0062264022417366505, 0.004669906571507454, 0.01551124732941389, 0.06665844470262527, 0.023041771724820137, 0.07073395699262619, 0.0059989336878061295, 0.1689375936985016], [0.003786779474467039, 0.019300393760204315, 0.023707887157797813, 0.01163522619754076, 0.0023998147808015347, 0.07768502831459045, 0.029587827622890472, 0.04285651445388794, 0.05095440521836281, 0.01176405418664217, 0.1897592544555664, 0.020778225734829903, 0.22895459830760956, 0.022734222933650017, 0.021972881630063057, 0.000958611664827913, 0.03851785883307457, 0.009557639248669147, 0.05285486951470375, 0.07739417999982834, 0.00711122015491128, 0.055728480219841], [0.0021730789449065924, 0.006378895603120327, 0.004799819551408291, 0.0010884057264775038, 0.0006662440137006342, 0.0042659384198486805, 0.025259580463171005, 0.0005588725907728076, 0.0053102970123291016, 0.003002685261890292, 0.04705158248543739, 0.01583288051187992, 0.19151157140731812, 0.041876170784235, 0.007999514229595661, 0.0017228488577529788, 0.03579401969909668, 0.3121415972709656, 0.030111806467175484, 0.07099314779043198, 0.011161764152348042, 0.18029922246932983], [0.005943204741925001, 0.014278572984039783, 0.007619241252541542, 0.002648308640345931, 0.0020478200167417526, 0.015013725496828556, 0.07239939272403717, 0.002553112106397748, 0.012731385417282581, 0.007821053266525269, 0.041544560343027115, 0.016134504228830338, 0.1257827877998352, 0.037181854248046875, 
0.008837515488266945, 0.0020642494782805443, 0.07876642048358917, 0.30946680903434753, 0.04105265811085701, 0.07792995125055313, 0.018825441598892212, 0.09935739636421204], [0.009998447261750698, 0.016583051532506943, 0.01669594645500183, 0.008093862794339657, 0.007884487509727478, 0.01585932821035385, 0.019935810938477516, 0.023543216288089752, 0.020831530913710594, 0.01514873281121254, 0.04594108462333679, 0.02827438712120056, 0.0773397833108902, 0.056389931589365005, 0.023875802755355835, 0.013814525678753853, 0.05883141607046127, 0.09610209614038467, 0.15743273496627808, 0.11260451376438141, 0.037613335996866226, 0.13720601797103882], [0.031115077435970306, 0.026585662737488747, 0.0668095201253891, 0.0072397408075630665, 0.008488425984978676, 0.013537243939936161, 0.05718066170811653, 0.007090057712048292, 0.01860796846449375, 0.04325645789504051, 0.06573661416769028, 0.02045259438455105, 0.0290746558457613, 0.06867139041423798, 0.006016144994646311, 0.016446875408291817, 0.016265520825982094, 0.1346929967403412, 0.02842615731060505, 0.05555186793208122, 0.06522466242313385, 0.21352964639663696], [0.0016650001052767038, 0.0018673123558983207, 0.005142318084836006, 0.004216075409203768, 0.00039158540312200785, 0.0005394195904955268, 0.001181481289677322, 0.0005385270342230797, 0.008300895802676678, 0.004272519610822201, 0.05612051859498024, 0.0038840435445308685, 0.02082110196352005, 0.0234721377491951, 0.011993998661637306, 0.000571874319575727, 0.005910629406571388, 0.008878730237483978, 0.01145398523658514, 0.1422315090894699, 0.013340409845113754, 0.673206090927124], [0.0041662901639938354, 0.006126221735030413, 0.02263675071299076, 0.0039052434731274843, 0.00564908841624856, 0.003946961369365454, 0.005317562259733677, 0.008551613427698612, 0.00431446498259902, 0.028826797381043434, 0.04873025789856911, 0.00313397659920156, 0.012073618359863758, 0.029284454882144928, 0.007656278554350138, 0.011981037445366383, 0.020681560039520264, 0.05294983461499214, 
0.13995017111301422, 0.060246583074331284, 0.2581459581851959, 0.2617252469062805], [0.008737473748624325, 0.0037317003589123487, 0.020376671105623245, 0.013203484937548637, 0.0019723784644156694, 0.0027144362684339285, 0.01525324396789074, 0.0005182635504752398, 0.0040306514129042625, 0.00391918933019042, 0.04248698055744171, 0.022844452410936356, 0.015847910195589066, 0.10591069608926773, 0.028548507019877434, 0.003915785811841488, 0.020438434556126595, 0.060916826128959656, 0.009439315646886826, 0.03528471291065216, 0.008893666788935661, 0.5710152387619019], [0.026163069531321526, 0.012850341387093067, 0.053645581007003784, 0.06253252178430557, 0.04488684982061386, 0.032723914831876755, 0.00787889864295721, 0.09743893146514893, 0.005440390668809414, 0.0324440561234951, 0.056368935853242874, 0.008935630321502686, 0.008627512492239475, 0.02485281601548195, 0.06599705666303635, 0.04193491116166115, 0.061321720480918884, 0.010237258858978748, 0.1965118944644928, 0.013105024583637714, 0.0912790596485138, 0.04482365399599075], [0.0020466709975153208, 0.0011097956448793411, 0.007705478463321924, 0.00098529236856848, 0.00032018861384131014, 0.000564245565328747, 0.005967145320028067, 7.581234240205958e-05, 0.001590996515005827, 0.0009657799964770675, 0.04483536630868912, 0.0050828661769628525, 0.012982510030269623, 0.057551346719264984, 0.0042738947086036205, 0.0009048219071701169, 0.008807606063783169, 0.08512353897094727, 0.0059420280158519745, 0.03794638067483902, 0.0067299772053956985, 0.7084883451461792]], [[0.008811566978693008, 0.09042635560035706, 0.024869585409760475, 0.01612292416393757, 0.013183370232582092, 0.010169913992285728, 0.09021280705928802, 0.0132145369425416, 0.031770817935466766, 0.01478072814643383, 0.009620863012969494, 0.047952137887477875, 0.06898122280836105, 0.0627356544137001, 0.021153878420591354, 0.034098781645298004, 0.017979247495532036, 0.262051522731781, 0.024508943781256676, 0.07731004804372787, 0.02209819108247757, 
0.03794693201780319], [0.02912125363945961, 0.09573138505220413, 0.020723771303892136, 0.016578827053308487, 0.04228169098496437, 0.019685471430420876, 0.06903790682554245, 0.035051170736551285, 0.016899898648262024, 0.062828429043293, 0.021889757364988327, 0.04439288377761841, 0.11036063730716705, 0.03235447779297829, 0.01803947240114212, 0.05017099902033806, 0.02844804897904396, 0.09647585451602936, 0.04590754956007004, 0.03005426749587059, 0.08741006255149841, 0.026556245982646942], [0.012305492535233498, 0.03648446872830391, 0.025773225352168083, 0.05196504667401314, 0.06034795939922333, 0.019384726881980896, 0.06094186753034592, 0.01335917692631483, 0.07323811948299408, 0.014490670524537563, 0.021126242354512215, 0.0614815317094326, 0.07525274157524109, 0.06053631007671356, 0.0500863678753376, 0.06696988642215729, 0.023921452462673187, 0.09266863763332367, 0.01785484328866005, 0.09344662725925446, 0.01573973521590233, 0.05262494459748268], [0.005714691709727049, 0.08909980952739716, 0.07584723085165024, 0.011758478358387947, 0.01205512322485447, 0.021611817181110382, 0.12405283004045486, 0.029852423816919327, 0.0451609268784523, 0.046210307627916336, 0.018095441162586212, 0.03820761293172836, 0.08158384263515472, 0.08233091235160828, 0.012330570258200169, 0.023936156183481216, 0.014962389133870602, 0.1118471547961235, 0.033190611749887466, 0.04671879857778549, 0.03274482488632202, 0.042688049376010895], [0.0037018894217908382, 0.07825539261102676, 0.05087653920054436, 0.021892044693231583, 0.01516189705580473, 0.01155630312860012, 0.09048209339380264, 0.01219063252210617, 0.07974272966384888, 0.017728589475154877, 0.015649333596229553, 0.12209911644458771, 0.11021199077367783, 0.07834843546152115, 0.02602229081094265, 0.032512154430150986, 0.009861958213150501, 0.08003342151641846, 0.013113897293806076, 0.07902386784553528, 0.012022379785776138, 0.03951302543282509], [0.010916316881775856, 0.03624868765473366, 0.03863334655761719, 0.0439557246863842, 
0.02092902734875679, 0.01178248506039381, 0.06643261015415192, 0.014695336110889912, 0.02616528421640396, 0.007959578186273575, 0.013366669416427612, 0.07080575823783875, 0.08246570825576782, 0.1326758861541748, 0.06453777849674225, 0.04237192124128342, 0.02155408076941967, 0.1719808429479599, 0.021980831399559975, 0.046806611120700836, 0.010439440608024597, 0.043296072632074356], [0.005784797947853804, 0.0589836910367012, 0.03484264016151428, 0.0675022080540657, 0.026638181880116463, 0.015556514263153076, 0.11365427821874619, 0.0083364462479949, 0.07040182501077652, 0.03462744876742363, 0.018817255273461342, 0.03093770705163479, 0.1328541338443756, 0.05601956322789192, 0.05823182687163353, 0.01686904951930046, 0.013262946158647537, 0.07852036505937576, 0.008445954881608486, 0.08205442130565643, 0.027649741619825363, 0.04000899940729141], [0.03596533089876175, 0.10818430036306381, 0.041441816836595535, 0.039609503000974655, 0.030260873958468437, 0.014599766582250595, 0.07530941069126129, 0.038511671125888824, 0.011894081719219685, 0.10373175144195557, 0.03559832647442818, 0.03992640972137451, 0.06284338235855103, 0.03947996720671654, 0.03587273508310318, 0.019986141473054886, 0.01337637659162283, 0.04998800531029701, 0.031852517277002335, 0.017710288986563683, 0.12140359729528427, 0.032453786581754684], [0.007723445072770119, 0.06219424679875374, 0.03691632300615311, 0.017049813643097878, 0.006188575178384781, 0.020598968490958214, 0.12078491598367691, 0.011625568382441998, 0.07659109681844711, 0.014960885979235172, 0.016052057966589928, 0.07073915749788284, 0.0529014952480793, 0.061658717691898346, 0.027159439399838448, 0.01544477604329586, 0.026074863970279694, 0.1516224890947342, 0.01922728307545185, 0.12506964802742004, 0.013253528624773026, 0.0461626835167408], [0.044070206582546234, 0.04878697544336319, 0.0323137603700161, 0.02726350910961628, 0.031845998018980026, 0.03670746833086014, 0.08803030103445053, 0.08115588128566742, 0.013944000005722046, 
0.12941338121891022, 0.01780984178185463, 0.015085037797689438, 0.0244191475212574, 0.024002769961953163, 0.01866409368813038, 0.029374390840530396, 0.02910209819674492, 0.07384578138589859, 0.05073206126689911, 0.023235609754920006, 0.14057275652885437, 0.019624916836619377], [0.0285642147064209, 0.04917840287089348, 0.03742586821317673, 0.027465645223855972, 0.0263262577354908, 0.03590839356184006, 0.048588093370199203, 0.02577345259487629, 0.03463644161820412, 0.042030636221170425, 0.019429190084338188, 0.029276203364133835, 0.07368183135986328, 0.08738907426595688, 0.037971653044223785, 0.031343974173069, 0.07907310128211975, 0.1078343540430069, 0.046838462352752686, 0.04753775894641876, 0.0468905083835125, 0.03683646395802498], [0.03730461373925209, 0.05072079226374626, 0.040012963116168976, 0.03342195227742195, 0.032491136342287064, 0.05436733737587929, 0.10398201644420624, 0.04412813112139702, 0.06360938400030136, 0.01622583530843258, 0.017475560307502747, 0.06061013042926788, 0.04579516500234604, 0.05912678316235542, 0.03402835875749588, 0.0370214581489563, 0.046123504638671875, 0.07092756032943726, 0.057857856154441833, 0.05774744227528572, 0.014183548279106617, 0.022838519886136055], [0.02312578447163105, 0.08934113383293152, 0.04481006786227226, 0.017847422510385513, 0.036654986441135406, 0.03222399204969406, 0.0660516694188118, 0.040661681443452835, 0.03666266053915024, 0.04356370493769646, 0.02639343962073326, 0.06105607748031616, 0.11949623376131058, 0.06910549849271774, 0.025687232613563538, 0.03331664577126503, 0.03669965639710426, 0.05933472886681557, 0.05083068832755089, 0.03021492063999176, 0.032900162041187286, 0.024021610617637634], [0.022511249408125877, 0.039254821836948395, 0.0372263565659523, 0.03598170354962349, 0.046884868294000626, 0.025963159278035164, 0.04492465406656265, 0.01135720033198595, 0.08225088566541672, 0.007903008721768856, 0.02200237289071083, 0.07263891398906708, 0.047075580805540085, 0.09327811747789383, 
0.043447449803352356, 0.06543072313070297, 0.036026082932949066, 0.0845586359500885, 0.01996619626879692, 0.09088261425495148, 0.0076270014978945255, 0.06280838698148727], [0.006222165655344725, 0.10133272409439087, 0.06458761543035507, 0.009371799416840076, 0.010285982862114906, 0.028360584750771523, 0.08989792317152023, 0.029227718710899353, 0.06674037873744965, 0.03503584489226341, 0.01654965989291668, 0.04072297364473343, 0.0818152204155922, 0.08845221251249313, 0.011500783264636993, 0.024647701531648636, 0.021308334544301033, 0.10381477326154709, 0.040760818868875504, 0.058165114372968674, 0.02526078186929226, 0.04593893513083458], [0.009744592010974884, 0.07287218421697617, 0.03720106929540634, 0.025668591260910034, 0.021586142480373383, 0.01734580285847187, 0.05508393049240112, 0.014476031996309757, 0.06496104598045349, 0.025177473202347755, 0.013903261162340641, 0.0713343545794487, 0.08735727518796921, 0.10496211796998978, 0.030965954065322876, 0.04579576104879379, 0.019629308953881264, 0.10295069962739944, 0.02247425727546215, 0.07776399701833725, 0.022030187770724297, 0.05671587586402893], [0.014291116036474705, 0.03256760537624359, 0.02881336398422718, 0.03022102452814579, 0.013218375854194164, 0.019286369904875755, 0.0324891023337841, 0.014942058362066746, 0.04777548089623451, 0.007167174015194178, 0.011260384693741798, 0.06690403819084167, 0.07455521821975708, 0.16211482882499695, 0.061716023832559586, 0.03556148707866669, 0.04899982362985611, 0.1463499218225479, 0.03725428134202957, 0.061142757534980774, 0.00853917095810175, 0.04483034834265709], [0.006676610559225082, 0.07349810004234314, 0.023077527061104774, 0.03439188376069069, 0.022329941391944885, 0.028136245906352997, 0.04944147169589996, 0.009952405467629433, 0.13885153830051422, 0.02993268147110939, 0.013522587716579437, 0.04068967327475548, 0.11471046507358551, 0.059170790016651154, 0.039585523307323456, 0.023902369663119316, 0.02807639352977276, 0.060967691242694855, 0.014578755013644695, 
0.1265111118555069, 0.021315356716513634, 0.04068092629313469], [0.03798208013176918, 0.12429441511631012, 0.027737338095903397, 0.031241726130247116, 0.025210993364453316, 0.02673509158194065, 0.04689471423625946, 0.03046158328652382, 0.037002019584178925, 0.0475357286632061, 0.03090917505323887, 0.05153028666973114, 0.0840102955698967, 0.05054442584514618, 0.04101966321468353, 0.027493592351675034, 0.036963265389204025, 0.06527415663003922, 0.03821684792637825, 0.04092328995466232, 0.0543869286775589, 0.043632324784994125], [0.0066957320086658, 0.04496246948838234, 0.022452862933278084, 0.01288591418415308, 0.004729922395199537, 0.027669232338666916, 0.05981931462883949, 0.010386141948401928, 0.11432472616434097, 0.01328547764569521, 0.011342795565724373, 0.05663271248340607, 0.04319031909108162, 0.06008763611316681, 0.023224301636219025, 0.016923679038882256, 0.04586074873805046, 0.15223322808742523, 0.02379537932574749, 0.18417884409427643, 0.010982117615640163, 0.05433645099401474], [0.05355079472064972, 0.06533941626548767, 0.021413160488009453, 0.015488212928175926, 0.03278717026114464, 0.03278485685586929, 0.032739948481321335, 0.07273279130458832, 0.018071373924613, 0.09823191910982132, 0.017698420211672783, 0.028211181983351707, 0.03726819157600403, 0.033958230167627335, 0.016558904200792313, 0.054117243736982346, 0.0539281852543354, 0.06749764829874039, 0.06745180487632751, 0.029824044555425644, 0.12591257691383362, 0.024433813989162445], [0.013029487803578377, 0.026717042550444603, 0.025270966812968254, 0.013522688299417496, 0.015585305169224739, 0.03148067370057106, 0.029891112819314003, 0.010292375460267067, 0.06580379605293274, 0.007593341171741486, 0.010834739543497562, 0.05101293697953224, 0.04284394532442093, 0.11667247861623764, 0.025845669209957123, 0.039940666407346725, 0.10571655631065369, 0.16707928478717804, 0.03244159370660782, 0.11091503500938416, 0.00866632629185915, 0.048843976110219955]], [[0.02245965600013733, 0.06851538270711899, 
0.024263806641101837, 0.005554107949137688, 0.04931354150176048, 0.11629381030797958, 0.05354133993387222, 0.03624556213617325, 0.01325391884893179, 0.017156405374407768, 0.006718379911035299, 0.013429169543087482, 0.1026109904050827, 0.03271171450614929, 0.004955369979143143, 0.03382730484008789, 0.17254820466041565, 0.06866218149662018, 0.06283173710107803, 0.05113532766699791, 0.02432151511311531, 0.019650602713227272], [0.077969491481781, 0.07077431678771973, 0.06867937743663788, 0.013031627982854843, 0.19087456166744232, 0.030864853411912918, 0.23725704848766327, 0.03622305393218994, 0.006322460249066353, 0.025118723511695862, 0.004308795556426048, 0.02631567418575287, 0.0418706052005291, 0.03565208986401558, 0.0022343855816870928, 0.05796089023351669, 0.007511669769883156, 0.04742223024368286, 0.004453377798199654, 0.0030310871079564095, 0.00801020860671997, 0.004113506991416216], [0.017664290964603424, 0.04505494236946106, 0.017795344814658165, 0.01819378137588501, 0.14589425921440125, 0.10273926705121994, 0.007168937474489212, 0.007188743911683559, 0.011395109817385674, 0.0022809188812971115, 0.007115031126886606, 0.013794113881886005, 0.23151244223117828, 0.037444960325956345, 0.01590055786073208, 0.09450926631689072, 0.15952694416046143, 0.010870552621781826, 0.018128372728824615, 0.017766987904906273, 0.005313630681484938, 0.012741584330797195], [0.03730396926403046, 0.04467090591788292, 0.01468211691826582, 0.014107012189924717, 0.017550457268953323, 0.04256734997034073, 0.015116403810679913, 0.04007129371166229, 0.0883200466632843, 0.040101658552885056, 0.057681143283843994, 0.058569494634866714, 0.2757705748081207, 0.022337794303894043, 0.014706826768815517, 0.013958209194242954, 0.012363025918602943, 0.006024876609444618, 0.01748209446668625, 0.1050758957862854, 0.015784792602062225, 0.045754026621580124], [0.06535210460424423, 0.050973325967788696, 0.022786587476730347, 0.011625182814896107, 0.059256501495838165, 0.1150905042886734, 
0.05375204235315323, 0.08263058215379715, 0.06778993457555771, 0.019225044175982475, 0.026361562311649323, 0.04770781844854355, 0.05185776948928833, 0.019512470811605453, 0.005291896406561136, 0.03801380470395088, 0.07403019815683365, 0.030353078618645668, 0.04175129532814026, 0.08004140108823776, 0.01128087006509304, 0.025316063314676285], [0.036147136241197586, 0.04558154195547104, 0.041311196982860565, 0.12030117213726044, 0.265844464302063, 0.04515957832336426, 0.03674343600869179, 0.019484633579850197, 0.05097561702132225, 0.014134405180811882, 0.020052017644047737, 0.03025921992957592, 0.012378363870084286, 0.018105383962392807, 0.02634388953447342, 0.16381622850894928, 0.011574544943869114, 0.008622423745691776, 0.003758236300200224, 0.012529073283076286, 0.0036421071272343397, 0.013235348276793957], [0.016493849456310272, 0.02713479846715927, 0.004291281569749117, 0.010380077175796032, 0.06828097254037857, 0.3753648102283478, 0.003549742978066206, 0.025200147181749344, 0.05499500781297684, 0.004664376378059387, 0.017774229869246483, 0.023304151371121407, 0.06725715845823288, 0.005344906821846962, 0.006680187303572893, 0.011020864360034466, 0.2194909304380417, 0.0024292573798447847, 0.02030416578054428, 0.02245333604514599, 0.003017386654391885, 0.010568312369287014], [0.06282930821180344, 0.10330900549888611, 0.09143321961164474, 0.05453479662537575, 0.08782672137022018, 0.05569135397672653, 0.04204605519771576, 0.21514476835727692, 0.009072196669876575, 0.030327172949910164, 0.03102950192987919, 0.017750438302755356, 0.01257042121142149, 0.030553625896573067, 0.010134298354387283, 0.08320551365613937, 0.012024606578052044, 0.004418416414409876, 0.015436592511832714, 0.003451449330896139, 0.010559073649346828, 0.016651466488838196], [0.02772904746234417, 0.03524491563439369, 0.04330848902463913, 0.022179238498210907, 0.016143862158060074, 0.05021418631076813, 0.12051078677177429, 0.10494783520698547, 0.005718186032027006, 0.24759766459465027, 
0.018817033618688583, 0.06213819235563278, 0.02264312095940113, 0.016748785972595215, 0.012006986886262894, 0.011396613903343678, 0.02788284234702587, 0.04437505453824997, 0.027267515659332275, 0.00374482199549675, 0.06811892986297607, 0.011265904642641544], [0.013634929433465004, 0.0976140946149826, 0.005994496867060661, 0.003580874064937234, 0.028170021250844002, 0.010498404502868652, 0.002241648966446519, 0.022674014791846275, 0.007862518541514874, 0.013018893077969551, 0.03910072147846222, 0.036459438502788544, 0.6675353646278381, 0.014457888901233673, 0.0014077199157327414, 0.022043345496058464, 0.0009657886694185436, 0.0001955416373675689, 0.0018051810329779983, 0.0011458905646577477, 0.005661191418766975, 0.0039319004863500595], [0.02048533782362938, 0.09009356796741486, 0.044764209538698196, 0.015668176114559174, 0.021701358258724213, 0.033959269523620605, 0.038112711161375046, 0.05896971374750137, 0.002627215115353465, 0.05575260519981384, 0.012301018461585045, 0.04400604963302612, 0.34954017400741577, 0.05467084050178528, 0.009411645121872425, 0.03993956744670868, 0.02740347944200039, 0.02232857048511505, 0.027733219787478447, 0.0042976136319339275, 0.01695459894835949, 0.009279083460569382], [0.018322575837373734, 0.036972999572753906, 0.057519275695085526, 0.009414244443178177, 0.014973719604313374, 0.1574597954750061, 0.11733399331569672, 0.017407706007361412, 0.0095533961430192, 0.0825384184718132, 0.011765357106924057, 0.060331881046295166, 0.20658600330352783, 0.03965141624212265, 0.007035369053483009, 0.00936668086796999, 0.0563318207859993, 0.051605116575956345, 0.008818844333291054, 0.006758224219083786, 0.013299432583153248, 0.00695378240197897], [0.009831936098635197, 0.030313441529870033, 0.018279505893588066, 0.0054648444056510925, 0.011191085912287235, 0.018603159114718437, 0.08655867725610733, 0.009539715014398098, 0.01037609577178955, 0.06514022499322891, 0.019259793683886528, 0.07472512871026993, 0.5473682880401611, 0.036102473735809326, 
0.004767126403748989, 0.008531985804438591, 0.005294335074722767, 0.022275932133197784, 0.0022691504564136267, 0.005782031454145908, 0.0040243943221867085, 0.004300642758607864], [0.0029076894279569387, 0.010949315503239632, 0.0024948217906057835, 0.003309250809252262, 0.013515256345272064, 0.02482653595507145, 0.002801399677991867, 0.002049042610451579, 0.008250828832387924, 0.0026126676239073277, 0.008343434892594814, 0.012106508947908878, 0.6622975468635559, 0.016870951279997826, 0.019117629155516624, 0.02076825499534607, 0.10842177271842957, 0.01263793371617794, 0.023057760670781136, 0.026883501559495926, 0.006228304468095303, 0.009549557231366634], [0.019113656133413315, 0.028152551501989365, 0.0101500628516078, 0.011943867430090904, 0.008287795819342136, 0.026137828826904297, 0.0092762541025877, 0.01946304552257061, 0.023785412311553955, 0.025682825595140457, 0.0441000834107399, 0.04311702772974968, 0.39822283387184143, 0.02617507427930832, 0.03198191896080971, 0.015746286138892174, 0.03043818287551403, 0.014030132442712784, 0.04516015946865082, 0.08523324877023697, 0.024560824036598206, 0.05924093723297119], [0.01072599831968546, 0.014776335097849369, 0.01055836770683527, 0.010667692869901657, 0.017715008929371834, 0.034470316022634506, 0.019075985997915268, 0.013102107681334019, 0.025668911635875702, 0.0071151042357087135, 0.01016256120055914, 0.01629883237183094, 0.03822421655058861, 0.026214463636279106, 0.022885959595441818, 0.03660041466355324, 0.20885039865970612, 0.11826416105031967, 0.11433295160531998, 0.18281234800815582, 0.02287713810801506, 0.03860084339976311], [0.010025297291576862, 0.026681261137127876, 0.024134228006005287, 0.0744895190000534, 0.08784230053424835, 0.01717216894030571, 0.013013128191232681, 0.0017343549989163876, 0.010352591052651405, 0.0063099125400185585, 0.011101702228188515, 0.011622079648077488, 0.025346875190734863, 0.035220809280872345, 0.10241834074258804, 0.30567485094070435, 0.0665382444858551, 0.05697374790906906, 
0.018875936046242714, 0.033740829676389694, 0.01159230899065733, 0.04913949966430664], [0.0028208494186401367, 0.008413814008235931, 0.0014153009979054332, 0.002610841765999794, 0.01416495069861412, 0.04860817268490791, 0.0007850877591408789, 0.002021940890699625, 0.004025347530841827, 0.0006693156319670379, 0.0036004194989800453, 0.004652728792279959, 0.04769150912761688, 0.005630024708807468, 0.010826552286744118, 0.01709917187690735, 0.6677020788192749, 0.013542860746383667, 0.09845772385597229, 0.02181449718773365, 0.006626863963901997, 0.01682002656161785], [0.013956177048385143, 0.05119556933641434, 0.027018263936042786, 0.020259374752640724, 0.043439831584692, 0.019807804375886917, 0.0072768754325807095, 0.012027841992676258, 0.002101871417835355, 0.003536149160936475, 0.012736482545733452, 0.00958760641515255, 0.027043357491493225, 0.05091184005141258, 0.025871001183986664, 0.31563061475753784, 0.12845447659492493, 0.02899099513888359, 0.09892923384904861, 0.012570546939969063, 0.023378772661089897, 0.06527529656887054], [0.008540518581867218, 0.012918656691908836, 0.019523249939084053, 0.008836404420435429, 0.0048229810781776905, 0.010737086646258831, 0.017572497949004173, 0.0071569280698895454, 0.0013033627765253186, 0.03026413731276989, 0.00929068960249424, 0.020939843729138374, 0.020707573741674423, 0.031296659260988235, 0.031904689967632294, 0.028572725132107735, 0.14989645779132843, 0.19354479014873505, 0.14959034323692322, 0.012233604677021503, 0.16918233036994934, 0.061164602637290955], [0.017138276249170303, 0.15204375982284546, 0.024177592247724533, 0.0019840849563479424, 0.022133583202958107, 0.004194497596472502, 0.0069895233027637005, 0.014450831338763237, 0.0016216556541621685, 0.006924810353666544, 0.017485423013567924, 0.024505138397216797, 0.3405625820159912, 0.07192971557378769, 0.0022973925806581974, 0.15976585447788239, 0.0057300664484500885, 0.010535042732954025, 0.03990248590707779, 0.005081214476376772, 0.027788963168859482, 
0.04275744408369064], [0.00468752346932888, 0.018143774941563606, 0.010610170662403107, 0.0034361439757049084, 0.0053741903975605965, 0.013644883409142494, 0.006798621267080307, 0.0015810845652595162, 0.0009541313047520816, 0.006594866048544645, 0.0036583144683390856, 0.010228943079710007, 0.23667357861995697, 0.05237103998661041, 0.015603546053171158, 0.03552939370274544, 0.27226099371910095, 0.1207665279507637, 0.08038021624088287, 0.021387971937656403, 0.04775672033429146, 0.03155744448304176]]], [[[0.005072837695479393, 0.10765001177787781, 0.06795162707567215, 0.02537199668586254, 0.048461951315402985, 0.025597313418984413, 0.026088332757353783, 0.01760079711675644, 0.010227618739008904, 0.05491992458701134, 0.04910425469279289, 0.026565078645944595, 0.27242612838745117, 0.037816353142261505, 0.03298024460673332, 0.017467249184846878, 0.02228263020515442, 0.021939657628536224, 0.02164594456553459, 0.011837251484394073, 0.0560108982026577, 0.04098179191350937], [0.03613784536719322, 0.062408287078142166, 0.14175093173980713, 0.0197167806327343, 0.06980077177286148, 0.04940856248140335, 0.07051288336515427, 0.01688224822282791, 0.015049362555146217, 0.061237841844558716, 0.16192945837974548, 0.06262420862913132, 0.08306881785392761, 0.03435467556118965, 0.012187330983579159, 0.007160921115428209, 0.016666090115904808, 0.014704838395118713, 0.005557443015277386, 0.006152677349746227, 0.013705946505069733, 0.038982078433036804], [0.016836663708090782, 0.03082394227385521, 0.058643363416194916, 0.016212116926908493, 0.05178101360797882, 0.08976052701473236, 0.04079371690750122, 0.012446537613868713, 0.031290214508771896, 0.07256299257278442, 0.10488954931497574, 0.10358931869268417, 0.10481931269168854, 0.030693504959344864, 0.017941776663064957, 0.01738869398832321, 0.06320695579051971, 0.026519620791077614, 0.012826250866055489, 0.021658681333065033, 0.030318044126033783, 0.0449972003698349], [0.06662322580814362, 0.04839249327778816, 0.052863709628582, 
0.03336643427610397, 0.022669769823551178, 0.029865028336644173, 0.04084847867488861, 0.026809856295585632, 0.05149949714541435, 0.043926455080509186, 0.03240059316158295, 0.03891273960471153, 0.03233850747346878, 0.050196919590234756, 0.02967226877808571, 0.04172957316040993, 0.03182721883058548, 0.04950397089123726, 0.036877233535051346, 0.08578583598136902, 0.06058927997946739, 0.09330086410045624], [0.028146661818027496, 0.14461557567119598, 0.07301441580057144, 0.04016101360321045, 0.05255948752164841, 0.03770587220788002, 0.03390740603208542, 0.05413772538304329, 0.03846174106001854, 0.07709459215402603, 0.04399634897708893, 0.03512922674417496, 0.08528312295675278, 0.03339320421218872, 0.027039656415581703, 0.01920367032289505, 0.01310479175299406, 0.014374813996255398, 0.02175923064351082, 0.03178318962454796, 0.04858701676130295, 0.0465412512421608], [0.05435178428888321, 0.051221586763858795, 0.09286599606275558, 0.04268736019730568, 0.027413515374064445, 0.08825496584177017, 0.0977984145283699, 0.012095899321138859, 0.08708079159259796, 0.037477992475032806, 0.08061614632606506, 0.04477778822183609, 0.02661651186645031, 0.03660183399915695, 0.021577514708042145, 0.005935030058026314, 0.030529698356986046, 0.02612556330859661, 0.005144610535353422, 0.05329113081097603, 0.007503296248614788, 0.07003266364336014], [0.013460036367177963, 0.05426819622516632, 0.0636453703045845, 0.03250051662325859, 0.016812197864055634, 0.05126713216304779, 0.028017330914735794, 0.015976743772625923, 0.03866944462060928, 0.13693740963935852, 0.07918757945299149, 0.086066834628582, 0.13510802388191223, 0.03778712823987007, 0.030131779611110687, 0.006055857986211777, 0.030319102108478546, 0.011572002433240414, 0.0078508285805583, 0.03141207993030548, 0.028311152011156082, 0.06464323401451111], [0.09277918934822083, 0.06827275454998016, 0.16779637336730957, 0.0489773191511631, 0.04698263108730316, 0.05625619739294052, 0.08523731678724289, 0.02167959325015545, 
0.02522153966128826, 0.06253837794065475, 0.05092911422252655, 0.07392619550228119, 0.031447697430849075, 0.0339648500084877, 0.022249920293688774, 0.011692257598042488, 0.010813521221280098, 0.013732679188251495, 0.0030812895856797695, 0.013477189466357231, 0.00971231423318386, 0.0492316335439682], [0.018618982285261154, 0.1205727681517601, 0.09910506010055542, 0.020829780027270317, 0.028765937313437462, 0.04602566733956337, 0.0539645217359066, 0.01607130840420723, 0.041621822863817215, 0.05306432023644447, 0.050008632242679596, 0.05754932016134262, 0.06522606313228607, 0.05642002820968628, 0.018699536100029945, 0.025155918672680855, 0.02862376719713211, 0.03224096819758415, 0.01686370000243187, 0.03641131520271301, 0.033132247626781464, 0.0810282826423645], [0.04781051352620125, 0.13555513322353363, 0.06378103792667389, 0.051509790122509, 0.08253728598356247, 0.0754595696926117, 0.05787717550992966, 0.11327848583459854, 0.03298955783247948, 0.06148973107337952, 0.05275023728609085, 0.02121562510728836, 0.06986752897500992, 0.017668716609477997, 0.026172010228037834, 0.012024437077343464, 0.013766237534582615, 0.008739390410482883, 0.011893007904291153, 0.012291936203837395, 0.023512573912739754, 0.007809963542968035], [0.025843946263194084, 0.11331436038017273, 0.16694529354572296, 0.02360350638628006, 0.02108464017510414, 0.11667407304048538, 0.0534442663192749, 0.011944834142923355, 0.05182009935379028, 0.057870082557201385, 0.14812320470809937, 0.05611290782690048, 0.06409116089344025, 0.01752125844359398, 0.008795458823442459, 0.0033438783138990402, 0.010591384023427963, 0.00471707945689559, 0.0014511956833302975, 0.013601483777165413, 0.004207684192806482, 0.024898236617445946], [0.008713348768651485, 0.07552386820316315, 0.20476385951042175, 0.006283638533204794, 0.022112734615802765, 0.10169170051813126, 0.04657166451215744, 0.009532207623124123, 0.012639207765460014, 0.05771612375974655, 0.1831551343202591, 0.052504558116197586, 0.1384240984916687, 
0.027930818498134613, 0.004178828094154596, 0.006572576705366373, 0.011516780592501163, 0.005725801922380924, 0.001955501502379775, 0.002789669670164585, 0.004431493114680052, 0.015266316011548042], [0.011030382476747036, 0.2549474537372589, 0.14019396901130676, 0.023853596299886703, 0.013194086030125618, 0.05945843830704689, 0.027555003762245178, 0.0350956916809082, 0.030415769666433334, 0.07716234028339386, 0.09545310586690903, 0.03461126983165741, 0.1392592489719391, 0.017920643091201782, 0.010951684787869453, 0.001607441110536456, 0.005092055536806583, 0.0015703163808211684, 0.001657674671150744, 0.0067814006470143795, 0.004363417159765959, 0.007825077511370182], [0.00408167764544487, 0.03543412312865257, 0.04771397262811661, 0.007745832670480013, 0.02784532494843006, 0.07398474216461182, 0.03269762173295021, 0.007730531506240368, 0.03011189214885235, 0.08741006255149841, 0.08140911906957626, 0.09494153410196304, 0.22892993688583374, 0.029883820563554764, 0.012220394797623158, 0.017217999324202538, 0.05425906553864479, 0.02714715525507927, 0.009592798538506031, 0.024932820349931717, 0.028337877243757248, 0.036371711641550064], [0.023133162409067154, 0.04462573304772377, 0.04527074098587036, 0.022008460015058517, 0.012851265259087086, 0.027617763727903366, 0.026993433013558388, 0.018743572756648064, 0.04804962873458862, 0.05129453167319298, 0.032580383121967316, 0.04909120872616768, 0.06429746001958847, 0.06806057691574097, 0.02894761599600315, 0.042382970452308655, 0.044832147657871246, 0.053087443113327026, 0.04204683005809784, 0.09361230581998825, 0.06774716079235077, 0.09272563457489014], [0.005959557369351387, 0.033041175454854965, 0.015358668752014637, 0.037761565297842026, 0.05569588765501976, 0.015641750767827034, 0.010394140146672726, 0.0512581542134285, 0.028279224410653114, 0.04815671965479851, 0.02153259888291359, 0.017222756519913673, 0.1278502345085144, 0.034341659396886826, 0.0734441727399826, 0.05769263580441475, 0.034844301640987396, 
0.03197649493813515, 0.11503274738788605, 0.04977298527956009, 0.10943083465099335, 0.025311844423413277], [0.011656523682177067, 0.022298654541373253, 0.027847595512866974, 0.03021314926445484, 0.025794049724936485, 0.029573995620012283, 0.03924645110964775, 0.007364429533481598, 0.06610063463449478, 0.02571176365017891, 0.042634766548871994, 0.03223090618848801, 0.060334786772727966, 0.061613526195287704, 0.04350768029689789, 0.019792282953858376, 0.06400422751903534, 0.09311090409755707, 0.027377955615520477, 0.1304713636636734, 0.021651780232787132, 0.11746251583099365], [0.0010579609079286456, 0.010976972989737988, 0.016691135242581367, 0.0157172754406929, 0.008930564858019352, 0.014002739451825619, 0.007997344247996807, 0.006460740230977535, 0.018757281824946404, 0.07682982087135315, 0.03722650930285454, 0.06713534891605377, 0.32221323251724243, 0.059397727251052856, 0.04924429580569267, 0.014005707576870918, 0.051035068929195404, 0.026564067229628563, 0.026458799839019775, 0.0446096770465374, 0.046295445412397385, 0.0783923864364624], [0.0018427540780976415, 0.01525366771966219, 0.030148779973387718, 0.02009601518511772, 0.026270471513271332, 0.013382596895098686, 0.015076331794261932, 0.004391060210764408, 0.014546377584338188, 0.059755418449640274, 0.036662083119153976, 0.06525084376335144, 0.25438666343688965, 0.07492512464523315, 0.05582636222243309, 0.03416810184717178, 0.035673681646585464, 0.047034911811351776, 0.01842462830245495, 0.04056428745388985, 0.033112842589616776, 0.10320702195167542], [0.003072471357882023, 0.05409916490316391, 0.026620980352163315, 0.015725651755928993, 0.015187102369964123, 0.012190520763397217, 0.010103193111717701, 0.006658757105469704, 0.021000124514102936, 0.04029484838247299, 0.020314401015639305, 0.03788639232516289, 0.14870071411132812, 0.07887031137943268, 0.04059602692723274, 0.05262388288974762, 0.0481819212436676, 0.04414674639701843, 0.058650221675634384, 0.06868764758110046, 0.08852753788232803, 
0.10786136239767075], [0.014467236585915089, 0.03237540274858475, 0.03190269321203232, 0.04919777438044548, 0.07757461816072464, 0.03749671205878258, 0.05044630914926529, 0.07048213481903076, 0.028501292690634727, 0.034722376614809036, 0.04367142170667648, 0.026815831661224365, 0.11064525693655014, 0.03695305436849594, 0.06269040703773499, 0.030387628823518753, 0.040888965129852295, 0.05880418047308922, 0.06491164863109589, 0.032317131757736206, 0.04501232132315636, 0.01973566599190235], [0.004191354848444462, 0.05965583398938179, 0.05084816366434097, 0.009867055341601372, 0.025873234495520592, 0.04024286940693855, 0.01916600577533245, 0.003807787084951997, 0.03101448528468609, 0.04895631596446037, 0.09277766197919846, 0.048967815935611725, 0.21601693332195282, 0.03533312305808067, 0.015853602439165115, 0.0222169179469347, 0.04841723293066025, 0.03165502846240997, 0.01594642736017704, 0.05021107196807861, 0.03269478678703308, 0.09628625214099884]], [[0.0021250999998301268, 0.06970611214637756, 0.020653309300541878, 0.004865346476435661, 0.013762272894382477, 0.013429236598312855, 0.007327020633965731, 0.019767174497246742, 0.07395053654909134, 0.13426794111728668, 0.029068347066640854, 0.025943046435713768, 0.12041650712490082, 0.03983449935913086, 0.008037904277443886, 0.011209680698812008, 0.018740404397249222, 0.010078723542392254, 0.026580628007650375, 0.18420681357383728, 0.09107226133346558, 0.07495714724063873], [0.019116243347525597, 0.08270668238401413, 0.02446410059928894, 0.004737425595521927, 0.011513135395944118, 0.006345840636640787, 0.006407799199223518, 0.018652889877557755, 0.016287479549646378, 0.1461503505706787, 0.05537290871143341, 0.03573119267821312, 0.22181333601474762, 0.03534955158829689, 0.007118857000023127, 0.0069289421662688255, 0.01239234209060669, 0.007433425169438124, 0.036367133259773254, 0.025679072365164757, 0.16403742134571075, 0.05539387837052345], [0.0022614682093262672, 0.01328403688967228, 0.02224661223590374, 
0.011191222816705704, 0.024084581062197685, 0.011133499443531036, 0.04037182033061981, 0.006887521594762802, 0.2172999531030655, 0.0028140023350715637, 0.01759699545800686, 0.015169345773756504, 0.015528172254562378, 0.04134439677000046, 0.010596069507300854, 0.019653823226690292, 0.008781664073467255, 0.04858686029911041, 0.005818849895149469, 0.3913750946521759, 0.003972972743213177, 0.070001021027565], [0.021828431636095047, 0.0690566673874855, 0.06772477924823761, 0.012617211788892746, 0.01893441565334797, 0.030724680051207542, 0.02165464498102665, 0.05448278412222862, 0.03071141615509987, 0.1376890391111374, 0.039315447211265564, 0.04785384237766266, 0.06181004270911217, 0.09167758375406265, 0.017364518716931343, 0.02245846390724182, 0.02937464788556099, 0.019749263301491737, 0.04978039488196373, 0.04139736667275429, 0.0651569664478302, 0.048637282103300095], [0.0163599643856287, 0.0703156366944313, 0.08806253224611282, 0.009073346853256226, 0.018404802307486534, 0.07500788569450378, 0.011971148662269115, 0.014994096010923386, 0.04684720188379288, 0.15780825912952423, 0.01868411898612976, 0.04213445633649826, 0.059509653598070145, 0.09162881225347519, 0.009927909821271896, 0.01619007997214794, 0.0763876736164093, 0.010155454277992249, 0.027690425515174866, 0.06648331135511398, 0.033617954701185226, 0.03874521702528], [0.04868017137050629, 0.008712303824722767, 0.032907236367464066, 0.06628844141960144, 0.045027099549770355, 0.04389415681362152, 0.08782146126031876, 0.0434747152030468, 0.18853847682476044, 0.030690819025039673, 0.06095172092318535, 0.04791291803121567, 0.015138410963118076, 0.03710026293992996, 0.044841621071100235, 0.024807116016745567, 0.0167619027197361, 0.03935400769114494, 0.007064182311296463, 0.07110444456338882, 0.013819286599755287, 0.025109170004725456], [0.017999747768044472, 0.021096544340252876, 0.05792855843901634, 0.031247612088918686, 0.028242746368050575, 0.012947349809110165, 0.017846932634711266, 0.014965690672397614, 
0.1452864557504654, 0.009117964655160904, 0.057772908359766006, 0.024372564628720284, 0.0222069900482893, 0.0916747897863388, 0.02361508645117283, 0.015023862943053246, 0.009880549274384975, 0.01697264239192009, 0.008378864265978336, 0.1842745691537857, 0.009401928633451462, 0.17974567413330078], [0.011294087395071983, 0.08655378222465515, 0.013179216533899307, 0.013149317353963852, 0.007820862345397472, 0.02103889361023903, 0.014849022962152958, 0.06142789497971535, 0.01343363057821989, 0.2533913552761078, 0.05241985246539116, 0.04123435914516449, 0.17811037600040436, 0.00977739691734314, 0.01583174616098404, 0.011048517189919949, 0.01647450029850006, 0.007183515001088381, 0.027146710082888603, 0.008441965095698833, 0.12683703005313873, 0.009355870075523853], [0.038949303328990936, 0.02666885405778885, 0.05494760721921921, 0.01370330061763525, 0.012350128963589668, 0.013599605299532413, 0.02108827978372574, 0.059418972581624985, 0.07355527579784393, 0.031620100140571594, 0.04549778997898102, 0.08078258484601974, 0.015981163829565048, 0.10973384231328964, 0.017592860385775566, 0.017571713775396347, 0.012627552263438702, 0.027178050950169563, 0.04217003285884857, 0.11313572525978088, 0.02942923828959465, 0.14239798486232758], [0.199964240193367, 0.02771337702870369, 0.022630026564002037, 0.01450999453663826, 0.023452308028936386, 0.014032785780727863, 0.01837845705449581, 0.036562494933605194, 0.042689837515354156, 0.0582783967256546, 0.07413197308778763, 0.11056825518608093, 0.07371242344379425, 0.03612839803099632, 0.016632337123155594, 0.03007199615240097, 0.013276557438075542, 0.013556133024394512, 0.04302676022052765, 0.030747195705771446, 0.04908299073576927, 0.05085310339927673], [0.0043172636069357395, 0.06973006576299667, 0.03016858547925949, 0.016989773139357567, 0.034037236124277115, 0.008026138879358768, 0.011959214694797993, 0.07160209864377975, 0.024047857150435448, 0.025825949385762215, 0.0317770391702652, 0.021362610161304474, 0.06342293322086334, 
0.04620816931128502, 0.02546572871506214, 0.04562065377831459, 0.018544495105743408, 0.02909684181213379, 0.1548815667629242, 0.059546735137701035, 0.10408681631088257, 0.10328216850757599], [0.0020936857908964157, 0.03945201635360718, 0.016506841406226158, 0.0039058614056557417, 0.02477400004863739, 0.023654397577047348, 0.004654215648770332, 0.0355040580034256, 0.07308581471443176, 0.12635089457035065, 0.0236356183886528, 0.10884642601013184, 0.07607308775186539, 0.025003794580698013, 0.007439292035996914, 0.04942743852734566, 0.02360977604985237, 0.006850154604762793, 0.04700014367699623, 0.08839310705661774, 0.1539514660835266, 0.03978785127401352], [0.003869737731292844, 0.028592631220817566, 0.0046919104643166065, 0.0008746733074076474, 0.004001866560429335, 0.0011953411158174276, 0.0005221246974542737, 0.01639658398926258, 0.0013011791743338108, 0.1557738482952118, 0.011808530427515507, 0.007424714509397745, 0.07563100755214691, 0.009209424257278442, 0.002162356162443757, 0.005233271513134241, 0.005799620877951384, 0.0021432156208902597, 0.1293163001537323, 0.0036322048399597406, 0.5117467045783997, 0.018672781065106392], [0.001429639058187604, 0.012146804481744766, 0.0188057292252779, 0.00771585525944829, 0.012961720116436481, 0.009469948709011078, 0.016135666519403458, 0.0065539125353097916, 0.1673920899629593, 0.0037479358725249767, 0.009726895950734615, 0.008205413818359375, 0.012954602017998695, 0.0441780649125576, 0.009463951922953129, 0.018506798893213272, 0.012630985118448734, 0.03888479620218277, 0.013322379440069199, 0.4932572543621063, 0.007782032247632742, 0.07472758740186691], [0.01354272197932005, 0.07336678355932236, 0.06113852187991142, 0.010700793005526066, 0.01551588624715805, 0.017474835738539696, 0.009849848225712776, 0.05089997127652168, 0.014582040719687939, 0.12469503283500671, 0.026173096150159836, 0.023345019668340683, 0.05504755675792694, 0.0785246267914772, 0.01875683292746544, 0.028663575649261475, 0.02896745689213276, 
0.017538229003548622, 0.11503051221370697, 0.03165442496538162, 0.1387314349412918, 0.045800819993019104], [0.014144464395940304, 0.047276571393013, 0.06545865535736084, 0.014359920285642147, 0.030000707134604454, 0.05510152131319046, 0.01629524864256382, 0.01717778667807579, 0.07072333991527557, 0.04730452597141266, 0.01651626266539097, 0.05090656131505966, 0.028594397008419037, 0.05350448563694954, 0.016382204368710518, 0.04616188257932663, 0.07050342857837677, 0.02435017190873623, 0.049879033118486404, 0.15726476907730103, 0.04411351680755615, 0.0639806017279625], [0.032992906868457794, 0.014270343817770481, 0.0463748574256897, 0.06485747545957565, 0.047969866544008255, 0.03562092036008835, 0.04674792289733887, 0.03396474942564964, 0.09605338424444199, 0.046718865633010864, 0.05257565528154373, 0.017127813771367073, 0.022836240008473396, 0.04883299395442009, 0.057877566665410995, 0.04203307256102562, 0.032111115753650665, 0.05305562913417816, 0.024011990055441856, 0.0831119567155838, 0.06098930537700653, 0.03986532241106033], [0.013644402846693993, 0.017712527886033058, 0.06740084290504456, 0.03249610215425491, 0.024781102314591408, 0.00909859873354435, 0.01086592860519886, 0.009090484119951725, 0.06253505498170853, 0.004766729194670916, 0.031057899817824364, 0.006661639548838139, 0.014510267414152622, 0.103363998234272, 0.03319616988301277, 0.02822401002049446, 0.017059767618775368, 0.03413722291588783, 0.029500827193260193, 0.20386183261871338, 0.02242286689579487, 0.2236117273569107], [0.004265867173671722, 0.04969647154211998, 0.009942654520273209, 0.019357500597834587, 0.008427360095083714, 0.006991723086684942, 0.00786739494651556, 0.02258283644914627, 0.006474264897406101, 0.10081794857978821, 0.026074610650539398, 0.004702524747699499, 0.09341496974229813, 0.007177075371146202, 0.031957466155290604, 0.018920281901955605, 0.015127216465771198, 0.014418140053749084, 0.04656992107629776, 0.012779826298356056, 0.47950729727745056, 0.01292664185166359], 
[0.031131232157349586, 0.028071023523807526, 0.04433347284793854, 0.021230505779385567, 0.009440342895686626, 0.003555058967322111, 0.014570917934179306, 0.03274395689368248, 0.023044046014547348, 0.012623202055692673, 0.03605043143033981, 0.012572325766086578, 0.010767308063805103, 0.08532495051622391, 0.03568262234330177, 0.023045655339956284, 0.009144516661763191, 0.06613355129957199, 0.10363851487636566, 0.10762688517570496, 0.0815141573548317, 0.20775531232357025], [0.14648029208183289, 0.03680878132581711, 0.03398996591567993, 0.025242215022444725, 0.019036119803786278, 0.005500549916177988, 0.012790476903319359, 0.017512807622551918, 0.011680436320602894, 0.015559975057840347, 0.06060586869716644, 0.013994257897138596, 0.05103662237524986, 0.04656355082988739, 0.03142523020505905, 0.03528778627514839, 0.01954321376979351, 0.03656776249408722, 0.16814059019088745, 0.03322318196296692, 0.060670219361782074, 0.11834021657705307], [0.001574516762048006, 0.03346144035458565, 0.02886783517897129, 0.025844665244221687, 0.029236748814582825, 0.005367371719330549, 0.016097573563456535, 0.015460362657904625, 0.03924598544836044, 0.006012549623847008, 0.012394420802593231, 0.003554585622623563, 0.012975210323929787, 0.05115075781941414, 0.0403125137090683, 0.0505729578435421, 0.018654923886060715, 0.09440472722053528, 0.09614226222038269, 0.24616166949272156, 0.06221212446689606, 0.11029472947120667]], [[0.013429316692054272, 0.043272797018289566, 0.04982597753405571, 0.007949733175337315, 0.0152168869972229, 0.05232951417565346, 0.0675664097070694, 0.07144135981798172, 0.01563878171145916, 0.04421764984726906, 0.029190296307206154, 0.028168831020593643, 0.13881023228168488, 0.06944406032562256, 0.013314291834831238, 0.01831066980957985, 0.0632682666182518, 0.07572253793478012, 0.11484336853027344, 0.018608197569847107, 0.027596235275268555, 0.021834589540958405], [0.016109488904476166, 0.060601476579904556, 0.09474612772464752, 0.01033825147897005, 
0.018382461741566658, 0.03364628925919533, 0.018369212746620178, 0.12192685157060623, 0.024896491318941116, 0.0722232237458229, 0.04506044089794159, 0.02752901054918766, 0.1473122090101242, 0.13423341512680054, 0.008385331369936466, 0.012537925504148006, 0.017075009644031525, 0.0121846878901124, 0.050394948571920395, 0.017536476254463196, 0.03658989816904068, 0.019920842722058296], [0.014759767800569534, 0.04195261001586914, 0.08961081504821777, 0.03742728754878044, 0.012691413052380085, 0.027903065085411072, 0.15735578536987305, 0.11469805240631104, 0.010890510864555836, 0.012837883085012436, 0.02205588109791279, 0.02452152781188488, 0.05468735471367836, 0.09625210613012314, 0.03380002826452255, 0.011052198708057404, 0.024749968200922012, 0.11819910258054733, 0.07016042619943619, 0.005948519334197044, 0.009574929252266884, 0.008870769292116165], [0.01001759897917509, 0.107899010181427, 0.07815282791852951, 0.010347639210522175, 0.0037581997457891703, 0.06163580343127251, 0.04394104331731796, 0.1664160192012787, 0.00351154338568449, 0.051089487969875336, 0.016383735463023186, 0.01918594352900982, 0.11161523312330246, 0.05386319383978844, 0.019587991759181023, 0.004781866911798716, 0.05333932861685753, 0.03724358230829239, 0.12143128365278244, 0.0016424787463620305, 0.021255791187286377, 0.0029004206880927086], [0.046632710844278336, 0.0854932963848114, 0.06189191713929176, 0.043231915682554245, 0.020142782479524612, 0.09675955772399902, 0.05392547696828842, 0.11799729615449905, 0.017364690080285072, 0.04243822768330574, 0.03149034082889557, 0.06203619763255119, 0.0638299509882927, 0.04343148320913315, 0.04159340262413025, 0.017867114394903183, 0.04754292964935303, 0.019421273842453957, 0.054387494921684265, 0.006667566951364279, 0.01591646671295166, 0.009937901049852371], [0.026730889454483986, 0.048314113169908524, 0.016526473686099052, 0.028914878144860268, 0.015345122665166855, 0.03161562234163284, 0.10901567339897156, 0.19128447771072388, 0.027982328087091446, 
0.05377163365483284, 0.051104143261909485, 0.06957157701253891, 0.1046980619430542, 0.02273380756378174, 0.026086710393428802, 0.015011992305517197, 0.01803925819694996, 0.05077134445309639, 0.04686892405152321, 0.01038574893027544, 0.021148495376110077, 0.014078744687139988], [0.03496227040886879, 0.03621538355946541, 0.053258366882801056, 0.03636985272169113, 0.025832194834947586, 0.06705369055271149, 0.06581738591194153, 0.10231537371873856, 0.03106670267879963, 0.07674470543861389, 0.04282820597290993, 0.09987454861402512, 0.05771363526582718, 0.07034209370613098, 0.02847389504313469, 0.021853933110833168, 0.03596206381917, 0.02751673012971878, 0.03266981244087219, 0.011821827851235867, 0.02464529126882553, 0.01666211523115635], [0.03999153897166252, 0.0315476730465889, 0.016350431367754936, 0.007705710828304291, 0.03434856981039047, 0.03074472025036812, 0.032257016748189926, 0.20747919380664825, 0.07664116472005844, 0.11052777618169785, 0.07594573497772217, 0.09398254007101059, 0.08273323625326157, 0.03425837308168411, 0.004618911538273096, 0.012132462114095688, 0.00839962251484394, 0.009545898996293545, 0.01866034045815468, 0.02576497755944729, 0.01527625322341919, 0.03108777292072773], [0.00971688237041235, 0.04590184614062309, 0.019909797236323357, 0.009900528006255627, 0.0067014857195317745, 0.07468240708112717, 0.040440883487463, 0.04395798593759537, 0.01621037721633911, 0.2475048303604126, 0.024395069107413292, 0.0781717300415039, 0.13398030400276184, 0.021672649309039116, 0.01338729728013277, 0.017039069905877113, 0.04553372412919998, 0.02914510853588581, 0.035954151302576065, 0.008061218075454235, 0.0676545724272728, 0.010078033432364464], [0.02119234763085842, 0.05821764096617699, 0.031800467520952225, 0.02753547765314579, 0.020032214000821114, 0.08754023164510727, 0.12086500227451324, 0.14457198977470398, 0.0209217369556427, 0.0994405522942543, 0.034026119858026505, 0.044427137821912766, 0.05622902512550354, 0.04182814434170723, 0.01634933240711689, 
0.01146597322076559, 0.03582054376602173, 0.05426747351884842, 0.03299567848443985, 0.007706903386861086, 0.022035520523786545, 0.010730496607720852], [0.023309562355279922, 0.053572993725538254, 0.07393502444028854, 0.028473835438489914, 0.013644043356180191, 0.033782001584768295, 0.03256875276565552, 0.06915554404258728, 0.02527470700442791, 0.03314268961548805, 0.0274477731436491, 0.0740610882639885, 0.08447849750518799, 0.11799043416976929, 0.037015628069639206, 0.024509821087121964, 0.034192051738500595, 0.03128157556056976, 0.10499585419893265, 0.02156870998442173, 0.025322427973151207, 0.030277101323008537], [0.01699773594737053, 0.042932119220495224, 0.03278472274541855, 0.007558619137853384, 0.010072698816657066, 0.0741538405418396, 0.035595107823610306, 0.03440650552511215, 0.02965802699327469, 0.05466064065694809, 0.036376405507326126, 0.06224765628576279, 0.11391386389732361, 0.06665218621492386, 0.012903835624456406, 0.030315058305859566, 0.09279768168926239, 0.05047668516635895, 0.08246000856161118, 0.03423750773072243, 0.02826576493680477, 0.050533369183540344], [0.011795077472925186, 0.05591585487127304, 0.042651545256376266, 0.003039025468751788, 0.008919577114284039, 0.019655684009194374, 0.016611373052001, 0.0872233510017395, 0.032321326434612274, 0.07356878370046616, 0.030005794018507004, 0.038904402405023575, 0.14491143822669983, 0.10683548450469971, 0.005463965702801943, 0.013341937214136124, 0.02170250378549099, 0.024795733392238617, 0.14162281155586243, 0.04339295253157616, 0.036105986684560776, 0.0412154383957386], [0.019075453281402588, 0.04190706089138985, 0.08198587596416473, 0.03629080951213837, 0.013195387087762356, 0.03273685276508331, 0.08473962545394897, 0.06494458019733429, 0.010805939324200153, 0.016036460176110268, 0.017260141670703888, 0.029551276937127113, 0.06203916668891907, 0.11046116799116135, 0.049649402499198914, 0.018831493332982063, 0.046942517161369324, 0.10957157611846924, 0.11379410326480865, 0.010016283020377159, 
0.014693628065288067, 0.015471259132027626], [0.011190962046384811, 0.09845702350139618, 0.06300168484449387, 0.009264606982469559, 0.004458795767277479, 0.05603623390197754, 0.028691083192825317, 0.09985766559839249, 0.0040071699768304825, 0.05296338349580765, 0.014003008604049683, 0.016587117686867714, 0.09878973662853241, 0.05147276446223259, 0.02355327643454075, 0.008971985429525375, 0.07776347547769547, 0.04845045506954193, 0.1895524114370346, 0.0034817371051758528, 0.03438263759016991, 0.005062874406576157], [0.03053014539182186, 0.04065798595547676, 0.06546741724014282, 0.020773964002728462, 0.03109842911362648, 0.06687378138303757, 0.05214468389749527, 0.028034940361976624, 0.01952040195465088, 0.04239290952682495, 0.034223347902297974, 0.02927466668188572, 0.041984524577856064, 0.05477042868733406, 0.026716234162449837, 0.03708629310131073, 0.10083243995904922, 0.08908326178789139, 0.07066935300827026, 0.028911447152495384, 0.051402416080236435, 0.03755100816488266], [0.028389092534780502, 0.047584839165210724, 0.0243137925863266, 0.021117493510246277, 0.011031577363610268, 0.030711805447936058, 0.07992090284824371, 0.07017657905817032, 0.01612417958676815, 0.039865296334028244, 0.03496522456407547, 0.04078374430537224, 0.07668787986040115, 0.02756202034652233, 0.03376049920916557, 0.02400709129869938, 0.04679008573293686, 0.12290937453508377, 0.13141413033008575, 0.017262941226363182, 0.04939863830804825, 0.025222817435860634], [0.038986269384622574, 0.028216583654284477, 0.06856203824281693, 0.03307431936264038, 0.02334265597164631, 0.05727233365178108, 0.05029164254665375, 0.02355543151497841, 0.01509927585721016, 0.04700388014316559, 0.017405565828084946, 0.047857604920864105, 0.022468456998467445, 0.08245333284139633, 0.04052701219916344, 0.04702363535761833, 0.09702759981155396, 0.0925525426864624, 0.07370594143867493, 0.0180155411362648, 0.05271062254905701, 0.022847766056656837], [0.03459395840764046, 0.023415369912981987, 0.03821805492043495, 
0.009350122883915901, 0.021786168217658997, 0.020936278626322746, 0.021474946290254593, 0.056561823934316635, 0.03336029872298241, 0.039935123175382614, 0.04641355574131012, 0.042040422558784485, 0.05183498561382294, 0.07458944618701935, 0.01367355976253748, 0.03749995306134224, 0.03436078876256943, 0.06238555535674095, 0.14266365766525269, 0.062443431466817856, 0.05616133287549019, 0.07630116492509842], [0.007644317578524351, 0.03376892954111099, 0.03696601465344429, 0.007934516295790672, 0.00524458521977067, 0.0589856281876564, 0.028757305815815926, 0.01464338880032301, 0.009170974604785442, 0.15641522407531738, 0.019529232755303383, 0.040987517684698105, 0.08738375455141068, 0.04467378184199333, 0.015393461100757122, 0.02574848383665085, 0.08826430886983871, 0.07217959314584732, 0.06529856473207474, 0.012838419526815414, 0.14690542221069336, 0.021266577765345573], [0.0616484209895134, 0.047106679528951645, 0.0498126819729805, 0.0623801052570343, 0.042253077030181885, 0.07408449798822403, 0.02372587099671364, 0.03207606077194214, 0.015464721247553825, 0.037486061453819275, 0.01622677780687809, 0.014185839332640171, 0.025374572724103928, 0.05161958932876587, 0.058813437819480896, 0.03931451588869095, 0.08948616683483124, 0.05198710411787033, 0.07854577153921127, 0.0317881777882576, 0.06406623125076294, 0.03255358338356018], [0.018521403893828392, 0.030090175569057465, 0.11465934664011002, 0.03056233562529087, 0.011695044115185738, 0.03228199481964111, 0.016904687508940697, 0.014714120887219906, 0.011002966202795506, 0.014879102818667889, 0.019296851009130478, 0.023825272917747498, 0.05431559309363365, 0.15826058387756348, 0.05741894990205765, 0.03343849629163742, 0.07749456912279129, 0.044402092695236206, 0.11952229589223862, 0.02655654214322567, 0.0401880145072937, 0.04996955394744873]], [[0.029375066980719566, 0.09581825137138367, 0.06774844229221344, 0.006507584825158119, 0.009857269003987312, 0.020019063726067543, 0.02517557516694069, 0.05382467806339264, 
0.028316054493188858, 0.03199716657400131, 0.04422980546951294, 0.15918341279029846, 0.11454129964113235, 0.1362152248620987, 0.009639179334044456, 0.029645083472132683, 0.015302884392440319, 0.012552731670439243, 0.03715982660651207, 0.018420396372675896, 0.013353588059544563, 0.041117388755083084], [0.030170992016792297, 0.09486814588308334, 0.06396046280860901, 0.005819715093821287, 0.00695431511849165, 0.06219835951924324, 0.027431972324848175, 0.10752488672733307, 0.05842636525630951, 0.04439189285039902, 0.03676098957657814, 0.10880903154611588, 0.19558043777942657, 0.06011239066720009, 0.005412664730101824, 0.003407432697713375, 0.016902999952435493, 0.006061605177819729, 0.011767532676458359, 0.02103082835674286, 0.01800941675901413, 0.01439757365733385], [0.016991375014185905, 0.07352565228939056, 0.04370472952723503, 0.005336189642548561, 0.007393559440970421, 0.06136036291718483, 0.04004215449094772, 0.028935277834534645, 0.03206460550427437, 0.14797669649124146, 0.03226073086261749, 0.10748203098773956, 0.2133329212665558, 0.0529821403324604, 0.0071101407520473, 0.015894612297415733, 0.02253051847219467, 0.015456651337444782, 0.012325561605393887, 0.016109073534607887, 0.03627719357609749, 0.010907831601798534], [0.10343955457210541, 0.06846933811903, 0.10679233819246292, 0.0023935644421726465, 0.00692572770640254, 0.017523184418678284, 0.029113246127963066, 0.05068186670541763, 0.02260822430253029, 0.014681472443044186, 0.0754995197057724, 0.12921182811260223, 0.06580276787281036, 0.1592133641242981, 0.002581107895821333, 0.020839693024754524, 0.009321843273937702, 0.014142222702503204, 0.030971236526966095, 0.015659945085644722, 0.007512645795941353, 0.0466153509914875], [0.06364485621452332, 0.12304423004388809, 0.06572537124156952, 0.010492156259715557, 0.010151700116693974, 0.04037964344024658, 0.02146935649216175, 0.03827297315001488, 0.007841244339942932, 0.050851378589868546, 0.04079243168234825, 0.24419960379600525, 0.13937951624393463, 
0.042707204818725586, 0.010198798961937428, 0.012858033180236816, 0.02037128619849682, 0.006700407713651657, 0.020683545619249344, 0.0038380087353289127, 0.01116804126650095, 0.01523024681955576], [0.16910314559936523, 0.004294044803828001, 0.09054365754127502, 0.04117516428232193, 0.13201992213726044, 0.008907604031264782, 0.027943678200244904, 0.010264239273965359, 0.036441244184970856, 0.00728883920237422, 0.09987843036651611, 0.09563376009464264, 0.011611949652433395, 0.07986035197973251, 0.02430487982928753, 0.09104979038238525, 0.004826693795621395, 0.006555668078362942, 0.004231537226587534, 0.010980361141264439, 0.0019418023293837905, 0.04114310443401337], [0.0666981041431427, 0.027704963460564613, 0.054313212633132935, 0.0077849701046943665, 0.02940361201763153, 0.027867576107382774, 0.020793888717889786, 0.028359804302453995, 0.028577648103237152, 0.01997399516403675, 0.08803395181894302, 0.2934351861476898, 0.05195372551679611, 0.08653721958398819, 0.008603915572166443, 0.06081925332546234, 0.016002710908651352, 0.006980938371270895, 0.016222385689616203, 0.008299394510686398, 0.0037541023921221495, 0.04787949100136757], [0.12139284610748291, 0.06961382180452347, 0.06317699700593948, 0.018833208829164505, 0.017133653163909912, 0.01663760095834732, 0.02976725623011589, 0.13350385427474976, 0.05467705801129341, 0.013742629438638687, 0.10221198201179504, 0.11331074684858322, 0.04668903350830078, 0.07600400596857071, 0.012062395922839642, 0.023772407323122025, 0.005203027278184891, 0.009389237500727177, 0.01738380268216133, 0.015202258713543415, 0.006745087914168835, 0.033547256141901016], [0.009877699427306652, 0.022871730849146843, 0.05501621589064598, 0.010789364576339722, 0.010233253240585327, 0.034913431853055954, 0.0327313207089901, 0.027010347694158554, 0.02108563669025898, 0.2514524459838867, 0.033136285841464996, 0.1546657383441925, 0.1116161122918129, 0.11102233827114105, 0.012393404729664326, 0.008757633157074451, 0.01992771588265896, 
0.009547512046992779, 0.008289587683975697, 0.008633635006844997, 0.02955903671681881, 0.01646956242620945], [0.04028121381998062, 0.016286160796880722, 0.1169777661561966, 0.014272148720920086, 0.0082818903028965, 0.028738999739289284, 0.0069936104118824005, 0.09735424071550369, 0.00802407506853342, 0.023124823346734047, 0.08557957410812378, 0.07923594862222672, 0.1865130364894867, 0.22628585994243622, 0.013447095640003681, 0.0020139124244451523, 0.012197299860417843, 0.0010712953517213464, 0.013162491843104362, 0.0011082198470830917, 0.008325865492224693, 0.010724533349275589], [0.018906734883785248, 0.08975197374820709, 0.057699915021657944, 0.010299906134605408, 0.003099075984209776, 0.012789253145456314, 0.019154373556375504, 0.07611393183469772, 0.00858648493885994, 0.0685950443148613, 0.026050686836242676, 0.08125408738851547, 0.33525529503822327, 0.12013744562864304, 0.011073515750467777, 0.0038376369047909975, 0.005223565269261599, 0.00509420083835721, 0.00903352815657854, 0.004005273804068565, 0.024271098896861076, 0.009766955859959126], [0.015834322199225426, 0.029404345899820328, 0.057228151708841324, 0.023652473464608192, 0.012932807207107544, 0.009376248344779015, 0.027938181534409523, 0.04949049651622772, 0.018449613824486732, 0.10415268689393997, 0.037926290184259415, 0.15031449496746063, 0.2284860610961914, 0.1249404326081276, 0.03291084244847298, 0.014291287399828434, 0.00436823396012187, 0.007076851557940245, 0.009965769946575165, 0.005877888295799494, 0.021251723170280457, 0.014130835421383381], [0.02027171663939953, 0.04918944835662842, 0.04486595839262009, 0.011712640523910522, 0.007860574871301651, 0.00930415466427803, 0.020119912922382355, 0.05122801661491394, 0.01378793828189373, 0.09268952161073685, 0.03489071875810623, 0.09729571640491486, 0.24973316490650177, 0.11181724071502686, 0.019278664141893387, 0.007819387130439281, 0.010335923172533512, 0.013676553033292294, 0.02247707173228264, 0.020911898463964462, 0.044910307973623276, 
0.04582345858216286], [0.012547997757792473, 0.051419612020254135, 0.03935745730996132, 0.005865358281880617, 0.006348415277898312, 0.03473008796572685, 0.059844888746738434, 0.022913426160812378, 0.014595109969377518, 0.1654500812292099, 0.025595655664801598, 0.07472018152475357, 0.19775179028511047, 0.0799122154712677, 0.01000985223799944, 0.021819807589054108, 0.025696858763694763, 0.048266567289829254, 0.023727113381028175, 0.013336889445781708, 0.052168283611536026, 0.013922282494604588], [0.0801171064376831, 0.06574013829231262, 0.08358165621757507, 0.002920459257438779, 0.007097439840435982, 0.00962862279266119, 0.025819888338446617, 0.03751825913786888, 0.011263953521847725, 0.016976265236735344, 0.06634485721588135, 0.1041707843542099, 0.07136064022779465, 0.1730956882238388, 0.004201776813715696, 0.0392804890871048, 0.010336591862142086, 0.02572052739560604, 0.04956640675663948, 0.01751241460442543, 0.014125199057161808, 0.08362088352441788], [0.05965789780020714, 0.12651664018630981, 0.09014293551445007, 0.013675170950591564, 0.01834000274538994, 0.05686299502849579, 0.0384492501616478, 0.03291982784867287, 0.008517340756952763, 0.04469927027821541, 0.034335337579250336, 0.08741479367017746, 0.05673843249678612, 0.08462322503328323, 0.012752670794725418, 0.03406032174825668, 0.04573475196957588, 0.03303662687540054, 0.05544101074337959, 0.007876475341618061, 0.026797156780958176, 0.03140773996710777], [0.10027734190225601, 0.0034132020082324743, 0.058993663638830185, 0.04397032409906387, 0.10856085270643234, 0.002629239112138748, 0.022242475301027298, 0.005212442483752966, 0.017245620489120483, 0.002555938670411706, 0.061862312257289886, 0.023683473467826843, 0.006043457891792059, 0.09587696939706802, 0.03997715562582016, 0.22056737542152405, 0.005292746238410473, 0.02263442985713482, 0.012125757522881031, 0.02297043427824974, 0.0034914088901132345, 0.1203734427690506], [0.047910578548908234, 0.014444327913224697, 0.035670921206474304, 
0.013446018099784851, 0.04852026700973511, 0.009762010537087917, 0.024001235142350197, 0.013393555767834187, 0.014277724549174309, 0.009024146012961864, 0.05064895749092102, 0.07560845464468002, 0.016705673187971115, 0.09418445080518723, 0.020727792754769325, 0.24519969522953033, 0.022333521395921707, 0.03575641289353371, 0.04516176879405975, 0.018298014998435974, 0.007856737822294235, 0.13706770539283752], [0.07483236491680145, 0.035522714257240295, 0.031892646104097366, 0.01825590990483761, 0.015929142013192177, 0.001753763877786696, 0.019827580079436302, 0.035382576286792755, 0.018705913797020912, 0.004004555754363537, 0.0608721487224102, 0.02846192568540573, 0.019050193950533867, 0.12078883498907089, 0.022118857130408287, 0.1430271863937378, 0.003570245113223791, 0.04589846357703209, 0.052194200456142426, 0.042257390916347504, 0.012744259089231491, 0.19290916621685028], [0.007147485855966806, 0.0206170491874218, 0.050807271152734756, 0.012454998679459095, 0.011519094929099083, 0.01742115244269371, 0.03652222454547882, 0.017566975206136703, 0.00979122705757618, 0.16104938089847565, 0.02117803506553173, 0.03090098686516285, 0.06441672891378403, 0.1798522025346756, 0.022217504680156708, 0.024352556094527245, 0.04198060929775238, 0.051874011754989624, 0.03859742730855942, 0.018786821514368057, 0.10933204740285873, 0.05161420628428459], [0.1075376346707344, 0.0378628671169281, 0.12142106890678406, 0.03002849966287613, 0.015103527344763279, 0.013414129614830017, 0.016652436926960945, 0.0912807285785675, 0.007652528118342161, 0.005993274040520191, 0.06571824103593826, 0.025393323972821236, 0.032339226454496384, 0.21995143592357635, 0.022538023069500923, 0.01655138097703457, 0.016762185841798782, 0.012752744369208813, 0.043795742094516754, 0.008406689390540123, 0.018752386793494225, 0.07009198516607285], [0.006326704751700163, 0.08498383313417435, 0.042706768959760666, 0.010898214764893055, 0.0036689809057861567, 0.00564739340916276, 0.043842218816280365, 
0.020734993740916252, 0.007009921129792929, 0.0904812142252922, 0.010840761475265026, 0.012611407786607742, 0.2567148804664612, 0.13519731163978577, 0.021377122029662132, 0.009883983060717583, 0.010265035554766655, 0.05409443378448486, 0.02520221471786499, 0.015130759216845036, 0.1136828362941742, 0.01869902014732361]], [[0.0179893895983696, 0.04257391020655632, 0.009351336397230625, 0.014697443693876266, 0.013219688087701797, 0.005479819606989622, 0.028323406353592873, 0.015959402546286583, 0.05473991855978966, 0.1345730572938919, 0.08081527799367905, 0.06447555124759674, 0.0930311307311058, 0.01553016435354948, 0.026830041781067848, 0.019433706998825073, 0.01150340586900711, 0.04158296063542366, 0.0257816594094038, 0.08188731223344803, 0.14703311026096344, 0.05518835037946701], [0.10346807539463043, 0.03021983616054058, 0.02055123634636402, 0.01840135082602501, 0.02235589735209942, 0.01983627676963806, 0.051540784537792206, 0.028457384556531906, 0.05420343205332756, 0.04525431990623474, 0.14474119246006012, 0.06934111565351486, 0.07534951716661453, 0.03600376471877098, 0.019601967185735703, 0.02420024387538433, 0.031061487272381783, 0.0392688550055027, 0.019507931545376778, 0.042968012392520905, 0.03175830468535423, 0.0719090923666954], [0.0026436850894242525, 0.003977763932198286, 0.006520745810121298, 0.002762681106105447, 0.016592836007475853, 0.017895756289362907, 0.024638935923576355, 0.005188590846955776, 0.04346982389688492, 0.32875022292137146, 0.02845177985727787, 0.04569169506430626, 0.030612949281930923, 0.024664780125021935, 0.00687009934335947, 0.019583208486437798, 0.03014676831662655, 0.06297007948160172, 0.01055136974900961, 0.07287192344665527, 0.19629992544651031, 0.01884439028799534], [0.015238096937537193, 0.024752037599682808, 0.009931772015988827, 0.009755982086062431, 0.01678607612848282, 0.051700614392757416, 0.03399638831615448, 0.03355777636170387, 0.06380045413970947, 0.12342456728219986, 0.07015658915042877, 0.041032519191503525, 
0.050266604870557785, 0.015632228925824165, 0.01319793239235878, 0.018505314365029335, 0.06935540586709976, 0.040173906832933426, 0.04177913814783096, 0.08527612686157227, 0.12525683641433716, 0.04642365500330925], [0.020656825974583626, 0.02871520444750786, 0.01584019884467125, 0.039295945316553116, 0.025356987491250038, 0.07375882565975189, 0.026586288586258888, 0.01958896592259407, 0.10767919570207596, 0.05682971328496933, 0.06651990860700607, 0.05570513755083084, 0.0874091237783432, 0.021812884137034416, 0.052104976028203964, 0.016951344907283783, 0.09093114733695984, 0.02501852996647358, 0.02285650372505188, 0.08526181429624557, 0.029979191720485687, 0.031141318380832672], [0.0009458342683501542, 0.005770515184849501, 0.0014784320956096053, 0.5894702672958374, 0.006734231021255255, 0.0034750134218484163, 0.0006318397936411202, 0.0020371556747704744, 0.01601923070847988, 0.0017578485421836376, 0.001214643125422299, 0.001044240314513445, 0.005971306934952736, 0.001373921986669302, 0.34923499822616577, 0.0018627264071255922, 0.002239994006231427, 0.0002418169315205887, 0.0005930527695454657, 0.006224004086107016, 0.0008887201547622681, 0.0007902850047685206], [0.013202836737036705, 0.008346672169864178, 0.008906095288693905, 0.0014550237683579326, 0.026187486946582794, 0.01184354443103075, 0.07971152663230896, 0.020118527114391327, 0.033626481890678406, 0.04463773965835571, 0.23881682753562927, 0.14905838668346405, 0.048024725168943405, 0.020538408309221268, 0.0024227385874837637, 0.02788107469677925, 0.011552444659173489, 0.08667591214179993, 0.026848873123526573, 0.04072768613696098, 0.027350202202796936, 0.07206682860851288], [0.07484288513660431, 0.02647608146071434, 0.019343625754117966, 0.010293718427419662, 0.04163077846169472, 0.02199510671198368, 0.1516387015581131, 0.06344588100910187, 0.031505756080150604, 0.06462837755680084, 0.13062125444412231, 0.04884416237473488, 0.03336746245622635, 0.02154763601720333, 0.009658371098339558, 0.022416146472096443, 
0.016757987439632416, 0.08672028034925461, 0.0239619892090559, 0.01893465593457222, 0.04792383313179016, 0.03344530612230301], [0.042528919875621796, 0.03762000426650047, 0.015812523663043976, 0.007906495593488216, 0.012996255420148373, 0.030153607949614525, 0.044841427356004715, 0.03949306160211563, 0.07051854580640793, 0.135581836104393, 0.09060482680797577, 0.09972524642944336, 0.07034717500209808, 0.016399282962083817, 0.00898841954767704, 0.02351853810250759, 0.018848543986678123, 0.028246769681572914, 0.028042180463671684, 0.07141927629709244, 0.058373432606458664, 0.04803363233804703], [0.07670538127422333, 0.0473431721329689, 0.007311766967177391, 0.14076536893844604, 0.08039787411689758, 0.0488155335187912, 0.01835038512945175, 0.022078577429056168, 0.09337233752012253, 0.020937873050570488, 0.05625789240002632, 0.03375105932354927, 0.16675058007240295, 0.0074702780693769455, 0.07466360181570053, 0.013385930098593235, 0.02550579234957695, 0.005264020524919033, 0.004935143981128931, 0.030422400683164597, 0.005079091060906649, 0.02043589949607849], [0.05414315685629845, 0.052614063024520874, 0.024330303072929382, 0.027470707893371582, 0.02969730831682682, 0.007446569856256247, 0.031776051968336105, 0.008355293422937393, 0.020392775535583496, 0.16018487513065338, 0.06647342443466187, 0.0951823964715004, 0.07775790989398956, 0.030945293605327606, 0.03987247496843338, 0.03912922367453575, 0.008716855198144913, 0.03453735634684563, 0.013625932857394218, 0.030947614461183548, 0.1011284813284874, 0.0452718511223793], [0.015903694555163383, 0.044970184564590454, 0.011626459658145905, 0.027098825201392174, 0.019611457362771034, 0.005961448885500431, 0.016034414991736412, 0.009339670650660992, 0.0601782463490963, 0.4244280755519867, 0.014887138269841671, 0.031275589019060135, 0.04951973631978035, 0.01023175474256277, 0.031251683831214905, 0.009144170209765434, 0.003765764879062772, 0.009852646850049496, 0.00495123490691185, 0.056326575577259064, 0.13182848691940308, 
0.011812683194875717], [0.024152586236596107, 0.022397030144929886, 0.006998441182076931, 0.03319697454571724, 0.008749991655349731, 0.017989158630371094, 0.004670038819313049, 0.0027325842529535294, 0.07061722129583359, 0.3703727424144745, 0.027721745893359184, 0.04672518000006676, 0.15974411368370056, 0.007586583495140076, 0.042250290513038635, 0.0023778965696692467, 0.018708717077970505, 0.00206802599132061, 0.0019921136554330587, 0.060534026473760605, 0.05472414195537567, 0.013690344989299774], [0.0008683238411322236, 0.0035549281165003777, 0.0029527274891734123, 0.0008219537558034062, 0.007998203858733177, 0.006226533092558384, 0.01233626063913107, 0.0011564497835934162, 0.01872056908905506, 0.46390682458877563, 0.0037687132135033607, 0.010341293178498745, 0.011156064458191395, 0.0075280689634382725, 0.002386123174801469, 0.010557122528553009, 0.013908573426306248, 0.051840148866176605, 0.004621199797838926, 0.053533535450696945, 0.30540913343429565, 0.0064072273671627045], [0.01158496830612421, 0.02477225475013256, 0.010757324285805225, 0.011781415902078152, 0.01565883867442608, 0.038762692362070084, 0.02131807431578636, 0.019990554079413414, 0.06957132369279861, 0.11389392614364624, 0.039132628589868546, 0.02685169316828251, 0.04289621859788895, 0.017582569271326065, 0.019792109727859497, 0.0187490526586771, 0.07656500488519669, 0.03844374790787697, 0.0490921214222908, 0.12738189101219177, 0.1596524715423584, 0.04576912522315979], [0.023090656846761703, 0.04485444724559784, 0.022521261125802994, 0.07854939997196198, 0.0678696408867836, 0.0384584441781044, 0.0255719143897295, 0.032930102199316025, 0.04267793148756027, 0.05344267189502716, 0.025245029479265213, 0.020131206139922142, 0.033339716494083405, 0.024316953495144844, 0.08805037289857864, 0.060390543192625046, 0.06280648708343506, 0.04242336004972458, 0.04118981957435608, 0.06097063049674034, 0.0788758397102356, 0.03229363262653351], [0.0006041537853889167, 0.007505916524678469, 0.001885139849036932, 
0.4828197658061981, 0.007031694985926151, 0.00365520385093987, 0.0005608421051874757, 0.001284227822907269, 0.020592838525772095, 0.001168867340311408, 0.0006972035043872893, 0.0005778474151156843, 0.004942369647324085, 0.0020639339927583933, 0.43254441022872925, 0.004128328990191221, 0.00524117611348629, 0.0005881499382667243, 0.0016803477192297578, 0.01745854876935482, 0.0014344848459586501, 0.001534618204459548], [0.010032312013208866, 0.011769411154091358, 0.013404454104602337, 0.0012326558353379369, 0.02053023688495159, 0.005403860006481409, 0.06655897200107574, 0.011225476861000061, 0.02769237942993641, 0.024237165227532387, 0.11989381164312363, 0.06885679811239243, 0.03503036126494408, 0.031104743480682373, 0.00305861490778625, 0.05127972364425659, 0.011394420638680458, 0.19091975688934326, 0.05702415853738785, 0.07634939253330231, 0.043348778039216995, 0.11965252459049225], [0.019709033891558647, 0.03132539242506027, 0.01719636283814907, 0.011849929578602314, 0.026042139157652855, 0.006873090751469135, 0.06989624351263046, 0.02748020738363266, 0.02728518843650818, 0.031430602073669434, 0.040243323892354965, 0.013005383312702179, 0.01877656579017639, 0.027294037863612175, 0.02090800181031227, 0.045605987310409546, 0.0200793594121933, 0.2063601016998291, 0.07891174405813217, 0.05900312587618828, 0.13433478772640228, 0.06638937443494797], [0.04101309925317764, 0.038900475949048996, 0.020050635561347008, 0.01052344124764204, 0.015612849034368992, 0.02573077753186226, 0.03185352683067322, 0.02742692455649376, 0.05276324227452278, 0.05893293395638466, 0.06473179161548615, 0.06515581905841827, 0.05953631177544594, 0.026383809745311737, 0.015673181042075157, 0.045121464878320694, 0.03303114324808121, 0.04409412294626236, 0.056849777698516846, 0.10763437300920486, 0.06629237532615662, 0.09268786013126373], [0.06028743088245392, 0.052470047026872635, 0.011294635012745857, 0.15891961753368378, 0.0656369999051094, 0.059156276285648346, 0.01531253568828106, 
0.035152122378349304, 0.03569766506552696, 0.009668641723692417, 0.03137703984975815, 0.013437172397971153, 0.06631996482610703, 0.013693487271666527, 0.12010812014341354, 0.028477666899561882, 0.10310147702693939, 0.0141264908015728, 0.031340569257736206, 0.028928205370903015, 0.012628003023564816, 0.03286578878760338], [0.008780542761087418, 0.027208123356103897, 0.010206552222371101, 0.0053378078155219555, 0.011557974852621555, 0.004394181072711945, 0.015510445460677147, 0.003547517815604806, 0.012999680824577808, 0.26928970217704773, 0.011243057437241077, 0.03103071078658104, 0.027814628556370735, 0.017054222524166107, 0.012160702608525753, 0.030720150098204613, 0.009688914753496647, 0.04747753217816353, 0.01524354051798582, 0.0498591847717762, 0.3507482409477234, 0.028126580640673637]], [[0.06076289713382721, 0.11935574561357498, 0.016688507050275803, 0.01358139980584383, 0.022616377100348473, 0.05758630856871605, 0.02110440284013748, 0.11920162290334702, 0.024730022996664047, 0.05953146144747734, 0.021935461089015007, 0.06901342421770096, 0.08098132163286209, 0.015930943191051483, 0.014543469995260239, 0.022888049483299255, 0.048967450857162476, 0.01721259579062462, 0.07217161357402802, 0.024668585509061813, 0.0671747475862503, 0.02935362048447132], [0.005085828248411417, 0.16202256083488464, 0.010360106825828552, 0.006086016073822975, 0.007554146461188793, 0.013663901947438717, 0.005306406877934933, 0.057630911469459534, 0.005551875568926334, 0.14227868616580963, 0.008094481192529202, 0.01130194216966629, 0.11732751131057739, 0.006997211836278439, 0.006659028120338917, 0.006448274478316307, 0.011213695630431175, 0.006030919495970011, 0.031235236674547195, 0.00951683521270752, 0.35859042406082153, 0.01104414276778698], [0.03606860712170601, 0.0547616146504879, 0.011846181005239487, 0.03632143884897232, 0.05586251616477966, 0.04982904717326164, 0.024723049253225327, 0.12087924778461456, 0.03509358689188957, 0.029528385028243065, 0.018770437687635422, 
0.10582613199949265, 0.0404721163213253, 0.012173672206699848, 0.04029155895113945, 0.04202067852020264, 0.047885846346616745, 0.027643434703350067, 0.11303474754095078, 0.025121942162513733, 0.05070705711841583, 0.02113872766494751], [0.04714804142713547, 0.14825789630413055, 0.038455963134765625, 0.017997322604060173, 0.01572147011756897, 0.018448293209075928, 0.009057155810296535, 0.06055455282330513, 0.005039814859628677, 0.11892379820346832, 0.024081174284219742, 0.02158132940530777, 0.180489182472229, 0.03269178792834282, 0.018154749646782875, 0.020891649648547173, 0.01746404357254505, 0.00838231761008501, 0.03287040442228317, 0.006444896571338177, 0.1396632343530655, 0.017680974677205086], [0.028126850724220276, 0.04685712978243828, 0.016690697520971298, 0.017372451722621918, 0.023033948615193367, 0.03990750014781952, 0.02003531903028488, 0.09896418452262878, 0.07587794959545135, 0.029666859656572342, 0.07485741376876831, 0.10549061000347137, 0.08130381256341934, 0.021104460582137108, 0.018975287675857544, 0.029298650100827217, 0.0294626597315073, 0.016541535034775734, 0.05717045068740845, 0.06908300518989563, 0.02851416915655136, 0.07166506350040436], [0.10132670402526855, 0.12098531424999237, 0.030726734548807144, 0.026439914479851723, 0.04664343222975731, 0.04129943996667862, 0.04369715228676796, 0.10992006957530975, 0.02697976864874363, 0.04544878751039505, 0.03741464391350746, 0.06460180878639221, 0.0449439100921154, 0.022144952788949013, 0.023380354046821594, 0.03253177925944328, 0.025371650233864784, 0.030620306730270386, 0.04402982071042061, 0.014833349734544754, 0.048728905618190765, 0.017931222915649414], [0.08654145151376724, 0.08481549471616745, 0.047335945069789886, 0.04119500517845154, 0.041500575840473175, 0.05134567990899086, 0.04318804666399956, 0.06973615288734436, 0.03950948268175125, 0.03087625280022621, 0.05886417254805565, 0.06430457532405853, 0.059975046664476395, 0.04165520519018173, 0.03157944977283478, 0.03222304582595825, 
0.02888043224811554, 0.023889362812042236, 0.022356726229190826, 0.027132995426654816, 0.02894551493227482, 0.04414935037493706], [0.08102351427078247, 0.05346228927373886, 0.028447218239307404, 0.03766617551445961, 0.04151386022567749, 0.0810883641242981, 0.03597186133265495, 0.05317872390151024, 0.12044152617454529, 0.05293460935354233, 0.04610137268900871, 0.049240000545978546, 0.08591867983341217, 0.02118711918592453, 0.02700861543416977, 0.015604491345584393, 0.024851765483617783, 0.009520153515040874, 0.010609528049826622, 0.06541385501623154, 0.015866583213210106, 0.04294965788722038], [0.06679050624370575, 0.04980505257844925, 0.015896275639533997, 0.005809496622532606, 0.038556016981601715, 0.04319359362125397, 0.03626292198896408, 0.034567076712846756, 0.18376223742961884, 0.00892417598515749, 0.033418308943510056, 0.1580398976802826, 0.019531268626451492, 0.029017021879553795, 0.005635041277855635, 0.038343384861946106, 0.036801524460315704, 0.028280148282647133, 0.024919893592596054, 0.09976336359977722, 0.005529611371457577, 0.03715319558978081], [0.08119688183069229, 0.0821763277053833, 0.037032779306173325, 0.08882981538772583, 0.06357292085886002, 0.05585578456521034, 0.05405542626976967, 0.029101261869072914, 0.0266670323908329, 0.03729088604450226, 0.021828265860676765, 0.036791931837797165, 0.06279198825359344, 0.05472546070814133, 0.07824839651584625, 0.02367464080452919, 0.047584690153598785, 0.03238552436232567, 0.02554032951593399, 0.022751618176698685, 0.013635481707751751, 0.024262577295303345], [0.19272376596927643, 0.04375810921192169, 0.01894485391676426, 0.007862354628741741, 0.04537355527281761, 0.11027953773736954, 0.03063512034714222, 0.04026242345571518, 0.034786615520715714, 0.00272891647182405, 0.005978408269584179, 0.09366046637296677, 0.004775923676788807, 0.03377087414264679, 0.007664947304874659, 0.031815215945243835, 0.1356848180294037, 0.0456153079867363, 0.0662994235754013, 0.022185150533914566, 0.002903624204918742, 
0.022290663793683052], [0.0430409274995327, 0.005311535205692053, 0.010453680530190468, 0.006784772500395775, 0.03176046162843704, 0.047226518392562866, 0.04194219037890434, 0.010530544444918633, 0.20678091049194336, 0.0013475676532834768, 0.024998297914862633, 0.1786046177148819, 0.0038883404340595007, 0.03477396443486214, 0.009301789104938507, 0.02932579815387726, 0.06020801514387131, 0.0545031763613224, 0.021125078201293945, 0.116778165102005, 0.0006257076165638864, 0.060687825083732605], [0.06960723549127579, 0.10426682233810425, 0.021356647834181786, 0.009547805413603783, 0.028978925198316574, 0.03916856646537781, 0.016975894570350647, 0.034990131855010986, 0.018878335133194923, 0.01725122146308422, 0.017146624624729156, 0.14444515109062195, 0.0491199754178524, 0.06399163603782654, 0.01514151506125927, 0.03682703897356987, 0.08339788019657135, 0.039896149188280106, 0.10409737378358841, 0.027218803763389587, 0.014066457748413086, 0.04362977668642998], [0.037746772170066833, 0.02000233344733715, 0.011966955848038197, 0.05394145846366882, 0.05072617158293724, 0.04679742082953453, 0.03353934362530708, 0.032502174377441406, 0.050988152623176575, 0.010480668395757675, 0.019025089219212532, 0.15650410950183868, 0.021856769919395447, 0.022756915539503098, 0.07020874321460724, 0.060545098036527634, 0.07118342816829681, 0.054828792810440063, 0.07968252152204514, 0.045301634818315506, 0.012624911032617092, 0.03679051250219345], [0.0647154152393341, 0.07776736468076706, 0.06785426288843155, 0.03384058177471161, 0.02158026024699211, 0.023581352084875107, 0.014831222593784332, 0.03940877690911293, 0.008860241621732712, 0.05518649145960808, 0.03763880953192711, 0.03232329711318016, 0.10775440186262131, 0.08439977467060089, 0.039929140359163284, 0.044615790247917175, 0.0360320508480072, 0.02307736687362194, 0.045884955674409866, 0.014724274165928364, 0.08052704483270645, 0.04546702280640602], [0.009800129570066929, 0.007991256192326546, 0.009206246584653854, 
0.02546280063688755, 0.013652831315994263, 0.030187198892235756, 0.02703673765063286, 0.01649327203631401, 0.1419735997915268, 0.012107587419450283, 0.05290776118636131, 0.09826144576072693, 0.050126709043979645, 0.02092691883444786, 0.04102814570069313, 0.025312410667538643, 0.04274998977780342, 0.036818891763687134, 0.03165612742304802, 0.19595184922218323, 0.007792679592967033, 0.10255534946918488], [0.053539156913757324, 0.05634365230798721, 0.036732468754053116, 0.033271681517362595, 0.03202962130308151, 0.029186580330133438, 0.03263748809695244, 0.05832458287477493, 0.02430790662765503, 0.025890182703733444, 0.04060649126768112, 0.04861336573958397, 0.03576578199863434, 0.04037085175514221, 0.04466729983687401, 0.06082136183977127, 0.04668070748448372, 0.06850950419902802, 0.08872684091329575, 0.03240286186337471, 0.06949308514595032, 0.041078515350818634], [0.038908157497644424, 0.017511768266558647, 0.04619733989238739, 0.05682792142033577, 0.02364298142492771, 0.03385940566658974, 0.0330347865819931, 0.015091885812580585, 0.03681737929582596, 0.009332135319709778, 0.049921538680791855, 0.05286111682653427, 0.027693606913089752, 0.07868092507123947, 0.07542794942855835, 0.048382360488176346, 0.059204570949077606, 0.05915558710694313, 0.030987447127699852, 0.06509623676538467, 0.016558142378926277, 0.12480664253234863], [0.03535780310630798, 0.008416769094765186, 0.026354892179369926, 0.02661968767642975, 0.017735740169882774, 0.04034646227955818, 0.02243841253221035, 0.012434474192559719, 0.1251523643732071, 0.004310836084187031, 0.04399941861629486, 0.04001125320792198, 0.016788918524980545, 0.0385587103664875, 0.034144580364227295, 0.02977616712450981, 0.0585738942027092, 0.037611205130815506, 0.021794870495796204, 0.2002483755350113, 0.007626155391335487, 0.15169896185398102], [0.059182338416576385, 0.018271278589963913, 0.019215619191527367, 0.00883577298372984, 0.02509528025984764, 0.023033970966935158, 0.027354901656508446, 0.009725574404001236, 
0.08406209200620651, 0.0033468722831457853, 0.027093034237623215, 0.10363126546144485, 0.01059055794030428, 0.05834180489182472, 0.014229729771614075, 0.07429710030555725, 0.07082720845937729, 0.08304888755083084, 0.05313066765666008, 0.13192090392112732, 0.0056325094774365425, 0.08913250267505646], [0.049410805106163025, 0.027461202815175056, 0.04818415269255638, 0.1108224093914032, 0.037078000605106354, 0.05896555259823799, 0.03109402395784855, 0.014614898711442947, 0.020402399823069572, 0.022103700786828995, 0.021809883415699005, 0.015825212001800537, 0.04455713927745819, 0.06544319540262222, 0.12248285859823227, 0.02922831103205681, 0.08411947637796402, 0.03910889849066734, 0.03276212885975838, 0.04304898902773857, 0.019320474937558174, 0.06215626001358032], [0.06369732320308685, 0.006629944313317537, 0.011825944297015667, 0.011467092670500278, 0.025744671002030373, 0.03870281204581261, 0.022187497466802597, 0.008457973599433899, 0.02935132011771202, 0.0009811780182644725, 0.006881246343255043, 0.06599222868680954, 0.0023651723749935627, 0.03566671535372734, 0.020263826474547386, 0.06962669640779495, 0.17411072552204132, 0.14773552119731903, 0.13087454438209534, 0.06027567386627197, 0.004404183942824602, 0.06275767832994461]], [[0.007266949862241745, 0.012731109745800495, 0.05191381648182869, 0.027562353760004044, 0.014622091315686703, 0.12121772766113281, 0.09499785304069519, 0.05631335452198982, 0.07198233902454376, 0.1630493700504303, 0.0076043568551540375, 0.022321391850709915, 0.043078698217868805, 0.08804306387901306, 0.033931199461221695, 0.003198589663952589, 0.06376229971647263, 0.02375561185181141, 0.026223843917250633, 0.041064273566007614, 0.020501231774687767, 0.004858414176851511], [0.018241334706544876, 0.046545062214136124, 0.03669791296124458, 0.02428160049021244, 0.007642359007149935, 0.04998438060283661, 0.02834172733128071, 0.22829952836036682, 0.010946253314614296, 0.0993567407131195, 0.032773956656455994, 0.012315166182816029, 
0.13205629587173462, 0.040476154536008835, 0.030267084017395973, 0.003605893114581704, 0.021558916196227074, 0.010243790224194527, 0.09772278368473053, 0.0073729935102164745, 0.053640007972717285, 0.0076300823129713535], [0.009099354036152363, 0.008844251744449139, 0.03907632455229759, 0.046745482832193375, 0.021951768547296524, 0.08101461827754974, 0.08957011997699738, 0.05910714343190193, 0.07005128264427185, 0.029580427333712578, 0.006620637606829405, 0.021530140191316605, 0.03104502707719803, 0.1722404658794403, 0.07324589043855667, 0.010637897998094559, 0.05214638635516167, 0.05479792505502701, 0.04371988773345947, 0.05690319091081619, 0.013339175842702389, 0.008732590824365616], [0.010215133428573608, 0.01928914338350296, 0.037743374705314636, 0.019929924979805946, 0.010239282622933388, 0.07947287708520889, 0.13639310002326965, 0.04460630565881729, 0.17576520144939423, 0.069477878510952, 0.01604381576180458, 0.045421577990055084, 0.02544976770877838, 0.04517574608325958, 0.02055482380092144, 0.0034601357765495777, 0.03950925171375275, 0.05038100481033325, 0.03231211379170418, 0.09261223673820496, 0.014853115193545818, 0.011094147339463234], [0.011990380473434925, 0.023680662736296654, 0.05277586728334427, 0.0360848493874073, 0.013078057207167149, 0.056035853922367096, 0.053828466683626175, 0.09883337467908859, 0.068826824426651, 0.16755668818950653, 0.015089126303792, 0.019936544820666313, 0.06959807872772217, 0.06578469276428223, 0.03977712243795395, 0.005778627470135689, 0.040703028440475464, 0.017105799168348312, 0.0546741746366024, 0.044212665408849716, 0.03768644481897354, 0.006962575018405914], [0.02680041640996933, 0.016821693629026413, 0.04354151710867882, 0.030096804723143578, 0.03489441052079201, 0.054665859788656235, 0.04557936266064644, 0.04142129421234131, 0.09282960742712021, 0.07694171369075775, 0.07140591740608215, 0.08668340742588043, 0.09353430569171906, 0.06463254243135452, 0.024696387350559235, 0.011696015484631062, 0.022885914891958237, 
0.016408126801252365, 0.016291379928588867, 0.06131542846560478, 0.023251445963978767, 0.043606411665678024], [0.008690115995705128, 0.008640751242637634, 0.04293997585773468, 0.06285425275564194, 0.013992903754115105, 0.08324731141328812, 0.140146404504776, 0.018832780420780182, 0.12977367639541626, 0.06713682413101196, 0.01713361218571663, 0.024615520611405373, 0.03694520518183708, 0.07864362001419067, 0.0716995820403099, 0.006844721268862486, 0.0478079654276371, 0.04556909576058388, 0.010169417597353458, 0.06286819279193878, 0.0148119842633605, 0.006636134348809719], [0.011347784660756588, 0.03346437215805054, 0.014471426606178284, 0.019705643877387047, 0.01536477543413639, 0.03696595877408981, 0.009568893350660801, 0.18974174559116364, 0.020108861848711967, 0.29462799429893494, 0.027195550501346588, 0.030459292232990265, 0.15494304895401, 0.011706472374498844, 0.01919226162135601, 0.004395109135657549, 0.013846169225871563, 0.0017505526775494218, 0.036878567188978195, 0.0073361825197935104, 0.04330334812402725, 0.003625961486250162], [0.004035881254822016, 0.011514256708323956, 0.059698570519685745, 0.010587373748421669, 0.0048801349475979805, 0.09758001565933228, 0.102511465549469, 0.02752249501645565, 0.0658661499619484, 0.1233307421207428, 0.023214440792798996, 0.032271549105644226, 0.11339512467384338, 0.1264764964580536, 0.016935037449002266, 0.0013886764645576477, 0.044199660420417786, 0.0312604159116745, 0.020389556884765625, 0.04934234917163849, 0.0225035659968853, 0.011096091009676456], [0.013141753152012825, 0.04490010812878609, 0.042479585856199265, 0.02812669798731804, 0.04332936182618141, 0.07060130685567856, 0.048990678042173386, 0.05175023525953293, 0.03312494233250618, 0.07866889238357544, 0.034052491188049316, 0.0392751544713974, 0.156971737742424, 0.06847034394741058, 0.03332367539405823, 0.03428046032786369, 0.05053016170859337, 0.027875645086169243, 0.029703840613365173, 0.023545416072010994, 0.030888631939888, 0.015968898311257362], 
[0.0054164971224963665, 0.01367732509970665, 0.025197802111506462, 0.013269302435219288, 0.013308628462255001, 0.061646800488233566, 0.027359165251255035, 0.10060633718967438, 0.017240960150957108, 0.07927493005990982, 0.0049248794093728065, 0.020940350368618965, 0.06833093613386154, 0.12947358191013336, 0.027622034773230553, 0.008849055506289005, 0.07951202988624573, 0.03701699525117874, 0.19321434199810028, 0.023528145626187325, 0.042276978492736816, 0.007312919478863478], [0.002272171201184392, 0.0031454842537641525, 0.012916218489408493, 0.015063513070344925, 0.003545223269611597, 0.11912892758846283, 0.14422184228897095, 0.007130097132176161, 0.11008089780807495, 0.011495303362607956, 0.0035115797072649, 0.02980167418718338, 0.015061721205711365, 0.07513002306222916, 0.027068115770816803, 0.0026976903900504112, 0.12427807599306107, 0.1460428386926651, 0.02374621480703354, 0.111023910343647, 0.003040226409211755, 0.00959830079227686], [0.0028257027734071016, 0.031793780624866486, 0.020947815850377083, 0.011421271599829197, 0.001999378902837634, 0.044487424194812775, 0.032830510288476944, 0.08006040006875992, 0.007271855603903532, 0.02938934601843357, 0.005034115631133318, 0.011293111369013786, 0.06770940870046616, 0.08094485849142075, 0.028714187443256378, 0.005585332866758108, 0.06323204189538956, 0.06318788975477219, 0.33908629417419434, 0.014646654948592186, 0.046764809638261795, 0.010773789137601852], [0.0035592596977949142, 0.007138458546251059, 0.036477286368608475, 0.020560480654239655, 0.0051706284284591675, 0.06244320422410965, 0.08489089459180832, 0.022272996604442596, 0.03768753260374069, 0.011015977710485458, 0.0022334642708301544, 0.013029919937252998, 0.011488806456327438, 0.19580644369125366, 0.04874887317419052, 0.009478794410824776, 0.0903715044260025, 0.15499889850616455, 0.08413492888212204, 0.0721219852566719, 0.013106046244502068, 0.013263711705803871], [0.005906565114855766, 0.021136987954378128, 0.03359012305736542, 0.00991935096681118, 
0.004599629435688257, 0.06669607013463974, 0.1391472965478897, 0.030149061232805252, 0.08633936196565628, 0.03857577592134476, 0.00689998734742403, 0.025125345215201378, 0.01510166097432375, 0.05160639062523842, 0.015063255093991756, 0.004696365911513567, 0.07299773395061493, 0.16261856257915497, 0.0809364840388298, 0.09492853283882141, 0.020870117470622063, 0.013095279224216938], [0.009869659319519997, 0.014454374089837074, 0.03673262521624565, 0.03326136991381645, 0.012194093316793442, 0.038741789758205414, 0.10964224487543106, 0.0164839718490839, 0.05523770675063133, 0.022463304921984673, 0.005033548455685377, 0.008824480697512627, 0.013274122960865498, 0.0853416845202446, 0.047100815922021866, 0.01225659716874361, 0.10918930172920227, 0.19309164583683014, 0.047590289264917374, 0.09395445138216019, 0.020802516490221024, 0.01445937529206276], [0.01697319746017456, 0.02131982520222664, 0.05009908601641655, 0.015433588065207005, 0.012650533579289913, 0.04072251170873642, 0.03163732960820198, 0.030130885541439056, 0.029370278120040894, 0.03477398678660393, 0.021071545779705048, 0.04326756298542023, 0.034704774618148804, 0.08773626387119293, 0.020290425047278404, 0.02272709272801876, 0.07249622792005539, 0.08096432685852051, 0.11665170639753342, 0.07303580641746521, 0.05140630528330803, 0.09253671765327454], [0.005602904129773378, 0.00965813361108303, 0.06359026581048965, 0.025153525173664093, 0.005443029571324587, 0.04806499928236008, 0.07302088290452957, 0.014386892318725586, 0.028182385489344597, 0.017943058162927628, 0.0035220428835600615, 0.008022730238735676, 0.0072914473712444305, 0.12223027646541595, 0.04716009646654129, 0.020060796290636063, 0.12996791303157806, 0.19108138978481293, 0.0749240592122078, 0.05845480039715767, 0.03056671842932701, 0.015671683475375175], [0.008981874212622643, 0.03791484236717224, 0.02628817781805992, 0.008634679950773716, 0.00629253126680851, 0.02927793562412262, 0.0112611697986722, 0.11123957484960556, 0.004994387738406658, 
0.05290382727980614, 0.005219670012593269, 0.012773864902555943, 0.03427166864275932, 0.038137126713991165, 0.015000863932073116, 0.011906987987458706, 0.06332594156265259, 0.028895776718854904, 0.3901343047618866, 0.011764558963477612, 0.07593510299921036, 0.014845142140984535], [0.002838796703144908, 0.01672438532114029, 0.055520061403512955, 0.006087353453040123, 0.0028068176470696926, 0.0660797730088234, 0.07995760440826416, 0.0171950813382864, 0.021052315831184387, 0.060294877737760544, 0.009137395769357681, 0.011432692408561707, 0.05533258616924286, 0.140416219830513, 0.013725985772907734, 0.002601266372948885, 0.09750998765230179, 0.14260563254356384, 0.07221493870019913, 0.054919224232435226, 0.050837766379117966, 0.02070929855108261], [0.024305664002895355, 0.09470607340335846, 0.05409576743841171, 0.013890894129872322, 0.019647464156150818, 0.017451247200369835, 0.01688220538198948, 0.12041503936052322, 0.0037827410269528627, 0.03592458367347717, 0.012781602330505848, 0.004558939952403307, 0.02690395712852478, 0.04421614482998848, 0.01950492523610592, 0.055547211319208145, 0.037315018475055695, 0.044098060578107834, 0.21539756655693054, 0.007196039892733097, 0.11402031034231186, 0.01735851913690567], [0.002116927644237876, 0.010639740154147148, 0.013919240795075893, 0.007209240924566984, 0.005224335473030806, 0.05371363088488579, 0.03283926844596863, 0.018669085577130318, 0.01347762905061245, 0.019594762474298477, 0.0013006216613575816, 0.004869649652391672, 0.012576685287058353, 0.07993829995393753, 0.01807006075978279, 0.005402734968811274, 0.16992565989494324, 0.23811668157577515, 0.17348003387451172, 0.06899415701627731, 0.03714187070727348, 0.012779729440808296]], [[0.001957773230969906, 0.055488429963588715, 0.023733744397759438, 0.007512867916375399, 0.003956327214837074, 0.07040645182132721, 0.07103131711483002, 0.04369308426976204, 0.04877500236034393, 0.09322381764650345, 0.014207347296178341, 0.04688912630081177, 0.06133584678173065, 
0.0575677789747715, 0.010673683136701584, 0.017114514485001564, 0.09083390980958939, 0.07420604676008224, 0.09934578835964203, 0.04624452441930771, 0.0319145992398262, 0.029888030141592026], [0.00232465798035264, 0.09639836847782135, 0.012354028411209583, 0.0009460894507355988, 0.0031720506958663464, 0.005910860374569893, 0.0056215436197817326, 0.1273621767759323, 0.001340253627859056, 0.15669001638889313, 0.01606275700032711, 0.01056234072893858, 0.07102944701910019, 0.01222213078290224, 0.0013951148139312863, 0.0056282165460288525, 0.007180908694863319, 0.007288788445293903, 0.12698127329349518, 0.0017630663933232427, 0.31680724024772644, 0.01095869205892086], [0.0038158604875206947, 0.02938327193260193, 0.024087782949209213, 0.009937094524502754, 0.006224262528121471, 0.08117347210645676, 0.06842941045761108, 0.029429830610752106, 0.09496911615133286, 0.03996293991804123, 0.023008238524198532, 0.08751270920038223, 0.0766087993979454, 0.06352207064628601, 0.015376489609479904, 0.012363476678729057, 0.10584435611963272, 0.04729270935058594, 0.06566586345434189, 0.0590643584728241, 0.009915334172546864, 0.04641256481409073], [0.022589942440390587, 0.07578600198030472, 0.020329689607024193, 0.016939105466008186, 0.013084286823868752, 0.1255374252796173, 0.09944210946559906, 0.07619334757328033, 0.06170522794127464, 0.05412255972623825, 0.02630753070116043, 0.06107950955629349, 0.05848117545247078, 0.019191375002264977, 0.01791413500905037, 0.006260544527322054, 0.08335952460765839, 0.038698162883520126, 0.05237811058759689, 0.031334202736616135, 0.014611327089369297, 0.024654684588313103], [0.003463300410658121, 0.055035416036844254, 0.035390354692935944, 0.011898619122803211, 0.004847947973757982, 0.06277188658714294, 0.06872488558292389, 0.037067960947752, 0.049756214022636414, 0.07760673761367798, 0.03675910830497742, 0.06344505399465561, 0.08622098714113235, 0.05887041985988617, 0.016439586877822876, 0.013229087926447392, 0.06760352849960327, 
0.06514027714729309, 0.06330642849206924, 0.052740179002285004, 0.028591535985469818, 0.04109053313732147], [0.007280835881829262, 0.027200423181056976, 0.0673586055636406, 0.025183087214827538, 0.011305912397801876, 0.08656909316778183, 0.07835501432418823, 0.0405336394906044, 0.05408874526619911, 0.058068107813596725, 0.017148146405816078, 0.0877164751291275, 0.05032742768526077, 0.13523046672344208, 0.026664961129426956, 0.012434713542461395, 0.0786842405796051, 0.03836725652217865, 0.042092423886060715, 0.02700497955083847, 0.012458638288080692, 0.015926791355013847], [0.0028263141866773367, 0.019424445927143097, 0.019076235592365265, 0.02126159891486168, 0.005033263936638832, 0.09207791090011597, 0.06978829205036163, 0.01698669232428074, 0.0666576698422432, 0.13625261187553406, 0.022216355428099632, 0.06442337483167648, 0.05202491581439972, 0.057641465216875076, 0.023061111569404602, 0.016600701957941055, 0.1030338779091835, 0.060804322361946106, 0.030767230316996574, 0.06271284818649292, 0.02970067225396633, 0.02762807533144951], [0.002253669546917081, 0.08203523606061935, 0.006911745760589838, 0.004005719441920519, 0.004328661132603884, 0.005588783882558346, 0.005018687807023525, 0.21461912989616394, 0.001265685772523284, 0.15273691713809967, 0.019565951079130173, 0.007739846594631672, 0.07370114326477051, 0.007186871021986008, 0.005492171738296747, 0.003915116190910339, 0.007628629449754953, 0.004184657242149115, 0.12834087014198303, 0.0013621867401525378, 0.25393712520599365, 0.00818115845322609], [0.0019764623139053583, 0.028471114113926888, 0.025772331282496452, 0.01468713115900755, 0.00236724317073822, 0.05086766555905342, 0.10388346016407013, 0.019906871020793915, 0.06281933188438416, 0.10633212327957153, 0.025438275188207626, 0.06393329799175262, 0.11913496255874634, 0.060638297349214554, 0.02128603309392929, 0.010915697552263737, 0.06484756618738174, 0.0616314634680748, 0.04474065452814102, 0.05950057506561279, 0.018424052745103836, 
0.03242557495832443], [0.015469353646039963, 0.12947973608970642, 0.056526344269514084, 0.0065461075864732265, 0.01417786255478859, 0.016019422560930252, 0.018509458750486374, 0.19708774983882904, 0.006656356621533632, 0.05154288187623024, 0.02333013340830803, 0.034431781619787216, 0.08997256308794022, 0.03381979465484619, 0.0098769161850214, 0.013605189509689808, 0.014965659938752651, 0.014824504032731056, 0.1028316542506218, 0.006249108351767063, 0.11432082951068878, 0.029756629839539528], [0.0020582920406013727, 0.11217635869979858, 0.03492133319377899, 0.010419728234410286, 0.00402058893814683, 0.02880335971713066, 0.04075146093964577, 0.047496818006038666, 0.021396579220891, 0.10417395085096359, 0.008080328814685345, 0.016981953755021095, 0.09140977263450623, 0.07288169860839844, 0.016600560396909714, 0.01971675641834736, 0.04946424067020416, 0.05807339400053024, 0.11319208890199661, 0.03003731369972229, 0.09063704311847687, 0.026706332340836525], [0.002274894854053855, 0.044710297137498856, 0.03531098738312721, 0.012617352418601513, 0.0029363464564085007, 0.05091814696788788, 0.050035882741212845, 0.02746276929974556, 0.03510041907429695, 0.06292266398668289, 0.014960367232561111, 0.041417766362428665, 0.06316164135932922, 0.08034231513738632, 0.020491547882556915, 0.034130875021219254, 0.10500172525644302, 0.08076819777488708, 0.1097613275051117, 0.05455511808395386, 0.030347945168614388, 0.04077130928635597], [0.0016180349048227072, 0.21834465861320496, 0.023262836039066315, 0.0017190317157655954, 0.002977343276143074, 0.009938796050846577, 0.0057105920277535915, 0.21718820929527283, 0.0013126196572557092, 0.08034933358430862, 0.002954457886517048, 0.0025955280289053917, 0.025914687663316727, 0.016784073784947395, 0.0021026055328547955, 0.005984305404126644, 0.00832369551062584, 0.006555507425218821, 0.16879232227802277, 0.0014957885723561049, 0.19105254113674164, 0.00502307154238224], [0.002903844229876995, 0.04075397178530693, 0.02693144418299198, 
0.012317189015448093, 0.0051330639980733395, 0.08885850012302399, 0.09398185461759567, 0.020331839099526405, 0.11438796669244766, 0.02188114821910858, 0.00999488215893507, 0.04338722303509712, 0.04195662587881088, 0.05740936100482941, 0.018190139904618263, 0.01745452731847763, 0.11661466956138611, 0.08147134631872177, 0.05944085493683815, 0.08182515949010849, 0.006123311351984739, 0.03865114971995354], [0.02113325148820877, 0.10406018048524857, 0.02133365534245968, 0.021633068099617958, 0.016148103401064873, 0.11847901344299316, 0.10038302838802338, 0.088889941573143, 0.054809026420116425, 0.046731848269701004, 0.017170408740639687, 0.03451887145638466, 0.045169398188591, 0.01875646598637104, 0.020814307034015656, 0.008302802219986916, 0.08172532171010971, 0.04983652010560036, 0.06043621525168419, 0.030831674113869667, 0.017823712900280952, 0.02101326547563076], [0.011360017582774162, 0.06601712852716446, 0.039029356092214584, 0.020857594907283783, 0.01124467235058546, 0.057934582233428955, 0.05099351704120636, 0.052080683410167694, 0.03535444661974907, 0.02546481043100357, 0.03458718955516815, 0.053167883306741714, 0.05219521000981331, 0.04987730458378792, 0.03021244890987873, 0.030075134709477425, 0.0799153670668602, 0.07332044839859009, 0.10987269878387451, 0.04162176698446274, 0.027176206931471825, 0.047641653567552567], [0.006826853845268488, 0.0320376418530941, 0.0774613618850708, 0.03078627772629261, 0.015229761600494385, 0.09700101613998413, 0.0993078425526619, 0.02670104429125786, 0.07432727515697479, 0.029583396390080452, 0.00613426836207509, 0.03208228945732117, 0.023056011646986008, 0.14108870923519135, 0.029991311952471733, 0.017996149137616158, 0.08894068747758865, 0.07415003329515457, 0.03321834281086922, 0.04228327050805092, 0.008253117091953754, 0.013543333858251572], [0.002949063666164875, 0.02306588552892208, 0.024576466530561447, 0.025137748569250107, 0.006632138509303331, 0.10870321840047836, 0.0869256854057312, 0.014423101209104061, 
0.05916192755103111, 0.10138815641403198, 0.009489643387496471, 0.02380802109837532, 0.030117854475975037, 0.06225672364234924, 0.025632943958044052, 0.022238710895180702, 0.1197487935423851, 0.10640932619571686, 0.028098244220018387, 0.06513984501361847, 0.031993377953767776, 0.022103093564510345], [0.0023997139651328325, 0.11521780490875244, 0.013099939562380314, 0.009632756933569908, 0.007691742852330208, 0.02159060537815094, 0.01634472794830799, 0.17550967633724213, 0.005353439599275589, 0.10516219586133957, 0.006554687395691872, 0.005744965746998787, 0.033596623688936234, 0.016009820625185966, 0.012012647464871407, 0.01018799189478159, 0.03198371082544327, 0.021199338138103485, 0.18704822659492493, 0.005552677437663078, 0.1881970912218094, 0.009909668937325478], [0.0026086573489010334, 0.03542016074061394, 0.02890625037252903, 0.02055787295103073, 0.0037725402507930994, 0.07238498330116272, 0.1191893145442009, 0.020189005881547928, 0.06747402995824814, 0.06855811178684235, 0.01390320248901844, 0.03188289701938629, 0.055794283747673035, 0.053488846868276596, 0.026424240320920944, 0.018275681883096695, 0.09138067811727524, 0.09706666320562363, 0.047868598252534866, 0.07223081588745117, 0.019900813698768616, 0.032722312957048416], [0.01726902276277542, 0.15656687319278717, 0.029345329850912094, 0.004466751590371132, 0.022135721519589424, 0.011270088143646717, 0.007773221004754305, 0.22959281504154205, 0.0019838400185108185, 0.05557990446686745, 0.013059372082352638, 0.007097096648067236, 0.03828228637576103, 0.013663673773407936, 0.005314140114933252, 0.014087305404245853, 0.00977078452706337, 0.009345733560621738, 0.09776637703180313, 0.0020129617769271135, 0.23937658965587616, 0.01424004789441824], [0.0023733433336019516, 0.03651163727045059, 0.03232045844197273, 0.017482846975326538, 0.005345535930246115, 0.05812864005565643, 0.08130304515361786, 0.01250810269266367, 0.07986711710691452, 0.024518992751836777, 0.004111793357878923, 0.017899762839078903, 
0.027477605268359184, 0.08819495141506195, 0.022648585960268974, 0.03815227001905441, 0.09941788762807846, 0.1426815241575241, 0.048866353929042816, 0.11747154593467712, 0.013277646154165268, 0.02944030798971653]], [[0.04814542084932327, 0.0235433392226696, 0.03929760307073593, 0.13312757015228271, 0.023054445162415504, 0.02502606064081192, 0.026820221915841103, 0.014690535143017769, 0.009562644176185131, 0.03764503076672554, 0.06883012503385544, 0.04658970609307289, 0.14627671241760254, 0.05804460123181343, 0.14272668957710266, 0.013782254420220852, 0.03345862403512001, 0.017593299970030785, 0.017139775678515434, 0.009951150976121426, 0.026734277606010437, 0.037959884852170944], [0.024521319195628166, 0.016726940870285034, 0.05163060873746872, 0.1865376979112625, 0.049912743270397186, 0.011900234967470169, 0.01413174718618393, 0.01765483245253563, 0.020184604451060295, 0.015381704084575176, 0.04685842618346214, 0.05325474590063095, 0.06327205151319504, 0.10181832313537598, 0.20459111034870148, 0.01983576826751232, 0.01245963852852583, 0.008089669048786163, 0.010179870761930943, 0.014091014862060547, 0.009846285916864872, 0.047120630741119385], [0.05092877149581909, 0.029325682669878006, 0.03089236095547676, 0.08585990220308304, 0.05671418458223343, 0.038752321153879166, 0.018434470519423485, 0.02728966437280178, 0.015964239835739136, 0.08698961138725281, 0.037439316511154175, 0.0524132065474987, 0.09402114152908325, 0.05464494973421097, 0.09977869689464569, 0.038782622665166855, 0.0454055555164814, 0.01550406962633133, 0.02500520646572113, 0.011947539635002613, 0.061879634857177734, 0.022026846185326576], [0.009227038361132145, 0.029189862310886383, 0.016640374436974525, 0.028737850487232208, 0.006502541247755289, 0.03731367364525795, 0.019430261105298996, 0.012369287200272083, 0.006272006779909134, 0.2593556046485901, 0.036496374756097794, 0.04412488266825676, 0.10918731987476349, 0.027179714292287827, 0.04573418200016022, 0.015530945733189583, 
0.05944133177399635, 0.033575065433979034, 0.023777393624186516, 0.009962772019207478, 0.13515204191207886, 0.034799471497535706], [0.007148123346269131, 0.034961897879838943, 0.011874093674123287, 0.039077278226614, 0.007110218051820993, 0.044336576014757156, 0.020542610436677933, 0.03168226405978203, 0.009679140523076057, 0.24799804389476776, 0.01872102916240692, 0.028831878677010536, 0.0630958154797554, 0.018652010709047318, 0.05018771067261696, 0.0174139104783535, 0.06850679218769073, 0.03034956008195877, 0.06820130348205566, 0.012518259696662426, 0.1559940129518509, 0.013117478229105473], [0.018098579719662666, 0.052474331110715866, 0.07104984670877457, 0.005651059094816446, 0.011925490573048592, 0.036652013659477234, 0.027150623500347137, 0.04856065288186073, 0.06963232904672623, 0.08805830776691437, 0.06635555624961853, 0.05876685306429863, 0.11251379549503326, 0.09084505587816238, 0.0055861142463982105, 0.009301356971263885, 0.029225841164588928, 0.020090321078896523, 0.04016423225402832, 0.043430145829916, 0.054569389671087265, 0.0398981049656868], [0.05681789293885231, 0.014437119476497173, 0.012602627277374268, 0.12677547335624695, 0.029248682782053947, 0.06424593925476074, 0.009224746376276016, 0.02638535387814045, 0.009094661101698875, 0.04100153222680092, 0.032936081290245056, 0.04518149420619011, 0.10728272795677185, 0.0236146692186594, 0.14011594653129578, 0.05441704019904137, 0.09307324886322021, 0.01096323225647211, 0.05113602429628372, 0.009792444296181202, 0.02645513042807579, 0.015198064036667347], [0.012298496440052986, 0.06474064290523529, 0.09505513310432434, 0.06281497329473495, 0.02330544963479042, 0.03128623589873314, 0.03858209401369095, 0.039270900189876556, 0.040423434227705, 0.06021207198500633, 0.05735749006271362, 0.049635499715805054, 0.058537546545267105, 0.08675159513950348, 0.06535698473453522, 0.017179766669869423, 0.01935955137014389, 0.025075167417526245, 0.030316537246108055, 0.03697832301259041, 0.03107651136815548, 
0.05438559129834175], [0.016480565071105957, 0.0490269660949707, 0.020237959921360016, 0.04016956314444542, 0.024374645203351974, 0.030809713527560234, 0.009805201552808285, 0.07015957683324814, 0.04101163521409035, 0.15101760625839233, 0.01848575659096241, 0.061534538865089417, 0.2627995014190674, 0.040551405400037766, 0.04268648102879524, 0.007868240587413311, 0.019583672285079956, 0.004924133885651827, 0.024789419025182724, 0.020301373675465584, 0.03774682432413101, 0.005635220091789961], [0.005721642170101404, 0.01781207136809826, 0.049327604472637177, 0.01538496371358633, 0.019593168050050735, 0.0348803736269474, 0.031294722110033035, 0.0358741469681263, 0.03644229844212532, 0.23584675788879395, 0.03533196076750755, 0.053307563066482544, 0.1880136877298355, 0.09215197712182999, 0.017435450106859207, 0.005983361043035984, 0.02034628763794899, 0.013075203634798527, 0.006077572237700224, 0.010426550172269344, 0.06362464278936386, 0.012048129923641682], [0.038279540836811066, 0.058084942400455475, 0.037075694650411606, 0.10050114244222641, 0.021294360980391502, 0.024242058396339417, 0.01803112030029297, 0.06561661511659622, 0.04605479538440704, 0.057085346430540085, 0.05101761594414711, 0.07367241382598877, 0.11841046065092087, 0.05759407579898834, 0.08221444487571716, 0.007281213067471981, 0.015033802948892117, 0.008516143076121807, 0.0276649072766304, 0.027837203815579414, 0.030831458047032356, 0.033660635352134705], [0.017262356355786324, 0.01659979857504368, 0.06617919355630875, 0.06433649361133575, 0.040722623467445374, 0.033912431448698044, 0.029896005988121033, 0.03851043060421944, 0.05051867663860321, 0.08800699561834335, 0.02656312845647335, 0.05070991814136505, 0.18698614835739136, 0.1232832819223404, 0.06103649362921715, 0.013744629919528961, 0.024516895413398743, 0.010224423371255398, 0.008026562631130219, 0.01507602073252201, 0.024282054975628853, 0.009605360217392445], [0.007369739469140768, 0.03152744099497795, 0.06189883500337601, 
0.0607905387878418, 0.010594051331281662, 0.014868971891701221, 0.013590267859399319, 0.013377979397773743, 0.014324222691357136, 0.1638806164264679, 0.015258681029081345, 0.040107496082782745, 0.1290730983018875, 0.21116289496421814, 0.08229915797710419, 0.007785267196595669, 0.01834898442029953, 0.009466547518968582, 0.008231902495026588, 0.009285347536206245, 0.05181707814335823, 0.02494080550968647], [0.03355416655540466, 0.021313633769750595, 0.018122054636478424, 0.05240265652537346, 0.035728033632040024, 0.03872630372643471, 0.016858892515301704, 0.020799705758690834, 0.008193922229111195, 0.22390878200531006, 0.013094012625515461, 0.02703443355858326, 0.0924694761633873, 0.03747835382819176, 0.06834644079208374, 0.032768357545137405, 0.0661805123090744, 0.018936367705464363, 0.029455924406647682, 0.007492462173104286, 0.12823699414730072, 0.008898518048226833], [0.007765164133161306, 0.034126974642276764, 0.02120954543352127, 0.034163013100624084, 0.00486838398501277, 0.03361139073967934, 0.015826819464564323, 0.008498107083141804, 0.005192146636545658, 0.23003418743610382, 0.01875992864370346, 0.024132562801241875, 0.05526892468333244, 0.03746560588479042, 0.06200714781880379, 0.016566678881645203, 0.08485191315412521, 0.04471652954816818, 0.03497626259922981, 0.0127050606533885, 0.16764187812805176, 0.04561174288392067], [0.011242120526731014, 0.03568059951066971, 0.013787358067929745, 0.038449667394161224, 0.014841611497104168, 0.045756492763757706, 0.04135579988360405, 0.042312365025281906, 0.0258426982909441, 0.13309979438781738, 0.011124342679977417, 0.026212060824036598, 0.025163734331727028, 0.017257902771234512, 0.0518103688955307, 0.024434369057416916, 0.08753560483455658, 0.07810670137405396, 0.08245831727981567, 0.03484170511364937, 0.14401112496852875, 0.014675226993858814], [0.024181034415960312, 0.053338296711444855, 0.07200158387422562, 0.004173616878688335, 0.007163842208683491, 0.0355025976896286, 0.031104451045393944, 0.02011123299598694, 
0.027684833854436874, 0.05789497494697571, 0.03635613992810249, 0.021195627748966217, 0.045165590941905975, 0.09758288413286209, 0.004931774456053972, 0.01824246160686016, 0.07615260779857635, 0.06323030591011047, 0.08041912317276001, 0.04708123579621315, 0.08873200416564941, 0.0877537652850151], [0.0730217844247818, 0.011404369957745075, 0.015719274058938026, 0.07160250842571259, 0.026330914348363876, 0.05972915515303612, 0.01164189912378788, 0.0149226738139987, 0.005121903028339148, 0.028076879680156708, 0.017851192504167557, 0.020969117060303688, 0.0510350838303566, 0.03139029070734978, 0.09178725630044937, 0.09706564992666245, 0.19129018485546112, 0.027444779872894287, 0.0880543515086174, 0.009386960417032242, 0.03633889928460121, 0.019814901053905487], [0.0196722149848938, 0.0691288486123085, 0.10070355981588364, 0.030470598489046097, 0.009610840119421482, 0.01844642497599125, 0.026756349951028824, 0.010619563981890678, 0.010799036361277103, 0.030623985454440117, 0.025691913440823555, 0.01172888558357954, 0.024878541007637978, 0.10841919481754303, 0.046065907925367355, 0.0245079156011343, 0.0452117882668972, 0.06608577072620392, 0.07935179024934769, 0.03993479162454605, 0.06375554949045181, 0.13753652572631836], [0.025408199056982994, 0.0882769227027893, 0.03724326565861702, 0.05363190546631813, 0.029772087931632996, 0.035285577178001404, 0.008537276647984982, 0.04306969791650772, 0.023199157789349556, 0.09872470051050186, 0.011419007554650307, 0.020779166370630264, 0.13946786522865295, 0.07627258449792862, 0.062415711581707, 0.013440662994980812, 0.04688044637441635, 0.009705026634037495, 0.06395464390516281, 0.027302581816911697, 0.07146095484495163, 0.013752507977187634], [0.03161349147558212, 0.031341906636953354, 0.06443902850151062, 0.033685654401779175, 0.01806625910103321, 0.042118676006793976, 0.085733562707901, 0.026770183816552162, 0.04341932013630867, 0.02319105714559555, 0.0712839663028717, 0.04095789045095444, 0.01816723495721817, 
0.05699436739087105, 0.027570076286792755, 0.02038351260125637, 0.038949429988861084, 0.06952783465385437, 0.025413263589143753, 0.042021848261356354, 0.021429112181067467, 0.16692234575748444], [0.0849108099937439, 0.07403652369976044, 0.028297509998083115, 0.07178711891174316, 0.05055185407400131, 0.028992407023906708, 0.023974107578396797, 0.06887609511613846, 0.03379454463720322, 0.04116339236497879, 0.028555110096931458, 0.02642284333705902, 0.0670822262763977, 0.03959667310118675, 0.060905568301677704, 0.01934526301920414, 0.03370669111609459, 0.01971176452934742, 0.06793993711471558, 0.03652738779783249, 0.06116582825779915, 0.032656487077474594]], [[0.01879792846739292, 0.0591755285859108, 0.011105691082775593, 0.012578259222209454, 0.017734745517373085, 0.02234712429344654, 0.012142137624323368, 0.04960516467690468, 0.07758301496505737, 0.11117640882730484, 0.0292898491024971, 0.048682715743780136, 0.1907287836074829, 0.01933376118540764, 0.012844042852520943, 0.018766000866889954, 0.019527360796928406, 0.010535894893109798, 0.022603973746299744, 0.082065150141716, 0.10783486068248749, 0.045541610568761826], [0.0015171754639595747, 0.03292021527886391, 0.0031758854165673256, 0.0032419110648334026, 0.00475349510088563, 0.00862667616456747, 0.01144501380622387, 0.45119673013687134, 0.006636840756982565, 0.06966841220855713, 0.014543633908033371, 0.01266463939100504, 0.09211960434913635, 0.001490941271185875, 0.00203575287014246, 0.0005101416609250009, 0.0032856969628483057, 0.003704257309436798, 0.04297925904393196, 0.005585796199738979, 0.22673968970775604, 0.0011581657454371452], [0.04301159456372261, 0.041505709290504456, 0.016186529770493507, 0.01778009533882141, 0.016016656532883644, 0.036850351840257645, 0.030089277774095535, 0.02390442229807377, 0.10588524490594864, 0.06780990958213806, 0.029509691521525383, 0.09396816790103912, 0.05924072116613388, 0.03873787820339203, 0.02460942044854164, 0.017617687582969666, 0.03287019953131676, 
0.0373816192150116, 0.023311879485845566, 0.08849693834781647, 0.03461994603276253, 0.12059614807367325], [0.012412721291184425, 0.09554535895586014, 0.01367129199206829, 0.004308458883315325, 0.017013175413012505, 0.025357475504279137, 0.030126679688692093, 0.04616127535700798, 0.018529504537582397, 0.16500549018383026, 0.030421985313296318, 0.016669955104589462, 0.14484618604183197, 0.011576713994145393, 0.005226465407758951, 0.011336552910506725, 0.023310929536819458, 0.03378476947546005, 0.018591105937957764, 0.022579355165362358, 0.22695745527744293, 0.026567140594124794], [0.011928161606192589, 0.05600534752011299, 0.020415673032402992, 0.019434893503785133, 0.019194092601537704, 0.03154675289988518, 0.03910636901855469, 0.05057870224118233, 0.04906022176146507, 0.15348860621452332, 0.036406733095645905, 0.0495716892182827, 0.08933232724666595, 0.01646306924521923, 0.023350903764367104, 0.02884947508573532, 0.022035105153918266, 0.02599371410906315, 0.029330097138881683, 0.056702692061662674, 0.13693152368068695, 0.03427382558584213], [0.015611247159540653, 0.032247599214315414, 0.013525810092687607, 0.04904758557677269, 0.028597384691238403, 0.05372585728764534, 0.03011454828083515, 0.02654164656996727, 0.14427706599235535, 0.08190757036209106, 0.04169914126396179, 0.11362159997224808, 0.1169595792889595, 0.015585537068545818, 0.044634342193603516, 0.021820876747369766, 0.02521473355591297, 0.013728630729019642, 0.009491503238677979, 0.07224833220243454, 0.025826435536146164, 0.023572979494929314], [0.018089843913912773, 0.044829804450273514, 0.01890949159860611, 0.030137406662106514, 0.02548551931977272, 0.034002650529146194, 0.016725465655326843, 0.027934221550822258, 0.06159133464097977, 0.12900206446647644, 0.028444360941648483, 0.04846929758787155, 0.09203342348337173, 0.049833744764328, 0.041654787957668304, 0.03939155116677284, 0.03427526727318764, 0.01886647194623947, 0.022651413455605507, 0.06156330183148384, 0.0775332972407341, 
0.07857528328895569], [0.021346233785152435, 0.04975258186459541, 0.012190605513751507, 0.011329670436680317, 0.0643337294459343, 0.06624849140644073, 0.042309265583753586, 0.3005841374397278, 0.04368159919977188, 0.03617308661341667, 0.04074971377849579, 0.029588470235466957, 0.0719427764415741, 0.006421939004212618, 0.007276507094502449, 0.013775498606264591, 0.02896747551858425, 0.01572929322719574, 0.05272682011127472, 0.02441275492310524, 0.05398670211434364, 0.006472699344158173], [0.09600368142127991, 0.013823455199599266, 0.011381207033991814, 0.029565133154392242, 0.019282381981611252, 0.011074181646108627, 0.00595314335078001, 0.010641315951943398, 0.10180576145648956, 0.019097745418548584, 0.06092027947306633, 0.08673583716154099, 0.1046200841665268, 0.04335460439324379, 0.03852240741252899, 0.05013079196214676, 0.017238304018974304, 0.006729502696543932, 0.012953297235071659, 0.09845814108848572, 0.010083120316267014, 0.1516256332397461], [0.006181876640766859, 0.033289387822151184, 0.036595504730939865, 0.03133334219455719, 0.047119539231061935, 0.0356854572892189, 0.03135541081428528, 0.20000571012496948, 0.01252484880387783, 0.02259448729455471, 0.06926154345273972, 0.04204557090997696, 0.2694595456123352, 0.017046405002474785, 0.02153400145471096, 0.005032191518694162, 0.01136657502502203, 0.00850143376737833, 0.05751704052090645, 0.0087687773630023, 0.029301319271326065, 0.003480010898783803], [0.027115389704704285, 0.05273393169045448, 0.02910487726330757, 0.015340480953454971, 0.025697149336338043, 0.016910329461097717, 0.01597553864121437, 0.06794838607311249, 0.033026549965143204, 0.056992460042238235, 0.04023825749754906, 0.039127934724092484, 0.18766556680202484, 0.06241052970290184, 0.01878363825380802, 0.016704950481653214, 0.020707614719867706, 0.019131870940327644, 0.06637714058160782, 0.045437514781951904, 0.06862903386354446, 0.07394085079431534], [0.0677579864859581, 0.013270397670567036, 0.010108711197972298, 0.02724255993962288, 
0.011985287070274353, 0.017121920362114906, 0.006787785328924656, 0.007906652987003326, 0.09379493445158005, 0.030800990760326385, 0.031723760068416595, 0.08578071743249893, 0.07569315284490585, 0.03983327001333237, 0.04755719378590584, 0.04568307101726532, 0.032670218497514725, 0.012686614878475666, 0.025946350768208504, 0.12677910923957825, 0.015117158181965351, 0.17375214397907257], [0.01526004821062088, 0.03255447745323181, 0.02594136632978916, 0.034452978521585464, 0.014993015676736832, 0.013818609528243542, 0.019942574203014374, 0.11593528091907501, 0.02248018980026245, 0.045498959720134735, 0.06350573152303696, 0.051751237362623215, 0.11017315089702606, 0.028417272493243217, 0.034211743623018265, 0.008003275841474533, 0.0201055109500885, 0.020359918475151062, 0.17983388900756836, 0.04080968722701073, 0.06807627528905869, 0.03387485072016716], [0.08308860659599304, 0.011417942121624947, 0.008931371383368969, 0.014331763610243797, 0.00637472327798605, 0.014678217470645905, 0.009405078366398811, 0.0035809807013720274, 0.11283909529447556, 0.016457129269838333, 0.01601986400783062, 0.054814573377370834, 0.011246981099247932, 0.034349918365478516, 0.025000376626849174, 0.021864324808120728, 0.027063259854912758, 0.0263969749212265, 0.015627482905983925, 0.14925186336040497, 0.0064374119974672794, 0.3308219909667969], [0.02299177087843418, 0.09336983412504196, 0.01851990632712841, 0.005526433698832989, 0.017977938055992126, 0.02887662872672081, 0.04004041105508804, 0.03521059826016426, 0.0210715439170599, 0.10321009159088135, 0.02876526117324829, 0.013515084981918335, 0.05682484805583954, 0.016242368146777153, 0.007478722836822271, 0.02681874856352806, 0.04259227588772774, 0.08570606261491776, 0.040149539709091187, 0.0382525734603405, 0.19360648095607758, 0.06325292587280273], [0.019767317920923233, 0.05474404990673065, 0.018190374597907066, 0.014983968809247017, 0.01546945609152317, 0.038074225187301636, 0.059591494500637054, 0.04513969644904137, 
0.03092230297625065, 0.05908714979887009, 0.023740896955132484, 0.028738627210259438, 0.032374706119298935, 0.01698349043726921, 0.022940076887607574, 0.022863246500492096, 0.05054111406207085, 0.11027715355157852, 0.115839384496212, 0.061787236481904984, 0.1067185327410698, 0.05122566595673561], [0.0281108058989048, 0.038975995033979416, 0.02092290297150612, 0.05214704945683479, 0.023068051785230637, 0.05580664426088333, 0.03573030233383179, 0.020536456257104874, 0.11465884745121002, 0.04631525278091431, 0.028919456526637077, 0.038303542882204056, 0.022366832941770554, 0.021786434575915337, 0.054096098989248276, 0.05229687690734863, 0.04863916337490082, 0.04608174040913582, 0.030409401282668114, 0.1123250275850296, 0.04213517904281616, 0.06636795401573181], [0.02194075845181942, 0.046819981187582016, 0.02062159590423107, 0.035897981375455856, 0.019214628264307976, 0.025028139352798462, 0.02376660890877247, 0.018537990748882294, 0.043183065950870514, 0.054748211055994034, 0.015588436275720596, 0.016913024708628654, 0.015578237362205982, 0.04232114925980568, 0.05266198143362999, 0.0908946767449379, 0.04108436778187752, 0.060635197907686234, 0.05817634239792824, 0.08275162428617477, 0.08483227342367172, 0.12880384922027588], [0.05786079168319702, 0.06995157897472382, 0.012106530368328094, 0.010579784400761127, 0.03539269044995308, 0.03940541669726372, 0.03673326596617699, 0.173531174659729, 0.049178365617990494, 0.013642476871609688, 0.02008116990327835, 0.008260289207100868, 0.011824295856058598, 0.00850327592343092, 0.008370976895093918, 0.03523073345422745, 0.039147019386291504, 0.05055411905050278, 0.18273980915546417, 0.0606904998421669, 0.04856215417385101, 0.027653571218252182], [0.15692922472953796, 0.016198065131902695, 0.012413358315825462, 0.03102719783782959, 0.01777193322777748, 0.008914975449442863, 0.0058159418404102325, 0.010290488600730896, 0.07140261679887772, 0.00525712501257658, 0.03840462118387222, 0.03178056702017784, 0.02755291946232319, 
0.03916274383664131, 0.03724417835474014, 0.08238010853528976, 0.019383007660508156, 0.012376290746033192, 0.03419972211122513, 0.1129915863275528, 0.006942938547581434, 0.22156037390232086], [0.006202610209584236, 0.057937148958444595, 0.04148496314883232, 0.010255963541567326, 0.03190337494015694, 0.03549768775701523, 0.10092676430940628, 0.34294256567955017, 0.005697912536561489, 0.010479732416570187, 0.031313456594944, 0.004688877146691084, 0.022961756214499474, 0.007614194881170988, 0.005208879709243774, 0.0051160529255867004, 0.01156524382531643, 0.052129410207271576, 0.14611302316188812, 0.006932784803211689, 0.060486964881420135, 0.0025407075881958008], [0.12356746941804886, 0.021816298365592957, 0.01407918892800808, 0.015312345698475838, 0.02037402242422104, 0.011413277126848698, 0.006089002825319767, 0.011363295838236809, 0.0638941079378128, 0.007272166199982166, 0.02699229307472706, 0.018856195732951164, 0.016278257593512535, 0.04007314145565033, 0.019653113558888435, 0.06510147452354431, 0.02274353615939617, 0.017039693892002106, 0.0413496233522892, 0.11085749417543411, 0.0118092130869627, 0.3140648603439331]], [[0.012322334572672844, 0.08561590313911438, 0.01631069742143154, 0.01911439187824726, 0.028733503073453903, 0.010414090938866138, 0.025981752201914787, 0.07207407057285309, 0.03307048976421356, 0.21481001377105713, 0.018205782398581505, 0.009150998666882515, 0.08440262824296951, 0.01007845439016819, 0.020482150837779045, 0.01906677335500717, 0.012779470533132553, 0.016863388940691948, 0.03509076312184334, 0.03381314501166344, 0.2137010097503662, 0.007918132469058037], [0.016721436753869057, 0.03469958156347275, 0.016892075538635254, 0.016910696402192116, 0.020527977496385574, 0.04176410287618637, 0.08273404091596603, 0.04594358429312706, 0.07070813328027725, 0.1979655921459198, 0.02981507033109665, 0.012635123915970325, 0.04016084969043732, 0.011942277662456036, 0.014485185034573078, 0.02136496640741825, 0.03876740485429764, 0.07338718324899673, 
0.0288393497467041, 0.08565870672464371, 0.08249260485172272, 0.015584097243845463], [0.012861422263085842, 0.09940791875123978, 0.010130894370377064, 0.04062683507800102, 0.028524305671453476, 0.005615797825157642, 0.023298699408769608, 0.0656152069568634, 0.07768568396568298, 0.16926412284374237, 0.06382744014263153, 0.027798403054475784, 0.12025973945856094, 0.009180053137242794, 0.03502456098794937, 0.011684933677315712, 0.004353975411504507, 0.01127688866108656, 0.017372047528624535, 0.05721156671643257, 0.09219590574502945, 0.01678352989256382], [0.0060772825963795185, 0.12452465295791626, 0.015434973873198032, 0.010711943730711937, 0.016632141545414925, 0.004533762112259865, 0.018008152022957802, 0.07015538215637207, 0.03062629885971546, 0.08754031360149384, 0.030585071071982384, 0.027515094727277756, 0.2662588953971863, 0.01757359504699707, 0.019724663347005844, 0.012909410521388054, 0.008030049502849579, 0.014431222341954708, 0.05830158293247223, 0.030467182397842407, 0.11521349847316742, 0.014744868502020836], [0.025199269875884056, 0.08931693434715271, 0.02612515725195408, 0.04835473373532295, 0.03595871478319168, 0.009678042493760586, 0.04532108083367348, 0.07020707428455353, 0.0315411314368248, 0.036088936030864716, 0.03576581925153732, 0.03544482961297035, 0.18770278990268707, 0.028724653646349907, 0.07614517211914062, 0.02654298022389412, 0.014112712815403938, 0.03182494267821312, 0.05528207868337631, 0.026883160695433617, 0.045805126428604126, 0.017974713817238808], [0.022868774831295013, 0.10836901515722275, 0.014893880113959312, 0.09704028815031052, 0.04628154635429382, 0.008032113313674927, 0.013240051455795765, 0.10749837011098862, 0.037977803498506546, 0.09699788689613342, 0.02619217522442341, 0.03821670264005661, 0.11757842451334, 0.01499535609036684, 0.09200144559144974, 0.012017872184515, 0.0048324475064873695, 0.003465539775788784, 0.026352157816290855, 0.018136268481612206, 0.08233553171157837, 0.010676403529942036], [0.01731734536588192, 
0.12260492891073227, 0.03785721957683563, 0.03357824683189392, 0.029522491618990898, 0.017376884818077087, 0.020451189950108528, 0.027679650112986565, 0.02523978054523468, 0.06856776773929596, 0.06166759505867958, 0.03796432539820671, 0.21322186291217804, 0.03928830474615097, 0.04038158431649208, 0.023609928786754608, 0.016171375289559364, 0.016895055770874023, 0.01955663599073887, 0.021380731835961342, 0.07476282119750977, 0.034904301166534424], [0.029705051332712173, 0.04447951540350914, 0.032077278941869736, 0.0425296276807785, 0.042730093002319336, 0.04549514502286911, 0.05323910340666771, 0.047394733875989914, 0.01565701887011528, 0.1694294661283493, 0.04792570322751999, 0.021956318989396095, 0.0529983788728714, 0.02512982115149498, 0.04091421887278557, 0.03402569517493248, 0.031741488724946976, 0.03229573369026184, 0.0187163595110178, 0.011083467863500118, 0.14495807886123657, 0.015517739579081535], [0.01810072362422943, 0.08916488289833069, 0.030190417543053627, 0.05784199386835098, 0.026115819811820984, 0.009571690112352371, 0.01253748033195734, 0.12953615188598633, 0.05606045201420784, 0.06410457193851471, 0.017707914113998413, 0.03911319747567177, 0.10651247948408127, 0.02985275536775589, 0.07909370213747025, 0.013233800418674946, 0.012543793767690659, 0.0063927327282726765, 0.08965219557285309, 0.040571149438619614, 0.05941016227006912, 0.01269193459302187], [0.033007074147462845, 0.015634354203939438, 0.008475350216031075, 0.06671874225139618, 0.02744501456618309, 0.0371764600276947, 0.041999563574790955, 0.08886860311031342, 0.04641514644026756, 0.17202387750148773, 0.08841124922037125, 0.07928362488746643, 0.07034116238355637, 0.01432303711771965, 0.053750909864902496, 0.010490002110600471, 0.015844471752643585, 0.014055686071515083, 0.030289962887763977, 0.023703139275312424, 0.04032789170742035, 0.021414704620838165], [0.042850133031606674, 0.1646546721458435, 0.01935320347547531, 0.07619578391313553, 0.03036491386592388, 0.004521372262388468, 
0.01492267195135355, 0.04879505932331085, 0.06134754791855812, 0.03999081254005432, 0.039935704320669174, 0.02165135368704796, 0.1452457159757614, 0.01973808743059635, 0.06735549867153168, 0.012199166230857372, 0.006601059343665838, 0.008152689784765244, 0.038070619106292725, 0.07749445736408234, 0.036687035113573074, 0.02387247607111931], [0.026883192360401154, 0.049878910183906555, 0.02765846997499466, 0.0735584944486618, 0.050729479640722275, 0.02620955929160118, 0.014338210225105286, 0.09141865372657776, 0.06190292164683342, 0.060200151056051254, 0.01808619685471058, 0.0350840799510479, 0.05228201299905777, 0.026914246380329132, 0.08881111443042755, 0.021206233650445938, 0.03006112575531006, 0.010042163543403149, 0.07389172166585922, 0.06567023694515228, 0.07353012263774872, 0.021642781794071198], [0.01879766955971718, 0.03720888867974281, 0.01789259910583496, 0.040369655936956406, 0.02071165293455124, 0.014400308020412922, 0.03895614668726921, 0.052584048360586166, 0.0396478995680809, 0.0815354585647583, 0.04693695157766342, 0.020306922495365143, 0.13642410933971405, 0.03620180860161781, 0.059491295367479324, 0.011935182847082615, 0.029007939621806145, 0.037782616913318634, 0.10968147963285446, 0.06508783996105194, 0.04932719096541405, 0.03571229800581932], [0.008186401799321175, 0.14505153894424438, 0.009925246238708496, 0.019750218838453293, 0.015764333307743073, 0.0042715552262961864, 0.008451197296380997, 0.05712064355611801, 0.08125229924917221, 0.1660597026348114, 0.036148522049188614, 0.02680189162492752, 0.11223366111516953, 0.0100537845864892, 0.023105178028345108, 0.006482004188001156, 0.004978457000106573, 0.006045229732990265, 0.03465290367603302, 0.10325352847576141, 0.09675074368715286, 0.023660914972424507], [0.008090061135590076, 0.10782603174448013, 0.019970398396253586, 0.01028862502425909, 0.015110000967979431, 0.007611465640366077, 0.018884535878896713, 0.06554358452558517, 0.03169882297515869, 0.0755171924829483, 0.027890881523489952, 
0.029900670051574707, 0.19825199246406555, 0.024777794256806374, 0.020504018291831017, 0.015282399021089077, 0.01758320815861225, 0.021336456760764122, 0.10996066778898239, 0.04552207142114639, 0.10372786223888397, 0.024721277877688408], [0.07878035306930542, 0.040485769510269165, 0.04359030723571777, 0.03267580643296242, 0.03064112365245819, 0.03375991806387901, 0.04670294001698494, 0.07646189630031586, 0.041385531425476074, 0.05218032747507095, 0.028144080191850662, 0.02786872908473015, 0.04066133871674538, 0.033601485192775726, 0.044192492961883545, 0.024760017171502113, 0.05162165313959122, 0.03751795366406441, 0.08634983748197556, 0.05423100292682648, 0.05708392336964607, 0.03730355203151703], [0.03698757290840149, 0.1477098912000656, 0.027743304148316383, 0.0743878185749054, 0.036381687968969345, 0.011511921882629395, 0.016178296878933907, 0.07009623199701309, 0.044473305344581604, 0.06382144242525101, 0.02152295596897602, 0.03594770282506943, 0.08363025635480881, 0.024902157485485077, 0.07795098423957825, 0.017782753333449364, 0.011701167561113834, 0.007435434497892857, 0.05454195663332939, 0.042929042130708694, 0.06713747978210449, 0.02522660605609417], [0.036659788340330124, 0.13168099522590637, 0.04255812242627144, 0.021305762231349945, 0.02348131686449051, 0.02646108716726303, 0.01847231760621071, 0.016433240845799446, 0.02600296586751938, 0.039880942553281784, 0.039468493312597275, 0.028789706528186798, 0.10568498820066452, 0.046831175684928894, 0.02816765569150448, 0.02795139141380787, 0.04639212042093277, 0.0311124287545681, 0.045606859028339386, 0.052310094237327576, 0.07105530798435211, 0.0936933383345604], [0.05466180294752121, 0.07304012030363083, 0.03921907767653465, 0.026328634470701218, 0.027065247297286987, 0.04227009415626526, 0.026560787111520767, 0.03954971581697464, 0.018327651545405388, 0.09926427155733109, 0.039112482219934464, 0.02519896812736988, 0.039085619151592255, 0.037443701177835464, 0.028841862455010414, 0.03500737249851227, 
0.05655406787991524, 0.03049439750611782, 0.05448618531227112, 0.029226819053292274, 0.12742523849010468, 0.050836000591516495], [0.04694634675979614, 0.05764332413673401, 0.031742822378873825, 0.036268945783376694, 0.021578285843133926, 0.011304201558232307, 0.023211924359202385, 0.08299940079450607, 0.06646951287984848, 0.03824758902192116, 0.016870062798261642, 0.03730667009949684, 0.053222786635160446, 0.0382499024271965, 0.04812033846974373, 0.014832038432359695, 0.022610031068325043, 0.019827689975500107, 0.16074174642562866, 0.10161575675010681, 0.03603266552090645, 0.03415802866220474], [0.052569590508937836, 0.015820395201444626, 0.015665173530578613, 0.03152187913656235, 0.02181398682296276, 0.06609011441469193, 0.09086523950099945, 0.025277772918343544, 0.026851966977119446, 0.07382553815841675, 0.08598991483449936, 0.029945343732833862, 0.04737250506877899, 0.02620912715792656, 0.028284410014748573, 0.020732687786221504, 0.0691051259636879, 0.09430694580078125, 0.05053646117448807, 0.038542795926332474, 0.030106423422694206, 0.058566659688949585], [0.0444788858294487, 0.14099782705307007, 0.019596390426158905, 0.06292890012264252, 0.018242815509438515, 0.00517837330698967, 0.011148609220981598, 0.06049039587378502, 0.08591201156377792, 0.02560916543006897, 0.019784655421972275, 0.019023537635803223, 0.06535258144140244, 0.021881651133298874, 0.06755199283361435, 0.008368758484721184, 0.00989801250398159, 0.008302891626954079, 0.08278977870941162, 0.15453152358531952, 0.034199852496385574, 0.03373148292303085]], [[0.003646751632913947, 0.15349042415618896, 0.00942288525402546, 0.0012093277182430029, 0.007366538513451815, 0.06605483591556549, 0.007569093722850084, 0.1281198114156723, 0.009629979729652405, 0.12951308488845825, 0.027016906067728996, 0.06281846761703491, 0.09656611829996109, 0.01173562090843916, 0.0015399743570014834, 0.008794590830802917, 0.06958033889532089, 0.01091020554304123, 0.09206724911928177, 0.00558486906811595, 
0.07403264939785004, 0.023330330848693848], [0.006698199547827244, 0.0895545557141304, 0.011144818738102913, 0.0014965799637138844, 0.005911215208470821, 0.03886757791042328, 0.010205752216279507, 0.22677303850650787, 0.022642549127340317, 0.09284401684999466, 0.08265376836061478, 0.13702119886875153, 0.08837946504354477, 0.009144240990281105, 0.0017386279068887234, 0.005194387398660183, 0.023645592853426933, 0.006774981040507555, 0.07941818982362747, 0.007376036141067743, 0.03531227260828018, 0.017202816903591156], [0.0017415458569303155, 0.1377444714307785, 0.0021621109917759895, 0.0017591211944818497, 0.00296187330968678, 0.050517283380031586, 0.003691114718094468, 0.11616496741771698, 0.007859071716666222, 0.14489281177520752, 0.02697679027915001, 0.04117836803197861, 0.18839852511882782, 0.0024936930276453495, 0.002858817810192704, 0.003439760534092784, 0.056192055344581604, 0.0051042442210018635, 0.09063436090946198, 0.005288615357130766, 0.09608788043260574, 0.011852501891553402], [0.030380090698599815, 0.0979972779750824, 0.017568718641996384, 0.016787586733698845, 0.019022390246391296, 0.04354364797472954, 0.053520411252975464, 0.1184152215719223, 0.060748204588890076, 0.07858167588710785, 0.042200420051813126, 0.061096612364053726, 0.08483393490314484, 0.01963178813457489, 0.012501034885644913, 0.009089840576052666, 0.034326016902923584, 0.030183475464582443, 0.030878359451889992, 0.04002196714282036, 0.05202038586139679, 0.046650953590869904], [0.019357068464159966, 0.07297973334789276, 0.02255559340119362, 0.006604355294257402, 0.010541577823460102, 0.053815945982933044, 0.024353403598070145, 0.13960812985897064, 0.0667327493429184, 0.057311393320560455, 0.07771866023540497, 0.20647430419921875, 0.05235657840967178, 0.015593019314110279, 0.0060576945543289185, 0.010763827711343765, 0.024701369926333427, 0.011349319480359554, 0.036605022847652435, 0.023344555869698524, 0.02293364331126213, 0.038242124021053314], [0.06714639067649841, 0.03748754784464836, 
0.021001223474740982, 0.014746490865945816, 0.025711001828312874, 0.02971319481730461, 0.05319783091545105, 0.08805600553750992, 0.09659583121538162, 0.045485880225896835, 0.10350075364112854, 0.13389712572097778, 0.04494931548833847, 0.018975893035531044, 0.011265406385064125, 0.012226596474647522, 0.021542562171816826, 0.029851742088794708, 0.02137012965977192, 0.04587775468826294, 0.016812104731798172, 0.060589149594306946], [0.014353757724165916, 0.05955660715699196, 0.02258569374680519, 0.012254711240530014, 0.013829360716044903, 0.11565268039703369, 0.02044929750263691, 0.07186330854892731, 0.10231192409992218, 0.06970661133527756, 0.043245647102594376, 0.10166387259960175, 0.03500336408615112, 0.023574749007821083, 0.010900808498263359, 0.01015318464487791, 0.09978017210960388, 0.014552202075719833, 0.0325104221701622, 0.05243152379989624, 0.02143820933997631, 0.05218198150396347], [0.004740321077406406, 0.11914169788360596, 0.018505068495869637, 0.007862741127610207, 0.013992834836244583, 0.04188808798789978, 0.020764879882335663, 0.18795648217201233, 0.02042444795370102, 0.2638213634490967, 0.019824707880616188, 0.04861541837453842, 0.03259318694472313, 0.005733994767069817, 0.00723971938714385, 0.011699588969349861, 0.014535713940858841, 0.01022899616509676, 0.030079081654548645, 0.009772992692887783, 0.10360509902238846, 0.006973613984882832], [0.039249807596206665, 0.05220920220017433, 0.015901068225502968, 0.008184569887816906, 0.024152211844921112, 0.04647073149681091, 0.02306152507662773, 0.07119214534759521, 0.054002538323402405, 0.09979944676160812, 0.0792870968580246, 0.21639762818813324, 0.04455879330635071, 0.016682110726833344, 0.005792279727756977, 0.009853915311396122, 0.044882629066705704, 0.01619846187531948, 0.025266701355576515, 0.021445125341415405, 0.02608620561659336, 0.05932578817009926], [0.04004967585206032, 0.02872086688876152, 0.034168317914009094, 0.016716457903385162, 0.04766369238495827, 0.040866680443286896, 
0.09672505408525467, 0.0534236878156662, 0.1726488173007965, 0.009775819256901741, 0.03339467570185661, 0.13897331058979034, 0.014282682910561562, 0.045445941388607025, 0.013770629651844501, 0.02324059046804905, 0.020628634840250015, 0.05122953653335571, 0.02424745447933674, 0.06459411233663559, 0.004075672011822462, 0.025357738137245178], [0.00516952620819211, 0.1721266806125641, 0.0058592092245817184, 0.001029281411319971, 0.0049637071788311005, 0.021410545334219933, 0.01198762096464634, 0.0772189199924469, 0.01117778941988945, 0.07034361362457275, 0.05754590407013893, 0.22850114107131958, 0.1190054789185524, 0.012762265279889107, 0.0017993781948462129, 0.005636654328554869, 0.04019122198224068, 0.021485628560185432, 0.06080936640501022, 0.005251636728644371, 0.03717828169465065, 0.028546180576086044], [0.0061793881468474865, 0.05149510130286217, 0.011192802339792252, 0.0011556926183402538, 0.006251978687942028, 0.03357105702161789, 0.00580737367272377, 0.044738929718732834, 0.016516931354999542, 0.04047175496816635, 0.10560203343629837, 0.39824220538139343, 0.0895937979221344, 0.01620929315686226, 0.002129259519279003, 0.00912972167134285, 0.04384608566761017, 0.008388077840209007, 0.042872026562690735, 0.006426576990634203, 0.019849948585033417, 0.04033001512289047], [0.003380484413355589, 0.20214056968688965, 0.010169661603868008, 0.0007656306261196733, 0.0029883799143135548, 0.004899139981716871, 0.00813859049230814, 0.06123752146959305, 0.003982305992394686, 0.023800313472747803, 0.058521367609500885, 0.18634876608848572, 0.18930189311504364, 0.0300369281321764, 0.002464099321514368, 0.006430622655898333, 0.01411430723965168, 0.018170464783906937, 0.11346736550331116, 0.002366492059081793, 0.039418138563632965, 0.01785697415471077], [0.0008363910019397736, 0.12710823118686676, 0.002360859652981162, 0.0020953360944986343, 0.0017872026655822992, 0.03154810518026352, 0.0016256548697128892, 0.018270010128617287, 0.0023032415192574263, 0.11958596855401993, 
0.011860411614179611, 0.010002641007304192, 0.34371131658554077, 0.004425892140716314, 0.004698599223047495, 0.0036637040320783854, 0.09200666099786758, 0.005469911731779575, 0.06658147275447845, 0.004259786568582058, 0.13227230310440063, 0.01352629903703928], [0.025067303329706192, 0.10082478076219559, 0.02376246638596058, 0.02051774226129055, 0.013896388001739979, 0.03253664821386337, 0.0369037464261055, 0.04154588654637337, 0.029979407787322998, 0.0548042431473732, 0.0454232357442379, 0.03614649176597595, 0.16231876611709595, 0.03978999704122543, 0.020967042073607445, 0.010860234498977661, 0.05383468419313431, 0.038386501371860504, 0.03361250087618828, 0.03776826336979866, 0.06272371113300323, 0.07832993566989899], [0.012861587107181549, 0.07328205555677414, 0.026785144582390785, 0.010110031813383102, 0.013397601433098316, 0.06350355595350266, 0.019662979990243912, 0.03717231750488281, 0.029821261763572693, 0.05317719653248787, 0.057959698140621185, 0.11590772122144699, 0.10749755799770355, 0.03912867233157158, 0.012553959153592587, 0.022219911217689514, 0.06978665292263031, 0.028252631425857544, 0.04745681211352348, 0.027100827544927597, 0.04979632794857025, 0.0825655460357666], [0.029218871146440506, 0.05045429989695549, 0.02155211940407753, 0.01943880505859852, 0.014120758511126041, 0.020845627412199974, 0.017808152362704277, 0.022230779752135277, 0.01774732396006584, 0.05119633302092552, 0.05006431043148041, 0.019579056650400162, 0.1431303322315216, 0.05275319516658783, 0.027416815981268883, 0.013393186032772064, 0.07935847342014313, 0.05293377488851547, 0.05885908007621765, 0.041336964815855026, 0.06985020637512207, 0.12671151757240295], [0.008165939711034298, 0.030704207718372345, 0.023075558245182037, 0.017986848950386047, 0.008136707358062267, 0.0685359314084053, 0.00468639237806201, 0.007906349375844002, 0.019936855882406235, 0.03496987000107765, 0.020047321915626526, 0.009010998532176018, 0.06205675005912781, 0.059458259493112564, 0.025271259248256683, 
0.010556071996688843, 0.3062411844730377, 0.015413891524076462, 0.0453730970621109, 0.05499977618455887, 0.035391442477703094, 0.13207532465457916], [0.0006031988887116313, 0.10403121262788773, 0.006625327281653881, 0.0030176357831805944, 0.0020899532828480005, 0.012174481526017189, 0.0018714130856096745, 0.016377033665776253, 0.0012943969340994954, 0.148039773106575, 0.007961099967360497, 0.002257449785247445, 0.21498803794384003, 0.009842433966696262, 0.007283453363925219, 0.004072614014148712, 0.05018052086234093, 0.009313603863120079, 0.07062222063541412, 0.005102668888866901, 0.30573153495788574, 0.016519920900464058], [0.029313070699572563, 0.05150376632809639, 0.01860283873975277, 0.012141164392232895, 0.015288114547729492, 0.03066885657608509, 0.012291625142097473, 0.016852648928761482, 0.015456113032996655, 0.06045832112431526, 0.04892846941947937, 0.038685571402311325, 0.08791719377040863, 0.05268066003918648, 0.01374772097915411, 0.013421314768493176, 0.14606769382953644, 0.03761319816112518, 0.0626181960105896, 0.026555709540843964, 0.05928738787770271, 0.1499003916978836], [0.038896795362234116, 0.03136463463306427, 0.06534188240766525, 0.037429194897413254, 0.03407590091228485, 0.04578479006886482, 0.03611193969845772, 0.013782273046672344, 0.04677732661366463, 0.02038104087114334, 0.02178754098713398, 0.019414661452174187, 0.02330864407122135, 0.11480319499969482, 0.036507315933704376, 0.049287863075733185, 0.07056035101413727, 0.059905391186475754, 0.046122077852487564, 0.08992026746273041, 0.021371137350797653, 0.07706578820943832], [0.0009564529755152762, 0.08994867652654648, 0.0015066531486809254, 0.00059907091781497, 0.0010789459338411689, 0.01092636026442051, 0.0009500543237663805, 0.006169493775814772, 0.0007370539242401719, 0.06935864686965942, 0.020566893741488457, 0.008586679585278034, 0.3454678952693939, 0.008250650018453598, 0.0017625397304072976, 0.002305730013176799, 0.15283161401748657, 0.010715826414525509, 0.0941971018910408, 
0.0021552201360464096, 0.12814688682556152, 0.0427815280854702]]]], \"left_text\": [\"\", \"CCCCC\", \"[\", \"C\", \"@@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\", \"\", \"CCCCC\", \"[\", \"C\", \"@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\"], \"right_text\": [\"\", \"CCCCC\", \"[\", \"C\", \"@@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\", \"\", \"CCCCC\", \"[\", \"C\", \"@\", \"H\", \"](\", \"Br\", \")\", \"CC\", \"\"]}}, \"default_filter\": \"all\"}" ], "text/plain": [ "" @@ -6728,9 +5940,9 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 301 + "height": 280 }, - "outputId": "f61e3481-7ed9-455c-aa10-0667866769ab" + "outputId": "2f868a5e-5b80-4975-bf64-bf6a6f4aefe7" }, "source": [ "!wget https://t.co/zrC7F8DcRs?amp=1" @@ -6740,21 +5952,21 @@ { "output_type": "stream", "text": [ - "--2020-06-21 00:04:17-- https://t.co/zrC7F8DcRs?amp=1\n", - "Resolving t.co (t.co)... 104.244.42.197, 104.244.42.5, 104.244.42.133, ...\n", + "--2020-08-07 23:56:40-- https://t.co/zrC7F8DcRs?amp=1\n", + "Resolving t.co (t.co)... 104.244.42.197, 104.244.42.133, 104.244.42.69, ...\n", "Connecting to t.co (t.co)|104.244.42.197|:443... connected.\n", "HTTP request sent, awaiting response... 301 Moved Permanently\n", "Location: https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/tox21_balanced_revised_no_id.csv [following]\n", - "--2020-06-21 00:04:18-- https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/tox21_balanced_revised_no_id.csv\n", - "Resolving deepchemdata.s3-us-west-1.amazonaws.com (deepchemdata.s3-us-west-1.amazonaws.com)... 52.219.120.233\n", - "Connecting to deepchemdata.s3-us-west-1.amazonaws.com (deepchemdata.s3-us-west-1.amazonaws.com)|52.219.120.233|:443... connected.\n", + "--2020-08-07 23:56:40-- https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/tox21_balanced_revised_no_id.csv\n", + "Resolving deepchemdata.s3-us-west-1.amazonaws.com (deepchemdata.s3-us-west-1.amazonaws.com)... 
52.219.116.233\n", + "Connecting to deepchemdata.s3-us-west-1.amazonaws.com (deepchemdata.s3-us-west-1.amazonaws.com)|52.219.116.233|:443... connected.\n", "HTTP request sent, awaiting response... 200 OK\n", "Length: 85962 (84K) [text/csv]\n", "Saving to: ‘zrC7F8DcRs?amp=1’\n", "\n", "\rzrC7F8DcRs?amp=1 0%[ ] 0 --.-KB/s \rzrC7F8DcRs?amp=1 100%[===================>] 83.95K --.-KB/s in 0.05s \n", "\n", - "2020-06-21 00:04:18 (1.73 MB/s) - ‘zrC7F8DcRs?amp=1’ saved [85962/85962]\n", + "2020-08-07 23:56:40 (1.80 MB/s) - ‘zrC7F8DcRs?amp=1’ saved [85962/85962]\n", "\n" ], "name": "stdout" @@ -6787,16 +5999,49 @@ { "cell_type": "code", "metadata": { - "id": "mJVrSI0gZ5Ow", + "id": "veIAIGxBUshD", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 235 + }, + "outputId": "3009e4d0-a777-4411-9365-f33708ea0683" }, "source": [ - "!pip install simpletransformers\n", - "!pip install wandb" + "pip install --upgrade tqdm" ], - "execution_count": null, - "outputs": [] + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Collecting tqdm\n", + " Using cached tqdm-4.48.2-py2.py3-none-any.whl (68 kB)\n", + "Installing collected packages: tqdm\n", + " Attempting uninstall: tqdm\n", + " Found existing installation: tqdm 4.46.0\n", + " Uninstalling tqdm-4.46.0:\n", + " Successfully uninstalled tqdm-4.46.0\n", + "Successfully installed tqdm-4.48.2\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "tqdm" + ] + } + } + }, + "metadata": { + "tags": [] + } + } + ] }, { "cell_type": "markdown", @@ -6816,9 +6061,9 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 197 + "height": 189 }, - "outputId": "fc51fd81-bace-4d6c-be08-19bf9b816261" + "outputId": "42b1b29a-297a-4f40-cbc9-80a4e467871a" }, "source": [ "import pandas as pd\n", @@ -6925,10 +6170,9 @@ "source": [ 
"from simpletransformers.classification import ClassificationModel\n", "import logging\n", - "\n", "logging.basicConfig(level=logging.INFO)\n", "transformers_logger = logging.getLogger(\"transformers\")\n", - "transformers_logger.setLevel(logging.WARNING)" + "transformers_logger.setLevel(logging.WARNING)\n" ], "execution_count": 19, "outputs": [] @@ -6950,20 +6194,23 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 52 + "height": 119 }, - "outputId": "54a36a91-4b6c-4987-fb69-b2610d0d3286" + "outputId": "1f144b0b-02eb-4c7a-db52-8d888f5523c2" }, "source": [ - "model = ClassificationModel('roberta', 'seyonec/ChemBERTa-zinc-base-v1', args={'num_train_epochs': 3, 'auto_weights': True}) # You can set class weights by using the optional weight argument\n" + "model = ClassificationModel('roberta', 'seyonec/ChemBERTa_zinc250k_v2_40k', args={'num_train_epochs': 3, 'auto_weights': True}) # You can set class weights by using the optional weight argument\n" ], "execution_count": 20, "outputs": [ { "output_type": "stream", "text": [ - "/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py:831: FutureWarning: Parameter max_len is deprecated and will be removed in a future release. Use model_max_length instead.\n", - " category=FutureWarning,\n" + "WARNING:transformers.modeling_utils:Some weights of the model checkpoint at seyonec/ChemBERTa_zinc250k_v2_40k were not used when initializing RobertaForSequenceClassification: ['lm_head.bias', 'lm_head.dense.weight', 'lm_head.dense.bias', 'lm_head.layer_norm.weight', 'lm_head.layer_norm.bias', 'lm_head.decoder.weight', 'lm_head.decoder.bias']\n", + "- This IS expected if you are initializing RobertaForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPretraining model).\n", + "- This IS NOT expected if you are initializing RobertaForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "WARNING:transformers.modeling_utils:Some weights of RobertaForSequenceClassification were not initialized from the model checkpoint at seyonec/ChemBERTa_zinc250k_v2_40k and are newly initialized: ['classifier.dense.weight', 'classifier.dense.bias', 'classifier.out_proj.weight', 'classifier.out_proj.bias']\n", + "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n" ], "name": "stderr" } @@ -6993,9 +6240,9 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 70 + "height": 66 }, - "outputId": "88395c64-ca01-4fdb-f07d-425f4ca3c9a6" + "outputId": "ea5c5f40-597c-47f5-8b8e-26d71ffcda53" }, "source": [ "# check if our train and evaluation dataframes are setup properly. 
There should only be two columns for the SMILES string and its corresponding label.\n", @@ -7031,328 +6278,101 @@ }, { "cell_type": "code", - "metadata": { - "id": "UTnzRNbHAwfA", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 87 - }, - "outputId": "b8a57f53-5f32-481c-9da5-ed82b91c3a17" - }, - "source": [ - "!wandb login" - ], - "execution_count": 23, - "outputs": [ - { - "output_type": "stream", - "text": [ - "\u001b[34m\u001b[1mwandb\u001b[0m: You can find your API key in your browser here: https://app.wandb.ai/authorize\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Paste an API key from your profile and hit enter: 3453d85d7ddabfc34500f3fa6ac9ec2ba5683c2f\n", - "\u001b[34m\u001b[1mwandb\u001b[0m: Appending key for api.wandb.ai to your netrc file: /root/.netrc\n", - "\u001b[32mSuccessfully logged in to Weights & Biases!\u001b[0m\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "sM6jgEV2eV7u", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 1000, - "referenced_widgets": [ - "136b015c75e34642bd689b4ef456218e", - "e8f6a120219d462dbfe855f4a063435f", - "7c42ba33692848b9bced35360ff3d003", - "bff1343b5c724187b92702de133f6a03", - "311b578ab682442d94b772f6365c2b7f", - "b2b573bfb1a54c8bac35b908ad32b835", - "db7a1ccfc79e4758bc85c767dbadd162", - "37a98680611d40eba5026d930be4ca5c", - "c39c27352ce140bfa650c266ac205cb2", - "607426d9589b4e84b4fcfd3a64392374", - "5649cf1a33504fcca606dd75f1db4e1a", - "205da1ebc6d3432d9be53adf2ad87633", - "ca6ec52d47284cf8ab617f2dfbc04358", - "59878a92f1b74e8b92e73ad7ab509020", - "9b51b5951e7d445ba307dd539dd28f75", - "73ae0afccecb42489812b849a17a1dfc", - "50d49a1384cb474dbb51e38375c005e3", - "3175c0c02b9340319f23790cda3f741a", - "12c7dafc2f5b4f4e99b646dc987e305a", - "19f4fb0189574f659be5f677b176049b", - "b617fd70d5e44dfc8aaf9e2e70dd96b8", - "0716ea9d615f43f5979a3ec4bb97433d", - "ab22977b97de485c8e7ff5ad32401a42", - 
"f289b20aaf2c4d6fb4f03b436fef6836", - "bfa661dfa3de41df810e0b5035d52c1e", - "1dd271d6a49445bf81488cb92a81247f", - "b9b287012e704eaea45d48f21836b8c4", - "7b5168a54bba443980f471c5623d8a3b", - "1875a1424a154f9b87b0958dcdc303e9", - "a1c637d057214aa4bf961115718540aa", - "ced6f8685ae84e23b517fe4c10d5e543", - "fe94273739cc403987d47549aa894c25", - "fc42b7f3c9f5486688649c44e5340390", - "992037580a774f959acab6acd413da36", - "82272780aabb457d88ba7448161327b9", - "0cb45d8fb7604d6aabbf35abeee0b83b", - "d0385dfa020641a1b1867ce53612a4c1", - "3858db9d16a0482f917e2829c24090d0", - "197e5ce104f945f8bac84604295592e7", - "ee59e545a93e4bb0a66595729f815bf3" - ] - }, - "outputId": "424e49b8-d887-4116-e8ed-6b0d791024f9" - }, - "source": [ - "# Create directory to store model weights (change path accordingly to where you want!)\n", - "!cd /content\n", - "!mkdir chemberta_tox21\n", - "\n", - "# Train the model\n", - "model.train_model(train_dataset, output_dir='/content/chemberta_tox21', num_labels=2, use_cuda=True, args={'wandb_project': 'project-name'})\n" - ], - "execution_count": 24, - "outputs": [ - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.7/site-packages/simpletransformers/classification/classification_model.py:267: UserWarning: Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\n", - " \"Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\"\n", - "INFO:simpletransformers.classification.classification_model: Converting to features started. 
Cache is not used.\n" - ], - "name": "stderr" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "136b015c75e34642bd689b4ef456218e", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=1714.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n", - "Selected optimization level O1: Insert automatic casts around Pytorch functions and Tensor methods.\n", - "\n", - "Defaults for this optimization level are:\n", - "enabled : True\n", - "opt_level : O1\n", - "cast_model_type : None\n", - "patch_torch_functions : True\n", - "keep_batchnorm_fp32 : None\n", - "master_weights : None\n", - "loss_scale : dynamic\n", - "Processing user overrides (additional kwargs that are not None)...\n", - "After processing overrides, optimization options are:\n", - "enabled : True\n", - "opt_level : O1\n", - "cast_model_type : None\n", - "patch_torch_functions : True\n", - "keep_batchnorm_fp32 : None\n", - "master_weights : None\n", - "loss_scale : dynamic\n", - "Warning: multi_tensor_applier fused unscale kernel is unavailable, possibly because apex was installed without --cuda_ext --cpp_ext. Using Python fallback. Original ImportError was: ModuleNotFoundError(\"No module named 'amp_C'\",)\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "c39c27352ce140bfa650c266ac205cb2", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Epoch', max=3.0, style=ProgressStyle(description_width='i…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "text/html": [ - "\n", - " Logging results to Weights & Biases (Documentation).
\n", - " Project page: https://app.wandb.ai/seyonec/project-name
\n", - " Run page: https://app.wandb.ai/seyonec/project-name/runs/w5p34xmh
\n", - " " - ], - "text/plain": [ - "" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:system metrics and metadata threads started\n", - "INFO:wandb.run_manager:checking resume status, waiting at most 10 seconds\n", - "INFO:wandb.run_manager:resuming run from id: UnVuOnYxOnc1cDM0eG1oOnByb2plY3QtbmFtZTpzZXlvbmVj\n", - "INFO:wandb.run_manager:upserting run before process can begin, waiting at most 10 seconds\n", - "INFO:wandb.run_manager:saving pip packages\n", - "INFO:wandb.run_manager:initializing streaming files api\n", - "INFO:wandb.run_manager:unblocking file change observer, beginning sync with W&B servers\n" - ], - "name": "stderr" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "50d49a1384cb474dbb51e38375c005e3", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Current iteration', max=215.0, style=ProgressStyle(descri…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/config.yaml\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/media/graph/graph_0_summary_692f3881.graph.json\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/wandb-events.jsonl\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/requirements.txt\n", - "INFO:wandb.run_manager:file/dir created: 
/content/wandb/run-20200621_000615-w5p34xmh/media/graph\n", - "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200621_000615-w5p34xmh/media\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "\rRunning loss: 1.016106" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:114: UserWarning: Seems like `optimizer.step()` has been overridden after learning rate scheduler initialization. Please, make sure to call `optimizer.step()` before `lr_scheduler.step()`. See more details at https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate\n", - " \"https://pytorch.org/docs/stable/optim.html#how-to-adjust-learning-rate\", UserWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.766425" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:231: UserWarning: To get the last learning rate computed by the scheduler, please use `get_last_lr()`.\n", - " warnings.warn(\"To get the last learning rate computed by the scheduler, \"\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.866304" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.331168" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": 
[ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.096342" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.467952" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" + "metadata": { + "id": "UTnzRNbHAwfA", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 82 }, + "outputId": "7bc73b68-98a8-4d88-ceee-74c7d79619a5" + }, + "source": [ + "!wandb login" + ], + "execution_count": 23, + "outputs": [ { "output_type": "stream", "text": [ - "Running loss: 0.324419\n" + "\u001b[34m\u001b[1mwandb\u001b[0m: You can find your API key in your browser here: https://app.wandb.ai/authorize\n", + "\u001b[34m\u001b[1mwandb\u001b[0m: Paste an API key from your profile and hit enter: 3453d85d7ddabfc34500f3fa6ac9ec2ba5683c2f\n", + "\u001b[34m\u001b[1mwandb\u001b[0m: Appending key for api.wandb.ai to your netrc file: /root/.netrc\n", + "\u001b[32mSuccessfully logged in to Weights & Biases!\u001b[0m\n" ], "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "sM6jgEV2eV7u", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000, + "referenced_widgets": [ + "7807561b736c45d49c3ef812c4aad335", + "56300d613550401dbef1e7a106ccfb60", + 
"ad7e3577ea9c460b98509d9dd5983317", + "2ded2ded871c4925b7332e4f0b84b0d0", + "cefa942491b34d04869607504ff25803", + "fd10992442904b90abc0146a28084394", + "48bdadca9c9745ec89e4c1632ea64830", + "e5e25620988048debb93a24b35d974cd", + "279b3e3dc6314303a87a96af4185ddba", + "bfd86388a7ad48189b3a23b2fe7e3360", + "aab774ea207d4dcbbd9337f1e91d3df7", + "c623373ac42a41e68f00f23fdfe50a12", + "f698206397bb425e9f3f398c87fc4e9e", + "e73e875d811e4d6b9736854de6ece77f", + "84a880bc358c4ea5ab1042ce68dc5471", + "fcefafceb5c5452a9fa1ef933c401fee", + "465f65693fbb424e8be75d5a93db43cd", + "fd04c65e25624b5eb92f57a5b5193c9f", + "4249f25837d84083a1b0cff9ef90ec17", + "26047712683443e8b87c124d7f735438", + "b2a663d0d51745e5bf810f2c48eda368", + "9d7fcf3d445249ec966b74f2b91f866a", + "f25bd28c1e934954b5ee214580384d6f", + "a6b01b4bb4ed41caba3190451f52f2b4", + "0d3b6b7b5bc944d99a5557088d8d6c92", + "a3eb9a29c70443a793de600754fdd508", + "742dbb8f102143e69d76ca57420068e3", + "9eef2984c1d347faace0a46de7982a39", + "d74f785a6f814941be68867872b4c93d", + "19b07e0fa3b8429091462844f4d152e7", + "fabc8b6b78704ddb94fb79e90c72bba9", + "3be6b90e331841deb02c05df7b718757", + "4d8412a635904a129289253a75d68d6a", + "2d1d3df881e84076bcd3870dd40a542e", + "45e65053977d4028a23b4e1b57a37c86", + "d8d4f82380074174aa4a3405a396b084", + "91c6d5dfa6b64da6803b076999751b71", + "d06e91d24b324a8ea9552aed0075994f", + "df3e87efb0ba4666adc6e86e40940d80", + "930cc053f1c449d495016847039bf32b" + ] }, + "outputId": "90fb743a-0e38-4e56-ebd7-e5de532c8d95" + }, + "source": [ + "# Create directory to store model weights (change path accordingly to where you want!)\n", + "!cd /content\n", + "!mkdir chemberta_tox21\n", + "\n", + "# Train the model\n", + "model.train_model(train_dataset, output_dir='/content/chemberta_tox21', num_labels=2, use_cuda=True, args={'wandb_project': 'project-name'})\n" + ], + "execution_count": 24, + "outputs": [ { "output_type": "stream", "text": [ - 
"/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:200: UserWarning: Please also save or load the state of the optimzer when saving or loading the scheduler.\n", - " warnings.warn(SAVE_STATE_WARNING, UserWarning)\n" + "/usr/local/lib/python3.6/dist-packages/simpletransformers/classification/classification_model.py:282: UserWarning: Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\n", + " \"Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\"\n", + "INFO:simpletransformers.classification.classification_model: Converting to features started. Cache is not used.\n" ], "name": "stderr" }, @@ -7360,12 +6380,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "bfa661dfa3de41df810e0b5035d52c1e", + "model_id": "7807561b736c45d49c3ef812c4aad335", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Current iteration', max=215.0, style=ProgressStyle(descri…" + "HBox(children=(FloatProgress(value=0.0, max=1714.0), HTML(value='')))" ] }, "metadata": { @@ -7375,95 +6395,7 @@ { "output_type": "stream", "text": [ - "Running loss: 0.078696" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.686080" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-events.jsonl\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.121916" - ], - "name": "stdout" - }, - { - "output_type": 
"stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.513443" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.120766" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.446782" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.229184\n" + "\n" ], "name": "stdout" }, @@ -7471,12 +6403,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "fc42b7f3c9f5486688649c44e5340390", + "model_id": "279b3e3dc6314303a87a96af4185ddba", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, description='Current iteration', max=215.0, style=ProgressStyle(descri…" + "HBox(children=(FloatProgress(value=0.0, description='Epoch', max=3.0, style=ProgressStyle(description_width='i…" ] }, "metadata": { @@ -7484,111 +6416,167 @@ } 
}, { - "output_type": "stream", - "text": [ - "Running loss: 0.671774" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.015629" - ], - "name": "stdout" + "output_type": "display_data", + "data": { + "text/html": [ + "\n", + " Logging results to Weights & Biases (Documentation).
\n", + " Project page: https://app.wandb.ai/seyonec/project-name
\n", + " Run page: https://app.wandb.ai/seyonec/project-name/runs/2thphay5
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } }, { "output_type": "stream", "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n" + "INFO:wandb.run_manager:system metrics and metadata threads started\n", + "INFO:wandb.run_manager:checking resume status, waiting at most 10 seconds\n", + "INFO:wandb.run_manager:resuming run from id: UnVuOnYxOjJ0aHBoYXk1OnByb2plY3QtbmFtZTpzZXlvbmVj\n", + "INFO:wandb.run_manager:upserting run before process can begin, waiting at most 10 seconds\n", + "INFO:wandb.run_manager:saving pip packages\n", + "INFO:wandb.run_manager:initializing streaming files api\n", + "INFO:wandb.run_manager:unblocking file change observer, beginning sync with W&B servers\n" ], "name": "stderr" }, { - "output_type": "stream", - "text": [ - "Running loss: 0.053129" - ], - "name": "stdout" + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "465f65693fbb424e8be75d5a93db43cd", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Running Epoch 0 of 3', max=215.0, style=ProgressStyle(des…" + ] + }, + "metadata": { + "tags": [] + } }, { "output_type": "stream", "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/config.yaml\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/requirements.txt\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/media/graph/graph_0_summary_e7e4ff9b.graph.json\n", + "INFO:wandb.run_manager:file/dir created: 
/content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/wandb-events.jsonl\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/media\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200807_235818-2thphay5/media/graph\n", + "/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:231: UserWarning: To get the last learning rate computed by the scheduler, please use `get_last_lr()`.\n", + " warnings.warn(\"To get the last learning rate computed by the scheduler, \"\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "Running loss: 0.201588" + "\n" ], "name": "stdout" }, { "output_type": "stream", "text": [ - "INFO:wandb.run_manager:file/dir modified: 
/content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n", + "/usr/local/lib/python3.6/dist-packages/torch/optim/lr_scheduler.py:200: UserWarning: Please also save or load the state of the optimzer when saving or loading the scheduler.\n", + " warnings.warn(SAVE_STATE_WARNING, UserWarning)\n" ], "name": "stderr" }, { - "output_type": "stream", - "text": [ - "Running loss: 0.021707" - ], - "name": "stdout" + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "0d3b6b7b5bc944d99a5557088d8d6c92", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Running Epoch 1 of 3', max=215.0, style=ProgressStyle(des…" + ] + }, + "metadata": { + "tags": [] + } }, { "output_type": "stream", "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-events.jsonl\n" + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-events.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: 
/content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "Running loss: 0.024193" + "\n" ], "name": "stdout" }, { - "output_type": "stream", - "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-history.jsonl\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-summary.json\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "Running loss: 0.031435" - ], - "name": "stdout" + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "4d8412a635904a129289253a75d68d6a", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, description='Running Epoch 2 of 3', max=215.0, style=ProgressStyle(des…" + ] + }, + "metadata": { + "tags": [] + } }, { "output_type": "stream", "text": [ - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n" + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n", + 
"INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-history.jsonl\n" ], "name": "stderr" }, { "output_type": "stream", "text": [ - "Running loss: 0.002347\n", + "\n", "\n" ], "name": "stdout" @@ -7596,11 +6584,13 @@ { "output_type": "stream", "text": [ + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-events.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n", "INFO:simpletransformers.classification.classification_model: Training of roberta model complete. Saved to /content/chemberta_tox21.\n", "INFO:wandb.run_manager:shutting down system stats and metadata service\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-events.jsonl\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-events.jsonl\n", "INFO:wandb.run_manager:stopping streaming files and file change observer\n", - "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200621_000615-w5p34xmh/wandb-metadata.json\n" + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200807_235818-2thphay5/wandb-metadata.json\n" ], "name": "stderr" } @@ -7623,9 +6613,9 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 105 + "height": 334 }, - "outputId": "d46ba19c-77f3-4909-9393-f2d9d41f66be" + "outputId": "0a3b47e2-6ffd-4e37-e253-9e2be56b0b2a" }, "source": [ "!pip install -U scikit-learn" @@ -7635,13 +6625,37 @@ { "output_type": "stream", "text": [ - "Requirement already up-to-date: scikit-learn in /usr/local/lib/python3.7/site-packages (0.23.1)\n", - "Requirement already satisfied, skipping 
upgrade: scipy>=0.19.1 in /usr/local/lib/python3.7/site-packages (from scikit-learn) (1.4.1)\n", - "Requirement already satisfied, skipping upgrade: numpy>=1.13.3 in /usr/local/lib/python3.7/site-packages (from scikit-learn) (1.18.5)\n", - "Requirement already satisfied, skipping upgrade: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/site-packages (from scikit-learn) (2.1.0)\n", - "Requirement already satisfied, skipping upgrade: joblib>=0.11 in /usr/local/lib/python3.7/site-packages (from scikit-learn) (0.15.1)\n" + "Collecting scikit-learn\n", + " Downloading scikit_learn-0.23.2-cp37-cp37m-manylinux1_x86_64.whl (6.8 MB)\n", + "\u001b[K |████████████████████████████████| 6.8 MB 4.5 MB/s \n", + "\u001b[?25hCollecting joblib>=0.11\n", + " Downloading joblib-0.16.0-py3-none-any.whl (300 kB)\n", + "\u001b[K |████████████████████████████████| 300 kB 44.1 MB/s \n", + "\u001b[?25hCollecting scipy>=0.19.1\n", + " Downloading scipy-1.5.2-cp37-cp37m-manylinux1_x86_64.whl (25.9 MB)\n", + "\u001b[K |████████████████████████████████| 25.9 MB 7.2 kB/s \n", + "\u001b[?25hCollecting threadpoolctl>=2.0.0\n", + " Downloading threadpoolctl-2.1.0-py3-none-any.whl (12 kB)\n", + "Requirement already satisfied, skipping upgrade: numpy>=1.13.3 in /usr/local/lib/python3.7/site-packages (from scikit-learn) (1.19.1)\n", + "Installing collected packages: joblib, scipy, threadpoolctl, scikit-learn\n", + "Successfully installed joblib-0.16.0 scikit-learn-0.23.2 scipy-1.5.2 threadpoolctl-2.1.0\n" ], "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "joblib" + ] + } + } + }, + "metadata": { + "tags": [] + } } ] }, @@ -7676,27 +6690,27 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 187, + "height": 596, "referenced_widgets": [ - "a669df427e2149caa9ee0edec40dc3a4", - "0e519978fc6c476d936aac1fe0abf4bc", - "ed3005e49f84416a82794c3dfc31cfcc", - 
"dade9df974f245b0b54c508f168f936b", - "f00dfb7fd4854a34b4619af817f62c05", - "a54cfb4828f14b06a35a3e6d363cf7c2", - "67f19078963043f8b728d5efd232929a", - "57c6e4e82402447398a4868fa8c873a5", - "804b202d17654dfe96a61d35f6f69d78", - "0e67f75ca3b34c718f903182760c3d25", - "cfc1c56037cf439d99ea7ced4cd606d5", - "902809efcf36405d87a89aa7d01d76f4", - "57a01101a9fb43d9823e216af0be1172", - "c36b55e07c06403384d805e0d3622f1f", - "5d4e138304ae4257a1695c676cc365fc", - "ffbb31034601480f87cf76ca6f51e49f" + "825b4279ccc44474a7623ccd1e7e7f69", + "8eda205d9f7c4e8081f924bd740ec742", + "7c9c0f9b8f5d490f8cd7b77e6ead14ea", + "a847855e7d35468b8fd0cbce5775d271", + "e71cc479dbe74ba8a8bfd11ffcec70bb", + "e91d33e27c81443c9ec8a8b7768bda36", + "712d56d1289247ba92d1d195e53ad578", + "900af4baa3604152a2294b979a73cfc5", + "883c0f6063364ddfaa1bf0c00fd62a61", + "526a14329c7540fc8abfa2105a7f8ef5", + "3ec543b9508f4f8d85d4179ec14f97fa", + "8472dd2d50474e4f81062aaf7366aaa2", + "f99e5b80c68048e6b92a9139fc41773f", + "5c5192e6e50c4f439204c735bccd40d3", + "05e42d0e4fd34968b8327bfb1e6b00f9", + "5c5920fb6c964332b7e380011cd23ec8" ] }, - "outputId": "b4760bf6-5ec4-40a2-fa6f-762dbd19a6ad" + "outputId": "ccd29c6f-aded-4c5c-ab28-9a5cd8a7d995" }, "source": [ "import sklearn\n", @@ -7707,7 +6721,7 @@ { "output_type": "stream", "text": [ - "/usr/local/lib/python3.7/site-packages/simpletransformers/classification/classification_model.py:690: UserWarning: Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\n", + "/usr/local/lib/python3.6/dist-packages/simpletransformers/classification/classification_model.py:754: UserWarning: Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\n", " \"Dataframe headers not specified. Falling back to using column 0 as text and column 1 as labels.\"\n", "INFO:simpletransformers.classification.classification_model: Converting to features started. 
Cache is not used.\n" ], @@ -7717,7 +6731,7 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "a669df427e2149caa9ee0edec40dc3a4", + "model_id": "825b4279ccc44474a7623ccd1e7e7f69", "version_minor": 0, "version_major": 2 }, @@ -7740,12 +6754,12 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "804b202d17654dfe96a61d35f6f69d78", + "model_id": "883c0f6063364ddfaa1bf0c00fd62a61", "version_minor": 0, "version_major": 2 }, "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=54.0), HTML(value='')))" + "HBox(children=(FloatProgress(value=0.0, description='Running Evaluation', max=54.0, style=ProgressStyle(descri…" ] }, "metadata": { @@ -7755,16 +6769,55 @@ { "output_type": "stream", "text": [ - "INFO:simpletransformers.classification.classification_model:{'mcc': 0.7851764343873741, 'tp': 65, 'tn': 334, 'fp': 5, 'fn': 24, 'acc': 0.9322429906542056, 'eval_loss': 0.19206710794457682}\n" + "\n" ], - "name": "stderr" + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "text/html": [ + "\n", + " Logging results to Weights & Biases (Documentation).
\n", + " Project page: https://app.wandb.ai/seyonec/project-name
\n", + " Run page: https://app.wandb.ai/seyonec/project-name/runs/o75nt5fg
\n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } }, { "output_type": "stream", "text": [ - "\n" + "INFO:wandb.run_manager:system metrics and metadata threads started\n", + "INFO:wandb.run_manager:checking resume status, waiting at most 10 seconds\n", + "INFO:wandb.run_manager:resuming run from id: UnVuOnYxOm83NW50NWZnOnByb2plY3QtbmFtZTpzZXlvbmVj\n", + "INFO:wandb.run_manager:upserting run before process can begin, waiting at most 10 seconds\n", + "INFO:wandb.run_manager:saving pip packages\n", + "INFO:wandb.run_manager:initializing streaming files api\n", + "INFO:wandb.run_manager:unblocking file change observer, beginning sync with W&B servers\n", + "INFO:simpletransformers.classification.classification_model:{'mcc': 0.7457296605386272, 'tp': 61, 'tn': 333, 'fp': 6, 'fn': 28, 'acc': 0.9205607476635514, 'eval_loss': 0.22061711011661422}\n", + "INFO:wandb.run_manager:shutting down system stats and metadata service\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200808_000034-o75nt5fg/config.yaml\n", + "INFO:wandb.run_manager:stopping streaming files and file change observer\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/wandb-events.jsonl\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/requirements.txt\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/media/table/roc_0_19033495.table.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/wandb-summary.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/wandb-metadata.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/media/table/pr_1_f2ee02b8.table.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/wandb-history.jsonl\n", + "INFO:wandb.run_manager:file/dir 
created: /content/wandb/run-20200808_000034-o75nt5fg/media/table/confusion_matrix_2_535a7138.table.json\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/media\n", + "INFO:wandb.run_manager:file/dir created: /content/wandb/run-20200808_000034-o75nt5fg/media/table\n", + "INFO:wandb.run_manager:file/dir modified: /content/wandb/run-20200808_000034-o75nt5fg/wandb-metadata.json\n" ], - "name": "stdout" + "name": "stderr" } ] }, @@ -7787,27 +6840,27 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 134, + "height": 130, "referenced_widgets": [ - "74a6932964bc4ef6b37c1ae144d79e87", - "a2bf6c0cb9b94f5fbaa73253bbb65072", - "42f84c7b1df44a46a246558859f7474f", - "ee13fe2a66764746bd33f9b0927dd8b9", - "3b411759bd0a4886bbea0e959f57b849", - "febbff92575f4bcb9426c89f2b0ab2f9", - "27a442ed10ba4f938f57f8473bbb9e1d", - "7945f511bd9a4626bb79d0e2fae49cee", - "c230feee9b8a4d9e98a3344118988bb8", - "6ac527d01f8045b5a3441e7b88d02769", - "34b780f478994748afefefed7482aa42", - "b51ffede8497455ca6f8a330e7543496", - "47f1dfb0492c4033b52ed81923349840", - "736e39657a204c2abbcfed7f76730b1e", - "f19328ab2db9490f88c5c893bc07cfbf", - "f0620f9a62684f5ba8a9b9a61a7b8751" + "7e5cba5c2747441f8d03d888dc9b933b", + "e7942a62f62c413d927abfcb081d685a", + "65cdde6d617142bea6bb287ad35d8861", + "7a955bc78f0749199bd82fae712c9f75", + "44d74c51151a4311a37fba97c6175249", + "c354a0c446e648f6af555bbad692f79c", + "d69caa93921e4b2897a07ce2bf0cce5a", + "6571a194af084dd7b6edb7ba3716c0cf", + "b85c5d27c8e64499b0b38b3bbf836afa", + "7429d08b7f14425393c08d9521918655", + "e27d53e7ef84443d8e6339de513f9e0b", + "0ff672cb082f4c4996cac50c632c1a8e", + "1227fa30365b44fab9b9dfabfb73e851", + "84788d321e9942e883ebb51375679bbd", + "f2924e39f1054f41a16f1546d2b3db16", + "ceb6ea7c05e244d7b6c0e335ea8d71c2" ] }, - "outputId": "5259cea0-27d0-4094-9e60-693b7fce2061" + "outputId": "c23b5f5e-a5a1-439b-c022-7201a6f30216" }, "source": [ "# Lets input a molecule with a 
SR-p53 value of 0\n", @@ -7826,7 +6879,7 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "74a6932964bc4ef6b37c1ae144d79e87", + "model_id": "7e5cba5c2747441f8d03d888dc9b933b", "version_minor": 0, "version_major": 2 }, @@ -7849,7 +6902,7 @@ "output_type": "display_data", "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "c230feee9b8a4d9e98a3344118988bb8", + "model_id": "b85c5d27c8e64499b0b38b3bbf836afa", "version_minor": 0, "version_major": 2 }, @@ -7877,9 +6930,9 @@ "colab_type": "code", "colab": { "base_uri": "https://localhost:8080/", - "height": 52 + "height": 49 }, - "outputId": "0425e12f-ff05-4f56-bec2-d1fcb9860f62" + "outputId": "1581e033-6d42-46e2-f240-a3c085336d93" }, "source": [ "print(predictions)\n", @@ -7891,7 +6944,7 @@ "output_type": "stream", "text": [ "[0]\n", - "[[ 3.0878906 -2.9765625]]\n" + "[[ 3.3377423 -3.2863383]]\n" ], "name": "stdout" } diff --git a/examples/tutorials/24_Introduction_to_Model_Interpretability.ipynb b/examples/tutorials/24_Introduction_to_Model_Interpretability.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..86a8ac2678c7c02a13e6c103495262a58a781019 --- /dev/null +++ b/examples/tutorials/24_Introduction_to_Model_Interpretability.ipynb @@ -0,0 +1,37675 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "cB0MgPvpkP1g" + }, + "source": [ + "# Tutorial Part 24: Introduction to Model Interpretability" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "6NGHK1xmkP1i" + }, + "source": [ + "In the previous sections of this tutorial series, you have learned how to train models with DeepChem on a variety of applications. But we have not yet really studied the question of model explainability.\n", + "\n", + "Often times when modeling we are asked the question -- How does the model work? Why should we trust this model? 
My response as a data scientist is usually, \"because we have rigorously proved model performance on a holdout testset with splits that are realistic to the real world\". Oftentimes that is not enough to convince domain experts.\n", + "\n", + "[LIME](https://homes.cs.washington.edu/~marcotcr/blog/lime/) is a tool which can help with this problem. It uses local perturbations of feature space to determine feature importance. In this tutorial, you'll learn how to use LIME alongside DeepChem to interpret what it is our models are learning. \n", + "\n", + "![Selection_110.png](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/lime_dog.png?raw=1)\n", + "\n", + "So if this tool can work in human understandable ways for images can it work on molecules? In this tutorial you will learn how to use LIME for model interpretability for any of our fixed-length featurization models.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/24_Introduction_to_Model_Interpretability.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 323 + }, + "colab_type": "code", + "id": "xdgY3YQLkP1m", + "outputId": "19d8cbca-1cdb-48ba-d951-7b365506fc6f" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 + }, + "colab_type": "code", + "id": "TBPgOmcwArax", + "outputId": "0de4ff47-9ae3-45f7-db2d-f79f9b22c337" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "1zuqJlT-kP1p" + }, + "source": [ + "## Making of the Model\n", + "\n", + "Let's begin by loading the Tox21 dataset with ECFP featurization. Recall how this featurization works. It identifies small fragments within each molecule, then sets elements of the output vector to 1 to indicate which fragments are present in a particular molecule." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + }, + "colab_type": "code", + "id": "57IdQLKOkP1q", + "outputId": "f07c2d17-05bc-4d45-eabc-8595f8cb5935" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "n_features = 1024\n", + "tasks, datasets, transformers = dc.molnet.load_tox21(featurization='ecfp')\n", + "train_dataset, valid_dataset, test_dataset = datasets" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "bOA0VkCskP1u" + }, + "source": [ + "Let's now train a model to work on this dataset. 
As in previous tutorials, we'll use a MultitaskClassifier, which is a simple stack of dense layers." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "u0ZLMRiHkP1v" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.1333492088317871" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "n_tasks = len(tasks)\n", + "n_features = train_dataset.get_data_shape()[0]\n", + "model = dc.models.MultitaskClassifier(n_tasks, n_features)\n", + "model.fit(train_dataset, nb_epoch=50)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "IJc49NbMkP11" + }, + "source": [ + "Let's evaluate this model on the training and validation sets to get some basic understanding of its accuracy. We'll use the ROC-AUC as our metric of choice." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 510 + }, + "colab_type": "code", + "id": "5TWg2RelkP12", + "outputId": "a931d968-43b4-41fb-97e7-438db8ad2e38" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Train scores\n", + "{'mean-roc_auc_score': 0.9911206354520975}\n", + "Validation scores\n", + "{'mean-roc_auc_score': 0.699686047497269}\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)\n", + "print(\"Train scores\")\n", + "print(model.evaluate(train_dataset, [metric], transformers))\n", + "print(\"Validation scores\")\n", + "print(model.evaluate(valid_dataset, [metric], transformers))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "xMBwqFmDkP15" + }, + "source": [ + "## Using LIME\n", + "\n", + "The model seems to do a reasonable job of predicting which molecules are toxic, but how does it work? 
When it predicts that a particular molecule is toxic or non-toxic, what aspects of the molecule led to that prediction? This is the essence of *explainability*: learning why an input led to a certain prediction.\n", + "\n", + "LIME is a tool for addressing this problem. The name is short for \"Local Interpretable Model-Agnostic Explanations\". It can work on any problem with a fixed size input vector. It works by computing probability distributions for the individual features and the covariance between the features. This allows it to construct a local linear model in the neighborhood of a sample, describing what local perturbations of the input would have the biggest effect on the output. That is, what fragments added to or removed from the molecule would be most likely to change the prediction between toxic and non-toxic?\n", + "\n", + "First we need to install lime. Luckily, lime is conveniently available on `pip`, so you can install it from within this Jupyter notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 496 + }, + "colab_type": "code", + "id": "WV50QNwSkP15", + "outputId": "f6478c4a-2906-492f-b6d1-125a5d3ca8ab" + }, + "outputs": [], + "source": [ + "!pip install lime" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "E9ksPtOskP18" + }, + "source": [ + "Now that we have lime installed, we want to create an `Explainer` object for `lime`. This object will take in the training dataset and names for the features. We're using circular fingerprints as our features. We don't have natural names for our features, so we just number them numerically. On the other hand, we do have natural names for our labels. Recall that Tox21 is for toxicity assays; so let's call 0 as 'not toxic' and 1 as 'toxic'." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "0yO0QUHlkP18" + }, + "outputs": [], + "source": [ + "from lime import lime_tabular\n", + "feature_names = [\"fp_%s\" % x for x in range(1024)]\n", + "explainer = lime_tabular.LimeTabularExplainer(train_dataset.X, \n", + " feature_names=feature_names, \n", + " categorical_features=feature_names,\n", + " class_names=['not toxic', 'toxic'], \n", + " discretize_continuous=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "kAW-JA6jkP1_" + }, + "source": [ + "We are going to attempt to explain why the model predicts a molecule to be toxic for NR-AR.\n", + "The specific assay details can be found [here](https://pubchem.ncbi.nlm.nih.gov/bioassay/743040)." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "4Uu16LYakP2A" + }, + "outputs": [], + "source": [ + "# We need a function which takes a 2d numpy array (samples, features) and returns predictions (samples,)\n", + "def eval_model(my_model):\n", + " def eval_closure(x):\n", + " ds = dc.data.NumpyDataset(x, n_tasks=12)\n", + " # The 0th task is NR-AR\n", + " predictions = my_model.predict(ds)[:,0]\n", + " return predictions\n", + " return eval_closure\n", + "\n", + "model_fn = eval_model(model)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "WIIfzqzQkP2C" + }, + "source": [ + "Let's now attempt to use this evaluation function on a specific molecule. Let's pick the first molecule in the test set that is correctly predicted to be toxic (that is, the molecule is toxic, and the model correctly predicts it to be toxic)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 184 + }, + "colab_type": "code", + "id": "VGPZDfmMkP2D", + "outputId": "07894c04-793a-4f3e-90b3-f1e8e435bd69" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcIAAACWCAIAAADCEh9HAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO2dd1yT1/fHTyBAhCJCTcIQq4iKuMGFuIuAiAjSKO5Zsda6+nV9f7bU+q2itRV3UYuiVlRwMJyoVUFUQAUUFBAc7AQEQUgYyf39cW2KLAMZT8Z9v/zj8cnz3HMS4JP7nHvOuTSEEBAIBAKhrWhR7QCBQCCoNkRGCQQCQSqIjBIIBIJUEBklEAgEqSAySiAQCFJBZJRAIBCkgsgogUAgSAWRUQKBQJAKjZHRixfhq6/A2hpGjID//AcqKqh2iEAgqAmaIaMHDsDMmTBqFJw9C35+kJwMQ4YAj0e1WwQCQR2gqX8xaHExfPEFhIaCm9uHMyIRDBwIrq6wbRulnhEIBHVAA2ajly8Dm/2vhgKAlhYsXgzXr1PnE4FAUB80QEZfv4auXRueNDeH/HwqvCEQCOqGBsjo55/Ds2fQIHbx4AFYWFDkEIFAUCs0QEbt7KCgAB4+/OhkdDQ4OlLkEIFAUCs0YIkJAMaPB5EITp+Gjh0BIdi3DzZuhNRUMDaGa9fAxgZsbKh2kUAgqCoaMBsFgLAwYDLBzAy6dwdjYzh4EO7dAwsL+PFH8PKCoCCq/SMQCCqMZsxGMY8eQV4eDBsGTOaHM7dvw5gxYG0NmZmUekYgEFQYzZiNAsDWrWBvD3///a+GAsCIEdCxI7x4Ac+fU+cZgUBQbTRGRkePBgA4f/6jk9raH/JJIyIocIlAIKgFGiOjw4aBqSm8egVPn350fvJkAIDwcEqcIhAIaoDGyKiWFkycCNBIMV1cgMGA+/ehsJASvwgEgqqjMTIKAB4eAI2e3w0MYNw4EIng4kVKnCIQCKqOJsno+PFgYAAJCZCb+9F5/FxPwqMEAqFNaJKMtmsHTk6AUMOJ5+TJoKUF0dFQWUmRZwQCQYXRJBmFZp7r2WwYPBj4fNLziUAgtAENk1F3d9DWhhs3Gna/b1JeCQQCQQI0TEZZLBg2DKqr4dq1j87j8GhUFAiFlPhFIBBUFw2TUWgmUbR3b+jeHbhcuH+fEqcIBILqonky6uUFAHDxItTVfXR+0iQA8lxPIBBajebJqLU19OwJb9/C3bv1T1d6eYWPHj2XzEYJBEIr0TwZhaaf6xkODotSU4/duZOenk6NVwQCQTXRRBmtmTw5bdSorR93ddLW1nZzcwOAcFJfTyAQWoMmyih92LAvMzL+e/ny04/blHh4eABABAmPEgiE1qCJMqqlpYUnng0U08XFhcFg3Lt3r5C0KSEQCBKjiTIKAJMnT4ZGz++fffbZuHHjRCLRpUuXKPKLQCCoHhoqo+PHjzcwMEhISMj9uE1Jk/JKICgJdXV1BQUFKSkpV65cOX78eHFxMdUeEQA0ay+mj/H09AwPD//jjz98fX3FJ4uKiszNzfX09Hg8noGBAYXuETSQ6urqkpKS0tLSgoKC/Pz8xgdcLldYr9BuypQpo0ePXr58OYU+E0CTZTQoKGjhwoVubm4XP274NHTo0Pj4+AsXLuCZKYEgKyorKwsLC4uKing8nvigoKCAy+VyudzCwsJ37961PIKWlhaLxWIymaampkKh8ObNmwwGIzExsXfv3op5C4QmoVPtAGW4u7tra2vfuHGjoqLC0NBQfH7y5Mnx8fERERFERglt
ICYmBkskl8stKioqLCwUi2ZVVVXL9+rq6jKZTDabbWpqirWSzWazWCw2m40PmEymlta/gThfX9+DBw9OmzYtMTGRwWDI+Z0RmkVzZ6MAMGLEiLt374aFhXl7e4tPpqam9unTh8Vi5efna2trU+geQbUQCoV79+798ccfy8vLm7yAwWAYGxsbGxubm5ubmZk1PjA1Na2vkp+ksrLS3t4+PT19zZo127dvl9H7ILQajZbR7du3r1u3bs6cOcHBwfXP9+jRIzMzMzY21tHRkSrfCCrH6dOnfXx8TExMxowZY2ZmxmQyWSwWPsATzM8++0zmRh8+fOjg4CAUCqOjo8eNGyfz8QkSgTSYjIwMADAxMamtra1/ftWqVQCwdu1aqhwjqCJ2dnYAcOjQIQXb3bRpEwB06tSppKREwaYJGA1NeMJ07969Z8+eb9++vftxmxIcFT3fYFN7AqF5rl279ujRIzabPWvWLAWb3rhx46hRo3Jzc+vnnBAUiUbLKDSTKOro6Pj5558LBIKSkhKK/CKoGNu2bQOAVatWKX6pR0tL6+jRo+3btw8LCzt58qSCrRNAw2OjAHD37t0RI0ZYWVllZWVlZ2e/fv3az8/P29t7+PDhgwcPpto7gmqQlJQ0cODA9u3bv379ukOHDpT4gBP4jIyMkpOTv/jiC0p80Fg0XUZFIpGZmVlpaenTp09nzpyZlJRUV1cHAFpaWg4ODhwOZ9q0aaamplS7qarU1dXxeDwul4uzI3k8Xn5+Po/H6969+4IFCywsLKh2UDZMnTo1NDR07dq1eE5KrRsjR478+++/SZKJItF0GQWABw8e9OzZ84cffti7d6+lpaW/v/+VK1fOnTtXWVkJANra2sOGDeNwODNmzGAymVQ7q1y0tupGjLm5eVlZWXx8vBrkjWdnZ/fo0YNOp2dnZ5ubm1PoSXFxcb9+/QoKCrZt27Z27VoKPdE0iIwCAERERHh6etLp9Nu3bzs4OAAAn8+/fv368ePHw8PDa2pqAEBbW3vs2LGzZ8/29PRs37491S4rghaqbvDBJ6tutLW1mUymOJOcyWSamZmxWKyzZ89GRkb26dMnISFB1fPGlyxZEhgYuGjRokOHDlHtC0RHR7u4uNDp9Lt375KolMIgMgpZWVn29vbv3r3bvXv3d9991+DVsrKyiIiI0NDQq1ev1tbWAgCDwXBycuJwON7e3upUd3/9+vXQ0ND65TfSVN3gMw2qbsRUVlba2dllZGRQ/iAsJVwut0uXLgKBIDU1tVevXlS7AwCwfPnyPXv29OrV6+HDh+3ataPaHY1A02W0urra0dHx4cOHkyZNCg8Pp9FozV3J5XLDwsJOnTp19+5dkUgEALMHDDjWpw9MmwbOzqCrq0CvZc/mzZtv3bp18+bN+idlXnVTH/XIG/+///u/LVu2eHl5nTt3jmpfPiAQCIYMGfLkyZPly5fv2rWLanc0AyqTVpWAZcuWAcAXX3wheepybm5uQECAo6PjvTFjEAACQB06oNmzUUQEqqmRq7diamtrKysrZTXau3fvjIyMAGDdunXnz5+PjY3NyMioqKiQ1fjN8dNPP4Eq542Xl5cbGxsDQFxcHNW+fMSjR490dXVpNNrFixep9kUj0GgZDQ0NBQAdHZ179+614XZRdjbasgX17/9BTAEQi4WWLkW3byOhUObeYoRC4ZkzZ3r06LFu3TpZjbl161YA+PLLL2U1oITU1tbiSPRXX32lYNONuXr16s6dO1t1y44dOwBg9OjR8vFIKvz9/QGAxWIVFRVR7Yv6o7ky+uLFCzwF2717t7RjZWcjf3/Uq9e/emphgZYvRzExSCSShbMIISQUCk+fPi0OwA0cOFAoC7EWCARmZmYAcO3aNelHay1ZWVm4vdbJkyfbcDufz/fy8iooKBCfiYmJWbZsWasGiY+P//LLL/EX6osXLyS8q6ampnPnzgBw6dKlVplTDEKhcOzYsQDg6elJtS/qj4bKqEAgsLe3BwAP
Dw+R7JQOPX2K/PxQjx7/6qml5Qc9xURFIW9v1K0bcnRE33+PysslHDg6OhqXbOMQRGBgYG1tbXV1tfQuHzx4EAD69+8vy8+hNRw+fBgAjIyMXr161dp7KyoqAKC+9oWEhNjY2Eh4+/PnzzkcDg6IGxsb+/v7V1VVSXhvUFAQAPTt25eqz+2T5OTk4JjDn3/+SbUvao6Gyui3334LAN26dSsrK5OLgQcP0OrVqFOnf/X0u+/Q/v3IyAjt2oWSktC1a8jJCdnYIC73k4Pl5ubq6ekBQOfOnQMDA2tqapKTkzkcjo+Pj5RuCoVCPL09deqUlENJA4fDAYCRI0fW1dW16kZpZHTFihV0Oh0A9PX1169f//btW8ntikQinPF6/PjxVjmsYE6cOAEABgYGGRkZVPuizmiijOKQqJ6eXmJiotyNJSai5cuRmRk6ehTp66P6IX+hEPXrhyTrI7Vp06Y9e/YIBILk5OTJkyfjCZSRkVFxcbE03oWFhQFA165dG/S4UjA8Hg8HFrZv396qG7GM3rt3r+AfDhw4IKGMbtiwgU6nL168OC8vD595//59QECAJHP8CxcuAIClpWWNohYV28z06dMBYPjw4a39iiJIjsbJqDgkumfPHsVZratDR46grl0bnt+7F9nZSThGVlbW1KlTcYKRgYHB2rVri4uLpXyiHDZsGADs27dPmkFkwrVr12g0mp6e3uPHjyW/C8uooaGh0T/o6+tLKKNlZWXp6en4uKamJjAwEEv53r17P3kvbkS7a9cuyV2lirKyMlxi//PPP1Pti9qiWTIqDolSsDS8eTMaN67hyXPnkKmphAOkp6fT6XRdXd3FixfjyvR169aNHDmyzUqKs0RZLJbkAUG5gpPPbG1tJfdHytgoQkgkEoWEhFhbW+O485AhQ+7cudPyLTExMQBgYmKigJwwmXDnzh1tbW06nX7//n2qfVFPNEtGly5dKt+QaAvs34/MzBou3K9bh+ztJR8jODg4Nze3pKRkw4YNuJU6jUaLj49vm0eurq4AsHnz5rbdLnP4fH7fvn0BYMWKFZJc//jx47KyMill9M2bNzjubGNjc/bsWUm+k9zd3QHAz89PciuUs2bNGvybXy7xqqZUcLlIIFCEIeVAg2T0zJkziguJNub+fQSAEhI+Omlnh5Yvb9Uwf/31F67op9FoHh4erXoErk9ycjKNRjMwMJAyuipbJM8b9/T0BIDPP/8cAA4fPiz454+2tTKKEPrll18OHTokjg6npqampaU1d3FaWpqWlpa+vj5XgrVB5UEgEPTv3x8AfH19ZTDc27fo6VMUE4POnEEBAcjPDy1ejNzdkaMjsrJCOjoIAP39twwMqQiaIqPikKgkkS954eSExo1DPB5CCIlEaM8eZGSEcnNbNUZiYiKNRnNycoqPj6+pqQkODh45cmQbnspnzJgBAKtWrWrtjfIG1wKYm5u3oO91dXUNGsEZGRnNmDEjLCwsODi4tTIqJicnZ/HixXQ6fcyYMc1dM3fuXABobWqqMpCamopL7MPDw1u4TCAQvHnzJj4+PjIyMigo6JdfflmxYsWMGTPiFi1CvXsjFuvf5JMW/pmYoKioDyO2NclPhdAIGRUIBDjpkuJqmbIyNG0aotORtTUyMkJ9+6LmZz0tkJaWVldXFxwc3K1bNywiR44cadUIL1++pNPpOjo6r1+/boMDckWSvPHCwsLmipt/+OGHttk9duwY7jWlo6OzdOnSJpfgc3JydHV1tbW1Jc/SVyp27twJAEwms6CgoKioKDAwcNOmTcuWLfvqq69GjhxpY2ODpxpNEjpq1L8qaWyMbG2RoyPicNDy5cjPDwUGoogIFBODsrJQ/VSHtib5qRYaIaPffPMNZSHRxlRUoFGjkIcH4vPbPMbMmTPxL7eNjU1ISEhry5nwYs6cOXPa7IBcEeeNBwUFNXlBcnJyc3/tbm5ubTOanp6uq6vL4XAyMzObuwbvdThjxoy2maAckUg0YcIE
AHBxcUlMTGzyA9TV1bWwsLCzs3Nzc5s7d+66det+//33EydOpP39N0pJQYWFrSh05vGkSfJTIdRfRikOiTZJu3YIAEmxPn79+nVxLVNMTIybm5vkpdMlJSUGBgY0Gu3JkydtdkDeHD9+HJrPG7927VqTEqClpfXgwYM2G839J8AiFAqTk5MbvPr27Vtct/ro0aM2m6CcvLw8HFDevn37woULN27cuGvXrlOnTt26dSstLU3GgfJjx6RM8lMV1FxGMzMz8XOKMqRG/guOwUuXuV1bW/vgwQMXFxesIJJ3KvHz8wMAd3d3aawrgBbyxrHINkYm6yfR0dEDBw40MDCoX6qPENq8eTMAuLq6Sm+CWnBPP319/WfPnsnXktRJfqqCOsuoOCTK4XCo9uVjaDQEIGXXEpFIhNOD2rdv7+fnJ2G8orKysmPHjgAQIy7zV1ZKS0tx74/GeeO4tVIDjI2NeXj5TjrwY2/nzp3rt7/j8/l4S66bN29Kb4Jy5s2bBwDOzs7yNSOLJD+VQJ1lVLlComLq6hAA0taWfqTIyMgNGzaUlJTk5uYuXbo0JSXlk7fgPr5Dhw6V3roCaC5vHGdBNmD//v0yMZqUlPT7778LPk573LdvHwAMHjxYJiYoJzMzk8FgLFmyRNq+KjweSk1Ft26hkBC0axfauBEtXIgmTULDhiFHR1kl+Sk/aiujp0+fxiHRhw8fUu3LxwgECADp6clksPLy8pUrV+Il5ilTprR8cW1tLa4LvHDhgkysK4D//Oc/+LuwfskQzjqqz8CBA2VeM87j8fDaXV1dHS5zCgsLk60JqtiwYcMnf2Hq6uoKCgpSUlKuXr16/Pjx3377reqHH9DcuWjCBDRwIDI3/xCbau5fu3YIySbJT/lRTxnNzMzEOeqymqHIkooKBIAMDGQyWE1NjZWVFY1Gc3d3T0pKavliHFLs2bOnTBqVKgZx3viSJUvEJ3H9lRgajXb79m3Z2sWNUE+cOIEQOnXqFJZy9ejuUV5e3qFDBwC4dOnSw4cPL168ePToUX9//1WrVs2cOdPJyalPnz5sNrvx9jCV9TvqijOfbGzQyJGIw0HLlqGff0aBgSg8HMXFoawshGSW5KfkqKGMKm9IFFNaigCQkZGsxrtx40ZycrJAINizZ090dHQLVw4cOBCazyJSWsR54xEREfgMfiNi5s6dKw+7o0eP/uabbxBCuA/DwYMH5WFF8fz6668AMGbMmMYbODb4cmKxWH369Bk3btzMmTNXrlxZuHs3OnIERUWhhASUk9OKcs+KCpSYqH7pomLUcEu7b7755o8//rC2tk5MTGwhnZgyiouByYTPP4fiYlkNmZycPGnSpJycHDs7O1zm1Piay5cvu7m5WVhYZGdn66raBnwBAQGrVq1iMpkpKSmmpqYWFhb5+fn4JUNDw/T0dNycSbZUVVXp6+tHR0c7Ozuz2exXr16p+l7QAFBbW9utW7ecnJzLly+/ePHi8OHD5ubmTCaTxWKZmZk1OGhQKkZoFqp1XMYob0hUTEEBAkBstgyH5PP55ubmffr0OXPmTHOLBqNHjwaAHTt2yNCuwhDnjbu6ugqFwvpfA7///rucjJaVlT1//hzvFrV161Y5WVEwf/75Jyh3035VRK1moy9evLC3ty8vL9+/fz9epldGcnPB0hIsLCA3V4ajZmVlWVlZ3bx5s7y83MvLq8GrCQkJQ4YMad++/Zs3b5Rxhi4B+fn5/fr1Kykp2bFjB153AgBbW9ukpCQdHZ02DMjn8wsKCvLz80tLSxsf5ObmlpeXA0DHjh3fv3//6tUrNpsty/dDBQihPn36pKWlnThxQlwIR5AeOtUOyJL9+/eXl5dPnTpVeTUUAOrqAADa9JffAt26dTt48KCvr6+ZmZmrqysOJorB/T6+/fZbFdVQADA3Nz906NCUKVM2btwoPrlnz54mNVQoFHK5XB6PV1BQgA9wh1bxAZfLrcM/iOb57LPPzMzM3r59KxAIfv31
1yaTVVWLiIiItLQ0S0vLqVOnUu2LWqFWMmpoaEij0cQNO5QU/NdLl/0nP3XqVFzh1+B8RkZGeHi4np5ey0sKyo+Xl9fcuXODg4Pxf11dXUUi0bFjxxpPJ3k83idVksFgmJubm5mZGRsbNz7o1KkT/sp59OiRg4PDzp07J0yYgDcQVV22b98OAGvWrGnb/J3QHGr1UB8TEzN27FgajRYTE4O3x1BGnj+HXr3AxgaePZP52CKRCHc5qz/d+Prrrw8fPuzr6/vHH3/I3KKCKS8vt7KyKikp+eSVYpVsUis7deok+TrbL7/8snHjRgsLi5SUFBMTE+neAWXExMSMGjXKxMTk9evXuOc3QVaolYwCwLp167Zv325lZZWUlIQbSSgdT59C377Qpw88eSKP4VeuXHnw4MGsrCy8eF1UVNSlS5fa2tpnz551795dHhYVjIODQ0JCgr6+vrW1tampKYvFEi8us9lsU1NTvMrcOO2xzYhEonHjxt2+fXvKlClnz56V1bAKxt3d/eLFi35+fj/99BPVvqgd1K5wyZyamprBgwcDwNdff021L03zLiWlYMiQouabaUpJQUHBggULxLtdrl27FpQ2hbb1JCUl0Wg0Q0PDVu2HLD3Z2dm4oOPYsWOKtCsrVLRpv6qgbjKKEEpLS8MLLKGhoVT70gQJCQkAMGjQILlawR3O3717h+tV1GYvMx8fHwD4/vvvFW/6yJEjAGBkZPTy5UvFW5eSOXPmAMB3331HtSPqiRrKKPqnAUfHjh3z8/Op9qUh9+7dA4Bhw4bJ1cq8efPi4+P9/f0BYFzjZmWqSXZ2NrVN+6dNmwYAjo6OqlUVqupN+5Uf9ZRRkUjk5uYGAM7OzsqWZoy35x0xYoRcrbx8+TI2Ntbc3BwArl69KldbCgNv7Dp//nyqHCgtLbW0tASALVu2UOVDG1i5ciUAzJw5k2pH1Bb1lFGEUFFREYvFAmVr2IzQ33//DQAtbJomE3g83vz58wGgX79+yvZF0jaKi4tx0/6nT59S6EZ0dDSNRqPT6dK02VckJSUleF1epZv2KzkyW81UNlgsVmBgIAB8//33T58+pdqdf8H5jHQ55I0CwMuXL3ft2jV+/Hhzc/MjR46Ympp26dKlyRJ7lSMgIKCysnLSpEm9e/em0A0nJ6cVK1bU1dXNnTu3qqqKQk8kZN++fe/fv58wYUKDfi4EWUK1jsuXBQsWAICdnV11/d0KKeXy5csg070ohEJhXFzc+vXre/XqJf6x6unpDR8+XEdHR0tL6/r167KyRRXv37/HOwjFxsZS7QsSCAT9+vUDgG+//ZZqXz6BuGn/35q0a7ziUXMZff/+PU6W/O9//0u1Lx+IjIwEWWyFxOfzo6Ojly9fjgOgGGNjYw6HExwcjBv+4+2DLCwsSkpKZOE7ZeCdgeUdUJacp0+fMhgMGo0WGRlJtS8toWZN+5UWNZdRhFB8fDyelCnJF/L58+ehxU3YW6a4uPjMmTOzZ8+uX1zwxRdfLF68OCIiosGkWygU4sZOn2yMr8zU1NTgpv1KpVm4ayeLxSosLKTal6YRN+0/e/Ys1b6oOeovowihH374AQAsLS0VnLPdJKGhoQDw1Vdftequ7OzsgIAAJyen+tXQtra2fn5+iYmJLSwiqXreOEIIF9H36tVLqZr2C4XCcePGAcDkyZOp9qVpQkJCAKBHjx5K9bmpJRoho7W1tUOHDgWAOXPmUO3Lh19uHx+fT14pFAoTExP9/Pxw93UMnU53dHQMCAjIycmR0KJK542LRKI+ffoAwNGjR6n2pSG5ubm4xP7QoUNU+9IEeA8ItWnar8xohIwihF68eIGfgkNCQqj1BG+INGvWrOYukCTo2VpUNG8c/RNK7tSpk/IsEtYnLCwMAAwMDNLT06n25SOuXr0KAGw2m8/nU+2L+qMpMooQwv2NOnToQFUNDAbPDefNm9fgfKuCnq1FnDeucl3cR44cCfJscS89s2bNAoBBgwbV1NRQ
7cu/4J5+/v7+VDuiEWiQjCKEPDw8AGDUqFEURosOHToEAIsWLcL/bXPQs7XgvHEdHZ34+HhZjSlvHjx4gGfi5eXlVPvSLGVlZV26dAEAPz8/qn35wOPHj2k0Wvv27UtLS6n2RSPQLBnlcrk4je63336jyocDBw4AgLe3t/RBz9aCiwJ79epVWVkpJxOyZfLkyQCwceNGqh35BDExMdra2nQ6PS4uTn5W8N5Qd+7cCQsL27t3719//dXclRwOBwDWrl0rP2cI9VG3fqOf5OrVqxMmTNDV1X3w4AHeAF1h8Pn869ev+/v7x8XFiU+amJi4ubl5eHi4urrKu0FqdXX1kCFDUlJSli1btmfPHrnakp709HRbW1tdXd2XL1/iLz9lZv369du2bZOm0a2Ee0OJGTp06P379xuPk52d3aNHDzqdnp2dXT+8TpAfarWJiCS4uLgsWbLkwIEDM2bMSExMbLBnkTwoKSm5efNmZGTkhQsXKioqAGDAgAElJSUTJkxwd3d3cXFR2HbHenp6J0+eHDRo0L59+1xdXSdOnKgYu21j27ZtIpFo/vz5yq+hALB58+abN28mJCSsXr0ax22aJCUlJTk5mcvlijeJEh9IuDcUi8ViMplmZmY2NjZNXrZ9+3ahUDh//nyioQpD42ajAFBVVWVvb//8+fPVq1f/9ttvcrKSmZl54cKFiIiIe/fuCYVCAKDRaPb29pMnT/bw8MDVhJSwY8eONWvWsFislJQUpd3tMi8vz8rKSigUPn/+HOeQKz/Pnj2zt7fn8/lnzpzBj9WNWbVqVUBAQJMvSbg3VMtwudwuXboIBIKnT5/a2tpK9X4IkkN1VIEaHj58qKurS6PRLl26JMNhJcz0pLbXjvLnjSOE8BbK06ZNo9qR1rF7925osdHtqVOnZs2atWrVqm3bth09evTixYuPHj3Kzc2VVTrXf//7XwDw8vKSyWgECdFQGUUI/e9//wMZ1Zu3NtPT2dlZSotSouR54+/evcMzr8TERKp9aR0ikQiHSihpdFteXm5sbAwAcl3pIjRGc2VU+nrz4uLi4OBgDodTf0mhS5cuONOzhSzCdevWtdVrmaG0eeMIoS1btgDA+PHjqXakLRQVFeFQyZ49exRseseOHQAwevRoBdslaGJsVMzLly8HDBhQXl5+7Nix2bNnS3hXdnZ2ZGRkVFTUrVu3xMsCtra2HA5n0qRJdnZ2qtLfc9asWX/99degQYPi4uKUZ+Py6urqrl27FhQUREdHOzk5Ue1OWwgPD/f09GQwGPHx8X379pX5+Hw+Hy9S8Xg88ZgC1I8AAA9ISURBVEFRUdGVK1fevXsXFRWl5IuHagjVOk4xR48eBQnqzWVV3o4QevLkycKFC6mtpMKUlZXhzknKkzeO/ik269+/v0o37V+4cCEADBw4sA1Bz6qqqry8vMTExIiIiODgYH9//+XLl8+ePdvJycnW1tbMzKy572kLC4vffvtNpT83FUWjZ6MYHx+f06dPOzo63r59W1tbu/5LAoEgNjY2MjIyLCwsPz8fnzQ2NnZycnJ3d/f09MTNk1rFtWvXXFxckpOTKVysFxMbGztmzBgajXbnzh0HBweq3QGRSGRjY5OZmXn69OmpU6dS7U7bqaystLOzy8jIWL9+/datW+u/VFxc3CDVKT8/n8fjiQ/4fH7LgzMYDCaTaWpqymazcfITzoLq1KnTqFGj5Pm2CE1DZBTKysr69euXk5OzZcuWDRs2AEBJScnFixejoqKuXLmCMz0BoEuXLs7Ozu7u7q6urtI8AhcWFl68eHHKlCl4NYBypM8blyFhYWEcDsfKyio9PV1O+6wojHv37o0aNUokErm4uABAYWFhUVERj8erra1t+UYDAwOxRDbQShaLZWpqKknmE0GREBkFALh+/bqzs7O2tvY333zz+PHj+pmegwYNwpme8ghyKQO1tbWOjo4JCQmLFi1qIW9cMTg4ONy/f//AgQNLliyh1hOZ4ObmlpKS
kpeXV/8kg8EQJ4Q2zg+1sLDo0KEDVQ4T2gjFQQWlYeXKleIndD09PScnJzmVt3t6egLATz/9JPOR20xaWhqu5jpz5gyFbty4cQMAWCxWVVUVhW7Iipqams6dOwPAqlWroqKiEhIScnJyBAIB1X4RZA+R0Q/w+fw3b974+vqGhoZWVFTIz5C7uzsA/O9//5OfiTbwybxxBeDs7KyEn0ybwR0R+/btS9Z81B7yUE8AAEAITZo06eLFi87OzleuXFF8zlZKSsqAAQP09fVfv36NNwFVaRBCffv2TU1NPX78OG5ISlBj1HafeuWkqqrq4MGDJ06ciI6OptqXj6DRaEFBQWw2+9q1a/v371e8A7jBsK+vrxpoKABERkampqZaWlrifQcIag61k2FNIycnB3/syrnn7YULFwCAwWA8efJEkXbxuryOjo4ypNPKBEdHRwAICAig2hGCIlDtnBKVQ19ff/HixZWVlTjvXdmYPHnywoUL//zzzzlz5ty/f19WHfwEAsHbt2+bbKOJDwoLC/X09MaMGYPXZFSd2NjYu3fvmpiY4CR8gtpDYqOEjxDnjW/YsAHXtn+SioqK+oWJ9VPK8Zn379+3PAKuemAymU+fPlWDh3oPD4/IyMgff/xx06ZNVPtCUARERhVKUFAQ3mAZAE6ePMlkMqn1p0kSExOHDx8uFApv3LgxZsyYlruy5+XlvXv3ruUB9fT0TExMmmyjaW5uzmQyjx49euXKlXv37nl5eZ07d04xb1NOPH/+vHfv3gwG49WrV8r58yXIHPJQr1DS09OvX7+Ojz/Z7ZwqBg0atGHDhp9//nnixIm1tbXyrrq5fv26n5/fqVOnnj9/fv78+aNHj86bN0+W70ex+Pv7i0SiBQsWEA3VHIiMKpRFixaNHz8eHyvz02u3bt0AoKqqCj6uummy/EaaqtaSkhI2m71ixQpvb+/a2trZs2d/9913jo6O3bt3l9mbUSC5ubkhISHa2tp490CCpkD1Gpdm8fLly6ysrNzc3Ldv3yptQUtGRkb9lis7d+6Uny1vb28Wi7V48eLc3FyE0PTp0wFg+PDhdXV18jMqP1atWgUA06dPp9oRgkIhMqpQ6svT/v37qXanCfh8/oABAwBAvGOajo7O3bt35WFLJBLhzVnpdHpxcTFCqLS0FC/Wb968WR4W5crbt29xb5eHDx9S7QtBoZD0e4ViaWlpZWWFH4T19fWpdqcJli1blpSU1L1790WLFuEztbW1M2bMePv2rWwNFRUVTZs2bcOGDXFxcYcOHcIhjg4dOhw/flxLS2vTpk0PHjyQrUV5s3///oqKChcXFzs7O6p9ISgWqnWcoETgLAIGg/H48eOoqKj6vycTJ06UbW04bs+MzU2aNKl+H4Pvv/8eALp16ybX5gayhc/n442gb968SbUvBEVDEp4Ux+PHjy9cuCCehHbp0kWpKgUzMzPt7e0rKioOHTq0aNGitLS03r17179gx44dWOCk59GjR1paWrdu3Tp79mxcXJy1tXV6err41erq6qFDhyYnJy9ZsuTAgQMysShvDhw4sHTp0sGDB8fHx1PtC0HhUK3jGgRu+SNmwoQJVHv0L+KQqHhP46qqqgYNSuh0emxsrEzM9evXT0tLy9HR0d/fPy4u7v79+w0uSE1Nxb37IiIiZGJRrtTV1VlbWwNAaGgo1b4QKIDMRhVHSkrKhQsXcBYRANjY2ChPguTChQuDgoK6d++emJgoXgdjs9lcLrf+ZZ06dXr8+HHHjh2lscXn86dOnRodHV1dXQ0ANBrtyy+/bNyrZefOnatXr2YymSkpKfh5WWk5ffq0j4+PlZVVRkZGg31oCBoB1TpOoJ76IdH654cOHdr4F8bNzU2aIGlcXJytre26deuio6MjIiJmz55taGg4c+bMxleKRKIJEyYAgKurq5K37MQbHQYGBlLtCIEaiIwqDm9vbysrKysrq549e9rb29vb27dh20iZk56ejtN0Dh8+3OCl5kK3
27Zta7O59evXi8fp3r37+vXrY2Nj8/Lymrw4Ly8Pr+AfOHCgzRblDZ5Hs9ls9WjaT2gDREYVx6BBgxroEeVJ5nw+H2du+vj4NH61vuTVh06nx8XFtc1idXX15cuXv/766/q1kt7e3s1dj0vs9fX1nz9/3jaL8sbJyQkAtm7dSrUjBMogMqo4cnNzs7KysrKynj17lpiYmJiYSLVHaMGCBXhWWF5e3vhVcU5SY6Kiolpri8/ne3l5FRQU4P8KhcJ9+/b179/fwsKi5b6cc+fOBQA7O7uamprWGpU3SUlJNBrN0NCwtLSUal8IlEHS7xVERUXFs2fPcnNzS0tLKysrAYBGo+EDqggJCQkKCmIwGGfOnGlya+WuXbs2PmloaOjr6zt27NjWmqurqzt//rz4LWtpaZmYmFRXV+fk5LS8D+jevXutra0fPXq0efPm1hqVN1u2bEEI+fr6ku08NRqqdVxTaLImh8L8GHFI9M8//2zhmvre9u/f/8CBA03OWyWhoqICAF68eCE+ExISYmNjI8m9sbGx2traONW0bdblQXZ2Nm7a/+bNG6p9IVAJ6fCkIAwNDZ2cnKqrq8UJTyKRqH6JvSIRCARTp06tqKjw8fHBz/VN0rVr17lz54aGhk6ZMiUnJ+fUqVPixKPY2NjTp0/v2bOntaZ5PJ6BgQE+Lisrk/AuR0fH9evX//LLL/Pnz09KSqLqc2vAr7/+WldXt2DBAktLS6p9IVAK1TpOoID58+cDQI8ePSSZWtbU1EgzkRSDBzE0NDT6B319fckHqa2txQlY8+fPb5XdNoBDDYmJiVFRUVwut8lrioqK2rVrR6PRUlNT5e0PQckhs1EFERQUJH6uf/XqlZWVlZGR0YABA3x8fBTsSUhIyJEjR1oIiTZAR0cH58nLhMePH+NmpgBw6tQpybfZoNPpJ06cGDhw4JEjR1xdXadOndpmHz65N1RRUZFIJMIXh4eHe3h4NB5k9+7dfD7f09PT1ta2zZ4Q1AMiowri9u3bx44da3Cyf//+Q4cObXIlR05kZGT4+voCAF4lb9W9bXselyHW1tbbt29funTpkiVLHBwcmnuUFolEmZmZ4p2g6h8UFRUVFhZ+cm8oHR0dU1NTFotlZmbW5DdNZWUlTmNYs2aN9O+LoOoQGVUQCxcuxJvulpaWnjt3LikpqaamJjk52draesSIET4+PriBsVx9wFWYFRUV06dPbyEk2hzOzs5aWh9SO2praynZxXPJkiWXLl2KiopauHDh1atXG1T9Y6qrq8XNUpuk5b2hzMzM2Gx2yzWdgYGBJSUlI0eOHD58uLRviaAGUB1V0FD4fD4uhfzss8/wD0JbW9vR0TEgIKC5YJz04BJ+CUOi9ZFhbFTKQRBCXC6XzWZDi7vADxgwYPjw4Z6enkuWLPHz89u7d+/Zs2djYmKeP3/+7t271lpsQE1NDf4KaUPyLEEtITJKMVVVVRERERwOR7wpvLa2tpOTU3BwsPR/8PU5efIkADAYjKSkpNbeKysFlBWXLl2i0Wh6enrJycmKt3706FEA6NWrl1AoVLx1ghJCZFRZKC0tDQ4Odnd319HRwXrKYDDc3d2Dg4Ol714szhINCgpqw+3KJqMIocWLFwNA7969+Xy+Iu2KRKI+ffoAwLFjxxRpl6DMEBlVOkpKSrCe0ukfItft2rVzd3c/c+ZM23bBe//+PV5NbvNWa0ooo+/fv+/ZsycArFmzRn5WhEJhYWHhkydPoqOjT5w48fvvv3t7ewNAp06dlKGtDEFJIP1GlZe8vLywsLDQ0FDcBwQAOnToMGnSJA6H4+rqKp60fpJ58+YFBwf36NEjMTFRkgwnVeHhw4cODg5CoTA6OnrcuHFtG6S0tLTJnCd88ObNm7q6uga3cDiciRMn4kp/AgEAiIyqADk5OefOnQsNDb179y4+Y2JiMnHiRA6HM2HCBPGktUmOHDmyYMECBoNx//791mY4KT8///yzn5+fhYVFSkqKiYlJ
g1f5fD6PxysoKOByuTweLz8/v0EWVHFx8SdNsFgsJpOJk5/wwcSJE9XvkyRIA5FRVeLVq1fh4eHHjh179OgRPtOxY8cpU6bMnj3b0dGxcfZPamrqkCFDqqqqjhw5ojyd9mWISCQaO3bsnTt3Bg8ePHHixAZaiWMRLUCn05lMJpPJNDMzw3Jpbm4uFk18puVvKQIBiIyqKKmpqaGhoSEhIRkZGfiMpaWll5cXh8MR62llZeWQIUPS0tKmT5+Ol+nVkqysrOHDh7dr1+7169cNXvpkfiiLxSIqSZAeIqOqDdbTEydOZGVl4TNdunTx8PCYO3fu7t271TIk2piqqqqgoCAul8tisdhstqmpKZPJNDU1Jc3rCIqByKg6gBC6e/fu6dOnw8LCCgsLxecNDAwePHjQYJ9kAoEgW4iMqhUikSguLi40NPTUqVOrV6/u3Lnz9OnTqXaKQFBziIyqJ7W1tbjPMdWOEAjqD5FRAoFAkAoyWyEQCASpIDJKIBAIUkFklEAgEKSCyCiBQCBIxf8D95ZgGiW8tYEAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from rdkit import Chem\n", + "active_id = np.where((test_dataset.y[:,0] == 1) * (model.predict(test_dataset)[:,0,1] > 0.8))[0][0]\n", + "Chem.MolFromSmiles(test_dataset.ids[active_id])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that we have a trained model and a molecule, let's ask the `Explainer` to figure out why the molecule was predicted to be toxic. We ask it for the 100 features (that is, elements in the fingerprint, each corresponding to one or more fragments) the prediction is most sensitive to." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "UJ3hePSwkP2F" + }, + "outputs": [], + "source": [ + "exp = explainer.explain_instance(test_dataset.X[active_id], model_fn, num_features=100, top_labels=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The returned value is an `Explanation` object. It has methods you can call to retrieve the results in various forms. A convenient form for working interactively is `show_in_notebook()`, providing a graphical representation." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "BPs0Txu4kP2H", + "outputId": "3cec0071-6c18-4390-9443-052d41c4ab51" + }, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " \n", + "
\n", + " \n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "exp.show_in_notebook(show_table=True, show_all=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This output needs some explanation. On the left it shows that this molecule is predicted to be toxic. We already knew that of course. That's why we chose it. On the right it lists the 100 elements of the fingerprint with the most influence on the prediction. For each one, the value column indicates whether the corresponding fragment is present (1.00) or not (0.00) in this molecule. And in the middle it shows whether the value for each index contributes to the prediction being for non-toxic (blue) or toxic (orange).\n", + "\n", + "Most of the fragments are not present. It's telling us about fragments that, *if* they were present, would shift the prediction. We aren't very interested in those. We want to know about the fragments that *are* present in the molecule that are contributing to the prediction. Let's try to put these results into a more useful form.\n", + "\n", + "To start, indices within the fingerprint aren't very informative. Let's write a function to reverse the featurization, mapping from indices back to the fragments that activated them." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "colab_type": "code", + "id": "4ja4_jCKkP2N", + "outputId": "890b30b1-7b4f-4c7b-f840-146533a06614" + }, + "outputs": [], + "source": [ + "def fp_mol(mol, fp_length=1024):\n", + " \"\"\"\n", + " returns: dict of \n", + " dictionary mapping fingerprint index\n", + " to list of SMILES strings that activated that fingerprint\n", + " \"\"\"\n", + " d = {}\n", + " feat = dc.feat.CircularFingerprint(sparse=True, smiles=True, size=1024)\n", + " retval = feat._featurize(mol)\n", + " for k, v in retval.items():\n", + " index = k % fp_length\n", + " if index not in d:\n", + " d[index] = set()\n", + " d[index].add(v['smiles'])\n", + " return d\n", + "\n", + "# What fragments activated what fingerprints in our active molecule?\n", + "my_fragments = fp_mol(Chem.MolFromSmiles(test_dataset.ids[active_id]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we want to query the `Explanation` to see which of those fragments contributed to the prediction. We can use the `as_map()` method to get the information in a form more suitable for processing." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{1: [(907, -0.23405879109145938), (261, -0.22799151209374974), (257, -0.2127115416006204), (411, -0.2032938542566075), (445, -0.201101199543193), (999, -0.19683277633182114), (505, -0.17598335551311955), (845, -0.16124562050855068), (306, -0.15779431345857292), (326, -0.15729134284912463), (742, -0.15426792127439848), (774, -0.1541352665863784), (648, -0.15240513095212335), (282, -0.15075378351457727), (918, -0.147036129283227), (531, -0.1458139691488669), (279, -0.14390785978173085), (269, -0.13989282701617642), (37, -0.1369273010593831), (530, -0.13566064574462358), (827, -0.1336099559901393), (28, -0.12819498508086055), (889, -0.12482816439354927), (84, 0.123345144700625), (712, -0.12260023102545663), (529, 0.12194683881762106), (513, -0.12144767300189488), (830, -0.11926958219652685), (111, -0.11793890523628446), (434, -0.11598961154307276), (247, 0.11346755135862246), (296, -0.11315257272809631), (394, -0.11054396729966792), (1022, -0.10845154388715085), (850, 0.10819488336102767), (92, -0.10725270764168865), (788, -0.10693252879326674), (565, -0.10619572780884631), (901, -0.10597769712341058), (854, -0.10261187607809283), (632, -0.10165075780263935), (381, -0.10083233541195123), (717, -0.10024949898626785), (431, 0.09886188592649868), (1003, -0.09854835359816157), (646, -0.09821601927382569), (312, 0.09718167861314402), (539, -0.09639497333637208), (693, -0.0960269720546286), (822, 0.09584637471513191), (1005, -0.09441597700147854), (584, -0.09422611177476213), (405, 0.09371804599009508), (594, -0.09361942073302025), (519, 0.09315063287813262), (613, -0.0920180464426831), (151, -0.09125548867464624), (995, 0.09122957856534511), (555, 0.09105473925802852), (619, 0.09045652379413677), (372, 0.09008810465661844), (617, 0.08854326235599133), (517, 0.0876472124639829), (409, -0.08722349514303968), (744, 
0.08646480736070905), (470, -0.0861786962874964), (930, 0.08444082349628013), (493, 0.08389172822676175), (429, -0.08368146493327351), (135, 0.08346782897055312), (27, -0.08332078333604556), (923, 0.0827630767476166), (977, -0.0803740639477386), (174, -0.07985778475171695), (204, 0.07748814547746291), (459, 0.07722411480464215), (377, 0.07544148127726504), (274, -0.07528620889379731), (665, -0.07517229225403155), (321, 0.07387303741377259), (733, 0.07313092778231371), (538, -0.07260889806354165), (760, -0.07216344039899467), (751, -0.07086876393200622), (523, 0.07067337687463775), (467, 0.06911819793695931), (172, 0.06779708514374157), (131, 0.06747370559195916), (732, 0.06727167331565105), (344, 0.06123528076874165), (155, -0.06080053839396983), (384, 0.05715795555565539), (614, 0.053775300985781746), (900, -0.050104647498526), (52, 0.047430623988994364), (460, 0.045920906298128734), (800, 0.04169171427687711), (316, 0.0391090952059404), (388, 0.0334174485302755), (752, -0.01827203770682766)]}\n" + ] + } + ], + "source": [ + "print(exp.as_map())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The keys in this map are the labels, of which we only have one. The value is a list of tuples, each of the form (fingerprint_index, weight). Let's convert it to a dict mapping indices to weights." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "fragment_weight = dict(exp.as_map()[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We know which fragments are present in our molecule of interest (`my_fragments`), and we know which fragments contributed to the prediction (`fragment_weights`). Let's loop over them and print them out." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 167 + }, + "colab_type": "code", + "id": "PAe3ZOhUkP2Q", + "outputId": "ca06c090-4379-4b79-f815-36464cf64323" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "555 {'C[C@](C)(C)CCC'} 0.09105473925802852\n", + "84 {'C=CC'} 0.123345144700625\n", + "519 {'C[C@@H](C)C'} 0.09315063287813262\n", + "274 {'C[C@@H](C)C(C=C)[C@@H](C)C'} -0.07528620889379731\n", + "529 {'CCC[C@H](C)C'} 0.12194683881762106\n" + ] + } + ], + "source": [ + "for index in my_fragments:\n", + " if index in fragment_weight:\n", + " print(index, my_fragments[index], fragment_weight[index])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "fK7Sy_vJkP2S" + }, + "source": [ + "These are the fragments most responsible for the prediction." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "5kZkMHOBkP2i" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "08_Introduction_to_Model_Interpretability.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/25_Uncertainty_In_Deep_Learning.ipynb b/examples/tutorials/25_Uncertainty_In_Deep_Learning.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4ffa96344a2668f11e0833ab5c7fcf645d126e58 --- /dev/null +++ b/examples/tutorials/25_Uncertainty_In_Deep_Learning.ipynb @@ -0,0 +1,367 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "Gn1RVu2xkMdA" + }, + "source": [ + "# Tutorial Part 25: Uncertainty in Deep Learning\n", + "\n", + "A common criticism of deep learning models is that they tend to act as black boxes. A model produces outputs, but doesn't given enough context to interpret them properly. How reliable are the model's predictions? Are some predictions more reliable than others? If a model predicts a value of 5.372 for some quantity, should you assume the true value is between 5.371 and 5.373? Or that it's between 2 and 8? In some fields this situation might be good enough, but not in science. For every value predicted by a model, we also want an estimate of the uncertainty in that value so we can know what conclusions to draw based on it.\n", + "\n", + "DeepChem makes it very easy to estimate the uncertainty of predicted outputs (at least for the models that support it—not all of them do). Let's start by seeing an example of how to generate uncertainty estimates. 
We load a dataset, create a model, train it on the training set, predict the output on the test set, and then derive some uncertainty estimates.\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/25_Uncertainty_In_Deep_Learning.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following installation commands. This will take about 5 minutes to run to completion and install your environment. You can of course run this tutorial locally if you prefer. In that case, don't run these cells since they will download and install Anaconda on your local machine." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 323 + }, + "colab_type": "code", + "id": "p0MdAUAvkMdD", + "outputId": "e73f824a-cd0b-4c73-d2e7-ef70df9e4baf" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 361 + }, + "colab_type": "code", + "id": "hlLFgrdrAc-J", + "outputId": "16522993-056f-493e-9c62-6b74829d12d6" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "BUFgitSSkMdG" + }, + "source": [ + "We'll use the Delaney dataset from the 
MoleculeNet suite to run our experiments in this tutorial. Let's load up our dataset for our experiments, and then make some uncertainty predictions." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 88 + }, + "colab_type": "code", + "id": "4mHPuoOPkMdH", + "outputId": "43685a7b-d247-4fc2-a929-015e798f9ebb" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import numpy as np\n", + "import matplotlib.pyplot as plot\n", + "\n", + "tasks, datasets, transformers = dc.molnet.load_delaney()\n", + "train_dataset, valid_dataset, test_dataset = datasets\n", + "\n", + "model = dc.models.MultitaskRegressor(len(tasks), 1024, uncertainty=True)\n", + "model.fit(train_dataset, nb_epoch=20)\n", + "y_pred, y_std = model.predict_uncertainty(test_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_DlPZsaekMdL" + }, + "source": [ + "All of this looks exactly like any other example, with just two differences. First, we add the option `uncertainty=True` when creating the model. This instructs it to add features to the model that are needed for estimating uncertainty. Second, we call `predict_uncertainty()` instead of `predict()` to produce the output. `y_pred` is the predicted outputs. `y_std` is another array of the same shape, where each element is an estimate of the uncertainty (standard deviation) of the corresponding element in `y_pred`. And that's all there is to it! Simple, right?\n", + "\n", + "Of course, it isn't really that simple at all. DeepChem is doing a lot of work to come up with those uncertainties. So now let's pull back the curtain and see what is really happening. (For the full mathematical details of calculating uncertainty, see https://arxiv.org/abs/1703.04977)\n", + "\n", + "To begin with, what does \"uncertainty\" mean? Intuitively, it is a measure of how much we can trust the predictions. 
More formally, we expect that the true value of whatever we are trying to predict should usually be within a few standard deviations of the predicted value. But uncertainty comes from many sources, ranging from noisy training data to bad modelling choices, and different sources behave in different ways. It turns out there are two fundamental types of uncertainty we need to take into account.\n", + "\n", + "### Aleatoric Uncertainty\n", + "\n", + "Consider the following graph. It shows the best fit linear regression to a set of ten data points." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 265 + }, + "colab_type": "code", + "id": "iLgia0GVkMdM", + "outputId": "30208f8a-d76c-43da-9030-40d7529246fe" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAioklEQVR4nO3deXjV9Z328ffHECCskVUIhLAkQWQ37isC4taKVutSbe3YUjtqrU6Zjs/M0z4zfWa6IJugUqro2E7V1jJYq04IKII7ICog5iSELQkQtrCEhCQnn/kjoYNMICfknJyF+3VdXOac88s594Xh5vA739/3Y+6OiIjEvzOiHUBERMJDhS4ikiBU6CIiCUKFLiKSIFToIiIJok20XrhHjx6ekZERrZcXEYlLq1ev3u3uPRt7LGqFnpGRwapVq6L18iIiccnMtpzoMZ1yERFJECp0EZEEoUIXEUkQKnQRkQShQhcRSRBRW+UiIolv0ZoSpuXmU1peSd/UFKZOymbymLRox0pYKnQRiYhFa0p4dOFaKmuCAJSUV/LowrUAKvUI0SkXEYmIabn5fy3zoyprgkzLzY9SosSnQheRiCgtr2zW/dJyKnQRiYi+qSnNul9aToUuIhExdVI2KclJX7ovJTmJqZOyo5Qo8elDURGJiKMffGqVS+tRoYtIxEwek6YCb0VNnnIxswVmVmZm65o47jwzC5rZLeGLJyIioQrlHPpzwDUnO8DMkoBfArlhyCQiIqegyUJ39+XA3iYOexD4E1AWjlAiItJ8LV7lYmZpwE3AvBCOnWJmq8xs1a5du1r60iIicoxwLFucBfzY3YNNHeju8909x91zevZsdIKSiIiconCscskBXjQzgB7AdWZW6+6LwvDcIiISohYXursPPPq1mT0H/EVlLiLS+posdDN7AbgS6GFmxcBPgWQAd2/yvLmIiLSOJgvd3e8I9cnc/Z4WpRERkVOmvVxERBKECl1EJEGo0EVEEoQKXUQkQajQRUQShApdRCRBqNBFRBKEBlyIxLBFa0o08UdCpkIXiVGL1pTw6MK1VNbU73tXUl7JowvXAqjUpVE65SISo6bl5v+1zI+qrAkyLTc/Sokk1qnQRWJUaXlls+4XUaGLxKi+qSnNul9EhS4So
6ZOyiYlOelL96UkJzF1UnaUEkmsU6GLxKjJY9L4+c0jSEtNwYC01BR+fvMIfSAaxw5W1fD40gI+KNoTkefXKheRGDZ5TJoKPAFUVgd5/v3NzHt7I/sO1/DgVUO4cFD3sL+OCl1EJEKO1AZ54cOtzH1rI7sPHeGKrJ48MjGLUf1TI/J6KnQRkTCrCdbx8upi5iwtoHR/FRcM7MZTd43lvIxuEX1dFbqISJgE65xXPilh1pICtu49zJj0VKbdOoqLB3fHzCL++ip0EZEWqqtzXl+3nZl5ATbuquCcvl1YcE8O47J7tUqRH6VCFxE5Re7Okg1lTF+czxc7DpLZqxPz7hrL1cPO4owzWq/Ij1Khi4g0k7uzomA30xfn82nxfjK6d2D27aO5YWRfkqJQ5Eep0EVEmuHDoj1MXxzgo817SUtN4VdfG8nNY9NokxT9y3pU6CIiIVizdR8z8gKsKNhNr87t+NmN5/D18/rTrk1S09/cSlToIiInsb50PzPzAizZUEa3jm35p+vP5q4LB9A+OXaK/KgmC93MFgA3AGXuPryRx78B/Ljh5iHg++7+aVhTioi0ssKyg8zMK+C1tdvp0r4NUydlc8/FGXRsF7vvg0NJ9hwwF3j+BI9vAq5w931mdi0wH7ggPPFERFrXlj0VzF5SwKJPSkhJTuIHVw3h3ssG0TUlOdrRmtRkobv7cjPLOMnj7x1z8wOgXxhyiYi0qpLySuYsLeCPq4tJTjK+e9kgvnfFYLp1bBvtaCEL978d7gXeONGDZjYFmAKQnp4e5pcWEWm+sgNVPPFWIS98tA2Auy8cwN9eOZheXdpHOVnzha3QzWwc9YV+6YmOcff51J+SIScnx8P12iIizbW3opp5b2/k+fc3Uxt0bs3pxwNXZZIWxwNEwlLoZjYSeBq41t0js9GviEgY7K+s4ekVRSx4ZxOVNUEmj0njofGZDOjeMdrRWqzFhW5m6cBC4G53D7Q8kohI+B06Ustz725i/vIiDlTVcv3IPjw8IZMhvTpHO1rYhLJs8QXgSqCHmRUDPwWSAdx9HvAToDvwZMMmNLXunhOpwCIizVFVE+S372/hqbc3sreimgln9+aRiVkM69sl2tHCLpRVLnc08fh3gO+ELZGISBgcqQ3y0sptzH2zkLKDR7gsswd/d3U2oyM0XCIWxO4KeRGRU1ATrGPhx8U8vrSQkvJKzs/oxpw7xnBBBEa+xRoVuogkhGCd8+qnpcxaEmDznsOM6p/KL742gkuH9GjVPcmjSYUuInGtrs7JXb+DGXkBCsoOcXafLjz9zRzGn926wyVigQpdROKSu/PmF2VMXxzg8+0HGNKrE0/cOZZrh0dnuEQsUKGLSFxxd94t3MNji/P5ZFs56d06MPO2UXx1VFpUh0vEAhW6iMSNlZv38lhuPh9u2kvfru35+c0juOXcfiTHwHCJWKBCF5GY9+m2cqbnBVge2EXPzu3456+ew+3nx9ZwiVigQheRmLVh+wFm5AXI+3wnZ3ZI5v9cN5S7L8wgpa2KvDEqdBGJOYVlh5i1JMBfPttO5/Zt+LuJWXz70oF0iuHhErFAvzsiEjO27jnM7KUF/OeaYtonJ/HAuCF897JBdO0Q+8MlYoEKXU5o0ZoSpuXmU1peSd/UFKZOymbymLRox5IEVFpeyZw3C/njqm0knWHce+lA7rtiMN07tYt2tLiiQpdGLVpTwqML11JZEwTqp7k8unAtgEpdwqbsYBVPvrWR33+4Fce584J07h83hN5xOFwiFqjQpVHTcvP/WuZHVdYEmZabr0KXFttXUc285Rt5/r0tVAfruGVsPx4cP4R+Z3aIdrS4pkKXRpWWVzbrfpFQHKiq4ekVm1jwziYqqmuZPLp+uERGj/gfLhELVOjSqL6pKZQ0Ut5943g8l0RPxZFanntvM/OXF7G/sobrRpzFDydkkdU7cYZLxAIVujRq6qTsL51DB0hJTmLqpOwoppJ4U1UT5HcfbOGpZRvZU1HN+
KG9eHhiFsPTukY7WkJSoUujjp4n1yoXORXVtXW8tGobc98sYOeBI1w6pAePXJ3F2PQzox0toanQ5YQmj0lTgUuz1AbrWLimhNlLCigpr+S8jDOZddsYLhqc+MMlYoEKXURarK7OefWzUmYtKWDT7gpG9uvKv908gsszT5/hErFAhS4ip8zdyV2/k5l5AfJ3HmToWZ2Zf/e5TBzWW0UeBSp0EWk2d2dZ/i6m5+WzruQAg3p2ZM4dY7h+RJ/TdrhELFChi0izvFe4m8cW5/Px1nL6d0vhsVtHMXl0X9poT/KoU6GLSEhWb9nLY7kB3i/aQ5+u7fnXm4Zz67n9adtGRR4rVOgiclJri/czPS+fZfm76NGpHT/9yjDuOD+d9snakzzWNFnoZrYAuAEoc/fhjTxuwGzgOuAwcI+7fxzuoCLSur7YcYCZeQFy1+8ktUMy/3DtUL550QA6tNX7wFgVyv+Z54C5wPMnePxaILPh1wXAUw3/FZE4VLTrELOWFPDqZ6V0atuGhydk8TeXZtC5vfYkj3VNFrq7LzezjJMcciPwvLs78IGZpZpZH3ffHq6QIhJ52/Ye5vGlBfzp42LatUni+1cMZsrlg0jt0Dba0SRE4fi3Uxqw7ZjbxQ33/a9CN7MpwBSA9PT0MLy0iLTUjv1VzH2rgJdWbsPM+PYlA/n+lYPpoeEScScchd7YolNv7EB3nw/MB8jJyWn0GBFpHbsPHeGpZRv57QdbcHduO68/D4zL5KyuGi4Rr8JR6MVA/2Nu9wNKw/C8IhIB5Yermb+8iGff3cyR2iBfG9uPH4zPpH83DZeId+Eo9D8DD5jZi9R/GLpf589FYs/BqhqeeWcTz6zYxKHqWr46qi8Pjc9kUM9O0Y4mYRLKssUXgCuBHmZWDPwUSAZw93nA69QvWSykftnityMVVkSa73B1Lf/+3hZ+vXwj5YdrmHRObx6ZmE32WRoukWhCWeVyRxOPO3B/2BKJSFhU1QT5/YdbeXJZIbsPVTMuuyePTMxmRD8Nl0hUukJAJMFU19bxx9XbmLO0kB0Hqrh4cHd+fXcW5w7oFu1oEmEqdJEEURusY9EnpcxeGmDb3krGpqcy4+ujuHhIj2hHk1aiQheJc3V1zmtrtzNzSYCiXRUMT+vCv3x7OFdm9dSe5KcZFbpInHJ38j7fyYy8AF/sOEhW707Mu+tcJp2j4RKnKxW6SJxxd94O7GJGXoDPivczsEdHZt8+mhtG9iVJwyVOayp0kTjy/sY9TF+cz6ot+0hLTeFXt4zk5jFpGi4hgApdJC58vHUf0xfn827hHnp3acfPJg/nthwNl5AvU6GLxLB1JfuZkRfgzS/K6N6xLf/3hmF84wINl5DGqdBFYlBg50Fm5gV4Y90OuqYk8/fXZPOtizLo2E5/ZOXE9NMhEkM27a5g9pIAr3xaSse2bXhofCb3XjaQLhouISFQoYvEgOJ9h5mztJCXPy4mOcn43uWD+d7lgzizo4ZLSOhU6CJRtPNAFU+8VcgLH23FML550QC+f+VgenXWnuTSfCp0kSjYc+gI897eyPPvbyFY53z9vP48MG4IfVNToh1N4pgKXaQV7T9cw29WFLHg3U1U1QS5aUw/HhqfSXp3DZeQllOhi7SCg1U1PPvuZn6zooiDVbXcMLIPP5yQxZBeGi4h4aNCF4mgyuogz7+/mXlvb2Tf4RomDuvNIxOzOLtPl2hHSwiL1pQwLTef0vJK+qamMHVSNpPHpEU7VtSo0EUi4EhtkBc+3Mrctzay+9ARrsjqySMTsxjVPzXa0RLGojUlPLpwLZU1QQBKyit5dOFagNO21FXoImFUE6zj5dXFzFlaQOn+Ki4Y2I2n7hrLeRkaLhFu03Lz/1rmR1XWBJmWm69CF5FTF6xzXvmkhFlLCti69zCj+6cy7dZRXDy4u7ayjZDS8spm3X86UKGLtEBdnfPGuh3MyMtn464KhvXpwoJ7chiX3UtFHmF9U1MoaaS8T+elnyp0k
VPg7izdUMb0vAAbth8gs1cnnvrGWCadcxZnaE/yVjF1UvaXzqEDpCQnMXVSdhRTRZcKXaQZ3J0VBbuZnhfg023lDOjegVm3jeYrozRcorUdPU+uVS7/Q4UuEqIPi/YwfXGAjzbvJS01hV9+bQQ3j+1HsoZLRM3kMWmndYEfT4Uu0oQ1W/cxIy/AioLd9Orcjn+58RxuO68/7dpoT3KJLSEVupldA8wGkoCn3f0Xxz3eFfgdkN7wnI+5+7NhzirSqtaX7mdmXoAlG8ro1rEt/3jd2dx14QBS2qrIJTY1WehmlgQ8AUwEioGVZvZnd//8mMPuBz5396+YWU8g38z+w92rI5JaJIIKyw4yM6+A19Zup0v7Nvzo6izuuWQgnTRcQmJcKD+h5wOF7l4EYGYvAjcCxxa6A52tfp1WJ2AvUBvmrCIRtWVPBbOXFLDokxJSkpP4wVVDuPeyQXRN0XAJiQ+hFHoasO2Y28XABccdMxf4M1AKdAZuc/e645/IzKYAUwDS09NPJa9I2JWUVzL3zQL+sKp+uMR3LxvE964YTDcNl5A4E0qhN7YWy4+7PQn4BLgKGAzkmdkKdz/wpW9ynw/MB8jJyTn+OURaVdmBKp5ctpHff7gVgLsvHMDfXjmYXl00XELiUyiFXgz0P+Z2P+rfiR/r28Av3N2BQjPbBAwFPgpLSpEw2ltR3TBcYjM1QefrOf144KpM0k7jKwwlMYRS6CuBTDMbCJQAtwN3HnfMVmA8sMLMegPZQFE4g4q01P7KGp5eUcSCdzZxuCbITaPT+MH4TDJ6dIx2NJGwaLLQ3b3WzB4AcqlftrjA3deb2X0Nj88DfgY8Z2ZrqT9F82N33x3B3CIhO3Sklufe3cT85UUcqKrl+hF9+OGETDJ7d452NJGwCmkdlru/Drx+3H3zjvm6FLg6vNFEWqaqJshv39/CU29vZG9FNRPO7sXDE7M4p2/XaEcTiQgtrJWEc6Q2yEsrtzH3zULKDh7hsswePDIxizHpZ0Y7mkhEqdAlYdQE61j4cTGPLy2kpLyS8zO6MeeOMVwwqHu0o4m0ChW6xL1gnfPqp6XMWhJg857DjOqfys9vHsFlmT20J7mcVlToErfq6pzc9TuYkRegoOwQZ/fpwtPfzGH82RouESoNWU4sKnSJO+7OW/llTF8cYH3pAQb37MgTd47l2uEaLtEcGrKceFToEjfcnXcL9zA9L581W8tJ79aBGV8fxY2j0zRc4hRoyHLiUaFLXFi5eS+P5ebz4aa99Onanp/fPIJbztVwiZbQkOXEo0KXmPbptnKm5wVYHthFj07t+H9fGcbt56fTPll7kreUhiwnHhW6xKQN2w8wIy9A3uc7ObNDMo9eO5RvXpSh4RJhpCHLiUeFLjGlsOwQs5YE+Mtn2+ncrg2PTMzi25dk0Lm99iQPNw1ZTjwqdIkJW/ccZvbSAv5zTTHtk5N4YNwQvnvZILp2UJFHkoYsJxYVukTV9v2VzHmzkD+s3EbSGca9lw7kvisG071Tu2hHE4k7KnSJirKDVfzoD5+yvKB+U86ObZP4+2uG8q2LM6IbTCSOqdClVe2rqGbe8o08+85mqoP/M6WwojrIL974gq4pyToFIHKKVOjSKg5U1fD0ik0seGcTFdW1tG+TBF++pkUXtYi0kApdIqriSC3PvbeZ+cuL2F9Zw7XDz+LhiVlMmrm80eN1UYvIqVOhS0RU1QT53QdbeGrZRvZUVHPV0F48MjGL4Wn1wyV0UYtI+KnQJayqa+t4adU25r5ZwM4DR7hkSHcemZjNuQO+PFyitS5q0W6CcjpRoUtY1AbrWLimhMeXFlC8r5KcAWcy67YxXDS48eESrXFRi3YTlNONCl1apK7OefWzUmYtKWDT7gpGpHXl/08ezhVZPZvckzzSF7VoN0E53ajQ5ZS4O7nrdzIzL0D+zoMMPasz8+8+l4nDesfMcAntJiinGxW6NIu7syywi+mL81lXcoBBPToy544xXD+iT
8wNl9AHr3K6UaFLyN7buJvpiwOs3rKPfmem8Nito5g8ui9tYnRPcu0mKKcbFbo0afWWvTyWG+D9oj2c1aU9/3rTcG49tz9t28RmkR+l3QTldKNClxNaW7yf6Xn5LMvfRY9ObfnJDcO484L4Gi6h3QTldBJSoZvZNcBsIAl42t1/0cgxVwKzgGRgt7tfEbaU0qrydxxkRl4+uet30jUlmR9fM5RvXTyADm31979ILGvyT6iZJQFPABOBYmClmf3Z3T8/5phU4EngGnffama9IpRXIqho1yFmLSng1c9K6dS2DT+ckMnfXDqQLhouIRIXQnnLdT5Q6O5FAGb2InAj8Pkxx9wJLHT3rQDuXhbuoBI52/Ye5vGlBfzp42LatUnivisGM+WyQZzZsW20o8U8XYkqsSSUQk8Dth1zuxi44LhjsoBkM1sGdAZmu/vzxz+RmU0BpgCkp6efSl4Jox37q5j7VgEvrdyGmXHPxQP5/pWD6dlZwyVCoStRJdaEUuiNLS72Rp7nXGA8kAK8b2YfuHvgS9/kPh+YD5CTk3P8c0gr2X3oCE8t28hvP9hCXZ1z+/n9uX/cEPp01frs5tCVqBJrQin0YqD/Mbf7AaWNHLPb3SuACjNbDowCAkjMKD9czfzlRTz77maO1Ab52th+/GB8Jv27dYh2tLikK1El1oRS6CuBTDMbCJQAt1N/zvxYrwBzzawN0Jb6UzIzwxlUTt3BqhoWvLOZp1cUcai6lq+M7MtDEzIZ3LNTtKPFNV2JKrGmyUJ391ozewDIpX7Z4gJ3X29m9zU8Ps/dN5jZfwGfAXXUL21cF8ng0rTD1bX8+3tb+PXyjZQfrmHSOb15eGIWQ8/qEu1oCUFXokqsMffonMrOycnxVatWReW1E11VTZDff7iVJ5cVsvtQNVdm9+SRiVmM7Jca7WgJR6tcpLWZ2Wp3z2nsMV0pkkCqa+v44+ptzFlayI4DVVw4qBvz7somJ6NbtKMlLF2JKrFEhZ4AaoN1LPqklNlLA2zbW8nY9FRmfH0UFw/pEe1oItKKVOhxrK7OeW3tdmYuCVC0q4Jz+nbh2XuGc2V208MlRCTxqNDjkLuT9/lOZuQF+GLHQbJ6d2LeXecy6ZzYGS4hIq1PhR5H3J3lBbuZvjifz4r3M7BHR2bfPpobRvYlKcaGS4hI61Ohx4kPivYwfXE+KzfvIy01hV/dMpKbx6TF7HAJEWl9KvQY9/HWfUxfnM+7hXvo3aUdP5s8nNtyYn+4hIi0PhV6jFpXsp8ZeQHe/KKM7h3b8k/Xn81dFw6Iq+ESItK6VOgxJrDzIDPzAryxbgdd2rdh6qRs7rk4g47t9L9KRE5OLREjNu2uYPaSAK98WkrHtm34wfhM7r10IF1TNFxCREKjQo+y4n2HmbO0kJc/LiY5yZhy+SC+d/lgumm4hIg0kwo9SnYeqOKJtwp54aOtGMbdFw7gb8cNplfn9tGOJiJxSoXeyvYcOsK8tzfy/PtbCNY5t+b058GrhmjLVRFpMRV6K9l/uIbfrChiwbubqKoJMnlMGj8cn0V6dw2XEJHwUKFH2KEjtTz7zibmryjiYFUt14/sw8MTMhnSq3O0o4lIglGhR0hldZDffrCZp5ZtZN/hGiYO683DE7IY1jd8wyW0F7eIHEuFHmZHaoO88OFWnli2kV0Hj3B5Vv1widH9U8P6Opo4LyLHU6GHSU2wjpdXFzNnaQGl+6s4f2A3nrhzLOcPjMxwCU2cF5HjqdBbKFjnvPJJCbOWFLB172FG90/lV7eM4pIh3SO6la0mzovI8VTop6iuznlj3Q5mLglQWHaIYX268My3crhqaK9W2ZNcE+dF5Hgq9GZyd5ZuKGN6XoAN2w8wpFcnnvzGWK455yzOaMU9yTVxXkSOp0IPkbvzTuFuHlsc4NNt5Qzo3oGZt43iq6PSojJc4uh5cq1yEZGjVOgh+LBoD9PzAny0aS9pqSn88msjuHlsP5KjPFxCE+dF5Fgq9JP4Z
Fs50xfns6JgNz07t+NfbjyH287rT7s22pNcRGJPSIVuZtcAs4Ek4Gl3/8UJjjsP+AC4zd1fDlvKVra+dD8z8wIs2VBGt45t+cfr6odLpLRVkYtI7Gqy0M0sCXgCmAgUAyvN7M/u/nkjx/0SyI1E0NZQWHaQmXkFvLZ2O53bt+FHV2dxzyUD6aThEiISB0JpqvOBQncvAjCzF4Ebgc+PO+5B4E/AeWFN2Aq27Klg9pICFn1SQkpyEg9eNYTvXDqIrh00XEJE4kcohZ4GbDvmdjFwwbEHmFkacBNwFScpdDObAkwBSE9Pb27WsCspr2TumwX8YVUxbc4wvnPZIL53+SC6d2oX7WgiIs0WSqE3tibPj7s9C/ixuwdPdlGNu88H5gPk5OQc/xytpuxAFU8u28jvP9yK49x1QTr3jxtCry4aLiEi8SuUQi8G+h9zux9QetwxOcCLDWXeA7jOzGrdfVE4QobL3opqfv32Rv79/c3UBJ1bz+3HA1cNod+Z2pNcROJfKIW+Esg0s4FACXA7cOexB7j7wKNfm9lzwF9iqcz3V9bwzIoinnlnE4drgkwencZD4zPJ6NEx2tFERMKmyUJ391oze4D61StJwAJ3X29m9zU8Pi/CGU/ZoSO1PPfuJuYvL+JAVS3XjTiLhydkkdlbwyVEJPGEtB7P3V8HXj/uvkaL3N3vaXmslqmqCfLb97fw1Nsb2VtRzYSze/HwxCzO6ds12tFERCImoRZYH6kN8tLKbcx9s5Cyg0e4LLMHj0zMYkz6mdGOJiIScQlR6DXBOhZ+XMzjSwspKa/kvIwzefyOMVw4qHu0o4mItJq4LvRgnfPqp6XMWhJg857DjOrXlZ/fPILLMnu0yp7kIiKxJC4Lva7OyV2/gxl5AQrKDjH0rM785ps5TDi7dYZLiIjEorgr9NVb9vGTV9axvvQAg3p2ZO6dY7hueJ9WHS4hIhKL4q7QzeBgVS3Tbx3FjaP70ibKe5KLiMSKuCv0seln8taProzKlCARkVgWl29vVeYiIv9bXBa6iIj8byp0EZEEoUIXEUkQKnQRkQShQhcRSRAqdBGRBKFCFxFJEHF3YVE8WbSmhGm5+ZSWV9I3NYWpk7KZPCYt2rFEJEGp0CNk0ZoSHl24lsqaIAAl5ZU8unAtgEpdRCJCp1wiZFpu/l/L/KjKmiDTcvOjlEhEEp0KPUJKyyubdb+ISEup0COkb2pKs+4XEWkpFXqETJ2UTUpy0pfuS0lOYuqk7CglEpFEpw9FI+ToB59a5SIirUWFHkGTx6SpwEWk1eiUi4hIggip0M3sGjPLN7NCM/uHRh7/hpl91vDrPTMbFf6oIiJyMk0WupklAU8A1wLDgDvMbNhxh20CrnD3kcDPgPnhDioiIicXyjv084FCdy9y92rgReDGYw9w9/fcfV/DzQ+AfuGNKSIiTQml0NOAbcfcLm6470TuBd5o7AEzm2Jmq8xs1a5du0JPKSIiTQpllUtjE5m90QPNxlFf6Jc29ri7z6fhdIyZ7TKzLSHmPF4PYPcpfm80xFPeeMoK8ZU3nrJCfOWNp6zQsrwDTvRAKIVeDPQ/5nY/oPT4g8xsJPA0cK2772nqSd29Zwiv3SgzW+XuOaf6/a0tnvLGU1aIr7zxlBXiK288ZYXI5Q3llMtKINPMBppZW+B24M/HhUsHFgJ3u3sg3CFFRKRpTb5Dd/daM3sAyAWSgAXuvt7M7mt4fB7wE6A78KSZAdTG09+WIiKJIKQrRd39deD14+6bd8zX3wG+E95oJxVvyyLjKW88ZYX4yhtPWSG+8sZTVohQXnNv9PNNERGJM7r0X0QkQajQRUQSRNwWupndambrzazOzGLyA9im9sCJJWa2wMzKzGxdtLM0xcz6m9lbZrah4WfgoWhnOhkza29mH5nZpw15/znamZpiZklmtsbM/hLtLE0xs81mttbMPjGzVdHOczJmlmpmL5vZFw0/vxeF8/njttCBdcDNwPJoB2lMiHvgx
JLngGuiHSJEtcDfufvZwIXA/TH+e3sEuMrdRwGjgWvM7MLoRmrSQ8CGaIdohnHuPjoOVtfNBv7L3YcCowjz73HcFrq7b3D3WJ643OQeOLHE3ZcDe6OdIxTuvt3dP274+iD1fyhiduN5r3eo4WZyw6+YXY1gZv2A66m/UFDCxMy6AJcDzwC4e7W7l4fzNeK20ONAc/fAkVNgZhnAGODDKEc5qYZTGJ8AZUCeu8dy3lnA3wN1Uc4RKgcWm9lqM5sS7TAnMQjYBTzbcDrraTPrGM4XiOlCN7MlZraukV8x+073GCHvgSOnxsw6AX8CfujuB6Kd52TcPejuo6nfOuN8Mxse5UiNMrMbgDJ3Xx3tLM1wibuPpf705v1mdnm0A51AG2As8JS7jwEqgLB+thbTI+jcfUK0M7RASHvgyKkxs2Tqy/w/3H1htPOEyt3LzWwZ9Z9XxOIH0JcAXzWz64D2QBcz+5273xXlXCfk7qUN/y0zs/+k/nRnLH62VgwUH/Ovs5cJc6HH9Dv0ONfkHjhyaqx+f4lngA3uPiPaeZpiZj3NLLXh6xRgAvBFVEOdgLs/6u793D2D+p/ZN2O5zM2so5l1Pvo1cDWx+Rcl7r4D2GZm2Q13jQc+D+drxG2hm9lNZlYMXAS8Zma50c50LHevBY7ugbMB+IO7r49uqhMzsxeA94FsMys2s3ujnekkLgHuBq5qWKr2ScM7yljVB3jLzD6j/i/6PHeP+eWAcaI38I6ZfQp8BLzm7v8V5Uwn8yDwHw0/C6OBfwvnk+vSfxGRBBG379BFROTLVOgiIglChS4ikiBU6CIiCUKFLiKSIFToIiIJQoUuIpIg/hvGFlKA9OV48wAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Generate some fake data and plot a regression line.\n", + "x = np.linspace(0, 5, 10)\n", + "y = 0.15*x + np.random.random(10)\n", + "plot.scatter(x, y)\n", + "fit = np.polyfit(x, y, 1)\n", + "line_x = np.linspace(-1, 6, 2)\n", + "plot.plot(line_x, np.poly1d(fit)(line_x))\n", + "plot.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "7fTPkHSakMdP" + }, + "source": [ + "The line clearly does not do a great job of fitting the data. There are many possible reasons for this. Perhaps the measuring device used to capture the data was not very accurate. Perhaps `y` depends on some other factor in addition to `x`, and if we knew the value of that factor for each data point we could predict `y` more accurately. Maybe the relationship between `x` and `y` simply isn't linear, and we need a more complicated model to capture it. Regardless of the cause, the model clearly does a poor job of predicting the training data, and we need to keep that in mind. We cannot expect it to be any more accurate on test data than on training data. This is known as *aleatoric uncertainty*.\n", + "\n", + "How can we estimate the size of this uncertainty? By training a model to do it, of course! At the same time it is learning to predict the outputs, it is also learning to predict how accurately each output matches the training data. For every output of the model, we add a second output that produces the corresponding uncertainty. Then we modify the loss function to make it learn both outputs at the same time.\n", + "\n", + "### Epistemic Uncertainty\n", + "\n", + "Now consider these three curves. They are fit to the same data points as before, but this time we are using 10th degree polynomials." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 214 + }, + "colab_type": "code", + "id": "hVoRaGn6kMdQ", + "outputId": "e25598cd-bcf3-4076-e7f5-43727dfa561a" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAr8AAADCCAYAAABAOqrYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAABTcklEQVR4nO3dd3ycZ5Xo8d+ZGWlGtqotyVZz3HtiO3F6I9UhhDgJLIS2sAsEWNgLCyQQyi5ls2QJ/QZYspBLdikhIYmTQILTCQmkubfYlh3HtmR1q4ykmdHMPPePKZblGWnKO8Wa8/18/LGlKXok650573nPc44YY1BKKaWUUqoQ2HK9AKWUUkoppbJFg1+llFJKKVUwNPhVSimllFIFQ4NfpZRSSilVMDT4VUoppZRSBUODX6WUUkopVTAc2fxi1dXVZvbs2dn8kkrltQ0bNnQZY2pyvY5Y9HhV6niZOF5F5G7gGqDDGLM8/LlpwO+A2cAB4F3GmKMTPZces0odL94xm9Xgd/bs2bz22mvZ/JJK5TUReTPXa4hHj1eljpeh4/WXwJ3A/4z63BeBp40xt4vIF8Mff2GiJ9JjVqnjxTtmtexBKaWUyhFjzPNAz5hPrwXuCf/7HuC6bK5JqclOg1+llFIqv8wwxhwBCP9dm+P1KDWpJBz8iohdRDaJyB/CH08TkSdFZG/476rMLVMppZRSY4nITSLymoi81tnZmevlKHVSSCbz+2lg16iPIzVJC4Cnwx8rpZRSKj3tIlIHEP67I94djTF3GWNWG2NW19Tk5d5ZpfJOQhveRKQReBtwG/DZ8KfXAm8J//se4DkSKMhXajIIBg13PtvMu89sYka5K9fLKRgv7+/mmd0dfOLieVROKc71cpTKlEeADwK3h/9+OLfLSdzL+7t5cGMLNptQZBecDhvvPnMW82tLc700paIS7fbwA+AWoGzU546rSRKRmDVJInITcBPArFmzUl+pUnlkX6eb7z25h96hEf717UtzvZxJzRjD83u7+PEzzbxyILQvaMOBo/zqI2fjKrLneHVKpUdEfksokVQtIoeBfyMU9N4nIh8GDgJ/l7sVJufrj+5kf5ebUmcR/mCQQa+fJ3e28/inL6KkWI9XlR8mDH5FJNJ/cIOIvCXZL2CMuQu4C2D16tUm2ccrlY/a+j0APLKllS9dvRiHPf/3jopIE6F2SjOBIHCXMeaHuV3V+Iwx/OMvX+XZ3Z3UVbj4t7cvpdxVxOfu38Jn7t3Mj993Onab5HqZSqXMGPOeODddltWFWGB7Sx87j/TzzbXL+MC5swH4674u3vvfL/OdJ3bz1Ws0UTAZdQx4eLG5ixf2dvNicxcrmir42QdW53pZ40ok83s+cK2IXA24gHIR+RXhmqRw1nfcmiSlJpu2vlDw2+X28uK+bi5eeFLU2vmBzxljNopIGbBBRJ40xuzM9cLieb1tgGd3d/Kxi+fyuSsWUewInWT0Do/wzT/s5OuP7uDr1y5DRANgpXLt/tcOUeywce2KhujnzptXzQfOOYW7X3yDty6fyerZ03K4QmW1Bzce5nP3b8EYqJpSxKxpU1i/o51X3ujhrDn5+389YbrKGHOrMabRGDMbuBF4xhjzfo7VJMFJVpOkVLraw5nfMpeDhzYezvFqEmOMOWKM2Rj+9wChDawN4z8qtx7f3oZN4KMXzo0GvgAfvmAO
N100l//525v87Pn9OVyhUgrAMxJg3eZW3rp8JhVTio677YtvXUxDZQk3/34rnpFAjlaorNbSO8y/PryDM2ZV8einLmDDV67g3pvOpbrUyQ+f3pPr5Y0rnWu1twNXiMhe4Irwx0oVhLZ+D5VTinj7inrW72hn0OvP9ZKSIiKzgVXAyzleyrj+tP0IZ86eRnWp84TbvnjVYt52ah3fWb+bN7oGc7A6pVTEEzvb6Rse4V2rm064barTwbffcRpvdA3y3Sd252B1ymrGGL7w+60YY/j+u1dyamMFNptQUmzn4xfP5cXmbl55Y+zslvyRVPBrjHnOGHNN+N/dxpjLjDELwn/n73eplMXa+rzMLHdx/aoGhkcCPLGzLddLSpiIlAIPAJ8xxvSPuS1veobu63Szp93NW5fPjHm7zSb827VLcTps3P74rpj3UUplx/2vHaKhsoRz506Peft586t539mz+PkLb7CjtS/Lq1NW+/XLB3mhuYsvvW0JTdOmHHfb+84+Je+zv/m/S0epPNTe72FGuYszZlXRWFXCgxtbcr2khIhIEaHA99fGmAfH3p5PPUP/tD10QrEmTvALUFvm4uMXz4vWmCmlsu/w0SFeaO7i71Y3YhtnA+otVy2mzOngh0/tzeLqlNUO9QzxH4/t4oL51bz3rBO7eJ0M2V8NfpVKQVu/h5nlLmw24bqVDbzY3EVHuA44X0loV9gvgF3GmO/lej0T+dP2NlY2VVJXUTLu/T5y4Vxmlru47Y87CQa1oYxS2fbAhtDJ/zvPaBz3fhUlRXz4grk8sbOd7S2a/T0ZGWO4+fdbsInwn+88Le5m41D2tzhvs78a/CqVpJFAkC63lxkVoeEW161qIGhCbc/y3PnAB4BLRWRz+M/VuV5ULId6htjW0he35GG0kmI7N69ZxJbDfTy6Ne//D5SaVIJBw/0bDnHB/Goaq6ZMeP8PnT+bcpeDHz2t2d+T0R+3HeGl/T186eolNFTGT0yEsr/zeLG5m9cO5F/2N9EhF0qpsM4BL8bAzPBkt/m1pZzWWMG6zS185MK5OV5dfMaYF4CToifY+h2hkoerEgh+Aa5f1cDdL77Bt/+0mzXLZurwi0kiGDQ8tKmFN7sH6ff46feM0O32sbd9gCN9HuorS7h5zSKuW5XXTUsmtY0Hj3L46DA3r1mU0P0j2d/vP7WH7S19LG+oyPAKlVW8/gD/+afXWTyzjHefeeLGxrHee/Ysbn/8dZ55vSPvWtxp5lepJEXanM2sONaB4PIlM9je0s+wT9v4WOHx7W0sqSvnlOlTE7q/zSZ8+W1LaOkd5v+9eCCzi1NZ4fb6+divNvC5+7fwf59t5oGNh3n29Q7+vKeT1j4PhlCrpVsf3Ma6TSdHzf1ktPNIaM/s2XNib3SLRbO/J6f//dubHOoZ5ktXL0louNCUYgfza0vZ0do/4X2zTYNfpZIUCX5nhDO/EOr3C6EzY5Wejn4PG948mlDJw2jnzavm8iW1/PjZZrrc3gytrjCs29TC+bc/w5wv/pHzb38m68HloZ4h3vnTv/LM6x187e1L2Xfb1Wz72hqmFJ94sXJ4JMAd67V9Vq40d7gpdTqYUX5iO8J4tPb35NM75ONHT+/looU1XJTEUKflDRUa/Co1GUSmu80cFfw6HaHL7F5/MCdrmkwiJQ/JBr8At169BM9IgO8/mZ+bLE4G6za1cOuD22jpHc5JdvW1Az2s/fGLtPQO88t/OJMPnT8n2kGgtXc45mPifV5l3t52N/NrS5OeshjJ/v5AOz+cFO58phm318+Xrl6c1OOW1ZfT5fbm3YZwDX6VSlJbv5ciuzBtanH0c87w9DHviAa/6XrlwFEaKktYMKMs6cfOqynl/eecwm9fOcjutoEMrG7yae4Y4N5XDnLL77dw+ff+zL/ct5nhMVO4spVdfXl/Nx/4xStUlBSx7pPnc+GC4zNM9XE22MT7vMq85s5Q8JusipIibrpoLk/t0jaF+e5g9xD3/O0A
f3dGE4tnlif12GX1oZru7XnW21mDX6WS1N7vobbMdVymw1kUDn617CFt+zrcLJiR/JtpxKcvW4CryM61d76Qs8v2+c4Yw1/2dvLun/2Ny7/3PF98cBtP7Gxn1rQpmDjd4lp6h/ndqwcz9jv+6oEe/uGXr1Jf6eK+j53LvJoTfwduXrOIkjGbGR02SXizlbJW39AInQNeFqQQ/AJ8+IJQm8J/1zaFecsYw7//cScOm43PXrkw6ccvqQslMXa05Ffpg3Z7UCpJbX0eZla4jvtcsT0S/GrmNx3BoGF/l5tz5yW+eWasP+/pZMQfZCT8Zhq5bA9oVwDgud0dfP+pvWw51MuMcidfedsSLl1cy5zqqYgI59/+DC0xyggcNuELD2zj/z7TzOevXMS1K+rHHWiQjA1vHuVDd7/CzHIXv/3oOdSUxa4fjfz/3bF+N629w9GuHm87rc6SdajkNHeGrq6kkvmFUDusW65axGfv28IjW1pzcnx6RgLYbUKRPb9ygcYY9rS7+cveTvZ1uvEHDAFjMAbmVk/lLYtqWVZfjs0mrNvUEj0mrO6A8sdtR3hiZztfuGrxcftcElXmKqKm1MmPn2vme0/uyZsOLRr8KpWk9n4PS+qOv/TjLIrU/GrmNx1H+j14RoLMrUmsy0Msd6zfHQ18IyKX7XP9gptLXW4vX3tkB3/YeoTGqhL+4/pTeccZDdF69Yib1yzi1ge3HVf6UFJk5z+uX870Uie3P/46n/ndZv77L/u59a1LuGBBdVrr+uu+Lm76nw3Ulrv47U3nUDvBG+x1qxqi/49P7Gjjpv/dwMv7e9Jeh0pec4cbgAW1yZcoRVy3soHvP7mHz923hc/8bjMNGQyOut1eHtrUwnO7O2nv99De76Hf46fILsyrKWXxzDIW15Vz+ZJa5qfxPaXKGMPGg0f57SuHeH5PJx0DoY271aXFFNlt2MJXGx/a1MJ3n9xDdWkxc6qnsvVwXzTxYuXJfrfby78+vIMVjRV89MI5KT3Huk0t9Az6CBjrkhFWBPsa/CqVBGMMbf0e3rKo9rjPa82vNfaF30xjXfJOlG6KOp4xhnWbW/j6ozsZ8gb43BUL+djF8yh2xM50jc2ujn1zuWB+NQ9vaeE76/fw/l+8zOVLavnS1UuYm+T/mTGGX7zwBt96/HXmVE/lfz98VtKZpQsX1OAqsrF+R1vKwW8ms2aTVeRnFrlC8NqBHmZNn3jARSyPbGmlvd+b0eBo7cp63uwe4omdbYwEDEvqyplfW8p586ZTW+7C7fWzu22AV97oYd3mVm5//HVWNlXyjjMaufa0ep7d3WHp78jY9X32ioU47MLdL7zBlsN9lLkcXLywhosW1HDBguoTatq73F7+sreT53Z38sjmVsYWjKR7sj/2//ejF8zBkWJm/I71u6P/t1asL7IhN3Jynurviwa/SiVhwOtnyBc4rscvjAp+Axr8pmN/Zyj4TSfzW19ZEvOyfaY2RTV3DLCjtZ+Ofi8dAx663T5qypzMrZnKvJpSFtSWUTGlKCNfeyKdA15ufXArT+3qYNWsSr79jtMS2kg4Ors6ls0mXL+qkbcur+OXfz3Anc80s+YHz/PBc2fzT5fMP24jKMQOLq9cNoMvPLCNR7e0ctWymXznXSsodSb/dlRSbOfihTU8ubOdr1+7LOkyDKveSDNFRK4CfgjYgZ8bY25P9bmsCvLH/swAvrxue2jUewrPd8f63fjGvG5aHRz95Ll9TC228/fnzubdZzaxcJxjoKPfwyNbWrn/tcN8dd12vvHoDoIGAhaVUcVa3+fv34IhVM7wzbXLeMcZjTHb+kVUlzq5flUj169q5OHNsadapnqyH+v/90fPNFNXWZLS92t1MuKO9bvjbsjV4FepDGnvO7HHL4xqdaaZ37Ts6xykzOWgpjTxnqFjxbpsD/CRC1K7bAcnBg4fOm82npEAj25tZU+7O3o/p8PG9KnFdLl90Td0u004f34116+q58qlM5nqdGQl
2/in7W186aFtuL1+vvK2JfzD+XMSakyfKFdRaHzpDac38N31e/jFi2/wPy+9yTWn1fH+c05hVVMlD29uPeGN/ub7t3DbY8V0u73cctUiPnHxvKTbZI22ZtlM1u9oZ2tLHyubKpN6rFVvpJkgInbgx8AVwGHgVRF5xBizM9nnsjLIt/pnlo3gCELdJb56zdIJH19b7uIjF87lwxfMYUdrP+/62d8Y8ln3/cZanwGmTy3mqc9enPQJXEOck/3pY05C01lfOt+v1ckIq35fNPhVKglt/Sf2+AXt9mCV/V1u5tYk3zN0tLGX7WvLnPQOj/DI1lbef+4pSW9siRU43PbYLgDOnF3FN9Yu49y5ocun5S4HIoI/EKSld5j9nYO8eqCHhze38i+/20JJ0XZObahgy+FeS2v0RgfTM8tdNE4r4dUDR1neUM7337UypbZxiaotc/Gf7zyNj1w4h/996U0e3NjCgxtbWFBbyqGeITxjNoGOBA29Qz7u+cezTmhllorLFs/AbhPW72hLOvjN8xKZs4BmY8x+ABG5F1gLJB38WhnQWP0zszo4ivVcAEf6kuszKyIsb6iIO7WzpXeYnkHfCVc64jHGsOHNo3HX1zPoS2kDaayTfQG6Bn18+aFtfPGtiylzJX7lKd76Uv3/jbeHINUOLVb9vpw0wW8gaHB7/VSU5ObyoVIwasBFxdjMr3Z7sMK+jkHOm596p4eIsZft/7C1lU/9ZhNfe2QHX792WVL1a/EySTPLXdz/8fNiPsZht3HK9KmcMn0qlyyu5fNXLmLDwaM8tKmF37580NIavbHB+ZF+D0f6PVy5dAZ3vvf0uLW9Vlswo4xvrF3OLVctZt2mFtbvaGNvhzvmff0BY0ngC1AxpYhz5k5j/Y42vnBVcg34s10ik6QG4NCojw8DZ6fyRFYGrFb/zOJdqTk/ydcBfyDIf/15X9zbU11fvO8X4Oz/eIpVTVWcOaeKs+ZMZ1l9OU6HLbo57WDPIJsO9rL5UC8v7e9mX+cgAicc/+msL1aN/qcvW8Ce9gF+8eIbrN/Rzi1XLeKdpzdOGFzv63RjFzmhRteK9X3xga14/MG0NzTevGYRt/x+63GlMqkE0ydN8PuNR3fw+PY2Xv7SZWllhZRKR6zRxkA0wNDgNzXrNrXwn396nbZ+D0/tbGfdphZLLztfc1o9Ww/3cdfz+9ndNsAPblxJY9X4G3SMMbzQ3BX3ja89iYlFNptw5uxpnDl7Gr95+WDM+8T7OhOJF5zvaO3PWuA7WqnTwfvPOYX3n3NK3LZpVgeXa5bN5F8f3kFzR3IDF6zOSlks1hvdCVGJiNwE3AQwa9asmE9kZcBq9c9sbPBWV+GiYkoRD25sYe3KBs6fP/FGxu0tfXx53Xa2HOpl1axKdrX2H3fFIZ31xft+/8+l8+kdHuGl/d3815/38+Nn4wfeZU4HK5oquemiuWDga4/utPR3Ll6N/ttX1PO1R3dwy++38quX3uTf3r6U02dVxYyhth7u5UP/71VKiu2MBILHvZdZsb79XYPc+cxenvrsxZQU2yd+0DjPtad9gJ88F/p5pxpMnxTB78HuIX798kH8QUPv0AhVKdayKJWutn4PlVOKov1FI47V/GrZQ7LGZi77Pf6MbDr60tVLWFpXzlfWbeetP/wL37rhVK45rf6E+0V2Ut/z1zfZfKgXm0Cs/vupBnDxavRsAt9/cg9vPXUmi2aUTXiS7w8EeWpXh+WXKa2UreDyyqWh4Hf9jjbm185P+HGR36/P378Ff9BktM1WCg4DTaM+bgRO2N1kjLkLuAtg9erVMSdF3LxmEV94YKslAU3kZ/OVddtxe/3UV7i45arFaf3MxgZvA54R3vHTv/LxX23g/1y6gPeePYupMTZE7u90890n9/DHrUeomlLEne9dxTWn1VtaUz9R9xOAQa+fjQePsrfdTSBo8AcN/kCQGRUuVjVVMq+m9Lisq7PInpUOIyuaKnng4+fx8JYWbn/8dd7x
07/RUFnCBfOruWBBNaUuB39t7uKF5m52HemnsaqEBz5xHlsO9Vq+vmX15QQN7Grr5/RZVWk91xmnhB6/7pPnJ13qFHFSBL8/emYv/lE7LTX4VbnS1uc9od4XCq/swco3l2xuOrpuVQOnz6ri/9y7iU/9ZhPf/MNOZk+fypzqqZQ6Hfxtfzc7WkOTiBqrSrjt+uU47Ta++vAOywK4WAFhsd3G3Jqp/PDpvfzw6b00TSvhiiUzWdFUQXWpk+pSJ2UuB292D7GnfYDX2wZ4fk8nLb3Dll+mtFIigYMVZla4WNFUyRM72vjkJYkHv5E13vbYLi5bXMvt7zjN0nWl6VVggYjMAVqAG4H3pvJE161qwO3x85WHtwOpZ8tGP99j246wr9PN0597S0rPMZ4yVxF3f+hMbvn9Vm57bBc/fq6ZfzhvDhctrKald5hDPcPsOtLPH7cdwemw8c+XzuejF82lPFzbOl63klRM9HxTnQ4uXFCTcCmP1esbT6Q7y5VLZ/LgphZe2NvJY9uP8LvXQhU1xXYbZ5xSxeevXMi7z5xFTZmTOdVTLV/fsvpQb/wdrekHv57wxnJXUepXtvI++N3f6ebBjYc5e840Xn6jh5beYZY3VOR6WapAtfd7YvYijQS/vgIIfq1uD5XtTUezpk/h/o+fy29fOciWQ30c6B7kyZ3t9A2PcHr4TeCihTUsr6+IZmscdltWMkkd/R6e2tXBU7va+dXLb3L3i7F/nypKiljRVMlXr1nKoNfPV9Ztz9dL91l7o79y6QzuWL+bI33D1FUkHvgHg4ajgz6ml+ZXUsUY4xeRTwHrCbU6u9sYsyPV57vu9Aa+8vB2vnT1Ym66aF7a62vudKc81jgRjVVT+M1Hz2HjwaP8+Jlmvv/UHr7/1J7o7VVTivjAOafwyUvmx50IqI6Z6nTwgXNO4QPnnII/EGRbSx/DvgCrZlWlVYaQqIbKEipKitjZ2pf2c3nCr3UuR+rrzvvg9wdP7cXpsPPN65Zz5fefz4tLeapwtfV7WDpmuhuEdgYXO2wFkfnN91Y4iSiy2/j7c2fDucc+FwiauK3AspVJqi138d6zZ/Hes2cx7AvQ0jtEl9tHt9tH77CPpqopLJpZRm2Z87iyCLtNCn5Qw5ploeD3mdc7eN/ZpyT8uH7PCP6gYfrU/AugjDGPAY9Z8VxWDuLx+YO82T3E1cszP1b69FlV/OJDZ7K7bYAD3YM0VU2haVpJUh0M1PEcdhur0sy+JivUPaOc7S39aT9X5H12bPlhMvI6+N3dNsCjW1v52EXzWFBbiqvIRstRDX5VbowEgnS5vcyoiD2Fyumw5X2rMyua5ludqc2XTUdW9sC1Qkmxnfm1Zcyvnfi+2byMmq/m1ZQybWoxmw/2JhX8drl9AHmX+bWawybYhBMGSqTiQPcggaBJanNhuhbNLGPRzOyPHFbWWVZfwS9fPMBIIJh0y8nRopnfNMoesr8VOAk/eGoPU4sdfOyiuYgI9ZUltPZp8Ktyo3PAizEn9viNcDrseZ35HdU0/63AUuA9IjJx1/cx4mVk02mFc9v1y6MfN1SW8K0bTi34YE4lR0RY0RjqoZyMnsFw8JuHmV8riYhlr1HN4RZ22Qx+1clvfm0pvkAw7Sv4Hn8k+E0985u3we+hniEe397GP5w/O7rBraGyRDO/KmeiAy4qYr9JOh22fJ/wFm2ab4zxAZGm+Um5ec0iSsa86KSbqT1z9jQAvnXDqbz4xUs18FUpWdFUyd4ON26vP+HHdLu9AAkPKziZOYtslnSk2dvuRiSUbVcqUQ3hBElrb3IDR8aKbHhzptHKccJHiohLRF4RkS0iskNEvh7+/DQReVJE9ob/trSApGMg9MNZHX5ThEiLoPR+aEqlqiNOj9+Ik6DsIVbT/KSjzOtWNfCtG06NDpyZWe5KO1O7v2sQCM22VypVK5oqMSbU9zVR3eHMb/UkL3uAyGuUBZnfTjcNlSVZ2SilJo/6aPCbXhLTOxLA
6bClNfMhkbDZC1xqjFkBrASuEpFzgC8CTxtjFgBPhz+2zLAvXNA8KrKvryyhy+2N1nsolU3R6W5xgt+TYMPbhE3zReQmEXlNRF7r7OyM+0TXrWrgy1cvAeD3nzg37UztvvBl1Hl6GVWlYUVjJQBbDvUm/JjucM1vIbTQtLLsIZOdHtTkVBfeL5N22UM4+E3HhI82IZEZlUXhP4bQ5dJ7wp+/B7gurZWMcayg+diZZSRlnuyMbqWs0Nbvpdhui3t51FmU3zW/JNA03xhzlzFmtTFmdU3N+D0rnUXW9Tbe3+Wm3OVgegEEICpzpk0tZta0KUnV/XYPeqkoKUprA87JwoqrU8YY3uweZLZepVFJchXZmT61mNY0YzjPSDCtel9IsOZXROwishnoAJ40xrwMzDDGHAEI/x1zT3KimaSxYhU0R1LmWvercqG930NtuTPupRanw4Yvv8seok3zRaSYUNP8R1J9smNT7dIPfvd1DDKvtlRHl6u0ndZYwZZDyZU9TPZODxHFFuxLcHv9DPkC0SyeUsmoryyxZMNbVoJfY0zAGLOSUKboLBFZPsFDRj824UzSaJGC5tEbaxqrrKkXUSoVbX2xB1xEWFVPlynGGD8QaZq/C7gvnab5kcyvx4KAf3+Xm7nVehlVpW9lUyUtvcPRfSMT6XZ7C+aKgxWvUe39oQ2CtWUa/Krk1VW4OJJm1y7vSDCtNmeQZLcHY0wv8BxwFdAuInUA4b870lrJGMMx+rjNKHchAoc1+FU50OX2UlMavx2S02HP924PGGMeM8YsNMbMM8bcls5zuSzK/A54Rmjv9zKvVi+jqvStaKoEYGuC2d+eQd+kb3MWEar5Te9kNXJSUVteGD8zZa36cNcuE2Mke6KykvkVkRoRqQz/uwS4HHid0OXSD4bv9kHg4bRWMkakHYtz1DdY7LAxo8ylmV+VEz0TXB51FuV9twdLWZX5fSPa6UEzvyp9y+rLsdsk4brfbnfhlD2EXqPSO1ntCGd+x7sKplQ8DZUlDPoC9HsSb0c4lmckkNZoY0hswlsdcE+4Qb6N0KXSP4jI34D7ROTDwEHg79JayRjxJnjUV7q05ldlXSBoODrkG/fyqNOe32UPVrNqXGrkZLZpWubGGavCMaXYwcIZZWw5PHHmNxA09ExwXE8mVvQib5+g5aNS46mrDP3eHOkbjrbLTJZnJEiZK70BxRM+2hizFVgV4/PdwGVpffVxeEaCiEDxmB24DVVT2JrkBB+l0tU75CNoxm+Eb0VW5WQSueyUbra7XTNJymIrmyp4bFsbxphxN1H2DvkwBqaPU840mTgd9rTHG7f3e5lSbKfUmV7woQrT6F6/i2eWp/QcnpEANWXpHbN529vFMxKgpMh+wgtXfaWLI70egsHU60WUSlZkBOq0CWt+C6jswaLMb3u/B4dNmDalMLJvKvNWNFbSNzzCm91D494vMuCiEKa7QSTzm37Nr56oqlRFWtamM7DM6w9mvs9vrgyPxC5obqwswRcI0hUeSalUNkTeJMcte3DY0s6qnEysyvx2DHipLXNis2mbM2WNyKa3iep+IwMutOY3cR39oeNVqVRUlzpx2IQjaezd8sSJD5ORt8GvZyR43HS3iEjKXDs+qGyKZH7H3fAWbiOUzi7Wk0nkzNtjQea3RjNJykILakspKbKzeYJJb92DoSRKYXV7SPN41cyvSoPdJsysSK9xQSj4naSZ33itLBq016/Kge7wlYbxa37tGAMjgUIJfi3K/PZ7maGZJGUhh93GqQ0VE445LrjMb5oT3owxtPd7mKFtzlQaQoMuUi97CCVHJ2nm1xsnra1T3lQuRMoeqsapS43WwBZIu7Miu2ATCzK/mklSGbCiqYLtrf2MjFOK1D3oQ2T84zqTROTvRGSHiARFZPWY224VkWYR2S0ia6z4esUOGyMBQyDFPTP9Hj+ekaAOuFBpqa9w0ZrioAtjDN5sTXjLBU+cCR7lriLKXA7N/Kqs6hn0UVFS
RJE9/iFTHA1+C6PuV0TSbprvGQnQOzSimSRluRVNlfj8QXa3DcS9T7fbS9WUYuy5qzffDtwAPD/6kyKylND48WWEhkr9JNxuNC2RqzW+FF+jOnXAhbJAfWUJbX2elE7CRgKGoDmxDW6y8jb4jbfhDUK7BVs0+FVZ1D04cS9QZ4EFvxB6AUrn++0cCI9K1cyvstiKxkoANo1T+tCTwHGdScaYXcaY3TFuWgvca4zxGmPeAJqBs9L9eulendK2hMoKdZUl+IMmpcYFkaFKkzjzO1Hwm3q9iFLJ6klgClS0Brag2p3ZowNpUqEN81WmNFaVUF1azOaDvXHv0+325Wubswbg0KiPD4c/dwIRuUlEXhOR1zo7O8d90shUxlRPWPV4VVZoCA+6SCWJ6Ykx/TcVeR78xl5eaDb0+P0blbJS96B3wjfJQsz8pts66VgmSS+jKmuJCCsaK8dtd9Y16KU6wwMuROQpEdke48/a8R4W43MxrxEbY+4yxqw2xqyuqakZdy3HTtBTDX7DV2p0g6pKw+hBF8mK/O7G6gaWjLwd0RKq+Y2T+a0qod/jZ8AzQpkrtfF4SiWjZ9DHGadMG/c+kaxKqvV0JyOXVZlf3UCjMmBlUyXP7O6g3zNCeYz3ip7Bia/opMsYc3kKDzsMNI36uBFoTXctkRN0XyC1Y7ZjwEOp08FUne6m0lBXEQp+j6RwBb9AMr+xv7ljZw1a+qAyLxg0HB0aSaDmN9L6q3CC37QzvwMeiu02KqfoSayy3oqmSoyBrYf6TrhtJBCkd2gkX8seHgFuFBGniMwBFgCvpPuk6fbm7uj36mY3lbZyl4NSpyPFsgdrMr/5HfzG6ePWkEbKXKlk9Q2PEAiaBGp+C6vVGYQyv+mMN468mY4dY66UFcab9HZ0KNLjN3fBnIhcLyKHgXOBP4rIegBjzA7gPmAn8Cfgk8aYtF9YnEXpnaC393v0Ko1Km4hQX5naoAurNrzl7bULjz92qzM4FvzqlDeVDZEevxPX/KZXT3cychbZcHv9KT8+1DBf30xVZlSUFDG3ZiqbYmx6iw64yG23h4eAh+Lcdhtwm5VfL+1uDwMezphVZeWSVIGqqyjhSF/qZQ+TstvDSCBIIGgoifPN1ZY5KbKLZn5VVkSmu000ArXQ+vxCeFxqGsG+TotSmbaysZLNh3pPGDueD8FvtqWzKTc03c2rJ6vKEqEpb2lseJuMfX4niuxtNmFGuYsjGvyqLOhJOPObv2UPInKHiLwuIltF5CERqbTieZ1FtuhlqFR09Ht1WpTKqJWzKulye2kdk2XqHgyf1BbIaGMYdYKewglr/7Afnz9IjXZ6UBaor3DRPehLesP0pO7zOxwNfuMvr7bMScdA8g2SlUpWpOxhwprfNHtoZtiTwHJjzGnAHuBWK57U6bClnPkd9PoZ8Po1k6QyKjLsYmy/32OZ38IJ5o5tyk3+hLV9QHv8KutEGhckW/pwbMPbJAx+I2+m47WyqC1zafCrsiKS+a2akljNbz62OjPGPGGMiRTnvkSodVLaXEX2lIP9yPGrZQ8qk5bUlVNst52w6a1n0IfdJlSUFE6nkXTKHnTAhbJSqr1+PQkkRxORl8FvIgXNteVOOvq11ZnKvG63l3KXI3rJMJ58LnsY4x+Bx2PdkMy0KIhkflMdlapvpirzih02ltaXn5j5HfRSNaUYm61wOo2kc3VKB9IoK9WHp7ylGvw6J2PmN5LWjrfhDUJlD/0ef1oN9pVKRPegL6F2SM406umskMgkKRH5MuAHfh3rOZKZFgWa+VUnh5VNlWxr6cMfOPa72uX2UV1A9b6Q3gj2jnDZg9boKyvMrIgEv8klMSPvN840M7952eossZrf0A+uc8BL07QpWVmXKkw9g76EGuE77DbsNslZze9Ek6RE5IPANcBlZuzW9xQ5HTZ84e4s9iQzaJErN7Wa+VUZtmpWJb/86wH2tLtZWl8OJH5cTybHJrwl/xrV0e+lzOWgpDi9jJtS
EDoRqylzppT5FTn2u5yqPM38Tlz2UBPOFmndr8q0ZN4ki+22vCx7EJGrgC8A1xpjhqx63nTqnNv7PbiKbJTpqFSVYdFNb4d6o5/rdntzOuAiF9K5OqU9uZXV6itctPYlH/w6Hba0ByPld/A7Tk1HTfhFq3NA635VZnUPJn55NN1xvxl0J1AGPCkim0Xkv6x4UldR6nXOkZ6hOt1NZdop06dQOaWIx7Yd4Wh4A2u321dQPX4hNFmr2JHaa5T25FZWqylz0ZlkAtMzEky7zRnkadmDJ3xglhSPU/agmV+VBcGgSSrzm07rr0wyxszPxPNGMr+eVDNJWj+oskBE+NB5s/nh03u54D+f4QPnzmbA6y+44BfCr1EpnqyePWdaBlakClVNmZPNh44m9RivP5B2mzPI88zveLv5pk91YpNQHZJSmdLvGSEQNExLsBeo02FPqZ7uZJVO5rdjwBs9iVUq0z5z+ULWf+YiLllcy8+e3wfAtALb8AbhqYxJZn6NMXQOeLU+X1mqprSY7kHfcRtRJxLK/KYfuk74DCLSJCLPisguEdkhIp8Of36aiDwpInvDf1s28DuRml+7TagudUZ3oCqVCdEBF8lkfvOw5jdTUs38hkalag2hyq6FM8q4872ns/4zF/FPb5nHVctm5npJWZfK1aneoRF8gSC1Ot1NWaimzIkx0DPkS/gxnpGAJWUPiYTPfuBzxpglwDnAJ0VkKfBF4GljzALg6fDHlki0iXFtuU55U5nVk+B0twhnUX6WPWRKqplft9fPkC+gNYQqJxbOKOOWqxYX3IY3SO0EXae7qUyIjMpOpu7X4w+m3ekBEgh+jTFHjDEbw/8eAHYBDcBa4J7w3e4Brkt7NWHR8XUTRPe1ZS4te1AZFRmBmnjNb+p9b09Gx8alJvc9H2uYr2+mSmVTKhvedMCFyoTq8Mlnlzu5zO94038TlVT4LCKzgVXAy8AMY8wRCAXIQG2cxyQ1MQpC35zDJhTZJ8j8ljnpdGvwqzKnezD0+zU9wZrffG11limRRuPJDpuJ9vjVDW9KZZUzhcE0HTqNUWVAKplfbxbLHgAQkVLgAeAzxpj+RB+X7MQoSLyVRW2Zk263l0DQkn79Sp2gJ3xGWjW1KKH753Grs4xwRSdGJZlJil5G1UySKlwicoeIvC4iW0XkIRGpHHXbrSLSLCK7RWSNVV8zlZHkkfLCGq35VRaqLk2h7GEkiCsbZQ8AIlJEKPD9tTHmwfCn20WkLnx7HdCR9mrChkcCCe3mqyl3ETShZuVKZUL3oI8ypyPhOeL52uosU6KZ32RrCMOXUXX3uCpwTwLLjTGnAXuAWwHC+2puBJYBVwE/ERFLRqtFpjImo6PfQ7nLYUnGTamIqU4HU4rtdCURw3n8Wcr8SqgD/S+AXcaY74266RHgg+F/fxB4OO3VhHlHAgkFG5Gdp7rpTWVKz6Av4c1uEKn5LZyyh5Qzv/0eSp0OSnW6mypgxpgnjDH+8IcvAY3hf68F7jXGeI0xbwDNwFlWfE2nw5708do16Itm6ZSyUk2ZM8nMb2LJ0Ykk8gznAx8ALg1PhtosIlcDtwNXiMhe4Irwx5bw+AMJzQ+viQa/2u5MZUYyAy4gnFUpoLIHZ7TbQ7KZJO3xq9QY/wg8Hv53A3Bo1G2Hw59LW6g0K7kT9B53cq+DSiWqpjS54Nfrz9KEN2PMC0C8+aOXpb2CGBJtYhzN/GrHB5UhXW4vjVVTEr5/odX8RlrOJLvhTae7qUIhIk8BsRoKf9kY83D4Pl8m1Fb015GHxbh/zM0tInITcBPArFmzJlyPM4VuDz2DPmZXJ/46qFSiqkud7Ot0J3x/q/r85uU1x2FfYuPrarTsQWVYz6CPFY2VCd+/0FqdRV6Ekm6dNODh9FmWzcVRKm8ZYy4f73YR+SBwDXCZMSYS4B4GmkbdrRFojfP8dwF3AaxevXrC3d+pvEZ1D/o4/RQ9XpX1asqcvPRGd0L3NcZkd8NbtiVa0Ox0
2KmcUqRlDyojjDEcHfIlNQK10Ca8FdtTy/x2Dfh0WpQqeCJyFfAF4FpjzNComx4BbhQRp4jMARYAr1jxNZPt9hAMhl4HE51yqVQyasqcoQmCCZyQRU7ast7nN1uSmd1cm2SxtFKJ6vf4GQmYpF70ix02RgKmYNrv2WySdNP8IZ+f4ZFAQU7XUmqMO4Ey4Mnwfpr/AjDG7ADuA3YCfwI+aYyx5Kw62bKHvuERAkGjNb8qIyIbKSM99ccT2ahpxYS3vCx7SKaJcW2ZS8seVEZEWugl2+0BwOcPJrRpczJINtsdmZqnmSRV6Iwx88e57TbgNqu/ZiT4NcYQauY0vu4kR7wrlYzRgy7qKkrGvW+kpWZWh1xkUzIFzbVlTt3wpjKiZzAy2jjxDGXkjLSQSh+cDnt0JHkiIj0dtXWSUtkXuWScaK/fY6+DGvwq6yUz5S1SXjdpg99Eh1wA1JSHyh6O7RNQyhrRjEcyrc7Cv7eF1O7MlWTrpGjmVzNJSmXdsRP0RIPfUFCiwa/KhOrw+0Aigy4iSZZs9fnNutBuvsTLHnyBIH3DIxlelSo0qWQ8ImUP+drxQUQ+LyJGRKqtes5kp9pFaru05lep7IsGvwkes8eSAHq8KuslM+I4mvlNMD4cT94Fv8aYhIdcgE55U5kTORiTq/nN37IHEWkiNJDmoJXP6ypKbqpdl9b8KpUz0X0JiZY9hI/XqqlFGVuTKlyuIjtlLkf0fWE8kaTSpCx78AWCGJP4N1ejgy5UhrT3e5g2tTihUdsRx4Y+5GXm9/vALcRplp+qZHePd7t9lDodlryAKaWSE53KmGC7s+5BH2VOR1Kvg0olI9ERx8dqfidh2YPHl1wri1odcawypGPAm3Qv2uIk6+myRUSuBVqMMVusfm5XkT2pPr/dg16t91UqR5Kv+U2u17lSyUp0xHHkfcaKE7G8a3WWbCuL2vLQiFTt9aus1tHvif5+JepYzW/2yx7GG6MKfAm4MoHnSGpUKoTeTN1ef8Lr7Hb7dPOMUjmS7L6EnkE9XlVmVZc52dXaP+H9PH7rNrzlX/CbZCuLUqeDKcV2rflVlmvv97JwRllSj4leUsxB5jfeGFURORWYA2wJ9/VsBDaKyFnGmLYxz5HUqFSItDpLpubXS2PVlITvr5SyzrENb4mXPTRUJpcEUCoZNaVOni/0VmeRWsmSJL652jKnBr/KUoGgodPtZUbSmd/kdlJngzFmmzGm1hgz2xgzGzgMnD428E1VqNVZMt0efNH2Nkqp7Eq2NKvb7dXMr8qomjInA17/hEmUyAmbc1LW/KZQ0Fxb5qKjX2t+lXV6Bn0Egoba8uRqfpPdST0ZJJP5DQYNPYM+rflVKkeSKXswxnB0yJfUoB+lklWTYLuzY31+J2HmdziFtHZk0IVSVmkPn0zVlqWa+c2/VmcR4Qxwl1XPl0zmt294hEDQaM9QpXLkWGnWxK9R/R4/IwGjV2pURkWnvE0w6GJS9/lNLfOrZQ/KWpHuITOSzfzmsOY3V5xF9uQb5uubqVI5kUxplo42VtmQ6Ihjrz+ITaDILml/zTwMfpNPa9eWuXB7/Qz5Et9xrtR4In2jk675tef3hLdMcDpsePyBhEaMd4fP7Kt1uptSOZFM2YOONlbZEHk/mGjEsWckgKvITnjjdlryLvj1JtnqDEb1+tVBF8oi7f2pBWnJXFKcLFxFdoyBkUACwa9mfpXKqUjm15fAa1S3W0cbq8yLvB9MWPPrD1g2HCnvgt9UWlnU6IhjZbH2AQ/TpxZHd0Ynqtief90eMi061S6hN9PwyGh9M1UqJ5IpzYqWPejJqsqgIruNaVOLE9rwlugAtInkXfA77IsUNCe+tPpwD8IjfcMZWZMqPKkMuACw2YRie3Ktv052zvCJaiIBf5fbhwhUTSnK9LKUUjFET9ATeI2KXqnRsgeVYdWlxQmXPVgh74LfYxM8Ev8G6ytLADh8VINfZY1URhtHOB02fIUU/DoSL/XoHvRS
NaUYhz3vXnqUyjoR+aaIbBWRzSLyhIjUj7rtVhFpFpHdIrLGqq/psNtw2CSh47Vn0MeUYrtlAYdS8dSUTdy1a1JnflMpe5hS7GDa1GJaejX4VdZo7/ck3ekhwllkK6ia32jZQwKZ3263T7NISh1zhzHmNGPMSuAPwL8CiMhS4EZgGXAV8BMRsSwCdTpsCXd70M1uKhtqSp0TtjrzTu6a3yDFdht2W3K7+RoqS2jRzK+yQCBo6BxIfrpbhNNhL6iyh8iLUUKZX7cOuFAqwhjTP+rDqUBk1+ha4F5jjNcY8wbQDJxl1dctdiRWmtU9qCerKjuqS510DfjG7RoUKnuYxJnfVEbXNVSWaOZXWaLb7SVoSKvsoZCC32Qyv12DXqZrmzOlokTkNhE5BLyPcOYXaAAOjbrb4fDnYj3+JhF5TURe6+zsTOhrhk7QE9ugqplflQ01ZU6GRwIM+uL/XnpGgtnL/IrI3SLSISLbR31umog8KSJ7w39XWbIaUi9obqgKZX4T6TWq1HgiXUNS2fAG4axKHk94s1qymd9qfTNVBUREnhKR7TH+rAUwxnzZGNME/Br4VORhMZ4q5pubMeYuY8xqY8zqmpqahNbkTHAqY6jsQU9WVeZFe/2OU/fr9Qcsme4GiWV+f0mo5mi0LwJPG2MWAE+HP7aEZyRASSrBb2UJwyMBjg6NWLUUVaAio41TL3sozMzvRN+zzx+kb3hEM7+qoBhjLjfGLI/x5+Exd/0N8I7wvw8DTaNuawRarVpTIjW/xphQ2YOWKaksSGTEcSjzm6WyB2PM80DPmE+vBe4J//se4DpLVkPq31xDVajjg9b9qnS1R6e7pVr2kNglxckimvmdINt9dEgHXCg1mogsGPXhtcDr4X8/AtwoIk4RmQMsAF6x6usm8ho16Avg8we17EFlRSIjjj0jgeiEwnQ5UnzcDGPMEQBjzBERqY13RxG5CbgJYNasWRM+caoTPBqi7c6GOLWxIunHKxXRMeBBJPURvM4iG4Pewhm1nWjmt0sHXCg11u0isggIAm8CHwcwxuwQkfuAnYAf+KQxxrIzaqfDhi8w/vHaE57upsGvyobIHpvIlddYrNzwlmrwmzBjzF3AXQCrV6+esCB32JdaTUdjJPOrm95Umtr7vUyfWkxRir1onQ4bPYMFVPYQPln1TJD5jYxKrdbMr1IAGGPeMc5ttwG3ZeLrOosmLnvoHoycrOrxqjJvWnii6pG+cYJffxY3vMXRLiJ1AOG/OyxZDaFvLpVuDxUlRUwttuugC5W2jn4PtWWp1ftCAbY6SzDzG30z1ZpfpXIqkdeoyGhjPV5VNogIdRWuuMFvMGjw+YPRZEu6Ug1+HwE+GP73B4Gxhfsp86a44U1EaKyaoplflbb2AQ+1Kdb7QmTDW+HU/CY63jiS+dWaX6VyK5HXKB1trLKtrsLFkTgxnDc6/TdLG95E5LfA34BFInJYRD4M3A5cISJ7gSvCH1sindnNkXZnSqWjo9/LjDQyv8UJTk+aLFzRPr/jv5l2uX0U222UOTNebaWUGkciHWkimV+t+VXZUl9REjfzG53+m60Nb8aY98S56TJLVjDGcBoFzQ2VJWx486jFK1KFxB8I0uX2ptzpAfK31ZmI/DOhPqJ+4I/GmFuseF5HeCLjhGUPbi/TS4sRSW56o1LKWomcoPcM+nA6bEwptmyqslLjqqt00dbvIRA0J0z59YSvVFhV85t3KZh0Jng0VJXQNzyC2+unVLNLKgXdg77QdLcUe/xCqAwg38oeROQSQi0KTzPGeMfr0JIKp8M28YY37RmqVF5IpNVZtzs02lhPVlW2zKwoIRA0dA54mVlx/Htw5GRtUo83TqXmF461O9PSB5WqjnCP31RHG8OxzG+eTRv8BHC7McYLYIyxbJMqhM7GE8r8apszpXIukatT3YNepunJqsqi+nDAe6TvxBjO6sxvXgW/xhi8aezmiw666B2yclmqgKQ73Q1CbyzGgD+YV8Hv
QuBCEXlZRP4sImfGupOI3CQir4nIa52dnQk/eSIbaLrcmvlVKh8kMt5YRxurbKurCMVwsep+PeHMb6SvfLryqjYg3d18jZr5VWlqH7Ai+A13P/AHU+4VnAoReQqYGeOmLxM61quAc4AzgftEZK4Zk55Oti93hKvIHn1xiiU0KtWb8uAQpZR1nA47gaDBHwjiiPMa1e32Ma+mNMsrU4WsvjL0vtsao+NDdMPbZKz5Hfalt5uvutRJsd3GYW13plLU3u8NT3dLPUMZ6VPtHQlktfbcGHN5vNtE5BPAg+Fg9xURCQLVQOLp3XFMlPkd8gXwjAS1bZJSeSCSPfONE/yGMr96vKrsqSgpoqTIHifzGwl+J2HNb7o1HTabUF/p0syvSlnngIfpU51x3xASkei43yxbB1wKICILgWKgy6ond06Q+T3W41czv0rlWvQ1Ks4xO+wLMDwS0OBXZdWxQRexMr+RsodJmPmNfHMlxakHHg1VJTroQqWsvT+9NmcQaiMEeRf83g3cLSLbAR/wwbElD+mYKPPbFZ3upm+mSuVadDBNnNcoHW2scqWuMvaUN+9kbnVmRRPjhsoSnt1tyZVcVYDa+z1pdXqA0TW/+dPuzBjjA96fqed3Omy4vf64t0cyv9W6gUapnDt2dSr2a5QOuFC5UldRwgt7T7woObnLHiwoaG6onELngHfCnqNKxdIx4E1rsxtMfElxMppow1u3WzO/SuWL0ZtyY+kcCB2vNWkmApRKVn2Fi44BD/7A8b+bnmif30nY6mw4HLA604jsI+3O4o3IUyqeyHS3dAZcwLE3Fl+gcILficoeujWTpFTemOgEvS3c8nHsoAGlMq2usoSggfbwCViE1WUPeRX8Rg7EVIdcgA66UKlr6/dgDMxMN/gtKszM73jfb+eAl1Knw7IXLqVU6oonKHs41vVGM78qu+oigy7G7N2yus9vXgW/VpQ9NOqgC5Wi5g43APNqpqb1PBPV001GE2V+2/o8mkVSKg4R+byIGBGpHvW5W0WkWUR2i8gaK7/eRB1pOvo9VJc6s9qnXCmIP+jCMxLAbhPLfifza8ObBWntmRUubKKZX5W8SPA7vza9xu4T1dNNRk7H+JnfI/2e6Bm9UuoYEWkCrgAOjvrcUuBGYBlQDzwlIguNMZacUR/r9hD76dr6PWl3vVEqFXWVsUcce0aCuCzK+kKeZX6HfelNeAMostuYWe7SQRcqafs63VRNKUq7F+1ElxQnI1eRLXryGsuR3mENfpWK7fvALcDo1oNrgXuNMV5jzBtAM3CWVV9woprf9n4vM8r0eFXZV+4qotTpoLV3TObXH7C0bC6vgl8rWp1BuNevZn5VkvZ1DKad9YXC7PbgdNgZCRgCwRNbB48EgnS6vdHLWUqpEBG5FmgxxmwZc1MDcGjUx4fDn7NEImUPM/RkVeVIrEEXnhFrg9+8LHsoKU4z+K0s4dUDR61YkiogzZ1u1iybkfbz5OmEt4yKXK3x+gNMKT7+ZaU9vJFQM7+qEInIU8DMGDd9GfgScGWsh8X4XMyhNCJyE3ATwKxZsxJaU6TswRfjNcrrD9A96NPMr8qZusqSE2p+vSPBtDqBjZVfwa9Fu/nm1ZSybnMrA54RylxFVixNZcH2lj7e7B6ib3iEfs8IxsA7z2jMSq/JnkEfPYM+5tVYkPmdoJ5uMhqd7Z4ypptZW5+2TVKFyxhzeazPi8ipwBxgi4gANAIbReQsQpneplF3bwRa4zz/XcBdAKtXr05oauN4m3IjPX615lflSl25i11H+o/7nGckkHZVwGh5Ffx6RwI4HTbCLwQpO7WxAoDtLf2cO2+6FUubUCBo6BjwMLPclfb6C82rB3r44VN7eaH5xKkuP362mX+6ZB7/eP6cjLbJinZ6sLDsIVZWZbIab1xq5Ay+vlLLHpSKMMZsA2ojH4vIAWC1MaZLRB4BfiMi3yO04W0B8IpVX3u8q1Pt/eHgV09WVY7UVbrocnvx+YPRPTSh
mt9Jmvkdtqim49SGSPDbl9Hgt63Pw7O7O/jL3k5e2NtFv8dPdWkxZ8+dzjlzpnHpkhnRvsPqRNtb+rj98dd5obmL6tJivnT1Yi5eWEt5iYOKkiKO9Hn41mOv8+0/7ebXLx3kG2uXcdmS9MsSYol2erAg8+uwCTYpzLKHWJMVI7VbmvlVKjHGmB0ich+wE/ADn7Sq0wOM35GmPTzgQsseVK7UV5RgTOh3sWnaFCB0VXHy1vyOBNIacBExvdRJQ2UJW1v6LFhVbE/saOOff7sJrz9IXYWLq5bPZPHMcra19PHS/m7+uPUI3/jDTv7h/Dl86tL5lGv5RZQ/EOSnz+3jh0/vpaKkiC9fvYT3nTPrhFrReTWl/PyDq/lrcxff+MNOPva/G/jtTedw5uxplq5n3aYWbntsJwDv/tnfuOWqxVy3KvW9JSISav1VQMHveG+mR/o8lDodegwoNQ5jzOwxH98G3JaJr1VkF0RCV1vHiga/WvagciTS7qy1d5imaVMwxtAz5GP29PR68I+WZ8Fv0LK09qkNFWw73GvJc0EoQLpj/W5ae4epKCmizzPCaY2V3PHO01hQW3pcqYMxhgPdQ/z0uWb++y/7eWDDYf7lioXceGYTjgJsGj76Z1db5qSk2M6B7iHWrqznG9cup2LK+EHRefOrue/j57L2zhf5p19v5I//fEHaI4hHr+3WB7dFR2u39nm49cFtAGkFwMUOW8w3lslq3Mxvrw64UCqfiAjFdlvcsociu+gocpUzYwddPPN6B/s7B/nH8+dY9jXyKhKzspXFqY0VHAhvnkpXJEBq6R3GAL3DIwjw3rOaWDij7IQaXxFhTvVUvv3OFTz6qQuYX1vKV9Zt5x0//St72wfSXs/JZOzPrn3Ay4HuIf7+3FP44Y2rJgx8I8pdRfzX+8/A7fHzT7/eaFk97R3rd0cD34jhkQB3rN+d8nOu29TCgGeEe/72Juff/gzrNrWku8y8N27mVwdcKJV3QlMZY5c91Jbp3hWVO9ERx30eAkHDt/+0m1OmT+HdZzZN8MjE5VXmd3gkEN04k64BTyjoXfH1J2ioLOHmNYtSzuTFCpCCBn70dDPvPnP81jLLGyq496ZzeHTrEb72yA7e9qMX+PTlC5hZ7uJ7T+6htXeY+jTXNzqzGnmua1fUs+VwL9ta+tjb7mZP+wAHugcJGii22yiyC+UlRSytK2d5QwWnNlSwpK6cYoct5vNZ+bMDeHpXB99Ym9xzLZpZxrffeRr//NtNrPrGEwz5AmmvrzXOMJR4n59IJNiPtLtt6R22JJOc70a3OhvrSO8wixbVZHtJSqlxOIvsMY/X9n69UqNya6rTQbnLwZG+YR7Z0sLu9gF+9J5Vlo7bTiv4FZGrgB8CduDnxpjbU32udZtaeHl/D75AkPNvfybtYPCXLx6IfpxuAJJugCQiXLuinvPmTeer67Zzx/rdiICxIEAae9m+pXeYz92/hX99eDv9Hj8AZS4HC2pLuWB+DUV2YSRgGAkE6R708vj2Nu599VD0fgtry9jW0ocvEEx7bf5AkBaLg8tA0OCwCYO+Y99vOv+39ZUlMdeYameC8TLJkzn4jWR+PWMGe0QGXMzUARdK5RWnwxZzEE9bv4fFM8tysCKljqmvLOHN7iGeeb2DZfXlXHNqnaXPn3LwKyJ24MeEZpIfBl4VkUeMMTuTfa5IAGdFwAWhAMQz5nJOOgHIzArXCQ2XIfkAqbrUyU/ffwarvvEER4eOL8dIdX3f/tPrJwRbgaBhJGD44Y0rOWfudGrLnHEvYRljOHx0mK2H+/jzng5+v+EwY4d0Jbu2kUCQhza2cOezzXHvk05w6R+zwHT+b29es4ibf7+FkcCx5ywpsnPzmkUprc/qTPLJIl7f0MiAi3rNJCmVV+KVPXT0e7logV6pUblVV+Hi2d2dANx2/anYbNaW4aSTQz4LaDbG7DfG+IB7Cc0jT5rVdZdWByALYvR+TSdA6h2KXYfc0juMMRP3
KDfGsL2lj+8+sZvWGEE5hOqn165sYMYEfYdFhKZpU3jbaXV8+50rTgh8R6/tx88289L+7pibmgY8Izy/p5PvPbmHS77zHLc8sJWKkiI+csEcXGOGluRTcHndqgYuWRRttUlDZQnfuuHUlLO08YL6XPe4FZGVIvKSiGwWkdfCjfQtE6nVH5v51QEXSuWnWB1p3F4/bq9fj1eVc3Xh98xz5k7jogXVlj9/OmUPsWaPn53KE1kd0Fh5Kftv+7p5fm8XFy6oZn/noCV1sPHWB3DZ9/7M20+rZ9HMMqZNLWb61GKK7Dbe6B5kX4eb5g43f9nbRUvvMDYJdRWItfkr1WCrIc7aHDaJnowU2YXqUidOhw2nw07AGPZ3ugkasAmcPquKb6xdxiWLahERljdUWFZDbHWZAoDdFtqg+Ozn35Lyc0TcvGbRcWUokF6wb6FvA183xjwuIleHP36LVU8eL/OrAy6Uyk/OIlvMKzWgbc5U7jVVhfr73nLV4oxsvkwn+E1o9ngic8etDmisCkDcXj83/34Ls6dP4WcfOOOEPrSpirU+l8PGtSvrOdgzxI+e2Uu8BPC0qcWsbKrk05ct4PKlM3h+T6elwVa8n923bjiVixfWsOHNo2w4eJRutxevP4h3JEjAGN52ah2rZ1exalYVpc7jf07XrWqwrN411vocNkkruGzucFsy1hiOlelYFexbyADl4X9XEGdUaqqccTK/OuBCqfwUq+xBB1yofPG+c2Zx+qxKTp9VlZHnTyeaS2j2eCJzx63OlkUCjf98/HWO9Hsodzn4xtrlSQcgdz7TzOGjw/z+4+daFviOXl+8AOnooI8jfR56Bn10D4aCzDnVU5lXU3pC70Wrg62Jnu/ypTO4fGlmpqylsj5XkZ2RQJAz56Q2+MIfCHKge9DSyXFWBvsW+gywXkS+Q6jc6bxYd0rkZDWW8TK/OuBCqfzjdNjpHdMKtENHG6s8Ue4q4uy5mZvQm05E9yqwQETmAC3AjcB7U3miTGTLIgHIxXc8y9K68qSf62D3EHe/8AY3nN7Aaosnio1eXyxVU4upSqLBuNXBVp4Gb1Gj19fSO8wl33mO7z2xh+++a0XSz3WwZ4iRgGF+jLruk42IPAXMjHHTl4HLgH8xxjwgIu8CfgFcPvaOiZysxhINfsdmfnXAhVJ5yRljEE9btOxBj1k1uaUc/Bpj/CLyKWA9oVZndxtjdqT6fJkKuE5tqGDTwd6kH3f7n3Zhtwm3rFls+ZqUdRoqS/jQebP577/s52MXz2XhjORa9DR3uAEmRfBrjDkhmI0Qkf8BPh3+8H7g51Z+7dBIZxuesZlfHXChVF6KtV+kvd/D1GL7CaVrSk02aXUMNsY8ZoxZaIyZF55DnndOa6ygpXeYnkFfwo95eX83j21r4xNvmadZq5PAJy6ex5QiO//3mfit1eJp7gwFv3NrrJsZnqdagYvD/74U2Gv1F4jVN/RI77AGv0rloVjdHjr6vVryoApCXo03zoTlDRUAbGvpS+j+waDhm3/cSX2Fi49eODeTS1MWqZpazN+fN5s/bG1Nenx0c4ebGeXOQqhJ/SjwXRHZAvwH4bpeK7nGTIzSARdK5a9Y3R7a+j262U0VhIIJfjcnWPrwwMbDbG/p5wtvXUxJsTWjllXmffTCuZQkmf0NBg0v7+9heX1FBleWH4wxLxhjzjDGrDDGnG2M2WD11yh1OaJ9fUEHXCiVz2JdqdHRxqpQTPrgt9xVxOpTqrj31YMxhzOM1jvk49vrd7NqViXXrqjP0gqVFaZNLeYD557Co1tbo3W8E3n5jR5aeoe5dqX+X1vh0kW1vNDcFS0x0gEXSuWvmjInA14/h3qGgNDwpI5+L7Xa41cVgEkf/AJ89sqFHOnz8KuX3ox7H2MMX35oO71DPv79uuUZaaqsMuumC+ficti585nEylkf3HiYUqeDK5fGapCgkvWOMxoZCRge3RLqeKgDLpTKX2tXNmAT+N2roVlVR4dG8AWC
WvagCkJBBL/nzavmgvnV/OS5fbi9/pj3eXhzK3/cdoR/uWIhywrgMvhkNL3Uyd+fewqPbGllX+f42d8hn5/Hth3hbafWaXmLRZbUlbO0rpwHNh4GdMCFUhMRka+JSEt47Pjm8PTFyG23ikiziOwWkTVWf+2GyhLesqiW+147xEggGB1wocerKgQFEfxCaJBGz6CPX/zljRNua+0d5qsPb2f1KVV87KJ5OVidsspHL5qL02Hnh0+Nn/19Ykc7g74AN5yev/2MT0Y3nN7A1sN97G0f0AEXSiXm+8aYleE/jwGIyFJCvfOXAVcBPxERy8/S33PWLDoGvDy9q0NHG6uCUjDB74qmStYsm8F//2U/R0e1PQsGDZ+/fwvBoOF771qJ3ablDiez6lInH7lwDo9saeWZ19vj3u+BjYdprCrhzAwMMClka1c2YLcJD2xs0QEXSqVuLXCvMcZrjHkDaAbOsvqLXLKohpnlLn77ysFo8FurZQ+qABRM8Avw+SsXMejz89M/7+PooI8/bG3lU7/dyF/3dfPVa5Yya/qUXC9RWeBTl85n0YwyvvDAtuNOdCLa+jy82NzFDac3YtOTHUvVlDl5y8IaHtp0mBbt8atUIj4lIltF5G4RqQp/rgE4NOo+h8OfO4GI3CQir4nIa52dnUl9YYfdxrvPbOL5vZ1sePMogG54UwWhoILfBTPKuH5VAz//y35O//cn+dRvNvHC3i4+fMEc3n1mU66XpyzidNj57rtWcHTQx789cuLQwXWbWwgauCGPRzifzG44vZH2fi/bWvo0+FUFT0SeEpHtMf6sBX4KzANWAkeA70YeFuOpYo4bN8bcZYxZbYxZXVNTk/T63n1mEwI8uLGFaVOLcTp0D4Sa/ApuhuHNaxYx7AuwpK6cCxZUc1pDBQ57QZ0DFITlDRV8+rIFfPfJPaxZNpO3nVYHhLp6PLDhMGecUsXs6kk/1S0nLltSS7nLQb/HrwMuVMEbb+z4aCLy38Afwh8eBkZnZBoJTWm0XH1lCZcsquXp1zuYUa4nq6owFFzwW1dRwk/ff0aul6Gy4BNvmcdTu9r5yrptdA54GPD4aev3sLfDzX9cf2qulzdpuYrsvH1FPb9++aAOuFBqHCJSZ4w5Ev7wemB7+N+PAL8Rke8B9cAC4JVMreM9Z80KB79a8qAKQ8EFv6pwOOw2vvuuFVz/47/ytUd3AlBSZGfRjLJoJlhlxo1nzuJ3rx5icV15rpeiVD77toisJFTScAD4GIAxZoeI3AfsBPzAJ40x409pSsMli2uZWz2VRTPKMvUllMorGvyqSW1+bRl/+9JleEYClLuKKHZoiUs2nNpYwdavXcmUYn2JUSoeY8wHxrntNuC2bKzDbhMe+/SFFGkJoCoQ+s6kJr1Sp4NSp/6qZ5sGvkqdPFxFutFNFQ49zVNKKaWUUgVDg1+llFJKKVUwNPhVSimllFIFQ4NfpZRSSilVMDT4VUoppZRSBUOMiTkxMTNfTKQTeHOCu1UDXVlYTqJ0PfHl01rg5FzPKcaY5GeSZoEer5bQ9cSXT2uBk/x4hZPymM2ntYCuZyIn43piHrNZDX4TISKvGWNW53odEbqe+PJpLaDryYV8+x51PePLp/Xk01og/9aTKfn0febTWkDXM5HJtB4te1BKKaWUUgVDg1+llFJKKVUw8jH4vSvXCxhD1xNfPq0FdD25kG/fo65nfPm0nnxaC+TfejIln77PfFoL6HomMmnWk3c1v0oppZRSSmVKPmZ+lVJKKaWUyoi8CX5F5CoR2S0izSLyxTxYz90i0iEi2/NgLU0i8qyI7BKRHSLy6RyvxyUir4jIlvB6vp7L9YTXZBeRTSLyh1yvBUBEDojINhHZLCKv5Xo9mZBPx6wer+OuJ++OV8ivY1aP16yvJW+OV9BjNsE1TarjNS/KHkTEDuwBrgAOA68C7zHG7Mzhmi4C3MD/GGOW52od4bXUAXXGmI0iUgZsAK7L1c9HRASYaoxxi0gR
8ALwaWPMS7lYT3hNnwVWA+XGmGtytY5R6zkArDbG5FNPRMvk2zGrx+u468m74zW8rrw5ZvV4zfp68uZ4Da9Hj9mJ1zSpjtd8yfyeBTQbY/YbY3zAvcDaXC7IGPM80JPLNUQYY44YYzaG/z0A7AIacrgeY4xxhz8sCv/J2VmUiDQCbwN+nqs1FKC8Omb1eB13PXl1vIIeszmgx+s49Jgd32Q8XvMl+G0ADo36+DA5/MXLZyIyG1gFvJzjddhFZDPQATxpjMnlen4A3AIEc7iGsQzwhIhsEJGbcr2YDNBjNgF6vMb1A/LrmNXjVQF6zMbxAybZ8Zovwa/E+Fzu6zHyjIiUAg8AnzHG9OdyLcaYgDFmJdAInCUiObl0JSLXAB3GmA25+PrjON8YczrwVuCT4ct8k4kesxPQ4zW2PD1m9XhVeszGMFmP13wJfg8DTaM+bgRac7SWvBSu+3kA+LUx5sFcryfCGNMLPAdclaMlnA9cG64Buhe4VER+laO1RBljWsN/dwAPEbrsOJnoMTsOPV7HlXfHrB6vSo/ZuCbl8Zovwe+rwAIRmSMixcCNwCM5XlPeCBe//wLYZYz5Xh6sp0ZEKsP/LgEuB17PxVqMMbcaYxqNMbMJ/d48Y4x5fy7WEiEiU8ObJhCRqcCVQF7saraQHrNx6PE6vnw7ZvV4VXrMxjdZj9e8CH6NMX7gU8B6QoXm9xljduRyTSLyW+BvwCIROSwiH87hcs4HPkDojGtz+M/VOVxPHfCsiGwl9KL6pDEm5+1P8sgM4AUR2QK8AvzRGPOnHK/JUvl2zOrxOi49Xsenx2uW5dnxCnrMnkwsOV7zotWZUkoppZRS2ZAXmV+llFJKKaWyQYNfpZRSSilVMDT4VUoppZRSBUODX6WUUkopVTA0+FVKKaWUUgVDg1+llFJKKVUwNPhVSimllFIFQ4NfpZRSSilVMP4/qcxkdcX/tmwAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plot.figure(figsize=(12, 3))\n", + "line_x = np.linspace(0, 5, 50)\n", + "for i in range(3):\n", + " plot.subplot(1, 3, i+1)\n", + " plot.scatter(x, y)\n", + " fit = np.polyfit(np.concatenate([x, [3]]), np.concatenate([y, [i]]), 10)\n", + " plot.plot(line_x, np.poly1d(fit)(line_x))\n", + "plot.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "P_1Ag-VPkMdT" + }, + "source": [ + "Each of them perfectly interpolates the data points, yet they clearly are different models. (In fact, there are infinitely many 10th degree polynomials that exactly interpolate any ten data points.) They make identical predictions for the data we fit them to, but for any other value of `x` they produce different predictions. This is called *epistemic uncertainty*. It means the data does not fully constrain the model. Given the training data, there are many different models we could have found, and those models make different predictions.\n", + "\n", + "The ideal way to measure epistemic uncertainty is to train many different models, each time using a different random seed and possibly varying hyperparameters. Then use all of them for each input and see how much the predictions vary. This is very expensive to do, since it involves repeating the whole training process many times. Fortunately, we can approximate the same effect in a less expensive way: by using dropout.\n", + "\n", + "Recall that when you train a model with dropout, you are effectively training a huge ensemble of different models all at once. Each training sample is evaluated with a different dropout mask, corresponding to a different random subset of the connections in the full model. Usually we only perform dropout during training and use a single averaged mask for prediction. But instead, let's use dropout for prediction too. 
We can compute the output for lots of different dropout masks, then see how much the predictions vary. This turns out to give a reasonable estimate of the epistemic uncertainty in the outputs.\n", + "\n", + "### Uncertain Uncertainty?\n", + "\n", + "Now we can combine the two types of uncertainty to compute an overall estimate of the error in each output:\n", + "\n", + "$$\\sigma_\\text{total} = \\sqrt{\\sigma_\\text{aleatoric}^2 + \\sigma_\\text{epistemic}^2}$$\n", + "\n", + "This is the value DeepChem reports. But how much can you trust it? Remember how I started this tutorial: deep learning models should not be used as black boxes. We want to know how reliable the outputs are. Adding uncertainty estimates does not completely eliminate the problem; it just adds a layer of indirection. Now we have estimates of how reliable the outputs are, but no guarantees that those estimates are themselves reliable.\n", + "\n", + "Let's go back to the example we started with. We trained a model on the SAMPL training set, then generated predictions and uncertainties for the test set. Since we know the correct outputs for all the test samples, we can evaluate how well we did. Here is a plot of the absolute error in the predicted output versus the predicted uncertainty." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 279 + }, + "colab_type": "code", + "id": "r3jD4V4rkMdU", + "outputId": "c50122f9-e178-4f3e-ac74-760ddf338bc1" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEGCAYAAABo25JHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAjhUlEQVR4nO3dfbRddX3n8feHeNXgU9BkRK9EqHVgVIToRcR0CWG0iIMlg1S0rnawdlIdrSOtrInaJeDMLNKFrYtKkUZrKasuZSkY04LGzvCktKgJCWAEHCpFcmFpUAIiERP4zh9733By7nnY59y999kPn9daWTn3nH3P+e1zz/l99+/7e1JEYGZm7XXApAtgZmaT5UBgZtZyDgRmZi3nQGBm1nIOBGZmLfeUSRdgVEuXLo1DDz100sUwM6uVLVu2PBARy3o9VrtAcOihh7J58+ZJF8PMrFYk3dPvMaeGzMxazoHAzKzlHAjMzFrOgcDMrOUcCMzMWq52o4bMrJo2bJ3lgk13ct+u3bxwyWLOPulwVq+YnnSxLAMHAjNbsA1bZ/nwlbexe8/jAMzu2s2Hr7wNwMGgBpwaMrMFu2DTnfuCwJzdex7ngk13TqhENgoHAjNbsPt27R7pfqsWp4bMbMFeuGQxsz0q/RcuWVzYa7pPIj9uEZjZgp190uEsnlq0332LpxZx9kmHF/J6c30Ss7t2EzzZJ7Fh62whr9d0DgRmtmCrV0xz/mlHMr1kMQKmlyzm/NOOLOwK3X0S+XJqyMxysXrFdGmpGfdJ5MstAjOrnX59D0X2STSZA4GZ1U7ZfRJN59SQmdXOXArKo4by4UBgZrVUZp9E0zk1ZGbWcg4EZmYt50BgZtZyDgRmZi3nzuKG83osZjaMA0GDeY14M8vCqaEG83osZpaFA0GDeT0WM8vCgaDBvB6LmWXhQNBgXo+lfTZsnWXlums4bO1VrFx3jdfnt0zcWdxgXo+lXTw4wMZVWCCQdAhwGXAw8ASwPiIu7DrmBOCrwN3pXVdGxMeLKlMbeT2W9hg0OKDsz4CHLddLkS2CvcCfRMTNkp4FbJH0TxHx/a7jvhkRpxRYDrNWqMrgALdM6qewPoKIuD8ibk5v/xy4HfCnwKwgVRkc4GHL9VNKZ7GkQ4EVwLd7PHycpFskfU3Sy/v8/hpJmyVt3rlzZ5FFNautqgwOqErLxLIrPBBIeiZwBfDBiHi46+GbgRdHxFHAp4ANvZ4jItZHxExEzCxbtqzQ8prVVdkbyPdTlZaJZVfoqCFJUyRB4PMRcWX3452BISKulnSxpKUR8UCR5TJrqioMDjj7pMP36yMAD1uuuiJHDQn4G+D2iPiLPsccDPw4IkLSa0haKD8tqkxmVjwPW66fIlsEK4HfBW6TtC297yPAcoCIuAQ4HXivpL3AbuDtEREFlsnMSlCFlollV1ggiIhvARpyzEXARUWVwczMhvMSE2ZmLedAYGbWcl5ryMway0tdZONAYGaN5KUusnNqyMwayUtdZOdAYGaN5KUusnMgMLNG8lIX2TkQmFkjVWURvjpwZ7GZNdKgpS48mmh/DgQN5A+5WaLXUhceTTSfU0MNM/chn921m+DJD7k3MTdLeDTRfA4EDeMPudlgHk00nwNBw/hDbjaYRxPN50DQMP6Qmw3m0UTzORA0jD/kZoNVZUvPKvGooYbx7lBmw3njnP05
EDSQP+RmNgoHArOCeD6H1YUDgVkBPGnJ6sSdxWYF8HwOqxO3CMwKUIf5HE5d2Ry3CMwKUPX5HF6KxDo5EJgVoOrzOZy6sk5ODTWAm/jVU/X5HHVIXVl5HAhqzqNTqqvK8zleuGQxsz0q/aqkrqxcTg3VnJv4No6qp66sXIW1CCQdAlwGHAw8AayPiAu7jhFwIfBm4FHgzIi4uagyNZGb+DaOqqeuiuI0am9Fpob2An8SETdLehawRdI/RcT3O445GXhp+u9Y4NPp/5aRm/g2riqnrorgNGp/haWGIuL+uav7iPg5cDvQ/W6fClwWiZuAJZJeUFSZmshN/OJt2DrLynXXcNjaq1i57ppMQyzH+R0rltOo/ZXSWSzpUGAF8O2uh6aBezt+3pHed3/X768B1gAsX768sHLWUVub+GUZ5yrSV57V5DRqf4UHAknPBK4APhgRD3c/3ONXYt4dEeuB9QAzMzPzHm+7tjXxyzToKrLfez7O71jxnEbtr9BRQ5KmSILA5yPiyh6H7AAO6fj5RcB9RZbJbBTjXEX6yrOanEbtr7BAkI4I+hvg9oj4iz6HbQR+T4nXAg9FxP19jjUr3ThLRVR9eYm28s5k/RWZGloJ/C5wm6Rt6X0fAZYDRMQlwNUkQ0fvIhk++q4Cy2M2srNPOny/fD8Mv4oc53esHE6j9lZYIIiIb9G7D6DzmADeV1QZzBZqnM54d+Bb3Sipi+tjZmYmNm/ePOlimJnViqQtETHT67GBfQSSDpD0vWKKZWZmVTAwEETEE8Atkjx438ysobL0EbwA2C7pO8Av5u6MiN8qrFRmZlaaLIHgvMJLYWZmEzM0EETE9ZKeDxyT3vWdiPhJscUyM7OyDJ1QJultwHeA3wbeBnxb0ulFF8zMzMqRJTX0UeCYuVaApGXA/wG+XGTBzMysHFmWmDigKxX004y/Z2ZmNZClRfB1SZuAL6Q/n0GyNISZmTXAwECQLhz3lyQdxb9BsmTE+oj4SgllMzOzEgwMBBERkjZExKuBXstIm5kB3g+4zrLk+m+SdMzww8ysreZ2ZZvdtZvgyV3ZvEVnPWQJBKuAf5H0r5JulXSbpFuLLpiZ1Yf3A663LH0E7wHuKac4ZlZH3pWt3rL0EXwy7SMwM+vJ+wHXm/sIzHKyYessK9ddw2Frr2LlumtalR/3fsD1lmUewSrgDyXdQ7L6qEgaC68stGQ58mgGK9pcZ+lcnnyusxRoxWfNu7LVW5ZAcHLhpShQ27+gVo5BnaVt+Zx5P+D66psaknQiQETcQ7LMxD1z/4Da9Bl4NMPktClV4s5Sq7NBLYJPAK9Kb1/RcRvgT6nJBDN/QSejbS2xOnWWtiFV2oZzzNOgzmL1ud3r58rq90Ws4he0SdrWEqtLZ2kbJn614RzzNigQRJ/bvX6urLp8QZumbS2x1SumOf+0I5leshgB00sWc/5pR1buKrQNAboN55i3QamhX5O0keTqf+426c+HFV6ynHg0w2TUKVWSlzp0lrYhQLfhHPM2KBCc2nH7E12Pdf9caXX4gjbN2Scdvl8fAbglVgVtCNBtOMe89Q0EEXF9mQWxZnFLrJraEKDbcI55yzKPYCySPgecAvwkIl7R4/ETgK8Cd6d3XRkRHy+qPFY+t8Sqp4gAXbUROr4IGZ0iiun3lfR64BHgsgGB4EMRccoozzszMxObN2/OpYxmtjDdw4QhufquYkd520naEhEzvR7LvPewpGeM8qIRcQPws1F+x8zqxSN0mmFoIJD0OknfB25Pfz5K0sU5vf5xkm6R9DVJLx9QhjWSNkvavHPnzpxe2swWyiN0miFLi+CTwEnATwEi4hbg9Tm89s3AiyPiKOBTwIZ+B0bE+oiYiYiZZcuW5fDSZpYHT9hshkypoYi4t+uux3seOIKIeDgiHklvXw1MSVq60Oc1s/J4wmYzZBk1dK+k1wEh6anAB0jTRAsh6WDgx+nmN68hCUo/Xejzmll5PEKnGbIEgvcAFwLT
wA7gG8B/G/ZLkr4AnAAslbQDOAeYAoiIS4DTgfdK2gvsBt4eRQ1hMrPCeJhw/WUJBIdHxDs775C0Erhx0C9FxDuGPH4RcFGG1zczswJl6SP4VMb7zMyshvq2CCQdB7wOWCbpjzseejawqPdvmVndVW2msBVvUGroqcAz02Oe1XH/wyT5fTNrmLZtKGSJYYvOXS/p0nR7SrPa8dXtaLz3cjtl6Sy+VNK80TwRcWIB5bEKqmtl6qvb0XmmcDtlCQQf6rj9dOCtwN5iimNVU+fK1Fe3o/Na/u00dNRQRGzp+HdjRPwxcGwJZbMKqPOiYr66HZ1nCrfT0BaBpOd2/HgA8Grg4MJKZJVS58rUV7ej80zhdsqSGtpCslm9SFJCdwPvLrJQVh11rky9U9V4PFO4fYYGgoiozUb1lr86V6a+ujXLpu8OZZJOG/SLEXFlISUawjuUla9z1NBzFk8hwa5H97hitaHqOuKsiQbtUDaoRfCWAY8FMJFAYOWbSxXUeQSRlc+fl/oYNKHsXWUWxKrPwzFtFP681EeWUUPPIVlCem5XsuuBj0fEQ0UWzKqnziOI8uaUx3D+vNRHltVHPwf8HHhb+u9h4G+LLJRVk7clTMylPGZ37SZ4MuWxYevspItWKf681EeWQPCSiDgnIn6Y/jsP+LWiC2bV48lGiTpPsiuTPy/1kWUewW5JvxER34J9m9K4bddCHo6ZcMojG39e6iNLIHgv8HdpX4GAnwFnFlkoqy5PNqr3JLuy+fNSD1kmlG0DjpL07PTnh4sulFmV1XmSndVT0YMTsowa+u8kncM/Bz4j6VXA2oj4Rm6lMOtQ9RE5TnlYmcqYj5ElNfT7EXGhpJOAfwe8iyQwOBBY7uoyCWlQyqPqgczqpYz5GFlGDSn9/83A30bELR33meWq7iNyPLTU8lbG4IQsgWCLpG+QBIJNkp4FPJFbCcw61H1ETlGBbMPWWVauu4bD1l7FynXXOLC0SBnzMbKkht4NHA38MCIelfQ8kvSQWe7qPiKniEBWZrqsqWmtOp9XGYMTsuxQ9gRwKPAxSX8OvD4ibs2tBGYd6j4JqYirt7LSZU1Na9X9vFavmOb8045kesliBEwvWcz5px1Z+qihi4FfB76Q3vWHkt4QEe/LrRRmqbqPyCni6q2sdFlTF4lrwnkVPR8jS2roeOAVkW5cIOnvgNuG/ZKkzwGnAD+JiFf0eFzAhSR9D48CZ0bEzSOU3RqqzpOQighkZaXL6t4/009TzytPWQLBncBy4J7050OALKmhS4GLgMv6PH4y8NL037HAp9P/zWot70BW1gS2uvfP9NPU88pT3z4CSf8gaSPwPOB2SddJuha4HVg27Ikj4gaS5Sj6ORW4LBI3AUskvWC04ps1Xxk5Yqh//0w/TT2vPA1qEXxiwGO997cczTRwb8fPO9L77u8+UNIaYA3A8uXLc3hps3opI11W9/6Zfpp6XnkatEPZ9b3uT1cf/R3ghgW+dq9JaT0DTESsB9ZDsmfxAl+31uo8DM6qr879M4M09bzykqWPAElHk1T+bwPuBq7I4bV3kPQ3zHkRcF8Oz9tYdVl+wczqpW8gkPTvgbcD7wB+ClwOKCJW5fTaG4H3S/oiSSfxQxExLy1kT2rCMDirL7dGm2tQi+AO4JvAWyLiLgBJZ2V9YklfAE4AlkraQbLv8RRARFwCXE0ydPQukuGjnq08hIfB7S/PismV3GBujTbboEDwVpIWwbWSvg58kREWm4uIdwx5PABPShtB2cPgqlw55lkxTaKSq/J724tbo83Wd/hoRHwlIs4AjgCuA84Cni/p05J+s6TyWYcyh8FVfVp+nssulL3i6aD3tqqLy7k12mxZ1hr6RUR8PiJOIenQ3QasLbpgNl9Z48mh+stB51kxlV3J9Xtvz/uH7ZUNvmWsgGmTk2nU0JyI+Bnw1+k/m4CyhsFV/QowzzRZ2Sm3fu/hg4/umXdfVdIv3p6z2bLsR2AtVPUrwDzTZGXP
PB31PaxC8C2zNWrlG6lFYO1R9SvAPGeL9nquVUcs44JNd3LW5dty78zt994+7SkHsGv3/FZBVYJvWa3RunWkN4EDgfU098U7d+P2fZXT06eq1YDMs2LqfK4yRhE97SkH7Hv+gw6c4py3vByg0sG3DB6mOhnV+mZb5Ty298ldSR98dE9lOi+LVGRH+Yats5z9pVv2u/J/5Jd7AadfoPqDFJrKLQLrq61jx/PqKO+V4jh343b2PLH/cll7ngjO3bh9X6ukye/tMFUfpNBUbhFYX239UubRUd5vrkCvPgCg7/1tU/VBCk3lQGB9tfVLmccoon6tKRvMewdMhlND1ldZI4eqNkokjxFJo7aaDjpwaqTjm8p7B0yGA4H1VcaXsqqjRBaaq+83Se2gA6d45LG97Hn8yX6CqUXaN2rIvHfAJCjdk742ZmZmYvPmzZMuhuVk5bprelaY00sWc+PaEydQonx0BzhIWlPnn3YkkC24Vq2lZPUmaUtEzPR6zC0Cm6imdkgPa00Nq9Cr2lKyZnJnsU1UUzukF3o17/H0ViYHApuoJo4SyWMJ76a2lKyaHAhsorpn0x504BRPe8oBnHX5tkqtxz+KPK7mm9pSsmpyILCJW71imhvXnsgnzziaX+55gl2791RuPf5R5HE138SWklWXO4utMia5pEWeI3Ty2N8gy9DdskYVefRS8zkQWGVMKi+e9widvCbiDRpPX9aoIo9eagenhqwyJpUXz3uEThmriJY1qsijl9rBLQKrjKKWtBiW2iiiJTLK7NhxUi/DypxXOsejl9rBgcAqo4glLbKkNsres3iU8vWr0AeVOc90ziTfGyuPl5iwQlSlgzHLEhaDloMousyDytevhTS3TEW/xy7YdGduy3b0em8Aliye4tzfern7CWpk0BIT7iOw3OUxoSovWVIbk9wZbFD5ho2i6lfmPNM5c6/TvTrqrt3t2K2uLZwastxVaWezrKmNSa14Oah8wyr0fmXOcs6jtNhWr5jmgk138uCj+2+e04bd6tqi0BaBpDdJulPSXZLW9nj8BEkPSdqW/vtYkeWx8W3YOsvKdddw2Nqrhs74rVIHYxkTs0Z5b0Yp37ijqFYdsWzg/eO02Kr0N7X8FdYikLQI+CvgjcAO4LuSNkbE97sO/WZEnFJUOeqoKvn1ubKcu3H7flspDut8rFIHY9F7Kiy0Y3ZY+cYZRXXtHTsH3j9Oi61Kf1PLX5GpodcAd0XEDwEkfRE4FegOBJYap9Itujy9OgphcMVR1s5mWRWZ9skjDdavfOMGsWFX7+Nc3Vftb2r5KjIQTAP3dvy8Azi2x3HHSboFuA/4UERsL7BMlTVupVukXpVcp34VR5u2Gyw6ZTJOEBt29T7O1X2b/qZtVGQgUI/7useq3gy8OCIekfRmYAPw0nlPJK0B1gAsX74852JWw7iVbpGGveawimOukphLdZ11+baxK5Aqpcs6VTFlMuzqfdyre28h2VxFdhbvAA7p+PlFJFf9+0TEwxHxSHr7amBK0tLuJ4qI9RExExEzy5b17giru4VUukUZ9JpZ0wJ5DCWt0nDUblVcJXTYcNhJDpe1aipsQpmkpwA/AP4jMAt8F/idztSPpIOBH0dESHoN8GWSFkLfQjV1Qlm/iUVQ3uSmbv3SVQcdOMU5b8k2mSiPPYmrvq9xVVsrWdS57DaaiexZHBF7Jb0f2AQsAj4XEdslvSd9/BLgdOC9kvYCu4G3DwoCTdaruQ6jVbp5yyMvnEcOvepDF/NOg5XFK4vanEInlKXpnqu77ruk4/ZFwEVFlqEuqtoZt9DF0/LIoVcxD99L3SrWKk38s8nyWkM2VJb0Qb/1et766mmu2DK7oHV8JrkW0CiqnsLqdtjaq+aN3pgjqMzFiOVjIqkhy6bqOdqsV7n9ri6vvWPnvoXQxj3HqraWumVNYVXlb96vpQXs1ykP1WzRWH4cCBZoIV/qOqQSsqYP+lUos7t25zLssA5DF7Ou8VOVv3m/
fqlOThW1g1cfXYCFDmusw+5PWa5yN2yd7TlpBJIUw4atswtaj6cusgwlrdLfvHsYaT9V6ZS34rhFsAAL7Wyr+mgYyHaVe8GmO/vmmgM4d+N2Htv7RCWugouUJYVVtb95Z0urXx9H1TrlLX9uESzAQr/Uk9qjdxRZrnKHne+u3XsqcxVctNUrprlx7Yncve4/cePaE+cFuir/zas4Oc7K4UDA+MsIL/RLXYcvXpZZqONWYlVq+ZRl0N980ukzzzhur9YPH13I0MQ8hjVWZQTJQgxaMG/x1CKePnXAvE1NoLrDKvPW/TdedcQyrr1j535/c+i/9WTdPg9WTYOGj7Y+ECx07HcTKvI8zL0Ps7t2s0ji8Yh9++5Ceyu5rBcLZc5B8Ge2nTyPYICF5vnrMKyxl7wrgyzvQ7/XK6tiyvo6eZYn64CCsuYgVGn4qlVH6wPBQpYvqPqVVb/yTaIy6BcoyipL1tfJuzxZK/iy5iB4WQnrpfWdxeN22FZ5aWQYXL4qjWUvqyxZXyfv8mQdUFDWHISqDV/N26Q73Ouq9YFg3JESVapMexlUvipVBmWVJevr5F2eXhU8wKO/2rtfJZXlczisbFkqwSoPX12oql+cVVnrU0MwXp5/0pXpsLTUoPJVaTXPosrS/f4sOXCq58il7tfJuzxzf5PuvagffHTPvLTOsM/hoLJlTRs1ee9hp73G1/oWQbesTctJXlllufIZVL4qzV8ooiy93p9HfrmXqUX7L6TQ63WKKM/qFdM842nzr7lGbUEOKlvWFmqT5wpM+uKsztwi6DBKZ9wkr6yyXPkMKl+VVvMsoiy93p89TwRLFk/xjKc9ZeDrFPXe5FFJDSrbWZdvy/z8dR3pNkyVWrp140DQYZSm5SQr0yyVyrDyFV0ZjDKiKu+y9Ht/Htq9h23n/ObQ3y/ivcmrkupXNleCzU57Fc2BoMOoV22TurLK+qWfVPkmNVZ9Lvj0myI5yUqx6Eqq6ZVglguLKrV068aBoENdrqqq/qWfRKfdoGUuYPLvT9GVVJMrwVEuLJqa9iqaA0GHqlewc6r+pZ9Ep12v4DNnuiLvT9GVVFMrQY8GKp4DQYeqV7Awv4n8yTOOrlT5oH/LasmBU4W9Zr8gI2jFwnZN5tFAxXMg6FLlq6q6rBNz9kmHc/aXb2HP4/tn6x/5ZTKJKsuqrqMG46xpvaovC2Lz1SVlW2eeR1Aj485mLnva/eoV0zzjqfOvMfY8EZnKOs7s0Czj/7M8t5coqJ4qzXtpKrcIamScJvKkWhEPdcyi7TSsOT9uPjhLWq/fc5+7cfu+JbQF+0YdVbXFNUgTWzx1SNnWnQNBjYzTRJ5UR9u4zfmF5IOHpfX6Pceu3Xv2Lf/QPfQ0z/eq6Eq6LqlDqx4HghoZZ1RTloq1iApq3BFYReaD+z33MON0SvbaleyKLbOFVtJNHV3jAFc89xHUyDjrxAxbE6moFRvHXdOmyHxwv5VAhxk1CPV6Tz9/048KX622qaNrqr7SbxMU2iKQ9CbgQmAR8NmIWNf1uNLH3ww8CpwZETcXWaa6G3VU06ojlvH3N/2o5/1Q7FXkOCOwiswH93ruR3+1t+eqpHPGCUK93tN+s53zrKSbOrqmqQGuSgoLBJIWAX8FvBHYAXxX0saI+H7HYScDL03/HQt8Ov3fcnLtHTsH3l/FL1mRQ3i7n3vD1lnOunxbz4p6kTTWypyjvHd5VtJ1mRA5qqYGuCopMjX0GuCuiPhhRPwK+CJwatcxpwKXReImYImkFxRYptYZVtE3eaOSLFavmOadr12Ouu5fPLWIP3/bUWMFpH7vXa/XyLOSbuoS0x4+WrwiU0PTwL0dP+9g/tV+r2Omgfs7D5K0BlgDsHz58twL2mTDrqaaehU5iv+1+khmXvzc3NJR/d7Tt756mmvv2FnoEMgqT4gcl4ePFq/IQNB9AQTzU6VZjiEi1gPrAWZmZvqlW62HYRW9v2SJPCtQv6f5a2KAq5IiA8EO4JCO
n18E3DfGMbYAWSolf8ny5/fU6qTIQPBd4KWSDgNmgbcDv9N1zEbg/ZK+SJI2eigi7sdy5UrJzAYpLBBExF5J7wc2kQwf/VxEbJf0nvTxS4CrSYaO3kUyfPRdRZXHzMx6K3QeQURcTVLZd953ScftAN5XZBnMzGwwzyw2M2s5BwIzs5ZzIDAzazklafr6kLQTuGfS5choKfDApAtRsjaeM7TzvNt4zlDf835xRCzr9UDtAkGdSNocETOTLkeZ2njO0M7zbuM5QzPP26khM7OWcyAwM2s5B4JirZ90ASagjecM7TzvNp4zNPC83UdgZtZybhGYmbWcA4GZWcs5ECyQpDdJulPSXZLW9nj8IElfkXSrpO9IesUkypknSZ+T9BNJ3+vzuCT9Zfqe3CrpVWWXsQgZzvsISf8i6TFJHyq7fEXIcM7vTP/Gt0r6Z0lHlV3GImQ471PTc94mabOk3yi7jHlyIFiAjn2ZTwZeBrxD0su6DvsIsC0iXgn8HnBhuaUsxKXAmwY83rkX9RqSvaib4FIGn/fPgA8AnyilNOW4lMHnfDdwfPr5/p80pyP1Ugaf9/8FjoqIo4HfBz5bQpkK40CwMFn2ZX4ZyYeGiLgDOFTS88stZr4i4gaSSq+fRu5FPey8I+InEfFdYE95pSpWhnP+54h4MP3xJpLNpWovw3k/Ek+OtHkGPXZWrBMHgoXpt+dyp1uA0wAkvQZ4MQ35sgyQ5X2x5nk38LVJF6Iskv6zpDuAq0haBbXlQLAwWfZcXgccJGkb8EfAVmBvweWatEx7UVtzSFpFEgj+x6TLUpaI+EpEHAGsJkmL1VahG9O0wNA9lyPiYdKd1ySJJKd6d1kFnBDvRd0ikl5JkiM/OSJ+OunylC0ibpD0EklLI6KOi9G5RbBA+/ZllvRUkn2ZN3YeIGlJ+hjAHwA3pMGhyTYCv5eOHnot3ou6sSQtB64EfjcifjDp8pRF0q+nF3ako+KeCtQ2CLpFsAAZ92X+D8Blkh4Hvk/SfK41SV8ATgCWStoBnANMQbP3oh523pIOBjYDzwaekPRB4GV1DvwZ/tYfA54HXJzWi3ubsDJnhvN+K8nFzh5gN3BGR+dx7XiJCTOzlnNqyMys5RwIzMxazoHAzKzlHAjMzFrOgcDMrOUcCKxSJH1U0vaOlR2PTe//oKQDc3ydf5O0dAG/f6aki/rcv1PSVkn/T9ImSa9bwOt8XNIbMpTlhR0/f7bH4odmfXkegVWGpOOAU4BXRcRjaUU9Nxnvg8Dfk8xLmETZFkXE4xkPvzwi3p/+3irgSkmrIuL2UV83Ij6W4bAzge+Rzt6OiD8Y9XWs3dwisCp5AfBARDwGEBEPRMR9kj4AvBC4VtK1AJI+na4Dv13SeXNPkF7pnyfpZkm3SToivf95kr6RXqn/NR3rIUnaIGlL+lxrOu5/JL0i/zZwnKR3SfqBpOuBlVlOKCKuJVmaeU36nC+R9PX09b6Z7mHwnLTcB6THHCjpXklTki6VdHp6/8ckfVfS9yStT2dunw7MAJ9PW1CLJV0naSb9nXek78P3JP1Z17n9b0m3SLpJNV8R1xbGgcCq5BvAIWlle7Gk4wEi4i9JrnZXRcSq9NiPpjNYXwkcn653M+eBiHgVyT4IcxvEnAN8KyJWkCyBsbzj+N+PiFeTVKgfkPS89P5nAN+LiGOBfwXOIwkAbyRZXjyrm4Ej0tvrgT9KX+9DwMUR8RDJKrXHp8e8BdgUEd3LWV8UEcdExCuAxcApEfFlktnM74yIoyNi99zBabroz4ATgaOBYySt7ji3myLiKOAG4L+OcD7WMA4EVhkR8QjwapKr553A5ZLO7HP42yTdTLKa68vZv2K+Mv1/C3Boevv1JKklIuIq4MGO4z8g6RaS9fQPIdlQB+Bx4Ir09rHAdRGxM9174vIRTm1uTZpnAq8DvqRkNdq/JmkFkT7fGentt/d5/lWSvi3pNpLK/eVDXveYjjLvBT5P8j4A/Ar4x/R25/tk
LeQ+AquUNA9/HXBdWuH9F5LdovaRdBjJ1fQxEfGgpEuBp3cc8lj6/+Ps/xmft56KpBOANwDHRcSjkq7reK5fdvULjLseywrgdpILr13prlbdNgLnS3ouSTC8pqucTwcuBmYi4l5J57L/OffSaznwOXs61sbpfp+sZdwisMqQdLikl3bcdTRwT3r758Cz0tvPBn4BPJTmtk/O8PQ3AO9MX+dk4KD0/ucAD6ZB4AjgtX1+/9vACWlfwxTw2xnP6XiSFs5n0sXn7pb02+ljUrrHb9oa+g7JVqb/2KNjeq7SfyBtWZze8Vjne9Nd5uMlLVWyreo7gOuzlNvaxVcBViXPBD4laQnJ5j13kXaykuTWvybp/ohYJWkrsB34IXBjhuc+D/hCmk66HvhRev/XgfdIuhW4kyQ9NE9E3J9ehf8LcD9J3n9Rn9c6Q8lm5geS7D3x1o4RQ+8EPi3pT0lWs/wiSf8AJOmgL5Gsetn9+rskfQa4Dfg3kiXQ51wKXCJpN3BcV5k/DFxL0jq4OiK+2qfM1mJefdTMrOWcGjIzazkHAjOzlnMgMDNrOQcCM7OWcyAwM2s5BwIzs5ZzIDAza7n/D33G97PnIPw9AAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "abs_error = np.abs(y_pred.flatten()-test_dataset.y.flatten())\n", + "plot.scatter(y_std.flatten(), abs_error)\n", + "plot.xlabel('Standard Deviation')\n", + "plot.ylabel('Absolute Error')\n", + "plot.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "rdGOqq_DkMdX" + }, + "source": [ + "The first thing we notice is that the axes have similar ranges. The model clearly has learned the overall magnitude of errors in the predictions. There also is clearly a correlation between the axes. Values with larger uncertainties tend on average to have larger errors. (Strictly speaking, we expect the absolute error to be *less than* the predicted uncertainty. Even a very uncertain number could still happen to be close to the correct value by chance. If the model is working well, there should be more points below the diagonal than above it.)\n", + "\n", + "Now let's see how well the values satisfy the expected distribution. If the standard deviations are correct, and if the errors are normally distributed (which is certainly not guaranteed to be true!), we expect 95% of the values to be within two standard deviations, and 99% to be within three standard deviations. Here is a histogram of errors as measured in standard deviations." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 265 + }, + "colab_type": "code", + "id": "IrD6swafkMdY", + "outputId": "55d11687-7d35-4a2c-d9d7-2410cea156d1", + "scrolled": true + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAANUklEQVR4nO3df4xlZ13H8ffH/ohWaqjZiyDtMGBKk0IwNBMsNmIFSSol1D/4o43FiiQTScRiRFwkoX+ZVCX4IxrJRtZibEpMKdhQqjQIaUxKZbu2sGUpVKywUN2tTfip1savf8w12U5n596558zMfqfvVzLZe8957jzfZ57sJyfn3OecVBWSpH6+b7cLkCQtxgCXpKYMcElqygCXpKYMcElq6syd7Gzfvn21vLy8k11KUnv33XffY1U1Wb99RwN8eXmZQ4cO7WSXktRekn/daLunUCSpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpqR1diblblvffMejzj9x45a70PaRfSXufR+CS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNzQzwJAeTHE9yZN32tyV5KMmDSX5v+0qUJG1kniPwm4ArTt6Q5GeAq4CXVdVLgPeOX5okaTMzA7yq7gYeX7f5rcCNVfXf0zbHt6E2SdImFr0XyouBn0ryO8B/Ae+oqs9u1DDJKrAKsLS0tGB3w+9nIkl7zaIXMc8EzgMuBX4T+Osk2ahhVR2oqpWqWplMJgt2J0lab9EAPwbcVmv+EfhfYN94ZUmSZlk0wD8KvBogyYuBs4HHRqpJkjSHmefAk9wCXA7sS3IMuAE4CBycfrXwCeC6qqrtLFSS9FQzA7yqrjnFrmtHrkWStAWuxJSkpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpmYGeJKDSY5PH96wft87klQSH6cmSTtsniPwm4Ar1m9McgHwWuCrI9ckSZrDzACvqruBxzfY9QfAOwEfpSZJu2Chc+BJ3gB8vaoemKPtapJDSQ6dOHFike4kSRvYcoAnOQd4N/CeedpX1YGqWqmqlclkstXuJEmnsMgR+I8BLwQeSPIIcD5wOMlzxyxMkrS5mU+lX6+qPg885//fT0N8paoeG7EuSdIM83yN8BbgHuCiJMeSvGX7y5IkzTLzCLyqrpmxf3m0aiRJc3MlpiQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ1Nc8DHQ4mOZ7kyEnbfj/JF5N8LslHkjx7W6uUJD3NPEfgNwFXrNt2F/DSqnoZ8CXgXSPXJUmaYWaAV9XdwOPrtn2iqp6cvv0Maw82liTtoDHOgf8ycOcIv0eStAVbfir9yZK8G3gSuHmTNqvAKsDS0tKQ7nbN8v47drsESXqahY/Ak1wHvB74haqqU7WrqgNVtVJVK5PJZNHuJEnrLHQEnuQK4LeAn66q741bkiRpHvN8jfAW4B7goiTHkrwF+BPgXOCuJPcnef821ylJWmfmEXhVXbPB5g9sQy2SpC1wJaYkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgk
NWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JT8zyR52CS40mOnLTth5PcleTL03/P294yJUnrzXMEfhNwxbpt+4FPVtWFwCen7yVJO2hmgFfV3cDj6zZfBXxw+vqDwM+PW5YkaZaFnkoP/EhVPQpQVY8mec6pGiZZBVYBlpaWFuxOW7W8/46FP/vIjVeOWImk7bLtFzGr6kBVrVTVymQy2e7uJOkZY9EA//ckzwOY/nt8vJIkSfNYNMBvB66bvr4O+JtxypEkzWuerxHeAtwDXJTkWJK3ADcCr03yZeC10/eSpB008yJmVV1zil2vGbkWSdIWuBJTkpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoaFOBJfj3Jg0mOJLklyfePVZgkaXMLB3iS5wO/BqxU1UuBM4CrxypMkrS5oadQzgR+IMmZwDnAN4aXJEmax8IBXlVfB94LfBV4FPhmVX1ifbskq0kOJTl04sSJxSuVJD3FkFMo5wFXAS8EfhT4wSTXrm9XVQeqaqWqViaTyeKVSpKeYsgplJ8F/qWqTlTV/wC3AT85TlmSpFmGBPhXgUuTnJMkrD2l/ug4ZUmSZhlyDvxe4FbgMPD56e86MFJdkqQZzhzy4aq6AbhhpFokSVvgSkxJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmBgV4kmcnuTXJF5McTfLKsQqTJG1u0BN5gD8C/raq3pjkbOCcEWqSJM1h4QBP8kPAq4BfAqiqJ4AnxilLkjTLkCPwFwEngL9I8uPAfcD1VfXdkxslWQVWAZaWlgZ0p52yvP+OQZ9/5MYrR6pka4bUvVs1S0MMOQd+JnAJ8GdV9XLgu8D+9Y2q6kBVrVTVymQyGdCdJOlkQwL8GHCsqu6dvr+VtUCXJO2AhQO8qv4N+FqSi6abXgN8YZSqJEkzDf0WytuAm6ffQPkK8ObhJUmS5jEowKvqfmBlnFIkSVvhSkxJasoAl6SmDHBJasoAl6SmDHBJasoAl6Smhn4PXNto6D1Jdov3JJF2hkfgktSUAS5JTRngktSUAS5JTRngktSUAS5JTRngktSUAS5JTQ0O8CRnJPmnJB8boyBJ0nzGOAK/Hjg6wu+RJG3BoABPcj5wJfDn45QjSZrX0Huh/CHwTuDcUzVIsgqsAiwtLQ3sTtLJvO/MM9vCR+BJXg8cr6r7NmtXVQeqaqWqViaTyaLdSZLWGXIK5TLgDUkeAT4EvDrJX41SlSRppoUDvKreVVXnV9UycDXw91V17WiVSZI25ffAJampUR7oUFWfBj49xu+SJM3HI3BJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJasoAl6SmDHBJamrIMzEvSPKpJEeTPJjk+jELkyRtbsgDHZ4EfqOqDic5F7gvyV1V9YWRapMkbWLIMzEfrarD09ffBo4Czx+rMEnS5kZ5pFqSZeDlwL0b7FsFVgGWlpbG6E4a3fL+O3at70duvHJX+t3NMe+W3fpbb5fBFzGTPAv4MPD2qvrW+v1VdaCqVqpqZTKZDO1OkjQ1KMCTnMVaeN9cVbeNU5IkaR5DvoUS4APA0ap633glSZLmMeQI/DLgTcCrk9w//XndSHVJkmZY+CJmVf0DkBFrkSRtgSsxJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJampUW5mJY3lmXiDpWfimDsaOk/bcSMtj8AlqSkDXJKaMsAlqSkDXJKaMsAlqSkDXJKaMsAlqSkDXJKaGvpMzCuSPJTk4ST7xypKkjTbkGdingH8KfBzwMXANUkuHqswSdLmhhyBvwJ4uKq+UlVPAB8CrhqnLEnS
LEPuhfJ84GsnvT8G/MT6RklWgdXp2+8keWiBvvYBjy3wuS4cX397fYx7Ynz53U13b+sYZ/Q9yws22jgkwDd6oHE9bUPVAeDAgH5IcqiqVob8jtOZ4+tvr49xr48Peo5xyCmUY8AFJ70/H/jGsHIkSfMaEuCfBS5M8sIkZwNXA7ePU5YkaZaFT6FU1ZNJfhX4O+AM4GBVPThaZU816BRMA46vv70+xr0+Pmg4xlQ97bS1JKkBV2JKUlMGuCQ1ddoE+Kxl+Vnzx9P9n0tyyW7UOcQcY7w8yTeT3D/9ec9u1LmIJAeTHE9y5BT798L8zRpj2/kDSHJBkk8lOZrkwSTXb9Cm7TzOOb5ec1hVu/7D2kXQfwZeBJwNPABcvK7N64A7Wfv++aXAvbtd9zaM8XLgY7td64LjexVwCXDkFPtbz9+cY2w7f9P6nwdcMn19LvClvfT/cM7xtZrD0+UIfJ5l+VcBf1lrPgM8O8nzdrrQAfb0rQeq6m7g8U2adJ+/ecbYWlU9WlWHp6+/DRxlbcX1ydrO45zja+V0CfCNluWv/8PO0+Z0Nm/9r0zyQJI7k7xkZ0rbEd3nb157Yv6SLAMvB+5dt2tPzOMm44NGczhkKf2Y5lmWP9fS/dPYPPUfBl5QVd9J8jrgo8CF213YDuk+f/PYE/OX5FnAh4G3V9W31u/e4COt5nHG+FrN4elyBD7PsvzuS/dn1l9V36qq70xffxw4K8m+nStxW3Wfv5n2wvwlOYu1cLu5qm7boEnreZw1vm5zeLoE+DzL8m8HfnF6FfxS4JtV9ehOFzrAzDEmeW6STF+/grX5+Y8dr3R7dJ+/mbrP37T2DwBHq+p9p2jWdh7nGV+3OTwtTqHUKZblJ/mV6f73Ax9n7Qr4w8D3gDfvVr2LmHOMbwTemuRJ4D+Bq2t6afx0l+QW1q7g70tyDLgBOAv2xvzBXGNsO39TlwFvAj6f5P7ptt8GlmBPzOM842s1hy6ll6SmTpdTKJKkLTLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmvo/TuhlxfuE2UUAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plot.hist(abs_error/y_std.flatten(), 20)\n", + "plot.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "bucmsdGSkMda" + }, + "source": [ + "All the values are in the expected range, and the distribution looks roughly Gaussian although not exactly. Perhaps this indicates the errors are not normally distributed, but it may also reflect inaccuracies in the uncertainties. This is an important reminder: the uncertainties are just estimates, not rigorous measurements. Most of them are pretty good, but you should not put too much confidence in any single value." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "4NwKVrwCkMdb" + }, + "source": [ + "# Congratulations! Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on GitHub\n", + "Starring DeepChem on GitHub helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" 
+ ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "name": "07_Uncertainty_In_Deep_Learning.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/27_Using_Reinforcement_Learning_to_Play_Pong.ipynb b/examples/tutorials/27_Using_Reinforcement_Learning_to_Play_Pong.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..8c6aab13952db627fd3809ff7aad7dda772441d5 --- /dev/null +++ b/examples/tutorials/27_Using_Reinforcement_Learning_to_Play_Pong.ipynb @@ -0,0 +1,304 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "m0jRtbRGsoZy" + }, + "source": [ + "# Tutorial Part 27: Using Reinforcement Learning to Play Pong\n", + "\n", + "This tutorial demonstrates using reinforcement learning to train an agent to play Pong. This task isn't directly related to chemistry, but video games make an excellent demonstration of reinforcement learning techniques.\n", + "\n", + "![title](pong.png)\n", + "\n", + "## Colab\n", + "\n", + "This tutorial and the rest in this sequence can be done in Google Colab (although the visualization at the end doesn't work correctly on Colab, so you might prefer to run this tutorial locally). 
If you'd like to open this notebook in colab, you can use the following link.\n", + "\n", + "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/27_Using_Reinforcement_Learning_to_Play_Pong.ipynb)\n", + "\n", + "## Setup\n", + "\n", + "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment. To install `gym` you should also use `pip install 'gym[atari]'` (We need the extra modifier since we'll be using an atari game). We'll add this command onto our usual Colab installation commands for you" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 170 + }, + "colab_type": "code", + "id": "qXdmcnhtst-z", + "outputId": "5c7cf904-0f5c-41d8-c404-75258bafca86" + }, + "outputs": [], + "source": [ + "!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", + "import conda_installer\n", + "conda_installer.install()\n", + "!/root/miniconda/bin/conda info -e" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 188 + }, + "colab_type": "code", + "id": "-1kpETs2GnbI", + "outputId": "dc8d5ae6-a0d7-4236-8168-8b615806ce41" + }, + "outputs": [], + "source": [ + "!pip install --pre deepchem\n", + "import deepchem\n", + "deepchem.__version__" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 187 + }, + "colab_type": "code", + "id": "9sv6kX_VsoZ1", + "outputId": "ce4206d5-7917-4cad-c716-238a41f78e2a" + }, + "outputs": [], + "source": [ + "!pip install 'gym[atari]'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + 
"source": [ + "## Reinforcement Learning\n", + "\n", + "Reinforcement learning involves an *agent* that interacts with an *environment*. In this case, the environment is the video game and the agent is the player. By trial and error, the agent learns a *policy* that it follows to perform some task (winning the game). As it plays, it receives *rewards* that give it feedback on how well it is doing. In this case, it receives a positive reward every time it scores a point and a negative reward every time the other player scores a point.\n", + "\n", + "The first step is to create an `Environment` that implements this task. Fortunately,\n", + "OpenAI Gym already provides an implementation of Pong (and many other tasks appropriate\n", + "for reinforcement learning). DeepChem's `GymEnvironment` class provides an easy way to\n", + "use environments from OpenAI Gym. We could just use it directly, but in this case we\n", + "subclass it and preprocess the screen image a little bit to make learning easier." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "EuRrb3vpsoZ_" + }, + "outputs": [], + "source": [ + "import deepchem as dc\n", + "import numpy as np\n", + "\n", + "class PongEnv(dc.rl.GymEnvironment):\n", + " def __init__(self):\n", + " super(PongEnv, self).__init__('Pong-v0')\n", + " self._state_shape = (80, 80)\n", + " \n", + " @property\n", + " def state(self):\n", + " # Crop everything outside the play area, reduce the image size,\n", + " # and convert it to black and white.\n", + " cropped = np.array(self._state)[34:194, :, :]\n", + " reduced = cropped[0:-1:2, 0:-1:2]\n", + " grayscale = np.sum(reduced, axis=2)\n", + " bw = np.zeros(grayscale.shape)\n", + " bw[grayscale != 233] = 1\n", + " return bw\n", + "\n", + " def __deepcopy__(self, memo):\n", + " return PongEnv()\n", + "\n", + "env = PongEnv()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "GNnO3MZ_soaG" + }, + "source": [ + "Next we create a model to implement our policy. This model receives the current state of the environment (the pixels being displayed on the screen at this moment) as its input. Given that input, it decides what action to perform. In Pong there are three possible actions at any moment: move the paddle up, move it down, or leave it where it is. The policy model produces a probability distribution over these actions. It also produces a *value* output, which is interpreted as an estimate of how good the current state is. This turns out to be important for efficient learning.\n", + "\n", + "The model begins with two convolutional layers to process the image. That is followed by a dense (fully connected) layer to provide plenty of capacity for game logic. We also add a small Gated Recurrent Unit (GRU). That gives the network a little bit of memory, so it can keep track of which way the ball is moving. 
Just from the screen image, you cannot tell whether the ball is moving to the left or to the right, so having memory is important.\n", + "\n", + "We concatenate the dense and GRU outputs together, and use them as inputs to two final layers that serve as the\n", + "network's outputs. One computes the action probabilities, and the other computes an estimate of the\n", + "state value function.\n", + "\n", + "We also provide an input for the initial state of the GRU, and return its final state at the end. This is required by the learning algorithm." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "BLdt8WAQsoaH" + }, + "outputs": [], + "source": [ + "import tensorflow as tf\n", + "from tensorflow.keras.layers import Input, Concatenate, Conv2D, Dense, Flatten, GRU, Reshape\n", + "\n", + "class PongPolicy(dc.rl.Policy):\n", + " def __init__(self):\n", + " super(PongPolicy, self).__init__(['action_prob', 'value', 'rnn_state'], [np.zeros(16)])\n", + "\n", + " def create_model(self, **kwargs):\n", + " state = Input(shape=(80, 80))\n", + " rnn_state = Input(shape=(16,))\n", + " conv1 = Conv2D(16, kernel_size=8, strides=4, activation=tf.nn.relu)(Reshape((80, 80, 1))(state))\n", + " conv2 = Conv2D(32, kernel_size=4, strides=2, activation=tf.nn.relu)(conv1)\n", + " dense = Dense(256, activation=tf.nn.relu)(Flatten()(conv2))\n", + " gru, rnn_final_state = GRU(16, return_state=True, return_sequences=True, time_major=True)(\n", + " Reshape((-1, 256))(dense), initial_state=rnn_state)\n", + " concat = Concatenate()([dense, Reshape((16,))(gru)])\n", + " action_prob = Dense(env.n_actions, activation=tf.nn.softmax)(concat)\n", + " value = Dense(1)(concat)\n", + " return tf.keras.Model(inputs=[state, rnn_state], outputs=[action_prob, value, rnn_final_state])\n", + "\n", + "policy = PongPolicy()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "YU19h0aUsoaN" + }, + 
"source": [ + "We will optimize the policy using the Advantage Actor Critic (A2C) algorithm. There are lots of hyperparameters we could specify at this point, but the default values for most of them work well on this problem. The only one we need to customize is the learning rate." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Fw_wu511soaO", + "scrolled": true + }, + "outputs": [], + "source": [ + "from deepchem.models.optimizers import Adam\n", + "a2c = dc.rl.A2C(env, policy, model_dir='model', optimizer=Adam(learning_rate=0.0002))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "-PUD4JG2soaU" + }, + "source": [ + "Optimize for as long as you have patience to. By 1 million steps you should see clear signs of learning. Around 3 million steps it should start to occasionally beat the game's built in AI. By 7 million steps it should be winning almost every time. Running on my laptop, training takes about 20 minutes for every million steps." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Wa18EQlmsoaV" + }, + "outputs": [], + "source": [ + "# Change this to train as many steps as you have patience for.\n", + "a2c.fit(1000)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "_xHNjusSsoaa" + }, + "source": [ + "Let's watch it play and see how it does! " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Ud6DB_ndsoab" + }, + "outputs": [], + "source": [ + "# This code doesn't work well on Colab\n", + "env.reset()\n", + "while not env.terminated:\n", + " env.env.render()\n", + " env.step(a2c.select_action(env.state))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "colab_type": "text", + "id": "3MGK4nrhsoah" + }, + "source": [ + "# Congratulations! 
Time to join the Community!\n", + "\n", + "Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n", + "\n", + "## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n", + "This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n", + "\n", + "## Join the DeepChem Gitter\n", + "The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. Join the conversation!" + ] + } + ], + "metadata": { + "colab": { + "name": "27_Using_Reinforcement_Learning_to_Play_Pong.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/examples/tutorials/README.md b/examples/tutorials/README.md index cf3230950c843ba173a3d5352eb9889a5aa709b2..e2069f7f83cde0c722299ca76dbdaede9fdc256f 100644 --- a/examples/tutorials/README.md +++ b/examples/tutorials/README.md @@ -47,7 +47,7 @@ competition. Increased competition can help drive down the cost of medicine. 
* [Part 17: Training a Generative Adversarial Network on MNIST](17_Training_a_Generative_Adversarial_Network_on_MNIST.ipynb) * [Part 18: Using Reinforcement Learning to Play Pong](18_Using_Reinforcement_Learning_to_Play_Pong.ipynb) * [Part 19: Large Scale Chemical Screens](19_Large_Scale_Chemical_Screens.ipynb) -* [Part 20: [WIP] ConvertingDeepChem Models to TensorFlow Estimators](WIP_20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb) +* [Part 20: Converting DeepChem Models to TensorFlow Estimators](20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb) * [Part 21: Introduction to Bioinformatics](21_Introduction_to_Bioinformatics.ipynb) * [Part 22: Using HuggingFace + Transfer Learning for Toxicity Predictions](22_Transfer_Learning_With_HuggingFace_tox21.ipynb) diff --git a/examples/tutorials/Training_a_Normalizing_Flow_on_QM9.ipynb b/examples/tutorials/Training_a_Normalizing_Flow_on_QM9.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c4886a77c5750149080e4b89a1202137a5bc50c9 --- /dev/null +++ b/examples/tutorials/Training_a_Normalizing_Flow_on_QM9.ipynb @@ -0,0 +1 @@ +{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"Training_a_Normalizing_Flow_on_QM9.ipynb","provenance":[],"collapsed_sections":[],"toc_visible":true,"authorship_tag":"ABX9TyNyrrTvEu36LoiXsM0a4+4b"},"kernelspec":{"name":"python3","display_name":"Python 3"}},"cells":[{"cell_type":"markdown","metadata":{"id":"8BrLuyU3kMdt","colab_type":"text"},"source":["# Tutorial Part ??: Training a Normalizing Flow on QM9\n","By [Nathan C. Frey](https://ncfrey.github.io/) | [Twitter](https://twitter.com/nc_frey)\n","\n","\n","In this tutorial, we will train a Normalizing Flow (NF) on the [QM9 dataset](https://www.nature.com/articles/sdata201422). The dataset comprises 133,885 stable small organic molecules made up of CHNOF atoms. 
We will try to train a network that is an invertible transformation between a simple base distribution and the distribution of molecules in QM9. One of the key advantages of normalizing flows is that they can be constructed to efficiently sample from a distribution (generative modeling) and do probability density calculations (exactly compute log-likelihoods), whereas other models make tradeoffs between the two or can only approximate probability densities.\n","\n","NFs are useful whenever we need a probabilistic model with one or both of these capabilities. Note that because NFs are completely invertible, there is no \"latent space\" in the sense used when referring to generative adversarial networks or variational autoencoders. For more on NFs, we refer to this [review paper](https://arxiv.org/pdf/1912.02762.pdf).\n","\n","\n","To encode the QM9 dataset, we'll make use of the SELFIES (SELF-referencIng Embedded Strings) representation, which is a 100% robust molecular string representation. SMILES strings produced by generative models are often syntactically invalid (they do not correspond to a molecular graph), or they violate chemical rules like the maximum number of bonds between atoms. SELFIES are designed so that even totally random SELFIES strings correspond to valid molecular graphs, so they are a great framework for generative modeling. For more details about SELFIES, see the [GitHub repo](https://github.com/aspuru-guzik-group/selfies) and the associated [paper](https://arxiv.org/abs/1905.13741).\n","\n","\n","## Colab\n","\n","This tutorial and the rest in this sequence are designed to be done in Google colab. 
If you'd like to open this notebook in colab, you can use the following link.\n","\n","[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/23_Training_a_Normalizing_Flow_on_QM9.ipynb)\n","\n","## Setup\n","\n","To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment."]},{"cell_type":"code","metadata":{"id":"06FZl9Nqj_jq","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":319},"executionInfo":{"status":"ok","timestamp":1600972940078,"user_tz":240,"elapsed":124245,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"e1d3f749-00ef-4b81-899e-99a66e4d737e"},"source":["!curl -Lo conda_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n","import conda_installer\n","conda_installer.install()\n","!/root/miniconda/bin/conda info -e"],"execution_count":1,"outputs":[{"output_type":"stream","text":[" % Total % Received % Xferd Average Speed Time Time Time Current\n"," Dload Upload Total Spent Left Speed\n","100 3490 100 3490 0 0 15863 0 --:--:-- --:--:-- --:--:-- 15863\n"],"name":"stdout"},{"output_type":"stream","text":["add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n","python version: 3.6.9\n","fetching installer from https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n","done\n","installing miniconda to /root/miniconda\n","done\n","installing rdkit, openmm, pdbfixer\n","added conda-forge to channels\n","added omnia to channels\n","done\n","conda packages installation finished!\n"],"name":"stderr"},{"output_type":"stream","text":["# conda environments:\n","#\n","base * 
/root/miniconda\n","\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"dVXJOn-p8Pld","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":358},"executionInfo":{"status":"ok","timestamp":1600972946086,"user_tz":240,"elapsed":130228,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"8ec60872-e04f-4488-e0fd-9f1f16ccd670"},"source":["!pip install --pre deepchem\n","import deepchem\n","deepchem.__version__"],"execution_count":2,"outputs":[{"output_type":"stream","text":["Collecting deepchem\n","\u001b[?25l Downloading https://files.pythonhosted.org/packages/20/13/03547ffeca81a4d65fef79d0d13bd2eaf541408c606bbdbd67c0e2fec0b2/deepchem-2.4.0rc1.dev20200923221715.tar.gz (393kB)\n","\r\u001b[K |▉ | 10kB 16.1MB/s eta 0:00:01\r\u001b[K |█▋ | 20kB 1.8MB/s eta 0:00:01\r\u001b[K |██▌ | 30kB 2.1MB/s eta 0:00:01\r\u001b[K |███▎ | 40kB 2.4MB/s eta 0:00:01\r\u001b[K |████▏ | 51kB 1.8MB/s eta 0:00:01\r\u001b[K |█████ | 61kB 2.1MB/s eta 0:00:01\r\u001b[K |█████▉ | 71kB 2.3MB/s eta 0:00:01\r\u001b[K |██████▋ | 81kB 2.6MB/s eta 0:00:01\r\u001b[K |███████▌ | 92kB 2.7MB/s eta 0:00:01\r\u001b[K |████████▎ | 102kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████▏ | 112kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████ | 122kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████▉ | 133kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████▋ | 143kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████▌ | 153kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████▎ | 163kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████▏ | 174kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████ | 184kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████▉ | 194kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████████▋ | 204kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████████▌ | 215kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████████▎ | 225kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████████▏ | 235kB 2.6MB/s eta 
0:00:01\r\u001b[K |████████████████████ | 245kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████████████▉ | 256kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████████████▋ | 266kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████████████▌ | 276kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████████████▎ | 286kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████████████████▏ | 296kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████████████████ | 307kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████████████████▉ | 317kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████████████████▋ | 327kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████████████████▌ | 337kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████████████████████▎ | 348kB 2.6MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▏ | 358kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████████████████████ | 368kB 2.6MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▉ | 378kB 2.6MB/s eta 0:00:01\r\u001b[K |███████████████████████████████▋| 389kB 2.6MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 399kB 2.6MB/s \n","\u001b[?25hRequirement already satisfied: joblib in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.16.0)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.18.5)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.0.5)\n","Requirement already satisfied: scikit-learn in /usr/local/lib/python3.6/dist-packages (from deepchem) (0.22.2.post1)\n","Requirement already satisfied: scipy in /usr/local/lib/python3.6/dist-packages (from deepchem) (1.4.1)\n","Requirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2.8.1)\n","Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.6/dist-packages (from pandas->deepchem) (2018.9)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.6/dist-packages (from 
python-dateutil>=2.6.1->pandas->deepchem) (1.15.0)\n","Building wheels for collected packages: deepchem\n"," Building wheel for deepchem (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for deepchem: filename=deepchem-2.4.0rc1.dev20200924184214-cp36-none-any.whl size=499031 sha256=bb06a3dd7f6e432a05d18f54403bc00e292ddc9c52b8d5008c640c3153b917ee\n"," Stored in directory: /root/.cache/pip/wheels/c7/5c/0b/1f5cfa9461cf4af4190b45b7ae87ecc22ee5bdfb55748cfbe3\n","Successfully built deepchem\n","Installing collected packages: deepchem\n","Successfully installed deepchem-2.4.0rc1.dev20200924184214\n"],"name":"stdout"},{"output_type":"execute_result","data":{"application/vnd.google.colaboratory.intrinsic+json":{"type":"string"},"text/plain":["'2.4.0-rc1.dev'"]},"metadata":{"tags":[]},"execution_count":2}]},{"cell_type":"markdown","metadata":{"id":"OGVYBZh6Gq7N","colab_type":"text"},"source":["Install the SELFIES library to translate SMILES strings."]},{"cell_type":"code","metadata":{"id":"sqEygLk5GLYF","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":322},"executionInfo":{"status":"ok","timestamp":1600972951697,"user_tz":240,"elapsed":135821,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"3df1490a-f23f-4ffc-8398-3dcc27770948"},"source":["!git clone https://github.com/aspuru-guzik-group/selfies.git\n","%cd selfies\n","!pip install .\n","%cd .."],"execution_count":3,"outputs":[{"output_type":"stream","text":["Cloning into 'selfies'...\n","remote: Enumerating objects: 157, done.\u001b[K\n","remote: Counting objects: 100% (157/157), done.\u001b[K\n","remote: Compressing objects: 100% (114/114), done.\u001b[K\n","remote: Total 2026 (delta 90), reused 85 (delta 43), pack-reused 1869\u001b[K\n","Receiving objects: 100% (2026/2026), 12.38 MiB | 17.27 MiB/s, done.\n","Resolving deltas: 100% (1276/1276), 
done.\n","/content/selfies\n","Processing /content/selfies\n","Building wheels for collected packages: selfies\n"," Building wheel for selfies (setup.py) ... \u001b[?25l\u001b[?25hdone\n"," Created wheel for selfies: filename=selfies-1.0.1-cp36-none-any.whl size=27081 sha256=7c0b9aa7277c7a1103b657efc0354829b65851f2b2fa8e2082ae31544350be83\n"," Stored in directory: /tmp/pip-ephem-wheel-cache-lkv5aj3a/wheels/d0/8b/6e/8a44d44da67fdb190acc4f94129ff1428fc623ff9ad9a7abed\n","Successfully built selfies\n","Installing collected packages: selfies\n","Successfully installed selfies-1.0.1\n","/content\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"FpqPgmalHCdb","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":70},"executionInfo":{"status":"ok","timestamp":1600972952463,"user_tz":240,"elapsed":136568,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"b1358a90-efea-45b3-8aff-0e43e82d46c0"},"source":["import numpy as np\n","import matplotlib.pyplot as plt\n","import seaborn as sns\n","import pandas as pd\n","import os\n","\n","import deepchem as dc\n","from deepchem.models.normalizing_flows import NormalizingFlow, NormalizingFlowModel\n","from deepchem.models.optimizers import Adam\n","from deepchem.data import NumpyDataset\n","from deepchem.splits import RandomSplitter\n","from deepchem.molnet import load_tox21\n","\n","import rdkit\n","from rdkit.Chem import Draw\n","\n","from IPython.display import Image, display\n","\n","import selfies as sf\n","\n","import tensorflow as tf\n","import tensorflow_probability as tfp\n","\n","tfd = tfp.distributions\n","tfb = tfp.bijectors\n","tfk = tf.keras\n","\n","tfk.backend.set_floatx('float64')"],"execution_count":4,"outputs":[{"output_type":"stream","text":["/usr/local/lib/python3.6/dist-packages/statsmodels/tools/_testing.py:19: FutureWarning: pandas.util.testing is 
deprecated. Use the functions in the public API at pandas.testing instead.\n"," import pandas.util.testing as tm\n"],"name":"stderr"}]},{"cell_type":"markdown","metadata":{"id":"XYRunI2yHoLS","colab_type":"text"},"source":["First, let's get a dataset of 2500 small organic molecules from the QM9 dataset. We'll then convert the molecules to SELFIES, one-hot encode them, and dequantize the inputs so they can be processed by a normalizing flow. 2000 molecules will be used for training, while the remaining 500 will be split into validation and test sets. We'll use the validation set to see how our architecture is doing at learning the underlying distribution, and leave the test set alone. You should feel free to experiment with this notebook to get the best model you can and evaluate it on the test set when you're done!"]},{"cell_type":"code","metadata":{"id":"k2-L2gFHr04H","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973619044,"user_tz":240,"elapsed":803137,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["# Download from MolNet\n","tasks, datasets, transformers = dc.molnet.load_qm9(featurizer='ECFP')\n","df = pd.DataFrame(data={'smiles': datasets[0].ids})"],"execution_count":6,"outputs":[]},{"cell_type":"code","metadata":{"id":"fdo6CJMPGyig","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973619064,"user_tz":240,"elapsed":803152,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["data = df[['smiles']].sample(2500, random_state=42)"],"execution_count":7,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"ZMh-1QUqCxkY","colab_type":"text"},"source":["SELFIES defines a dictionary called `bond_constraints` that enforces how many bonds every 
atom or ion can make. E.g., 'C': 4, 'H': 1, etc. The `?` symbol is used for any atom or ion that isn't defined in the dictionary, and it defaults to 8 bonds. Because QM9 contains ions and we don't want to allow those ions to form up to 8 bonds, we'll constrain them to 3. This will really improve the percentage of valid molecules we generate. You can read more about setting constraints in the [SELFIES documentation](https://selfies-mirror.readthedocs.io/en/latest/selfies_examples.html#Advanced-Usage)."]},{"cell_type":"code","metadata":{"id":"6cOS0cNTdb0I","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":202},"executionInfo":{"status":"ok","timestamp":1600973619070,"user_tz":240,"elapsed":803140,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"d8fc1b88-0af7-4a82-e85c-889dbbcf8e86"},"source":["sf.set_semantic_constraints() # reset constraints\n","constraints = sf.get_semantic_constraints()\n","constraints['?'] = 3\n","\n","sf.set_semantic_constraints(constraints)\n","constraints"],"execution_count":8,"outputs":[{"output_type":"execute_result","data":{"text/plain":["{'?': 3,\n"," 'Br': 1,\n"," 'C': 4,\n"," 'Cl': 1,\n"," 'F': 1,\n"," 'H': 1,\n"," 'I': 1,\n"," 'N': 3,\n"," 'O': 2,\n"," 'P': 5,\n"," 'S': 6}"]},"metadata":{"tags":[]},"execution_count":8}]},{"cell_type":"code","metadata":{"id":"2N5zUFvSV7uv","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973619075,"user_tz":240,"elapsed":803139,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["def preprocess_smiles(smiles):\n"," return sf.encoder(smiles) \n","\n","data['selfies'] = 
data['smiles'].apply(preprocess_smiles)"],"execution_count":9,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"rAriEcI7e5wl","colab_type":"text"},"source":["Let's take a look at some short SMILES strings and their corresponding SELFIES representations. We can see right away that there is a key difference in how the two representations deal with Rings and Branches. SELFIES is designed so that branch length and ring size are stored locally with the `Branch` and `Ring` identifiers, and the SELFIES grammar prevents invalid strings."]},{"cell_type":"code","metadata":{"id":"2dqSCmoPe30e","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":195},"executionInfo":{"status":"ok","timestamp":1600973619247,"user_tz":240,"elapsed":803291,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"aa45a65e-10f1-4241-e974-816f3d395a5a"},"source":["data['len'] = data['smiles'].apply(lambda x: len(x))\n","data.sort_values(by='len').head()"],"execution_count":10,"outputs":[{"output_type":"execute_result","data":{"text/html":["
\n","\n","\n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n"," \n","
smilesselfieslen
70607CCCC=O[C][C][C][C][=O]6
99883CCOCCOC[C][C][O][C][C][O][C]7
37561c1nnon1[C][N][=N][O][N][Expl=Ring1][Branch1_1]7
73796COCCCCO[C][O][C][C][C][C][O]7
92088CC#CCCCO[C][C][#C][C][C][C][O]8
\n","
"],"text/plain":[" smiles selfies len\n","70607 CCCC=O [C][C][C][C][=O] 6\n","99883 CCOCCOC [C][C][O][C][C][O][C] 7\n","37561 c1nnon1 [C][N][=N][O][N][Expl=Ring1][Branch1_1] 7\n","73796 COCCCCO [C][O][C][C][C][C][O] 7\n","92088 CC#CCCCO [C][C][#C][C][C][C][O] 8"]},"metadata":{"tags":[]},"execution_count":10}]},{"cell_type":"markdown","metadata":{"id":"NrQelTLVa7wR","colab_type":"text"},"source":["To convert SELFIES to a one-hot encoded representation, we need to construct an `alphabet` of all the characters that occur in the list of SELFIES strings. We also have to know what the longest SELFIES string is, so that all the shorter SELFIES can be padded with `'[nop]'` to be equal length."]},{"cell_type":"code","metadata":{"id":"BkQ0Sd3TY3Aq","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973619421,"user_tz":240,"elapsed":803461,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["selfies_list = np.asanyarray(data.selfies)\n","selfies_alphabet = sf.get_alphabet_from_selfies(selfies_list)\n","selfies_alphabet.add('[nop]') # Add the \"no operation\" symbol as a padding character\n","selfies_alphabet = list(sorted(selfies_alphabet))\n","largest_selfie_len = max(sf.len_selfies(s) for s in selfies_list)"],"execution_count":11,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"vQ2m_WoHt7_m","colab_type":"text"},"source":["`selfies` has a handy utility function to translate SELFIES strings into one-hot encoded vectors."]},{"cell_type":"code","metadata":{"id":"N9-d9yYMZSgI","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973619680,"user_tz":240,"elapsed":803715,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["onehots = 
sf.multiple_selfies_to_hot(selfies_list, largest_selfie_len, selfies_alphabet)"],"execution_count":12,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"daU67TZZbbLa","colab_type":"text"},"source":["Next, we \"dequantize\" the inputs by adding random noise from the interval `[0, 1)` to every input in the encodings. This allows the normalizing flow to operate on continuous inputs (rather than discrete), and the original inputs can easily be recovered by applying a floor function."]},{"cell_type":"code","metadata":{"id":"u3ThEWVcbvxn","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973623716,"user_tz":240,"elapsed":807747,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["input_tensor = tf.convert_to_tensor(onehots, dtype='float64')\n","noise_tensor = tf.random.uniform(shape=input_tensor.shape, minval=0, maxval=1, dtype='float64')\n","dequantized_data = tf.add(input_tensor, noise_tensor)"],"execution_count":13,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"B38gEbh6uLrr","colab_type":"text"},"source":["The dequantized data is ready to be processed as a DeepChem dataset and split into training, validation, and test sets. 
We'll also keep track of the SMILES strings for the training set so we can compare the training data to our generated molecules later on."]},{"cell_type":"code","metadata":{"id":"O3JqekV0HjNm","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":34},"executionInfo":{"status":"ok","timestamp":1600973623718,"user_tz":240,"elapsed":807719,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"28d49000-e9d0-456b-b17f-29aa8ec53d68"},"source":["ds = NumpyDataset(dequantized_data) # Create a DeepChem dataset\n","splitter = RandomSplitter()\n","train, val, test = splitter.train_valid_test_split(dataset=ds, seed=42)\n","train_idx, val_idx, test_idx = splitter.split(dataset=ds, seed=42)\n","\n","dim = len(train.X[0]) # length of one-hot encoded vectors\n","train.X.shape # 2000 samples, N-dimensional one-hot vectors that represent molecules"],"execution_count":14,"outputs":[{"output_type":"execute_result","data":{"text/plain":["(2000, 588)"]},"metadata":{"tags":[]},"execution_count":14}]},{"cell_type":"code","metadata":{"id":"9In8bdWddovm","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973623720,"user_tz":240,"elapsed":807714,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["# SMILES strings of training data\n","train_smiles = data['smiles'].iloc[train_idx].values"],"execution_count":15,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"yZmmABKzI00F","colab_type":"text"},"source":["Next we'll set up the normalizing flow model. The base distribution is a multivariate Normal distribution. The `permutation` layer permutes the dimensions of the input so that the normalizing flow layers will operate along multiple dimensions of the inputs. 
To understand why the permutation is needed, we need to know a bit about how the normalizing flow architecture works."]},{"cell_type":"code","metadata":{"id":"W_Ff2Q4rIyCe","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973623721,"user_tz":240,"elapsed":807709,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["base_dist = tfd.MultivariateNormalDiag(loc=np.zeros(dim), scale_diag=np.ones(dim))\n","\n","if dim % 2 == 0:\n"," permutation = tf.cast(np.concatenate((np.arange(dim / 2, dim), np.arange(0, dim / 2))),\n"," tf.int32)\n","else:\n"," permutation = tf.cast(np.concatenate((np.arange(dim / 2 + 1, dim), np.arange(0, dim / 2))), tf.int32)"],"execution_count":16,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"FMCyGvKKJwXw","colab_type":"text"},"source":["For this simple example, we'll set up a flow of repeating [Masked Autoregressive Flow](https://arxiv.org/abs/1705.07057) layers. The autoregressive property is enforced by using the [Masked Autoencoder for Distribution Estimation](https://arxiv.org/abs/1502.03509) architecture. The layers of the flow are a bijector, an invertible mapping between the base and target distributions.\n","\n","MAF takes the inputs from the base distribution and transforms them with a simple scale-and-shift (affine) operation, but crucially the scale-and-shift for each dimension of the output *depends on the previously generated dimensions of the output.* That independence of future dimensions preserves the *autoregressive* property and ensures that the normalizing flow is invertible. 
Now we can see that we need permutations to change the ordering of the inputs, or else the normalizing flow would only transform certain dimensions of the inputs.\n","\n","Batch Normalization layers can be added for additional stability in training, but may have strange effects on the outputs and require some input reshaping to work properly. Increasing `num_layers` and `hidden_units` can make more expressive flows capable of modeling more complex target distributions."]},{"cell_type":"code","metadata":{"id":"byIooYBqJ2UC","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973623723,"user_tz":240,"elapsed":807703,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["num_layers = 8\n","flow_layers = []\n","\n","Made = tfb.AutoregressiveNetwork(params=2,\n"," hidden_units=[512, 512], activation='relu')\n","\n","for i in range(num_layers):\n"," flow_layers.append( \n"," (tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=Made)\n"," ))\n","\n"," permutation = tf.cast(np.random.permutation(np.arange(0, dim)), tf.int32)\n"," \n"," flow_layers.append(tfb.Permute(permutation=permutation))\n"," \n","# if (i + 1) % int(2) == 0:\n","# flow_layers.append(tfb.BatchNormalization())"],"execution_count":17,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"KMbxkF_8KZxR","colab_type":"text"},"source":["We can draw samples from the untrained distribution, but for now they don't have any relation to the QM9 dataset distribution."]},{"cell_type":"code","metadata":{"id":"hBYNQrAYKQij","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":50},"executionInfo":{"status":"ok","timestamp":1600973659310,"user_tz":240,"elapsed":843260,"user":{"displayName":"Nathan 
Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"4bae2bf9-6d54-47fa-86b3-1b839b52e9fb"},"source":["%%time\n","nf = NormalizingFlow(base_distribution=base_dist,\n"," flow_layers=flow_layers)\n","samples = nf.flow.sample(5)"],"execution_count":18,"outputs":[{"output_type":"stream","text":["CPU times: user 45.7 s, sys: 1.77 s, total: 47.5 s\n","Wall time: 35.5 s\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"pa04f-1VcG0p","colab_type":"text"},"source":["A `NormalizingFlowModel` takes a `NormalizingFlow` and any parameters used by `deepchem.models.KerasModel`."]},{"cell_type":"code","metadata":{"id":"iA56ui2MK1QA","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973659311,"user_tz":240,"elapsed":843255,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["nfm = NormalizingFlowModel(nf, learning_rate=1e-4, batch_size=128)"],"execution_count":19,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"IL-Onju8K8nK","colab_type":"text"},"source":["Now to train the model! We'll try to minimize the negative log likelihood loss, which measures the likelihood that generated samples are drawn from the target distribution, i.e. as we train the model, it should get better at modeling the target distribution and it will generate samples that look like molecules from the QM9 dataset. 
"]},{"cell_type":"code","metadata":{"id":"ZrmHYIHGK7-l","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973659314,"user_tz":240,"elapsed":843253,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["losses = []\n","val_losses = []"],"execution_count":20,"outputs":[]},{"cell_type":"code","metadata":{"id":"vIURsPTpLZdh","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":608},"executionInfo":{"status":"ok","timestamp":1600973888187,"user_tz":240,"elapsed":1072102,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"592d9815-bd6e-457d-cc30-aff532b3b0ba"},"source":["%%time\n","max_epochs = 20 # maximum number of epochs of the training\n","\n","for epoch in range(max_epochs):\n"," loss = nfm.fit(train, nb_epoch=1, all_losses=losses)\n"," val_loss = nfm.create_nll(val.X)\n"," val_losses.append(val_loss.numpy())"],"execution_count":21,"outputs":[{"output_type":"stream","text":["WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 
128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called 
on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 
588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","WARNING:tensorflow:Model was constructed with shape (None, 588) for input Tensor(\"input_1:0\", shape=(None, 588), dtype=float64), but it was called on an input with incompatible shape (1, 128, 588).\n","CPU times: user 7min 9s, sys: 9.26 s, total: 7min 18s\n","Wall time: 3min 
48s\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"k33LyZsPNwUg","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":265},"executionInfo":{"status":"ok","timestamp":1600973888192,"user_tz":240,"elapsed":1072090,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"5a7f48c7-aa41-48c5-fe95-78b78cb23048"},"source":["f, ax = plt.subplots()\n","ax.scatter(range(len(losses)), losses, label='train loss')\n","ax.scatter(range(len(val_losses)), val_losses, label='val loss')\n","plt.legend(loc='upper right');"],"execution_count":22,"outputs":[{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAAXcAAAD4CAYAAAAXUaZHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAcYElEQVR4nO3df3BUVZ738feXEEwQJIgYIMEFHQsFEhMMPEyB+IMZEX1EnFnAKV3RUpmpYXV83IobnB0WKbdA47OM2XJ0GH8sjrqQUQRcXVkHcUBLHQIJQQcYGBcqaX6FrIn6EJYQzvNH30AC+dE/0un07c+riup7z72n+3TT9bk3p88915xziIiIv/SKdwNERKTrKdxFRHxI4S4i4kMKdxERH1K4i4j4UO94NwDgoosuciNGjIh3M0REEsrWrVuPOucGt7WtR4T7iBEjKCsri3czREQSipntb2+bumVERHxI4S4i4kMKdxERH+oRfe4i4l+NjY1UV1dz/PjxeDclYaWlpZGdnU1qamrIdRTuIhJT1dXV9O/fnxEjRmBm8W5OwnHOUVtbS3V1NSNHjgy5XsKG+5ryAMXrd3OgroFhGekUThvFzPyseDdLRM5y/PhxBXsUzIxBgwZRU1MTVr2EDPc15QEWrN5BQ2MTAIG6Bhas3gGggBfpgRTs0Ynk8wvpB1UzyzCzN8xsl5ntNLPvmtmFZva+me3xHgd6+5qZlZjZXjOrNLNxYbeqE8Xrd58O9mYNjU0Ur9/d1S8lIpKQQh0t8wzwnnPuCuAqYCdQBGxwzl0ObPDWAaYDl3v/5gHPdWmLgQN1DWGVi0jyqqur41e/+lVEdW+++Wbq6upC3n/RokU8/fTTEb1WV+s03M1sADAFeBHAOXfCOVcH3Aas8HZbAcz0lm8DXnFBnwIZZja0Kxs9LCM9rHIRSV4dhfvJkyc7rPvuu++SkZERi2bFXChn7iOBGuBlMys3sxfM7Hwg0zl30NvnEJDpLWcBVS3qV3tlXaZw2ijSU1NalaWnplA4bVRXvoyIxMGa8gCTln7AyKJ3mLT0A9aUB6J6vqKiIv7yl7+Ql5dHYWEhH374Iddccw0zZsxg9OjRAMycOZOrr76aMWPGsHz58tN1R4wYwdGjR9m3bx9XXnklDzzwAGPGjOHGG2+koaHjnoKKigomTpxIbm4ut99+O1999RUAJSUljB49mtzcXO644w4A/vCHP5CXl0de
Xh75+fl88803Ub1nCC3cewPjgOecc/nA/+NMFwwALnivvrDu12dm88yszMzKwv0VeGZ+Fkt+kENWRjoGZGWks+QHOfoxVSTBNQ+WCNQ14DgzWCKagF+6dCmXXXYZFRUVFBcXA7Bt2zaeeeYZ/vznPwPw0ksvsXXrVsrKyigpKaG2tvac59mzZw/z58/niy++ICMjgzfffLPD17377rt58sknqaysJCcnh8cff/x0e8rLy6msrOT5558H4Omnn+bZZ5+loqKCzZs3k54efS9EKOFeDVQ75z7z1t8gGPaHm7tbvMcj3vYAMLxF/WyvrBXn3HLnXIFzrmDw4DYnNevQzPwsPi66gf9aegsfF92gYBfxge4aLDFhwoRWY8ZLSkq46qqrmDhxIlVVVezZs+ecOiNHjiQvLw+Aq6++mn379rX7/PX19dTV1XHttdcCMHfuXDZt2gRAbm4ud955J6+++iq9ewcHLE6aNIlHHnmEkpIS6urqTpdHo9Nwd84dAqrMrLnPYyrwJ2AdMNcrmwus9ZbXAXd7o2YmAvUtum9ERNrVXYMlzj///NPLH374Ib///e/55JNP2L59O/n5+W1eTXveeeedXk5JSem0v74977zzDvPnz2fbtm2MHz+ekydPUlRUxAsvvEBDQwOTJk1i165dET13S6EeHh4EXjOzPsCXwL0EDwylZnYfsB+Y7e37LnAzsBc45u0rItKpYRnpBNoI8mgGS/Tv37/DPuz6+noGDhxI37592bVrF59++mnEr9VswIABDBw4kM2bN3PNNdfw29/+lmuvvZZTp05RVVXF9ddfz+TJk1m5ciXffvsttbW15OTkkJOTw5YtW9i1axdXXHFFVG0IKdydcxVAQRubpraxrwPmR9UqEUlKhdNGtbpAEaIfLDFo0CAmTZrE2LFjmT59Orfcckur7TfddBPPP/88V155JaNGjWLixIkRv1ZLK1as4Cc/+QnHjh3j0ksv5eWXX6apqYm77rqL+vp6nHM89NBDZGRk8Itf/IKNGzfSq1cvxowZw/Tp06N+fQtmcXwVFBQ43axDxJ927tzJlVdeGfL+mlqkbW19jma21TnX1ol3Yk4/ICL+NTM/S2HeBTSfu4iIDyncRUR8SOEuIuJDCncRER9SuIuI+JDCXUTkLP369QurvCdSuIuI+JDCXUR6lspSWDYWFmUEHytLo3q6oqIinn322dPrzTfU+Pbbb5k6dSrjxo0jJyeHtWvXdvAsrTnnKCwsZOzYseTk5LBq1SoADh48yJQpU8jLy2Ps2LFs3ryZpqYm7rnnntP7Llu2LKr3EypdxCQiPUdlKbz9EDR688vUVwXXAXJnt1+vA3PmzOHhhx9m/vzgrCilpaWsX7+etLQ03nrrLS644AKOHj3KxIkTmTFjRkj3K129ejUVFRVs376do0ePMn78eKZMmcLrr7/OtGnT+PnPf05TUxPHjh2joqKCQCDA559/DhDWnZ2ioTN3Eek5Niw+E+zNGhuC5RHKz8/nyJEjHDhwgO3btzNw4ECGDx+Oc47HHnuM3Nxcvve97xEIBDh8+HBIz/nRRx/xox/9iJSUFDIzM7n22mvZsmUL48eP5+WXX2bRokXs2LGD/v37c+mll/Lll1/y4IMP8t5773HBBRdE/F7CoXAXkZ6jvjq88hDNmjWLN954g1WrVjFnzhwAXnvtNWpqati6dSsVFRVkZma2OdVvOKZMmcKmTZvIysrinnvu4ZVXXmHgwIFs376d6667jueff577778/qtcIlcJdRHqOAdnhlYdozpw5rFy5kjfeeINZs2YBwal+L774YlJTU9m4cSP79+8P+fmuueYaVq1aRVNTEzU1NWzatIkJEyawf/9+MjMzeeCBB7j//vvZtm0bR48e5dSpU/zwhz/kiSeeYNu2bVG9l1Cpz11Eeo6pC1v3uQOkpgfLozBmzBi++eYbsrKyGDp0KAB33nknt956Kzk5ORQUFIQ1f/rtt9/OJ598wlVX
XYWZ8dRTTzFkyBBWrFhBcXExqamp9OvXj1deeYVAIMC9997LqVOnAFiyZElU7yVUmvJXRGIq3Cl/qSwN9rHXVwfP2KcujPjHVD/RlL8ikthyZyvMu4D63EVEfEjhLiIx1xO6fxNZJJ+fwl1EYiotLY3a2loFfIScc9TW1pKWlhZWPfW5i0hMZWdnU11dTU1NTbybkrDS0tLIzg5vOKjCXURiKjU1lZEjR8a7GUlH3TIiIj6kcBcR8SGFu4iIDyncRUR8SOEuIuJDCncRER9SuIuI+JDCXUTEhxTuIiI+pHAXEfEhhbuIiA8lbrhXlsKysbAoI/hYWRrvFomI9BiJOXFYZWnr+yzWVwXXQXdwEREhxDN3M9tnZjvMrMLMyryyC83sfTPb4z0O9MrNzErMbK+ZVZrZuC5v9YbFrW+gC8H1DYu7/KVERBJRON0y1zvn8lrcjLUI2OCcuxzY4K0DTAcu9/7NA57rqsaeVl8dXrmISJKJps/9NmCFt7wCmNmi/BUX9CmQYWZDo3idcw1oZ9L69spFRJJMqOHugP80s61mNs8ry3TOHfSWDwGZ3nIWUNWibrVX1nWmLoTU9NZlqenBchERCfkH1cnOuYCZXQy8b2a7Wm50zjkzC+sGid5BYh7AJZdcEk7VMz+ablgc7IoZkB0Mdv2YKiIChBjuzrmA93jEzN4CJgCHzWyoc+6g1+1yxNs9AAxvUT3bKzv7OZcDywEKCgrCv3Nu7myFuYhIOzrtljGz882sf/MycCPwObAOmOvtNhdY6y2vA+72Rs1MBOpbdN+IiEg3COXMPRN4y8ya93/dOfeemW0BSs3sPmA/0Hwa/S5wM7AXOAbc2+WtFhGRDnUa7s65L4Gr2iivBaa2Ue6A+V3SOhERiUjiTj8gIiLtUriLiPiQwl1ExIcU7iIiPqRwFxHxIYW7iIgPKdxFRHxI4S4i4kMKdxERH1K4i4j4kMJdRMSHFO4iIj6kcBcR8SGFu4iIDyncRUR8SOEuIuJDCncRER9SuIuI+JDCXUTEhxTuIiI+pHAXEfEhhbuIiA8p3EVEfEjhLiLiQwp3EREfUriLiPiQwl1ExIcU7iIiPqRwFxHxIYW7iIgP9Y53A7rbmvIAxet3c6CugWEZ6RROG8XM/Kx4N0tEpEslVbivKQ+wYPUOGhqbAAjUNbBg9Q4ABbyI+EpSdcsUr999OtibNTQ2Ubx+d5xaJCISG0kV7gfqGsIqFxFJVEkV7sMy0sMqFxFJVCGHu5mlmFm5mf27tz7SzD4zs71mtsrM+njl53nre73tI2LT9PAVThtFempKq7L01BQKp42KU4tERGIjnDP3nwE7W6w/CSxzzn0H+Aq4zyu/D/jKK1/m7dcjzMzPYskPcsjKSMeArIx0lvwgRz+miojvmHOu853MsoEVwD8BjwC3AjXAEOfcSTP7LrDIOTfNzNZ7y5+YWW/gEDDYdfBCBQUFrqysrAvejohI8jCzrc65gra2hXrm/kvgUeCUtz4IqHPOnfTWq4Hm098soArA217v7S8iIt2k03A3s/8NHHHObe3KFzazeWZWZmZlNTU1XfnUIiJJL5Qz90nADDPbB6wEbgCeATK8bheAbCDgLQeA4QDe9gFA7dlP6pxb7pwrcM4VDB48OKo3ISIirXUa7s65Bc65bOfcCOAO4APn3J3ARuCvvd3mAmu95XXeOt72DzrqbxcRka4XzTj3vwceMbO9BPvUX/TKXwQGeeWPAEXRNVFERMIV1twyzrkPgQ+95S+BCW3scxyY1QVtExGRCCXVFaoiIslC4S4i4kMKdxERH1K4i4j4kMJdRMSHFO4iIj6kcBcR8SGFu4iIDyncRUR8SOEuIuJDCncRER9SuIuI+JDCXUTEhxTuIiI+pHAXEfEhhbuIiA8p3EVEfEjhLiLiQwp3EREfUriLiPiQwl1ExIcU7iIiPqRwFxHxoeQL98pSWDYWFmUEHytL490iEZEu1zveDehWlaXw9kPQ2BBc
r68KrgPkzo5fu0REulhynblvWHwm2Js1NgTLRUR8JLnCvb46vHIRkQSVXOE+IDu8chGRBJVc4T51IaSmty5LTQ+Wi4j4SHKFe+5suLUEBgwHLPh4a4l+TBUR30mu0TIQDHKFuYj4XHKduYuIJAmFu4iIDyncRUR8SOEuIuJDnYa7maWZ2R/NbLuZfWFmj3vlI83sMzPba2arzKyPV36et77X2z4itm9BRETOFsqZ+/8ANzjnrgLygJvMbCLwJLDMOfcd4CvgPm//+4CvvPJl3n4iItKNOg13F/Stt5rq/XPADcAbXvkKYKa3fJu3jrd9qplZl7VYREQ6FVKfu5mlmFkFcAR4H/gLUOecO+ntUg1kectZQBWAt70eGNTGc84zszIzK6upqYnuXYiISCshhbtzrsk5lwdkAxOAK6J9YefccudcgXOuYPDgwdE+nYiItBDWaBnnXB2wEfgukGFmzVe4ZgMBbzkADAfwtg8AaruktSIiEpJQRssMNrMMbzkd+D6wk2DI/7W321xgrbe8zlvH2/6Bc851ZaPjYU15gElLP2Bk0TtMWvoBa8oDnVcSEYmTUOaWGQqsMLMUggeDUufcv5vZn4CVZvYEUA686O3/IvBbM9sL/DdwRwza3a3WlAdYsHoHDY1NAATqGliwegcAM/OzOqoqIhIXnYa7c64SyG+j/EuC/e9nlx8HZnVJ63qI4vW7Twd7s4bGJorX71a4i0iPpCtUQ3CgriGschGReFO4h2BYRnpY5SIi8aZwD0HhtFGkp6a0KktPTaFw2qg4tUhEpGPJd7OOCDT3qxev382BugaGZaRTOG2U+ttFpMdSuIdoZn6WwlxEEoa6ZUREfEjhLiLiQwp3EREfUriLiPiQwl1ExIcU7iIiPqRwFxHxIYW7iIgPKdxDVVkKy8bCoozgY2VpvFskItIuXaEaispSePshaPRmgayvCq4D5M6OX7tERNqhM/dQbFh8JtibNTYEy0VEeiCFeyjqq8MrFxGJM4V7KAZkh1cuIhJnCvdQTF0IqWfdmCM1PVguItIDKdxDkTsbbi2BAcMBCz7eWqIfU0Wkx9JomVDlzo4ozNeUB3STDxHpdgr3GFpTHmDB6h00NDYBEKhrYMHqHQAKeBGJKXXLxFDx+t2ng71ZQ2MTxet3x6lFIpIsFO4xdKCuIaxyEZGuonCPoWEZ6WGVi4h0FYV7DBVOG0V6akqrsvTUFAqnjYpTi0QkWegH1Rhq/tFUo2VEpLsp3GNsZsrHzDxvMaRVw3nZkLIQ0Ph4EYkthXssaTZJEYkT9bnHkmaTFJE4UbjHkmaTFJE4UbdMLA3IDnbFtFXeCU1bICLR0Jl7LEU4m2TztAWBugYcZ6YtWFMeiF1bRcRXFO6xFOFskpq2QESipW6ZWItgNskDdQ3M6PURj/YuZZgd5YC7iKdOzubtuskxaqSI+E2nZ+5mNtzMNprZn8zsCzP7mVd+oZm9b2Z7vMeBXrmZWYmZ7TWzSjMbF+s34Tdz+/2RpakvkN3rKL0MsnsdZWnqC8zt98d4N01EEkQo3TIngb9zzo0GJgLzzWw0UARscM5dDmzw1gGmA5d7/+YBz3V5q33u0dRV9LUTrcr62gkeTV0VpxaJSKLptFvGOXcQOOgtf2NmO4Es4DbgOm+3FcCHwN975a845xzwqZllmNlQ73kkBH0bDoVV3kwjbESkWVh97mY2AsgHPgMyWwT2ISDTW84CWo7/q/bKWoW7mc0jeGbPJZdcEmazfS6CIZRrygN89NavWMVKhp13lAPHLuKXb90B/FQBL5KEQh4tY2b9gDeBh51zX7fc5p2lu3Be2Dm33DlX4JwrGDx4cDhV/S+CIZQV7yxnsS1v1U+/2JZT8c7yTl9uTXmASUs/YGTRO0xa+oGGXIr4QEjhbmapBIP9Nefcaq/4sJkN9bYPBY545QFgeIvq2V6ZhCqCIZT3n3i1zX76+0+82uFLaUy9iD912i1jZga8COx0zv1zi03r
gLnAUu9xbYvyvzWzlcD/AurV3x6BMIdQDutVG1Z5s+L1u/l+0x94tE/rYZfF6/t02p2jPn6RniuUPvdJwN8AO8yswit7jGCol5rZfcB+zsxj+y5wM7AXOAbc26UtljYdTx9C34Zzj6HH04fQt4N6BV+/z5LUF06f9WdbcNjlgq8Bbmi3nm7+LdKzhTJa5iPA2tk8tY39HTA/ynZJmPpOX8zJtQ/Su+n46bKTKWn0nd7xDJQL+vyOvpzbnbOgz++AJe3W6+gqWp3xi8SfrlD1i9zZwf/MDYuDs04OyKb31IWddu1kcjSs8maRXkWrM36R7qFw95MIpjqwdoZdWiczV87t90cebTy3O+fC1D7ALe3WUx+/SPfQxGHJLsKZKyO9irbg6/fbnFqh4Ov3O6ynUT0i4dGZe7JrPtNv0Z1DCN05kV5FG00fv874RUKncJeIunMivRFJpH380Yzq0ZW7kozULSORibA7p72+/M76+Bf0+V2b3UDBM/72RXrl7pZ1v+bQou9w6h8HcGjRd9iy7tcd7n9aZSksGwuLMoKPlaWh1RPpYgp3iUyENyKJ9KAQ6Rl/JFfubln3a8Zu/QeGUEMvgyHUMHbrP3Qe8JWlnFz7oPcXjYP6quB6KAEf6UFBBxNph7plJHKRdOdE2Mcf6aieSK7cHb6tmPSzDgjpdoLh24phxo/brXfsPxbSt8V1BgC9m44Hyzt6f95B4fQ1Ct5BoTd0/LlEWs+rG+7/QVT1pNvpzF26X+5s+D+fw6K64GMo4RDhGf/x9CFhlQNc7GraKe/4r4S0dn5Mbq+82bH/WNjq4jM4c1CIRb2I/8KIol63/lWiv2YAhbskigi7gfpOX8zJlLRWZZ1duXvE2p6l9Ihd1OFrHTg1KKzyZpEeFBLiYBLNAeHth1rV4+2HYlfPhxTukjgiOePPnU3v2/6l1UGh923/0mHdqnGFNLg+rcoaXB+qxhV2+FIv9LmLY2fVO+b68EKfuzqsF+lBIREOJhH/dbFhMTQ2tC5rbAiWx6Ie/pv6WuEu/hfmQWH8jB/z+dVPcIjBnHLGIQbz+dVPML6D/naAvFvmsdDNo/rURZxyRvWpi1jo5pF3y7wO60V6UEiEg0mkBxJXXx1WebT1orlILtKDQqwPJgp3kTaMn/FjhizaS6/H6xiyaG+nwQ7BuXEm3/5T5vT9DZf9z2vM6fsbJt/e+Xj6SA8KiXAwifRAcpi2u8DaK4+2XkcT4XUk0oNCd1xxrXAX6UIz87P4uOgG/mvpLXxcdENIF0pFelBIhINJpAeSJSdmtVlvyYlZMal3oK4hrPJmkR4UIq0XDg2FFOkBZuZnRXTFbCT1gvv/lDnrp4Y1JUMk9fJumcfCt07ysFvJMKvlgBvEL7mDyZ0cSMou+D5FX+PNOhqs99TJ2Wy94PsxqTcsI51AG0E+LCO9jb3PiPSgEGm9cCjcRZJQdx1MIj2QFE4bxYLVJ1h34swU0umpKSyZNiqG9Xa0OptOT02hsJN6kR4UIq0XDoW7iMRU5H9dEPaEb91dL9KDQqT1wmHBGyfFV0FBgSsrK4t3M0REwhbprKNdMVupmW11zhW0uU3hLiKSmDoKd42WERHxIYW7iIgPKdxFRHxI4S4i4kMKdxERH+oRo2XMrAbYH2H1i6CT2/EkH30mbdPnci59JudKpM/kr5xzbc5R3SPCPRpmVtbeUKBkpc+kbfpczqXP5Fx++UzULSMi4kMKdxERH/JDuC+PdwN6IH0mbdPnci59JufyxWeS8H3uIiJyLj+cuYuIyFkU7iIiPpTQ4W5mN5nZbjPba2ZF8W5PT2Bm+8xsh5lVmFlSTrVpZi+Z2REz+7xF2YVm9r6Z7fEeB8azjfHQzueyyMwC3velwsxujmcbu5OZDTezjWb2JzP7wsx+5pX74ruSsOFuZinAs8B0YDTwIzMbHd9W9RjXO+fy/DBWN0L/Ctx0VlkRsME5dzmwwVtPNv/KuZ8LwDLv+5LnnHu3m9sU
TyeBv3POjQYmAvO9DPHFdyVhwx2YAOx1zn3pnDsBrARui3ObpAdwzm0C/vus4tuAFd7yCmBmtzaqB2jnc0lazrmDzrlt3vI3wE4gC598VxI53LOAqhbr1V5ZsnPAf5rZVjPr+C7EySXTOXfQWz4EZMazMT3M35pZpddtk5BdENEysxFAPvAZPvmuJHK4S9smO+fGEeyumm9mU+LdoJ7GBcf/agxw0HPAZUAecBD4v/FtTvczs37Am8DDzrmvW25L5O9KIod7ABjeYj3bK0tqzrmA93gEeItg95XAYTMbCuA9Holze3oE59xh51yTc+4U8BuS7PtiZqkEg/0159xqr9gX35VEDvctwOVmNtLM+gB3AOvi3Ka4MrPzzax/8zJwI/B5x7WSxjpgrrc8F1gbx7b0GM0h5rmdJPq+mJkBLwI7nXP/3GKTL74rCX2Fqjds65dACvCSc+6f4tykuDKzSwmerQP0Bl5Pxs/EzP4NuI7g1K2HgX8E1gClwCUEp5ee7ZxLqh8X2/lcriPYJeOAfcCPW/Q3+5qZTQY2AzuAU17xYwT73RP+u5LQ4S4iIm1L5G4ZERFph8JdRMSHFO4iIj6kcBcR8SGFu4iIDyncRUR8SOEuIuJD/x9jo613gII57AAAAABJRU5ErkJggg==\n","text/plain":["
"]},"metadata":{"tags":[],"needs_background":"light"}}]},{"cell_type":"markdown","metadata":{"id":"9k-x3QVMOVNr","colab_type":"text"},"source":["The normalizing flow is learning a mapping between the multivariate Gaussian and the target distribution! We can see this by visualizing the loss on the validation set. We can now use `nfm.flow.sample()` to generate new QM9-like molecules and `nfm.flow.log_prob()` to evaluate the likelihood that a molecule was drawn from the underlying distribution."]},{"cell_type":"code","metadata":{"id":"mW8DeYFmOrJh","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946286,"user_tz":240,"elapsed":1130180,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["generated_samples = nfm.flow.sample(50) # generative modeling\n","log_probs = nfm.flow.log_prob(generated_samples) # probability density estimation"],"execution_count":23,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"s0M2xaqcdYEc","colab_type":"text"},"source":["Now we transform the generated samples back into SELFIES. 
We have to quantize the outputs and add padding characters to any one-hot encoding vector that has all zeros."]},{"cell_type":"code","metadata":{"id":"DVVQ-dwWdXWb","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946294,"user_tz":240,"elapsed":1130183,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["mols = tf.math.floor(generated_samples) # quantize data\n","mols = tf.clip_by_value(mols, 0, 1) # Set negative values to 0 and values > 1 to 1\n","mols_list = mols.numpy().tolist()\n","\n","# Add padding characters if needed\n","for mol in mols_list:\n"," for i in range(largest_selfie_len):\n"," row = mol[len(selfies_alphabet) * i: len(selfies_alphabet) * (i + 1)]\n"," if all(elem == 0 for elem in row):\n"," mol[len(selfies_alphabet) * (i+1) - 1] = 1"],"execution_count":24,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"tpwHYMP0LAvS","colab_type":"text"},"source":["`selfies` has another utility function to translate one-hot encoded representations back to SELFIES strings."]},{"cell_type":"code","metadata":{"id":"2XV-ZTgFjP04","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946296,"user_tz":240,"elapsed":1130158,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["mols = sf.multiple_hot_to_selfies(mols_list, largest_selfie_len, selfies_alphabet)"],"execution_count":25,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"hoC6RD8fdvVA","colab_type":"text"},"source":["We can use RDKit to find valid generated molecules. Some have unphysical valencies and should be discarded. If you've ever tried to generate valid SMILES strings, you'll notice right away that this model is doing much better than we would expect! 
Using SELFIES, 90\\% of the generated molecules are valid, even though our normalizing flow architecture doesn't know any rules that govern chemical validity."]},{"cell_type":"code","metadata":{"id":"F7EVnH9SdyN7","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":34},"executionInfo":{"status":"ok","timestamp":1600973946297,"user_tz":240,"elapsed":1130134,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"329baf39-cb2a-41d6-c468-9240a0349129"},"source":["from rdkit import RDLogger \n","from rdkit import Chem\n","RDLogger.DisableLog('rdApp.*') # suppress error messages\n","\n","valid_count = 0\n","valid_selfies, invalid_selfies = [], []\n","for idx, selfies in enumerate(mols):\n"," try:\n"," if Chem.MolFromSmiles(sf.decoder(mols[idx]), sanitize=True) is not None:\n"," valid_count += 1\n"," valid_selfies.append(selfies)\n"," else:\n"," invalid_selfies.append(selfies)\n"," except Exception:\n"," pass\n","print('%.2f' % (valid_count / len(mols)), '% of generated samples are valid molecules.')"],"execution_count":26,"outputs":[{"output_type":"stream","text":["0.90 % of generated samples are valid molecules.\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"pyt6ta2-d5Rd","colab_type":"text"},"source":["Let's take a look at some of the generated molecules! 
We'll borrow some helper functions from the [Modeling Solubility](https://github.com/deepchem/deepchem/blob/master/examples/tutorials/03_Modeling_Solubility.ipynb) tutorial to display molecules with RDKit."]},{"cell_type":"code","metadata":{"id":"XyE4CuaRe7BL","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946300,"user_tz":240,"elapsed":1130119,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["gen_mols = [Chem.MolFromSmiles(sf.decoder(vs)) for vs in valid_selfies]"],"execution_count":29,"outputs":[]},{"cell_type":"code","metadata":{"id":"JehQTBLXd9Gn","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946301,"user_tz":240,"elapsed":1130113,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["def display_images(filenames):\n"," \"\"\"Helper to pretty-print images.\"\"\"\n"," for file in filenames:\n"," display(Image(file))\n","\n","def mols_to_pngs(mols, basename=\"generated_mol\"):\n"," \"\"\"Helper to write RDKit mols to png files.\"\"\"\n"," filenames = []\n"," for i, mol in enumerate(mols):\n"," filename = \"%s%d.png\" % (basename, i)\n"," Draw.MolToFile(mol, filename)\n"," filenames.append(filename)\n"," return filenames"],"execution_count":30,"outputs":[]},{"cell_type":"code","metadata":{"id":"oyWxxxqvnKGf","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":1000},"executionInfo":{"status":"ok","timestamp":1600973946557,"user_tz":240,"elapsed":1130349,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"8e039e2f-ebe2-4143-cde8-f1303fe3ba71"},"source":["display_mols = []\n","for i in 
range(10):\n"," display_mols.append(gen_mols[i])\n","\n","display_images(mols_to_pngs(display_mols))"],"execution_count":31,"outputs":[{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAARuUlEQVR4nO3da2xUZRrA8WdaqOVeBJRiEeRewAvggpSuwVgSDN3F22iM22gWGWJI6gcxY2LWcc0mOySrdDEYByNaSBTbVZduo5uUiIrCBguCAuVS7sjFyq1cWminz3444whYoDM9M09b/r/0Q1s6Z17a+U/nvOc9px5VFQB2UqwHAFzviBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoRwTW6u5Of/+mF5uXg8snmz3YDaCSIEjBEhYIwIAWOdrAeADiUcljNnIu/X15sOpf0gQrjpv/+VHj2sB9HeECHclJMj8+dH3l+zRvx+09G0E0QIN/XuLbm5kfdPnjQdSvvBxAxgjAgBY0QIGPOoqvUYgOsavwkBY0QIGCNCwBgRIuHefdd6BG0bEzNIuFGjZNs260G0YfwmBIwRIVorN1dSUqSqKvJhdXXkhPpVq+See+See2Tfvsg7ZWWmA22rWDsKF3g88s9/yltvXfLJ++6T//1PRGTUqMg7aBa/CeGCe++VZcvk+HHrcbRPRAgXTJokgwfL4sXN/yuzMldHhHBBY6MUFsqiRdLYaD2UdogI4Y6CAjl7Vv71r6t9TXGxPP+8VFcna0ztBBHCHV27yjPPSFHR1b7mH/+Q11+XkSNl2jQpLZVwOFmDa9uIEHH67SqPuXOlslK+/faKN1m6VHw+6dJFVq6Uxx6TESNk/nz5+eeEDrMdIELEo7ZWHn9c3njjkk8OGiQzZ8o771zxVuPGSSgkP/4ooZBkZ8vu3fLii5KVJY89JmvXJnrIbRcRImYbNsj48VJaKn/7268XOHQ895x88cU1bt6rl/h8snmzVFSI1yuNjVJaKjk5cvfdsnix1NUlbNxtFREiNkuXyu9/L7t2ybhxsmaNdO9+yb/ee6/cfnuLtpOSInl5UlIi27eL3y99+sj69TJnjgweLC++KPv2JWLsbZUm0aJFi3Jzc1966aXGxsZk3i9cce6c/vnPKqIiWlCg5865ufH6ei0u1rvuimw/JUXz8rSsTJua3LyXtil5Ea696FX/kiVLkna/hpqaml577bV333338ccf/+qrr0zGcOHChQ8//PCjjz5q5RNfVZWOHasi2r27vv++W6Nrxtdf6xNPaFpapMbRo/W996pra2sTeJfWkhfhihUrohEGg8Gk3a+VEydOzJw5U0R6/HJJ6jvuuCMUCp05cyaZw3jyySede3/22Wfj3siyZdqtm4roqFH6ww8uju6KjhzRYFBvvVU9Hr3ttuk9evTw+Xzff/
99Mu476ZIXYX19fV5enoiMGTPmyJEjSbtfE+vXrx86dKiI9OzZ8+233w4Gg1lZWU4MPXv29Pl8W7ZsSc5Ievbs6dxvVlZWHDevq9PCwl9fgib3CUQbGvSTT05MnTrV+S94PJ7777//448/bv3uzNy5c+fPn//zzz+7Ms5WSuo+oaoeO3asqaO/zC8uLu7SpYuIjB8/fteuXc4nz58/X1JS4jwNOY+nvLy8kpKShoaGhA7G6/U69zhr1qxYb7t3r06cqCKanq5FRYkYXUtt27atsLCw+y+zQJmZmX6//+DBg/Ftbe/evampqSKSnp7+9NNPf/vtt+6ONlbJjrBjO3369BNPPOE8UAoKCs41N3exdevWwsLCbt26OV92yy23BAKBmpqaBA2prq5uyZIlS5cuvXDhQkw3/Pe/NSNDRXT4cN24MUGji01tbW0oFBo7dqzzrUtLS/N6vRUVFbFuJxwOV1RUeL1eJ0URmTBhQigUavbnlQRE6JqqqqoxY8Y4O4EffPDB1b/45MmToVAoOzvbeRDccMMNXq/3m2++Sc5Qr66hQf1+9XhURB98UE+ciO3mmzZtWrBgwebNmxMzOlXV1atXe73eTp0iZ8OOHz8+FAqdPXs21u3s2rXL7/f37dvX2U5GRkZhYeGePXsSMOSrsYmw402OLl261Pnllp2d3fLHX1t7SlbVAwc0J0dFtFMnDQZjPkKwcePGtLQ055VeVVVVYsYY8eOPPwYCgX79+jnful69ehUWFu7evTvW7dTX15eUlOTk5DjbSUlJycvLKysrS9p+k02EI0ea3G1C1NXVFRYWRl+Cxjf5WV1dPW/evBtvvNHZzk033RQMvrdvn+uDvYaVK/Xmm1VEBw7UNWvi2cKCBQuic+ChUMjtATbjsp1tJ6GSkpI4Jm8qKyt9Pp+zPy8iw4cPDwaDx44dS8SwL0aErbJjx44777zTeeIvavXchfOUPHnyZBGZOHFFMg9YNzZqIKApKSqiM2Zo3A+8zZs3p6eni0i3bt127Njh6hivYd26dU899ZRz7ykpKbm5ua+//vqJWF9Mqx49ejQYDA4aNMhJsXv37j6fb9OmTYkYsyO2CKdM0Rkzfv3wP/9RkWYOHE2Zoh6Pbt0a+XDnzsiXff65TpqkkyZpenrknRUrWjN4Y5988klGRoaIjBgxwt0f0po1a//0p8boAetRo/SNN/TUKRfv4RJHj+q0aSqiqakaCGg43Kqtbdu2bfHixTt37nRpdLGpqakJBoMzZsxwEurWrdvs2bM3xj6z5Owp5Ofnezye6J5CcXFxrPNbLZGoCFNSdM6cyIfRCKPa+2/ChoYG/y9/hPahhx6K4+m2JY4e1WBQBw2KpNijh/p86vrx6i++0MxMFdGbbtLYJxrbKBcT2r59u9/v7927t7Od/v37+/3+AwcOuDjaREU4dap27Rp5VdPBIty/f7/zirFz585JWPoTDmtZmeblRaYrRXTCBC0u1tY/Izc1aVGRduqkIjp1qh465MZw25idO3f6/f7ozvbNN9/s9/v3xb637Rwduf2XxelxHx1pVqIi9Pt19Gj9+99Vm4uw/SovL3d+ogMHDly7dm0y73r7dvX7tXfvSIqZmer3a2uekV9+ObJU+i9/0Y69or6urq64uNjZexeR1NTU/Pz8ioqKOOY/naMjnTt3djaVnZ1dVFTUyqWIMUc4fbqePh15Ky29YoTPP69vvaVZWdrQ0KIIjx/XhB2vdkdjY2MgEEhJSRGR/Pz8JEyaNau2VkOhyFpqEU1LU683zpeRP/2kd96pn37q9hDbsMrKyoKCgmhCI0eOLCoqOn36dKzbOXTo0MVLEXv16uXz+bZGZ0FiFHOEzs/+4rcrRXj2rPburR980KIIX35Zb7hBvV79+usY/wdJcfToUWcSvFOnToFAINzKuQs3rF6tXm/kxaSIjhunoVCL1nZefJC2o68gbN7hw4eDweDAgQOdhJzVvHGsLnDr6EjMEebk6OrVkb
f5868Woaq+8IJOmtSiCJ95JjI/LqITJ2pxsdbVxTS0BFq1alVmZqZz+G7lypXWw7nEoUMaDOott0S+dZ99du2btOsdchc1NjaWlZXl5eVFJ2+mTJkS32reDRs2+Hy+6FLEIUOGBIPBli9FTNQ+oRPh3r2amqrvv9+ifcKDBzUQ0L59I4+njAwtLNSkLyG6RFNTUzAYdJaz3HfffYcPH7YczZXV1+uyZer1tujoAhFepqqq6uLVvAMGDAgEAj/99FOs22l2KeKaFix6SGyEqvrww3r//TFMzNTXa0lJZOWU7RnWNTU106dPFxGPx+P3+9vL1QCuk4O0rjt16lQoFBo9evTFCX0d+95ROBwuLy9/4IEHnOkDEZk8efLVV/AlPMIvv9TU1HhmRysr1efTLl0iNQ4frsFg/Cs5YrVu3brBgweLSN++fT9ryYu8NqPDH6RNqKamJmc1b3R1uLOaN47V4dXV1X6/v0+fPl27dj1+/PhVvtK1CCsq9He/07vv1ttv1xEjfo1QNXLhkPgOUTgHrAcPjqSYnq4FBZrIJUTa1NRUVFTkrEKeOHHi3r17E3hnCdCxD9LGYfny5XEsiD948GAgEGj9CRZnz5695pVN3Fk7Gg5rVpauW6eqWl2taWkuXwUoHNaKCs3Pd/+A9WVOnTr16KOPOi9BCwsLE7FGKdE68EHaOKxatao1hxCSc4KF+wu4v/9eMzNbu/7wSrZu1blztWfPSIp//OMXr776qlvzJRdfk6K0tNSVbSZfHAdpO7Avv/xy0qRJ0YRmzJjx6aefxnGE6bITLIYNG+biCRYuR7hxo44dq6tWubvVy9XW6qJFetddTX37Zru1hqjZa1K0R3EcpO3w1q9f7/P5unbt6iQ0dOjQYDAYxwVmnBMsnMkC59SZgoKC1q/ddzPCt9/WnBzdts3FTV7DZQsgRo0aFccaopZck6IdieMg7XXi5MmTRUVFt912W3T+s6Cg4Lvvvot1O66fYOFahH/9qz78sM0R9svWEMV0ObOqqirnmiXdu3d/P6HX00yW+A7SXj+udDWDutgfuzt27PjtCRb79++PdTvuRLh7t3o8OmaMTpgQeUv+pYHiWEMU3zUp2rjWHKS9rjgnWPTp08d5wBieYNEBL/TUkjVErlyTom1y5SDt9eOyEyxSUlLiPsHi888/f+SRR6IHGOfNm9fCG3bACB1XWUPk7jUp2pqLI9TWHaS9rlw2vzBixIhgMBjH6drRnaOW/+GDDhuh47driEaPHu1cQ9b1a1KgAzhy5EgwGLz11ludR4tz+f0fYn8OO3/+fMu/uINHGBVdQ5Sent6/f/8HH3wwQdekQAfg4gkWLeHR3/7V447r3LlzlZWVQ4YMiU6lAlexffv2N998c8mSJWfOnBGRAQMGzJ49e+7cudGLnbri+orwupKbKxkZUl4e+bC8XP7wB/nhB/nlKvJoqdra2uXLly9cuHDLli0ikpaWNnPmTJ/PF52KbyX+Ui9wDdFT750LzDQ1NZWWlk6bNm3ChAmLFy8+d+5cK7dPhEBL5ebmlpSU7Nu3z7n8/oYNG+bMmTNgwIDnnntuz549cW+WCIHYDBgw4JVXXjlw4EBJScmUKVNOnTq1cOHCYcOGTZs2rbS0NBwOx7pBIuzIwmE5cybyVl9vPZqOJXrqvXOCRXp6+sqVK1944YU4NsXETIeVmyvffHP5J5mYSZCampp33nmnX79+s2bNivW2RNhh5eaKqsyfH/lwzRrx+4mwLepkPQAkUO/ekpsbef/kSdOh4MrYJwSMESFgjAgBY0zMAMb4TQgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQME
aEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFj/wc5xAJRl7JRQwAAAABJRU5ErkJggg==\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAWvElEQVR4nO3de1BU5/3H8e/ZAwssgiKIIBiVZkpCvGApJnhDdCWltdrakNZbMu1MMJOYxomxphpltE2CN6KBmGDaIErjLxRtYi6msgmiVk0DMUZMRUsaE+SiKygXIcju8/vjMMcVF9hddvku8HlN/+FkeXiY+ubcz5GEEAQAfDTcEwAY6BAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4TgEnV1dUII7ln0DYgQXOI3v/lNSEjIww8/vHPnzoqKCu7puDUJf67A6YQQkZGRFy5cUL6UJGncuHGJiYmzZ8+eNm2aj48P7/TcDSIEV/n6668NBoPBYPjnP/9ZX1+vLPTw8Lj//vt//vOf6/X6iRMnajTYFkOE7uraNQoIoIYGGjSIiCgoiAwGio7mnpYNjh07NnXqVMslbW1tp0+fVoI8fPhwW1ubsnzYsGEzZszQ6/VJSUkjR47kmKx7EOCW6uoEkWhoaP8yMFCcOsU6Idts375dkqSVK1d29gGj0ZiXl5eSkjJq1CjLf4cREREpKSl5eXnXr1/vzQm7A0TopvpihC+//LIkSZIkZWZm2vL58vLyrKys5OTkwYMHqzV6eHjExMSkpqYWFxebTCZXz9kdIEI31eciTE9PJyJJkl544YXa2lq7vretra24uDgtLU2v13t6eqpBBgUFJScnZ2VlffPNNy6atjvAPqGbUvYJg4NJkoiILl+mzz93333C9PT0FStWKOvAzz77bM+ePdHR0Xq9Xq/XT58+XavV2j5UY2NjYWHh+++/X1BQ8L///U9dHhERoQyYmJhouebsD7j/CoB1ypqwvFxUVYmqKjF0qPuuCbds2UJEkiTt2LFDCLFgwQLL6vz8/ObOnZuRkVFWVmbvyOfPn8/MzJw3b56/v786oFarLSwsdP6vwQcRuqm+sjm6efNmpcDXXntNXdjU1FRQULBq1aqYmBhJWZUTEVFISEhycnJOTo7RaLTrp1hur3p7e1+9etXZvwcnROim+kSEmzZtIiKNRpOdnd3ZZ6qrq5XD
oWFhYWqNGo0mJiZm1apVBQUFLS0tdv3QxsbG77777sEHH/T29r777rvz8/OV5U8++eSyZcvefffd+vr6nvxSvQ8Ruin3j3Djxo1EJMvyrl27bPyW8vLybdu2KWszNUidTqfX69PS0oqLi81msy3jTJ8+fdGiRZcvX/7kk0+GDBly9uzZ1tZWdZNVlmW18NbW1h78ir0EEYIj0tLSlH/uOTk5Dnz7jRs3rG6vDh8+XDkceunSpc6+9+LFi5IkVVVVKV/u37//4sWLJpPp+PHj69evnzJlioeHhzpgQEDAr371q9dff/3rr7928Fd1PUQIdlML3L17d89Hq6mpUbZXw8PDLQ8ZRkVFKWuz5uZmy88XFhaOGDGiiwEbGxvVwi0HVK8HsPcMiqshQrBPamqqUuCePXucPrh6+t7Pz0+Nx8fHx3J7df/+/ZGRkTYOWFlZmZOTk5ycPHToUHVAy+3V77//3um/hb0QYZ/hgn/zdnNpgZZu3Lhx6NChlStXTpgwwXJ7NSsrq6ioKDQ01N4BTSaTenzVy8tLHdDX11ct3BW/iC0QYZ+xaBHzBNatW6cUmJub25s/9/Lly8r26siRI8vKympqajw9PSsqKpT/unv37vfee8+uATs7gxIaGrpkyZK8vLxePgWCK2b6gPx8ys+nkyfpgQcoLIy2bmWYw7p16/70pz8p+4ELFy5kmIGF5ORkjUazY8eOixcvzpkzJz4+3mg0KtfT/OhHP7LsqlvV1dVHjx41GAzvv/9+ZWWlslCWZYev+HFEbxYP3VLOTGzb1v7lhAm3zkwwrgmff/555Z/mW2+9xTYJC0ajcf78+TqdLiIiIj8/f8yYMeq/55CQkMWLF+/evVs9fGojZXv1xRdfTEhIsNxeVa74uXDhgot+F4HNUXdTVyd8fUVUVPuX7hDhmjVriMjT03Pfvn08M+jOlStXur49yt7T9x22VzUazeXLl100eYEI3U1dnRgxQkybJo4cEeL2CFVNTaLXTnqtXr1aKXD//v299CN7prPbo6ZMmaIcfbH39qiKiop//OMfLpqtAhG6l7o6ERQkcnPb13tWI9y/XxCJiAiRkiLy8oTrTnr98Y9/JCKtVuvqf4WucPPmTfVwqOXpe/X2qIsXL3LPsR0idC91dSIwULS0iLAwYTRaj3DnTjF0qCBq/5+Hh5g8WaSmin/9S9y86bSZPPfcc0qB77zzjtMGZaLezj969Gir26td387/n//8p0G9gNAFEKF7USIUQjzzjNiyRURHW79k1GQSxcUiLU3o9cLL61aQvr5CrxdpaaInJ73MZvPTTz+tFPjuu+86PpBbUrdXhwwZYrm9qp6+v3n7X7L58+cTkUv3hxGhe1EjPHdOREaK2Njur9tuaBAHDoinnhL33HOrRiIxZox4/HFzfv4+u67SMpvNv//97/trgZY6u50/MDDQ8nb+DRs2ENETTzzhupkgQjeyd6+oqGiPUAgxY4bw97fv5omqKpGXJ1JSxIgRgkhERTWSPVdpmc3mp556SinwwIEDPfhV+pja2tr8/PylS5danu0gonvuuSczM5OIbL9QzgGI0F1s2SKIxNSptyLcu1cQOXgHk7K9mpl5xupZr4yMjHPnznX4FrPZvGzZMiLy8vIaUAV2oG6vBgQEENHp06eVA63ffvuti34iInQLSoGSJHbscP7gXd/nnpWVVVlZaTabn3zySaVAe68C669u3rx5/Phxs9k8d+5cIurixuUeQoT8Nm9uL9DiARGuUlFRkZ2dvXDhwuDgYLVGjUaj3Pbu4+Nz6NAhl0+ir9m+fTsRLV682EXjI0JmmzYJIqHRCJf9ne2Ust01Z84cb2/v4ODg2NjYgoKC3p5EX1BaWkpEw4cPt/HGf3vhAm5OmzfTH/5Askx//Ss9+ijbNJqbm//73/+OHTvWrkufB5Tw8PBLly6Vlpbed999Th8cr+Ngs2lTe4FvvtlpgUII5Z4dl87Ex8dn3LhxKLALCQkJRPTx
xx+7YvABE2FZGcXFkbc3jR9Pn37KPRvauJFWrSJZpuxseuQR658xmUyzZs0aOXJkfHy8+hIVYDFr1ixyWYQDY5/QbBZjx4rly8XVq2LrVhESIm7cYJxOWpogErLczc3yX375pfp/U0lJSW/NDqxQ3nPq5+fnise3DYwIS0qEl9et5wfedZfgux4yNdWmAoUQzc3Nd999NxGNHj26qampV2YHnYqMjCSi48ePO33kgbE5eu4c/eAH7W/6I6Lx4+mrr+jKFer1bbzUVFq/nmSZcnJo8eJuPuzt7V1SUmIwGL744gudTtcrE4RO6fV6IjIYDE4feWBE2NxMFk/vIn9/am6mpUtpyBCaPZs2bqSSkl6Yxbp1tGEDyTLt3k2LFtn0Lf7+/rNmzepv7z/pm1y3W+jR/Uf6gWHD6Pr1W1/W11NwMFVWUlMTGQyk/G0bM4YSE2n2bJo5kwICnD6FtWvpz38mWaY9e2jBAqcPDy6XkJAgy/KJEyeampp8fX2dObTTN3DdTkmJKC0VWq1Qn3Fw113iww+FsLjeOTT01t0HsixiYsSqVaKgQDjpoZRr1ggi4ekp3PUBEWCT2NhYIjp48KBzh+3vEe7cKTQasWGDmDBBLF8url8Xr7wiwsNFh5eQmEzis8/Eiy+KhASh1d4K0s9PzJ174I3qO652tsPq1e0F9pEHRECnlEcNPPvss84dtl9HqBQoSWLbNnH+vIiLE15eYsKEbu54bWoSBQVi1SoREyMkyTzIT+fZSiRCQkRyssjKEpWVdkxhxQpBJLRaxsOx4DTKUZmJEyc6d1jHIzx58uTJkyedOBUny8oSkiQkSWzf7vggFRWX9xoWLhTBwbfWjhqNiI0Vq1eLwsLuN1c3bRJareiDj2gBK5qbm318fCRJcu7D1xyMMCMjQ9mlfPXVV504G6d5/fX2Al95xVlDlpeLrCwxZ47w9r4VpE5363ESZrP1p4aeP++sKQA/5UTF22+/7cQxHYzwF7/4hRLh/PnznTgb53jttfYCMzJcMfyNG+Kjj8SKFWL8eCFJt4KcPLmrp4ZC//DSSy8RUUpKihPHdPA84WOPPabT6XQ63WOPPdbjA7ROtW0bPfEEEVFGBi1b5oqf4ONDDz5IW7bQ6dNUXU15eZSSQuHhpFxeP3gwBQbS0aOu+MnATzlb6ORT9g7n29DQ4NLnwDkiPb399the30g2m0VDg01PDYU+ra2tTXnLmhPfOur4FTODBg0apF4I5g7S0+mZZ0iSKDOzfWXYiySp/ao4Ieihh+jwYbp6tZenAL1BluUZM2aQU1eG/eWyta1bacUKkiR69dXeL7ADLy/69a9p1y7CDXr9ktOvX+sXl61t2UIrV5Ik0Y4d9Pjj3LMhIkpJoXnzyN+fex7gAsoB0o8//thsNms0TliN2THEtWskSbR9e/uX0dH0xRc9n0BP/d8rr7SsXUsaDWVnu0mBRBQZSaGhVFbGPQ9wgR/+8IejRo0yGo2WN3z2hH0d+/rSzp0dF/7lL/STn9CSJU6Zj31eeOGFBU8/nTx+PGVncz6kxZqlS6m+nnsS4BozZ84kJ26R2n4Mx5a3dvWmtLQ0IpJlOScnh3MeMPDk5uYSUVJSklNGs29N2NpKS5dSVlanH0hLo8xMOn++Z38YbLBx48bnnntOluXs7OxHOntIS69TttgbG9u/DApyiy12cDq9Xi9JUlFR0ffff++E4Wzvtdu3drW1iYCA9stHlMudc3KE0eiUPxa3SU1NJSJZlvd0+4iI3qVctqaePQ0MxHnCfmvs2LFEVFRU1POh7D6208Xxd5OJ0tNpwQIKDqbqavr73+nRRyk4mCZNojVrqKiIWlud8FcjNTV1/fr1sizv2rVrcbePiABwDWeeqLC9V7ve2lVeLrZtE3p9V5c7O2DdunVEJMtybm6uI9/vYlgTDhwHDhwgosmTJ/d8KEciFPa8taupSXz0kXjmGTFu3G1vz5s798Pf/e53e/futf2ukLVr1yoF
/u1vf7N92r1JiTA4WAwfLoYPF5KECPut+vp6T09PDw+Prt/yawsHI3TsrV01Ne1PkwgPFxERP1PXxlFRUcrb85qbmzv73ueff14p8K233rLvp/YiJcLyclFVJaqqxNChiLA/e+SRR6ZOnfrBBx/0cByeO+vNZnH69OnNmzcnJib6+PioNep0uqSkpPT09DNnzlh+fs2aNUTk6enp0rcW9xw2RwcUZedo+fLlPRyH/4UwLS0tx44dMxgMBoPh1KlTZrNZWR4cHBwfH6/X60tLSzMyMjw9Pd9+++1f/vKXvLPt2rVrFBBADQ3tF3MHBZHBQNHR3NMC1zhy5Eh8fPzYsWPPnDnTk3H4I7R05cqVw4cPGwyGgwcPfvfdd8pCf3//lpaWvLy8efPm8U6vW4hwQGltbQ0MDGxqarp06VJoaKi63Gw2nzp1qrW1NS4uzpZx3CtCS6WlpYcOHSooKFiyZElAQEBSUhL3jAA6+ulPf3rw4MHc3NxFFo9z3rdv30MPPTRz5kwbT2C4b4QA7m/r1q3PPvvsb3/72zfffFNdePXq1eDgYK1WW1tba3nIozP95X5CAA7KKftDhw5ZLgwMDIyOjlYOdtgyCCIEsEltbW1qauoTt98yPmHChGHDhl26dOn87RdMq/cc2jIyIgSwiaen50svvfTGG2/UW9yiJkmS1dua7LqoDREC2MTPzy82Nratre3IkSOWy632Nm3aNG9v788//9xoNHY7MiIEsJXV3hITE5WFJpNJXejj4xMXF2c2m4uKirodFhEC2MrqQ0dHjRoVERFx7dq1U6dO3flhW7ZIESGAreLi4nx9fc+ePVtVVWW53OpLfG1/TDAiBLCVVqudNm2aEKKwsNByudWVXmxsbEBAwIULF7755puuh0WEAHaw2tusWbM0Gs2xY8eam5vVhbIsT58+nYg++eSTrsdEhAB2sLrlGRgYOH78+JaWluPHj1sut3G3EBEC2EE5O//tt99euHDBcrnVs/Pqwq4vDkWEAHZQz85bPQzTIcJ77703PDy8pqbm7NmzXYyJCAHsY7W36dOne3l5lZSU1NbWWi63WmwHiBDAPrNnz6Y7zs7rdLoHHnjAZDJ1ODtvy24hIgSwz+jRo8eMGWPj2Xllt7CoqOjmzZudDYgIAezWxWGYDlueI0aMuPfeexsaGv797393NhoiBLBbZ2fnBw8eXFZWpj6ZxfLDXewWIkIAu+n1eo1Gc/ToUcuz8x4eHsrZeXtva0KEAHZTz86fOHHCcrnV3hISEmRZ/vTTTxvVVwXdDhECOKLr3ULLs/ODBw/+8Y9/3NraevToUatDIUIAR1jd07vvvvvCwsKqq6u/+uory+VdP+0CEQI4Qj07X1dXZ7k8ISGBOtkt7OzYDCIEcIROp7v//vtNJtPhw4ctl1vdLZwyZYqvr++XX35ZU1Nz51CIEMBBVntTrqcpLCy0PDuv1WqnTJly542ICkQI4CCre3phYWGRkZENDQ3FxcWWy7s4UYEIARw0adIkf3//c+fO2XJ2XllYUFBw5ziIEMBBHh4e8fHxdMe981ZXehMnTgwMDKyoqKioqOgwDiIEcJzV3mbOnCnL8okTJ5qamtSFGo3mww8/vHLlSnh4eIdBECGA45TdwoKCAsuz80OGDImJiRk6dGh5ebnlhydNmhQQEHDnIHgrE4DjhBBhYWFVVVVnz56NiopSlxuNxqCgIBsH8XDN3AAGBEmSkpKSysvLO1wXanuBhDUhADvsEwIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QA
zBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhADNECMAMEQIwQ4QAzBAhALP/B4TEqHR9xbfMAAAAAElFTkSuQmCC\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAQpUlEQVR4nO3dfWzTdR7A8U/pxoTBxoO4TTELiwiObQ66tbBug0RA5RA5z9wlnBK5O/XwkIMY1Jx3wSCcOYPx6USn+MfMzeNMAAGdBB8S9oBrYVvHgzxNJAwmIjCoIA8b9P741bbMMcvsfp+tfb9CSNNf136Vvfv7tv3197X4fD4BoKeP9gCAWEeEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI0JAGRECyogQUEaEgDIiBJQRIaCMCAFlRAgoI8IYNW+eWCxy+HDwmo0bxWKRN9/UG1OsIkJAGRECyogQUBanPQBoOnxY2tr8l7/9VnUoMYwIY9qECdojABHGuJUrZehQ/+X6elmyRHU0sYoIY9qdd8rw4f7L112nOpQYxhszgDIiBJQRIaCMCAFlFp/Ppz0GIKaxJwSUESGgjAgBZUQIKCNCtLdvn9TVaQ8ilnDYGtrbtEkOHpRx47THETPYEwLKiDCGGOeV+cMfgtccPhw8r4zbLampkpoqf/ubvPGG//K6dVqDjSFEGHPKyqS5uYPr7XY5elSOHpV//lPmzvVfvvde08cXe4gwtqSny4AB8vLL2uNACCKMLRcvymOPSUmJeL3aQ8GPiPCXamxsfO+9944ePao9kPYuXJCaGnn1Vfn97+U3v/Ff2doqjz8uFy9KSclVf3DePFm+3JwxQoSPKLrA6/Vu3769urq6qqrK7XYfO3ZMRN59990HH3xQe2jS3Cy1tVJdLVVVUlsr58/7r4+Pl3PnRER8PrnhBpk9W155Rf76V8WRIogIf15ra2t9fb3b7Xa73S6Xa//+/aFfPUlNTXU4HCkpKSpjO3lSXC5xu/1/nzgR3GS1SlaWOBz+PwkJwU1PPCErV0pZmUyZYv6Q0R4Rdqy5ubm2ttbY3dXV1Z0z9iMiIhIfH5+Tk+N0Om02m81mGzNmjJkDa2uTvXuD+7rduyX0u2hpaWKz+f84nTJkSMd3cuutMmOGLF8ukyebM2p0hgj9OpxkBmRkZASqy8/PTwjdrXS/xsbGrVtH1NRY3W6pr5cLF4KbEhNl3Ljg7u7mm8O9z0WLxOmUTZu6Y7y4Nr
EbYVtb2969ewO7uz179ly+fDmwNTU1NS8vz6iuoKBgaODEgKb46TPCbbed3b27v7E1I0OcTv/uzm6Xvn278hAFBVJQICtXisUSyZGjC2Irws4nmWPHjg3s7jIzMy0m/nq2trZ6PB6Xy3W1l52TJ3/1wAPZ48dLXp4kJUXmQRctkl//OjJ3hV8iyiP8/vvvGxoajPA2b97coyaZ6i87Z8yQUaNk796r3uDsWfnPf+TRR7vjwREUteeYWbBgwcaNG/ft2xf6H5iSkmK32x0Oh8PhyM/PT05ONnNIxiSzZz4j/JTPJ5MmSUWFLF0qzzyjO5YoF7V7wv379+/du7fdLsX8SWZDQ4PL5Tp06NC6devaPSOkpqba7Xa73T5+/Pj8/PykSM0yI8RikSeekOpq+cc/JDVV/vhH7QFFr6jdE27btq1Pnz45OTlxcaY+0XQ4ySwqKqqsrNR9RuiakhL585/FapX335f77tMeTZSK2ghNc+rUKePdFOPv7777LrCpT58+o0aNcjgcRUVFY8eOzc7ONvkZISIWL5YlS6RfP9m0SQoLtUcTjYjwmnX+2cagQYPy8vKM3Z35n210k/nz5bXXJDlZKiokJ0d7NFGHCMNiTDKN8LZs2fLDDz8ENvXGSea1unRJfvc7Wb1abrpJqqslPV17QNGFCDsW+tlGRUXFt1cuY5uWllZYWGiE1xPeyTTB+fMydapUVkpmplRWXvWAuF7N6/WuW7duyJAh06ZNM/OZlAj9ApNMI7z6+vrQSWZycnJ+fr5R3YQJE66//nrFoUbW22+/vWHDhokTJy5cuLBPn86+2nb6tEyaJB6POBzy2WeSmGjaGM3Q2tpqs9l27NghIosWLXrhhRdMe+iYjjDGJ5kisnbt2vt+fNNzxYoVc+fO7fz2zc3idMrBg/KrX8kHH0gvfJvpqrZu3Wq3243LaWlpzR2eAqR7RNH/xTCETjIrKyvbfRM3dJKZl5d3XQwsXVtfXx+43NDQ8LO3v/FG+eQTcTrlo49kzhx5993oOfR0xIgRiYmJZ8+eFZGsrCwzHzrK94SXLl3as2dPDE4yw1RXV1dQUHDhwgWr1VpeXj516tRwfsrtljvukDNn5JlnZOnS7h6jeSoqKl588cWhQ4cuW7YsLS3NtMeNwgg7n2SOHDkysLuL1knmNdm1a9cnn3xSWFiYl5cX/k+Vl8u990pbm7z0kixY0H2jiwnREKHX6/V4PMa3flwuV+gk02KxjBw50uFwGIeM5ubmxsfHKw41mpSVyYMPisUi//2v/Pa32qPpzXplhD+dZBYVFW3evNnYmpycnJWVZezuxo8fP2zYMDPH5vV6m5qaTP66vZZ//Uueflr69pUNGyS8mSw60GsibGpqcv2orq7OeAFtSEhImD59+k033WTs8W655RYzB/bTZ4T09PQDBw6YOQZFCxbIK69IUpJs3uzLze01c3ufT/bskVGjpNMPZa7Nvn1y5kxX1vDoue+OnjlzxuPxGL/cVVVVX3/9dejWtLQ0m81m7O7Mfyfz0KFDbre7pqbG7XbX1taGvuxMSEhISUk5d+5cv379zBySlpdekpYW+eKLHQ88MHvNmv/deuut2iO6qtOnZetW/7l5vvhCTpyQXbskMzNi99/lhXR6UITtdikej+fSpUuBrUlJSdnZ2VqTzNBnhMrKyoMHD4ZujcHPNgIsFnn7bbn//sUbNnimT59eVVV1ww03aA/K7/x5qavzn4rO5ZIrn8bl5pvl6NFIRth1PlVHjhxZv3794sWLp0+fPmjQoNCBxcXFZWZmPvLII6WlpTt37rx8+bKZA2tra9u5c2dpaen8+fNtNlu7Q0mSkpKcTudTTz21fv36Y8eOmTmwnsnr9dpsNhGx2Wxer1dxJEeO+N5/3zd/vs/p9CUk+ESCfwYM8DmdvvnzfaWlvgMHgj/yl7/4RHxz5gSvaWryifjeeOOKGzQ1BW/w8cfBG7hcvpQUX0qKb+BAX//+/ssffHANYz
Z7Txj+JNNms5k8owt8tmGM7dSpU4FNcXFxo0eP5rONqxk4cODHH39cVFRUW1s7c+bM8vJy046nDZ1k1tTI8ePBTVarZGb6z4hVWCi5uWK1XvV+yspk6VK58cZrHoCxkI6I/PvfcvBgV05e3u0Rhk4ya2tr3W53a2trYGvoJNPhcJg8k+nJLzt7nWHDhpWXlzudzs8///yhhx4qKyvr/EjULjt//nxDg9TUXGdMMtu9BTZ8uNjtMn682O2SlxfuAa7p6fL99/Lyy2LiEaNB3RJh6C6lurq6paUl+HhxcZmZmYFf7ttuu62b/qk6FP4zgvkvO6NARkbGpk2biouLV61aNWTIkNdffz1S99zc3Gx8e9P4h7Pbt1VU+D8ESkyU3Fz/7q6oSEaM6Mr9G+vkvPqq/P3vETuZXfgiE+HZs2fr6+sDv9xffvll6FbdSaaIfPXVV2+99ZbL5aqtrT1z5kzg+r59+xof4ht/jxw50uSBRZ/s7Oy1a9feddddK1asSE9Pf/LJJ7t2P8ePHw89X0Ho87jVak1NrfvTn8YY5zvOzOxskhkmY52c5culpEQWLer4NocPS1ub//KV32zzmzevi4/exQjb7VK2bt168eLFwNaBAwfm5OQY4U2cOFH97bKWlpbAN1PUnxGi3qRJk1atWnX//fc//fTTw4YNmzNnTjg/ZXyVLLC72717ty/kE2zjX81QWFg4ePDgyI45nHVyJkyI7GMGXUOE33zzzbZt23rgJPNn3X777YsXLzZObRaDR2mbb+bMma+99tpjjz328MMPDx48eObMmR3erN0k83xgESmRxMTE3NzcQHjmHIHU+To5K1dK4Fwl9fWyZEnEHjfcCJcvX77oyv30iBEjjBN42u32cePG9eT3LeLj45999lntUcSWuXPnNjU1Pf/887Nmzfr0008LCgpE5PTp01u2bAlMMk+ePBm4vdVqzcrKcvxozJgx1l8+y7xGna+Tc+edMny4/3Jkf9nDjTA7Ozt0kllcXKy1GBh6i2XLlh07duydd96ZMWNGVVXV6NGjy8vLZ82aFbhBd08yO3f8uP9z/NBvlaqskxNuhJMnTz59+jQfjiF8FoulpKTk5MmTa9eunTJlypYtWyZMmFBcXGyc79hut98c/iJSkXC1VeVstuBtVNbJCTdC8+cGiAJWq7WsrGzq1KlVVVV33313ZWVl4Msu5ti/P7iIqscjIe8eyoABYrOJwyGNjRJ6GKL56+T0oGNHEZX69ev34YcfFhcXb9++fdq0aZ999ln//v277+G8Xtm+3b+7c7kk5FTMVxxAE7qqXLuPFn52nZyI6zVfZUKvduTIkYKCgkOHDt1zzz1r1qyJ4JnIw1+6uLBQzH3VGS4ihEl27dpVVFTU0tJSWlo6e/bsX3JXjY2NLpfrwIH0jz4q9HjaL11s7OWMI9fMfdXZRUQI81RVVW3cuPG555671nf4QpcuDiz4kZ396I4db0qEli5WRIToiS5evOjxeAJHru3bty90a1pamt1udzrvsNkej+DSxVp4YwY9hfrSxVqIEGpCJ5lut7uHL13cfYgQZjtx4sTChQuNSabvJ0sXBxYz72lLF3cfXhPCbK2trcnJyefOnYuRBT9+FhFCwerVqzMyMnrp0sURR4SAsh70rT8gNhEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQ
LKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLK/g8ky7Ttlj5YaAAAAABJRU5ErkJggg==\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAARSElEQVR4nO3df2zU9R3H8VdLC4qDDiy1yI8hI43IxqpFpoJuhatTxF8zxfgHONScBmc7fyTFmawQjZwmmqIupriZVLMsq2O6qshswQloppbfs8FfILDyo3ZurVMotH3vjx4UsGhpv9d37+75SP+Au/ZzH5J7cp/e93Pfb4qZCYCfVO8JAMmOCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzogQcEaEgDMiBJwRIeCMCAFnRAg4I0LAGRECzpI6wosuUkqKUlL00EMnvfeXv+zzaSHJJHWERy1dqq++8p4EkhURSlJjo5Yt854EkhUR6qKLJOmxx3TokPdUkJSIULNmaeRI/etfev5576ngFDU3N2/duvXw4cPeE+kVItThw7r7bkl65BG1t3vPBt32zDPPZGdnT548+ZxzzqmtrfWeTs8RoQ4c0B136Lvf1Ucf6c9/9p4NumfXrl0LFiw4cOCApPr6+ptvvtl7Rj1HhDLTkCG6805JWrLEezboni1btrS2th79a11dXUtLi+N8eoMIo4qLNXiwNm3SihXeU0E3TJ48OS0t7ehfJ02aNGjQIMf59EbSRdjWpr/8RQ0NJ94+YoRuvVXixTBOjB079umnnx48eLCkMWPGVFRUeM+o55IowqYmLV2qCRN0ww0qL+/iG+67T+npWrdOa9f2+eRw6m677bZ9+/bV1dVt3749Ly/Pezo9lxQRbtyoW29VdrZ+9St9+qlycvS973XxbWPH6qabJOnhhyUpJaVPJ4keGDJkyMSJE49dl8ajRI6wrU0vv6yCAl1wgZ59VocOKRRSZaXq6jRvXtc/UlKilBStXKmNG5WeftxdH3+sv/61D2aNpJOYETY06JFHNH68rrlGNTUaOlThsP75T1VXq7BQAwac9AfPO0/XXitJS5YoI6Pz9pYW3Xijrr9e996rOD8yjH4n0SJcv163365x47RwoXbtUk6OyspUX6/yck2c2K0R7r9fkpYvV1NT540DB2rePKWn6/HHdckl2rEjJpNHckqQCA8d0gsvaPp0TZmiZcvU0qJQSFVV2rZNxcX6zndOYaipU5Wfr/Z2rVvXeWNKioqL9dZbOucc1dbq/PM5rJ/UIhGlpOh//wtmtLiPcN++6Mpzzhy99ZYyMlRUpE8+UXW1rr66h2+udLwYmp14+5Qp2rhRN9ygpibNmaPiYvZ8IwBxHOH6
9Zo3T2PHauFC1dfr3HNVVqY9e7R0qcaN69XIBQU62TveGRl64QWVlSk9XU88oenTWZqit1Ls6//h928tLaqq0uOP6x//kKTUVM2apeJihUJ9Oo333tONN2rHDp15pioqdNVVffro6Ht79nQufJ5+Wo8+qvff1+DB0VsyM0/tt57jWPyor7fSUsvMNMkky8qykhLbudNtPo2NdtVVJllKihUV2aFDbjNBH/jRj6JPvC6/nnmm5yPHR4Rr11phoaWlRf/BeXlWXm5ffeU9LbP2disrs/R0k2zqVNuxw3tCie7HP44+Bx588KT33nlnFzdOmtTF9x84EB3thB/p0urV9uKL0a+5c02yP/6x85ZPP+3Rv8fMzPr1VoODB1VZqcce05YtkjRwoAoLFQ739crzG3S8a3rxxbrxRr37ri68UBUVmjXLe1pJYOlS3XNP52qwD+Tnd/552zZJmj27F0vQY/TTN2a2b9fChRo1SjffrC1blJ2tkhJ98okqK/tRgUdNnar33tOsWWps1OzZKi7mgH7MJdJpgfpdhOvWac4c5eTokUf0+efKy1NFhXbtUiSi0aO9J3dymZl65RWVlSktTU88oZkzVV/vPafElWCnBeovETY3Nz/55JMFBXdfeqleeEHp6Zo/X+vXq7Y2ulWl/+tYmtbUaNQorV2r3Fy99pr3nBJUgp0WyD/CDz744K677ho9enRRUVFNTVlBwWdLlmj3bj37rC64wHtyp+6yy7Rpk668Uo2NuuoqlqYx4X5aoIULZRbML4RyjLC9vb2mpubqq6+eOHHiU0899cUXX+Tl5VVUVKxYMWzhQmVmes0rAJmZevXVzqVpKKQ9e7znlFgS7LRADhE2NzcvW7Zs0qRJBQUFr7zyyqBBg+bOnbt58+ba2tp58+bF+2fDOhxdmp59ttasUW6uVq70nlN82rlTCxfqjjuOu7EHpwV6//3oJQ+O/Tr99OAn3BM9P7px6rZt21ZUVHTGGWd0PPT48eMjkUhjY2NfzqGPNTTYFVd0HtA/fNh7QvFj1Sq7/nobMMAkS0uzvXvNjhz0u/deM7OGBhs82CR79dXoj3zDccJv/urOccLY6YsI29raqqqqQqFQypH91NOmTausrDycHE/J9naLRKJPpp/8xOrrvSfUvx04YBUVNnlyNI+BA62w0Kqro/ceG6GZ3XWXSTZ9+nH3dhnhhAm2deuJX+vXJ0GE//3vf8vKysYd2U89ZMiQcDi8devWmD5o//T3v9vZZ5tkI0bYypXes+mXPvnESkps+PBoGNnZVlJiu3cf9z0nRLhzZ3S70po1nffGaMdM7MQqwg0bNoTD4cFHdjRMmDAhEol8/vnnMXq4mFq+PJhlZEOD/exn0aVpSYm1tgYwZmLo2JbYsVjo2JZYUdH1XtwTIjSzefNMsiuuMDO76CIiNGttbe1YeXa0l5qaGgqFKisrW+P2GfenP5lkF14YzL7QY5emP/2p7dkTwJjxq7nZystt0qRoCYMGWWGhvf32N/3I1yN8/31LSTHJNmywSy9N7gj3798fiUTGjBnTkd/QoUPD4XBdXV1Q43t55x0bN84kO/PMzjcAeumNN2zkyOgHQf72t2DGjC8ffmglJTZsWLSBkSOttNQaGr79B78eoZldd51JVlhos2cna4S1tbXhcPj0I2/35uTklJWVffHFF70fuZ/47DObNSvgjyzt32+XX550S9O2Nquuttmzo69dR1ee3V/tdxnhO++YZKmpyfdK2NLSUllZOW3atGNXnlVVVe3t7QHOr5+IxUeWWluttDS6NM3PT/ClaVOTlZfbuedGn/SnnWZz59rmzac8TpcRmll+fvR/tGSJcO/evZFIZNSoUR35ZWRkFBUV7UiCz9KtWWOjRplkmZm2YkUwY65e3bk0ff31YMbsV7Zts6IiO+OM6NN9/HiLRKzHx4ZPFuHrr5/0oF/PIly1ymbOtKFD7bTTbMIEKyy09et7OOdvdWoR1tbWzp07N/3I
furc3Nzy8vIvv/wyRpPrhz77zK68MvilaUGBSTZggJWWWltbAGO6a2uzqioLhTpXntOmWWVlb99nPlmEZpaXF1iETz1lkg0fbgsW2G9+Y9ddZ6mpNnCgvflmryZ/Mt2N8MUXX8zNze1oLz09fc6cOWs6Ds0kn2OXppddFszB946laWqqSTZjRnR3SJz6z3+srCz6bpZkQ4ZYOGxxdGz43/+29HTLzDzuEOXvfmeShUIxecTuRvjkk09KysrKKikp2el4Xpd+4803owffMzPttdeCGXPVKsvONsnOOqtzj0gc2bDBwuHoVrKOTSqRiMXdseG6OrvlFnviieNubG42yb7//Zg8YncjbGpqeu655w4ePBiTWcSnE/aFBrI03bfPQqE4W5q2tkZXnh3tpaZaKGRVVZZI79C9/bZJds01MRk8Pk701G91LE07zkAV1L7QY5emM2favn0BjBkj+/dbJGJjxkTzGzrUwmGL/2PDJ2posJwcS0uzd9+NyfhEGIBY7AutqbGzzjLJRo+2tWuDGTNAtbUWDtvpp0fzy8mxsjJLoGPDnbZvtx/8wNLS7PnnY/UQRBiMWOwL3b3bpk+PfpCnnyxNW1qsstKmTUvkleexVq604cNt2DCrqYnhoxBhYGKxL/Tw4c6laSjkuTTdu9cikehhUskyMqyoKMHPs7p4saWm2sUX9+qcot1BhAE7ui90xIjA9oVWV3cuTdetC2bM7quttblzo4dkJMvNtfJyS+xjw62tNn++SRYO98WJ1YkweA0Nwe8L3b07ugjss6XpwYNWUWG5udH2Bgyw2bPj8sBJD/ziFybZ4sV99HDxd0GYuGCmRx/VAw+orU35+frDHzRyZG/HbG3VQw/pwQfV3q7Zs1VRoeHDg5jr1+zZo2XL9NvfqrFRkrKyNH++FizQ2LExebj+5qWXdP31GjZMv/51F/fecUdgJ1nr1EexJ6VY7At9+eXoZ8/HjAl+adpvr/nRlx54oHMb6te/TvikfyCIMLZisS901y675JIgl6Ydp3X54Q+jz7MTTuuCWCPCmDv24Ht+fjD7Qg8dsnvusZQUu+AC69jF1IPLFZnZxx9/+2ldEGtE2EdWr47uC83KCuxF5qWX7KOPon8+GmFmZhdvXZ4QYXu7VVd397QuiDX/0+Anifx8bd6sUEgNDbriCi1aFMD526+9VhMmnHjjt16u6LnndO65KiiIXvPjllu0YUM8XfMj8RBh38nK0sqVKi2VmRYvVkGB9u0L+CG6c7minTv14YcaOVKlpdq9W7//vc4/P+Bp4JQQYZ8aMECLFqm6WtnZWr1aU6Zo7dogx+/O5Ypuv13Ll2v3bi1aFN/X/EgYROhgxgzV1mr6dNXXa8aMYJamHbpzuaKsLP385xowIJhHRO8RoY9Ro/TGGyotVXu7Fi/W5Zdr//4Ahk2wyxUlCSJ0k5amRYv08svKzNSqVZoyRevX93bMHlyuCO6I0NmsWdq4UdOn68svdeaZwYxZXKzBg7Vpk1asCGZAxBQR+hs9Wm+8obVrdeTCOb01YoRuvVXixTBOEGG/kJamSZOCHPC++5SernXrAn73FbFAhIlp7FjddJMkPfywJB25MCT6IyJMWCUlSknRypXauJGtMP0aESas887TtddK0pIlysjwng1OjggT2f33S9Ly5Wpq8p4KTo4IE9nUqcrPV3u71q3zngpOjggTXMeLIecw6c+IMMEVFCgvz3sS+Eac6Alwxish4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdE
CDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4IwIAWdECDgjQsAZEQLOiBBwRoSAMyIEnBEh4Oz/FC57s6EZGMsAAAAASUVORK5CYII=\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAATrElEQVR4nO3df1BU9f7H8dcii4KgYPzQQEoxEQF/m2mpk1ApUnfshtPcG9fszuA4ToAy9+LMvTPY3JqhGS2wPxq6XbukTUXevIOJGeCPTEOvhBpKCPgzNEIERNAV2PP9Y/e7LNvKz93z3h+vxzjNcVkO70mf7tnd8zmrURQFRCTHQ3oAInfHCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSJin9ABup6Wl5csvv7x8+XJERMSLL77o5+cnPREJ0/AK3Go6fPhwUlLSzZs3Db8NCQnZs2fPwoULZaciWYxQPQ0NDdOmTWtpaTG/MSgo6MKFC/7+/lJTkTg+J1TPrl27LAoE0NjY+Pnnn4vMQw6CEaqnpqbG6u21tbUqT0IOhRGqJywsbFC3k5vgc0L1XLx4cfr06TqdzvzG0aNH19TUTJgwQWoqEsdHQvVMnjx5x44dXl5eplu8vb137tzJAt0cHwnVVlNTk5+ff+nSpSlTprz66quTJk2SnoiEMUIiYTwcJRLGCImEMUIiYYyQSBhXUbivykqcPg1/fyQmSo/i3vhI6L7++18kJ2PzZuk53B4jJBLGCImEMUIiYYyQSBgjJBLGtyjcQnMz/vMfyxvLy41f+vBDyy/Nno25c9UYjMATuN1EZSViYwdx/7/9DW++abdpqDcejtpRTAw0GuOv557r656bNxvvFhPT1376XYI/fnxf+yHHxAhV8s03OHtW7KfHxEBRLH/94x8AEB1t5Ut8GFQTI1TPtm3SE5BDYoTq+fRT1NdLD0GOhxGqISAAADo7sX279CjkeBihGpKSjBt5eWhrEx2FHA8jVMPixTBcz6m11cqbcuTmGKEabt9GerpxOzcXXV2i05CDYYRq6OjAa6/B8KEvV65g927pgQAAf/87FAWVldJzuD1GqAadDr6+WLfO+NutW0WnIQfDCNVgODXw9deh1QJAeTkOHxYdiBwJI1RPaCheftm4PbQHw/r6nvPgrP5qaLDhvKQSRqiqjAzjRlERqqpERyGHwQhVNXMm4uIAQFF4FhsZcT2h2jIyUFoKALt24a23EBIyiO8NDsaBA33dIT4eTU3DGo/UxwjVtnw5pk/H+fPQ6fDee4Nbr6DVYtasvu7gyT9PJ8TDUbVpNNi0ybj9/vvo6BCdhhwAIxTwyivGo9Bbt/DRRwDgwT8HN8Y/fAEjR2LDBuP2u+9Crze+f0juiRHKWL8e3t4AUFeHPXuM2+SeGKGMwECsWWPc3rrVuOCQ3BMjFLNxIzQaACgrQ02N9DQkhxGKmToVzz9v3P7sM9FRSBQjlGQ6i43XnnFnjFDSkiWYN096CJLGCIWZHgzJbTFCYS+9hPBw6SFIFD+LwpZWrDBeTO3rr+HrO6xdNTZi1SoACA7Gl1/aYDZyWDzh15a+/x6trQBscCknnQ7HjgFAaOhwd0UOjoejRMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCW
OERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCWOERMIYIZEwRkgkjBESCfOUHsClzJlT39GhAaDRBAHa4ezKw0O3YEETgICAbmCibeYjh8QIbemHH6JbW1sBKEoz4D+cXen1jSdOTAQQGhoK/Gyb+cgh8XCUSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIiYYyQSBgjJBLGCImEMUIBd+/eNf2XiBGqqrOzMzc3Nzw8fN++feHh4W+//bZOp5MeioRpFEWRnsF1XL16Va/XAwgPD/fwsPwHbu/evRs3bqyrqwMQFBTU2NgIYOrUqdu2bUtMTLS4c3d397Vr1wB4enqGhYWpMT0JYYRqqKysTE9PLy0tNd3i5eV1//59028TEhLeeeedyMhIielIGA9H7au5uTktLW327NmmAgMCAnJyclpaWnJycsaOHWu4saioKDY2Ni0traWlRW5YEqKQfXR2dubl5QUFBZn+V3t6eqakpPz666+m+9y8eTM1NXXEiBGm+4wbNy4nJ6erq0twclIZI7SL0tLS2NhY83/sli1bdubMGat3/uGHH5YsWWJ+56ioqK+//lrlmUkKI7SxmpqapKQk86KmTJlSUFDQ7zcWFhZOmjTJ/BsTExMvXryowswkixHazJ07d7KyskaOHGmqaPTo0VlZWXfv3h3gHjo6OrKzs/38/Ex78PLySk1NvX37tl0nJ1mM0Ab0en1+fv748eNN8Wg0muTk5Bs3bgxhb/X19cnJyRqNxrS3hx9+OC8vr7u72+aTkyNghMN14sSJJ554wvwwcv78+cePHx/mbk+ePLlw4ULz3c6bN+/YsWM2mZkcCiMcup9//tniISs0NDQ/P1+v19tk/3q9vqCgYOLEieYPsElJSVeuXLHJ/slBMMKhMDx58/X1NeXh7e2dmZlpjydvhqeao0aNGvJTTXJwjHDQCgsLH330UZVfxrx69WpycrL5D504cWJ+fr5dfyipgxEOQnl5+eLFi81LmD179pEjR1Qb4ODBgzNmzDAf4Omnn37Q24/kLBjhgPz21JaHHnpI5NSW7u7u/Px88xNxPDw8kpOTGxoaVJ6EbIUR9uP+/fvmJ3kC0Gq1qampLS0tglPdunUrMzPTy8vLNFVAQEB2drZOpxOcioaGEfaluLh4+vTp5od/8fHx586dk57L6KeffkpISDAfLzIy8quvvpKeiwaHEVr327/fU6dOdcy/3w7+LwX1ixFa+u2Rnr+/v4Mf6TnmMTMNECPsYXjNIzg42Elf83CcV49oUBihkcu8+m/1fZRvv/1Wei56IEbomu+DWz2j4NKlS9JzkRVuHaFrnxH2oHPr2trapEejXtw0QsO50eHh4aa/oK56brS9zzKn4XPHCE+ePLlo0SLzQzWXXyVkp/VWZBPuFWF9fX1KSor5FUHdZ72sbVcekw25S4Q6nS4nJ4dXjhj+NTjI5twiQqvXUKqrq5OeS8yQr0ZF9uDiEVZUVCxdutT8bxuvJmhy4MABi/Pdnn322fPnHffEIFflshE2NTXxurr9srhC8aJFf/D0VFJSFLMLFJPduWCEhhMp/f39TflptdqUlJTGxkbp0RyU4R8sPz+/0NDLgAIoAQFKTo7S2Sk9mXtwtQiLi4ujo6
PND7Hi4+MrKyul53IClZVNy5YphggNv2JilJIS6bHcgOtEWF1dbfEBY4899hhfbBiswkIlIqJXiomJSm2t9FguzRUibG5uzszMNH/Z3bD46N69e9KjOaX795WcHGXMmJ4OtVolNVVpbZWezEU5d4QPWnz0yy+/SI/m9K5fV1JSlBEjelIMDFRychS+sGVzThzhoUOHZs6caX78uXTp0oqKCum5XMqpU8pTT/U6Op0zRzl6VHos1+KUEV67ds3ipOSwsDCelGwner1SUKA88ojlE8XLl6UncxVOFmF7e7vF4iMfH5+srKyOjg7p0Vxce7uSlaV4e/d06OOjZGYqXBc1fE4ToWHx0SOPPGLKz7D46DL/QVbRtWtKcrKi0fSkGBam5OcrPAQZDueI8NSpU08++aT507+5c+ce5VMTIWVlyoIFvY5OFyxQysqkx3Jajh7h9evXLRYfTZgwwU0WHzmy7m4lP18JCenp0MNDSU5W+LL0EDhuhIbFR2PGjDHlZ1h81Mq3qxxGW5uSlaWMHNmToq+vkpWl8A3aQXHQCAsLCydPnmx+/Onmi48c2YULSlJSr6PTxx5TeKrSwDlchFVVVcuXLzfPb9q0afv375eei/pRXKzExPRKMS5O+fFH6bGcgQNFaDiX39PT02LxUSdP5ncSnZ1KXp4SGNjToWFhFJev9M0hIjSsagsMDDTl5+npycVHTqqpSUlN7XW+27hxXBjVF/kIS0pKYmJizI8/4+Lizp49Kz0XDcv588pzz/U6Op02TeGzCqskI7xw4QKvdOLaCguVyZMtz3fj62sWZCJsa2uzes0vLj5yPTqd5cIoLy8ujOpF7QgNi49CQkIsFh/x6peurb5eSUlRPDx6UpwwQcnLU3jOhaJyhGVlZRbXgX788ce///57NWcgQf/7n/Lkk72OTufOVb77TnosaSpF+NvFR/xEBPdkWBgVHt7ToUajJCW59cIou0fY3t5u8dlAPj4+/GwgN2dYGDVqVK+FUVlZinteB9y+ERYWFpovPgI/JY/MXL2qJCf3Ojp1z4VR9oqwvLz8qaeeMs9vzpw5/LxY+q1Dh5SZM3uluHSpcvq09Fgqsn2EjY2N/OR0GhTDwqjgYMuFUQ0Nve4WHd1zh2ef7WuHmZnGu0VHW/mqaT+hof0MZlqrZXU/tuIB2+ns7MzNzY2IiNi+fXt3dzcArVabmppaV1eXlpZmniWROQ8P/OlPqK5GZia8vABAr8fOnYiMxNtvQ6ez8i3ffIOzZ1Ue015sFuHevXujoqLS09Nv375tuCU+Pv7MmTO5ubljx4611U8hF+bvj+xsnD4N0yqalhZs3ozFi6EoVu6/bZua09mRDSKsrq5OSEh44YUX6urqDLdERkbu27evuLg4Kipq+PsntxIVhf37UVwM06cZ/P73MHtvq8enn6K+Xs3R7GVYETY3N6elpcXExOzfv99wS0BAQHZ29tmzZxMSEmwxHrmp+HhUVCAnB7NnIz3d8qsBAQDQ2Ynt29UfzfaGGGFXV9cHH3wQGRm5ffv2rq4u/P/ZZ9XV1ZmZmV6G43qiYdBqkZaG8nKYnWJsZDrtPy8PbW0qz2V7Q4nw4MGDc+bMWbduXWNjo+GWZcuWVVRUfPzxx6ZPuiOyCasHoosXw/DJy62t+PBDlSeyvcFFWFtbu3r16ri4uB9//NFwS0REREFBQWlp6YwZM+wwHpEVt2/3HKPm5qKrS3SaYRtohO3t7Vu2bImNjf3iiy8MtxgWH1VWVlqsCSSyt44OvPYaDB8De+UKdu+WHmh4BhRhV1fXrFmz3njjjXv37gHw8PBYu3ZtbW3tli1bzK9IT6QOnQ6+vli3zvjbrVtFpxm2AUXo6em5du1aw/b8+fO/++67HTt2jB8/3p6DET2Q4W3D11+HVgsA5eU4fFh0oOEZ6OHopk2blixZ8sknn5w4cWLhwoV2nYloIEJD8fLLxu2hPRjW10Oj6etXQ4MN530gz/7vAgAYNWrUkS
NH7DoK0WBlZGDnTgAoKkJVFZz03BBbnjtKpLKZMxEXBwCK4sRnsQ30kZDIMWVkoLQUAHbtwltvwezqRf0LDsaBA33dIT4eTU3DGm8gGCE5t+XLMX06zp+HTof33sObbw7ie7VazJrV1x08VemDh6Pk3DQabNpk3H7/fXR0iE4zJIyQnN4rrxiPQm/dwkcfAYCHU/29dqphiawZORIbNhi3330Xer3x/UNnwQjJFaxfD29vAKirw549xm1nwQjJFQQGYs0a4/bWrcYFh86CEZKL2LjRuO6prAw1Ner93Dt3kJuLuDgEB0OrxZgxiI7Gn/+MgwcHugdGSC5i6lQ8/7xx+7PPVPqhx49j2jSkp+PgQTQ2oqsLbW04fx47diAuDi+8gDt3+t8J3yck15GRgcJCACpde+bcOSxfblzaHxKCVasweTJu3sSZMyguhl6PvXuxejWKivrZDyMk17FkCebNw6lTKv249euNBa5ahY8/htlHPaC4GAkJ6OrC/v0oKUF8fF/74eEouZSMDJV+0LlzOHoUAMaPx7//3atAAM88g7/8xbhteHDuAyMkl/LSSwgPV+MHHTtm3EhMxJgxVu6wcqVx4/LlfnalUaxeV5WI+tPWhhs3MGYMrK5vP3YMhk9jSUpCQUFf++FzQqIh8vODn98Dv3r8uHEjJqaf/fCRkMj2WloQHY3r1zFiBKqrERHR1535nJDIxvR6rF2L69cBYMOGfgoEHwmJbEuvx7p1xksSz52Lo0f7P5GVzwmJbObePaxZY3wZJiYGe/cO6FRyRkhkG7/+it/9DmVlALBgAYqKMG7cgL6RzwmJbKCqCk88YSxw5UqUlAy0QDBCouE7fBiLFuHSJQD4619RWGh5Ak3feDhKNCyHDmHlSty9C60W//oXkpMHvQe+Oko0dN9+ixUr0NEBHx/s3o0VK4ayE0ZINERVVXj8cdy5Ax8fFBdj0aIh7ofPCYmGorMTq1cb1+z+859DLxB8Tkg0NHl5qKwEgKAgdHT084HBf/xjX28Y8nCUaCieeQYlJQO9840b1ldaGPBwlGgobPjgxUdCImF8JCQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhP0fQCN1SHcnXJsAAAAASUVORK5CYII=\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAASsklEQVR4nO3de3CU9bnA8ScJubVCuITcQFAqEAEJdilFbRE0nNbTKNSZ1fbUhTLMrB3apiOts0w7Z7ZU8ay9nRDFTkI7JdUqBgEbPVokKhgQ7WyG0CARuUlIQm6QEC6byya/88cbY0oDIbC7z4Z8P7P/wGZ3H8h89/3t7vu+G2GMEQB6IrUHAIY6IgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkR
AsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhIAyIgSUESGgjAgBZUQIKCNCQBkRAsqIEFBGhICy8I2wvLx8w4YN5eXl2oMAwRWmET799NMZGRnLli3LyMj49a9/rT0OBsDr9b7zzjv79u3THmTwMOGnvr4+Ojq6Z8Lo6OiGhgbtodA/n8+XnZ0dERExbtw4EbHZbAUFBe3t7dpzhbtw3BJWV1d3dHT0/LGjo6OmpkZxHlyJjz/+eM6cObm5uXFxcdOnT09ISCgtLV26dOmkSZPWrFlTX1+vPWAY034W6ENbW5v1VGoZP358W1ub9lC4nFdeeSUhIUFEpkyZUlZWZow5e/ZsXl7ejBkzrF9iTEyM3W7fvXu39qThKBwjNMZ4vd6ZM2eKSEZGRmlpqfY4uCRrCWqV9uCDDzY3N1/0AyUlJXa7fdiwYdbP2Gy2vLw8n8+nMm14CtMIMSgcPHgwIyNDROLi4nJyci7zk1VVVW63e8yYMVaKycnJLpersrIyZKOGMyLEVdq8efPIkSNFZPLkyXv37r2Sm7S2thYUFFjdikh0dLTdbt++fXuwRw1zRIgBa21t7b0EbWpqGug9eL1eh8PR8x747bffnpeXd/78+WBMG/6IEANz7NixOXPmiEhsbOzll6D9qqmpcbvdY8eOtVIcOXJkdnb2p59+GqhRB4tBGeHGjSY52Rw6pD3H0LNlyxZrCTpx4sQPP/wwIPfZ1tZWWFh45513WilGRkZmZWVt3769q6srIPcf/gZlhH/+sxExFRXacwwlvZeg3/72t69iCdovr9frdDrj4uKsR5k6dWpOTs65c+cC/kDhhgjRv08//fSrX/1qQJag/aqtrfV4PDfeeKOV4ogRI5xOZ8V1/csmQvRj69ato0aNspagH3zwQWge1FqjZmZm9qxRMzMzi4qKrss1KhHiknovQRcvXnz69OnQz1BaWup0OuPj460xJk+e7PF4grEYVjRoIrTZjMglL488oj3fdef48eNz5861Ps3zeDy6m6D6+nqPxzNx4kQrxeHDhzudzv379yuOFECBifDuu42ISU7u+9qpU42Isdn6uImIyc7u4yaPPnrxTZ56yjz6aPdl/nwjYh5+2Dz6qElI6L6fe+4xW7YYvz8g/6Ch7tVXX7WWoBMmTNizZ4/2ON06Ojo2bdo0b948K8WIiIifb9pU0tzcqT3YNdKPMD7e1NVdfJN/j7C33svRTz4xLpcZObL73tLSjNttOPLpqnV0dLhcroiICBFZtGiRyhK0XxUVFdnZ2SMSEu7bu9fm9S4qL99w8uSZjg7tua6S/qFMPp/8/vdXf/PJk8XjkcpKycuTadOkpkZWr5bx4+Whh+SDDwI35dBQWVk5b968p59+OioqyuPx9LwlE27S09PXrl17vKrqO6mpabGxVW1tz1RXZ5WX/09l5VGfT3u6AVOO8I47RESee06amq7pfoYPF6dT9u+X7dvFbhe/XzZtkjvukNmz5S9/kV4HJ+KSioqKZs2atWfPngkTJrz33ns928OwNfKGG5YkJ786Y8Zzkyd/PSHB19W1uaHhoQMHlh88WNzU1GmM9oBXSjnCr39dpk6Vs2clNzcA9xYRIZmZUlgoBw+KyyWjR0tpqSxdKhMmyKpVUl0dgIe4Lvn9/lWrVi1evLipqemBBx7Yu3fvHdaz42AQKTJnxIj/veWWzTNmLE1JGR4Vte/cuVVHj2aVl+fX1DT7/doDXoGALGqv+jXhihXdL/BGjzYtLZ9fe/nXhFfI5zMFBea227pfLsbEGLvd7Np1Tfd5/amsrLR2GRs2bJj6u6DX7rzfv7mhwb5/v83rtXm9d5SW/vexYwfDe9dw5S1he7s88ohMnCinT8sf/hDgO4+LkyVL5J//lJISsdulq0s2bZKvfU1mz5b8fBmErx0C77XXXps1a9b7779/44037ty5M/yXoP36QlTUg4mJG6dPf27y5MxRo/wib5w69V8VFY6Kiv87
dcofnmvUgKTc81bnZS59bgmXLzfGmGef7d6QXrjQfW1AtoQXqa42brdJTOyeJynJuFxmyB5W2tHR4Xa7IyMjReT+++8/deqU9kRBcaK1NbeqakFZmbVh/I99+3KrqurC7NxTYRGhz2eSk42Iyc3tvjYYEVpaW01BgcnI6J4qKspkZZmhdlhp7yWo2+3u7Bzsn7T143xn5+aGhoc++shKcW5pqevIkbKzZ7Xn6hbI5eiYMVJe3sfl5pv7uWFcnKxcKSLym99Ie3sAJ+pDbKwsWSJlZeL1isMhkZHy+uuycKF8+cuSny8XLgT30cPB66+/3rME3bFjxy9/+Utre3gd+0Jk5IOJiS9Pm/bHqVMzR43qEilualp+8KCjomJLY2NbV5fyfAFJ+arfmLG2hMaYlhYzapQRMevXGxPMLeFFamqMx2PGjeveMI4cabKzzbFjQX9cFb2XoFlZWY2NjdoT6ahrb8+rrr7nszXqwn37cquqavXO6BcuT4HDh8uPfywi4vFIZ6eE7N2B1FRxueToUSkslDvvlOZmyc2VL31J7r9fioslPF/GX52qqqr58+evXr06MjLS7Xb/7W9/6znt0lCTFB3tTEt7c+bM1TfdNCU+/nRHR0Ft7aL9+1cdPfqPlpbQzxMuEYrIT34iN9wgR47Ixo3S6wTcoRATI3a77N4tXq84nRIT071GvfVWWbtWzp8P6TDBUFxcPHv27N27d48fP36ILEH7FRMR8a0xY16cNu35W2/9zzFjRKS4qWnFoUPfq6jY0tjYGsI1aoQJxLP9/Pmyc6ckJ0ttbR/XpqfLwYNis4nXe/FNli+XP/7x87/82c/kd7+TadNk8WJ56qmLbxIydXWyYYOsWycnToiIjBgh3/mOPPaYpKcrDHON/H7/k08++cQTT3R1dWVmZr7wwgvJycnaQ4Wjxo6OLQ0Nmxoamvx+ERkeFfWtMWO+l5ycGhPT8zM7mpt/duRIv3f17qxZw6Oirvyhw+vp8Kc/ldhYOXBA9uzp41pjpLk5FGMkJ4vLJUeOSGGhZGZKS4vk58uKFbsWLlz42muvBeRpKzSqqqoWLFjQswTdtm0bBV5KYnS0My3tjZkzPZMmzbzhhrOdnRvr6xeVlz92+PA/WlqC+ysPyCvLa39jpscPfmBETEREHzcpLjZxccbhMPv2BWTqK+X1mu9/30yceJf1P5aenv7MM8+09N7BJywVFxdbyY0bN66kpER7nEFm37lzPz96dG5pqfXmzUt1dcaYd5uarD9urKurbmu71GWgH/iEXYTHjplhw/r+aHH16u44Rcz8+eaVV0woD15pamrKyckZFMeV+v3+nndB77333pMnT2pPNFidam/fcPLk/eXl1uf7PRFuC+i+DWEXoTFmyZK+IzTGHDpkXK7uDzNETGqqcbtNfX0g/g1XprOzs6ioKDMzs2f3rrvuuquwsNAfNkcT19bWWqdmiYqKGgofxIdAz/9gWEcYYi0tJi/PTJ/enWJsrLHbzfvvh3SGjz/+ODs7+4tf/KKV4qRJkzwej/rOX2+//XZKSoqIJCcnc3r5gCPCPpSUGLvdREV9vuUsKDCh3DGwubk5Jyfn5s/2CYqLi3M4HPtC/JrVGPOvS9B77rmHJWgwEOElHTliXC4zenR3iikpxuUyJ06EboDOzs7t27dnZWVdtEbtCNVr1rq6uoULF7IEDTYi7Id19ODMmf9y9GCIV2SffPKJy+WyThQvImlpaW63O9jf9f3OO+9YS9CkpKS33norqI81xPVE+EJt7TGfr8/LVez+FpgP68PKrl2Smytbt4p1ULXNJk6nOBzy2akrg+7s2bMvvfTS2rVrDxw4ICKxsbEPPPDAypUrrTMIBlBnZ+cTTzzx5JNPdnZ2Lliw4K9//WtqampgHwK9XcmH9XOGD39uypQB3e11GKGlpkby82XdOmlsFBFJSpJly2TFCpkwIUQDGGPefvvt/Pz8LVu2dHZ2iojNZsvOzv7ud78bHYi9
8urr6x0Ox1tvvRUZGfn444+vWbMmaiB7aeAqBCnC62c52qfWVlNYaObO7V6jRkYqHD14+PBhl8s1evRo6z88JSXF5XJVVVVdy32+++671kYvKSlp27ZtgRoVl8drwmvi9RqHw0RHd9c4a5bJyzOhPPOIz+crKCi47bbbrBRjYmLsdvuugZ/xpqury+PxWBu9+fPn19TUBGNa9IkIA+DkyX85ejAhQeHowZKSErvdPmzYMKtGm82Wl5d3oefEHpdVX1//jW98Q0QiIiJcLlf47CEwRBBhwLS1mcJCc9ddn69RMzNNUZEJ5XnGqqur3W53YmKilWJSUpLL5Tp+/PhlbrJjxw5rCTp27Ni///3vIRsVPYgw8Lxe43Sa+PjuGqdMMTk5JpRnHrlw4cL69eszMjKsFKOjox9++OF//wbc3kvQu+++u7q6OnQjohciDJbaWvOrX5m0tM/PcLFyZdfhw0dCOYPX63U4HNa7pmvWrOl9FUvQ8EGEweX3m6Iik5lpRMxXvlJrfStliPfMPnHixC9+8Yu6Xt+Ps3PnzrS0NGsJ+uabb4ZsEvSJCEOktNQ8/vgLPd+cPmXKlNzc3DNnzoR4jK6urpycHGvbOG/ePJag1zEi7Jt19OBNN91kpWgdPVheXh6aR29oaPjmN79pLUGzs7Pbw+xktQgsIrycSx09GNQ9s3fu3Dlu3DgRSUxMfOONN4L3QAgTRHhF+jx6MODn7bxoCXqNe9VgsCDCAThz5kxeXl76ZyddC+zRgw0NDffddx9L0CGICAfs348etNlsBQUF17JG/fDDD3uWoLwLOtRct0dRhMChQ4f+9Kc/5efnNzU1iUhqaqrT6fzhD384duzYgd7V4cOHbTZbenr6yy+/3PNuEIYIIrxW1tGDubm5H330kXx29OBjjz020C+7LSsrmzFjRs8+pRg6iDBgdu3alZubG6SjB3EdI8IAO3r0aH5+/vr160+fPi0iKSkpS5cu/dGPfjR+/Hjt0RCmiDAoWltbCwsLf/vb35aXl4tITEzMokWLnE6ndUZQoDciDC5rjbp161a/3y8iNpvN6XQ6HI74kJ3xBmGPCEOhpqYmPz9/3bp1jY2NIpKUlLRs2bIVK1ZMCNkZbxDGiDB0fD7fiy+++Oyzz5aVlYlIdHS03W5//vnn+arAIY5ff+jEx8cvX75879691tGDItLe3k6BYEuoprq62ufz3XLLLdqDQBkRAspYCwHKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQRoSAMiIElBEhoIwIAWVECCgjQkAZEQLKiBBQ9v/jNXabAl8XlgAAAABJRU5ErkJggg==\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAOoklEQVR4nO3dX0yV9x3H8c9BEB1qlWpr7Sy1xf/T1rZEpmuNkcXWsCaNw2
QXtNni4GIbspnKms3gRZvS1U2w0UTNTE67LSvuYkNTTJxGY/FPMzUdGnH+QW2rJVVkKCAI/HbBqToK8u885/s8+H6Fi3r6nOd8ld/7nPOcvyHnnADYibMeALjfESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGIu3HmCgjh07tnXr1sbGxqysrBdffNF6HMRIbW3thg0bTp06lZaWlpubO2zYsO62rKmp2bhx49mzZ+fPn798+fKEhIRYztkrLsh27dp197/punXrrCdCLNTV1aWmpt7+vb/wwgutra1dbllTUzNx4sTbWy5ZsqS9vT3G0/Yo2BGmp6fffYUycuTImzdvWg8Fz61du7bTbUlZWVmXW65evbrTlnv37o3xtD0K9jFhdXX13X+8fv361atXrYZBzHT6vXd5Sofz58/3cktDwY6w0y1hSkrK+PHjrYZBzHT6vUuaO3dul1t2Oj0UCnW3pSXrm+IBOXv2bEpKSsdfZNSoUbt377aeCLHQ2tq6dOnS22u4oKCguy2bm5tvP1wXCoXefPPNWM7ZSyHnnEH60dPY2FheXt7U1LR48eJx48ZZj4PYqaioqKqqSktLmz179j02c87t27fv3Llz6enpM2bMiNl4vRf4CIGgC/YxITAIECFgjAgBY0QIGBuEEe7fr4wM/fnP1nPgPhCVxTYII6yp0e7d8t/rIjAIRWWxDcIIgWAhQsAYEQLGiBCw1tcXm27b5iQnuaee6nab3NzINitW9GfPksvP73qb8vIu9rxixZ0zdvezeHEfJkEg9HvBuAEsYy8Wm08/3mLLFq1ereTkXm08bpyefPLOHxsa9OWXSk7WmDF3TpwwIcoTwlf6tGAGwovF5tO7ow0Neu+93m78m9/ozJk7Px1n/OUv/+/ErVu9Gxb2+rRgBsKLxebHCDs+s+e999TYaD0KgiDoC8aPEb78siRdvaotW6xHQRAEfcH4McLvf18dH5D1hz+otdV6Gvhe0BeMHyO8eVO/+pUkXbyov/zFehr4XtAXjB8jbGrST38aeaTrd79TX9/6/8Mfyjn99rdejAY/GuCCGYioLDY/RtjWpqQk/exnknTihLZvtx4I/hb0BdP/5wk/+0zLl3f9vyoq+r3XO37xC61dq6YmvfNO5MgbuIf+LRivl3Fv9D/C2lr98Y9RnKSzceP0k59owwYdOKD9+/X88x5eFgLk5k11+cUT/VswXi/j3vDj3dHbVq7UkCGSVFRkPQp8wDlt26apU1Ve3vUGAV0w/Y/wqafkXNc/ubnRGW7SJC1bJkkffaR//zs6+0RAHT6s735Xy5bp4sVu38nejwUTg2XcI1/fEkpatSryH++8YzoH7Fy+rNxczZunw4f1yCPatEnvv9/txkFcMH6P8OmntXixJH34oaqrI3c2cJ9oaVFJiaZN0+bNio9XXp6qqpSTo7jul20QF4zfI5RUUCBJbW36/e+7PiLvn+pq5eaqpiZqO0R0bd+u6dOVn6/6emVm6uRJlZRo1Kiez+jRgvFOACJcuFBpaZK0dataWqK221//Wps3a8oUvf
tuNHeLgTt5Ui+9pJdf1rlzmjZN5eXavl1PPNHbs3u0YLwTgAj19XVbU5P++teo7fOtt5SVpfp6rVqlmTO1bVvU9ox+q63VihWaNUs7dyo5WcXFqqxUP74E3YsF451gRPjKK5o8WZJKS6O2z9RUlZbqn//UrFk6c0bLlmnRIlVWRm3/6JPWVm3erKlTtX69QiHl5OjUKa1Yofh+PZPtxYLxjkGE5eX6wQ/08MNKSNDo0Zo3T+++28M7weLi9PrrklRfH+VhFi3S0aPatEljx2rPHj3zjHJz9dVXUb4U3Nvu3ZozR7m5unJFixbp2LHIb6TfvFswXoh1hPn5WrJEO3boq6+UnKyGBh08qFWr9Oyzqqu71xlffVUefQlvfHzkejcvT5I2b9a0aSopCeSbYgLn9GktW6aMDB0/rsmTI/dNvvOdKOzZuwUTdTGN8MMPVVIiSa+/rro61dSoqUnvv6/hw1VVpXD4XudNTFR+voezJSerpCRyBFJbq/x8zZrV7SszMHA3bmjNGs2apW3bNGKECgtVWamsrKjt3+sFE019+1yogUlPd5JburTz6W+/7SSXmOiam2M5TrfKytwTT0Q+OSsz0505Yz3Q4NLW5sJh9/DDTnJxcS472335pfVMpmIa4WuvuYUL3c6dnU+vqIis+EuXYjnOvTQ3u+JiN2qUk1xCgsvLc//9r/VMg8KhQ27u3Mive+5cd/Cg9UA+ENMIu/OPf0TWekuL9Sj/79Ill5Pj4uKc5B55xG3a5NrarGcKrM8+c9nZLhRykvv2t1047NrbrWfyB/sI29vdokVOcllZ1qN041//cvPnR668n33W7d9vPVDQNDS4oiI3YoST3Le+5QoK3PXr1jP5iX2Ea9Y4ySUl+frQq73dlZa6lBQnuVDIZWW58+etZwqIsrLIv1vHATb/bt9kHOHq1ZE7on//u+0gvdLQ4AoL3fDhkWv0wkLX2Gg9k48dOeK+971Ifs88wz2IbplF2NLiXnstspo/+shqiv7g2KZHHcfSQ4Y4yY0d64qLXWur9Uw+ZhPh1atuwYLIox2HDpmMMFB797qnn45czS9Y4I4dsx7IH1paeFS5zwwiPH/epaY6yaWn++g5iX7g+a5Oysrck0/y/GqfxTrCCxfcpElOcq+84pqaYnzhnrh2zRUUuMREJ7nRo11Rkbt503qmmDt50r30UiS/qVMDdnxhLqYRXrsWuaZ89dXBdpBw6pTLzIyswsmTXWmp9UCxUlvr8vJcfLyT3JgxrrjY3bplPVPQxDTCH/0o8rWMfntSPlp27XIzZ0ZSzMhwx49bD+SlW7durV+/ft68a5KLj3c//7m7etV6pmAKuVh9aPh//qOpUyV1/kbFu739djRfwmvi1i1t3Kg1a1RXp4QE/fjHeuutAb0rx5/27NmTn59fWVk5bVr2xInhdetCM2dazxRcMcv94MGev2d4y5aYjeOtmpo7j9FPmNC+ZcufWgfL/e/Tp09nfX1NmZqaWnr/3PP2jP0rZgaxY8fcggVuwYJDkqZPn77zmy9dD5QbN24UFhYmJiZKSkpKKiwsbBocj61ZI0LP/e1vZSkpKR03HVlZWdXV1dYT9Vl7e3s4HB4/frykUCiUnZ19+fJl66EGDyKMhebm5uLi4pEjR0oaOnRoXl5efX299VC9dfjw4fT09I4rkbS0tAMHDlhPNNgQYex88cUXOTk5cXFxkiZMmLBp06Y2f78z6vPPP8/Ozg6FQpIeffTRcDjcziv0PECEsfbJJ5/Mmzev44blueeeq6iosJ6oC42NjUVFRSNGjJA0fPjwgoKCAN10Bw4RGmhvby8tLX3sscc6DrGysrIuXLhgPdQdZWVljz/+eMfVRGZmZhAPYoOFCM10PNg4bNgw/zzYeOTIkee//l6/OXPm7Nu3z3ae+wQRGrt48WJ2dnbHup84cWI4HDYZ48qVK3l5eUOGDJH04IMPFhcXD5onNv2PCH1hz549s2fP7khx4cKFn376ac
wuuqWlpbi4+IEHHpCUkJCQl5dXV1cXs0uHI0L/aGtrC4fDDz30kKS4uLjs7OyamhqvL3TXrl0zZszoiD8jI+PEiRNeXyK+iQj9pba2tqCgYOjQoZJGjx5dVFTU7M2HsVZVVS1ZsqQjvylTpuzYscOLS0FvEKEfeVrI3Z2PGTPGu87RS0ToX1G/r2hyjxc9IkJfi+KjJoaP/eDeiDAABvj8gU+eBUF3iDAwjh492tdn0n34egB8ExEGTC9fU+bzV8bhbkQYPD2+ujoQrxHHbUQYVF2+zyhw75aCi+UHPcELH3/8cX5+/pEjRySlpqZeunSpsbFx2LBhK1eufOONN5KSkqwHRM+IMPCccx988EFBQcHYsWOPHz+emZm5fv36SZMmWc+F3iLCQaK+vr62tra+vv72k4EICiIEjMVZDwDc74gQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMPY/IGmHt1Jh6UYAAAAASUVORK5CYII=\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAJ/0lEQVR4nO3df2hVBR/H8c/d3MbEObdyk9KaIoW6tgIh0Db6oRgOY2ij/giGlf14LKhmMKMwR1HqrMwgnVkiQYUyqWSQP2AR/vhjQYomGDUrJRc1mcHcnO4+f3ge76PPNq967/mcR98v/OPs3nPv+Qp7c88999yzWDweFwCfDPcAwPWOCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAjAgBMyIEzIgQMCNCwIwIATMiBMyIEDAjQsCMCAEzIgTMiBAwI0LAbJh7AOAC3d3d7e3tsVhs4sSJ2dnZ7nHCwCshomLv3r2zZ8/Oz88vLS2dMmVKfn5+TU3NwYMH3XOlXSwej7tnALR8+fLFixf39/dfdHt2dnZTU1Ntba1lqnAQIfw2bNgwf/78we7NyMhoaWmZNWtWmCOFiQhh1tXVNX78+BMnTgyxTklJyeHDh7OyskKbKky8J4TZpk2bhi5Q
0pEjR7Zv3x7OPOEjQpjt3r07mdX27NmT7klciBBmnZ2dyazW1dWV7klciBBmo0ePTma1wsLCdE/iQoQwq6ysTGa1ioqKdE/iwtFRmHV3d0+YMKGjo2OIdSZNmnTgwIGMjGvzNePa/F/h/8jw4cPXrFkTi8UGWyE7O3vdunXXaoEiQkRBdXX1+vXrc3Jy/veuvLy8zZs3T58+PfypQkOEiIT58+fv27evtra2oKDg3C3FxcULFy48ePDgnDlzvLOlG+8JETmdnZ0ZGRmjRo1yDxISIgTM2B0FrlZjo2IxxWL64IMreTgRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGWfMAMmaMUM7d17JA6uqtHXroPfySgiYcRl8IFmFhSouHuD27m79848k5eVp+PABVvjPN0MGxu4ocLUaG/Xyy5K0erWee+6yH87uKGxKS4PznmMxDX197fr6YLXS0qGeZ+zYS2xxzJihnseFCBEJ27Zp/373ECZEiKhYudI9gQkRIio++0zHjrmHcEj26Oj+/fubm5vTOso1YOTIZ06eHOOeItJycrR48cU3FhToxAn19en997VsmWMsq2Qj3Ldv39KlS9M6yjVg6tR/tbW5h4i2ESMGiLCmRk1NkrR2rV59VXl54c/lxO4o/CoqNH68JHV16aOP3NOEjgjhd/KkXnghWF61SmfOWKe5fIsWKR5XPH4lHxKKCBEF3d16/HGdu8Thr79q82b3QOFK9j1heXn5kiVL0jrKNWDkyP6qKvcQ0TbQVbbV26sRI/T008FRmcZGPfpoyHM5JRthWVlZWVlZWkfBdevcqZPPP6933lFfn77/Xq2tuvde81ShYXcUUXHzzYkXwMbGK3mGY8cS58EN+G/IP/1kQ4SIkLq6YKGlRYcOWUcJEREiQsrL9cADkhSPX0dnsfF9QkRLXV3w7fVPP9Wbbw78/b3BFBXpm2+GWmHGDP3991WNlw5EiGh58EFNnqwff1Rvr1av1htvXMZjs7J0551DrTAskr/v7I4iWmIxvfRSsPzhh+ru9ozR0aFlyzRzpsaOVW6ucnN100267z698ooOHEjxtogQkfPYY8FeaGenPvlEksL8U9lnz2rJEt16q+rrtWOHjh1TT496evTHH2pt1Vtv6Y479NBDOno0ZVskQkROTo4WLgyW331X/f3Kygpp02fPau5cNTSotzdx47Bhysy8YLWvv9a0aWpvT81GiRBR9Oyzys2VpJ9/1pYtwXIIXn9dX30VLOfna8UKtberr09nzui337R2rcaNC+79/XfV1qZmo0SIKLrxxsSveGPjJa5WlipHj2rFimC5uFhtbVq0SCUlwS3jxumpp/TDD5o8Objlu+/U2pqC7RIhIurFFxWLSdLevfrppzC2uHFjYi901SpNnDjAOoWFeu+9xI9DXNI3eUSIiLrtNs2ZEyx//nkYWzx/de2CAs2bN+hq99+f2D3+5ZcUbDeSn5sAkqS6uuAdWjjXnqmrU3W1Ojo0atRQnyhmZio/X6dOSVJPTwq2S4SIrspKTZ2q0K4YMnt2Uqv19OjPP4Pl88dprga7o4i086d0R0dzs/r7g+XKyhQ8IREi0h5+WLfc4h7iv5w6pYaGYLmwUHPnpuA5+VsUwGV44gl9/HGw3NSkBQtS8Jy8EgLJeu21RIFVVXryydQ8La+EQFLq6xMXJi4t1a5dGjkyNc/M0VHgEk6f1oIF2rgx+HHKFO3cmbICRYTA0P76S9XV2rUr+LGiQlu26IYbUrkJ3hMCgzp0SHffnSiwtlY7dqS4QBEhMJgdOzRtWnBiWmamVq7Uhg3Kzk79htgdBQbQ3KxHHgkuyF9QoC++0MyZ6doWR0eBi335pWpq1NcnSRMmqKVFt9+exs0RIXCBrVs1b55On5ak8nJt26aiovRukQiBhLY23XNP8K3CsjK1tobxfWIiBAJdXbrrruDKMaNHq60tpNNWOToKBBoaEtduevvt8E4c55UQkKTjx1VSkri8
RVFRcHGNS/r226s9bMNHFIAkHT9+wWUOz39t95LOHUS9GuyOAmbsjgJmvBICZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmZECJgRIWBGhIAZEQJmRAiYESFgRoSAGRECZkQImBEhYEaEgBkRAmb/Boxn28bYOvj0AAAAAElFTkSuQmCC\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAQSElEQVR4nO3deWyUZR7A8ad3pxf0slzlEgouhEuqHIkooqA2LsaUSNhm/9DMgiQdSNYMaJZJZFcnZs0WNFkHEggmahyRjcUjcVYWUpFLudJw1AK2lBZaDmkLlJaZZ/94m2kpBTrnb6Z8P+EPMp155+nwfNt33veZlxittQIgJ1Z6AMCDjggBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESGglFKNjY3PP/98Xl6exWLxeDzhfOoYrXU4nw+ITMuWLfvoo4+MvzudzuLi4rA9df/8TdjW1uZ2u6VHgWjS1NTU69/DoL9FWFdXZ7FYxo4d++STT1oslmvXrkmPCNFhxYoVqampSqkxY8YsWrQonE/df3ZHz58//+67727YsKGtrS02NlYp5fF4xo4du3Hjxjlz5kiPDlGgqanpxIkThYWFycnJYX1iHf0uXrxotVpTUlKUUjExMUVFRUeOHDl06NC0adOMW0pKSi5duiQ9TKB30R1hc3Oz3W4fMGCA8QNl3rx5Bw8e9H61vb3dbrcnJSUppQYPHvzll18KDhW4m2iNsKWlxW63Dxw40JvfgQMHer1nVVWVd3e0uLj4woULYR5qRGlpaamrq6uvr5ceSD9UXa337fPngdEXYWtra1lZ2UMPPWR0NXv27F27dt37IR6Px+FwpKenK6UyMzMdDkd4hhpR2traHA7HoEGDZs2alZiYaLVa29rapA
fVr2zcqP/yF38eGE0ReqeRN78ffvih7w8/c+bMs88+azz2ueeeq6mpCd1QI0pbW9v69esHDx5sfO/Dhw+PiYlRSk2cOHHv3r3So+s/+nmEN2/edDgcQ4YMMabRjBkzysvL/duU0+nMyclRSqWkpNjtdrfbHdyhRpT29vYtW7aMHj3aeN0mTZrkdDq11j/++OP48eOVUrGxsWazuaWlRXqk4dPRoTdv1s3NXbdYLFopvXhx1y0tLVopvXbtbXf49deuOxw40HWHX37ReXk6L09nZGiTqfPvmzb5MKRIj9CYRqNGjeoxjQJx/vz5kpISY4OzZs06duxYUIYaUdxut9PpHDNmjPFtTpgwwel0ejwe7x1u3Lhhs9kSExOVUqNGjfr+++8FRxsebrd2OnVBgVZK//3vXbcbjcXF6dOnO2/xKUKvfvibsMc0mjhxYo9pFKDt27cPGzZMKZWcnGyz2W7evBmsLcsyXreCggLjdXvkkUe2bNly69atXu985MiR6dOne49aXbx4McyjDQ+3W3/+uf7DH7RSWildUKC3bu36qsWihwzReXl6+fLOW4iw92kUiv3G33//3Ww2G2+QJk2atH///qA/RTh5PJ7y8vLJkycbr9vIkSMdDsfd8vPq6OgoKyszFovk5eUFvqMRaVwuPW1aZ34jRmiHQ3d03HYHi0Xn5Oh//EObTLqxUesHPMIe02jUqFF9mUYB2rVrlxF8fHx8aWlpa2trSJ8uRFwul7E4wTj04nA4OnrMtXuqrq6eO3eu8fCioqK6urrQDTVsXC5dWNiZX36+LivTvR4Ptlh0Wpq+ckWnpem//U3ru0S4c6c+c6bzT3l5LxH6LdAI3W73Bx98sHTpUpfLFeCmuk+jESNG+DqNAnH9+nWr1RoXF6eUevjhh3066CrO5XIVFhYar1t+fn5ZWZl/5x6MEzkZGRlKqYEDBzocjiDu/IdZRYWeM6czv9xcbbfrGzfuemeLRaemaq31ypU6K0u3tvYe4Z1/IiXCd955x/jnj4uL+/nnn/3bSLCmUQ/79u3z6Z1e1K10q6io8K5DyM3NtdvtN+4x1/qmvr7+pZdeMrb5xBNPVFVVBWWoYbN7t547tzOSnBxtt+vr1+/zEG+EtbU6Pl7/61+9R/jvf+v//Kfzzz//GUkRLliwwLsMdf369b4+PBTTyFBXVzdgwIAJEyb4dCqsx0q3bdu2BWUwQbd79+6nn37aeN1ycnLsdvu1a9eCuH2n05mbm6uUMplMdrs91O8IgmLPHl1U1JlfVpa22fTVq316oDdCrfWf/qTz8/Xly/68J/RboBGuW7fOmApJSUmVlZV9f+Du3bu9b0KMaXT9vj+yfHHo0CHjnV5cXNzKlSt9eqdXWVk5Y8YM7zHDRuPdemTYu3dvUVGRMbasrCybzXa1j3PNR5cvXzabzcYTTZ06tfui3Ehz9KguLtYxMVopnZ6urVZ95YoPD+8e4dGjWim9YUNURai1/uyzz9588827Ld280549e8IzjQI5FeZ2ux0OR1pamoqYlW5Hjx4tLi42juWmpaVZrdYrPs01v3zzzTfDhw9XSiUkJETgSrfKyq78UlO11aovX/Z5I90j1FovWKAnTNDp6VEVYd91n0bp6enhmUaBnAqLkJVulZWV3tctNTXVarVe9mOu+evq1aulpaXGRzQjZ6XbsWO6pETHxWmldEqKLi3V58/7s52TJ/X48TolpeuWHTt6HnfpJxHKTqMAT4U5nc7s7GwlsdLt+PHjJSUlxmHblJSU0tLShoaGsD17d5Gz0q2qquqvfz0dG6uV0snJesUKP/M7dUr/+c86Pl4rpePjb/uScWKj/0R4+vTpRYsWGT9HU1JS3njjjaamplA/aa8CORXWfaXb7Nmzw7DS7cyZM2azOT4+XimVmJhoNpvPnTsX6ie9N/GVbjU1NcZrMnLksyaTNpu1f6cza2t1aalOStJK6YQEXVKiT50K9lh9EfIIq6qq4uPjjWkk/j
G2AE+FlZeXh2Glm3eqGe/EzGbz2bNnQ/FE/hFZ6VZbW7t06VKj/4SEhNdee62mpvn+D7vDhQvaatXJyVopHRuri4tv+/0mJRy7ox9//HFErcDocSrs5MmTfX9sSFe6nT17trS01LjASWxsbHFxcXV1dRC3HyzG7r1xPZFBgwZ98cUXoXuuxsZGq9VqMpm8r4l/py6bmrTVqk2mrvxOnAj6YP0UWcvWwimQU2FBX+l251Tz6UeDiOrq6qeeeip0K92ampq8lw4yXpPjx4/7sZ2LF7XNpjMytFI6JkYXFelDh4I70kD5HOH169fXrFlTUFCQlJSUn5+/ZMmSU7fvUL/wwgvZ2dnZ2dlDhw4N3jhDIpBTYcFa6XbnVaoOHz7s36bCL0Qr3S5dumSz2YzNGq+Jf2cpm5u13a4HDOg82jlvnvZ3TVdo+Rah2+02fvjNnz//vffes1gsaWlpubm5v3bbs7506VJDQ0NDQ8N5/w5ahV0gp8IOHjw4depUY66YzWafTnje+ypVUaS+vn7hwoVBWenW90sH3W872m7XmZld+UXyJ2R8i3Dz5s1KqaVLl3pvOXz4sMlkevHFF4M9sLBqbW21Wq3GIdwwrHQL1lSLKAGudGttbbXb7VlZWd5D0Pe9dFCvrl3TZWU6L68zv9mz9f/+58dmwsq3COfPn6+U6nGw7rvvvouo4y5+C+RUWB9Xut15laqdO3cGafjyuu/ez5gxo4/LGO+8dNCOHTv8ePabN7XDoQcP7sxv1iz93//6sRkBvkU4aNCgIUOGhGgokSB0K90CvEpVFOn77n2wLh3U3q4dDj10aGd+jz+u/b0CkQzfIkxISJg8eXKIhhI5Alzp9swzz3Rf6RbEq1RFi/uudAvWpYPa29s3bNjwxz/+YuQ3bZrevj0Y30B4+RZhYmLiuHHjQjSUiBLISjePx7Np06bMzExjjZ7xTkkpNX369G+//TZ0Y440FRUVve7enzt3buTIkcZrMmXKlK+++sqPY6rGNVDGjh2rlMrKKnj88Vvbtuko/RCybxEOHz48MzMzREOJQIGvdMvOzs7JybnzYmcPCGP3PiEhocfu/cyZM/2+dJCR37hx44x/l/Hjx9/jSlZRwbcIX375ZaXUodtPdn766adr1qwJ1odxI02Ap8Lq6+urqqoewPy6O3z4cI/d+4aGBj/yMy5BNGXKFGNTxpWswnYNlNDxLcKvv/5aKfXKK694b6mpqcnNzZ05c2awBxZZov2iD+I6OjrsdruxIs+/lW4ul+vRRx81/gn8uJJVJPN5xcySJUuUUnPnzn3//fetVmt2dnZWVtavkbAMNvSi8aIPEcW/lW4ul+uxxx4zHjVs2LCysrJ+ttvlc4S3bt1at27dpEmTkpOTMzIyFi1a9Ntvv4ViZJGpsbFx8eLFxoRYG6zPkz1I3G73hx9+aPznPHl5efdedltRUeGN1rgEUXCvgRIhHtwF3IHYvn17YWFhOD+X3M+cO3du4cKFNpvtbnf46aef5s2bZ+SXnZ1ts9mam/357FJU6D//XTaijtvtNlbAd7d///61a9caRx/S09Nff/311atXe1fY9ktEiEhRWVn59ttvb926VWudlpa2fPnyVatWeVfY9mPx0gMA1LFjx+x2+yeffOLxeFJTU1999dW33nrLu8K23+M3IYTV1taOHj3a7XabTKZly5atWrXKu8boAUGEkFdSUpKZmbl69Wrvfyf8QCFCQFis9ACABx0RAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEB
BGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEgjAgBYUQICCNCQBgRAsKIEBBGhIAwIgSEESEg7P9nu820aqimUwAAAABJRU5ErkJggg==\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAQYElEQVR4nO3ce1BU5/nA8Qc0KHgHTTJeUBJvERKrpqjVKGmckbRmOqmCZioV00qnibM2005oJ50hmkxDL+lgMr1sYKZh0nQiGieDjXWGENSmCRC05jIGq8Yo4I0KKhfDbZ/fH2e7ILKwwMKTn34/fxn3cN7dzX7P+56zR0JUVQDYCbV+AsCtjggBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDB2c0aYkyO7domIJCZaPxWgJzdnhMD/I0Otn8BAefVVee89KS+3fh5AT27amTA1VbKyZPZs6+eBm4jH4yksLDxz5kxwd3vTRggE3datW1esWHHvvfdWV1cHcbc353L0hz/0/mHfPtPngZuLMwfW1dVdvnx5woQJwdrtgEfo8XiamppEJDw8fKDH8snJkYIC2bFD1q2TN94YtGFvch6PhN7aK6cXXnghMjJy3rx5M2bMCOJuB/ZNLSkpWbp06caNG6dMmbJ9+/a2trYBHa6jK1fkP/8ZtNHaHThw4OTJk6dOnRrUUS9elHXrJC1Ntm0biN1fuya//rUsWCBNTdf9fU6OrF0rIrJu3UAM68cAv9hu3HnnnS+++OL69euDvF8dGCdOnFi9erUzxOjRo50/LFiw4P333x+gER2trfrvf2t2tu7apS6Xrl3rPBn1eAZ0WFXVY8eOJSUliUhcXFxYWJjL5aqrqxvwUR3PPaf79qmqJiXpxYtB3LHHo3/9q06ZoiIqom+9dd2j2dm6cqUeO+Z9nwfJgL3YYNmzp3fbB38mrK+vf/bZZ+Pi4t58882IiIj09PSqqqr8/Pxp06YdOnRoyZIlycnJFRUVQR9XRAoLZf58WbZMrlyRqCgJC5MLF6S2VhYvloULpaRkIMYUEampqdmyZUtcXNzOnTtHjx4dERHR0tLy0ksvxcbG7nJuGhg4e/bIvn1SWSnR
0SIiU6bImTPyxz92nrP6pLRUHnhA1q+XigqZP1/275fvfMf70MWLcuWKiMimTfKHP4iIeDwS1KsV/nV8sWfPDsqQIjdM+/5uCNm/X3bulFdflfr6gHcdxANAW1tbbm7uHXfcISIhISFJSUmnT5/2PdrQ0JCRkTF8+HARGTFiREZGxpdffhmsoY8e1W99y3u0vusufeYZLSrSL77QsDD94AO9804V0dBQ/dGP9L//DdaYqqrNzc1ut9s5Rw8NDU1JSTl37pyqlpaWLly40HmHExISPv7442CO6vC95pgY3bZN//53VdXkZN26VUV0+vReH5A7qKjQlBQNCVERnThR3W5tbfU+1NysWVk6Zoz+9Keana1FRfqzn2lCgmZn68iRmpGhTU3BeHVdOnRIExL0iSfaX+ylS7prlz7xhF66NGCjakODvvSSut3XTfvZ2bpzp6rqypWdt//Tn3q3/6BFWFBQcN999zkfu0WLFn3wwQddbnbixAlnzSYiM2fO3Lt3bz/HvXRJ09M1LExFvB+Ca9c6b1NfrxkZOmyYiui4cZqVpS0t/RxWVbWgoCA2NtZ5LQ899NCRI0c6Puockpw+hw4d6nK5Ll++HIRRVfX8ed20SYcMURGNjNTt27WqSpOT9cc/1l/9SouKNDbWe0BatUqPH+/Vvp33avhwFdHwcE1P16tX2x/Ny9OYGO++v/tdfeWV9oPdhg3ev4+N1XfeCWiswsLCjz76KKBNKyt1wwYNDVURTUxsf7GtrXrXXSqiUVH65z+3HyqCxOPRvDydNk1F9Hvfu+4cJztbv/1t3bJFp07t7yhBiLC8vHzVqlXOZzE6Ojo3N9dzwxnY8ePHq6qqfP/5zjvvzJkzx/mRVatWff75530Yt7lZ3W4dP947y6Wk6Pnz3W1/7Jg+/LD3gzJ3rh482IcxvY4ePfrwww/7DiV5eXn+tqypqXG5XEOGDBGRqKiorKystra2vg/c1OSdhkT0tts0La3rk6KWFnW7NSrKu5nLdV1J/uXk6B13qIiGhOj69VpR0f5QWZkuW+Z992bP9k5FnRQU6D33tOd/6lR3Y7ndbufw1EOHDQ2amamjRrW/lk7HsvJyTUz0jjpvnv7zn92/xt27d48ZMyYxMbGlpyPxe+9pfLx3xwsWaHp6+7Sv3c6EvdWvCC9duuRyuYYOHSoiY8eOzczM9LfCTEhIiIiI6LgEbW5uzsrKcq7ZhIeHp6en9+oyxp49702f7n2DVq7UTz8N9Ad37/Ye2EJC9LHHtKqqJvBBVbW6utoXVWRkZGZmZlMAy68jR4488MADTrR9vzqVn+896ovoihU9v+YLF/Txx72zx+TJF9882OPVqR/8QEU0Pl7/9a/2v6ys1LQ0726ionpYRziLVSeZ8PCuFyaOrVu3Om9IUVFRlxt4PHr5jX/o5Mnel7xmjZ486XfgnTs1Otp3/GipuuBvw0ceecQZ9+jRo/62OXOmi9W4s/Z2pn29IcK9e/U3v9Ff/MLvE+xGHyN0Eho7dqxzMEtLS7twwe/Lrqure/TRR7tcgp49ezYlJSUkJEREJk+enJub2+PQhw8fTkhIEJH4+POzZqn/ScivxkbNzNSRI3XevELn7PSav09KB01NTb6jxm233ZaWlnaxl5fm8vPzo6OjnRPmlJSU891P3B2UlJTs8a325s4NdLXnKCvTxYtbh4R9bVbjsmXa/axz7pzu2NF+JbnjJBQW1sUk5E9VVfuHePJk7fL/an19fUZGhtvt7nIPxcX6jW/o47HFGhKiCxbogQM9j9rY6Kykr4yadPe01owM7XJG2Lt375QpU5KTk7tcknRcjUdEdF6Nd6O+XlX1Jz8JaONO+hJhfn7+3Xff7US1YsWKAK86vPvuu3Fxcb6f+uyzz3wPlZaWxsfHOw89+OCDn3zySZd7OHv27MaNG0NDQ0Vk/Pjx2dm7+3Nq98UXumnT086gM2bMePvtt7vZOD8/PyYmxvfkPw185r2ec3Vq2LBh
vrVD9xNpZWVlWlpaaGho2JAhdYsWaXZ2X0572to++9vh229XER06VDdv1pqepn/nXGjq1Pa1ZTeTkD8HDuh993n38M1vBrpaOXlS16zx/tSkSXp+x37t1QL+889zXB85Pz5rlve7jEC0tWlurvcaXkiIJiX1sJzuxOPR555TP5/cHvQuwkOHDi1fvtz5LM6ePXtPL6+/OfPnmDFjnMnE5XJd/d9xpvvLGI2NjZmZmb5ZyOVy1dbW9mpofzodGm5copSWli5dutTZYM6cOf2/kqSqx48f951Fz5o1a19Xn5SGhobMzMyRI0eKiPOt45UrV/ozaG1t+xWsyEjNyvKbc3GxLl7cfi7Un5Nn55PtnLc7J3TdvIi6uj7OQjcqLLzu4lSPR5DCQp0717t9p9V4gH7/e/35zzU7uy/X/AKNsKqqKi0treMFhh7Pa/05d+6cc3QXkYkTJ3a8kHPjZYzW1ta8vLxp06b5ruIc7+Xlvh61tLR0eWioqKjwLZXHjx/vPJkgjltQUHDPPff4Xtep/x14PR5PXl6es3Dtz4WrLpWX68qVfq9inD7dvoycNEnd7t5NQv5UV7efVcbHd71Nba33mlBoqG7YoJWV/R20uVl/+1vvWnr4cP3d77rerLJSV63yviExMZqXNxj3dXQSaIQbNmwQkWHDhj399NNBudT+4YcfLlq0yPmcLV++vOMlssOHDy9ZssR5yHeb7Pz58/fv39//cf05f/58amqqk9ykSZNWr17t3OwaHh7+zDPPDNC9L87SYNSoUc5AGRkZBw4c8L0t999//8H+TEP+5eV5b4L5+tf1lVc0OVlVde1aXbFCRXTECN22TRsagjzo4cO6ZInu2OF3g5QUXbhQg3tL1dmz3v6zs7veoLZWx4/XESO6u4Y00AKN8PTp04899tjJPpwZ+OfxeHJzc2+//Xbf1Z3q6mrfo85ljKSkpIkTJ7rd7uDOQv6UlZUtXrxYRKZOnercbxDEWcifM2fOrHXuxRBxFgjR0dGvv/76jd/0BFFDg/7yl/r++9fdelZWpqmp2uG7pCDz94Kc05qgZ+9TVtbFlO47l3r3XfV/VXEwDNS9o4GrqanZvHmz8z3HhAkT3upwe+LVq1evXbtW71x4GixtbW2vvfZaSUlJcXHxYI5bVFSUmJj41FNPPf/8842NjYM2bqf7bAdnRN/cq6pFRfr97+tf/qIDeqetyaABsv/3hOPGjXv55ZeffPLJLVu2FBQUREZG+h5y1mmDLDQ0NPi3yQcgISHB+epl8Pnusx00Hf+NS0KClJdLaurNOWggQlTV+jm0Ky4u9p0RYXDk5Mj06RITIzNnBuWu74BGHDdODh6UCxcG7197mgwaIPuZsCMKHHy+30IwOAU6Bn/utRo0EF+tCHHr2LxZZs68JQbt0VdrOQrcgm7t3xkCI/yK9I6IELeQr2b8nBPCBr8i3YcIYSM1VdasMYjwKxg/EeLWYhV/N4gQBvgV6R3xFQVgjKujgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQ
MEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFjRAgYI0LAGBECxogQMEaEgDEiBIwRIWCMCAFj/we3FY6gRVFWgAAAAABJRU5ErkJggg==\n","text/plain":[""]},"metadata":{"tags":[]}}]},{"cell_type":"markdown","metadata":{"id":"2R5K7Y5hedbW","colab_type":"text"},"source":["Finally, we can compare generated molecules with our training data via a [similarity search](https://medium.com/gsi-technology/rdkit-for-newbies-3697e617521f) with Tanimoto similarity. This gives an indication of how \"original\" the generated samples are, versus simply producing samples that are extremely similar to molecules the model has already seen. We have to keep in mind that QM9 contains *all* stable small molecules with up to 9 heavy atoms (CONF). So anything new we generate either exists in the full QM9 dataset, or else will not obey the charge neutrality and stability criteria used to generated QM9."]},{"cell_type":"code","metadata":{"id":"RE_vIKDke3Vd","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946559,"user_tz":240,"elapsed":1130347,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["from rdkit.Chem.Fingerprints.FingerprintMols import FingerprintMol\n","from rdkit.DataStructs import FingerprintSimilarity\n","from IPython.display import display\n","\n","def tanimoto_similarity(database_mols, query_mol):\n"," \"\"\"Compare generated molecules to database by Tanimoto similarity.\"\"\"\n"," # convert Mol to datastructure type\n"," fps = [FingerprintMol(m) for m in database_mols]\n"," \n"," # set a query molecule to compare against database\n"," query = FingerprintMol(query_mol)\n"," \n"," similarities = []\n"," \n"," # loop through to find Tanimoto similarity\n"," for idx, f in enumerate(fps):\n"," # tuple: (idx, similarity)\n"," similarities.append((idx, FingerprintSimilarity(query, f)))\n"," \n"," # sort sim using the 
similarities\n"," similarities.sort(key=lambda x:x[1], reverse=True)\n"," \n"," return similarities"],"execution_count":32,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"cCPEN3_cfQ4N","colab_type":"text"},"source":["We'll consider our generated molecules and look at the top 3 most similar molecules from the training data by Tanimoto similarity. Here's an example where the Tanimoto similarity scores are low! There are no molecules in our training set that are very similar to our generated sample. This might be interesting, or it might mean that the generated molecule is unrealistic."]},{"cell_type":"code","metadata":{"id":"MjR0O1EucwC3","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600973946741,"user_tz":240,"elapsed":1130525,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["train_mols = [Chem.MolFromSmiles(smiles) for smiles in train_smiles]"],"execution_count":33,"outputs":[]},{"cell_type":"code","metadata":{"id":"vsaSkVJufGDy","colab_type":"code","colab":{},"executionInfo":{"status":"ok","timestamp":1600976249046,"user_tz":240,"elapsed":855,"user":{"displayName":"Nathan Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}}},"source":["# change the second argument to compare different generated molecules to QM9\n","tanimoto_scores = tanimoto_similarity(train_mols, gen_mols[3])\n","similar_mols = []"],"execution_count":40,"outputs":[]},{"cell_type":"code","metadata":{"id":"zgyJ9txQsRxg","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":967},"executionInfo":{"status":"ok","timestamp":1600976249858,"user_tz":240,"elapsed":370,"user":{"displayName":"Nathan 
Frey","photoUrl":"https://lh3.googleusercontent.com/a-/AOh14GiCEtTj6AL3entEShxjitkGUQo5YhZ7CJA0917VzA=s64","userId":"14838914823565259795"}},"outputId":"8c07c35c-a575-4919-dd0e-c89ec7a1d6e2"},"source":["for idx, ts in tanimoto_scores[:3]:\n"," print(round(ts, 3))\n"," similar_mols.append(train_mols[idx])\n","\n","display_images(mols_to_pngs(similar_mols, 'qm9_mol'))"],"execution_count":41,"outputs":[{"output_type":"stream","text":["0.243\n","0.243\n","0.241\n"],"name":"stdout"},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAIo0lEQVR4nO3dz2pU5x/H8TEJeAEWFXStpBVXupAu1HoNzSYLeyO2mHghIu1CwQsQqiB4CUpbb6BMXOgFKE8Xz4/8bKrJmZkz53P+vF7LMBm+hLznGzJnnnOqlDIDcjbSA8DUiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCFnefD7/888/X758mR6kd169evXp06eGDxYhS5rP57dv37527dqtW7cePHiQHqdHfvvtt1u3bt29e7eU0uTxImQZBwcHd+7c+eOPP86cObO1tXXv3r29vb30UL3w5MmTn3766dOnT5cvXz516lSj7ymwoPl8/t13381ms6tXr7579+7x48dbW1uz2ez+/fvp0cIOfxR7e3vNv0uELOZIgfWLOizLFlhEyEK+WGA18Q6XLrCIkOaOKbCabIerFFhESEMnFlhNsMMVCywipImGBVaT6nD1AosIOdFCBVYT6bCVAosIOd4SBVaj77CtAosIOcbSBVYj7rDFAosI+ZoVC6xG2WG7BRYR8kWtFFiNrMPWCywi5L9aLLAaTYfrKLCIkCNaL7AaQYdrKrCIkM+tqcBq0B2ur8AiQg6ttcBqoB2utcAiQqoOCqwG1+G6CywipHRYYDWgDjsosIiQjgusBtFhNwUWEU5cpMCq5x12VmAR4ZQFC6x622GXBRYRTla8wKqHHXZcYBHhNPWkwKpXHXZfYBHhBPWqwKonHUYKLCKcmh4WWMU7TBVYRDgpvS2wCnYYLLCIcDp6XmAV6TBbYBHhRAyiwKrjDuMFFhFOwYAKrDrrsA8FFhGO3uAKrDrosCcFFhGO20ALrNbaYX8KLKVsNbp102z27t27v/76q+GDg06fPn39+vX0FL1wcHDwww8/vHnz5urVq8+fP//mm2/SEy1mZ2dnNpvt7u7+8ssvs9ns559/buuZnzx5sru7+/Hjx729vXv37rX1tMtrGOujR4/SkzZy8eLFtb5oDcWgd+DnWt+HvdqBVdNNePbs2e+//3598bTl3Llz6RHyhr4DP9fuPuzdDqzSrwK0bDQ78H
Ot7MMe7sBKhKMyygKrFTvsbYFFhGMy4gKrpTvsc4FFhKMx+gKrJTrseYFFhOMwkQKrhTrsf4FFhCMwqQKrhh0OosAiwqGbYIHViR0OpcAiwkGbbIHVMR0OqMAiwuGaeIHVFzscVoFFhAOlwENHOhxcgUWEQ6TAIw7D+/HHHwdXYGke4VAu4D7epUuX0iO0YGNjYzabffvttwo89Ouvv25sbGxubs5ms/39/fQ4i9lI/0bB5KVfBVjYfD6/cuXKbDbb3t7++++/0+PkHf45urOz04fzSxclwkHS4aEj/4mJn1+6BBEOlQ7LV/4XOrgORThgE+/wmHcjhtWhCIdtsh2e+H7ggDoU4eBNsMOG78gPpUMRjsGkOlzomphBdCjCkZhIh0tcldb/DkU4HqPvcOnrQnveoQhHZcQdrnhldp87FOHYjLLDVj4b0dsOT5VSmlzd9uzZs/39/RUvkevAuXPnnj59mp4i7ODg4M6dO69fv97e3n7x4sX58+fTE62kxRN7D5/q/v37LZ6rv6qGsQ7lUxSOwa9Gsw9b/3xgD/dh003ohjCDM4J9uKZT63u3D9OvAqzRoPfhWj8j36t9KMKRG2iHHZxS0Z8ORTh+g+uws3NietKhCCdhQB12fFJTHzoU4VQMosPIWWnxDkU4IT3vMHhaYbZDEU5LbzuMnxca7FCEk9PDDuMFHhmj4w5FOEW96rAnBR4ZpssORThRPemwVwVW3XcowumKd9jDAquOOxThpAU77G2BVZcdinDqIh32vMCqsw5FSNcdDqLAqpsORUgpHXY4oAKrDjoUIf/TQYeDK7Bad4ci5P/W2uFAC6zW2qEI+Zc1dTjoAqv1dShCjmq9wxEUWK2pQxHyBS12OJoCq3V0KEK+rJUOR1Zg1XqHIuSrVuxwlAVW7XYoQo6zdIcjLrBqsUMRcoIlOhx9gVVbHYqQky3U4UQKrFrpUIQ00rDDSRVYrd6hCGnqxA4nWGC1YociZAHHdDjZAqtVOhQhi/lihxMvsFq6QxGysCMdKvDQch2KkGUcdnjhwgUFfm6JDjca3T8N/u3s2bO///779vb2hw8fPn78uL+/3+L9AwdtZ2fn4cOHm5ubb9++Lc1u/tn0JqHwX/P5/P379/P5/ObNm+lZ+uXVq1c3btzY3Nxs8mARQpg/RyFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEMJECGEihDARQpgIIUyEECZCCBMhhIkQwkQIYSKEMBFCmAghTIQQJkIIEyGEiRDCRAhhIoQwEUKYCCFMhBAmQggTIYSJEML+AeIEvZw/xHFhAAAAAElFTkSuQmCC\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAS6UlEQVR4nO3df1DUdR7H8ZfCggkqKmjKLrCLP8CsFI8If2VpPxSvzvFMvPPyGqPyB07deTf3w7MfTk0213VOhqI259WZP7rpLE8Kx1K7OPwxcJmaSMIusAuBEYEC+wN2749VxGX5Bsjue5d9PaZpCD/VW4ann+X72f1uP4fDASKS0196AKJAxwiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFC
ImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxgiJhDFCImGMkEgYIyQSxghJgsOB7duRnIzwcAwahGnTsHev9ExigqUHoIC0ejWyshAVhUWL0NKCAweQno6vv8a6ddKTCejncDikZ6AA85//YOZMjB2L/HwMHw4ABgPuugvffYczZ5CYKD2ft/HhKHndjh0A8Kc/XS0QQFwc1q5Fayt27pQbSwwjJK/LzweAOXNu+OSDDwLA8eMC80hjhOR1FRVQqTBq1A2fjI29+kuBhxGSdzkcMJsxYIDr52+5BQCamrw/kThGSN7Vrx8GDkRzM1yuCDY3A0B4uMhQshgheV1sLFpaXB956vUAoNWKTCSLEZLXTZ8OALm5N3zy8GEAmDFDYB5pjJA87OxZjB+P7Ozrn3niCQB46SXU1Fz9TFUVXn8dISF47DGBCaXxGTPkYVu2oLgYZ85c/8xdd2HtWvz5z5g4EY88ArsdH3yA2lr89a+IixObUw6fMUOedOUKoqPR0IAvv8Ttt9/wSzt3IisL586hf39Mnoy1a/Hww0JTCmOE5ElZWVi1Cvfcg6NHpUfxXfyZkDxp61YAWLFCaY3Nhg8/RGurdybyQYyQPOazz3DmDG69FQsWKC3bvx+PPIJ587w1ls9hhOQxW7YAwBNPICREadmbbwII2B8IwZ8JyVMuXYJGg5YWlJRcfV6oW+fP47bbEBYGkwmDB3txPh/CnZA8Y9s2WCz48Y+VCgSQlQWHA7/4RcAWCO6E5BGtrRgzBgYDcnPxwAOdLrtyBWo16utx+jTuuMOL8/kW7oTkAf/+NwwGxMe7vmjQxT/+gfp6zJgRyAWCEZJHOC/JrFyJ/orfYNu2AT90gBEA+HCUeltJCcaNQ2gojEYMG9bpsrw8TJ+OqChUVCA01Ivz+RzuhNTbtm6F3Y70dKUCcW23zMgI8ALBnZB6WXMzNBrU1uLkSSQnd7rs22+h0cBqRUlJYD5puz3uhNSr9u5FbS0mT1YqEMD27TCbMX8+CwQjpF7mfJCZmam0xm7H9u0AL8lcxYej1Hv+9z8kJSEiAiYTBg7sdNmBA3j4YcTHo7j4By6fBgZ+Caj3bN4MAMuXKxWIa7vl00+zQCfuhNRLvv8e0dFobkZREcaN63RZaSnGjoVKhYoKREV5cT7fxT+KqJf87W9oasL99ysVCCA7++oBBgu8hjsh9QaHA4mJuHAB//oXfvKTTpdZLIiJQU0Njh9HSooX5/Np3AmpNxw+jAsXoNFg/nylZXv3oqYGkyaxwPYYIfWCiv37ERSEjAwEK96/z3lJZvVq70zlL/hwlG6W0WjUarXT1OpD+fkht97a6bovvsDkyYiIgNGIsDAvDujruBPSzdq2bVtLS8utKSlKBeLabSx++UsW6II7Id2UlpaWuLg4k8l09OjRe+65p9N19fWIjkZTE86fx/jxXhzQD3AnpJvy/vvvm0ymxMTEmTNnKq3buRONjZg9mwV2xAjppmzZsgXAqlWr+vXr19kah8PxwqFD5amprXyyqDt8OEo9d/78+dtuuy0sLMxkMg3u/E5Nn3zyyZw5c0aPHm0wGFQqlTcn9AvcCannsrKyHA7H0qVLFQrEtd3yqaeeYoFucSekHrpy5Ypara6vry8sLJw8eXJny6qqqmJjYx0Oh8Fg
iI6O9uaE/oI7IfXQrl276uvrp0+frlAggOzsbJvNtmDBAhbYGUZIPbRt2zYAKxSvtbS0tOzYseMHlwU4Rkg9kZeXV1hYGBUVtXDhQoVl+/fvdx5gzJo1y1uj+R9GSD3hvNaSkZERqnivNOeyFStWKBxgEC/MULd9++23Go3GarWWlJTEdX6npqKiogkTJtxyyy1Go3Ho0KFeHNDPcCekbtuxY4fZbE5LS1MoEMDWrVudBxgsUBl3Quoeu90+ZswYvV6fk5Mzd+7czpY1NTWp1eq6urqCgoKkpCRvTuh3uBNS9+Tk5Oj1ep1O9+CDDyos27VrV11d3dSpU1ngD2KE1D3Oay1PP/10f8V7pWVnZ4MnE13Dh6PUDWVlZfHx8cHBwRUVFVGd36np+PHjqampkZGRFRUVAwYM8OaE/og7IXXDli1bWltbFy9erFAgru2Wy5cvZ4FdwZ2QuspiscTExNTU1OTn5999992dLautrVWr1Var9eLFi1qt1psT+inuhNRV+/btq6mpmTRpkkKBAN566y2z2Tx37lwW2EWMkLqq7fW7CmscDgefLNpdfDjqETabraysrPQavV5fWlr64osvpqWlSY/WQ6dPn540aVJERITRaAzr/E5NOTk5aWlpsbGxJSUlQUFB3pzQfyneJZK6oK6urrSD8vLylpYWl5Xnzp3z3whDQkIeffRRjUajUCDaPVmUBXYdd8Kuslgszg3N+fe2Le7y5csdFwcFBanVap1Op9PptFqt84OEhIQhQ4Z4f3IPsVqtISEh7T9TXl6u0+mCgoIqKipGjBghNZjf4U7ohtvNraysrLW1tePiAQMGjB49WnejxMTEgcpvD+ZX3H5B7HZ7Zmbmk08+GR4e7ly2devW1tbWJUuWsMBuCeid0GKxmEwml++t4uJit5ubSqXSaDTOxkaNGtU+PO9P7iENDQ3tf4h1MhgMVqu14+Lg4OCWlpYlS5a8++67AKxWa0xMTHV1dV5e3tSpU70+ux8LrJ3QbDa//PLLbd9h33zzjdtlkZGR7R9GOj/WaDTBym+04Ffcbm56vd7tH8pDhw7VdWC1WlNSUnbv3j1t2rRVq1YdOnSourr6zjvvZIHdFVg7ocPhCAsLa25udv5j+82tTXx8fEREhOycvah9bJWVlVVVVaWlpV999VXbF6G90NDQ6Oholy/IuHHjBg0a5PY//sEHHyxYsCA4OPjIkSPTpk07depUY2MjX0TfXYEVIYBNmzYNHjzY+e0VHR2t/CxkP2Kz2SoqKlx2tpKSku+//97terebW1xcXHe/IM8888ymTZs0Gk1hYWFkZGRv/FYCTsBF2Ad0/VAEnVw3SkhIUD5p6DqbzXbfffd9/vnns2fPzs3N5clEDzBC32W1Wo1Go0tsFy9erK+vd7ve7eam1Wo9fX8Xo9GYlJR06dKlF154Yf369R79f/VJjNAnVFVVlZWVff311+2vTFZWVnZ2mcTlopFOp4uNjRW8v/Wnn376wAMPOByOgwcPPvTQQ1Jj+ClG6FUKhyIJCQlFRUXtF3e8btR2NCI1v4INGzasX79+2LBhBQUFyveeIReM0CPsdrszNpen1ygcitx+++0jR45sv8Wp1Wo/OhSx2+3z58//6KOPUlJSPvvsM5cn05ACRnizzGZzZWWly+ZWVFTU2NjYcXFISEjb09najBkzpm88na2urm7KlCl6vX7NmjWbNm2SHsdvMMJuuPkD7tjY2L59/fDUqVMzZsywWCzvvPPO0qVLpcfxD4zQjbbY2k63e/GAu8978803V69eHR4efuLEiQkTJkiP4wcCOkKpA+4+b9myZW+//fb48eNPnToVsH8YdV2gRHjzL4zoxQPuPq+xsTElJeXcuXPp6em7d++WHsfX9bUIzWazXq/X6/XV1dVnzpxpuz555cqVjouDgoI0Gk3HMzflW4lRVxQXFycnJzc0NGRlZfFWF8r8OELlzS0pKamwsLBtsdvNzfl2JXK/gz5u3759
ixcvVqlUx44dS01NlR7Hd/lBhE1NTe0vRbZ9bDabOy5WqVSxsbFarfaOO+4YMWJE2y7H9yQRkZmZuXnz5piYmIKCAj69uzO+FaHbzc1gMNjt9o6L3V4miYmJ8aMD7j7PZrPde++9eXl58+bNO3DgAK9guSUTIQ+4A0dFRcWUKVMuXbq0YcOGdevWSY/jizweIQ+427NarWVlZWFhYaNHj5aexXtyc3MXLXr8zjvzn38+dvZs6Wl8T69F6Iyt/el2gB9wK1w3eu65555//nnpAb3qlVeafv/7gSNGoLAQ0dHS0/iYbv/45PaAu7S0tK6uzu36Pn/A7TwUcbloVFpa6vahdVBQUFxcnPL7vPdJv/3twGPH8PHH+OlPcewY+Ozu9rq6Ex48ePC1114rLS01Go1uD7gHDRrU8cBNq9X2pW+47l43crkpW4Afinz3HaZMgcGAZ5/FX/4iPY0v6epO2NDQcOTIEefHUq/g9hq3r/q7cOGC2xN/lUoVFxfX8QvCQxEXw4Zh717MmIHXX0dqKhYtkh7IZ3R1J6yurj59+rRO+hXcvY6HIl72xhtYswbh4Th5EomJ0tP4Bt86J/Sm1NTUwsJCt7e1DQ0N1Wq1HR9d96XrRoIeewzvvIOJE3HiBPrQbcp7LnD/CG9tbbVarX3+UMQHZWWhoABnzyIjA7t2SU/jAwJ3JzQajcOHDw/YyySyLlxAcjIuX0Z2Np58UnoaaYEbIcnaswdLliA0FHl5mDJFehpRfeSwjvxOejpWroTFgoULUVsrPY0o7oQkxmbDrFn473+RloYPP0RfefpGtwXq75t8gEqFPXsQGYmDB7Fxo/Q0crgTkrDDh/HQQ3A48PHHuP9+6WkkcCckYXPm4A9/gN2OpUthMklPI4E7Icmz2zFvHnJzkZqKo0cD7und3AlJXv/+ePddxMYiPx9//KP0NF7HnZB8xYkTmDkTNhveew8LF0pP40XcCclXpKRg40Y4HHj8cdz4/lR9HHdC8i2PPor33sOPfoSTJ+Ffr42z2Wzl5eVtr8VZtmxZF98FIHCfwE2+6a23UFuLDRt8usCuvGN5QkICIyS/NGgQPvlEeohrLBaLwWDoeO+Sy5cvd1wcFBQUExPT9lqcpKSkLv5fGCERAFRXN5aUnHZ5U1eTyeT25d1Dhgxxea2p8xVwPXtrVEZIAjZvRmYm3ngDq1df/+S6dXjpJezejfT062uGD0d5+fXX/v7zn1i06PqaHrBYYDKhtPSGv4qLodUWfvnlTJfFwcHBI0eO7PgGCrpefcdyRkg+rbYWO3ZgzZpu/4sOx9XY9PqrpTk/qKpyv95qHZOcnOxyOwXv3LuEEZJPGzsWr72GlSuh0ILZjMpK182tqAjubjoJlQoaDXS6G/6Kj0dExCjgpOd+IwoYIfm05cvxu99hzx50fO/tX/8an38OvR6XLrn/d0eOhE4HrfZ6bFot1Gqfe80UIySfNmsWJk7Eq6/i5z93PbQ4exYnTwJASAjUatfNbexYDB4sMnK3MUISk5mJzMwfWONw4De/wbJlyMlBWtoNv/Tii1i/Hlot/P19PRghiUlNRfvT7IICfPGFm2VLlmDdOmzc6BphSopnx/MaRkhifvYz1yMKtxGqVHj2WfzqV8jP99poXuVjP6ISuZORgaFD8corPv1cth5jhOQHwsOxahUOHIBeLz2KBzBC8g9r1iA0FH//u/QcHsAIyT9EReHxx3H2rPQcHsAIyW+sXYs++RYhfFEvkTDuhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETC
GCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRMEZIJIwREgljhETCGCGRsP8D4ReEl+hJYSEAAAAASUVORK5CYII=\n","text/plain":[""]},"metadata":{"tags":[]}},{"output_type":"display_data","data":{"image/png":"iVBORw0KGgoAAAANSUhEUgAAASwAAAEsCAIAAAD2HxkiAAAABmJLR0QA/wD/AP+gvaeTAAAOz0lEQVR4nO3de1DVdf7H8dcBREUxSU3bMWpybUuxyWRyvDV2d2d13NxJa7XVxk3HTLlICASKCCoMy6U1zWHMnHWVrdExZ6xcu4x5SSeN3fU2i9QoOusl0RUDFZCzfxzEn/5OJcjh/T2H52P64wzf7/l+3+PwjHM4Xz5fl9vtFgA7QdYDAG0dEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiDDwrVy5cuLEiV988YX1IPCOCAPfnj171q5de+TIEetB4B0RAsaIEDDWxAijouRyaeRIL5t27JDLJZdLxcVetlZWatEiDR6sbt3UoYMiI/X734t3KYAU0hon+ec/NWqUTp26/pXjx7Vundat02uvaelSuVytMQbgSL5/OXr2rJ55RqdOKThYcXHavVuHD6u4WP37S9KyZVqwwOczAA7m+wjT0vT995JUVKS8PA0erAcf1IQJ2r1b/fpJ0uLFOnbM52MATuXjCC9d0l//KknR0XrllRs2de6snBxJqqnR6tW+HQNwMB9HuGuXLl6UpPHjvWwdNUpdu0rSxx/7dgzAwXwc4T/+0fAgOtrL1uBgPfKIJO3fL7fbt5MArai+vv7Wd25WhNu2NXwa8X//GzHCy57/+U/Dg8hI74e6915JqqrShQvNmQRwnqtXrw4bNiwjI+Py5cu3sr+PfxJ6XotK6tTJ+w6dO9+8J+Dn3nnnnd27d6++5d90NOtzwuhorVp18xdLSvSHP9z8xaBrkf/Yq83Gn9pBXLuDQHD+/Pn09HRJubm5HTp0uJWnNCvCTp0UFXXzF//7Xy97hoc3PKis1N13e9nhhx8aHnTp0pxJAIdJT08/e/bsE0888fzzz9/iU3z886fxreDRo9538HxCGBFxPVfAbx0+fHj58uXBwcEFBQW3/iwfR/jwww0P9uzxsrWuTiUlkjRwoG/HAFpFfHx8bW3t9OnTH278zr8FPo5wyBBFREjS3/7m5W3hli0Nv48ZM8a3YwC+t2nTpk8++SQiImJBE6/E9HGEoaGaMkWSDh3S22/fsOnyZaWkSFLnzpo40bdjAD5WU1PzxhtvSEpPT+/evXuTntsq147+4heSFBOj2Fjt3auyMm3cqOHD9a9/SVJWlnr08PkYgC8VFhaWlpY+9NBDM2bMaOpzff+nTBER+vvfNWqUTpxQYaEKC69vcrmUnKzZs30+A+BLZ86cycrKkpSXl9euXbumPr1VPp3r31+HDikzU9HR6tpVoaGKjNSkSdq1S1lZDfucOdMakwA+kJKScuHChTFjxowaNaoZT2/iT8IDB3500/DhP3X9Z3i43nxTb77pZdMPP2jyZG3frtLShu
u5Af9RUlKyatWq0NDQ3Nzc5h3BAdepdO6sigp9/70WLrQeBWiy2NjY+vr62bNnP/DAA807ggMilFRQoOBg/fnP+ve/rUcBmqC4uPjLL7+86667UlNTm30QZ0T4yCOaOlW1tZo1y3oU4FZdunQpKSlJUlZW1h133NHs4zgjQkmZmeraVVu38ge+8BfZ2dnHjh0bOHDgKzetGtFEjomwRw+lpUnS7NmqqbGeBvgZJ06c8PwmpqCgIDg4+HYO5ZgIJc2apV/9SmVlN19bAzhPYmJiVVXVhAkTHn/88ds8lJMibNdOf/qTJKWn37BIKeAwX331VXFxcceOHZcsWXL7R3NShJJ+8xv9+teqrFR6uvUogHf19fUxMTFutzsxMfG+++67/QM6LEJJeXlq105FRdq3z3oUwItVq1Z9/fXXvXv39lyxffucF+GDD2rmTNXXKzaWJdjgNBcvXkxLS5OUk5PT6cdWTmoi50Uoaf589eihHTu0fr31KMANMjMzT548OWTIkBdffLGljunICLt2VUaGJMXHq7raehqgwbfffltYWBgUFFRQUOBqubsYOTJCSdOmadAgHT+uvDzrUYAG8fHxV65cmTJlymOPPdaCh3VqhEFBKiiQy6XFi1Vebj0NoM8++2zTpk3h4eGZmZkte2SnRihp+HCNG6fqat3GpbFAi7h69WpcXJyk1NTUu70u3nkbHByhpLw8hYVpzRrt2GE9Ctq0ZcuW7d+///7774+JiWnxgzs7wshIxcXJ7VZsrJpyhw2gBZ0/f96zgFpeXl779u1b/PjOjlBSSooiI7Vvn/7yF+tR0EalpaVVVFQ8+eSTY8eO9cXxHR9hWJg874PnzlVlpfU0aHMOHTq0YsWKpi6q3SSOj1DSpEkaNkynTys723oUtDnx8fF1dXUzZswYMGCAj07hDxG6XCosVFCQcnN15Ij1NGhDNm7cuGXLloiIiPnz5/vuLP4QoaRBgzRpkmpqlJRkPQraipqamsTEREkZGRlNXVS7SfwkQkk5OerSRRs2aOtW61HQJuTl5R05cqRfv37Tp0/36Yn8J8KePTV3riTFxamuznoaBLjTp08vXrxYzV1Uu0n8J0JJc+bol7/UwYMqKrIeBQEuOTm5srJy7Nixzz33nK/P5VcRtm8vz2oCqamqqLCeBgHrm2++Wb16dWhoaE5OTiuczq8ilPS73+mZZ3TuHMt1w0fcbrdnUe24uLhmL6rdJP4WoaT8fIWE6O23f+rGGEBzrV27dvv27T179kxOTm6dM/phhP37649/VF2d4uKsR0GguXTpUkpKiqTFixffzqLaTeKHEUpauLA+MvLD2tqPN2+2HgUBZcmSJeXl5QMHDpw8eXKrndQ/I+ze/Z05c367bVtsfHwNy3WjhRw/fjw3N9flcnnWsGi18/pnhNK0116LiooqLS196623rGdBgEhISKiurn7ppZdGjBjRmuf11whDQkLy8/MlZWRknDx50noc+L2dO3d+8MEHHTt2XLRoUSuf2l8jlPT000+PHj364sWL8+bNs54F/q2+vj42NtbtdiclJd17772tfHY/jlBSQUFB+/bt33333b1791rPAj+2cuXKvXv39u7dOyEhofXP7t8R9unT5/XXX2+8N4D1OPBLjS+mcnNzw8LCWn8A/45Q0vz583v16rVr167333/fehb4pQULFpw6dWro0KHjx483GcDvIwwPD8/IyJCUkJBQVVVlPQ78TFlZ2dKlS4OCggoLC1twUe0m8fsIJU2dOjU6OrrxzqnArYuNjb1y5YrnW8hqhkCIsPF/Y557iFuPA7/x6aefbt68ufHFlJVAiFDS0KFDX3jhhcYL/4CfVVdX51lUe968eb169TKcJEAi1LVfba1bt2779u3Ws8APLF269MCBA3369Jk1a5btJIET4T333JOQkOB2u2NiYupZrhs/6dy5c577ung+arYdJnAilOS53KGkpOS9996zngWOlpqaWlFR8dRTT40ePd
p6lsCKsPHCv+Tk5AsXLliPA4c6ePBgUVFRSEiI7xbVbpKAilCS5xL4M2fOeJbKAv6/uLi4urq6mTNnRkVFWc8iBV6ELperoKAgKCgoPz+/tLTUehw4zvr167du3XrnnXempaVZz9Ig0CKU9Oijj06ePLlx+WSgUU1NTVJSkqTMzMxu3bpZj9MgACPUtQVCPvzwwy1btljPAgfJzc0tKyvr16/fq6++aj3LdYEZYeNSWXFxcbW1tdbjwBFOnz6dnZ0tKT8/PyQkxHqc6wIzQkmeRSMPHz68YsUK61ngCImJiZWVlePGjXv22WetZ7lBwEbYuHzyvHnzzp49az0OjO3bt2/NmjWhoaFLPIu4O0nARijJcyOBxhuOo81qvI4qISGhb9++1uPcLJAj1LVb6ixfvnz//v3Ws8DMmjVrdu7c2bNnz7meG3s5TIBH6Lm53NWrV2NjY61ngY3q6urU1FRJ2dnZXbp0sR7HiwCPUNdus/r5559v3LjRehYYWLRoUXl5+aBBg15++WXrWbwL/Agbbzg+Z86cK1euWI+DVlVeXp6fn994HZX1ON45dKyWNWPGjAEDBnz33XcOuWAXrSY+Pr66unrSpEnDhw+3nuVHtYkIg4ODPfllZWWxXHfbsWPHjg0bNoSFhXn+dNC53G3G2LFjg4ODrf+90doWLlxo/a33M1zuNrNm7tGjR7dt2zZlyhTrQdB6Pvroo5EjR3bs2NF6kJ/ShiJss6ZNm1ZUVLRixYpp06ZZzwIv2sR7QsDJiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsAYEQLGiBAwRoSAMSIEjBEhYIwIAWNECBgjQsBYiPUA8LnBgwdXVVX17dvXehB453K73dYzAG0aL0cBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGCNCwBgRAsaIEDBGhIAxIgSMESFgjAgBY0QIGPsfwQFEbwWo3UAAAAAASUVORK5CYII=\n","text/plain":[""]},"metadata":{"tags":[]}}]},{"cell_type":"markdown","metadata":{"id":"5oyYuK11xxBO","colab_type":"text"},"source":["### Further reading\n","\n","So far we have looked at a measure of validity and done a bit of investigation into the novelty of the generated compounds. There are more dimensions along which we can and should evaluate the performance of a generative model. 
For an example of some standard benchmarks, see the [GuacaMol evaluation framework](https://arxiv.org/pdf/1811.09621.pdf).\n","\n","For examples of normalizing flow-based molecular graph generation frameworks, check out the [MoFlow](https://arxiv.org/abs/2006.10137), [GraphAF](https://arxiv.org/pdf/2001.09382.pdf), and [GraphNVP](https://arxiv.org/pdf/1905.11600.pdf) papers."]},{"cell_type":"markdown","metadata":{"id":"YdJAF3aEHGbV","colab_type":"text"},"source":["# Congratulations! Time to join the Community!\n","\n","Congratulations on completing this tutorial notebook! If you enjoyed working through the tutorial, and want to continue working with DeepChem, we encourage you to finish the rest of the tutorials in this series. You can also help the DeepChem community in the following ways:\n","\n","## Star DeepChem on [GitHub](https://github.com/deepchem/deepchem)\n","This helps build awareness of the DeepChem project and the tools for open source drug discovery that we're trying to build.\n","\n","## Join the DeepChem Gitter\n","The DeepChem [Gitter](https://gitter.im/deepchem/Lobby) hosts a number of scientists, developers, and enthusiasts interested in deep learning for the life sciences. 
Join the conversation!"]}]} \ No newline at end of file diff --git a/examples/tutorials/WIP_20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb b/examples/tutorials/WIP_20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb deleted file mode 100644 index f557377d15dd02d6b1a5cc1dc94b8b6ddd724ec6..0000000000000000000000000000000000000000 --- a/examples/tutorials/WIP_20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb +++ /dev/null @@ -1,420 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.10" - }, - "colab": { - "name": "WIP_20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb", - "provenance": [] - }, - "accelerator": "GPU" - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "j6EyTq_tQXhw", - "colab_type": "text" - }, - "source": [ - "# Tutorial Part 20: Converting DeepChem models to TensorFlow Estimators\n", - "\n", - "So far, we've walked through a lot of the scientific details tied to molecular machine learning, but we haven't discussed as much how to use tools like DeepChem in production settings. This tutorial (and the last) focus more on the practical matters of how to use DeepChem in production settings.\n", - "\n", - "When DeepChem was first created, Tensorflow had no standard interface for datasets or models. We created the Dataset and Model classes to fill this hole. More recently, Tensorflow has added the `tf.data` module as a standard interface for datasets, and the `tf.estimator` module as a standard interface for models. To enable easy interoperability with other tools, we have added features to Dataset and Model to support these new standards. 
Using the Estimator interface may make it easier to deply DeepChem models in production environments.\n", - "\n", - "This example demonstrates how to use these features. Let's begin by loading a dataset and creating a model to analyze it. We'll use a simple MultitaskClassifier with one hidden layer.\n", - "\n", - "## Colab\n", - "\n", - "This tutorial and the rest in this sequence are designed to be done in Google colab. If you'd like to open this notebook in colab, you can use the following link.\n", - "\n", - "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/deepchem/deepchem/blob/master/examples/tutorials/20_Converting_DeepChem_Models_to_TensorFlow_Estimators.ipynb)\n", - "\n", - "## Setup\n", - "\n", - "To run DeepChem within Colab, you'll need to run the following cell of installation commands. This will take about 5 minutes to run to completion and install your environment." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "bh09-nheQXh2", - "colab_type": "code", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 462 - }, - "outputId": "5e36df14-c56b-40a2-a143-d35e3126f525" - }, - "source": [ - "%tensorflow_version 1.x\n", - "!curl -Lo deepchem_installer.py https://raw.githubusercontent.com/deepchem/deepchem/master/scripts/colab_install.py\n", - "import deepchem_installer\n", - "%time deepchem_installer.install(version='2.3.0')" - ], - "execution_count": 1, - "outputs": [ - { - "output_type": "stream", - "text": [ - "TensorFlow 1.x selected.\n", - " % Total % Received % Xferd Average Speed Time Time Time Current\n", - " Dload Upload Total Spent Left Speed\n", - "100 3477 100 3477 0 0 10256 0 --:--:-- --:--:-- --:--:-- 10226\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "add /root/miniconda/lib/python3.6/site-packages to PYTHONPATH\n", - "python version: 3.6.9\n", - "fetching installer from 
https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh\n", - "done\n", - "installing miniconda to /root/miniconda\n", - "done\n", - "installing deepchem\n", - "done\n", - "/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/__init__.py:15: FutureWarning: sklearn.externals.joblib is deprecated in 0.21 and will be removed in 0.23. Please import this functionality directly from joblib, which can be installed with: pip install joblib. If this warning is raised when loading pickled models, you may need to re-serialize those models with scikit-learn 0.21+.\n", - " warnings.warn(msg, category=FutureWarning)\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:\n", - "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n", - "For more information, please see:\n", - " * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n", - " * https://github.com/tensorflow/addons\n", - " * https://github.com/tensorflow/io (for I/O related ops)\n", - "If you depend on functionality not listed there, please file an issue.\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "stream", - "text": [ - "deepchem-2.3.0 installation finished!\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "CPU times: user 2.4 s, sys: 517 ms, total: 2.91 s\n", - "Wall time: 1min 56s\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "jM8uHD_fQXh-", - "colab_type": "code", - "outputId": "5b065443-50fc-4026-9724-5bdeaff194a4", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 547 - } - }, - "source": [ - "import deepchem as dc\n", - "import tensorflow as tf\n", - "import numpy as np\n", - "\n", - "tasks, datasets, transformers = dc.molnet.load_tox21(reload=False)\n", - "train_dataset, valid_dataset, test_dataset = datasets\n", - "n_tasks = len(tasks)\n", - "n_features = train_dataset.X.shape[1]\n", - 
"\n", - "model = dc.models.MultitaskClassifier(n_tasks, n_features, layer_sizes=[1000], dropouts=0.25)" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading raw samples now.\n", - "shard_size: 8192\n", - "About to start loading CSV from /tmp/tox21.csv.gz\n", - "Loading shard 1 of size 8192.\n", - "Featurizing sample 0\n", - "Featurizing sample 1000\n", - "Featurizing sample 2000\n", - "Featurizing sample 3000\n", - "Featurizing sample 4000\n", - "Featurizing sample 5000\n", - "Featurizing sample 6000\n", - "Featurizing sample 7000\n", - "TIMING: featurizing shard 0 took 21.888 s\n", - "TIMING: dataset construction took 22.158 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.351 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.173 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.176 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.286 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.044 s\n", - "Loading dataset from disk.\n", - "TIMING: dataset construction took 0.038 s\n", - "Loading dataset from disk.\n", - "WARNING:tensorflow:From /tensorflow-1.15.2/python3.6/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", - "Instructions for updating:\n", - "If using Keras pass *_constraint arguments to layers.\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "TuKIZtbUQXiE", - "colab_type": "text" - }, - "source": [ - "We want to train the model using the training set, then evaluate it on the test set. As our evaluation metric we will use the ROC AUC, averaged over the 12 tasks included in the dataset. First let's see how to do this with the DeepChem API." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "-5zUpjFlQXiH", - "colab_type": "code", - "outputId": "63de5b0f-3b6f-4c95-c877-b18f60f05cde", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 309 - } - }, - "source": [ - "model.fit(train_dataset, nb_epoch=10)\n", - "metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)\n", - "print(model.evaluate(test_dataset, [metric]))" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:169: The name tf.Session is deprecated. Please use tf.compat.v1.Session instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/optimizers.py:76: The name tf.train.AdamOptimizer is deprecated. Please use tf.compat.v1.train.AdamOptimizer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:258: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:260: The name tf.variables_initializer is deprecated. Please use tf.compat.v1.variables_initializer instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/keras_model.py:237: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:108: The name tf.losses.softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.softmax_cross_entropy instead.\n", - "\n", - "WARNING:tensorflow:From /root/miniconda/lib/python3.6/site-packages/deepchem/models/losses.py:109: The name tf.losses.Reduction is deprecated. 
Please use tf.compat.v1.losses.Reduction instead.\n", - "\n", - "computed_metrics: [0.770005534034311, 0.8149272185691003, 0.843224224330952, 0.7941699811597237, 0.7050916141963877, 0.7847847847847849, 0.6692734193975505, 0.6598562026685901, 0.8362882956320903, 0.7056690837178643, 0.8348021283671433, 0.7099963045084996]\n", - "{'mean-roc_auc_score': 0.7606740659472496}\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "xXcyNGQXQXiN", - "colab_type": "text" - }, - "source": [ - "Simple enough. Now let's see how to do the same thing with the Tensorflow APIs. Fair warning: this is going to take a lot more code!\n", - "\n", - "To begin with, Tensorflow doesn't allow a dataset to be passed directly to a model. Instead, you need to write an \"input function\" to construct a particular set of tensors and return them in a particular format. Fortunately, Dataset's `make_iterator()` method provides exactly the tensors we need in the form of a `tf.data.Iterator`. This allows our input function to be very simple." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "AjGKtwReQXiO", - "colab_type": "code", - "colab": {} - }, - "source": [ - "def input_fn(dataset, epochs):\n", - " x, y, weights = dataset.make_iterator(batch_size=100, epochs=epochs).get_next()\n", - " return {'x': x, 'weights': weights}, y" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "s_KL9OinQXiS", - "colab_type": "text" - }, - "source": [ - "Next, you have to use the functions in the `tf.feature_column` module to create an object representing each feature and weight column (but curiously, *not* the label column—don't ask me why!). These objects describe the data type and shape of each column, and give each one a name. The names must match the keys in the dict returned by the input function." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "57-Yl90SQXiT", - "colab_type": "code", - "colab": {} - }, - "source": [ - "x_col = tf.feature_column.numeric_column('x', shape=(n_features,))\n", - "weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "weVnAymyQXid", - "colab_type": "text" - }, - "source": [ - "Unlike DeepChem models, which allow arbitrary metrics to be passed to `evaluate()`, estimators require all metrics to be defined up front when you create the estimator. Unfortunately, Tensorflow doesn't have very good support for multitask models. It provides an AUC metric, but no easy way to average this metric over tasks. We therefore must create a separate metric for every task, then define our own metric function to compute the average of them." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "oCckYybyQXie", - "colab_type": "code", - "colab": {} - }, - "source": [ - "def mean_auc(labels, predictions, weights):\n", - " metric_ops = []\n", - " update_ops = []\n", - " for i in range(n_tasks):\n", - " metric, update = tf.metrics.auc(labels[:,i], predictions[:,i], weights[:,i])\n", - " metric_ops.append(metric)\n", - " update_ops.append(update)\n", - " mean_metric = tf.reduce_mean(tf.stack(metric_ops))\n", - " update_all = tf.group(*update_ops)\n", - " return mean_metric, update_all" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "suikbE_FQXii", - "colab_type": "text" - }, - "source": [ - "Now we create our `Estimator` by calling `make_estimator()` on the DeepChem model. We provide as arguments the objects created above to represent the feature and weight columns, as well as our metric function." 
- ] - }, - { - "cell_type": "code", - "metadata": { - "id": "hUR_q5ugQXij", - "colab_type": "code", - "outputId": "ea8302d1-fe80-4c07-bf62-66e7300c54ca", - "colab": { - "base_uri": "https://localhost:8080/", - "height": 396 - } - }, - "source": [ - "#estimator = model.make_estimator(feature_columns=[x_col],\n", - "# weight_column=weight_col,\n", - "# metrics={'mean_auc': mean_auc},\n", - "# model_dir='estimator')\n", - "estimator = tf.keras.estimator.model_to_estimator(model)" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "INFO:tensorflow:Using default config.\n", - "WARNING:tensorflow:Using temporary folder as model directory: /tmp/tmpq86w8_0k\n", - "INFO:tensorflow:Using the Keras model provided.\n" - ], - "name": "stdout" - }, - { - "output_type": "error", - "ename": "AttributeError", - "evalue": "ignored", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;31m# metrics={'mean_auc': mean_auc},\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;31m# model_dir='estimator')\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0mestimator\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkeras\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mestimator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodel_to_estimator\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/estimator/__init__.py\u001b[0m in \u001b[0;36mmodel_to_estimator\u001b[0;34m(keras_model, keras_model_path, 
custom_objects, model_dir, config, checkpoint_format)\u001b[0m\n\u001b[1;32m 105\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 106\u001b[0m \u001b[0mcheckpoint_format\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcheckpoint_format\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 107\u001b[0;31m use_v2_estimator=False)\n\u001b[0m\u001b[1;32m 108\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 109\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/tensorflow-1.15.2/python3.6/tensorflow_estimator/python/estimator/keras.py\u001b[0m in \u001b[0;36mmodel_to_estimator\u001b[0;34m(keras_model, keras_model_path, custom_objects, model_dir, config, checkpoint_format, use_v2_estimator)\u001b[0m\n\u001b[1;32m 558\u001b[0m keras_model_fn = _create_keras_model_fn(keras_model, custom_objects,\n\u001b[1;32m 559\u001b[0m save_object_ckpt)\n\u001b[0;32m--> 560\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0m_any_weight_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkeras_model\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 561\u001b[0m \u001b[0;31m# Warn if config passed to estimator tries to update GPUOptions. 
If a\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 562\u001b[0m \u001b[0;31m# session has already been created, the GPUOptions passed to the first\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/tensorflow-1.15.2/python3.6/tensorflow_estimator/python/estimator/keras.py\u001b[0m in \u001b[0;36m_any_weight_initialized\u001b[0;34m(keras_model)\u001b[0m\n\u001b[1;32m 81\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexecuting_eagerly_outside_functions\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 82\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 83\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mlayer\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mkeras_model\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 84\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mweight\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mlayer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweights\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 85\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'_keras_initialized'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mAttributeError\u001b[0m: 'MultitaskClassifier' object has no attribute 'layers'" - ] - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "qxhP2VVTQXiq", - "colab_type": "text" - }, - "source": [ - "We are finally ready to train and evaluate it! Notice how the input function passed to each method is actually a lambda. 
This allows us to write a single function, then use it with different datasets and numbers of epochs." - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "HnzpHwgcQXis", - "colab_type": "code", - "colab": {} - }, - "source": [ - "estimator.train(input_fn=lambda: input_fn(train_dataset, 100))\n", - "print(estimator.evaluate(input_fn=lambda: input_fn(test_dataset, 1)))" - ], - "execution_count": 0, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Qai7_prqQXiw", - "colab_type": "text" - }, - "source": [ - "That's a lot of code for something DeepChem can do in three lines. The Tensorflow API is verbose and somewhat confusing. It has seemingly arbitrary limitations, like assuming a model will only ever have one output, and therefore only allowing one label. But for better or worse, it's a standard.\n", - "\n", - "Of course, if you just want to use a DeepChem model with a DeepChem dataset, there is no need for any of this. Just use the DeepChem API. But perhaps you want to use a DeepChem dataset with a model that has been implemented as an estimator. In that case, `Dataset.make_iterator()` allows you to easily do that. Or perhaps you have higher level workflow code that is written to work with estimators. In that case, `make_estimator()` allows DeepChem models to easily fit into that workflow." 
- ] - } - ] -} \ No newline at end of file diff --git a/examples/notebooks/assets/dataset_preparation_gui.png b/examples/tutorials/assets/dataset_preparation_gui.png similarity index 100% rename from examples/notebooks/assets/dataset_preparation_gui.png rename to examples/tutorials/assets/dataset_preparation_gui.png diff --git a/examples/tutorials/pong.png b/examples/tutorials/pong.png new file mode 100644 index 0000000000000000000000000000000000000000..641c7d4c7aa649afd1a516e369395c35e7f62bfd Binary files /dev/null and b/examples/tutorials/pong.png differ diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 6df308df74d571a022209782bd1bd14f0ab59756..0000000000000000000000000000000000000000 --- a/pytest.ini +++ /dev/null @@ -1,4 +0,0 @@ -[pytest] -markers = - slow: marks tests as slow (deselect with '-m "not slow"') - serial diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d1ddba27b7ce4255462f1ecaf4fb2239ac45452 --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1,7 @@ +coveralls +flake8 +flaky +mypy +pytest +pytest-cov +yapf==0.22.0 diff --git a/requirements.yml b/requirements.yml new file mode 100644 index 0000000000000000000000000000000000000000..8331dd3903d5d9417c02e6bce8b53aaa59a62377 --- /dev/null +++ b/requirements.yml @@ -0,0 +1,24 @@ +name: deepchem +channels: + - omnia + - conda-forge + - defaults +dependencies: + - openmm + - pdbfixer + - rdkit + - pip + - pip: + - biopython + - lightgbm + - matminer + - mdtraj + - mordred + - networkx + - pillow + - pubchempy + - pyGPGO + - pymatgen + - simdna + - xgboost + - git+https://github.com/samoturk/mol2vec diff --git a/scripts/colab_install.py b/scripts/colab_install.py index 4943d2277e81d67d6a9171d3147d6796d140156d..62ac7aaad59c5890fb137265e0511dc1185647a7 100644 --- a/scripts/colab_install.py +++ b/scripts/colab_install.py @@ -15,13 +15,13 @@ logger.addHandler(StreamHandler()) logger.setLevel(INFO) default_channels = 
[ - "-c", - "rdkit", - "-c", "conda-forge", + "omnia", ] default_packages = [ "rdkit", + "openmm", + "pdbfixer", ] @@ -31,38 +31,33 @@ def install( url_base="https://repo.continuum.io/miniconda/", conda_path=os.path.expanduser(os.path.join("~", "miniconda")), add_python_path=True, - version=None, - # default channels are "conda-forge" and "rdkit" + # default channels are "conda-forge" and "omnia" additional_channels=[], - # default packages are "rdkit" and "deepchem" + # default packages are "rdkit", "openmm" and "pdbfixer" additional_packages=[], - # whether to clean install or not - force=False): - """install deepchem on Google Colab +): + """Install conda packages on Google Colab For GPU/CPU notebook - (if you don't set the version, this script will install the latest package) ``` - import deepchem_installer - deepchem_installer.install(version='2.4.0') + import conda_installer + conda_installer.install() ``` - If you want to add soft dependent packages, you can use additional_conda_channels and + If you want to add other packages, you can use additional_conda_channels and additional_conda_package arguments. Please see the example. 
``` - import deepchem_installer - deepchem_installer.install( - version='2.4.0', + import conda_installer + conda_installer.install( additional_conda_channels=[] additional_conda_packages=["mdtraj", "networkx"] ) // add channel - import deepchem_installer - deepchem_installer.install( - version='2.4.0', - additional_conda_channels=["-c", "omnia"] - additional_conda_packages=["openmm"] + import conda_installer + conda_installer.install( + additional_conda_channels=["dglteam"] + additional_conda_packages=["dgl-cuda10.1"] ) ``` """ @@ -78,12 +73,15 @@ def install( logger.info("add {} to PYTHONPATH".format(python_path)) sys.path.append(python_path) - if os.path.isdir(os.path.join(python_path, "deepchem")): - logger.info("deepchem is already installed") - if not force: - return + is_installed = [] + packages = list(set(default_packages + additional_packages)) + for package in packages: + package = "simtk" if package == "openmm" else package + is_installed.append(os.path.isdir(os.path.join(python_path, package))) - logger.info("force re-install") + if all(is_installed): + logger.info("all packages are already installed") + return url = url_base + file_name python_version = "{0}.{1}.{2}".format(*sys.version_info) @@ -109,23 +107,23 @@ def install( subprocess.check_call(["bash", file_name, "-b", "-p", conda_path]) logger.info('done') - logger.info("installing deepchem") - deepchem = "deepchem" if version is None else "deepchem=={}".format(version) + logger.info("installing rdkit, openmm, pdbfixer") + channels = list(set(default_channels + additional_channels)) + for channel in channels: + subprocess.check_call([ + os.path.join(conda_path, "bin", "conda"), "config", "--append", + "channels", channel + ]) + logger.info("added {} to channels".format(channel)) subprocess.check_call([ os.path.join(conda_path, "bin", "conda"), "install", "--yes", - *default_channels, - *additional_channels, "python=={}".format(python_version), - *default_packages, - *additional_packages, - 
deepchem, + *packages, ]) logger.info("done") - - import deepchem - logger.info("deepchem-{} installation finished!".format(deepchem.__version__)) + logger.info("conda packages installation finished!") if __name__ == "__main__": diff --git a/scripts/install_deepchem_conda.ps1 b/scripts/install_deepchem_conda.ps1 index 7bb90e9d74c43151a29d7062acad9f98bcda06a4..284154793399f81c8e566271aed8dfc51f635cdf 100644 --- a/scripts/install_deepchem_conda.ps1 +++ b/scripts/install_deepchem_conda.ps1 @@ -8,31 +8,47 @@ else $python_version=3.6 } -if($args.Length -eq 1) +if($args[0] -eq "gpu") { - $envname = $args[0] - conda create -y --name $envname python=$python_version - conda activate $envname + $cuda="cu101" + dgl_pkg="dgl-cu101" + echo "Installing DeepChem in the GPU envirionment" } else { - echo "Installing DeepChem in current env" + $cuda="cpu" + $dgl_pkg="dgl" + echo "Installing DeepChem in the CPU envirionment" } -conda install -y -q -c deepchem -c rdkit -c conda-forge -c omnia ` - biopython ` - mdtraj ` - networkx ` - openmm ` - pdbfixer ` - pillow ` - py-xgboost ` - rdkit ` - simdna ` - pymatgen ` - pytest ` - pytest-cov ` - flaky +# Install dependencies except PyTorch and TensorFlow +conda create -y --name deepchem python=$python_version +conda activate deepchem +$path = Join-Path $Pwd "requirements.yml" +conda env update --file $path +$path = Join-Path $Pwd "requirements-test.txt" +pip install -r $path +# Fixed packages +$tensorflow=2.3.0 +$tensorflow_probability=0.11.0 +$torch=1.6.0 +$torchvision=0.7.0 +$pyg_torch=1.6.0 -pip install -U matminer tensorflow==2.2 tensorflow-probability==0.10 +# Install Tensorflow dependencies +pip install tensorflow==$tensorflow tensorflow-probability==$tensorflow_probability + +# Install PyTorch dependencies +pip install torch==$torch+$cuda torchvision==$torchvision+$cuda -f https://download.pytorch.org/whl/torch_stable.html + +# Install PyTorch Geometric and DGL dependencies +pip install torch-scatter -f 
https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-sparse -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-cluster -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-spline-conv -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-geometric +pip install $dgl_pkg +pip install dgllife +# install transformers package +pip install transformers diff --git a/scripts/install_deepchem_conda.sh b/scripts/install_deepchem_conda.sh index d9e65bb1df0f75c50343109bb229858771e338d0..5345ffa5975610270c3b14691bb87b8769c64eda 100644 --- a/scripts/install_deepchem_conda.sh +++ b/scripts/install_deepchem_conda.sh @@ -13,29 +13,51 @@ else echo "Using python "$python_version". But recommended to use python 3.6." fi -if [ -z "$1" ]; +if [ "$0" = "gpu" ]; then - echo "Installing DeepChem in current env" + # We expect that the CUDA vesion is 10.1. + # This is because TensorFlow mainly supports CUDA 10.1. 
+ cuda=cu101 + dgl_pkg=dgl-cu101 + echo "Installing DeepChem in the GPU environment" else - export envname=$1 - conda create -y --name $envname python=$python_version - conda activate $envname + cuda=cpu + dgl_pkg=dgl + echo "Installing DeepChem in the CPU environment" fi -yes | pip install --upgrade pip -conda install -y -q -c deepchem -c rdkit -c conda-forge -c omnia \ - biopython \ - mdtraj \ - networkx \ - openmm \ - pdbfixer \ - pillow \ - py-xgboost \ - rdkit \ - simdna \ - pymatgen \ - pytest \ - pytest-cov \ - flaky - -yes | pip install -U matminer tensorflow==2.2 tensorflow-probability==0.10 +# Install dependencies except PyTorch and TensorFlow +conda create -y --name deepchem python=$python_version +conda activate deepchem +conda env update --file $PWD/requirements.yml +pip install -r $PWD/requirements-test.txt + +# Fixed packages +tensorflow=2.3.0 +tensorflow_probability==0.11.0 +torch=1.6.0 +torchvision=0.7.0 +pyg_torch=1.6.0 + +# Install TensorFlow dependencies +pip install tensorflow==$tensorflow tensorflow-probability==$tensorflow_probability + +# Install PyTorch dependencies +if [ "$(uname)" == 'Darwin' ]; +then + # For MacOSX + pip install torch==$torch torchvision==$torchvision +else + pip install torch==$torch+$cuda torchvision==$torchvision+$cuda -f https://download.pytorch.org/whl/torch_stable.html +fi + +# Install PyTorch Geometric and DGL dependencies +pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-sparse -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-cluster -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-spline-conv -f https://pytorch-geometric.com/whl/torch-${torch}+${cuda}.html +pip install torch-geometric +pip install $dgl_pkg +pip install dgllife +# install transformers package +pip install transformers diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 
0000000000000000000000000000000000000000..88f1d07315dcea5246a3d57dd3fceee19438d3f5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,28 @@ +[tool:pytest] +markers = + slow: marks tests as slow (deselect with '-m "not slow"') + serial + +[mypy] +ignore_missing_imports = True + +[flake8] +ignore = + E111, # Indentation is not a multiple of four + E114, # Indentation is not a multiple of four (comment) + E121, # continuation line under-indented for hanging indent + E124, # Closing bracket does not match visual indentation + E126, # continuation line over-indented for hanging indent + E125, # Continuation line with same indent as next logical line + E127, # Continuation line over-indented for visual indent + E129, # Visually indented line with same indent as next logical line + E502, # the backslash is redundant between bracket + W503, # Line break before binary operator + W504, # Line break after binary operator + W605, # invalid escape sequenc + E722 # do not use bare 'except' +max-line-length = 300 + +[yapf] +based_on_style = google +indent_width = 2 diff --git a/setup.py b/setup.py index caf2cf2a649850b432985cdd50c089faa4d29268..6449013d913d701696962fe0c9b1edae66e2552f 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,14 @@ +import sys +import time from setuptools import setup, find_packages +if '--release' in sys.argv: + IS_RELEASE = True + sys.argv.remove('--release') +else: + # Build a nightly package by default. 
+ IS_RELEASE = False + # get the version from deepchem/__init__.py def _get_version(): @@ -8,7 +17,11 @@ def _get_version(): if line.startswith('__version__'): g = {} exec(line, g) - return g['__version__'] + base = g['__version__'] + # nightly version string .devYearMonthDayHourMinute + return base if IS_RELEASE else \ + base + time.strftime("%Y%m%d%H%M%S") + raise ValueError('`__version__` not defined in `deepchem/__init__.py`') @@ -22,7 +35,7 @@ setup( 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', - 'License :: OSI Approved :: MIT', + 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', @@ -33,6 +46,9 @@ setup( quantum chemistry, and the life sciences.', keywords=[ 'deepchem', + 'chemistry', + 'biology', + 'materials-science', 'life-science', 'drug-discovery', ],