diff --git a/.benchmark_pattern b/.benchmark_pattern
old mode 100644
new mode 100755
diff --git a/.gitignore b/.gitignore
old mode 100644
new mode 100755
index a41103d0b7..3387b64eee
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,229 @@
+
+# Created by https://www.gitignore.io/api/emacs,macos,python,jupyternotebook,jupyternotebooks
+# Edit at https://www.gitignore.io/?templates=emacs,macos,python,jupyternotebook,jupyternotebooks
+
+### Emacs ###
+# -*- mode: gitignore; -*-
+*~
+\#*\#
+/.emacs.desktop
+/.emacs.desktop.lock
+*.elc
+auto-save-list
+tramp
+.\#*
+
+# Org-mode
+.org-id-locations
+*_archive
+
+# flymake-mode
+*_flymake.*
+
+# eshell files
+/eshell/history
+/eshell/lastdir
+
+# elpa packages
+/elpa/
+
+# reftex files
+*.rel
+
+# AUCTeX auto folder
+/auto/
+
+# cask packages
+.cask/
+dist/
+
+# Flycheck
+flycheck_*.el
+
+# server auth directory
+/server/
+
+# projectiles files
+.projectile
+
+# directory configuration
+.dir-locals.el
+
+# network security
+/network-security.data
+
+
+### JupyterNotebook ###
+.ipynb_checkpoints
+*/.ipynb_checkpoints/*
+
+# Remove previous ipynb_checkpoints
+# git rm -r .ipynb_checkpoints/
+#
+
+### JupyterNotebooks ###
+# gitignore template for Jupyter Notebooks
+# website: http://jupyter.org/
+
+
+# Remove previous ipynb_checkpoints
+# git rm -r .ipynb_checkpoints/
+#
+
+### macOS ###
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+### Python Patch ###
+.venv/
+
+# End of https://www.gitignore.io/api/emacs,macos,python,jupyternotebook,jupyternotebooks
+
+
+# -------------------------------------------------------------------------------
+# ==================
+# OpenAI Settings
+# ==================
+
*.swp
*.pyc
*.pkl
@@ -34,3 +260,6 @@ src
.cache
MUJOCO_LOG.TXT
+TRAIN.sh
+MAKE_TRAINING_DATA.sh
+projection/*.sh
diff --git a/.gitmodules b/.gitmodules
new file mode 100755
index 0000000000..7ad9e4aadf
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "mujoco-py"]
+ path = mujoco-py
+ url = git@github.com:openai/mujoco-py.git
diff --git a/.travis.yml b/.travis.yml
old mode 100644
new mode 100755
diff --git a/Dockerfile b/Dockerfile
old mode 100644
new mode 100755
diff --git a/LICENSE b/LICENSE
old mode 100644
new mode 100755
diff --git a/README.md b/README.md
old mode 100644
new mode 100755
index e4f8697d09..2d5ef49525
--- a/README.md
+++ b/README.md
@@ -1,26 +1,54 @@
- [![Build status](https://travis-ci.org/openai/baselines.svg?branch=master)](https://travis-ci.org/openai/baselines)
+# HumanWareFundamental: Synergy Team
+This repository is a clone of [openai/baselines](https://github.com/openai/baselines), modified for our research. Do not open pull requests against the original repository.
-# Baselines
-OpenAI Baselines is a set of high-quality implementations of reinforcement learning algorithms.
+## Train model with DDPG
+Create a trained model with the following command. Use `--logdir_tf` to specify the directory where the TensorFlow model is saved.
+
+Example:
+```
+python -m baselines.her.experiment.train \
+ --env GraspBlock-v0 \
+ --num_cpu 1 \
+ --n_epochs 100 \
+ --logdir_tf < Directory path to save tensorflow model >
+```
+
+
+## Action and Q-value Generation
+Load a trained model and dump actions and related outputs to a specified directory with the following command. Specify the trained model with `--logdir_tf` and the directory where actions, Q-values, etc. are written with `--logdir_aq`.
-These algorithms will make it easier for the research community to replicate, refine, and identify new ideas, and will create good baselines to build research on top of. Our DQN implementation and its variants are roughly on par with the scores in published papers. We expect they will be used as a base around which new ideas can be added, and as a tool for comparing a new approach against existing ones.
-## Prerequisites
-Baselines requires python3 (>=3.5) with the development headers. You'll also need system packages CMake, OpenMPI and zlib. Those can be installed as follows
-### Ubuntu
-
-```bash
-sudo apt-get update && sudo apt-get install cmake libopenmpi-dev python3-dev zlib1g-dev
```
-
-### Mac OS X
-Installation of system packages on Mac requires [Homebrew](https://brew.sh). With Homebrew installed, run the following:
-```bash
-brew install cmake openmpi
+python -m baselines.her.experiment.test \
+ --env GraspBlock-v0 \
+ --num_cpu 1 --n_epochs 5 \
+ --logdir_tf < path to saved model > \
+ --logdir_aq < path to save actions etc... >
```
-
-## Virtual environment
+
+### Log File
+The log file contains the following items (a short loading sketch follows this list):
+
++ `goal/desired`: desired goal (`g`)
++ `goal/achieved`: achieved goal (`ag`)
++ `observation`: observation (`o`)
++ `action`: action, shape=[EpisodeNo, Batch, Sequence, env.action_space]
++ `Qvalue`: Q-value, shape=[EpisodeNo, Batch, Sequence, env.action_space]
++ `fc`: intermediate output of the policy network (fc2), shape=[EpisodeNo, Batch, Sequence, n_unit(=256)]
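+
+A minimal loading sketch, assuming an `epoch0.h5` written to the `--logdir_aq` directory by the test command above (the file path here is illustrative):
+
+```python
+import h5py
+
+# Hypothetical path; point this at a file inside your --logdir_aq directory.
+with h5py.File("ActionQvals/epoch0.h5", "r") as f:
+    print(f["goal/desired"].shape)   # desired goals (g)
+    print(f["goal/achieved"].shape)  # achieved goals (ag)
+    print(f["observation"].shape)    # observations (o)
+    print(f["action"].shape)         # actions
+    print(f["Qvalue"].shape)         # Q-values
+    fc = f["fc"][:]                  # fc2 activations as a numpy array
+```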
+
+
+
+
+
+--------------------------------------
+## Memo
+TBA
+
+
+----------------------------------------
+## Initial Setup
+### Virtual environment
From the general python package sanity perspective, it is a good idea to use virtual environments (virtualenvs) to make sure packages from different projects do not interfere with each other. You can install virtualenv (which is itself a pip package) via
```bash
pip install virtualenv
@@ -37,7 +65,7 @@ To activate a virtualenv:
More thorough tutorial on virtualenvs and options can be found [here](https://virtualenv.pypa.io/en/stable/)
-## Installation
+### Installation
- Clone the repo and cd into it:
```bash
git clone https://github.com/openai/baselines.git
@@ -59,89 +87,16 @@ More thorough tutorial on virtualenvs and options can be found [here](https://vi
pip install -e .
```
-### MuJoCo
-Some of the baselines examples use [MuJoCo](http://www.mujoco.org) (multi-joint dynamics in contact) physics simulator, which is proprietary and requires binaries and a license (temporary 30-day license can be obtained from [www.mujoco.org](http://www.mujoco.org)). Instructions on setting up MuJoCo can be found [here](https://github.com/openai/mujoco-py)
-
-## Testing the installation
-All unit tests in baselines can be run using pytest runner:
-```
-pip install pytest
-pytest
-```
+- Install our custom environment (gym-grasp):
-## Training models
-Most of the algorithms in baselines repo are used as follows:
```bash
-python -m baselines.run --alg= --env= [additional arguments]
+cd gym-grasp
+pip install -e .
```
-### Example 1. PPO with MuJoCo Humanoid
-For instance, to train a fully-connected network controlling MuJoCo humanoid using PPO2 for 20M timesteps
-```bash
-python -m baselines.run --alg=ppo2 --env=Humanoid-v2 --network=mlp --num_timesteps=2e7
-```
-Note that for mujoco environments fully-connected network is default, so we can omit `--network=mlp`
-The hyperparameters for both network and the learning algorithm can be controlled via the command line, for instance:
-```bash
-python -m baselines.run --alg=ppo2 --env=Humanoid-v2 --network=mlp --num_timesteps=2e7 --ent_coef=0.1 --num_hidden=32 --num_layers=3 --value_network=copy
-```
-will set entropy coefficient to 0.1, and construct fully connected network with 3 layers with 32 hidden units in each, and create a separate network for value function estimation (so that its parameters are not shared with the policy network, but the structure is the same)
-
-See docstrings in [common/models.py](baselines/common/models.py) for description of network parameters for each type of model, and
-docstring for [baselines/ppo2/ppo2.py/learn()](baselines/ppo2/ppo2.py#L152) for the description of the ppo2 hyperparamters.
-
-### Example 2. DQN on Atari
-DQN with Atari is at this point a classics of benchmarks. To run the baselines implementation of DQN on Atari Pong:
-```
-python -m baselines.run --alg=deepq --env=PongNoFrameskip-v4 --num_timesteps=1e6
-```
-
-## Saving, loading and visualizing models
-The algorithms serialization API is not properly unified yet; however, there is a simple method to save / restore trained models.
-`--save_path` and `--load_path` command-line option loads the tensorflow state from a given path before training, and saves it after the training, respectively.
-Let's imagine you'd like to train ppo2 on Atari Pong, save the model and then later visualize what has it learnt.
-```bash
-python -m baselines.run --alg=ppo2 --env=PongNoFrameskip-v4 --num_timesteps=2e7 --save_path=~/models/pong_20M_ppo2
-```
-This should get to the mean reward per episode about 20. To load and visualize the model, we'll do the following - load the model, train it for 0 steps, and then visualize:
-```bash
-python -m baselines.run --alg=ppo2 --env=PongNoFrameskip-v4 --num_timesteps=0 --load_path=~/models/pong_20M_ppo2 --play
-```
-
-*NOTE:* At the moment Mujoco training uses VecNormalize wrapper for the environment which is not being saved correctly; so loading the models trained on Mujoco will not work well if the environment is recreated. If necessary, you can work around that by replacing RunningMeanStd by TfRunningMeanStd in [baselines/common/vec_env/vec_normalize.py](baselines/common/vec_env/vec_normalize.py#L12). This way, mean and std of environment normalizing wrapper will be saved in tensorflow variables and included in the model file; however, training is slower that way - hence not including it by default
-
-## Loading and vizualizing learning curves and other training metrics
-See [here](docs/viz/viz.ipynb) for instructions on how to load and display the training data.
-## Subpackages
-- [A2C](baselines/a2c)
-- [ACER](baselines/acer)
-- [ACKTR](baselines/acktr)
-- [DDPG](baselines/ddpg)
-- [DQN](baselines/deepq)
-- [GAIL](baselines/gail)
-- [HER](baselines/her)
-- [PPO1](baselines/ppo1) (obsolete version, left here temporarily)
-- [PPO2](baselines/ppo2)
-- [TRPO](baselines/trpo_mpi)
+### MuJoCo
+Some of the baselines examples use [MuJoCo](http://www.mujoco.org) (multi-joint dynamics in contact) physics simulator, which is proprietary and requires binaries and a license (temporary 30-day license can be obtained from [www.mujoco.org](http://www.mujoco.org)). Instructions on setting up MuJoCo can be found [here](https://github.com/openai/mujoco-py)
-## Benchmarks
-Results of benchmarks on Mujoco (1M timesteps) and Atari (10M timesteps) are available
-[here for Mujoco](https://htmlpreview.github.com/?https://github.com/openai/baselines/blob/master/benchmarks_mujoco1M.htm)
-and
-[here for Atari](https://htmlpreview.github.com/?https://github.com/openai/baselines/blob/master/benchmarks_atari10M.htm)
-respectively. Note that these results may be not on the latest version of the code, particular commit hash with which results were obtained is specified on the benchmarks page.
-
-To cite this repository in publications:
-
- @misc{baselines,
- author = {Dhariwal, Prafulla and Hesse, Christopher and Klimov, Oleg and Nichol, Alex and Plappert, Matthias and Radford, Alec and Schulman, John and Sidor, Szymon and Wu, Yuhuai and Zhokhov, Peter},
- title = {OpenAI Baselines},
- year = {2017},
- publisher = {GitHub},
- journal = {GitHub repository},
- howpublished = {\url{https://github.com/openai/baselines}},
- }
-
diff --git a/baselines/__init__.py b/baselines/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/a2c/README.md b/baselines/a2c/README.md
old mode 100644
new mode 100755
diff --git a/baselines/a2c/__init__.py b/baselines/a2c/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/a2c/a2c.py b/baselines/a2c/a2c.py
old mode 100644
new mode 100755
diff --git a/baselines/a2c/runner.py b/baselines/a2c/runner.py
old mode 100644
new mode 100755
diff --git a/baselines/a2c/utils.py b/baselines/a2c/utils.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/README.md b/baselines/acer/README.md
old mode 100644
new mode 100755
diff --git a/baselines/acer/__init__.py b/baselines/acer/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/acer.py b/baselines/acer/acer.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/buffer.py b/baselines/acer/buffer.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/defaults.py b/baselines/acer/defaults.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/policies.py b/baselines/acer/policies.py
old mode 100644
new mode 100755
diff --git a/baselines/acer/runner.py b/baselines/acer/runner.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/README.md b/baselines/acktr/README.md
old mode 100644
new mode 100755
diff --git a/baselines/acktr/__init__.py b/baselines/acktr/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/acktr.py b/baselines/acktr/acktr.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/defaults.py b/baselines/acktr/defaults.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/kfac.py b/baselines/acktr/kfac.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/kfac_utils.py b/baselines/acktr/kfac_utils.py
old mode 100644
new mode 100755
diff --git a/baselines/acktr/utils.py b/baselines/acktr/utils.py
old mode 100644
new mode 100755
diff --git a/baselines/bench/__init__.py b/baselines/bench/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/bench/benchmarks.py b/baselines/bench/benchmarks.py
old mode 100644
new mode 100755
diff --git a/baselines/bench/monitor.py b/baselines/bench/monitor.py
old mode 100644
new mode 100755
diff --git a/baselines/common/__init__.py b/baselines/common/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/common/atari_wrappers.py b/baselines/common/atari_wrappers.py
old mode 100644
new mode 100755
diff --git a/baselines/common/cg.py b/baselines/common/cg.py
old mode 100644
new mode 100755
diff --git a/baselines/common/cmd_util.py b/baselines/common/cmd_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/console_util.py b/baselines/common/console_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/dataset.py b/baselines/common/dataset.py
old mode 100644
new mode 100755
diff --git a/baselines/common/distributions.py b/baselines/common/distributions.py
old mode 100644
new mode 100755
diff --git a/baselines/common/input.py b/baselines/common/input.py
old mode 100644
new mode 100755
diff --git a/baselines/common/math_util.py b/baselines/common/math_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/misc_util.py b/baselines/common/misc_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/models.py b/baselines/common/models.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_adam.py b/baselines/common/mpi_adam.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_adam_optimizer.py b/baselines/common/mpi_adam_optimizer.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_fork.py b/baselines/common/mpi_fork.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_moments.py b/baselines/common/mpi_moments.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_running_mean_std.py b/baselines/common/mpi_running_mean_std.py
old mode 100644
new mode 100755
diff --git a/baselines/common/mpi_util.py b/baselines/common/mpi_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/plot_util.py b/baselines/common/plot_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/policies.py b/baselines/common/policies.py
old mode 100644
new mode 100755
diff --git a/baselines/common/retro_wrappers.py b/baselines/common/retro_wrappers.py
old mode 100644
new mode 100755
diff --git a/baselines/common/runners.py b/baselines/common/runners.py
old mode 100644
new mode 100755
diff --git a/baselines/common/running_mean_std.py b/baselines/common/running_mean_std.py
old mode 100644
new mode 100755
diff --git a/baselines/common/schedules.py b/baselines/common/schedules.py
old mode 100644
new mode 100755
diff --git a/baselines/common/segment_tree.py b/baselines/common/segment_tree.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/__init__.py b/baselines/common/tests/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/envs/__init__.py b/baselines/common/tests/envs/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/envs/fixed_sequence_env.py b/baselines/common/tests/envs/fixed_sequence_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/envs/identity_env.py b/baselines/common/tests/envs/identity_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/envs/mnist_env.py b/baselines/common/tests/envs/mnist_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_cartpole.py b/baselines/common/tests/test_cartpole.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_doc_examples.py b/baselines/common/tests/test_doc_examples.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_env_after_learn.py b/baselines/common/tests/test_env_after_learn.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_fixed_sequence.py b/baselines/common/tests/test_fixed_sequence.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_identity.py b/baselines/common/tests/test_identity.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_mnist.py b/baselines/common/tests/test_mnist.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_schedules.py b/baselines/common/tests/test_schedules.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_segment_tree.py b/baselines/common/tests/test_segment_tree.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_serialization.py b/baselines/common/tests/test_serialization.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/test_tf_util.py b/baselines/common/tests/test_tf_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tests/util.py b/baselines/common/tests/util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tf_util.py b/baselines/common/tf_util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/tile_images.py b/baselines/common/tile_images.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/__init__.py b/baselines/common/vec_env/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/dummy_vec_env.py b/baselines/common/vec_env/dummy_vec_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/shmem_vec_env.py b/baselines/common/vec_env/shmem_vec_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/subproc_vec_env.py b/baselines/common/vec_env/subproc_vec_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/test_vec_env.py b/baselines/common/vec_env/test_vec_env.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/test_video_recorder.py b/baselines/common/vec_env/test_video_recorder.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/util.py b/baselines/common/vec_env/util.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/vec_frame_stack.py b/baselines/common/vec_env/vec_frame_stack.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/vec_monitor.py b/baselines/common/vec_env/vec_monitor.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/vec_normalize.py b/baselines/common/vec_env/vec_normalize.py
old mode 100644
new mode 100755
diff --git a/baselines/common/vec_env/vec_video_recorder.py b/baselines/common/vec_env/vec_video_recorder.py
old mode 100644
new mode 100755
diff --git a/baselines/custom_logger.py b/baselines/custom_logger.py
new file mode 100755
index 0000000000..ee17699142
--- /dev/null
+++ b/baselines/custom_logger.py
@@ -0,0 +1,15 @@
+import datetime as dt
+
+"""
+For the color escape codes, see this site:
++ https://qiita.com/ironguy/items/8fb3ddadb3c4c986496d
+"""
+
+class CustomLoggerObject(object):
+ def __init__(self):
+ self.LOG_FMT = "{color}| {asctime} | {levelname:<5s} | {message} \033[0m"
+
+ def info(self, msg):
+ asctime = dt.datetime.now().strftime("%Y/%m/%d %H:%M:%S")
+ print(self.LOG_FMT.format(color="\033[37m", asctime=asctime, levelname="INFO", message=msg))
+
diff --git a/baselines/ddpg/ddpg.py b/baselines/ddpg/ddpg.py
index 37551d4931..35c8e17782 100755
--- a/baselines/ddpg/ddpg.py
+++ b/baselines/ddpg/ddpg.py
@@ -11,6 +11,11 @@
import baselines.common.tf_util as U
from baselines import logger
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
import numpy as np
try:
@@ -118,6 +123,7 @@ def learn(network, env,
start_time = time.time()
+ clogger.info("Start Training [nb_epochs={}]".format(nb_epochs))
epoch_episode_rewards = []
epoch_episode_steps = []
@@ -125,6 +131,7 @@ def learn(network, env,
epoch_qs = []
epoch_episodes = 0
for epoch in range(nb_epochs):
+ clogger.info("Start Epoch={}".format(epoch))
for cycle in range(nb_epoch_cycles):
# Perform rollouts.
if nenvs > 1:
@@ -134,7 +141,7 @@ def learn(network, env,
for t_rollout in range(nb_rollout_steps):
# Predict next action.
action, q, _, _ = agent.step(obs, apply_noise=True, compute_Q=True)
-
+ clogger.info("action.shape={}, q={}".format(action.shape, q))
# Execute next action.
if rank == 0 and render:
env.render()
@@ -210,6 +217,7 @@ def learn(network, env,
mpi_size = MPI.COMM_WORLD.Get_size()
else:
mpi_size = 1
+ clogger.info("Finish Training [elapsed={:.1f} sec]".format(time.time() - start_time))
# Log stats.
# XXX shouldn't call np.mean on variable length lists
diff --git a/baselines/deepq/README.md b/baselines/deepq/README.md
old mode 100644
new mode 100755
diff --git a/baselines/deepq/__init__.py b/baselines/deepq/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/build_graph.py b/baselines/deepq/build_graph.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/deepq.py b/baselines/deepq/deepq.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/defaults.py b/baselines/deepq/defaults.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/__init__.py b/baselines/deepq/experiments/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/custom_cartpole.py b/baselines/deepq/experiments/custom_cartpole.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/enjoy_cartpole.py b/baselines/deepq/experiments/enjoy_cartpole.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/enjoy_mountaincar.py b/baselines/deepq/experiments/enjoy_mountaincar.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/enjoy_pong.py b/baselines/deepq/experiments/enjoy_pong.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/train_cartpole.py b/baselines/deepq/experiments/train_cartpole.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/train_mountaincar.py b/baselines/deepq/experiments/train_mountaincar.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/experiments/train_pong.py b/baselines/deepq/experiments/train_pong.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/models.py b/baselines/deepq/models.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/replay_buffer.py b/baselines/deepq/replay_buffer.py
old mode 100644
new mode 100755
diff --git a/baselines/deepq/utils.py b/baselines/deepq/utils.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/README.md b/baselines/gail/README.md
old mode 100644
new mode 100755
diff --git a/baselines/gail/__init__.py b/baselines/gail/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/adversary.py b/baselines/gail/adversary.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/behavior_clone.py b/baselines/gail/behavior_clone.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/dataset/__init__.py b/baselines/gail/dataset/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/dataset/mujoco_dset.py b/baselines/gail/dataset/mujoco_dset.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/gail-eval.py b/baselines/gail/gail-eval.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/mlp_policy.py b/baselines/gail/mlp_policy.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HalfCheetah-normalized-deterministic-scores.png b/baselines/gail/result/HalfCheetah-normalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HalfCheetah-normalized-stochastic-scores.png b/baselines/gail/result/HalfCheetah-normalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HalfCheetah-unnormalized-deterministic-scores.png b/baselines/gail/result/HalfCheetah-unnormalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HalfCheetah-unnormalized-stochastic-scores.png b/baselines/gail/result/HalfCheetah-unnormalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Hopper-normalized-deterministic-scores.png b/baselines/gail/result/Hopper-normalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Hopper-normalized-stochastic-scores.png b/baselines/gail/result/Hopper-normalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Hopper-unnormalized-deterministic-scores.png b/baselines/gail/result/Hopper-unnormalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Hopper-unnormalized-stochastic-scores.png b/baselines/gail/result/Hopper-unnormalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Humanoid-normalized-deterministic-scores.png b/baselines/gail/result/Humanoid-normalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Humanoid-normalized-stochastic-scores.png b/baselines/gail/result/Humanoid-normalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Humanoid-unnormalized-deterministic-scores.png b/baselines/gail/result/Humanoid-unnormalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Humanoid-unnormalized-stochastic-scores.png b/baselines/gail/result/Humanoid-unnormalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HumanoidStandup-normalized-deterministic-scores.png b/baselines/gail/result/HumanoidStandup-normalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HumanoidStandup-normalized-stochastic-scores.png b/baselines/gail/result/HumanoidStandup-normalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HumanoidStandup-unnormalized-deterministic-scores.png b/baselines/gail/result/HumanoidStandup-unnormalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/HumanoidStandup-unnormalized-stochastic-scores.png b/baselines/gail/result/HumanoidStandup-unnormalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Walker2d-normalized-deterministic-scores.png b/baselines/gail/result/Walker2d-normalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Walker2d-normalized-stochastic-scores.png b/baselines/gail/result/Walker2d-normalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Walker2d-unnormalized-deterministic-scores.png b/baselines/gail/result/Walker2d-unnormalized-deterministic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/Walker2d-unnormalized-stochastic-scores.png b/baselines/gail/result/Walker2d-unnormalized-stochastic-scores.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/gail-result.md b/baselines/gail/result/gail-result.md
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/halfcheetah-training.png b/baselines/gail/result/halfcheetah-training.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/hopper-training.png b/baselines/gail/result/hopper-training.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/humanoid-training.png b/baselines/gail/result/humanoid-training.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/humanoidstandup-training.png b/baselines/gail/result/humanoidstandup-training.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/result/walker2d-training.png b/baselines/gail/result/walker2d-training.png
old mode 100644
new mode 100755
diff --git a/baselines/gail/run_mujoco.py b/baselines/gail/run_mujoco.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/statistics.py b/baselines/gail/statistics.py
old mode 100644
new mode 100755
diff --git a/baselines/gail/trpo_mpi.py b/baselines/gail/trpo_mpi.py
old mode 100644
new mode 100755
diff --git a/baselines/her/README.md b/baselines/her/README.md
old mode 100644
new mode 100755
diff --git a/baselines/her/__init__.py b/baselines/her/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/her/actor_critic.py b/baselines/her/actor_critic.py
old mode 100644
new mode 100755
index d5443fe0c3..596b234f93
--- a/baselines/her/actor_critic.py
+++ b/baselines/her/actor_critic.py
@@ -32,8 +32,33 @@ def __init__(self, inputs_tf, dimo, dimg, dimu, max_u, o_stats, g_stats, hidden,
# Networks.
with tf.variable_scope('pi'):
- self.pi_tf = self.max_u * tf.tanh(nn(
- input_pi, [self.hidden] * self.layers + [self.dimu]))
+ # self.pi_tf = self.max_u * tf.tanh(nn(
+ # input_pi, [self.hidden] * self.layers + [self.dimu]))
+
+ # 3-Layers FC Network
+ ## FC1
+ fc1 = tf.layers.dense(inputs=input_pi,
+ units=self.hidden,
+ kernel_initializer=tf.contrib.layers.xavier_initializer(),
+ reuse=None,
+ name='fc1')
+ fc1 = tf.nn.relu(fc1)
+ ## FC2
+ fc2 = tf.layers.dense(inputs=fc1,
+ units=self.hidden,
+ kernel_initializer=tf.contrib.layers.xavier_initializer(),
+ reuse=None,
+ name='fc2')
+ fc2 = tf.nn.relu(fc2)
+ ## FC3
+ fc3 = tf.layers.dense(inputs=fc2,
+ units=self.dimu,
+ kernel_initializer=tf.contrib.layers.xavier_initializer(),
+ reuse=None,
+ name='fc3')
+ self.pi_tf_fc2 = fc2
+ self.pi_tf = fc3
+
with tf.variable_scope('Q'):
# for policy training
input_Q = tf.concat(axis=1, values=[o, g, self.pi_tf / self.max_u])
diff --git a/baselines/her/ddpg.py b/baselines/her/ddpg.py
old mode 100644
new mode 100755
index 96384da4c4..6a06d96f6d
--- a/baselines/her/ddpg.py
+++ b/baselines/her/ddpg.py
@@ -120,13 +120,13 @@ def _preprocess_og(self, o, ag, g):
return o, g
def get_actions(self, o, ag, g, noise_eps=0., random_eps=0., use_target_net=False,
- compute_Q=False):
+ compute_Q=False,):
o, g = self._preprocess_og(o, ag, g)
policy = self.target if use_target_net else self.main
# values to compute
vals = [policy.pi_tf]
if compute_Q:
- vals += [policy.Q_pi_tf]
+ vals += [policy.Q_pi_tf, policy.pi_tf_fc2]
# feed
feed = {
policy.o_tf: o.reshape(-1, self.dimo),
@@ -150,6 +150,7 @@ def get_actions(self, o, ag, g, noise_eps=0., random_eps=0., use_target_net=Fals
return ret[0]
else:
return ret
+
def initDemoBuffer(self, demoDataFile, update_stats=True): #function that initializes the demo buffer
diff --git a/baselines/her/experiment/__init__.py b/baselines/her/experiment/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/her/experiment/config.py b/baselines/her/experiment/config.py
old mode 100644
new mode 100755
index 8cc36e6ee1..5ac6dadb02
--- a/baselines/her/experiment/config.py
+++ b/baselines/her/experiment/config.py
@@ -1,10 +1,18 @@
import numpy as np
import gym
+import gym_grasp
from baselines import logger
from baselines.her.ddpg import DDPG
from baselines.her.her import make_sample_her_transitions
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
+
+
DEFAULT_ENV_PARAMS = {
'FetchReach-v1': {
@@ -170,12 +178,16 @@ def configure_dims(params):
env = cached_make_env(params['make_env'])
env.reset()
obs, _, _, info = env.step(env.action_space.sample())
+
dims = {
'o': obs['observation'].shape[0],
'u': env.action_space.shape[0],
'g': obs['desired_goal'].shape[0],
}
+ clogger.info("input_dims = {}".format(dims))
+ clogger.info("env.action_space={}".format(env.action_space))
+ clogger.info("env.observation_space={}".format(env.observation_space))
for key, value in info.items():
value = np.array(value)
if value.ndim == 0:
diff --git a/baselines/her/experiment/data_generation/fetch_data_generation.py b/baselines/her/experiment/data_generation/fetch_data_generation.py
old mode 100644
new mode 100755
diff --git a/baselines/her/experiment/play.py b/baselines/her/experiment/play.py
old mode 100644
new mode 100755
diff --git a/baselines/her/experiment/plot.py b/baselines/her/experiment/plot.py
old mode 100644
new mode 100755
diff --git a/baselines/her/experiment/test.py b/baselines/her/experiment/test.py
new file mode 100755
index 0000000000..6095643c09
--- /dev/null
+++ b/baselines/her/experiment/test.py
@@ -0,0 +1,241 @@
+import os
+import sys
+
+import click
+import numpy as np
+import json
+from mpi4py import MPI
+
+from baselines import logger
+from baselines.common import set_global_seeds
+from baselines.common.mpi_moments import mpi_moments
+import baselines.her.experiment.config as config
+from baselines.her.rollout import RolloutWorker
+from baselines.her.util import mpi_fork
+
+from subprocess import CalledProcessError
+import h5py
+
+
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
+
+
+def mpi_average(value):
+ if value == []:
+ value = [0.]
+ if not isinstance(value, list):
+ value = [value]
+ return mpi_moments(np.array(value))[0]
+
+
+def test(policy, rollout_worker, evaluator,
+ n_epochs, n_test_rollouts, n_cycles, n_batches, policy_save_interval,
+ save_policies, demo_file, logdir_aq, **kwargs):
+ clogger.info("Logdir for actions & Q-values: {}".format(logdir_aq))
+ rank = MPI.COMM_WORLD.Get_rank()
+
+ latest_policy_path = os.path.join(logger.get_dir(), 'policy_latest.pkl')
+ best_policy_path = os.path.join(logger.get_dir(), 'policy_best.pkl')
+ periodic_policy_path = os.path.join(logger.get_dir(), 'policy_{}.pkl')
+
+ logger.info("Testing...")
+ best_success_rate = -1
+
+ if policy.bc_loss == 1: policy.initDemoBuffer(demo_file) #initialize demo buffer if training with demonstrations
+ for epoch in range(n_epochs):
+ clogger.info("Start: Epoch {}/{}".format(epoch, n_epochs))
+
+ # test
+ evaluator.clear_history()
+ episode_box = {"g":[],"ag":[],"o":[],"u":[],"q":[], "fc":[]}
+ for _ in range(n_test_rollouts):
+ episode = evaluator.generate_rollouts(is_train=False)
+ clogger.info("Episode = {}".format(episode.keys()))
+ for key in episode.keys():
+ # clogger.info(" - {}: {}".format(key, episode[key].shape))
+ if key in episode_box.keys():
+ episode_box[key].append(episode[key][np.newaxis, :])
+
+
+ # Dump episode info
+ for key in episode_box.keys():
+ # episode_box[key].append(episode[key])
+ l = len(episode[key])
+ episode_box[key] = np.concatenate(episode_box[key], axis=0)
+ clogger.info(" - {:<4}: {:>4} => {}".format(key, l, episode_box[key].shape))
+
+ filename = os.path.join(logdir_aq, 'epoch{}.h5'.format(epoch))
+ with h5py.File(filename, 'w') as f:
+ f.create_group('goal')
+ f['goal'].create_dataset('desired', data=episode_box["g"])
+ f['goal'].create_dataset('achieved', data=episode_box["ag"])
+ f.create_dataset('observation', data=episode_box["o"])
+ f.create_dataset('action', data=episode_box["u"])
+ f.create_dataset('Qvalue', data=episode_box["q"])
+ f.create_dataset('fc', data=episode_box["fc"])
+
+
+
+ # record logs
+ logger.record_tabular('epoch', epoch)
+ for key, val in evaluator.logs('test'):
+ logger.record_tabular(key, mpi_average(val))
+ # for key, val in rollout_worker.logs('train'):
+ # logger.record_tabular(key, mpi_average(val))
+ for key, val in policy.logs():
+ logger.record_tabular(key, mpi_average(val))
+
+ if rank == 0:
+ clogger.info("Show table")
+ logger.dump_tabular()
+
+ # save the policy if it's better than the previous ones
+ success_rate = mpi_average(evaluator.current_success_rate())
+ if rank == 0 and success_rate >= best_success_rate and save_policies:
+ best_success_rate = success_rate
+ logger.info('New best success rate: {}. Saving policy to {} ...'.format(best_success_rate, best_policy_path))
+ evaluator.save_policy(best_policy_path)
+ evaluator.save_policy(latest_policy_path)
+ if rank == 0 and policy_save_interval > 0 and epoch % policy_save_interval == 0 and save_policies:
+ policy_path = periodic_policy_path.format(epoch)
+ logger.info('Saving periodic policy to {} ...'.format(policy_path))
+ evaluator.save_policy(policy_path)
+
+ # make sure that different threads have different seeds
+ local_uniform = np.random.uniform(size=(1,))
+ root_uniform = local_uniform.copy()
+ MPI.COMM_WORLD.Bcast(root_uniform, root=0)
+ if rank != 0:
+ assert local_uniform[0] != root_uniform[0]
+
+
+def launch(
+ env, logdir, n_epochs, num_cpu, seed, replay_strategy, policy_save_interval, clip_return,
+ demo_file, logdir_tf=None, logdir_aq=None, override_params={}, save_policies=True
+):
+ assert logdir_tf, "Test mode needs `logdir_tf`"
+ # Fork for multi-CPU MPI implementation.
+ if num_cpu > 1:
+ try:
+ whoami = mpi_fork(num_cpu, ['--bind-to', 'core'])
+ except CalledProcessError:
+ # fancy version of mpi call failed, try simple version
+ whoami = mpi_fork(num_cpu)
+
+ if whoami == 'parent':
+ sys.exit(0)
+ import baselines.common.tf_util as U
+ U.single_threaded_session().__enter__()
+ rank = MPI.COMM_WORLD.Get_rank()
+
+ # Configure logging
+ if rank == 0:
+ if logdir or logger.get_dir() is None:
+ logger.configure(dir=logdir)
+ else:
+ logger.configure()
+ logdir = logger.get_dir()
+ assert logdir is not None
+ os.makedirs(logdir, exist_ok=True)
+
+ # Seed everything.
+ rank_seed = seed + 1000000 * rank
+ set_global_seeds(rank_seed)
+
+ # Prepare params.
+ params = config.DEFAULT_PARAMS
+ params['env_name'] = env
+ params['replay_strategy'] = replay_strategy
+ if env in config.DEFAULT_ENV_PARAMS:
+ params.update(config.DEFAULT_ENV_PARAMS[env]) # merge env-specific parameters in
+ params.update(**override_params) # makes it possible to override any parameter
+ with open(os.path.join(logger.get_dir(), 'params.json'), 'w') as f:
+ json.dump(params, f)
+ params = config.prepare_params(params)
+ config.log_params(params, logger=logger)
+
+ if num_cpu == 1:
+ logger.warn()
+ logger.warn('*** Warning ***')
+ logger.warn(
+ 'You are running HER with just a single MPI worker. This will work, but the ' +
+ 'experiments that we report in Plappert et al. (2018, https://arxiv.org/abs/1802.09464) ' +
+ 'were obtained with --num_cpu 19. This makes a significant difference and if you ' +
+ 'are looking to reproduce those results, be aware of this. Please also refer to ' +
+ 'https://github.com/openai/baselines/issues/314 for further details.')
+ logger.warn('****************')
+ logger.warn()
+
+ dims = config.configure_dims(params)
+ policy = config.configure_ddpg(dims=dims, params=params, clip_return=clip_return)
+ # Load Learned Parameters
+ if logdir_tf:
+ import tensorflow as tf
+ saver = tf.train.Saver()
+ saver.restore(policy.sess, logdir_tf)
+
+ rollout_params = {
+ 'exploit': False,
+ 'use_target_net': False,
+ 'use_demo_states': True,
+ 'compute_Q': False,
+ 'T': params['T'],
+ }
+
+ eval_params = {
+ 'exploit': True,
+ 'use_target_net': params['test_with_polyak'],
+ 'use_demo_states': False,
+ 'compute_Q': True,
+ 'T': params['T'],
+ }
+
+ for name in ['T', 'rollout_batch_size', 'gamma', 'noise_eps', 'random_eps']:
+ rollout_params[name] = params[name]
+ eval_params[name] = params[name]
+
+ rollout_worker = RolloutWorker(params['make_env'], policy, dims, logger, **rollout_params)
+ rollout_worker.seed(rank_seed)
+
+ evaluator = RolloutWorker(params['make_env'], policy, dims, logger, **eval_params)
+ evaluator.seed(rank_seed)
+
+ # Log Directory for actions and qvalues
+ if not logdir_aq:
+ logdir_aq = os.path.join(logdir_tf, "ActionQvals")
+ if not os.path.exists(logdir_aq):
+ os.makedirs(logdir_aq)
+ clogger.info("Created logdir_aq at {}".format(logdir_aq))
+
+ test(
+ logdir=logdir, policy=policy, rollout_worker=rollout_worker,
+ evaluator=evaluator, n_epochs=n_epochs, n_test_rollouts=params['n_test_rollouts'],
+ n_cycles=params['n_cycles'], n_batches=params['n_batches'],
+ policy_save_interval=policy_save_interval, save_policies=save_policies, demo_file=demo_file,
+ logdir_aq=logdir_aq,
+ )
+
+
+@click.command()
+@click.option('--env', type=str, default='FetchReach-v1', help='the name of the OpenAI Gym environment that you want to train on')
+@click.option('--logdir', type=str, default=None, help='the path to where logs and policy pickles should go. If not specified, creates a folder in /tmp/')
+@click.option('--n_epochs', type=int, default=50, help='the number of training epochs to run')
+@click.option('--num_cpu', type=int, default=1, help='the number of CPU cores to use (using MPI)')
+@click.option('--seed', type=int, default=0, help='the random seed used to seed both the environment and the training code')
+@click.option('--policy_save_interval', type=int, default=5, help='the interval with which policy pickles are saved. If set to 0, only the best and latest policy will be pickled.')
+@click.option('--replay_strategy', type=click.Choice(['future', 'none']), default='future', help='the HER replay strategy to be used. "future" uses HER, "none" disables HER.')
+@click.option('--clip_return', type=int, default=1, help='whether or not returns should be clipped')
+@click.option('--demo_file', type=str, default = 'PATH/TO/DEMO/DATA/FILE.npz', help='demo data file path')
+@click.option('--logdir_tf', type=str, default=None, help='the path to load trained tf.variables from.')
+@click.option('--logdir_aq', type=str, default=None, help='the path to save actions and Q-values.')
+def main(**kwargs):
+ clogger.info("Main Func @her.experiment.test")
+ launch(**kwargs)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/baselines/her/experiment/train.py b/baselines/her/experiment/train.py
old mode 100644
new mode 100755
index 82a11f0ad6..ea37343b4d
--- a/baselines/her/experiment/train.py
+++ b/baselines/her/experiment/train.py
@@ -15,6 +15,12 @@
from subprocess import CalledProcessError
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
+
def mpi_average(value):
if value == []:
@@ -24,33 +30,62 @@ def mpi_average(value):
return mpi_moments(np.array(value))[0]
-def train(policy, rollout_worker, evaluator,
+def train(min_num, max_num, num_axis, reward_lambda, # nishimura
+ policy, rollout_worker, evaluator,
n_epochs, n_test_rollouts, n_cycles, n_batches, policy_save_interval,
- save_policies, demo_file, **kwargs):
+ save_policies, demo_file, logdir_init, **kwargs):
rank = MPI.COMM_WORLD.Get_rank()
latest_policy_path = os.path.join(logger.get_dir(), 'policy_latest.pkl')
best_policy_path = os.path.join(logger.get_dir(), 'policy_best.pkl')
periodic_policy_path = os.path.join(logger.get_dir(), 'policy_{}.pkl')
+ best_policy_grasp_path = os.path.join(logger.get_dir(), "grasp_dataset_on_best_policy.npy") # motoda
+ path_to_grasp_dataset = os.path.join(logger.get_dir(), "grasp_dataset_{}.npy") # motoda
+
+ all_success_grasp_path = os.path.join(logger.get_dir(), "total_grasp_dataset.npy") # motoda
+
+ # motoda --
+ success_u = []
+ init_success_u = []
+ path_to_default_grasp_dataset = "model/initial_grasp_pose.npy"
+ if os.path.exists(path_to_default_grasp_dataset):
+ init_success_u = np.load(path_to_default_grasp_dataset) # Load Initial Grasp Pose set
+ init_success_u = (init_success_u.tolist())
+ for tmp_suc in init_success_u:
+ success_u.append(tmp_suc[0:20])
+ print ("Num of grasp : {} ".format(len (success_u)))
+ else:
+ print ("No initial grasp pose")
+ # ---
+
+ # motoda --
+ all_success_u = [] # Dumping grasp_pose
+ # --
logger.info("Training...")
best_success_rate = -1
if policy.bc_loss == 1: policy.initDemoBuffer(demo_file) #initialize demo buffer if training with demonstrations
for epoch in range(n_epochs):
+ clogger.info("Start: Epoch {}/{}".format(epoch, n_epochs))
# train
rollout_worker.clear_history()
+ saved_success_u = []
for _ in range(n_cycles):
- episode = rollout_worker.generate_rollouts()
+ episode, success_tmp = rollout_worker.generate_rollouts(min_num=min_num,num_axis=num_axis,reward_lambda=reward_lambda,success_u=success_u) # nishimura, rough implementation
+ # clogger.info("Episode = {}".format(episode.keys()))
+ # for key in episode.keys():
+ # clogger.info(" - {}: {}".format(key, episode[key].shape))
policy.store_episode(episode)
for _ in range(n_batches):
policy.train()
policy.update_target_net()
+ saved_success_u += success_tmp # motoda
# test
evaluator.clear_history()
for _ in range(n_test_rollouts):
- evaluator.generate_rollouts()
+ evaluator.generate_rollouts(min_num=min_num,num_axis=num_axis,reward_lambda=reward_lambda) # nishimura, rough implementation
# record logs
logger.record_tabular('epoch', epoch)
@@ -71,10 +106,22 @@ def train(policy, rollout_worker, evaluator,
logger.info('New best success rate: {}. Saving policy to {} ...'.format(best_success_rate, best_policy_path))
evaluator.save_policy(best_policy_path)
evaluator.save_policy(latest_policy_path)
+ np.save(best_policy_grasp_path, success_u)
if rank == 0 and policy_save_interval > 0 and epoch % policy_save_interval == 0 and save_policies:
policy_path = periodic_policy_path.format(epoch)
logger.info('Saving periodic policy to {} ...'.format(policy_path))
evaluator.save_policy(policy_path)
+ # -- motoda added
+ grasp_path = path_to_grasp_dataset.format(epoch)
+ logger.info('Saving grasp poses: {} grasps. Saving to {} ...'.format(len(saved_success_u), grasp_path))
+ np.save(grasp_path, saved_success_u)
+ # --
+
+ # -- reset : grasp Pose -------
+ # success_u = [] # reset (motoda)
+ # -----------------------------
+
+ success_u = success_u[-max_num:] # nishimura
# make sure that different threads have different seeds
local_uniform = np.random.uniform(size=(1,))
@@ -83,10 +130,17 @@ def train(policy, rollout_worker, evaluator,
if rank != 0:
assert local_uniform[0] != root_uniform[0]
+ all_success_u += saved_success_u # motoda
+
+ # motoda --
+ # Dumping the total success_pose
+ logger.info('Saving grasp poses: {} grasps in total. Saving to {} ...'.format(len(all_success_u), all_success_grasp_path))
+ np.save(all_success_grasp_path, all_success_u)
+ # --
def launch(
- env, logdir, n_epochs, num_cpu, seed, replay_strategy, policy_save_interval, clip_return,
- demo_file, override_params={}, save_policies=True
+ env, logdir, n_epochs, min_num, max_num, num_axis, reward_lambda, num_cpu, seed, replay_strategy, policy_save_interval, clip_return,
+ demo_file, logdir_tf=None, override_params={}, save_policies=True, logdir_init=None
):
# Fork for multi-CPU MPI implementation.
if num_cpu > 1:
@@ -140,8 +194,26 @@ def launch(
logger.warn('****************')
logger.warn()
+
dims = config.configure_dims(params)
policy = config.configure_ddpg(dims=dims, params=params, clip_return=clip_return)
+ clogger.info(policy.sess)
+ # Prepare for Saving Network
+ clogger.info("logdir_tf: {}".format(logdir_tf))
+ if not logdir_tf == None:
+ clogger.info("Create tf.train.Saver()")
+ import tensorflow as tf
+ saver = tf.train.Saver()
+
+ # motoda added --
+ # Load Learned Parameters
+ if not logdir_init == None:
+ if logdir_tf == None:
+ import tensorflow as tf
+ saver = tf.train.Saver()
+ saver.restore(policy.sess, logdir_init)
+ clogger.info("Model was successfully loaded [logdir_init={}]".format(logdir_init))
+ # ---------------
rollout_params = {
'exploit': False,
@@ -170,23 +242,40 @@ def launch(
evaluator.seed(rank_seed)
train(
+ min_num=min_num, max_num=max_num, num_axis=num_axis, reward_lambda=reward_lambda, # nishimura
logdir=logdir, policy=policy, rollout_worker=rollout_worker,
evaluator=evaluator, n_epochs=n_epochs, n_test_rollouts=params['n_test_rollouts'],
n_cycles=params['n_cycles'], n_batches=params['n_batches'],
- policy_save_interval=policy_save_interval, save_policies=save_policies, demo_file=demo_file)
+ policy_save_interval=policy_save_interval, save_policies=save_policies, demo_file=demo_file, logdir_init=logdir_init)
+
+
+ # Save Trained Network
+ if logdir_tf:
+ clogger.info("Save tf.variables to {}".format(logdir_tf))
+ clogger.info(policy.sess)
+ saver.save(policy.sess, logdir_tf)
+ clogger.info("Model was successfully saved [logdir_tf={}]".format(logdir_tf))
@click.command()
@click.option('--env', type=str, default='FetchReach-v1', help='the name of the OpenAI Gym environment that you want to train on')
@click.option('--logdir', type=str, default=None, help='the path to where logs and policy pickles should go. If not specified, creates a folder in /tmp/')
@click.option('--n_epochs', type=int, default=50, help='the number of training epochs to run')
+@click.option('--min_num', type=int, default=100,help='minimum number of success_u whether to run PCA')
+@click.option('--max_num', type=int, default=10000,help='limit of success_u for PCA')
+@click.option('--num_axis', type=int, default=5,help='number of principal components to calculate the reward function')
+@click.option('--reward_lambda', type=float, default=1.,help='a weight for the second term of the reward function')
@click.option('--num_cpu', type=int, default=1, help='the number of CPU cores to use (using MPI)')
@click.option('--seed', type=int, default=0, help='the random seed used to seed both the environment and the training code')
@click.option('--policy_save_interval', type=int, default=5, help='the interval with which policy pickles are saved. If set to 0, only the best and latest policy will be pickled.')
@click.option('--replay_strategy', type=click.Choice(['future', 'none']), default='future', help='the HER replay strategy to be used. "future" uses HER, "none" disables HER.')
@click.option('--clip_return', type=int, default=1, help='whether or not returns should be clipped')
@click.option('--demo_file', type=str, default = 'PATH/TO/DEMO/DATA/FILE.npz', help='demo data file path')
+@click.option('--logdir_tf', type=str, default=None, help='the path to save tf.variables.')
+@click.option('--logdir_init', type=str, default='model/init', help='the path to load default parameters from.') # Metadata files are located at model/init
+
def main(**kwargs):
+ clogger.info("Main Func @her.experiment.train")
launch(**kwargs)
diff --git a/baselines/her/her.py b/baselines/her/her.py
old mode 100644
new mode 100755
diff --git a/baselines/her/normalizer.py b/baselines/her/normalizer.py
old mode 100644
new mode 100755
diff --git a/baselines/her/replay_buffer.py b/baselines/her/replay_buffer.py
old mode 100644
new mode 100755
diff --git a/baselines/her/rollout.py b/baselines/her/rollout.py
old mode 100644
new mode 100755
index e33b92add1..dd5780519a
--- a/baselines/her/rollout.py
+++ b/baselines/her/rollout.py
@@ -7,6 +7,13 @@
from baselines.her.util import convert_episode_to_batch_major, store_args
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
+
+
class RolloutWorker:
@store_args
@@ -61,10 +68,14 @@ def reset_all_rollouts(self):
for i in range(self.rollout_batch_size):
self.reset_rollout(i)
- def generate_rollouts(self):
+ def generate_rollouts(self, min_num, num_axis, reward_lambda, success_u=[], is_train=True): # nishimura
"""Performs `rollout_batch_size` rollouts in parallel for time horizon `T` with the current
policy acting on it accordingly.
"""
+
+ import sklearn
+ from sklearn.decomposition import PCA
+
self.reset_all_rollouts()
# compute observations
@@ -75,6 +86,8 @@ def generate_rollouts(self):
# generate episodes
obs, achieved_goals, acts, goals, successes = [], [], [], [], []
+ q_vals = []
+ fcs = []
info_values = [np.empty((self.T, self.rollout_batch_size, self.dims['info_' + key]), np.float32) for key in self.info_keys]
Qs = []
for t in range(self.T):
@@ -83,11 +96,15 @@ def generate_rollouts(self):
compute_Q=self.compute_Q,
noise_eps=self.noise_eps if not self.exploit else 0.,
random_eps=self.random_eps if not self.exploit else 0.,
- use_target_net=self.use_target_net)
-
+ use_target_net=self.use_target_net,)
+ # clogger.info("compute_Q[{}, {}]: policy_output: {}".format(self.compute_Q, t, policy_output))
+
if self.compute_Q:
- u, Q = policy_output
+ u, Q, fc = policy_output
Qs.append(Q)
+ q_vals.append(Q.copy())
+ if fc.ndim == 1:
+ fc = fc.reshape(1,-1)
else:
u = policy_output
@@ -95,18 +112,32 @@ def generate_rollouts(self):
# The non-batched case should still have a reasonable shape.
u = u.reshape(1, -1)
+
o_new = np.empty((self.rollout_batch_size, self.dims['o']))
ag_new = np.empty((self.rollout_batch_size, self.dims['g']))
success = np.zeros(self.rollout_batch_size)
+
# compute new states and observations
for i in range(self.rollout_batch_size):
+ # -- nishimura, rough implementation
+ self.envs[i].num_axis = num_axis
+ self.envs[i].reward_lambda = reward_lambda
+ # --
try:
# We fully ignore the reward here because it will have to be re-computed
# for HER.
curr_o_new, _, _, info = self.envs[i].step(u[i])
if 'is_success' in info:
success[i] = info['is_success']
- o_new[i] = curr_o_new['observation']
+
+ if success[i] > 0:
+ success_u.append(u[i][0:20])
+ if len(success_u)>=min_num: # nishimura
+ pca = PCA()
+ pca.fit(success_u)
+ self.envs[i].variance_ratio.append(pca.explained_variance_ratio_)
+
+ o_new[i] = curr_o_new['observation']
ag_new[i] = curr_o_new['achieved_goal']
for idx, key in enumerate(self.info_keys):
info_values[idx][t, i] = info[key]
@@ -124,6 +155,8 @@ def generate_rollouts(self):
achieved_goals.append(ag.copy())
successes.append(success.copy())
acts.append(u.copy())
+ if self.compute_Q:
+ fcs.append(fc.copy())
goals.append(self.g.copy())
o[...] = o_new
ag[...] = ag_new
@@ -131,10 +164,21 @@ def generate_rollouts(self):
achieved_goals.append(ag.copy())
self.initial_o[:] = o
- episode = dict(o=obs,
- u=acts,
- g=goals,
- ag=achieved_goals)
+ if is_train:
+ episode = dict(o=obs,
+ u=acts,
+ g=goals,
+ ag=achieved_goals
+ )
+ else:
+ episode = dict(o=obs,
+ u=acts,
+ fc=fcs,
+ g=goals,
+ ag=achieved_goals,
+ q=q_vals,
+ )
+
for key, value in zip(self.info_keys, info_values):
episode['info_{}'.format(key)] = value
@@ -147,7 +191,7 @@ def generate_rollouts(self):
self.Q_history.append(np.mean(Qs))
self.n_episodes += self.rollout_batch_size
- return convert_episode_to_batch_major(episode)
+ return convert_episode_to_batch_major(episode), success_u # motoda
def clear_history(self):
"""Clears all histories that are used for statistics
diff --git a/baselines/her/util.py b/baselines/her/util.py
old mode 100644
new mode 100755
diff --git a/baselines/logger.py b/baselines/logger.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/README.md b/baselines/ppo1/README.md
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/__init__.py b/baselines/ppo1/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/cnn_policy.py b/baselines/ppo1/cnn_policy.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/mlp_policy.py b/baselines/ppo1/mlp_policy.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/pposgd_simple.py b/baselines/ppo1/pposgd_simple.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/run_atari.py b/baselines/ppo1/run_atari.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/run_humanoid.py b/baselines/ppo1/run_humanoid.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/run_mujoco.py b/baselines/ppo1/run_mujoco.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo1/run_robotics.py b/baselines/ppo1/run_robotics.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/README.md b/baselines/ppo2/README.md
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/__init__.py b/baselines/ppo2/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/defaults.py b/baselines/ppo2/defaults.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/microbatched_model.py b/baselines/ppo2/microbatched_model.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/model.py b/baselines/ppo2/model.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/ppo2.py b/baselines/ppo2/ppo2.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/runner.py b/baselines/ppo2/runner.py
old mode 100644
new mode 100755
diff --git a/baselines/ppo2/test_microbatches.py b/baselines/ppo2/test_microbatches.py
old mode 100644
new mode 100755
diff --git a/baselines/results_plotter.py b/baselines/results_plotter.py
old mode 100644
new mode 100755
diff --git a/baselines/run.py b/baselines/run.py
old mode 100644
new mode 100755
index c0298f3a43..8ef9cc5b18
--- a/baselines/run.py
+++ b/baselines/run.py
@@ -15,6 +15,14 @@
from baselines.common.vec_env.vec_normalize import VecNormalize
+
+# --------------------------------------------------------------------------------------
+from baselines.custom_logger import CustomLoggerObject
+clogger = CustomLoggerObject()
+clogger.info("MyLogger is working!!")
+# --------------------------------------------------------------------------------------
+
+
try:
from mpi4py import MPI
except ImportError:
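
The logging additions in this patch all follow the pattern above: construct a `CustomLoggerObject` and call its `info()` method. The `baselines/custom_logger.py` module itself does not appear in this part of the diff, so the following is only a guess at a minimal object that would satisfy that usage; the constructor arguments and log format are assumptions, not the patch's actual implementation.

```python
# Hypothetical sketch of baselines/custom_logger.py -- the real module is not shown here.
import logging

class CustomLoggerObject:
    def __init__(self, name="custom", level=logging.INFO):
        self.logger = logging.getLogger(name)
        if not self.logger.handlers:                  # avoid stacking handlers when instantiated repeatedly
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter("[%(name)s] %(levelname)s: %(message)s"))
            self.logger.addHandler(handler)
        self.logger.setLevel(level)

    def info(self, msg):
        self.logger.info(msg)

# Usage mirroring the patch:
clogger = CustomLoggerObject()
clogger.info("MyLogger is working!!")
```
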
diff --git a/baselines/trpo_mpi/README.md b/baselines/trpo_mpi/README.md
old mode 100644
new mode 100755
diff --git a/baselines/trpo_mpi/__init__.py b/baselines/trpo_mpi/__init__.py
old mode 100644
new mode 100755
diff --git a/baselines/trpo_mpi/defaults.py b/baselines/trpo_mpi/defaults.py
old mode 100644
new mode 100755
diff --git a/baselines/trpo_mpi/trpo_mpi.py b/baselines/trpo_mpi/trpo_mpi.py
old mode 100644
new mode 100755
diff --git a/docs/README.md b/docs/README.md
new file mode 100755
index 0000000000..de5957c176
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,147 @@
+ [![Build status](https://travis-ci.org/openai/baselines.svg?branch=master)](https://travis-ci.org/openai/baselines)
+
+# Baselines
+
+OpenAI Baselines is a set of high-quality implementations of reinforcement learning algorithms.
+
+These algorithms will make it easier for the research community to replicate, refine, and identify new ideas, and will create good baselines to build research on top of. Our DQN implementation and its variants are roughly on par with the scores in published papers. We expect they will be used as a base around which new ideas can be added, and as a tool for comparing a new approach against existing ones.
+
+## Prerequisites
+Baselines requires python3 (>=3.5) with the development headers. You'll also need the system packages CMake, OpenMPI and zlib. These can be installed as follows:
+### Ubuntu
+
+```bash
+sudo apt-get update && sudo apt-get install cmake libopenmpi-dev python3-dev zlib1g-dev
+```
+
+### Mac OS X
+Installation of system packages on Mac requires [Homebrew](https://brew.sh). With Homebrew installed, run the following:
+```bash
+brew install cmake openmpi
+```
+
+## Virtual environment
+From the general python package sanity perspective, it is a good idea to use virtual environments (virtualenvs) to make sure packages from different projects do not interfere with each other. You can install virtualenv (which is itself a pip package) via
+```bash
+pip install virtualenv
+```
+Virtualenvs are essentially folders that contain copies of the python executable and all python packages.
+To create a virtualenv called venv with python3, one runs
+```bash
+virtualenv /path/to/venv --python=python3
+```
+To activate a virtualenv:
+```
+. /path/to/venv/bin/activate
+```
+A more thorough tutorial on virtualenvs and their options can be found [here](https://virtualenv.pypa.io/en/stable/).
+
+
+## Installation
+- Clone the repo and cd into it:
+ ```bash
+ git clone https://github.com/openai/baselines.git
+ cd baselines
+ ```
+- If you don't have TensorFlow installed already, install your favourite flavor of TensorFlow. In most cases,
+ ```bash
+ pip install tensorflow-gpu # if you have a CUDA-compatible gpu and proper drivers
+ ```
+ or
+ ```bash
+ pip install tensorflow
+ ```
+ should be sufficient. Refer to [TensorFlow installation guide](https://www.tensorflow.org/install/)
+ for more details.
+
+- Install baselines package
+ ```bash
+ pip install -e .
+ ```
+
+### MuJoCo
+Some of the baselines examples use the [MuJoCo](http://www.mujoco.org) (multi-joint dynamics in contact) physics simulator, which is proprietary and requires binaries and a license (a temporary 30-day license can be obtained from [www.mujoco.org](http://www.mujoco.org)). Instructions on setting up MuJoCo can be found [here](https://github.com/openai/mujoco-py).
+
+## Testing the installation
+All unit tests in baselines can be run using pytest runner:
+```
+pip install pytest
+pytest
+```
+
+## Training models
+Most of the algorithms in the baselines repo are used as follows:
+```bash
+python -m baselines.run --alg=<name of the algorithm> --env=<environment_id> [additional arguments]
+```
+### Example 1. PPO with MuJoCo Humanoid
+For instance, to train a fully-connected network controlling MuJoCo humanoid using PPO2 for 20M timesteps
+```bash
+python -m baselines.run --alg=ppo2 --env=Humanoid-v2 --network=mlp --num_timesteps=2e7
+```
+Note that for MuJoCo environments the fully-connected network is the default, so we can omit `--network=mlp`.
+The hyperparameters for both the network and the learning algorithm can be controlled via the command line, for instance:
+```bash
+python -m baselines.run --alg=ppo2 --env=Humanoid-v2 --network=mlp --num_timesteps=2e7 --ent_coef=0.1 --num_hidden=32 --num_layers=3 --value_network=copy
+```
+will set the entropy coefficient to 0.1, construct a fully connected network with 3 layers of 32 hidden units each, and create a separate network for value function estimation (so that its parameters are not shared with the policy network, but the structure is the same).
+
+See docstrings in [common/models.py](../baselines/common/models.py) for description of network parameters for each type of model, and
+docstring for [baselines/ppo2/ppo2.py/learn()](../baselines/ppo2/ppo2.py#L152) for the description of the ppo2 hyperparamters.
+
+### Example 2. DQN on Atari
+DQN with Atari is at this point a classic benchmark. To run the baselines implementation of DQN on Atari Pong:
+```
+python -m baselines.run --alg=deepq --env=PongNoFrameskip-v4 --num_timesteps=1e6
+```
+
+## Saving, loading and visualizing models
+The algorithms' serialization API is not properly unified yet; however, there is a simple method to save / restore trained models.
+The `--load_path` and `--save_path` command-line options load the TensorFlow state from a given path before training and save it after training, respectively.
+Let's imagine you'd like to train ppo2 on Atari Pong, save the model, and then later visualize what it has learnt.
+```bash
+python -m baselines.run --alg=ppo2 --env=PongNoFrameskip-v4 --num_timesteps=2e7 --save_path=~/models/pong_20M_ppo2
+```
+This should get the mean reward per episode to about 20. To load and visualize the model, we'll do the following: load the model, train it for 0 steps, and then visualize:
+```bash
+python -m baselines.run --alg=ppo2 --env=PongNoFrameskip-v4 --num_timesteps=0 --load_path=~/models/pong_20M_ppo2 --play
+```
+
+*NOTE:* At the moment MuJoCo training uses the VecNormalize wrapper for the environment, which is not being saved correctly, so loading models trained on MuJoCo will not work well if the environment is recreated. If necessary, you can work around that by replacing RunningMeanStd with TfRunningMeanStd in [baselines/common/vec_env/vec_normalize.py](../baselines/common/vec_env/vec_normalize.py#L12). This way, the mean and std of the environment-normalizing wrapper will be saved in TensorFlow variables and included in the model file; however, training is slower that way, hence it is not enabled by default.
+
+## Loading and visualizing learning curves and other training metrics
+See [here](docs/viz/viz.ipynb) for instructions on how to load and display the training data.
+
+## Subpackages
+
+- [A2C](../baselines/a2c)
+- [ACER](../baselines/acer)
+- [ACKTR](../baselines/acktr)
+- [DDPG](../baselines/ddpg)
+- [DQN](../baselines/deepq)
+- [GAIL](../baselines/gail)
+- [HER](../baselines/her)
+- [PPO1](../baselines/ppo1) (obsolete version, left here temporarily)
+- [PPO2](../baselines/ppo2)
+- [TRPO](../baselines/trpo_mpi)
+
+
+
+## Benchmarks
+Results of benchmarks on Mujoco (1M timesteps) and Atari (10M timesteps) are available
+[here for Mujoco](https://htmlpreview.github.com/?https://github.com/openai/baselines/blob/master/benchmarks_mujoco1M.htm)
+and
+[here for Atari](https://htmlpreview.github.com/?https://github.com/openai/baselines/blob/master/benchmarks_atari10M.htm)
+respectively. Note that these results may not be from the latest version of the code; the particular commit hash with which the results were obtained is specified on the benchmarks page.
+
+To cite this repository in publications:
+
+ @misc{baselines,
+ author = {Dhariwal, Prafulla and Hesse, Christopher and Klimov, Oleg and Nichol, Alex and Plappert, Matthias and Radford, Alec and Schulman, John and Sidor, Szymon and Wu, Yuhuai and Zhokhov, Peter},
+ title = {OpenAI Baselines},
+ year = {2017},
+ publisher = {GitHub},
+ journal = {GitHub repository},
+ howpublished = {\url{https://github.com/openai/baselines}},
+ }
+
diff --git a/benchmarks_atari10M.htm b/docs/benchmarks_atari10M.htm
old mode 100644
new mode 100755
similarity index 100%
rename from benchmarks_atari10M.htm
rename to docs/benchmarks_atari10M.htm
diff --git a/benchmarks_mujoco1M.htm b/docs/benchmarks_mujoco1M.htm
old mode 100644
new mode 100755
similarity index 100%
rename from benchmarks_mujoco1M.htm
rename to docs/benchmarks_mujoco1M.htm
diff --git a/data/cartpole.gif b/docs/data/cartpole.gif
old mode 100644
new mode 100755
similarity index 100%
rename from data/cartpole.gif
rename to docs/data/cartpole.gif
diff --git a/data/fetchPickAndPlaceContrast.png b/docs/data/fetchPickAndPlaceContrast.png
old mode 100644
new mode 100755
similarity index 100%
rename from data/fetchPickAndPlaceContrast.png
rename to docs/data/fetchPickAndPlaceContrast.png
diff --git a/data/logo.jpg b/docs/data/logo.jpg
old mode 100644
new mode 100755
similarity index 100%
rename from data/logo.jpg
rename to docs/data/logo.jpg
diff --git a/docs/viz/viz.ipynb b/docs/viz/viz.ipynb
old mode 100644
new mode 100755
diff --git a/gym-grasp/README.md b/gym-grasp/README.md
new file mode 100644
index 0000000000..cd957b4579
--- /dev/null
+++ b/gym-grasp/README.md
@@ -0,0 +1,20 @@
+# gym_grasp
+
+## GraspBlock
+
+
+## Installation
+
+```bash
+cd gym-grasp
+pip install -e .
+```
+
+## How To Use
+
+```python
+import gym
+import gym_grasp # This includes GraspBlock-v0
+
+env = gym.make('GraspBlock-v0')
+```
diff --git a/gym-grasp/__init__.py b/gym-grasp/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/gym-grasp/gym_grasp/__init__.py b/gym-grasp/gym_grasp/__init__.py
new file mode 100644
index 0000000000..5fa8fec811
--- /dev/null
+++ b/gym-grasp/gym_grasp/__init__.py
@@ -0,0 +1,12 @@
+from gym.envs.registration import register
+
+
+def _merge(a, b):
+ a.update(b)
+ return a
+
+register(
+ id='GraspBlock-v0',
+ entry_point='gym_grasp.envs:GraspBlockEnv',
+ max_episode_steps=100,
+)
\ No newline at end of file
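
The `register()` call above is what lets `gym.make('GraspBlock-v0')` (as shown in the package README) resolve to `GraspBlockEnv`, and `max_episode_steps=100` wraps the environment in gym's `TimeLimit`. A quick check, assuming a standard gym installation with this package installed via `pip install -e .`:

```python
import gym
import gym_grasp  # noqa: F401 -- importing the package executes the register() call above

env = gym.make('GraspBlock-v0')
print(env.spec.id, env.spec.max_episode_steps)  # GraspBlock-v0 100
```
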
diff --git a/gym-grasp/gym_grasp/envs/README.md b/gym-grasp/gym_grasp/envs/README.md
new file mode 100644
index 0000000000..5dbbfdab4a
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/README.md
@@ -0,0 +1,54 @@
+# Robotics environments
+
+Details and documentation on these robotics environments are available in our [blog post](https://blog.openai.com/ingredients-for-robotics-research/), the accompanying [technical report](https://arxiv.org/abs/1802.09464), and the [Gym website](https://gym.openai.com/envs/#robotics).
+
+If you use these environments, please cite the following paper:
+
+```
+@misc{1802.09464,
+ Author = {Matthias Plappert and Marcin Andrychowicz and Alex Ray and Bob McGrew and Bowen Baker and Glenn Powell and Jonas Schneider and Josh Tobin and Maciek Chociej and Peter Welinder and Vikash Kumar and Wojciech Zaremba},
+ Title = {Multi-Goal Reinforcement Learning: Challenging Robotics Environments and Request for Research},
+ Year = {2018},
+ Eprint = {arXiv:1802.09464},
+}
+```
+
+## Fetch environments
+
+
+[FetchReach-v0](https://gym.openai.com/envs/FetchReach-v0/): Fetch has to move its end-effector to the desired goal position.
+
+
+
+
+[FetchSlide-v0](https://gym.openai.com/envs/FetchSlide-v0/): Fetch has to hit a puck across a long table such that it slides and comes to rest on the desired goal.
+
+
+
+
+[FetchPush-v0](https://gym.openai.com/envs/FetchPush-v0/): Fetch has to move a box by pushing it until it reaches a desired goal position.
+
+
+
+
+[FetchPickAndPlace-v0](https://gym.openai.com/envs/FetchPickAndPlace-v0/): Fetch has to pick up a box from a table using its gripper and move it to a desired goal above the table.
+
+## Shadow Dexterous Hand environments
+
+
+[HandReach-v0](https://gym.openai.com/envs/HandReach-v0/): ShadowHand has to reach with its thumb and a selected finger until they meet at a desired goal position above the palm.
+
+
+
+
+[HandManipulateBlock-v0](https://gym.openai.com/envs/HandManipulateBlock-v0/): ShadowHand has to manipulate a block until it achieves a desired goal position and rotation.
+
+
+
+
+[HandManipulateEgg-v0](https://gym.openai.com/envs/HandManipulateEgg-v0/): ShadowHand has to manipulate an egg until it achieves a desired goal position and rotation.
+
+
+
+
+[HandManipulatePen-v0](https://gym.openai.com/envs/HandManipulatePen-v0/): ShadowHand has to manipulate a pen until it achieves a desired goal position and rotation.
diff --git a/gym-grasp/gym_grasp/envs/__init__.py b/gym-grasp/gym_grasp/envs/__init__.py
new file mode 100644
index 0000000000..a153f413f2
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/__init__.py
@@ -0,0 +1 @@
+from gym_grasp.envs.hand.grasp_block import GraspBlockEnv
diff --git a/gym-grasp/gym_grasp/envs/assets/LICENSE.md b/gym-grasp/gym_grasp/envs/assets/LICENSE.md
new file mode 100644
index 0000000000..22ce9010d0
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/LICENSE.md
@@ -0,0 +1,222 @@
+# Fetch Robotics
+The model of the [Fetch](http://fetchrobotics.com/platforms-research-development/) is based on [models provided by Fetch](https://github.com/fetchrobotics/fetch_ros/tree/indigo-devel/fetch_description). It was adapted and refined by OpenAI.
+
+# ShadowHand
+The model of the [ShadowHand](https://www.shadowrobot.com/products/dexterous-hand/) is based on [models provided by ShadowRobot](https://github.com/shadow-robot/sr_common/tree/kinetic-devel/sr_description/hand/model), and on code used under the following license:
+
+(C) Vikash Kumar, CSE, UW. Licensed under Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+Additional license notices:
+
+ Sources : 1) Manipulator and Manipulation in High Dimensional Spaces. Vikash Kumar, Ph.D. Thesis, CSE, Univ. of Washington. 2016.
+
+ Mujoco :: Advanced physics simulation engine
+ Source : www.roboti.us
+ Version : 1.40
+ Released : 17Jan'17
+
+ Author :: Vikash Kumar
+ Contacts : vikash@openai.com
+ Last edits : 3Apr'17
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/pick_and_place.xml b/gym-grasp/gym_grasp/envs/assets/fetch/pick_and_place.xml
new file mode 100644
index 0000000000..337032a832
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/pick_and_place.xml
@@ -0,0 +1,35 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/push.xml b/gym-grasp/gym_grasp/envs/assets/fetch/push.xml
new file mode 100644
index 0000000000..8e12db248c
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/push.xml
@@ -0,0 +1,32 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
\ No newline at end of file
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/reach.xml b/gym-grasp/gym_grasp/envs/assets/fetch/reach.xml
new file mode 100644
index 0000000000..c73d6249f3
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/reach.xml
@@ -0,0 +1,26 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/robot.xml b/gym-grasp/gym_grasp/envs/assets/fetch/robot.xml
new file mode 100644
index 0000000000..9ee7723b5e
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/robot.xml
@@ -0,0 +1,123 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/shared.xml b/gym-grasp/gym_grasp/envs/assets/fetch/shared.xml
new file mode 100644
index 0000000000..5d61fef70d
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/shared.xml
@@ -0,0 +1,66 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/fetch/slide.xml b/gym-grasp/gym_grasp/envs/assets/fetch/slide.xml
new file mode 100644
index 0000000000..efbfb51bd0
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/fetch/slide.xml
@@ -0,0 +1,32 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/grasp_block.xml b/gym-grasp/gym_grasp/envs/assets/hand/grasp_block.xml
new file mode 100644
index 0000000000..b271a2548d
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/grasp_block.xml
@@ -0,0 +1,82 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/manipulate_block.xml b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_block.xml
new file mode 100644
index 0000000000..83a6517e6c
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_block.xml
@@ -0,0 +1,41 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/manipulate_egg.xml b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_egg.xml
new file mode 100644
index 0000000000..46d1dbba84
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_egg.xml
@@ -0,0 +1,40 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/manipulate_pen.xml b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_pen.xml
new file mode 100644
index 0000000000..20a6fb5e06
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/manipulate_pen.xml
@@ -0,0 +1,40 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/reach.xml b/gym-grasp/gym_grasp/envs/assets/hand/reach.xml
new file mode 100644
index 0000000000..71f6dfe621
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/reach.xml
@@ -0,0 +1,34 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/robot.xml b/gym-grasp/gym_grasp/envs/assets/hand/robot.xml
new file mode 100644
index 0000000000..dbb9e43448
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/robot.xml
@@ -0,0 +1,160 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/robot_for_grasp.xml b/gym-grasp/gym_grasp/envs/assets/hand/robot_for_grasp.xml
new file mode 100644
index 0000000000..a46cc3258b
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/robot_for_grasp.xml
@@ -0,0 +1,165 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/shared.xml b/gym-grasp/gym_grasp/envs/assets/hand/shared.xml
new file mode 100644
index 0000000000..f27f265551
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/shared.xml
@@ -0,0 +1,254 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/hand/shared_asset.xml b/gym-grasp/gym_grasp/envs/assets/hand/shared_asset.xml
new file mode 100644
index 0000000000..9db234f06a
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/assets/hand/shared_asset.xml
@@ -0,0 +1,75 @@
+<!-- MuJoCo XML markup stripped during extraction; the original model content is not recoverable -->
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/.get b/gym-grasp/gym_grasp/envs/assets/stls/.get
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/base_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/base_link_collision.stl
new file mode 100644
index 0000000000..1ef459fd5b
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/base_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/bellows_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/bellows_link_collision.stl
new file mode 100644
index 0000000000..a7e5ab75ca
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/bellows_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/elbow_flex_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/elbow_flex_link_collision.stl
new file mode 100644
index 0000000000..b0eea0777a
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/elbow_flex_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/estop_link.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/estop_link.stl
new file mode 100644
index 0000000000..f6d1c72e85
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/estop_link.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/forearm_roll_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/forearm_roll_link_collision.stl
new file mode 100644
index 0000000000..fe468c5406
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/forearm_roll_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/gripper_link.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/gripper_link.stl
new file mode 100644
index 0000000000..8a1487401a
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/gripper_link.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_pan_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_pan_link_collision.stl
new file mode 100644
index 0000000000..c77b5b1872
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_pan_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_tilt_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_tilt_link_collision.stl
new file mode 100644
index 0000000000..53c2ddc58c
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/head_tilt_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/l_wheel_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/l_wheel_link_collision.stl
new file mode 100644
index 0000000000..5c1752487e
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/l_wheel_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/laser_link.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/laser_link.stl
new file mode 100644
index 0000000000..fa4882fc98
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/laser_link.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/r_wheel_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/r_wheel_link_collision.stl
new file mode 100644
index 0000000000..3742b24694
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/r_wheel_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_lift_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_lift_link_collision.stl
new file mode 100644
index 0000000000..c9aff0dda9
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_lift_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_pan_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_pan_link_collision.stl
new file mode 100644
index 0000000000..ac17a94375
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/shoulder_pan_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_fixed_link.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_fixed_link.stl
new file mode 100644
index 0000000000..7cf7fc147e
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_fixed_link.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_lift_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_lift_link_collision.stl
new file mode 100644
index 0000000000..4ce5fcf9c5
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/torso_lift_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/upperarm_roll_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/upperarm_roll_link_collision.stl
new file mode 100644
index 0000000000..120793232e
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/upperarm_roll_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_flex_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_flex_link_collision.stl
new file mode 100644
index 0000000000..3215d2e1de
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_flex_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_roll_link_collision.stl b/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_roll_link_collision.stl
new file mode 100644
index 0000000000..742bdd9197
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/fetch/wrist_roll_link_collision.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/F1.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/F1.stl
new file mode 100644
index 0000000000..515d3c9016
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/F1.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/F2.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/F2.stl
new file mode 100644
index 0000000000..7bc5e20e06
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/F2.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/F3.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/F3.stl
new file mode 100644
index 0000000000..223f06f5bf
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/F3.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/TH1_z.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH1_z.stl
new file mode 100644
index 0000000000..400ee2d625
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH1_z.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/TH2_z.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH2_z.stl
new file mode 100644
index 0000000000..5ace8388b9
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH2_z.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/TH3_z.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH3_z.stl
new file mode 100644
index 0000000000..23485abc72
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/TH3_z.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric.stl
new file mode 100644
index 0000000000..80f6f3da18
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric_cvx.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric_cvx.stl
new file mode 100644
index 0000000000..3c30f57eaa
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/forearm_electric_cvx.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/knuckle.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/knuckle.stl
new file mode 100644
index 0000000000..4faedd7540
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/knuckle.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/lfmetacarpal.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/lfmetacarpal.stl
new file mode 100644
index 0000000000..535cf4dbca
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/lfmetacarpal.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/palm.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/palm.stl
new file mode 100644
index 0000000000..65e47eb65d
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/palm.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/stls/hand/wrist.stl b/gym-grasp/gym_grasp/envs/assets/stls/hand/wrist.stl
new file mode 100644
index 0000000000..420d5f9c67
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/stls/hand/wrist.stl differ
diff --git a/gym-grasp/gym_grasp/envs/assets/textures/block.png b/gym-grasp/gym_grasp/envs/assets/textures/block.png
new file mode 100644
index 0000000000..0243b8f331
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/textures/block.png differ
diff --git a/gym-grasp/gym_grasp/envs/assets/textures/block_hidden.png b/gym-grasp/gym_grasp/envs/assets/textures/block_hidden.png
new file mode 100644
index 0000000000..e08b8613c4
Binary files /dev/null and b/gym-grasp/gym_grasp/envs/assets/textures/block_hidden.png differ
diff --git a/gym-grasp/gym_grasp/envs/fetch/__init__.py b/gym-grasp/gym_grasp/envs/fetch/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/gym-grasp/gym_grasp/envs/fetch/pick_and_place.py b/gym-grasp/gym_grasp/envs/fetch/pick_and_place.py
new file mode 100644
index 0000000000..c6c5e7ea99
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/fetch/pick_and_place.py
@@ -0,0 +1,23 @@
+import os
+from gym import utils
+from gym.envs.robotics import fetch_env
+
+
+# Ensure we get the path separator correct on windows
+MODEL_XML_PATH = os.path.join('fetch', 'pick_and_place.xml')
+
+
+class FetchPickAndPlaceEnv(fetch_env.FetchEnv, utils.EzPickle):
+ def __init__(self, reward_type='sparse'):
+ initial_qpos = {
+ 'robot0:slide0': 0.405,
+ 'robot0:slide1': 0.48,
+ 'robot0:slide2': 0.0,
+ 'object0:joint': [1.25, 0.53, 0.4, 1., 0., 0., 0.],
+ }
+ fetch_env.FetchEnv.__init__(
+ self, MODEL_XML_PATH, has_object=True, block_gripper=False, n_substeps=20,
+ gripper_extra_height=0.2, target_in_the_air=True, target_offset=0.0,
+ obj_range=0.15, target_range=0.15, distance_threshold=0.05,
+ initial_qpos=initial_qpos, reward_type=reward_type)
+ utils.EzPickle.__init__(self)
diff --git a/gym-grasp/gym_grasp/envs/fetch/push.py b/gym-grasp/gym_grasp/envs/fetch/push.py
new file mode 100644
index 0000000000..bde15ec00e
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/fetch/push.py
@@ -0,0 +1,23 @@
+import os
+from gym import utils
+from gym.envs.robotics import fetch_env
+
+
+# Ensure we get the path separator correct on windows
+MODEL_XML_PATH = os.path.join('fetch', 'push.xml')
+
+
+class FetchPushEnv(fetch_env.FetchEnv, utils.EzPickle):
+ def __init__(self, reward_type='sparse'):
+ initial_qpos = {
+ 'robot0:slide0': 0.405,
+ 'robot0:slide1': 0.48,
+ 'robot0:slide2': 0.0,
+ 'object0:joint': [1.25, 0.53, 0.4, 1., 0., 0., 0.],
+ }
+ fetch_env.FetchEnv.__init__(
+ self, MODEL_XML_PATH, has_object=True, block_gripper=True, n_substeps=20,
+ gripper_extra_height=0.0, target_in_the_air=False, target_offset=0.0,
+ obj_range=0.15, target_range=0.15, distance_threshold=0.05,
+ initial_qpos=initial_qpos, reward_type=reward_type)
+ utils.EzPickle.__init__(self)
diff --git a/gym-grasp/gym_grasp/envs/fetch/reach.py b/gym-grasp/gym_grasp/envs/fetch/reach.py
new file mode 100644
index 0000000000..cc3fc46c65
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/fetch/reach.py
@@ -0,0 +1,22 @@
+import os
+from gym import utils
+from gym.envs.robotics import fetch_env
+
+
+# Ensure we get the path separator correct on windows
+MODEL_XML_PATH = os.path.join('fetch', 'reach.xml')
+
+
+class FetchReachEnv(fetch_env.FetchEnv, utils.EzPickle):
+ def __init__(self, reward_type='sparse'):
+ initial_qpos = {
+ 'robot0:slide0': 0.4049,
+ 'robot0:slide1': 0.48,
+ 'robot0:slide2': 0.0,
+ }
+ fetch_env.FetchEnv.__init__(
+ self, MODEL_XML_PATH, has_object=False, block_gripper=True, n_substeps=20,
+ gripper_extra_height=0.2, target_in_the_air=True, target_offset=0.0,
+ obj_range=0.15, target_range=0.15, distance_threshold=0.05,
+ initial_qpos=initial_qpos, reward_type=reward_type)
+ utils.EzPickle.__init__(self)
diff --git a/gym-grasp/gym_grasp/envs/fetch/slide.py b/gym-grasp/gym_grasp/envs/fetch/slide.py
new file mode 100644
index 0000000000..8c893b2b7d
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/fetch/slide.py
@@ -0,0 +1,25 @@
+import os
+import numpy as np
+
+from gym import utils
+from gym.envs.robotics import fetch_env
+
+
+# Ensure we get the path separator correct on windows
+MODEL_XML_PATH = os.path.join('fetch', 'slide.xml')
+
+
+class FetchSlideEnv(fetch_env.FetchEnv, utils.EzPickle):
+ def __init__(self, reward_type='sparse'):
+ initial_qpos = {
+ 'robot0:slide0': 0.05,
+ 'robot0:slide1': 0.48,
+ 'robot0:slide2': 0.0,
+ 'object0:joint': [1.7, 1.1, 0.4, 1., 0., 0., 0.],
+ }
+ fetch_env.FetchEnv.__init__(
+ self, MODEL_XML_PATH, has_object=True, block_gripper=True, n_substeps=20,
+ gripper_extra_height=-0.02, target_in_the_air=False, target_offset=np.array([0.4, 0.0, 0.0]),
+ obj_range=0.1, target_range=0.3, distance_threshold=0.05,
+ initial_qpos=initial_qpos, reward_type=reward_type)
+ utils.EzPickle.__init__(self)
diff --git a/gym-grasp/gym_grasp/envs/fetch_env.py b/gym-grasp/gym_grasp/envs/fetch_env.py
new file mode 100644
index 0000000000..4916c4bcaf
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/fetch_env.py
@@ -0,0 +1,187 @@
+import numpy as np
+
+from gym.envs.robotics import rotations, robot_env, utils
+
+
+def goal_distance(goal_a, goal_b):
+ assert goal_a.shape == goal_b.shape
+ return np.linalg.norm(goal_a - goal_b, axis=-1)
+
+
+class FetchEnv(robot_env.RobotEnv):
+ """Superclass for all Fetch environments.
+ """
+
+ def __init__(
+ self, model_path, n_substeps, gripper_extra_height, block_gripper,
+ has_object, target_in_the_air, target_offset, obj_range, target_range,
+ distance_threshold, initial_qpos, reward_type,
+ ):
+ """Initializes a new Fetch environment.
+
+ Args:
+ model_path (string): path to the environments XML file
+ n_substeps (int): number of substeps the simulation runs on every call to step
+ gripper_extra_height (float): additional height above the table when positioning the gripper
+ block_gripper (boolean): whether or not the gripper is blocked (i.e. not movable) or not
+ has_object (boolean): whether or not the environment has an object
+ target_in_the_air (boolean): whether or not the target should be in the air above the table or on the table surface
+ target_offset (float or array with 3 elements): offset of the target
+ obj_range (float): range of a uniform distribution for sampling initial object positions
+ target_range (float): range of a uniform distribution for sampling a target
+ distance_threshold (float): the threshold after which a goal is considered achieved
+ initial_qpos (dict): a dictionary of joint names and values that define the initial configuration
+ reward_type ('sparse' or 'dense'): the reward type, i.e. sparse or dense
+ """
+ self.gripper_extra_height = gripper_extra_height
+ self.block_gripper = block_gripper
+ self.has_object = has_object
+ self.target_in_the_air = target_in_the_air
+ self.target_offset = target_offset
+ self.obj_range = obj_range
+ self.target_range = target_range
+ self.distance_threshold = distance_threshold
+ self.reward_type = reward_type
+
+ super(FetchEnv, self).__init__(
+ model_path=model_path, n_substeps=n_substeps, n_actions=4,
+ initial_qpos=initial_qpos)
+
+ # GoalEnv methods
+ # ----------------------------
+
+ def compute_reward(self, achieved_goal, goal, info):
+ # Compute distance between goal and the achieved goal.
+ d = goal_distance(achieved_goal, goal)
+ if self.reward_type == 'sparse':
+ return -(d > self.distance_threshold).astype(np.float32)
+ else:
+ return -d
+
+ # RobotEnv methods
+ # ----------------------------
+
+ def _step_callback(self):
+ if self.block_gripper:
+ self.sim.data.set_joint_qpos('robot0:l_gripper_finger_joint', 0.)
+ self.sim.data.set_joint_qpos('robot0:r_gripper_finger_joint', 0.)
+ self.sim.forward()
+
+ def _set_action(self, action):
+ assert action.shape == (4,)
+ action = action.copy() # ensure that we don't change the action outside of this scope
+ pos_ctrl, gripper_ctrl = action[:3], action[3]
+
+ pos_ctrl *= 0.05 # limit maximum change in position
+ rot_ctrl = [1., 0., 1., 0.] # fixed rotation of the end effector, expressed as a quaternion
+ gripper_ctrl = np.array([gripper_ctrl, gripper_ctrl])
+ assert gripper_ctrl.shape == (2,)
+ if self.block_gripper:
+ gripper_ctrl = np.zeros_like(gripper_ctrl)
+ action = np.concatenate([pos_ctrl, rot_ctrl, gripper_ctrl])
+
+ # Apply action to simulation.
+ utils.ctrl_set_action(self.sim, action)
+ utils.mocap_set_action(self.sim, action)
+
+ def _get_obs(self):
+ # positions
+ grip_pos = self.sim.data.get_site_xpos('robot0:grip')
+ dt = self.sim.nsubsteps * self.sim.model.opt.timestep
+ grip_velp = self.sim.data.get_site_xvelp('robot0:grip') * dt
+ robot_qpos, robot_qvel = utils.robot_get_obs(self.sim)
+ if self.has_object:
+ object_pos = self.sim.data.get_site_xpos('object0')
+ # rotations
+ object_rot = rotations.mat2euler(self.sim.data.get_site_xmat('object0'))
+ # velocities
+ object_velp = self.sim.data.get_site_xvelp('object0') * dt
+ object_velr = self.sim.data.get_site_xvelr('object0') * dt
+ # gripper state
+ object_rel_pos = object_pos - grip_pos
+ object_velp -= grip_velp
+ else:
+ object_pos = object_rot = object_velp = object_velr = object_rel_pos = np.zeros(0)
+ gripper_state = robot_qpos[-2:]
+ gripper_vel = robot_qvel[-2:] * dt # change to a scalar if the gripper is made symmetric
+
+ if not self.has_object:
+ achieved_goal = grip_pos.copy()
+ else:
+ achieved_goal = np.squeeze(object_pos.copy())
+ obs = np.concatenate([
+ grip_pos, object_pos.ravel(), object_rel_pos.ravel(), gripper_state, object_rot.ravel(),
+ object_velp.ravel(), object_velr.ravel(), grip_velp, gripper_vel,
+ ])
+
+ return {
+ 'observation': obs.copy(),
+ 'achieved_goal': achieved_goal.copy(),
+ 'desired_goal': self.goal.copy(),
+ }
+
+ def _viewer_setup(self):
+ body_id = self.sim.model.body_name2id('robot0:gripper_link')
+ lookat = self.sim.data.body_xpos[body_id]
+ for idx, value in enumerate(lookat):
+ self.viewer.cam.lookat[idx] = value
+ self.viewer.cam.distance = 2.5
+ self.viewer.cam.azimuth = 132.
+ self.viewer.cam.elevation = -14.
+
+ def _render_callback(self):
+ # Visualize target.
+ sites_offset = (self.sim.data.site_xpos - self.sim.model.site_pos).copy()
+ site_id = self.sim.model.site_name2id('target0')
+ self.sim.model.site_pos[site_id] = self.goal - sites_offset[0]
+ self.sim.forward()
+
+ def _reset_sim(self):
+ self.sim.set_state(self.initial_state)
+
+ # Randomize start position of object.
+ if self.has_object:
+ object_xpos = self.initial_gripper_xpos[:2]
+ while np.linalg.norm(object_xpos - self.initial_gripper_xpos[:2]) < 0.1:
+ object_xpos = self.initial_gripper_xpos[:2] + self.np_random.uniform(-self.obj_range, self.obj_range, size=2)
+ object_qpos = self.sim.data.get_joint_qpos('object0:joint')
+ assert object_qpos.shape == (7,)
+ object_qpos[:2] = object_xpos
+ self.sim.data.set_joint_qpos('object0:joint', object_qpos)
+
+ self.sim.forward()
+ return True
+
+ def _sample_goal(self):
+ if self.has_object:
+ goal = self.initial_gripper_xpos[:3] + self.np_random.uniform(-self.target_range, self.target_range, size=3)
+ goal += self.target_offset
+ goal[2] = self.height_offset
+ if self.target_in_the_air and self.np_random.uniform() < 0.5:
+ goal[2] += self.np_random.uniform(0, 0.45)
+ else:
+ goal = self.initial_gripper_xpos[:3] + self.np_random.uniform(-0.15, 0.15, size=3)
+ return goal.copy()
+
+ def _is_success(self, achieved_goal, desired_goal):
+ d = goal_distance(achieved_goal, desired_goal)
+ return (d < self.distance_threshold).astype(np.float32)
+
+ def _env_setup(self, initial_qpos):
+ for name, value in initial_qpos.items():
+ self.sim.data.set_joint_qpos(name, value)
+ utils.reset_mocap_welds(self.sim)
+ self.sim.forward()
+
+ # Move end effector into position.
+ gripper_target = np.array([-0.498, 0.005, -0.431 + self.gripper_extra_height]) + self.sim.data.get_site_xpos('robot0:grip')
+ gripper_rotation = np.array([1., 0., 1., 0.])
+ self.sim.data.set_mocap_pos('robot0:mocap', gripper_target)
+ self.sim.data.set_mocap_quat('robot0:mocap', gripper_rotation)
+ for _ in range(10):
+ self.sim.step()
+
+ # Extract information for sampling goals.
+ self.initial_gripper_xpos = self.sim.data.get_site_xpos('robot0:grip').copy()
+ if self.has_object:
+ self.height_offset = self.sim.data.get_site_xpos('object0')[2]
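
For reference (not part of the patch), the reward convention implemented by `compute_reward()` above works out as follows: with the sparse setting the reward is 0 once the achieved goal lies within `distance_threshold` of the desired goal and -1 otherwise, while the dense setting returns the negative Euclidean distance.

```python
import numpy as np

def goal_distance(goal_a, goal_b):
    return np.linalg.norm(goal_a - goal_b, axis=-1)

achieved = np.array([1.30, 0.75, 0.42])
desired  = np.array([1.33, 0.75, 0.42])

d = goal_distance(achieved, desired)             # ~0.03 m
sparse_reward = -(d > 0.05).astype(np.float32)   # -0.0 (== 0.0): within the 5 cm threshold
dense_reward  = -d                               # ~ -0.03
print(d, sparse_reward, dense_reward)
```
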
diff --git a/gym-grasp/gym_grasp/envs/hand/__init__.py b/gym-grasp/gym_grasp/envs/hand/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/gym-grasp/gym_grasp/envs/hand/grasp_block.py b/gym-grasp/gym_grasp/envs/hand/grasp_block.py
new file mode 100644
index 0000000000..e1798a04d0
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/hand/grasp_block.py
@@ -0,0 +1,366 @@
+import os
+import numpy as np
+import random
+
+from gym import utils, error
+# from gym.envs.robotics import rotations, hand_env
+from gym_grasp.envs import rotations, hand_env
+from gym.envs.robotics.utils import robot_get_obs
+
+try:
+ import mujoco_py
+except ImportError as e:
+ raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
+
+
+def quat_from_angle_and_axis(angle, axis):
+ assert axis.shape == (3,)
+ axis /= np.linalg.norm(axis)
+ quat = np.concatenate([[np.cos(angle / 2.)], np.sin(angle / 2.) * axis])
+ quat /= np.linalg.norm(quat)
+ return quat
+
+
+# Ensure we get the path separator correct on windows
+MANIPULATE_BLOCK_XML = os.path.join('hand', 'manipulate_block.xml')
+MANIPULATE_EGG_XML = os.path.join('hand', 'manipulate_egg.xml')
+MANIPULATE_PEN_XML = os.path.join('hand', 'manipulate_pen.xml')
+GRASP_BLOCK_XML = os.path.join('hand', 'grasp_block.xml')
+
+
+class ManipulateEnv(hand_env.HandEnv, utils.EzPickle):
+ def __init__(
+ self, model_path, target_position, target_rotation,
+ target_position_range, reward_type, initial_qpos={},
+ randomize_initial_position=True, randomize_initial_rotation=True, randomize_object=True,
+ distance_threshold=0.01, rotation_threshold=0.1, n_substeps=20, relative_control=False,
+ ignore_z_target_rotation=False,
+        target_id=0, num_axis=5, reward_lambda=1.
+ ):
+ """Initializes a new Hand manipulation environment.
+
+ Args:
+ model_path (string): path to the environments XML file
+ target_position (string): the type of target position:
+ - ignore: target position is fully ignored, i.e. the object can be positioned arbitrarily
+ - fixed: target position is set to the initial position of the object
+ - random: target position is fully randomized according to target_position_range
+ target_rotation (string): the type of target rotation:
+ - ignore: target rotation is fully ignored, i.e. the object can be rotated arbitrarily
+ - fixed: target rotation is set to the initial rotation of the object
+ - xyz: fully randomized target rotation around the X, Y and Z axis
+ - z: fully randomized target rotation around the Z axis
+ - parallel: fully randomized target rotation around Z and axis-aligned rotation around X, Y
+ ignore_z_target_rotation (boolean): whether or not the Z axis of the target rotation is ignored
+ target_position_range (np.array of shape (3, 2)): range of the target_position randomization
+ reward_type ('sparse' or 'dense'): the reward type, i.e. sparse or dense
+ initial_qpos (dict): a dictionary of joint names and values that define the initial configuration
+ randomize_initial_position (boolean): whether or not to randomize the initial position of the object
+ randomize_initial_rotation (boolean): whether or not to randomize the initial rotation of the object
+            randomize_object (boolean): whether or not to randomly select the target object from object_list on each reset
+ distance_threshold (float, in meters): the threshold after which the position of a goal is considered achieved
+ rotation_threshold (float, in radians): the threshold after which the rotation of a goal is considered achieved
+ n_substeps (int): number of substeps the simulation runs on every call to step
+ relative_control (boolean): whether or not the hand is actuated in absolute joint positions or relative to the current state
+            target_id (int): index into object_list selecting the target object (used when randomize_object is False)
+            num_axis (int): the number of leading components whose explained-variance ratio is summed in the reward penalty
+            reward_lambda (float): weight of the (1 - explained variance) penalty term of the reward function
+ """
+ self.target_position = target_position
+ self.target_rotation = target_rotation
+ self.target_position_range = target_position_range
+ self.parallel_quats = [rotations.euler2quat(r) for r in rotations.get_parallel_rotations()]
+ self.randomize_initial_rotation = randomize_initial_rotation
+ self.randomize_initial_position = randomize_initial_position
+ self.distance_threshold = distance_threshold
+ self.rotation_threshold = rotation_threshold
+ self.reward_type = reward_type
+ self.ignore_z_target_rotation = ignore_z_target_rotation
+
+ self.variance_ratio = []
+
+ self.object_list = ["box:joint", "apple:joint", "banana:joint", "beerbottle:joint", "book:joint",
+ "needle:joint", "pen:joint", "teacup:joint"]
+ self.target_id = target_id
+ self.num_axis = num_axis # the number of components
+ self.randomize_object = randomize_object # random target (boolean)
+ self.reward_lambda = reward_lambda # a weight for the second term of the reward function (float)
+
+        if self.randomize_object:
+            self.object = self.object_list[random.randrange(len(self.object_list))]  # randomly selected target object
+        else:
+            self.object = self.object_list[self.target_id]  # fixed target object
+
+ self.init_object_qpos = np.array([1, 0.87, 0.2, 1, 0, 0, 0])
+
+ assert self.target_position in ['ignore', 'fixed', 'random']
+ assert self.target_rotation in ['ignore', 'fixed', 'xyz', 'z', 'parallel']
+
+ hand_env.HandEnv.__init__(
+ self, model_path, n_substeps=n_substeps, initial_qpos=initial_qpos,
+ relative_control=relative_control)
+ utils.EzPickle.__init__(self)
+
+ def _get_achieved_goal(self):
+ # Object position and rotation.
+ object_qpos = self.sim.data.get_joint_qpos(self.object)
+ assert object_qpos.shape == (7,)
+ return object_qpos
+
+ # def _randamize_target(self):
+ # self.sim.data.set_joint_qpos("target0:joint", [1, 0.87, 0.4, 1, 0, 0, 0])
+ # # print("##### {} #####".format(self.sim.data.get_joint_qpos("target0:joint")))
+
+ def _goal_distance(self, goal_a, goal_b):
+ assert goal_a.shape == goal_b.shape
+ assert goal_a.shape[-1] == 7
+
+ d_pos = np.zeros_like(goal_a[..., 0])
+ d_rot = np.zeros_like(goal_b[..., 0])
+ if self.target_position != 'ignore':
+ delta_pos = goal_a[..., :3] - goal_b[..., :3]
+ d_pos = np.linalg.norm(delta_pos, axis=-1)
+
+ if self.target_rotation != 'ignore':
+ quat_a, quat_b = goal_a[..., 3:], goal_b[..., 3:]
+
+ if self.ignore_z_target_rotation:
+ # Special case: We want to ignore the Z component of the rotation.
+ # This code here assumes Euler angles with xyz convention. We first transform
+ # to euler, then set the Z component to be equal between the two, and finally
+ # transform back into quaternions.
+ euler_a = rotations.quat2euler(quat_a)
+ euler_b = rotations.quat2euler(quat_b)
+ euler_a[2] = euler_b[2]
+ quat_a = rotations.euler2quat(euler_a)
+
+ # Subtract quaternions and extract angle between them.
+ quat_diff = rotations.quat_mul(quat_a, rotations.quat_conjugate(quat_b))
+ angle_diff = 2 * np.arccos(np.clip(quat_diff[..., 0], -1., 1.))
+ d_rot = angle_diff
+ assert d_pos.shape == d_rot.shape
+ return d_pos, d_rot
+
+ # GoalEnv methods
+ # ----------------------------
+
+ def compute_reward(self, achieved_goal, goal, info):
+ if self.reward_type == 'sparse':
+ success = self._is_success(achieved_goal, goal).astype(np.float32)
+ return (success - 1.)
+ else:
+            d_pos, d_rot = self._goal_distance(achieved_goal, goal)
+            # The original dense reward weighted `d_pos` (in meters) so that it is not completely
+            # dominated by `d_rot` (in radians); here the base reward is the sparse success signal instead.
+
+            # -- nishimura
+            # reward = -(10. * d_pos)  # d_pos: distance error
+            reward = self._is_success(achieved_goal, goal) - 1.  # default
+            # --
+
+            # -- Explained-variance ("contribution rate") penalty
+            if len(self.variance_ratio) > 0:
+                vr = self.variance_ratio[-1]
+                l = np.sum(vr[:self.num_axis])  # variance explained by the first num_axis components
+                self.variance_ratio = []
+
+                reward -= self.reward_lambda * (1. - l)  # nishimura
+            # --
+
+ return reward
+
+ # RobotEnv methods
+ # ----------------------------
+
+ def _is_success(self, achieved_goal, desired_goal):
+ d_pos, d_rot = self._goal_distance(achieved_goal, desired_goal)
+ achieved_pos = (d_pos < self.distance_threshold).astype(np.float32)
+ achieved_rot = (d_rot < self.rotation_threshold).astype(np.float32)
+ achieved_both = achieved_pos * achieved_rot
+ return achieved_both
+
+ def _env_setup(self, initial_qpos):
+ for name, value in initial_qpos.items():
+ self.sim.data.set_joint_qpos(name, value)
+ self.sim.forward()
+
+ def _reset_sim(self):
+ self.sim.set_state(self.initial_state)
+ self.sim.forward()
+
+ # -- motoda
+        if self.randomize_object:
+            self.object = self.object_list[random.randrange(len(self.object_list))]  # randomly selected target object
+        else:
+            self.object = self.object_list[self.target_id]  # fixed target object
+ # --
+        initial_qpos = self.init_object_qpos.copy()  # copy so the in-place updates below do not mutate init_object_qpos
+ initial_pos, initial_quat = initial_qpos[:3], initial_qpos[3:]
+ assert initial_qpos.shape == (7,)
+ assert initial_pos.shape == (3,)
+ assert initial_quat.shape == (4,)
+ initial_qpos = None
+
+        # Randomize initial rotation.
+ if self.randomize_initial_rotation:
+ if self.target_rotation == 'z':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ offset_quat = quat_from_angle_and_axis(angle, axis)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation == 'parallel':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ z_quat = quat_from_angle_and_axis(angle, axis)
+ parallel_quat = self.parallel_quats[self.np_random.randint(len(self.parallel_quats))]
+ offset_quat = rotations.quat_mul(z_quat, parallel_quat)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation in ['xyz', 'ignore']:
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = self.np_random.uniform(-1., 1., size=3)
+ offset_quat = quat_from_angle_and_axis(angle, axis)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation == 'fixed':
+ pass
+ else:
+ raise error.Error('Unknown target_rotation option "{}".'.format(self.target_rotation))
+
+ # Randomize initial position.
+ if self.randomize_initial_position:
+ if self.target_position != 'fixed':
+ initial_pos += self.np_random.normal(size=3, scale=0.005)
+
+ initial_quat /= np.linalg.norm(initial_quat)
+ initial_qpos = np.concatenate([initial_pos, initial_quat])
+ self.sim.data.set_joint_qpos(self.object, initial_qpos)
+
+ def is_on_palm():
+ self.sim.forward()
+ cube_middle_idx = self.sim.model.site_name2id('object:center')
+ cube_middle_pos = self.sim.data.site_xpos[cube_middle_idx]
+ is_on_palm = (cube_middle_pos[2] > 0.04)
+ return is_on_palm
+
+ # Run the simulation for a bunch of timesteps to let everything settle in.
+ for _ in range(10):
+ self._set_action(np.zeros(21))
+ try:
+ self.sim.step()
+ except mujoco_py.MujocoException:
+ return False
+ return is_on_palm()
+
+ def _sample_goal(self):
+ # Select a goal for the object position.
+ target_pos = None
+ if self.target_position == 'random':
+ assert self.target_position_range.shape == (3, 2)
+ offset = self.np_random.uniform(self.target_position_range[:, 0], self.target_position_range[:, 1])
+ assert offset.shape == (3,)
+ target_pos = self.sim.data.get_joint_qpos(self.object)[:3] + offset
+ elif self.target_position in ['ignore', 'fixed']:
+ target_pos = self.sim.data.get_joint_qpos(self.object)[:3]
+ else:
+ raise error.Error('Unknown target_position option "{}".'.format(self.target_position))
+ assert target_pos is not None
+ assert target_pos.shape == (3,)
+
+ # Select a goal for the object rotation.
+ target_quat = None
+ if self.target_rotation == 'z':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ elif self.target_rotation == 'parallel':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ parallel_quat = self.parallel_quats[self.np_random.randint(len(self.parallel_quats))]
+ target_quat = rotations.quat_mul(target_quat, parallel_quat)
+ elif self.target_rotation == 'xyz':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = self.np_random.uniform(-1., 1., size=3)
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ elif self.target_rotation in ['ignore', 'fixed']:
+            target_quat = self.sim.data.get_joint_qpos(self.object)[3:]  # quaternion part only, to satisfy the shape check below
+ else:
+ raise error.Error('Unknown target_rotation option "{}".'.format(self.target_rotation))
+ assert target_quat is not None
+ assert target_quat.shape == (4,)
+
+ target_quat /= np.linalg.norm(target_quat) # normalized quaternion
+ goal = np.concatenate([target_pos, target_quat])
+ return goal
+
+ def _render_callback(self):
+ # Assign current state to target object but offset a bit so that the actual object
+ # is not obscured.
+ goal = self.goal.copy()
+ assert goal.shape == (7,)
+ if self.target_position == 'ignore':
+            # Move the object to the side since we do not care about its position.
+ goal[0] += 0.15
+ self.sim.data.set_joint_qpos('target:joint', goal)
+ self.sim.data.set_joint_qvel('target:joint', np.zeros(6))
+
+ if 'object_hidden' in self.sim.model.geom_names:
+ hidden_id = self.sim.model.geom_name2id('object_hidden')
+ self.sim.model.geom_rgba[hidden_id, 3] = 1.
+ self.sim.forward()
+
+ def _get_obs(self):
+ robot_qpos, robot_qvel = robot_get_obs(self.sim)
+ object_qvel = self.sim.data.get_joint_qvel(self.object)
+ achieved_goal = self._get_achieved_goal().ravel() # this contains the object position + rotation
+ observation = np.concatenate([robot_qpos, robot_qvel, object_qvel, achieved_goal])
+ return {
+ 'observation': observation.copy(),
+ 'achieved_goal': achieved_goal.copy(),
+ 'desired_goal': self.goal.ravel().copy(),
+ }
+
+
+class HandBlockEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandBlockEnv, self).__init__(
+ model_path=MANIPULATE_BLOCK_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ reward_type=reward_type)
+
+
+class HandEggEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandEggEnv, self).__init__(
+ model_path=MANIPULATE_EGG_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ reward_type=reward_type)
+
+
+class HandPenEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandPenEnv, self).__init__(
+ model_path=MANIPULATE_PEN_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ randomize_initial_rotation=False, reward_type=reward_type,
+ ignore_z_target_rotation=True, distance_threshold=0.05)
+
+
+class GraspBlockEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type=None):
+ super(GraspBlockEnv, self).__init__(
+ model_path=GRASP_BLOCK_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.025, 0.025), (-0.025, 0.025), (0.2, 0.25)]),
+ randomize_initial_position=False, reward_type=reward_type,
+ distance_threshold=0.05,
+ rotation_threshold=100.0,
+            randomize_object=False, target_id=0, num_axis=5
+ )
+'''
+Object_list:
+ self.object_list = ["box:joint", "apple:joint", "banana:joint", "beerbottle:joint", "book:joint",
+ "needle:joint", "pen:joint", "teacup:joint"]
+'''
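+
+
+# Illustrative usage sketch added for documentation; it is not part of the original module.
+# It drives GraspBlockEnv directly with random actions and assumes that mujoco_py and the
+# grasp_block.xml asset referenced above are available.
+if __name__ == '__main__':
+    env = GraspBlockEnv()
+    obs = env.reset()
+    for _ in range(100):
+        action = env.action_space.sample()          # 21-dimensional hand action in [-1, 1]
+        obs, reward, done, info = env.step(action)  # obs is a dict: observation / achieved_goal / desired_goal
+        env.render()
+        if info['is_success']:
+            break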
diff --git a/gym-grasp/gym_grasp/envs/hand/grasp_env.py b/gym-grasp/gym_grasp/envs/hand/grasp_env.py
new file mode 100644
index 0000000000..89823864b2
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/hand/grasp_env.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+"""
+Displays the grasp_block hand model in a mujoco_py viewer while oscillating the first two joints.
+"""
+from mujoco_py import load_model_from_path, MjSim, MjViewer
+import math
+import os
+
+model = load_model_from_path("../assets/hand/grasp_block.xml")
+sim = MjSim(model)
+
+viewer = MjViewer(sim)
+
+t = 0
+
+while True:
+ viewer.render()
+ t += 1
+ sim.step()
+ state = sim.get_state()
+
+ state.qpos[1] = 0.1*math.sin(0.01*t)
+ state.qpos[0] = 0.05*math.cos(0.01*t)
+
+ sim.set_state(state)
+ # if t > 100 and os.getenv('TESTING') is not None:
+ # break
diff --git a/gym-grasp/gym_grasp/envs/hand/manipulate.py b/gym-grasp/gym_grasp/envs/hand/manipulate.py
new file mode 100644
index 0000000000..de55f34827
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/hand/manipulate.py
@@ -0,0 +1,299 @@
+import os
+import numpy as np
+
+from gym import utils, error
+from gym.envs.robotics import rotations, hand_env
+from gym.envs.robotics.utils import robot_get_obs
+
+try:
+ import mujoco_py
+except ImportError as e:
+ raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
+
+
+def quat_from_angle_and_axis(angle, axis):
+ assert axis.shape == (3,)
+ axis /= np.linalg.norm(axis)
+ quat = np.concatenate([[np.cos(angle / 2.)], np.sin(angle / 2.) * axis])
+ quat /= np.linalg.norm(quat)
+ return quat
+
+
+# Ensure we get the path separator correct on windows
+MANIPULATE_BLOCK_XML = os.path.join('hand', 'manipulate_block.xml')
+MANIPULATE_EGG_XML = os.path.join('hand', 'manipulate_egg.xml')
+MANIPULATE_PEN_XML = os.path.join('hand', 'manipulate_pen.xml')
+
+
+class ManipulateEnv(hand_env.HandEnv, utils.EzPickle):
+ def __init__(
+ self, model_path, target_position, target_rotation,
+ target_position_range, reward_type, initial_qpos={},
+ randomize_initial_position=True, randomize_initial_rotation=True,
+ distance_threshold=0.01, rotation_threshold=0.1, n_substeps=20, relative_control=False,
+ ignore_z_target_rotation=False,
+ ):
+ """Initializes a new Hand manipulation environment.
+
+ Args:
+ model_path (string): path to the environments XML file
+ target_position (string): the type of target position:
+ - ignore: target position is fully ignored, i.e. the object can be positioned arbitrarily
+ - fixed: target position is set to the initial position of the object
+ - random: target position is fully randomized according to target_position_range
+ target_rotation (string): the type of target rotation:
+ - ignore: target rotation is fully ignored, i.e. the object can be rotated arbitrarily
+ - fixed: target rotation is set to the initial rotation of the object
+ - xyz: fully randomized target rotation around the X, Y and Z axis
+ - z: fully randomized target rotation around the Z axis
+ - parallel: fully randomized target rotation around Z and axis-aligned rotation around X, Y
+ ignore_z_target_rotation (boolean): whether or not the Z axis of the target rotation is ignored
+ target_position_range (np.array of shape (3, 2)): range of the target_position randomization
+ reward_type ('sparse' or 'dense'): the reward type, i.e. sparse or dense
+ initial_qpos (dict): a dictionary of joint names and values that define the initial configuration
+ randomize_initial_position (boolean): whether or not to randomize the initial position of the object
+ randomize_initial_rotation (boolean): whether or not to randomize the initial rotation of the object
+ distance_threshold (float, in meters): the threshold after which the position of a goal is considered achieved
+ rotation_threshold (float, in radians): the threshold after which the rotation of a goal is considered achieved
+ n_substeps (int): number of substeps the simulation runs on every call to step
+ relative_control (boolean): whether or not the hand is actuated in absolute joint positions or relative to the current state
+ """
+ self.target_position = target_position
+ self.target_rotation = target_rotation
+ self.target_position_range = target_position_range
+ self.parallel_quats = [rotations.euler2quat(r) for r in rotations.get_parallel_rotations()]
+ self.randomize_initial_rotation = randomize_initial_rotation
+ self.randomize_initial_position = randomize_initial_position
+ self.distance_threshold = distance_threshold
+ self.rotation_threshold = rotation_threshold
+ self.reward_type = reward_type
+ self.ignore_z_target_rotation = ignore_z_target_rotation
+
+ assert self.target_position in ['ignore', 'fixed', 'random']
+ assert self.target_rotation in ['ignore', 'fixed', 'xyz', 'z', 'parallel']
+
+ hand_env.HandEnv.__init__(
+ self, model_path, n_substeps=n_substeps, initial_qpos=initial_qpos,
+ relative_control=relative_control)
+ utils.EzPickle.__init__(self)
+
+ def _get_achieved_goal(self):
+ # Object position and rotation.
+ object_qpos = self.sim.data.get_joint_qpos('object:joint')
+ assert object_qpos.shape == (7,)
+ return object_qpos
+
+ def _goal_distance(self, goal_a, goal_b):
+ assert goal_a.shape == goal_b.shape
+ assert goal_a.shape[-1] == 7
+
+ d_pos = np.zeros_like(goal_a[..., 0])
+ d_rot = np.zeros_like(goal_b[..., 0])
+ if self.target_position != 'ignore':
+ delta_pos = goal_a[..., :3] - goal_b[..., :3]
+ d_pos = np.linalg.norm(delta_pos, axis=-1)
+
+ if self.target_rotation != 'ignore':
+ quat_a, quat_b = goal_a[..., 3:], goal_b[..., 3:]
+
+ if self.ignore_z_target_rotation:
+ # Special case: We want to ignore the Z component of the rotation.
+ # This code here assumes Euler angles with xyz convention. We first transform
+ # to euler, then set the Z component to be equal between the two, and finally
+ # transform back into quaternions.
+ euler_a = rotations.quat2euler(quat_a)
+ euler_b = rotations.quat2euler(quat_b)
+ euler_a[2] = euler_b[2]
+ quat_a = rotations.euler2quat(euler_a)
+
+ # Subtract quaternions and extract angle between them.
+ quat_diff = rotations.quat_mul(quat_a, rotations.quat_conjugate(quat_b))
+ angle_diff = 2 * np.arccos(np.clip(quat_diff[..., 0], -1., 1.))
+ d_rot = angle_diff
+ assert d_pos.shape == d_rot.shape
+ return d_pos, d_rot
+
+ # GoalEnv methods
+ # ----------------------------
+
+ def compute_reward(self, achieved_goal, goal, info):
+ if self.reward_type == 'sparse':
+ success = self._is_success(achieved_goal, goal).astype(np.float32)
+ return (success - 1.)
+ else:
+ d_pos, d_rot = self._goal_distance(achieved_goal, goal)
+ # We weigh the difference in position to avoid that `d_pos` (in meters) is completely
+ # dominated by `d_rot` (in radians).
+ return -(10. * d_pos + d_rot)
+
+ # RobotEnv methods
+ # ----------------------------
+
+ def _is_success(self, achieved_goal, desired_goal):
+ d_pos, d_rot = self._goal_distance(achieved_goal, desired_goal)
+ achieved_pos = (d_pos < self.distance_threshold).astype(np.float32)
+ achieved_rot = (d_rot < self.rotation_threshold).astype(np.float32)
+ achieved_both = achieved_pos * achieved_rot
+ return achieved_both
+
+ def _env_setup(self, initial_qpos):
+ for name, value in initial_qpos.items():
+ self.sim.data.set_joint_qpos(name, value)
+ self.sim.forward()
+
+ def _reset_sim(self):
+ self.sim.set_state(self.initial_state)
+ self.sim.forward()
+
+ initial_qpos = self.sim.data.get_joint_qpos('object:joint').copy()
+ initial_pos, initial_quat = initial_qpos[:3], initial_qpos[3:]
+ assert initial_qpos.shape == (7,)
+ assert initial_pos.shape == (3,)
+ assert initial_quat.shape == (4,)
+ initial_qpos = None
+
+        # Randomize initial rotation.
+ if self.randomize_initial_rotation:
+ if self.target_rotation == 'z':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ offset_quat = quat_from_angle_and_axis(angle, axis)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation == 'parallel':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ z_quat = quat_from_angle_and_axis(angle, axis)
+ parallel_quat = self.parallel_quats[self.np_random.randint(len(self.parallel_quats))]
+ offset_quat = rotations.quat_mul(z_quat, parallel_quat)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation in ['xyz', 'ignore']:
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = self.np_random.uniform(-1., 1., size=3)
+ offset_quat = quat_from_angle_and_axis(angle, axis)
+ initial_quat = rotations.quat_mul(initial_quat, offset_quat)
+ elif self.target_rotation == 'fixed':
+ pass
+ else:
+ raise error.Error('Unknown target_rotation option "{}".'.format(self.target_rotation))
+
+ # Randomize initial position.
+ if self.randomize_initial_position:
+ if self.target_position != 'fixed':
+ initial_pos += self.np_random.normal(size=3, scale=0.005)
+
+ initial_quat /= np.linalg.norm(initial_quat)
+ initial_qpos = np.concatenate([initial_pos, initial_quat])
+ self.sim.data.set_joint_qpos('object:joint', initial_qpos)
+
+ def is_on_palm():
+ self.sim.forward()
+ cube_middle_idx = self.sim.model.site_name2id('object:center')
+ cube_middle_pos = self.sim.data.site_xpos[cube_middle_idx]
+ is_on_palm = (cube_middle_pos[2] > 0.04)
+ return is_on_palm
+
+ # Run the simulation for a bunch of timesteps to let everything settle in.
+ for _ in range(10):
+ self._set_action(np.zeros(20))
+ try:
+ self.sim.step()
+ except mujoco_py.MujocoException:
+ return False
+ return is_on_palm()
+
+ def _sample_goal(self):
+ # Select a goal for the object position.
+ target_pos = None
+ if self.target_position == 'random':
+ assert self.target_position_range.shape == (3, 2)
+ offset = self.np_random.uniform(self.target_position_range[:, 0], self.target_position_range[:, 1])
+ assert offset.shape == (3,)
+ target_pos = self.sim.data.get_joint_qpos('object:joint')[:3] + offset
+ elif self.target_position in ['ignore', 'fixed']:
+ target_pos = self.sim.data.get_joint_qpos('object:joint')[:3]
+ else:
+ raise error.Error('Unknown target_position option "{}".'.format(self.target_position))
+ assert target_pos is not None
+ assert target_pos.shape == (3,)
+
+ # Select a goal for the object rotation.
+ target_quat = None
+ if self.target_rotation == 'z':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ elif self.target_rotation == 'parallel':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = np.array([0., 0., 1.])
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ parallel_quat = self.parallel_quats[self.np_random.randint(len(self.parallel_quats))]
+ target_quat = rotations.quat_mul(target_quat, parallel_quat)
+ elif self.target_rotation == 'xyz':
+ angle = self.np_random.uniform(-np.pi, np.pi)
+ axis = self.np_random.uniform(-1., 1., size=3)
+ target_quat = quat_from_angle_and_axis(angle, axis)
+ elif self.target_rotation in ['ignore', 'fixed']:
+            target_quat = self.sim.data.get_joint_qpos('object:joint')[3:]  # quaternion part only, to satisfy the shape check below
+ else:
+ raise error.Error('Unknown target_rotation option "{}".'.format(self.target_rotation))
+ assert target_quat is not None
+ assert target_quat.shape == (4,)
+
+ target_quat /= np.linalg.norm(target_quat) # normalized quaternion
+ goal = np.concatenate([target_pos, target_quat])
+ return goal
+
+ def _render_callback(self):
+ # Assign current state to target object but offset a bit so that the actual object
+ # is not obscured.
+ goal = self.goal.copy()
+ assert goal.shape == (7,)
+ if self.target_position == 'ignore':
+            # Move the object to the side since we do not care about its position.
+ goal[0] += 0.15
+ self.sim.data.set_joint_qpos('target:joint', goal)
+ self.sim.data.set_joint_qvel('target:joint', np.zeros(6))
+
+ if 'object_hidden' in self.sim.model.geom_names:
+ hidden_id = self.sim.model.geom_name2id('object_hidden')
+ self.sim.model.geom_rgba[hidden_id, 3] = 1.
+ self.sim.forward()
+
+ def _get_obs(self):
+ robot_qpos, robot_qvel = robot_get_obs(self.sim)
+ object_qvel = self.sim.data.get_joint_qvel('object:joint')
+ achieved_goal = self._get_achieved_goal().ravel() # this contains the object position + rotation
+ observation = np.concatenate([robot_qpos, robot_qvel, object_qvel, achieved_goal])
+ return {
+ 'observation': observation.copy(),
+ 'achieved_goal': achieved_goal.copy(),
+ 'desired_goal': self.goal.ravel().copy(),
+ }
+
+
+class HandBlockEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandBlockEnv, self).__init__(
+ model_path=MANIPULATE_BLOCK_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ reward_type=reward_type)
+
+
+class HandEggEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandEggEnv, self).__init__(
+ model_path=MANIPULATE_EGG_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ reward_type=reward_type)
+
+
+class HandPenEnv(ManipulateEnv):
+ def __init__(self, target_position='random', target_rotation='xyz', reward_type='sparse'):
+ super(HandPenEnv, self).__init__(
+ model_path=MANIPULATE_PEN_XML, target_position=target_position,
+ target_rotation=target_rotation,
+ target_position_range=np.array([(-0.04, 0.04), (-0.06, 0.02), (0.0, 0.06)]),
+ randomize_initial_rotation=False, reward_type=reward_type,
+ ignore_z_target_rotation=True, distance_threshold=0.05)
diff --git a/gym-grasp/gym_grasp/envs/hand/reach.py b/gym-grasp/gym_grasp/envs/hand/reach.py
new file mode 100644
index 0000000000..81ed9f9540
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/hand/reach.py
@@ -0,0 +1,149 @@
+import os
+import numpy as np
+
+from gym import utils
+from gym.envs.robotics import hand_env
+from gym.envs.robotics.utils import robot_get_obs
+
+
+FINGERTIP_SITE_NAMES = [
+ 'robot0:S_fftip',
+ 'robot0:S_mftip',
+ 'robot0:S_rftip',
+ 'robot0:S_lftip',
+ 'robot0:S_thtip',
+]
+
+
+DEFAULT_INITIAL_QPOS = {
+ 'robot0:WRJ1': -0.16514339750464327,
+ 'robot0:WRJ0': -0.31973286565062153,
+ 'robot0:FFJ3': 0.14340512546557435,
+ 'robot0:FFJ2': 0.32028208333591573,
+ 'robot0:FFJ1': 0.7126053607727917,
+ 'robot0:FFJ0': 0.6705281001412586,
+ 'robot0:MFJ3': 0.000246444303701037,
+ 'robot0:MFJ2': 0.3152655251085491,
+ 'robot0:MFJ1': 0.7659800313729842,
+ 'robot0:MFJ0': 0.7323156897425923,
+ 'robot0:RFJ3': 0.00038520700007378114,
+ 'robot0:RFJ2': 0.36743546201985233,
+ 'robot0:RFJ1': 0.7119514095008576,
+ 'robot0:RFJ0': 0.6699446327514138,
+ 'robot0:LFJ4': 0.0525442258033891,
+ 'robot0:LFJ3': -0.13615534724474673,
+ 'robot0:LFJ2': 0.39872030433433003,
+ 'robot0:LFJ1': 0.7415570009679252,
+ 'robot0:LFJ0': 0.704096378652974,
+ 'robot0:THJ4': 0.003673823825070126,
+ 'robot0:THJ3': 0.5506291436028695,
+ 'robot0:THJ2': -0.014515151997119306,
+ 'robot0:THJ1': -0.0015229223564485414,
+ 'robot0:THJ0': -0.7894883021600622,
+}
+
+
+# Ensure we get the path separator correct on windows
+MODEL_XML_PATH = os.path.join('hand', 'reach.xml')
+
+
+def goal_distance(goal_a, goal_b):
+ assert goal_a.shape == goal_b.shape
+ return np.linalg.norm(goal_a - goal_b, axis=-1)
+
+
+class HandReachEnv(hand_env.HandEnv, utils.EzPickle):
+ def __init__(
+ self, distance_threshold=0.01, n_substeps=20, relative_control=False,
+ initial_qpos=DEFAULT_INITIAL_QPOS, reward_type='sparse',
+ ):
+ self.distance_threshold = distance_threshold
+ self.reward_type = reward_type
+
+ hand_env.HandEnv.__init__(
+ self, MODEL_XML_PATH, n_substeps=n_substeps, initial_qpos=initial_qpos,
+ relative_control=relative_control)
+ utils.EzPickle.__init__(self)
+
+ def _get_achieved_goal(self):
+ goal = [self.sim.data.get_site_xpos(name) for name in FINGERTIP_SITE_NAMES]
+ return np.array(goal).flatten()
+
+ # GoalEnv methods
+ # ----------------------------
+
+ def compute_reward(self, achieved_goal, goal, info):
+ d = goal_distance(achieved_goal, goal)
+ if self.reward_type == 'sparse':
+ return -(d > self.distance_threshold).astype(np.float32)
+ else:
+ return -d
+
+ # RobotEnv methods
+ # ----------------------------
+
+ def _env_setup(self, initial_qpos):
+ for name, value in initial_qpos.items():
+ self.sim.data.set_joint_qpos(name, value)
+ self.sim.forward()
+
+ self.initial_goal = self._get_achieved_goal().copy()
+ self.palm_xpos = self.sim.data.body_xpos[self.sim.model.body_name2id('robot0:palm')].copy()
+
+ def _get_obs(self):
+ robot_qpos, robot_qvel = robot_get_obs(self.sim)
+ achieved_goal = self._get_achieved_goal().ravel()
+ observation = np.concatenate([robot_qpos, robot_qvel, achieved_goal])
+ return {
+ 'observation': observation.copy(),
+ 'achieved_goal': achieved_goal.copy(),
+ 'desired_goal': self.goal.copy(),
+ }
+
+ def _sample_goal(self):
+ thumb_name = 'robot0:S_thtip'
+ finger_names = [name for name in FINGERTIP_SITE_NAMES if name != thumb_name]
+ finger_name = self.np_random.choice(finger_names)
+
+ thumb_idx = FINGERTIP_SITE_NAMES.index(thumb_name)
+ finger_idx = FINGERTIP_SITE_NAMES.index(finger_name)
+ assert thumb_idx != finger_idx
+
+ # Pick a meeting point above the hand.
+ meeting_pos = self.palm_xpos + np.array([0.0, -0.09, 0.05])
+ meeting_pos += self.np_random.normal(scale=0.005, size=meeting_pos.shape)
+
+        # Slightly move the meeting goal towards the respective finger so that the
+        # goals do not overlap.
+ goal = self.initial_goal.copy().reshape(-1, 3)
+ for idx in [thumb_idx, finger_idx]:
+ offset_direction = (meeting_pos - goal[idx])
+ offset_direction /= np.linalg.norm(offset_direction)
+ goal[idx] = meeting_pos - 0.005 * offset_direction
+
+ if self.np_random.uniform() < 0.1:
+            # With some probability, ask all fingers to move back to their initial positions.
+            # This prevents the thumb from already starting close to its goal.
+ goal = self.initial_goal.copy()
+ return goal.flatten()
+
+ def _is_success(self, achieved_goal, desired_goal):
+ d = goal_distance(achieved_goal, desired_goal)
+ return (d < self.distance_threshold).astype(np.float32)
+
+ def _render_callback(self):
+ # Visualize targets.
+ sites_offset = (self.sim.data.site_xpos - self.sim.model.site_pos).copy()
+ goal = self.goal.reshape(5, 3)
+ for finger_idx in range(5):
+ site_name = 'target{}'.format(finger_idx)
+ site_id = self.sim.model.site_name2id(site_name)
+ self.sim.model.site_pos[site_id] = goal[finger_idx] - sites_offset[site_id]
+
+ # Visualize finger positions.
+ achieved_goal = self._get_achieved_goal().reshape(5, 3)
+ for finger_idx in range(5):
+ site_name = 'finger{}'.format(finger_idx)
+ site_id = self.sim.model.site_name2id(site_name)
+ self.sim.model.site_pos[site_id] = achieved_goal[finger_idx] - sites_offset[site_id]
+ self.sim.forward()
diff --git a/gym-grasp/gym_grasp/envs/hand_env.py b/gym-grasp/gym_grasp/envs/hand_env.py
new file mode 100644
index 0000000000..1de155e7ed
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/hand_env.py
@@ -0,0 +1,49 @@
+import os
+import copy
+import numpy as np
+
+import gym
+from gym import error, spaces
+from gym.utils import seeding
+from gym_grasp.envs import robot_env
+
+
+class HandEnv(robot_env.RobotEnv):
+ def __init__(self, model_path, n_substeps, initial_qpos, relative_control):
+ self.relative_control = relative_control
+
+ super(HandEnv, self).__init__(
+ model_path=model_path, n_substeps=n_substeps, n_actions=21,
+ initial_qpos=initial_qpos)
+
+ # RobotEnv methods
+ # ----------------------------
+
+ def _set_action(self, action):
+ assert action.shape == (21,)
+
+ ctrlrange = self.sim.model.actuator_ctrlrange
+ actuation_range = (ctrlrange[:, 1] - ctrlrange[:, 0]) / 2.
+ if self.relative_control:
+ actuation_center = np.zeros_like(action)
+ for i in range(self.sim.data.ctrl.shape[0]):
+ actuation_center[i] = self.sim.data.get_joint_qpos(
+ self.sim.model.actuator_names[i].replace(':A_', ':'))
+ for joint_name in ['FF', 'MF', 'RF', 'LF']:
+ act_idx = self.sim.model.actuator_name2id(
+ 'robot0:A_{}J1'.format(joint_name))
+ actuation_center[act_idx] += self.sim.data.get_joint_qpos(
+ 'robot0:{}J0'.format(joint_name))
+ else:
+ actuation_center = (ctrlrange[:, 1] + ctrlrange[:, 0]) / 2.
+ self.sim.data.ctrl[:] = actuation_center + action * actuation_range
+ self.sim.data.ctrl[:] = np.clip(self.sim.data.ctrl, ctrlrange[:, 0], ctrlrange[:, 1])
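+        # Example: with ctrlrange [0, 1.6] in absolute mode, an action of 0.5 maps to
+        # ctrl = 0.8 + 0.5 * 0.8 = 1.2, which is then clipped to the ctrlrange.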
+
+ def _viewer_setup(self):
+ body_id = self.sim.model.body_name2id('robot0:palm')
+ lookat = self.sim.data.body_xpos[body_id]
+ for idx, value in enumerate(lookat):
+ self.viewer.cam.lookat[idx] = value
+ self.viewer.cam.distance = 0.5
+ self.viewer.cam.azimuth = 55.
+ self.viewer.cam.elevation = -25.
diff --git a/gym-grasp/gym_grasp/envs/robot_env.py b/gym-grasp/gym_grasp/envs/robot_env.py
new file mode 100644
index 0000000000..6d0714026a
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/robot_env.py
@@ -0,0 +1,162 @@
+import os
+import copy
+import numpy as np
+
+import gym
+from gym import error, spaces
+from gym.utils import seeding
+
+try:
+ import mujoco_py
+except ImportError as e:
+ raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
+
+
+class RobotEnv(gym.GoalEnv):
+ def __init__(self, model_path, initial_qpos, n_actions, n_substeps):
+ if model_path.startswith('/'):
+ fullpath = model_path
+ else:
+ fullpath = os.path.join(os.path.dirname(__file__), 'assets', model_path)
+ if not os.path.exists(fullpath):
+ raise IOError('File {} does not exist'.format(fullpath))
+
+ model = mujoco_py.load_model_from_path(fullpath)
+ self.sim = mujoco_py.MjSim(model, nsubsteps=n_substeps)
+ self.viewer = None
+
+ self.metadata = {
+ 'render.modes': ['human', 'rgb_array'],
+ 'video.frames_per_second': int(np.round(1.0 / self.dt))
+ }
+
+ self.seed()
+ self._env_setup(initial_qpos=initial_qpos)
+ self.initial_state = copy.deepcopy(self.sim.get_state())
+
+ self.goal = self._sample_goal()
+ obs = self._get_obs()
+ self.action_space = spaces.Box(-1., 1., shape=(n_actions,), dtype='float32')
+ self.observation_space = spaces.Dict(dict(
+ desired_goal=spaces.Box(-np.inf, np.inf, shape=obs['achieved_goal'].shape, dtype='float32'),
+ achieved_goal=spaces.Box(-np.inf, np.inf, shape=obs['achieved_goal'].shape, dtype='float32'),
+ observation=spaces.Box(-np.inf, np.inf, shape=obs['observation'].shape, dtype='float32'),
+ ))
+
+ @property
+ def dt(self):
+ return self.sim.model.opt.timestep * self.sim.nsubsteps
+
+ # Env methods
+ # ----------------------------
+
+ def seed(self, seed=None):
+ self.np_random, seed = seeding.np_random(seed)
+ return [seed]
+
+ def step(self, action):
+ action = np.clip(action, self.action_space.low, self.action_space.high)
+ self._set_action(action)
+ self.sim.step()
+ self._step_callback()
+ obs = self._get_obs()
+
+ done = False
+ info = {
+ 'is_success': self._is_success(obs['achieved_goal'], self.goal),
+ }
+ reward = self.compute_reward(obs['achieved_goal'], self.goal, info)
+ return obs, reward, done, info
+
+ def reset(self):
+ # Attempt to reset the simulator. Since we randomize initial conditions, it
+ # is possible to get into a state with numerical issues (e.g. due to penetration or
+        # Gimbal lock) or we may not achieve an initial condition (e.g. an object is within the hand).
+ # In this case, we just keep randomizing until we eventually achieve a valid initial
+ # configuration.
+ did_reset_sim = False
+ while not did_reset_sim:
+ did_reset_sim = self._reset_sim()
+ self.goal = self._sample_goal().copy()
+ obs = self._get_obs()
+ return obs
+
+ def close(self):
+ if self.viewer is not None:
+ # self.viewer.finish()
+ self.viewer = None
+
+ def render(self, mode='human'):
+ self._render_callback()
+ if mode == 'rgb_array':
+ self._get_viewer().render()
+ # window size used for old mujoco-py:
+ width, height = 500, 500
+ data = self._get_viewer().read_pixels(width, height, depth=False)
+ # original image is upside-down, so flip it
+ return data[::-1, :, :]
+ elif mode == 'human':
+ self._get_viewer().render()
+
+ def _get_viewer(self):
+ if self.viewer is None:
+ self.viewer = mujoco_py.MjViewer(self.sim)
+ self._viewer_setup()
+ return self.viewer
+
+ # Extension methods
+ # ----------------------------
+
+ def _reset_sim(self):
+ """Resets a simulation and indicates whether or not it was successful.
+ If a reset was unsuccessful (e.g. if a randomized state caused an error in the
+ simulation), this method should indicate such a failure by returning False.
+        In such a case, this method will be called again to attempt the reset.
+ """
+ self.sim.set_state(self.initial_state)
+ self.sim.forward()
+ return True
+
+ def _get_obs(self):
+ """Returns the observation.
+ """
+ raise NotImplementedError()
+
+ def _set_action(self, action):
+ """Applies the given action to the simulation.
+ """
+ raise NotImplementedError()
+
+ def _is_success(self, achieved_goal, desired_goal):
+ """Indicates whether or not the achieved goal successfully achieved the desired goal.
+ """
+ raise NotImplementedError()
+
+ def _sample_goal(self):
+ """Samples a new goal and returns it.
+ """
+ raise NotImplementedError()
+
+ def _env_setup(self, initial_qpos):
+ """Initial configuration of the environment. Can be used to configure initial state
+ and extract information from the simulation.
+ """
+ pass
+
+ def _viewer_setup(self):
+ """Initial configuration of the viewer. Can be used to set the camera position,
+ for example.
+ """
+ pass
+
+ def _render_callback(self):
+ """A custom callback that is called before rendering. Can be used
+ to implement custom visualizations.
+ """
+ pass
+
+ def _step_callback(self):
+ """A custom callback that is called after stepping the simulation. Can be used
+ to enforce additional constraints on the simulation state.
+ """
+ pass
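+
+
+# Illustrative sketch added for documentation (not part of the original file): a concrete
+# environment only needs to subclass RobotEnv and fill in the extension methods above.
+# The XML path, site name, and sizes below are placeholders, not real assets of this repo.
+#
+# class MinimalReachEnv(RobotEnv):
+#     def __init__(self):
+#         super(MinimalReachEnv, self).__init__(
+#             model_path='my_robot.xml', initial_qpos={}, n_actions=4, n_substeps=20)
+#
+#     def _set_action(self, action):
+#         self.sim.data.ctrl[:] = action
+#
+#     def _get_obs(self):
+#         achieved = self.sim.data.get_site_xpos('end_effector').copy()
+#         return {'observation': achieved.copy(),
+#                 'achieved_goal': achieved.copy(),
+#                 'desired_goal': self.goal.copy()}
+#
+#     def _sample_goal(self):
+#         return self.np_random.uniform(-0.2, 0.2, size=3)
+#
+#     def _is_success(self, achieved_goal, desired_goal):
+#         return (np.linalg.norm(achieved_goal - desired_goal, axis=-1) < 0.05).astype(np.float32)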
diff --git a/gym-grasp/gym_grasp/envs/rotations.py b/gym-grasp/gym_grasp/envs/rotations.py
new file mode 100644
index 0000000000..4aafb64a08
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/rotations.py
@@ -0,0 +1,369 @@
+# Copyright (c) 2009-2017, Matthew Brett and Christoph Gohlke
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Many methods borrow heavily or entirely from transforms3d:
+# https://github.com/matthew-brett/transforms3d
+# They have mostly been modified to support batched operations.
+
+import numpy as np
+import itertools
+
+'''
+Rotations
+=========
+
+Note: these have caused many subtle bugs in the past.
+Be careful while updating these methods and while using them in clever ways.
+
+See MuJoCo documentation here: http://mujoco.org/book/modeling.html#COrientation
+
+Conventions
+-----------
+ - All functions accept batches as well as individual rotations
+ - All rotation conventions match respective MuJoCo defaults
+ - All angles are in radians
+    - Matrices follow LR convention
+ - Euler Angles are all relative with 'xyz' axes ordering
+ - See specific representation for more information
+
+Representations
+---------------
+
+Euler
+ There are many euler angle frames -- here we will strive to use the default
+ in MuJoCo, which is eulerseq='xyz'.
+ This frame is a relative rotating frame, about x, y, and z axes in order.
+ Relative rotating means that after we rotate about x, then we use the
+ new (rotated) y, and the same for z.
+
+Quaternions
+ These are defined in terms of rotation (angle) about a unit vector (x, y, z)
+ We use the following convention:
+ q0 = cos(angle / 2)
+ q1 = sin(angle / 2) * x
+ q2 = sin(angle / 2) * y
+ q3 = sin(angle / 2) * z
+ This is also sometimes called qw, qx, qy, qz.
+    Note that quaternions are ambiguous, because we can represent a rotation by
+    angle about vector <x, y, z> and -angle about vector <-x, -y, -z>.
+ To choose between these, we pick "first nonzero positive", where we
+ make the first nonzero element of the quaternion positive.
+    This can result in mismatches if you're converting a quaternion that is not
+ "first nonzero positive" to a different representation and back.
+
+Axis Angle
+ (Not currently implemented)
+ These are very straightforward. Rotation is angle about a unit vector.
+
+XY Axes
+ (Not currently implemented)
+ We are given x axis and y axis, and z axis is cross product of x and y.
+
+Z Axis
+ This is NOT RECOMMENDED. Defines a unit vector for the Z axis,
+ but rotation about this axis is not well defined.
+ Instead pick a fixed reference direction for another axis (e.g. X)
+ and calculate the other (e.g. Y = Z cross-product X),
+ then use XY Axes rotation instead.
+
+SO3
+ (Not currently implemented)
+ While not supported by MuJoCo, this representation has a lot of nice features.
+ We expect to add support for these in the future.
+
+TODO / Missing
+--------------
+ - Rotation integration or derivatives (e.g. velocity conversions)
+ - More representations (SO3, etc)
+ - Random sampling (e.g. sample uniform random rotation)
+ - Performance benchmarks/measurements
+    - (Maybe) define everything as to/from matrices, for simplicity
+'''
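+
+# Worked example of the quaternion convention above (added for clarity, not in the original):
+# a rotation of pi/2 about the z axis has
+#     q = [cos(pi/4), 0, 0, sin(pi/4)] ~= [0.7071, 0., 0., 0.7071]
+# so that, with the helpers below,
+#     quat2euler(np.array([0.7071, 0., 0., 0.7071])) ~= [0., 0., pi/2]
+# and euler2quat([0., 0., np.pi / 2]) recovers the same quaternion up to sign.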
+
+# For testing whether a number is close to zero
+_FLOAT_EPS = np.finfo(np.float64).eps
+_EPS4 = _FLOAT_EPS * 4.0
+
+
+def euler2mat(euler):
+ """ Convert Euler Angles to Rotation Matrix. See rotation.py for notes """
+ euler = np.asarray(euler, dtype=np.float64)
+ assert euler.shape[-1] == 3, "Invalid shaped euler {}".format(euler)
+
+ ai, aj, ak = -euler[..., 2], -euler[..., 1], -euler[..., 0]
+ si, sj, sk = np.sin(ai), np.sin(aj), np.sin(ak)
+ ci, cj, ck = np.cos(ai), np.cos(aj), np.cos(ak)
+ cc, cs = ci * ck, ci * sk
+ sc, ss = si * ck, si * sk
+
+ mat = np.empty(euler.shape[:-1] + (3, 3), dtype=np.float64)
+ mat[..., 2, 2] = cj * ck
+ mat[..., 2, 1] = sj * sc - cs
+ mat[..., 2, 0] = sj * cc + ss
+ mat[..., 1, 2] = cj * sk
+ mat[..., 1, 1] = sj * ss + cc
+ mat[..., 1, 0] = sj * cs - sc
+ mat[..., 0, 2] = -sj
+ mat[..., 0, 1] = cj * si
+ mat[..., 0, 0] = cj * ci
+ return mat
+
+
+def euler2quat(euler):
+ """ Convert Euler Angles to Quaternions. See rotation.py for notes """
+ euler = np.asarray(euler, dtype=np.float64)
+ assert euler.shape[-1] == 3, "Invalid shape euler {}".format(euler)
+
+ ai, aj, ak = euler[..., 2] / 2, -euler[..., 1] / 2, euler[..., 0] / 2
+ si, sj, sk = np.sin(ai), np.sin(aj), np.sin(ak)
+ ci, cj, ck = np.cos(ai), np.cos(aj), np.cos(ak)
+ cc, cs = ci * ck, ci * sk
+ sc, ss = si * ck, si * sk
+
+ quat = np.empty(euler.shape[:-1] + (4,), dtype=np.float64)
+ quat[..., 0] = cj * cc + sj * ss
+ quat[..., 3] = cj * sc - sj * cs
+ quat[..., 2] = -(cj * ss + sj * cc)
+ quat[..., 1] = cj * cs - sj * sc
+ return quat
+
+
+def mat2euler(mat):
+ """ Convert Rotation Matrix to Euler Angles. See rotation.py for notes """
+ mat = np.asarray(mat, dtype=np.float64)
+ assert mat.shape[-2:] == (3, 3), "Invalid shape matrix {}".format(mat)
+
+ cy = np.sqrt(mat[..., 2, 2] * mat[..., 2, 2] + mat[..., 1, 2] * mat[..., 1, 2])
+ condition = cy > _EPS4
+ euler = np.empty(mat.shape[:-1], dtype=np.float64)
+ euler[..., 2] = np.where(condition,
+ -np.arctan2(mat[..., 0, 1], mat[..., 0, 0]),
+ -np.arctan2(-mat[..., 1, 0], mat[..., 1, 1]))
+ euler[..., 1] = np.where(condition,
+ -np.arctan2(-mat[..., 0, 2], cy),
+ -np.arctan2(-mat[..., 0, 2], cy))
+ euler[..., 0] = np.where(condition,
+ -np.arctan2(mat[..., 1, 2], mat[..., 2, 2]),
+ 0.0)
+ return euler
+
+
+def mat2quat(mat):
+ """ Convert Rotation Matrix to Quaternion. See rotation.py for notes """
+ mat = np.asarray(mat, dtype=np.float64)
+ assert mat.shape[-2:] == (3, 3), "Invalid shape matrix {}".format(mat)
+
+ Qxx, Qyx, Qzx = mat[..., 0, 0], mat[..., 0, 1], mat[..., 0, 2]
+ Qxy, Qyy, Qzy = mat[..., 1, 0], mat[..., 1, 1], mat[..., 1, 2]
+ Qxz, Qyz, Qzz = mat[..., 2, 0], mat[..., 2, 1], mat[..., 2, 2]
+ # Fill only lower half of symmetric matrix
+ K = np.zeros(mat.shape[:-2] + (4, 4), dtype=np.float64)
+ K[..., 0, 0] = Qxx - Qyy - Qzz
+ K[..., 1, 0] = Qyx + Qxy
+ K[..., 1, 1] = Qyy - Qxx - Qzz
+ K[..., 2, 0] = Qzx + Qxz
+ K[..., 2, 1] = Qzy + Qyz
+ K[..., 2, 2] = Qzz - Qxx - Qyy
+ K[..., 3, 0] = Qyz - Qzy
+ K[..., 3, 1] = Qzx - Qxz
+ K[..., 3, 2] = Qxy - Qyx
+ K[..., 3, 3] = Qxx + Qyy + Qzz
+ K /= 3.0
+ # TODO: vectorize this -- probably could be made faster
+ q = np.empty(K.shape[:-2] + (4,))
+ it = np.nditer(q[..., 0], flags=['multi_index'])
+ while not it.finished:
+ # Use Hermitian eigenvectors, values for speed
+ vals, vecs = np.linalg.eigh(K[it.multi_index])
+ # Select largest eigenvector, reorder to w,x,y,z quaternion
+ q[it.multi_index] = vecs[[3, 0, 1, 2], np.argmax(vals)]
+ # Prefer quaternion with positive w
+ # (q * -1 corresponds to same rotation as q)
+ if q[it.multi_index][0] < 0:
+ q[it.multi_index] *= -1
+ it.iternext()
+ return q
+
+
+def quat2euler(quat):
+ """ Convert Quaternion to Euler Angles. See rotation.py for notes """
+ return mat2euler(quat2mat(quat))
+
+
+def subtract_euler(e1, e2):
+ assert e1.shape == e2.shape
+ assert e1.shape[-1] == 3
+ q1 = euler2quat(e1)
+ q2 = euler2quat(e2)
+ q_diff = quat_mul(q1, quat_conjugate(q2))
+ return quat2euler(q_diff)
+
+
+def quat2mat(quat):
+ """ Convert Quaternion to Euler Angles. See rotation.py for notes """
+ quat = np.asarray(quat, dtype=np.float64)
+ assert quat.shape[-1] == 4, "Invalid shape quat {}".format(quat)
+
+ w, x, y, z = quat[..., 0], quat[..., 1], quat[..., 2], quat[..., 3]
+ Nq = np.sum(quat * quat, axis=-1)
+ s = 2.0 / Nq
+ X, Y, Z = x * s, y * s, z * s
+ wX, wY, wZ = w * X, w * Y, w * Z
+ xX, xY, xZ = x * X, x * Y, x * Z
+ yY, yZ, zZ = y * Y, y * Z, z * Z
+
+ mat = np.empty(quat.shape[:-1] + (3, 3), dtype=np.float64)
+ mat[..., 0, 0] = 1.0 - (yY + zZ)
+ mat[..., 0, 1] = xY - wZ
+ mat[..., 0, 2] = xZ + wY
+ mat[..., 1, 0] = xY + wZ
+ mat[..., 1, 1] = 1.0 - (xX + zZ)
+ mat[..., 1, 2] = yZ - wX
+ mat[..., 2, 0] = xZ - wY
+ mat[..., 2, 1] = yZ + wX
+ mat[..., 2, 2] = 1.0 - (xX + yY)
+ return np.where((Nq > _FLOAT_EPS)[..., np.newaxis, np.newaxis], mat, np.eye(3))
+
+def quat_conjugate(q):
+ inv_q = -q
+ inv_q[..., 0] *= -1
+ return inv_q
+
+def quat_mul(q0, q1):
+ assert q0.shape == q1.shape
+ assert q0.shape[-1] == 4
+ assert q1.shape[-1] == 4
+
+ w0 = q0[..., 0]
+ x0 = q0[..., 1]
+ y0 = q0[..., 2]
+ z0 = q0[..., 3]
+
+ w1 = q1[..., 0]
+ x1 = q1[..., 1]
+ y1 = q1[..., 2]
+ z1 = q1[..., 3]
+
+ w = w0 * w1 - x0 * x1 - y0 * y1 - z0 * z1
+ x = w0 * x1 + x0 * w1 + y0 * z1 - z0 * y1
+ y = w0 * y1 + y0 * w1 + z0 * x1 - x0 * z1
+ z = w0 * z1 + z0 * w1 + x0 * y1 - y0 * x1
+ q = np.array([w, x, y, z])
+ if q.ndim == 2:
+ q = q.swapaxes(0, 1)
+ assert q.shape == q0.shape
+ return q
+
+def quat_rot_vec(q, v0):
+ q_v0 = np.array([0, v0[0], v0[1], v0[2]])
+ q_v = quat_mul(q, quat_mul(q_v0, quat_conjugate(q)))
+ v = q_v[1:]
+ return v
+
+def quat_identity():
+ return np.array([1, 0, 0, 0])
+
+def quat2axisangle(quat):
+    theta = 0
+    axis = np.array([0, 0, 1])
+ sin_theta = np.linalg.norm(quat[1:])
+
+ if (sin_theta > 0.0001):
+ theta = 2 * np.arcsin(sin_theta)
+ theta *= 1 if quat[0] >= 0 else -1
+ axis = quat[1:] / sin_theta
+
+ return axis, theta
+
+def euler2point_euler(euler):
+ _euler = euler.copy()
+ if len(_euler.shape) < 2:
+ _euler = np.expand_dims(_euler,0)
+ assert(_euler.shape[1] == 3)
+ _euler_sin = np.sin(_euler)
+ _euler_cos = np.cos(_euler)
+ return np.concatenate([_euler_sin, _euler_cos], axis=-1)
+
+def point_euler2euler(euler):
+ _euler = euler.copy()
+ if len(_euler.shape) < 2:
+ _euler = np.expand_dims(_euler,0)
+ assert(_euler.shape[1] == 6)
+ angle = np.arctan(_euler[..., :3] / _euler[..., 3:])
+ angle[_euler[..., 3:] < 0] += np.pi
+ return angle
+
+def quat2point_quat(quat):
+ # Should be in qw, qx, qy, qz
+ _quat = quat.copy()
+ if len(_quat.shape) < 2:
+ _quat = np.expand_dims(_quat, 0)
+ assert(_quat.shape[1] == 4)
+ angle = np.arccos(_quat[:,[0]]) * 2
+ xyz = _quat[:, 1:]
+ xyz[np.squeeze(np.abs(np.sin(angle/2))) >= 1e-5] = (xyz / np.sin(angle / 2))[np.squeeze(np.abs(np.sin(angle/2))) >= 1e-5]
+ return np.concatenate([np.sin(angle),np.cos(angle), xyz], axis=-1)
+
+def point_quat2quat(quat):
+ _quat = quat.copy()
+ if len(_quat.shape) < 2:
+ _quat = np.expand_dims(_quat, 0)
+ assert(_quat.shape[1] == 5)
+ angle = np.arctan(_quat[:,[0]] / _quat[:,[1]])
+ qw = np.cos(angle / 2)
+
+ qxyz = _quat[:, 2:]
+ qxyz[np.squeeze(np.abs(np.sin(angle/2))) >= 1e-5] = (qxyz * np.sin(angle/2))[np.squeeze(np.abs(np.sin(angle/2))) >= 1e-5]
+ return np.concatenate([qw, qxyz], axis=-1)
+
+def normalize_angles(angles):
+ '''Puts angles in [-pi, pi] range.'''
+ angles = angles.copy()
+ if angles.size > 0:
+ angles = (angles + np.pi) % (2 * np.pi) - np.pi
+ assert -np.pi-1e-6 <= angles.min() and angles.max() <= np.pi+1e-6
+ return angles
+
+def round_to_straight_angles(angles):
+ '''Returns closest angle modulo 90 degrees '''
+ angles = np.round(angles / (np.pi / 2)) * (np.pi / 2)
+ return normalize_angles(angles)
+
+def get_parallel_rotations():
+ mult90 = [0, np.pi/2, -np.pi/2, np.pi]
+ parallel_rotations = []
+ for euler in itertools.product(mult90, repeat=3):
+ canonical = mat2euler(euler2mat(euler))
+ canonical = np.round(canonical / (np.pi / 2))
+ if canonical[0] == -2:
+ canonical[0] = 2
+ if canonical[2] == -2:
+ canonical[2] = 2
+ canonical *= np.pi / 2
+ if all([(canonical != rot).any() for rot in parallel_rotations]):
+ parallel_rotations += [canonical]
+ assert len(parallel_rotations) == 24
+ return parallel_rotations
diff --git a/gym-grasp/gym_grasp/envs/utils.py b/gym-grasp/gym_grasp/envs/utils.py
new file mode 100644
index 0000000000..a73e5f6052
--- /dev/null
+++ b/gym-grasp/gym_grasp/envs/utils.py
@@ -0,0 +1,96 @@
+import numpy as np
+
+from gym import error
+try:
+ import mujoco_py
+except ImportError as e:
+ raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
+
+
+def robot_get_obs(sim):
+ """Returns all joint positions and velocities associated with
+ a robot.
+ """
+ if sim.data.qpos is not None and sim.model.joint_names:
+ names = [n for n in sim.model.joint_names if n.startswith('robot')]
+ return (
+ np.array([sim.data.get_joint_qpos(name) for name in names]),
+ np.array([sim.data.get_joint_qvel(name) for name in names]),
+ )
+ return np.zeros(0), np.zeros(0)
+
+
+def ctrl_set_action(sim, action):
+ """For torque actuators it copies the action into mujoco ctrl field.
+ For position actuators it sets the target relative to the current qpos.
+ """
+ if sim.model.nmocap > 0:
+ _, action = np.split(action, (sim.model.nmocap * 7, ))
+ if sim.data.ctrl is not None:
+ for i in range(action.shape[0]):
+ if sim.model.actuator_biastype[i] == 0:
+ sim.data.ctrl[i] = action[i]
+ else:
+ idx = sim.model.jnt_qposadr[sim.model.actuator_trnid[i, 0]]
+ sim.data.ctrl[i] = sim.data.qpos[idx] + action[i]
+
+
+def mocap_set_action(sim, action):
+ """The action controls the robot using mocaps. Specifically, bodies
+ on the robot (for example the gripper wrist) is controlled with
+ mocap bodies. In this case the action is the desired difference
+ in position and orientation (quaternion), in world coordinates,
+ of the of the target body. The mocap is positioned relative to
+ the target body according to the delta, and the MuJoCo equality
+ constraint optimizer tries to center the welded body on the mocap.
+ """
+ if sim.model.nmocap > 0:
+ action, _ = np.split(action, (sim.model.nmocap * 7, ))
+ action = action.reshape(sim.model.nmocap, 7)
+
+ pos_delta = action[:, :3]
+ quat_delta = action[:, 3:]
+
+ reset_mocap2body_xpos(sim)
+ sim.data.mocap_pos[:] = sim.data.mocap_pos + pos_delta
+ sim.data.mocap_quat[:] = sim.data.mocap_quat + quat_delta
+
+
+def reset_mocap_welds(sim):
+ """Resets the mocap welds that we use for actuation.
+ """
+ if sim.model.nmocap > 0 and sim.model.eq_data is not None:
+ for i in range(sim.model.eq_data.shape[0]):
+ if sim.model.eq_type[i] == mujoco_py.const.EQ_WELD:
+ sim.model.eq_data[i, :] = np.array(
+ [0., 0., 0., 1., 0., 0., 0.])
+ sim.forward()
+
+
+def reset_mocap2body_xpos(sim):
+ """Resets the position and orientation of the mocap bodies to the same
+ values as the bodies they're welded to.
+ """
+
+ if (sim.model.eq_type is None or
+ sim.model.eq_obj1id is None or
+ sim.model.eq_obj2id is None):
+ return
+ for eq_type, obj1_id, obj2_id in zip(sim.model.eq_type,
+ sim.model.eq_obj1id,
+ sim.model.eq_obj2id):
+ if eq_type != mujoco_py.const.EQ_WELD:
+ continue
+
+ mocap_id = sim.model.body_mocapid[obj1_id]
+ if mocap_id != -1:
+ # obj1 is the mocap, obj2 is the welded body
+ body_idx = obj2_id
+ else:
+ # obj2 is the mocap, obj1 is the welded body
+ mocap_id = sim.model.body_mocapid[obj2_id]
+ body_idx = obj1_id
+
+ assert (mocap_id != -1)
+ sim.data.mocap_pos[mocap_id][:] = sim.data.body_xpos[body_idx]
+ sim.data.mocap_quat[mocap_id][:] = sim.data.body_xquat[body_idx]
diff --git a/gym-grasp/setup.py b/gym-grasp/setup.py
new file mode 100644
index 0000000000..16314b2f17
--- /dev/null
+++ b/gym-grasp/setup.py
@@ -0,0 +1,10 @@
+from setuptools import setup
+
+setup(name='gym_grasp',
+ version='0.0.1',
+ install_requires=['gym>=0.2.3',
+ 'mujoco_py>=1.50'],
+ package_data={'gym_grasp' : [
+ 'envs/assets/hand/*.xml'
+ ]}
+)
diff --git a/mujoco-py b/mujoco-py
new file mode 160000
index 0000000000..54367d181b
--- /dev/null
+++ b/mujoco-py
@@ -0,0 +1 @@
+Subproject commit 54367d181b4335b42a0f094274a07b21352af9f2
diff --git a/projection/Dockerfile b/projection/Dockerfile
new file mode 100644
index 0000000000..ec19fa6e41
--- /dev/null
+++ b/projection/Dockerfile
@@ -0,0 +1,24 @@
+FROM chainer/chainer:v4.5.0-python3
+MAINTAINER Yoshimura Naoya
+
+# Install Emacs
+RUN apt-get update
+RUN apt-get install emacs24-nox -y
+
+
+# Install Jupyter and alias python/pip to Python 3
+RUN pip3 install jupyter \
+ && jupyter notebook --generate-config
+RUN echo 'alias python=python3' >> ~/.bashrc \
+ && echo 'alias pip=pip3' >> ~/.bashrc
+RUN pip3 install --upgrade pip
+
+# Install Python modules from requirements.txt
+COPY requirements.txt /root
+RUN pip install -r /root/requirements.txt
+
+
+
+# Finish
+RUN mkdir /root/work
+WORKDIR /root/work
diff --git a/projection/MAKE_CONTAINER.sh b/projection/MAKE_CONTAINER.sh
new file mode 100644
index 0000000000..14a99f8ce0
--- /dev/null
+++ b/projection/MAKE_CONTAINER.sh
@@ -0,0 +1,18 @@
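+#!/bin/bash
+# Usage: bash MAKE_CONTAINER.sh <mode>; mode 0 starts the synergy container.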
+mode=$1
+
+if [ "${mode}" = "0" ];
+then
+ DIR_CODE="/home/yoshimura/code708/synergy"
+ DIR_DATA="/home/yoshimura/code708/dataStore"
+ NAME="synergy"
+ IMAGE="yoshimura/synergy:v4.5.0"
+ PORT_NOTE=7088
+ PORT_TFB=7086
+
+ docker run --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=0 \
+ -v ${DIR_CODE}:/root/work \
+ -v ${DIR_DATA}:/root/dataStore \
+ --name ${NAME} \
+ -p ${PORT_NOTE}:8888 -p ${PORT_TFB}:6006 \
+ -it ${IMAGE} jupyter notebook --allow-root --ip 0.0.0.0
+fi
diff --git a/projection/README.md b/projection/README.md
new file mode 100644
index 0000000000..da5f8fcc85
--- /dev/null
+++ b/projection/README.md
@@ -0,0 +1,58 @@
+# Projection Network with Chainer
+Training of a network that performs the projection from the central nervous system (action) to pre-motor neurons.
+
+
+## Installation
+### Requirements
++ chainer
++ cupy
+
+### Docker Setup
+First, build the Docker image with this command.
+
+```
+$ docker build -t synergy/chainer:v4.5.0 .
+```
+
+Then create a container:
+
+```
+docker run --runtime=nvidia -e NVIDIA_VISIBLE_DEVICES=${GPU} \
+ -v ${DIR_code}:/root/work \
+ -v ${DIR_DATA}:/root/dataStore \
+ --name ${NAME} \
+ -p ${PORT_NOTE}:8888 -p ${PORT_TFB}:6006 \
+ -it ${IMAGE} jupyter notebook --allow-root --ip 0.0.0.0
+```
+
+## Data Preparation
+### Resampling
+Use `utils/make_inputs.py`. After this, split the data into `train/val/test` yourself.
+
+
+```
+python3 make_inputs.py \
+ --path-in /root/dataStore/grasp_v1/episodes \
+ --path-out /root/dataStore/grasp_v1/Inputs
+```
+
+## Training
+Note: set the correct paths!
+
+
+```
+python3 run.py TRAIN \
+ --path-data-train /root/dataStore/grasp_v1/Inputs/train \
+ --path-data-val /root/dataStore/grasp_v1/Inputs/val \
+ --path-model /root/dataStore/grasp_v1/Log/ChainerDenseNet.model \
+ --path-log /root/dataStore/grasp_v1/Log/ \
+ --gpu 0 \
+ --batch-size 64 \
+ --epoch 10
+
+```
+
+
+
+## Prediction (Generation)
+TBA
diff --git a/projection/dataset/default.py b/projection/dataset/default.py
new file mode 100644
index 0000000000..c38028e2c6
--- /dev/null
+++ b/projection/dataset/default.py
@@ -0,0 +1,50 @@
+import os
+import numpy as np
+import h5py
+
+
+from logging import getLogger, basicConfig, DEBUG
+logger = getLogger(__name__)
+
+# Chainer
+import chainer
+
+# ---------------------------------------------------------
+class DefaultDataset(chainer.dataset.DatasetMixin):
+ """ Default Dataset Object for Multi-Class Classification
+ """
+
+ def __init__(self, file_list):
+ """
+ Args.
+ -----
+ - file_list : list of input files (*.h5)
+ """
+ X, Y = [], []
+ for path in file_list:
+ if not os.path.exists(path):
+ logger.warning("File does not exsists! [path={}]".format(path))
+ continue
+ X_tmp, Y_tmp = self.load_file(path)
+ X.append(X_tmp)
+ Y.append(Y_tmp)
+ self.X = np.concatenate(X, axis=0)
+ self.Y = np.concatenate(Y, axis=0)
+ logger.info("Success: X={}, Y={}".format(self.X.shape, self.Y.shape))
+
+
+ def load_file(self, path):
+ with h5py.File(path, 'r') as f:
+ X = np.array(f["fc"],)
+ Y = np.array(f['action/resampled'],)
+
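+ # Collapse all leading (episode / time-step) axes so that each row is a
+ # single sample; only the trailing feature axis is kept.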
+ xshape, yshape = X.shape, Y.shape
+ X, Y = X.reshape((-1, xshape[-1])), Y.reshape((-1,yshape[-1]))
+ return X, Y
+
+
+ def __len__(self):
+ return len(self.X)
+
+ def get_example(self, i):
+ return self.X[i], self.Y[i]
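+
+
+# Usage sketch (hypothetical paths; adjust to the actual train/val/test split):
+#     files = ["/root/dataStore/grasp_v1/Inputs/train/epoch0.h5"]
+#     dataset = DefaultDataset(files)
+#     x, y = dataset.get_example(0)   # x: fc features, y: resampled action
+#     it = chainer.iterators.SerialIterator(dataset, batch_size=64)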
diff --git a/projection/models/dense.py b/projection/models/dense.py
new file mode 100644
index 0000000000..daac5806e7
--- /dev/null
+++ b/projection/models/dense.py
@@ -0,0 +1,52 @@
+import chainer
+import chainer.links as L
+import chainer.functions as F
+
+import numpy as np
+import cupy as cp
+
+
+
+class DenseNet(chainer.Chain):
+ """
+ Reference.
+ ----------
+ - "Deep Convolutional and LSTM Recurrent Neural Networks for Multimodal Wearable Activity Recognition"
+ [www.mdpi.com/1424-8220/16/1/115/pdf]
+ - Baseline CNN
+ """
+ """
+ Args.
+ -----
+ - n_in : int, Input dim (=X.shape[-1])
+ - n_out : int, Output dim (=Y.shape[-1])
+ """
+ def __init__(self, n_in=None, n_out=None):
+ super(DenseNet, self).__init__()
+ with self.init_scope():
+ # FC
+ self.fc1 = L.Linear(n_in, 32)
+ self.fc2 = L.Linear(32, 32)
+ self.fc3 = L.Linear(32, n_out)
+
+ def __call__(self, x):
+ # Full Connected
+ h1 = F.dropout(F.relu(self.fc1(x)))
+ h2 = F.dropout(F.relu(self.fc2(h1)))
+ h3 = F.tanh(self.fc3(h2))
+ return h3
+
+
+ def get_inter_layer(self, x):
+ h1 = F.relu(self.fc1(x))
+ h2 = F.relu(self.fc2(h1))
+ h3 = F.tanh(self.fc3(h2))
+
+ ret = {
+ "h1": h1,
+ "h2": h2,
+ "h3": h3,
+ }
+
+ return h3, ret
+
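+
+# Usage sketch (the dimensions below are assumptions; the real values come
+# from the input data, see run.py):
+#     model = DenseNet(n_in=256, n_out=21)
+#     x = np.zeros((8, 256), dtype=np.float32)
+#     y = model(x)                        # y.data.shape == (8, 21), in [-1, 1]
+#     _, inter = model.get_inter_layer(x) # dict with 'h1', 'h2', 'h3'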
diff --git a/projection/notebook/01_Resampling_of_Actions.ipynb b/projection/notebook/01_Resampling_of_Actions.ipynb
new file mode 100644
index 0000000000..92ccfb1b66
--- /dev/null
+++ b/projection/notebook/01_Resampling_of_Actions.ipynb
@@ -0,0 +1,873 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 01: Critic Networkの出力の量子化"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 概要\n",
+ "+ Critic Networkはロボットハンドの各時間ごとの角度を出力する.\n",
+ "+ そのまま学習しても投射にならないため, 角度を曲げる, 伸ばす, そのままに変換する [-1,0,1,].\n",
+ "\n",
+ "### ToDo\n",
+ "+ シンプルに曲げる/曲げない/そのままでOKなのか?\n",
+ "+ 値の変化幅を決めるために, 各時刻毎の値の差分の分布を確認する."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1-1: 差分の分布の確認"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(10, 2, 100, 21)\n"
+ ]
+ }
+ ],
+ "source": [
+ "import os\n",
+ "import pandas as pd\n",
+ "import numpy as np\n",
+ "import h5py\n",
+ "\n",
+ "filename = os.path.join(\"/root/dataStore\", \"grasp_v1\", \"episodes\", \"epoch0.h5\")\n",
+ "with h5py.File(filename, 'r') as f:\n",
+ " A = np.array(f[\"action\"],)\n",
+ " \n",
+ "print(A.shape)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(1980, 21)\n"
+ ]
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "import matplotlib.pyplot as plt\n",
+ "import seaborn as sns\n",
+ "sns.set(\"notebook\", \"whitegrid\", font_scale=1.5)\n",
+ "\n",
+ "X = A[:,:,1:,:] - A[:,:,:-1,:]\n",
+ "X = X.reshape((-1,X.shape[-1],))\n",
+ "print(X.shape)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAABzEAAAV/CAYAAAA9zRAkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzs3XuUJWV9L/zvXLp7BtBxPDIwIKhJFgWCChkDElFRvERcs+CNaEyU24mR902ihKCJhpNzciAeA8qL+h6OxghMvCVBgpLhiDGi5kISiaNCxkB5JIhmuIuIXKa7Geb9o3uw6enL7up9qar+fNbq1TNV+6n928/eu76991P11LKdO3cGAAAAAAAAoC6WD7oAAAAAAAAAgKkMYgIAAAAAAAC1YhATAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAAAAAasUgJgAAAAAAAFArBjEBAAAAAACAWlnZyY2KohhJcm6Sk5OsTXJDknPKsrx2nnZ/kOS/zbDqrrIs911YqQDQTHIUAKqTowBQnRwFoMk6GsRMsinJa5O8P8l3kpyW5JqiKF5SluU/ddD+jCQPT/n/IwuoEQCablPkKABUtSlyFACq2hQ5CkBDzTuIWRTFkUnekOSssizfP7nsY0m2Jjk/yYs7uJ/Ly7K8fzGFAkATyVEAqE6OAkB1chSApuvkmpgnJRlP8tFdC8qy3J7kkiTHFEWxvoNtLCuK4slFUSyrViYANJYcBYDq5CgAVCdHAWi0TqaTPSLJzWVZPjht+fVJliU5PMkd82zje0n2SvLjoiiuSPL2sizvW0ihW7ZsGUnyc5P3tWMhbQFolRVJ1if5lw0bNowOupgOyFEA6kSOylEAqpOjC8xRGQrAFAvO0U4GMdcn2TbD8l0Bt98cbX+Y5P9L8s9JxpK8LBPzqP9sURRHlWW5kLD/uSR/v4DbA9BuL0ryD4MuogNyFIA6kqNyFIDq5GjnOSpDAZiu4xztZBBzdZKZQmn7lPUzKsvyA9MWXVEUxdYkFyc5JcmfdFLkpDuS5KCDDsrw8PACmu1u69atOeywwxa1DWanf7vjze/+mxmXf/ScV8y5jsXx+u2tbvTv2NhYvv3tbyfzHy1aF63KUe+R3tPH3SFHB8Prt7fk6BPIUXajfxemalb2Ikdl8wSv4d6So0/Qzxz1nW5D6N8nqppNVTK0k+0yP6/h3hpUjnYyiPlIkpEZlq+asn4hPpzkvUmOy8I+NO5IkuHh4YyMzFTOwnRjG8xO/y7e/Q/NPMPGyMjInOtYPP3YW13s36ZMQ9O6HPUe6T19vHhydHD0Y2/J0cfJUWakfztXNStnW7ds+coMD62Ycd3Y+I5Z181Xy1KzFB9zP8nRx/UzR32n2yD69yeqZlOVDO1ku3RGP/bWIHK0k0HMOzIx9cB0u5bd3umdJUlZlo8VRbEtyVMX0g4AGkqOAkB1chQaYnhoRTaefdWM6zZfeEKfqwEmyVEAGm15B7f5ZpKDi6LYa9ryoyZ/37CQOyyKYijJAUnuWUg7AGgoOQoA1clRAKhOjgLQaJ0MYl6RZCjJm3ctKIpiJMnpSa4ry/L2yWUHFkVx8NSGRVHsPcP23pGJKQv+umrRANAgchQAqpOjAFCdHAWg0eadTrYsy68WRfHpJBcURbE+yS1JTk3yjCSnTbnpx5K8JMmyKctuK4riz5NszcRFpF+a5LVJ/iHJp7rxAACgzuQoAFQnRwGgOjkKQNN1ck3MJDklyXmTv9cmuTHJ8WVZXjdPu08meWGS1yUZTvLdye28pyzLR6sUDAANJEcBoDo5CgDVyVEAGqujQcyyLLdnYrqAd8xxm2NnWPZrlSsDgJaQowBQnRwFgOrkKABN1umZmAC18cADD+Tuu+/O+Ph417e9cuXK3HTTTV3fLhPm69+hoaGsW7cuT37yk/tYFcDSIkeba67+XblyZVatWpW99947q1at6nNlAEvD9u3bc88992T79u159NHun4gmR3trrv71WRSg9+Rosw0qRw1iAo3ywAMP5K677sr++++f1atXZ9myZfM3WoCHHnooe+65Z1e3yU/M1b87d+7MI488km3btiWJD48APSBHm222/t25c2ceffTRPPjgg/ne976XffbZJ2vWrBlAhQDt9aMf/Sh33XVX9t577+y7775ZuXKlHG2YuXLUZ1GA3pKjzTeoHF3e1a0B9Njdd9+d/fffP3vssUfXg47BWrZsWfbYY4/sv//+ufvuuwddDkArydF2WrZsWYaGhrJ27do8/elPzw9+8INBlwTQOvfee2+e/vSnZ+3atRkaGpKjLeKzKEDvydH26nWOGsQEGmV8fDyrV68edBn00OrVq3syxSEAcnQpWL16dUZHRwddBkDrjI2NydCW81kUoHfkaPv1KkcNYgKN40iddvP8AvSW/Wy7eX4Besc+tt08vwC9ZT/bbr16fg1iAgAAAAAAALViEBMAAAAAAACoFYOYAOSrX/1qiqLIV7/61UGXAgCNI0cBoBoZCgDVLYUcNYgJUDMf/ehHUxRFTj311EGXsiiPPfZY/uRP/iQve9nL8pznPCcbN27MX//1Xw+6LABarg05+tBDD+WDH/xgfvVXfzVHHnlkiqLIlVdeOeiyAGi5NmToLbfckgsuuCAnnHBCjjjiiBxzzDE544wz8q1vfWvQpQHQcm3I0e9///s566yz8opXvCKHH354jjrqqLzxjW/MV77ylYHVZBATaIWx8R1d2c6ee+458Bo2b96c/fffP9dff33uvvvurmxzEC666KK8733vyzHHHJPf//3fz3777Zd3vetd+fznPz/o0gCYRo7Wyw9/+MNcfPHFueWWW3LwwQcPuhwA5tGWHG1Dhl5xxRX59Kc/ncMOOyzvfOc7c9ppp+Xf//3f8/rXvz7/8i//MujygDnMtR/r1n6WepKj9XHXXXfl/vvvz8aNG/N7v/d7+Y3f+I0sX748Z5xxRj772c8OpKaVA7lXgC4bHlqRjWdfNdAaNl94wqK38Z3vfCc333xz/viP/zi//du/nc997nM57bTTFl9cn91111257LLLcsopp+Scc85Jkrzuda/LL//yL+eCCy7IK1/5yixf7jgagLqQo/Wybt26/P3f/33WrVuXm266KSeeeOKgSwJgDm3I0bZk6Gte85r85m/+5hO+yH7ta1+b448/Ph/5yEdy7LHHDq44YE5z7Uu78VmB+pKj9fH85z8/l1122ROWvelNb8ov/uIv5pOf/GTe+MY39r0m3yAD1MjmzZvztKc9LS960Yty3HHH5a/+6q92u82uuc6/8IUv5OKLL86LXvSiPOc5z8mpp56a2267bbfbf/KTn8xxxx2X5z73uTnppJPyta99LSeffHJOPvnkeev5+te/ntNPPz0/+7M/m8MPPzynnXZatm7dOm+7L37xixkfH8+v/MqvPL5s2bJled3rXpdt27blxhtvnHcbALBQbcnR4eHhrFu3rrMHDQBd0JYMPeyww3Y7E2ft2rV5/vOfn1tvvXXe9gBQRVt
ydCbLly/Pvvvumx//+MeV2i+WQUyAGrn66qvzqle9KitWrMhrXvOafOtb35r1g9aHPvShfOlLX8qb3/zmvOUtb8kNN9yQt7/97U+4zac+9amce+652W+//fKOd7wjz3/+8/Mbv/EbufPOO+et5R//8R9zyimnZGxsLG9729ty5pln5s4778yb3vSmfOc735mz7U033ZS99torz3rWs56w/NBDD02S/Nu//du89w8AC9WWHAWAfmt7ht5zzz15ylOeUqktAMynbTn6yCOP5L777sv3vve9bNq0KX/3d3+XI488sqO23WY6WYCa+PrXv57/+I//yPHHH58keeELX5g1a9Zk8+bNedvb3rbb7R999NFceeWVGRoaSpKsWbMm7373u/Ptb387Bx10UMbGxvKBD3wghx9+eC677LKsXDmxyy+KIu985zuz7777zlrLY489lj/4gz/IMccckw9/+MOPLz/ppJPy6le/OhdffHEuuuiiWdvfc889edrTnrbb8l3LmjovPAD11aYcBYB+anuGfu1rX8s3v/nNvOUtb1lQOwDoRBtz9IMf/GAuvfTSJBNnYr7yla/MO97xjs47pYuciQlQE1dffXX22WefbNiwIUkyNDSUV7ziFbn66qtnvP1rX/vax8MumZizPEm+//3vJ0m2bt2a+++/P69//esfD7sk2bhxY9asWTNnLTfffHNuu+22vOY1r8l99933+M/4+Hg2bNiQ66+/fs7227dvz/Dw8G7LR0ZGkiSjo6NztgeAhWpTjgJAP7U5Q3/wgx/k7LPPzoEHHphTTjllQW0BZjI2vmPQJVAzbczRX/qlX8pll12W888/P8cee2x27NiRsbGxjtp2mzMxAWrg0UcfzTXXXJMXvvCF+d73vvf48sMPPzxXXHFFbrzxxjz3uc99Qpv169c/4f9PfvKTkyQPPPBAkuT2229PkjzjGc94wu1WrlyZ/ffff856vvvd7ybJblMZ7LJ8+dzHwKxatWrGYNs1eLlrMBMAuqFtOQoA/dLmDH344Ydzxhln5JFHHskll1yS1atXd9wWYDbDQyuy8eyrZly3+cIT+lwNg9bWHH3mM5+ZZz7zmUmSE088Mb/2a7+W3/qt38pf/uVfZtmyZR1to1sMYgLUwHXXXZf77rsvmzdvzubNm3dbv3nz5t0Cb8WKFTNua+fOnYuuZ9c23vWud+Wggw5acPu99947X/va13Zbfu+99yZJ1q1bt7gCAWCKtuUoAPRLWzN0bGwsb33rW/Ptb387l156aX7mZ34mDz300KLrA4Cp2pqj073qVa/KOeeck1tvvTU/9VM/1bXtdsIgJkANbN68Ofvuu2/e9a537bbuqquuyjXXXJN3vvOds4bcTPbbb78kyW233fb4tATJxBFC27ZtS1EUs7Y94IADkkwcCfTzP//zHd/nLoccckg+/elP59Zbb82znvWsx5dv3br18fUA0C1ty1EA6Jc2Zuhjjz2W3/3d380//dM/5YMf/OATagCAbmpjjs5k1+x6Dz74YNe22SmDmAAD9sgjj+Taa6/NCSeckF/4hV/Ybf2qVavypS99Kf/8z/+cF77whR1v97DDDstTnvKUXH755TnhhBMen0N98+bN+dGPfjRn20MPPTQHHHBALr300rz61a/ebdqd++67L0996lNnbX/cccflPe95Tz71qU/lnHPOSTJxJNAVV1yR/fbbL8973vM6fhwAMJc25ihA24yN78jwUOdf3tEfbc3Q8847L5/73Ody7rnn5uUvf3nHdQPAQrQxR2da/+ijj+Yzn/lMRkZG8tM//dMdP45uMYgJMGDXXnttHn744bz0pS+dcf0LXvCCrFq1Kps3b15Q4A0PD+etb31rzjvvvJx++ul51atelW3btuXKK6/MgQceOGfbFStW5Lzzzstb3vKWbNy4MSeeeGLWrVuXO++8M9ddd10OPPDAvPe97521/b777ptTTjkll156aUZHR/Oc5zwnX/ziF/ONb3wjF110kWuBAdA1bczRJPnEJz6RBx544PGp2L/85S/n+9//foaGhvLrv/7rHT8OgDpw/bB6amOGbtq0KZ/61KdyxBFHZNWqVbnqqp+87kZHR/P617++48cBAHNpY46+973vzW233ZYXvOAFWb9+fe69995s3rw5t9xyS84888zsueeeHT+ObjGICbTC2PiOgX/4rXp08dVXX53Vq1fnBS94wYzrV61alaOPPjp/8zd/k//+3//7grb9pje9KTt37sxll12W888/PwcffHA+9KEP5Q//8A8zMjIyZ9ujjz46f/7nf56LL744H//4x/Pwww9n3bp1OeKII/KGN7xh3vt++9vfnjVr1uQv/uIvcuWVV+ZZz3pW/sf/+B85/vjjF/QYAOg9OTqzQebopZdemm3btj3+/y984Qv5whe+kCQGMQFqpqk52sYMvfnmm5Mk3/jGN/KNb3xjt/UGMQHqR47ublA5etxxx+XP/uzPcvnll+f+++/P6tWr8+xnPztnnXXWwC6VYhATaIVuTU300EMPVT6ipGoNH/7whxd0m6OOOiplWe52m6c//ekzLj/55JNz8sknP/7/xx57LNu2bcuzn/3sebd56KGH5n/9r/81b30zWb58ec4444ycccYZjy976KGHKm0LgN6So/XL0S996Uu7LVtM/wLQO03N0TZm6B/90R/lj/7oj2Zc5/MoQD3J0frk6Mtf/vJZp2IfVI6azw+gxXZddHmqz372s7n//vtz5JFHDqAiAGgOOQoA1chQAKhOjv6EMzEBWmzLli153/vel1e+8pV5ylOekn/7t3/LFVdckYMOOmjGC04DAD8hRwGgGhkKANXJ0Z8wiAnQYgcccEDWrVuXj3/84/nRj36UNWvW5MQTT8zZZ5+d4eHhQZcHALUmRwGgGhkKANXJ0Z8wiAnQYgcccEBH87MDALuTowBQjQwFgOrk6E+4JiYAAAAAAABQKwYxgcbZuXPnoEughzy/AL1lP9tunl+A3rGPbTfPL0Bv2c+2W6+eX4OYQKOsXLkyjz766KDLoIceffTRrFxptnOAXpCj7Tc+Pp4VK1YMugyA1lmxYkXGx8cHXQY95LMoLC1j4zsGXcKSIkfbr1c5KpmBRlm1alUefPDBrF27dtCl0CM//vGPs2rVqkGXAdBKcrT9HnjggTzpSU8adBkArfOkJz0pDzzwQJ72tKcNuhR6xGdRWFqGh1Zk49lXzbhu84Un9Lma9pOj7derHHUmJtAoe++9d+655548/PDDpiBomZ07d+bhhx/Ovffem7333nvQ5QC0khxtp507d2ZsbCz33ntvfvjDH+apT33qoEsCaJ2nPvWp+eEPf5h77703Y2NjcrRFfBYF6D052l69zlFnYgKNsmrVquyzzz658847Mzo62vXtj42NZXh4uOvbZcJ8/TsyMpJ99tnH0a8APSJHm22u/l2xYkWe9KQn5cADD8zIyEifKwNov5GRkRx44IG577778t3vfjc7dnR/GkI52ltz9a/PogC9JUebb1A5ahATaJw1a9ZkzZo1Pdn2li1b8rznPa
8n20b/AtSBHG0u/QswWCMjI1m/fn3Wr1/fk+3bz/eW/gUYLDnabIPq344GMYuiGElybpKTk6xNckOSc8qyvHYhd1YUxeeSvDrJB8qy/K0F1goAjSRHAaA6OQoA1clRaL6x8R0ZHlqx4HXQBp2eibkpyWuTvD/Jd5KcluSaoiheUpblP3WygaIoXpPkxRVqBICm2xQ5CgBVbYocBYCqNkWOQqMND63IxrOvmnHd5gtP6HM10F/L57tBURRHJnlDkt8py/J3yrL8SJKXJflekvM7uZOiKIaTXJTkgkXUCgCNI0cBoDo5CrMbG5/9WlJzrQOWDjkKQNPNO4iZ5KQk40k+umtBWZbbk1yS5JiiKDqZwPjMJKuTvK9KkQDQYHIUAKqTozCLXWdlzPRjWjlgkhwFoNE6GcQ8IsnNZVk+OG359UmWJTl8rsZFUeyb5PeT/F5Zlg9XqhIAmkuOAkB1chQAqpOjADRaJ9fEXJ9k2wzL75j8vd887d+TpEzyiQXUNautW7d2YzPZsmVLV7bDzPTv4mzYsKFyW32/ePqwt5Zg/7YuR5fgc9h3+nhx5Ohg6cPeWoL9K0dZkKXUv/Pl3Vx9sZis7Lel9JwmS+/x9tsS7N/a5KjvdJthkP07VzaNje+YdZaBhx7enptv+lZX729QZuv/xWT+UqMvemsQ/dvJIObqJKMzLN8+Zf2MJuddPyXJS8qy3Lnw8nZ32GGHZWRkZFHb2LJlSy13Um2hfwdL3y+O129vdaN/R0dHu/bhp09alaPeI72njwdL3y+O129vydEnkKPsRv8+UVv6oi2PoxNew70lR5+g7znqO936q3P/7ppCfSabLzyhtnUvVNXH0ZbHv1h1fg23waBytJPpZB9JMlPCrJqyfjdFUSxL8oEkf1mW5T8sqCoAaA85CgDVyVEAqE6OAtBonZyJeUcmph6Ybtey22dp938lOTLJ7xVF8cxp6548ueyusixnDEtou7mmQQBaRY4CQHVyFACqk6MANFong5jfTHJmURR7TbsI9FGTv2+Ypd2BmTjT80szrDt98ufVST7fYa3QKvNNgwC0hhwFgOrkKABUJ0cBaLROBjGvSPL2JG9O8v4kKYpiJBNhdV1ZlrdPLjswyR5lWd482W5zku/OsL3PJLk6ySVJvr6Y4gGgAeQoAFQnR2EJm2sGI7MbQUfkKACNNu8gZlmWXy2K4tNJLiiKYn2SW5KcmuQZSU6bctOPJXlJkmWT7W6ZvO0TFEWRJLeUZfnZxRYPAHUnRwGgOjkKS5sZjGBx5CgATdfJmZhJckqS8yZ/r01yY5Ljy7K8rleFAUCLyFEAqE6OAkB1chSAxupoELMsy+1J3jH5M9ttju1wW8s6qgwAWkKOAkB1chQAqpOjADTZ8kEXAAAAAAAAADCVQUwAAAAAGmVsfEeldQAANEen18QEAAAAgFoYHlqRjWdfNeO6zRee0OdqAADoBWdiAgAAAAAAALViEBMAAAAAAACoFYOYAAAAAAAAQK0YxAQAAAAAABpnbHxHpXV104ta29I3LG0rB10AAAAAAADAQg0PrcjGs6+acd3mC0/oczXV9eJxtKVvWNqciQkAAAAAAADUikFMAAAAAAAAoFYMYgIAAAAAAAC1YhATAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAoDXGxnfMum50jnVztQMAoP9WDroAAAAAAOiW4aEV2Xj2VTOu23zhCXOuAwCgPpyJCQAAAAAAANSKQUwAAAAAAACgVgxiAgAAAAAAALViEBMAAAAAAACoFYOYAAA0ztj4jkGXAAAAAEAPrRx0AQAAsFDDQyuy8eyrZly3+cIT+lwNAAAAAN3mTEwAAAAAAACgVgxiAgAAAAAAALViEBMAAAAAAACoFYOYAAAAAAAAQK0YxAQAgEUYG99RaR0AAAAAs1s56AIAAKDJhodWZOPZV824bvOFJ/S5GgCaZmx8R4aHVix4HQAAtJ1BTAAAAIABcTAMAADMzHSyAAAAAAAAQK0YxAQAAAAAAABqxSAmAAAAQIuMje8YdAkAALBorokJAAAA0CJzXWczca1NACaMje/I8NCKJMmGDRtmXddUbXgMsNQZxAQAAAAAgCVmroNe2nDAS9sfHywFHQ1iFkUxkuTcJCcnWZvkhiTnlGV57Tzt3pjkV5M8e7LdHUm+nOQPyrK8bRF1A0BjyFEAqE6OAkB1chSAJuv0mpibkpyV5BNJzkzyWJJriqI4ep52z0uyLcn7kvw/Sf40yS8k+ZeiKPatUjAANNCmyFEAqGpT5CgAVLUpchSAhpr3TMyiKI5M8oYkZ5Vl+f7JZR9LsjXJ+UlePFvbsix/Z4btXZVkS5I3ZSIEAaC15CgAVCdHAaA6OQpA03VyJuZJScaTfHTXgrIstye5JMkxRVGsX+B97ppu4CkLbAcATSRHAaA6OQoA1clRABqtk2tiHpHk5rIsH5y2/Poky5Icnok50WdVFMVTJ+/rwCT/dXLxnPOuA0BLyFEAqE6OAkB1chSARutkEHN9JuY/n25XwO3XwTa+neQ/Tf77B0l+syzLL3fQbjdbt26t0mw3W7Zs6cp2mJn+nd+GDRt6sl19v3j6sLeWYP+2LkeX4HPYd/p4fnXK0flqWWrP51J7vP22BPtXjrIgTezfqjnSqyxc6gb9Ghr0/bfdEuzf2uSo73TrqU6fZeRadVX/Vmjj+6mNj6lOBtG/nQxirk4yOsPy7VPWz+cXk+yZ5OBMzJn+pI6qm8Fhhx2WkZGRqs2TTHS0nWLv6N/B0veL4/XbW93o39HR0a59+OmTVuWo90jv6ePB6kXfL6Xn0+u3t+ToE8hRdtPW/m3jY6qzbvf32PiODA+t6GhdW1/DdSFHn6DvOeo73WbS381Q9Xlq2/NrH9Fbg8rRTgYxH0kyU8KsmrJ+TmVZ/t3kP68piuKzSbYWRfFgWZb/s7MyofcW8sGirtrwGKCF5CgAVCdHgUYbHlqRjWdfNeO6zRee0OdqWILkKACN1skg5h2ZmHpgul3Lbl/IHZZleWtRFFuSvDGJsKM22vDBog2PAVpIjgJAdXIUAKqTowA02vIObvPNJAcXRbHXtOVHTf6+ocL9rk6ypkI7AGgaOQoA1clRAKhOjgLQaJ0MYl6RZCjJm3ctKIpiJMnpSa4ry/L2yWUHFkVx8NSGRVHsPX1jRVFsSHJ4EldYBWApkKMAUJ0cBYDq5CgAjTbvdLJlWX61KIpPJ7mgKIr1SW5JcmqSZyQ5bcpNP5bkJUmWTVl2W1EUlyf51yQPJjk0yX9O8uMk53XjAQBAnclRAKhOjgJAdXIUgKbr5JqYSXJKJsLplCRrk9yY5PiyLK+bp93FSV6e5MQke2RiHvbLk5xXluWtlSoGgOaRowBQnRwFgOrkKACN1dEgZlmW25O8Y/JnttscO8OyWW8PAEuFHAWA6uQoAFQnRwFosk6uiQkAAFQwNr5j1nWjc6ybqx0AAADAUtDpdLIAAMACDQ+tyMazr5px3eYLT5hzHQAAAMBS5kxMAAAAAAAAoFYMYgIAAAAAAB2Z6/IXLo0BdJPpZ
AEAAAAAgI7Md9kMgG5xJiYAAAAAAABQKwYxAQBqyhQ99aG/68P7AgAAAJYG08kCANSUKXrqw3NRH54LAAAAWBqciQk4owEAAAAAAKgVZ2ICzmgAAACYNDa+I8NDKxa8DgAA6C6DmAAAAACTHOQJAAD1YDpZAAAAAAAAoFYMYgIAUEtL+brMcz32UdeyBgAAAJYA08kCAFBLS3k6v/ke+1LtF4ClxjU4AZaO+fb5o+M7MuKazUzTi+fe9cGpE4OYAAAAADW0lA/oAVhq5trnJw5mZGa9+FvB3x/UielkAQAAAAAAgFoxiEnrzHUtqH5fJ8p1qQAAAAAAABbOdLK0Tp1Od69TLQAAAAAAAE3hTEwAAIgZFAAAAADqxJmYAAAQMygAAAAA1IkzMQEAAAAAAIBaMYgJAAAA0EOmLG+GuZ4nzyEAQP+ZThYAAACgh0xZ3gyeJwCAenEmJgAAAAAA9Ji+/nKfAAAgAElEQVSzugEWxpmYsEhj4zsyPLRi0GUAAAAAADXmjG+AhTGICYvkjw8AAAAAAIDuMp0sAAAAQAfmmgbQFIEAANBdzsQEAAAA6ICZeAAAoH+ciQkAAAAAQCs5i74+9Hd9eC5oCmdiAgCQZOJDzPDQigWvG9R26Y/5niPPIQAAdeYs+vrwXNSH54KmMIgJAECS3n2I8eGo2eZ6/hLPIVBfDqKhX7yeAAB6wyAm0He+TAAAAHrNQTT0i9caAEBvGMQE+s4HPAAAAAAAYC4dDWIWRTGS5NwkJydZm+SGJOeUZXntPO1+MckvJTkyyT5Jvpdkc5I/LMvyR4uoGwAaQ44CQHVyFACqk6MANNnyDm+3KclZST6R5MwkjyW5piiKo+dp95EkhyT5eJK3Jfnryd/XFUWxqkrBANBAmyJHAaCqTZGjAFDVpshRABpq3jMxi6I4MskbkpxVluX7J5d9LMnWJOcnefEczU8qy/Ir07a3JcmfTm5zU6WqAaAh5Gi7uKZv9+k3usl7tH3kKABUJ0cBaLpOppM9Kcl4ko/uWlCW5faiKC5J8u6iKNaXZXnHTA2nB92kz2Qi7A5ZeLkA0DhytEVc07f79Cnd5PXUSnIUAKqTowA0WifTyR6R5OayLB+ctvz6JMuSHL7A+9x38ve9C2wHAE0kRwGgOjkKANXJUQAarZMzMdcn2TbD8l1H6ey3wPv83SQ7kly5wHZJkq1bt1ZptpstW7Z0ZTvMbJD9u2HDhjnXz1Zbr9rVSbcfe1X9vr+FGvT9t90S7N/W5egSfA4f16/9167ttGX/3KSsbIN+vy6m32ev9xGLeT21Yf/VhsewQHKUBZnav/3+jGfa6vbrxfcC07fZyT7i4EMOzZ57zH5Jwoce3p6bb/pW5ZrabAnug2uTo4P8TrdO30P16rPRXBk01z7B96hUUed9aZ1ra4NB9G8ng5irk4zOsHz7lPUdKYriV5L8apL3lGV5S6ftpjrssMMyMjJSpenjtmzZYofZQ93o3/k+/C3mw2HV2pr+mhkb39GYx96L++v0Gln2D73Vjf4dHR3t2oefPmlVjnqPzK0bfbOQPm7D/pnuG8TztOs+676PqHNtnZCjTyBH2c1C+7fbz4UprduvF+/fqdtcyGt4ttdaMvF6s6/ZnRx9gr7naJ2/023D+2W+DOr3d4Jt6FNmV9fn19/avTWoHO1kEPORJDMlzKop6+dVFMWLklyS5H8n+f2OqmPJmit4Ex8Aq1jqH6iX+uNnoOQotdKLs0SceQL0kBwFgOrkKACN1skg5h2ZmHpgul3Lbp9vA0VRPC/JXyW5MckvlWW5o+MKAaDZ5Ci10ouDOhx8BPSQHAWA6uQoAI22vIPbfDPJwUVR7DVt+VGTv2+Yq3FRFD+d5PNJ7k7ymrIsH1pwlQDQXHIUAKqTowBQnRwFoNE6GcS8IslQkjfvWlAUxUiS05NcV5bl7ZPLDiyK4uCpDYui2DfJF5I8luRVZVne263CAaAh5CgAVCdHgSVpbLz6yW5ztV3MdmkkOQrUQi+yaXq7qddrlHftMe90smVZfrUoik8nuaAoivVJbklyapJnJDltyk0/luQlSZZNWfb5JD+V5IIkxxRFccyUdbeUZflPiysfAOpNjgJAdXIUWKoWcwmAXlw+gGaSo0Bd9PvSNvKuPTq5JmaSnJLkvMnfazMxB/rxZVleN0+7503+/p0Z1v1pEmEHwFIgRwGgOjkKANXJUQAaq6NBzLIstyd5x+TPbLc5doZly2a4KQAsKXK0WcbGd2R4aMWgy1iUNjwG+strhjqTowBQnRwFoMk6PROTBpnrS6g6fUG1mFrq9Djor7me+9HxHRnp4+uiKe81YGHaMB1JGx4D/eU1Ayw1/l4HAID6M4jZQk35Esr1Hahivue+n68Lr0MAAGgmf8sDQG84UAjoJoOYAAAAAADAojlQCOim5YMuAAAAAAAAAGAqg5gAAAAAMIex8R2DLgEAYMkxnSwA0BXzXffCdTFgsHr1Hpy63Q0bNvTlPquYq5bR8R0ZmWVdnR4DAINjekQAgP4ziAkAdMVcX+wkvtyBQevVl69N+VJ3vjqb8BgAAADaZq4DRx1UikFMAAAAAAAA+q4pB8YyGK6JyaJNvy7E1GnE6nbNiKr11O1xAAAAAM021/cpAAA4E5MuaNKRElVrbdJjBAAAAOrPdw0AAHNzJiYAQBfMdda+M/rplya91prynmlKnQAAANA2zsQEAOgCR9JTB016HTal1qbUCQAA0Gtj4zsyPLRiwet6cX8sDQYxAQAAAAAAmFO/D/J0UCmmkwUAAABqy7TOAACwNDkTEwAAAKgtR+ADAMDS5EzMLmnKkaFNqZNm6MXraRCvw6n3uWHDhlnXAQAAAAAA/eFMzC5pypGhTamTZujF62mubS5mu1Xv0/sCBq/fF41nZvqaKqq+btrwerPvAgBor7n+nhsd35ERf+sBdIVBTACg1hxoUA+eB6qo+rppw+utDY+B/jLwXc3Uvpk+qwqwO/sa6I75/tbzdyBAdxjEBAAAgAEz8F2NfoOF8Z4BAJrENTEBAAAAAACAWjGICQAAAAAAANSKQUwAAACg58bGd1RaB9TbXO/f0Yrr7BOog7a8DtvyOGAp8veza2ICAC01Nr4jw0MrFryu37WMju/IyJR1GzZs6FdZANBXrsUH7TTfe7vqOhi0tuRWWx4HLEXevwYxgR7p9wBBr9TpcdRpQAaaoE5/6C3mix0AAAAAWIoMYgI9UafBg8Wo0+OoUy0AAAAAANBLrokJAAAAAEuc627RZFVfo17bs9M3LJTXDL3gTEwAAAAAWOLM/kOTVX39et3PTt+wUF4z9IIzMQEAAAAAAIBaMYgJAHTs4EMO7cl2TTkCAAAAAExlOlkAoGN77rGqJ1ODmHIEAAAAAJjKmZgAAAAAAABArRjEnGau6eyqTnU3V7vRHtxfVb147DSf5777+r2f8RwCAAAAANA0ppOdphfT2c23zbpMn2cqP2biddF9g9jPAAAAAABAk3Q0iFkUxUiSc5OcnGRtkhuSnFOW5bXztDsyyWlJjkrynCRDZVkuW0zB
ANA0chQAqpOjAFCdHAWgyTqdTnZTkrOSfCLJmUkeS3JNURRHz9Pu+CS/NvnvW6oUCAAtsClyFACq2hQ52hguZQBQO5siRwFoqHnPxJw86uYNSc4qy/L9k8s+lmRrkvOTvHiO5h9Kcn5Zlo8URfH+JAcvvmQAaA45CgDVydHmcZkDgPqQo8BSNTa+I8NDK/q2jt7pZDrZk5KMJ/norgVlWW4viuKSJO8uimJ9WZZ3zNSwLMu7ulMmADSWHAWA6uQoAFQnR4ElqeqBdQ7Iq59OppM9IsnNZVk+OG359UmWJTm861UBQHvIUQCoTo4CQHVyFIBG6+RMzPVJts2wfNdROvt1r5z5bd26tSvb2bJly4zLN2zYMGubfp8uPNf9PfTw9tx807dmXDfXY1iMJvQZ1EmV98xi2lXZZlUHH3Jo9txj1Yzr5to/9aKWBmhVji42Y/r5+h7Ee63f2vAYoOm6vQ+qen8t1qocTfr/mum3QWQ60B/9fo/2Yr/XlH1pF9UmRwf5nS7AdE35m7xu9QwiRzsZxFydZHSG5dunrO+bww47LCMjI4vaxpYtWyoFW79PJZ7v/vodzk3oM6iTqu/RXry3e7HNKvunqvvfqUZHR7v24adPWpejVY2N76jV67vpH3J71Z/AwvRzHyRHn6CRObqY53Ap7POXwmOEJmvC91BzkaNP0PccHeR3ugDTteVv8n7WM6gc7WQQ85EkMyXMqinrAYCZydFJDmzpLv0JLBFyFACqa1WOHnzIoYMuAWiBqrNHztVudHxHRubYphkrq+tkEPOOTEw9MN2uZbd3rxwAaB05CgDVyVEAqK5VObrnHqscyAksWtWDwudrN9u6+bbL3JZ3cJtvJjm4KIq9pi0/avL3Dd0tCQBaRY4CQHVytIfGxnfMum604jqAbqi6f5qr3RIlRwFotE7OxLwiyduTvDnJ+5OkKIqRJKcnua4sy9snlx2YZI+yLG/uUa0A0ERyFACqk6M9VPVo8vnWASzWYvZPPIEcBaDR5h3ELMvyq0VRfDrJBUVRrE9yS5JTkzwjyWlTbvqxJC9JsmzXgqIonpHk5Mn/Hjm57L9M/v+Gsiw3L/YBAECdyVEAqE6OAkB1chSApuvkTMwkOSXJeZO/1ya5McnxZVleN0+7Z022m2rX//80ibADYCmQozXjguoAjSJHAaA6OQpAY3U0iFmW5fYk75j8me02x86w7CuZcgRPXRx8yKGDLmHRfPkK9Van9+hctfSizrm22Yb9bxVty9E2qHoRdwD6T45WV6e/SQEYDDkKQJN1eiZmq+y5x6rGf3Hpy1eotzq9R/tdS50eOwDAUjbX32WJv80AAIB6Wz7oAgAAAAAAAACmMogJAAAAAAAA1IpBTAAAgArGxnf0dZtL9drSAAAATTbX57xefK6cS51q6cSSvCYmAADAYvXiOtCuLQ0A1Y2N78jw0IoZ1zkYCIBBqdPnvDrV0gmDmAAAAABA4zXti1kAYG6mkwUAAAAAAABqxSAmHanjXMjQRnV6r9WpFgAAAAAAlhbTydIR03FAf9TpvVanWgAA2qDq9djmusbbXOsAusW+BgDqZa5sHh3fkZGW5LZBTAAAgD7yRfDSteceqyodJObgMmDQ7IcAoF7my+a25LZBTAAAgD7yRTAAAADMzzUxAQAAAAAAgFoxiAkAAAAM1Nj4jkGXAAAA1IzpZAEAAICBMs0yAAAwnTMxAZYwR7wDAAAAAFBHzsQEWMIc8Q4AAAAAQB05ExMAAAAayswaAABAWzkTEwAAABrKzBoAAEBbORMTAAAAAAAA+qxOM6vMVcvBhxzax0p+wpmYAAAAAAAA0Gd1mlmlTrXs4kxMAKCx6nS0GgAAAADQPc7EBAAaq45HiAEAAAAAi+dMTAAAAAAAAKBWDGICAAAAAAAAtWIQEwAAAAAAAKgVg5gAAAAAAABArRjEBAAAAAAAAGrFICYAAAAAAABQKwYxAQAAAAAAgFoxiAkAAAAAAADUikFMAAAAAAAAoFYMYgIAAAAAAAC1YhATAAAAAAAAqJWVndyoKIqRJOcmOTnJ2iQ3JDmnLMtrO2i7f5KLkrwyE4OmX0pyVlmWt1YtGgCaRI4CQHVyFACqk6MANFmnZ2JuSnJWkk8kOTPJY0muKYri6LkaFUWxV5IvJ3lRkncn+W9JfjbJV4qiWFuxZgBomk2RowBQ1abIUQCoalPkKAANNe+ZmEVRHJnkDZk4yub9k8s+lmRrkvOTvHiO5r+e5GeSbCjL8huTba+ZbHtWkv+6qOoBoObkKABUJ0cBoDo5CkDTdXIm5klJxpN8dNeCsiy3J7kkyTFFUayfp+0/7wq6ybY3J7k2yesrVQwAzSJHAaA6OQoA1clRABqtk2tiHpHk5rIsH5y2/Poky5IcnuSO6Y2Kolie5LlJPjLDNq9P8oqiKPYoy/LhDmtdkSRjY2Md3nxuT9lzxYzLR0dHrav5urrVY53n3rqFrVusKTkw+wulXlqXo96/1i1kXd3qsc7z2/R1iyVHH2/bqBz1/rWum+vqVo91nt9+rlssOfp424XkaNcyNKlPjnpvW9fNdXWrxzrP72zrFqtKji7buXPnnDcoimJrkm1lWb5q2vJnJ/lWkjeXZXnJDO2eluSeJL9XluV7pq379SQXJ/mZsixv6aTQLVu2HJPk7zu5LQBLwos2bNjwD4MuYj5yFICakqNyFIDq5GiHOSpDAZhBxznayZmYq5PMNMS6fcr62dqlYtuZ/EsmLiR9R5IdC2gHQLusSLI+E7nQBHIUgDqRo3IUgOrk6MJzVIYCsMuCc7STQcxHkozMsHzVlPWztUvFtrvZsGHDaJLaH+EEQF90dNZETchRAOpGjs7ddjdyFIAp5OjcbZ9AhgIwzYJydHkHt7kjEyOj0+1advss7e7LxNE6s7XdmRnmXAeAlpGjAFCdHAWA6uQoAI3WySDmN5McXBTFXtOWHzX5+4aZGpVl+ViSf03y/BlWH5Xk/3R48WcAaDI5CgDVyVEAqE6OAtBonQxiXpFkKMmbdy0oimIkyelJrivL8vbJZQcWRXHwDG1fUBTFEVPaFkleluTTi6wdAJpAjgJAdXIUAKqTowA02rKdO3fOe6OiKC5PcmKSizIxX+2pSX4uyUvLsrxu8jZfSfKSsiyXTWn3pCTfSLJnkguTPJrkt5MsS3J4WZY/6OaDAYA6kqMAUJ0cBYDq5CgATdbJmZhJckqSD0z+/mAmjuA5flfQzaYsyx8nOTYTF2/+/STnZWIag5cIOgCWEDkKANXJUQCoTo4C0FgdnYkJAAAAAAAA0C+dnokJAAAAAAAA0BcGMQEAAAAAAIBaWTnoAgapKIrjkrwpyQuTPD3JHUmuTfJfy7K8c5C1tUFRFEWS/zvJUUmOSLIqybPKsvzuIOtqoqIoRpKcm+TkJGuT3JDknLIsrx1oYS1RFMX6JGdm4rX6/CR7ZeIC918ZZF1tUBTFzyU5LclLkzwjyQ+S/GOS/1KW5XcGWBpdIEd7S452jxztLTnaO3K03eRob8nR7pGjvSNDe0uOtpsc7S052h0ytLf
kaG/VJUeX+pmY5yd5SZLPJHlbkr9I8oYkXy+KYt0gC2uJozPRr09OctOAa2m6TUnOSvKJTOyYH0tyTVEURw+yqBYpkvxuJv7ovXHAtbTN7yb5xSRfzMRr9yNJjk3yjaIoDhlgXXSHHO0tOdo9myJHe0mO9o4cbTc52ltytHs2RY72igztLTnabnK0t+Rod2yKDO0lOdpbtcjRJX0mZpLfTvIPZVk+tmtBURSfT/K3SX49yR8MqK62+KskTynL8sdFUfxWJo7aYYGKojgyE3+EnVWW5fsnl30sydZM/MH24gGW1xZbkjytLMsfFEVxYib+AKY7/t8kv1KW5diuBUVR/EWSf81EEJ42oLroDjnaW3K0C+RoX8jR3pGj7SZHe0uOdoEc7TkZ2ltytN3kaG/J0UWSoX0hR3urFjm6pM/ELMvy76YG3a5lSe5L4oisRSrL8r6yLH886Dpa4KQk40k+umtBWZbbk1yS5JjJ0+ZZhLIsf1yW5Q8GXUcblWX5j1ODbnLZ/0nyrdjPNp4c7S052jVytMfkaO/I0XaTo70lR7tGjvaQDO0tOdpucrS35GhXyNAek6O9VZccXdKDmDMpimKvTMydfO+ga4FJRyS5uSzLB6ctvz7JsiSH978kqK4oimVJ9on9bCvJUWpIjtIqcrTd5Cg1JEdpFTnabnKUmpGhtM4gctQg5u5+K8lwkssHXQhMWp+Ji5NPt2vZfn2sBbrhjUn2j/1sW8lR6kaO0jZytN3kKHUjR2kbOdpucpQ6kaG0Ud9ztDXXxCyKYnkmQmpek6dtz7SNFyf5b0n+rCzLv+1ieY3Xjf6lstVJRmdYvn3KemiEoigOTnJxkn9I8vEBl8MUcrS35OhAyVFaQ47WlxztLTk6UHKU1pCj9SVHe0uODowMpVUGlaNtOhPzxUke6eSnKIqnTW88+QR8JskNSX6tTzU3yaL6l0V5JMnIDMtXTVkPtVcUxb5J/neSHyZ53fRrVzBwcrS35OjgyFFaQY7WnhztLTk6OHKUVpCjtSdHe0uODoYMpTUGmaOtORMzyc1JTu/wtk+4KHFRFAck+UKS+5O8pizLh7pcWxtU7l8W7Y5MTD8w3a5lt/exFqikKIo1Sa5JsibJC8uyvHPAJbE7OdpbcnRw5CiNJ0cbQY72lhwdHDlK48nRRpCjvSVHB0OG0gqDztHWDGJOdtymhbYriuI/ZSLoRpK8rCzLu7pcWitU7V+64ptJziyKYq9pF4I+avL3DQOoCTpWFMWqJJuTHJTkuLIsywGXxAzkaG/J0YGSozSaHG0GOdpbcnSg5CiNJkebQY72lhwdGBlK49UhR9s0neyCFUWxZ5LPZeJCpMeXZfmdAZcEM7kiyVCSN+9aUBTFSCaOoLquLEtH7VBbRVGsSPIXSY7OxFQD/zzgkugiOUpDyFEaS462mxylIeQojSVH202O0gAylEarS44u27lz5yDutxaKovhskhOSXJrky9NW31WW5d/0v6r2mDzN+K2T/z06yfFJLszE9A63lWXpIuodKori8iQnJrkoyS1JTk3yc0leWpbldYOsrS2Kovgvk/88JMmvZGK/cGuS+8uy/J8DK6zhiqJ4f5IzM3HEzuXTVj9YluVn+18V3SJHe0uOdo8c7T052htytN3kaG/J0e6Ro70lQ3tHjrabHO0tOdodMrT35Gjv1CVHWzOdbEWHT/7+z5M/U/1tEmG3OGuTnDdt2dmTv/82ibDr3CmZ6MtTMtGvN2biKDNh1z3TX6u79gm3JRF41e3az26c/JnqtiQ+NDabHO0tOdo9crT35GhvyNF2k6O9JUe7R472lgztHTnabnK0t+Rod8jQ3pOjvVOLHF3SZ2ICAAAAAAAA9bOkr4kJAAAAAAAA1I9BTAAAAAAAAKBWDGICAAAAAAAAtWIQEwAAAAAAAKgVg5gAAAAAAABArRjEBAAAAAAAAGrFICYAAAAAAABQKwYxAQAAAAAAgFoxiAkAAAAAAADUikFMAAAAAAAAoFYMYgIAAAAAAAC1YhATAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAAAAAasUgJgAAAAAAAFArBjEBAAAAAACAWlnZyY3+f/buPsqysr4T/bfprqpuICCJNjQI3vHOYrcBCVCGl+FNMWrEELhKEtaMvGUYc28SXwhRkzFOHNGVQUNEHUaTgHbAeA0SIxaR3BiMmZEYybSK007YBpdBad5Uggp0dxVF3z+qqlNdXS+ndp2XvXd9PmvVqu69z7PP7zxnn/Otc569n10UxUiStye5KMkhSe5O8payLO9Yot3bkvz2PKseLsvysOWVCgDNJEcBoDo5CgDVyVEAmqyjQcwkW5K8Ksm1Se5NcmmS24uiOKssyy900P6Xkjw56/87llEjADTdlshRAKhqS+QoAFS1JXIUgIZachCzKIqTklyY5IqyLK+dXnZjkm1Jrk5yZgf3c3NZlo+tpFAAaCI5CgDVyVEAqE6OAtB0nVwT84IkE0mun1lQluXOJDckOb0oik0dbGNNURQHFUWxplqZANBYchQAqpOjAFCdHAWg0TqZTvaEJPeUZfn4nOV3JVmT5PgkDy6xjW8lOTDJD4uiuCXJr5dl+ehyCt26detIkp+cvq/J5bQFoFXWJtmU5O9HR0d3DbqYDshRAOpEjspRAKqTo8vMURkKwCzLztFOBjE3Jdk+z/KZgDt8kbb/nOT9Sf4uyXiSszM1j/qJRVGcXJblcsL+J5P8j2XcHoB2OyPJ5wddRAfkKAB1JEflKADVydHOc1SGAjBXxznaySDmhiTzhdLOWevnVZble+csuqUoim1JrktycZI/7KTIaQ8mydFHH53h4eFlNNvXtm3bcuyxx65oGyxM/3bH5e/8zLzLr3/LSxZdx8rYf3urG/07Pj6er3/968nSR4vWRaty1Guk9/Rxd1TNURm7Mvbf3pKje5Gj7EP/do+sHAz7cG/J0b30M0d9p9sQ+rdz3f68ObOelbEP99agcrSTQcwdSUbmWb5+1vrl+GCSdyd5cZb3oXEySYaHhzMyMl85y9ONbbAw/btyjz0x/wwbIyMji65j5fRjb3Wxf5syDU3rctRrpPf08cpVzVEZu3L6qrfk6B5ylHnp3+6QlYOjH3tLju7Rzxz1nW6D6N/OdPvz5sx6Vk4/9tYgcnS/Dm7zYKamHphrZtkDnd5ZkpRl+XSmpjH40eW0A4CGkqMAUJ0cBYDq5CgAjdbJIOZXkmwuiuLAOctPnv5993LusCiKoSRHJvnOctoBQEPJUQCoTo4CQHVyFIBG62QQ85YkQ0kun1lQFMVIksuS3FmW5QPTy44qimLz7IZFUTxrnu29MVNTFvx/VYsGgAaRowBQnRwFgOrkKACNtuQ1Mcuy/GJRFB9P8q6iKDYl+UaSS5I8J8mls256Y5KzkqyZtey+oig+lmRbpi4i/aIkr0ry+SQf7cYDAIA6k6MAUJ0cBYDq5CgATbfkIOa0i5NcNf37kCRfTXJOWZZ3LtHuj5OcluTnkgwn+afp7fxOWZZPVSkYABpIjgJAdXIUAKqTowA0VkeDmG
VZ7szUdAFvXOQ2L5xn2X+oXBkAtIQcBYDq5CgAVCdHAWiyTs/EBKiNH/zgB3nkkUcyMTHR9W2vW7cu//AP/9D17TJlqf4dGhrKxo0bc9BBB/WxKoDVRY4212L9u27duqxfvz7Petazsn79+j5XBrB6yNHmWqx/fRYF6L2dO3fmO9/5Tnbu3Jmnnur+Cd1ytLcGlaMGMYFG+cEPfpCHH344RxxxRDZs2JA1a9Ys3WgZnnjiiRxwwAFd3Sb/YrH+3b17d3bs2JHt27cniQ+PAD0gR5ttof7dvXt3nnrqqTz++OP51re+lUMPPTQHH3zwACoEaDc52myL5ajPogC99f3vfz8PP/xwnvWsZ+Wwww7LunXr5GjDDCpH9+vq1gB67JFHHskRRxyR/fffv+tBx2CtWbMm+++/f4444og88sgjgy4HoJXkaDutWbMmQ0NDOeSQQ/LsZz873/ve9wZdEkArydF28lkUoPe++93v5tnPfnYOOeSQDA0NydEW6XWOGsQEGmViYiIbNmwYdBn00IYNG3oyNRMAcnQ12LBhQ3bt2jXoMgBaSY62m8+iAL0zPj4uQ1uuVzlqEBNoHEfqtJvnF6C3vM+2m+cXoLe8z7aX5xagt7zPtluvnl+DmAAAAAAAAECtGMQEAAAAAAAAasUgJgD54he/mKIo8sUvfnHQpQBAo8hQAKhOjgJAdashRw1iAtTM9ddfn6Iocskllwy6lK759Kc/naIocuaZZz3T6QcAACAASURBVA66FABarA0Zev/996coinl/7rzzzkGXB0CLtSFHZ3zzm9/MG97whpxyyik57rjj8vKXvzxbtmwZdFkAtFgbcvT973//gp9HTzzxxGzdurXvNa3r+z0C9MD4xGSGh9aueDsHHHDAwGsYGxvLEUcckbvuuiuPPPJINm7cuOJtDtLOnTvz7ne/O/vvv/+gSwFgAW3J0TZl6M/+7M/m9NNP32vZ0UcfPaBqAFiMHK2Xr33ta7n44ovz3Oc+N7/0S7+UAw44IN/+9rfz8MMPD7o0AOYhR+vjJS95SY466qh9lr/nPe/JE088kec///l9r8kgJtAKw0Nrc+6Vtw60hrFrzlvxNu69997cc889+f3f//382q/9Wj796U/n0ksvXXlxA/SHf/iHGR4eztlnn53Pfe5zgy4HgHm0IUfblqHHHHNMzjtv7z554oknBlQNAIuRo/UxOTmZN73pTTn11FPzvve9L/vt9y+T0MlRgHqSo/WxefPmbN68ea9lDz74YB566KGcf/75GR4e7ntNppMFqJGxsbE885nPzBlnnJEXv/jF+dSnPrXPbWbmOv/Lv/zLXHfddTnjjDPy/Oc/P5dccknuu+++fW7/x3/8x3nxi1+c4447LhdccEH+5//8n7noooty0UUXLVnPl770pVx22WU58cQTc/zxx+fSSy/Ntm3bOn48DzzwQK6//vq8+c1vztDQUMftAGC52pahSfLkk09mfHx8WW0AoIq25OjnP//53Hvvvbniiiuy33775YknnsjTTz/dWScAQEVtydH53Hbbbdm9e3fOOeecSu1XyiAmQI3cdtttednLXpa1a9fmFa94Rb72ta/lm9/85ry3/cAHPpDPfvazufzyy/Oa17wmd999d3791399r9t89KMfzdvf/vYcfvjheeMb35gXvOAF+ZVf+ZU89NBDS9byt3/7t7n44oszPj6e173udXn961+fhx56KK9+9atz7733dvR4rr766pxwwgk5++yzO7o9AFTVtgx973vfmxNOOCHHHXdcfuEXfiF///d/31E7AKiiLTn6hS98IQceeGAefvjhvOxlL8uJJ56YE088Mb/1W7+VHTt2dN4hALAMbcnR+YyNjWXTpk058cQTl922G0wnC1ATX/rSl3L//ffvOarltNNOy8EHH5yxsbG87nWv2+f2Tz31VD7xiU/sOcPx4IMPzjvf+c58/etfz9FHH53x8fG8973vzfHHH58Pf/jDWbdu6i2/KIr8xm/8Rg477LAFa3n66afztre9Laeffno++MEP7ll+wQUX5OUvf3muu+66vOc971n08dx11135zGc+k0984hPL7gsAWI42Zeh+++2X008/PS95yUuycePG3Hfffbnhhhty2WWX5QMf+EDOOOOMSn0EAAtpU47ed999mZyczC//8i/nVa96Va688sp8+ctfzoc//OF85zvfye///u9X6iMAWEibcnSuf/zHf0xZlrn88suzZs2ajtt1kzMxAWritttuy6GHHprR0dEkydDQUF7ykpfktttum/f2r3rVq/aaovUFL3hBkuTb3/52kmTbtm157LHH8vM///N7wi5Jzj333Bx88MGL1nLPPffkvvvuyyte8Yo8+uije34mJiYyOjqau+66a9H2k5OTecc73pFXvvKV+8yjDgDd1qYMPfzww3PDDTfkwgsvzNlnn53LLrssf/qnf5r169fn/e9//9KdAQDL1KYcffLJJ7Njx46cd955eetb35qXvvSlefOb35xf/MVfzOc+97ncc889S3cIACxDm3J0rrGxsT33PSjOxASogaeeeiq33357TjvttHzrW9/as/z444/PLbfckq9+9as57rjj9mqzadOmvf5/0EEHJUl+8IMfJJm6HmWSPOc5z9nrduvWrcsRRxyxaD3/9E//lCT7TGUwY7/9Fj8G5k/+5E9y//3350Mf+tCitwOAlWpbhs7n0EMPzSte8YrcfPPN2bFjRzZs2LDsbQDAfNqWo+vXr0+S/MzP/Mxey3/2Z382N9xwQ7Zu3epAW6CWxicmMzy0dtnrGKy25ehsu3fvzm233Zajjz46mzdvzhNPPNFx224yiAlQA3feeWceffTRjI2N7TnCZbaxsbF9Am/t2vn/eNm9e/eK65nZxm/+5m/m6KOPXlbb8fHxvO9978srX/nK7Ny5M/fff3+SqSNid+/enfvvvz/7779/fvRHf3TFdQJAmzJ0MZs2bcrTTz+dH/zgBwYxAeiatuXos571rCTJj/3Yj+21fOb/M18QA9TN8NDanHvlrfOuG7vmvD5XQ6falqOzbd26Ndu3b8+VV1654rpWwiAmQA2MjY3lsMMOy2/+5m/us+7WW2/N7bffnt/4jd9YMOTmc/jhhyeZuibIzLQEydQRQtu3b09RFAu2PfLII5NMHQn0b/7Nv+n4PpNk586d+ed//ufcdNNNuemmm/ZZ/+IXvzjnnHPOsuZfB4CFtClDF/Ptb387a9euXXL6IABYjrbl6DHHHJOPf/zjefjhh/Pc5z53z/KHHnooSRxMC0BXtS1HZxsbG8uaNWv2md2g3wxiwoCYBoAZO3bsyB133JHzzjsvP/3TP73P+vXr1+ezn/1s/u7v/i6nnXZax9s99thj84xnPCM333xzzjvvvD1zqI+NjeX73//+om2POeaYHHnkkfnQhz6Ul7/85fuc8fHoo48u+OFvw4YNue666/ZZfuONN+buu+/ONddck0MPPbTjxwEAC2lbhi60/r777suf//mf54QTTtgzTR4ArFQbc/Tss8/OO9/5ztxyyy059
dRT9yz/+Mc/njVr1uSUU07p+HEAwGLamKMzJiYm8hd/8RcZHR3dM6g6KAYxYUBMEcCMO+64I08++WRe9KIXzbv+lFNOyfr16zM2NraswBseHs5rX/vaXHXVVbnsssvyspe9LNu3b88nPvGJHHXUUYu2Xbt2ba666qq85jWvybnnnpvzzz8/GzduzEMPPZQ777wzRx11VN797nfP23ZoaCg/9VM/tc/yv/qrv8rXvva1edcBQBVty9Akefe7351vf/vbOeWUU7Jx48Z861vfysc+9rEkyRVXXNHxYwCApbQxRw899NC85jWvyXXXXZeJiYmccsop+fKXv5xPfepT+bmf+7l9ri8GAFW1MUdnfP7zn89jjz2Wc889t+O6e8UgJtAK4xOTAx/8rXp27W233ZYNGzYseETo+vXrc+qpp+Yzn/lM/vN//s/L2varX/3q7N69Ox/+8Idz9dVXZ/PmzfnABz6Qd7zjHRkZGVm07amnnpqPfexjue6663LTTTflySefzMaNG3PCCSfkwgsvXFYdANRbU3O0jRl62mmn5WMf+1g+8pGP5Ic//GEOOuignHbaafnVX/3VgR8BC8D85Oi+BvlZ9LWvfW0OOuigfPSjH81nP/vZbNy4MW94wxvy6le/elmPAaAuFnuPb8Nsf3J0X4P+TndsbCxDQ0PznmHabwYxgVboVlg/8cQTOeCAA/pawwc/+MFl3ebkk09OWZb73ObZz372vMsvuuiiXHTRRXv+//TTT2f79u358R//8SW3ecwxx+S//bf/tmR9nfgv/+W/5K1vfWtXtgXQhg9qddLUHG1jhv7Mz/zMgtcceeKJJ5a9PQB6T47WJ0eTZM2aNbn00ktz6aWX7rVcjgJN1fYZ/eRovXI0SX7v936vcttuM4gJ0GK7du3a5+icT37yk3nsscdy0kknDagqgJVr+4c4Bk+GAkB1chQAqpOj/8IgJkCLbd26Nb/7u7+bl770pXnGM56R//2//3duueWWHH300bWYDgAA6kqGAkB1chQAqpOj/8IgJkCLHXnkkdm4cWNuuummfP/738/BBx+c888/P1deeWWGh4cHXR4A1JYMBYDq5CgAVCdH/4VBTIAWO/LIIzuanx0A2JsMBZpsqWtHu7Y0vSZHAaA6OfovDGICAAAAtMhi145OXD8aAIBm2G/QBQAAAAAAAADMZhATWNT4xGSldb20e/fugdwv/eH5Begt77Pt5vkF6C3vs+3luQXoLe+z7dar59d0ssCiFpuGaBBTEA0NDWXHjh3Zf//9+37f9MeOHTsyNDQ06DIAWkmOtt+OHTsyMjIy6DIAWkmOtpvPorC6LHZ9aNeO7r7h4WEZ2nK9ylGDmECjbNy4Mdu3b88RRxyRDRs2ZM2aNYMuiS7ZvXt3duzYke3bt+fQQw8ddDkArSRH22n37t156qmn8sMf/jDf/e535ShAj8jRdvJZFFanup240XbPfOYzc//99+eZz3xmfuRHfiTr1q2Toy3R6xw1iAk0ykEHHZQkeeCBBzIxMdH17Y+Pj2d4eLjr22XKUv07NDSUQw89dM/zDEB3ydFmW6x/161bl/Xr1+eoo47K+vXr+1wZwOogR5ttsf71WRSgtw4++OCMjIzkO9/5Tr73ve/lqaee6vp9yNHeGlSOGsQEGueggw7q2QeLrVu35id+4id6sm30L0AdyNHm0r8AgydHm0v/AgzW+vXrc+SRR/Zs+97ne2tQ/dvRIGZRFCNJ3p7koiSHJLk7yVvKsrxjOXdWFMWnk7w8yXvLsnzDMmsFgEaSowBQnRwFgOrkKABNtl+Ht9uS5IokH0ny+iRPJ7m9KIpTO72joihekeTM5RYIAC2wJXIUAKraEjkKAFVtiRwFoKGWHMQsiuKkJBcmeVNZlm8qy/IPkpyd5FtJru7kToqiGE7yniTvWkGtANA4chQAqpOjAFCdHAWg6To5E/OCJBNJrp9ZUJblziQ3JDm9KIpNHWzj9Uk2JPndKkUCQIPJUahofGJy0CUAgydHAaA6OQpAo3VyTcwTktxTluXjc5bflWRNkuOTPLhQ46IoDkvy1iS/Upblk0VRVK0VAJpIjkJFw0Nrc+6Vt867buya8/pcDTAgchQAqpOjUMH4xGSGh9YOugwgnQ1ibkqyfZ7lMwF3+BLtfydJmal511ds27Zt3dhMtm7d2pXtMD/9u7TR0dGebLfbfb9UnW18rtv4mOpkFfZv63J0FT6HfaePp/QqK6vyvHRGP/XWKuxfOcqy6N8pnWToQn21kvzV/yunD3trFfZvbXLUd7rNsJr6d7G8a9oBtavpeVuKvuitQfRvJ4OYG5Lsmmf5zlnr5zU97/rFSc4qy3L38svb17HHHpuRkZEVbWPr1q21+1KsTfTvYPW779v2XNt/e6sb/btr166uffjpk1blqNdI7+nj+vK8LM3+21tydC9ylH3o3+XpRV/p/5WxD/eWHN1L33PUd7r1p3+by/M2xT7cW4PK0U6uibkjyXwJs37W+n0URbEmyXuT/GlZlp9fVlUA0B5yFACqk6MAUJ0cBaDROjkT88FMTT0w18yyBxZo938lOSnJfyyK4v+Ys+6g6WUPl2U5b1gCQEvIUQCoTo4CQHVyFIBG6+RMzK8k2VwUxYFzlp88/fvuBdodNb39zyb55qyfJLls+t9nLataAGgeOQoA1clRAKhOjgLQaJ2ciXlLkl9PcnmSa5OkKIqRTAXWnWVZPjC97Kgk+5dlec90u7Ek/zTP9v4syW1JbkjypZUUDwANIEcBoDo5CgDVyVEAGm3JQcyyLL9YFMXHk7yrKIpNSb6R5JIkz0ly6ayb3pipI3DWTLf7xvRt91IURZJ8oyzLT660eGBv4xOTGR5au+x1QO/IUQCoTo4CQHVyFICm6+RMzCS5OMlV078PSfLVJOeUZXlnrwoDlm94aG3OvfLWedeNXXNen6sBZpGjAFCdHIUGWOzA2V0TkxlxwC0MihwFoLE6GsQsy3JnkjdO/yx0mxd2uK01HVUGAC0hRwGgOjkKzbDUQbUOuIXBkKMANNl+gy4AAAAAAAAAYDaDmAAAAAAAAECtGMQEAAAAAAAAasUgJgAAAAAAAFArBjEBAAAAAACAWjGICQAAAAAAANSKQUwAAAAAAACgVgxiwgqNT0xWWtcGq/mxAwAAAAAAvbNu0AVA0w0Prc25V94677qxa87rczX9tZofOwAAQK+NT0xmeGjtstcBAEAbGMQEAAAAqCEHjgIAsJqZThYAAAAAAACoFYOYAAAAAAAAQK0YxAQAAAAAAFplfGJy0CUM1GKPf7X3Dc3hmpgAAAAAAECrrPZrS6/2x087OBMTAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAAAAAasUgJgAAAAAAQMOMT0wOugToqXWDLgAYvPGJyQwPrR10GQAAAAAAdGh4aG3OvfLWedeNXXNen6uB7jOICQg7AAAAAACgVkwnCwAAAAAAANSKQUwAAAAAAACgVgxiAgAAAAAAALViEBMAAABonfGJyUrrAACAelg36AIAAAAAum14aG3OvfLW
edeNXXNen6sBAACWy5mYAACwBGfzAAAAAPSXMzEBAGAJzuYBAAAA6C9nYgIAAAAAAAC1YhATVglT3QEAAAAAAE1hOllYJUyDBwAAAAAANIUzMQEAAAAAAIBaMYgJAAAAAAAA1IpBTAAAAAAAAKBWOromZlEUI0nenuSiJIckuTvJW8qyvGOJdv8uyb9P8uPT7R5M8tdJ3laW5X0rqBsAGkOOAkB1chQAqpOjADRZp2dibklyRZKPJHl9kqeT3F4UxalLtPuJJNuT/G6S/yfJHyX56SR/XxTFYVUKBoAG2hI5CgBVbYkcBYCqtkSOAnOMT0xWWgf9tuSZmEVRnJTkwiRXlGV57fSyG5NsS3J1kjMXaluW5Zvm2d6tSbYmeXWmQhAAWkuOQvuNT0xmeGjtstcBS5OjAFCdHAUWMjy0Nudeeeu868auOa/P1cDCOjkT84IkE0mun1lQluXOJDckOb0oik3LvM+Z6Qaescx2ANBEchRabubD33w/BjBhxeQoAFQnRwFotE6uiXlCknvKsnx8zvK7kqxJcnym5kRfUFEUPzp9X0cl+U/Tixeddx0AWkKOAkB1chQAqpOjADRaJ4OYmzI1//lcMwF3eAfb+HqSH5v+9/eS/GpZln/dQbt9bNu2rUqzfWzdurUr22F+q6l/R0dHF12/UF8s1a4NmrofNLXupliF/du6HF2Fz2Hf6eMpbcnK1fZ8rrbH22+rsH/lKMsyu3+rflark1593qzbdOdNeC76RV/01irs39rkqO90m6Ft/duWz5SD0NR9oal1N8Ug+reTQcwNSXbNs3znrPVLeWWSA5JsztSc6T/SUXXzOPbYYzMyMlK1eZKpjvYG1jv6d2+ruS+a+Njtv73Vjf7dtWtX1z789EmrctRrpPf0cfuspufT/ttbcnQvcpR9LLd/2/BcVH0MdbsOVhuei27wHtFbcnQvfc9R3+nWn/5ltibuC/bh3hpUjnYyiLkjyXwJs37W+kWVZfnfp/95e1EUn0yyrSiKx8uy/K+dlQkAjSVHgWVb7AyZqmfP9GKb0AdyFACqk6MANFong5gPZmrqgblmlj2wnDssy/KbRVFsTfLvkgg7ANpOjgLL1oszZOp21g10SI4CQHVyFIBG26+D23wlyeaiKA6cs/zk6d93V7jfDUkOrtAOAJpGjgJAdXIUAKqTowA0WieDmLckGUpy+cyCoihGklyW5M6yLB+YXnZUURSbZzcsiuJZczdWFMVokuOTuMIqAKuBHAWA6uQojTE+Mbngul2LrFusHcAKyVEAGm3J6WTLsvxiURQfT/Kuoig2JflGkkuSPCfJpbNuemOSs5KsmbXsvqIobk7yv5I8nuSYJL+Y5IdJrurGAwCaxzW5WE3kKABUJ0dpkqWm7TalN9BvchSApuvkmphJcnGmwuniJIck+WqSc8qyvHOJdtcl+akk5yfZP1PzsN+c5KqyLL9ZqWKg8VyTi1VIjgJAdXIUAKqTowA0VkeDmGVZ7kzyxumfhW7zwnmWLXh7AFgt5CgszBn4y7dYn+2amMxIhf40SwJ1JkcBoDo5CkCTdXomJgAAdJ2z85evF9MVeh4AAAAYBAfVshiDmAAAAAAd6MUXaav9yzlfXALA6uagWhZjEBMAAACgA734km21f3G32h8/AAAL22/QBQAAAAAAAADMZhATAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAem58YrLSOlavxfaLXfYnAIDWWzfoAgBmG5+YzPDQ2krrq64DAAB6b3hobc698tZ5141dc16fq6EJltpn7E8AAO1mEBOolcU+pCaLfxj1pQgAAAAAALSD6WQBAFiRpaZsM6UbAAAAAMvlTEwAAFZkJWfRAwAAAMB8nIkJAAAAQGssNguEGSIAAJrDmZgAAAAAtMZis0SYIQIAoDmciQkAAAAAAEBlZkKgF5yJCQDQMuMTkxkeWrvsdXRfG56Lpeqs+hib8vgBAABYmpkQ6AWDmAAALeODQ3204blY7DEkiz+ONjx+AAAAYDBMJwsAAAB0zFRhAABAPzgTE6b1Yrqz1TxN2mp+7AAA0GbOsgYAAPrBICZM68UH8dX84X41P3YAAAAAmMs141mt7N9UZRATAIAl+cABAACwMg76Z7Wy71OVQUwAAJbkAwcAAAAA/bTfoAsAAAAAWMj4xOSC63bNWjc6OtqPcgAAgD5xJiYAAABQW0vNBmCmAABot8Uub7JrYjIjLn3SNy41Q78ZxKR1hFq7CUoAAAAAWD0c0FQfLjVDvxnEpHWEWrsJSmA1WezAjaoHdfRimyu5z9VMvwAAAAAszCAmAEBN9eLAjUEcDOIAlPnpFwAAAICF7TfoAgAAAABYnvGJyUGXAAAAPeVMTAAAAICGcUY/AG3ichv0yyAus0N1BjEBakSIAgAAALDaODiHfrGvNYtBTIAaEaIAAAAAAOCamAAAAAAAAEDNGMQEAAAAumJ8YnLQJUDfLbbfe00AAFRnOlkAAACgK1wegdXIfg8A0BsdDWIWRTGS5O1JLkpySJK7k7ylLMs7lmj3yiS/kOSkJIcm+VaSsSTvKMvy+yuoG6CxxicmMzy0dtBl0EdyFACqk6MAUJ0cBaDJOj0Tc0uSVyW5Nsm9SS5NcntRFGeVZfmFRdr9QZIHktyUqaB7fpLXJXl5URQvKMtyZ8W6ARrLUbqr0pbIUQCoakvkKABUtSVyFICGWnIQsyiKk5JcmOSKsiyvnV52Y5JtSa5OcuYizS8oy/Jzc7a3NckfTW9zS6WqAaAh5CgAVCdHAaA6OUrdmJ2sGTxP1EknZ2JekGQiyfUzC8qy3FkUxQ1J3lkUxaayLB+cr+HcoJv2Z5kKu+ctv1wAaBw5CgDVydFVot9fli12f7smJjPiizvm8IUuDSVHqRWzkzWD54k66WQQ84Qk95Rl+fic5XclWZPk+CTzht0CDpv+/d1ltAGAppKjAFCdHF0l+v1l2VL354s75vKFLg0lRwFotE4GMTcl2T7P8pmAO3yZ9/nmJJNJPrHMdkmSbdu2VWm2j61bt3ZlO8xvkP07Ojrak+0u9Jh6dX90X6f75ezbbX7eMTlg//Xz3u6JJ3fmnn/42rzrFmu3Em1472rDY1im1uXoKnwO+26mj5fKmF5kk7yjitn7zex/V92HWdgq7DM5WkMygaar8jrsxd9XK70ty7cK+7c2Oeo73Sl1//u41/fvbwiq6HaO1v11WGeD6JtOBjE3JNk1z/Kds9Z3pCiKf5vk3yf5nbIsv9Fpu9mOPfbYjIyMVGm6x9atW71h9lBb+7eNj2m16eQ5nG//Xexo28W22YujdJu+H3bj/WHXrl1d+/DTJ63K0ba+x9fJcvq4F8+F55cqZvab5b5H2N+WR47uRY4ClfX7ddjp/XmP6C05upe+56jvdDszyMe3GvqXZup3jnodzG9QObpfB7fZkWS+hFk/a/2SiqI4I8kNSf48yVs7qg4Amk+O0hjjE5ODLgFgLjkKANXJUaC15n6HMXuAzfcb7dHJmZgPZmrqgblmlj2w1AaKoviJJJ9K8tUkv1CWpT0IgNV
CjtIYrvUE1JAcXYHxickMD61dcP2uicmMLLB+qbbQVIvt24u9JvptsTq9PlkGOQq0lu8wVodOBjG/kuT1RVEcOOci0CdP/757scZFUfyfSf4iySNJXlGW5ROVKgWAZpKjAFCdHF2Bxb7YSaa+3PHFD6vNUl941uU14YtZukSOAtBonUwne0uSoSSXzywoimIkyWVJ7izL8oHpZUcVRbF5dsOiKA5L8pdJnk7ysrIsv9utwgGWY7EpBKpOL9DvaQl68RjqdH8tJkcBoDo5CgDVyVEAGm3JMzHLsvxiURQfT/Kuoig2JflGkkuSPCfJpbNuemOSs5KsmbXsL5I8N8m7kpxeFMXps9Z9oyzLL6ysfIDO9OIo1n4fGdv2+2srOQoA1clRAKhOjgLQdJ1MJ5skFye5avr3IZmaA/2csizvXKLdT0z/ftM86/4oibADYDWQoy3Ri2sTzW03+0L0sFr1+zpgrjtWe3IUAKqTowA0VkeDmGVZ7kzyxumfhW7zwnmWrZnnpgCwqsjR9mjDWd3QBGYfYDY5CgDVyVEAmqyTa2ICAAAAAAAA9I1BTAAAAAAAAKBWDGJCB8YnJgddAj00+/l1LToAAAAAABi8jq6JCaud6yS1m+cXmG18YjLDQ2sHXQYAAAAArGoGMQEAZnFgAwCDsNhBNA6wAQAAViODmAAAADBgDqIBAADYm2tiAgAAAAAAKzY+MTnoEsB+2CLOxKSWlpouadfEZEZMpwQAAAAAUBtml6AO7IftYRCTWlrsTSaZeqPxJgRAVa4tBoNX9XXouoEAAACwOhjEBABWHUfkweBVfR16/dJkm593TNe3afAeAABoK4OYAAAA0AcH7L++64PwBvYBgH7PVuIgKvrFvoZBTAAAAAAAaKh+H9TkICr6xb7GfoMugGrGJyYrrQMAAAAAAIC6cyZmQzkCAQCAOpg9vc/o6OiAq1lYnaYh6vd0XwAAANBEBjEBAIDKmnJwXZ3qrFMtAAAAUFemkwUAAAAAgC5oy6W+R+gCdwAAIABJREFU2vI4gGZzJiYAAAAAAHRBW2bdaMvjAJrNmZg11oujXRbbpqNrAAAAAAAAqANnYtZYL452cQQNAPTf+MRkhofW1n6bAAAAAFAXBjEBAHrMgUkArMQgDlxxsAz03tzX2ejoaE+2O9uuicmMLLCu3wfeeZ+h7tqy/7blcUA/eL3Uj0FMAAAAqLFBHLjiYBnovV69zpbabj9f295LaLK27L9teRzQD14v9eOamAAAAAAAAECtGMRkj/GJyQXX7Vpk3WLtqq4D+sPrEAAAAACAOjKdLHv0YroRp19DvXmN0mauVQCrT79f966XAgBAtyz196O/L6G+BvH6XS2fRw1iAgCtZJAeVp9+v+69zwCwlH5/adkri93nronJjLTki1IYpMX+tkz8fQl1NojX72r5PGoQEwAAAAB6oBdfMNbxi9LV8CUqANB/rokJAAAAAAAA1IpBTAAAAAAAAKBWDGKyYuMTk3v9f3R0dECVwOo193U4265F1vXCYrVsft4xfayEXljsOVzsuQegM3IUAID5+MwN7bTYa9vr3jUx6YLVcgFZqLM6XZ/Ee0K7HbD/es8vQA/JUQCY+tJ2eGjtstdBm/k7EdrJa3txBjEBAAAAgNrwhS4AkJhOFgAAAAAAAKgZg5gAAAAAAKw6Va8315br1M1+HKOjowOsBNqtLe8Zg9DRdLJFUYwkeXuSi5IckuTuJG8py/KOJdqdlOTSJCcneX6SobIs16yk4F6rOue+ufqXT78Aq8VqylEA6DY5CgDVydHFVZ26uC1THrflcUDdea1V1+k1MbckeVWSa5Pcm6kAu70oirPKsvzCIu3OSfIfknw1yTeSbK5caZ+s9uDqJ30GrCJbskpytKrFDmzZNTGZkQrr+s3BOcBs3hO6akvkKABUtSVyFICGWnIQc/qomwuTXFGW5bXTy25Msi3J1UnOXKT5B5JcXZbljqIoro2wA2CVkaOdWerAlqrr+snBOcBs3hO6Q44CQHVyFICm6+SamBckmUhy/cyCsix3JrkhyelFUWxaqGFZlg+XZbljxVUCQHPJUQCoTo4CQHVyFIBG62QQ84Qk95Rl+fic5XclWZPk+K5XBQDtIUcBoDo5CgDVydFMTfMPsFLeSwajk2tibkqyfZ7lD07/Prx75Sxt27ZtXdnO1q1b510+Ojra13abn3dMDth//aJtq6haJ0Cnuv1+2GKtytFePb/yCaizOv1tLUf3aGWOAgxavzOv358r5egefc/RXn+nu5jR0VHT/AMrNqhLhtTpO9hB5Ggng5gbkuyaZ/nOWev75thjj83IyMiKtrF169bKf3z1ol0vdnwfjoFe6/f76Ixdu3Z17cNPn7QuRxcjf4A2qtN7mxzdo5U5CjBo/c68fn6uXMn3gTPkaHWD/k4XoKkG9R3sXIPK0U6mk92RZL6EWT9rPQAwPzkKANXJUYA+6fc0eVXvb7F2pvrbhxwFoNE6ORPzwUxNPTDXzLIHulcOALSOHAWA6uQoQJ/0e5q8qvc3qOn8GkqOAjTY+MRkhofW9m2buyYmM7LAus3PO6ardXSqk0HMryR5fVEUB865CPTJ07/v7n5ZANAachQAqpOjAFCdHAVosF4cuLPUNut2oFAn08nekmQoyeUzC4qiGElyWZI7y7J8YHrZUUVRbO5JlV02qBFjgLYwRc+ytC5HAaCP5CgAVCdHAWi0Jc/ELMvyi0VRfDzJu4qi2JTkG0kuSfKcJJfOuumNSc5KsmZmQVEUz0ly0fR/T5pe9lvT/7+7LMuxlT6AKg7Yf33tRpMBmsT0PZ1rY45W1YspMABoNzkKANXJUQCarpPpZJPk4iRXTf8+JMlXk5xTluWdS7T7V9PtZpv5/x8lEXYArAZyNAa/AahMjgJAda3J0cVm11vsOm4Aq02bTiToaBCzLMudSd44/bPQbV44z7LPZdYRPACwGslRAKhOjgJAdW3K0aVm13PQLMCUNp1I0Mk1MQEAAAAAAAD6xiAmAAAAAAAAUCsGMQEAAAAAAIBaMYgJAAAAADTe+MTkgus2P++YPlYCAHTDukEXAAAAAACwUsNDa3PulbfOu27smvP6XA0AsFLOxAQAAAAAAABqxSAmAAAAAAAAUCsGMQGArljs+jMATeW9DQAAAAbDNTEBgK5Y7PoziWvQAM3k2loAAAAwGM7EBAAAAAAAAGrFICYAAAAAAABQKwYxAQAAAAAAgFoxiLkM4xOTgy6hI02pEwAAAAAAAOazbtAFNMnw0Nqce+Wt864bu+a8PlezsKbUCQAAAAAAAPNxJiYAAAAAAABQKwYxAQAAAAAAgFoxiAkAAAAAAADUikHMPhifmBx0CQAAAAAAANAY6wZdwGowPLQ2515567zrxq45r8/VAAAAAAAAQL05ExMAAAAAAACoFYOYAAAAAAAAQK0YxAQAAAAAAABqxSAmAAAAAAAAUCsGMQEAAAAAAIBaMYgJAAAAAAAA1IpBTAAAAAAAAKBWDGICAAAAAAAAtWIQEwAAAAAAAKgVg5gAAAAAAABArR
jEBAAAAAAAAGrFICYAAAAAAABQKwYxAQAAAAAAgFoxiAkAAAAAAADUyrpOblQUxUiStye5KMkhSe5O8payLO/ooO0RSd6T5KWZGjT9bJIryrL8ZtWiAaBJ5CgAVCdHAaA6OQpAk3V6JuaWJFck+UiS1yd5OsntRVGculijoigOTPLXSc5I8s4kv53kxCSfK4rikIo1A0DTbIkcBYCqtkSOAkBVWyJHAWioJc/ELIripCQXZuoom2unl92YZFuSq5OcuUjzX07yr5OMlmX55em2t0+3vSLJf1pR9QBQc3IUAKqTowBQnRwFoOk6ORPzgiQTSa6fWVCW5c4kNyQ5vSiKTUu0/buZoJtue0+SO5L8fKWKAaBZ5CgAVCdHAaA6OQpAo3VyTcwTktxTluXjc5bflWRNkuOTPDi3UVEU+yU5LskfzLPNu5K8pCiK/cuyfLLDWtcmyfj4eIc3X9wzDlg77/Jdu3ZZV/N1davHOs+9dctbt1KzcmDhHaVeWpejXr/WLWdd3eqxzvPb9HUrJUf3tG1Ujnr9WtfNdXWrxzrPbz/XrZQc3dN2OTnatQxN6pOjXtvWdXNd3eqxzvO70LqVqpKja3bv3r3oDYqi2JZke1mWL5uz/MeTfC3J5WVZ3jBPu2cm+U6S/1iW5e/MWffLSa5L8q/LsvxGJ4Vu3br19CT/o5PbArAqnDE6Ovr5QRexFDkKQE3JUTkKQHVytMMclaEAzKPjHO3kTMwNSeYbYt05a/1C7VKx7Xz+PlMXkn4wyeQy2gHQLmuTbMpULjSBHAWgTuSoHAWgOjm6/ByVoQDMWHaOdjKIuSPJyDzL189av1C7VGy7j9HR0V1Jan+EEwB90dFZEzUhRwGoGzm6eNt9yFEAZpGji7fdiwwFYI5l5eh+HdzmwUyNjM41s+yBBdo9mqmjdRZquzvzzLkOAC0jRwGgOjkKANXJUQAarZNBzK8k2VwUxYFzlp88/fvu+RqVZfl0kv+V5AXzrD45yT92ePFnAGgyOQoA1clRAKhOjgLQaJ0MYt6SZCjJ5TMLiqIYSXJZkjvLsnxgetlRRVFsnqftKUVRnDCrbZHk7CQfX2HtANAEchQAqpOjAFCdHAWg0dbs3r17yRsVRXFzkvOTvCdT89VekuQnk7yoLMs7p2/zuSRnlWW5Zla7H0ny5SQHJLkmyVNJfi3JmiTHl2X5vW4+GACoIzkKANXJUQCoTo4C0GSdnImZJBcnee/07/dl6giec2aCbiFlWf4wyQszdfHmtya5KlPTGJwl6ABYReQoAFQnRwGgOjkKQGN1dCYmAAAAAAAAQL90eiYmAAAAAAAAQF8YxAQAAAAAAABqZd2gCxikoihenOTVSU5L8uwkDya5I8l/KsvyoUHW1gZFURRJ/u8kJyc5Icn6JP+qLMt/GmRdTVQUxUiStye5KMkhSe5O8payLO8YaGEtURTFpiSvz9S++oIkB2bqAvefG2RdbVAUxU8muTTJi5I8J8n3kvxtkt8qy/LeAZZGF8jR3pKj3SNHe0uO9o4cbTc52ltytHvkaO/I0N6So+0mR3tLjnaHDO0tOdpbdcnR1X4m5tVJzkryZ0lel+RPklyY5EtFUWwcZGEtcWqm+vWgJP8w4FqabkuSK5J8JFNvzE8nub0oilMHWVSLFEnenKk/er864Fra5s1JXpnkrzK17/5Bkhcm+XJRFM8bYF10hxztLTnaPVsiR3tJjvaOHG03OdpbcrR7tkSO9ooM7S052m5ytLfkaHdsiQztJTnaW7XI0VV9JmaSX0vy+bIsn55ZUBTFXyT5myS/nORtA6qrLT6V5BllWf6wKIo3ZOqoHZapKIqTMvVH2BVlWV47vezGJNsy9QfbmQMsry22JnlmWZbfK4ri/Ez9AUx3/F6Sf1uW5fjMgqIo/iTJ/8pUEF46oLroDjnaW3K0C+RoX8jR3pGj7SZHe0uOdoEc7TkZ2ltytN3kaG/J0RWSoX0hR3urFjm6qs/ELMvyv88OupllSR5N4oisFSrL8tGyLH846Dpa4IIkE0mun1lQluXOJDckOX36tHlWoCzLH5Zl+b1B19FGZVn+7eygm172j0m+Fu+zjSdHe0uOdo0c7TE52jtytN3kaG/J0a6Roz0kQ3tLjrabHO0tOdoVMrTH5Ghv1SVHV/Ug5nyKojgwU3Mnf3fQtcC0E5LcU5bl43OW35VkTZLj+18SVFcUxZokh8b7bCvJUWpIjtIqcrTd5Cg1JEdpFTnabnKUmpGhtM4gctQg5r7ekGQ4yc2DLgSmbcrUxcnnmll2eB9rgW74d0mOiPfZtpKj1I0cpW3kaLvJUepGjtI2crTd5Ch1IkNpo77naGuuiVkUxX6ZCqklTZ+2Pd82zkzy20n+37Is/6aL5TVeN/qXyjYk2TXP8p2z1kMjFEWxOcl1ST6f5KYBl8MscrS35OhAyVFaQ47WlxztLTk6UHKU1pCj9SVHe0uODowMpVUGlaNtOhPzzCQ7OvkpiuKZcxtPPwF/luTuJP+hTzU3yYr6lxXZkWRknuXrZ62H2iuK4rAkf57kn5P83NxrVzBwcrS35OjgyFFaQY7WnhztLTk6OHKUVpCjtSdHe0uODoYMpTUGmaOtORMzyT1JLuvwtntdlLgoiiOT/GWSx5K8oizLJ7pcWxtU7l9W7MFMTT8w18yyB/pYC1RSFMXBSW5PcnCS08qyfGjAJbEvOdpbcnRw5CiNJ0cbQY72lhwdHDlK48nRRpCjvSVHB0OG0gqDztHWDGJOd9yW5bYriuLHMhV0I0nOLsvy4S6X1gpV+5eu+EqS1xdFceCcC0GfPP377gHUBB0rimJ9krEkRyd5cVmW5YBLYh5ytLfk6EDJURpNjjaDHO0tOTpQcpRGk6PNIEd7S44OjAyl8eqQo22aTnbZiqI4IMmnM3Uh0nPKsrx3wCXBfG5JMpTk8pkFRVGMZOoIqjvLsnTUDrVVFMXaJH+S5NRMTTXwdwMuiS6SozSEHKWx5Gi7yVEaQo7SWHK03eQoDSBDabS65Oia3bt3D+J+a6Eoik8mOS/Jh5L89ZzVD5dl+Zn+V9Ue06cZv3b6v6cmOSfJNZma3uG+sixdRL1DRVHcnOT8JO9J8o0klyT5ySQvKsvyzkHW1hZFUfzW9D+fl+TfZup94ZtJHivL8r8OrLCGK4ri2iSvz9QROzfPWf14WZaf7H9VdIsc7S052j1ytPfkaG/I0XaTo70lR7tHjvaWDO0dOdpucrS35Gh3yNDek6O9U5ccbc10shUdP/37F6d/ZvubJMJuZQ5JctWcZVdO//6bJMKucxdnqi8vzlS/fjVTR5kJu+6Zu6/OvCfcl0TgVTfzPnvu9M9s9yXxobHZ5GhvydHukaO9J0d7Q462mxztLTnaPXK0t2Ro78jRdpOjvSVHu0OG9p4c7Z1a5OiqPhMTAAAAAAAAqJ9VfU1MAAAAAAAAoH4MYgIAAAAAAAC1YhATAAAAAAAAqBWDmAAAAAAAAECtGMQEAAAAAAAAasUgJgAAAAAAAFArBjEBAAAAAACAWjGICQAAAAAAANSKQUwAA
AAAAACgVgxiAgAAAAAAALViEBMAAAAAAACoFYOYAAAAAAAAQK0YxAQAAAAAAABqxSAmAAAAAAAAUCsGMQEAAAAAAIBaMYgJAAAAAAAA1IpBTAAAAAAAAKBW1nVyo6IoRpK8PclFSQ5JcneSt5RleccS7d6W5LfnWfVwWZaHLa9UAGgmOQoA1clRAKhOjgLQZB0NYibZkuRVSa5Ncm+SS5PcXhTFWWVZfqGD9r+U5MlZ/9+xjBoBoOm2RI4CQFVbIkcBoKotkaMANNSSg5hFUZyU5MIkV5Rlee30shuTbEtydZIzO7ifm8uyfGwlhQJAE8lRAKhOjgJAdXIUgKbr5JqYFySZSHL9zIKyLHcmuSHJ6UVRbOpgG2uKojioKIo11coEgMaSowBQnRwFgOrkKPz/7N17lCVlfS/87zB0zwwglygjA4KJnkVBmAhkCMgBxEsiXhYLVGIw4XpC1FdfL4jmmKM5JwdkuSSi4Bu8xctEMPFCiDgcyTEBTSJeMEOAjEKpLANxAFEJIsww3TTz/tHdk56Z7t27q/elau/PZ61ePVPPrtq/Xbt2fXvXU/UU0GjtDCd7ZJI7y7J8ZIfpNydZkuSIJPfNs4x7kuyR5BdFUVyd5K1lWT64kELXr1+/LMlvTD3XxELmBWCgLE2yKsm316xZs6XfxbRBjgJQJ3JUjgJQnRxdYI7KUABmWHCOttOJuSrJxlmmTwfc/i3m/Y8k/1+SbyYZS/L8TI6j/utFURxTluVCwv43kvzTAh4PwGA7IcnX+l1EG+QoAHUkR+UoANXJ0fZzVIYCsKO2c7SdTswVSWYLpcdmtM+qLMvLd5h0dVEUG5JckeSsJH/eTpFT7kuSgw8+OKOjowuYbWcbNmzI6tWrF7UM5mb9dsZ5F//drNM/9o7fatnG4th+u6sT63dsbCzf+973kvnPFq2LgcpRn5Hus447o2qOytjFsf12lxzdjhxlJ9Zv58jK/rANd5cc3U4vc9Qx3YawfjujSobO1y5j22Mb7q5+5Wg7nZibkyybZfryGe0L8eEkf5rkBVnYl8aJJBkdHc2yZbOVszCdWAZzs34X76FHZx9hY9myZS3bWDzrsbs6uH6bMgzNwOWoz0j3WcfzGxufyOjI0jnbq+boXG1Ldtl1zuebr5ZhY/vtLjm6jRxlVtZvZ/g+2j/WY3fJ0W16maOO6TaI9bt4VTJ0vnbvS/usq+7qR46204l5XyaHHtjR9LR7232yJCnL8omiKDYm+aWFzAcADSVHoQtGR5bm5AuunbVt3aWnNP75gG3kKABUJ0cBaLRd2njMrUkOKYpijx2mHzP1+7aFPGFRFCNJDkzyk4XMBwANJUcBoDo5CgDVyVEAGq2dTsyrk4wkOW96QlEUy5Kcm+SmsizvnZp2UFEUh8ycsSiKfWdZ3tsyOWTB/61aNAA0iBwFgOrkKABUJ0cBaLR5h5Mty/JbRVF8PsklRVGsSnJXkrOTPD3JOTMe+qkkJyZZMmPa3UVRfCbJhkzeRPp5SV6R5GtJ/rITLwAA6kyOAkB1chQAqpOjADRdO/fETJKzklw09XufJLcneUlZljfNM9+nkxyX5LeTjCb5t6nlvLssy8erFAwADSRHAaA6OQoA1clRABqrrU7Msiwfy+RwAW9r8ZjnzjLtDypXBjCHhx9+OA888EDGx8c7vuxdd901d9xxR8eXy6T51u/IyEhWrlyZPffcs4dVdZ8cBepEjjZXq/W76667Zvny5dl3332zfPnyHlfWXXIU6LZuZuNCyNHuarV+B/W7aCJHge6To8OhXzna7pWYALXw8MMP58c//nEOOOCArFixIkuWLJl/pgV49NFHs/vuu3d0mfynVut369at2bx5czZu3JgkA/nlEaDf5GizzbV+t27dmscffzyPPPJI7rnnnjz1qU/NXnvt1YcKAZqn29m4EHK0u1rlqO+iANXI0eHRrxzdpaNLA+iyBx54IAcccEB22223voYinbdkyZLstttuOeCAA/LAAw/0uxyAgSRHB9OSJUsyMjKSffbZJ0972tPys5/9rN8lATSGbMR3UYDq5CjdzlGdmECjjI+PZ8WKFf0ugy5asWJF34efABhUcnTwrVixIlu2bOl3GQCNIRuZ5rsowMLJUaZ1K0d1YgKN46yeweb9Begu+9nB5v0FWDj7ThLbAUBV9p8k3dsOdGICAAAAAAAAtaITEwAAAIBFGRufqNQGAABz0YkJQL71rW+lKIp861vf6ncpANA4chQgGR1ZmpMvuHbWn9GRpf0ujx6TjQBQnRz9TzoxAWrmYx/7WIqiyNlnn93vUhblgQceyDvf+c48//nPz+GHH54XvvCF+cAHPpCHH36436UBMMAGJUfvuuuuvO51r8tRRx2VI488MmeffXa++93v9rssABpoELLx0UcfzQc+8IH8/u//fo4++ugURZFrrrlmzsffcsstedWrXpXDDz88xx13XN71rndl8+bNPawYgEExbDn6pS99KW9961tz0kknpSiKnHnmmT2udns6MYGB0KnhiXbfffe+17Bu3boccMABufnmm/PAAw90ZJm9tmnTppx++un5+7//+7zsZS/LO9/5zhx33HG56qqr8trXvrbf5QGwAzlaLz/60Y/yqle9KrfffnvOO++8vPnNb85DDz2UV7/61fnBD37Q7/IAhkavh8GdLUc7UcMgZON//Md/5Iorrshdd92VQw45pOVj77jjjpxzzjnZsmVL3v72t+e0007LZz/72bz97W/vUbUAJHK0ThaSo3/1V3+VG264IStXrszee+/dowrntmu/CwDohOmhi/pp3aWnLHoZP/jBD3LnnXfmIx/5SN7ylrfkS1/6Us4555zFF9djX/3qV7Nx48Z85CMfyXOf+9xt05cuXZorr7wy//7v/54DDzywfwUCsB05Wi9//ud/nk2bNuXzn/98nv70pydJXvnKV+ZFL3pR3ve+9+WDH/xgnysEGA6DkI+Dko0rV67MP/3TP2XlypW54447cuqpp8752Pe9733Ze++9c+WVV247oP20pz0t73znO/ONb3wjxx57bK/KBgbQ2PiEYdLbJEfrYyE5eskll2TlypVZunRpTjll8d/TF8uVmAA1sm7dujzlKU/JCSeckBe84AX54he/uNNjpsdE//KXv5wrrrgiJ5xwQn7t134tZ599du6+++6dHv/pT386L3jBC/KsZz0rp512Wv75n/85Z555ZltDAdxyyy0599xz8+u//us54ogjcs4552TDhg3zzvfII48kSZ785CdvN336/8uXL593GQCwUIOSo7fccktWr169rQMzSVasWJHnPOc5+cd//MdtOQsA8xmUbBwdHc3KlSvnfdwjjzySr3/96zn11FO3uyLnlFNOyW677Zbrr79+3mUAtNLqHtAMnmHL0SRZtWpVli6tT0e9TkyAGrnuuuty0kknZenSpXnpS1+a73znO/nhD38462M/9KEP5cYbb8x5552XV7/61bntttvy1re+dbvH/OVf/mUuvPDC7L///nnb296Wo446Kq9//etz//33z1vL17/+
9Zx11lkZGxvLG9/4xrzpTW/K/fffnzPOOGPeoezWrFmTXXbZJRdffHFuvfXW3H///bnxxhtz1VVX5eUvf3n23Xff9lcKALRpUHJ0bGwsy5Yt22n68uXLMz4+nu9///vzPj8AJIOTje0qyzKPP/54Vq9evd300dHRHHzwwbnjjjs68jwADIdhy9E6MpwsQE3ccsst+dGPfpSXvOQlSZLjjjsue+21V9atW5c3vvGNOz3+8ccfzzXXXJORkZEkyV577ZWLL7443/ve93LwwQdnbGwsl19+eY444oh88pOfzK67Tu7yi6LI29/+9uy3335z1vLEE0/kT/7kT3L88cfnwx/+8Lbpp512Wl784hfniiuuyPvf//4553/mM5+ZCy+8MJdcckl+53d+Z9v0l7/85bn44osXtmIAoA2DlKO/8iu/kn/5l3/Jpk2bsttuu22bfuuttyZJY+/DAkBvDVI2tusnP/lJksx64uy+++6bf/3Xf130cwAwHIYxR+vIlZgANXHdddflqU99atasWZMkGRkZyW/91m/luuuum/Xxr3jFK7aFYpIcddRRSZJ///d/T5Js2LAhDz30UF75ylduC8UkOfnkk7PXXnu1rOXOO+/M3XffnZe+9KV58MEHt/2Mj49nzZo1ufnmm+d9Pfvtt18OP/zwvOMd78gVV1yRc889N1/84hcHNlAB6K9BytFXvepVeeihh/KWt7wl3/3ud/PDH/4wF198cb773e8mSR577LF51gYADFY2tms6I0dHR3dqGx0dlaEAtG0Yc7SOXIkJUAOPP/54rr/++hx33HG55557tk0/4ogjcvXVV+f222/Ps571rO3mWbVq1Xb/33PPPZMkDz/8cJLk3nvvTZLt7qeVJLvuumsOOOCAlvX827/9W5LsNOTBtF12aX0OzPr16/Pa1742V199dQ499NAkyW/+5m9mdHQ0H/3oR/Oyl70sz3jGM1ouAwDaNWg5euKJJ+aP//iPc+mll+ZlL3vZtjpe//rX5/LLL9/uHl8AMJtBy8Z2LV++PMnk0Ow7Ghsb29YOAK0Ma47WkU5MgBq46aab8uCDD2bdunVZt27dTu3r1q3bKRjnusHy1q1bF13P9DL+6I/+KAcffPCC5//sZz+blStXbuvAnHbiiSfmIx/5SG699VadmAB0zKDlaJKcccYZefnLX56yLDMyMpJDDz00n/70p5Ps/KUXAHY0iNnYjulhZKeHlZ3pJz/5SVauXNm15wZgcAxrjtaRTkyAGli3bl3222+//NEf/dFObddee22uv/5rWxkpAAAgAElEQVT6vP3tb58zDGez//77J0nuvvvubcMXJJNnEm3cuDFFUcw574EHHphk8oyh//pf/2vbzzntZz/7WSYmJnaa/vjjjyfJrG0AUNWg5ei03XbbLUceeeS2/998883Zd99988xnPrPyMgEYDoOajfM5+OCDs+uuu2bDhg154QtfuG362NhYvve97+Xkk0/u2nMDMDiGNUfrSCcmQJ9t3rw5N9xwQ0455ZS86EUv2ql9+fLlufHGG/PNb34zxx13XNvLXb16dfbee+987nOfyymnnLJtrPV169bl5z//ect5DzvssBx44IH5xCc+kRe/+MVZsWLFdu0PPvhgfumXfmnO+X/5l385X/va1/LP//zP24Xy3/7t3ybJTldoAkBVg5ijs7nlllty44035s1vfvNADxUEwOINSzbO5klPelKOPfbYXHvttXnNa16zbQj2a6+9Nps2bZp1fQDATMOco3WkExMGyNj4REZHZj/7oxttdMYNN9yQTZs25XnPe96s7c9+9rOzfPnyrFu3bkHBODo6mje84Q256KKLcu655+akk07Kxo0bc8011+Sggw5qOe/SpUtz0UUX5dWvfnVOPvnknHrqqVm5cmXuv//+3HTTTTnooIPyp3/6p3PO/3u/93u55ppr8prXvCZnnHFGVq1alW9/+9u57rrrcsIJJ2T16tVtvw4AaGUQc/See+7JBRdckOc///l5ylOeku9///v57Gc/myOPPDLnnHNO268BgOE0iNmYJFdddVUefvjh/PSnP02SfOUrX8n999+fJHnd61637XHnn39+Tj/99Jx55pn57d/+7dx///355Cc/meOOO27orl4BYOGGPUe//e1v59vf/naSydH2fvGLX+SDH/xgxsfHc9JJJ+WQQw5p+zV3gk5MGCCjI0tz8gXXztq27tJTOj5fnYyNT/S91qodvtddd11WrFiRZz/72bO2L1++PMcee2z+7u/+Lv/7f//vBS37jDPOyNatW/PJT34y73nPe3LIIYfkQx/6UN71rndl2bJlLec99thj85nPfCZXXHFFrrzyymzatCkrV67MkUcemdNPP73lvM94xjPy13/917nsssty7bXX5qc//WlWrlyZs88+O295y1sW9BoA6D45Ort+5eiTnvSkPOUpT8lVV12Vn//859l///3zB3/wB/nd3/3djI6OLug1AFBdU/NxELMxST7xiU9k48aN2/7/5S9/OV/+8peTbH/w9bDDDssnP/nJvPe978273/3u7LHHHnnlK1+Z1772tQt6rQAsjhzdWRNy9Jvf/Gb+7M/+bLt5L7/88iSTw9rqxASooFNXiz766KPbhpvpVQ0f/vCHF/SYY445JmVZ7vSYpz3tabNOP/PMM3PmmWdu+/8TTzyRjRs35ld/9VfnXeZhhx2WD37wg/PWN5tnPOMZ+cAHPrDdtEcffTTLly+vtDwAukeO1itH99lnn3zoQx/aafqjjz664GUBUF2vRyWaLUer1DCI2ZgkN954Y9uPPeqoo/KZz3xmu2lyFKC35Ggzc/QNb3hD3vCGN+w0fTHf9xfDzVQABtiWLVt2mvaFL3whDz30UI4++ug+VAQAzSFHgX4bG5+o1AbdIhsBoDo5unCuxAQYYOvXr8973/vevPCFL8zee++d7373u7n66qtz8MEHz3pjagDgP8lRoN8G4dYfDBbZCADVydGF04kJMMAOPPDArFy5MldeeWV+/vOfZ6+99sqpp56aCy64wD21AGAechQAticbAaA6ObpwOjEBBtiBBx7Y1jjuAMDO5CgAbE82AkB1cnTh3BMTAAAAAAAAqBWdmAAAsAhj4xOV2gAAAACYm+FkgcbZunVrlixZ0u8y6JKtW7f2uwSABRkdWZqTL7h21rZ1l57S42rmJ0cHmxwFWDjZSCJDAaqSoyTdy1FXYgKNMjIyks2bN/e7DLpo8+bNGRkZ6XcZAANJjg6+zZs3Z9myZf0uA6AxZCPTfBcFWDg5yrRu5ahOTKBRVq5cmY0bN2bTpk3OkhwwW7duzaZNm7Jx48asXLmy3+UADCQ5Opi2bt2a8fHxPPjgg/nRj36UJz/5yf0uCaAxZCO+iwJUJ0fpdo4aThZolD333DNJcu+992Z8fLzjyx8bG8vo6GjHl8uk+dbvyMhInvrUp257nwHoLDnabK3W76677prly5fnoIMOyvLly3tcGUBzdTsbF0KOdler9eu7KFBnY+MTGR1ZuuC2XpCjw6NfOaoTE2icPffcs2t
[... base64-encoded PNG image data for this notebook output cell omitted ...]\n",
+ "text/plain": [
+ "